git.lizzy.rs Git - rust.git/commitdiff
Auto merge of #49981 - nox:fix-signed-niches, r=eddyb
author bors <bors@rust-lang.org>
Wed, 18 Apr 2018 12:22:11 +0000 (12:22 +0000)
committer bors <bors@rust-lang.org>
Wed, 18 Apr 2018 12:22:11 +0000 (12:22 +0000)
Properly handle ranges of signed enums using both extremums (fixes #49973)

Fixes #49973.
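
For context, the layout bug concerns signed enums whose explicit discriminants reach both ends of their `repr` type, so the valid discriminant range spans from the type's minimum to its maximum. Below is a minimal sketch of that shape of enum; the names are illustrative and not taken from the commit's test suite.

```rust
// Illustrative only: an enum whose signed discriminants use both extremums
// of the repr type (i8::MIN and i8::MAX). Names are hypothetical and not
// from this commit.
#[repr(i8)]
#[derive(Debug, Clone, Copy)]
enum Extreme {
    Min = -128, // i8::MIN
    Max = 127,  // i8::MAX
}

fn main() {
    let x = Extreme::Min;
    println!("{:?} as i8 = {}", x, x as i8);
    // Niche-based layout optimizations must handle a valid discriminant
    // range that covers the whole signed type when encoding Option.
    println!("size_of::<Extreme>() = {}", std::mem::size_of::<Extreme>());
    println!(
        "size_of::<Option<Extreme>>() = {}",
        std::mem::size_of::<Option<Extreme>>()
    );
}
```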

334 files changed:
CONTRIBUTING.md
config.toml.example
src/Cargo.lock
src/bootstrap/builder.rs
src/bootstrap/check.rs
src/bootstrap/compile.rs
src/bootstrap/config.rs
src/bootstrap/configure.py
src/bootstrap/dist.rs
src/bootstrap/doc.rs
src/bootstrap/install.rs
src/bootstrap/native.rs
src/bootstrap/sanity.rs
src/bootstrap/test.rs
src/bootstrap/tool.rs
src/bootstrap/util.rs
src/ci/docker/wasm32-unknown/Dockerfile
src/doc/rustdoc/src/command-line-arguments.md
src/doc/rustdoc/src/documentation-tests.md
src/liballoc/boxed.rs
src/liballoc/btree/map.rs
src/liballoc/linked_list.rs
src/liballoc/raw_vec.rs
src/liballoc/str.rs
src/liballoc/string.rs
src/liballoc/vec.rs
src/liballoc/vec_deque.rs
src/libarena/Cargo.toml
src/libarena/lib.rs
src/libcore/alloc.rs
src/libcore/fmt/mod.rs
src/libcore/iter/iterator.rs
src/libcore/lib.rs
src/libcore/ops/range.rs
src/libcore/panic.rs
src/libcore/str/pattern.rs
src/libcore/sync/atomic.rs
src/libcore/tests/lib.rs
src/libpanic_abort/lib.rs
src/libpanic_unwind/lib.rs
src/librustc/hir/map/mod.rs
src/librustc/hir/mod.rs
src/librustc/ich/impls_cstore.rs
src/librustc/ich/impls_ty.rs
src/librustc/infer/error_reporting/mod.rs
src/librustc/infer/mod.rs
src/librustc/lint/context.rs
src/librustc/middle/cstore.rs
src/librustc/mir/interpret/error.rs
src/librustc/mir/interpret/mod.rs
src/librustc/mir/mod.rs
src/librustc/session/config.rs
src/librustc/session/mod.rs
src/librustc/traits/mod.rs
src/librustc/traits/structural_impls.rs
src/librustc/ty/context.rs
src/librustc/ty/item_path.rs
src/librustc/ty/layout.rs
src/librustc/ty/maps/job.rs
src/librustc/ty/maps/mod.rs
src/librustc/ty/maps/on_disk_cache.rs
src/librustc/ty/maps/plumbing.rs
src/librustc/ty/structural_impls.rs
src/librustc/ty/sty.rs
src/librustc_allocator/expand.rs
src/librustc_data_structures/array_vec.rs
src/librustc_driver/driver.rs
src/librustc_driver/lib.rs
src/librustc_driver/test.rs
src/librustc_errors/emitter.rs
src/librustc_errors/lib.rs
src/librustc_incremental/persist/save.rs
src/librustc_metadata/creader.rs
src/librustc_metadata/decoder.rs
src/librustc_metadata/encoder.rs
src/librustc_metadata/schema.rs
src/librustc_mir/borrow_check/borrow_set.rs [new file with mode: 0644]
src/librustc_mir/borrow_check/error_reporting.rs
src/librustc_mir/borrow_check/flows.rs
src/librustc_mir/borrow_check/mod.rs
src/librustc_mir/borrow_check/nll/explain_borrow/mod.rs
src/librustc_mir/borrow_check/nll/mod.rs
src/librustc_mir/borrow_check/nll/type_check/mod.rs
src/librustc_mir/borrow_check/nll/universal_regions.rs
src/librustc_mir/borrow_check/place_ext.rs [new file with mode: 0644]
src/librustc_mir/dataflow/impls/borrows.rs
src/librustc_mir/dataflow/impls/mod.rs
src/librustc_mir/dataflow/mod.rs
src/librustc_mir/dataflow/move_paths/mod.rs
src/librustc_mir/interpret/const_eval.rs
src/librustc_mir/interpret/eval_context.rs
src/librustc_mir/interpret/step.rs
src/librustc_mir/lib.rs
src/librustc_mir/monomorphize/collector.rs
src/librustc_resolve/build_reduced_graph.rs
src/librustc_resolve/lib.rs
src/librustc_resolve/resolve_imports.rs
src/librustc_save_analysis/lib.rs
src/librustc_traits/lowering.rs
src/librustc_trans/attributes.rs
src/librustc_trans/debuginfo/metadata.rs
src/librustc_trans/debuginfo/mod.rs
src/librustc_trans/lib.rs
src/librustc_trans/llvm_util.rs
src/librustc_trans/mir/constant.rs
src/librustc_trans_utils/trans_crate.rs
src/librustc_typeck/check/demand.rs
src/librustc_typeck/collect.rs
src/librustc_typeck/lib.rs
src/librustdoc/clean/mod.rs
src/librustdoc/core.rs
src/librustdoc/html/layout.rs
src/librustdoc/html/render.rs
src/librustdoc/html/static/main.js
src/librustdoc/lib.rs
src/librustdoc/markdown.rs
src/librustdoc/test.rs
src/libstd/collections/hash/table.rs
src/libstd/ffi/os_str.rs
src/libstd/lib.rs
src/libstd/macros.rs
src/libstd/panicking.rs
src/libstd/sys/cloudabi/stdio.rs
src/libstd/sys/redox/stdio.rs
src/libstd/sys/unix/mod.rs
src/libstd/sys/unix/stdio.rs
src/libstd/sys/wasm/rwlock.rs
src/libstd/sys/wasm/stdio.rs
src/libstd/sys/windows/mutex.rs
src/libstd/sys/windows/stdio.rs
src/libstd/sys_common/backtrace.rs
src/libstd/sys_common/mod.rs
src/libstd/sys_common/thread_local.rs
src/libstd/sys_common/util.rs
src/libstd/thread/local.rs
src/libstd/thread/mod.rs
src/libsyntax/ext/tt/quoted.rs
src/libsyntax/feature_gate.rs
src/libsyntax_pos/hygiene.rs
src/libsyntax_pos/symbol.rs
src/libtest/lib.rs
src/libunwind/macros.rs
src/rustc/rustc.rs
src/stdsimd
src/test/compile-fail/E0501.rs
src/test/compile-fail/borrowck/borrowck-asm.rs
src/test/compile-fail/borrowck/borrowck-closures-mut-and-imm.rs
src/test/compile-fail/borrowck/borrowck-describe-lvalue.rs
src/test/compile-fail/borrowck/borrowck-imm-ref-to-mut-rec-field-issue-3162-c.rs
src/test/compile-fail/borrowck/borrowck-lend-flow-match.rs
src/test/compile-fail/borrowck/borrowck-match-already-borrowed.rs
src/test/compile-fail/borrowck/borrowck-mut-borrow-linear-errors.rs
src/test/compile-fail/borrowck/borrowck-overloaded-index-ref-index.rs
src/test/compile-fail/borrowck/borrowck-pat-reassign-binding.rs
src/test/compile-fail/borrowck/borrowck-union-borrow.rs
src/test/compile-fail/borrowck/two-phase-across-loop.rs [new file with mode: 0644]
src/test/compile-fail/borrowck/two-phase-activation-sharing-interference.rs
src/test/compile-fail/borrowck/two-phase-allow-access-during-reservation.rs
src/test/compile-fail/borrowck/two-phase-cannot-nest-mut-self-calls.rs
src/test/compile-fail/borrowck/two-phase-nonrecv-autoref.rs
src/test/compile-fail/borrowck/two-phase-reservation-sharing-interference-2.rs
src/test/compile-fail/borrowck/two-phase-reservation-sharing-interference.rs
src/test/compile-fail/borrowck/two-phase-sneaky.rs
src/test/compile-fail/coerce-overloaded-autoderef.rs
src/test/compile-fail/coherence-inherited-assoc-ty-cycle-err.rs
src/test/compile-fail/const-size_of-cycle.rs
src/test/compile-fail/cycle-projection-based-on-where-clause.rs
src/test/compile-fail/cycle-trait-default-type-trait.rs
src/test/compile-fail/cycle-trait-supertrait-direct.rs
src/test/compile-fail/hrtb-identity-fn-borrows.rs
src/test/compile-fail/infinite-vec-type-recursion.rs
src/test/compile-fail/issue-20772.rs
src/test/compile-fail/issue-20825.rs
src/test/compile-fail/issue-21177.rs
src/test/compile-fail/issue-22673.rs
src/test/compile-fail/issue-26548.rs
src/test/compile-fail/issue-34373.rs
src/test/compile-fail/issue-36082.rs
src/test/compile-fail/issue-44415.rs
src/test/compile-fail/mir_check_cast_closure.rs
src/test/compile-fail/mir_check_cast_reify.rs
src/test/compile-fail/mir_check_cast_unsafe_fn.rs
src/test/compile-fail/mir_check_cast_unsize.rs
src/test/compile-fail/mut-pattern-internal-mutability.rs
src/test/compile-fail/nll/loan_ends_mid_block_pair.rs
src/test/compile-fail/nll/loan_ends_mid_block_vec.rs
src/test/compile-fail/nll/reference-carried-through-struct-field.rs
src/test/compile-fail/nll/region-ends-after-if-condition.rs
src/test/compile-fail/nll/return_from_loop.rs
src/test/compile-fail/nll/where_clauses_in_functions.rs
src/test/compile-fail/nll/where_clauses_in_structs.rs
src/test/compile-fail/regions-pattern-typing-issue-19997.rs
src/test/compile-fail/regions-static-bound.rs
src/test/compile-fail/resolve-self-in-impl.rs
src/test/incremental/static_cycle/b.rs [new file with mode: 0644]
src/test/mir-opt/nll/liveness-call-subtlety.rs
src/test/mir-opt/nll/liveness-drop-intra-block.rs
src/test/mir-opt/nll/liveness-interblock.rs
src/test/mir-opt/nll/named-lifetimes-basic.rs
src/test/mir-opt/nll/reborrow-basic.rs
src/test/mir-opt/nll/region-liveness-basic.rs
src/test/mir-opt/nll/region-liveness-two-disjoint-uses.rs
src/test/mir-opt/nll/region-subtyping-basic.rs
src/test/parse-fail/obsolete-proc.rs [deleted file]
src/test/run-make-fulldeps/save-analysis-rfc2126/Makefile [new file with mode: 0644]
src/test/run-make-fulldeps/save-analysis-rfc2126/extern_absolute_paths.rs [new file with mode: 0644]
src/test/run-make-fulldeps/save-analysis-rfc2126/extern_in_paths.rs [new file with mode: 0644]
src/test/run-make-fulldeps/save-analysis-rfc2126/krate2.rs [new file with mode: 0644]
src/test/run-make-fulldeps/save-analysis-rfc2126/validate_json.py [new file with mode: 0644]
src/test/run-make/wasm-panic-small/Makefile [new file with mode: 0644]
src/test/run-make/wasm-panic-small/foo.rs [new file with mode: 0644]
src/test/run-pass/borrowck/two-phase-baseline.rs
src/test/run-pass/impl-trait/example-calendar.rs
src/test/run-pass/issue-16819.rs
src/test/run-pass/macro-at-most-once-rep.rs
src/test/run-pass/simd-target-feature-mixup.rs
src/test/rustdoc-js/multi-query.js [new file with mode: 0644]
src/test/rustdoc-ui/intra-links-warning.rs [new file with mode: 0644]
src/test/rustdoc-ui/intra-links-warning.stderr [new file with mode: 0644]
src/test/rustdoc/all.rs
src/test/rustdoc/force-target-feature.rs [new file with mode: 0644]
src/test/ui/borrowck/borrowck-closures-two-mut.rs
src/test/ui/borrowck/borrowck-closures-two-mut.stderr
src/test/ui/borrowck/two-phase-method-receivers.rs
src/test/ui/borrowck/two-phase-multiple-activations.rs
src/test/ui/chalkify/lower_impl.rs
src/test/ui/chalkify/lower_impl.stderr
src/test/ui/cycle-trait-supertrait-indirect.rs
src/test/ui/cycle-trait-supertrait-indirect.stderr
src/test/ui/feature-gate-cfg-target-feature.rs [deleted file]
src/test/ui/feature-gate-cfg-target-feature.stderr [deleted file]
src/test/ui/feature-gate-target_feature.rs [deleted file]
src/test/ui/feature-gate-target_feature.stderr [deleted file]
src/test/ui/impl-trait/auto-trait-leak.rs
src/test/ui/impl-trait/auto-trait-leak.stderr
src/test/ui/issue-12511.rs
src/test/ui/issue-12511.stderr
src/test/ui/issue-23302-1.stderr
src/test/ui/issue-23302-2.stderr
src/test/ui/issue-23302-3.rs
src/test/ui/issue-23302-3.stderr
src/test/ui/issue-36163.stderr
src/test/ui/issue-45697-1.rs
src/test/ui/issue-45697-1.stderr
src/test/ui/issue-45697.rs
src/test/ui/issue-45697.stderr
src/test/ui/issue-46471-1.stderr
src/test/ui/issue-48803.rs [new file with mode: 0644]
src/test/ui/issue-48803.stderr [new file with mode: 0644]
src/test/ui/macros/macro-at-most-once-rep-ambig.rs
src/test/ui/macros/macro-at-most-once-rep-ambig.stderr
src/test/ui/nll/closure-requirements/escape-argument-callee.rs
src/test/ui/nll/closure-requirements/escape-argument-callee.stderr
src/test/ui/nll/closure-requirements/escape-argument.rs
src/test/ui/nll/closure-requirements/escape-upvar-nested.rs
src/test/ui/nll/closure-requirements/escape-upvar-ref.rs
src/test/ui/nll/closure-requirements/propagate-approximated-fail-no-postdom.rs
src/test/ui/nll/closure-requirements/propagate-approximated-fail-no-postdom.stderr
src/test/ui/nll/closure-requirements/propagate-approximated-ref.rs
src/test/ui/nll/closure-requirements/propagate-approximated-ref.stderr
src/test/ui/nll/closure-requirements/propagate-approximated-shorter-to-static-comparing-against-free.rs
src/test/ui/nll/closure-requirements/propagate-approximated-shorter-to-static-comparing-against-free.stderr
src/test/ui/nll/closure-requirements/propagate-approximated-shorter-to-static-no-bound.rs
src/test/ui/nll/closure-requirements/propagate-approximated-shorter-to-static-no-bound.stderr
src/test/ui/nll/closure-requirements/propagate-approximated-shorter-to-static-wrong-bound.rs
src/test/ui/nll/closure-requirements/propagate-approximated-shorter-to-static-wrong-bound.stderr
src/test/ui/nll/closure-requirements/propagate-approximated-val.rs
src/test/ui/nll/closure-requirements/propagate-approximated-val.stderr
src/test/ui/nll/closure-requirements/propagate-despite-same-free-region.rs
src/test/ui/nll/closure-requirements/propagate-despite-same-free-region.stderr
src/test/ui/nll/closure-requirements/propagate-fail-to-approximate-longer-no-bounds.rs
src/test/ui/nll/closure-requirements/propagate-fail-to-approximate-longer-no-bounds.stderr
src/test/ui/nll/closure-requirements/propagate-fail-to-approximate-longer-wrong-bounds.rs
src/test/ui/nll/closure-requirements/propagate-fail-to-approximate-longer-wrong-bounds.stderr
src/test/ui/nll/closure-requirements/propagate-from-trait-match.rs
src/test/ui/nll/closure-requirements/propagate-from-trait-match.stderr
src/test/ui/nll/closure-requirements/region-lbr-anon-does-not-outlive-static.rs
src/test/ui/nll/closure-requirements/region-lbr-anon-does-not-outlive-static.stderr
src/test/ui/nll/closure-requirements/region-lbr-named-does-not-outlive-static.rs
src/test/ui/nll/closure-requirements/region-lbr-named-does-not-outlive-static.stderr
src/test/ui/nll/closure-requirements/region-lbr1-does-not-outlive-ebr2.rs
src/test/ui/nll/closure-requirements/region-lbr1-does-not-outlive-ebr2.stderr
src/test/ui/nll/closure-requirements/region-lbr1-does-outlive-lbr2-because-implied-bound.rs
src/test/ui/nll/closure-requirements/return-wrong-bound-region.rs
src/test/ui/nll/closure-requirements/return-wrong-bound-region.stderr
src/test/ui/nll/constant.rs
src/test/ui/nll/drop-may-dangle.rs
src/test/ui/nll/drop-no-may-dangle.rs
src/test/ui/nll/get_default.rs
src/test/ui/nll/issue-47388.stderr
src/test/ui/nll/maybe-initialized-drop-implicit-fragment-drop.rs
src/test/ui/nll/maybe-initialized-drop-uninitialized.rs
src/test/ui/nll/maybe-initialized-drop-with-fragment.rs
src/test/ui/nll/maybe-initialized-drop-with-uninitialized-fragments.rs
src/test/ui/nll/maybe-initialized-drop.rs
src/test/ui/nll/projection-return.rs
src/test/ui/nll/ty-outlives/impl-trait-captures.rs
src/test/ui/nll/ty-outlives/impl-trait-captures.stderr
src/test/ui/nll/ty-outlives/impl-trait-outlives.rs
src/test/ui/nll/ty-outlives/impl-trait-outlives.stderr
src/test/ui/nll/ty-outlives/projection-implied-bounds.rs
src/test/ui/nll/ty-outlives/projection-implied-bounds.stderr
src/test/ui/nll/ty-outlives/projection-no-regions-closure.rs
src/test/ui/nll/ty-outlives/projection-no-regions-closure.stderr
src/test/ui/nll/ty-outlives/projection-no-regions-fn.rs
src/test/ui/nll/ty-outlives/projection-no-regions-fn.stderr
src/test/ui/nll/ty-outlives/projection-one-region-closure.rs
src/test/ui/nll/ty-outlives/projection-one-region-closure.stderr
src/test/ui/nll/ty-outlives/projection-one-region-trait-bound-closure.rs
src/test/ui/nll/ty-outlives/projection-one-region-trait-bound-closure.stderr
src/test/ui/nll/ty-outlives/projection-one-region-trait-bound-static-closure.rs
src/test/ui/nll/ty-outlives/projection-two-region-trait-bound-closure.rs
src/test/ui/nll/ty-outlives/projection-two-region-trait-bound-closure.stderr
src/test/ui/nll/ty-outlives/ty-param-closure-approximate-lower-bound.rs
src/test/ui/nll/ty-outlives/ty-param-closure-approximate-lower-bound.stderr
src/test/ui/nll/ty-outlives/ty-param-closure-outlives-from-return-type.rs
src/test/ui/nll/ty-outlives/ty-param-closure-outlives-from-return-type.stderr
src/test/ui/nll/ty-outlives/ty-param-closure-outlives-from-where-clause.rs
src/test/ui/nll/ty-outlives/ty-param-closure-outlives-from-where-clause.stderr
src/test/ui/nll/ty-outlives/ty-param-fn-body.rs
src/test/ui/nll/ty-outlives/ty-param-fn-body.stderr
src/test/ui/nll/ty-outlives/ty-param-fn.rs
src/test/ui/nll/ty-outlives/ty-param-fn.stderr
src/test/ui/nll/ty-outlives/ty-param-implied-bounds.rs
src/test/ui/resolve/issue-23305.rs
src/test/ui/resolve/issue-23305.stderr
src/test/ui/target-feature-gate.rs [new file with mode: 0644]
src/test/ui/target-feature-gate.stderr [new file with mode: 0644]
src/test/ui/target-feature-wrong.rs
src/test/ui/target-feature-wrong.stderr
src/tools/compiletest/src/main.rs
src/tools/compiletest/src/runtest.rs
src/tools/rustdoc-js/tester.js
src/tools/rustfmt

diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
index 7a62405f059677affac001298e8f5fd2f9dbe515..73d4188d695498e68e10388ce24b0e57bab27b11 100644 (file)
@@ -121,6 +121,7 @@ configuration used in the build process. Some options to note:
 #### `[rust]`:
 - `debuginfo = true` - Build a compiler with debuginfo. Makes building rustc slower, but then you can use a debugger to debug `rustc`.
 - `debuginfo-lines = true` - An alternative to `debuginfo = true` that doesn't let you use a debugger, but doesn't make building rustc slower and still gives you line numbers in backtraces.
+- `debuginfo-tools = true` - Build the extended tools with debuginfo.
 - `debug-assertions = true` - Makes the log output of `debug!` work.
 - `optimize = false` - Disable optimizations to speed up compilation of stage1 rust, but makes the stage1 compiler x100 slower.
 
diff --git a/config.toml.example b/config.toml.example
index 68bc7dfe720fe11f20313c299b4dc4bc44318b5d..effe00843810da1dcd993bfc48ec00f6df6e288c 100644 (file)
 # standard library.
 #debuginfo-only-std = false
 
+# Enable debuginfo for the extended tools: cargo, rls, rustfmt
+# Adding debuginfo makes them several times larger.
+#debuginfo-tools = false
+
 # Whether or not jemalloc is built and enabled
 #use-jemalloc = true
 
diff --git a/src/Cargo.lock b/src/Cargo.lock
index daa8d0dc84cea5dda67d7daaa89befd03dc82e01..4f979b7e9f4970d13489001138e7693ba466ff8d 100644 (file)
@@ -66,6 +66,9 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 [[package]]
 name = "arena"
 version = "0.0.0"
+dependencies = [
+ "rustc_data_structures 0.0.0",
+]
 
 [[package]]
 name = "arrayvec"
@@ -1688,7 +1691,7 @@ dependencies = [
 
 [[package]]
 name = "rustc-ap-rustc_cratesio_shim"
-version = "98.0.0"
+version = "103.0.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
  "bitflags 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -1697,7 +1700,7 @@ dependencies = [
 
 [[package]]
 name = "rustc-ap-rustc_data_structures"
-version = "98.0.0"
+version = "103.0.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
  "cfg-if 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -1705,20 +1708,20 @@ dependencies = [
  "log 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)",
  "parking_lot 0.5.4 (registry+https://github.com/rust-lang/crates.io-index)",
  "parking_lot_core 0.2.13 (registry+https://github.com/rust-lang/crates.io-index)",
- "rustc-ap-serialize 98.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "rustc-ap-serialize 103.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
  "stable_deref_trait 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
  "term 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 
 [[package]]
 name = "rustc-ap-rustc_errors"
-version = "98.0.0"
+version = "103.0.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
  "atty 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)",
- "rustc-ap-rustc_data_structures 98.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
- "rustc-ap-serialize 98.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
- "rustc-ap-syntax_pos 98.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "rustc-ap-rustc_data_structures 103.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "rustc-ap-serialize 103.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "rustc-ap-syntax_pos 103.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
  "term 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)",
  "termcolor 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)",
  "unicode-width 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -1726,32 +1729,32 @@ dependencies = [
 
 [[package]]
 name = "rustc-ap-serialize"
-version = "98.0.0"
+version = "103.0.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 
 [[package]]
 name = "rustc-ap-syntax"
-version = "98.0.0"
+version = "103.0.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
  "bitflags 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
  "log 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)",
- "rustc-ap-rustc_cratesio_shim 98.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
- "rustc-ap-rustc_data_structures 98.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
- "rustc-ap-rustc_errors 98.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
- "rustc-ap-serialize 98.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
- "rustc-ap-syntax_pos 98.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "rustc-ap-rustc_cratesio_shim 103.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "rustc-ap-rustc_data_structures 103.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "rustc-ap-rustc_errors 103.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "rustc-ap-serialize 103.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "rustc-ap-syntax_pos 103.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
  "scoped-tls 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
  "term 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 
 [[package]]
 name = "rustc-ap-syntax_pos"
-version = "98.0.0"
+version = "103.0.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
- "rustc-ap-rustc_data_structures 98.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
- "rustc-ap-serialize 98.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "rustc-ap-rustc_data_structures 103.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "rustc-ap-serialize 103.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
  "scoped-tls 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
  "term 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)",
  "unicode-width 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -2190,7 +2193,7 @@ dependencies = [
  "libc 0.2.40 (registry+https://github.com/rust-lang/crates.io-index)",
  "log 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)",
  "regex 0.2.10 (registry+https://github.com/rust-lang/crates.io-index)",
- "rustc-ap-syntax 98.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "rustc-ap-syntax 103.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
  "serde 1.0.37 (registry+https://github.com/rust-lang/crates.io-index)",
  "serde_derive 1.0.37 (registry+https://github.com/rust-lang/crates.io-index)",
  "serde_json 1.0.13 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -3012,12 +3015,12 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 "checksum rls-rustc 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "885f66b92757420572cbb02e033d4a9558c7413ca9b7ac206f28fd58ffdb44ea"
 "checksum rls-span 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "5d7c7046dc6a92f2ae02ed302746db4382e75131b9ce20ce967259f6b5867a6a"
 "checksum rls-vfs 0.4.5 (registry+https://github.com/rust-lang/crates.io-index)" = "be231e1e559c315bc60ced5ad2cc2d7a9c208ed7d4e2c126500149836fda19bb"
-"checksum rustc-ap-rustc_cratesio_shim 98.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "2b576584b70d2b0c5f8a82c98a3eb39ef95eaf9187b90ad8858a149a55e94e85"
-"checksum rustc-ap-rustc_data_structures 98.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "be7c3367229e1497a65c754188842cc02f5e50e93cced2168f621c170cd08ee5"
-"checksum rustc-ap-rustc_errors 98.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "db6440cf26fe79acf54d9d0991835a2eabec4b7039da153889a16f50bda5a7ef"
-"checksum rustc-ap-serialize 98.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "3854db2139a75e4d1898289c08dcd8487bec318975877c6268551afccab8844b"
-"checksum rustc-ap-syntax 98.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "8f1852c80f5195a3da20023205bd1202254bf0282b9ffbaaa029a6beed31db3d"
-"checksum rustc-ap-syntax_pos 98.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "bc60c04eccec0304b3684584b696669b2cfdfbeacee615bb5a9f431aafa64ab9"
+"checksum rustc-ap-rustc_cratesio_shim 103.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "463834ac5ea777cb56c073586675fac37292f8425aafb3757efca7e6a76545aa"
+"checksum rustc-ap-rustc_data_structures 103.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "5d256eeab1b8639c2a1fd341e54f3613f8150bc262e4ec9361a29bbcb162906d"
+"checksum rustc-ap-rustc_errors 103.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "cf6dd73033bb512845a6df347174c65ad430c92ecd35527e24d8bb186f5664ee"
+"checksum rustc-ap-serialize 103.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "356d338dbe538c7d6428296872d5d68da8f091e34eb89bca3b3f245ed0785e5e"
+"checksum rustc-ap-syntax 103.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "0f02edede4ba70963a7dac2308876f03f76f9edd48a035e5abc8fa37c57a77c8"
+"checksum rustc-ap-syntax_pos 103.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "ad8e50d4c38121fa8ded3ffbf94926ec74c95f24316c3b80de84fbfb42c005cf"
 "checksum rustc-demangle 0.1.7 (registry+https://github.com/rust-lang/crates.io-index)" = "11fb43a206a04116ffd7cfcf9bcb941f8eb6cc7ff667272246b0a1c74259a3cb"
 "checksum rustc-serialize 0.3.24 (registry+https://github.com/rust-lang/crates.io-index)" = "dcf128d1287d2ea9d80910b5f1120d0b8eede3fbf1abe91c40d39ea7d51e6fda"
 "checksum same-file 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)" = "cfb6eded0b06a0b512c8ddbcf04089138c9b4362c2f696f3c3d76039d68f3637"
diff --git a/src/bootstrap/builder.rs b/src/bootstrap/builder.rs
index 7ff64af919671f977497bff7abf05dfac28e694c..20c89f99b2b0c9af40b860fa441333d9c4949bca 100644 (file)
@@ -154,18 +154,17 @@ fn maybe_run(&self, builder: &Builder, pathset: &PathSet) {
             eprintln!("{:?} not skipped for {:?} -- not in {:?}", pathset,
                 self.name, builder.config.exclude);
         }
-        let build = builder.build;
-        let hosts = &build.hosts;
+        let hosts = &builder.hosts;
 
         // Determine the targets participating in this rule.
         let targets = if self.only_hosts {
-            if !build.config.run_host_only {
+            if !builder.config.run_host_only {
                 return; // don't run anything
             } else {
-                &build.hosts
+                &builder.hosts
             }
         } else {
-            &build.targets
+            &builder.targets
         };
 
         for host in hosts {
@@ -326,7 +325,7 @@ macro_rules! describe {
                 test::TheBook, test::UnstableBook,
                 test::Rustfmt, test::Miri, test::Clippy, test::RustdocJS, test::RustdocTheme,
                 // Run run-make last, since these won't pass without make on Windows
-                test::RunMake),
+                test::RunMake, test::RustdocUi),
             Kind::Bench => describe!(test::Crate, test::CrateLibrustc),
             Kind::Doc => describe!(doc::UnstableBook, doc::UnstableBookGen, doc::TheBook,
                 doc::Standalone, doc::Std, doc::Test, doc::WhitelistedRustc, doc::Rustc,
@@ -476,7 +475,7 @@ fn run(self, builder: &Builder) -> Interned<PathBuf> {
 
     pub fn sysroot_codegen_backends(&self, compiler: Compiler) -> PathBuf {
         self.sysroot_libdir(compiler, compiler.host)
-            .with_file_name(self.build.config.rust_codegen_backends_dir.clone())
+            .with_file_name(self.config.rust_codegen_backends_dir.clone())
     }
 
     /// Returns the compiler's libdir where it stores the dynamic libraries that
@@ -486,7 +485,7 @@ pub fn sysroot_codegen_backends(&self, compiler: Compiler) -> PathBuf {
     /// Windows.
     pub fn rustc_libdir(&self, compiler: Compiler) -> PathBuf {
         if compiler.is_snapshot(self) {
-            self.build.rustc_snapshot_libdir()
+            self.rustc_snapshot_libdir()
         } else {
             self.sysroot(compiler).join(libdir(&compiler.host))
         }
@@ -523,12 +522,12 @@ pub fn rustdoc_cmd(&self, host: Interned<String>) -> Command {
         let compiler = self.compiler(self.top_stage, host);
         cmd.env("RUSTC_STAGE", compiler.stage.to_string())
            .env("RUSTC_SYSROOT", self.sysroot(compiler))
-           .env("RUSTDOC_LIBDIR", self.sysroot_libdir(compiler, self.build.build))
-           .env("CFG_RELEASE_CHANNEL", &self.build.config.channel)
+           .env("RUSTDOC_LIBDIR", self.sysroot_libdir(compiler, self.config.build))
+           .env("CFG_RELEASE_CHANNEL", &self.config.channel)
            .env("RUSTDOC_REAL", self.rustdoc(host))
-           .env("RUSTDOC_CRATE_VERSION", self.build.rust_version())
+           .env("RUSTDOC_CRATE_VERSION", self.rust_version())
            .env("RUSTC_BOOTSTRAP", "1");
-        if let Some(linker) = self.build.linker(host) {
+        if let Some(linker) = self.linker(host) {
             cmd.env("RUSTC_TARGET_LINKER", linker);
         }
         cmd
@@ -609,23 +608,27 @@ pub fn cargo(&self,
              .env("TEST_MIRI", self.config.test_miri.to_string())
              .env("RUSTC_ERROR_METADATA_DST", self.extended_error_dir());
 
-        if let Some(host_linker) = self.build.linker(compiler.host) {
+        if let Some(host_linker) = self.linker(compiler.host) {
             cargo.env("RUSTC_HOST_LINKER", host_linker);
         }
-        if let Some(target_linker) = self.build.linker(target) {
+        if let Some(target_linker) = self.linker(target) {
             cargo.env("RUSTC_TARGET_LINKER", target_linker);
         }
         if let Some(ref error_format) = self.config.rustc_error_format {
             cargo.env("RUSTC_ERROR_FORMAT", error_format);
         }
         if cmd != "build" && cmd != "check" {
-            cargo.env("RUSTDOC_LIBDIR", self.rustc_libdir(self.compiler(2, self.build.build)));
+            cargo.env("RUSTDOC_LIBDIR", self.rustc_libdir(self.compiler(2, self.config.build)));
         }
 
-        if mode != Mode::Tool {
-            // Tools don't get debuginfo right now, e.g. cargo and rls don't
-            // get compiled with debuginfo.
-            // Adding debuginfo increases their sizes by a factor of 3-4.
+        if mode == Mode::Tool {
+            // Tools like cargo and rls don't get debuginfo by default right now, but this can be
+            // enabled in the config.  Adding debuginfo makes them several times larger.
+            if self.config.rust_debuginfo_tools {
+                cargo.env("RUSTC_DEBUGINFO", self.config.rust_debuginfo.to_string());
+                cargo.env("RUSTC_DEBUGINFO_LINES", self.config.rust_debuginfo_lines.to_string());
+            }
+        } else {
             cargo.env("RUSTC_DEBUGINFO", self.config.rust_debuginfo.to_string());
             cargo.env("RUSTC_DEBUGINFO_LINES", self.config.rust_debuginfo_lines.to_string());
             cargo.env("RUSTC_FORCE_UNSTABLE", "1");
@@ -673,7 +676,7 @@ pub fn cargo(&self,
         //
         // If LLVM support is disabled we need to use the snapshot compiler to compile
         // build scripts, as the new compiler doesn't support executables.
-        if mode == Mode::Libstd || !self.build.config.llvm_enabled {
+        if mode == Mode::Libstd || !self.config.llvm_enabled {
             cargo.env("RUSTC_SNAPSHOT", &self.initial_rustc)
                  .env("RUSTC_SNAPSHOT_LIBDIR", self.rustc_snapshot_libdir());
         } else {
@@ -757,7 +760,7 @@ pub fn cargo(&self,
         }
 
         // For `cargo doc` invocations, make rustdoc print the Rust version into the docs
-        cargo.env("RUSTDOC_CRATE_VERSION", self.build.rust_version());
+        cargo.env("RUSTDOC_CRATE_VERSION", self.rust_version());
 
         // Environment variables *required* throughout the build
         //
@@ -765,7 +768,7 @@ pub fn cargo(&self,
         cargo.env("CFG_COMPILER_HOST_TRIPLE", target);
 
         // Set this for all builds to make sure doc builds also get it.
-        cargo.env("CFG_RELEASE_CHANNEL", &self.build.config.channel);
+        cargo.env("CFG_RELEASE_CHANNEL", &self.config.channel);
 
         // This one's a bit tricky. As of the time of this writing the compiler
         // links to the `winapi` crate on crates.io. This crate provides raw
@@ -850,7 +853,7 @@ pub fn ensure<S: Step>(&'a self, step: S) -> S::Output {
                 panic!(out);
             }
             if let Some(out) = self.cache.get(&step) {
-                self.build.verbose(&format!("{}c {:?}", "  ".repeat(stack.len()), step));
+                self.verbose(&format!("{}c {:?}", "  ".repeat(stack.len()), step));
 
                 {
                     let mut graph = self.graph.borrow_mut();
@@ -865,7 +868,7 @@ pub fn ensure<S: Step>(&'a self, step: S) -> S::Output {
 
                 return out;
             }
-            self.build.verbose(&format!("{}> {:?}", "  ".repeat(stack.len()), step));
+            self.verbose(&format!("{}> {:?}", "  ".repeat(stack.len()), step));
             stack.push(Box::new(step.clone()));
         }
 
@@ -895,7 +898,7 @@ pub fn ensure<S: Step>(&'a self, step: S) -> S::Output {
 
         self.parent.set(prev_parent);
 
-        if self.build.config.print_step_timings && dur > Duration::from_millis(100) {
+        if self.config.print_step_timings && dur > Duration::from_millis(100) {
             println!("[TIMING] {:?} -- {}.{:03}",
                      step,
                      dur.as_secs(),
@@ -907,7 +910,7 @@ pub fn ensure<S: Step>(&'a self, step: S) -> S::Output {
             let cur_step = stack.pop().expect("step stack empty");
             assert_eq!(cur_step.downcast_ref(), Some(&step));
         }
-        self.build.verbose(&format!("{}< {:?}", "  ".repeat(self.stack.borrow().len()), step));
+        self.verbose(&format!("{}< {:?}", "  ".repeat(self.stack.borrow().len()), step));
         self.cache.put(step, out.clone());
         out
     }
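
Aside: the `src/bootstrap` hunks above consistently replace `builder.build.foo(...)` with `builder.foo(...)` and `build: &Build` parameters with `builder: &Builder`. A minimal sketch, using illustrative types rather than the real bootstrap definitions, of the `Deref`-forwarding pattern that lets the shorter call sites compile:

```rust
use std::ops::Deref;

// Hypothetical stand-ins for the bootstrap Build/Builder types.
struct Build {
    verbose: bool,
}

impl Build {
    fn info(&self, msg: &str) {
        if self.verbose {
            println!("{}", msg);
        }
    }
}

struct Builder<'a> {
    build: &'a Build,
}

// Once Builder derefs to Build, `builder.info(..)` resolves to the
// inherent method on Build without spelling out `builder.build`.
impl<'a> Deref for Builder<'a> {
    type Target = Build;
    fn deref(&self) -> &Build {
        self.build
    }
}

fn main() {
    let build = Build { verbose: true };
    let builder = Builder { build: &build };
    builder.build.info("explicit field access (old style in the diff)");
    builder.info("via Deref (new style in the diff)");
}
```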
diff --git a/src/bootstrap/check.rs b/src/bootstrap/check.rs
index a39fad67ebea4ac967bffe828dd8d39c7375b624..adebd424d7eb644ea028935460cd07628aec7ce2 100644 (file)
@@ -12,7 +12,7 @@
 
 use compile::{run_cargo, std_cargo, test_cargo, rustc_cargo, add_to_sysroot};
 use builder::{RunConfig, Builder, ShouldRun, Step};
-use {Build, Compiler, Mode};
+use {Compiler, Mode};
 use cache::Interned;
 use std::path::PathBuf;
 
@@ -36,24 +36,23 @@ fn make_run(run: RunConfig) {
     }
 
     fn run(self, builder: &Builder) {
-        let build = builder.build;
         let target = self.target;
-        let compiler = builder.compiler(0, build.build);
+        let compiler = builder.compiler(0, builder.config.build);
 
-        let out_dir = build.stage_out(compiler, Mode::Libstd);
-        build.clear_if_dirty(&out_dir, &builder.rustc(compiler));
+        let out_dir = builder.stage_out(compiler, Mode::Libstd);
+        builder.clear_if_dirty(&out_dir, &builder.rustc(compiler));
         let mut cargo = builder.cargo(compiler, Mode::Libstd, target, "check");
         std_cargo(builder, &compiler, target, &mut cargo);
 
-        let _folder = build.fold_output(|| format!("stage{}-std", compiler.stage));
+        let _folder = builder.fold_output(|| format!("stage{}-std", compiler.stage));
         println!("Checking std artifacts ({} -> {})", &compiler.host, target);
-        run_cargo(build,
+        run_cargo(builder,
                   &mut cargo,
-                  &libstd_stamp(build, compiler, target),
+                  &libstd_stamp(builder, compiler, target),
                   true);
 
         let libdir = builder.sysroot_libdir(compiler, target);
-        add_to_sysroot(&build, &libdir, &libstd_stamp(build, compiler, target));
+        add_to_sysroot(&builder, &libdir, &libstd_stamp(builder, compiler, target));
     }
 }
 
@@ -83,26 +82,25 @@ fn make_run(run: RunConfig) {
     /// the `compiler` targeting the `target` architecture. The artifacts
     /// created will also be linked into the sysroot directory.
     fn run(self, builder: &Builder) {
-        let build = builder.build;
-        let compiler = builder.compiler(0, build.build);
+        let compiler = builder.compiler(0, builder.config.build);
         let target = self.target;
 
         let stage_out = builder.stage_out(compiler, Mode::Librustc);
-        build.clear_if_dirty(&stage_out, &libstd_stamp(build, compiler, target));
-        build.clear_if_dirty(&stage_out, &libtest_stamp(build, compiler, target));
+        builder.clear_if_dirty(&stage_out, &libstd_stamp(builder, compiler, target));
+        builder.clear_if_dirty(&stage_out, &libtest_stamp(builder, compiler, target));
 
         let mut cargo = builder.cargo(compiler, Mode::Librustc, target, "check");
-        rustc_cargo(build, &mut cargo);
+        rustc_cargo(builder, &mut cargo);
 
-        let _folder = build.fold_output(|| format!("stage{}-rustc", compiler.stage));
+        let _folder = builder.fold_output(|| format!("stage{}-rustc", compiler.stage));
         println!("Checking compiler artifacts ({} -> {})", &compiler.host, target);
-        run_cargo(build,
+        run_cargo(builder,
                   &mut cargo,
-                  &librustc_stamp(build, compiler, target),
+                  &librustc_stamp(builder, compiler, target),
                   true);
 
         let libdir = builder.sysroot_libdir(compiler, target);
-        add_to_sysroot(&build, &libdir, &librustc_stamp(build, compiler, target));
+        add_to_sysroot(&builder, &libdir, &librustc_stamp(builder, compiler, target));
     }
 }
 
@@ -126,41 +124,40 @@ fn make_run(run: RunConfig) {
     }
 
     fn run(self, builder: &Builder) {
-        let build = builder.build;
         let target = self.target;
-        let compiler = builder.compiler(0, build.build);
+        let compiler = builder.compiler(0, builder.config.build);
 
-        let out_dir = build.stage_out(compiler, Mode::Libtest);
-        build.clear_if_dirty(&out_dir, &libstd_stamp(build, compiler, target));
+        let out_dir = builder.stage_out(compiler, Mode::Libtest);
+        builder.clear_if_dirty(&out_dir, &libstd_stamp(builder, compiler, target));
         let mut cargo = builder.cargo(compiler, Mode::Libtest, target, "check");
-        test_cargo(build, &compiler, target, &mut cargo);
+        test_cargo(builder, &compiler, target, &mut cargo);
 
-        let _folder = build.fold_output(|| format!("stage{}-test", compiler.stage));
+        let _folder = builder.fold_output(|| format!("stage{}-test", compiler.stage));
         println!("Checking test artifacts ({} -> {})", &compiler.host, target);
-        run_cargo(build,
+        run_cargo(builder,
                   &mut cargo,
-                  &libtest_stamp(build, compiler, target),
+                  &libtest_stamp(builder, compiler, target),
                   true);
 
         let libdir = builder.sysroot_libdir(compiler, target);
-        add_to_sysroot(&build, &libdir, &libtest_stamp(build, compiler, target));
+        add_to_sysroot(builder, &libdir, &libtest_stamp(builder, compiler, target));
     }
 }
 
 /// Cargo's output path for the standard library in a given stage, compiled
 /// by a particular compiler for the specified target.
-pub fn libstd_stamp(build: &Build, compiler: Compiler, target: Interned<String>) -> PathBuf {
-    build.cargo_out(compiler, Mode::Libstd, target).join(".libstd-check.stamp")
+pub fn libstd_stamp(builder: &Builder, compiler: Compiler, target: Interned<String>) -> PathBuf {
+    builder.cargo_out(compiler, Mode::Libstd, target).join(".libstd-check.stamp")
 }
 
 /// Cargo's output path for libtest in a given stage, compiled by a particular
 /// compiler for the specified target.
-pub fn libtest_stamp(build: &Build, compiler: Compiler, target: Interned<String>) -> PathBuf {
-    build.cargo_out(compiler, Mode::Libtest, target).join(".libtest-check.stamp")
+pub fn libtest_stamp(builder: &Builder, compiler: Compiler, target: Interned<String>) -> PathBuf {
+    builder.cargo_out(compiler, Mode::Libtest, target).join(".libtest-check.stamp")
 }
 
 /// Cargo's output path for librustc in a given stage, compiled by a particular
 /// compiler for the specified target.
-pub fn librustc_stamp(build: &Build, compiler: Compiler, target: Interned<String>) -> PathBuf {
-    build.cargo_out(compiler, Mode::Librustc, target).join(".librustc-check.stamp")
+pub fn librustc_stamp(builder: &Builder, compiler: Compiler, target: Interned<String>) -> PathBuf {
+    builder.cargo_out(compiler, Mode::Librustc, target).join(".librustc-check.stamp")
 }
diff --git a/src/bootstrap/compile.rs b/src/bootstrap/compile.rs
index b411b19bd53d21047bd4f93956195070b1e9779c..07bce77af8d24083844f26e173cdb4604829c4a4 100644 (file)
@@ -31,7 +31,7 @@
 use serde_json;
 
 use util::{exe, libdir, is_dylib, CiEnv};
-use {Build, Compiler, Mode};
+use {Compiler, Mode};
 use native;
 use tool;
 
@@ -65,14 +65,13 @@ fn make_run(run: RunConfig) {
     /// using the `compiler` targeting the `target` architecture. The artifacts
     /// created will also be linked into the sysroot directory.
     fn run(self, builder: &Builder) {
-        let build = builder.build;
         let target = self.target;
         let compiler = self.compiler;
 
         builder.ensure(StartupObjects { compiler, target });
 
-        if build.force_use_stage1(compiler, target) {
-            let from = builder.compiler(1, build.build);
+        if builder.force_use_stage1(compiler, target) {
+            let from = builder.compiler(1, builder.config.build);
             builder.ensure(Std {
                 compiler: from,
                 target,
@@ -83,7 +82,7 @@ fn run(self, builder: &Builder) {
             // still contain the musl startup objects.
             if target.contains("musl") {
                 let libdir = builder.sysroot_libdir(compiler, target);
-                copy_musl_third_party_objects(build, target, &libdir);
+                copy_musl_third_party_objects(builder, target, &libdir);
             }
 
             builder.ensure(StdLink {
@@ -96,24 +95,24 @@ fn run(self, builder: &Builder) {
 
         if target.contains("musl") {
             let libdir = builder.sysroot_libdir(compiler, target);
-            copy_musl_third_party_objects(build, target, &libdir);
+            copy_musl_third_party_objects(builder, target, &libdir);
         }
 
-        let out_dir = build.cargo_out(compiler, Mode::Libstd, target);
-        build.clear_if_dirty(&out_dir, &builder.rustc(compiler));
+        let out_dir = builder.cargo_out(compiler, Mode::Libstd, target);
+        builder.clear_if_dirty(&out_dir, &builder.rustc(compiler));
         let mut cargo = builder.cargo(compiler, Mode::Libstd, target, "build");
         std_cargo(builder, &compiler, target, &mut cargo);
 
-        let _folder = build.fold_output(|| format!("stage{}-std", compiler.stage));
-        build.info(&format!("Building stage{} std artifacts ({} -> {})", compiler.stage,
+        let _folder = builder.fold_output(|| format!("stage{}-std", compiler.stage));
+        builder.info(&format!("Building stage{} std artifacts ({} -> {})", compiler.stage,
                 &compiler.host, target));
-        run_cargo(build,
+        run_cargo(builder,
                   &mut cargo,
-                  &libstd_stamp(build, compiler, target),
+                  &libstd_stamp(builder, compiler, target),
                   false);
 
         builder.ensure(StdLink {
-            compiler: builder.compiler(compiler.stage, build.build),
+            compiler: builder.compiler(compiler.stage, builder.config.build),
             target_compiler: compiler,
             target,
         });
@@ -126,17 +125,17 @@ fn run(self, builder: &Builder) {
 /// with a glibc-targeting toolchain, given we have the appropriate startup
 /// files. As those shipped with glibc won't work, copy the ones provided by
 /// musl so we have them on linux-gnu hosts.
-fn copy_musl_third_party_objects(build: &Build,
+fn copy_musl_third_party_objects(builder: &Builder,
                                  target: Interned<String>,
                                  into: &Path) {
     for &obj in &["crt1.o", "crti.o", "crtn.o"] {
-        build.copy(&build.musl_root(target).unwrap().join("lib").join(obj), &into.join(obj));
+        builder.copy(&builder.musl_root(target).unwrap().join("lib").join(obj), &into.join(obj));
     }
 }
 
 /// Configure cargo to compile the standard library, adding appropriate env vars
 /// and such.
-pub fn std_cargo(build: &Builder,
+pub fn std_cargo(builder: &Builder,
                  compiler: &Compiler,
                  target: Interned<String>,
                  cargo: &mut Command) {
@@ -144,27 +143,27 @@ pub fn std_cargo(build: &Builder,
         cargo.env("MACOSX_DEPLOYMENT_TARGET", target);
     }
 
-    if build.no_std(target) == Some(true) {
+    if builder.no_std(target) == Some(true) {
         // for no-std targets we only compile a few no_std crates
         cargo.arg("--features").arg("c mem")
             .args(&["-p", "alloc"])
             .args(&["-p", "compiler_builtins"])
             .args(&["-p", "std_unicode"])
             .arg("--manifest-path")
-            .arg(build.src.join("src/rustc/compiler_builtins_shim/Cargo.toml"));
+            .arg(builder.src.join("src/rustc/compiler_builtins_shim/Cargo.toml"));
     } else {
-        let mut features = build.std_features();
+        let mut features = builder.std_features();
 
         // When doing a local rebuild we tell cargo that we're stage1 rather than
         // stage0. This works fine if the local rust and being-built rust have the
         // same view of what the default allocator is, but fails otherwise. Since
         // we don't have a way to express an allocator preference yet, work
         // around the issue in the case of a local rebuild with jemalloc disabled.
-        if compiler.stage == 0 && build.local_rebuild && !build.config.use_jemalloc {
+        if compiler.stage == 0 && builder.local_rebuild && !builder.config.use_jemalloc {
             features.push_str(" force_alloc_system");
         }
 
-        if compiler.stage != 0 && build.config.sanitizers {
+        if compiler.stage != 0 && builder.config.sanitizers {
             // This variable is used by the sanitizer runtime crates, e.g.
             // rustc_lsan, to build the sanitizer runtime from C code
             // When this variable is missing, those crates won't compile the C code,
@@ -172,8 +171,8 @@ pub fn std_cargo(build: &Builder,
             // missing
             // We also only build the runtimes when --enable-sanitizers (or its
             // config.toml equivalent) is used
-            let llvm_config = build.ensure(native::Llvm {
-                target: build.config.build,
+            let llvm_config = builder.ensure(native::Llvm {
+                target: builder.config.build,
                 emscripten: false,
             });
             cargo.env("LLVM_CONFIG", llvm_config);
@@ -181,15 +180,15 @@ pub fn std_cargo(build: &Builder,
 
         cargo.arg("--features").arg(features)
             .arg("--manifest-path")
-            .arg(build.src.join("src/libstd/Cargo.toml"));
+            .arg(builder.src.join("src/libstd/Cargo.toml"));
 
-        if let Some(target) = build.config.target_config.get(&target) {
+        if let Some(target) = builder.config.target_config.get(&target) {
             if let Some(ref jemalloc) = target.jemalloc {
                 cargo.env("JEMALLOC_OVERRIDE", jemalloc);
             }
         }
         if target.contains("musl") {
-            if let Some(p) = build.musl_root(target) {
+            if let Some(p) = builder.musl_root(target) {
                 cargo.env("MUSL_ROOT", p);
             }
         }
@@ -219,24 +218,23 @@ fn should_run(run: ShouldRun) -> ShouldRun {
     /// libraries for `target`, and this method will find them in the relevant
     /// output directory.
     fn run(self, builder: &Builder) {
-        let build = builder.build;
         let compiler = self.compiler;
         let target_compiler = self.target_compiler;
         let target = self.target;
-        build.info(&format!("Copying stage{} std from stage{} ({} -> {} / {})",
+        builder.info(&format!("Copying stage{} std from stage{} ({} -> {} / {})",
                 target_compiler.stage,
                 compiler.stage,
                 &compiler.host,
                 target_compiler.host,
                 target));
         let libdir = builder.sysroot_libdir(target_compiler, target);
-        add_to_sysroot(&build, &libdir, &libstd_stamp(build, compiler, target));
+        add_to_sysroot(builder, &libdir, &libstd_stamp(builder, compiler, target));
 
-        if build.config.sanitizers && compiler.stage != 0 && target == "x86_64-apple-darwin" {
+        if builder.config.sanitizers && compiler.stage != 0 && target == "x86_64-apple-darwin" {
             // The sanitizers are only built in stage1 or above, so the dylibs will
             // be missing in stage0 and causes panic. See the `std()` function above
             // for reason why the sanitizers are not built in stage0.
-            copy_apple_sanitizer_dylibs(&build, &build.native_dir(target), "osx", &libdir);
+            copy_apple_sanitizer_dylibs(builder, &builder.native_dir(target), "osx", &libdir);
         }
 
         builder.ensure(tool::CleanTools {
@@ -247,7 +245,7 @@ fn run(self, builder: &Builder) {
     }
 }
 
-fn copy_apple_sanitizer_dylibs(build: &Build, native_dir: &Path, platform: &str, into: &Path) {
+fn copy_apple_sanitizer_dylibs(builder: &Builder, native_dir: &Path, platform: &str, into: &Path) {
     for &sanitizer in &["asan", "tsan"] {
         let filename = format!("libclang_rt.{}_{}_dynamic.dylib", sanitizer, platform);
         let mut src_path = native_dir.join(sanitizer);
@@ -255,7 +253,7 @@ fn copy_apple_sanitizer_dylibs(build: &Build, native_dir: &Path, platform: &str,
         src_path.push("lib");
         src_path.push("darwin");
         src_path.push(&filename);
-        build.copy(&src_path, &into.join(filename));
+        builder.copy(&src_path, &into.join(filename));
     }
 }
 
@@ -286,15 +284,14 @@ fn make_run(run: RunConfig) {
     /// files, so we just use the nightly snapshot compiler to always build them (as
     /// no other compilers are guaranteed to be available).
     fn run(self, builder: &Builder) {
-        let build = builder.build;
         let for_compiler = self.compiler;
         let target = self.target;
         if !target.contains("pc-windows-gnu") {
             return
         }
 
-        let src_dir = &build.src.join("src/rtstartup");
-        let dst_dir = &build.native_dir(target).join("rtstartup");
+        let src_dir = &builder.src.join("src/rtstartup");
+        let dst_dir = &builder.native_dir(target).join("rtstartup");
         let sysroot_dir = &builder.sysroot_libdir(for_compiler, target);
         t!(fs::create_dir_all(dst_dir));
 
@@ -302,8 +299,8 @@ fn run(self, builder: &Builder) {
             let src_file = &src_dir.join(file.to_string() + ".rs");
             let dst_file = &dst_dir.join(file.to_string() + ".o");
             if !up_to_date(src_file, dst_file) {
-                let mut cmd = Command::new(&build.initial_rustc);
-                build.run(cmd.env("RUSTC_BOOTSTRAP", "1")
+                let mut cmd = Command::new(&builder.initial_rustc);
+                builder.run(cmd.env("RUSTC_BOOTSTRAP", "1")
                             .arg("--cfg").arg("stage0")
                             .arg("--target").arg(target)
                             .arg("--emit=obj")
@@ -311,15 +308,15 @@ fn run(self, builder: &Builder) {
                             .arg(src_file));
             }
 
-            build.copy(dst_file, &sysroot_dir.join(file.to_string() + ".o"));
+            builder.copy(dst_file, &sysroot_dir.join(file.to_string() + ".o"));
         }
 
         for obj in ["crt2.o", "dllcrt2.o"].iter() {
-            let src = compiler_file(build,
-                                    build.cc(target),
+            let src = compiler_file(builder,
+                                    builder.cc(target),
                                     target,
                                     obj);
-            build.copy(&src, &sysroot_dir.join(obj));
+            builder.copy(&src, &sysroot_dir.join(obj));
         }
     }
 }
@@ -351,41 +348,41 @@ fn make_run(run: RunConfig) {
     /// the build using the `compiler` targeting the `target` architecture. The
     /// artifacts created will also be linked into the sysroot directory.
     fn run(self, builder: &Builder) {
-        let build = builder.build;
         let target = self.target;
         let compiler = self.compiler;
 
         builder.ensure(Std { compiler, target });
 
-        if build.force_use_stage1(compiler, target) {
+        if builder.force_use_stage1(compiler, target) {
             builder.ensure(Test {
-                compiler: builder.compiler(1, build.build),
+                compiler: builder.compiler(1, builder.config.build),
                 target,
             });
-            build.info(&format!("Uplifting stage1 test ({} -> {})", &build.build, target));
+            builder.info(
+                &format!("Uplifting stage1 test ({} -> {})", builder.config.build, target));
             builder.ensure(TestLink {
-                compiler: builder.compiler(1, build.build),
+                compiler: builder.compiler(1, builder.config.build),
                 target_compiler: compiler,
                 target,
             });
             return;
         }
 
-        let out_dir = build.cargo_out(compiler, Mode::Libtest, target);
-        build.clear_if_dirty(&out_dir, &libstd_stamp(build, compiler, target));
+        let out_dir = builder.cargo_out(compiler, Mode::Libtest, target);
+        builder.clear_if_dirty(&out_dir, &libstd_stamp(builder, compiler, target));
         let mut cargo = builder.cargo(compiler, Mode::Libtest, target, "build");
-        test_cargo(build, &compiler, target, &mut cargo);
+        test_cargo(builder, &compiler, target, &mut cargo);
 
-        let _folder = build.fold_output(|| format!("stage{}-test", compiler.stage));
-        build.info(&format!("Building stage{} test artifacts ({} -> {})", compiler.stage,
+        let _folder = builder.fold_output(|| format!("stage{}-test", compiler.stage));
+        builder.info(&format!("Building stage{} test artifacts ({} -> {})", compiler.stage,
                 &compiler.host, target));
-        run_cargo(build,
+        run_cargo(builder,
                   &mut cargo,
-                  &libtest_stamp(build, compiler, target),
+                  &libtest_stamp(builder, compiler, target),
                   false);
 
         builder.ensure(TestLink {
-            compiler: builder.compiler(compiler.stage, build.build),
+            compiler: builder.compiler(compiler.stage, builder.config.build),
             target_compiler: compiler,
             target,
         });
@@ -393,7 +390,7 @@ fn run(self, builder: &Builder) {
 }
 
 /// Same as `std_cargo`, but for libtest
-pub fn test_cargo(build: &Build,
+pub fn test_cargo(builder: &Builder,
                   _compiler: &Compiler,
                   _target: Interned<String>,
                   cargo: &mut Command) {
@@ -401,7 +398,7 @@ pub fn test_cargo(build: &Build,
         cargo.env("MACOSX_DEPLOYMENT_TARGET", target);
     }
     cargo.arg("--manifest-path")
-        .arg(build.src.join("src/libtest/Cargo.toml"));
+        .arg(builder.src.join("src/libtest/Cargo.toml"));
 }
 
 #[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
@@ -420,18 +417,17 @@ fn should_run(run: ShouldRun) -> ShouldRun {
 
     /// Same as `std_link`, only for libtest
     fn run(self, builder: &Builder) {
-        let build = builder.build;
         let compiler = self.compiler;
         let target_compiler = self.target_compiler;
         let target = self.target;
-        build.info(&format!("Copying stage{} test from stage{} ({} -> {} / {})",
+        builder.info(&format!("Copying stage{} test from stage{} ({} -> {} / {})",
                 target_compiler.stage,
                 compiler.stage,
                 &compiler.host,
                 target_compiler.host,
                 target));
-        add_to_sysroot(&build, &builder.sysroot_libdir(target_compiler, target),
-                    &libtest_stamp(build, compiler, target));
+        add_to_sysroot(builder, &builder.sysroot_libdir(target_compiler, target),
+                    &libtest_stamp(builder, compiler, target));
         builder.ensure(tool::CleanTools {
             compiler: target_compiler,
             target,
@@ -468,20 +464,20 @@ fn make_run(run: RunConfig) {
     /// the `compiler` targeting the `target` architecture. The artifacts
     /// created will also be linked into the sysroot directory.
     fn run(self, builder: &Builder) {
-        let build = builder.build;
         let compiler = self.compiler;
         let target = self.target;
 
         builder.ensure(Test { compiler, target });
 
-        if build.force_use_stage1(compiler, target) {
+        if builder.force_use_stage1(compiler, target) {
             builder.ensure(Rustc {
-                compiler: builder.compiler(1, build.build),
+                compiler: builder.compiler(1, builder.config.build),
                 target,
             });
-            build.info(&format!("Uplifting stage1 rustc ({} -> {})", &build.build, target));
+            builder.info(&format!("Uplifting stage1 rustc ({} -> {})",
+                builder.config.build, target));
             builder.ensure(RustcLink {
-                compiler: builder.compiler(1, build.build),
+                compiler: builder.compiler(1, builder.config.build),
                 target_compiler: compiler,
                 target,
             });
@@ -490,71 +486,71 @@ fn run(self, builder: &Builder) {
 
         // Ensure that build scripts have a std to link against.
         builder.ensure(Std {
-            compiler: builder.compiler(self.compiler.stage, build.build),
-            target: build.build,
+            compiler: builder.compiler(self.compiler.stage, builder.config.build),
+            target: builder.config.build,
         });
         let cargo_out = builder.cargo_out(compiler, Mode::Librustc, target);
-        build.clear_if_dirty(&cargo_out, &libstd_stamp(build, compiler, target));
-        build.clear_if_dirty(&cargo_out, &libtest_stamp(build, compiler, target));
+        builder.clear_if_dirty(&cargo_out, &libstd_stamp(builder, compiler, target));
+        builder.clear_if_dirty(&cargo_out, &libtest_stamp(builder, compiler, target));
 
         let mut cargo = builder.cargo(compiler, Mode::Librustc, target, "build");
-        rustc_cargo(build, &mut cargo);
+        rustc_cargo(builder, &mut cargo);
 
-        let _folder = build.fold_output(|| format!("stage{}-rustc", compiler.stage));
-        build.info(&format!("Building stage{} compiler artifacts ({} -> {})",
+        let _folder = builder.fold_output(|| format!("stage{}-rustc", compiler.stage));
+        builder.info(&format!("Building stage{} compiler artifacts ({} -> {})",
                  compiler.stage, &compiler.host, target));
-        run_cargo(build,
+        run_cargo(builder,
                   &mut cargo,
-                  &librustc_stamp(build, compiler, target),
+                  &librustc_stamp(builder, compiler, target),
                   false);
 
         builder.ensure(RustcLink {
-            compiler: builder.compiler(compiler.stage, build.build),
+            compiler: builder.compiler(compiler.stage, builder.config.build),
             target_compiler: compiler,
             target,
         });
     }
 }
 
-pub fn rustc_cargo(build: &Build, cargo: &mut Command) {
-    cargo.arg("--features").arg(build.rustc_features())
+pub fn rustc_cargo(builder: &Builder, cargo: &mut Command) {
+    cargo.arg("--features").arg(builder.rustc_features())
          .arg("--manifest-path")
-         .arg(build.src.join("src/rustc/Cargo.toml"));
-    rustc_cargo_env(build, cargo);
+         .arg(builder.src.join("src/rustc/Cargo.toml"));
+    rustc_cargo_env(builder, cargo);
 }
 
-fn rustc_cargo_env(build: &Build, cargo: &mut Command) {
+fn rustc_cargo_env(builder: &Builder, cargo: &mut Command) {
     // Set some configuration variables picked up by build scripts and
     // the compiler alike
-    cargo.env("CFG_RELEASE", build.rust_release())
-         .env("CFG_RELEASE_CHANNEL", &build.config.channel)
-         .env("CFG_VERSION", build.rust_version())
-         .env("CFG_PREFIX", build.config.prefix.clone().unwrap_or_default())
-         .env("CFG_CODEGEN_BACKENDS_DIR", &build.config.rust_codegen_backends_dir);
+    cargo.env("CFG_RELEASE", builder.rust_release())
+         .env("CFG_RELEASE_CHANNEL", &builder.config.channel)
+         .env("CFG_VERSION", builder.rust_version())
+         .env("CFG_PREFIX", builder.config.prefix.clone().unwrap_or_default())
+         .env("CFG_CODEGEN_BACKENDS_DIR", &builder.config.rust_codegen_backends_dir);
 
-    let libdir_relative = build.config.libdir_relative().unwrap_or(Path::new("lib"));
+    let libdir_relative = builder.config.libdir_relative().unwrap_or(Path::new("lib"));
     cargo.env("CFG_LIBDIR_RELATIVE", libdir_relative);
 
     // If we're not building a compiler with debugging information then remove
     // these two env vars which would be set otherwise.
-    if build.config.rust_debuginfo_only_std {
+    if builder.config.rust_debuginfo_only_std {
         cargo.env_remove("RUSTC_DEBUGINFO");
         cargo.env_remove("RUSTC_DEBUGINFO_LINES");
     }
 
-    if let Some(ref ver_date) = build.rust_info.commit_date() {
+    if let Some(ref ver_date) = builder.rust_info.commit_date() {
         cargo.env("CFG_VER_DATE", ver_date);
     }
-    if let Some(ref ver_hash) = build.rust_info.sha() {
+    if let Some(ref ver_hash) = builder.rust_info.sha() {
         cargo.env("CFG_VER_HASH", ver_hash);
     }
-    if !build.unstable_features() {
+    if !builder.unstable_features() {
         cargo.env("CFG_DISABLE_UNSTABLE_FEATURES", "1");
     }
-    if let Some(ref s) = build.config.rustc_default_linker {
+    if let Some(ref s) = builder.config.rustc_default_linker {
         cargo.env("CFG_DEFAULT_LINKER", s);
     }
-    if build.config.rustc_parallel_queries {
+    if builder.config.rustc_parallel_queries {
         cargo.env("RUSTC_PARALLEL_QUERIES", "1");
     }
 }
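
(Aside: the CFG_* variables above form a small environment contract between rustbuild and the crates it compiles; build scripts and the compiler read them back at build time. A minimal sketch of a hypothetical consumer follows; only the variable names come from the hunk above, while the build script itself and RUSTBUILD_VERSION are invented for illustration.)

    // build.rs (hypothetical consumer; illustration only)
    use std::env;

    fn main() {
        // CFG_RELEASE / CFG_VERSION are injected by rustc_cargo_env above;
        // fall back to a placeholder when built outside rustbuild.
        let release = env::var("CFG_RELEASE").unwrap_or_else(|_| "unknown".to_string());
        let version = env::var("CFG_VERSION").unwrap_or_else(|_| release.clone());
        println!("cargo:rustc-env=RUSTBUILD_VERSION={}", version);
        println!("cargo:rerun-if-env-changed=CFG_VERSION");
    }
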
@@ -575,18 +571,17 @@ fn should_run(run: ShouldRun) -> ShouldRun {
 
     /// Same as `std_link`, only for librustc
     fn run(self, builder: &Builder) {
-        let build = builder.build;
         let compiler = self.compiler;
         let target_compiler = self.target_compiler;
         let target = self.target;
-        build.info(&format!("Copying stage{} rustc from stage{} ({} -> {} / {})",
+        builder.info(&format!("Copying stage{} rustc from stage{} ({} -> {} / {})",
                  target_compiler.stage,
                  compiler.stage,
                  &compiler.host,
                  target_compiler.host,
                  target));
-        add_to_sysroot(&build, &builder.sysroot_libdir(target_compiler, target),
-                       &librustc_stamp(build, compiler, target));
+        add_to_sysroot(builder, &builder.sysroot_libdir(target_compiler, target),
+                       &librustc_stamp(builder, compiler, target));
         builder.ensure(tool::CleanTools {
             compiler: target_compiler,
             target,
@@ -624,15 +619,14 @@ fn make_run(run: RunConfig) {
     }
 
     fn run(self, builder: &Builder) {
-        let build = builder.build;
         let compiler = self.compiler;
         let target = self.target;
 
         builder.ensure(Rustc { compiler, target });
 
-        if build.force_use_stage1(compiler, target) {
+        if builder.force_use_stage1(compiler, target) {
             builder.ensure(CodegenBackend {
-                compiler: builder.compiler(1, build.build),
+                compiler: builder.compiler(1, builder.config.build),
                 target,
                 backend: self.backend,
             });
@@ -640,10 +634,10 @@ fn run(self, builder: &Builder) {
         }
 
         let mut cargo = builder.cargo(compiler, Mode::Librustc, target, "build");
-        let mut features = build.rustc_features().to_string();
+        let mut features = builder.rustc_features().to_string();
         cargo.arg("--manifest-path")
-            .arg(build.src.join("src/librustc_trans/Cargo.toml"));
-        rustc_cargo_env(build, &mut cargo);
+            .arg(builder.src.join("src/librustc_trans/Cargo.toml"));
+        rustc_cargo_env(builder, &mut cargo);
 
         match &*self.backend {
             "llvm" | "emscripten" => {
@@ -658,45 +652,45 @@ fn run(self, builder: &Builder) {
                     features.push_str(" emscripten");
                 }
 
-                build.info(&format!("Building stage{} codegen artifacts ({} -> {}, {})",
+                builder.info(&format!("Building stage{} codegen artifacts ({} -> {}, {})",
                          compiler.stage, &compiler.host, target, self.backend));
 
                 // Pass down configuration from the LLVM build into the build of
                 // librustc_llvm and librustc_trans.
-                if build.is_rust_llvm(target) {
+                if builder.is_rust_llvm(target) {
                     cargo.env("LLVM_RUSTLLVM", "1");
                 }
                 cargo.env("LLVM_CONFIG", &llvm_config);
                 if self.backend != "emscripten" {
-                    let target_config = build.config.target_config.get(&target);
+                    let target_config = builder.config.target_config.get(&target);
                     if let Some(s) = target_config.and_then(|c| c.llvm_config.as_ref()) {
                         cargo.env("CFG_LLVM_ROOT", s);
                     }
                 }
                 // Building with a static libstdc++ is only supported on linux right now,
                 // not for MSVC or macOS
-                if build.config.llvm_static_stdcpp &&
+                if builder.config.llvm_static_stdcpp &&
                    !target.contains("freebsd") &&
                    !target.contains("windows") &&
                    !target.contains("apple") {
-                    let file = compiler_file(build,
-                                             build.cxx(target).unwrap(),
+                    let file = compiler_file(builder,
+                                             builder.cxx(target).unwrap(),
                                              target,
                                              "libstdc++.a");
                     cargo.env("LLVM_STATIC_STDCPP", file);
                 }
-                if build.config.llvm_link_shared {
+                if builder.config.llvm_link_shared {
                     cargo.env("LLVM_LINK_SHARED", "1");
                 }
             }
             _ => panic!("unknown backend: {}", self.backend),
         }
 
-        let tmp_stamp = build.cargo_out(compiler, Mode::Librustc, target)
+        let tmp_stamp = builder.cargo_out(compiler, Mode::Librustc, target)
             .join(".tmp.stamp");
 
-        let _folder = build.fold_output(|| format!("stage{}-rustc_trans", compiler.stage));
-        let files = run_cargo(build,
+        let _folder = builder.fold_output(|| format!("stage{}-rustc_trans", compiler.stage));
+        let files = run_cargo(builder,
                               cargo.arg("--features").arg(features),
                               &tmp_stamp,
                               false);
@@ -717,7 +711,7 @@ fn run(self, builder: &Builder) {
                    codegen_backend.display(),
                    f.display());
         }
-        let stamp = codegen_backend_stamp(build, compiler, target, self.backend);
+        let stamp = codegen_backend_stamp(builder, compiler, target, self.backend);
         let codegen_backend = codegen_backend.to_str().unwrap();
         t!(t!(File::create(&stamp)).write_all(codegen_backend.as_bytes()));
     }
@@ -732,7 +726,6 @@ fn run(self, builder: &Builder) {
 fn copy_codegen_backends_to_sysroot(builder: &Builder,
                                     compiler: Compiler,
                                     target_compiler: Compiler) {
-    let build = builder.build;
     let target = target_compiler.host;
 
     // Note that this step is different than all the other `*Link` steps in
@@ -751,7 +744,7 @@ fn copy_codegen_backends_to_sysroot(builder: &Builder,
     }
 
     for backend in builder.config.rust_codegen_backends.iter() {
-        let stamp = codegen_backend_stamp(build, compiler, target, *backend);
+        let stamp = codegen_backend_stamp(builder, compiler, target, *backend);
         let mut dylib = String::new();
         t!(t!(File::open(&stamp)).read_to_string(&mut dylib));
         let file = Path::new(&dylib);
@@ -765,7 +758,7 @@ fn copy_codegen_backends_to_sysroot(builder: &Builder,
                     backend,
                     &filename[dot..])
         };
-        build.copy(&file, &dst.join(target_filename));
+        builder.copy(&file, &dst.join(target_filename));
     }
 }
 
@@ -786,36 +779,36 @@ fn copy_lld_to_sysroot(builder: &Builder,
 
 /// Cargo's output path for the standard library in a given stage, compiled
 /// by a particular compiler for the specified target.
-pub fn libstd_stamp(build: &Build, compiler: Compiler, target: Interned<String>) -> PathBuf {
-    build.cargo_out(compiler, Mode::Libstd, target).join(".libstd.stamp")
+pub fn libstd_stamp(builder: &Builder, compiler: Compiler, target: Interned<String>) -> PathBuf {
+    builder.cargo_out(compiler, Mode::Libstd, target).join(".libstd.stamp")
 }
 
 /// Cargo's output path for libtest in a given stage, compiled by a particular
 /// compiler for the specified target.
-pub fn libtest_stamp(build: &Build, compiler: Compiler, target: Interned<String>) -> PathBuf {
-    build.cargo_out(compiler, Mode::Libtest, target).join(".libtest.stamp")
+pub fn libtest_stamp(builder: &Builder, compiler: Compiler, target: Interned<String>) -> PathBuf {
+    builder.cargo_out(compiler, Mode::Libtest, target).join(".libtest.stamp")
 }
 
 /// Cargo's output path for librustc in a given stage, compiled by a particular
 /// compiler for the specified target.
-pub fn librustc_stamp(build: &Build, compiler: Compiler, target: Interned<String>) -> PathBuf {
-    build.cargo_out(compiler, Mode::Librustc, target).join(".librustc.stamp")
+pub fn librustc_stamp(builder: &Builder, compiler: Compiler, target: Interned<String>) -> PathBuf {
+    builder.cargo_out(compiler, Mode::Librustc, target).join(".librustc.stamp")
 }
 
-fn codegen_backend_stamp(build: &Build,
+fn codegen_backend_stamp(builder: &Builder,
                          compiler: Compiler,
                          target: Interned<String>,
                          backend: Interned<String>) -> PathBuf {
-    build.cargo_out(compiler, Mode::Librustc, target)
+    builder.cargo_out(compiler, Mode::Librustc, target)
         .join(format!(".librustc_trans-{}.stamp", backend))
 }
 
-fn compiler_file(build: &Build,
+fn compiler_file(builder: &Builder,
                  compiler: &Path,
                  target: Interned<String>,
                  file: &str) -> PathBuf {
     let mut cmd = Command::new(compiler);
-    cmd.args(build.cflags(target));
+    cmd.args(builder.cflags(target));
     cmd.arg(format!("-print-file-name={}", file));
     let out = output(&mut cmd);
     PathBuf::from(out.trim())
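
(With GCC-style drivers, `-print-file-name=NAME` prints the resolved path when NAME exists in the compiler's search directories and echoes NAME back unchanged otherwise, so the caller above only gets a usable path when a static libstdc++ is actually present. A standalone sketch of the same probe, assuming a `c++` driver on PATH; illustrative, not part of the patch.)

    use std::path::PathBuf;
    use std::process::Command;

    // Ask the C++ driver where it would find libstdc++.a (sketch only).
    fn probe_static_libstdcpp() -> Option<PathBuf> {
        let out = Command::new("c++")
            .arg("-print-file-name=libstdc++.a")
            .output()
            .ok()?;
        let path = PathBuf::from(String::from_utf8_lossy(&out.stdout).trim());
        // The driver echoes the bare name back when the file is not found.
        if path.is_absolute() { Some(path) } else { None }
    }

    fn main() {
        println!("{:?}", probe_static_libstdcpp());
    }
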
@@ -840,12 +833,11 @@ fn should_run(run: ShouldRun) -> ShouldRun {
     /// thinks it is by default, but it's the same as the default for stages
     /// 1-3.
     fn run(self, builder: &Builder) -> Interned<PathBuf> {
-        let build = builder.build;
         let compiler = self.compiler;
         let sysroot = if compiler.stage == 0 {
-            build.out.join(&compiler.host).join("stage0-sysroot")
+            builder.out.join(&compiler.host).join("stage0-sysroot")
         } else {
-            build.out.join(&compiler.host).join(format!("stage{}", compiler.stage))
+            builder.out.join(&compiler.host).join(format!("stage{}", compiler.stage))
         };
         let _ = fs::remove_dir_all(&sysroot);
         t!(fs::create_dir_all(&sysroot));
@@ -872,14 +864,13 @@ fn should_run(run: ShouldRun) -> ShouldRun {
     /// Prepare a new compiler from the artifacts in `stage`
     ///
     /// This will assemble a compiler in `build/$host/stage$stage`. The compiler
-    /// must have been previously produced by the `stage - 1` build.build
+    /// must have been previously produced by the `stage - 1` builder.build
     /// compiler.
     fn run(self, builder: &Builder) -> Compiler {
-        let build = builder.build;
         let target_compiler = self.target_compiler;
 
         if target_compiler.stage == 0 {
-            assert_eq!(build.build, target_compiler.host,
+            assert_eq!(builder.config.build, target_compiler.host,
                 "Cannot obtain compiler for non-native build triple at stage 0");
             // The stage 0 compiler for the build triple is always pre-built.
             return target_compiler;
@@ -902,14 +893,14 @@ fn run(self, builder: &Builder) -> Compiler {
         // FIXME: It may be faster if we build just a stage 1 compiler and then
         //        use that to bootstrap this compiler forward.
         let build_compiler =
-            builder.compiler(target_compiler.stage - 1, build.build);
+            builder.compiler(target_compiler.stage - 1, builder.config.build);
 
         // Build the libraries for this compiler to link to (i.e., the libraries
         // it uses at runtime). NOTE: Crates the target compiler compiles don't
         // link to these. (FIXME: Is that correct? It seems to be correct most
         // of the time but I think we do link to these for stage2/bin compilers
         // when not performing a full bootstrap).
-        if builder.build.config.keep_stage.map_or(false, |s| target_compiler.stage <= s) {
+        if builder.config.keep_stage.map_or(false, |s| target_compiler.stage <= s) {
             builder.verbose("skipping compilation of compiler due to --keep-stage");
             let compiler = build_compiler;
             for stage in 0..min(target_compiler.stage, builder.config.keep_stage.unwrap()) {
@@ -924,7 +915,7 @@ fn run(self, builder: &Builder) -> Compiler {
                 compiler: build_compiler,
                 target: target_compiler.host,
             });
-            for &backend in build.config.rust_codegen_backends.iter() {
+            for &backend in builder.config.rust_codegen_backends.iter() {
                 builder.ensure(CodegenBackend {
                     compiler: build_compiler,
                     target: target_compiler.host,
@@ -933,7 +924,7 @@ fn run(self, builder: &Builder) -> Compiler {
             }
         }
 
-        let lld_install = if build.config.lld_enabled {
+        let lld_install = if builder.config.lld_enabled {
             Some(builder.ensure(native::Lld {
                 target: target_compiler.host,
             }))
@@ -943,7 +934,7 @@ fn run(self, builder: &Builder) -> Compiler {
 
         let stage = target_compiler.stage;
         let host = target_compiler.host;
-        build.info(&format!("Assembling stage{} compiler ({})", stage, host));
+        builder.info(&format!("Assembling stage{} compiler ({})", stage, host));
 
         // Link in all dylibs to the libdir
         let sysroot = builder.sysroot(target_compiler);
@@ -965,7 +956,7 @@ fn run(self, builder: &Builder) -> Compiler {
         }
 
         // Link the compiler binary itself into place
-        let out_dir = build.cargo_out(build_compiler, Mode::Librustc, host);
+        let out_dir = builder.cargo_out(build_compiler, Mode::Librustc, host);
         let rustc = out_dir.join(exe("rustc", &*host));
         let bindir = sysroot.join("bin");
         t!(fs::create_dir_all(&bindir));
@@ -981,10 +972,10 @@ fn run(self, builder: &Builder) -> Compiler {
 ///
 /// For a particular stage this will link the file listed in `stamp` into the
 /// `sysroot_dst` provided.
-pub fn add_to_sysroot(build: &Build, sysroot_dst: &Path, stamp: &Path) {
+pub fn add_to_sysroot(builder: &Builder, sysroot_dst: &Path, stamp: &Path) {
     t!(fs::create_dir_all(&sysroot_dst));
-    for path in build.read_stamp_file(stamp) {
-        build.copy(&path, &sysroot_dst.join(path.file_name().unwrap()));
+    for path in builder.read_stamp_file(stamp) {
+        builder.copy(&path, &sysroot_dst.join(path.file_name().unwrap()));
     }
 }
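
(The stamp indirection keeps this step decoupled from Cargo: whatever `run_cargo` recorded is exactly what gets linked into the sysroot. A std-only sketch of the same pattern follows, assuming for illustration a stamp that lists one artifact path per line; the real parsing lives in `read_stamp_file`, which is not part of this hunk.)

    use std::fs;
    use std::io;
    use std::path::Path;

    // Copy every artifact listed in `stamp` into `sysroot_dst` (illustrative sketch).
    fn add_to_sysroot_sketch(sysroot_dst: &Path, stamp: &Path) -> io::Result<()> {
        fs::create_dir_all(sysroot_dst)?;
        let contents = fs::read_to_string(stamp)?;
        for line in contents.lines().filter(|l| !l.trim().is_empty()) {
            let path = Path::new(line.trim());
            if let Some(name) = path.file_name() {
                fs::copy(path, sysroot_dst.join(name))?;
            }
        }
        Ok(())
    }
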
 
@@ -1011,10 +1002,10 @@ fn stderr_isatty() -> bool {
     }
 }
 
-pub fn run_cargo(build: &Build, cargo: &mut Command, stamp: &Path, is_check: bool)
+pub fn run_cargo(builder: &Builder, cargo: &mut Command, stamp: &Path, is_check: bool)
     -> Vec<PathBuf>
 {
-    if build.config.dry_run {
+    if builder.config.dry_run {
         return Vec::new();
     }
 
@@ -1032,7 +1023,7 @@ pub fn run_cargo(build: &Build, cargo: &mut Command, stamp: &Path, is_check: boo
     // files we need to probe for later.
     let mut deps = Vec::new();
     let mut toplevel = Vec::new();
-    let ok = stream_cargo(build, cargo, &mut |msg| {
+    let ok = stream_cargo(builder, cargo, &mut |msg| {
         let filenames = match msg {
             CargoMessage::CompilerArtifact { filenames, .. } => filenames,
             _ => return,
@@ -1141,25 +1132,25 @@ pub fn run_cargo(build: &Build, cargo: &mut Command, stamp: &Path, is_check: boo
     let max = max.unwrap();
     let max_path = max_path.unwrap();
     if stamp_contents == new_contents && max <= stamp_mtime {
-        build.verbose(&format!("not updating {:?}; contents equal and {:?} <= {:?}",
+        builder.verbose(&format!("not updating {:?}; contents equal and {:?} <= {:?}",
                 stamp, max, stamp_mtime));
         return deps
     }
     if max > stamp_mtime {
-        build.verbose(&format!("updating {:?} as {:?} changed", stamp, max_path));
+        builder.verbose(&format!("updating {:?} as {:?} changed", stamp, max_path));
     } else {
-        build.verbose(&format!("updating {:?} as deps changed", stamp));
+        builder.verbose(&format!("updating {:?} as deps changed", stamp));
     }
     t!(t!(File::create(stamp)).write_all(&new_contents));
     deps
 }
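
(The freshness rule encoded above: the stamp is rewritten unless its recorded contents are identical and no tracked artifact is newer than the stamp itself. Restated as a small predicate, with SystemTime standing in for whatever timestamp type the real code uses:)

    use std::time::SystemTime;

    // True when run_cargo needs to rewrite the stamp file (illustrative restatement).
    fn stamp_needs_update(stamp_contents: &[u8],
                          new_contents: &[u8],
                          newest_artifact: SystemTime,
                          stamp_mtime: SystemTime) -> bool {
        !(stamp_contents == new_contents && newest_artifact <= stamp_mtime)
    }
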
 
 pub fn stream_cargo(
-    build: &Build,
+    builder: &Builder,
     cargo: &mut Command,
     cb: &mut FnMut(CargoMessage),
 ) -> bool {
-    if build.config.dry_run {
+    if builder.config.dry_run {
         return true;
     }
     // Instruct Cargo to give us json messages on stdout, critically leaving
@@ -1167,7 +1158,7 @@ pub fn stream_cargo(
     cargo.arg("--message-format").arg("json")
          .stdout(Stdio::piped());
 
-    if stderr_isatty() && build.ci_env == CiEnv::None &&
+    if stderr_isatty() && builder.ci_env == CiEnv::None &&
         // if the terminal is reported as dumb, then we don't want to enable color for rustc
         env::var_os("TERM").map(|t| t != *"dumb").unwrap_or(true) {
         // since we pass message-format=json to cargo, we need to tell the rustc
@@ -1176,7 +1167,7 @@ pub fn stream_cargo(
         cargo.env("RUSTC_COLOR", "1");
     }
 
-    build.verbose(&format!("running: {:?}", cargo));
+    builder.verbose(&format!("running: {:?}", cargo));
     let mut child = match cargo.spawn() {
         Ok(child) => child,
         Err(e) => panic!("failed to execute command: {:?}\nerror: {}", cargo, e),
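
(For context on what the callback sees: with `--message-format json`, Cargo prints one JSON object per line on stdout, and compiler-artifact messages carry a `filenames` array, which is what `run_cargo` collects above. A small decoding sketch follows; serde, serde_derive and serde_json are assumed to be available here, and the struct is illustrative rather than the crate's actual `CargoMessage` definition.)

    #[macro_use]
    extern crate serde_derive;
    extern crate serde_json;

    #[derive(Deserialize)]
    struct ArtifactMessage {
        reason: String,
        #[serde(default)]
        filenames: Vec<String>,
    }

    // Extract artifact paths from one line of Cargo's JSON output.
    fn filenames_from_line(line: &str) -> Vec<String> {
        match serde_json::from_str::<ArtifactMessage>(line) {
            Ok(msg) if msg.reason == "compiler-artifact" => msg.filenames,
            _ => Vec::new(),
        }
    }
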
index 239316d45c49c4067cd3ab74074452e296644f7b..1b4b2c5fb2a54cab90d9bf633b6dfabd4f6b8d78 100644 (file)
@@ -94,6 +94,7 @@ pub struct Config {
     pub rust_debuginfo: bool,
     pub rust_debuginfo_lines: bool,
     pub rust_debuginfo_only_std: bool,
+    pub rust_debuginfo_tools: bool,
     pub rust_rpath: bool,
     pub rustc_parallel_queries: bool,
     pub rustc_default_linker: Option<String>,
@@ -282,6 +283,7 @@ struct Rust {
     debuginfo: Option<bool>,
     debuginfo_lines: Option<bool>,
     debuginfo_only_std: Option<bool>,
+    debuginfo_tools: Option<bool>,
     experimental_parallel_queries: Option<bool>,
     debug_jemalloc: Option<bool>,
     use_jemalloc: Option<bool>,
@@ -462,6 +464,7 @@ pub fn parse(args: &[String]) -> Config {
         let mut llvm_assertions = None;
         let mut debuginfo_lines = None;
         let mut debuginfo_only_std = None;
+        let mut debuginfo_tools = None;
         let mut debug = None;
         let mut debug_jemalloc = None;
         let mut debuginfo = None;
@@ -499,6 +502,7 @@ pub fn parse(args: &[String]) -> Config {
             debuginfo = rust.debuginfo;
             debuginfo_lines = rust.debuginfo_lines;
             debuginfo_only_std = rust.debuginfo_only_std;
+            debuginfo_tools = rust.debuginfo_tools;
             optimize = rust.optimize;
             ignore_git = rust.ignore_git;
             debug_jemalloc = rust.debug_jemalloc;
@@ -582,6 +586,7 @@ pub fn parse(args: &[String]) -> Config {
         };
         config.rust_debuginfo_lines = debuginfo_lines.unwrap_or(default);
         config.rust_debuginfo_only_std = debuginfo_only_std.unwrap_or(default);
+        config.rust_debuginfo_tools = debuginfo_tools.unwrap_or(false);
 
         let default = debug == Some(true);
         config.debug_jemalloc = debug_jemalloc.unwrap_or(default);
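
(Note the asymmetry in the new option's default: `debuginfo_lines` and `debuginfo_only_std` fall back to the shared `default` computed earlier in `parse` (not visible in this hunk), while `debuginfo_tools` falls back to `false`, so tool debuginfo stays opt-in. Condensed into a sketch, with the standalone struct and function invented for illustration:)

    struct DebuginfoToggles {
        lines: bool,
        only_std: bool,
        tools: bool,
    }

    // Resolve the TOML options against the shared default (illustration only).
    fn resolve(default: bool,
               lines: Option<bool>,
               only_std: Option<bool>,
               tools: Option<bool>) -> DebuginfoToggles {
        DebuginfoToggles {
            lines: lines.unwrap_or(default),
            only_std: only_std.unwrap_or(default),
            tools: tools.unwrap_or(false), // opt-in regardless of `default`
        }
    }
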
index b06968d313ba201b8ed8da8318c323c268088a74..a0123da6d8ff9ba077663f4f10334a03c271baef 100755 (executable)
@@ -79,6 +79,7 @@ o("llvm-release-debuginfo", "llvm.release-debuginfo", "build LLVM with debugger
 o("debuginfo", "rust.debuginfo", "build with debugger metadata")
 o("debuginfo-lines", "rust.debuginfo-lines", "build with line number debugger metadata")
 o("debuginfo-only-std", "rust.debuginfo-only-std", "build only libstd with debugging information")
+o("debuginfo-tools", "rust.debuginfo-tools", "build extended tools with debugging information")
 o("debug-jemalloc", "rust.debug-jemalloc", "build jemalloc with --enable-debug --enable-fill")
 v("save-toolstates", "rust.save-toolstates", "save build and test status of external tools into this file")
 
index e1f5d34bf67235c5e2a58225e4e4903f5b6b55a4..e21a59390b7b6ff6b091d9d07efd62420c8e2898 100644 (file)
@@ -26,7 +26,7 @@
 
 use build_helper::output;
 
-use {Build, Compiler, Mode};
+use {Compiler, Mode};
 use channel;
 use util::{libdir, is_dylib, exe};
 use builder::{Builder, RunConfig, ShouldRun, Step};
 use cache::{INTERNER, Interned};
 use time;
 
-pub fn pkgname(build: &Build, component: &str) -> String {
+pub fn pkgname(builder: &Builder, component: &str) -> String {
     if component == "cargo" {
-        format!("{}-{}", component, build.cargo_package_vers())
+        format!("{}-{}", component, builder.cargo_package_vers())
     } else if component == "rls" {
-        format!("{}-{}", component, build.rls_package_vers())
+        format!("{}-{}", component, builder.rls_package_vers())
     } else if component == "rustfmt" {
-        format!("{}-{}", component, build.rustfmt_package_vers())
+        format!("{}-{}", component, builder.rustfmt_package_vers())
     } else {
         assert!(component.starts_with("rust"));
-        format!("{}-{}", component, build.rust_package_vers())
+        format!("{}-{}", component, builder.rust_package_vers())
     }
 }
 
-fn distdir(build: &Build) -> PathBuf {
-    build.out.join("dist")
+fn distdir(builder: &Builder) -> PathBuf {
+    builder.out.join("dist")
 }
 
-pub fn tmpdir(build: &Build) -> PathBuf {
-    build.out.join("tmp/dist")
+pub fn tmpdir(builder: &Builder) -> PathBuf {
+    builder.out.join("tmp/dist")
 }
 
 fn rust_installer(builder: &Builder) -> Command {
@@ -84,26 +84,25 @@ fn make_run(run: RunConfig) {
 
     /// Builds the `rust-docs` installer component.
     fn run(self, builder: &Builder) -> PathBuf {
-        let build = builder.build;
         let host = self.host;
 
-        let name = pkgname(build, "rust-docs");
+        let name = pkgname(builder, "rust-docs");
 
-        build.info(&format!("Dist docs ({})", host));
-        if !build.config.docs {
-            build.info(&format!("\tskipping - docs disabled"));
-            return distdir(build).join(format!("{}-{}.tar.gz", name, host));
+        builder.info(&format!("Dist docs ({})", host));
+        if !builder.config.docs {
+            builder.info(&format!("\tskipping - docs disabled"));
+            return distdir(builder).join(format!("{}-{}.tar.gz", name, host));
         }
 
         builder.default_doc(None);
 
-        let image = tmpdir(build).join(format!("{}-{}-image", name, host));
+        let image = tmpdir(builder).join(format!("{}-{}-image", name, host));
         let _ = fs::remove_dir_all(&image);
 
         let dst = image.join("share/doc/rust/html");
         t!(fs::create_dir_all(&dst));
-        let src = build.doc_out(host);
-        build.cp_r(&src, &dst);
+        let src = builder.doc_out(host);
+        builder.cp_r(&src, &dst);
 
         let mut cmd = rust_installer(builder);
         cmd.arg("generate")
@@ -111,16 +110,16 @@ fn run(self, builder: &Builder) -> PathBuf {
            .arg("--rel-manifest-dir=rustlib")
            .arg("--success-message=Rust-documentation-is-installed.")
            .arg("--image-dir").arg(&image)
-           .arg("--work-dir").arg(&tmpdir(build))
-           .arg("--output-dir").arg(&distdir(build))
+           .arg("--work-dir").arg(&tmpdir(builder))
+           .arg("--output-dir").arg(&distdir(builder))
            .arg(format!("--package-name={}-{}", name, host))
            .arg("--component-name=rust-docs")
            .arg("--legacy-manifest-dirs=rustlib,cargo")
            .arg("--bulk-dirs=share/doc/rust/html");
-        build.run(&mut cmd);
-        build.remove_dir(&image);
+        builder.run(&mut cmd);
+        builder.remove_dir(&image);
 
-        distdir(build).join(format!("{}-{}.tar.gz", name, host))
+        distdir(builder).join(format!("{}-{}.tar.gz", name, host))
     }
 }
 
@@ -147,26 +146,25 @@ fn make_run(run: RunConfig) {
 
     /// Builds the `rustc-docs` installer component.
     fn run(self, builder: &Builder) -> PathBuf {
-        let build = builder.build;
         let host = self.host;
 
-        let name = pkgname(build, "rustc-docs");
+        let name = pkgname(builder, "rustc-docs");
 
-        build.info(&format!("Dist compiler docs ({})", host));
-        if !build.config.compiler_docs {
-            build.info(&format!("\tskipping - compiler docs disabled"));
-            return distdir(build).join(format!("{}-{}.tar.gz", name, host));
+        builder.info(&format!("Dist compiler docs ({})", host));
+        if !builder.config.compiler_docs {
+            builder.info(&format!("\tskipping - compiler docs disabled"));
+            return distdir(builder).join(format!("{}-{}.tar.gz", name, host));
         }
 
         builder.default_doc(None);
 
-        let image = tmpdir(build).join(format!("{}-{}-image", name, host));
+        let image = tmpdir(builder).join(format!("{}-{}-image", name, host));
         let _ = fs::remove_dir_all(&image);
 
         let dst = image.join("share/doc/rust/html");
         t!(fs::create_dir_all(&dst));
-        let src = build.compiler_doc_out(host);
-        build.cp_r(&src, &dst);
+        let src = builder.compiler_doc_out(host);
+        builder.cp_r(&src, &dst);
 
         let mut cmd = rust_installer(builder);
         cmd.arg("generate")
@@ -174,16 +172,16 @@ fn run(self, builder: &Builder) -> PathBuf {
            .arg("--rel-manifest-dir=rustlib")
            .arg("--success-message=Rustc-documentation-is-installed.")
            .arg("--image-dir").arg(&image)
-           .arg("--work-dir").arg(&tmpdir(build))
-           .arg("--output-dir").arg(&distdir(build))
+           .arg("--work-dir").arg(&tmpdir(builder))
+           .arg("--output-dir").arg(&distdir(builder))
            .arg(format!("--package-name={}-{}", name, host))
            .arg("--component-name=rustc-docs")
            .arg("--legacy-manifest-dirs=rustlib,cargo")
            .arg("--bulk-dirs=share/doc/rust/html");
-        build.run(&mut cmd);
-        build.remove_dir(&image);
+        builder.run(&mut cmd);
+        builder.remove_dir(&image);
 
-        distdir(build).join(format!("{}-{}.tar.gz", name, host))
+        distdir(builder).join(format!("{}-{}.tar.gz", name, host))
     }
 }
 
@@ -207,10 +205,10 @@ fn find_files(files: &[&str], path: &[PathBuf]) -> Vec<PathBuf> {
 }
 
 fn make_win_dist(
-    rust_root: &Path, plat_root: &Path, target_triple: Interned<String>, build: &Build
+    rust_root: &Path, plat_root: &Path, target_triple: Interned<String>, builder: &Builder
 ) {
     //Ask gcc where it keeps its stuff
-    let mut cmd = Command::new(build.cc(target_triple));
+    let mut cmd = Command::new(builder.cc(target_triple));
     cmd.arg("-print-search-dirs");
     let gcc_out = output(&mut cmd);
 
@@ -296,21 +294,21 @@ fn make_win_dist(
     let dist_bin_dir = rust_root.join("bin/");
     fs::create_dir_all(&dist_bin_dir).expect("creating dist_bin_dir failed");
     for src in rustc_dlls {
-        build.copy_to_folder(&src, &dist_bin_dir);
+        builder.copy_to_folder(&src, &dist_bin_dir);
     }
 
     //Copy platform tools to platform-specific bin directory
     let target_bin_dir = plat_root.join("lib").join("rustlib").join(target_triple).join("bin");
     fs::create_dir_all(&target_bin_dir).expect("creating target_bin_dir failed");
     for src in target_tools {
-        build.copy_to_folder(&src, &target_bin_dir);
+        builder.copy_to_folder(&src, &target_bin_dir);
     }
 
     //Copy platform libs to platform-specific lib directory
     let target_lib_dir = plat_root.join("lib").join("rustlib").join(target_triple).join("lib");
     fs::create_dir_all(&target_lib_dir).expect("creating target_lib_dir failed");
     for src in target_libs {
-        build.copy_to_folder(&src, &target_lib_dir);
+        builder.copy_to_folder(&src, &target_lib_dir);
     }
 }
 
@@ -336,16 +334,15 @@ fn make_run(run: RunConfig) {
     /// This contains all the bits and pieces to run the MinGW Windows targets
     /// without any extra installed software (e.g. we bundle gcc, libraries, etc).
     fn run(self, builder: &Builder) -> Option<PathBuf> {
-        let build = builder.build;
         let host = self.host;
 
         if !host.contains("pc-windows-gnu") {
             return None;
         }
 
-        build.info(&format!("Dist mingw ({})", host));
-        let name = pkgname(build, "rust-mingw");
-        let image = tmpdir(build).join(format!("{}-{}-image", name, host));
+        builder.info(&format!("Dist mingw ({})", host));
+        let name = pkgname(builder, "rust-mingw");
+        let image = tmpdir(builder).join(format!("{}-{}-image", name, host));
         let _ = fs::remove_dir_all(&image);
         t!(fs::create_dir_all(&image));
 
@@ -353,7 +350,7 @@ fn run(self, builder: &Builder) -> Option<PathBuf> {
         // thrown away (this contains the runtime DLLs included in the rustc package
         // above) and the second argument is where to place all the MinGW components
         // (which is what we want).
-        make_win_dist(&tmpdir(build), &image, host, &build);
+        make_win_dist(&tmpdir(builder), &image, host, &builder);
 
         let mut cmd = rust_installer(builder);
         cmd.arg("generate")
@@ -361,14 +358,14 @@ fn run(self, builder: &Builder) -> Option<PathBuf> {
            .arg("--rel-manifest-dir=rustlib")
            .arg("--success-message=Rust-MinGW-is-installed.")
            .arg("--image-dir").arg(&image)
-           .arg("--work-dir").arg(&tmpdir(build))
-           .arg("--output-dir").arg(&distdir(build))
+           .arg("--work-dir").arg(&tmpdir(builder))
+           .arg("--output-dir").arg(&distdir(builder))
            .arg(format!("--package-name={}-{}", name, host))
            .arg("--component-name=rust-mingw")
            .arg("--legacy-manifest-dirs=rustlib,cargo");
-        build.run(&mut cmd);
+        builder.run(&mut cmd);
         t!(fs::remove_dir_all(&image));
-        Some(distdir(build).join(format!("{}-{}.tar.gz", name, host)))
+        Some(distdir(builder).join(format!("{}-{}.tar.gz", name, host)))
     }
 }
 
@@ -394,15 +391,14 @@ fn make_run(run: RunConfig) {
 
     /// Creates the `rustc` installer component.
     fn run(self, builder: &Builder) -> PathBuf {
-        let build = builder.build;
         let compiler = self.compiler;
         let host = self.compiler.host;
 
-        build.info(&format!("Dist rustc stage{} ({})", compiler.stage, compiler.host));
-        let name = pkgname(build, "rustc");
-        let image = tmpdir(build).join(format!("{}-{}-image", name, host));
+        builder.info(&format!("Dist rustc stage{} ({})", compiler.stage, compiler.host));
+        let name = pkgname(builder, "rustc");
+        let image = tmpdir(builder).join(format!("{}-{}-image", name, host));
         let _ = fs::remove_dir_all(&image);
-        let overlay = tmpdir(build).join(format!("{}-{}-overlay", name, host));
+        let overlay = tmpdir(builder).join(format!("{}-{}-overlay", name, host));
         let _ = fs::remove_dir_all(&overlay);
 
         // Prepare the rustc "image", what will actually end up getting installed
@@ -411,17 +407,17 @@ fn run(self, builder: &Builder) -> PathBuf {
         // Prepare the overlay which is part of the tarball but won't actually be
         // installed
         let cp = |file: &str| {
-            build.install(&build.src.join(file), &overlay, 0o644);
+            builder.install(&builder.src.join(file), &overlay, 0o644);
         };
         cp("COPYRIGHT");
         cp("LICENSE-APACHE");
         cp("LICENSE-MIT");
         cp("README.md");
         // tiny morsel of metadata is used by rust-packaging
-        let version = build.rust_version();
-        build.create(&overlay.join("version"), &version);
-        if let Some(sha) = build.rust_sha() {
-            build.create(&overlay.join("git-commit-hash"), &sha);
+        let version = builder.rust_version();
+        builder.create(&overlay.join("version"), &version);
+        if let Some(sha) = builder.rust_sha() {
+            builder.create(&overlay.join("git-commit-hash"), &sha);
         }
 
         // On MinGW we've got a few runtime DLL dependencies that we need to
@@ -435,11 +431,11 @@ fn run(self, builder: &Builder) -> PathBuf {
         // install will *also* include the rust-mingw package, which also needs
         // licenses, so to be safe we just include it here in all MinGW packages.
         if host.contains("pc-windows-gnu") {
-            make_win_dist(&image, &tmpdir(build), host, build);
+            make_win_dist(&image, &tmpdir(builder), host, builder);
 
             let dst = image.join("share/doc");
             t!(fs::create_dir_all(&dst));
-            build.cp_r(&build.src.join("src/etc/third-party"), &dst);
+            builder.cp_r(&builder.src.join("src/etc/third-party"), &dst);
         }
 
         // Finally, wrap everything up in a nice tarball!
@@ -449,37 +445,36 @@ fn run(self, builder: &Builder) -> PathBuf {
            .arg("--rel-manifest-dir=rustlib")
            .arg("--success-message=Rust-is-ready-to-roll.")
            .arg("--image-dir").arg(&image)
-           .arg("--work-dir").arg(&tmpdir(build))
-           .arg("--output-dir").arg(&distdir(build))
+           .arg("--work-dir").arg(&tmpdir(builder))
+           .arg("--output-dir").arg(&distdir(builder))
            .arg("--non-installed-overlay").arg(&overlay)
            .arg(format!("--package-name={}-{}", name, host))
            .arg("--component-name=rustc")
            .arg("--legacy-manifest-dirs=rustlib,cargo");
-        build.run(&mut cmd);
-        build.remove_dir(&image);
-        build.remove_dir(&overlay);
+        builder.run(&mut cmd);
+        builder.remove_dir(&image);
+        builder.remove_dir(&overlay);
 
-        return distdir(build).join(format!("{}-{}.tar.gz", name, host));
+        return distdir(builder).join(format!("{}-{}.tar.gz", name, host));
 
         fn prepare_image(builder: &Builder, compiler: Compiler, image: &Path) {
             let host = compiler.host;
-            let build = builder.build;
             let src = builder.sysroot(compiler);
             let libdir = libdir(&host);
 
             // Copy rustc/rustdoc binaries
             t!(fs::create_dir_all(image.join("bin")));
-            build.cp_r(&src.join("bin"), &image.join("bin"));
+            builder.cp_r(&src.join("bin"), &image.join("bin"));
 
-            build.install(&builder.rustdoc(compiler.host), &image.join("bin"), 0o755);
+            builder.install(&builder.rustdoc(compiler.host), &image.join("bin"), 0o755);
 
             // Copy runtime DLLs needed by the compiler
             if libdir != "bin" {
-                for entry in build.read_dir(&src.join(libdir)) {
+                for entry in builder.read_dir(&src.join(libdir)) {
                     let name = entry.file_name();
                     if let Some(s) = name.to_str() {
                         if is_dylib(s) {
-                            build.install(&entry.path(), &image.join(libdir), 0o644);
+                            builder.install(&entry.path(), &image.join(libdir), 0o644);
                         }
                     }
                 }
@@ -490,7 +485,7 @@ fn prepare_image(builder: &Builder, compiler: Compiler, image: &Path) {
             let backends_rel = backends_src.strip_prefix(&src).unwrap();
             let backends_dst = image.join(&backends_rel);
             t!(fs::create_dir_all(&backends_dst));
-            build.cp_r(&backends_src, &backends_dst);
+            builder.cp_r(&backends_src, &backends_dst);
 
             // Copy over lld if it's there
             if builder.config.lld_enabled {
@@ -505,22 +500,22 @@ fn prepare_image(builder: &Builder, compiler: Compiler, image: &Path) {
                     .join("bin")
                     .join(&exe);
                 t!(fs::create_dir_all(&dst.parent().unwrap()));
-                build.copy(&src, &dst);
+                builder.copy(&src, &dst);
             }
 
             // Man pages
             t!(fs::create_dir_all(image.join("share/man/man1")));
-            let man_src = build.src.join("src/doc/man");
+            let man_src = builder.src.join("src/doc/man");
             let man_dst = image.join("share/man/man1");
             let month_year = t!(time::strftime("%B %Y", &time::now()));
             // don't use our `bootstrap::util::{copy, cp_r}`, because those try
             // to hardlink, and we don't want to edit the source templates
-            for file_entry in build.read_dir(&man_src) {
+            for file_entry in builder.read_dir(&man_src) {
                 let page_src = file_entry.path();
                 let page_dst = man_dst.join(file_entry.file_name());
                 t!(fs::copy(&page_src, &page_dst));
                 // template in month/year and version number
-                build.replace_in_file(&page_dst,
+                builder.replace_in_file(&page_dst,
                                 &[("<INSERT DATE HERE>", &month_year),
                                   ("<INSERT VERSION HERE>", channel::CFG_RELEASE_NUM)]);
             }
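
(The man pages are copied verbatim and then have two placeholders substituted in place; `replace_in_file` is a rustbuild helper whose body is not part of this hunk. A minimal std-only equivalent, purely for illustration:)

    use std::fs;
    use std::io;
    use std::path::Path;

    // Read a file, apply literal (pattern, replacement) pairs, write it back.
    fn replace_in_file_sketch(path: &Path, replacements: &[(&str, &str)]) -> io::Result<()> {
        let mut contents = fs::read_to_string(path)?;
        for &(pattern, replacement) in replacements {
            contents = contents.replace(pattern, replacement);
        }
        fs::write(path, contents)
    }
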
@@ -533,7 +528,7 @@ fn prepare_image(builder: &Builder, compiler: Compiler, image: &Path) {
 
             // Misc license info
             let cp = |file: &str| {
-                build.install(&build.src.join(file), &image.join("share/doc/rust"), 0o644);
+                builder.install(&builder.src.join(file), &image.join("share/doc/rust"), 0o644);
             };
             cp("COPYRIGHT");
             cp("LICENSE-APACHE");
@@ -565,17 +560,16 @@ fn make_run(run: RunConfig) {
 
     /// Copies debugger scripts for `target` into the `sysroot` specified.
     fn run(self, builder: &Builder) {
-        let build = builder.build;
         let host = self.host;
         let sysroot = self.sysroot;
         let dst = sysroot.join("lib/rustlib/etc");
         t!(fs::create_dir_all(&dst));
         let cp_debugger_script = |file: &str| {
-            build.install(&build.src.join("src/etc/").join(file), &dst, 0o644);
+            builder.install(&builder.src.join("src/etc/").join(file), &dst, 0o644);
         };
         if host.contains("windows-msvc") {
             // windbg debugger scripts
-            build.install(&build.src.join("src/etc/rust-windbg.cmd"), &sysroot.join("bin"),
+            builder.install(&builder.src.join("src/etc/rust-windbg.cmd"), &sysroot.join("bin"),
                 0o755);
 
             cp_debugger_script("natvis/intrinsic.natvis");
@@ -585,14 +579,14 @@ fn run(self, builder: &Builder) {
             cp_debugger_script("debugger_pretty_printers_common.py");
 
             // gdb debugger scripts
-            build.install(&build.src.join("src/etc/rust-gdb"), &sysroot.join("bin"),
+            builder.install(&builder.src.join("src/etc/rust-gdb"), &sysroot.join("bin"),
                     0o755);
 
             cp_debugger_script("gdb_load_rust_pretty_printers.py");
             cp_debugger_script("gdb_rust_pretty_printing.py");
 
             // lldb debugger scripts
-            build.install(&build.src.join("src/etc/rust-lldb"), &sysroot.join("bin"),
+            builder.install(&builder.src.join("src/etc/rust-lldb"), &sysroot.join("bin"),
                     0o755);
 
             cp_debugger_script("lldb_rust_formatters.py");
@@ -616,33 +610,33 @@ fn should_run(run: ShouldRun) -> ShouldRun {
 
     fn make_run(run: RunConfig) {
         run.builder.ensure(Std {
-            compiler: run.builder.compiler(run.builder.top_stage, run.builder.build.build),
+            compiler: run.builder.compiler(run.builder.top_stage, run.builder.config.build),
             target: run.target,
         });
     }
 
     fn run(self, builder: &Builder) -> PathBuf {
-        let build = builder.build;
         let compiler = self.compiler;
         let target = self.target;
 
-        let name = pkgname(build, "rust-std");
-        build.info(&format!("Dist std stage{} ({} -> {})", compiler.stage, &compiler.host, target));
+        let name = pkgname(builder, "rust-std");
+        builder.info(&format!("Dist std stage{} ({} -> {})",
+            compiler.stage, &compiler.host, target));
 
         // The only true set of target libraries came from the build triple, so
         // let's reduce redundant work by only producing archives from that host.
-        if compiler.host != build.build {
-            build.info(&format!("\tskipping, not a build host"));
-            return distdir(build).join(format!("{}-{}.tar.gz", name, target));
+        if compiler.host != builder.config.build {
+            builder.info(&format!("\tskipping, not a build host"));
+            return distdir(builder).join(format!("{}-{}.tar.gz", name, target));
         }
 
         // We want to package up as many target libraries as possible
         // for the `rust-std` package, so if this is a host target we
         // depend on librustc and otherwise we just depend on libtest.
-        if build.hosts.iter().any(|t| t == target) {
+        if builder.hosts.iter().any(|t| t == target) {
             builder.ensure(compile::Rustc { compiler, target });
         } else {
-            if build.no_std(target) == Some(true) {
+            if builder.no_std(target) == Some(true) {
                 // the `test` doesn't compile for no-std targets
                 builder.ensure(compile::Std { compiler, target });
             } else {
@@ -650,16 +644,16 @@ fn run(self, builder: &Builder) -> PathBuf {
             }
         }
 
-        let image = tmpdir(build).join(format!("{}-{}-image", name, target));
+        let image = tmpdir(builder).join(format!("{}-{}-image", name, target));
         let _ = fs::remove_dir_all(&image);
 
         let dst = image.join("lib/rustlib").join(target);
         t!(fs::create_dir_all(&dst));
         let mut src = builder.sysroot_libdir(compiler, target).to_path_buf();
         src.pop(); // Remove the trailing /lib folder from the sysroot_libdir
-        build.cp_filtered(&src, &dst, &|path| {
+        builder.cp_filtered(&src, &dst, &|path| {
             let name = path.file_name().and_then(|s| s.to_str());
-            name != Some(build.config.rust_codegen_backends_dir.as_str()) &&
+            name != Some(builder.config.rust_codegen_backends_dir.as_str()) &&
                 name != Some("bin")
 
         });
@@ -670,14 +664,14 @@ fn run(self, builder: &Builder) -> PathBuf {
            .arg("--rel-manifest-dir=rustlib")
            .arg("--success-message=std-is-standing-at-the-ready.")
            .arg("--image-dir").arg(&image)
-           .arg("--work-dir").arg(&tmpdir(build))
-           .arg("--output-dir").arg(&distdir(build))
+           .arg("--work-dir").arg(&tmpdir(builder))
+           .arg("--output-dir").arg(&distdir(builder))
            .arg(format!("--package-name={}-{}", name, target))
            .arg(format!("--component-name=rust-std-{}", target))
            .arg("--legacy-manifest-dirs=rustlib,cargo");
-        build.run(&mut cmd);
-        build.remove_dir(&image);
-        distdir(build).join(format!("{}-{}.tar.gz", name, target))
+        builder.run(&mut cmd);
+        builder.remove_dir(&image);
+        distdir(builder).join(format!("{}-{}.tar.gz", name, target))
     }
 }
 
@@ -693,50 +687,49 @@ impl Step for Analysis {
 
     fn should_run(run: ShouldRun) -> ShouldRun {
         let builder = run.builder;
-        run.path("analysis").default_condition(builder.build.config.extended)
+        run.path("analysis").default_condition(builder.config.extended)
     }
 
     fn make_run(run: RunConfig) {
         run.builder.ensure(Analysis {
-            compiler: run.builder.compiler(run.builder.top_stage, run.builder.build.build),
+            compiler: run.builder.compiler(run.builder.top_stage, run.builder.config.build),
             target: run.target,
         });
     }
 
     /// Creates a tarball of save-analysis metadata, if available.
     fn run(self, builder: &Builder) -> PathBuf {
-        let build = builder.build;
         let compiler = self.compiler;
         let target = self.target;
-        assert!(build.config.extended);
-        build.info(&format!("Dist analysis"));
-        let name = pkgname(build, "rust-analysis");
+        assert!(builder.config.extended);
+        builder.info(&format!("Dist analysis"));
+        let name = pkgname(builder, "rust-analysis");
 
-        if &compiler.host != build.build {
-            build.info(&format!("\tskipping, not a build host"));
-            return distdir(build).join(format!("{}-{}.tar.gz", name, target));
+        if &compiler.host != builder.config.build {
+            builder.info(&format!("\tskipping, not a build host"));
+            return distdir(builder).join(format!("{}-{}.tar.gz", name, target));
         }
 
         builder.ensure(Std { compiler, target });
 
         // Package save-analysis from stage1 if not doing a full bootstrap, as the
         // stage2 artifacts is simply copied from stage1 in that case.
-        let compiler = if build.force_use_stage1(compiler, target) {
+        let compiler = if builder.force_use_stage1(compiler, target) {
             builder.compiler(1, compiler.host)
         } else {
             compiler.clone()
         };
 
-        let image = tmpdir(build).join(format!("{}-{}-image", name, target));
+        let image = tmpdir(builder).join(format!("{}-{}-image", name, target));
 
-        let src = build.stage_out(compiler, Mode::Libstd)
-            .join(target).join(build.cargo_dir()).join("deps");
+        let src = builder.stage_out(compiler, Mode::Libstd)
+            .join(target).join(builder.cargo_dir()).join("deps");
 
         let image_src = src.join("save-analysis");
         let dst = image.join("lib/rustlib").join(target).join("analysis");
         t!(fs::create_dir_all(&dst));
-        build.info(&format!("image_src: {:?}, dst: {:?}", image_src, dst));
-        build.cp_r(&image_src, &dst);
+        builder.info(&format!("image_src: {:?}, dst: {:?}", image_src, dst));
+        builder.cp_r(&image_src, &dst);
 
         let mut cmd = rust_installer(builder);
         cmd.arg("generate")
@@ -744,18 +737,18 @@ fn run(self, builder: &Builder) -> PathBuf {
            .arg("--rel-manifest-dir=rustlib")
            .arg("--success-message=save-analysis-saved.")
            .arg("--image-dir").arg(&image)
-           .arg("--work-dir").arg(&tmpdir(build))
-           .arg("--output-dir").arg(&distdir(build))
+           .arg("--work-dir").arg(&tmpdir(builder))
+           .arg("--output-dir").arg(&distdir(builder))
            .arg(format!("--package-name={}-{}", name, target))
            .arg(format!("--component-name=rust-analysis-{}", target))
            .arg("--legacy-manifest-dirs=rustlib,cargo");
-        build.run(&mut cmd);
-        build.remove_dir(&image);
-        distdir(build).join(format!("{}-{}.tar.gz", name, target))
+        builder.run(&mut cmd);
+        builder.remove_dir(&image);
+        distdir(builder).join(format!("{}-{}.tar.gz", name, target))
     }
 }
 
-fn copy_src_dirs(build: &Build, src_dirs: &[&str], exclude_dirs: &[&str], dst_dir: &Path) {
+fn copy_src_dirs(builder: &Builder, src_dirs: &[&str], exclude_dirs: &[&str], dst_dir: &Path) {
     fn filter_fn(exclude_dirs: &[&str], dir: &str, path: &Path) -> bool {
         let spath = match path.to_str() {
             Some(path) => path,
@@ -794,7 +787,8 @@ fn filter_fn(exclude_dirs: &[&str], dir: &str, path: &Path) -> bool {
     for item in src_dirs {
         let dst = &dst_dir.join(item);
         t!(fs::create_dir_all(dst));
-        build.cp_filtered(&build.src.join(item), dst, &|path| filter_fn(exclude_dirs, item, path));
+        builder.cp_filtered(
+            &builder.src.join(item), dst, &|path| filter_fn(exclude_dirs, item, path));
     }
 }
 
@@ -817,11 +811,10 @@ fn make_run(run: RunConfig) {
 
     /// Creates the `rust-src` installer component
     fn run(self, builder: &Builder) -> PathBuf {
-        let build = builder.build;
-        build.info(&format!("Dist src"));
+        builder.info(&format!("Dist src"));
 
-        let name = pkgname(build, "rust-src");
-        let image = tmpdir(build).join(format!("{}-image", name));
+        let name = pkgname(builder, "rust-src");
+        let image = tmpdir(builder).join(format!("{}-image", name));
         let _ = fs::remove_dir_all(&image);
 
         let dst = image.join("lib/rustlib/src");
@@ -866,9 +859,9 @@ fn run(self, builder: &Builder) -> PathBuf {
             "src/jemalloc/test/unit",
         ];
 
-        copy_src_dirs(build, &std_src_dirs[..], &std_src_dirs_exclude[..], &dst_src);
+        copy_src_dirs(builder, &std_src_dirs[..], &std_src_dirs_exclude[..], &dst_src);
         for file in src_files.iter() {
-            build.copy(&build.src.join(file), &dst_src.join(file));
+            builder.copy(&builder.src.join(file), &dst_src.join(file));
         }
 
         // Create source tarball in rust-installer format
@@ -878,15 +871,15 @@ fn run(self, builder: &Builder) -> PathBuf {
            .arg("--rel-manifest-dir=rustlib")
            .arg("--success-message=Awesome-Source.")
            .arg("--image-dir").arg(&image)
-           .arg("--work-dir").arg(&tmpdir(build))
-           .arg("--output-dir").arg(&distdir(build))
+           .arg("--work-dir").arg(&tmpdir(builder))
+           .arg("--output-dir").arg(&distdir(builder))
            .arg(format!("--package-name={}", name))
            .arg("--component-name=rust-src")
            .arg("--legacy-manifest-dirs=rustlib,cargo");
-        build.run(&mut cmd);
+        builder.run(&mut cmd);
 
-        build.remove_dir(&image);
-        distdir(build).join(&format!("{}.tar.gz", name))
+        builder.remove_dir(&image);
+        distdir(builder).join(&format!("{}.tar.gz", name))
     }
 }
 
@@ -912,12 +905,11 @@ fn make_run(run: RunConfig) {
 
     /// Creates the plain source tarball
     fn run(self, builder: &Builder) -> PathBuf {
-        let build = builder.build;
-        build.info(&format!("Create plain source tarball"));
+        builder.info(&format!("Create plain source tarball"));
 
         // Make sure that the root folder of tarball has the correct name
-        let plain_name = format!("{}-src", pkgname(build, "rustc"));
-        let plain_dst_src = tmpdir(build).join(&plain_name);
+        let plain_name = format!("{}-src", pkgname(builder, "rustc"));
+        let plain_dst_src = tmpdir(builder).join(&plain_name);
         let _ = fs::remove_dir_all(&plain_dst_src);
         t!(fs::create_dir_all(&plain_dst_src));
 
@@ -937,68 +929,68 @@ fn run(self, builder: &Builder) -> PathBuf {
             "src",
         ];
 
-        copy_src_dirs(build, &src_dirs[..], &[], &plain_dst_src);
+        copy_src_dirs(builder, &src_dirs[..], &[], &plain_dst_src);
 
         // Copy the files normally
         for item in &src_files {
-            build.copy(&build.src.join(item), &plain_dst_src.join(item));
+            builder.copy(&builder.src.join(item), &plain_dst_src.join(item));
         }
 
         // Create the version file
-        build.create(&plain_dst_src.join("version"), &build.rust_version());
-        if let Some(sha) = build.rust_sha() {
-            build.create(&plain_dst_src.join("git-commit-hash"), &sha);
+        builder.create(&plain_dst_src.join("version"), &builder.rust_version());
+        if let Some(sha) = builder.rust_sha() {
+            builder.create(&plain_dst_src.join("git-commit-hash"), &sha);
         }
 
         // If we're building from git sources, we need to vendor a complete distribution.
-        if build.rust_info.is_git() {
+        if builder.rust_info.is_git() {
             // Get cargo-vendor installed, if it isn't already.
             let mut has_cargo_vendor = false;
-            let mut cmd = Command::new(&build.initial_cargo);
+            let mut cmd = Command::new(&builder.initial_cargo);
             for line in output(cmd.arg("install").arg("--list")).lines() {
                 has_cargo_vendor |= line.starts_with("cargo-vendor ");
             }
             if !has_cargo_vendor {
-                let mut cmd = Command::new(&build.initial_cargo);
+                let mut cmd = Command::new(&builder.initial_cargo);
                 cmd.arg("install")
                    .arg("--force")
                    .arg("--debug")
                    .arg("--vers").arg(CARGO_VENDOR_VERSION)
                    .arg("cargo-vendor")
-                   .env("RUSTC", &build.initial_rustc);
-                if let Some(dir) = build.openssl_install_dir(build.config.build) {
+                   .env("RUSTC", &builder.initial_rustc);
+                if let Some(dir) = builder.openssl_install_dir(builder.config.build) {
                     builder.ensure(native::Openssl {
-                        target: build.config.build,
+                        target: builder.config.build,
                     });
                     cmd.env("OPENSSL_DIR", dir);
                 }
-                build.run(&mut cmd);
+                builder.run(&mut cmd);
             }
 
             // Vendor all Cargo dependencies
-            let mut cmd = Command::new(&build.initial_cargo);
+            let mut cmd = Command::new(&builder.initial_cargo);
             cmd.arg("vendor")
                .current_dir(&plain_dst_src.join("src"));
-            build.run(&mut cmd);
+            builder.run(&mut cmd);
         }
 
         // Create plain source tarball
-        let plain_name = format!("rustc-{}-src", build.rust_package_vers());
-        let mut tarball = distdir(build).join(&format!("{}.tar.gz", plain_name));
+        let plain_name = format!("rustc-{}-src", builder.rust_package_vers());
+        let mut tarball = distdir(builder).join(&format!("{}.tar.gz", plain_name));
         tarball.set_extension(""); // strip .gz
         tarball.set_extension(""); // strip .tar
         if let Some(dir) = tarball.parent() {
-            build.create_dir(&dir);
+            builder.create_dir(&dir);
         }
-        build.info(&format!("running installer"));
+        builder.info(&format!("running installer"));
         let mut cmd = rust_installer(builder);
         cmd.arg("tarball")
            .arg("--input").arg(&plain_name)
            .arg("--output").arg(&tarball)
            .arg("--work-dir=.")
-           .current_dir(tmpdir(build));
-        build.run(&mut cmd);
-        distdir(build).join(&format!("{}.tar.gz", plain_name))
+           .current_dir(tmpdir(builder));
+        builder.run(&mut cmd);
+        distdir(builder).join(&format!("{}.tar.gz", plain_name))
     }
 }
 
@@ -1043,52 +1035,51 @@ fn make_run(run: RunConfig) {
     }
 
     fn run(self, builder: &Builder) -> PathBuf {
-        let build = builder.build;
         let stage = self.stage;
         let target = self.target;
 
-        build.info(&format!("Dist cargo stage{} ({})", stage, target));
-        let src = build.src.join("src/tools/cargo");
+        builder.info(&format!("Dist cargo stage{} ({})", stage, target));
+        let src = builder.src.join("src/tools/cargo");
         let etc = src.join("src/etc");
-        let release_num = build.release_num("cargo");
-        let name = pkgname(build, "cargo");
-        let version = builder.cargo_info.version(build, &release_num);
+        let release_num = builder.release_num("cargo");
+        let name = pkgname(builder, "cargo");
+        let version = builder.cargo_info.version(builder, &release_num);
 
-        let tmp = tmpdir(build);
+        let tmp = tmpdir(builder);
         let image = tmp.join("cargo-image");
         drop(fs::remove_dir_all(&image));
-        build.create_dir(&image);
+        builder.create_dir(&image);
 
         // Prepare the image directory
-        build.create_dir(&image.join("share/zsh/site-functions"));
-        build.create_dir(&image.join("etc/bash_completion.d"));
+        builder.create_dir(&image.join("share/zsh/site-functions"));
+        builder.create_dir(&image.join("etc/bash_completion.d"));
         let cargo = builder.ensure(tool::Cargo {
-            compiler: builder.compiler(stage, build.build),
+            compiler: builder.compiler(stage, builder.config.build),
             target
         });
-        build.install(&cargo, &image.join("bin"), 0o755);
+        builder.install(&cargo, &image.join("bin"), 0o755);
         for man in t!(etc.join("man").read_dir()) {
             let man = t!(man);
-            build.install(&man.path(), &image.join("share/man/man1"), 0o644);
+            builder.install(&man.path(), &image.join("share/man/man1"), 0o644);
         }
-        build.install(&etc.join("_cargo"), &image.join("share/zsh/site-functions"), 0o644);
-        build.copy(&etc.join("cargo.bashcomp.sh"),
+        builder.install(&etc.join("_cargo"), &image.join("share/zsh/site-functions"), 0o644);
+        builder.copy(&etc.join("cargo.bashcomp.sh"),
              &image.join("etc/bash_completion.d/cargo"));
         let doc = image.join("share/doc/cargo");
-        build.install(&src.join("README.md"), &doc, 0o644);
-        build.install(&src.join("LICENSE-MIT"), &doc, 0o644);
-        build.install(&src.join("LICENSE-APACHE"), &doc, 0o644);
-        build.install(&src.join("LICENSE-THIRD-PARTY"), &doc, 0o644);
+        builder.install(&src.join("README.md"), &doc, 0o644);
+        builder.install(&src.join("LICENSE-MIT"), &doc, 0o644);
+        builder.install(&src.join("LICENSE-APACHE"), &doc, 0o644);
+        builder.install(&src.join("LICENSE-THIRD-PARTY"), &doc, 0o644);
 
         // Prepare the overlay
         let overlay = tmp.join("cargo-overlay");
         drop(fs::remove_dir_all(&overlay));
-        build.create_dir(&overlay);
-        build.install(&src.join("README.md"), &overlay, 0o644);
-        build.install(&src.join("LICENSE-MIT"), &overlay, 0o644);
-        build.install(&src.join("LICENSE-APACHE"), &overlay, 0o644);
-        build.install(&src.join("LICENSE-THIRD-PARTY"), &overlay, 0o644);
-        build.create(&overlay.join("version"), &version);
+        builder.create_dir(&overlay);
+        builder.install(&src.join("README.md"), &overlay, 0o644);
+        builder.install(&src.join("LICENSE-MIT"), &overlay, 0o644);
+        builder.install(&src.join("LICENSE-APACHE"), &overlay, 0o644);
+        builder.install(&src.join("LICENSE-THIRD-PARTY"), &overlay, 0o644);
+        builder.create(&overlay.join("version"), &version);
 
         // Generate the installer tarball
         let mut cmd = rust_installer(builder);
@@ -1097,14 +1088,14 @@ fn run(self, builder: &Builder) -> PathBuf {
            .arg("--rel-manifest-dir=rustlib")
            .arg("--success-message=Rust-is-ready-to-roll.")
            .arg("--image-dir").arg(&image)
-           .arg("--work-dir").arg(&tmpdir(build))
-           .arg("--output-dir").arg(&distdir(build))
+           .arg("--work-dir").arg(&tmpdir(builder))
+           .arg("--output-dir").arg(&distdir(builder))
            .arg("--non-installed-overlay").arg(&overlay)
            .arg(format!("--package-name={}-{}", name, target))
            .arg("--component-name=cargo")
            .arg("--legacy-manifest-dirs=rustlib,cargo");
-        build.run(&mut cmd);
-        distdir(build).join(format!("{}-{}.tar.gz", name, target))
+        builder.run(&mut cmd);
+        distdir(builder).join(format!("{}-{}.tar.gz", name, target))
     }
 }
 
@@ -1130,18 +1121,17 @@ fn make_run(run: RunConfig) {
     }
 
     fn run(self, builder: &Builder) -> Option<PathBuf> {
-        let build = builder.build;
         let stage = self.stage;
         let target = self.target;
-        assert!(build.config.extended);
+        assert!(builder.config.extended);
 
-        build.info(&format!("Dist RLS stage{} ({})", stage, target));
-        let src = build.src.join("src/tools/rls");
-        let release_num = build.release_num("rls");
-        let name = pkgname(build, "rls");
-        let version = build.rls_info.version(build, &release_num);
+        builder.info(&format!("Dist RLS stage{} ({})", stage, target));
+        let src = builder.src.join("src/tools/rls");
+        let release_num = builder.release_num("rls");
+        let name = pkgname(builder, "rls");
+        let version = builder.rls_info.version(builder, &release_num);
 
-        let tmp = tmpdir(build);
+        let tmp = tmpdir(builder);
         let image = tmp.join("rls-image");
         drop(fs::remove_dir_all(&image));
         t!(fs::create_dir_all(&image));
@@ -1150,24 +1140,24 @@ fn run(self, builder: &Builder) -> Option<PathBuf> {
         // We expect RLS to build, because we've exited this step above if tool
         // state for RLS isn't testing.
         let rls = builder.ensure(tool::Rls {
-            compiler: builder.compiler(stage, build.build),
+            compiler: builder.compiler(stage, builder.config.build),
             target, extra_features: Vec::new()
         }).or_else(|| { println!("Unable to build RLS, skipping dist"); None })?;
 
-        build.install(&rls, &image.join("bin"), 0o755);
+        builder.install(&rls, &image.join("bin"), 0o755);
         let doc = image.join("share/doc/rls");
-        build.install(&src.join("README.md"), &doc, 0o644);
-        build.install(&src.join("LICENSE-MIT"), &doc, 0o644);
-        build.install(&src.join("LICENSE-APACHE"), &doc, 0o644);
+        builder.install(&src.join("README.md"), &doc, 0o644);
+        builder.install(&src.join("LICENSE-MIT"), &doc, 0o644);
+        builder.install(&src.join("LICENSE-APACHE"), &doc, 0o644);
 
         // Prepare the overlay
         let overlay = tmp.join("rls-overlay");
         drop(fs::remove_dir_all(&overlay));
         t!(fs::create_dir_all(&overlay));
-        build.install(&src.join("README.md"), &overlay, 0o644);
-        build.install(&src.join("LICENSE-MIT"), &overlay, 0o644);
-        build.install(&src.join("LICENSE-APACHE"), &overlay, 0o644);
-        build.create(&overlay.join("version"), &version);
+        builder.install(&src.join("README.md"), &overlay, 0o644);
+        builder.install(&src.join("LICENSE-MIT"), &overlay, 0o644);
+        builder.install(&src.join("LICENSE-APACHE"), &overlay, 0o644);
+        builder.create(&overlay.join("version"), &version);
 
         // Generate the installer tarball
         let mut cmd = rust_installer(builder);
@@ -1176,15 +1166,15 @@ fn run(self, builder: &Builder) -> Option<PathBuf> {
            .arg("--rel-manifest-dir=rustlib")
            .arg("--success-message=RLS-ready-to-serve.")
            .arg("--image-dir").arg(&image)
-           .arg("--work-dir").arg(&tmpdir(build))
-           .arg("--output-dir").arg(&distdir(build))
+           .arg("--work-dir").arg(&tmpdir(builder))
+           .arg("--output-dir").arg(&distdir(builder))
            .arg("--non-installed-overlay").arg(&overlay)
            .arg(format!("--package-name={}-{}", name, target))
            .arg("--legacy-manifest-dirs=rustlib,cargo")
            .arg("--component-name=rls-preview");
 
-        build.run(&mut cmd);
-        Some(distdir(build).join(format!("{}-{}.tar.gz", name, target)))
+        builder.run(&mut cmd);
+        Some(distdir(builder).join(format!("{}-{}.tar.gz", name, target)))
     }
 }
 
@@ -1211,46 +1201,45 @@ fn make_run(run: RunConfig) {
     }
 
     fn run(self, builder: &Builder) -> Option<PathBuf> {
-        let build = builder.build;
         let stage = self.stage;
         let target = self.target;
 
-        build.info(&format!("Dist Rustfmt stage{} ({})", stage, target));
-        let src = build.src.join("src/tools/rustfmt");
-        let release_num = build.release_num("rustfmt");
-        let name = pkgname(build, "rustfmt");
-        let version = build.rustfmt_info.version(build, &release_num);
+        builder.info(&format!("Dist Rustfmt stage{} ({})", stage, target));
+        let src = builder.src.join("src/tools/rustfmt");
+        let release_num = builder.release_num("rustfmt");
+        let name = pkgname(builder, "rustfmt");
+        let version = builder.rustfmt_info.version(builder, &release_num);
 
-        let tmp = tmpdir(build);
+        let tmp = tmpdir(builder);
         let image = tmp.join("rustfmt-image");
         drop(fs::remove_dir_all(&image));
-        build.create_dir(&image);
+        builder.create_dir(&image);
 
         // Prepare the image directory
         let rustfmt = builder.ensure(tool::Rustfmt {
-            compiler: builder.compiler(stage, build.build),
+            compiler: builder.compiler(stage, builder.config.build),
             target, extra_features: Vec::new()
         }).or_else(|| { println!("Unable to build Rustfmt, skipping dist"); None })?;
         let cargofmt = builder.ensure(tool::Cargofmt {
-            compiler: builder.compiler(stage, build.build),
+            compiler: builder.compiler(stage, builder.config.build),
             target, extra_features: Vec::new()
         }).or_else(|| { println!("Unable to build Cargofmt, skipping dist"); None })?;
 
-        build.install(&rustfmt, &image.join("bin"), 0o755);
-        build.install(&cargofmt, &image.join("bin"), 0o755);
+        builder.install(&rustfmt, &image.join("bin"), 0o755);
+        builder.install(&cargofmt, &image.join("bin"), 0o755);
         let doc = image.join("share/doc/rustfmt");
-        build.install(&src.join("README.md"), &doc, 0o644);
-        build.install(&src.join("LICENSE-MIT"), &doc, 0o644);
-        build.install(&src.join("LICENSE-APACHE"), &doc, 0o644);
+        builder.install(&src.join("README.md"), &doc, 0o644);
+        builder.install(&src.join("LICENSE-MIT"), &doc, 0o644);
+        builder.install(&src.join("LICENSE-APACHE"), &doc, 0o644);
 
         // Prepare the overlay
         let overlay = tmp.join("rustfmt-overlay");
         drop(fs::remove_dir_all(&overlay));
-        build.create_dir(&overlay);
-        build.install(&src.join("README.md"), &overlay, 0o644);
-        build.install(&src.join("LICENSE-MIT"), &overlay, 0o644);
-        build.install(&src.join("LICENSE-APACHE"), &overlay, 0o644);
-        build.create(&overlay.join("version"), &version);
+        builder.create_dir(&overlay);
+        builder.install(&src.join("README.md"), &overlay, 0o644);
+        builder.install(&src.join("LICENSE-MIT"), &overlay, 0o644);
+        builder.install(&src.join("LICENSE-APACHE"), &overlay, 0o644);
+        builder.create(&overlay.join("version"), &version);
 
         // Generate the installer tarball
         let mut cmd = rust_installer(builder);
@@ -1259,15 +1248,15 @@ fn run(self, builder: &Builder) -> Option<PathBuf> {
            .arg("--rel-manifest-dir=rustlib")
            .arg("--success-message=rustfmt-ready-to-fmt.")
            .arg("--image-dir").arg(&image)
-           .arg("--work-dir").arg(&tmpdir(build))
-           .arg("--output-dir").arg(&distdir(build))
+           .arg("--work-dir").arg(&tmpdir(builder))
+           .arg("--output-dir").arg(&distdir(builder))
            .arg("--non-installed-overlay").arg(&overlay)
            .arg(format!("--package-name={}-{}", name, target))
            .arg("--legacy-manifest-dirs=rustlib,cargo")
            .arg("--component-name=rustfmt-preview");
 
-        build.run(&mut cmd);
-        Some(distdir(build).join(format!("{}-{}.tar.gz", name, target)))
+        builder.run(&mut cmd);
+        Some(distdir(builder).join(format!("{}-{}.tar.gz", name, target)))
     }
 }
 
@@ -1291,18 +1280,17 @@ fn should_run(run: ShouldRun) -> ShouldRun {
     fn make_run(run: RunConfig) {
         run.builder.ensure(Extended {
             stage: run.builder.top_stage,
-            host: run.builder.build.build,
+            host: run.builder.config.build,
             target: run.target,
         });
     }
 
     /// Creates a combined installer for the specified target in the provided stage.
     fn run(self, builder: &Builder) {
-        let build = builder.build;
         let stage = self.stage;
         let target = self.target;
 
-        build.info(&format!("Dist extended stage{} ({})", stage, target));
+        builder.info(&format!("Dist extended stage{} ({})", stage, target));
 
         let rustc_installer = builder.ensure(Rustc {
             compiler: builder.compiler(stage, target),
@@ -1322,21 +1310,21 @@ fn run(self, builder: &Builder) {
             target,
         });
 
-        let tmp = tmpdir(build);
+        let tmp = tmpdir(builder);
         let overlay = tmp.join("extended-overlay");
-        let etc = build.src.join("src/etc/installer");
+        let etc = builder.src.join("src/etc/installer");
         let work = tmp.join("work");
 
         let _ = fs::remove_dir_all(&overlay);
-        build.install(&build.src.join("COPYRIGHT"), &overlay, 0o644);
-        build.install(&build.src.join("LICENSE-APACHE"), &overlay, 0o644);
-        build.install(&build.src.join("LICENSE-MIT"), &overlay, 0o644);
-        let version = build.rust_version();
-        build.create(&overlay.join("version"), &version);
-        if let Some(sha) = build.rust_sha() {
-            build.create(&overlay.join("git-commit-hash"), &sha);
+        builder.install(&builder.src.join("COPYRIGHT"), &overlay, 0o644);
+        builder.install(&builder.src.join("LICENSE-APACHE"), &overlay, 0o644);
+        builder.install(&builder.src.join("LICENSE-MIT"), &overlay, 0o644);
+        let version = builder.rust_version();
+        builder.create(&overlay.join("version"), &version);
+        if let Some(sha) = builder.rust_sha() {
+            builder.create(&overlay.join("git-commit-hash"), &sha);
         }
-        build.install(&etc.join("README.md"), &overlay, 0o644);
+        builder.install(&etc.join("README.md"), &overlay, 0o644);
 
         // When rust-std package split from rustc, we needed to ensure that during
         // upgrades rustc was upgraded before rust-std. To avoid rustc clobbering
@@ -1349,7 +1337,7 @@ fn run(self, builder: &Builder) {
         tarballs.extend(rustfmt_installer.clone());
         tarballs.push(analysis_installer);
         tarballs.push(std_installer);
-        if build.config.docs {
+        if builder.config.docs {
             tarballs.push(docs_installer);
         }
         if target.contains("pc-windows-gnu") {
@@ -1367,17 +1355,17 @@ fn run(self, builder: &Builder) {
             .arg("--rel-manifest-dir=rustlib")
             .arg("--success-message=Rust-is-ready-to-roll.")
             .arg("--work-dir").arg(&work)
-            .arg("--output-dir").arg(&distdir(build))
-            .arg(format!("--package-name={}-{}", pkgname(build, "rust"), target))
+            .arg("--output-dir").arg(&distdir(builder))
+            .arg(format!("--package-name={}-{}", pkgname(builder, "rust"), target))
             .arg("--legacy-manifest-dirs=rustlib,cargo")
             .arg("--input-tarballs").arg(input_tarballs)
             .arg("--non-installed-overlay").arg(&overlay);
-        build.run(&mut cmd);
+        builder.run(&mut cmd);
 
         let mut license = String::new();
-        license += &build.read(&build.src.join("COPYRIGHT"));
-        license += &build.read(&build.src.join("LICENSE-APACHE"));
-        license += &build.read(&build.src.join("LICENSE-MIT"));
+        license += &builder.read(&builder.src.join("COPYRIGHT"));
+        license += &builder.read(&builder.src.join("LICENSE-APACHE"));
+        license += &builder.read(&builder.src.join("LICENSE-MIT"));
         license.push_str("\n");
         license.push_str("\n");
 
@@ -1432,14 +1420,14 @@ fn filter(contents: &str, marker: &str) -> String {
                     .arg("--scripts").arg(pkg.join(component))
                     .arg("--nopayload")
                     .arg(pkg.join(component).with_extension("pkg"));
-                build.run(&mut cmd);
+                builder.run(&mut cmd);
             };
 
             let prepare = |name: &str| {
-                build.create_dir(&pkg.join(name));
-                build.cp_r(&work.join(&format!("{}-{}", pkgname(build, name), target)),
+                builder.create_dir(&pkg.join(name));
+                builder.cp_r(&work.join(&format!("{}-{}", pkgname(builder, name), target)),
                         &pkg.join(name));
-                build.install(&etc.join("pkg/postinstall"), &pkg.join(name), 0o755);
+                builder.install(&etc.join("pkg/postinstall"), &pkg.join(name), 0o755);
                 pkgbuild(name);
             };
             prepare("rustc");
@@ -1453,20 +1441,20 @@ fn filter(contents: &str, marker: &str) -> String {
             }
 
             // create an 'uninstall' package
-            build.install(&etc.join("pkg/postinstall"), &pkg.join("uninstall"), 0o755);
+            builder.install(&etc.join("pkg/postinstall"), &pkg.join("uninstall"), 0o755);
             pkgbuild("uninstall");
 
-            build.create_dir(&pkg.join("res"));
-            build.create(&pkg.join("res/LICENSE.txt"), &license);
-            build.install(&etc.join("gfx/rust-logo.png"), &pkg.join("res"), 0o644);
+            builder.create_dir(&pkg.join("res"));
+            builder.create(&pkg.join("res/LICENSE.txt"), &license);
+            builder.install(&etc.join("gfx/rust-logo.png"), &pkg.join("res"), 0o644);
             let mut cmd = Command::new("productbuild");
             cmd.arg("--distribution").arg(xform(&etc.join("pkg/Distribution.xml")))
                 .arg("--resources").arg(pkg.join("res"))
-                .arg(distdir(build).join(format!("{}-{}.pkg",
-                                                    pkgname(build, "rust"),
+                .arg(distdir(builder).join(format!("{}-{}.pkg",
+                                                    pkgname(builder, "rust"),
                                                     target)))
                 .arg("--package-path").arg(&pkg);
-            build.run(&mut cmd);
+            builder.run(&mut cmd);
         }
 
         if target.contains("windows") {
@@ -1474,7 +1462,7 @@ fn filter(contents: &str, marker: &str) -> String {
             let _ = fs::remove_dir_all(&exe);
 
             let prepare = |name: &str| {
-                build.create_dir(&exe.join(name));
+                builder.create_dir(&exe.join(name));
                 let dir = if name == "rust-std" || name == "rust-analysis" {
                     format!("{}-{}", name, target)
                 } else if name == "rls" {
@@ -1482,10 +1470,10 @@ fn filter(contents: &str, marker: &str) -> String {
                 } else {
                     name.to_string()
                 };
-                build.cp_r(&work.join(&format!("{}-{}", pkgname(build, name), target))
+                builder.cp_r(&work.join(&format!("{}-{}", pkgname(builder, name), target))
                             .join(dir),
                         &exe.join(name));
-                build.remove(&exe.join(name).join("manifest.in"));
+                builder.remove(&exe.join(name).join("manifest.in"));
             };
             prepare("rustc");
             prepare("cargo");
@@ -1499,11 +1487,11 @@ fn filter(contents: &str, marker: &str) -> String {
                 prepare("rust-mingw");
             }
 
-            build.install(&xform(&etc.join("exe/rust.iss")), &exe, 0o644);
-            build.install(&etc.join("exe/modpath.iss"), &exe, 0o644);
-            build.install(&etc.join("exe/upgrade.iss"), &exe, 0o644);
-            build.install(&etc.join("gfx/rust-logo.ico"), &exe, 0o644);
-            build.create(&exe.join("LICENSE.txt"), &license);
+            builder.install(&xform(&etc.join("exe/rust.iss")), &exe, 0o644);
+            builder.install(&etc.join("exe/modpath.iss"), &exe, 0o644);
+            builder.install(&etc.join("exe/upgrade.iss"), &exe, 0o644);
+            builder.install(&etc.join("gfx/rust-logo.ico"), &exe, 0o644);
+            builder.create(&exe.join("LICENSE.txt"), &license);
 
             // Generate exe installer
             let mut cmd = Command::new("iscc");
@@ -1512,10 +1500,10 @@ fn filter(contents: &str, marker: &str) -> String {
             if target.contains("windows-gnu") {
                 cmd.arg("/dMINGW");
             }
-            add_env(build, &mut cmd, target);
-            build.run(&mut cmd);
-            build.install(&exe.join(format!("{}-{}.exe", pkgname(build, "rust"), target)),
-                    &distdir(build),
+            add_env(builder, &mut cmd, target);
+            builder.run(&mut cmd);
+            builder.install(&exe.join(format!("{}-{}.exe", pkgname(builder, "rust"), target)),
+                    &distdir(builder),
                     0o755);
 
             // Generate msi installer
@@ -1525,7 +1513,7 @@ fn filter(contents: &str, marker: &str) -> String {
             let light = wix.join("bin/light.exe");
 
             let heat_flags = ["-nologo", "-gg", "-sfrag", "-srd", "-sreg"];
-            build.run(Command::new(&heat)
+            builder.run(Command::new(&heat)
                             .current_dir(&exe)
                             .arg("dir")
                             .arg("rustc")
@@ -1534,7 +1522,7 @@ fn filter(contents: &str, marker: &str) -> String {
                             .arg("-dr").arg("Rustc")
                             .arg("-var").arg("var.RustcDir")
                             .arg("-out").arg(exe.join("RustcGroup.wxs")));
-            build.run(Command::new(&heat)
+            builder.run(Command::new(&heat)
                             .current_dir(&exe)
                             .arg("dir")
                             .arg("rust-docs")
@@ -1544,7 +1532,7 @@ fn filter(contents: &str, marker: &str) -> String {
                             .arg("-var").arg("var.DocsDir")
                             .arg("-out").arg(exe.join("DocsGroup.wxs"))
                             .arg("-t").arg(etc.join("msi/squash-components.xsl")));
-            build.run(Command::new(&heat)
+            builder.run(Command::new(&heat)
                             .current_dir(&exe)
                             .arg("dir")
                             .arg("cargo")
@@ -1554,7 +1542,7 @@ fn filter(contents: &str, marker: &str) -> String {
                             .arg("-var").arg("var.CargoDir")
                             .arg("-out").arg(exe.join("CargoGroup.wxs"))
                             .arg("-t").arg(etc.join("msi/remove-duplicates.xsl")));
-            build.run(Command::new(&heat)
+            builder.run(Command::new(&heat)
                             .current_dir(&exe)
                             .arg("dir")
                             .arg("rust-std")
@@ -1564,7 +1552,7 @@ fn filter(contents: &str, marker: &str) -> String {
                             .arg("-var").arg("var.StdDir")
                             .arg("-out").arg(exe.join("StdGroup.wxs")));
             if rls_installer.is_some() {
-                build.run(Command::new(&heat)
+                builder.run(Command::new(&heat)
                                 .current_dir(&exe)
                                 .arg("dir")
                                 .arg("rls")
@@ -1575,7 +1563,7 @@ fn filter(contents: &str, marker: &str) -> String {
                                 .arg("-out").arg(exe.join("RlsGroup.wxs"))
                                 .arg("-t").arg(etc.join("msi/remove-duplicates.xsl")));
             }
-            build.run(Command::new(&heat)
+            builder.run(Command::new(&heat)
                             .current_dir(&exe)
                             .arg("dir")
                             .arg("rust-analysis")
@@ -1586,7 +1574,7 @@ fn filter(contents: &str, marker: &str) -> String {
                             .arg("-out").arg(exe.join("AnalysisGroup.wxs"))
                             .arg("-t").arg(etc.join("msi/remove-duplicates.xsl")));
             if target.contains("windows-gnu") {
-                build.run(Command::new(&heat)
+                builder.run(Command::new(&heat)
                                 .current_dir(&exe)
                                 .arg("dir")
                                 .arg("rust-mingw")
@@ -1612,7 +1600,7 @@ fn filter(contents: &str, marker: &str) -> String {
                     .arg("-arch").arg(&arch)
                     .arg("-out").arg(&output)
                     .arg(&input);
-                add_env(build, &mut cmd, target);
+                add_env(builder, &mut cmd, target);
 
                 if rls_installer.is_some() {
                     cmd.arg("-dRlsDir=rls");
@@ -1620,7 +1608,7 @@ fn filter(contents: &str, marker: &str) -> String {
                 if target.contains("windows-gnu") {
                     cmd.arg("-dGccDir=rust-mingw");
                 }
-                build.run(&mut cmd);
+                builder.run(&mut cmd);
             };
             candle(&xform(&etc.join("msi/rust.wxs")));
             candle(&etc.join("msi/ui.wxs"));
@@ -1638,11 +1626,11 @@ fn filter(contents: &str, marker: &str) -> String {
                 candle("GccGroup.wxs".as_ref());
             }
 
-            build.create(&exe.join("LICENSE.rtf"), &rtf);
-            build.install(&etc.join("gfx/banner.bmp"), &exe, 0o644);
-            build.install(&etc.join("gfx/dialogbg.bmp"), &exe, 0o644);
+            builder.create(&exe.join("LICENSE.rtf"), &rtf);
+            builder.install(&etc.join("gfx/banner.bmp"), &exe, 0o644);
+            builder.install(&etc.join("gfx/dialogbg.bmp"), &exe, 0o644);
 
-            let filename = format!("{}-{}.msi", pkgname(build, "rust"), target);
+            let filename = format!("{}-{}.msi", pkgname(builder, "rust"), target);
             let mut cmd = Command::new(&light);
             cmd.arg("-nologo")
                 .arg("-ext").arg("WixUIExtension")
@@ -1668,28 +1656,28 @@ fn filter(contents: &str, marker: &str) -> String {
             // ICE57 wrongly complains about the shortcuts
             cmd.arg("-sice:ICE57");
 
-            build.run(&mut cmd);
+            builder.run(&mut cmd);
 
-            if !build.config.dry_run {
-                t!(fs::rename(exe.join(&filename), distdir(build).join(&filename)));
+            if !builder.config.dry_run {
+                t!(fs::rename(exe.join(&filename), distdir(builder).join(&filename)));
             }
         }
     }
 }
 
-fn add_env(build: &Build, cmd: &mut Command, target: Interned<String>) {
+fn add_env(builder: &Builder, cmd: &mut Command, target: Interned<String>) {
     let mut parts = channel::CFG_RELEASE_NUM.split('.');
-    cmd.env("CFG_RELEASE_INFO", build.rust_version())
+    cmd.env("CFG_RELEASE_INFO", builder.rust_version())
        .env("CFG_RELEASE_NUM", channel::CFG_RELEASE_NUM)
-       .env("CFG_RELEASE", build.rust_release())
+       .env("CFG_RELEASE", builder.rust_release())
        .env("CFG_VER_MAJOR", parts.next().unwrap())
        .env("CFG_VER_MINOR", parts.next().unwrap())
        .env("CFG_VER_PATCH", parts.next().unwrap())
        .env("CFG_VER_BUILD", "0") // just needed to build
-       .env("CFG_PACKAGE_VERS", build.rust_package_vers())
-       .env("CFG_PACKAGE_NAME", pkgname(build, "rust"))
+       .env("CFG_PACKAGE_VERS", builder.rust_package_vers())
+       .env("CFG_PACKAGE_NAME", pkgname(builder, "rust"))
        .env("CFG_BUILD", target)
-       .env("CFG_CHANNEL", &build.config.channel);
+       .env("CFG_CHANNEL", &builder.config.channel);
 
     if target.contains("windows-gnu") {
        cmd.env("CFG_MINGW", "1")
@@ -1722,18 +1710,17 @@ fn make_run(run: RunConfig) {
     }
 
     fn run(self, builder: &Builder) {
-        let build = builder.build;
         let mut cmd = builder.tool_cmd(Tool::BuildManifest);
-        if build.config.dry_run {
+        if builder.config.dry_run {
             return;
         }
-        let sign = build.config.dist_sign_folder.as_ref().unwrap_or_else(|| {
+        let sign = builder.config.dist_sign_folder.as_ref().unwrap_or_else(|| {
             panic!("\n\nfailed to specify `dist.sign-folder` in `config.toml`\n\n")
         });
-        let addr = build.config.dist_upload_addr.as_ref().unwrap_or_else(|| {
+        let addr = builder.config.dist_upload_addr.as_ref().unwrap_or_else(|| {
             panic!("\n\nfailed to specify `dist.upload-addr` in `config.toml`\n\n")
         });
-        let file = build.config.dist_gpg_password_file.as_ref().unwrap_or_else(|| {
+        let file = builder.config.dist_gpg_password_file.as_ref().unwrap_or_else(|| {
             panic!("\n\nfailed to specify `dist.gpg-password-file` in `config.toml`\n\n")
         });
         let mut pass = String::new();
@@ -1742,15 +1729,15 @@ fn run(self, builder: &Builder) {
         let today = output(Command::new("date").arg("+%Y-%m-%d"));
 
         cmd.arg(sign);
-        cmd.arg(distdir(build));
+        cmd.arg(distdir(builder));
         cmd.arg(today.trim());
-        cmd.arg(build.rust_package_vers());
-        cmd.arg(build.package_vers(&build.release_num("cargo")));
-        cmd.arg(build.package_vers(&build.release_num("rls")));
-        cmd.arg(build.package_vers(&build.release_num("rustfmt")));
+        cmd.arg(builder.rust_package_vers());
+        cmd.arg(builder.package_vers(&builder.release_num("cargo")));
+        cmd.arg(builder.package_vers(&builder.release_num("rls")));
+        cmd.arg(builder.package_vers(&builder.release_num("rustfmt")));
         cmd.arg(addr);
 
-        build.create_dir(&distdir(build));
+        builder.create_dir(&distdir(builder));
 
         let mut child = t!(cmd.stdin(Stdio::piped()).spawn());
         t!(child.stdin.take().unwrap().write_all(pass.as_bytes()));
index 620e125a43da1977f1bb01e748d0525eae6ea078..ae22260c564edf0bb8b5dea95a625246d8b5b145 100644 (file)
@@ -8,7 +8,7 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
-//! Documentation generation for rustbuild.
+//! Documentation generation for rustbuild.
 //!
 //! This module implements generation for all bits and pieces of documentation
 //! for the Rust project. This notably includes suites like the rust book, the
@@ -23,7 +23,7 @@
 use std::io;
 use std::path::{PathBuf, Path};
 
-use {Build, Mode};
+use Mode;
 use build_helper::up_to_date;
 
 use util::symlink_dir;
@@ -47,7 +47,7 @@ impl Step for $name {
 
             fn should_run(run: ShouldRun) -> ShouldRun {
                 let builder = run.builder;
-                run.path($path).default_condition(builder.build.config.docs)
+                run.path($path).default_condition(builder.config.docs)
             }
 
             fn make_run(run: RunConfig) {
@@ -94,7 +94,7 @@ fn should_run(run: ShouldRun) -> ShouldRun {
     /// This will not actually generate any documentation if the documentation has
     /// already been generated.
     fn run(self, builder: &Builder) {
-        let src = builder.build.src.join("src/doc");
+        let src = builder.src.join("src/doc");
         builder.ensure(RustbookSrc {
             target: self.target,
             name: self.name,
@@ -114,7 +114,7 @@ impl Step for UnstableBook {
 
     fn should_run(run: ShouldRun) -> ShouldRun {
         let builder = run.builder;
-        run.path("src/doc/unstable-book").default_condition(builder.build.config.docs)
+        run.path("src/doc/unstable-book").default_condition(builder.config.docs)
     }
 
     fn make_run(run: RunConfig) {
@@ -130,7 +130,7 @@ fn run(self, builder: &Builder) {
         builder.ensure(RustbookSrc {
             target: self.target,
             name: INTERNER.intern_str("unstable-book"),
-            src: builder.build.md_doc_out(self.target),
+            src: builder.md_doc_out(self.target),
         })
     }
 }
@@ -147,7 +147,7 @@ impl Step for CargoBook {
 
     fn should_run(run: ShouldRun) -> ShouldRun {
         let builder = run.builder;
-        run.path("src/tools/cargo/src/doc/book").default_condition(builder.build.config.docs)
+        run.path("src/tools/cargo/src/doc/book").default_condition(builder.config.docs)
     }
 
     fn make_run(run: RunConfig) {
@@ -158,22 +158,20 @@ fn make_run(run: RunConfig) {
     }
 
     fn run(self, builder: &Builder) {
-        let build = builder.build;
-
         let target = self.target;
         let name = self.name;
-        let src = build.src.join("src/tools/cargo/src/doc");
+        let src = builder.src.join("src/tools/cargo/src/doc");
 
-        let out = build.doc_out(target);
+        let out = builder.doc_out(target);
         t!(fs::create_dir_all(&out));
 
         let out = out.join(name);
 
-        build.info(&format!("Cargo Book ({}) - {}", target, name));
+        builder.info(&format!("Cargo Book ({}) - {}", target, name));
 
         let _ = fs::remove_dir_all(&out);
 
-        build.run(builder.tool_cmd(Tool::Rustbook)
+        builder.run(builder.tool_cmd(Tool::Rustbook)
                        .arg("build")
                        .arg(&src)
                        .arg("-d")
@@ -200,11 +198,10 @@ fn should_run(run: ShouldRun) -> ShouldRun {
     /// This will not actually generate any documentation if the documentation has
     /// already been generated.
     fn run(self, builder: &Builder) {
-        let build = builder.build;
         let target = self.target;
         let name = self.name;
         let src = self.src;
-        let out = build.doc_out(target);
+        let out = builder.doc_out(target);
         t!(fs::create_dir_all(&out));
 
         let out = out.join(name);
@@ -215,9 +212,9 @@ fn run(self, builder: &Builder) {
         if up_to_date(&src, &index) && up_to_date(&rustbook, &index) {
             return
         }
-        build.info(&format!("Rustbook ({}) - {}", target, name));
+        builder.info(&format!("Rustbook ({}) - {}", target, name));
         let _ = fs::remove_dir_all(&out);
-        build.run(rustbook_cmd
+        builder.run(rustbook_cmd
                        .arg("build")
                        .arg(&src)
                        .arg("-d")
@@ -238,12 +235,12 @@ impl Step for TheBook {
 
     fn should_run(run: ShouldRun) -> ShouldRun {
         let builder = run.builder;
-        run.path("src/doc/book").default_condition(builder.build.config.docs)
+        run.path("src/doc/book").default_condition(builder.config.docs)
     }
 
     fn make_run(run: RunConfig) {
         run.builder.ensure(TheBook {
-            compiler: run.builder.compiler(run.builder.top_stage, run.builder.build.build),
+            compiler: run.builder.compiler(run.builder.top_stage, run.builder.config.build),
             target: run.target,
             name: "book",
         });
@@ -259,7 +256,6 @@ fn make_run(run: RunConfig) {
     /// * Index page
     /// * Redirect pages
     fn run(self, builder: &Builder) {
-        let build = builder.build;
         let compiler = self.compiler;
         let target = self.target;
         let name = self.name;
@@ -283,12 +279,12 @@ fn run(self, builder: &Builder) {
 
         // build the index page
         let index = format!("{}/index.md", name);
-        build.info(&format!("Documenting book index ({})", target));
+        builder.info(&format!("Documenting book index ({})", target));
         invoke_rustdoc(builder, compiler, target, &index);
 
         // build the redirect pages
-        build.info(&format!("Documenting book redirect pages ({})", target));
-        for file in t!(fs::read_dir(build.src.join("src/doc/book/redirects"))) {
+        builder.info(&format!("Documenting book redirect pages ({})", target));
+        for file in t!(fs::read_dir(builder.src.join("src/doc/book/redirects"))) {
             let file = t!(file);
             let path = file.path();
             let path = path.to_str().unwrap();
@@ -299,13 +295,12 @@ fn run(self, builder: &Builder) {
 }
 
 fn invoke_rustdoc(builder: &Builder, compiler: Compiler, target: Interned<String>, markdown: &str) {
-    let build = builder.build;
-    let out = build.doc_out(target);
+    let out = builder.doc_out(target);
 
-    let path = build.src.join("src/doc").join(markdown);
+    let path = builder.src.join("src/doc").join(markdown);
 
-    let favicon = build.src.join("src/doc/favicon.inc");
-    let footer = build.src.join("src/doc/footer.inc");
+    let favicon = builder.src.join("src/doc/favicon.inc");
+    let footer = builder.src.join("src/doc/footer.inc");
     let version_info = out.join("version_info.html");
 
     let mut cmd = builder.rustdoc_cmd(compiler.host);
@@ -323,7 +318,7 @@ fn invoke_rustdoc(builder: &Builder, compiler: Compiler, target: Interned<String
         .arg("--markdown-css")
         .arg("../rust.css");
 
-    build.run(&mut cmd);
+    builder.run(&mut cmd);
 }
 
 #[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)]
@@ -338,12 +333,12 @@ impl Step for Standalone {
 
     fn should_run(run: ShouldRun) -> ShouldRun {
         let builder = run.builder;
-        run.path("src/doc").default_condition(builder.build.config.docs)
+        run.path("src/doc").default_condition(builder.config.docs)
     }
 
     fn make_run(run: RunConfig) {
         run.builder.ensure(Standalone {
-            compiler: run.builder.compiler(run.builder.top_stage, run.builder.build.build),
+            compiler: run.builder.compiler(run.builder.top_stage, run.builder.config.build),
             target: run.target,
         });
     }
@@ -357,31 +352,30 @@ fn make_run(run: RunConfig) {
     ///
     /// In the end, this is just a glorified wrapper around rustdoc!
     fn run(self, builder: &Builder) {
-        let build = builder.build;
         let target = self.target;
         let compiler = self.compiler;
-        build.info(&format!("Documenting standalone ({})", target));
-        let out = build.doc_out(target);
+        builder.info(&format!("Documenting standalone ({})", target));
+        let out = builder.doc_out(target);
         t!(fs::create_dir_all(&out));
 
-        let favicon = build.src.join("src/doc/favicon.inc");
-        let footer = build.src.join("src/doc/footer.inc");
-        let full_toc = build.src.join("src/doc/full-toc.inc");
-        t!(fs::copy(build.src.join("src/doc/rust.css"), out.join("rust.css")));
+        let favicon = builder.src.join("src/doc/favicon.inc");
+        let footer = builder.src.join("src/doc/footer.inc");
+        let full_toc = builder.src.join("src/doc/full-toc.inc");
+        t!(fs::copy(builder.src.join("src/doc/rust.css"), out.join("rust.css")));
 
-        let version_input = build.src.join("src/doc/version_info.html.template");
+        let version_input = builder.src.join("src/doc/version_info.html.template");
         let version_info = out.join("version_info.html");
 
-        if !build.config.dry_run && !up_to_date(&version_input, &version_info) {
+        if !builder.config.dry_run && !up_to_date(&version_input, &version_info) {
             let mut info = String::new();
             t!(t!(File::open(&version_input)).read_to_string(&mut info));
-            let info = info.replace("VERSION", &build.rust_release())
-                           .replace("SHORT_HASH", build.rust_info.sha_short().unwrap_or(""))
-                           .replace("STAMP", build.rust_info.sha().unwrap_or(""));
+            let info = info.replace("VERSION", &builder.rust_release())
+                           .replace("SHORT_HASH", builder.rust_info.sha_short().unwrap_or(""))
+                           .replace("STAMP", builder.rust_info.sha().unwrap_or(""));
             t!(t!(File::create(&version_info)).write_all(info.as_bytes()));
         }
 
-        for file in t!(fs::read_dir(build.src.join("src/doc"))) {
+        for file in t!(fs::read_dir(builder.src.join("src/doc"))) {
             let file = t!(file);
             let path = file.path();
             let filename = path.file_name().unwrap().to_str().unwrap();
@@ -396,7 +390,7 @@ fn run(self, builder: &Builder) {
                up_to_date(&favicon, &html) &&
                up_to_date(&full_toc, &html) &&
                up_to_date(&version_info, &html) &&
-               (build.config.dry_run || up_to_date(&rustdoc, &html)) {
+               (builder.config.dry_run || up_to_date(&rustdoc, &html)) {
                 continue
             }
 
@@ -416,7 +410,7 @@ fn run(self, builder: &Builder) {
             } else {
                 cmd.arg("--markdown-css").arg("rust.css");
             }
-            build.run(&mut cmd);
+            builder.run(&mut cmd);
         }
     }
 }
@@ -433,7 +427,7 @@ impl Step for Std {
 
     fn should_run(run: ShouldRun) -> ShouldRun {
         let builder = run.builder;
-        run.all_krates("std").default_condition(builder.build.config.docs)
+        run.all_krates("std").default_condition(builder.config.docs)
     }
 
     fn make_run(run: RunConfig) {
@@ -448,22 +442,21 @@ fn make_run(run: RunConfig) {
     /// This will generate all documentation for the standard library and its
     /// dependencies. This is largely just a wrapper around `cargo doc`.
     fn run(self, builder: &Builder) {
-        let build = builder.build;
         let stage = self.stage;
         let target = self.target;
-        build.info(&format!("Documenting stage{} std ({})", stage, target));
-        let out = build.doc_out(target);
+        builder.info(&format!("Documenting stage{} std ({})", stage, target));
+        let out = builder.doc_out(target);
         t!(fs::create_dir_all(&out));
-        let compiler = builder.compiler(stage, build.build);
+        let compiler = builder.compiler(stage, builder.config.build);
         let rustdoc = builder.rustdoc(compiler.host);
-        let compiler = if build.force_use_stage1(compiler, target) {
+        let compiler = if builder.force_use_stage1(compiler, target) {
             builder.compiler(1, compiler.host)
         } else {
             compiler
         };
 
         builder.ensure(compile::Std { compiler, target });
-        let out_dir = build.stage_out(compiler, Mode::Libstd)
+        let out_dir = builder.stage_out(compiler, Mode::Libstd)
                            .join(target).join("doc");
 
         // Here what we're doing is creating a *symlink* (directory junction on
@@ -479,9 +472,9 @@ fn run(self, builder: &Builder) {
         //
         // This way rustdoc generates output directly into the output, and rustdoc
         // will also directly handle merging.
-        let my_out = build.crate_doc_out(target);
-        build.clear_if_dirty(&my_out, &rustdoc);
-        t!(symlink_dir_force(&build.config, &my_out, &out_dir));
+        let my_out = builder.crate_doc_out(target);
+        builder.clear_if_dirty(&my_out, &rustdoc);
+        t!(symlink_dir_force(&builder.config, &my_out, &out_dir));
 
         let mut cargo = builder.cargo(compiler, Mode::Libstd, target, "doc");
         compile::std_cargo(builder, &compiler, target, &mut cargo);
@@ -497,8 +490,8 @@ fn run(self, builder: &Builder) {
             t!(fs::create_dir_all(out_dir.join(krate)));
         }
 
-        build.run(&mut cargo);
-        build.cp_r(&my_out, &out);
+        builder.run(&mut cargo);
+        builder.cp_r(&my_out, &out);
     }
 }
 
@@ -514,7 +507,7 @@ impl Step for Test {
 
     fn should_run(run: ShouldRun) -> ShouldRun {
         let builder = run.builder;
-        run.krate("test").default_condition(builder.build.config.docs)
+        run.krate("test").default_condition(builder.config.docs)
     }
 
     fn make_run(run: RunConfig) {
@@ -529,15 +522,14 @@ fn make_run(run: RunConfig) {
     /// This will generate all documentation for libtest and its dependencies. This
     /// is largely just a wrapper around `cargo doc`.
     fn run(self, builder: &Builder) {
-        let build = builder.build;
         let stage = self.stage;
         let target = self.target;
-        build.info(&format!("Documenting stage{} test ({})", stage, target));
-        let out = build.doc_out(target);
+        builder.info(&format!("Documenting stage{} test ({})", stage, target));
+        let out = builder.doc_out(target);
         t!(fs::create_dir_all(&out));
-        let compiler = builder.compiler(stage, build.build);
+        let compiler = builder.compiler(stage, builder.config.build);
         let rustdoc = builder.rustdoc(compiler.host);
-        let compiler = if build.force_use_stage1(compiler, target) {
+        let compiler = if builder.force_use_stage1(compiler, target) {
             builder.compiler(1, compiler.host)
         } else {
             compiler
@@ -547,21 +539,21 @@ fn run(self, builder: &Builder) {
         builder.ensure(Std { stage, target });
 
         builder.ensure(compile::Test { compiler, target });
-        let out_dir = build.stage_out(compiler, Mode::Libtest)
+        let out_dir = builder.stage_out(compiler, Mode::Libtest)
                            .join(target).join("doc");
 
         // See docs in std above for why we symlink
-        let my_out = build.crate_doc_out(target);
-        build.clear_if_dirty(&my_out, &rustdoc);
+        let my_out = builder.crate_doc_out(target);
+        builder.clear_if_dirty(&my_out, &rustdoc);
         t!(symlink_dir_force(&builder.config, &my_out, &out_dir));
 
         let mut cargo = builder.cargo(compiler, Mode::Libtest, target, "doc");
-        compile::test_cargo(build, &compiler, target, &mut cargo);
+        compile::test_cargo(builder, &compiler, target, &mut cargo);
 
         cargo.arg("--no-deps").arg("-p").arg("test");
 
-        build.run(&mut cargo);
-        build.cp_r(&my_out, &out);
+        builder.run(&mut cargo);
+        builder.cp_r(&my_out, &out);
     }
 }
 
@@ -578,7 +570,7 @@ impl Step for WhitelistedRustc {
 
     fn should_run(run: ShouldRun) -> ShouldRun {
         let builder = run.builder;
-        run.krate("rustc-main").default_condition(builder.build.config.docs)
+        run.krate("rustc-main").default_condition(builder.config.docs)
     }
 
     fn make_run(run: RunConfig) {
@@ -598,15 +590,14 @@ fn make_run(run: RunConfig) {
     /// here as we want to be able to keep it separate from the standard
     /// documentation. This is largely just a wrapper around `cargo doc`.
     fn run(self, builder: &Builder) {
-        let build = builder.build;
         let stage = self.stage;
         let target = self.target;
-        build.info(&format!("Documenting stage{} whitelisted compiler ({})", stage, target));
-        let out = build.doc_out(target);
+        builder.info(&format!("Documenting stage{} whitelisted compiler ({})", stage, target));
+        let out = builder.doc_out(target);
         t!(fs::create_dir_all(&out));
-        let compiler = builder.compiler(stage, build.build);
+        let compiler = builder.compiler(stage, builder.config.build);
         let rustdoc = builder.rustdoc(compiler.host);
-        let compiler = if build.force_use_stage1(compiler, target) {
+        let compiler = if builder.force_use_stage1(compiler, target) {
             builder.compiler(1, compiler.host)
         } else {
             compiler
@@ -616,16 +607,16 @@ fn run(self, builder: &Builder) {
         builder.ensure(Std { stage, target });
 
         builder.ensure(compile::Rustc { compiler, target });
-        let out_dir = build.stage_out(compiler, Mode::Librustc)
+        let out_dir = builder.stage_out(compiler, Mode::Librustc)
                            .join(target).join("doc");
 
         // See docs in std above for why we symlink
-        let my_out = build.crate_doc_out(target);
-        build.clear_if_dirty(&my_out, &rustdoc);
+        let my_out = builder.crate_doc_out(target);
+        builder.clear_if_dirty(&my_out, &rustdoc);
         t!(symlink_dir_force(&builder.config, &my_out, &out_dir));
 
         let mut cargo = builder.cargo(compiler, Mode::Librustc, target, "doc");
-        compile::rustc_cargo(build, &mut cargo);
+        compile::rustc_cargo(builder, &mut cargo);
 
         // We don't want to build docs for internal compiler dependencies in this
         // step (there is another step for that). Therefore, we whitelist the crates
@@ -635,8 +626,8 @@ fn run(self, builder: &Builder) {
             cargo.arg("-p").arg(krate);
         }
 
-        build.run(&mut cargo);
-        build.cp_r(&my_out, &out);
+        builder.run(&mut cargo);
+        builder.cp_r(&my_out, &out);
     }
 }
 
@@ -653,7 +644,7 @@ impl Step for Rustc {
 
     fn should_run(run: ShouldRun) -> ShouldRun {
         let builder = run.builder;
-        run.krate("rustc-main").default_condition(builder.build.config.docs)
+        run.krate("rustc-main").default_condition(builder.config.docs)
     }
 
     fn make_run(run: RunConfig) {
@@ -670,22 +661,21 @@ fn make_run(run: RunConfig) {
     /// we do not merge it with the other documentation from std, test and
     /// proc_macros. This is largely just a wrapper around `cargo doc`.
     fn run(self, builder: &Builder) {
-        let build = builder.build;
         let stage = self.stage;
         let target = self.target;
-        build.info(&format!("Documenting stage{} compiler ({})", stage, target));
-        let out = build.compiler_doc_out(target);
+        builder.info(&format!("Documenting stage{} compiler ({})", stage, target));
+        let out = builder.compiler_doc_out(target);
         t!(fs::create_dir_all(&out));
-        let compiler = builder.compiler(stage, build.build);
+        let compiler = builder.compiler(stage, builder.config.build);
         let rustdoc = builder.rustdoc(compiler.host);
-        let compiler = if build.force_use_stage1(compiler, target) {
+        let compiler = if builder.force_use_stage1(compiler, target) {
             builder.compiler(1, compiler.host)
         } else {
             compiler
         };
 
-        if !build.config.compiler_docs {
-            build.info(&format!("\tskipping - compiler docs disabled"));
+        if !builder.config.compiler_docs {
+            builder.info(&format!("\tskipping - compiler docs disabled"));
             return;
         }
 
@@ -693,16 +683,16 @@ fn run(self, builder: &Builder) {
         builder.ensure(Std { stage, target });
 
         builder.ensure(compile::Rustc { compiler, target });
-        let out_dir = build.stage_out(compiler, Mode::Librustc)
+        let out_dir = builder.stage_out(compiler, Mode::Librustc)
                            .join(target).join("doc");
         // We do not symlink to the same shared folder that already contains std library
         // documentation from previous steps as we do not want to include that.
-        build.clear_if_dirty(&out, &rustdoc);
+        builder.clear_if_dirty(&out, &rustdoc);
         t!(symlink_dir_force(&builder.config, &out, &out_dir));
 
         let mut cargo = builder.cargo(compiler, Mode::Librustc, target, "doc");
         cargo.env("RUSTDOCFLAGS", "--document-private-items");
-        compile::rustc_cargo(build, &mut cargo);
+        compile::rustc_cargo(builder, &mut cargo);
 
         // Only include compiler crates, no dependencies of those, such as `libc`.
         cargo.arg("--no-deps");
@@ -711,19 +701,19 @@ fn run(self, builder: &Builder) {
         let mut compiler_crates = HashSet::new();
         for root_crate in &["rustc", "rustc_driver"] {
             let interned_root_crate = INTERNER.intern_str(root_crate);
-            find_compiler_crates(&build, &interned_root_crate, &mut compiler_crates);
+            find_compiler_crates(builder, &interned_root_crate, &mut compiler_crates);
         }
 
         for krate in &compiler_crates {
             cargo.arg("-p").arg(krate);
         }
 
-        build.run(&mut cargo);
+        builder.run(&mut cargo);
     }
 }
 
 fn find_compiler_crates(
-    build: &Build,
+    builder: &Builder,
     name: &Interned<String>,
     crates: &mut HashSet<Interned<String>>
 ) {
@@ -731,9 +721,9 @@ fn find_compiler_crates(
     crates.insert(*name);
 
     // Look for dependencies.
-    for dep in build.crates.get(name).unwrap().deps.iter() {
-        if build.crates.get(dep).unwrap().is_local(build) {
-            find_compiler_crates(build, dep, crates);
+    for dep in builder.crates.get(name).unwrap().deps.iter() {
+        if builder.crates.get(dep).unwrap().is_local(builder) {
+            find_compiler_crates(builder, dep, crates);
         }
     }
 }
@@ -750,7 +740,7 @@ impl Step for ErrorIndex {
 
     fn should_run(run: ShouldRun) -> ShouldRun {
         let builder = run.builder;
-        run.path("src/tools/error_index_generator").default_condition(builder.build.config.docs)
+        run.path("src/tools/error_index_generator").default_condition(builder.config.docs)
     }
 
     fn make_run(run: RunConfig) {
@@ -762,21 +752,20 @@ fn make_run(run: RunConfig) {
     /// Generates the HTML rendered error-index by running the
     /// `error_index_generator` tool.
     fn run(self, builder: &Builder) {
-        let build = builder.build;
         let target = self.target;
 
-        build.info(&format!("Documenting error index ({})", target));
-        let out = build.doc_out(target);
+        builder.info(&format!("Documenting error index ({})", target));
+        let out = builder.doc_out(target);
         t!(fs::create_dir_all(&out));
         let mut index = builder.tool_cmd(Tool::ErrorIndex);
         index.arg("html");
         index.arg(out.join("error-index.html"));
 
         // FIXME: shouldn't have to pass this env var
-        index.env("CFG_BUILD", &build.build)
-             .env("RUSTC_ERROR_METADATA_DST", build.extended_error_dir());
+        index.env("CFG_BUILD", &builder.config.build)
+             .env("RUSTC_ERROR_METADATA_DST", builder.extended_error_dir());
 
-        build.run(&mut index);
+        builder.run(&mut index);
     }
 }
 
@@ -792,7 +781,7 @@ impl Step for UnstableBookGen {
 
     fn should_run(run: ShouldRun) -> ShouldRun {
         let builder = run.builder;
-        run.path("src/tools/unstable-book-gen").default_condition(builder.build.config.docs)
+        run.path("src/tools/unstable-book-gen").default_condition(builder.config.docs)
     }
 
     fn make_run(run: RunConfig) {
@@ -802,23 +791,22 @@ fn make_run(run: RunConfig) {
     }
 
     fn run(self, builder: &Builder) {
-        let build = builder.build;
         let target = self.target;
 
         builder.ensure(compile::Std {
-            compiler: builder.compiler(builder.top_stage, build.build),
+            compiler: builder.compiler(builder.top_stage, builder.config.build),
             target,
         });
 
-        build.info(&format!("Generating unstable book md files ({})", target));
-        let out = build.md_doc_out(target).join("unstable-book");
-        build.create_dir(&out);
-        build.remove_dir(&out);
+        builder.info(&format!("Generating unstable book md files ({})", target));
+        let out = builder.md_doc_out(target).join("unstable-book");
+        builder.create_dir(&out);
+        builder.remove_dir(&out);
         let mut cmd = builder.tool_cmd(Tool::UnstableBookGen);
-        cmd.arg(build.src.join("src"));
+        cmd.arg(builder.src.join("src"));
         cmd.arg(out);
 
-        build.run(&mut cmd);
+        builder.run(&mut cmd);
     }
 }
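
A note on the pattern running through these hunks: call sites such as `builder.info(...)`, `builder.src`, and `builder.config.build` can replace `build.info(...)`, `build.src`, and `build.build` only because `Builder` gives direct access to the underlying `Build` state. A minimal sketch of that shape, assuming (as these call sites suggest) a `Deref` impl from `Builder` to `Build`; the types and fields below are simplified stand-ins, not the actual rustbuild definitions:

    use std::ops::Deref;
    use std::path::PathBuf;

    struct Config { build: String }               // simplified stand-in field
    struct Build { config: Config, src: PathBuf }
    struct Builder<'a> { build: &'a Build }

    impl<'a> Deref for Builder<'a> {
        type Target = Build;
        fn deref(&self) -> &Build { self.build }
    }

    impl Build {
        fn info(&self, msg: &str) { println!("{}", msg); }
    }

    fn demo(builder: &Builder) {
        // Thanks to Deref, field and method access on `builder` fall through
        // to `Build`, so `builder.config.build` and `builder.info(...)`
        // resolve without spelling out the `builder.build` reference.
        builder.info(&format!("host triple: {}", builder.config.build));
    }

    fn main() {
        let build = Build {
            config: Config { build: "x86_64-unknown-linux-gnu".to_string() },
            src: PathBuf::from("src"),
        };
        let builder = Builder { build: &build };
        demo(&builder);
    }

Under that assumption, the mechanical `build.` -> `builder.` rewrites in these hunks are equivalent accesses rather than behavior changes.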
 
index d71fdb8a30c70d01be928a1fa2d0e30127b674c5..b37a007e86390249fec901326a0220ddd6ab11ba 100644 (file)
@@ -62,8 +62,7 @@ fn install_sh(
     stage: u32,
     host: Option<Interned<String>>
 ) {
-    let build = builder.build;
-    build.info(&format!("Install {} stage{} ({:?})", package, stage, host));
+    builder.info(&format!("Install {} stage{} ({:?})", package, stage, host));
 
     let prefix_default = PathBuf::from("/usr/local");
     let sysconfdir_default = PathBuf::from("/etc");
@@ -72,15 +71,15 @@ fn install_sh(
     let bindir_default = PathBuf::from("bin");
     let libdir_default = PathBuf::from("lib");
     let mandir_default = datadir_default.join("man");
-    let prefix = build.config.prefix.as_ref().map_or(prefix_default, |p| {
+    let prefix = builder.config.prefix.as_ref().map_or(prefix_default, |p| {
         fs::canonicalize(p).expect(&format!("could not canonicalize {}", p.display()))
     });
-    let sysconfdir = build.config.sysconfdir.as_ref().unwrap_or(&sysconfdir_default);
-    let datadir = build.config.datadir.as_ref().unwrap_or(&datadir_default);
-    let docdir = build.config.docdir.as_ref().unwrap_or(&docdir_default);
-    let bindir = build.config.bindir.as_ref().unwrap_or(&bindir_default);
-    let libdir = build.config.libdir.as_ref().unwrap_or(&libdir_default);
-    let mandir = build.config.mandir.as_ref().unwrap_or(&mandir_default);
+    let sysconfdir = builder.config.sysconfdir.as_ref().unwrap_or(&sysconfdir_default);
+    let datadir = builder.config.datadir.as_ref().unwrap_or(&datadir_default);
+    let docdir = builder.config.docdir.as_ref().unwrap_or(&docdir_default);
+    let bindir = builder.config.bindir.as_ref().unwrap_or(&bindir_default);
+    let libdir = builder.config.libdir.as_ref().unwrap_or(&libdir_default);
+    let mandir = builder.config.mandir.as_ref().unwrap_or(&mandir_default);
 
     let sysconfdir = prefix.join(sysconfdir);
     let datadir = prefix.join(datadir);
@@ -99,18 +98,18 @@ fn install_sh(
     let libdir = add_destdir(&libdir, &destdir);
     let mandir = add_destdir(&mandir, &destdir);
 
-    let empty_dir = build.out.join("tmp/empty_dir");
+    let empty_dir = builder.out.join("tmp/empty_dir");
 
     t!(fs::create_dir_all(&empty_dir));
     let package_name = if let Some(host) = host {
-        format!("{}-{}", pkgname(build, name), host)
+        format!("{}-{}", pkgname(builder, name), host)
     } else {
-        pkgname(build, name)
+        pkgname(builder, name)
     };
 
     let mut cmd = Command::new("sh");
     cmd.current_dir(&empty_dir)
-        .arg(sanitize_sh(&tmpdir(build).join(&package_name).join("install.sh")))
+        .arg(sanitize_sh(&tmpdir(builder).join(&package_name).join("install.sh")))
         .arg(format!("--prefix={}", sanitize_sh(&prefix)))
         .arg(format!("--sysconfdir={}", sanitize_sh(&sysconfdir)))
         .arg(format!("--datadir={}", sanitize_sh(&datadir)))
@@ -119,7 +118,7 @@ fn install_sh(
         .arg(format!("--libdir={}", sanitize_sh(&libdir)))
         .arg(format!("--mandir={}", sanitize_sh(&mandir)))
         .arg("--disable-ldconfig");
-    build.run(&mut cmd);
+    builder.run(&mut cmd);
     t!(fs::remove_dir_all(&empty_dir));
 }
 
@@ -180,7 +179,7 @@ fn make_run(run: RunConfig) {
                 run.builder.ensure($name {
                     stage: run.builder.top_stage,
                     target: run.target,
-                    host: run.builder.build.build,
+                    host: run.builder.config.build,
                 });
             }
 
@@ -197,7 +196,7 @@ fn run($sel, $builder: &Builder) {
         install_docs(builder, self.stage, self.target);
     };
     Std, "src/libstd", true, only_hosts: true, {
-        for target in &builder.build.targets {
+        for target in &builder.targets {
             builder.ensure(dist::Std {
                 compiler: builder.compiler(self.stage, self.host),
                 target: *target
index db5891afd6b1fa59ac73c7c7a9590511d64299c4..2c2cf74d9790f554acf00e7da21a778d68820b46 100644 (file)
@@ -29,7 +29,6 @@
 use cmake;
 use cc;
 
-use Build;
 use util::{self, exe};
 use build_helper::up_to_date;
 use builder::{Builder, RunConfig, ShouldRun, Step};
@@ -60,39 +59,38 @@ fn make_run(run: RunConfig) {
 
     /// Compile LLVM for `target`.
     fn run(self, builder: &Builder) -> PathBuf {
-        let build = builder.build;
         let target = self.target;
         let emscripten = self.emscripten;
 
         // If we're using a custom LLVM bail out here, but we can only use a
         // custom LLVM for the build triple.
         if !self.emscripten {
-            if let Some(config) = build.config.target_config.get(&target) {
+            if let Some(config) = builder.config.target_config.get(&target) {
                 if let Some(ref s) = config.llvm_config {
-                    check_llvm_version(build, s);
+                    check_llvm_version(builder, s);
                     return s.to_path_buf()
                 }
             }
         }
 
-        let rebuild_trigger = build.src.join("src/rustllvm/llvm-rebuild-trigger");
+        let rebuild_trigger = builder.src.join("src/rustllvm/llvm-rebuild-trigger");
         let mut rebuild_trigger_contents = String::new();
         t!(t!(File::open(&rebuild_trigger)).read_to_string(&mut rebuild_trigger_contents));
 
         let (out_dir, llvm_config_ret_dir) = if emscripten {
-            let dir = build.emscripten_llvm_out(target);
+            let dir = builder.emscripten_llvm_out(target);
             let config_dir = dir.join("bin");
             (dir, config_dir)
         } else {
-            let mut dir = build.llvm_out(build.config.build);
-            if !build.config.build.contains("msvc") || build.config.ninja {
+            let mut dir = builder.llvm_out(builder.config.build);
+            if !builder.config.build.contains("msvc") || builder.config.ninja {
                 dir.push("build");
             }
-            (build.llvm_out(target), dir.join("bin"))
+            (builder.llvm_out(target), dir.join("bin"))
         };
         let done_stamp = out_dir.join("llvm-finished-building");
         let build_llvm_config = llvm_config_ret_dir
-            .join(exe("llvm-config", &*build.config.build));
+            .join(exe("llvm-config", &*builder.config.build));
         if done_stamp.exists() {
             let mut done_contents = String::new();
             t!(t!(File::open(&done_stamp)).read_to_string(&mut done_contents));
@@ -104,17 +102,17 @@ fn run(self, builder: &Builder) -> PathBuf {
             }
         }
 
-        let _folder = build.fold_output(|| "llvm");
+        let _folder = builder.fold_output(|| "llvm");
         let descriptor = if emscripten { "Emscripten " } else { "" };
-        build.info(&format!("Building {}LLVM for {}", descriptor, target));
-        let _time = util::timeit(&build);
+        builder.info(&format!("Building {}LLVM for {}", descriptor, target));
+        let _time = util::timeit(&builder);
         t!(fs::create_dir_all(&out_dir));
 
         // http://llvm.org/docs/CMake.html
         let root = if self.emscripten { "src/llvm-emscripten" } else { "src/llvm" };
-        let mut cfg = cmake::Config::new(build.src.join(root));
+        let mut cfg = cmake::Config::new(builder.src.join(root));
 
-        let profile = match (build.config.llvm_optimize, build.config.llvm_release_debuginfo) {
+        let profile = match (builder.config.llvm_optimize, builder.config.llvm_release_debuginfo) {
             (false, _) => "Debug",
             (true, false) => "Release",
             (true, true) => "RelWithDebInfo",
@@ -125,7 +123,7 @@ fn run(self, builder: &Builder) -> PathBuf {
         let llvm_targets = if self.emscripten {
             "JSBackend"
         } else {
-            match build.config.llvm_targets {
+            match builder.config.llvm_targets {
                 Some(ref s) => s,
                 None => "X86;ARM;AArch64;Mips;PowerPC;SystemZ;MSP430;Sparc;NVPTX;Hexagon",
             }
@@ -134,10 +132,10 @@ fn run(self, builder: &Builder) -> PathBuf {
         let llvm_exp_targets = if self.emscripten {
             ""
         } else {
-            &build.config.llvm_experimental_targets[..]
+            &builder.config.llvm_experimental_targets[..]
         };
 
-        let assertions = if build.config.llvm_assertions {"ON"} else {"OFF"};
+        let assertions = if builder.config.llvm_assertions {"ON"} else {"OFF"};
 
         cfg.out_dir(&out_dir)
            .profile(profile)
@@ -151,7 +149,7 @@ fn run(self, builder: &Builder) -> PathBuf {
            .define("WITH_POLLY", "OFF")
            .define("LLVM_ENABLE_TERMINFO", "OFF")
            .define("LLVM_ENABLE_LIBEDIT", "OFF")
-           .define("LLVM_PARALLEL_COMPILE_JOBS", build.jobs().to_string())
+           .define("LLVM_PARALLEL_COMPILE_JOBS", builder.jobs().to_string())
            .define("LLVM_TARGET_ARCH", target.split('-').next().unwrap())
            .define("LLVM_DEFAULT_TARGET_TRIPLE", target);
 
@@ -183,22 +181,22 @@ fn run(self, builder: &Builder) -> PathBuf {
             cfg.define("LLVM_BUILD_32_BITS", "ON");
         }
 
-        if let Some(num_linkers) = build.config.llvm_link_jobs {
+        if let Some(num_linkers) = builder.config.llvm_link_jobs {
             if num_linkers > 0 {
                 cfg.define("LLVM_PARALLEL_LINK_JOBS", num_linkers.to_string());
             }
         }
 
         // http://llvm.org/docs/HowToCrossCompileLLVM.html
-        if target != build.build && !emscripten {
+        if target != builder.config.build && !emscripten {
             builder.ensure(Llvm {
-                target: build.build,
+                target: builder.config.build,
                 emscripten: false,
             });
             // FIXME: if the llvm root for the build triple is overridden then we
             //        should use llvm-tblgen from there, also should verify that it
             //        actually exists most of the time in normal installs of LLVM.
-            let host = build.llvm_out(build.build).join("bin/llvm-tblgen");
+            let host = builder.llvm_out(builder.config.build).join("bin/llvm-tblgen");
             cfg.define("CMAKE_CROSSCOMPILING", "True")
                .define("LLVM_TABLEGEN", &host);
 
@@ -208,10 +206,10 @@ fn run(self, builder: &Builder) -> PathBuf {
                cfg.define("CMAKE_SYSTEM_NAME", "FreeBSD");
             }
 
-            cfg.define("LLVM_NATIVE_BUILD", build.llvm_out(build.build).join("build"));
+            cfg.define("LLVM_NATIVE_BUILD", builder.llvm_out(builder.config.build).join("build"));
         }
 
-        configure_cmake(build, target, &mut cfg, false);
+        configure_cmake(builder, target, &mut cfg, false);
 
         // FIXME: we don't actually need to build all LLVM tools and all LLVM
         //        libraries here, e.g. we just want a few components and a few
@@ -230,12 +228,12 @@ fn run(self, builder: &Builder) -> PathBuf {
     }
 }
 
-fn check_llvm_version(build: &Build, llvm_config: &Path) {
-    if !build.config.llvm_version_check {
+fn check_llvm_version(builder: &Builder, llvm_config: &Path) {
+    if !builder.config.llvm_version_check {
         return
     }
 
-    if build.config.dry_run {
+    if builder.config.dry_run {
         return;
     }
 
@@ -251,15 +249,15 @@ fn check_llvm_version(build: &Build, llvm_config: &Path) {
     panic!("\n\nbad LLVM version: {}, need >=3.9\n\n", version)
 }
 
-fn configure_cmake(build: &Build,
+fn configure_cmake(builder: &Builder,
                    target: Interned<String>,
                    cfg: &mut cmake::Config,
                    building_dist_binaries: bool) {
-    if build.config.ninja {
+    if builder.config.ninja {
         cfg.generator("Ninja");
     }
     cfg.target(&target)
-       .host(&build.config.build);
+       .host(&builder.config.build);
 
     let sanitize_cc = |cc: &Path| {
         if target.contains("msvc") {
@@ -272,29 +270,29 @@ fn configure_cmake(build: &Build,
     // MSVC with CMake uses msbuild by default which doesn't respect these
     // vars that we'd otherwise configure. In that case we just skip this
     // entirely.
-    if target.contains("msvc") && !build.config.ninja {
+    if target.contains("msvc") && !builder.config.ninja {
         return
     }
 
-    let cc = build.cc(target);
-    let cxx = build.cxx(target).unwrap();
+    let cc = builder.cc(target);
+    let cxx = builder.cxx(target).unwrap();
 
     // Handle msvc + ninja + ccache specially (this is what the bots use)
     if target.contains("msvc") &&
-       build.config.ninja &&
-       build.config.ccache.is_some() {
+       builder.config.ninja &&
+       builder.config.ccache.is_some() {
         let mut cc = env::current_exe().expect("failed to get cwd");
         cc.set_file_name("sccache-plus-cl.exe");
 
        cfg.define("CMAKE_C_COMPILER", sanitize_cc(&cc))
           .define("CMAKE_CXX_COMPILER", sanitize_cc(&cc));
        cfg.env("SCCACHE_PATH",
-               build.config.ccache.as_ref().unwrap())
+               builder.config.ccache.as_ref().unwrap())
           .env("SCCACHE_TARGET", target);
 
     // If ccache is configured we inform the build in a slightly different way
     // about how to invoke ccache while also invoking our compilers.
-    } else if let Some(ref ccache) = build.config.ccache {
+    } else if let Some(ref ccache) = builder.config.ccache {
        cfg.define("CMAKE_C_COMPILER", ccache)
           .define("CMAKE_C_COMPILER_ARG1", sanitize_cc(cc))
           .define("CMAKE_CXX_COMPILER", ccache)
@@ -304,16 +302,16 @@ fn configure_cmake(build: &Build,
           .define("CMAKE_CXX_COMPILER", sanitize_cc(cxx));
     }
 
-    cfg.build_arg("-j").build_arg(build.jobs().to_string());
-    cfg.define("CMAKE_C_FLAGS", build.cflags(target).join(" "));
-    let mut cxxflags = build.cflags(target).join(" ");
+    cfg.build_arg("-j").build_arg(builder.jobs().to_string());
+    cfg.define("CMAKE_C_FLAGS", builder.cflags(target).join(" "));
+    let mut cxxflags = builder.cflags(target).join(" ");
     if building_dist_binaries {
-        if build.config.llvm_static_stdcpp && !target.contains("windows") {
+        if builder.config.llvm_static_stdcpp && !target.contains("windows") {
             cxxflags.push_str(" -static-libstdc++");
         }
     }
     cfg.define("CMAKE_CXX_FLAGS", cxxflags);
-    if let Some(ar) = build.ar(target) {
+    if let Some(ar) = builder.ar(target) {
         if ar.is_absolute() {
             // LLVM build breaks if `CMAKE_AR` is a relative path, for some reason it
             // tries to resolve this path in the LLVM build directory.
@@ -349,26 +347,25 @@ fn run(self, builder: &Builder) -> PathBuf {
             return PathBuf::from("lld-out-dir-test-gen");
         }
         let target = self.target;
-        let build = builder.build;
 
         let llvm_config = builder.ensure(Llvm {
             target: self.target,
             emscripten: false,
         });
 
-        let out_dir = build.lld_out(target);
+        let out_dir = builder.lld_out(target);
         let done_stamp = out_dir.join("lld-finished-building");
         if done_stamp.exists() {
             return out_dir
         }
 
-        let _folder = build.fold_output(|| "lld");
-        build.info(&format!("Building LLD for {}", target));
-        let _time = util::timeit(&build);
+        let _folder = builder.fold_output(|| "lld");
+        builder.info(&format!("Building LLD for {}", target));
+        let _time = util::timeit(&builder);
         t!(fs::create_dir_all(&out_dir));
 
-        let mut cfg = cmake::Config::new(build.src.join("src/tools/lld"));
-        configure_cmake(build, target, &mut cfg, true);
+        let mut cfg = cmake::Config::new(builder.src.join("src/tools/lld"));
+        configure_cmake(builder, target, &mut cfg, true);
 
         cfg.out_dir(&out_dir)
            .profile("Release")
@@ -404,16 +401,15 @@ fn run(self, builder: &Builder) {
         if builder.config.dry_run {
             return;
         }
-        let build = builder.build;
         let target = self.target;
-        let dst = build.test_helpers_out(target);
-        let src = build.src.join("src/test/auxiliary/rust_test_helpers.c");
+        let dst = builder.test_helpers_out(target);
+        let src = builder.src.join("src/test/auxiliary/rust_test_helpers.c");
         if up_to_date(&src, &dst.join("librust_test_helpers.a")) {
             return
         }
 
-        let _folder = build.fold_output(|| "build_test_helpers");
-        build.info(&format!("Building test helpers"));
+        let _folder = builder.fold_output(|| "build_test_helpers");
+        builder.info(&format!("Building test helpers"));
         t!(fs::create_dir_all(&dst));
         let mut cfg = cc::Build::new();
 
@@ -421,20 +417,20 @@ fn run(self, builder: &Builder) {
         // extra configuration, so inform gcc of these compilers. Note, though, that
         // on MSVC we still need gcc's detection of env vars (ugh).
         if !target.contains("msvc") {
-            if let Some(ar) = build.ar(target) {
+            if let Some(ar) = builder.ar(target) {
                 cfg.archiver(ar);
             }
-            cfg.compiler(build.cc(target));
+            cfg.compiler(builder.cc(target));
         }
 
         cfg.cargo_metadata(false)
            .out_dir(&dst)
            .target(&target)
-           .host(&build.build)
+           .host(&builder.config.build)
            .opt_level(0)
            .warnings(false)
            .debug(false)
-           .file(build.src.join("src/test/auxiliary/rust_test_helpers.c"))
+           .file(builder.src.join("src/test/auxiliary/rust_test_helpers.c"))
            .compile("rust_test_helpers");
     }
 }
@@ -459,9 +455,8 @@ fn run(self, builder: &Builder) {
         if builder.config.dry_run {
             return;
         }
-        let build = builder.build;
         let target = self.target;
-        let out = match build.openssl_dir(target) {
+        let out = match builder.openssl_dir(target) {
             Some(dir) => dir,
             None => return,
         };
@@ -497,7 +492,8 @@ fn run(self, builder: &Builder) {
                 }
 
                 // Ensure the hash is correct.
-                let mut shasum = if target.contains("apple") || build.build.contains("netbsd") {
+                let mut shasum = if target.contains("apple") ||
+                    builder.config.build.contains("netbsd") {
                     let mut cmd = Command::new("shasum");
                     cmd.arg("-a").arg("256");
                     cmd
@@ -530,10 +526,10 @@ fn run(self, builder: &Builder) {
             t!(fs::rename(&tmp, &tarball));
         }
         let obj = out.join(format!("openssl-{}", OPENSSL_VERS));
-        let dst = build.openssl_install_dir(target).unwrap();
+        let dst = builder.openssl_install_dir(target).unwrap();
         drop(fs::remove_dir_all(&obj));
         drop(fs::remove_dir_all(&dst));
-        build.run(Command::new("tar").arg("zxf").arg(&tarball).current_dir(&out));
+        builder.run(Command::new("tar").arg("zxf").arg(&tarball).current_dir(&out));
 
         let mut configure = Command::new("perl");
         configure.arg(obj.join("Configure"));
@@ -583,8 +579,8 @@ fn run(self, builder: &Builder) {
             _ => panic!("don't know how to configure OpenSSL for {}", target),
         };
         configure.arg(os);
-        configure.env("CC", build.cc(target));
-        for flag in build.cflags(target) {
+        configure.env("CC", builder.cc(target));
+        for flag in builder.cflags(target) {
             configure.arg(flag);
         }
         // There is no specific os target for android aarch64 or x86_64,
@@ -596,7 +592,7 @@ fn run(self, builder: &Builder) {
         if target == "sparc64-unknown-netbsd" {
             // Need -m64 to get assembly generated correctly for sparc64.
             configure.arg("-m64");
-            if build.build.contains("netbsd") {
+            if builder.config.build.contains("netbsd") {
                 // Disable sparc64 asm on NetBSD builders, it uses
                 // m4(1)'s -B flag, which NetBSD m4 does not support.
                 configure.arg("no-asm");
@@ -609,12 +605,12 @@ fn run(self, builder: &Builder) {
             configure.arg("no-asm");
         }
         configure.current_dir(&obj);
-        build.info(&format!("Configuring openssl for {}", target));
-        build.run_quiet(&mut configure);
-        build.info(&format!("Building openssl for {}", target));
-        build.run_quiet(Command::new("make").arg("-j1").current_dir(&obj));
-        build.info(&format!("Installing openssl for {}", target));
-        build.run_quiet(Command::new("make").arg("install").arg("-j1").current_dir(&obj));
+        builder.info(&format!("Configuring openssl for {}", target));
+        builder.run_quiet(&mut configure);
+        builder.info(&format!("Building openssl for {}", target));
+        builder.run_quiet(Command::new("make").arg("-j1").current_dir(&obj));
+        builder.info(&format!("Installing openssl for {}", target));
+        builder.run_quiet(Command::new("make").arg("install").arg("-j1").current_dir(&obj));
 
         let mut f = t!(File::create(&stamp));
         t!(f.write_all(OPENSSL_VERS.as_bytes()));
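
The LLVM, LLD, and OpenSSL steps above all use the same skip-if-unchanged idea: compare a "done" stamp file against a trigger value (the rebuild-trigger file contents, or the OpenSSL version string) and rebuild only when they differ, writing the stamp after a successful build. A standalone sketch of that pattern with hypothetical file names, not the rustbuild helpers themselves:

    use std::fs;
    use std::io;
    use std::path::Path;

    // Rebuild only if the recorded stamp differs from the current trigger value.
    fn needs_rebuild(done_stamp: &Path, trigger: &str) -> bool {
        match fs::read_to_string(done_stamp) {
            Ok(contents) => contents != trigger,
            Err(_) => true, // no stamp yet, so build
        }
    }

    // Record the trigger value once the expensive build has finished.
    fn mark_done(done_stamp: &Path, trigger: &str) -> io::Result<()> {
        fs::write(done_stamp, trigger)
    }

    fn main() -> io::Result<()> {
        let stamp = Path::new("example-finished-building");   // hypothetical path
        let trigger = "contents of the rebuild trigger file"; // hypothetical value
        if needs_rebuild(stamp, trigger) {
            // ... run the expensive build here ...
            mark_done(stamp, trigger)?;
        }
        Ok(())
    }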
index 1b1cec5f18c07d530795f35ee5df4d4faa8145e8..c7f514da939a60639b7082ed5a8b7b64a2a80f0f 100644 (file)
@@ -140,14 +140,18 @@ pub fn check(build: &mut Build) {
             continue;
         }
 
-        cmd_finder.must_have(build.cc(*target));
-        if let Some(ar) = build.ar(*target) {
-            cmd_finder.must_have(ar);
+        if !build.config.dry_run {
+            cmd_finder.must_have(build.cc(*target));
+            if let Some(ar) = build.ar(*target) {
+                cmd_finder.must_have(ar);
+            }
         }
     }
 
     for host in &build.hosts {
-        cmd_finder.must_have(build.cxx(*host).unwrap());
+        if !build.config.dry_run {
+            cmd_finder.must_have(build.cxx(*host).unwrap());
+        }
 
         // The msvc hosts don't use jemalloc, turn it off globally to
         // avoid packaging the dummy liballoc_jemalloc on that platform.
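
The `check()` hunk above wraps the C and C++ toolchain probes in `if !build.config.dry_run`, so a dry run can walk the build steps without requiring `cc`/`cxx`/`ar` to be installed. A rough sketch of that guard pattern; `require_on_path` is a hypothetical stand-in for the real `must_have` check:

    use std::process::Command;

    struct Config { dry_run: bool }

    // Fail loudly if a required external tool cannot be spawned,
    // unless we are only doing a dry run.
    fn require_on_path(config: &Config, tool: &str) {
        if config.dry_run {
            return; // dry runs only plan work; skip probing the environment
        }
        if Command::new(tool).arg("--version").output().is_err() {
            panic!("required tool `{}` not found on PATH", tool);
        }
    }

    fn main() {
        let config = Config { dry_run: true };
        require_on_path(&config, "cc"); // skipped entirely in dry-run mode
    }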
index 29c8cd1568a39a0db6af388a44ffd24d4ecd7029..3d954cd5d848ca39423c900f36eecaec5efdfbed 100644 (file)
@@ -32,7 +32,7 @@
 use native;
 use tool::{self, Tool};
 use util::{self, dylib_path, dylib_path_var};
-use {Build, Mode};
+use Mode;
 use toolstate::ToolState;
 
 const ADB_TEST_DIR: &str = "/data/tmp/work";
@@ -65,28 +65,28 @@ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
     }
 }
 
-fn try_run(build: &Build, cmd: &mut Command) -> bool {
-    if !build.fail_fast {
-        if !build.try_run(cmd) {
-            let mut failures = build.delayed_failures.borrow_mut();
+fn try_run(builder: &Builder, cmd: &mut Command) -> bool {
+    if !builder.fail_fast {
+        if !builder.try_run(cmd) {
+            let mut failures = builder.delayed_failures.borrow_mut();
             failures.push(format!("{:?}", cmd));
             return false;
         }
     } else {
-        build.run(cmd);
+        builder.run(cmd);
     }
     true
 }
 
-fn try_run_quiet(build: &Build, cmd: &mut Command) -> bool {
-    if !build.fail_fast {
-        if !build.try_run_quiet(cmd) {
-            let mut failures = build.delayed_failures.borrow_mut();
+fn try_run_quiet(builder: &Builder, cmd: &mut Command) -> bool {
+    if !builder.fail_fast {
+        if !builder.try_run_quiet(cmd) {
+            let mut failures = builder.delayed_failures.borrow_mut();
             failures.push(format!("{:?}", cmd));
             return false;
         }
     } else {
-        build.run_quiet(cmd);
+        builder.run_quiet(cmd);
     }
     true
 }
@@ -106,21 +106,20 @@ impl Step for Linkcheck {
     /// This tool in `src/tools` will verify the validity of all our links in the
     /// documentation to ensure we don't have a bunch of dead ones.
     fn run(self, builder: &Builder) {
-        let build = builder.build;
         let host = self.host;
 
-        build.info(&format!("Linkcheck ({})", host));
+        builder.info(&format!("Linkcheck ({})", host));
 
         builder.default_doc(None);
 
-        let _time = util::timeit(&build);
-        try_run(build, builder.tool_cmd(Tool::Linkchecker)
-                              .arg(build.out.join(host).join("doc")));
+        let _time = util::timeit(&builder);
+        try_run(builder, builder.tool_cmd(Tool::Linkchecker)
+                              .arg(builder.out.join(host).join("doc")));
     }
 
     fn should_run(run: ShouldRun) -> ShouldRun {
         let builder = run.builder;
-        run.path("src/tools/linkchecker").default_condition(builder.build.config.docs)
+        run.path("src/tools/linkchecker").default_condition(builder.config.docs)
     }
 
     fn make_run(run: RunConfig) {
@@ -154,19 +153,18 @@ fn make_run(run: RunConfig) {
     /// This tool in `src/tools` will check out a few Rust projects and run `cargo
     /// test` to ensure that we don't regress the test suites there.
     fn run(self, builder: &Builder) {
-        let build = builder.build;
         let compiler = builder.compiler(self.stage, self.host);
         builder.ensure(compile::Rustc { compiler, target: compiler.host });
 
         // Note that this is a short, cryptic, and not scoped directory name. This
         // is currently to minimize the length of path on Windows where we otherwise
         // quickly run into path name limit constraints.
-        let out_dir = build.out.join("ct");
+        let out_dir = builder.out.join("ct");
         t!(fs::create_dir_all(&out_dir));
 
-        let _time = util::timeit(&build);
+        let _time = util::timeit(&builder);
         let mut cmd = builder.tool_cmd(Tool::CargoTest);
-        try_run(build, cmd.arg(&build.initial_cargo)
+        try_run(builder, cmd.arg(&builder.initial_cargo)
                           .arg(&out_dir)
                           .env("RUSTC", builder.rustc(compiler))
                           .env("RUSTDOC", builder.rustdoc(compiler.host)));
@@ -196,13 +194,12 @@ fn make_run(run: RunConfig) {
 
     /// Runs `cargo test` for `cargo` packaged with Rust.
     fn run(self, builder: &Builder) {
-        let build = builder.build;
         let compiler = builder.compiler(self.stage, self.host);
 
         builder.ensure(tool::Cargo { compiler, target: self.host });
         let mut cargo = builder.cargo(compiler, Mode::Tool, self.host, "test");
-        cargo.arg("--manifest-path").arg(build.src.join("src/tools/cargo/Cargo.toml"));
-        if !build.fail_fast {
+        cargo.arg("--manifest-path").arg(builder.src.join("src/tools/cargo/Cargo.toml"));
+        if !builder.fail_fast {
             cargo.arg("--no-fail-fast");
         }
 
@@ -213,7 +210,7 @@ fn run(self, builder: &Builder) {
         // available.
         cargo.env("CFG_DISABLE_CROSS_TESTS", "1");
 
-        try_run(build, cargo.env("PATH", &path_for_cargo(builder, compiler)));
+        try_run(builder, cargo.env("PATH", &path_for_cargo(builder, compiler)));
     }
 }
 
@@ -240,7 +237,6 @@ fn make_run(run: RunConfig) {
 
     /// Runs `cargo test` for the rls.
     fn run(self, builder: &Builder) {
-        let build = builder.build;
         let stage = self.stage;
         let host = self.host;
         let compiler = builder.compiler(stage, host);
@@ -257,8 +253,8 @@ fn run(self, builder: &Builder) {
 
         builder.add_rustc_lib_path(compiler, &mut cargo);
 
-        if try_run(build, &mut cargo) {
-            build.save_toolstate("rls", ToolState::TestPass);
+        if try_run(builder, &mut cargo) {
+            builder.save_toolstate("rls", ToolState::TestPass);
         }
     }
 }
@@ -286,7 +282,6 @@ fn make_run(run: RunConfig) {
 
     /// Runs `cargo test` for rustfmt.
     fn run(self, builder: &Builder) {
-        let build = builder.build;
         let stage = self.stage;
         let host = self.host;
         let compiler = builder.compiler(stage, host);
@@ -303,8 +298,8 @@ fn run(self, builder: &Builder) {
 
         builder.add_rustc_lib_path(compiler, &mut cargo);
 
-        if try_run(build, &mut cargo) {
-            build.save_toolstate("rustfmt", ToolState::TestPass);
+        if try_run(builder, &mut cargo) {
+            builder.save_toolstate("rustfmt", ToolState::TestPass);
         }
     }
 }
@@ -321,7 +316,7 @@ impl Step for Miri {
     const DEFAULT: bool = true;
 
     fn should_run(run: ShouldRun) -> ShouldRun {
-        let test_miri = run.builder.build.config.test_miri;
+        let test_miri = run.builder.config.test_miri;
         run.path("src/tools/miri").default_condition(test_miri)
     }
 
@@ -334,7 +329,6 @@ fn make_run(run: RunConfig) {
 
     /// Runs `cargo test` for miri.
     fn run(self, builder: &Builder) {
-        let build = builder.build;
         let stage = self.stage;
         let host = self.host;
         let compiler = builder.compiler(stage, host);
@@ -346,7 +340,7 @@ fn run(self, builder: &Builder) {
         });
         if let Some(miri) = miri {
             let mut cargo = builder.cargo(compiler, Mode::Tool, host, "test");
-            cargo.arg("--manifest-path").arg(build.src.join("src/tools/miri/Cargo.toml"));
+            cargo.arg("--manifest-path").arg(builder.src.join("src/tools/miri/Cargo.toml"));
 
             // Don't build tests dynamically, just a pain to work with
             cargo.env("RUSTC_NO_PREFER_DYNAMIC", "1");
@@ -358,8 +352,8 @@ fn run(self, builder: &Builder) {
 
             builder.add_rustc_lib_path(compiler, &mut cargo);
 
-            if try_run(build, &mut cargo) {
-                build.save_toolstate("miri", ToolState::TestPass);
+            if try_run(builder, &mut cargo) {
+                builder.save_toolstate("miri", ToolState::TestPass);
             }
         } else {
             eprintln!("failed to test miri: could not build");
@@ -391,7 +385,6 @@ fn make_run(run: RunConfig) {
 
     /// Runs `cargo test` for clippy.
     fn run(self, builder: &Builder) {
-        let build = builder.build;
         let stage = self.stage;
         let host = self.host;
         let compiler = builder.compiler(stage, host);
@@ -403,7 +396,7 @@ fn run(self, builder: &Builder) {
         });
         if let Some(clippy) = clippy {
             let mut cargo = builder.cargo(compiler, Mode::Tool, host, "test");
-            cargo.arg("--manifest-path").arg(build.src.join("src/tools/clippy/Cargo.toml"));
+            cargo.arg("--manifest-path").arg(builder.src.join("src/tools/clippy/Cargo.toml"));
 
             // Don't build tests dynamically, just a pain to work with
             cargo.env("RUSTC_NO_PREFER_DYNAMIC", "1");
@@ -418,8 +411,8 @@ fn run(self, builder: &Builder) {
 
             builder.add_rustc_lib_path(compiler, &mut cargo);
 
-            if try_run(build, &mut cargo) {
-                build.save_toolstate("clippy-driver", ToolState::TestPass);
+            if try_run(builder, &mut cargo) {
+                builder.save_toolstate("clippy-driver", ToolState::TestPass);
             }
         } else {
             eprintln!("failed to test clippy: could not build");
@@ -466,14 +459,14 @@ fn run(self, builder: &Builder) {
            .env("RUSTC_STAGE", self.compiler.stage.to_string())
            .env("RUSTC_SYSROOT", builder.sysroot(self.compiler))
            .env("RUSTDOC_LIBDIR", builder.sysroot_libdir(self.compiler, self.compiler.host))
-           .env("CFG_RELEASE_CHANNEL", &builder.build.config.channel)
+           .env("CFG_RELEASE_CHANNEL", &builder.config.channel)
            .env("RUSTDOC_REAL", builder.rustdoc(self.compiler.host))
-           .env("RUSTDOC_CRATE_VERSION", builder.build.rust_version())
+           .env("RUSTDOC_CRATE_VERSION", builder.rust_version())
            .env("RUSTC_BOOTSTRAP", "1");
-        if let Some(linker) = builder.build.linker(self.compiler.host) {
+        if let Some(linker) = builder.linker(self.compiler.host) {
             cmd.env("RUSTC_TARGET_LINKER", linker);
         }
-        try_run(builder.build, &mut cmd);
+        try_run(builder, &mut cmd);
     }
 }
 
@@ -514,6 +507,41 @@ fn run(self, builder: &Builder) {
     }
 }
 
+#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)]
+pub struct RustdocUi {
+    pub host: Interned<String>,
+    pub target: Interned<String>,
+    pub compiler: Compiler,
+}
+
+impl Step for RustdocUi {
+    type Output = ();
+    const DEFAULT: bool = true;
+    const ONLY_HOSTS: bool = true;
+
+    fn should_run(run: ShouldRun) -> ShouldRun {
+        run.path("src/test/rustdoc-ui")
+    }
+
+    fn make_run(run: RunConfig) {
+        let compiler = run.builder.compiler(run.builder.top_stage, run.host);
+        run.builder.ensure(RustdocUi {
+            host: run.host,
+            target: run.target,
+            compiler,
+        });
+    }
+
+    fn run(self, builder: &Builder) {
+        builder.ensure(Compiletest {
+            compiler: self.compiler,
+            target: self.target,
+            mode: "ui",
+            suite: "rustdoc-ui",
+        })
+    }
+}
+
 #[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
 pub struct Tidy;
 
@@ -528,21 +556,19 @@ impl Step for Tidy {
     /// otherwise just implements a few lint-like checks that are specific to the
     /// compiler itself.
     fn run(self, builder: &Builder) {
-        let build = builder.build;
-
         let mut cmd = builder.tool_cmd(Tool::Tidy);
-        cmd.arg(build.src.join("src"));
-        cmd.arg(&build.initial_cargo);
-        if !build.config.vendor {
+        cmd.arg(builder.src.join("src"));
+        cmd.arg(&builder.initial_cargo);
+        if !builder.config.vendor {
             cmd.arg("--no-vendor");
         }
-        if build.config.quiet_tests {
+        if builder.config.quiet_tests {
             cmd.arg("--quiet");
         }
 
-        let _folder = build.fold_output(|| "tidy");
+        let _folder = builder.fold_output(|| "tidy");
         builder.info(&format!("tidy check"));
-        try_run(build, &mut cmd);
+        try_run(builder, &mut cmd);
     }
 
     fn should_run(run: ShouldRun) -> ShouldRun {
@@ -554,8 +580,8 @@ fn make_run(run: RunConfig) {
     }
 }
 
-fn testdir(build: &Build, host: Interned<String>) -> PathBuf {
-    build.out.join(host).join("test")
+fn testdir(builder: &Builder, host: Interned<String>) -> PathBuf {
+    builder.out.join(host).join("test")
 }
 
 macro_rules! default_test {
@@ -793,25 +819,24 @@ fn should_run(run: ShouldRun) -> ShouldRun {
     /// compiletest `mode` and `suite` arguments. For example `mode` can be
     /// "run-pass" or `suite` can be something like `debuginfo`.
     fn run(self, builder: &Builder) {
-        let build = builder.build;
         let compiler = self.compiler;
         let target = self.target;
         let mode = self.mode;
         let suite = self.suite;
 
         // Skip codegen tests if they aren't enabled in configuration.
-        if !build.config.codegen_tests && suite == "codegen" {
+        if !builder.config.codegen_tests && suite == "codegen" {
             return;
         }
 
         if suite == "debuginfo" {
             // Skip debuginfo tests on MSVC
-            if build.build.contains("msvc") {
+            if builder.config.build.contains("msvc") {
                 return;
             }
 
             if mode == "debuginfo-XXX" {
-                return if build.build.contains("apple") {
+                return if builder.config.build.contains("apple") {
                     builder.ensure(Compiletest {
                         mode: "debuginfo-lldb",
                         ..self
@@ -851,34 +876,44 @@ fn run(self, builder: &Builder) {
         cmd.arg("--run-lib-path").arg(builder.sysroot_libdir(compiler, target));
         cmd.arg("--rustc-path").arg(builder.rustc(compiler));
 
+        let is_rustdoc_ui = suite.ends_with("rustdoc-ui");
+
         // Avoid depending on rustdoc when we don't need it.
-        if mode == "rustdoc" || (mode == "run-make" && suite.ends_with("fulldeps")) {
+        if mode == "rustdoc" ||
+           (mode == "run-make" && suite.ends_with("fulldeps")) ||
+           (mode == "ui" && is_rustdoc_ui) {
             cmd.arg("--rustdoc-path").arg(builder.rustdoc(compiler.host));
         }
 
-        cmd.arg("--src-base").arg(build.src.join("src/test").join(suite));
-        cmd.arg("--build-base").arg(testdir(build, compiler.host).join(suite));
+        cmd.arg("--src-base").arg(builder.src.join("src/test").join(suite));
+        cmd.arg("--build-base").arg(testdir(builder, compiler.host).join(suite));
         cmd.arg("--stage-id").arg(format!("stage{}-{}", compiler.stage, target));
         cmd.arg("--mode").arg(mode);
         cmd.arg("--target").arg(target);
         cmd.arg("--host").arg(&*compiler.host);
-        cmd.arg("--llvm-filecheck").arg(build.llvm_filecheck(build.build));
+        cmd.arg("--llvm-filecheck").arg(builder.llvm_filecheck(builder.config.build));
 
-        if let Some(ref nodejs) = build.config.nodejs {
+        if let Some(ref nodejs) = builder.config.nodejs {
             cmd.arg("--nodejs").arg(nodejs);
         }
 
-        let mut flags = vec!["-Crpath".to_string()];
-        if build.config.rust_optimize_tests {
-            flags.push("-O".to_string());
-        }
-        if build.config.rust_debuginfo_tests {
-            flags.push("-g".to_string());
+        let mut flags = if is_rustdoc_ui {
+            Vec::new()
+        } else {
+            vec!["-Crpath".to_string()]
+        };
+        if !is_rustdoc_ui {
+            if builder.config.rust_optimize_tests {
+                flags.push("-O".to_string());
+            }
+            if builder.config.rust_debuginfo_tests {
+                flags.push("-g".to_string());
+            }
         }
         flags.push("-Zunstable-options".to_string());
-        flags.push(build.config.cmd.rustc_args().join(" "));
+        flags.push(builder.config.cmd.rustc_args().join(" "));
 
-        if let Some(linker) = build.linker(target) {
+        if let Some(linker) = builder.linker(target) {
             cmd.arg("--linker").arg(linker);
         }
 
@@ -887,69 +922,69 @@ fn run(self, builder: &Builder) {
 
         let mut targetflags = flags.clone();
         targetflags.push(format!("-Lnative={}",
-                                 build.test_helpers_out(target).display()));
+                                 builder.test_helpers_out(target).display()));
         cmd.arg("--target-rustcflags").arg(targetflags.join(" "));
 
-        cmd.arg("--docck-python").arg(build.python());
+        cmd.arg("--docck-python").arg(builder.python());
 
-        if build.build.ends_with("apple-darwin") {
+        if builder.config.build.ends_with("apple-darwin") {
             // Force /usr/bin/python on macOS for LLDB tests because we're loading the
             // LLDB plugin's compiled module which only works with the system python
             // (namely not Homebrew-installed python)
             cmd.arg("--lldb-python").arg("/usr/bin/python");
         } else {
-            cmd.arg("--lldb-python").arg(build.python());
+            cmd.arg("--lldb-python").arg(builder.python());
         }
 
-        if let Some(ref gdb) = build.config.gdb {
+        if let Some(ref gdb) = builder.config.gdb {
             cmd.arg("--gdb").arg(gdb);
         }
-        if let Some(ref vers) = build.lldb_version {
+        if let Some(ref vers) = builder.lldb_version {
             cmd.arg("--lldb-version").arg(vers);
         }
-        if let Some(ref dir) = build.lldb_python_dir {
+        if let Some(ref dir) = builder.lldb_python_dir {
             cmd.arg("--lldb-python-dir").arg(dir);
         }
 
-        cmd.args(&build.config.cmd.test_args());
+        cmd.args(&builder.config.cmd.test_args());
 
-        if build.is_verbose() {
+        if builder.is_verbose() {
             cmd.arg("--verbose");
         }
 
-        if build.config.quiet_tests {
+        if builder.config.quiet_tests {
             cmd.arg("--quiet");
         }
 
-        if build.config.llvm_enabled {
+        if builder.config.llvm_enabled {
             let llvm_config = builder.ensure(native::Llvm {
-                target: build.config.build,
+                target: builder.config.build,
                 emscripten: false,
             });
-            if !build.config.dry_run {
+            if !builder.config.dry_run {
                 let llvm_version = output(Command::new(&llvm_config).arg("--version"));
                 cmd.arg("--llvm-version").arg(llvm_version);
             }
-            if !build.is_rust_llvm(target) {
+            if !builder.is_rust_llvm(target) {
                 cmd.arg("--system-llvm");
             }
 
             // Only pass correct values for these flags for the `run-make` suite as it
             // requires that a C++ compiler was configured which isn't always the case.
-            if !build.config.dry_run && suite == "run-make-fulldeps" {
+            if !builder.config.dry_run && suite == "run-make-fulldeps" {
                 let llvm_components = output(Command::new(&llvm_config).arg("--components"));
                 let llvm_cxxflags = output(Command::new(&llvm_config).arg("--cxxflags"));
-                cmd.arg("--cc").arg(build.cc(target))
-                .arg("--cxx").arg(build.cxx(target).unwrap())
-                .arg("--cflags").arg(build.cflags(target).join(" "))
+                cmd.arg("--cc").arg(builder.cc(target))
+                .arg("--cxx").arg(builder.cxx(target).unwrap())
+                .arg("--cflags").arg(builder.cflags(target).join(" "))
                 .arg("--llvm-components").arg(llvm_components.trim())
                 .arg("--llvm-cxxflags").arg(llvm_cxxflags.trim());
-                if let Some(ar) = build.ar(target) {
+                if let Some(ar) = builder.ar(target) {
                     cmd.arg("--ar").arg(ar);
                 }
             }
         }
-        if suite == "run-make-fulldeps" && !build.config.llvm_enabled {
+        if suite == "run-make-fulldeps" && !builder.config.llvm_enabled {
             builder.info(
                 &format!("Ignoring run-make test suite as they generally don't work without LLVM"));
             return;
@@ -963,7 +998,7 @@ fn run(self, builder: &Builder) {
                .arg("--llvm-cxxflags").arg("");
         }
 
-        if build.remote_tested(target) {
+        if builder.remote_tested(target) {
             cmd.arg("--remote-test-client").arg(builder.tool_exe(Tool::RemoteTestClient));
         }
 
@@ -973,42 +1008,42 @@ fn run(self, builder: &Builder) {
         // Note that if we encounter `PATH` we make sure to append to our own `PATH`
         // rather than stomp over it.
         if target.contains("msvc") {
-            for &(ref k, ref v) in build.cc[&target].env() {
+            for &(ref k, ref v) in builder.cc[&target].env() {
                 if k != "PATH" {
                     cmd.env(k, v);
                 }
             }
         }
         cmd.env("RUSTC_BOOTSTRAP", "1");
-        build.add_rust_test_threads(&mut cmd);
+        builder.add_rust_test_threads(&mut cmd);
 
-        if build.config.sanitizers {
+        if builder.config.sanitizers {
             cmd.env("SANITIZER_SUPPORT", "1");
         }
 
-        if build.config.profiler {
+        if builder.config.profiler {
             cmd.env("PROFILER_SUPPORT", "1");
         }
 
-        cmd.env("RUST_TEST_TMPDIR", build.out.join("tmp"));
+        cmd.env("RUST_TEST_TMPDIR", builder.out.join("tmp"));
 
         cmd.arg("--adb-path").arg("adb");
         cmd.arg("--adb-test-dir").arg(ADB_TEST_DIR);
         if target.contains("android") {
             // Assume that cc for this target comes from the android sysroot
             cmd.arg("--android-cross-path")
-               .arg(build.cc(target).parent().unwrap().parent().unwrap());
+               .arg(builder.cc(target).parent().unwrap().parent().unwrap());
         } else {
             cmd.arg("--android-cross-path").arg("");
         }
 
-        build.ci_env.force_coloring_in_ci(&mut cmd);
+        builder.ci_env.force_coloring_in_ci(&mut cmd);
 
-        let _folder = build.fold_output(|| format!("test_{}", suite));
+        let _folder = builder.fold_output(|| format!("test_{}", suite));
         builder.info(&format!("Check compiletest suite={} mode={} ({} -> {})",
                  suite, mode, &compiler.host, target));
-        let _time = util::timeit(&build);
-        try_run(build, &mut cmd);
+        let _time = util::timeit(&builder);
+        try_run(builder, &mut cmd);
     }
 }
 
@@ -1034,16 +1069,15 @@ fn should_run(run: ShouldRun) -> ShouldRun {
     /// located in `src/doc`. The `rustdoc` that's run is the one that sits next to
     /// `compiler`.
     fn run(self, builder: &Builder) {
-        let build = builder.build;
         let compiler = self.compiler;
 
         builder.ensure(compile::Test { compiler, target: compiler.host });
 
         // Do a breadth-first traversal of the `src/doc` directory and just run
         // tests for all files that end in `*.md`
-        let mut stack = vec![build.src.join(self.path)];
-        let _time = util::timeit(&build);
-        let _folder = build.fold_output(|| format!("test_{}", self.name));
+        let mut stack = vec![builder.src.join(self.path)];
+        let _time = util::timeit(&builder);
+        let _folder = builder.fold_output(|| format!("test_{}", self.name));
 
         let mut files = Vec::new();
         while let Some(p) = stack.pop() {
@@ -1057,7 +1091,7 @@ fn run(self, builder: &Builder) {
             }
 
             // The nostarch directory in the book is for no starch, and so isn't
-            // guaranteed to build. We don't care if it doesn't build, so skip it.
+            // guaranteed to build. We don't care if it doesn't build, so skip it.
             if p.to_str().map_or(false, |p| p.contains("nostarch")) {
                 continue;
             }
@@ -1075,7 +1109,7 @@ fn run(self, builder: &Builder) {
                 } else {
                     ToolState::TestFail
                 };
-                build.save_toolstate(self.name, toolstate);
+                builder.save_toolstate(self.name, toolstate);
             }
         }
     }
@@ -1153,32 +1187,30 @@ fn make_run(run: RunConfig) {
     /// generate a markdown file from the error indexes of the code base which is
     /// then passed to `rustdoc --test`.
     fn run(self, builder: &Builder) {
-        let build = builder.build;
         let compiler = self.compiler;
 
         builder.ensure(compile::Std { compiler, target: compiler.host });
 
-        let dir = testdir(build, compiler.host);
+        let dir = testdir(builder, compiler.host);
         t!(fs::create_dir_all(&dir));
         let output = dir.join("error-index.md");
 
         let mut tool = builder.tool_cmd(Tool::ErrorIndex);
         tool.arg("markdown")
             .arg(&output)
-            .env("CFG_BUILD", &build.build)
-            .env("RUSTC_ERROR_METADATA_DST", build.extended_error_dir());
+            .env("CFG_BUILD", &builder.config.build)
+            .env("RUSTC_ERROR_METADATA_DST", builder.extended_error_dir());
 
 
-        let _folder = build.fold_output(|| "test_error_index");
-        build.info(&format!("Testing error-index stage{}", compiler.stage));
-        let _time = util::timeit(&build);
-        build.run(&mut tool);
+        let _folder = builder.fold_output(|| "test_error_index");
+        builder.info(&format!("Testing error-index stage{}", compiler.stage));
+        let _time = util::timeit(&builder);
+        builder.run(&mut tool);
         markdown_test(builder, compiler, &output);
     }
 }
 
 fn markdown_test(builder: &Builder, compiler: Compiler, markdown: &Path) -> bool {
-    let build = builder.build;
     match File::open(markdown) {
         Ok(mut file) => {
             let mut contents = String::new();
@@ -1190,20 +1222,20 @@ fn markdown_test(builder: &Builder, compiler: Compiler, markdown: &Path) -> bool
         Err(_) => {},
     }
 
-    build.info(&format!("doc tests for: {}", markdown.display()));
+    builder.info(&format!("doc tests for: {}", markdown.display()));
     let mut cmd = builder.rustdoc_cmd(compiler.host);
-    build.add_rust_test_threads(&mut cmd);
+    builder.add_rust_test_threads(&mut cmd);
     cmd.arg("--test");
     cmd.arg(markdown);
     cmd.env("RUSTC_BOOTSTRAP", "1");
 
-    let test_args = build.config.cmd.test_args().join(" ");
+    let test_args = builder.config.cmd.test_args().join(" ");
     cmd.arg("--test-args").arg(test_args);
 
-    if build.config.quiet_tests {
-        try_run_quiet(build, &mut cmd)
+    if builder.config.quiet_tests {
+        try_run_quiet(builder, &mut cmd)
     } else {
-        try_run(build, &mut cmd)
+        try_run(builder, &mut cmd)
     }
 }
 
@@ -1387,7 +1419,6 @@ fn make_run(run: RunConfig) {
     /// Currently this runs all tests for a DAG by passing a bunch of `-p foo`
     /// arguments, and those arguments are discovered from `cargo metadata`.
     fn run(self, builder: &Builder) {
-        let build = builder.build;
         let compiler = self.compiler;
         let target = self.target;
         let mode = self.mode;
@@ -1401,7 +1432,7 @@ fn run(self, builder: &Builder) {
         // libstd, then what we're actually testing is the libstd produced in
         // stage1. Reflect that here by updating the compiler that we're working
         // with automatically.
-        let compiler = if build.force_use_stage1(compiler, target) {
+        let compiler = if builder.force_use_stage1(compiler, target) {
             builder.compiler(1, compiler.host)
         } else {
             compiler.clone()
@@ -1413,11 +1444,11 @@ fn run(self, builder: &Builder) {
                 compile::std_cargo(builder, &compiler, target, &mut cargo);
             }
             Mode::Libtest => {
-                compile::test_cargo(build, &compiler, target, &mut cargo);
+                compile::test_cargo(builder, &compiler, target, &mut cargo);
             }
             Mode::Librustc => {
                 builder.ensure(compile::Rustc { compiler, target });
-                compile::rustc_cargo(build, &mut cargo);
+                compile::rustc_cargo(builder, &mut cargo);
             }
             _ => panic!("can only test libraries"),
         };
@@ -1427,10 +1458,10 @@ fn run(self, builder: &Builder) {
         // Pass in some standard flags then iterate over the graph we've discovered
         // in `cargo metadata` with the maps above and figure out what `-p`
         // arguments need to get passed.
-        if test_kind.subcommand() == "test" && !build.fail_fast {
+        if test_kind.subcommand() == "test" && !builder.fail_fast {
             cargo.arg("--no-fail-fast");
         }
-        if build.doc_tests {
+        if builder.doc_tests {
             cargo.arg("--doc");
         }
 
@@ -1446,21 +1477,21 @@ fn run(self, builder: &Builder) {
         cargo.env(dylib_path_var(), env::join_paths(&dylib_path).unwrap());
 
         cargo.arg("--");
-        cargo.args(&build.config.cmd.test_args());
+        cargo.args(&builder.config.cmd.test_args());
 
-        if build.config.quiet_tests {
+        if builder.config.quiet_tests {
             cargo.arg("--quiet");
         }
 
         if target.contains("emscripten") {
             cargo.env(format!("CARGO_TARGET_{}_RUNNER", envify(&target)),
-                      build.config.nodejs.as_ref().expect("nodejs not configured"));
+                      builder.config.nodejs.as_ref().expect("nodejs not configured"));
         } else if target.starts_with("wasm32") {
             // Warn about running tests without the `wasm_syscall` feature enabled.
             // The javascript shim implements the syscall interface so that test
             // output can be correctly reported.
-            if !build.config.wasm_syscall {
-                build.info(&format!("Libstd was built without `wasm_syscall` feature enabled: \
+            if !builder.config.wasm_syscall {
+                builder.info(&format!("Libstd was built without `wasm_syscall` feature enabled: \
                           test output may not be visible."));
             }
 
@@ -1468,25 +1499,25 @@ fn run(self, builder: &Builder) {
             // incompatible with `-C prefer-dynamic`, so disable that here
             cargo.env("RUSTC_NO_PREFER_DYNAMIC", "1");
 
-            let node = build.config.nodejs.as_ref()
+            let node = builder.config.nodejs.as_ref()
                 .expect("nodejs not configured");
             let runner = format!("{} {}/src/etc/wasm32-shim.js",
                                  node.display(),
-                                 build.src.display());
+                                 builder.src.display());
             cargo.env(format!("CARGO_TARGET_{}_RUNNER", envify(&target)), &runner);
-        } else if build.remote_tested(target) {
+        } else if builder.remote_tested(target) {
             cargo.env(format!("CARGO_TARGET_{}_RUNNER", envify(&target)),
                       format!("{} run",
                               builder.tool_exe(Tool::RemoteTestClient).display()));
         }
 
-        let _folder = build.fold_output(|| {
+        let _folder = builder.fold_output(|| {
             format!("{}_stage{}-{}", test_kind.subcommand(), compiler.stage, krate)
         });
-        build.info(&format!("{} {} stage{} ({} -> {})", test_kind, krate, compiler.stage,
+        builder.info(&format!("{} {} stage{} ({} -> {})", test_kind, krate, compiler.stage,
                 &compiler.host, target));
-        let _time = util::timeit(&build);
-        try_run(build, &mut cargo);
+        let _time = util::timeit(&builder);
+        try_run(builder, &mut cargo);
     }
 }
 
@@ -1523,7 +1554,6 @@ fn make_run(run: RunConfig) {
     }
 
     fn run(self, builder: &Builder) {
-        let build = builder.build;
         let test_kind = self.test_kind;
 
         let compiler = builder.compiler(builder.top_stage, self.host);
@@ -1534,27 +1564,27 @@ fn run(self, builder: &Builder) {
                                                  target,
                                                  test_kind.subcommand(),
                                                  "src/tools/rustdoc");
-        if test_kind.subcommand() == "test" && !build.fail_fast {
+        if test_kind.subcommand() == "test" && !builder.fail_fast {
             cargo.arg("--no-fail-fast");
         }
 
         cargo.arg("-p").arg("rustdoc:0.0.0");
 
         cargo.arg("--");
-        cargo.args(&build.config.cmd.test_args());
+        cargo.args(&builder.config.cmd.test_args());
 
-        if build.config.quiet_tests {
+        if builder.config.quiet_tests {
             cargo.arg("--quiet");
         }
 
-        let _folder = build.fold_output(|| {
+        let _folder = builder.fold_output(|| {
             format!("{}_stage{}-rustdoc", test_kind.subcommand(), compiler.stage)
         });
-        build.info(&format!("{} rustdoc stage{} ({} -> {})", test_kind, compiler.stage,
+        builder.info(&format!("{} rustdoc stage{} ({} -> {})", test_kind, compiler.stage,
                 &compiler.host, target));
-        let _time = util::timeit(&build);
+        let _time = util::timeit(&builder);
 
-        try_run(build, &mut cargo);
+        try_run(builder, &mut cargo);
     }
 }
 
@@ -1590,17 +1620,16 @@ fn should_run(run: ShouldRun) -> ShouldRun {
     }
 
     fn run(self, builder: &Builder) {
-        let build = builder.build;
         let compiler = self.compiler;
         let target = self.target;
-        if !build.remote_tested(target) {
+        if !builder.remote_tested(target) {
             return
         }
 
         builder.ensure(compile::Test { compiler, target });
 
-        build.info(&format!("REMOTE copy libs to emulator ({})", target));
-        t!(fs::create_dir_all(build.out.join("tmp")));
+        builder.info(&format!("REMOTE copy libs to emulator ({})", target));
+        t!(fs::create_dir_all(builder.out.join("tmp")));
 
         let server = builder.ensure(tool::RemoteTestServer { compiler, target });
 
@@ -1610,18 +1639,18 @@ fn run(self, builder: &Builder) {
         cmd.arg("spawn-emulator")
            .arg(target)
            .arg(&server)
-           .arg(build.out.join("tmp"));
-        if let Some(rootfs) = build.qemu_rootfs(target) {
+           .arg(builder.out.join("tmp"));
+        if let Some(rootfs) = builder.qemu_rootfs(target) {
             cmd.arg(rootfs);
         }
-        build.run(&mut cmd);
+        builder.run(&mut cmd);
 
         // Push all our dylibs to the emulator
         for f in t!(builder.sysroot_libdir(compiler, target).read_dir()) {
             let f = t!(f);
             let name = f.file_name().into_string().unwrap();
             if util::is_dylib(&name) {
-                build.run(Command::new(&tool)
+                builder.run(Command::new(&tool)
                                   .arg("push")
                                   .arg(f.path()));
             }
@@ -1645,10 +1674,8 @@ fn make_run(run: RunConfig) {
 
     /// Run "distcheck", a 'make check' from a tarball
     fn run(self, builder: &Builder) {
-        let build = builder.build;
-
-        build.info(&format!("Distcheck"));
-        let dir = build.out.join("tmp").join("distcheck");
+        builder.info(&format!("Distcheck"));
+        let dir = builder.out.join("tmp").join("distcheck");
         let _ = fs::remove_dir_all(&dir);
         t!(fs::create_dir_all(&dir));
 
@@ -1661,18 +1688,18 @@ fn run(self, builder: &Builder) {
            .arg(builder.ensure(dist::PlainSourceTarball))
            .arg("--strip-components=1")
            .current_dir(&dir);
-        build.run(&mut cmd);
-        build.run(Command::new("./configure")
-                         .args(&build.config.configure_args)
+        builder.run(&mut cmd);
+        builder.run(Command::new("./configure")
+                         .args(&builder.config.configure_args)
                          .arg("--enable-vendor")
                          .current_dir(&dir));
-        build.run(Command::new(build_helper::make(&build.build))
+        builder.run(Command::new(build_helper::make(&builder.config.build))
                          .arg("check")
                          .current_dir(&dir));
 
         // Now make sure that rust-src has all of libstd's dependencies
-        build.info(&format!("Distcheck rust-src"));
-        let dir = build.out.join("tmp").join("distcheck-src");
+        builder.info(&format!("Distcheck rust-src"));
+        let dir = builder.out.join("tmp").join("distcheck-src");
         let _ = fs::remove_dir_all(&dir);
         t!(fs::create_dir_all(&dir));
 
@@ -1681,10 +1708,10 @@ fn run(self, builder: &Builder) {
            .arg(builder.ensure(dist::Src))
            .arg("--strip-components=1")
            .current_dir(&dir);
-        build.run(&mut cmd);
+        builder.run(&mut cmd);
 
         let toml = dir.join("rust-src/lib/rustlib/src/rust/src/libstd/Cargo.toml");
-        build.run(Command::new(&build.initial_cargo)
+        builder.run(Command::new(&builder.initial_cargo)
                          .arg("generate-lockfile")
                          .arg("--manifest-path")
                          .arg(&toml)
@@ -1702,25 +1729,24 @@ impl Step for Bootstrap {
 
     /// Test the build system itself
     fn run(self, builder: &Builder) {
-        let build = builder.build;
-        let mut cmd = Command::new(&build.initial_cargo);
+        let mut cmd = Command::new(&builder.initial_cargo);
         cmd.arg("test")
-           .current_dir(build.src.join("src/bootstrap"))
+           .current_dir(builder.src.join("src/bootstrap"))
            .env("RUSTFLAGS", "-Cdebuginfo=2")
-           .env("CARGO_TARGET_DIR", build.out.join("bootstrap"))
+           .env("CARGO_TARGET_DIR", builder.out.join("bootstrap"))
            .env("RUSTC_BOOTSTRAP", "1")
-           .env("RUSTC", &build.initial_rustc);
+           .env("RUSTC", &builder.initial_rustc);
         if let Some(flags) = option_env!("RUSTFLAGS") {
             // Use the same rustc flags for testing as for "normal" compilation,
             // so that Cargo doesn’t recompile the entire dependency graph every time:
             // https://github.com/rust-lang/rust/issues/49215
             cmd.env("RUSTFLAGS", flags);
         }
-        if !build.fail_fast {
+        if !builder.fail_fast {
             cmd.arg("--no-fail-fast");
         }
-        cmd.arg("--").args(&build.config.cmd.test_args());
-        try_run(build, &mut cmd);
+        cmd.arg("--").args(&builder.config.cmd.test_args());
+        try_run(builder, &mut cmd);
     }
 
     fn should_run(run: ShouldRun) -> ShouldRun {
index 97507bc08698a737882a93a62491aeebe8bdfe8b..6c29bd84fe4673b407da036a821a3d3add99d55a 100644 (file)
@@ -38,7 +38,6 @@ fn should_run(run: ShouldRun) -> ShouldRun {
     }
 
     fn run(self, builder: &Builder) {
-        let build = builder.build;
         let compiler = self.compiler;
         let target = self.target;
         let mode = self.mode;
@@ -46,7 +45,7 @@ fn run(self, builder: &Builder) {
         // This is for the original compiler, but if we're forced to use stage 1, then
         // std/test/rustc stamps won't exist in stage 2, so we need to get those from stage 1, since
         // we copy the libs forward.
-        let tools_dir = build.stage_out(compiler, Mode::Tool);
+        let tools_dir = builder.stage_out(compiler, Mode::Tool);
         let compiler = if builder.force_use_stage1(compiler, target) {
             builder.compiler(1, compiler.host)
         } else {
@@ -55,13 +54,13 @@ fn run(self, builder: &Builder) {
 
         for &cur_mode in &[Mode::Libstd, Mode::Libtest, Mode::Librustc] {
             let stamp = match cur_mode {
-                Mode::Libstd => libstd_stamp(build, compiler, target),
-                Mode::Libtest => libtest_stamp(build, compiler, target),
-                Mode::Librustc => librustc_stamp(build, compiler, target),
+                Mode::Libstd => libstd_stamp(builder, compiler, target),
+                Mode::Libtest => libtest_stamp(builder, compiler, target),
+                Mode::Librustc => librustc_stamp(builder, compiler, target),
                 _ => panic!(),
             };
 
-            if build.clear_if_dirty(&tools_dir, &stamp) {
+            if builder.clear_if_dirty(&tools_dir, &stamp) {
                 break;
             }
 
@@ -97,7 +96,6 @@ fn should_run(run: ShouldRun) -> ShouldRun {
     /// This will build the specified tool with the specified `host` compiler in
     /// `stage` into the normal cargo output directory.
     fn run(self, builder: &Builder) -> Option<PathBuf> {
-        let build = builder.build;
         let compiler = self.compiler;
         let target = self.target;
         let tool = self.tool;
@@ -114,10 +112,10 @@ fn run(self, builder: &Builder) -> Option<PathBuf> {
         let mut cargo = prepare_tool_cargo(builder, compiler, target, "build", path);
         cargo.arg("--features").arg(self.extra_features.join(" "));
 
-        let _folder = build.fold_output(|| format!("stage{}-{}", compiler.stage, tool));
-        build.info(&format!("Building stage{} tool {} ({})", compiler.stage, tool, target));
+        let _folder = builder.fold_output(|| format!("stage{}-{}", compiler.stage, tool));
+        builder.info(&format!("Building stage{} tool {} ({})", compiler.stage, tool, target));
         let mut duplicates = Vec::new();
-        let is_expected = compile::stream_cargo(build, &mut cargo, &mut |msg| {
+        let is_expected = compile::stream_cargo(builder, &mut cargo, &mut |msg| {
             // Only care about big things like the RLS/Cargo for now
             if tool != "rls" && tool != "cargo" {
                 return
@@ -156,7 +154,7 @@ fn run(self, builder: &Builder) -> Option<PathBuf> {
                     }
                 }
 
-                let mut artifacts = build.tool_artifacts.borrow_mut();
+                let mut artifacts = builder.tool_artifacts.borrow_mut();
                 let prev_artifacts = artifacts
                     .entry(target)
                     .or_insert_with(Default::default);
@@ -190,7 +188,7 @@ fn run(self, builder: &Builder) -> Option<PathBuf> {
             panic!("tools should not compile multiple copies of the same crate");
         }
 
-        build.save_toolstate(tool, if is_expected {
+        builder.save_toolstate(tool, if is_expected {
             ToolState::TestFail
         } else {
             ToolState::BuildFail
@@ -203,10 +201,10 @@ fn run(self, builder: &Builder) -> Option<PathBuf> {
                 return None;
             }
         } else {
-            let cargo_out = build.cargo_out(compiler, Mode::Tool, target)
+            let cargo_out = builder.cargo_out(compiler, Mode::Tool, target)
                 .join(exe(tool, &compiler.host));
-            let bin = build.tools_dir(compiler).join(exe(tool, &compiler.host));
-            build.copy(&cargo_out, &bin);
+            let bin = builder.tools_dir(compiler).join(exe(tool, &compiler.host));
+            builder.copy(&cargo_out, &bin);
             Some(bin)
         }
     }
@@ -219,16 +217,15 @@ pub fn prepare_tool_cargo(
     command: &'static str,
     path: &'static str,
 ) -> Command {
-    let build = builder.build;
     let mut cargo = builder.cargo(compiler, Mode::Tool, target, command);
-    let dir = build.src.join(path);
+    let dir = builder.src.join(path);
     cargo.arg("--manifest-path").arg(dir.join("Cargo.toml"));
 
     // We don't want to build tools dynamically as they'll be running across
     // stages and such and it's just easier if they're not dynamically linked.
     cargo.env("RUSTC_NO_PREFER_DYNAMIC", "1");
 
-    if let Some(dir) = build.openssl_install_dir(target) {
+    if let Some(dir) = builder.openssl_install_dir(target) {
         cargo.env("OPENSSL_STATIC", "1");
         cargo.env("OPENSSL_DIR", dir);
         cargo.env("LIBZ_SYS_STATIC", "1");
@@ -238,10 +235,10 @@ pub fn prepare_tool_cargo(
     // own copy
     cargo.env("LZMA_API_STATIC", "1");
 
-    cargo.env("CFG_RELEASE_CHANNEL", &build.config.channel);
-    cargo.env("CFG_VERSION", build.rust_version());
+    cargo.env("CFG_RELEASE_CHANNEL", &builder.config.channel);
+    cargo.env("CFG_VERSION", builder.rust_version());
 
-    let info = GitInfo::new(&build.config, &dir);
+    let info = GitInfo::new(&builder.config, &dir);
     if let Some(sha) = info.sha() {
         cargo.env("CFG_COMMIT_HASH", sha);
     }
@@ -269,8 +266,8 @@ pub fn tool_exe(&self, tool: Tool) -> PathBuf {
                 match tool {
                     $(Tool::$name =>
                         self.ensure($name {
-                            compiler: self.compiler(stage, self.build.build),
-                            target: self.build.build,
+                            compiler: self.compiler(stage, self.config.build),
+                            target: self.config.build,
                         }),
                     )+
                 }
@@ -304,7 +301,7 @@ fn should_run(run: ShouldRun) -> ShouldRun {
 
             fn make_run(run: RunConfig) {
                 run.builder.ensure($name {
-                    compiler: run.builder.compiler(run.builder.top_stage, run.builder.build.build),
+                    compiler: run.builder.compiler(run.builder.top_stage, run.builder.config.build),
                     target: run.target,
                 });
             }
@@ -354,7 +351,7 @@ fn should_run(run: ShouldRun) -> ShouldRun {
 
     fn make_run(run: RunConfig) {
         run.builder.ensure(RemoteTestServer {
-            compiler: run.builder.compiler(run.builder.top_stage, run.builder.build.build),
+            compiler: run.builder.compiler(run.builder.top_stage, run.builder.config.build),
             target: run.target,
         });
     }
@@ -393,26 +390,25 @@ fn make_run(run: RunConfig) {
     }
 
     fn run(self, builder: &Builder) -> PathBuf {
-        let build = builder.build;
         let target_compiler = builder.compiler(builder.top_stage, self.host);
         let target = target_compiler.host;
         let build_compiler = if target_compiler.stage == 0 {
-            builder.compiler(0, builder.build.build)
+            builder.compiler(0, builder.config.build)
         } else if target_compiler.stage >= 2 {
             // Past stage 2, we consider the compiler to be ABI-compatible and hence capable of
             // building rustdoc itself.
-            builder.compiler(target_compiler.stage, builder.build.build)
+            builder.compiler(target_compiler.stage, builder.config.build)
         } else {
             // Similar to `compile::Assemble`, build with the previous stage's compiler. Otherwise
             // we'd have stageN/bin/rustc and stageN/bin/rustdoc be effectively different stage
             // compilers, which isn't what we want.
-            builder.compiler(target_compiler.stage - 1, builder.build.build)
+            builder.compiler(target_compiler.stage - 1, builder.config.build)
         };
 
         builder.ensure(compile::Rustc { compiler: build_compiler, target });
         builder.ensure(compile::Rustc {
             compiler: build_compiler,
-            target: builder.build.build,
+            target: builder.config.build,
         });
 
         let mut cargo = prepare_tool_cargo(builder,
@@ -425,15 +421,15 @@ fn run(self, builder: &Builder) -> PathBuf {
         cargo.env("RUSTC_DEBUGINFO", builder.config.rust_debuginfo.to_string())
              .env("RUSTC_DEBUGINFO_LINES", builder.config.rust_debuginfo_lines.to_string());
 
-        let _folder = build.fold_output(|| format!("stage{}-rustdoc", target_compiler.stage));
-        build.info(&format!("Building rustdoc for stage{} ({})",
+        let _folder = builder.fold_output(|| format!("stage{}-rustdoc", target_compiler.stage));
+        builder.info(&format!("Building rustdoc for stage{} ({})",
             target_compiler.stage, target_compiler.host));
-        build.run(&mut cargo);
+        builder.run(&mut cargo);
 
         // Cargo adds a number of paths to the dylib search path on windows, which results in
         // the wrong rustdoc being executed. To avoid the conflicting rustdocs, we name the "tool"
         // rustdoc a different name.
-        let tool_rustdoc = build.cargo_out(build_compiler, Mode::Tool, target)
+        let tool_rustdoc = builder.cargo_out(build_compiler, Mode::Tool, target)
             .join(exe("rustdoc-tool-binary", &target_compiler.host));
 
         // don't create a stage0-sysroot/bin directory.
@@ -443,7 +439,7 @@ fn run(self, builder: &Builder) -> PathBuf {
             t!(fs::create_dir_all(&bindir));
             let bin_rustdoc = bindir.join(exe("rustdoc", &*target_compiler.host));
             let _ = fs::remove_file(&bin_rustdoc);
-            build.copy(&tool_rustdoc, &bin_rustdoc);
+            builder.copy(&tool_rustdoc, &bin_rustdoc);
             bin_rustdoc
         } else {
             tool_rustdoc
@@ -464,12 +460,12 @@ impl Step for Cargo {
 
     fn should_run(run: ShouldRun) -> ShouldRun {
         let builder = run.builder;
-        run.path("src/tools/cargo").default_condition(builder.build.config.extended)
+        run.path("src/tools/cargo").default_condition(builder.config.extended)
     }
 
     fn make_run(run: RunConfig) {
         run.builder.ensure(Cargo {
-            compiler: run.builder.compiler(run.builder.top_stage, run.builder.build.build),
+            compiler: run.builder.compiler(run.builder.top_stage, run.builder.config.build),
             target: run.target,
         });
     }
@@ -482,7 +478,7 @@ fn run(self, builder: &Builder) -> PathBuf {
         // compiler to be available, so we need to depend on that.
         builder.ensure(compile::Rustc {
             compiler: self.compiler,
-            target: builder.build.build,
+            target: builder.config.build,
         });
         builder.ensure(ToolBuild {
             compiler: self.compiler,
@@ -518,12 +514,12 @@ impl Step for $name {
 
             fn should_run(run: ShouldRun) -> ShouldRun {
                 let builder = run.builder;
-                run.path($path).default_condition(builder.build.config.extended)
+                run.path($path).default_condition(builder.config.extended)
             }
 
             fn make_run(run: RunConfig) {
                 run.builder.ensure($name {
-                    compiler: run.builder.compiler(run.builder.top_stage, run.builder.build.build),
+                    compiler: run.builder.compiler(run.builder.top_stage, run.builder.config.build),
                     target: run.target,
                     extra_features: Vec::new(),
                 });
@@ -554,7 +550,7 @@ fn run(mut $sel, $builder: &Builder) -> Option<PathBuf> {
         // compiler to be available, so we need to depend on that.
         builder.ensure(compile::Rustc {
             compiler: self.compiler,
-            target: builder.build.build,
+            target: builder.config.build,
         });
     };
     Miri, miri, "src/tools/miri", "miri", {};
@@ -575,7 +571,7 @@ fn run(mut $sel, $builder: &Builder) -> Option<PathBuf> {
         // compiler to be available, so we need to depend on that.
         builder.ensure(compile::Rustc {
             compiler: self.compiler,
-            target: builder.build.build,
+            target: builder.config.build,
         });
     };
     Rustfmt, rustfmt, "src/tools/rustfmt", "rustfmt", {};
@@ -586,7 +582,7 @@ impl<'a> Builder<'a> {
     /// `host`.
     pub fn tool_cmd(&self, tool: Tool) -> Command {
         let mut cmd = Command::new(self.tool_exe(tool));
-        let compiler = self.compiler(self.tool_default_stage(tool), self.build.build);
+        let compiler = self.compiler(self.tool_default_stage(tool), self.config.build);
         self.prepare_tool_cmd(compiler, &mut cmd);
         cmd
     }
index f8c7032369890b8472a66083dee9ae9b467d6abe..9a2b9e90440e53d909f8016a981585f16b2b4356 100644 (file)
@@ -22,7 +22,7 @@
 use std::time::{SystemTime, Instant};
 
 use config::Config;
-use Build;
+use builder::Builder;
 
 /// Returns the `name` as the filename of a static library for `target`.
 pub fn staticlib(name: &str, target: &str) -> String {
@@ -104,8 +104,8 @@ pub fn push_exe_path(mut buf: PathBuf, components: &[&str]) -> PathBuf {
 pub struct TimeIt(bool, Instant);
 
 /// Returns an RAII structure that prints out how long it took to drop.
-pub fn timeit(build: &Build) -> TimeIt {
-    TimeIt(build.config.dry_run, Instant::now())
+pub fn timeit(builder: &Builder) -> TimeIt {
+    TimeIt(builder.config.dry_run, Instant::now())
 }
 
 impl Drop for TimeIt {
index 853923ad947cd2ced021be50f5e7ef8e93c73c51..56eda5480715bc1b5517f4d916f5b27b7718ad34 100644 (file)
@@ -25,6 +25,12 @@ ENV RUST_CONFIGURE_ARGS \
   --set build.nodejs=/node-v9.2.0-linux-x64/bin/node \
   --set rust.lld
 
+# Some run-make tests have assertions about code size, and enabling debug
+# assertions in libstd makes the binary much bigger than it normally would be.
+# We already test libstd with debug assertions in lots of other contexts.
+ENV NO_DEBUG_ASSERTIONS=1
+
 ENV SCRIPT python2.7 /checkout/x.py test --target $TARGETS \
   src/test/run-make \
   src/test/ui \
index e51c63cf008981ae5f4cfcfcbdb2f77cd6bb9f85..417608cc5ca0a3e57f76c4cde1c28bc0addfabc3 100644 (file)
@@ -141,6 +141,31 @@ Similar to `--library-path`, `--extern` is about specifying the location
 of a dependency. `--library-path` provides directories to search in, `--extern`
 instead lets you specify exactly which dependency is located where.
 
+## `-C`/`--codegen`: pass codegen options to rustc
+
+Using this flag looks like this:
+
+```bash
+$ rustdoc src/lib.rs -C target_feature=+avx
+$ rustdoc src/lib.rs --codegen target_feature=+avx
+
+$ rustdoc --test src/lib.rs -C target_feature=+avx
+$ rustdoc --test src/lib.rs --codegen target_feature=+avx
+
+$ rustdoc --test README.md -C target_feature=+avx
+$ rustdoc --test README.md --codegen target_feature=+avx
+```
+
+When rustdoc generates documentation, looks for documentation tests, or executes documentation
+tests, it needs to compile some Rust code, at least partially. This flag allows you to tell rustdoc
+to pass extra codegen options to rustc when it runs these compilations. Most of the time, these
+options won't affect a regular documentation run, but if something depends on target features being
+enabled, or documentation tests need additional options, this flag lets you control that.
+
+The arguments to this flag are the same as those for the `-C` flag on rustc. Run `rustc -C help` to
+get the full list.
+
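
These codegen options also reach doc-test compilation, which is the main reason to pass them. A minimal, made-up illustration (the item below is not part of this commit): the doc test observes whether AVX was enabled, so running it with `-C target-feature=+avx` changes which branch prints.

```rust
/// The doc test below can tell whether the documentation run enabled AVX;
/// compare `rustdoc --test src/lib.rs` with
/// `rustdoc --test src/lib.rs -C target-feature=+avx`.
///
/// ```
/// if cfg!(target_feature = "avx") {
///     println!("doc test compiled with AVX enabled");
/// } else {
///     println!("doc test compiled without AVX");
/// }
/// ```
pub struct TargetFeatureProbe;
```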
 ## `--passes`: add more rustdoc passes
 
 Using this flag looks like this:
index fea8685a605d6859b52fcd4a3806d016b5826e06..3098587a8a4ccc11f1e09e4cb40be02d604d3a01 100644 (file)
@@ -138,31 +138,31 @@ To keep each code block testable, we want the whole program in each block, but
 we don't want the reader to see every line every time.  Here's what we put in
 our source code:
 
-```text
-    First, we set `x` to five:
+``````markdown
+First, we set `x` to five:
 
-    ```
-    let x = 5;
-    # let y = 6;
-    # println!("{}", x + y);
-    ```
+```
+let x = 5;
+# let y = 6;
+# println!("{}", x + y);
+```
 
-    Next, we set `y` to six:
+Next, we set `y` to six:
 
-    ```
-    # let x = 5;
-    let y = 6;
-    # println!("{}", x + y);
-    ```
+```
+# let x = 5;
+let y = 6;
+# println!("{}", x + y);
+```
 
-    Finally, we print the sum of `x` and `y`:
+Finally, we print the sum of `x` and `y`:
 
-    ```
-    # let x = 5;
-    # let y = 6;
-    println!("{}", x + y);
-    ```
 ```
+# let x = 5;
+# let y = 6;
+println!("{}", x + y);
+```
+``````
 
 By repeating all parts of the example, you can ensure that your example still
 compiles, while only showing the parts that are relevant to that part of your
index aceb6ff8abe2a7a6d914f3d8d250cf2ca853c393..5ebd2cc6146371febfa208b013699eadee9e963a 100644 (file)
@@ -429,6 +429,7 @@ fn from(slice: &'a [T]) -> Box<[T]> {
 
 #[stable(feature = "box_from_slice", since = "1.17.0")]
 impl<'a> From<&'a str> for Box<str> {
+    #[inline]
     fn from(s: &'a str) -> Box<str> {
         unsafe { from_boxed_utf8_unchecked(Box::from(s.as_bytes())) }
     }
@@ -436,6 +437,7 @@ fn from(s: &'a str) -> Box<str> {
 
 #[stable(feature = "boxed_str_conv", since = "1.19.0")]
 impl From<Box<str>> for Box<[u8]> {
+    #[inline]
     fn from(s: Box<str>) -> Self {
         unsafe { Box::from_raw(Box::into_raw(s) as *mut [u8]) }
     }
index c604df7049e0c02e10f53809f5c610986200b1a9..82cbec0517e5513288fbd8bdcac88e86acdf85da 100644 (file)
@@ -1287,8 +1287,7 @@ fn into_iter(self) -> IntoIter<K, V> {
 #[stable(feature = "btree_drop", since = "1.7.0")]
 impl<K, V> Drop for IntoIter<K, V> {
     fn drop(&mut self) {
-        for _ in &mut *self {
-        }
+        self.for_each(drop);
         unsafe {
             let leaf_node = ptr::read(&self.front).into_node();
             if let Some(first_parent) = leaf_node.deallocate_and_ascend() {
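
The same `self.for_each(drop)` replacement recurs in several `Drop` impls further down (linked_list's `DrainFilter`, vec's `Drain`, `Splice`, and `DrainFilter`, and vec_deque's `Drain`). A standalone sketch of the equivalence, assuming only the standard `Iterator` trait: both helpers exhaust the iterator, but `for_each` forwards to the iterator's own `fold`, which adapters can override with tighter internal iteration than an external `next()` loop.

```rust
// Both helpers drain an iterator completely; `for_each(drop)` uses internal
// iteration (the iterator's `fold`), which adapters such as `Chain` or
// `FlatMap` specialize, while the `while let` loop calls `next()` externally.
fn drain_external<I: Iterator>(mut it: I) {
    while let Some(_) = it.next() {}
}

fn drain_internal<I: Iterator>(it: I) {
    it.for_each(drop);
}

fn main() {
    drain_external(0..10);
    drain_internal((0..5).chain(5..10));
}
```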
index 129b3bc676432f058912c01ae3e834467ff34b0d..9844de9a57d70274c4bf91cc93d79da0e381acf4 100644 (file)
@@ -1019,7 +1019,7 @@ impl<'a, T, F> Drop for DrainFilter<'a, T, F>
     where F: FnMut(&mut T) -> bool,
 {
     fn drop(&mut self) {
-        for _ in self { }
+        self.for_each(drop);
     }
 }
 
index 405814c021afef423e4599e514c9e6d19683ba78..24b7cd3db0cc67e27569fcc4bf821f808abee812 100644 (file)
@@ -86,8 +86,8 @@ fn allocate_in(cap: usize, zeroed: bool, mut a: A) -> Self {
         unsafe {
             let elem_size = mem::size_of::<T>();
 
-            let alloc_size = cap.checked_mul(elem_size).expect("capacity overflow");
-            alloc_guard(alloc_size).expect("capacity overflow");
+            let alloc_size = cap.checked_mul(elem_size).unwrap_or_else(|| capacity_overflow());
+            alloc_guard(alloc_size).unwrap_or_else(|_| capacity_overflow());
 
             // handles ZSTs and `cap = 0` alike
             let ptr = if alloc_size == 0 {
@@ -310,7 +310,7 @@ pub fn double(&mut self) {
                     // `from_size_align_unchecked`.
                     let new_cap = 2 * self.cap;
                     let new_size = new_cap * elem_size;
-                    alloc_guard(new_size).expect("capacity overflow");
+                    alloc_guard(new_size).unwrap_or_else(|_| capacity_overflow());
                     let ptr_res = self.a.realloc(NonNull::from(self.ptr).as_opaque(),
                                                  cur,
                                                  new_size);
@@ -369,7 +369,7 @@ pub fn double_in_place(&mut self) -> bool {
             // overflow and the alignment is sufficiently small.
             let new_cap = 2 * self.cap;
             let new_size = new_cap * elem_size;
-            alloc_guard(new_size).expect("capacity overflow");
+            alloc_guard(new_size).unwrap_or_else(|_| capacity_overflow());
             match self.a.grow_in_place(NonNull::from(self.ptr).as_opaque(), old_layout, new_size) {
                 Ok(_) => {
                     // We can't directly divide `size`.
@@ -441,7 +441,7 @@ pub fn try_reserve_exact(&mut self, used_cap: usize, needed_extra_cap: usize)
 
     pub fn reserve_exact(&mut self, used_cap: usize, needed_extra_cap: usize) {
         match self.try_reserve_exact(used_cap, needed_extra_cap) {
-            Err(CapacityOverflow) => panic!("capacity overflow"),
+            Err(CapacityOverflow) => capacity_overflow(),
             Err(AllocErr) => self.a.oom(),
             Ok(()) => { /* yay */ }
          }
@@ -551,7 +551,7 @@ pub fn try_reserve(&mut self, used_cap: usize, needed_extra_cap: usize)
     /// The same as try_reserve, but errors are lowered to a call to oom().
     pub fn reserve(&mut self, used_cap: usize, needed_extra_cap: usize) {
         match self.try_reserve(used_cap, needed_extra_cap) {
-            Err(CapacityOverflow) => panic!("capacity overflow"),
+            Err(CapacityOverflow) => capacity_overflow(),
             Err(AllocErr) => self.a.oom(),
             Ok(()) => { /* yay */ }
          }
@@ -592,7 +592,7 @@ pub fn reserve_in_place(&mut self, used_cap: usize, needed_extra_cap: usize) ->
             }
 
             let new_cap = self.amortized_new_size(used_cap, needed_extra_cap)
-                              .expect("capacity overflow");
+                .unwrap_or_else(|_| capacity_overflow());
 
             // Here, `cap < used_cap + needed_extra_cap <= new_cap`
             // (regardless of whether `self.cap - used_cap` wrapped).
@@ -600,7 +600,7 @@ pub fn reserve_in_place(&mut self, used_cap: usize, needed_extra_cap: usize) ->
 
             let new_layout = Layout::new::<T>().repeat(new_cap).unwrap().0;
             // FIXME: may crash and burn on over-reserve
-            alloc_guard(new_layout.size()).expect("capacity overflow");
+            alloc_guard(new_layout.size()).unwrap_or_else(|_| capacity_overflow());
             match self.a.grow_in_place(
                 NonNull::from(self.ptr).as_opaque(), old_layout, new_layout.size(),
             ) {
@@ -732,6 +732,13 @@ fn alloc_guard(alloc_size: usize) -> Result<(), CollectionAllocErr> {
     }
 }
 
+// One central function responsible for reporting capacity overflows. This'll
+// ensure that the code generation related to these panics is minimal as there's
+// only one location which panics rather than a bunch throughout the module.
+fn capacity_overflow() -> ! {
+    panic!("capacity overflow")
+}
+
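
A simplified, standalone sketch of the pattern (not the actual `RawVec` code): every overflow path funnels into the single never-returning `capacity_overflow`, so each call site compiles down to a branch plus one call instead of its own inline panic machinery.

```rust
// One cold, never-returning function shared by all overflow checks.
fn capacity_overflow() -> ! {
    panic!("capacity overflow")
}

// A stand-in for the kind of size computation RawVec performs: `checked_mul`
// yields None on overflow, and the error path is a call to the shared helper.
fn alloc_size(cap: usize, elem_size: usize) -> usize {
    cap.checked_mul(elem_size)
        .unwrap_or_else(|| capacity_overflow())
}

fn main() {
    assert_eq!(alloc_size(4, 8), 32);
}
```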
 #[cfg(test)]
 mod tests {
     use super::*;
index 6c9f3dd7ec975d9d0368d31f864ab079d34bd021..0e7084653329e8b32be0ebaa7df109795e4557db 100644 (file)
@@ -1827,6 +1827,7 @@ pub fn parse<F: FromStr>(&self) -> Result<F, F::Err> {
     /// assert_eq!(*boxed_bytes, *s.as_bytes());
     /// ```
     #[stable(feature = "str_box_extras", since = "1.20.0")]
+    #[inline]
     pub fn into_boxed_bytes(self: Box<str>) -> Box<[u8]> {
         self.into()
     }
@@ -2065,6 +2066,7 @@ pub fn escape_unicode(&self) -> String {
     /// assert_eq!(boxed_str.into_string(), string);
     /// ```
     #[stable(feature = "box_str", since = "1.4.0")]
+    #[inline]
     pub fn into_string(self: Box<str>) -> String {
         let slice = Box::<[u8]>::from(self);
         unsafe { String::from_utf8_unchecked(slice.into_vec()) }
@@ -2323,6 +2325,7 @@ pub fn make_ascii_lowercase(&mut self) {
 /// assert_eq!("☺", &*smile);
 /// ```
 #[stable(feature = "str_box_extras", since = "1.20.0")]
+#[inline]
 pub unsafe fn from_boxed_utf8_unchecked(v: Box<[u8]>) -> Box<str> {
     Box::from_raw(Box::into_raw(v) as *mut str)
 }
index 0924ca2479147f4f581f0f94eaa28a74b5237903..2f84d5f7f8676877328dc57aa79155374886f6e6 100644 (file)
@@ -1521,9 +1521,6 @@ pub fn drain<R>(&mut self, range: R) -> Drain
     /// and replaces it with the given string.
     /// The given string doesn't need to be the same length as the range.
     ///
-    /// Note: Unlike [`Vec::splice`], the replacement happens eagerly, and this
-    /// method does not return the removed chars.
-    ///
     /// # Panics
     ///
     /// Panics if the starting point or end point do not lie on a [`char`]
@@ -1586,6 +1583,7 @@ pub fn replace_range<R>(&mut self, range: R, replace_with: &str)
     /// let b = s.into_boxed_str();
     /// ```
     #[stable(feature = "box_str", since = "1.4.0")]
+    #[inline]
     pub fn into_boxed_str(self) -> Box<str> {
         let slice = self.vec.into_boxed_slice();
         unsafe { from_boxed_utf8_unchecked(slice) }
index 9ae415c328bd2cc12bdef7a8ad890900be7d6a38..7d1b2ed85c7e13655e871cdaff00be37796788d8 100644 (file)
@@ -583,7 +583,9 @@ pub fn try_reserve_exact(&mut self, additional: usize) -> Result<(), CollectionA
     /// ```
     #[stable(feature = "rust1", since = "1.0.0")]
     pub fn shrink_to_fit(&mut self) {
-        self.buf.shrink_to_fit(self.len);
+        if self.capacity() != self.len {
+            self.buf.shrink_to_fit(self.len);
+        }
     }
 
     /// Shrinks the capacity of the vector with a lower bound.
@@ -2521,7 +2523,7 @@ fn next_back(&mut self) -> Option<T> {
 impl<'a, T> Drop for Drain<'a, T> {
     fn drop(&mut self) {
         // exhaust self first
-        while let Some(_) = self.next() {}
+        self.for_each(drop);
 
         if self.tail_len > 0 {
             unsafe {
@@ -2590,9 +2592,7 @@ impl<'a, I: Iterator> ExactSizeIterator for Splice<'a, I> {}
 #[stable(feature = "vec_splice", since = "1.21.0")]
 impl<'a, I: Iterator> Drop for Splice<'a, I> {
     fn drop(&mut self) {
-        // exhaust drain first
-        while let Some(_) = self.drain.next() {}
-
+        self.drain.by_ref().for_each(drop);
 
         unsafe {
             if self.drain.tail_len == 0 {
@@ -2721,8 +2721,7 @@ impl<'a, T, F> Drop for DrainFilter<'a, T, F>
     where F: FnMut(&mut T) -> bool,
 {
     fn drop(&mut self) {
-        for _ in self.by_ref() { }
-
+        self.for_each(drop);
         unsafe {
             self.vec.set_len(self.old_len - self.del);
         }
index 603e38ca2ca33d16bffd674c50163910d2091718..ff82b3a469caf1afae27c39246fc7aaf3b2b4088 100644 (file)
@@ -2250,7 +2250,7 @@ unsafe impl<'a, T: Send> Send for Drain<'a, T> {}
 #[stable(feature = "drain", since = "1.6.0")]
 impl<'a, T: 'a> Drop for Drain<'a, T> {
     fn drop(&mut self) {
-        for _ in self.by_ref() {}
+        self.for_each(drop);
 
         let source_deque = unsafe { self.deque.as_mut() };
 
index b53c0a2f48bf701d2b8d0dc8f50c8c3125bc6f6f..e2af67dd9286138e61ef1196d4d7a8487a80c20c 100644 (file)
@@ -7,3 +7,6 @@ version = "0.0.0"
 name = "arena"
 path = "lib.rs"
 crate-type = ["dylib"]
+
+[dependencies]
+rustc_data_structures = { path = "../librustc_data_structures" }
\ No newline at end of file
index b319f333342c1cc11134d98d0881aca582eb7bf8..c79e0e14e3d8b4d1ab65050153537f716e37508b 100644 (file)
@@ -32,6 +32,9 @@
 #![allow(deprecated)]
 
 extern crate alloc;
+extern crate rustc_data_structures;
+
+use rustc_data_structures::sync::MTLock;
 
 use std::cell::{Cell, RefCell};
 use std::cmp;
@@ -290,6 +293,8 @@ pub struct DroplessArena {
     chunks: RefCell<Vec<TypedArenaChunk<u8>>>,
 }
 
+unsafe impl Send for DroplessArena {}
+
 impl DroplessArena {
     pub fn new() -> DroplessArena {
         DroplessArena {
@@ -410,6 +415,72 @@ pub fn alloc_slice<T>(&self, slice: &[T]) -> &mut [T]
     }
 }
 
+pub struct SyncTypedArena<T> {
+    lock: MTLock<TypedArena<T>>,
+}
+
+impl<T> SyncTypedArena<T> {
+    #[inline(always)]
+    pub fn new() -> SyncTypedArena<T> {
+        SyncTypedArena {
+            lock: MTLock::new(TypedArena::new())
+        }
+    }
+
+    #[inline(always)]
+    pub fn alloc(&self, object: T) -> &mut T {
+        // Extend the lifetime of the result since it's limited to the lock guard
+        unsafe { &mut *(self.lock.lock().alloc(object) as *mut T) }
+    }
+
+    #[inline(always)]
+    pub fn alloc_slice(&self, slice: &[T]) -> &mut [T]
+    where
+        T: Copy,
+    {
+        // Extend the lifetime of the result since it's limited to the lock guard
+        unsafe { &mut *(self.lock.lock().alloc_slice(slice) as *mut [T]) }
+    }
+
+    #[inline(always)]
+    pub fn clear(&mut self) {
+        self.lock.get_mut().clear();
+    }
+}
+
+pub struct SyncDroplessArena {
+    lock: MTLock<DroplessArena>,
+}
+
+impl SyncDroplessArena {
+    #[inline(always)]
+    pub fn new() -> SyncDroplessArena {
+        SyncDroplessArena {
+            lock: MTLock::new(DroplessArena::new())
+        }
+    }
+
+    #[inline(always)]
+    pub fn in_arena<T: ?Sized>(&self, ptr: *const T) -> bool {
+        self.lock.lock().in_arena(ptr)
+    }
+
+    #[inline(always)]
+    pub fn alloc<T>(&self, object: T) -> &mut T {
+        // Extend the lifetime of the result since it's limited to the lock guard
+        unsafe { &mut *(self.lock.lock().alloc(object) as *mut T) }
+    }
+
+    #[inline(always)]
+    pub fn alloc_slice<T>(&self, slice: &[T]) -> &mut [T]
+    where
+        T: Copy,
+    {
+        // Extend the lifetime of the result since it's limited to the lock guard
+        unsafe { &mut *(self.lock.lock().alloc_slice(slice) as *mut [T]) }
+    }
+}
+
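
A minimal, self-contained sketch of the lock-plus-lifetime-extension pattern used by `SyncTypedArena` and `SyncDroplessArena` above, with `std::sync::Mutex` standing in for `MTLock` and a vector of boxed slots standing in for the real arena chunks; the raw-pointer cast exists only to outlive the lock guard's borrow, and is sound because each allocation lives in its own stable heap slot for the arena's whole lifetime.

```rust
use std::sync::Mutex;

// Illustrative stand-in for the sync arenas in the diff above.
struct SyncArena<T> {
    lock: Mutex<Vec<Box<T>>>,
}

impl<T> SyncArena<T> {
    fn new() -> Self {
        SyncArena { lock: Mutex::new(Vec::new()) }
    }

    fn alloc(&self, value: T) -> &mut T {
        let mut guard = self.lock.lock().unwrap();
        guard.push(Box::new(value));
        // The reference returned by the Vec is limited to the guard's
        // lifetime; cast through a raw pointer to extend it to the arena's.
        // Each call hands out a distinct heap slot, so no aliasing occurs.
        let ptr: *mut T = &mut **guard.last_mut().unwrap();
        unsafe { &mut *ptr }
    }
}

fn main() {
    let arena = SyncArena::new();
    let a = arena.alloc(1u32);
    let b = arena.alloc(2u32);
    *a += *b;
    assert_eq!(*a, 3);
}
```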
 #[cfg(test)]
 mod tests {
     extern crate test;
index 22c674363789b0f097cd51fb41a43fa3f36cadf3..f08baa3dd71c3146ec312b325b66eff00761f6df 100644 (file)
@@ -145,7 +145,14 @@ pub fn align(&self) -> usize { self.align }
     /// Constructs a `Layout` suitable for holding a value of type `T`.
     pub fn new<T>() -> Self {
         let (size, align) = size_align::<T>();
-        Layout::from_size_align(size, align).unwrap()
+        // Note that the align is guaranteed by rustc to be a power of two and
+        // the size+align combo is guaranteed to fit in our address space. As a
+        // result use the unchecked constructor here to avoid inserting code
+        // that panics if it isn't optimized well enough.
+        debug_assert!(Layout::from_size_align(size, align).is_ok());
+        unsafe {
+            Layout::from_size_align_unchecked(size, align)
+        }
     }
 
     /// Produces layout describing a record that could be used to
@@ -153,7 +160,11 @@ pub fn new<T>() -> Self {
     /// or other unsized type like a slice).
     pub fn for_value<T: ?Sized>(t: &T) -> Self {
         let (size, align) = (mem::size_of_val(t), mem::align_of_val(t));
-        Layout::from_size_align(size, align).unwrap()
+        // See the rationale in `new` for why this uses the unsafe variant below
+        debug_assert!(Layout::from_size_align(size, align).is_ok());
+        unsafe {
+            Layout::from_size_align_unchecked(size, align)
+        }
     }
 
     /// Creates a layout describing the record that can hold a value
@@ -234,12 +245,7 @@ pub fn repeat(&self, n: usize) -> Result<(Self, usize), LayoutErr> {
             .ok_or(LayoutErr { private: () })?;
         let alloc_size = padded_size.checked_mul(n)
             .ok_or(LayoutErr { private: () })?;
-
-        // We can assume that `self.align` is a power-of-two.
-        // Furthermore, `alloc_size` has already been rounded up
-        // to a multiple of `self.align`; therefore, the call to
-        // `Layout::from_size_align` below should never panic.
-        Ok((Layout::from_size_align(alloc_size, self.align).unwrap(), padded_size))
+        Ok((Layout::from_size_align(alloc_size, self.align)?, padded_size))
     }
 
     /// Creates a layout describing the record for `self` followed by
index d55219d7226e6d5fda7e7679df06994ab6a987f8..277bef2bf661a6488a8321d00429da8c73e77250 100644 (file)
@@ -1212,7 +1212,11 @@ pub fn pad(&mut self, s: &str) -> Result {
             // truncation. However other flags like `fill`, `width` and `align`
             // must act as always.
             if let Some((i, _)) = s.char_indices().skip(max).next() {
-                &s[..i]
+                // LLVM can't prove here that slicing with `..i` in `&s[..i]`
+                // won't panic, but we know that it can't. Use `get` +
+                // `unwrap_or` to avoid `unsafe` and to not emit any
+                // panic-related code here.
+                s.get(..i).unwrap_or(&s)
             } else {
                 &s
             }
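
The same idea outside of `Formatter::pad`, as a standalone sketch (hypothetical helper, not the libcore code): `get(..i)` returns an `Option`, so no bounds-check panic path is generated, and falling back to the whole string is harmless because `i` always comes from `char_indices` and is therefore a valid boundary.

```rust
// Truncate a string to at most `max_chars` characters without emitting any
// panic-related code for the slicing step.
fn truncate_chars(s: &str, max_chars: usize) -> &str {
    match s.char_indices().nth(max_chars) {
        Some((i, _)) => s.get(..i).unwrap_or(s),
        None => s,
    }
}

fn main() {
    assert_eq!(truncate_chars("héllo", 2), "hé");
    assert_eq!(truncate_chars("hi", 5), "hi");
}
```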
index 4ccf446aa6346f007655b95e50ebe30e60881c1a..6a77de2c9868dd68e160682895dbdf34d73dbe07 100644 (file)
@@ -998,7 +998,7 @@ fn scan<St, B, F>(self, initial_state: St, f: F) -> Scan<Self, St, F>
     /// an extra layer of indirection. `flat_map()` will remove this extra layer
     /// on its own.
     ///
-    /// You can think of [`flat_map(f)`][flat_map] as the semantic equivalent
+    /// You can think of `flat_map(f)` as the semantic equivalent
     /// of [`map`]ping, and then [`flatten`]ing as in `map(f).flatten()`.
     ///
     /// Another way of thinking about `flat_map()`: [`map`]'s closure returns
index 5ebd9e4334cd8ad000371ec0f857b4d3d55161e5..ea7a46f44ae0f31cb16fa752bcd80debde88afb1 100644 (file)
@@ -68,7 +68,6 @@
 #![feature(asm)]
 #![feature(associated_type_defaults)]
 #![feature(attr_literals)]
-#![feature(cfg_target_feature)]
 #![feature(cfg_target_has_atomic)]
 #![feature(concat_idents)]
 #![feature(const_fn)]
 #![feature(specialization)]
 #![feature(staged_api)]
 #![feature(stmt_expr_attributes)]
-#![feature(target_feature)]
 #![feature(unboxed_closures)]
 #![feature(untagged_unions)]
 #![feature(unwind_attributes)]
 
+#![cfg_attr(not(stage0), feature(mmx_target_feature))]
+#![cfg_attr(not(stage0), feature(tbm_target_feature))]
+#![cfg_attr(not(stage0), feature(sse4a_target_feature))]
+#![cfg_attr(not(stage0), feature(arm_target_feature))]
+#![cfg_attr(not(stage0), feature(powerpc_target_feature))]
+#![cfg_attr(not(stage0), feature(mips_target_feature))]
+#![cfg_attr(not(stage0), feature(aarch64_target_feature))]
+
+#![cfg_attr(stage0, feature(target_feature))]
+#![cfg_attr(stage0, feature(cfg_target_feature))]
+
 #[prelude_import]
 #[allow(unused)]
 use prelude::v1::*;
@@ -204,6 +213,20 @@ pub mod heap {
 // things like SIMD and such. Note that the actual source for all this lies in a
 // different repository, rust-lang-nursery/stdsimd. That's why the setup here is
 // a bit wonky.
+#[allow(unused_macros)]
+macro_rules! test_v16 { ($item:item) => {}; }
+#[allow(unused_macros)]
+macro_rules! test_v32 { ($item:item) => {}; }
+#[allow(unused_macros)]
+macro_rules! test_v64 { ($item:item) => {}; }
+#[allow(unused_macros)]
+macro_rules! test_v128 { ($item:item) => {}; }
+#[allow(unused_macros)]
+macro_rules! test_v256 { ($item:item) => {}; }
+#[allow(unused_macros)]
+macro_rules! test_v512 { ($item:item) => {}; }
+#[allow(unused_macros)]
+macro_rules! vector_impl { ($([$f:ident, $($args:tt)*]),*) => { $($f!($($args)*);)* } }
 #[path = "../stdsimd/coresimd/mod.rs"]
 #[allow(missing_docs, missing_debug_implementations, dead_code)]
 #[unstable(feature = "stdsimd", issue = "48556")]
@@ -213,6 +236,6 @@ pub mod heap {
 #[unstable(feature = "stdsimd", issue = "48556")]
 #[cfg(not(stage0))]
 pub use coresimd::simd;
-#[unstable(feature = "stdsimd", issue = "48556")]
+#[stable(feature = "simd_arch", since = "1.27.0")]
 #[cfg(not(stage0))]
 pub use coresimd::arch;
index 3f6674071256696e45bf74c8229a05c85993dd71..6f3e3b508853ba7c3172824222bd86a0ba6dfcb2 100644 (file)
@@ -100,17 +100,28 @@ impl<Idx: PartialOrd<Idx>> Range<Idx> {
     /// ```
     /// #![feature(range_contains)]
     ///
-    /// assert!(!(3..5).contains(2));
-    /// assert!( (3..5).contains(3));
-    /// assert!( (3..5).contains(4));
-    /// assert!(!(3..5).contains(5));
+    /// use std::f32;
     ///
-    /// assert!(!(3..3).contains(3));
-    /// assert!(!(3..2).contains(3));
+    /// assert!(!(3..5).contains(&2));
+    /// assert!( (3..5).contains(&3));
+    /// assert!( (3..5).contains(&4));
+    /// assert!(!(3..5).contains(&5));
+    ///
+    /// assert!(!(3..3).contains(&3));
+    /// assert!(!(3..2).contains(&3));
+    ///
+    /// assert!( (0.0..1.0).contains(&0.5));
+    /// assert!(!(0.0..1.0).contains(&f32::NAN));
+    /// assert!(!(0.0..f32::NAN).contains(&0.5));
+    /// assert!(!(f32::NAN..1.0).contains(&0.5));
     /// ```
     #[unstable(feature = "range_contains", reason = "recently added as per RFC", issue = "32311")]
-    pub fn contains(&self, item: Idx) -> bool {
-        (self.start <= item) && (item < self.end)
+    pub fn contains<U>(&self, item: &U) -> bool
+    where
+        Idx: PartialOrd<U>,
+        U: ?Sized + PartialOrd<Idx>,
+    {
+        <Self as RangeBounds<Idx>>::contains(self, item)
     }
 
     /// Returns `true` if the range contains no items.
@@ -179,7 +190,6 @@ fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
     }
 }
 
-#[unstable(feature = "range_contains", reason = "recently added as per RFC", issue = "32311")]
 impl<Idx: PartialOrd<Idx>> RangeFrom<Idx> {
     /// Returns `true` if `item` is contained in the range.
     ///
@@ -188,12 +198,23 @@ impl<Idx: PartialOrd<Idx>> RangeFrom<Idx> {
     /// ```
     /// #![feature(range_contains)]
     ///
-    /// assert!(!(3..).contains(2));
-    /// assert!( (3..).contains(3));
-    /// assert!( (3..).contains(1_000_000_000));
+    /// use std::f32;
+    ///
+    /// assert!(!(3..).contains(&2));
+    /// assert!( (3..).contains(&3));
+    /// assert!( (3..).contains(&1_000_000_000));
+    ///
+    /// assert!( (0.0..).contains(&0.5));
+    /// assert!(!(0.0..).contains(&f32::NAN));
+    /// assert!(!(f32::NAN..).contains(&0.5));
     /// ```
-    pub fn contains(&self, item: Idx) -> bool {
-        (self.start <= item)
+    #[unstable(feature = "range_contains", reason = "recently added as per RFC", issue = "32311")]
+    pub fn contains<U>(&self, item: &U) -> bool
+    where
+        Idx: PartialOrd<U>,
+        U: ?Sized + PartialOrd<Idx>,
+    {
+        <Self as RangeBounds<Idx>>::contains(self, item)
     }
 }
 
@@ -250,7 +271,6 @@ fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
     }
 }
 
-#[unstable(feature = "range_contains", reason = "recently added as per RFC", issue = "32311")]
 impl<Idx: PartialOrd<Idx>> RangeTo<Idx> {
     /// Returns `true` if `item` is contained in the range.
     ///
@@ -259,12 +279,23 @@ impl<Idx: PartialOrd<Idx>> RangeTo<Idx> {
     /// ```
     /// #![feature(range_contains)]
     ///
-    /// assert!( (..5).contains(-1_000_000_000));
-    /// assert!( (..5).contains(4));
-    /// assert!(!(..5).contains(5));
+    /// use std::f32;
+    ///
+    /// assert!( (..5).contains(&-1_000_000_000));
+    /// assert!( (..5).contains(&4));
+    /// assert!(!(..5).contains(&5));
+    ///
+    /// assert!( (..1.0).contains(&0.5));
+    /// assert!(!(..1.0).contains(&f32::NAN));
+    /// assert!(!(..f32::NAN).contains(&0.5));
     /// ```
-    pub fn contains(&self, item: Idx) -> bool {
-        (item < self.end)
+    #[unstable(feature = "range_contains", reason = "recently added as per RFC", issue = "32311")]
+    pub fn contains<U>(&self, item: &U) -> bool
+    where
+        Idx: PartialOrd<U>,
+        U: ?Sized + PartialOrd<Idx>,
+    {
+        <Self as RangeBounds<Idx>>::contains(self, item)
     }
 }
 
@@ -318,18 +349,29 @@ impl<Idx: PartialOrd<Idx>> RangeInclusive<Idx> {
     /// ```
     /// #![feature(range_contains)]
     ///
-    /// assert!(!(3..=5).contains(2));
-    /// assert!( (3..=5).contains(3));
-    /// assert!( (3..=5).contains(4));
-    /// assert!( (3..=5).contains(5));
-    /// assert!(!(3..=5).contains(6));
+    /// use std::f32;
+    ///
+    /// assert!(!(3..=5).contains(&2));
+    /// assert!( (3..=5).contains(&3));
+    /// assert!( (3..=5).contains(&4));
+    /// assert!( (3..=5).contains(&5));
+    /// assert!(!(3..=5).contains(&6));
     ///
-    /// assert!( (3..=3).contains(3));
-    /// assert!(!(3..=2).contains(3));
+    /// assert!( (3..=3).contains(&3));
+    /// assert!(!(3..=2).contains(&3));
+    ///
+    /// assert!( (0.0..=1.0).contains(&1.0));
+    /// assert!(!(0.0..=1.0).contains(&f32::NAN));
+    /// assert!(!(0.0..=f32::NAN).contains(&0.0));
+    /// assert!(!(f32::NAN..=1.0).contains(&1.0));
     /// ```
     #[unstable(feature = "range_contains", reason = "recently added as per RFC", issue = "32311")]
-    pub fn contains(&self, item: Idx) -> bool {
-        self.start <= item && item <= self.end
+    pub fn contains<U>(&self, item: &U) -> bool
+    where
+        Idx: PartialOrd<U>,
+        U: ?Sized + PartialOrd<Idx>,
+    {
+        <Self as RangeBounds<Idx>>::contains(self, item)
     }
 
     /// Returns `true` if the range contains no items.
@@ -431,12 +473,23 @@ impl<Idx: PartialOrd<Idx>> RangeToInclusive<Idx> {
     /// ```
     /// #![feature(range_contains)]
     ///
-    /// assert!( (..=5).contains(-1_000_000_000));
-    /// assert!( (..=5).contains(5));
-    /// assert!(!(..=5).contains(6));
+    /// use std::f32;
+    ///
+    /// assert!( (..=5).contains(&-1_000_000_000));
+    /// assert!( (..=5).contains(&5));
+    /// assert!(!(..=5).contains(&6));
+    ///
+    /// assert!( (..=1.0).contains(&1.0));
+    /// assert!(!(..=1.0).contains(&f32::NAN));
+    /// assert!(!(..=f32::NAN).contains(&0.5));
     /// ```
-    pub fn contains(&self, item: Idx) -> bool {
-        (item <= self.end)
+    #[unstable(feature = "range_contains", reason = "recently added as per RFC", issue = "32311")]
+    pub fn contains<U>(&self, item: &U) -> bool
+    where
+        Idx: PartialOrd<U>,
+        U: ?Sized + PartialOrd<Idx>,
+    {
+        <Self as RangeBounds<Idx>>::contains(self, item)
     }
 }
 
@@ -537,6 +590,42 @@ pub trait RangeBounds<T: ?Sized> {
     /// # }
     /// ```
     fn end(&self) -> Bound<&T>;
+
+
+    /// Returns `true` if `item` is contained in the range.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// #![feature(range_contains)]
+    ///
+    /// use std::f32;
+    ///
+    /// assert!( (3..5).contains(&4));
+    /// assert!(!(3..5).contains(&2));
+    ///
+    /// assert!( (0.0..1.0).contains(&0.5));
+    /// assert!(!(0.0..1.0).contains(&f32::NAN));
+    /// assert!(!(0.0..f32::NAN).contains(&0.5));
+    /// assert!(!(f32::NAN..1.0).contains(&0.5));
+    /// ```
+    #[unstable(feature = "range_contains", reason = "recently added as per RFC", issue = "32311")]
+    fn contains<U>(&self, item: &U) -> bool
+    where
+        T: PartialOrd<U>,
+        U: ?Sized + PartialOrd<T>,
+    {
+        (match self.start() {
+            Included(ref start) => *start <= item,
+            Excluded(ref start) => *start < item,
+            Unbounded => true,
+        })
+        &&
+        (match self.end() {
+            Included(ref end) => item <= *end,
+            Excluded(ref end) => item < *end,
+            Unbounded => true,
+        })
+    }
 }
 
 use self::Bound::{Excluded, Included, Unbounded};
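
Because both `Idx: PartialOrd<U>` and `U: PartialOrd<Idx>` are required, the probe value no longer has to be the exact endpoint type. A small usage sketch, runnable once `range_contains` is available (it was still feature-gated at the time of this commit; the generic signature is the form that eventually stabilized):

```rust
fn main() {
    // A `Range<String>` can be probed with a plain `&str`, since `String`
    // and `str` implement `PartialOrd` against each other.
    let range = String::from("a")..String::from("z");
    assert!(range.contains("m"));
    assert!(!range.contains("zz"));

    // NaN never satisfies either comparison, matching the doc examples above.
    assert!(!(0.0_f32..1.0).contains(&f32::NAN));
}
```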
index 1720c9d8c60790a9a171f92c467fedbf86ed8dcf..27ec4aaac75dec3a9ed09cc4f268a880f97f3896 100644 (file)
@@ -49,11 +49,17 @@ impl<'a> PanicInfo<'a> {
                           and related macros",
                 issue = "0")]
     #[doc(hidden)]
-    pub fn internal_constructor(payload: &'a (Any + Send),
-                                message: Option<&'a fmt::Arguments<'a>>,
+    #[inline]
+    pub fn internal_constructor(message: Option<&'a fmt::Arguments<'a>>,
                                 location: Location<'a>)
                                 -> Self {
-        PanicInfo { payload, location, message }
+        PanicInfo { payload: &(), location, message }
+    }
+
+    #[doc(hidden)]
+    #[inline]
+    pub fn set_payload(&mut self, info: &'a (Any + Send)) {
+        self.payload = info;
     }
 
     /// Returns the payload associated with the panic.
@@ -251,3 +257,13 @@ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
         write!(formatter, "{}:{}:{}", self.file, self.line, self.col)
     }
 }
+
+/// An internal trait used by libstd to pass data from libstd to `panic_unwind`
+/// and other panic runtimes. Not intended to be stabilized any time soon; do
+/// not use.
+#[unstable(feature = "std_internals", issue = "0")]
+#[doc(hidden)]
+pub unsafe trait BoxMeUp {
+    fn box_me_up(&mut self) -> *mut (Any + Send);
+    fn get(&mut self) -> &(Any + Send);
+}
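
A rough, purely illustrative sketch of the handoff this trait enables (not the real panic runtime; the actual trait returns a raw `*mut (Any + Send)` rather than a `Box`): libstd keeps the payload behind a `BoxMeUp`-style trait object, and the runtime only boxes it up once unwinding actually begins.

```rust
use std::any::Any;

// Simplified stand-in for the internal protocol: the entry point receives a
// trait object describing the payload and asks it to produce the boxed value.
trait BoxMeUpSketch {
    fn box_me_up(&mut self) -> Box<dyn Any + Send>;
}

struct StrPayload(&'static str);

impl BoxMeUpSketch for StrPayload {
    fn box_me_up(&mut self) -> Box<dyn Any + Send> {
        Box::new(self.0)
    }
}

fn start_panic(payload: &mut dyn BoxMeUpSketch) -> Box<dyn Any + Send> {
    payload.box_me_up()
}

fn main() {
    let mut payload = StrPayload("boom");
    let boxed = start_panic(&mut payload);
    assert_eq!(*boxed.downcast::<&'static str>().unwrap(), "boom");
}
```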
index 95bb8f18947ec842a5a92ff107e4fb63b550f9b5..464d57a270241e7e639d9ae0a90c324bf5e50ab1 100644 (file)
@@ -258,7 +258,7 @@ pub struct CharSearcher<'a> {
 
     /// `finger` is the current byte index of the forward search.
     /// Imagine that it exists before the byte at its index, i.e.
-    /// haystack[finger] is the first byte of the slice we must inspect during
+    /// `haystack[finger]` is the first byte of the slice we must inspect during
     /// forward searching
     finger: usize,
     /// `finger_back` is the current byte index of the reverse search.
index 62e0979c5fefc077f9171c0595136fc5a297d29c..cf3842dbe2750f78314386957d333d796a2ab9f2 100644 (file)
@@ -1360,8 +1360,7 @@ pub fn fetch_and(&self, val: $int_type, order: Ordering) -> $int_type {
 # Examples
 
 ```
-", $extra_feature, "#![feature(atomic_nand)]
-
+", $extra_feature, "
 use std::sync::atomic::{", stringify!($atomic_type), ", Ordering};
 
 let foo = ", stringify!($atomic_type), "::new(0x13);
@@ -1555,7 +1554,7 @@ pub fn fetch_min(&self, val: $int_type, order: Ordering) -> $int_type {
     unstable(feature = "integer_atomics", issue = "32976"),
     unstable(feature = "integer_atomics", issue = "32976"),
     unstable(feature = "integer_atomics", issue = "32976"),
-    unstable(feature = "atomic_nand", issue = "13226"),
+    unstable(feature = "integer_atomics", issue = "32976"),
     "i8", "../../../std/primitive.i8.html",
     "#![feature(integer_atomics)]\n\n",
     atomic_min, atomic_max,
@@ -1568,7 +1567,7 @@ pub fn fetch_min(&self, val: $int_type, order: Ordering) -> $int_type {
     unstable(feature = "integer_atomics", issue = "32976"),
     unstable(feature = "integer_atomics", issue = "32976"),
     unstable(feature = "integer_atomics", issue = "32976"),
-    unstable(feature = "atomic_nand", issue = "13226"),
+    unstable(feature = "integer_atomics", issue = "32976"),
     "u8", "../../../std/primitive.u8.html",
     "#![feature(integer_atomics)]\n\n",
     atomic_umin, atomic_umax,
@@ -1581,7 +1580,7 @@ pub fn fetch_min(&self, val: $int_type, order: Ordering) -> $int_type {
     unstable(feature = "integer_atomics", issue = "32976"),
     unstable(feature = "integer_atomics", issue = "32976"),
     unstable(feature = "integer_atomics", issue = "32976"),
-    unstable(feature = "atomic_nand", issue = "13226"),
+    unstable(feature = "integer_atomics", issue = "32976"),
     "i16", "../../../std/primitive.i16.html",
     "#![feature(integer_atomics)]\n\n",
     atomic_min, atomic_max,
@@ -1594,7 +1593,7 @@ pub fn fetch_min(&self, val: $int_type, order: Ordering) -> $int_type {
     unstable(feature = "integer_atomics", issue = "32976"),
     unstable(feature = "integer_atomics", issue = "32976"),
     unstable(feature = "integer_atomics", issue = "32976"),
-    unstable(feature = "atomic_nand", issue = "13226"),
+    unstable(feature = "integer_atomics", issue = "32976"),
     "u16", "../../../std/primitive.u16.html",
     "#![feature(integer_atomics)]\n\n",
     atomic_umin, atomic_umax,
@@ -1607,7 +1606,7 @@ pub fn fetch_min(&self, val: $int_type, order: Ordering) -> $int_type {
     unstable(feature = "integer_atomics", issue = "32976"),
     unstable(feature = "integer_atomics", issue = "32976"),
     unstable(feature = "integer_atomics", issue = "32976"),
-    unstable(feature = "atomic_nand", issue = "13226"),
+    unstable(feature = "integer_atomics", issue = "32976"),
     "i32", "../../../std/primitive.i32.html",
     "#![feature(integer_atomics)]\n\n",
     atomic_min, atomic_max,
@@ -1620,7 +1619,7 @@ pub fn fetch_min(&self, val: $int_type, order: Ordering) -> $int_type {
     unstable(feature = "integer_atomics", issue = "32976"),
     unstable(feature = "integer_atomics", issue = "32976"),
     unstable(feature = "integer_atomics", issue = "32976"),
-    unstable(feature = "atomic_nand", issue = "13226"),
+    unstable(feature = "integer_atomics", issue = "32976"),
     "u32", "../../../std/primitive.u32.html",
     "#![feature(integer_atomics)]\n\n",
     atomic_umin, atomic_umax,
@@ -1633,7 +1632,7 @@ pub fn fetch_min(&self, val: $int_type, order: Ordering) -> $int_type {
     unstable(feature = "integer_atomics", issue = "32976"),
     unstable(feature = "integer_atomics", issue = "32976"),
     unstable(feature = "integer_atomics", issue = "32976"),
-    unstable(feature = "atomic_nand", issue = "13226"),
+    unstable(feature = "integer_atomics", issue = "32976"),
     "i64", "../../../std/primitive.i64.html",
     "#![feature(integer_atomics)]\n\n",
     atomic_min, atomic_max,
@@ -1646,7 +1645,7 @@ pub fn fetch_min(&self, val: $int_type, order: Ordering) -> $int_type {
     unstable(feature = "integer_atomics", issue = "32976"),
     unstable(feature = "integer_atomics", issue = "32976"),
     unstable(feature = "integer_atomics", issue = "32976"),
-    unstable(feature = "atomic_nand", issue = "13226"),
+    unstable(feature = "integer_atomics", issue = "32976"),
     "u64", "../../../std/primitive.u64.html",
     "#![feature(integer_atomics)]\n\n",
     atomic_umin, atomic_umax,
@@ -1659,7 +1658,7 @@ pub fn fetch_min(&self, val: $int_type, order: Ordering) -> $int_type {
     stable(feature = "atomic_debug", since = "1.3.0"),
     stable(feature = "atomic_access", since = "1.15.0"),
     stable(feature = "atomic_from", since = "1.23.0"),
-    unstable(feature = "atomic_nand", issue = "13226"),
+    stable(feature = "atomic_nand", since = "1.27.0"),
     "isize", "../../../std/primitive.isize.html",
     "",
     atomic_min, atomic_max,
@@ -1672,7 +1671,7 @@ pub fn fetch_min(&self, val: $int_type, order: Ordering) -> $int_type {
     stable(feature = "atomic_debug", since = "1.3.0"),
     stable(feature = "atomic_access", since = "1.15.0"),
     stable(feature = "atomic_from", since = "1.23.0"),
-    unstable(feature = "atomic_nand", issue = "13226"),
+    stable(feature = "atomic_nand", since = "1.27.0"),
     "usize", "../../../std/primitive.usize.html",
     "",
     atomic_umin, atomic_umax,
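
The attribute rewrites above fold the old `atomic_nand` gate into the existing `integer_atomics` gate for the fixed-size atomics and mark the operation stable (since 1.27.0) on `AtomicIsize`/`AtomicUsize`. As a standalone illustration (not part of the diff), `fetch_nand` stores `!(old & val)` and returns the previous value:

    use std::sync::atomic::{AtomicUsize, Ordering};

    fn main() {
        let v = AtomicUsize::new(0b1111);
        // fetch_nand stores !(old & val) and returns the old value.
        let old = v.fetch_nand(0b0110, Ordering::SeqCst);
        assert_eq!(old, 0b1111);
        assert_eq!(v.load(Ordering::SeqCst), !(0b1111 & 0b0110));
    }
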
index 149269263dc8a542b34c1ff36aed75c4e4ca9075..bb875c7219a6bef1db86b75905b2350fdd17808d 100644 (file)
@@ -38,7 +38,7 @@
 #![feature(trusted_len)]
 #![feature(try_trait)]
 #![feature(exact_chunks)]
-#![feature(atomic_nand)]
+#![cfg_attr(stage0, feature(atomic_nand))]
 #![feature(reverse_bits)]
 #![feature(inclusive_range_fields)]
 #![feature(iterator_find_map)]
index 43c5bbbc618c2954edf7a2580102ad8932acc7ae..392bf17968fbdfc28f648b7b9f94f2c5d16e403f 100644 (file)
@@ -52,7 +52,7 @@
 // now hopefully.
 #[no_mangle]
 #[rustc_std_internal_symbol]
-pub unsafe extern fn __rust_start_panic(_data: usize, _vtable: usize) -> u32 {
+pub unsafe extern fn __rust_start_panic(_payload: usize) -> u32 {
     abort();
 
     #[cfg(any(unix, target_os = "cloudabi"))]
index 9321d6917d1566db3ef070a9c35066c0ef8b1007..6c52c0fa10cc0b534536ed0f19ef3f602801082f 100644 (file)
@@ -29,6 +29,7 @@
        html_root_url = "https://doc.rust-lang.org/nightly/",
        issue_tracker_base_url = "https://github.com/rust-lang/rust/issues/")]
 
+#![feature(allocator_api)]
 #![feature(alloc)]
 #![feature(core_intrinsics)]
 #![feature(lang_items)]
@@ -36,6 +37,7 @@
 #![feature(panic_unwind)]
 #![feature(raw)]
 #![feature(staged_api)]
+#![feature(std_internals)]
 #![feature(unwind_attributes)]
 #![cfg_attr(target_env = "msvc", feature(raw))]
 
 #[cfg(not(any(target_env = "msvc", all(windows, target_arch = "x86_64", target_env = "gnu"))))]
 extern crate unwind;
 
+use alloc::boxed::Box;
 use core::intrinsics;
 use core::mem;
 use core::raw;
+use core::panic::BoxMeUp;
 
 // Rust runtime's startup objects depend on these symbols, so make them public.
 #[cfg(all(target_os="windows", target_arch = "x86", target_env="gnu"))]
 // implementation.
 #[no_mangle]
 #[unwind(allowed)]
-pub unsafe extern "C" fn __rust_start_panic(data: usize, vtable: usize) -> u32 {
-    imp::panic(mem::transmute(raw::TraitObject {
-        data: data as *mut (),
-        vtable: vtable as *mut (),
-    }))
+pub unsafe extern "C" fn __rust_start_panic(payload: usize) -> u32 {
+    let payload = payload as *mut &mut BoxMeUp;
+    imp::panic(Box::from_raw((*payload).box_me_up()))
 }
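
Both panic runtimes now receive a single pointer-sized `payload` instead of a `(data, vtable)` pair; the value points at an `&mut BoxMeUp`, which hands the runtime a boxed panic payload. A rough standalone sketch of that calling convention, using a hypothetical `PanicPayload` trait in place of the internal `BoxMeUp` (names and signatures simplified, not the real API):

    // Hypothetical stand-in for the internal `BoxMeUp` trait.
    trait PanicPayload {
        fn take_box(&mut self) -> *mut String; // the real trait boxes a dyn Any + Send
    }

    struct StrPayload(&'static str);

    impl PanicPayload for StrPayload {
        fn take_box(&mut self) -> *mut String {
            Box::into_raw(Box::new(self.0.to_string()))
        }
    }

    // Mirrors the shape of the new entry point: one pointer-sized argument that
    // points at an `&mut dyn PanicPayload`.
    unsafe fn start_panic(payload: usize) -> u32 {
        let payload = payload as *mut &mut dyn PanicPayload;
        let boxed: Box<String> = Box::from_raw((*payload).take_box());
        println!("panicking with: {}", boxed);
        0
    }

    fn main() {
        let mut p = StrPayload("boom");
        let mut obj: &mut dyn PanicPayload = &mut p;
        let code = unsafe { start_panic(&mut obj as *mut &mut dyn PanicPayload as usize) };
        assert_eq!(code, 0);
    }
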
index 1e348e3a31ce215225c4a269e5fa792e235b4439..b88185c3154a9fea222d470159aa1afd4ee2dc17 100644 (file)
@@ -32,7 +32,7 @@
 use hir::svh::Svh;
 use util::nodemap::{DefIdMap, FxHashMap};
 
-use arena::TypedArena;
+use arena::SyncTypedArena;
 use std::io;
 use ty::TyCtxt;
 
@@ -219,7 +219,7 @@ fn is_body_owner(self, node_id: NodeId) -> bool {
 pub struct Forest {
     krate: Crate,
     pub dep_graph: DepGraph,
-    inlined_bodies: TypedArena<Body>
+    inlined_bodies: SyncTypedArena<Body>
 }
 
 impl Forest {
@@ -227,7 +227,7 @@ pub fn new(krate: Crate, dep_graph: &DepGraph) -> Forest {
         Forest {
             krate,
             dep_graph: dep_graph.clone(),
-            inlined_bodies: TypedArena::new()
+            inlined_bodies: SyncTypedArena::new()
         }
     }
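
`Forest` switches its `inlined_bodies` arena to a `Sync` variant so the HIR map can be shared across threads. A tiny sketch of why the field type matters: a `RefCell`-backed container (what the plain arenas use internally) keeps the owning struct from being `Sync`, while a lock-based one does not; `SyncTypedArena` itself is more refined than this.

    use std::cell::RefCell;
    use std::sync::Mutex;

    #[allow(dead_code)]
    struct NotSyncForest {
        inlined_bodies: RefCell<Vec<String>>, // RefCell<_> is !Sync
    }

    #[allow(dead_code)]
    struct SyncForest {
        inlined_bodies: Mutex<Vec<String>>, // Mutex<_> is Sync
    }

    fn assert_sync<T: Sync>() {}

    fn main() {
        // assert_sync::<NotSyncForest>(); // would not compile
        assert_sync::<SyncForest>();
    }
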
 
index e6080fad91d597f60410ffd2e989951490c1b835..f471ffb072d6700ebc937d0c25d5e9c6b8d24161 100644 (file)
@@ -2277,6 +2277,7 @@ pub struct TransFnAttrFlags: u8 {
         const NAKED                     = 0b0001_0000;
         const NO_MANGLE                 = 0b0010_0000;
         const RUSTC_STD_INTERNAL_SYMBOL = 0b0100_0000;
+        const NO_DEBUG                  = 0b1000_0000;
     }
 }
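
`NO_DEBUG` takes the last free bit of the `u8`-backed `TransFnAttrFlags`. A small sketch of the bit layout and how such a flag is tested, using plain constants instead of the `bitflags!` macro:

    const NAKED: u8                     = 0b0001_0000;
    const NO_MANGLE: u8                 = 0b0010_0000;
    const RUSTC_STD_INTERNAL_SYMBOL: u8 = 0b0100_0000;
    const NO_DEBUG: u8                  = 0b1000_0000;

    fn main() {
        let flags = NO_MANGLE | NO_DEBUG;
        assert!(flags & NO_DEBUG != 0);   // flag set
        assert!(flags & NAKED == 0);      // flag clear
        let _ = RUSTC_STD_INTERNAL_SYMBOL;
    }
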
 
index 0071850e1052b5a77292d68d50e34c1a64ca6e89..d885bd43bc89dbce656a42db81c6e5aa475bf3cb 100644 (file)
 });
 
 impl_stable_hash_for!(struct middle::cstore::ExternCrate {
-    def_id,
+    src,
     span,
-    direct,
-    path_len
+    path_len,
+    direct
+});
+
+impl_stable_hash_for!(enum middle::cstore::ExternCrateSource {
+    Extern(def_id),
+    Use,
+    Path,
 });
 
 impl_stable_hash_for!(struct middle::cstore::CrateSource {
index 41cfac2674be66b4dc358a30515923f777923758..70c152b40c0d25fdbc77561268c46efa6c192459 100644 (file)
@@ -398,12 +398,12 @@ fn hash_stable<W: StableHasherResult>(&self,
 
 enum AllocDiscriminant {
     Alloc,
-    ExternStatic,
+    Static,
     Function,
 }
 impl_stable_hash_for!(enum self::AllocDiscriminant {
     Alloc,
-    ExternStatic,
+    Static,
     Function
 });
 
@@ -414,24 +414,25 @@ fn hash_stable<W: StableHasherResult>(
         hasher: &mut StableHasher<W>,
     ) {
         ty::tls::with_opt(|tcx| {
+            trace!("hashing {:?}", *self);
             let tcx = tcx.expect("can't hash AllocIds during hir lowering");
-            if let Some(alloc) = tcx.interpret_interner.get_alloc(*self) {
+            if let Some(def_id) = tcx.interpret_interner.get_static(*self) {
+                AllocDiscriminant::Static.hash_stable(hcx, hasher);
+                trace!("hashing {:?} as static {:?}", *self, def_id);
+                def_id.hash_stable(hcx, hasher);
+            } else if let Some(alloc) = tcx.interpret_interner.get_alloc(*self) {
                 AllocDiscriminant::Alloc.hash_stable(hcx, hasher);
                 if hcx.alloc_id_recursion_tracker.insert(*self) {
-                    tcx
-                        .interpret_interner
-                        .get_corresponding_static_def_id(*self)
-                        .hash_stable(hcx, hasher);
+                    trace!("hashing {:?} as alloc {:#?}", *self, alloc);
                     alloc.hash_stable(hcx, hasher);
                     assert!(hcx.alloc_id_recursion_tracker.remove(self));
+                } else {
+                    trace!("skipping hashing of {:?} due to recursion", *self);
                 }
             } else if let Some(inst) = tcx.interpret_interner.get_fn(*self) {
+                trace!("hashing {:?} as fn {:#?}", *self, inst);
                 AllocDiscriminant::Function.hash_stable(hcx, hasher);
                 inst.hash_stable(hcx, hasher);
-            } else if let Some(def_id) = tcx.interpret_interner
-                                            .get_corresponding_static_def_id(*self) {
-                AllocDiscriminant::ExternStatic.hash_stable(hcx, hasher);
-                def_id.hash_stable(hcx, hasher);
             } else {
                 bug!("no allocation for {}", self);
             }
@@ -550,7 +551,6 @@ fn hash_stable<W: StableHasherResult>(&self,
             InvalidPointerMath |
             ReadUndefBytes |
             DeadLocal |
-            ExecutionTimeLimitReached |
             StackFrameLimitReached |
             OutOfTls |
             TlsOutOfBounds |
@@ -1387,6 +1387,7 @@ fn hash_stable<W: StableHasherResult>(&self,
             FromEnv(where_clause) => where_clause.hash_stable(hcx, hasher),
 
             WellFormedTy(ty) => ty.hash_stable(hcx, hasher),
+            Normalize(projection) => projection.hash_stable(hcx, hasher),
             FromEnvTy(ty) => ty.hash_stable(hcx, hasher),
             RegionOutlives(predicate) => predicate.hash_stable(hcx, hasher),
             TypeOutlives(predicate) => predicate.hash_stable(hcx, hasher),
index 8d314e251972d140e1b173b931b1b71d306b5a40..d8a2c95ab5904f1adadc5c1ad00f2a0a0808924b 100644 (file)
@@ -303,7 +303,7 @@ pub fn report_region_errors(
     ) {
         debug!("report_region_errors(): {} errors to start", errors.len());
 
-        if will_later_be_reported_by_nll && self.tcx.nll() {
+        if will_later_be_reported_by_nll && self.tcx.use_mir_borrowck() {
             // With `#![feature(nll)]`, we want to present a nice user
             // experience, so don't even mention the errors from the
             // AST checker.
@@ -311,20 +311,20 @@ pub fn report_region_errors(
                 return;
             }
 
-            // But with -Znll, it's nice to have some note for later.
+            // But with nll, it's nice to have some note for later.
             for error in errors {
                 match *error {
                     RegionResolutionError::ConcreteFailure(ref origin, ..)
                     | RegionResolutionError::GenericBoundFailure(ref origin, ..) => {
                         self.tcx
                             .sess
-                            .span_warn(origin.span(), "not reporting region error due to -Znll");
+                            .span_warn(origin.span(), "not reporting region error due to nll");
                     }
 
                     RegionResolutionError::SubSupConflict(ref rvo, ..) => {
                         self.tcx
                             .sess
-                            .span_warn(rvo.span(), "not reporting region error due to -Znll");
+                            .span_warn(rvo.span(), "not reporting region error due to nll");
                     }
                 }
             }
index 84bf9cc84e7373499419c79f7d4b9682840801aa..40cc43c3ca670d00e56b7d160f449d762cd7cf09 100644 (file)
@@ -37,7 +37,7 @@
 use syntax_pos::{self, Span};
 use syntax_pos::symbol::InternedString;
 use util::nodemap::FxHashMap;
-use arena::DroplessArena;
+use arena::SyncDroplessArena;
 
 use self::combine::CombineFields;
 use self::higher_ranked::HrMatchResult;
@@ -407,7 +407,7 @@ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
 /// F: for<'b, 'tcx> where 'gcx: 'tcx FnOnce(InferCtxt<'b, 'gcx, 'tcx>).
 pub struct InferCtxtBuilder<'a, 'gcx: 'a+'tcx, 'tcx: 'a> {
     global_tcx: TyCtxt<'a, 'gcx, 'gcx>,
-    arena: DroplessArena,
+    arena: SyncDroplessArena,
     fresh_tables: Option<RefCell<ty::TypeckTables<'tcx>>>,
 }
 
@@ -415,7 +415,7 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'gcx> {
     pub fn infer_ctxt(self) -> InferCtxtBuilder<'a, 'gcx, 'tcx> {
         InferCtxtBuilder {
             global_tcx: self,
-            arena: DroplessArena::new(),
+            arena: SyncDroplessArena::new(),
             fresh_tables: None,
 
         }
index 9f8cc2f86992f54ffa5d73f15be5cf2c5a10cab9..d5849ea22b1ef299cf4bd8aa0dee5c7ca152e255 100644 (file)
@@ -27,6 +27,7 @@
 use self::TargetLint::*;
 
 use std::slice;
+use rustc_data_structures::sync::{RwLock, ReadGuard};
 use lint::{EarlyLintPassObject, LateLintPassObject};
 use lint::{Level, Lint, LintId, LintPass, LintBuffer};
 use lint::builtin::BuiltinLintDiagnostics;
@@ -39,7 +40,6 @@
 use util::nodemap::FxHashMap;
 
 use std::default::Default as StdDefault;
-use std::cell::{Ref, RefCell};
 use syntax::ast;
 use syntax::edition;
 use syntax_pos::{MultiSpan, Span};
@@ -78,7 +78,7 @@ pub struct LintStore {
 
 pub struct LintSession<'a, PassObject> {
     /// Reference to the store of registered lints.
-    lints: Ref<'a, LintStore>,
+    lints: ReadGuard<'a, LintStore>,
 
     /// Trait objects for each lint pass.
     passes: Option<Vec<PassObject>>,
@@ -336,7 +336,7 @@ impl<'a, PassObject: LintPassObject> LintSession<'a, PassObject> {
     /// Creates a new `LintSession`, by moving out the `LintStore`'s initial
     /// lint levels and pass objects. These can be restored using the `restore`
     /// method.
-    fn new(store: &'a RefCell<LintStore>) -> LintSession<'a, PassObject> {
+    fn new(store: &'a RwLock<LintStore>) -> LintSession<'a, PassObject> {
         let mut s = store.borrow_mut();
         let passes = PassObject::take_passes(&mut *s);
         drop(s);
@@ -347,7 +347,7 @@ fn new(store: &'a RefCell<LintStore>) -> LintSession<'a, PassObject> {
     }
 
     /// Restores the levels back to the original lint store.
-    fn restore(self, store: &RefCell<LintStore>) {
+    fn restore(self, store: &RwLock<LintStore>) {
         drop(self.lints);
         let mut s = store.borrow_mut();
         PassObject::restore_passes(&mut *s, self.passes);
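
The lint store moves from `RefCell`/`Ref` to the `RwLock`/`ReadGuard` pair from `rustc_data_structures::sync`, so it can be read from multiple threads. A sketch of the same borrow-to-read / borrow_mut-to-write shape using `std::sync::RwLock` (the in-tree wrapper has a similar shape; types simplified):

    use std::sync::RwLock;

    struct LintStore {
        passes: Option<Vec<String>>,
    }

    fn take_passes(store: &RwLock<LintStore>) -> Option<Vec<String>> {
        // the write guard plays the role RefCell::borrow_mut used to play
        store.write().unwrap().passes.take()
    }

    fn main() {
        let store = RwLock::new(LintStore { passes: Some(vec!["unused_imports".to_string()]) });
        let passes = take_passes(&store);
        assert_eq!(passes.unwrap().len(), 1);
        // the read guard corresponds to the old Ref<'a, LintStore>
        assert!(store.read().unwrap().passes.is_none());
    }
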
index 41334a37dbef65050cecff58c463f781c1f0db4e..292ec184dfae5d9ada4e8fa913462ffe6fb8f5be 100644 (file)
@@ -148,23 +148,34 @@ pub enum LoadedMacro {
 
 #[derive(Copy, Clone, Debug)]
 pub struct ExternCrate {
-    /// def_id of an `extern crate` in the current crate that caused
-    /// this crate to be loaded; note that there could be multiple
-    /// such ids
-    pub def_id: DefId,
+    pub src: ExternCrateSource,
 
     /// span of the extern crate that caused this to be loaded
     pub span: Span,
 
+    /// Number of links to reach the extern;
+    /// used to select the extern with the shortest path
+    pub path_len: usize,
+
     /// If true, then this crate is the crate named by the extern
     /// crate referenced above. If false, then this crate is a dep
     /// of the crate.
     pub direct: bool,
+}
 
-    /// Number of links to reach the extern crate `def_id`
-    /// declaration; used to select the extern crate with the shortest
-    /// path
-    pub path_len: usize,
+#[derive(Copy, Clone, Debug)]
+pub enum ExternCrateSource {
+    /// Crate is loaded by `extern crate`.
+    Extern(
+        /// def_id of the item in the current crate that caused
+        /// this crate to be loaded; note that there could be multiple
+        /// such ids
+        DefId,
+    ),
+    /// Crate is loaded by `use`.
+    Use,
+    /// Crate is implicitly loaded by an absolute or an `extern::` path.
+    Path,
 }
 
 pub struct EncodedMetadata {
@@ -357,9 +368,23 @@ fn metadata_loader(&self) -> &dyn MetadataLoader { bug!("metadata_loader") }
 }
 
 pub trait CrateLoader {
-    fn process_item(&mut self, item: &ast::Item, defs: &Definitions);
+    fn process_extern_crate(&mut self, item: &ast::Item, defs: &Definitions) -> CrateNum;
+
+    fn process_path_extern(
+        &mut self,
+        name: Symbol,
+        span: Span,
+    ) -> CrateNum;
+
+    fn process_use_extern(
+        &mut self,
+        name: Symbol,
+        span: Span,
+        id: ast::NodeId,
+        defs: &Definitions,
+    ) -> CrateNum;
+
     fn postprocess(&mut self, krate: &ast::Crate);
-    fn resolve_crate_from_path(&mut self, name: Symbol, span: Span) -> CrateNum;
 }
 
 // This method is used when generating the command line to pass through to
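
`ExternCrate` now records how a crate was brought in via `ExternCrateSource`, and `CrateLoader` grows one hook per source instead of a single `process_item`. A simplified sketch of consuming the new shape, with local stand-in types in place of rustc's `DefId`:

    #[derive(Copy, Clone, Debug)]
    struct DefId(u32); // stand-in for rustc's DefId

    #[derive(Copy, Clone, Debug)]
    enum ExternCrateSource {
        Extern(DefId),
        Use,
        Path,
    }

    #[derive(Copy, Clone, Debug)]
    struct ExternCrate {
        src: ExternCrateSource,
        path_len: usize,
        direct: bool,
    }

    fn describe(ec: &ExternCrate) -> String {
        match ec.src {
            ExternCrateSource::Extern(def_id) => format!(
                "loaded by `extern crate` item {:?} ({} links away, direct: {})",
                def_id, ec.path_len, ec.direct
            ),
            ExternCrateSource::Use => "loaded by a `use` of the crate name".to_string(),
            ExternCrateSource::Path => "loaded implicitly via a path".to_string(),
        }
    }

    fn main() {
        let ec = ExternCrate { src: ExternCrateSource::Extern(DefId(0)), path_len: 1, direct: true };
        println!("{}", describe(&ec));
    }
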
index 9e69990f22c0d0c20af457727493daf481e919be..b919f4d15a840a479678aafc968ffafc4cb05bfe 100644 (file)
@@ -65,7 +65,6 @@ pub enum EvalErrorKind<'tcx> {
     Intrinsic(String),
     OverflowingMath,
     InvalidChar(u128),
-    ExecutionTimeLimitReached,
     StackFrameLimitReached,
     OutOfTls,
     TlsOutOfBounds,
@@ -188,8 +187,6 @@ fn description(&self) -> &str {
                 "mir not found",
             InvalidChar(..) =>
                 "tried to interpret an invalid 32-bit value as a char",
-            ExecutionTimeLimitReached =>
-                "the expression was too complex to be evaluated or resulted in an infinite loop",
             StackFrameLimitReached =>
                 "reached the configured maximum number of stack frames",
             OutOfTls =>
index e242ec4985ab4ae838f7a974aef0776fcf4fc4d4..c9eed0e4a288580a0452c5ca4b647ff9229e07a4 100644 (file)
@@ -154,10 +154,12 @@ pub fn offset<C: HasDataLayout>(self, i: u64, cx: C) -> EvalResult<'tcx, Self> {
 impl ::rustc_serialize::UseSpecializedEncodable for AllocId {}
 impl ::rustc_serialize::UseSpecializedDecodable for AllocId {}
 
-pub const ALLOC_DISCRIMINANT: usize = 0;
-pub const FN_DISCRIMINANT: usize = 1;
-pub const EXTERN_STATIC_DISCRIMINANT: usize = 2;
-pub const SHORTHAND_START: usize = 3;
+#[derive(RustcDecodable, RustcEncodable)]
+enum AllocKind {
+    Alloc,
+    Fn,
+    Static,
+}
 
 pub fn specialized_encode_alloc_id<
     'a, 'tcx,
@@ -166,26 +168,18 @@ pub fn specialized_encode_alloc_id<
     encoder: &mut E,
     tcx: TyCtxt<'a, 'tcx, 'tcx>,
     alloc_id: AllocId,
-    shorthand: Option<usize>,
 ) -> Result<(), E::Error> {
-    if let Some(shorthand) = shorthand {
-        return shorthand.encode(encoder);
-    }
     if let Some(alloc) = tcx.interpret_interner.get_alloc(alloc_id) {
         trace!("encoding {:?} with {:#?}", alloc_id, alloc);
-        ALLOC_DISCRIMINANT.encode(encoder)?;
+        AllocKind::Alloc.encode(encoder)?;
         alloc.encode(encoder)?;
-        // encode whether this allocation is the root allocation of a static
-        tcx.interpret_interner
-            .get_corresponding_static_def_id(alloc_id)
-            .encode(encoder)?;
     } else if let Some(fn_instance) = tcx.interpret_interner.get_fn(alloc_id) {
         trace!("encoding {:?} with {:#?}", alloc_id, fn_instance);
-        FN_DISCRIMINANT.encode(encoder)?;
+        AllocKind::Fn.encode(encoder)?;
         fn_instance.encode(encoder)?;
-    } else if let Some(did) = tcx.interpret_interner.get_corresponding_static_def_id(alloc_id) {
-        // extern "C" statics don't have allocations, just encode its def_id
-        EXTERN_STATIC_DISCRIMINANT.encode(encoder)?;
+    } else if let Some(did) = tcx.interpret_interner.get_static(alloc_id) {
+        // references to statics don't need to know about their allocations, just their DefId
+        AllocKind::Static.encode(encoder)?;
         did.encode(encoder)?;
     } else {
         bug!("alloc id without corresponding allocation: {}", alloc_id);
@@ -196,53 +190,42 @@ pub fn specialized_encode_alloc_id<
 pub fn specialized_decode_alloc_id<
     'a, 'tcx,
     D: Decoder,
-    CACHE: FnOnce(&mut D, usize, AllocId),
-    SHORT: FnOnce(&mut D, usize) -> Result<AllocId, D::Error>
+    CACHE: FnOnce(&mut D, AllocId),
 >(
     decoder: &mut D,
     tcx: TyCtxt<'a, 'tcx, 'tcx>,
-    pos: usize,
     cache: CACHE,
-    short: SHORT,
 ) -> Result<AllocId, D::Error> {
-    match usize::decode(decoder)? {
-        ALLOC_DISCRIMINANT => {
+    match AllocKind::decode(decoder)? {
+        AllocKind::Alloc => {
             let alloc_id = tcx.interpret_interner.reserve();
-            trace!("creating alloc id {:?} at {}", alloc_id, pos);
+            trace!("creating alloc id {:?}", alloc_id);
             // insert early to allow recursive allocs
-            cache(decoder, pos, alloc_id);
+            cache(decoder, alloc_id);
 
             let allocation = Allocation::decode(decoder)?;
             trace!("decoded alloc {:?} {:#?}", alloc_id, allocation);
             let allocation = tcx.intern_const_alloc(allocation);
             tcx.interpret_interner.intern_at_reserved(alloc_id, allocation);
 
-            if let Some(glob) = Option::<DefId>::decode(decoder)? {
-                tcx.interpret_interner.cache(glob, alloc_id);
-            }
-
             Ok(alloc_id)
         },
-        FN_DISCRIMINANT => {
-            trace!("creating fn alloc id at {}", pos);
+        AllocKind::Fn => {
+            trace!("creating fn alloc id");
             let instance = ty::Instance::decode(decoder)?;
             trace!("decoded fn alloc instance: {:?}", instance);
             let id = tcx.interpret_interner.create_fn_alloc(instance);
             trace!("created fn alloc id: {:?}", id);
-            cache(decoder, pos, id);
+            cache(decoder, id);
             Ok(id)
         },
-        EXTERN_STATIC_DISCRIMINANT => {
-            trace!("creating extern static alloc id at {}", pos);
+        AllocKind::Static => {
+            trace!("creating extern static alloc id");
             let did = DefId::decode(decoder)?;
-            let alloc_id = tcx.interpret_interner.reserve();
-            tcx.interpret_interner.cache(did, alloc_id);
+            let alloc_id = tcx.interpret_interner.cache_static(did);
+            cache(decoder, alloc_id);
             Ok(alloc_id)
         },
-        shorthand => {
-            trace!("loading shorthand {}", shorthand);
-            short(decoder, shorthand)
-        },
     }
 }
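
Allocation references are now encoded behind a proper `AllocKind` tag (`Alloc`, `Fn`, `Static`) rather than magic `usize` discriminants plus byte-position shorthands. A toy round-trip showing the tag-then-payload idea, with hand-rolled bytes standing in for the real `Encoder`/`Decoder` machinery:

    use std::convert::TryInto;

    #[derive(Debug, PartialEq)]
    enum AllocKind {
        Alloc(Vec<u8>), // inline allocation bytes
        Fn(u64),        // stand-in for a function instance
        Static(u64),    // stand-in for a DefId
    }

    fn encode(kind: &AllocKind, out: &mut Vec<u8>) {
        match kind {
            AllocKind::Alloc(bytes) => {
                out.push(0);
                out.push(bytes.len() as u8);
                out.extend_from_slice(bytes);
            }
            AllocKind::Fn(id) => {
                out.push(1);
                out.extend_from_slice(&id.to_le_bytes());
            }
            AllocKind::Static(def_id) => {
                out.push(2);
                out.extend_from_slice(&def_id.to_le_bytes());
            }
        }
    }

    fn decode(buf: &[u8]) -> AllocKind {
        match buf[0] {
            0 => {
                let len = buf[1] as usize;
                AllocKind::Alloc(buf[2..2 + len].to_vec())
            }
            1 => AllocKind::Fn(u64::from_le_bytes(buf[1..9].try_into().unwrap())),
            2 => AllocKind::Static(u64::from_le_bytes(buf[1..9].try_into().unwrap())),
            _ => panic!("unknown tag"),
        }
    }

    fn main() {
        for kind in [AllocKind::Alloc(vec![1, 2, 3]), AllocKind::Fn(7), AllocKind::Static(42)] {
            let mut out = Vec::new();
            encode(&kind, &mut out);
            assert_eq!(decode(&out), kind);
        }
    }
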
 
index 33f52ab09c85668b72c9d3a008345fe5394688e6..c525c4ed651f244faf93df66289f4884629441c3 100644 (file)
@@ -1991,7 +1991,7 @@ pub fn successor_within_block(&self) -> Location {
         Location { block: self.block, statement_index: self.statement_index + 1 }
     }
 
-    pub fn dominates(&self, other: &Location, dominators: &Dominators<BasicBlock>) -> bool {
+    pub fn dominates(&self, other: Location, dominators: &Dominators<BasicBlock>) -> bool {
         if self.block == other.block {
             self.statement_index <= other.statement_index
         } else {
index d309026212bfa1f408667cfe5a6053064553adb1..59823390a0a5861f6ad3472e1c4f1e007846f98d 100644 (file)
@@ -246,6 +246,10 @@ pub fn values<'a>(&'a self) -> BTreeMapValuesIter<'a, OutputType, Option<PathBuf
         self.0.values()
     }
 
+    pub fn len(&self) -> usize {
+        self.0.len()
+    }
+
     // True if any of the output types require codegen or linking.
     pub fn should_trans(&self) -> bool {
         self.0.keys().any(|k| match *k {
@@ -1255,8 +1259,6 @@ fn parse_edition(slot: &mut Edition, v: Option<&str>) -> bool {
         useful for profiling / PGO."),
     relro_level: Option<RelroLevel> = (None, parse_relro_level, [TRACKED],
         "choose which RELRO level to use"),
-    nll: bool = (false, parse_bool, [UNTRACKED],
-                 "run the non-lexical lifetimes MIR pass"),
     disable_nll_user_type_assert: bool = (false, parse_bool, [UNTRACKED],
         "disable user provided type assertion in NLL"),
     trans_time_graph: bool = (false, parse_bool, [UNTRACKED],
index 8f2043fdfc643bb15220e7f70509f635fee56f10..2993234f266256b767c4650f6e85bdc7133cc054 100644 (file)
@@ -26,7 +26,7 @@
 use util::common::{duration_to_secs_str, ErrorReported};
 use util::common::ProfileQueriesMsg;
 
-use rustc_data_structures::sync::{Lrc, Lock, LockCell, OneThread, Once};
+use rustc_data_structures::sync::{self, Lrc, Lock, LockCell, OneThread, Once, RwLock};
 
 use syntax::ast::NodeId;
 use errors::{self, DiagnosticBuilder, DiagnosticId};
@@ -83,13 +83,13 @@ pub struct Session {
 
     // FIXME: lint_store and buffered_lints are not thread-safe,
     // but are only used in a single thread
-    pub lint_store: OneThread<RefCell<lint::LintStore>>,
-    pub buffered_lints: OneThread<RefCell<Option<lint::LintBuffer>>>,
+    pub lint_store: RwLock<lint::LintStore>,
+    pub buffered_lints: Lock<Option<lint::LintBuffer>>,
 
     /// Set of (DiagnosticId, Option<Span>, message) tuples tracking
     /// (sub)diagnostics that have been set once, but should not be set again,
     /// in order to avoid redundantly verbose output (Issue #24690, #44953).
-    pub one_time_diagnostics: RefCell<FxHashSet<(DiagnosticMessageId, Option<Span>, String)>>,
+    pub one_time_diagnostics: Lock<FxHashSet<(DiagnosticMessageId, Option<Span>, String)>>,
     pub plugin_llvm_passes: OneThread<RefCell<Vec<String>>>,
     pub plugin_attributes: OneThread<RefCell<Vec<(String, AttributeType)>>>,
     pub crate_types: Once<Vec<config::CrateType>>,
@@ -112,8 +112,6 @@ pub struct Session {
 
     /// The maximum number of stackframes allowed in const eval
     pub const_eval_stack_frame_limit: usize,
-    /// The maximum number miri steps per constant
-    pub const_eval_step_limit: usize,
 
     /// The metadata::creader module may inject an allocator/panic_runtime
     /// dependency if it didn't already find one, and this tracks what was
@@ -931,7 +929,7 @@ pub fn codegen_units(&self) -> usize {
     }
 
     pub fn teach(&self, code: &DiagnosticId) -> bool {
-        self.opts.debugging_opts.teach && !self.parse_sess.span_diagnostic.code_emitted(code)
+        self.opts.debugging_opts.teach && self.parse_sess.span_diagnostic.must_teach(code)
     }
 
     /// Are we allowed to use features from the Rust 2018 edition?
@@ -985,7 +983,7 @@ pub fn build_session_with_codemap(
 
     let external_macro_backtrace = sopts.debugging_opts.external_macro_backtrace;
 
-    let emitter: Box<dyn Emitter> =
+    let emitter: Box<dyn Emitter + sync::Send> =
         match (sopts.error_format, emitter_dest) {
             (config::ErrorOutputType::HumanReadable(color_config), None) => Box::new(
                 EmitterWriter::stderr(
@@ -1091,9 +1089,9 @@ pub fn build_session_(
         default_sysroot,
         local_crate_source_file,
         working_dir,
-        lint_store: OneThread::new(RefCell::new(lint::LintStore::new())),
-        buffered_lints: OneThread::new(RefCell::new(Some(lint::LintBuffer::new()))),
-        one_time_diagnostics: RefCell::new(FxHashSet()),
+        lint_store: RwLock::new(lint::LintStore::new()),
+        buffered_lints: Lock::new(Some(lint::LintBuffer::new())),
+        one_time_diagnostics: Lock::new(FxHashSet()),
         plugin_llvm_passes: OneThread::new(RefCell::new(Vec::new())),
         plugin_attributes: OneThread::new(RefCell::new(Vec::new())),
         crate_types: Once::new(),
@@ -1103,7 +1101,6 @@ pub fn build_session_(
         recursion_limit: Once::new(),
         type_length_limit: Once::new(),
         const_eval_stack_frame_limit: 100,
-        const_eval_step_limit: 1_000_000,
         next_node_id: OneThread::new(Cell::new(NodeId::new(1))),
         injected_allocator: Once::new(),
         allocator_kind: Once::new(),
@@ -1191,7 +1188,7 @@ pub enum IncrCompSession {
 }
 
 pub fn early_error(output: config::ErrorOutputType, msg: &str) -> ! {
-    let emitter: Box<dyn Emitter> = match output {
+    let emitter: Box<dyn Emitter + sync::Send> = match output {
         config::ErrorOutputType::HumanReadable(color_config) => {
             Box::new(EmitterWriter::stderr(color_config, None, false, false))
         }
@@ -1206,7 +1203,7 @@ pub fn early_error(output: config::ErrorOutputType, msg: &str) -> ! {
 }
 
 pub fn early_warn(output: config::ErrorOutputType, msg: &str) {
-    let emitter: Box<dyn Emitter> = match output {
+    let emitter: Box<dyn Emitter + sync::Send> = match output {
         config::ErrorOutputType::HumanReadable(color_config) => {
             Box::new(EmitterWriter::stderr(color_config, None, false, false))
         }
index 32fd93cf20a1f7599175205ec62829c553e09b1b..8d2398d34090d8d6f8ddc327150ef42466141217 100644 (file)
@@ -266,6 +266,7 @@ pub enum DomainGoal<'tcx> {
     WellFormed(WhereClauseAtom<'tcx>),
     FromEnv(WhereClauseAtom<'tcx>),
     WellFormedTy(Ty<'tcx>),
+    Normalize(ty::ProjectionPredicate<'tcx>),
     FromEnvTy(Ty<'tcx>),
     RegionOutlives(ty::RegionOutlivesPredicate<'tcx>),
     TypeOutlives(ty::TypeOutlivesPredicate<'tcx>),
index 523cd42940e27b7ef3798476ad2f06d23fcc271a..31c5bf1bbad84fd45870dd2b302f4b283d4e72bb 100644 (file)
@@ -450,6 +450,7 @@ fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
             FromEnv(Implemented(trait_ref)) => write!(fmt, "FromEnv({})", trait_ref),
             FromEnv(ProjectionEq(projection)) => write!(fmt, "FromEnv({})", projection),
             WellFormedTy(ty) => write!(fmt, "WellFormed({})", ty),
+            Normalize(projection) => write!(fmt, "Normalize({})", projection),
             FromEnvTy(ty) => write!(fmt, "FromEnv({})", ty),
             RegionOutlives(predicate) => write!(fmt, "RegionOutlives({})", predicate),
             TypeOutlives(predicate) => write!(fmt, "TypeOutlives({})", predicate),
@@ -538,6 +539,7 @@ impl<'tcx> TypeFoldable<'tcx> for traits::DomainGoal<'tcx> {
         (traits::DomainGoal::WellFormed)(wc),
         (traits::DomainGoal::FromEnv)(wc),
         (traits::DomainGoal::WellFormedTy)(ty),
+        (traits::DomainGoal::Normalize)(projection),
         (traits::DomainGoal::FromEnvTy)(ty),
         (traits::DomainGoal::RegionOutlives)(predicate),
         (traits::DomainGoal::TypeOutlives)(predicate),
index a508f33db3f771c0eab854fd3c666cf67ddc6125..28ad5edbd2db7ad376e1c7d3adace5694f62bb7b 100644 (file)
 use rustc_data_structures::stable_hasher::{HashStable, hash_stable_hashmap,
                                            StableHasher, StableHasherResult,
                                            StableVec};
-use arena::{TypedArena, DroplessArena};
+use arena::{TypedArena, SyncDroplessArena};
 use rustc_data_structures::indexed_vec::IndexVec;
 use rustc_data_structures::sync::{Lrc, Lock};
 use std::any::Any;
 use std::borrow::Borrow;
-use std::cell::Cell;
 use std::cmp::Ordering;
 use std::collections::hash_map::{self, Entry};
 use std::hash::{Hash, Hasher};
 
 pub struct AllArenas<'tcx> {
     pub global: GlobalArenas<'tcx>,
-    pub interner: DroplessArena,
+    pub interner: SyncDroplessArena,
 }
 
 impl<'tcx> AllArenas<'tcx> {
     pub fn new() -> Self {
         AllArenas {
             global: GlobalArenas::new(),
-            interner: DroplessArena::new(),
+            interner: SyncDroplessArena::new(),
         }
     }
 }
@@ -130,7 +129,7 @@ pub fn new() -> GlobalArenas<'tcx> {
 
 pub struct CtxtInterners<'tcx> {
     /// The arena that types, regions, etc are allocated from
-    arena: &'tcx DroplessArena,
+    arena: &'tcx SyncDroplessArena,
 
     /// Specifically use a speedy hash algorithm for these hash sets,
     /// they're accessed quite often.
@@ -147,7 +146,7 @@ pub struct CtxtInterners<'tcx> {
 }
 
 impl<'gcx: 'tcx, 'tcx> CtxtInterners<'tcx> {
-    fn new(arena: &'tcx DroplessArena) -> CtxtInterners<'tcx> {
+    fn new(arena: &'tcx SyncDroplessArena) -> CtxtInterners<'tcx> {
         CtxtInterners {
             arena,
             type_: Default::default(),
@@ -174,10 +173,10 @@ fn intern_ty(&self, st: TypeVariants<'tcx>,
                 return ty;
             }
             let global_interner = global_interners.map(|interners| {
-                interners.type_.borrow_mut()
+                (interners.type_.borrow_mut(), &interners.arena)
             });
-            if let Some(ref interner) = global_interner {
-                if let Some(&Interned(ty)) = interner.get(&st) {
+            if let Some((ref type_, _)) = global_interner {
+                if let Some(&Interned(ty)) = type_.get(&st) {
                     return ty;
                 }
             }
@@ -193,18 +192,18 @@ fn intern_ty(&self, st: TypeVariants<'tcx>,
             // determine that all contents are in the global tcx.
             // See comments on Lift for why we can't use that.
             if !flags.flags.intersects(ty::TypeFlags::KEEP_IN_LOCAL_TCX) {
-                if let Some(interner) = global_interners {
+                if let Some((mut type_, arena)) = global_interner {
                     let ty_struct: TyS<'gcx> = unsafe {
                         mem::transmute(ty_struct)
                     };
-                    let ty: Ty<'gcx> = interner.arena.alloc(ty_struct);
-                    global_interner.unwrap().insert(Interned(ty));
+                    let ty: Ty<'gcx> = arena.alloc(ty_struct);
+                    type_.insert(Interned(ty));
                     return ty;
                 }
             } else {
                 // Make sure we don't end up with inference
                 // types/regions in the global tcx.
-                if global_interners.is_none() {
+                if global_interner.is_none() {
                     drop(interner);
                     bug!("Attempted to intern `{:?}` which contains \
                           inference types/regions in the global type context",
@@ -915,9 +914,6 @@ pub struct GlobalCtxt<'tcx> {
     /// Data layout specification for the current target.
     pub data_layout: TargetDataLayout,
 
-    /// Used to prevent layout from recursing too deeply.
-    pub layout_depth: Cell<usize>,
-
     stability_interner: Lock<FxHashSet<&'tcx attr::Stability>>,
 
     pub interpret_interner: InterpretInterner<'tcx>,
@@ -956,18 +952,16 @@ struct InterpretInternerInner<'tcx> {
     /// Allows obtaining const allocs via a unique identifier
     alloc_by_id: FxHashMap<interpret::AllocId, &'tcx interpret::Allocation>,
 
-    /// Reverse map of `alloc_cache`
-    global_cache: FxHashMap<interpret::AllocId, DefId>,
+    /// Allows obtaining static def ids via a unique id
+    statics: FxHashMap<interpret::AllocId, DefId>,
 
     /// The AllocId to assign to the next new regular allocation.
     /// Always incremented, never gets smaller.
     next_id: interpret::AllocId,
 
-    /// Allows checking whether a static already has an allocation
-    ///
-    /// This is only important for detecting statics referring to themselves
-    // FIXME(oli-obk) move it to the EvalContext?
-    alloc_cache: FxHashMap<DefId, interpret::AllocId>,
+    /// Inverse map of `statics`
+    /// Used so we don't allocate a new pointer every time we need one
+    static_cache: FxHashMap<DefId, interpret::AllocId>,
 
     /// A cache for basic byte allocations keyed by their contents. This is used to deduplicate
     /// allocations for string and bytestring literals.
@@ -1001,30 +995,25 @@ pub fn get_alloc(
         self.inner.borrow().alloc_by_id.get(&id).cloned()
     }
 
-    pub fn get_cached(
-        &self,
-        static_id: DefId,
-    ) -> Option<interpret::AllocId> {
-        self.inner.borrow().alloc_cache.get(&static_id).cloned()
-    }
-
-    pub fn cache(
+    pub fn cache_static(
         &self,
         static_id: DefId,
-        alloc_id: interpret::AllocId,
-    ) {
-        let mut inner = self.inner.borrow_mut();
-        inner.global_cache.insert(alloc_id, static_id);
-        if let Some(old) = inner.alloc_cache.insert(static_id, alloc_id) {
-            bug!("tried to cache {:?}, but was already existing as {:#?}", static_id, old);
+    ) -> interpret::AllocId {
+        if let Some(alloc_id) = self.inner.borrow().static_cache.get(&static_id).cloned() {
+            return alloc_id;
         }
+        let alloc_id = self.reserve();
+        let mut inner = self.inner.borrow_mut();
+        inner.static_cache.insert(static_id, alloc_id);
+        inner.statics.insert(alloc_id, static_id);
+        alloc_id
     }
 
-    pub fn get_corresponding_static_def_id(
+    pub fn get_static(
         &self,
         ptr: interpret::AllocId,
     ) -> Option<DefId> {
-        self.inner.borrow().global_cache.get(&ptr).cloned()
+        self.inner.borrow().statics.get(&ptr).cloned()
     }
 
     pub fn intern_at_reserved(
@@ -1299,7 +1288,6 @@ pub fn create_and_enter<F, R>(s: &'tcx Session,
             crate_name: Symbol::intern(crate_name),
             data_layout,
             layout_interner: Lock::new(FxHashSet()),
-            layout_depth: Cell::new(0),
             stability_interner: Lock::new(FxHashSet()),
             interpret_interner: Default::default(),
             tx_to_llvm_workers: Lock::new(tx),
@@ -1471,15 +1459,9 @@ pub fn serialize_query_result_cache<E>(self,
         self.on_disk_query_result_cache.serialize(self.global_tcx(), encoder)
     }
 
-    /// If true, we should use NLL-style region checking instead of
-    /// lexical style.
-    pub fn nll(self) -> bool {
-        self.features().nll || self.sess.opts.debugging_opts.nll
-    }
-
     /// If true, we should use the MIR-based borrowck (we may *also* use
     /// the AST-based borrowck).
-    pub fn use_mir(self) -> bool {
+    pub fn use_mir_borrowck(self) -> bool {
         self.borrowck_mode().use_mir()
     }
 
@@ -1498,7 +1480,7 @@ pub fn borrowck_mode(&self) -> BorrowckMode {
             mode @ BorrowckMode::Compare => mode,
 
             mode @ BorrowckMode::Ast => {
-                if self.nll() {
+                if self.features().nll {
                     BorrowckMode::Mir
                 } else {
                     mode
@@ -1512,11 +1494,9 @@ pub fn borrowck_mode(&self) -> BorrowckMode {
     /// MIR borrowck, but not when NLL is used. They are also consumed
     /// by the validation stuff.
     pub fn emit_end_regions(self) -> bool {
-        // FIXME(#46875) -- we should not emit end regions when NLL is enabled,
-        // but for now we can't stop doing so because it causes false positives
         self.sess.opts.debugging_opts.emit_end_regions ||
             self.sess.opts.debugging_opts.mir_emit_validate > 0 ||
-            self.use_mir()
+            self.use_mir_borrowck()
     }
 
     #[inline]
@@ -1566,7 +1546,7 @@ impl<'gcx: 'tcx, 'tcx> GlobalCtxt<'gcx> {
     /// Call the closure with a local `TyCtxt` using the given arena.
     pub fn enter_local<F, R>(
         &self,
-        arena: &'tcx DroplessArena,
+        arena: &'tcx SyncDroplessArena,
         f: F
     ) -> R
     where
@@ -1581,6 +1561,7 @@ pub fn enter_local<F, R>(
             let new_icx = ty::tls::ImplicitCtxt {
                 tcx,
                 query: icx.query.clone(),
+                layout_depth: icx.layout_depth,
             };
             ty::tls::enter_context(&new_icx, |new_icx| {
                 f(new_icx.tcx)
@@ -1775,6 +1756,9 @@ pub struct ImplicitCtxt<'a, 'gcx: 'a+'tcx, 'tcx: 'a> {
         /// The current query job, if any. This is updated by start_job in
         /// ty::maps::plumbing when executing a query
         pub query: Option<Lrc<maps::QueryJob<'gcx>>>,
+
+        /// Used to prevent layout from recursing too deeply.
+        pub layout_depth: usize,
     }
 
     // A thread local value which stores a pointer to the current ImplicitCtxt
@@ -1860,6 +1844,7 @@ pub fn enter_global<'gcx, F, R>(gcx: &GlobalCtxt<'gcx>, f: F) -> R
             let icx = ImplicitCtxt {
                 tcx,
                 query: None,
+                layout_depth: 0,
             };
             enter_context(&icx, |_| {
                 f(tcx)
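
`cache_static` replaces the old `cache`/`get_cached` pair: it memoizes one `AllocId` per static `DefId` and maintains the inverse map for `get_static`. A stripped-down sketch of that interner pattern, with plain `HashMap`s and integer ids where the real interner sits behind a lock:

    use std::collections::HashMap;

    #[derive(Default)]
    struct InterpretInterner {
        statics: HashMap<u64, u32>,      // AllocId -> DefId
        static_cache: HashMap<u32, u64>, // DefId -> AllocId
        next_id: u64,
    }

    impl InterpretInterner {
        fn reserve(&mut self) -> u64 {
            let id = self.next_id;
            self.next_id += 1;
            id
        }

        fn cache_static(&mut self, def_id: u32) -> u64 {
            if let Some(&alloc_id) = self.static_cache.get(&def_id) {
                return alloc_id; // memoized: reuse the existing id
            }
            let alloc_id = self.reserve();
            self.static_cache.insert(def_id, alloc_id);
            self.statics.insert(alloc_id, def_id);
            alloc_id
        }

        fn get_static(&self, alloc_id: u64) -> Option<u32> {
            self.statics.get(&alloc_id).cloned()
        }
    }

    fn main() {
        let mut interner = InterpretInterner::default();
        let a = interner.cache_static(7);
        let b = interner.cache_static(7);
        assert_eq!(a, b); // same static, same AllocId
        assert_eq!(interner.get_static(a), Some(7));
    }
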
index 1f23b0a27e33de547d85402b0bf9301317978ce1..8189064db6968eeecfcc353eeb03ee8326b419c6 100644 (file)
@@ -11,6 +11,7 @@
 use hir::map::DefPathData;
 use hir::def_id::{CrateNum, DefId, CRATE_DEF_INDEX, LOCAL_CRATE};
 use ty::{self, Ty, TyCtxt};
+use middle::cstore::{ExternCrate, ExternCrateSource};
 use syntax::ast;
 use syntax::symbol::Symbol;
 use syntax::symbol::InternedString;
@@ -95,21 +96,20 @@ pub fn push_krate_path<T>(self, buffer: &mut T, cnum: CrateNum)
                 //    `extern crate` manually, we put the `extern
                 //    crate` as the parent. So you wind up with
                 //    something relative to the current crate.
-                // 2. for an indirect crate, where there is no extern
-                //    crate, we just prepend the crate name.
+                // 2. for an extern inferred from a path or an indirect crate,
+                //    where there is no explicit `extern crate`, we just prepend
+                //    the crate name.
                 //
                 // Returns `None` for the local crate.
                 if cnum != LOCAL_CRATE {
                     let opt_extern_crate = self.extern_crate(cnum.as_def_id());
-                    let opt_extern_crate = opt_extern_crate.and_then(|extern_crate| {
-                        if extern_crate.direct {
-                            Some(extern_crate.def_id)
-                        } else {
-                            None
-                        }
-                    });
-                    if let Some(extern_crate_def_id) = opt_extern_crate {
-                        self.push_item_path(buffer, extern_crate_def_id);
+                    if let Some(ExternCrate {
+                        src: ExternCrateSource::Extern(def_id),
+                        direct: true,
+                        ..
+                    }) = *opt_extern_crate
+                    {
+                        self.push_item_path(buffer, def_id);
                     } else {
                         buffer.push(&self.crate_name(cnum).as_str());
                     }
@@ -137,14 +137,18 @@ pub fn try_push_visible_item_path<T>(self, buffer: &mut T, external_def_id: DefI
             // followed by the path to the item within the crate and return.
             if cur_def.index == CRATE_DEF_INDEX {
                 match *self.extern_crate(cur_def) {
-                    Some(ref extern_crate) if extern_crate.direct => {
-                        self.push_item_path(buffer, extern_crate.def_id);
-                        cur_path.iter().rev().map(|segment| buffer.push(&segment)).count();
+                    Some(ExternCrate {
+                        src: ExternCrateSource::Extern(def_id),
+                        direct: true,
+                        ..
+                    }) => {
+                        self.push_item_path(buffer, def_id);
+                        cur_path.iter().rev().for_each(|segment| buffer.push(&segment));
                         return true;
                     }
                     None => {
                         buffer.push(&self.crate_name(cur_def.krate).as_str());
-                        cur_path.iter().rev().map(|segment| buffer.push(&segment)).count();
+                        cur_path.iter().rev().for_each(|segment| buffer.push(&segment));
                         return true;
                     }
                     _ => {},
index dff5b0b65476a22f7515af9585a07e14d474627f..77e2e9447f1ed9143ee381324170506284b980cc 100644 (file)
@@ -896,21 +896,26 @@ fn layout_raw<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
                         query: ty::ParamEnvAnd<'tcx, Ty<'tcx>>)
                         -> Result<&'tcx LayoutDetails, LayoutError<'tcx>>
 {
-    let (param_env, ty) = query.into_parts();
+    ty::tls::with_related_context(tcx, move |icx| {
+        let rec_limit = *tcx.sess.recursion_limit.get();
+        let (param_env, ty) = query.into_parts();
 
-    let rec_limit = *tcx.sess.recursion_limit.get();
-    let depth = tcx.layout_depth.get();
-    if depth > rec_limit {
-        tcx.sess.fatal(
-            &format!("overflow representing the type `{}`", ty));
-    }
+        if icx.layout_depth > rec_limit {
+            tcx.sess.fatal(
+                &format!("overflow representing the type `{}`", ty));
+        }
 
-    tcx.layout_depth.set(depth+1);
-    let cx = LayoutCx { tcx, param_env };
-    let layout = cx.layout_raw_uncached(ty);
-    tcx.layout_depth.set(depth);
+        // Update the ImplicitCtxt to increase the layout_depth
+        let icx = ty::tls::ImplicitCtxt {
+            layout_depth: icx.layout_depth + 1,
+            ..icx.clone()
+        };
 
-    layout
+        ty::tls::enter_context(&icx, |_| {
+            let cx = LayoutCx { tcx, param_env };
+            cx.layout_raw_uncached(ty)
+        })
+    })
 }
 
 pub fn provide(providers: &mut ty::maps::Providers) {
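
The layout recursion guard moves off the global `Cell<usize>` and into the implicit context: the query re-enters `tls::enter_context` with `layout_depth + 1` instead of mutating shared state. A small sketch of the idea with an explicit context value (names simplified; the overflow message mirrors the one above):

    struct ImplicitCtxt {
        layout_depth: usize,
    }

    fn layout_raw(icx: &ImplicitCtxt, ty: &str, rec_limit: usize) -> Result<usize, String> {
        if icx.layout_depth > rec_limit {
            return Err(format!("overflow representing the type `{}`", ty));
        }
        // Recurse with an incremented depth carried in a fresh context value.
        let nested = ImplicitCtxt { layout_depth: icx.layout_depth + 1 };
        if let Some(inner) = ty.strip_prefix("Box<").and_then(|s| s.strip_suffix('>')) {
            Ok(8 + layout_raw(&nested, inner, rec_limit)?)
        } else {
            Ok(4)
        }
    }

    fn main() {
        let icx = ImplicitCtxt { layout_depth: 0 };
        assert_eq!(layout_raw(&icx, "Box<Box<u32>>", 64), Ok(20));
        assert!(layout_raw(&icx, "Box<u8>", 0).is_err());
    }
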
index 7d756fb16a453d0b03b4e82d1f1e82b0cef9df7d..374406158c1d51a5695dfd0ae615e4dd686d1e8c 100644 (file)
@@ -31,6 +31,7 @@ pub(super) enum QueryResult<'tcx, T> {
 /// A span and a query key
 #[derive(Clone, Debug)]
 pub struct QueryInfo<'tcx> {
+    /// The span corresponding to the reason this query was required
     pub span: Span,
     pub query: Query<'tcx>,
 }
@@ -73,13 +74,22 @@ pub(super) fn await<'lcx>(
             cycle.insert(0, job.info.clone());
 
             if &*job as *const _ == self as *const _ {
-                break;
+                // This is the end of the cycle
+                // The span entry we included was for the usage
+                // of the cycle itself, and not part of the cycle
+                // Replace it with the span which caused the cycle to form
+                cycle[0].span = span;
+                // Find out why the cycle itself was used
+                let usage = job.parent.as_ref().map(|parent| {
+                    (job.info.span, parent.info.query.clone())
+                });
+                return Err(CycleError { usage, cycle });
             }
 
             current_job = job.parent.clone();
         }
 
-        Err(CycleError { span, cycle })
+        panic!("did not find a cycle")
     }
 
     /// Signals to waiters that the query is complete.
index d317f5a494b782b2ab6d80f71f93500393b73762..2325b1893d996f85a6df0ff90e6662c23f898a14 100644 (file)
         substitute_normalize_and_test_predicates_node((DefId, &'tcx Substs<'tcx>)) -> bool,
 
     [] fn target_features_whitelist:
-        target_features_whitelist_node(CrateNum) -> Lrc<FxHashSet<String>>,
+        target_features_whitelist_node(CrateNum) -> Lrc<FxHashMap<String, Option<String>>>,
 
     // Get an estimate of the size of an InstanceDef based on its MIR for CGU partitioning.
     [] fn instance_def_size_estimate: instance_def_size_estimate_dep_node(ty::InstanceDef<'tcx>)
index 9ea4b21c552218dc785bf7a8a57189fd1b8af681..f88e33c708e3685027e0aade041adf1ee5243e11 100644 (file)
@@ -33,6 +33,7 @@
 use ty::maps::job::QueryResult;
 use ty::codec::{self as ty_codec, TyDecoder, TyEncoder};
 use ty::context::TyCtxt;
+use util::common::time;
 
 const TAG_FILE_FOOTER: u128 = 0xC0FFEE_C0FFEE_C0FFEE_C0FFEE_C0FFEE;
 
@@ -77,12 +78,11 @@ pub struct OnDiskCache<'sess> {
     // `serialized_data`.
     prev_diagnostics_index: FxHashMap<SerializedDepNodeIndex, AbsoluteBytePos>,
 
-    // A cache to ensure we don't read allocations twice
-    interpret_alloc_cache: RefCell<FxHashMap<usize, interpret::AllocId>>,
+    // Alloc indices to memory location map
+    prev_interpret_alloc_index: Vec<AbsoluteBytePos>,
 
-    // A map from positions to size of the serialized allocation
-    // so we can skip over already processed allocations
-    interpret_alloc_size: RefCell<FxHashMap<usize, usize>>,
+    /// Deserialization: A cache to ensure we don't read allocations twice
+    interpret_alloc_cache: RefCell<FxHashMap<usize, interpret::AllocId>>,
 }
 
 // This type is used only for (de-)serialization.
@@ -92,6 +92,8 @@ struct Footer {
     prev_cnums: Vec<(u32, String, CrateDisambiguator)>,
     query_result_index: EncodedQueryResultIndex,
     diagnostics_index: EncodedQueryResultIndex,
+    // The byte position of each interpreter allocation in the cache
+    interpret_alloc_index: Vec<AbsoluteBytePos>,
 }
 
 type EncodedQueryResultIndex = Vec<(SerializedDepNodeIndex, AbsoluteBytePos)>;
@@ -148,8 +150,8 @@ pub fn new(sess: &'sess Session, data: Vec<u8>, start_pos: usize) -> OnDiskCache
             query_result_index: footer.query_result_index.into_iter().collect(),
             prev_diagnostics_index: footer.diagnostics_index.into_iter().collect(),
             synthetic_expansion_infos: Lock::new(FxHashMap()),
+            prev_interpret_alloc_index: footer.interpret_alloc_index,
             interpret_alloc_cache: RefCell::new(FxHashMap::default()),
-            interpret_alloc_size: RefCell::new(FxHashMap::default()),
         }
     }
 
@@ -165,8 +167,8 @@ pub fn new_empty(codemap: &'sess CodeMap) -> OnDiskCache<'sess> {
             query_result_index: FxHashMap(),
             prev_diagnostics_index: FxHashMap(),
             synthetic_expansion_infos: Lock::new(FxHashMap()),
+            prev_interpret_alloc_index: Vec::new(),
             interpret_alloc_cache: RefCell::new(FxHashMap::default()),
-            interpret_alloc_size: RefCell::new(FxHashMap::default()),
         }
     }
 
@@ -199,7 +201,8 @@ pub fn serialize<'a, 'tcx, E>(&self,
                 type_shorthands: FxHashMap(),
                 predicate_shorthands: FxHashMap(),
                 expn_info_shorthands: FxHashMap(),
-                interpret_alloc_shorthands: FxHashMap(),
+                interpret_allocs: FxHashMap(),
+                interpret_allocs_inverse: Vec::new(),
                 codemap: CachingCodemapView::new(tcx.sess.codemap()),
                 file_to_file_index,
             };
@@ -212,7 +215,7 @@ pub fn serialize<'a, 'tcx, E>(&self,
             // Encode query results
             let mut query_result_index = EncodedQueryResultIndex::new();
 
-            {
+            time(tcx.sess, "encode query results", || {
                 use ty::maps::queries::*;
                 let enc = &mut encoder;
                 let qri = &mut query_result_index;
@@ -256,7 +259,9 @@ pub fn serialize<'a, 'tcx, E>(&self,
                         }
                     }
                 }
-            }
+
+                Ok(())
+            })?;
 
             // Encode diagnostics
             let diagnostics_index = {
@@ -277,6 +282,31 @@ pub fn serialize<'a, 'tcx, E>(&self,
                 diagnostics_index
             };
 
+            let interpret_alloc_index = {
+                let mut interpret_alloc_index = Vec::new();
+                let mut n = 0;
+                loop {
+                    let new_n = encoder.interpret_allocs_inverse.len();
+                    // if we have found new ids, serialize those, too
+                    if n == new_n {
+                        // otherwise, abort
+                        break;
+                    }
+                    for idx in n..new_n {
+                        let id = encoder.interpret_allocs_inverse[idx];
+                        let pos = AbsoluteBytePos::new(encoder.position());
+                        interpret_alloc_index.push(pos);
+                        interpret::specialized_encode_alloc_id(
+                            &mut encoder,
+                            tcx,
+                            id,
+                        )?;
+                    }
+                    n = new_n;
+                }
+                interpret_alloc_index
+            };
+
             let sorted_cnums = sorted_cnums_including_local_crate(tcx);
             let prev_cnums: Vec<_> = sorted_cnums.iter().map(|&cnum| {
                 let crate_name = tcx.original_crate_name(cnum).as_str().to_string();
@@ -291,6 +321,7 @@ pub fn serialize<'a, 'tcx, E>(&self,
                 prev_cnums,
                 query_result_index,
                 diagnostics_index,
+                interpret_alloc_index,
             })?;
 
             // Encode the position of the footer as the last 8 bytes of the
@@ -396,8 +427,8 @@ fn load_indexed<'tcx, T>(&self,
             file_index_to_file: &self.file_index_to_file,
             file_index_to_stable_id: &self.file_index_to_stable_id,
             synthetic_expansion_infos: &self.synthetic_expansion_infos,
+            prev_interpret_alloc_index: &self.prev_interpret_alloc_index,
             interpret_alloc_cache: &self.interpret_alloc_cache,
-            interpret_alloc_size: &self.interpret_alloc_size,
         };
 
         match decode_tagged(&mut decoder, dep_node_index) {
@@ -460,7 +491,8 @@ struct CacheDecoder<'a, 'tcx: 'a, 'x> {
     file_index_to_file: &'x Lock<FxHashMap<FileMapIndex, Lrc<FileMap>>>,
     file_index_to_stable_id: &'x FxHashMap<FileMapIndex, StableFilemapId>,
     interpret_alloc_cache: &'x RefCell<FxHashMap<usize, interpret::AllocId>>,
-    interpret_alloc_size: &'x RefCell<FxHashMap<usize, usize>>,
+    /// Maps from an allocation index in the cache file to its byte position in the cache file
+    prev_interpret_alloc_index: &'x [AbsoluteBytePos],
 }
 
 impl<'a, 'tcx, 'x> CacheDecoder<'a, 'tcx, 'x> {
@@ -584,36 +616,29 @@ fn map_encoded_cnum_to_current(&self, cnum: CrateNum) -> CrateNum {
 impl<'a, 'tcx, 'x> SpecializedDecoder<interpret::AllocId> for CacheDecoder<'a, 'tcx, 'x> {
     fn specialized_decode(&mut self) -> Result<interpret::AllocId, Self::Error> {
         let tcx = self.tcx;
-        let pos = TyDecoder::position(self);
-        trace!("specialized_decode_alloc_id: {:?}", pos);
-        if let Some(cached) = self.interpret_alloc_cache.borrow().get(&pos).cloned() {
-            // if there's no end position we are currently deserializing a recursive
-            // allocation
-            if let Some(end) = self.interpret_alloc_size.borrow().get(&pos).cloned() {
-                trace!("{} already cached as {:?}", pos, cached);
-                // skip ahead
-                self.opaque.set_position(end);
-                return Ok(cached)
-            }
+        let idx = usize::decode(self)?;
+        trace!("loading index {}", idx);
+
+        if let Some(cached) = self.interpret_alloc_cache.borrow().get(&idx).cloned() {
+            trace!("loading alloc id {:?} from alloc_cache", cached);
+            return Ok(cached);
         }
-        let id = interpret::specialized_decode_alloc_id(
-            self,
-            tcx,
-            pos,
-            |this, pos, alloc_id| {
-                assert!(this.interpret_alloc_cache.borrow_mut().insert(pos, alloc_id).is_none());
-            },
-            |this, shorthand| {
-                // need to load allocation
-                this.with_position(shorthand, |this| interpret::AllocId::decode(this))
-            }
-        )?;
-        assert!(self
-            .interpret_alloc_size
-            .borrow_mut()
-            .insert(pos, TyDecoder::position(self))
-            .is_none());
-        Ok(id)
+        let pos = self.prev_interpret_alloc_index[idx].to_usize();
+        trace!("loading position {}", pos);
+        self.with_position(pos, |this| {
+            interpret::specialized_decode_alloc_id(
+                this,
+                tcx,
+                |this, alloc_id| {
+                    trace!("caching idx {} for alloc id {} at position {}", idx, alloc_id, pos);
+                    assert!(this
+                        .interpret_alloc_cache
+                        .borrow_mut()
+                        .insert(idx, alloc_id)
+                        .is_none());
+                },
+            )
+        })
     }
 }
 impl<'a, 'tcx, 'x> SpecializedDecoder<Span> for CacheDecoder<'a, 'tcx, 'x> {
@@ -777,7 +802,8 @@ struct CacheEncoder<'enc, 'a, 'tcx, E>
     type_shorthands: FxHashMap<ty::Ty<'tcx>, usize>,
     predicate_shorthands: FxHashMap<ty::Predicate<'tcx>, usize>,
     expn_info_shorthands: FxHashMap<Mark, AbsoluteBytePos>,
-    interpret_alloc_shorthands: FxHashMap<interpret::AllocId, usize>,
+    interpret_allocs: FxHashMap<interpret::AllocId, usize>,
+    interpret_allocs_inverse: Vec<interpret::AllocId>,
     codemap: CachingCodemapView<'tcx>,
     file_to_file_index: FxHashMap<*const FileMap, FileMapIndex>,
 }
@@ -815,26 +841,17 @@ impl<'enc, 'a, 'tcx, E> SpecializedEncoder<interpret::AllocId> for CacheEncoder<
 {
     fn specialized_encode(&mut self, alloc_id: &interpret::AllocId) -> Result<(), Self::Error> {
         use std::collections::hash_map::Entry;
-        let tcx = self.tcx;
-        let pos = self.position();
-        let shorthand = match self.interpret_alloc_shorthands.entry(*alloc_id) {
-            Entry::Occupied(entry) => Some(entry.get().clone()),
-            Entry::Vacant(entry) => {
-                // ensure that we don't place any AllocIds at the very beginning
-                // of the metadata file, because that would end up making our indices
-                // not special. It is essentially impossible for that to happen,
-                // but let's make sure
-                assert!(pos >= interpret::SHORTHAND_START);
-                entry.insert(pos);
-                None
+        let index = match self.interpret_allocs.entry(*alloc_id) {
+            Entry::Occupied(e) => *e.get(),
+            Entry::Vacant(e) => {
+                let idx = self.interpret_allocs_inverse.len();
+                self.interpret_allocs_inverse.push(*alloc_id);
+                e.insert(idx);
+                idx
             },
         };
-        interpret::specialized_encode_alloc_id(
-            self,
-            tcx,
-            *alloc_id,
-            shorthand,
-        )
+
+        index.encode(self)
     }
 }
 
@@ -1111,6 +1128,11 @@ fn encode_query_results<'enc, 'a, 'tcx, Q, E>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
           E: 'enc + TyEncoder,
           Q::Value: Encodable,
 {
+    let desc = &format!("encode_query_results for {}",
+        unsafe { ::std::intrinsics::type_name::<Q>() });
+
+    time(tcx.sess, desc, || {
+
     for (key, entry) in Q::get_cache_internal(tcx).map.iter() {
         if Q::cache_on_disk(key.clone()) {
             let entry = match *entry {
@@ -1129,4 +1151,5 @@ fn encode_query_results<'enc, 'a, 'tcx, Q, E>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
     }
 
     Ok(())
+    })
 }
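
The incremental cache stops storing allocations inline with shorthands; instead every `AllocId` in the body is encoded as a dense index, and the footer records, per index, the byte position where that allocation was serialized (`interpret_alloc_index`). A toy model of the encoding side, including the fix-point loop that keeps serializing until encoding allocations discovers no further `AllocId`s:

    use std::collections::HashMap;

    #[derive(Default)]
    struct Encoder {
        out: Vec<u8>,
        interpret_allocs: HashMap<u64, usize>, // AllocId -> index
        interpret_allocs_inverse: Vec<u64>,    // index -> AllocId
    }

    impl Encoder {
        fn index_for(&mut self, alloc_id: u64) -> usize {
            if let Some(&idx) = self.interpret_allocs.get(&alloc_id) {
                return idx;
            }
            let idx = self.interpret_allocs_inverse.len();
            self.interpret_allocs_inverse.push(alloc_id);
            self.interpret_allocs.insert(alloc_id, idx);
            idx
        }

        fn position(&self) -> usize {
            self.out.len()
        }

        fn encode_alloc(&mut self, alloc_id: u64) {
            // stand-in for serializing the allocation's contents
            self.out.extend_from_slice(&alloc_id.to_le_bytes());
        }
    }

    fn main() {
        let mut enc = Encoder::default();
        // While encoding query results, each AllocId is written as a dense index.
        let _idx = enc.index_for(1234);

        // Footer phase: serialize each discovered allocation and remember where it
        // starts; encoding one allocation may reference more AllocIds, hence the loop.
        let mut interpret_alloc_index = Vec::new();
        let mut n = 0;
        loop {
            let new_n = enc.interpret_allocs_inverse.len();
            if n == new_n {
                break; // no new AllocIds were discovered
            }
            for idx in n..new_n {
                let id = enc.interpret_allocs_inverse[idx];
                interpret_alloc_index.push(enc.position());
                enc.encode_alloc(id);
            }
            n = new_n;
        }
        assert_eq!(interpret_alloc_index, vec![0]);
    }
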
index efe7a56d8009709f8fdd460f5cdeb08fe49e0afd..4f6925938c8029ee9d43fc5912b4f497946c7286 100644 (file)
@@ -17,6 +17,7 @@
 use errors::Level;
 use ty::tls;
 use ty::{TyCtxt};
+use ty::maps::Query;
 use ty::maps::config::QueryDescription;
 use ty::maps::job::{QueryResult, QueryInfo};
 use ty::item_path;
@@ -63,7 +64,8 @@ fn get_cache_internal<'a>(tcx: TyCtxt<'a, 'tcx, 'tcx>)
 
 #[derive(Clone)]
 pub(super) struct CycleError<'tcx> {
-    pub(super) span: Span,
+    /// The query and related span which uses the cycle
+    pub(super) usage: Option<(Span, Query<'tcx>)>,
     pub(super) cycle: Vec<QueryInfo<'tcx>>,
 }
 
@@ -79,33 +81,41 @@ pub(super) enum TryGetLock<'a, 'tcx: 'a, T, D: QueryDescription<'tcx> + 'a> {
 }
 
 impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> {
-    pub(super) fn report_cycle(self, CycleError { span, cycle: stack }: CycleError)
+    pub(super) fn report_cycle(self, CycleError { usage, cycle: stack }: CycleError<'gcx>)
         -> DiagnosticBuilder<'a>
     {
         assert!(!stack.is_empty());
 
+        let fix_span = |span: Span, query: &Query<'gcx>| {
+            self.sess.codemap().def_span(query.default_span(self, span))
+        };
+
         // Disable naming impls with types in this path, since that
         // sometimes cycles itself, leading to extra cycle errors.
         // (And cycle errors around impls tend to occur during the
         // collect/coherence phases anyhow.)
         item_path::with_forced_impl_filename_line(|| {
-            let span = self.sess.codemap().def_span(span);
-            let mut err =
-                struct_span_err!(self.sess, span, E0391,
-                                 "cyclic dependency detected");
-            err.span_label(span, "cyclic reference");
-
-            err.span_note(self.sess.codemap().def_span(stack[0].span),
-                          &format!("the cycle begins when {}...", stack[0].query.describe(self)));
-
-            for &QueryInfo { span, ref query, .. } in &stack[1..] {
-                err.span_note(self.sess.codemap().def_span(span),
-                              &format!("...which then requires {}...", query.describe(self)));
+            let span = fix_span(stack[1 % stack.len()].span, &stack[0].query);
+            let mut err = struct_span_err!(self.sess,
+                                           span,
+                                           E0391,
+                                           "cycle detected when {}",
+                                           stack[0].query.describe(self));
+
+            for i in 1..stack.len() {
+                let query = &stack[i].query;
+                let span = fix_span(stack[(i + 1) % stack.len()].span, query);
+                err.span_note(span, &format!("...which requires {}...", query.describe(self)));
             }
 
-            err.note(&format!("...which then again requires {}, completing the cycle.",
+            err.note(&format!("...which again requires {}, completing the cycle",
                               stack[0].query.describe(self)));
 
+            if let Some((span, query)) = usage {
+                err.span_note(fix_span(span, &query),
+                              &format!("cycle used when {}", query.describe(self)));
+            }
+
             return err
         })
     }
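The rewritten cycle report above describes entry `i` using the span of entry `(i + 1) % len`, so each step points at where the next query was required and the last step wraps back to the first. A simplified, self-contained illustration of that wrap-around indexing (`QueryInfo`, `span`, and `describe` here are stand-ins for the compiler's types):

struct QueryInfo {
    span: &'static str,     // stand-in for a real Span
    describe: &'static str, // stand-in for Query::describe(tcx)
}

fn report_cycle(stack: &[QueryInfo]) {
    assert!(!stack.is_empty());
    println!(
        "error: cycle detected when {} (at {})",
        stack[0].describe,
        stack[1 % stack.len()].span // works even for a one-element cycle
    );
    for i in 1..stack.len() {
        println!(
            "note: ...which requires {}... (at {})",
            stack[i].describe,
            stack[(i + 1) % stack.len()].span
        );
    }
    println!(
        "note: ...which again requires {}, completing the cycle",
        stack[0].describe
    );
}

fn main() {
    report_cycle(&[
        QueryInfo { span: "a.rs:1", describe: "computing layout of `A`" },
        QueryInfo { span: "a.rs:5", describe: "computing layout of `B`" },
    ]);
}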
@@ -266,6 +276,22 @@ pub fn describe(&self, tcx: TyCtxt) -> String {
                     r
                 }
             }
+
+            // FIXME(eddyb) Get more valid Span's on queries.
+            pub fn default_span(&self, tcx: TyCtxt<'_, $tcx, '_>, span: Span) -> Span {
+                if span != DUMMY_SP {
+                    return span;
+                }
+                // The def_span query is used to calculate default_span,
+                // so exit to avoid infinite recursion
+                match *self {
+                    Query::def_span(..) => return span,
+                    _ => ()
+                }
+                match *self {
+                    $(Query::$name(key) => key.default_span(tcx),)*
+                }
+            }
         }
 
         pub mod queries {
@@ -303,7 +329,7 @@ fn to_dep_node(tcx: TyCtxt<'a, $tcx, 'lcx>, key: &$K) -> DepNode {
             /// If the query already executed and panicked, this will fatal error / silently panic
             fn try_get_lock(
                 tcx: TyCtxt<'a, $tcx, 'lcx>,
-                mut span: Span,
+                span: Span,
                 key: &$K
             ) -> TryGetLock<'a, $tcx, $V, Self>
             {
@@ -329,13 +355,6 @@ fn try_get_lock(
                     };
                     mem::drop(lock);
 
-                    // This just matches the behavior of `try_get_with` so the span when
-                    // we await matches the span we would use when executing.
-                    // See the FIXME there.
-                    if span == DUMMY_SP && stringify!($name) != "def_span" {
-                        span = key.default_span(tcx);
-                    }
-
                     if let Err(cycle) = job.await(tcx, span) {
                         return TryGetLock::JobCompleted(Err(cycle));
                     }
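The `default_span` helper added a couple of hunks up (and the removals here and in `try_get_with` below) centralize the DUMMY_SP fallback: when a query has no useful span, fall back to the span of its key's definition, except for the def-span query itself, which is what the fallback would call and would therefore recurse forever. A toy, self-contained version of that guard (the enum, `def_span_of`, and the string "spans" are stand-ins):

const DUMMY_SP: &str = "<dummy>";

// Stand-ins for the real query enum and the def-span lookup.
enum Query {
    DefSpan(u32),
    TypeOf(u32),
}

fn def_span_of(key: u32) -> String {
    format!("def site of item #{}", key)
}

// Fall back to the key's definition span only when the caller's span is a
// dummy, and never for the def-span query itself.
fn default_span(query: &Query, span: &str) -> String {
    if span != DUMMY_SP {
        return span.to_string();
    }
    match *query {
        Query::DefSpan(_) => span.to_string(),
        Query::TypeOf(key) => def_span_of(key),
    }
}

fn main() {
    assert_eq!(default_span(&Query::TypeOf(3), DUMMY_SP), "def site of item #3");
    assert_eq!(default_span(&Query::DefSpan(3), DUMMY_SP), DUMMY_SP);
    assert_eq!(default_span(&Query::TypeOf(3), "lib.rs:10"), "lib.rs:10");
}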
@@ -343,7 +362,7 @@ fn try_get_lock(
             }
 
             fn try_get_with(tcx: TyCtxt<'a, $tcx, 'lcx>,
-                            mut span: Span,
+                            span: Span,
                             key: $K)
                             -> Result<$V, CycleError<$tcx>>
             {
@@ -377,18 +396,6 @@ macro_rules! get_lock_or_return {
 
                 let mut lock = get_lock_or_return!();
 
-                // FIXME(eddyb) Get more valid Span's on queries.
-                // def_span guard is necessary to prevent a recursive loop,
-                // default_span calls def_span query internally.
-                if span == DUMMY_SP && stringify!($name) != "def_span" {
-                    // This might deadlock if we hold the map lock since we might be
-                    // waiting for the def_span query and switch to some other fiber
-                    // So we drop the lock here and reacquire it
-                    mem::drop(lock);
-                    span = key.default_span(tcx);
-                    lock = get_lock_or_return!();
-                }
-
                 // Fast path for when incr. comp. is off. `to_dep_node` is
                 // expensive for some DepKinds.
                 if !tcx.dep_graph.is_fully_enabled() {
@@ -522,6 +529,7 @@ fn start_job<F, R>(tcx: TyCtxt<'_, $tcx, 'lcx>,
                         let icx = ty::tls::ImplicitCtxt {
                             tcx,
                             query: Some(job.clone()),
+                            layout_depth: icx.layout_depth,
                         };
 
                         // Use the ImplicitCtxt while we execute the query
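The single added line above (`layout_depth: icx.layout_depth`) makes the freshly built `ImplicitCtxt` inherit the parent's layout recursion depth instead of silently resetting it. A minimal stand-alone illustration of why that propagation matters; the types and field names here are simplified stand-ins, not the compiler's:

// Stand-in for the compiler's thread-local implicit context.
struct ImplicitCtxt {
    query: Option<&'static str>,
    layout_depth: usize,
}

fn enter_new_query(parent: &ImplicitCtxt, query: &'static str) -> ImplicitCtxt {
    ImplicitCtxt {
        query: Some(query),
        // Carry the parent's depth forward; starting from zero here would
        // defeat any recursion-depth limit checked against this field.
        layout_depth: parent.layout_depth,
    }
}

fn main() {
    let parent = ImplicitCtxt { query: None, layout_depth: 3 };
    let child = enter_new_query(&parent, "layout_of");
    assert_eq!(child.layout_depth, 3);
    assert_eq!(child.query, Some("layout_of"));
}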
index 8cf662ccaea9254f9421c5ee4d37a504b828662f..7b4b7082bb6ceae41fb22c35d27e09fbe64aac2f 100644 (file)
@@ -509,7 +509,6 @@ fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>) -> Option<Self::Lif
             Intrinsic(ref s) => Intrinsic(s.clone()),
             OverflowingMath => OverflowingMath,
             InvalidChar(c) => InvalidChar(c),
-            ExecutionTimeLimitReached => ExecutionTimeLimitReached,
             StackFrameLimitReached => StackFrameLimitReached,
             OutOfTls => OutOfTls,
             TlsOutOfBounds => TlsOutOfBounds,
index d68393956efd1a51ece763e6e8e1314eb1cae533..310fcbcfcb374ac5cbbbc3c728ad34c5b18bfa71 100644 (file)
@@ -1550,7 +1550,7 @@ pub fn builtin_deref(&self, explicit: bool) -> Option<TypeAndMut<'tcx>> {
         }
     }
 
-    /// Returns the type of ty[i]
+    /// Returns the type of `ty[i]`.
     pub fn builtin_index(&self) -> Option<Ty<'tcx>> {
         match self.sty {
             TyArray(ty, _) | TySlice(ty) => Some(ty),
index 305502e7f063b562ac1d298925911ebcc325ce44..ffd02108c270e4352ba97ecb342cc85038c59030 100644 (file)
 use rustc::middle::allocator::AllocatorKind;
 use rustc_errors;
 use syntax::abi::Abi;
-use syntax::ast::{Crate, Attribute, LitKind, StrStyle};
-use syntax::ast::{Unsafety, Constness, Generics, Mutability, Ty, Mac, Arg};
-use syntax::ast::{self, Ident, Item, ItemKind, TyKind, VisibilityKind, Expr};
+use syntax::ast::{Attribute, Crate, LitKind, StrStyle};
+use syntax::ast::{Arg, Constness, Generics, Mac, Mutability, Ty, Unsafety};
+use syntax::ast::{self, Expr, Ident, Item, ItemKind, TyKind, VisibilityKind};
 use syntax::attr;
 use syntax::codemap::{dummy_spanned, respan};
-use syntax::codemap::{ExpnInfo, NameAndSpan, MacroAttribute};
+use syntax::codemap::{ExpnInfo, MacroAttribute, NameAndSpan};
 use syntax::ext::base::ExtCtxt;
 use syntax::ext::base::Resolver;
 use syntax::ext::build::AstBuilder;
 
 use {AllocatorMethod, AllocatorTy, ALLOCATOR_METHODS};
 
-pub fn modify(sess: &ParseSess,
-              resolver: &mut Resolver,
-              krate: Crate,
-              handler: &rustc_errors::Handler) -> ast::Crate {
+pub fn modify(
+    sess: &ParseSess,
+    resolver: &mut Resolver,
+    krate: Crate,
+    handler: &rustc_errors::Handler,
+) -> ast::Crate {
     ExpandAllocatorDirectives {
         handler,
         sess,
@@ -55,20 +57,24 @@ fn fold_item(&mut self, item: P<Item>) -> SmallVector<P<Item>> {
         let name = if attr::contains_name(&item.attrs, "global_allocator") {
             "global_allocator"
         } else {
-            return fold::noop_fold_item(item, self)
+            return fold::noop_fold_item(item, self);
         };
         match item.node {
             ItemKind::Static(..) => {}
             _ => {
-                self.handler.span_err(item.span, "allocators must be statics");
-                return SmallVector::one(item)
+                self.handler
+                    .span_err(item.span, "allocators must be statics");
+                return SmallVector::one(item);
             }
         }
 
         if self.found {
-            self.handler.span_err(item.span, "cannot define more than one \
-                                              #[global_allocator]");
-            return SmallVector::one(item)
+            self.handler.span_err(
+                item.span,
+                "cannot define more than one \
+                 #[global_allocator]",
+            );
+            return SmallVector::one(item);
         }
         self.found = true;
 
@@ -80,7 +86,7 @@ fn fold_item(&mut self, item: P<Item>) -> SmallVector<P<Item>> {
                 span: None,
                 allow_internal_unstable: true,
                 allow_internal_unsafe: false,
-            }
+            },
         });
         let span = item.span.with_ctxt(SyntaxContext::empty().apply_mark(mark));
         let ecfg = ExpansionConfig::default(name.to_string());
@@ -91,10 +97,7 @@ fn fold_item(&mut self, item: P<Item>) -> SmallVector<P<Item>> {
             core: Ident::from_str("core"),
             cx: ExtCtxt::new(self.sess, ecfg, self.resolver),
         };
-        let super_path = f.cx.path(f.span, vec![
-            Ident::from_str("super"),
-            f.global,
-        ]);
+        let super_path = f.cx.path(f.span, vec![Ident::from_str("super"), f.global]);
         let mut items = vec![
             f.cx.item_extern_crate(f.span, f.core),
             f.cx.item_use_simple(
@@ -114,7 +117,7 @@ fn fold_item(&mut self, item: P<Item>) -> SmallVector<P<Item>> {
         let mut ret = SmallVector::new();
         ret.push(item);
         ret.push(module);
-        return ret
+        return ret;
     }
 
     fn fold_mac(&mut self, mac: Mac) -> Mac {
@@ -139,30 +142,39 @@ fn allocator_fn(&self, method: &AllocatorMethod) -> P<Item> {
             i += 1;
             name
         };
-        let args = method.inputs.iter().map(|ty| {
-            self.arg_ty(ty, &mut abi_args, mk)
-        }).collect();
+        let args = method
+            .inputs
+            .iter()
+            .map(|ty| self.arg_ty(ty, &mut abi_args, mk))
+            .collect();
         let result = self.call_allocator(method.name, args);
         let (output_ty, output_expr) = self.ret_ty(&method.output, result);
-        let kind = ItemKind::Fn(self.cx.fn_decl(abi_args, ast::FunctionRetTy::Ty(output_ty)),
-                                Unsafety::Unsafe,
-                                dummy_spanned(Constness::NotConst),
-                                Abi::Rust,
-                                Generics::default(),
-                                self.cx.block_expr(output_expr));
-        self.cx.item(self.span,
-                     Ident::from_str(&self.kind.fn_name(method.name)),
-                     self.attrs(),
-                     kind)
+        let kind = ItemKind::Fn(
+            self.cx.fn_decl(abi_args, ast::FunctionRetTy::Ty(output_ty)),
+            Unsafety::Unsafe,
+            dummy_spanned(Constness::NotConst),
+            Abi::Rust,
+            Generics::default(),
+            self.cx.block_expr(output_expr),
+        );
+        self.cx.item(
+            self.span,
+            Ident::from_str(&self.kind.fn_name(method.name)),
+            self.attrs(),
+            kind,
+        )
     }
 
     fn call_allocator(&self, method: &str, mut args: Vec<P<Expr>>) -> P<Expr> {
-        let method = self.cx.path(self.span, vec![
-            self.core,
-            Ident::from_str("alloc"),
-            Ident::from_str("GlobalAlloc"),
-            Ident::from_str(method),
-        ]);
+        let method = self.cx.path(
+            self.span,
+            vec![
+                self.core,
+                Ident::from_str("alloc"),
+                Ident::from_str("GlobalAlloc"),
+                Ident::from_str(method),
+            ],
+        );
         let method = self.cx.expr_path(method);
         let allocator = self.cx.path_ident(self.span, self.global);
         let allocator = self.cx.expr_path(allocator);
@@ -189,10 +201,12 @@ fn attrs(&self) -> Vec<Attribute> {
         ]
     }
 
-    fn arg_ty(&self,
-              ty: &AllocatorTy,
-              args: &mut Vec<Arg>,
-              ident: &mut FnMut() -> Ident) -> P<Expr> {
+    fn arg_ty(
+        &self,
+        ty: &AllocatorTy,
+        args: &mut Vec<Arg>,
+        ident: &mut FnMut() -> Ident,
+    ) -> P<Expr> {
         match *ty {
             AllocatorTy::Layout => {
                 let usize = self.cx.path_ident(self.span, Ident::from_str("usize"));
@@ -202,18 +216,19 @@ fn arg_ty(&self,
                 args.push(self.cx.arg(self.span, size, ty_usize.clone()));
                 args.push(self.cx.arg(self.span, align, ty_usize));
 
-                let layout_new = self.cx.path(self.span, vec![
-                    self.core,
-                    Ident::from_str("alloc"),
-                    Ident::from_str("Layout"),
-                    Ident::from_str("from_size_align_unchecked"),
-                ]);
+                let layout_new = self.cx.path(
+                    self.span,
+                    vec![
+                        self.core,
+                        Ident::from_str("alloc"),
+                        Ident::from_str("Layout"),
+                        Ident::from_str("from_size_align_unchecked"),
+                    ],
+                );
                 let layout_new = self.cx.expr_path(layout_new);
                 let size = self.cx.expr_ident(self.span, size);
                 let align = self.cx.expr_ident(self.span, align);
-                let layout = self.cx.expr_call(self.span,
-                                               layout_new,
-                                               vec![size, align]);
+                let layout = self.cx.expr_call(self.span, layout_new, vec![size, align]);
                 layout
             }
 
@@ -230,9 +245,7 @@ fn arg_ty(&self,
                 self.cx.expr_ident(self.span, ident)
             }
 
-            AllocatorTy::ResultPtr |
-            AllocatorTy::Bang |
-            AllocatorTy::Unit => {
+            AllocatorTy::ResultPtr | AllocatorTy::Bang | AllocatorTy::Unit => {
                 panic!("can't convert AllocatorTy to an argument")
             }
         }
@@ -249,17 +262,11 @@ fn ret_ty(&self, ty: &AllocatorTy, expr: P<Expr>) -> (P<Ty>, P<Expr>) {
                 (self.ptr_u8(), expr)
             }
 
-            AllocatorTy::Bang => {
-                (self.cx.ty(self.span, TyKind::Never), expr)
-            }
+            AllocatorTy::Bang => (self.cx.ty(self.span, TyKind::Never), expr),
 
-            AllocatorTy::Unit => {
-                (self.cx.ty(self.span, TyKind::Tup(Vec::new())), expr)
-            }
+            AllocatorTy::Unit => (self.cx.ty(self.span, TyKind::Tup(Vec::new())), expr),
 
-            AllocatorTy::Layout |
-            AllocatorTy::Usize |
-            AllocatorTy::Ptr => {
+            AllocatorTy::Layout | AllocatorTy::Usize | AllocatorTy::Ptr => {
                 panic!("can't convert AllocatorTy to an output")
             }
         }
@@ -277,11 +284,14 @@ fn ptr_u8(&self) -> P<Ty> {
     }
 
     fn ptr_opaque(&self) -> P<Ty> {
-        let opaque = self.cx.path(self.span, vec![
-            self.core,
-            Ident::from_str("alloc"),
-            Ident::from_str("Opaque"),
-        ]);
+        let opaque = self.cx.path(
+            self.span,
+            vec![
+                self.core,
+                Ident::from_str("alloc"),
+                Ident::from_str("Opaque"),
+            ],
+        );
         let ty_opaque = self.cx.ty_path(opaque);
         self.cx.ty_ptr(self.span, ty_opaque, Mutability::Mutable)
     }
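The reformatted expansion code above still builds, for each allocator method, an `unsafe` shim that reconstructs a `Layout` from raw size/align arguments and forwards to the user's `#[global_allocator]` through the `GlobalAlloc` trait. Written out by hand and heavily simplified (the symbol and function names below are invented for illustration; `System` stands in for whatever static the attribute is on), the generated code is roughly:

use std::alloc::{GlobalAlloc, Layout, System};

// The user's allocator static.
static THE_ALLOCATOR: System = System;

// Corresponds to one generated `__rust_alloc`-style shim (name invented).
unsafe fn generated_alloc_shim(size: usize, align: usize) -> *mut u8 {
    let layout = Layout::from_size_align_unchecked(size, align);
    GlobalAlloc::alloc(&THE_ALLOCATOR, layout)
}

fn main() {
    unsafe {
        let p = generated_alloc_shim(16, 8);
        if !p.is_null() {
            let layout = Layout::from_size_align_unchecked(16, 8);
            GlobalAlloc::dealloc(&THE_ALLOCATOR, p, layout);
        }
    }
}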
index db1cfb5c767687cf92613bb333035f980cdb2592..7b33ee40d8cdb9d8e8ca1f8f987eef86b783f7b3 100644 (file)
@@ -207,7 +207,7 @@ pub struct Iter<A: Array> {
 
 impl<A: Array> Drop for Iter<A> {
     fn drop(&mut self) {
-        for _ in self {}
+        self.for_each(drop);
     }
 }
 
@@ -251,7 +251,7 @@ fn size_hint(&self) -> (usize, Option<usize>) {
 impl<'a, A: Array> Drop for Drain<'a, A> {
     fn drop(&mut self) {
         // exhaust self first
-        while let Some(_) = self.next() {}
+        self.for_each(drop);
 
         if self.tail_len > 0 {
             unsafe {
index 4071b804def6c4f77f72be1b54f3ec4d816e7187..f8c0289cc98c870269fa62bc9a34073a635f028b 100644 (file)
 use rustc::ich::Fingerprint;
 use rustc_data_structures::stable_hasher::StableHasher;
 use rustc_mir as mir;
-use rustc::session::{Session, CompileResult, CrateDisambiguator};
+use rustc::session::{CompileResult, CrateDisambiguator, Session};
 use rustc::session::CompileIncomplete;
 use rustc::session::config::{self, Input, OutputFilenames, OutputType};
 use rustc::session::search_paths::PathKind;
 use rustc::lint;
-use rustc::middle::{self, stability, reachable, resolve_lifetime};
+use rustc::middle::{self, reachable, resolve_lifetime, stability};
 use rustc::middle::cstore::CrateStore;
 use rustc::middle::privacy::AccessLevels;
-use rustc::ty::{self, TyCtxt, Resolutions, AllArenas};
+use rustc::ty::{self, AllArenas, Resolutions, TyCtxt};
 use rustc::traits;
-use rustc::util::common::{ErrorReported, time, install_panic_hook};
+use rustc::util::common::{install_panic_hook, time, ErrorReported};
 use rustc_allocator as allocator;
 use rustc_borrowck as borrowck;
 use rustc_incremental;
 use rustc_privacy;
 use rustc_plugin::registry::Registry;
 use rustc_plugin as plugin;
-use rustc_passes::{self, ast_validation, loops, rvalue_promotion, hir_stats};
+use rustc_passes::{self, ast_validation, hir_stats, loops, rvalue_promotion};
 use super::Compilation;
 
 use serialize::json;
 
 use std::any::Any;
 use std::env;
-use std::ffi::{OsString, OsStr};
+use std::ffi::{OsStr, OsString};
 use std::fs;
 use std::io::{self, Write};
 use std::iter;
 
 use profile;
 
-pub fn compile_input(trans: Box<TransCrate>,
-                     sess: &Session,
-                     cstore: &CStore,
-                     input_path: &Option<PathBuf>,
-                     input: &Input,
-                     outdir: &Option<PathBuf>,
-                     output: &Option<PathBuf>,
-                     addl_plugins: Option<Vec<String>>,
-                     control: &CompileController) -> CompileResult {
+pub fn compile_input(
+    trans: Box<TransCrate>,
+    sess: &Session,
+    cstore: &CStore,
+    input_path: &Option<PathBuf>,
+    input: &Input,
+    outdir: &Option<PathBuf>,
+    output: &Option<PathBuf>,
+    addl_plugins: Option<Vec<String>>,
+    control: &CompileController,
+) -> CompileResult {
     macro_rules! controller_entry_point {
         ($point: ident, $tsess: expr, $make_state: expr, $phase_result: expr) => {{
             let state = &mut $make_state;
@@ -106,16 +108,9 @@ macro_rules! controller_entry_point {
         };
 
         let (krate, registry) = {
-            let mut compile_state = CompileState::state_after_parse(input,
-                                                                    sess,
-                                                                    outdir,
-                                                                    output,
-                                                                    krate,
-                                                                    &cstore);
-            controller_entry_point!(after_parse,
-                                    sess,
-                                    compile_state,
-                                    Ok(()));
+            let mut compile_state =
+                CompileState::state_after_parse(input, sess, outdir, output, krate, &cstore);
+            controller_entry_point!(after_parse, sess, compile_state, Ok(()));
 
             (compile_state.krate.unwrap(), compile_state.registry)
         };
@@ -125,7 +120,13 @@ macro_rules! controller_entry_point {
             ::rustc_trans_utils::link::find_crate_name(Some(sess), &krate.attrs, input);
         install_panic_hook();
 
-        let ExpansionResult { expanded_crate, defs, analysis, resolutions, mut hir_forest } = {
+        let ExpansionResult {
+            expanded_crate,
+            defs,
+            analysis,
+            resolutions,
+            mut hir_forest,
+        } = {
             phase_2_configure_and_expand(
                 sess,
                 &cstore,
@@ -136,11 +137,17 @@ macro_rules! controller_entry_point {
                 control.make_glob_map,
                 |expanded_crate| {
                     let mut state = CompileState::state_after_expand(
-                        input, sess, outdir, output, &cstore, expanded_crate, &crate_name,
+                        input,
+                        sess,
+                        outdir,
+                        output,
+                        &cstore,
+                        expanded_crate,
+                        &crate_name,
                     );
                     controller_entry_point!(after_expand, sess, state, Ok(()));
                     Ok(())
-                }
+                },
             )?
         };
 
@@ -152,24 +159,28 @@ macro_rules! controller_entry_point {
                 if output_contains_path(&output_paths, input_path) {
                     sess.err(&format!(
                         "the input file \"{}\" would be overwritten by the generated \
-                        executable",
-                        input_path.display()));
+                         executable",
+                        input_path.display()
+                    ));
                     return Err(CompileIncomplete::Stopped);
                 }
                 if let Some(dir_path) = output_conflicts_with_dir(&output_paths) {
                     sess.err(&format!(
                         "the generated executable for the input file \"{}\" conflicts with the \
-                        existing directory \"{}\"",
-                        input_path.display(), dir_path.display()));
+                         existing directory \"{}\"",
+                        input_path.display(),
+                        dir_path.display()
+                    ));
                     return Err(CompileIncomplete::Stopped);
                 }
             }
         }
 
         write_out_deps(sess, &outputs, &output_paths);
-        if sess.opts.output_types.contains_key(&OutputType::DepInfo) &&
-            sess.opts.output_types.keys().count() == 1 {
-            return Ok(())
+        if sess.opts.output_types.contains_key(&OutputType::DepInfo)
+            && sess.opts.output_types.len() == 1
+        {
+            return Ok(());
         }
 
         if let &Some(ref dir) = outdir {
@@ -182,28 +193,32 @@ macro_rules! controller_entry_point {
         let arenas = AllArenas::new();
 
         // Construct the HIR map
-        let hir_map = time(sess,
-                           "indexing hir",
-                           || hir_map::map_crate(sess, cstore, &mut hir_forest, &defs));
+        let hir_map = time(sess, "indexing hir", || {
+            hir_map::map_crate(sess, cstore, &mut hir_forest, &defs)
+        });
 
         {
             hir_map.dep_graph.assert_ignored();
-            controller_entry_point!(after_hir_lowering,
-                                    sess,
-                                    CompileState::state_after_hir_lowering(input,
-                                                                  sess,
-                                                                  outdir,
-                                                                  output,
-                                                                  &arenas,
-                                                                  &cstore,
-                                                                  &hir_map,
-                                                                  &analysis,
-                                                                  &resolutions,
-                                                                  &expanded_crate,
-                                                                  &hir_map.krate(),
-                                                                  &outputs,
-                                                                  &crate_name),
-                                    Ok(()));
+            controller_entry_point!(
+                after_hir_lowering,
+                sess,
+                CompileState::state_after_hir_lowering(
+                    input,
+                    sess,
+                    outdir,
+                    output,
+                    &arenas,
+                    &cstore,
+                    &hir_map,
+                    &analysis,
+                    &resolutions,
+                    &expanded_crate,
+                    &hir_map.krate(),
+                    &outputs,
+                    &crate_name
+                ),
+                Ok(())
+            );
         }
 
         let opt_crate = if control.keep_ast {
@@ -213,60 +228,64 @@ macro_rules! controller_entry_point {
             None
         };
 
-        phase_3_run_analysis_passes(&*trans,
-                                    control,
-                                    sess,
-                                    cstore,
-                                    hir_map,
-                                    analysis,
-                                    resolutions,
-                                    &arenas,
-                                    &crate_name,
-                                    &outputs,
-                                    |tcx, analysis, rx, result| {
-            {
-                // Eventually, we will want to track plugins.
-                tcx.dep_graph.with_ignore(|| {
-                    let mut state = CompileState::state_after_analysis(input,
-                                                                       sess,
-                                                                       outdir,
-                                                                       output,
-                                                                       opt_crate,
-                                                                       tcx.hir.krate(),
-                                                                       &analysis,
-                                                                       tcx,
-                                                                       &crate_name);
-                    (control.after_analysis.callback)(&mut state);
-                });
-
-                if control.after_analysis.stop == Compilation::Stop {
-                    return result.and_then(|_| Err(CompileIncomplete::Stopped));
+        phase_3_run_analysis_passes(
+            &*trans,
+            control,
+            sess,
+            cstore,
+            hir_map,
+            analysis,
+            resolutions,
+            &arenas,
+            &crate_name,
+            &outputs,
+            |tcx, analysis, rx, result| {
+                {
+                    // Eventually, we will want to track plugins.
+                    tcx.dep_graph.with_ignore(|| {
+                        let mut state = CompileState::state_after_analysis(
+                            input,
+                            sess,
+                            outdir,
+                            output,
+                            opt_crate,
+                            tcx.hir.krate(),
+                            &analysis,
+                            tcx,
+                            &crate_name,
+                        );
+                        (control.after_analysis.callback)(&mut state);
+                    });
+
+                    if control.after_analysis.stop == Compilation::Stop {
+                        return result.and_then(|_| Err(CompileIncomplete::Stopped));
+                    }
                 }
-            }
 
-            result?;
+                result?;
 
-            if log_enabled!(::log::Level::Info) {
-                println!("Pre-trans");
-                tcx.print_debug_stats();
-            }
+                if log_enabled!(::log::Level::Info) {
+                    println!("Pre-trans");
+                    tcx.print_debug_stats();
+                }
 
-            let ongoing_trans = phase_4_translate_to_llvm(&*trans, tcx, rx);
+                let ongoing_trans = phase_4_translate_to_llvm(&*trans, tcx, rx);
 
-            if log_enabled!(::log::Level::Info) {
-                println!("Post-trans");
-                tcx.print_debug_stats();
-            }
+                if log_enabled!(::log::Level::Info) {
+                    println!("Post-trans");
+                    tcx.print_debug_stats();
+                }
 
-            if tcx.sess.opts.output_types.contains_key(&OutputType::Mir) {
-                if let Err(e) = mir::transform::dump_mir::emit_mir(tcx, &outputs) {
-                    sess.err(&format!("could not emit MIR: {}", e));
-                    sess.abort_if_errors();
+                if tcx.sess.opts.output_types.contains_key(&OutputType::Mir) {
+                    if let Err(e) = mir::transform::dump_mir::emit_mir(tcx, &outputs) {
+                        sess.err(&format!("could not emit MIR: {}", e));
+                        sess.abort_if_errors();
+                    }
                 }
-            }
 
-            Ok((outputs.clone(), ongoing_trans, tcx.dep_graph.clone()))
-        })??
+                Ok((outputs.clone(), ongoing_trans, tcx.dep_graph.clone()))
+            },
+        )??
     };
 
     if sess.opts.debugging_opts.print_type_sizes {
@@ -391,10 +410,7 @@ pub struct CompileState<'a, 'tcx: 'a> {
 }
 
 impl<'a, 'tcx> CompileState<'a, 'tcx> {
-    fn empty(input: &'a Input,
-             session: &'tcx Session,
-             out_dir: &'a Option<PathBuf>)
-             -> Self {
+    fn empty(input: &'a Input, session: &'tcx Session, out_dir: &'a Option<PathBuf>) -> Self {
         CompileState {
             input,
             session,
@@ -415,13 +431,14 @@ fn empty(input: &'a Input,
         }
     }
 
-    fn state_after_parse(input: &'a Input,
-                         session: &'tcx Session,
-                         out_dir: &'a Option<PathBuf>,
-                         out_file: &'a Option<PathBuf>,
-                         krate: ast::Crate,
-                         cstore: &'tcx CStore)
-                         -> Self {
+    fn state_after_parse(
+        input: &'a Input,
+        session: &'tcx Session,
+        out_dir: &'a Option<PathBuf>,
+        out_file: &'a Option<PathBuf>,
+        krate: ast::Crate,
+        cstore: &'tcx CStore,
+    ) -> Self {
         CompileState {
             // Initialize the registry before moving `krate`
             registry: Some(Registry::new(&session, krate.span)),
@@ -432,14 +449,15 @@ fn state_after_parse(input: &'a Input,
         }
     }
 
-    fn state_after_expand(input: &'a Input,
-                          session: &'tcx Session,
-                          out_dir: &'a Option<PathBuf>,
-                          out_file: &'a Option<PathBuf>,
-                          cstore: &'tcx CStore,
-                          expanded_crate: &'a ast::Crate,
-                          crate_name: &'a str)
-                          -> Self {
+    fn state_after_expand(
+        input: &'a Input,
+        session: &'tcx Session,
+        out_dir: &'a Option<PathBuf>,
+        out_file: &'a Option<PathBuf>,
+        cstore: &'tcx CStore,
+        expanded_crate: &'a ast::Crate,
+        crate_name: &'a str,
+    ) -> Self {
         CompileState {
             crate_name: Some(crate_name),
             cstore: Some(cstore),
@@ -449,20 +467,21 @@ fn state_after_expand(input: &'a Input,
         }
     }
 
-    fn state_after_hir_lowering(input: &'a Input,
-                                session: &'tcx Session,
-                                out_dir: &'a Option<PathBuf>,
-                                out_file: &'a Option<PathBuf>,
-                                arenas: &'tcx AllArenas<'tcx>,
-                                cstore: &'tcx CStore,
-                                hir_map: &'a hir_map::Map<'tcx>,
-                                analysis: &'a ty::CrateAnalysis,
-                                resolutions: &'a Resolutions,
-                                krate: &'a ast::Crate,
-                                hir_crate: &'a hir::Crate,
-                                output_filenames: &'a OutputFilenames,
-                                crate_name: &'a str)
-                                -> Self {
+    fn state_after_hir_lowering(
+        input: &'a Input,
+        session: &'tcx Session,
+        out_dir: &'a Option<PathBuf>,
+        out_file: &'a Option<PathBuf>,
+        arenas: &'tcx AllArenas<'tcx>,
+        cstore: &'tcx CStore,
+        hir_map: &'a hir_map::Map<'tcx>,
+        analysis: &'a ty::CrateAnalysis,
+        resolutions: &'a Resolutions,
+        krate: &'a ast::Crate,
+        hir_crate: &'a hir::Crate,
+        output_filenames: &'a OutputFilenames,
+        crate_name: &'a str,
+    ) -> Self {
         CompileState {
             crate_name: Some(crate_name),
             arenas: Some(arenas),
@@ -478,16 +497,17 @@ fn state_after_hir_lowering(input: &'a Input,
         }
     }
 
-    fn state_after_analysis(input: &'a Input,
-                            session: &'tcx Session,
-                            out_dir: &'a Option<PathBuf>,
-                            out_file: &'a Option<PathBuf>,
-                            krate: Option<&'a ast::Crate>,
-                            hir_crate: &'a hir::Crate,
-                            analysis: &'a ty::CrateAnalysis,
-                            tcx: TyCtxt<'a, 'tcx, 'tcx>,
-                            crate_name: &'a str)
-                            -> Self {
+    fn state_after_analysis(
+        input: &'a Input,
+        session: &'tcx Session,
+        out_dir: &'a Option<PathBuf>,
+        out_file: &'a Option<PathBuf>,
+        krate: Option<&'a ast::Crate>,
+        hir_crate: &'a hir::Crate,
+        analysis: &'a ty::CrateAnalysis,
+        tcx: TyCtxt<'a, 'tcx, 'tcx>,
+        crate_name: &'a str,
+    ) -> Self {
         CompileState {
             analysis: Some(analysis),
             tcx: Some(tcx),
@@ -499,11 +519,12 @@ fn state_after_analysis(input: &'a Input,
         }
     }
 
-    fn state_when_compilation_done(input: &'a Input,
-                                   session: &'tcx Session,
-                                   out_dir: &'a Option<PathBuf>,
-                                   out_file: &'a Option<PathBuf>)
-                                   -> Self {
+    fn state_when_compilation_done(
+        input: &'a Input,
+        session: &'tcx Session,
+        out_dir: &'a Option<PathBuf>,
+        out_file: &'a Option<PathBuf>,
+    ) -> Self {
         CompileState {
             out_file: out_file.as_ref().map(|s| &**s),
             ..CompileState::empty(input, session, out_dir)
@@ -511,27 +532,24 @@ fn state_when_compilation_done(input: &'a Input,
     }
 }
 
-pub fn phase_1_parse_input<'a>(control: &CompileController,
-                               sess: &'a Session,
-                               input: &Input)
-                               -> PResult<'a, ast::Crate> {
-    sess.diagnostic().set_continue_after_error(control.continue_parse_after_error);
+pub fn phase_1_parse_input<'a>(
+    control: &CompileController,
+    sess: &'a Session,
+    input: &Input,
+) -> PResult<'a, ast::Crate> {
+    sess.diagnostic()
+        .set_continue_after_error(control.continue_parse_after_error);
 
     if sess.profile_queries() {
         profile::begin(sess);
     }
 
-    let krate = time(sess, "parsing", || {
-        match *input {
-            Input::File(ref file) => {
-                parse::parse_crate_from_file(file, &sess.parse_sess)
-            }
-            Input::Str { ref input, ref name } => {
-                parse::parse_crate_from_source_str(name.clone(),
-                                                   input.clone(),
-                                                   &sess.parse_sess)
-            }
-        }
+    let krate = time(sess, "parsing", || match *input {
+        Input::File(ref file) => parse::parse_crate_from_file(file, &sess.parse_sess),
+        Input::Str {
+            ref input,
+            ref name,
+        } => parse::parse_crate_from_source_str(name.clone(), input.clone(), &sess.parse_sess),
     })?;
 
     sess.diagnostic().set_continue_after_error(true);
@@ -541,7 +559,10 @@ pub fn phase_1_parse_input<'a>(control: &CompileController,
     }
 
     if sess.opts.debugging_opts.input_stats {
-        println!("Lines of code:             {}", sess.codemap().count_lines());
+        println!(
+            "Lines of code:             {}",
+            sess.codemap().count_lines()
+        );
         println!("Pre-expansion node count:  {}", count_nodes(&krate));
     }
 
@@ -586,16 +607,19 @@ pub struct InnerExpansionResult<'a> {
 /// standard library and prelude, and name resolution.
 ///
 /// Returns `None` if we're aborting after handling -W help.
-pub fn phase_2_configure_and_expand<F>(sess: &Session,
-                                       cstore: &CStore,
-                                       krate: ast::Crate,
-                                       registry: Option<Registry>,
-                                       crate_name: &str,
-                                       addl_plugins: Option<Vec<String>>,
-                                       make_glob_map: MakeGlobMap,
-                                       after_expand: F)
-                                       -> Result<ExpansionResult, CompileIncomplete>
-    where F: FnOnce(&ast::Crate) -> CompileResult {
+pub fn phase_2_configure_and_expand<F>(
+    sess: &Session,
+    cstore: &CStore,
+    krate: ast::Crate,
+    registry: Option<Registry>,
+    crate_name: &str,
+    addl_plugins: Option<Vec<String>>,
+    make_glob_map: MakeGlobMap,
+    after_expand: F,
+) -> Result<ExpansionResult, CompileIncomplete>
+where
+    F: FnOnce(&ast::Crate) -> CompileResult,
+{
     // Currently, we ignore the name resolution data structures for the purposes of dependency
     // tracking. Instead we will run name resolution and include its output in the hash of each
     // item, much like we do for macro expansion. In other words, the hash reflects not just
@@ -603,52 +627,72 @@ pub fn phase_2_configure_and_expand<F>(sess: &Session,
     // this back at some point.
     let mut crate_loader = CrateLoader::new(sess, &cstore, &crate_name);
     let resolver_arenas = Resolver::arenas();
-    let result = phase_2_configure_and_expand_inner(sess, cstore, krate, registry, crate_name,
-                                                    addl_plugins, make_glob_map, &resolver_arenas,
-                                                    &mut crate_loader, after_expand);
+    let result = phase_2_configure_and_expand_inner(
+        sess,
+        cstore,
+        krate,
+        registry,
+        crate_name,
+        addl_plugins,
+        make_glob_map,
+        &resolver_arenas,
+        &mut crate_loader,
+        after_expand,
+    );
     match result {
-        Ok(InnerExpansionResult {expanded_crate, resolver, hir_forest}) => {
-            Ok(ExpansionResult {
-                expanded_crate,
-                defs: resolver.definitions,
-                hir_forest,
-                resolutions: Resolutions {
-                    freevars: resolver.freevars,
-                    export_map: resolver.export_map,
-                    trait_map: resolver.trait_map,
-                    maybe_unused_trait_imports: resolver.maybe_unused_trait_imports,
-                    maybe_unused_extern_crates: resolver.maybe_unused_extern_crates,
-                },
-
-                analysis: ty::CrateAnalysis {
-                    access_levels: Lrc::new(AccessLevels::default()),
-                    name: crate_name.to_string(),
-                    glob_map: if resolver.make_glob_map { Some(resolver.glob_map) } else { None },
+        Ok(InnerExpansionResult {
+            expanded_crate,
+            resolver,
+            hir_forest,
+        }) => Ok(ExpansionResult {
+            expanded_crate,
+            defs: resolver.definitions,
+            hir_forest,
+            resolutions: Resolutions {
+                freevars: resolver.freevars,
+                export_map: resolver.export_map,
+                trait_map: resolver.trait_map,
+                maybe_unused_trait_imports: resolver.maybe_unused_trait_imports,
+                maybe_unused_extern_crates: resolver.maybe_unused_extern_crates,
+            },
+
+            analysis: ty::CrateAnalysis {
+                access_levels: Lrc::new(AccessLevels::default()),
+                name: crate_name.to_string(),
+                glob_map: if resolver.make_glob_map {
+                    Some(resolver.glob_map)
+                } else {
+                    None
                 },
-            })
-        }
-        Err(x) => Err(x)
+            },
+        }),
+        Err(x) => Err(x),
     }
 }
 
 /// Same as phase_2_configure_and_expand, but doesn't let you keep the resolver
 /// around
-pub fn phase_2_configure_and_expand_inner<'a, F>(sess: &'a Session,
-                                       cstore: &'a CStore,
-                                       krate: ast::Crate,
-                                       registry: Option<Registry>,
-                                       crate_name: &str,
-                                       addl_plugins: Option<Vec<String>>,
-                                       make_glob_map: MakeGlobMap,
-                                       resolver_arenas: &'a ResolverArenas<'a>,
-                                       crate_loader: &'a mut CrateLoader,
-                                       after_expand: F)
-                                       -> Result<InnerExpansionResult<'a>, CompileIncomplete>
-    where F: FnOnce(&ast::Crate) -> CompileResult,
+pub fn phase_2_configure_and_expand_inner<'a, F>(
+    sess: &'a Session,
+    cstore: &'a CStore,
+    krate: ast::Crate,
+    registry: Option<Registry>,
+    crate_name: &str,
+    addl_plugins: Option<Vec<String>>,
+    make_glob_map: MakeGlobMap,
+    resolver_arenas: &'a ResolverArenas<'a>,
+    crate_loader: &'a mut CrateLoader,
+    after_expand: F,
+) -> Result<InnerExpansionResult<'a>, CompileIncomplete>
+where
+    F: FnOnce(&ast::Crate) -> CompileResult,
 {
-    let (mut krate, features) = syntax::config::features(krate, &sess.parse_sess,
-                                                         sess.opts.test,
-                                                         sess.opts.debugging_opts.edition);
+    let (mut krate, features) = syntax::config::features(
+        krate,
+        &sess.parse_sess,
+        sess.opts.test,
+        sess.opts.debugging_opts.edition,
+    );
     // these need to be set "early" so that expansion sees `quote` if enabled.
     sess.init_features(features);
 
@@ -657,17 +701,16 @@ pub fn phase_2_configure_and_expand_inner<'a, F>(sess: &'a Session,
 
     let disambiguator = compute_crate_disambiguator(sess);
     sess.crate_disambiguator.set(disambiguator);
-    rustc_incremental::prepare_session_directory(
-        sess,
-        &crate_name,
-        disambiguator,
-    );
+    rustc_incremental::prepare_session_directory(sess, &crate_name, disambiguator);
 
     if sess.opts.incremental.is_some() {
         time(sess, "garbage collect incremental cache directory", || {
             if let Err(e) = rustc_incremental::garbage_collect_session_directories(sess) {
-                warn!("Error while trying to garbage collect incremental \
-                       compilation cache directory: {}", e);
+                warn!(
+                    "Error while trying to garbage collect incremental \
+                     compilation cache directory: {}",
+                    e
+                );
             }
         });
     }
@@ -690,23 +733,31 @@ pub fn phase_2_configure_and_expand_inner<'a, F>(sess: &'a Session,
 
     let mut addl_plugins = Some(addl_plugins);
     let registrars = time(sess, "plugin loading", || {
-        plugin::load::load_plugins(sess,
-                                   &cstore,
-                                   &krate,
-                                   crate_name,
-                                   addl_plugins.take().unwrap())
+        plugin::load::load_plugins(
+            sess,
+            &cstore,
+            &krate,
+            crate_name,
+            addl_plugins.take().unwrap(),
+        )
     });
 
     let mut registry = registry.unwrap_or(Registry::new(sess, krate.span));
 
     time(sess, "plugin registration", || {
         if sess.features_untracked().rustc_diagnostic_macros {
-            registry.register_macro("__diagnostic_used",
-                                    diagnostics::plugin::expand_diagnostic_used);
-            registry.register_macro("__register_diagnostic",
-                                    diagnostics::plugin::expand_register_diagnostic);
-            registry.register_macro("__build_diagnostic_array",
-                                    diagnostics::plugin::expand_build_diagnostic_array);
+            registry.register_macro(
+                "__diagnostic_used",
+                diagnostics::plugin::expand_diagnostic_used,
+            );
+            registry.register_macro(
+                "__register_diagnostic",
+                diagnostics::plugin::expand_register_diagnostic,
+            );
+            registry.register_macro(
+                "__build_diagnostic_array",
+                diagnostics::plugin::expand_build_diagnostic_array,
+            );
         }
 
         for registrar in registrars {
@@ -716,8 +767,15 @@ pub fn phase_2_configure_and_expand_inner<'a, F>(sess: &'a Session,
     });
 
     let whitelisted_legacy_custom_derives = registry.take_whitelisted_custom_derives();
-    let Registry { syntax_exts, early_lint_passes, late_lint_passes, lint_groups,
-                   llvm_passes, attributes, .. } = registry;
+    let Registry {
+        syntax_exts,
+        early_lint_passes,
+        late_lint_passes,
+        lint_groups,
+        llvm_passes,
+        attributes,
+        ..
+    } = registry;
 
     sess.track_errors(|| {
         let mut ls = sess.lint_store.borrow_mut();
@@ -742,16 +800,19 @@ pub fn phase_2_configure_and_expand_inner<'a, F>(sess: &'a Session,
         return Err(CompileIncomplete::Stopped);
     }
 
-    let mut resolver = Resolver::new(sess,
-                                     cstore,
-                                     &krate,
-                                     crate_name,
-                                     make_glob_map,
-                                     crate_loader,
-                                     &resolver_arenas);
+    let mut resolver = Resolver::new(
+        sess,
+        cstore,
+        &krate,
+        crate_name,
+        make_glob_map,
+        crate_loader,
+        &resolver_arenas,
+    );
     resolver.whitelisted_legacy_custom_derives = whitelisted_legacy_custom_derives;
     syntax_ext::register_builtins(&mut resolver, syntax_exts, sess.features_untracked().quote);
 
+    // Expand all macros
     krate = time(sess, "expansion", || {
         // Windows dlls do not have rpaths, so they don't know how to find their
         // dependencies. It's up to us to tell the system where to find all the
@@ -769,18 +830,23 @@ pub fn phase_2_configure_and_expand_inner<'a, F>(sess: &'a Session,
         let mut old_path = OsString::new();
         if cfg!(windows) {
             old_path = env::var_os("PATH").unwrap_or(old_path);
-            let mut new_path = sess.host_filesearch(PathKind::All)
-                                   .get_dylib_search_paths();
+            let mut new_path = sess.host_filesearch(PathKind::All).get_dylib_search_paths();
             for path in env::split_paths(&old_path) {
                 if !new_path.contains(&path) {
                     new_path.push(path);
                 }
             }
-            env::set_var("PATH",
-                &env::join_paths(new_path.iter()
-                                         .filter(|p| env::join_paths(iter::once(p)).is_ok()))
-                     .unwrap());
+            env::set_var(
+                "PATH",
+                &env::join_paths(
+                    new_path
+                        .iter()
+                        .filter(|p| env::join_paths(iter::once(p)).is_ok()),
+                ).unwrap(),
+            );
         }
+
+        // Create the config for macro expansion
         let features = sess.features_untracked();
         let cfg = syntax::ext::expand::ExpansionConfig {
             features: Some(&features),
@@ -793,12 +859,23 @@ pub fn phase_2_configure_and_expand_inner<'a, F>(sess: &'a Session,
         let mut ecx = ExtCtxt::new(&sess.parse_sess, cfg, &mut resolver);
         let err_count = ecx.parse_sess.span_diagnostic.err_count();
 
-        let krate = ecx.monotonic_expander().expand_crate(krate);
+        // Expand macros now!
+        let krate = time(sess, "expand crate", || {
+            ecx.monotonic_expander().expand_crate(krate)
+        });
 
-        ecx.check_unused_macros();
+        // The rest is error reporting
 
-        let mut missing_fragment_specifiers: Vec<_> =
-            ecx.parse_sess.missing_fragment_specifiers.borrow().iter().cloned().collect();
+        time(sess, "check unused macros", || {
+            ecx.check_unused_macros();
+        });
+
+        let mut missing_fragment_specifiers: Vec<_> = ecx.parse_sess
+            .missing_fragment_specifiers
+            .borrow()
+            .iter()
+            .cloned()
+            .collect();
         missing_fragment_specifiers.sort();
         for span in missing_fragment_specifiers {
             let lint = lint::builtin::MISSING_FRAGMENT_SPECIFIER;
@@ -815,12 +892,14 @@ pub fn phase_2_configure_and_expand_inner<'a, F>(sess: &'a Session,
     });
 
     krate = time(sess, "maybe building test harness", || {
-        syntax::test::modify_for_testing(&sess.parse_sess,
-                                         &mut resolver,
-                                         sess.opts.test,
-                                         krate,
-                                         sess.diagnostic(),
-                                         &sess.features_untracked())
+        syntax::test::modify_for_testing(
+            &sess.parse_sess,
+            &mut resolver,
+            sess.opts.test,
+            krate,
+            sess.diagnostic(),
+            &sess.features_untracked(),
+        )
     });
 
     // If we're actually rustdoc then there's no need to actually compile
@@ -838,21 +917,20 @@ pub fn phase_2_configure_and_expand_inner<'a, F>(sess: &'a Session,
             let num_crate_types = crate_types.len();
             let is_proc_macro_crate = crate_types.contains(&config::CrateTypeProcMacro);
             let is_test_crate = sess.opts.test;
-            syntax_ext::proc_macro_registrar::modify(&sess.parse_sess,
-                                                     &mut resolver,
-                                                     krate,
-                                                     is_proc_macro_crate,
-                                                     is_test_crate,
-                                                     num_crate_types,
-                                                     sess.diagnostic())
+            syntax_ext::proc_macro_registrar::modify(
+                &sess.parse_sess,
+                &mut resolver,
+                krate,
+                is_proc_macro_crate,
+                is_test_crate,
+                num_crate_types,
+                sess.diagnostic(),
+            )
         });
     }
 
     krate = time(sess, "creating allocators", || {
-        allocator::expand::modify(&sess.parse_sess,
-                                  &mut resolver,
-                                  krate,
-                                  sess.diagnostic())
+        allocator::expand::modify(&sess.parse_sess, &mut resolver, krate, sess.diagnostic())
     });
 
     after_expand(&krate)?;
@@ -869,9 +947,9 @@ pub fn phase_2_configure_and_expand_inner<'a, F>(sess: &'a Session,
         println!("{}", json::as_json(&krate));
     }
 
-    time(sess,
-         "AST validation",
-         || ast_validation::check_crate(sess, &krate));
+    time(sess, "AST validation", || {
+        ast_validation::check_crate(sess, &krate)
+    });
 
     time(sess, "name resolution", || -> CompileResult {
         resolver.resolve_crate(&krate);
@@ -881,11 +959,13 @@ pub fn phase_2_configure_and_expand_inner<'a, F>(sess: &'a Session,
     // Needs to go *after* expansion to be able to check the results of macro expansion.
     time(sess, "complete gated feature checking", || {
         sess.track_errors(|| {
-            syntax::feature_gate::check_crate(&krate,
-                                              &sess.parse_sess,
-                                              &sess.features_untracked(),
-                                              &attributes,
-                                              sess.opts.unstable_features);
+            syntax::feature_gate::check_crate(
+                &krate,
+                &sess.parse_sess,
+                &sess.features_untracked(),
+                &attributes,
+                sess.opts.unstable_features,
+            );
         })
     })?;
 
@@ -901,11 +981,12 @@ pub fn phase_2_configure_and_expand_inner<'a, F>(sess: &'a Session,
         None => DepGraph::new_disabled(),
         Some(future) => {
             let prev_graph = time(sess, "blocked while dep-graph loading finishes", || {
-                future.open()
-                      .unwrap_or_else(|e| rustc_incremental::LoadResult::Error {
-                          message: format!("could not decode incremental cache: {:?}", e)
-                      })
-                      .open(sess)
+                future
+                    .open()
+                    .unwrap_or_else(|e| rustc_incremental::LoadResult::Error {
+                        message: format!("could not decode incremental cache: {:?}", e),
+                    })
+                    .open(sess)
             });
             DepGraph::new(prev_graph)
         }
@@ -920,9 +1001,9 @@ pub fn phase_2_configure_and_expand_inner<'a, F>(sess: &'a Session,
         hir_map::Forest::new(hir_crate, &dep_graph)
     });
 
-    time(sess,
-         "early lint checks",
-         || lint::check_ast_crate(sess, &krate));
+    time(sess, "early lint checks", || {
+        lint::check_ast_crate(sess, &krate)
+    });
 
     // Discard hygiene data, which isn't required after lowering to HIR.
     if !sess.opts.debugging_opts.keep_hygiene_data {
@@ -961,39 +1042,43 @@ pub fn default_provide_extern(providers: &mut ty::maps::Providers) {
 /// Run the resolution, typechecking, region checking and other
 /// miscellaneous analysis passes on the crate. Return various
 /// structures carrying the results of the analysis.
-pub fn phase_3_run_analysis_passes<'tcx, F, R>(trans: &TransCrate,
-                                               control: &CompileController,
-                                               sess: &'tcx Session,
-                                               cstore: &'tcx CrateStore,
-                                               hir_map: hir_map::Map<'tcx>,
-                                               mut analysis: ty::CrateAnalysis,
-                                               resolutions: Resolutions,
-                                               arenas: &'tcx AllArenas<'tcx>,
-                                               name: &str,
-                                               output_filenames: &OutputFilenames,
-                                               f: F)
-                                               -> Result<R, CompileIncomplete>
-    where F: for<'a> FnOnce(TyCtxt<'a, 'tcx, 'tcx>,
-                            ty::CrateAnalysis,
-                            mpsc::Receiver<Box<Any + Send>>,
-                            CompileResult) -> R
+pub fn phase_3_run_analysis_passes<'tcx, F, R>(
+    trans: &TransCrate,
+    control: &CompileController,
+    sess: &'tcx Session,
+    cstore: &'tcx CrateStore,
+    hir_map: hir_map::Map<'tcx>,
+    mut analysis: ty::CrateAnalysis,
+    resolutions: Resolutions,
+    arenas: &'tcx AllArenas<'tcx>,
+    name: &str,
+    output_filenames: &OutputFilenames,
+    f: F,
+) -> Result<R, CompileIncomplete>
+where
+    F: for<'a> FnOnce(
+        TyCtxt<'a, 'tcx, 'tcx>,
+        ty::CrateAnalysis,
+        mpsc::Receiver<Box<Any + Send>>,
+        CompileResult,
+    ) -> R,
 {
-    let query_result_on_disk_cache = time(sess,
-        "load query result cache",
-        || rustc_incremental::load_query_result_cache(sess));
+    let query_result_on_disk_cache = time(sess, "load query result cache", || {
+        rustc_incremental::load_query_result_cache(sess)
+    });
 
-    time(sess,
-         "looking for entry point",
-         || middle::entry::find_entry_point(sess, &hir_map, name));
+    time(sess, "looking for entry point", || {
+        middle::entry::find_entry_point(sess, &hir_map, name)
+    });
 
-    sess.plugin_registrar_fn.set(time(sess, "looking for plugin registrar", || {
-        plugin::build::find_plugin_registrar(sess.diagnostic(), &hir_map)
-    }));
-    sess.derive_registrar_fn.set(derive_registrar::find(&hir_map));
+    sess.plugin_registrar_fn
+        .set(time(sess, "looking for plugin registrar", || {
+            plugin::build::find_plugin_registrar(sess.diagnostic(), &hir_map)
+        }));
+    sess.derive_registrar_fn
+        .set(derive_registrar::find(&hir_map));
 
-    time(sess,
-         "loop checking",
-         || loops::check_crate(sess, &hir_map));
+    time(sess, "loop checking", || loops::check_crate(sess, &hir_map));
 
     let mut local_providers = ty::maps::Providers::default();
     default_provide(&mut local_providers);
@@ -1007,115 +1092,113 @@ pub fn phase_3_run_analysis_passes<'tcx, F, R>(trans: &TransCrate,
 
     let (tx, rx) = mpsc::channel();
 
-    TyCtxt::create_and_enter(sess,
-                             cstore,
-                             local_providers,
-                             extern_providers,
-                             arenas,
-                             resolutions,
-                             hir_map,
-                             query_result_on_disk_cache,
-                             name,
-                             tx,
-                             output_filenames,
-                             |tcx| {
-        // Do some initialization of the DepGraph that can only be done with the
-        // tcx available.
-        rustc_incremental::dep_graph_tcx_init(tcx);
-
-        time(sess, "attribute checking", || {
-            hir::check_attr::check_crate(tcx)
-        });
+    TyCtxt::create_and_enter(
+        sess,
+        cstore,
+        local_providers,
+        extern_providers,
+        arenas,
+        resolutions,
+        hir_map,
+        query_result_on_disk_cache,
+        name,
+        tx,
+        output_filenames,
+        |tcx| {
+            // Do some initialization of the DepGraph that can only be done with the
+            // tcx available.
+            rustc_incremental::dep_graph_tcx_init(tcx);
+
+            time(sess, "attribute checking", || {
+                hir::check_attr::check_crate(tcx)
+            });
 
-        time(sess,
-             "stability checking",
-             || stability::check_unstable_api_usage(tcx));
+            time(sess, "stability checking", || {
+                stability::check_unstable_api_usage(tcx)
+            });
 
-        // passes are timed inside typeck
-        match typeck::check_crate(tcx) {
-            Ok(x) => x,
-            Err(x) => {
-                f(tcx, analysis, rx, Err(x));
-                return Err(x);
+            // passes are timed inside typeck
+            match typeck::check_crate(tcx) {
+                Ok(x) => x,
+                Err(x) => {
+                    f(tcx, analysis, rx, Err(x));
+                    return Err(x);
+                }
             }
-        }
 
-        time(sess,
-             "rvalue promotion",
-             || rvalue_promotion::check_crate(tcx));
+            time(sess, "rvalue promotion", || {
+                rvalue_promotion::check_crate(tcx)
+            });
 
-        analysis.access_levels =
-            time(sess, "privacy checking", || rustc_privacy::check_crate(tcx));
+            analysis.access_levels =
+                time(sess, "privacy checking", || rustc_privacy::check_crate(tcx));
 
-        time(sess,
-             "intrinsic checking",
-             || middle::intrinsicck::check_crate(tcx));
+            time(sess, "intrinsic checking", || {
+                middle::intrinsicck::check_crate(tcx)
+            });
 
-        time(sess,
-             "match checking",
-             || mir::matchck_crate(tcx));
+            time(sess, "match checking", || mir::matchck_crate(tcx));
 
-        // this must run before MIR dump, because
-        // "not all control paths return a value" is reported here.
-        //
-        // maybe move the check to a MIR pass?
-        time(sess,
-             "liveness checking",
-             || middle::liveness::check_crate(tcx));
-
-        time(sess,
-             "borrow checking",
-             || borrowck::check_crate(tcx));
-
-        time(sess,
-             "MIR borrow checking",
-             || for def_id in tcx.body_owners() { tcx.mir_borrowck(def_id); });
-
-        time(sess,
-             "MIR effect checking",
-             || for def_id in tcx.body_owners() {
-                 mir::transform::check_unsafety::check_unsafety(tcx, def_id)
-             });
-        // Avoid overwhelming user with errors if type checking failed.
-        // I'm not sure how helpful this is, to be honest, but it avoids
-        // a
-        // lot of annoying errors in the compile-fail tests (basically,
-        // lint warnings and so on -- kindck used to do this abort, but
-        // kindck is gone now). -nmatsakis
-        if sess.err_count() > 0 {
-            return Ok(f(tcx, analysis, rx, sess.compile_status()));
-        }
+            // this must run before MIR dump, because
+            // "not all control paths return a value" is reported here.
+            //
+            // maybe move the check to a MIR pass?
+            time(sess, "liveness checking", || {
+                middle::liveness::check_crate(tcx)
+            });
 
-        time(sess, "death checking", || middle::dead::check_crate(tcx));
+            time(sess, "borrow checking", || borrowck::check_crate(tcx));
 
-        time(sess, "unused lib feature checking", || {
-            stability::check_unused_or_stable_features(tcx)
-        });
+            time(sess, "MIR borrow checking", || {
+                for def_id in tcx.body_owners() {
+                    tcx.mir_borrowck(def_id);
+                }
+            });
 
-        time(sess, "lint checking", || lint::check_crate(tcx));
+            time(sess, "MIR effect checking", || {
+                for def_id in tcx.body_owners() {
+                    mir::transform::check_unsafety::check_unsafety(tcx, def_id)
+                }
+            });
+            // Avoid overwhelming user with errors if type checking failed.
+            // I'm not sure how helpful this is, to be honest, but it avoids
+            // a
+            // lot of annoying errors in the compile-fail tests (basically,
+            // lint warnings and so on -- kindck used to do this abort, but
+            // kindck is gone now). -nmatsakis
+            if sess.err_count() > 0 {
+                return Ok(f(tcx, analysis, rx, sess.compile_status()));
+            }
 
-        time(sess,
-             "dumping chalk-like clauses",
-             || rustc_traits::lowering::dump_program_clauses(tcx));
+            time(sess, "death checking", || middle::dead::check_crate(tcx));
 
-        return Ok(f(tcx, analysis, rx, tcx.sess.compile_status()));
-    })
+            time(sess, "unused lib feature checking", || {
+                stability::check_unused_or_stable_features(tcx)
+            });
+
+            time(sess, "lint checking", || lint::check_crate(tcx));
+
+            time(sess, "dumping chalk-like clauses", || {
+                rustc_traits::lowering::dump_program_clauses(tcx)
+            });
+
+            return Ok(f(tcx, analysis, rx, tcx.sess.compile_status()));
+        },
+    )
 }
 
 /// Run the translation phase to LLVM, after which the AST and analysis can
 /// be discarded.
-pub fn phase_4_translate_to_llvm<'a, 'tcx>(trans: &TransCrate,
-                                           tcx: TyCtxt<'a, 'tcx, 'tcx>,
-                                           rx: mpsc::Receiver<Box<Any + Send>>)
-                                           -> Box<Any> {
-    time(tcx.sess,
-         "resolving dependency formats",
-         || ::rustc::middle::dependency_format::calculate(tcx));
-
-    let translation =
-        time(tcx.sess, "translation", move || {
-            trans.trans_crate(tcx, rx)
-        });
+pub fn phase_4_translate_to_llvm<'a, 'tcx>(
+    trans: &TransCrate,
+    tcx: TyCtxt<'a, 'tcx, 'tcx>,
+    rx: mpsc::Receiver<Box<Any + Send>>,
+) -> Box<Any> {
+    time(tcx.sess, "resolving dependency formats", || {
+        ::rustc::middle::dependency_format::calculate(tcx)
+    });
+
+    let translation = time(tcx.sess, "translation", move || trans.trans_crate(tcx, rx));
     if tcx.sess.profile_queries() {
         profile::dump(&tcx.sess, "profile_queries".to_string())
     }
@@ -1130,27 +1213,27 @@ fn escape_dep_filename(filename: &FileName) -> String {
 }
 
 // Returns all the paths that correspond to generated files.
-fn generated_output_paths(sess: &Session,
-                          outputs: &OutputFilenames,
-                          exact_name: bool,
-                          crate_name: &str) -> Vec<PathBuf> {
+fn generated_output_paths(
+    sess: &Session,
+    outputs: &OutputFilenames,
+    exact_name: bool,
+    crate_name: &str,
+) -> Vec<PathBuf> {
     let mut out_filenames = Vec::new();
     for output_type in sess.opts.output_types.keys() {
         let file = outputs.path(*output_type);
         match *output_type {
             // If the filename has been overridden using `-o`, it will not be modified
             // by appending `.rlib`, `.exe`, etc., so we can skip this transformation.
-            OutputType::Exe if !exact_name => {
-                for crate_type in sess.crate_types.borrow().iter() {
-                    let p = ::rustc_trans_utils::link::filename_for_input(
-                        sess,
-                        *crate_type,
-                        crate_name,
-                        outputs
-                    );
-                    out_filenames.push(p);
-                }
-            }
+            OutputType::Exe if !exact_name => for crate_type in sess.crate_types.borrow().iter() {
+                let p = ::rustc_trans_utils::link::filename_for_input(
+                    sess,
+                    *crate_type,
+                    crate_name,
+                    outputs,
+                );
+                out_filenames.push(p);
+            },
             OutputType::DepInfo if sess.opts.debugging_opts.dep_info_omit_d_target => {
                 // Don't add the dep-info output when omitting it from dep-info targets
             }
@@ -1165,24 +1248,28 @@ fn generated_output_paths(sess: &Session,
 // Runs `f` on every output file path and returns the first non-None result, or None if `f`
 // returns None for every file path.
 fn check_output<F, T>(output_paths: &Vec<PathBuf>, f: F) -> Option<T>
-        where F: Fn(&PathBuf) -> Option<T> {
-            for output_path in output_paths {
-                if let Some(result) = f(output_path) {
-                    return Some(result);
-                }
-            }
-            None
+where
+    F: Fn(&PathBuf) -> Option<T>,
+{
+    for output_path in output_paths {
+        if let Some(result) = f(output_path) {
+            return Some(result);
+        }
+    }
+    None
 }
 
 pub fn output_contains_path(output_paths: &Vec<PathBuf>, input_path: &PathBuf) -> bool {
     let input_path = input_path.canonicalize().ok();
     if input_path.is_none() {
-        return false
+        return false;
     }
     let check = |output_path: &PathBuf| {
         if output_path.canonicalize().ok() == input_path {
             Some(())
-        } else { None }
+        } else {
+            None
+        }
     };
     check_output(output_paths, check).is_some()
 }
@@ -1191,100 +1278,92 @@ pub fn output_conflicts_with_dir(output_paths: &Vec<PathBuf>) -> Option<PathBuf>
     let check = |output_path: &PathBuf| {
         if output_path.is_dir() {
             Some(output_path.clone())
-        } else { None }
+        } else {
+            None
+        }
     };
     check_output(output_paths, check)
 }
 
-fn write_out_deps(sess: &Session,
-                  outputs: &OutputFilenames,
-                  out_filenames: &Vec<PathBuf>) {
+fn write_out_deps(sess: &Session, outputs: &OutputFilenames, out_filenames: &Vec<PathBuf>) {
     // Write out dependency rules to the dep-info file if requested
     if !sess.opts.output_types.contains_key(&OutputType::DepInfo) {
         return;
     }
     let deps_filename = outputs.path(OutputType::DepInfo);
 
-    let result =
-        (|| -> io::Result<()> {
-            // Build a list of files used to compile the output and
-            // write Makefile-compatible dependency rules
-            let files: Vec<String> = sess.codemap()
-                                         .files()
-                                         .iter()
-                                         .filter(|fmap| fmap.is_real_file())
-                                         .filter(|fmap| !fmap.is_imported())
-                                         .map(|fmap| escape_dep_filename(&fmap.name))
-                                         .collect();
-            let mut file = fs::File::create(&deps_filename)?;
-            for path in out_filenames {
-                write!(file, "{}: {}\n\n", path.display(), files.join(" "))?;
-            }
+    let result = (|| -> io::Result<()> {
+        // Build a list of files used to compile the output and
+        // write Makefile-compatible dependency rules
+        let files: Vec<String> = sess.codemap()
+            .files()
+            .iter()
+            .filter(|fmap| fmap.is_real_file())
+            .filter(|fmap| !fmap.is_imported())
+            .map(|fmap| escape_dep_filename(&fmap.name))
+            .collect();
+        let mut file = fs::File::create(&deps_filename)?;
+        for path in out_filenames {
+            write!(file, "{}: {}\n\n", path.display(), files.join(" "))?;
+        }
 
-            // Emit a fake target for each input file to the compilation. This
-            // prevents `make` from spitting out an error if a file is later
-            // deleted. For more info see #28735
-            for path in files {
-                writeln!(file, "{}:", path)?;
-            }
-            Ok(())
-        })();
+        // Emit a fake target for each input file to the compilation. This
+        // prevents `make` from spitting out an error if a file is later
+        // deleted. For more info see #28735
+        for path in files {
+            writeln!(file, "{}:", path)?;
+        }
+        Ok(())
+    })();
 
     match result {
         Ok(()) => {}
         Err(e) => {
-            sess.fatal(&format!("error writing dependencies to `{}`: {}",
-                                deps_filename.display(),
-                                e));
+            sess.fatal(&format!(
+                "error writing dependencies to `{}`: {}",
+                deps_filename.display(),
+                e
+            ));
         }
     }
 }
 
 pub fn collect_crate_types(session: &Session, attrs: &[ast::Attribute]) -> Vec<config::CrateType> {
     // Unconditionally collect crate types from attributes to make them used
-    let attr_types: Vec<config::CrateType> =
-        attrs.iter()
-             .filter_map(|a| {
-                 if a.check_name("crate_type") {
-                     match a.value_str() {
-                         Some(ref n) if *n == "rlib" => {
-                             Some(config::CrateTypeRlib)
-                         }
-                         Some(ref n) if *n == "dylib" => {
-                             Some(config::CrateTypeDylib)
-                         }
-                         Some(ref n) if *n == "cdylib" => {
-                             Some(config::CrateTypeCdylib)
-                         }
-                         Some(ref n) if *n == "lib" => {
-                             Some(config::default_lib_output())
-                         }
-                         Some(ref n) if *n == "staticlib" => {
-                             Some(config::CrateTypeStaticlib)
-                         }
-                         Some(ref n) if *n == "proc-macro" => {
-                             Some(config::CrateTypeProcMacro)
-                         }
-                         Some(ref n) if *n == "bin" => Some(config::CrateTypeExecutable),
-                         Some(_) => {
-                             session.buffer_lint(lint::builtin::UNKNOWN_CRATE_TYPES,
-                                                 ast::CRATE_NODE_ID,
-                                                 a.span,
-                                                 "invalid `crate_type` value");
-                             None
-                         }
-                         _ => {
-                             session.struct_span_err(a.span, "`crate_type` requires a value")
-                                 .note("for example: `#![crate_type=\"lib\"]`")
-                                 .emit();
-                             None
-                         }
-                     }
-                 } else {
-                     None
-                 }
-             })
-             .collect();
+    let attr_types: Vec<config::CrateType> = attrs
+        .iter()
+        .filter_map(|a| {
+            if a.check_name("crate_type") {
+                match a.value_str() {
+                    Some(ref n) if *n == "rlib" => Some(config::CrateTypeRlib),
+                    Some(ref n) if *n == "dylib" => Some(config::CrateTypeDylib),
+                    Some(ref n) if *n == "cdylib" => Some(config::CrateTypeCdylib),
+                    Some(ref n) if *n == "lib" => Some(config::default_lib_output()),
+                    Some(ref n) if *n == "staticlib" => Some(config::CrateTypeStaticlib),
+                    Some(ref n) if *n == "proc-macro" => Some(config::CrateTypeProcMacro),
+                    Some(ref n) if *n == "bin" => Some(config::CrateTypeExecutable),
+                    Some(_) => {
+                        session.buffer_lint(
+                            lint::builtin::UNKNOWN_CRATE_TYPES,
+                            ast::CRATE_NODE_ID,
+                            a.span,
+                            "invalid `crate_type` value",
+                        );
+                        None
+                    }
+                    _ => {
+                        session
+                            .struct_span_err(a.span, "`crate_type` requires a value")
+                            .note("for example: `#![crate_type=\"lib\"]`")
+                            .emit();
+                        None
+                    }
+                }
+            } else {
+                None
+            }
+        })
+        .collect();
 
     // If we're generating a test executable, then ignore all other output
     // styles at all other locations
@@ -1299,7 +1378,9 @@ pub fn collect_crate_types(session: &Session, attrs: &[ast::Attribute]) -> Vec<c
     if base.is_empty() {
         base.extend(attr_types);
         if base.is_empty() {
-            base.push(::rustc_trans_utils::link::default_output_for_target(session));
+            base.push(::rustc_trans_utils::link::default_output_for_target(
+                session,
+            ));
         }
         base.sort();
         base.dedup();
@@ -1310,9 +1391,10 @@ pub fn collect_crate_types(session: &Session, attrs: &[ast::Attribute]) -> Vec<c
             let res = !::rustc_trans_utils::link::invalid_output_for_target(session, *crate_type);
 
             if !res {
-                session.warn(&format!("dropping unsupported crate type `{}` for target `{}`",
-                                      *crate_type,
-                                      session.opts.target_triple));
+                session.warn(&format!(
+                    "dropping unsupported crate type `{}` for target `{}`",
+                    *crate_type, session.opts.target_triple
+                ));
             }
 
             res
@@ -1347,19 +1429,22 @@ pub fn compute_crate_disambiguator(session: &Session) -> CrateDisambiguator {
 
     // Also incorporate crate type, so that we don't get symbol conflicts when
     // linking against a library of the same name, if this is an executable.
-    let is_exe = session.crate_types.borrow().contains(&config::CrateTypeExecutable);
+    let is_exe = session
+        .crate_types
+        .borrow()
+        .contains(&config::CrateTypeExecutable);
     hasher.write(if is_exe { b"exe" } else { b"lib" });
 
     CrateDisambiguator::from(hasher.finish())
-
 }
 
-pub fn build_output_filenames(input: &Input,
-                              odir: &Option<PathBuf>,
-                              ofile: &Option<PathBuf>,
-                              attrs: &[ast::Attribute],
-                              sess: &Session)
-                              -> OutputFilenames {
+pub fn build_output_filenames(
+    input: &Input,
+    odir: &Option<PathBuf>,
+    ofile: &Option<PathBuf>,
+    attrs: &[ast::Attribute],
+    sess: &Session,
+) -> OutputFilenames {
     match *ofile {
         None => {
             // "-" as input file will cause the parser to read from stdin so we
@@ -1372,10 +1457,10 @@ pub fn build_output_filenames(input: &Input,
 
             // If a crate name is present, we use it as the link name
             let stem = sess.opts
-                           .crate_name
-                           .clone()
-                           .or_else(|| attr::find_crate_name(attrs).map(|n| n.to_string()))
-                           .unwrap_or(input.filestem());
+                .crate_name
+                .clone()
+                .or_else(|| attr::find_crate_name(attrs).map(|n| n.to_string()))
+                .unwrap_or(input.filestem());
 
             OutputFilenames {
                 out_directory: dirpath,
@@ -1388,13 +1473,15 @@ pub fn build_output_filenames(input: &Input,
 
         Some(ref out_file) => {
             let unnamed_output_types = sess.opts
-                                           .output_types
-                                           .values()
-                                           .filter(|a| a.is_none())
-                                           .count();
+                .output_types
+                .values()
+                .filter(|a| a.is_none())
+                .count();
             let ofile = if unnamed_output_types > 1 {
-                sess.warn("due to multiple output types requested, the explicitly specified \
-                           output file name will be adapted for each output type");
+                sess.warn(
+                    "due to multiple output types requested, the explicitly specified \
+                     output file name will be adapted for each output type",
+                );
                 None
             } else {
                 Some(out_file.clone())
@@ -1410,11 +1497,12 @@ pub fn build_output_filenames(input: &Input,
 
             OutputFilenames {
                 out_directory: out_file.parent().unwrap_or(cur_dir).to_path_buf(),
-                out_filestem: out_file.file_stem()
-                                      .unwrap_or(OsStr::new(""))
-                                      .to_str()
-                                      .unwrap()
-                                      .to_string(),
+                out_filestem: out_file
+                    .file_stem()
+                    .unwrap_or(OsStr::new(""))
+                    .to_str()
+                    .unwrap()
+                    .to_string(),
                 single_output_file: ofile,
                 extra: sess.opts.cg.extra_filename.clone(),
                 outputs: sess.opts.output_types.clone(),
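Most of the driver churn above is a reflow of calls to the `time(sess, label, closure)` helper that brackets each compiler pass and forwards the closure's result. A rough standalone sketch of that shape (simplified; rustc's real `util::common::time` also takes the `Session` and consults `-Z time-passes`):

    use std::time::Instant;

    // Run a closure, report how long it took, and forward its result unchanged.
    fn time<T>(label: &str, f: impl FnOnce() -> T) -> T {
        let start = Instant::now();
        let result = f();
        eprintln!("time: {:.3?}\t{}", start.elapsed(), label);
        result
    }

    fn main() {
        let sum: u64 = time("summing", || (0..1_000_000u64).sum());
        assert_eq!(sum, 499_999_500_000);
    }

Returning the closure's value is what lets the reformatted call sites above wrap fallible passes (for example `typeck::check_crate`) without changing control flow.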
index 3dec84d174dd8d922badd5cd17625b8a14987639..fb333ec38fb60ada847ed1afba1723a1b4a60e1e 100644 (file)
@@ -25,6 +25,7 @@
 #![feature(slice_sort_by_cached_key)]
 #![feature(set_stdio)]
 #![feature(rustc_stack_internals)]
+#![feature(no_debug)]
 
 extern crate arena;
 extern crate getopts;
@@ -230,6 +231,9 @@ fn load_backend_from_dylib(path: &Path) -> fn() -> Box<TransCrate> {
 
 pub fn get_trans(sess: &Session) -> Box<TransCrate> {
     static INIT: Once = ONCE_INIT;
+
+    #[allow(deprecated)]
+    #[no_debug]
     static mut LOAD: fn() -> Box<TransCrate> = || unreachable!();
 
     INIT.call_once(|| {
@@ -547,6 +551,18 @@ macro_rules! do_or_return {($expr: expr, $sess: expr) => {
     (result, Some(sess))
 }
 
+#[cfg(unix)]
+pub fn set_sigpipe_handler() {
+    unsafe {
+        // Set the SIGPIPE signal handler, so that an EPIPE
+        // will cause rustc to terminate, as expected.
+        assert!(libc::signal(libc::SIGPIPE, libc::SIG_DFL) != libc::SIG_ERR);
+    }
+}
+
+#[cfg(windows)]
+pub fn set_sigpipe_handler() {}
+
 // Extract output directory and file from matches.
 fn make_output(matches: &getopts::Matches) -> (Option<PathBuf>, Option<PathBuf>) {
     let odir = matches.opt_str("out-dir").map(|o| PathBuf::from(&o));
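The new `set_sigpipe_handler` above restores the default SIGPIPE disposition so that writing to a closed pipe (for example `rustc ... | head`) terminates the process instead of surfacing as a panic from a failed `println!`. A hedged sketch of how a standalone program could use the same idea (assumes a dependency on the `libc` crate; the non-Unix variant is deliberately a no-op, as above):

    extern crate libc;

    #[cfg(unix)]
    fn set_sigpipe_handler() {
        unsafe {
            // Restore SIG_DFL so an EPIPE quietly kills the process.
            assert!(libc::signal(libc::SIGPIPE, libc::SIG_DFL) != libc::SIG_ERR);
        }
    }

    #[cfg(not(unix))]
    fn set_sigpipe_handler() {}

    fn main() {
        set_sigpipe_handler(); // install before producing any output
        for i in 0..1_000_000 {
            println!("{}", i); // exits cleanly if the reader goes away
        }
    }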
index 5aae895ccc4d5e1bfe0b8c61459b1d12060293a3..04f6503d92dd89bc38e7ef3d6461617fb3d748a1 100644 (file)
@@ -28,7 +28,7 @@
 use rustc::hir::map as hir_map;
 use rustc::session::{self, config};
 use rustc::session::config::{OutputFilenames, OutputTypes};
-use rustc_data_structures::sync::Lrc;
+use rustc_data_structures::sync::{self, Lrc};
 use syntax;
 use syntax::ast;
 use syntax::abi::Abi;
@@ -88,13 +88,13 @@ fn emit(&mut self, db: &DiagnosticBuilder) {
     }
 }
 
-fn errors(msgs: &[&str]) -> (Box<Emitter + Send>, usize) {
+fn errors(msgs: &[&str]) -> (Box<Emitter + sync::Send>, usize) {
     let v = msgs.iter().map(|m| m.to_string()).collect();
-    (box ExpectErrorEmitter { messages: v } as Box<Emitter + Send>, msgs.len())
+    (box ExpectErrorEmitter { messages: v } as Box<Emitter + sync::Send>, msgs.len())
 }
 
 fn test_env<F>(source_string: &str,
-               args: (Box<Emitter + Send>, usize),
+               args: (Box<Emitter + sync::Send>, usize),
                body: F)
     where F: FnOnce(Env)
 {
@@ -104,7 +104,7 @@ fn test_env<F>(source_string: &str,
 }
 
 fn test_env_impl<F>(source_string: &str,
-                    (emitter, expected_err_count): (Box<Emitter + Send>, usize),
+                    (emitter, expected_err_count): (Box<Emitter + sync::Send>, usize),
                     body: F)
     where F: FnOnce(Env)
 {
index ca5d3f55a0fea4988d75505405c1a32d05917145..91075ddcfa422ab40d532178204b806371749145 100644 (file)
@@ -1389,8 +1389,8 @@ fn num_overlap(a_start: usize, a_end: usize, b_start: usize, b_end:usize, inclus
     } else {
         0
     };
-    (b_start..b_end + extra).contains(a_start) ||
-    (a_start..a_end + extra).contains(b_start)
+    (b_start..b_end + extra).contains(&a_start) ||
+    (a_start..a_end + extra).contains(&b_start)
 }
 fn overlaps(a1: &Annotation, a2: &Annotation, padding: usize) -> bool {
     num_overlap(a1.start_col, a1.end_col + padding, a2.start_col, a2.end_col, false)
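The only semantic change in this hunk is that `Range::contains` takes its argument by reference, hence the added `&`. A tiny standalone check of the same API on a toolchain where `Range::contains` is available:

    fn main() {
        assert!((0..10).contains(&3));
        assert!(!(0..10).contains(&10)); // the upper bound is exclusive
        assert!((0..=10).contains(&10)); // inclusive ranges include it
    }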
index 8d5f9ac93f0b48e3d5b92f7233715e3f2c1cf7e9..ce3efef08cc4234f992590284732635140f8302c 100644 (file)
 
 use emitter::{Emitter, EmitterWriter};
 
-use rustc_data_structures::sync::{self, Lrc};
+use rustc_data_structures::sync::{self, Lrc, Lock, LockCell};
 use rustc_data_structures::fx::FxHashSet;
 use rustc_data_structures::stable_hasher::StableHasher;
 
 use std::borrow::Cow;
-use std::cell::{RefCell, Cell};
+use std::cell::Cell;
 use std::{error, fmt};
 use std::sync::atomic::AtomicUsize;
 use std::sync::atomic::Ordering::SeqCst;
@@ -262,19 +262,22 @@ pub struct Handler {
     pub flags: HandlerFlags,
 
     err_count: AtomicUsize,
-    emitter: RefCell<Box<Emitter>>,
-    continue_after_error: Cell<bool>,
-    delayed_span_bug: RefCell<Option<Diagnostic>>,
+    emitter: Lock<Box<Emitter + sync::Send>>,
+    continue_after_error: LockCell<bool>,
+    delayed_span_bug: Lock<Option<Diagnostic>>,
 
     // This set contains the `DiagnosticId` of all emitted diagnostics to avoid
     // emitting the same diagnostic with extended help (`--teach`) twice, which
     // would be unnecessary repetition.
-    tracked_diagnostic_codes: RefCell<FxHashSet<DiagnosticId>>,
+    taught_diagnostics: Lock<FxHashSet<DiagnosticId>>,
+
+    /// Used to suggest rustc --explain <error code>
+    emitted_diagnostic_codes: Lock<FxHashSet<DiagnosticId>>,
 
     // This set contains a hash of every diagnostic that has been emitted by
     // this handler. These hashes are used to avoid emitting the same error
     // twice.
-    emitted_diagnostics: RefCell<FxHashSet<u128>>,
+    emitted_diagnostics: Lock<FxHashSet<u128>>,
 }
 
 fn default_track_diagnostic(_: &Diagnostic) {}
@@ -315,7 +318,7 @@ pub fn with_tty_emitter_and_flags(color_config: ColorConfig,
 
     pub fn with_emitter(can_emit_warnings: bool,
                         treat_err_as_bug: bool,
-                        e: Box<Emitter>)
+                        e: Box<Emitter + sync::Send>)
                         -> Handler {
         Handler::with_emitter_and_flags(
             e,
@@ -326,15 +329,16 @@ pub fn with_emitter(can_emit_warnings: bool,
             })
     }
 
-    pub fn with_emitter_and_flags(e: Box<Emitter>, flags: HandlerFlags) -> Handler {
+    pub fn with_emitter_and_flags(e: Box<Emitter + sync::Send>, flags: HandlerFlags) -> Handler {
         Handler {
             flags,
             err_count: AtomicUsize::new(0),
-            emitter: RefCell::new(e),
-            continue_after_error: Cell::new(true),
-            delayed_span_bug: RefCell::new(None),
-            tracked_diagnostic_codes: RefCell::new(FxHashSet()),
-            emitted_diagnostics: RefCell::new(FxHashSet()),
+            emitter: Lock::new(e),
+            continue_after_error: LockCell::new(true),
+            delayed_span_bug: Lock::new(None),
+            taught_diagnostics: Lock::new(FxHashSet()),
+            emitted_diagnostic_codes: Lock::new(FxHashSet()),
+            emitted_diagnostics: Lock::new(FxHashSet()),
         }
     }
 
@@ -348,7 +352,7 @@ pub fn set_continue_after_error(&self, continue_after_error: bool) {
     /// tools that want to reuse a `Parser` cleaning the previously emitted diagnostics as well as
     /// the overall count of emitted error diagnostics.
     pub fn reset_err_count(&self) {
-        self.emitted_diagnostics.replace(FxHashSet());
+        *self.emitted_diagnostics.borrow_mut() = FxHashSet();
         self.err_count.store(0, SeqCst);
     }
 
@@ -568,10 +572,10 @@ pub fn print_error_count(&self) {
         let _ = self.fatal(&s);
 
         let can_show_explain = self.emitter.borrow().should_show_explain();
-        let are_there_diagnostics = !self.tracked_diagnostic_codes.borrow().is_empty();
+        let are_there_diagnostics = !self.emitted_diagnostic_codes.borrow().is_empty();
         if can_show_explain && are_there_diagnostics {
             let mut error_codes =
-                self.tracked_diagnostic_codes.borrow()
+                self.emitted_diagnostic_codes.borrow()
                                              .clone()
                                              .into_iter()
                                              .filter_map(|x| match x {
@@ -630,12 +634,13 @@ pub fn emit_with_code(&self, msp: &MultiSpan, msg: &str, code: DiagnosticId, lvl
         }
     }
 
-    /// `true` if a diagnostic with this code has already been emitted in this handler.
+    /// `true` if we haven't taught a diagnostic with this code already.
+    /// The caller must then teach the user about such a diagnostic.
     ///
     /// Used to suppress emitting the same error multiple times with extended explanation when
     /// calling `-Zteach`.
-    pub fn code_emitted(&self, code: &DiagnosticId) -> bool {
-        self.tracked_diagnostic_codes.borrow().contains(code)
+    pub fn must_teach(&self, code: &DiagnosticId) -> bool {
+        self.taught_diagnostics.borrow_mut().insert(code.clone())
     }
 
     pub fn force_print_db(&self, mut db: DiagnosticBuilder) {
@@ -651,7 +656,7 @@ fn emit_db(&self, db: &DiagnosticBuilder) {
         });
 
         if let Some(ref code) = diagnostic.code {
-            self.tracked_diagnostic_codes.borrow_mut().insert(code.clone());
+            self.emitted_diagnostic_codes.borrow_mut().insert(code.clone());
         }
 
         let diagnostic_hash = {
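The `Handler` changes above split the old `tracked_diagnostic_codes` set in two: `taught_diagnostics`, consumed by the renamed `must_teach`, and `emitted_diagnostic_codes`, used later to suggest `rustc --explain`. `must_teach` relies on the insert-returns-bool idiom so only the first emission of a code gets the `-Z teach` long form. A minimal illustration of that idiom, outside rustc:

    use std::collections::HashSet;

    fn main() {
        let mut taught: HashSet<String> = HashSet::new();
        // `insert` returns true only the first time a value is added, so the
        // first emission "teaches" and later ones skip the long explanation.
        assert!(taught.insert("E0308".to_string()));
        assert!(!taught.insert("E0308".to_string()));
    }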
index a5bc1106ba0b00fc24264b5378cd5711274e69ea..e524fcecf9094a3edce0a932d97ee83f2878272c 100644 (file)
@@ -43,7 +43,11 @@ pub fn save_dep_graph<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>) {
             time(sess, "persist dep-graph", || {
                 save_in(sess,
                         dep_graph_path(sess),
-                        |e| encode_dep_graph(tcx, e));
+                        |e| {
+                            time(sess, "encode dep-graph", || {
+                                encode_dep_graph(tcx, e)
+                            })
+                        });
             });
         }
 
@@ -145,7 +149,9 @@ fn encode_dep_graph(tcx: TyCtxt,
     tcx.sess.opts.dep_tracking_hash().encode(encoder)?;
 
     // Encode the graph data.
-    let serialized_graph = tcx.dep_graph.serialize();
+    let serialized_graph = time(tcx.sess, "getting serialized graph", || {
+        tcx.dep_graph.serialize()
+    });
 
     if tcx.sess.opts.debugging_opts.incremental_info {
         #[derive(Clone)]
@@ -221,7 +227,9 @@ struct Stat {
         println!("[incremental]");
     }
 
-    serialized_graph.encode(encoder)?;
+    time(tcx.sess, "encoding serialized graph", || {
+        serialized_graph.encode(encoder)
+    })?;
 
     Ok(())
 }
@@ -245,5 +253,7 @@ fn encode_work_products(dep_graph: &DepGraph,
 fn encode_query_cache(tcx: TyCtxt,
                       encoder: &mut Encoder)
                       -> io::Result<()> {
-    tcx.serialize_query_result_cache(encoder)
+    time(tcx.sess, "serialize query result cache", || {
+        tcx.serialize_query_result_cache(encoder)
+    })
 }
index 34d9f47bac3ddc8d0bbcc6db280d8f1ed459a1b2..8a18a4b5290f8e91a7a386fe39da83cc946e66a0 100644 (file)
@@ -25,7 +25,7 @@
 use rustc_back::target::TargetTriple;
 use rustc::session::search_paths::PathKind;
 use rustc::middle;
-use rustc::middle::cstore::{validate_crate_name, ExternCrate};
+use rustc::middle::cstore::{validate_crate_name, ExternCrate, ExternCrateSource};
 use rustc::util::common::record_time;
 use rustc::util::nodemap::FxHashSet;
 use rustc::hir::map::Definitions;
@@ -371,12 +371,19 @@ fn update_extern_crate(&mut self,
         // - something over nothing (tuple.0);
         // - direct extern crate to indirect (tuple.1);
         // - shorter paths to longer (tuple.2).
-        let new_rank = (true, extern_crate.direct, !extern_crate.path_len);
+        let new_rank = (
+            true,
+            extern_crate.direct,
+            cmp::Reverse(extern_crate.path_len),
+        );
         let old_rank = match *old_extern_crate {
-            None => (false, false, !0),
-            Some(ref c) => (true, c.direct, !c.path_len),
+            None => (false, false, cmp::Reverse(usize::max_value())),
+            Some(ref c) => (
+                true,
+                c.direct,
+                cmp::Reverse(c.path_len),
+            ),
         };
-
         if old_rank >= new_rank {
             return; // no change needed
         }
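The ranking rewrite above replaces the old `!path_len` bit trick with `cmp::Reverse`, preserving the documented ordering: something over nothing, direct over indirect, and shorter paths over longer (which is also why the later `process_path_extern` hunk uses `usize::max_value()` as the lowest-priority path length). A quick standalone check of how the tuple comparison behaves under those assumptions:

    use std::cmp::Reverse;

    fn main() {
        // Tuples compare lexicographically; Reverse flips the path-length
        // order, so a shorter path wins when the earlier fields tie.
        let direct_short = (true, true, Reverse(2usize));
        let direct_long = (true, true, Reverse(5usize));
        let absent = (false, false, Reverse(usize::max_value()));
        assert!(direct_short > direct_long);
        assert!(direct_long > absent);
    }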
@@ -1053,7 +1060,7 @@ fn postprocess(&mut self, krate: &ast::Crate) {
         }
     }
 
-    fn process_item(&mut self, item: &ast::Item, definitions: &Definitions) {
+    fn process_extern_crate(&mut self, item: &ast::Item, definitions: &Definitions) -> CrateNum {
         match item.node {
             ast::ItemKind::ExternCrate(orig_name) => {
                 debug!("resolving extern crate stmt. ident: {} orig_name: {:?}",
@@ -1079,17 +1086,72 @@ fn process_item(&mut self, item: &ast::Item, definitions: &Definitions) {
 
                 let def_id = definitions.opt_local_def_id(item.id).unwrap();
                 let path_len = definitions.def_path(def_id.index).data.len();
-
-                let extern_crate = ExternCrate { def_id, span: item.span, direct: true, path_len };
-                self.update_extern_crate(cnum, extern_crate, &mut FxHashSet());
+                self.update_extern_crate(
+                    cnum,
+                    ExternCrate {
+                        src: ExternCrateSource::Extern(def_id),
+                        span: item.span,
+                        path_len,
+                        direct: true,
+                    },
+                    &mut FxHashSet(),
+                );
                 self.cstore.add_extern_mod_stmt_cnum(item.id, cnum);
+                cnum
             }
-            _ => {}
+            _ => bug!(),
         }
     }
 
-    fn resolve_crate_from_path(&mut self, name: Symbol, span: Span) -> CrateNum {
-        self.resolve_crate(&None, name, name, None, None, span, PathKind::Crate,
-                           DepKind::Explicit).0
+    fn process_path_extern(
+        &mut self,
+        name: Symbol,
+        span: Span,
+    ) -> CrateNum {
+        let cnum = self.resolve_crate(
+            &None, name, name, None, None, span, PathKind::Crate, DepKind::Explicit
+        ).0;
+
+        self.update_extern_crate(
+            cnum,
+            ExternCrate {
+                src: ExternCrateSource::Path,
+                span,
+                // to have the least priority in `update_extern_crate`
+                path_len: usize::max_value(),
+                direct: true,
+            },
+            &mut FxHashSet(),
+        );
+
+        cnum
+    }
+
+    fn process_use_extern(
+        &mut self,
+        name: Symbol,
+        span: Span,
+        id: ast::NodeId,
+        definitions: &Definitions,
+    ) -> CrateNum {
+        let cnum = self.resolve_crate(
+            &None, name, name, None, None, span, PathKind::Crate, DepKind::Explicit
+        ).0;
+
+        let def_id = definitions.opt_local_def_id(id).unwrap();
+        let path_len = definitions.def_path(def_id.index).data.len();
+
+        self.update_extern_crate(
+            cnum,
+            ExternCrate {
+                src: ExternCrateSource::Use,
+                span,
+                path_len,
+                direct: true,
+            },
+            &mut FxHashSet(),
+        );
+
+        cnum
     }
 }
index 3ea4ddc25226fffe80d39cbeba80b474ea18e2cd..936d680380c993a994cae26b43b69ea08cfc9398 100644 (file)
@@ -59,9 +59,9 @@ pub struct DecodeContext<'a, 'tcx: 'a> {
 
     // interpreter allocation cache
     interpret_alloc_cache: FxHashMap<usize, interpret::AllocId>,
-    // a cache for sizes of interpreter allocations
-    // needed to skip already deserialized allocations
-    interpret_alloc_size: FxHashMap<usize, usize>,
+
+    // Read from the LazySeq CrateRoot::interpret_alloc_index on demand
+    interpret_alloc_index: Option<Vec<u32>>,
 }
 
 /// Abstract over the various ways one can create metadata decoders.
@@ -81,7 +81,7 @@ fn decoder(self, pos: usize) -> DecodeContext<'a, 'tcx> {
             last_filemap_index: 0,
             lazy_state: LazyState::NoNode,
             interpret_alloc_cache: FxHashMap::default(),
-            interpret_alloc_size: FxHashMap::default(),
+            interpret_alloc_index: None,
         }
     }
 }
@@ -180,6 +180,17 @@ fn read_lazy_distance(&mut self, min_size: usize) -> Result<usize, <Self as Deco
         self.lazy_state = LazyState::Previous(position + min_size);
         Ok(position)
     }
+
+    fn interpret_alloc(&mut self, idx: usize) -> usize {
+        if let Some(index) = self.interpret_alloc_index.as_mut() {
+            return index[idx] as usize;
+        }
+        let cdata = self.cdata();
+        let index: Vec<u32> = cdata.root.interpret_alloc_index.decode(cdata).collect();
+        let pos = index[idx];
+        self.interpret_alloc_index = Some(index);
+        pos as usize
+    }
 }
 
 impl<'a, 'tcx: 'a> TyDecoder<'a, 'tcx> for DecodeContext<'a, 'tcx> {
@@ -290,34 +301,22 @@ fn specialized_decode(&mut self) -> Result<LocalDefId, Self::Error> {
 
 impl<'a, 'tcx> SpecializedDecoder<interpret::AllocId> for DecodeContext<'a, 'tcx> {
     fn specialized_decode(&mut self) -> Result<interpret::AllocId, Self::Error> {
-        let tcx = self.tcx.expect("need tcx for AllocId decoding");
-        let pos = self.position();
-        if let Some(cached) = self.interpret_alloc_cache.get(&pos).cloned() {
-            // if there's no end position we are currently deserializing a recursive
-            // allocation
-            if let Some(end) = self.interpret_alloc_size.get(&pos).cloned() {
-                trace!("{} already cached as {:?}", pos, cached);
-                // skip ahead
-                self.opaque.set_position(end);
-                return Ok(cached)
-            }
+        let tcx = self.tcx.unwrap();
+        let idx = usize::decode(self)?;
+
+        if let Some(cached) = self.interpret_alloc_cache.get(&idx).cloned() {
+            return Ok(cached);
         }
-        let id = interpret::specialized_decode_alloc_id(
-            self,
-            tcx,
-            pos,
-            |this, pos, alloc_id| { this.interpret_alloc_cache.insert(pos, alloc_id); },
-            |this, shorthand| {
-                // need to load allocation
-                this.with_position(shorthand, |this| interpret::AllocId::decode(this))
-            }
-        )?;
-        let end_pos = self.position();
-        assert!(self
-            .interpret_alloc_size
-            .insert(pos, end_pos)
-            .is_none());
-        Ok(id)
+        let pos = self.interpret_alloc(idx);
+        self.with_position(pos, |this| {
+            interpret::specialized_decode_alloc_id(
+                this,
+                tcx,
+                |this, alloc_id| {
+                    assert!(this.interpret_alloc_cache.insert(idx, alloc_id).is_none());
+                },
+            )
+        })
     }
 }
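On the decoding side, `AllocId`s are now stored as indices into the `interpret_alloc_index` table, which `interpret_alloc` above decodes lazily on first use and then keeps in `interpret_alloc_index: Option<Vec<u32>>`. A stripped-down sketch of that memoization pattern (hypothetical names, not the real `DecodeContext`):

    struct AllocTable {
        // None until the side table has been decoded once.
        cached: Option<Vec<u32>>,
    }

    impl AllocTable {
        fn position(&mut self, idx: usize, decode: impl FnOnce() -> Vec<u32>) -> usize {
            if self.cached.is_none() {
                self.cached = Some(decode()); // decode the whole index once
            }
            self.cached.as_ref().unwrap()[idx] as usize
        }
    }

    fn main() {
        let mut table = AllocTable { cached: None };
        assert_eq!(table.position(1, || vec![0, 42, 97]), 42);
        // The second lookup hits the cache; the closure is never called again.
        assert_eq!(table.position(2, || unreachable!()), 97);
    }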
 
index 22b440eea60efec9e5ffa5606e9a4266f8d69d57..67c180e94caff01129e6ba91540bb1724413dfd7 100644 (file)
@@ -59,7 +59,9 @@ pub struct EncodeContext<'a, 'tcx: 'a> {
     lazy_state: LazyState,
     type_shorthands: FxHashMap<Ty<'tcx>, usize>,
     predicate_shorthands: FxHashMap<ty::Predicate<'tcx>, usize>,
-    interpret_alloc_shorthands: FxHashMap<interpret::AllocId, usize>,
+
+    interpret_allocs: FxHashMap<interpret::AllocId, usize>,
+    interpret_allocs_inverse: Vec<interpret::AllocId>,
 
     // This is used to speed up Span encoding.
     filemap_cache: Lrc<FileMap>,
@@ -197,25 +199,17 @@ fn specialized_encode(&mut self, ty: &Ty<'tcx>) -> Result<(), Self::Error> {
 impl<'a, 'tcx> SpecializedEncoder<interpret::AllocId> for EncodeContext<'a, 'tcx> {
     fn specialized_encode(&mut self, alloc_id: &interpret::AllocId) -> Result<(), Self::Error> {
         use std::collections::hash_map::Entry;
-        let tcx = self.tcx;
-        let pos = self.position();
-        let shorthand = match self.interpret_alloc_shorthands.entry(*alloc_id) {
-            Entry::Occupied(entry) => Some(entry.get().clone()),
-            Entry::Vacant(entry) => {
-                // ensure that we don't place any AllocIds at the very beginning
-                // of the metadata file, because that would end up making our indices
-                // not special. This is essentially impossible, but let's make sure
-                assert!(pos >= interpret::SHORTHAND_START);
-                entry.insert(pos);
-                None
+        let index = match self.interpret_allocs.entry(*alloc_id) {
+            Entry::Occupied(e) => *e.get(),
+            Entry::Vacant(e) => {
+                let idx = self.interpret_allocs_inverse.len();
+                self.interpret_allocs_inverse.push(*alloc_id);
+                e.insert(idx);
+                idx
             },
         };
-        interpret::specialized_encode_alloc_id(
-            self,
-            tcx,
-            *alloc_id,
-            shorthand,
-        )
+
+        index.encode(self)
     }
 }
 
@@ -271,7 +265,11 @@ fn emit_lazy_distance(&mut self,
                 start - min_end
             }
             LazyState::Previous(last_min_end) => {
-                assert!(last_min_end <= position);
+                assert!(
+                    last_min_end <= position,
+                    "make sure that the calls to `lazy*` \
+                    are in the same order as the metadata fields",
+                );
                 position - last_min_end
             }
         };
@@ -445,21 +443,52 @@ fn encode_crate_root(&mut self) -> Lazy<CrateRoot> {
             IsolatedEncoder::encode_wasm_custom_sections,
             &wasm_custom_sections);
 
-        // Encode and index the items.
+        let tcx = self.tcx;
+
+        // Encode the items.
         i = self.position();
         let items = self.encode_info_for_items();
         let item_bytes = self.position() - i;
 
+        // Encode the allocation index
+        let interpret_alloc_index = {
+            let mut interpret_alloc_index = Vec::new();
+            let mut n = 0;
+            trace!("beginning to encode alloc ids");
+            loop {
+                let new_n = self.interpret_allocs_inverse.len();
+                // if we have found new ids, serialize those, too
+                if n == new_n {
+                    // otherwise, abort
+                    break;
+                }
+                trace!("encoding {} further alloc ids", new_n - n);
+                for idx in n..new_n {
+                    let id = self.interpret_allocs_inverse[idx];
+                    let pos = self.position() as u32;
+                    interpret_alloc_index.push(pos);
+                    interpret::specialized_encode_alloc_id(
+                        self,
+                        tcx,
+                        id,
+                    ).unwrap();
+                }
+                n = new_n;
+            }
+            self.lazy_seq(interpret_alloc_index)
+        };
+
+        // Index the items
         i = self.position();
         let index = items.write_index(&mut self.opaque.cursor);
         let index_bytes = self.position() - i;
 
-        let tcx = self.tcx;
         let link_meta = self.link_meta;
         let is_proc_macro = tcx.sess.crate_types.borrow().contains(&CrateTypeProcMacro);
         let has_default_lib_allocator =
             attr::contains_name(tcx.hir.krate_attrs(), "default_lib_allocator");
         let has_global_allocator = *tcx.sess.has_global_allocator.get();
+
         let root = self.lazy(&CrateRoot {
             name: tcx.crate_name(LOCAL_CRATE),
             extra_filename: tcx.sess.opts.cg.extra_filename.clone(),
@@ -491,6 +520,7 @@ fn encode_crate_root(&mut self) -> Lazy<CrateRoot> {
             impls,
             exported_symbols,
             wasm_custom_sections,
+            interpret_alloc_index,
             index,
         });
 
@@ -834,6 +864,11 @@ fn encode_info_for_trait_item(&mut self, def_id: DefId) -> Entry<'tcx> {
         }
     }
 
+    fn metadata_output_only(&self) -> bool {
+        // MIR optimisation can be skipped when we're just interested in the metadata.
+        !self.tcx.sess.opts.output_types.should_trans()
+    }
+
     fn encode_info_for_impl_item(&mut self, def_id: DefId) -> Entry<'tcx> {
         debug!("IsolatedEncoder::encode_info_for_impl_item({:?})", def_id);
         let tcx = self.tcx;
@@ -878,7 +913,8 @@ fn encode_info_for_impl_item(&mut self, def_id: DefId) -> Entry<'tcx> {
         } else if let hir::ImplItemKind::Method(ref sig, body) = ast_item.node {
             let generics = self.tcx.generics_of(def_id);
             let types = generics.parent_types as usize + generics.types.len();
-            let needs_inline = types > 0 || tcx.trans_fn_attrs(def_id).requests_inline();
+            let needs_inline = (types > 0 || tcx.trans_fn_attrs(def_id).requests_inline()) &&
+                !self.metadata_output_only();
             let is_const_fn = sig.constness == hir::Constness::Const;
             let ast = if is_const_fn { Some(body) } else { None };
             let always_encode_mir = self.tcx.sess.opts.debugging_opts.always_encode_mir;
@@ -1169,7 +1205,8 @@ fn encode_info_for_item(&mut self, (def_id, item): (DefId, &'tcx hir::Item)) ->
                 hir::ItemConst(..) => self.encode_optimized_mir(def_id),
                 hir::ItemFn(_, _, constness, _, ref generics, _) => {
                     let has_tps = generics.ty_params().next().is_some();
-                    let needs_inline = has_tps || tcx.trans_fn_attrs(def_id).requests_inline();
+                    let needs_inline = (has_tps || tcx.trans_fn_attrs(def_id).requests_inline()) &&
+                        !self.metadata_output_only();
                     let always_encode_mir = self.tcx.sess.opts.debugging_opts.always_encode_mir;
                     if needs_inline || constness == hir::Constness::Const || always_encode_mir {
                         self.encode_optimized_mir(def_id)
@@ -1760,7 +1797,8 @@ pub fn encode_metadata<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
             type_shorthands: Default::default(),
             predicate_shorthands: Default::default(),
             filemap_cache: tcx.sess.codemap().files()[0].clone(),
-            interpret_alloc_shorthands: Default::default(),
+            interpret_allocs: Default::default(),
+            interpret_allocs_inverse: Default::default(),
         };
 
         // Encode the rustc version string in a predictable location.
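Symmetrically, the encoder now hands each `AllocId` a small index and defers the allocation data itself to a single `interpret_alloc_index` table. Because encoding one allocation can reference further `AllocId`s, which land in `interpret_allocs_inverse`, the loop in `encode_crate_root` keeps making passes until a pass discovers nothing new. A self-contained sketch of that worklist/fixed-point shape, assuming a caller-supplied `encode_one` that may push newly discovered ids:

    // Encode every id reachable from the initial worklist; `encode_one` returns
    // the position of the encoded item and may append newly discovered ids.
    fn encode_all(
        mut worklist: Vec<u32>,
        mut encode_one: impl FnMut(u32, &mut Vec<u32>) -> u32,
    ) -> Vec<u32> {
        let mut positions = Vec::new();
        let mut n = 0;
        loop {
            let new_n = worklist.len();
            if n == new_n {
                break; // fixed point: the last pass added nothing new
            }
            for idx in n..new_n {
                let id = worklist[idx];
                positions.push(encode_one(id, &mut worklist));
            }
            n = new_n;
        }
        positions
    }

    fn main() {
        // Toy encoder: id 0 "references" ids 1 and 2, which reference nothing.
        let positions = encode_all(vec![0], |id, worklist| {
            if id == 0 {
                worklist.extend([1, 2].iter().copied());
            }
            id * 10 // pretend this is the byte position where `id` was written
        });
        assert_eq!(positions, vec![0, 10, 20]);
    }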
index e3986bb7d91f94c5bdbde85cef7b828b75cf6d58..23ea5e4cc55043b97ef075feb1c90302aaaa1c58 100644 (file)
@@ -207,6 +207,7 @@ pub struct CrateRoot {
     pub impls: LazySeq<TraitImpls>,
     pub exported_symbols: EncodedExportedSymbols,
     pub wasm_custom_sections: LazySeq<DefIndex>,
+    pub interpret_alloc_index: LazySeq<u32>,
 
     pub index: LazySeq<index::Index>,
 }
diff --git a/src/librustc_mir/borrow_check/borrow_set.rs b/src/librustc_mir/borrow_check/borrow_set.rs
new file mode 100644 (file)
index 0000000..ccfb44a
--- /dev/null
@@ -0,0 +1,328 @@
+// Copyright 2012-2017 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+use borrow_check::place_ext::PlaceExt;
+use dataflow::indexes::BorrowIndex;
+use rustc::mir::traversal;
+use rustc::mir::visit::{PlaceContext, Visitor};
+use rustc::mir::{self, Location, Mir, Place};
+use rustc::ty::{Region, TyCtxt};
+use rustc::util::nodemap::{FxHashMap, FxHashSet};
+use rustc_data_structures::indexed_vec::IndexVec;
+use std::fmt;
+use std::hash::Hash;
+use std::ops::Index;
+
+crate struct BorrowSet<'tcx> {
+    /// The fundamental map relating bitvector indexes to the borrows
+    /// in the MIR.
+    crate borrows: IndexVec<BorrowIndex, BorrowData<'tcx>>,
+
+    /// Each borrow is also uniquely identified in the MIR by the
+    /// `Location` of the assignment statement in which it appears on
+    /// the right hand side; we map each such location to the
+    /// corresponding `BorrowIndex`.
+    crate location_map: FxHashMap<Location, BorrowIndex>,
+
+    /// Locations which activate borrows.
+    /// NOTE: A given location may activate more than one borrow in the future
+    /// when more general two-phase borrow support is introduced, but for now we
+    /// only need to store one borrow index
+    crate activation_map: FxHashMap<Location, Vec<BorrowIndex>>,
+
+    /// Every borrow has a region; this maps each such region back to
+    /// its borrow-indexes.
+    crate region_map: FxHashMap<Region<'tcx>, FxHashSet<BorrowIndex>>,
+
+    /// Map from local to all the borrows on that local
+    crate local_map: FxHashMap<mir::Local, FxHashSet<BorrowIndex>>,
+}
+
+impl<'tcx> Index<BorrowIndex> for BorrowSet<'tcx> {
+    type Output = BorrowData<'tcx>;
+
+    fn index(&self, index: BorrowIndex) -> &BorrowData<'tcx> {
+        &self.borrows[index]
+    }
+}
+
+#[derive(Debug)]
+crate struct BorrowData<'tcx> {
+    /// Location where the borrow reservation starts.
+    /// In many cases, this will be equal to the activation location but not always.
+    crate reserve_location: Location,
+    /// Location where the borrow is activated. None if this is not a
+    /// 2-phase borrow.
+    crate activation_location: Option<Location>,
+    /// What kind of borrow this is
+    crate kind: mir::BorrowKind,
+    /// The region for which this borrow is live
+    crate region: Region<'tcx>,
+    /// Place from which we are borrowing
+    crate borrowed_place: mir::Place<'tcx>,
+    /// Place to which the borrow was stored
+    crate assigned_place: mir::Place<'tcx>,
+}
+
+impl<'tcx> fmt::Display for BorrowData<'tcx> {
+    fn fmt(&self, w: &mut fmt::Formatter) -> fmt::Result {
+        let kind = match self.kind {
+            mir::BorrowKind::Shared => "",
+            mir::BorrowKind::Unique => "uniq ",
+            mir::BorrowKind::Mut { .. } => "mut ",
+        };
+        let region = format!("{}", self.region);
+        let region = if region.len() > 0 {
+            format!("{} ", region)
+        } else {
+            region
+        };
+        write!(w, "&{}{}{:?}", region, kind, self.borrowed_place)
+    }
+}
+
+impl<'tcx> BorrowSet<'tcx> {
+    pub fn build(tcx: TyCtxt<'_, '_, 'tcx>, mir: &Mir<'tcx>) -> Self {
+        let mut visitor = GatherBorrows {
+            tcx,
+            mir,
+            idx_vec: IndexVec::new(),
+            location_map: FxHashMap(),
+            activation_map: FxHashMap(),
+            region_map: FxHashMap(),
+            local_map: FxHashMap(),
+            pending_activations: FxHashMap(),
+        };
+
+        for (block, block_data) in traversal::preorder(mir) {
+            visitor.visit_basic_block_data(block, block_data);
+        }
+
+        // Double check: We should have found an activation for every pending
+        // activation.
+        assert_eq!(
+            visitor
+                .pending_activations
+                .iter()
+                .find(|&(_local, &borrow_index)| visitor.idx_vec[borrow_index]
+                    .activation_location
+                    .is_none()),
+            None,
+            "never found an activation for this borrow!",
+        );
+
+        BorrowSet {
+            borrows: visitor.idx_vec,
+            location_map: visitor.location_map,
+            activation_map: visitor.activation_map,
+            region_map: visitor.region_map,
+            local_map: visitor.local_map,
+        }
+    }
+
+    crate fn activations_at_location(&self, location: Location) -> &[BorrowIndex] {
+        self.activation_map
+            .get(&location)
+            .map(|activations| &activations[..])
+            .unwrap_or(&[])
+    }
+}
+
+struct GatherBorrows<'a, 'gcx: 'tcx, 'tcx: 'a> {
+    tcx: TyCtxt<'a, 'gcx, 'tcx>,
+    mir: &'a Mir<'tcx>,
+    idx_vec: IndexVec<BorrowIndex, BorrowData<'tcx>>,
+    location_map: FxHashMap<Location, BorrowIndex>,
+    activation_map: FxHashMap<Location, Vec<BorrowIndex>>,
+    region_map: FxHashMap<Region<'tcx>, FxHashSet<BorrowIndex>>,
+    local_map: FxHashMap<mir::Local, FxHashSet<BorrowIndex>>,
+
+    /// When we encounter a 2-phase borrow statement, it will always
+    /// be assigning into a temporary TEMP:
+    ///
+    ///    TEMP = &foo
+    ///
+    /// We add TEMP to this map, mapped to `b`, the index of the
+    /// borrow. When we later find the activating use of TEMP, we
+    /// record that location as the borrow's activation.
+    pending_activations: FxHashMap<mir::Local, BorrowIndex>,
+}
+
+impl<'a, 'gcx, 'tcx> Visitor<'tcx> for GatherBorrows<'a, 'gcx, 'tcx> {
+    fn visit_assign(
+        &mut self,
+        block: mir::BasicBlock,
+        assigned_place: &mir::Place<'tcx>,
+        rvalue: &mir::Rvalue<'tcx>,
+        location: mir::Location,
+    ) {
+        if let mir::Rvalue::Ref(region, kind, ref borrowed_place) = *rvalue {
+            if borrowed_place.is_unsafe_place(self.tcx, self.mir) {
+                return;
+            }
+
+            let borrow = BorrowData {
+                kind,
+                region,
+                reserve_location: location,
+                activation_location: None,
+                borrowed_place: borrowed_place.clone(),
+                assigned_place: assigned_place.clone(),
+            };
+            let idx = self.idx_vec.push(borrow);
+            self.location_map.insert(location, idx);
+
+            self.insert_as_pending_if_two_phase(location, &assigned_place, region, kind, idx);
+
+            insert(&mut self.region_map, &region, idx);
+            if let Some(local) = borrowed_place.root_local() {
+                insert(&mut self.local_map, &local, idx);
+            }
+        }
+
+        return self.super_assign(block, assigned_place, rvalue, location);
+
+        fn insert<'a, K, V>(map: &'a mut FxHashMap<K, FxHashSet<V>>, k: &K, v: V)
+        where
+            K: Clone + Eq + Hash,
+            V: Eq + Hash,
+        {
+            map.entry(k.clone()).or_insert(FxHashSet()).insert(v);
+        }
+    }
+
+    fn visit_place(
+        &mut self,
+        place: &mir::Place<'tcx>,
+        context: PlaceContext<'tcx>,
+        location: Location,
+    ) {
+        self.super_place(place, context, location);
+
+        // We found a use of some temporary TEMP...
+        if let Place::Local(temp) = place {
+            // ... check whether we (earlier) saw a 2-phase borrow like
+            //
+            //     TMP = &mut place
+            match self.pending_activations.get(temp) {
+                Some(&borrow_index) => {
+                    let borrow_data = &mut self.idx_vec[borrow_index];
+
+                    // Watch out: the use of TMP in the borrow
+                    // itself doesn't count as an
+                    // activation. =)
+                    if borrow_data.reserve_location == location && context == PlaceContext::Store {
+                        return;
+                    }
+
+                    if let Some(other_activation) = borrow_data.activation_location {
+                        span_bug!(
+                            self.mir.source_info(location).span,
+                            "found two activations for 2-phase borrow temporary {:?}: \
+                             {:?} and {:?}",
+                            temp,
+                            location,
+                            other_activation,
+                        );
+                    }
+
+                    // Otherwise, this is the unique later use
+                    // that we expect.
+                    borrow_data.activation_location = Some(location);
+                    self.activation_map
+                        .entry(location)
+                        .or_insert(Vec::new())
+                        .push(borrow_index);
+                }
+
+                None => {}
+            }
+        }
+    }
+
+    fn visit_rvalue(&mut self, rvalue: &mir::Rvalue<'tcx>, location: mir::Location) {
+        if let mir::Rvalue::Ref(region, kind, ref place) = *rvalue {
+            // double-check that we already registered a BorrowData for this location
+
+            let borrow_index = self.location_map[&location];
+            let borrow_data = &self.idx_vec[borrow_index];
+            assert_eq!(borrow_data.reserve_location, location);
+            assert_eq!(borrow_data.kind, kind);
+            assert_eq!(borrow_data.region, region);
+            assert_eq!(borrow_data.borrowed_place, *place);
+        }
+
+        return self.super_rvalue(rvalue, location);
+    }
+
+    fn visit_statement(
+        &mut self,
+        block: mir::BasicBlock,
+        statement: &mir::Statement<'tcx>,
+        location: Location,
+    ) {
+        return self.super_statement(block, statement, location);
+    }
+}
+
+impl<'a, 'gcx, 'tcx> GatherBorrows<'a, 'gcx, 'tcx> {
+    /// Returns true if the borrow represented by `kind` is
+    /// allowed to be split into separate Reservation and
+    /// Activation phases.
+    fn allow_two_phase_borrow(&self, kind: mir::BorrowKind) -> bool {
+        self.tcx.two_phase_borrows()
+            && (kind.allows_two_phase_borrow()
+                || self.tcx.sess.opts.debugging_opts.two_phase_beyond_autoref)
+    }
+
+    /// If this is a two-phase borrow, then we will record it
+    /// as "pending" until we find the activating use.
+    fn insert_as_pending_if_two_phase(
+        &mut self,
+        start_location: Location,
+        assigned_place: &mir::Place<'tcx>,
+        region: Region<'tcx>,
+        kind: mir::BorrowKind,
+        borrow_index: BorrowIndex,
+    ) {
+        debug!(
+            "Borrows::insert_as_pending_if_two_phase({:?}, {:?}, {:?}, {:?})",
+            start_location, assigned_place, region, borrow_index,
+        );
+
+        if !self.allow_two_phase_borrow(kind) {
+            debug!("  -> {:?}", start_location);
+            return;
+        }
+
+        // When we encounter a 2-phase borrow statement, it will always
+        // be assigning into a temporary TEMP:
+        //
+        //    TEMP = &foo
+        //
+        // so extract `temp`.
+        let temp = if let &mir::Place::Local(temp) = assigned_place {
+            temp
+        } else {
+            span_bug!(
+                self.mir.source_info(start_location).span,
+                "expected 2-phase borrow to assign to a local, not `{:?}`",
+                assigned_place,
+            );
+        };
+
+        // Insert `temp` into the list of pending activations. From
+        // now on, we'll be on the lookout for a use of it. Note that
+        // we are guaranteed that this use will come after the
+        // assignment.
+        let old_value = self.pending_activations.insert(temp, borrow_index);
+        assert!(old_value.is_none());
+    }
+}
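
The reservation/activation bookkeeping in this new borrow_set.rs exists to
support two-phase borrows. As a rough, stand-alone illustration of the source
pattern being tracked (assuming two-phase borrows are enabled; the MIR in the
comments is only a sketch, not the exact lowering):

    fn main() {
        let mut v = vec![1, 2, 3];
        // Lowered MIR is roughly:
        //   TEMP = &mut v;            // reservation (reserve_location); TEMP is
        //                             // recorded in pending_activations
        //   n = Vec::len(&v);         // shared read while the borrow is only
        //                             // reserved: allowed
        //   Vec::push(move TEMP, n);  // first use of TEMP = activation
        //                             // (activation_location)
        v.push(v.len());
        assert_eq!(v, [1, 2, 3, 3]);
    }
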
index aaed1dd871bac458bff5698002d6c3dea5e5ed50..3dc5a7a84900cb280376e1933ddb24b5413321fd 100644 (file)
@@ -18,7 +18,9 @@
 
 use super::{Context, MirBorrowckCtxt};
 use super::{InitializationRequiringAction, PrefixSet};
-use dataflow::{Borrows, BorrowData, FlowAtLocation, MovingOutStatements};
+use super::borrow_set::BorrowData;
+
+use dataflow::{FlowAtLocation, MovingOutStatements};
 use dataflow::move_paths::MovePathIndex;
 use util::borrowck_errors::{BorrowckErrors, Origin};
 
@@ -261,7 +263,6 @@ pub(super) fn report_conflicting_borrow(
         (place, span): (&Place<'tcx>, Span),
         gen_borrow_kind: BorrowKind,
         issued_borrow: &BorrowData<'tcx>,
-        end_issued_loan_span: Option<Span>,
     ) {
         let issued_span = self.retrieve_borrow_span(issued_borrow);
 
@@ -295,7 +296,7 @@ pub(super) fn report_conflicting_borrow(
                     "it",
                     rgt,
                     "",
-                    end_issued_loan_span,
+                    None,
                     Origin::Mir,
                 )
             }
@@ -307,7 +308,7 @@ pub(super) fn report_conflicting_borrow(
                     "",
                     issued_span,
                     "",
-                    end_issued_loan_span,
+                    None,
                     Origin::Mir,
                 )
             }
@@ -317,7 +318,7 @@ pub(super) fn report_conflicting_borrow(
                     span,
                     &desc_place,
                     issued_span,
-                    end_issued_loan_span,
+                    None,
                     Origin::Mir,
                 )
             }
@@ -329,7 +330,7 @@ pub(super) fn report_conflicting_borrow(
                 issued_span,
                 "it",
                 "",
-                end_issued_loan_span,
+                None,
                 Origin::Mir,
             ),
 
@@ -341,7 +342,7 @@ pub(super) fn report_conflicting_borrow(
                     lft,
                     issued_span,
                     "",
-                    end_issued_loan_span,
+                    None,
                     Origin::Mir,
                 )
             }
@@ -354,7 +355,7 @@ pub(super) fn report_conflicting_borrow(
                     lft,
                     issued_span,
                     "",
-                    end_issued_loan_span,
+                    None,
                     Origin::Mir,
                 )
             }
@@ -389,10 +390,8 @@ pub(super) fn report_borrowed_value_does_not_live_long_enough(
         context: Context,
         borrow: &BorrowData<'tcx>,
         drop_span: Span,
-        borrows: &Borrows<'cx, 'gcx, 'tcx>
     ) {
-        let end_span = borrows.opt_region_end_span(&borrow.region);
-        let scope_tree = borrows.scope_tree();
+        let scope_tree = self.tcx.region_scope_tree(self.mir_def_id);
         let root_place = self.prefixes(&borrow.borrowed_place, PrefixSet::All)
             .last()
             .unwrap();
@@ -426,7 +425,6 @@ pub(super) fn report_borrowed_value_does_not_live_long_enough(
                     drop_span,
                     borrow_span,
                     proper_span,
-                    end_span,
                 );
             }
             (RegionKind::ReScope(_), None) => {
@@ -437,7 +435,6 @@ pub(super) fn report_borrowed_value_does_not_live_long_enough(
                     drop_span,
                     borrow_span,
                     proper_span,
-                    end_span,
                 );
             }
             (RegionKind::ReEarlyBound(_), Some(name))
@@ -453,7 +450,6 @@ pub(super) fn report_borrowed_value_does_not_live_long_enough(
                     drop_span,
                     borrow_span,
                     proper_span,
-                    end_span,
                 );
             }
             (RegionKind::ReEarlyBound(_), None)
@@ -468,7 +464,6 @@ pub(super) fn report_borrowed_value_does_not_live_long_enough(
                     drop_span,
                     borrow_span,
                     proper_span,
-                    end_span,
                 );
             }
             (RegionKind::ReLateBound(_, _), _)
@@ -490,7 +485,6 @@ fn report_scoped_local_value_does_not_live_long_enough(
         drop_span: Span,
         borrow_span: Span,
         _proper_span: Span,
-        end_span: Option<Span>,
     ) {
         let tcx = self.tcx;
         let mut err =
@@ -500,9 +494,6 @@ fn report_scoped_local_value_does_not_live_long_enough(
             drop_span,
             format!("`{}` dropped here while still borrowed", name),
         );
-        if let Some(end) = end_span {
-            err.span_label(end, "borrowed value needs to live until here");
-        }
         self.explain_why_borrow_contains_point(context, borrow, &mut err);
         err.emit();
     }
@@ -515,7 +506,6 @@ fn report_scoped_temporary_value_does_not_live_long_enough(
         drop_span: Span,
         _borrow_span: Span,
         proper_span: Span,
-        end_span: Option<Span>,
     ) {
         let tcx = self.tcx;
         let mut err =
@@ -526,9 +516,6 @@ fn report_scoped_temporary_value_does_not_live_long_enough(
             "temporary value dropped here while still borrowed",
         );
         err.note("consider using a `let` binding to increase its lifetime");
-        if let Some(end) = end_span {
-            err.span_label(end, "temporary value needs to live until here");
-        }
         self.explain_why_borrow_contains_point(context, borrow, &mut err);
         err.emit();
     }
@@ -542,7 +529,6 @@ fn report_unscoped_local_value_does_not_live_long_enough(
         drop_span: Span,
         borrow_span: Span,
         _proper_span: Span,
-        _end_span: Option<Span>,
     ) {
         debug!(
             "report_unscoped_local_value_does_not_live_long_enough(\
@@ -557,16 +543,6 @@ fn report_unscoped_local_value_does_not_live_long_enough(
         err.span_label(borrow_span, "borrowed value does not live long enough");
         err.span_label(drop_span, "borrowed value only lives until here");
 
-        if !tcx.nll() {
-            tcx.note_and_explain_region(
-                scope_tree,
-                &mut err,
-                "borrowed value must be valid for ",
-                borrow.region,
-                "...",
-            );
-        }
-
         self.explain_why_borrow_contains_point(context, borrow, &mut err);
         err.emit();
     }
@@ -579,7 +555,6 @@ fn report_unscoped_temporary_value_does_not_live_long_enough(
         drop_span: Span,
         _borrow_span: Span,
         proper_span: Span,
-        _end_span: Option<Span>,
     ) {
         debug!(
             "report_unscoped_temporary_value_does_not_live_long_enough(\
@@ -594,16 +569,6 @@ fn report_unscoped_temporary_value_does_not_live_long_enough(
         err.span_label(proper_span, "temporary value does not live long enough");
         err.span_label(drop_span, "temporary value only lives until here");
 
-        if !tcx.nll() {
-            tcx.note_and_explain_region(
-                scope_tree,
-                &mut err,
-                "borrowed value must be valid for ",
-                borrow.region,
-                "...",
-            );
-        }
-
         self.explain_why_borrow_contains_point(context, borrow, &mut err);
         err.emit();
     }
@@ -834,7 +799,7 @@ fn describe_field_from_ty(&self, ty: &ty::Ty, field: Field) -> String {
     }
 
     // Retrieve span of given borrow from the current MIR representation
-    pub fn retrieve_borrow_span(&self, borrow: &BorrowData) -> Span {
+    crate fn retrieve_borrow_span(&self, borrow: &BorrowData) -> Span {
         self.mir.source_info(borrow.reserve_location).span
     }
 
index ceff380c594ed48f37abdcab322c14f6f4ef2f4d..070dc1d09bf6bd855bc45cf58affb4f3cc39832d 100644 (file)
 //! but is not as ugly as it is right now.
 
 use rustc::mir::{BasicBlock, Location};
+use rustc_data_structures::indexed_set::Iter;
 
 use dataflow::{MaybeInitializedPlaces, MaybeUninitializedPlaces};
 use dataflow::{EverInitializedPlaces, MovingOutStatements};
 use dataflow::{Borrows};
 use dataflow::{FlowAtLocation, FlowsAtLocation};
 use dataflow::move_paths::HasMoveData;
+use dataflow::move_paths::indexes::BorrowIndex;
 use std::fmt;
 
 // (forced to be `pub` due to its use as an associated type below.)
-pub(crate) struct Flows<'b, 'gcx: 'tcx, 'tcx: 'b> {
-    pub borrows: FlowAtLocation<Borrows<'b, 'gcx, 'tcx>>,
+crate struct Flows<'b, 'gcx: 'tcx, 'tcx: 'b> {
+    borrows: FlowAtLocation<Borrows<'b, 'gcx, 'tcx>>,
     pub inits: FlowAtLocation<MaybeInitializedPlaces<'b, 'gcx, 'tcx>>,
     pub uninits: FlowAtLocation<MaybeUninitializedPlaces<'b, 'gcx, 'tcx>>,
     pub move_outs: FlowAtLocation<MovingOutStatements<'b, 'gcx, 'tcx>>,
@@ -32,7 +34,7 @@ pub(crate) struct Flows<'b, 'gcx: 'tcx, 'tcx: 'b> {
 }
 
 impl<'b, 'gcx, 'tcx> Flows<'b, 'gcx, 'tcx> {
-    pub fn new(
+    crate fn new(
         borrows: FlowAtLocation<Borrows<'b, 'gcx, 'tcx>>,
         inits: FlowAtLocation<MaybeInitializedPlaces<'b, 'gcx, 'tcx>>,
         uninits: FlowAtLocation<MaybeUninitializedPlaces<'b, 'gcx, 'tcx>>,
@@ -47,6 +49,14 @@ pub fn new(
             ever_inits,
         }
     }
+
+    crate fn borrows_in_scope(&self) -> impl Iterator<Item = BorrowIndex> + '_ {
+        self.borrows.iter_incoming()
+    }
+
+    crate fn with_outgoing_borrows(&self, op: impl FnOnce(Iter<BorrowIndex>)) {
+        self.borrows.with_iter_outgoing(op)
+    }
 }
 
 macro_rules! each_flow {
@@ -88,9 +98,8 @@ fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
                 s.push_str(", ");
             };
             saw_one = true;
-            let borrow_data = &self.borrows.operator().borrows()[borrow.borrow_index()];
-            s.push_str(&format!("{}{}", borrow_data,
-                                if borrow.is_activation() { "@active" } else { "" }));
+            let borrow_data = &self.borrows.operator().borrows()[borrow];
+            s.push_str(&format!("{}", borrow_data));
         });
         s.push_str("] ");
 
@@ -101,7 +110,7 @@ fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
                 s.push_str(", ");
             };
             saw_one = true;
-            let borrow_data = &self.borrows.operator().borrows()[borrow.borrow_index()];
+            let borrow_data = &self.borrows.operator().borrows()[borrow];
             s.push_str(&format!("{}", borrow_data));
         });
         s.push_str("] ");
index 62acdf76546248cae0733df9959bd93dfc1bd4c3..4dd8d245d3beaff1474d47f0d4961202c40b5dbe 100644 (file)
 use rustc::mir::{Field, Statement, StatementKind, Terminator, TerminatorKind};
 use rustc::mir::ClosureRegionRequirements;
 
+use rustc_data_structures::control_flow_graph::dominators::Dominators;
 use rustc_data_structures::fx::FxHashSet;
 use rustc_data_structures::indexed_set::IdxSetBuf;
 use rustc_data_structures::indexed_vec::Idx;
 
 use std::rc::Rc;
 
-use syntax::ast;
 use syntax_pos::Span;
 
 use dataflow::{do_dataflow, DebugFormatted};
@@ -37,7 +37,7 @@
 use dataflow::{DataflowResultsConsumer};
 use dataflow::{MaybeInitializedPlaces, MaybeUninitializedPlaces};
 use dataflow::{EverInitializedPlaces, MovingOutStatements};
-use dataflow::{BorrowData, Borrows, ReserveOrActivateIndex};
+use dataflow::Borrows;
 use dataflow::indexes::BorrowIndex;
 use dataflow::move_paths::{IllegalMoveOriginKind, MoveError};
 use dataflow::move_paths::{HasMoveData, LookupResult, MoveData, MovePathIndex};
 
 use std::iter;
 
+use self::borrow_set::{BorrowSet, BorrowData};
 use self::flows::Flows;
 use self::prefixes::PrefixSet;
 use self::MutateMode::{JustWrite, WriteAndRead};
 
+crate mod borrow_set;
 mod error_reporting;
 mod flows;
+crate mod place_ext;
 mod prefixes;
 
 pub(crate) mod nll;
@@ -70,7 +73,7 @@ fn mir_borrowck<'a, 'tcx>(
     let input_mir = tcx.mir_validated(def_id);
     debug!("run query mir_borrowck: {}", tcx.item_path_str(def_id));
 
-    if !tcx.has_attr(def_id, "rustc_mir_borrowck") && !tcx.use_mir() {
+    if !tcx.has_attr(def_id, "rustc_mir_borrowck") && !tcx.use_mir_borrowck() {
         return None;
     }
 
@@ -95,19 +98,13 @@ fn do_mir_borrowck<'a, 'gcx, 'tcx>(
         .as_local_node_id(def_id)
         .expect("do_mir_borrowck: non-local DefId");
 
-    // Make our own copy of the MIR. This copy will be modified (in place) to
-    // contain non-lexical lifetimes. It will have a lifetime tied
-    // to the inference context.
+    // Replace all regions with fresh inference variables. This
+    // requires first making our own copy of the MIR. This copy will
+    // be modified (in place) to contain non-lexical lifetimes. It
+    // will have a lifetime tied to the inference context.
     let mut mir: Mir<'tcx> = input_mir.clone();
-    let free_regions = if !tcx.nll() {
-        None
-    } else {
-        let mir = &mut mir;
-
-        // Replace all regions with fresh inference variables.
-        Some(nll::replace_regions_in_mir(infcx, def_id, param_env, mir))
-    };
-    let mir = &mir;
+    let free_regions = nll::replace_regions_in_mir(infcx, def_id, param_env, &mut mir);
+    let mir = &mir; // no further changes
 
     let move_data: MoveData<'tcx> = match MoveData::gather_moves(mir, tcx) {
         Ok(move_data) => move_data,
@@ -189,22 +186,20 @@ fn do_mir_borrowck<'a, 'gcx, 'tcx>(
         |bd, i| DebugFormatted::new(&bd.move_data().inits[i]),
     ));
 
+    let borrow_set = Rc::new(BorrowSet::build(tcx, mir));
+
     // If we are in non-lexical mode, compute the non-lexical lifetimes.
-    let (opt_regioncx, opt_closure_req) = if let Some(free_regions) = free_regions {
-        let (regioncx, opt_closure_req) = nll::compute_regions(
-            infcx,
-            def_id,
-            free_regions,
-            mir,
-            param_env,
-            &mut flow_inits,
-            &mdpe.move_data,
-        );
-        (Some(Rc::new(regioncx)), opt_closure_req)
-    } else {
-        assert!(!tcx.nll());
-        (None, None)
-    };
+    let (regioncx, opt_closure_req) = nll::compute_regions(
+        infcx,
+        def_id,
+        free_regions,
+        mir,
+        param_env,
+        &mut flow_inits,
+        &mdpe.move_data,
+        &borrow_set,
+    );
+    let regioncx = Rc::new(regioncx);
     let flow_inits = flow_inits; // remove mut
 
     let flow_borrows = FlowAtLocation::new(do_dataflow(
@@ -213,24 +208,24 @@ fn do_mir_borrowck<'a, 'gcx, 'tcx>(
         id,
         &attributes,
         &dead_unwinds,
-        Borrows::new(tcx, mir, opt_regioncx.clone(), def_id, body_id),
-        |rs, i| {
-            DebugFormatted::new(&(i.kind(), rs.location(i.borrow_index())))
-        }
+        Borrows::new(tcx, mir, regioncx.clone(), def_id, body_id, &borrow_set),
+        |rs, i| DebugFormatted::new(&rs.location(i)),
     ));
 
-    let movable_generator = !match tcx.hir.get(id) {
+    let movable_generator = match tcx.hir.get(id) {
         hir::map::Node::NodeExpr(&hir::Expr {
             node: hir::ExprClosure(.., Some(hir::GeneratorMovability::Static)),
             ..
-        }) => true,
-        _ => false,
+        }) => false,
+        _ => true,
     };
 
+    let dominators = mir.dominators();
+
     let mut mbcx = MirBorrowckCtxt {
         tcx: tcx,
         mir: mir,
-        node_id: id,
+        mir_def_id: def_id,
         move_data: &mdpe.move_data,
         param_env: param_env,
         movable_generator,
@@ -241,8 +236,10 @@ fn do_mir_borrowck<'a, 'gcx, 'tcx>(
         access_place_error_reported: FxHashSet(),
         reservation_error_reported: FxHashSet(),
         moved_error_reported: FxHashSet(),
-        nonlexical_regioncx: opt_regioncx,
+        nonlexical_regioncx: regioncx,
         nonlexical_cause_info: None,
+        borrow_set,
+        dominators,
     };
 
     let mut state = Flows::new(
@@ -262,7 +259,7 @@ fn do_mir_borrowck<'a, 'gcx, 'tcx>(
 pub struct MirBorrowckCtxt<'cx, 'gcx: 'tcx, 'tcx: 'cx> {
     tcx: TyCtxt<'cx, 'gcx, 'tcx>,
     mir: &'cx Mir<'tcx>,
-    node_id: ast::NodeId,
+    mir_def_id: DefId,
     move_data: &'cx MoveData<'tcx>,
     param_env: ParamEnv<'gcx>,
     movable_generator: bool,
@@ -293,8 +290,14 @@ pub struct MirBorrowckCtxt<'cx, 'gcx: 'tcx, 'tcx: 'cx> {
     /// Non-lexical region inference context.  This
     /// contains the results from region inference and lets us e.g.
     /// find out which CFG points are contained in each borrow region.
-    nonlexical_regioncx: Option<Rc<RegionInferenceContext<'tcx>>>,
+    nonlexical_regioncx: Rc<RegionInferenceContext<'tcx>>,
     nonlexical_cause_info: Option<RegionCausalInfo>,
+
+    /// The set of borrows extracted from the MIR
+    borrow_set: Rc<BorrowSet<'tcx>>,
+
+    /// Dominators for MIR
+    dominators: Dominators<BasicBlock>,
 }
 
 // Check that:
@@ -535,11 +538,10 @@ fn visit_terminator_entry(
 
                 if self.movable_generator {
                     // Look for any active borrows to locals
-                    let domain = flow_state.borrows.operator();
-                    let data = domain.borrows();
-                    flow_state.borrows.with_iter_outgoing(|borrows| {
+                    let borrow_set = self.borrow_set.clone();
+                    flow_state.with_outgoing_borrows(|borrows| {
                         for i in borrows {
-                            let borrow = &data[i.borrow_index()];
+                            let borrow = &borrow_set[i];
                             self.check_for_local_borrow(borrow, span);
                         }
                     });
@@ -551,13 +553,12 @@ fn visit_terminator_entry(
                 // Often, the storage will already have been killed by an explicit
                 // StorageDead, but we don't always emit those (notably on unwind paths),
                 // so this "extra check" serves as a kind of backup.
-                let domain = flow_state.borrows.operator();
-                let data = domain.borrows();
-                flow_state.borrows.with_iter_outgoing(|borrows| {
+                let borrow_set = self.borrow_set.clone();
+                flow_state.with_outgoing_borrows(|borrows| {
                     for i in borrows {
-                        let borrow = &data[i.borrow_index()];
+                        let borrow = &borrow_set[i];
                         let context = ContextKind::StorageDead.new(loc);
-                        self.check_for_invalidation_at_exit(context, borrow, span, flow_state);
+                        self.check_for_invalidation_at_exit(context, borrow, span);
                     }
                 });
             }
@@ -836,27 +837,34 @@ fn check_access_for_conflict(
         rw: ReadOrWrite,
         flow_state: &Flows<'cx, 'gcx, 'tcx>,
     ) -> bool {
+        debug!(
+            "check_access_for_conflict(context={:?}, place_span={:?}, sd={:?}, rw={:?})",
+            context,
+            place_span,
+            sd,
+            rw,
+        );
+
         let mut error_reported = false;
         self.each_borrow_involving_path(
             context,
             (sd, place_span.0),
             flow_state,
-            |this, index, borrow| match (rw, borrow.kind) {
+            |this, borrow_index, borrow| match (rw, borrow.kind) {
                 // Obviously an activation is compatible with its own
                 // reservation (or even prior activating uses of same
                 // borrow); so don't check if they interfere.
                 //
                 // NOTE: *reservations* do conflict with themselves;
                 // thus we aren't injecting unsoundness with this check.
-                (Activation(_, activating), _) if activating == index.borrow_index() => {
+                (Activation(_, activating), _) if activating == borrow_index => {
                     debug!(
                         "check_access_for_conflict place_span: {:?} sd: {:?} rw: {:?} \
-                         skipping {:?} b/c activation of same borrow_index: {:?}",
+                         skipping {:?} b/c activation of same borrow_index",
                         place_span,
                         sd,
                         rw,
-                        (index, borrow),
-                        index.borrow_index()
+                        (borrow_index, borrow),
                     );
                     Control::Continue
                 }
@@ -867,7 +875,8 @@ fn check_access_for_conflict(
 
                 (Read(kind), BorrowKind::Unique) | (Read(kind), BorrowKind::Mut { .. }) => {
                     // Reading from mere reservations of mutable-borrows is OK.
-                    if this.allow_two_phase_borrow(borrow.kind) && index.is_reservation() {
+                    if !this.is_active(borrow, context.loc) {
+                        assert!(this.allow_two_phase_borrow(borrow.kind));
                         return Control::Continue;
                     }
 
@@ -877,17 +886,12 @@ fn check_access_for_conflict(
                             this.report_use_while_mutably_borrowed(context, place_span, borrow)
                         }
                         ReadKind::Borrow(bk) => {
-                            let end_issued_loan_span = flow_state
-                                .borrows
-                                .operator()
-                                .opt_region_end_span(&borrow.region);
                             error_reported = true;
                             this.report_conflicting_borrow(
                                 context,
                                 place_span,
                                 bk,
                                 &borrow,
-                                end_issued_loan_span,
                             )
                         }
                     }
@@ -919,18 +923,12 @@ fn check_access_for_conflict(
 
                     match kind {
                         WriteKind::MutableBorrow(bk) => {
-                            let end_issued_loan_span = flow_state
-                                .borrows
-                                .operator()
-                                .opt_region_end_span(&borrow.region);
-
                             error_reported = true;
                             this.report_conflicting_borrow(
                                 context,
                                 place_span,
                                 bk,
                                 &borrow,
-                                end_issued_loan_span,
                             )
                         }
                         WriteKind::StorageDeadOrDrop => {
@@ -939,7 +937,6 @@ fn check_access_for_conflict(
                                 context,
                                 borrow,
                                 place_span.1,
-                                flow_state.borrows.operator(),
                             );
                         }
                         WriteKind::Mutate => {
@@ -1141,7 +1138,6 @@ fn check_for_invalidation_at_exit(
         context: Context,
         borrow: &BorrowData<'tcx>,
         span: Span,
-        flow_state: &Flows<'cx, 'gcx, 'tcx>,
     ) {
         debug!("check_for_invalidation_at_exit({:?})", borrow);
         let place = &borrow.borrowed_place;
@@ -1194,7 +1190,6 @@ fn check_for_invalidation_at_exit(
                 context,
                 borrow,
                 span,
-                flow_state.borrows.operator(),
             )
         }
     }
@@ -1249,36 +1244,30 @@ fn check_activations(
         // Two-phase borrow support: For each activation that is newly
         // generated at this statement, check if it interferes with
         // another borrow.
-        let domain = flow_state.borrows.operator();
-        let data = domain.borrows();
-        flow_state.borrows.each_gen_bit(|gen| {
-            if gen.is_activation() {
-                let borrow_index = gen.borrow_index();
-                let borrow = &data[borrow_index];
-                // currently the flow analysis registers
-                // activations for both mutable and immutable
-                // borrows. So make sure we are talking about a
-                // mutable borrow before we check it.
-                match borrow.kind {
-                    BorrowKind::Shared => return,
-                    BorrowKind::Unique | BorrowKind::Mut { .. } => {}
-                }
-
-                self.access_place(
-                    ContextKind::Activation.new(location),
-                    (&borrow.borrowed_place, span),
-                    (
-                        Deep,
-                        Activation(WriteKind::MutableBorrow(borrow.kind), borrow_index),
-                    ),
-                    LocalMutationIsAllowed::No,
-                    flow_state,
-                );
-                // We do not need to call `check_if_path_or_subpath_is_moved`
-                // again, as we already called it when we made the
-                // initial reservation.
-            }
-        });
+        let borrow_set = self.borrow_set.clone();
+        for &borrow_index in borrow_set.activations_at_location(location) {
+            let borrow = &borrow_set[borrow_index];
+
+            // only mutable borrows should be 2-phase
+            assert!(match borrow.kind {
+                BorrowKind::Shared => false,
+                BorrowKind::Unique | BorrowKind::Mut { .. } => true,
+            });
+
+            self.access_place(
+                ContextKind::Activation.new(location),
+                (&borrow.borrowed_place, span),
+                (
+                    Deep,
+                    Activation(WriteKind::MutableBorrow(borrow.kind), borrow_index),
+                ),
+                LocalMutationIsAllowed::No,
+                flow_state,
+            );
+            // We do not need to call `check_if_path_or_subpath_is_moved`
+            // again, as we already called it when we made the
+            // initial reservation.
+        }
     }
 }
 
@@ -1639,10 +1628,18 @@ fn check_access_permissions(
                                             } else {
                                                 self.get_default_err_msg(place)
                                             };
+                                            let sp = self.mir.source_info(locations[0]).span;
+                                            let mut to_suggest_span = String::new();
+                                            if let Ok(src) =
+                                                self.tcx.sess.codemap().span_to_snippet(sp) {
+                                                    to_suggest_span = src[1..].to_string();
+                                            };
                                             err_info = Some((
-                                                self.mir.source_info(locations[0]).span,
+                                                    sp,
                                                     "consider changing this to be a \
-                                                    mutable reference: `&mut`", item_msg,
+                                                    mutable reference",
+                                                    to_suggest_span,
+                                                    item_msg,
                                                     self.get_primary_err_msg(base)));
                                         }
                                 },
@@ -1652,9 +1649,15 @@ fn check_access_permissions(
                         _ => {},
                     }
 
-                    if let Some((err_help_span, err_help_stmt, item_msg, sec_span)) = err_info {
+                    if let Some((err_help_span,
+                                 err_help_stmt,
+                                 to_suggest_span,
+                                 item_msg,
+                                 sec_span)) = err_info {
                         let mut err = self.tcx.cannot_assign(span, &item_msg, Origin::Mir);
-                        err.span_suggestion(err_help_span, err_help_stmt, format!(""));
+                        err.span_suggestion(err_help_span,
+                                            err_help_stmt,
+                                            format!("&mut {}", to_suggest_span));
                         if place != place_err {
                             err.span_label(span, sec_span);
                         }
@@ -2203,18 +2206,15 @@ fn place_elements<'a, 'tcx>(place: &'a Place<'tcx>) -> Vec<&'a Place<'tcx>> {
         unreachable!("iter::repeat returned None")
     }
 
-    /// This function iterates over all of the current borrows
-    /// (represented by 1-bits in `flow_state.borrows`) that conflict
-    /// with an access to a place, invoking the `op` callback for each
-    /// one.
+    /// This function iterates over all of the in-scope borrows that
+    /// conflict with an access to a place, invoking the `op` callback
+    /// for each one.
     ///
     /// "Current borrow" here means a borrow that reaches the point in
     /// the control-flow where the access occurs.
     ///
-    /// The borrow's phase is represented by the ReserveOrActivateIndex
-    /// passed to the callback: one can call `is_reservation()` and
-    /// `is_activation()` to determine what phase the borrow is
-    /// currently in, when such distinction matters.
+    /// Whether a borrow is in its reservation or activation phase is
+    /// determined separately via `is_active`, when that distinction matters.
     fn each_borrow_involving_path<F>(
         &mut self,
         _context: Context,
@@ -2222,20 +2222,18 @@ fn each_borrow_involving_path<F>(
         flow_state: &Flows<'cx, 'gcx, 'tcx>,
         mut op: F,
     ) where
-        F: FnMut(&mut Self, ReserveOrActivateIndex, &BorrowData<'tcx>) -> Control,
+        F: FnMut(&mut Self, BorrowIndex, &BorrowData<'tcx>) -> Control,
     {
         let (access, place) = access_place;
 
         // FIXME: analogous code in check_loans first maps `place` to
         // its base_path.
 
-        let data = flow_state.borrows.operator().borrows();
-
         // check for loan restricting path P being used. Accounts for
         // borrows of P, P.a.b, etc.
-        let mut iter_incoming = flow_state.borrows.iter_incoming();
-        while let Some(i) = iter_incoming.next() {
-            let borrowed = &data[i.borrow_index()];
+        let borrow_set = self.borrow_set.clone();
+        for i in flow_state.borrows_in_scope() {
+            let borrowed = &borrow_set[i];
 
             if self.places_conflict(&borrowed.borrowed_place, place, access) {
                 debug!(
@@ -2249,6 +2247,65 @@ fn each_borrow_involving_path<F>(
             }
         }
     }
+
+    fn is_active(
+        &self,
+        borrow_data: &BorrowData<'tcx>,
+        location: Location
+    ) -> bool {
+        debug!("is_active(borrow_data={:?}, location={:?})", borrow_data, location);
+
+        // If this is not a 2-phase borrow, it is always active.
+        let activation_location = match borrow_data.activation_location {
+            Some(v) => v,
+            None => return true,
+        };
+
+        // Otherwise, it is active for every location *except* in between
+        // the reservation and the activation:
+        //
+        //       X
+        //      /
+        //     R      <--+ Except for this
+        //    / \        | diamond
+        //    \ /        |
+        //     A  <------+
+        //     |
+        //     Z
+        //
+        // Note that we assume that:
+        // - the reservation R dominates the activation A
+        // - the activation A post-dominates the reservation R (ignoring unwinding edges).
+        //
+        // This means that there can't be an edge that leaves A and
+        // comes back into that diamond unless it passes through R.
+        //
+        // Suboptimal: In some cases, this code walks the dominator
+        // tree twice when it only has to be walked once. I am
+        // lazy. -nmatsakis
+
+        // If dominated by the activation A, then it is active. The
+        // activation occurs upon entering the point A, so this is
+        // also true if location == activation_location.
+        if activation_location.dominates(location, &self.dominators) {
+            return true;
+        }
+
+        // The reservation starts *on exiting* the reservation block,
+        // so check if the location is dominated by R.successor. If so,
+        // this point falls in between the reservation and the activation.
+        let reserve_location = borrow_data.reserve_location.successor_within_block();
+        if reserve_location.dominates(location, &self.dominators) {
+            false
+        } else {
+            // Otherwise, this point is outside the diamond, so
+            // consider the borrow active. This could happen for
+            // example if the borrow remains active around a loop (in
+            // which case it would be active also for the point R,
+            // which would generate an error).
+            true
+        }
+    }
 }
 
 impl<'cx, 'gcx, 'tcx> MirBorrowckCtxt<'cx, 'gcx, 'tcx> {
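
The is_active check added above encodes the rule that a 2-phase borrow is
inactive exactly at the points dominated by the reservation's successor but
not by the activation. A minimal, self-contained restatement of that rule (the
names and the u32 "points" here are hypothetical; rustc uses Location and
Dominators<BasicBlock>):

    fn is_active_at(
        dominates: impl Fn(u32, u32) -> bool, // dominates(a, b): does a dominate b?
        reserve_successor: u32,
        activation: u32,
        point: u32,
    ) -> bool {
        if dominates(activation, point) {
            true // at or after the activation
        } else if dominates(reserve_successor, point) {
            false // inside the R..A "diamond"
        } else {
            true // outside the diamond (e.g. reached again around a loop)
        }
    }

    fn main() {
        // On a straight-line CFG, "a dominates b" is simply a <= b.
        let dom = |a: u32, b: u32| a <= b;
        // Reservation at point 1 (so its successor is 2), activation at point 4.
        assert!(!is_active_at(&dom, 2, 4, 3)); // between reservation and activation
        assert!(is_active_at(&dom, 2, 4, 4));  // at the activation itself
        assert!(is_active_at(&dom, 2, 4, 7));  // after the activation
    }
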
index 187bfc2bf90f926784a35c571b88d3f23dcfe1d1..d5e11a312ec26fb763f3be5dc6f8e9c5162131be 100644 (file)
@@ -10,7 +10,7 @@
 
 use borrow_check::nll::region_infer::{Cause, RegionInferenceContext};
 use borrow_check::{Context, MirBorrowckCtxt};
-use dataflow::BorrowData;
+use borrow_check::borrow_set::BorrowData;
 use rustc::mir::visit::{MirVisitable, PlaceContext, Visitor};
 use rustc::mir::{Local, Location, Mir};
 use rustc_data_structures::fx::FxHashSet;
@@ -29,82 +29,81 @@ pub(in borrow_check) fn explain_why_borrow_contains_point(
         borrow: &BorrowData<'tcx>,
         err: &mut DiagnosticBuilder<'_>,
     ) {
-        if let Some(regioncx) = &self.nonlexical_regioncx {
-            let mir = self.mir;
+        let regioncx = &self.nonlexical_regioncx;
+        let mir = self.mir;
 
-            if self.nonlexical_cause_info.is_none() {
-                self.nonlexical_cause_info = Some(regioncx.compute_causal_info(mir));
-            }
+        if self.nonlexical_cause_info.is_none() {
+            self.nonlexical_cause_info = Some(regioncx.compute_causal_info(mir));
+        }
+
+        let cause_info = self.nonlexical_cause_info.as_ref().unwrap();
+        if let Some(cause) = cause_info.why_region_contains_point(borrow.region, context.loc) {
+            match *cause.root_cause() {
+                Cause::LiveVar(local, location) => {
+                    match find_regular_use(mir, regioncx, borrow, location, local) {
+                        Some(p) => {
+                            err.span_label(
+                                mir.source_info(p).span,
+                                format!("borrow later used here"),
+                            );
+                        }
+
+                        None => {
+                            span_bug!(
+                                mir.source_info(context.loc).span,
+                                "Cause should end in a LiveVar"
+                            );
+                        }
+                    }
+                }
 
-            let cause_info = self.nonlexical_cause_info.as_ref().unwrap();
-            if let Some(cause) = cause_info.why_region_contains_point(borrow.region, context.loc) {
-                match *cause.root_cause() {
-                    Cause::LiveVar(local, location) => {
-                        match find_regular_use(mir, regioncx, borrow, location, local) {
-                            Some(p) => {
+                Cause::DropVar(local, location) => {
+                    match find_drop_use(mir, regioncx, borrow, location, local) {
+                        Some(p) => match &mir.local_decls[local].name {
+                            Some(local_name) => {
                                 err.span_label(
                                     mir.source_info(p).span,
-                                    format!("borrow later used here"),
+                                    format!(
+                                        "borrow later used here, when `{}` is dropped",
+                                        local_name
+                                    ),
                                 );
                             }
-
                             None => {
-                                span_bug!(
-                                    mir.source_info(context.loc).span,
-                                    "Cause should end in a LiveVar"
+                                err.span_label(
+                                    mir.local_decls[local].source_info.span,
+                                    "borrow may end up in a temporary, created here",
                                 );
-                            }
-                        }
-                    }
 
-                    Cause::DropVar(local, location) => {
-                        match find_drop_use(mir, regioncx, borrow, location, local) {
-                            Some(p) => match &mir.local_decls[local].name {
-                                Some(local_name) => {
-                                    err.span_label(
-                                        mir.source_info(p).span,
-                                        format!(
-                                            "borrow later used here, when `{}` is dropped",
-                                            local_name
-                                        ),
-                                    );
-                                }
-                                None => {
-                                    err.span_label(
-                                        mir.local_decls[local].source_info.span,
-                                        "borrow may end up in a temporary, created here",
-                                    );
-
-                                    err.span_label(
-                                        mir.source_info(p).span,
-                                        "temporary later dropped here, \
-                                         potentially using the reference",
-                                    );
-                                }
-                            },
-
-                            None => {
-                                span_bug!(
-                                    mir.source_info(context.loc).span,
-                                    "Cause should end in a DropVar"
+                                err.span_label(
+                                    mir.source_info(p).span,
+                                    "temporary later dropped here, \
+                                     potentially using the reference",
                                 );
                             }
-                        }
-                    }
+                        },
 
-                    Cause::UniversalRegion(region_vid) => {
-                        if let Some(region) = regioncx.to_error_region(region_vid) {
-                            self.tcx.note_and_explain_free_region(
-                                err,
-                                "borrowed value must be valid for ",
-                                region,
-                                "...",
+                        None => {
+                            span_bug!(
+                                mir.source_info(context.loc).span,
+                                "Cause should end in a DropVar"
                             );
                         }
                     }
+                }
 
-                    _ => {}
+                Cause::UniversalRegion(region_vid) => {
+                    if let Some(region) = regioncx.to_error_region(region_vid) {
+                        self.tcx.note_and_explain_free_region(
+                            err,
+                            "borrowed value must be valid for ",
+                            region,
+                            "...",
+                        );
+                    }
                 }
+
+                _ => {}
             }
         }
     }
index 942e4fb56cabc0471c3e960e939d6d9f505d0f07..3ca1bd23e86ef4c9556ebcc289dabd076a53354a 100644 (file)
@@ -8,6 +8,7 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
+use borrow_check::borrow_set::BorrowSet;
 use rustc::hir::def_id::DefId;
 use rustc::mir::{ClosureRegionRequirements, ClosureOutlivesSubject, Mir};
 use rustc::infer::InferCtxt;
@@ -73,6 +74,7 @@ pub(in borrow_check) fn compute_regions<'cx, 'gcx, 'tcx>(
     param_env: ty::ParamEnv<'gcx>,
     flow_inits: &mut FlowAtLocation<MaybeInitializedPlaces<'cx, 'gcx, 'tcx>>,
     move_data: &MoveData<'tcx>,
+    _borrow_set: &BorrowSet<'tcx>,
 ) -> (
     RegionInferenceContext<'tcx>,
     Option<ClosureRegionRequirements<'gcx>>,
index 544cb5eefc8868a86ba3a2adf9e33d56d7647e7e..acd246b7031260ad0bad7304a111fc83714ac218 100644 (file)
 use borrow_check::nll::region_infer::Cause;
 use borrow_check::nll::region_infer::ClosureRegionRequirementsExt;
 use borrow_check::nll::universal_regions::UniversalRegions;
+use dataflow::move_paths::MoveData;
 use dataflow::FlowAtLocation;
 use dataflow::MaybeInitializedPlaces;
-use dataflow::move_paths::MoveData;
 use rustc::hir::def_id::DefId;
-use rustc::infer::{InferCtxt, InferOk, InferResult, LateBoundRegionConversionTime, UnitResult};
 use rustc::infer::region_constraints::{GenericKind, RegionConstraintData};
-use rustc::traits::{self, Normalized, TraitEngine};
+use rustc::infer::{InferCtxt, InferOk, InferResult, LateBoundRegionConversionTime, UnitResult};
+use rustc::mir::tcx::PlaceTy;
+use rustc::mir::visit::{PlaceContext, Visitor};
+use rustc::mir::*;
 use rustc::traits::query::NoSolution;
+use rustc::traits::{self, Normalized, TraitEngine};
 use rustc::ty::error::TypeError;
 use rustc::ty::fold::TypeFoldable;
 use rustc::ty::{self, ToPolyTraitRef, Ty, TyCtxt, TypeVariants};
-use rustc::mir::*;
-use rustc::mir::tcx::PlaceTy;
-use rustc::mir::visit::{PlaceContext, Visitor};
 use std::fmt;
 use syntax::ast;
 use syntax_pos::{Span, DUMMY_SP};
@@ -61,8 +61,8 @@ macro_rules! span_mirbug_and_err {
     })
 }
 
-mod liveness;
 mod input_output;
+mod liveness;
 
 /// Type checks the given `mir` in the context of the inference
 /// context `infcx`. Returns any region constraints that have yet to
@@ -275,7 +275,7 @@ fn sanitize_constant(&mut self, constant: &Constant<'tcx>, location: Location) {
                         tcx.predicates_of(def_id).instantiate(tcx, substs);
                     let predicates =
                         type_checker.normalize(&instantiated_predicates.predicates, location);
-                    type_checker.prove_predicates(&predicates, location);
+                    type_checker.prove_predicates(predicates.iter().cloned(), location);
                 }
 
                 value.ty
@@ -763,9 +763,12 @@ fn check_stmt(&mut self, mir: &Mir<'tcx>, stmt: &Statement<'tcx>, location: Loca
             }
             StatementKind::UserAssertTy(ref c_ty, ref local) => {
                 let local_ty = mir.local_decls()[*local].ty;
-                let (ty, _) = self.infcx.instantiate_canonical_with_fresh_inference_vars(
-                    stmt.source_info.span, c_ty);
-                debug!("check_stmt: user_assert_ty ty={:?} local_ty={:?}", ty, local_ty);
+                let (ty, _) = self.infcx
+                    .instantiate_canonical_with_fresh_inference_vars(stmt.source_info.span, c_ty);
+                debug!(
+                    "check_stmt: user_assert_ty ty={:?} local_ty={:?}",
+                    ty, local_ty
+                );
                 if let Err(terr) = self.eq_types(ty, local_ty, location.at_self()) {
                     span_mirbug!(
                         self,
@@ -895,6 +898,11 @@ fn check_terminator(
                 let sig = self.normalize(&sig, term_location);
                 self.check_call_dest(mir, term, &sig, destination, term_location);
 
+                self.prove_predicates(
+                    sig.inputs().iter().map(|ty| ty::Predicate::WellFormed(ty)),
+                    term_location,
+                );
+
                 // The ordinary liveness rules will ensure that all
                 // regions in the type of the callee are live here. We
                 // then further constrain the late-bound regions that
@@ -1508,28 +1516,35 @@ fn prove_aggregate_predicates(
 
         let predicates = self.normalize(&instantiated_predicates.predicates, location);
         debug!("prove_aggregate_predicates: predicates={:?}", predicates);
-        self.prove_predicates(&predicates, location);
+        self.prove_predicates(predicates.iter().cloned(), location);
     }
 
     fn prove_trait_ref(&mut self, trait_ref: ty::TraitRef<'tcx>, location: Location) {
         self.prove_predicates(
-            &[
-                ty::Predicate::Trait(trait_ref.to_poly_trait_ref().to_poly_trait_predicate()),
-            ],
+            [ty::Predicate::Trait(
+                trait_ref.to_poly_trait_ref().to_poly_trait_predicate(),
+            )].iter()
+                .cloned(),
             location,
         );
     }
 
-    fn prove_predicates(&mut self, predicates: &[ty::Predicate<'tcx>], location: Location) {
+    fn prove_predicates(
+        &mut self,
+        predicates: impl IntoIterator<Item = ty::Predicate<'tcx>>,
+        location: Location,
+    ) {
+        let predicates: Vec<_> = predicates.into_iter().collect();
+
         debug!(
             "prove_predicates(predicates={:?}, location={:?})",
-            predicates, location
+            predicates,
+            location
         );
         self.fully_perform_op(location.at_self(), |this| {
             let cause = this.misc(this.last_span);
-            let obligations = predicates
-                .iter()
-                .map(|&p| traits::Obligation::new(cause.clone(), this.param_env, p))
+            let obligations = predicates.into_iter()
+                .map(|p| traits::Obligation::new(cause.clone(), this.param_env, p))
                 .collect();
             Ok(InferOk {
                 value: (),
@@ -1596,7 +1611,7 @@ fn run_pass<'a, 'tcx>(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>, src: MirSource, mir: &
 
         // When NLL is enabled, the borrow checker runs the typeck
         // itself, so we don't need this MIR pass anymore.
-        if tcx.nll() {
+        if tcx.use_mir_borrowck() {
             return;
         }
 
index 39dc29ba18b645ae00339258f987b8f16b06f329..0fe6265345de1ecfa7fd6f988dae5fb9acfa42f4 100644 (file)
@@ -259,18 +259,18 @@ pub fn closure_mapping(
 
     /// True if `r` is a member of this set of universal regions.
     pub fn is_universal_region(&self, r: RegionVid) -> bool {
-        (FIRST_GLOBAL_INDEX..self.num_universals).contains(r.index())
+        (FIRST_GLOBAL_INDEX..self.num_universals).contains(&r.index())
     }
 
     /// Classifies `r` as a universal region, returning `None` if this
     /// is not a member of this set of universal regions.
     pub fn region_classification(&self, r: RegionVid) -> Option<RegionClassification> {
         let index = r.index();
-        if (FIRST_GLOBAL_INDEX..self.first_extern_index).contains(index) {
+        if (FIRST_GLOBAL_INDEX..self.first_extern_index).contains(&index) {
             Some(RegionClassification::Global)
-        } else if (self.first_extern_index..self.first_local_index).contains(index) {
+        } else if (self.first_extern_index..self.first_local_index).contains(&index) {
             Some(RegionClassification::External)
-        } else if (self.first_local_index..self.num_universals).contains(index) {
+        } else if (self.first_local_index..self.num_universals).contains(&index) {
             Some(RegionClassification::Local)
         } else {
             None
diff --git a/src/librustc_mir/borrow_check/place_ext.rs b/src/librustc_mir/borrow_check/place_ext.rs
new file mode 100644 (file)
index 0000000..f6ffe3c
--- /dev/null
@@ -0,0 +1,60 @@
+// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+use rustc::hir;
+use rustc::mir::ProjectionElem;
+use rustc::mir::{Local, Mir, Place};
+use rustc::ty::{self, TyCtxt};
+
+/// Extension methods for the `Place` type.
+crate trait PlaceExt<'tcx> {
+    /// True if this is a deref of a raw pointer.
+    fn is_unsafe_place(&self, tcx: TyCtxt<'_, '_, 'tcx>, mir: &Mir<'tcx>) -> bool;
+
+    /// If this is a place like `x.f.g`, returns the local
+    /// `x`. Returns `None` if this is based in a static.
+    fn root_local(&self) -> Option<Local>;
+}
+
+impl<'tcx> PlaceExt<'tcx> for Place<'tcx> {
+    fn is_unsafe_place(&self, tcx: TyCtxt<'_, '_, 'tcx>, mir: &Mir<'tcx>) -> bool {
+        match self {
+            Place::Local(_) => false,
+            Place::Static(static_) => {
+                tcx.is_static(static_.def_id) == Some(hir::Mutability::MutMutable)
+            }
+            Place::Projection(proj) => match proj.elem {
+                ProjectionElem::Field(..)
+                | ProjectionElem::Downcast(..)
+                | ProjectionElem::Subslice { .. }
+                | ProjectionElem::ConstantIndex { .. }
+                | ProjectionElem::Index(_) => proj.base.is_unsafe_place(tcx, mir),
+                ProjectionElem::Deref => {
+                    let ty = proj.base.ty(mir, tcx).to_ty(tcx);
+                    match ty.sty {
+                        ty::TyRawPtr(..) => true,
+                        _ => proj.base.is_unsafe_place(tcx, mir),
+                    }
+                }
+            },
+        }
+    }
+
+    fn root_local(&self) -> Option<Local> {
+        let mut p = self;
+        loop {
+            match p {
+                Place::Projection(pi) => p = &pi.base,
+                Place::Static(_) => return None,
+                Place::Local(l) => return Some(*l),
+            }
+        }
+    }
+}
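
The new `PlaceExt::root_local` just peels projections until it reaches the base of the place. A self-contained sketch of that loop with a stand-in `DemoPlace` type (not the real `mir::Place`, whose projections carry extra data):

enum DemoPlace {
    Local(u32),
    Static,
    Projection(Box<DemoPlace>),
}

fn root_local(mut p: &DemoPlace) -> Option<u32> {
    loop {
        match p {
            // Peel one projection layer (`x.f.g` -> `x.f` -> `x`).
            DemoPlace::Projection(base) => p = &**base,
            // A place rooted in a static has no local.
            DemoPlace::Static => return None,
            DemoPlace::Local(l) => return Some(*l),
        }
    }
}

fn main() {
    let x_f_g = DemoPlace::Projection(Box::new(DemoPlace::Projection(Box::new(DemoPlace::Local(0)))));
    assert_eq!(root_local(&x_f_g), Some(0));
    assert_eq!(root_local(&DemoPlace::Static), None);
}
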
index aa991427be09397b615d9ef466536c7b8afdd5a2..a21691813a4d4bdd8f8edaa579da1430bfef6346 100644 (file)
@@ -8,31 +8,28 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
+use borrow_check::borrow_set::{BorrowSet, BorrowData};
+use borrow_check::place_ext::PlaceExt;
+
 use rustc;
 use rustc::hir;
 use rustc::hir::def_id::DefId;
 use rustc::middle::region;
 use rustc::mir::{self, Location, Place, Mir};
-use rustc::mir::visit::{PlaceContext, Visitor};
-use rustc::ty::{self, Region, TyCtxt};
+use rustc::ty::TyCtxt;
 use rustc::ty::RegionKind;
 use rustc::ty::RegionKind::ReScope;
-use rustc::util::nodemap::{FxHashMap, FxHashSet};
 
-use rustc_data_structures::bitslice::{BitwiseOperator};
-use rustc_data_structures::indexed_set::{IdxSet};
-use rustc_data_structures::indexed_vec::{Idx, IndexVec};
+use rustc_data_structures::bitslice::BitwiseOperator;
+use rustc_data_structures::indexed_set::IdxSet;
+use rustc_data_structures::indexed_vec::IndexVec;
 use rustc_data_structures::sync::Lrc;
 
 use dataflow::{BitDenotation, BlockSets, InitialFlow};
-pub use dataflow::indexes::{BorrowIndex, ReserveOrActivateIndex};
+pub use dataflow::indexes::BorrowIndex;
 use borrow_check::nll::region_infer::RegionInferenceContext;
 use borrow_check::nll::ToRegionVid;
 
-use syntax_pos::Span;
-
-use std::fmt;
-use std::hash::Hash;
 use std::rc::Rc;
 
 /// `Borrows` stores the data used in the analyses that track the flow
@@ -48,534 +45,102 @@ pub struct Borrows<'a, 'gcx: 'tcx, 'tcx: 'a> {
     scope_tree: Lrc<region::ScopeTree>,
     root_scope: Option<region::Scope>,
 
-    /// The fundamental map relating bitvector indexes to the borrows
-    /// in the MIR.
-    borrows: IndexVec<BorrowIndex, BorrowData<'tcx>>,
-
-    /// Each borrow is also uniquely identified in the MIR by the
-    /// `Location` of the assignment statement in which it appears on
-    /// the right hand side; we map each such location to the
-    /// corresponding `BorrowIndex`.
-    location_map: FxHashMap<Location, BorrowIndex>,
-
-    /// Every borrow in MIR is immediately stored into a place via an
-    /// assignment statement. This maps each such assigned place back
-    /// to its borrow-indexes.
-    assigned_map: FxHashMap<Place<'tcx>, FxHashSet<BorrowIndex>>,
-
-    /// Locations which activate borrows.
-    activation_map: FxHashMap<Location, FxHashSet<BorrowIndex>>,
-
-    /// Every borrow has a region; this maps each such regions back to
-    /// its borrow-indexes.
-    region_map: FxHashMap<Region<'tcx>, FxHashSet<BorrowIndex>>,
-
-    /// Map from local to all the borrows on that local
-    local_map: FxHashMap<mir::Local, FxHashSet<BorrowIndex>>,
-
-    /// Maps regions to their corresponding source spans
-    /// Only contains ReScope()s as keys
-    region_span_map: FxHashMap<RegionKind, Span>,
+    borrow_set: Rc<BorrowSet<'tcx>>,
 
     /// NLL region inference context with which NLL queries should be resolved
-    nonlexical_regioncx: Option<Rc<RegionInferenceContext<'tcx>>>,
-}
-
-// temporarily allow some dead fields: `kind` and `region` will be
-// needed by borrowck; `borrowed_place` will probably be a MovePathIndex when
-// that is extended to include borrowed data paths.
-#[allow(dead_code)]
-#[derive(Debug)]
-pub struct BorrowData<'tcx> {
-    /// Location where the borrow reservation starts.
-    /// In many cases, this will be equal to the activation location but not always.
-    pub(crate) reserve_location: Location,
-    /// What kind of borrow this is
-    pub(crate) kind: mir::BorrowKind,
-    /// The region for which this borrow is live
-    pub(crate) region: Region<'tcx>,
-    /// Place from which we are borrowing
-    pub(crate) borrowed_place: mir::Place<'tcx>,
-    /// Place to which the borrow was stored
-    pub(crate) assigned_place: mir::Place<'tcx>,
-}
-
-impl<'tcx> fmt::Display for BorrowData<'tcx> {
-    fn fmt(&self, w: &mut fmt::Formatter) -> fmt::Result {
-        let kind = match self.kind {
-            mir::BorrowKind::Shared => "",
-            mir::BorrowKind::Unique => "uniq ",
-            mir::BorrowKind::Mut { .. } => "mut ",
-        };
-        let region = format!("{}", self.region);
-        let region = if region.len() > 0 { format!("{} ", region) } else { region };
-        write!(w, "&{}{}{:?}", region, kind, self.borrowed_place)
-    }
-}
-
-impl ReserveOrActivateIndex {
-    fn reserved(i: BorrowIndex) -> Self { ReserveOrActivateIndex::new(i.index() * 2) }
-    fn active(i: BorrowIndex) -> Self { ReserveOrActivateIndex::new((i.index() * 2) + 1) }
-
-    pub(crate) fn is_reservation(self) -> bool { self.index() % 2 == 0 }
-    pub(crate) fn is_activation(self) -> bool { self.index() % 2 == 1}
-
-    pub(crate) fn kind(self) -> &'static str {
-        if self.is_reservation() { "reserved" } else { "active" }
-    }
-    pub(crate) fn borrow_index(self) -> BorrowIndex {
-        BorrowIndex::new(self.index() / 2)
-    }
+    nonlexical_regioncx: Rc<RegionInferenceContext<'tcx>>,
 }
 
 impl<'a, 'gcx, 'tcx> Borrows<'a, 'gcx, 'tcx> {
-    pub fn new(tcx: TyCtxt<'a, 'gcx, 'tcx>,
-               mir: &'a Mir<'tcx>,
-               nonlexical_regioncx: Option<Rc<RegionInferenceContext<'tcx>>>,
-               def_id: DefId,
-               body_id: Option<hir::BodyId>)
-               -> Self {
+    crate fn new(
+        tcx: TyCtxt<'a, 'gcx, 'tcx>,
+        mir: &'a Mir<'tcx>,
+        nonlexical_regioncx: Rc<RegionInferenceContext<'tcx>>,
+        def_id: DefId,
+        body_id: Option<hir::BodyId>,
+        borrow_set: &Rc<BorrowSet<'tcx>>
+    ) -> Self {
         let scope_tree = tcx.region_scope_tree(def_id);
         let root_scope = body_id.map(|body_id| {
             region::Scope::CallSite(tcx.hir.body(body_id).value.hir_id.local_id)
         });
-        let mut visitor = GatherBorrows {
-            tcx,
-            mir,
-            idx_vec: IndexVec::new(),
-            location_map: FxHashMap(),
-            assigned_map: FxHashMap(),
-            activation_map: FxHashMap(),
-            region_map: FxHashMap(),
-            local_map: FxHashMap(),
-            region_span_map: FxHashMap(),
-            nonlexical_regioncx: nonlexical_regioncx.clone()
-        };
-        visitor.visit_mir(mir);
-        return Borrows { tcx: tcx,
-                         mir: mir,
-                         borrows: visitor.idx_vec,
-                         scope_tree,
-                         root_scope,
-                         location_map: visitor.location_map,
-                         assigned_map: visitor.assigned_map,
-                         activation_map: visitor.activation_map,
-                         region_map: visitor.region_map,
-                         local_map: visitor.local_map,
-                         region_span_map: visitor.region_span_map,
-                         nonlexical_regioncx };
-
-        struct GatherBorrows<'a, 'gcx: 'tcx, 'tcx: 'a> {
-            tcx: TyCtxt<'a, 'gcx, 'tcx>,
-            mir: &'a Mir<'tcx>,
-            idx_vec: IndexVec<BorrowIndex, BorrowData<'tcx>>,
-            location_map: FxHashMap<Location, BorrowIndex>,
-            assigned_map: FxHashMap<Place<'tcx>, FxHashSet<BorrowIndex>>,
-            activation_map: FxHashMap<Location, FxHashSet<BorrowIndex>>,
-            region_map: FxHashMap<Region<'tcx>, FxHashSet<BorrowIndex>>,
-            local_map: FxHashMap<mir::Local, FxHashSet<BorrowIndex>>,
-            region_span_map: FxHashMap<RegionKind, Span>,
-            nonlexical_regioncx: Option<Rc<RegionInferenceContext<'tcx>>>,
-        }
-
-        impl<'a, 'gcx, 'tcx> Visitor<'tcx> for GatherBorrows<'a, 'gcx, 'tcx> {
-            fn visit_assign(&mut self,
-                            block: mir::BasicBlock,
-                            assigned_place: &mir::Place<'tcx>,
-                            rvalue: &mir::Rvalue<'tcx>,
-                            location: mir::Location) {
-                fn root_local(mut p: &mir::Place<'_>) -> Option<mir::Local> {
-                    loop { match p {
-                        mir::Place::Projection(pi) => p = &pi.base,
-                        mir::Place::Static(_) => return None,
-                        mir::Place::Local(l) => return Some(*l)
-                    }}
-                }
-
-                if let mir::Rvalue::Ref(region, kind, ref borrowed_place) = *rvalue {
-                    if is_unsafe_place(self.tcx, self.mir, borrowed_place) { return; }
-
-                    let activate_location = self.compute_activation_location(location,
-                                                                             &assigned_place,
-                                                                             region,
-                                                                             kind);
-                    let borrow = BorrowData {
-                        kind, region,
-                        reserve_location: location,
-                        borrowed_place: borrowed_place.clone(),
-                        assigned_place: assigned_place.clone(),
-                    };
-                    let idx = self.idx_vec.push(borrow);
-                    self.location_map.insert(location, idx);
-
-                    insert(&mut self.activation_map, &activate_location, idx);
-                    insert(&mut self.assigned_map, assigned_place, idx);
-                    insert(&mut self.region_map, &region, idx);
-                    if let Some(local) = root_local(borrowed_place) {
-                        insert(&mut self.local_map, &local, idx);
-                    }
-                }
-
-                return self.super_assign(block, assigned_place, rvalue, location);
-
-                fn insert<'a, K, V>(map: &'a mut FxHashMap<K, FxHashSet<V>>,
-                                    k: &K,
-                                    v: V)
-                    where K: Clone+Eq+Hash, V: Eq+Hash
-                {
-                    map.entry(k.clone())
-                        .or_insert(FxHashSet())
-                        .insert(v);
-                }
-            }
-
-            fn visit_rvalue(&mut self,
-                            rvalue: &mir::Rvalue<'tcx>,
-                            location: mir::Location) {
-                if let mir::Rvalue::Ref(region, kind, ref place) = *rvalue {
-                    // double-check that we already registered a BorrowData for this
-
-                    let mut found_it = false;
-                    for idx in &self.region_map[region] {
-                        let bd = &self.idx_vec[*idx];
-                        if bd.reserve_location == location &&
-                            bd.kind == kind &&
-                            bd.region == region &&
-                            bd.borrowed_place == *place
-                        {
-                            found_it = true;
-                            break;
-                        }
-                    }
-                    assert!(found_it, "Ref {:?} at {:?} missing BorrowData", rvalue, location);
-                }
-
-                return self.super_rvalue(rvalue, location);
-            }
-
-            fn visit_statement(&mut self,
-                               block: mir::BasicBlock,
-                               statement: &mir::Statement<'tcx>,
-                               location: Location) {
-                if let mir::StatementKind::EndRegion(region_scope) = statement.kind {
-                    self.region_span_map.insert(ReScope(region_scope), statement.source_info.span);
-                }
-                return self.super_statement(block, statement, location);
-            }
-        }
-
-        /// A MIR visitor that determines if a specific place is used in a two-phase activating
-        /// manner in a given chunk of MIR.
-        struct ContainsUseOfPlace<'b, 'tcx: 'b> {
-            target: &'b Place<'tcx>,
-            use_found: bool,
-        }
-
-        impl<'b, 'tcx: 'b> ContainsUseOfPlace<'b, 'tcx> {
-            fn new(place: &'b Place<'tcx>) -> Self {
-                Self { target: place, use_found: false }
-            }
-
-            /// return whether `context` should be considered a "use" of a
-            /// place found in that context. "Uses" activate associated
-            /// borrows (at least when such uses occur while the borrow also
-            /// has a reservation at the time).
-            fn is_potential_use(context: PlaceContext) -> bool {
-                match context {
-                    // storage effects on a place do not activate it
-                    PlaceContext::StorageLive | PlaceContext::StorageDead => false,
-
-                    // validation effects do not activate a place
-                    //
-                    // FIXME: Should they? Is it just another read? Or can we
-                    // guarantee it won't dereference the stored address? How
-                    // "deep" does validation go?
-                    PlaceContext::Validate => false,
-
-                    // FIXME: This is here to not change behaviour from before
-                    // AsmOutput existed, but it's not necessarily a pure overwrite.
-                    // so it's possible this should activate the place.
-                    PlaceContext::AsmOutput |
-                    // pure overwrites of a place do not activate it. (note
-                    // PlaceContext::Call is solely about dest place)
-                    PlaceContext::Store | PlaceContext::Call => false,
-
-                    // reads of a place *do* activate it
-                    PlaceContext::Move |
-                    PlaceContext::Copy |
-                    PlaceContext::Drop |
-                    PlaceContext::Inspect |
-                    PlaceContext::Borrow { .. } |
-                    PlaceContext::Projection(..) => true,
-                }
-            }
-        }
-
-        impl<'b, 'tcx: 'b> Visitor<'tcx> for ContainsUseOfPlace<'b, 'tcx> {
-            fn visit_place(&mut self,
-                           place: &mir::Place<'tcx>,
-                           context: PlaceContext<'tcx>,
-                           location: Location) {
-                if Self::is_potential_use(context) && place == self.target {
-                    self.use_found = true;
-                    return;
-                    // There is no need to keep checking the statement, we already found a use
-                }
-
-                self.super_place(place, context, location);
-            }
-        }
-
-        impl<'a, 'gcx, 'tcx> GatherBorrows<'a, 'gcx, 'tcx> {
-            /// Returns true if the borrow represented by `kind` is
-            /// allowed to be split into separate Reservation and
-            /// Activation phases.
-            fn allow_two_phase_borrow(&self, kind: mir::BorrowKind) -> bool {
-                self.tcx.two_phase_borrows() &&
-                    (kind.allows_two_phase_borrow() ||
-                     self.tcx.sess.opts.debugging_opts.two_phase_beyond_autoref)
-            }
-
-            /// Returns true if the given location contains an NLL-activating use of the given place
-            fn location_contains_use(&self, location: Location, place: &Place) -> bool {
-                let mut use_checker = ContainsUseOfPlace::new(place);
-                let block = &self.mir.basic_blocks().get(location.block).unwrap_or_else(|| {
-                    panic!("could not find block at location {:?}", location);
-                });
-                if location.statement_index != block.statements.len() {
-                    // This is a statement
-                    let stmt = block.statements.get(location.statement_index).unwrap_or_else(|| {
-                        panic!("could not find statement at location {:?}");
-                    });
-                    use_checker.visit_statement(location.block, stmt, location);
-                } else {
-                    // This is a terminator
-                    match block.terminator {
-                        Some(ref term) => {
-                            use_checker.visit_terminator(location.block, term, location);
-                        }
-                        None => {
-                            // There is no way for Place to be used by the terminator if there is no
-                            // terminator
-                        }
-                    }
-                }
-
-                use_checker.use_found
-            }
-
-            /// Determines if the provided region is terminated after the provided location.
-            /// EndRegion statements terminate their enclosed region::Scope.
-            /// We also consult with the NLL region inference engine, should one be available
-            fn region_terminated_after(&self, region: Region<'tcx>, location: Location) -> bool {
-                let block_data = &self.mir[location.block];
-                if location.statement_index != block_data.statements.len() {
-                    let stmt = &block_data.statements[location.statement_index];
-                    if let mir::StatementKind::EndRegion(region_scope) = stmt.kind {
-                        if &ReScope(region_scope) == region {
-                            // We encountered an EndRegion statement that terminates the provided
-                            // region
-                            return true;
-                        }
-                    }
-                }
-                if let Some(ref regioncx) = self.nonlexical_regioncx {
-                    if !regioncx.region_contains_point(region, location) {
-                        // NLL says the region has ended already
-                        return true;
-                    }
-                }
-
-                false
-            }
-
-            /// Computes the activation location of a borrow.
-            /// The general idea is to start at the beginning of the region and perform a DFS
-            /// until we exit the region, either via an explicit EndRegion or because NLL tells
-            /// us so. If we find more than one valid activation point, we currently panic the
-            /// compiler since two-phase borrows are only currently supported for compiler-
-            /// generated code. More precisely, we only allow two-phase borrows for:
-            ///   - Function calls (fn some_func(&mut self, ....))
-            ///   - *Assign operators (a += b -> fn add_assign(&mut self, other: Self))
-            /// See
-            ///   - https://github.com/rust-lang/rust/issues/48431
-            /// for detailed design notes.
-            /// See the FIXME in the body of the function for notes on extending support to more
-            /// general two-phased borrows.
-            fn compute_activation_location(&self,
-                                           start_location: Location,
-                                           assigned_place: &mir::Place<'tcx>,
-                                           region: Region<'tcx>,
-                                           kind: mir::BorrowKind) -> Location {
-                debug!("Borrows::compute_activation_location({:?}, {:?}, {:?})",
-                       start_location,
-                       assigned_place,
-                       region);
-                if !self.allow_two_phase_borrow(kind) {
-                    debug!("  -> {:?}", start_location);
-                    return start_location;
-                }
-
-                // Perform the DFS.
-                // `stack` is the stack of locations still under consideration
-                // `visited` is the set of points we have already visited
-                // `found_use` is an Option that becomes Some when we find a use
-                let mut stack = vec![start_location];
-                let mut visited = FxHashSet();
-                let mut found_use = None;
-                while let Some(curr_loc) = stack.pop() {
-                    let block_data = &self.mir.basic_blocks()
-                        .get(curr_loc.block)
-                        .unwrap_or_else(|| {
-                            panic!("could not find block at location {:?}", curr_loc);
-                        });
-
-                    if self.region_terminated_after(region, curr_loc) {
-                        // No need to process this statement.
-                        // It's either an EndRegion (and thus couldn't use assigned_place) or not
-                        // contained in the NLL region and thus a use would be invalid
-                        continue;
-                    }
-
-                    if !visited.insert(curr_loc) {
-                        debug!("  Already visited {:?}", curr_loc);
-                        continue;
-                    }
-
-                    if self.location_contains_use(curr_loc, assigned_place) {
-                        // FIXME: Handle this case a little more gracefully. Perhaps collect
-                        // all uses in a vector, and find the point in the CFG that dominates
-                        // all of them?
-                        // Right now this is sufficient though since there should only be exactly
-                        // one borrow-activating use of the borrow.
-                        assert!(found_use.is_none(), "Found secondary use of place");
-                        found_use = Some(curr_loc);
-                    }
-
-                    // Push the points we should consider next.
-                    if curr_loc.statement_index < block_data.statements.len() {
-                        stack.push(curr_loc.successor_within_block());
-                    } else {
-                        stack.extend(block_data.terminator().successors().iter().map(
-                            |&basic_block| {
-                                Location {
-                                    statement_index: 0,
-                                    block: basic_block
-                                }
-                            }
-                        ))
-                    }
-                }
 
-                let found_use = found_use.expect("Did not find use of two-phase place");
-                debug!("  -> {:?}", found_use);
-                found_use
-            }
-        }
-    }
-
-    /// Returns the span for the "end point" given region. This will
-    /// return `None` if NLL is enabled, since that concept has no
-    /// meaning there.  Otherwise, return region span if it exists and
-    /// span for end of the function if it doesn't exist.
-    pub(crate) fn opt_region_end_span(&self, region: &Region) -> Option<Span> {
-        match self.nonlexical_regioncx {
-            Some(_) => None,
-            None => {
-                match self.region_span_map.get(region) {
-                    Some(span) => Some(self.tcx.sess.codemap().end_point(*span)),
-                    None => Some(self.tcx.sess.codemap().end_point(self.mir.span))
-                }
-            }
+        Borrows {
+            tcx: tcx,
+            mir: mir,
+            borrow_set: borrow_set.clone(),
+            scope_tree,
+            root_scope,
+            nonlexical_regioncx,
         }
     }
 
-    pub fn borrows(&self) -> &IndexVec<BorrowIndex, BorrowData<'tcx>> { &self.borrows }
+    crate fn borrows(&self) -> &IndexVec<BorrowIndex, BorrowData<'tcx>> { &self.borrow_set.borrows }
 
     pub fn scope_tree(&self) -> &Lrc<region::ScopeTree> { &self.scope_tree }
 
     pub fn location(&self, idx: BorrowIndex) -> &Location {
-        &self.borrows[idx].reserve_location
+        &self.borrow_set.borrows[idx].reserve_location
     }
 
     /// Add all borrows to the kill set, if those borrows are out of scope at `location`.
     /// That means either they went out of a nonlexical scope, if we care about those
     /// at the moment, or the location represents a lexical EndRegion
     fn kill_loans_out_of_scope_at_location(&self,
-                                           sets: &mut BlockSets<ReserveOrActivateIndex>,
+                                           sets: &mut BlockSets<BorrowIndex>,
                                            location: Location) {
-        if let Some(ref regioncx) = self.nonlexical_regioncx {
-            // NOTE: The state associated with a given `location`
-            // reflects the dataflow on entry to the statement. If it
-            // does not contain `borrow_region`, then that means
-            // that the statement at `location` kills the borrow.
-            //
-            // We are careful always to call this function *before* we
-            // set up the gen-bits for the statement or
-            // terminator. That way, if the effect of the statement or
-            // terminator *does* introduce a new loan of the same
-            // region, then setting that gen-bit will override any
-            // potential kill introduced here.
-            for (borrow_index, borrow_data) in self.borrows.iter_enumerated() {
-                let borrow_region = borrow_data.region.to_region_vid();
-                if !regioncx.region_contains_point(borrow_region, location) {
-                    sets.kill(&ReserveOrActivateIndex::reserved(borrow_index));
-                    sets.kill(&ReserveOrActivateIndex::active(borrow_index));
-                }
+        let regioncx = &self.nonlexical_regioncx;
+
+        // NOTE: The state associated with a given `location`
+        // reflects the dataflow on entry to the statement. If it
+        // does not contain `borrow_region`, then that means
+        // that the statement at `location` kills the borrow.
+        //
+        // We are careful always to call this function *before* we
+        // set up the gen-bits for the statement or
+        // terminator. That way, if the effect of the statement or
+        // terminator *does* introduce a new loan of the same
+        // region, then setting that gen-bit will override any
+        // potential kill introduced here.
+        for (borrow_index, borrow_data) in self.borrow_set.borrows.iter_enumerated() {
+            let borrow_region = borrow_data.region.to_region_vid();
+            if !regioncx.region_contains_point(borrow_region, location) {
+                sets.kill(&borrow_index);
             }
         }
     }
 
     fn kill_borrows_on_local(&self,
-                             sets: &mut BlockSets<ReserveOrActivateIndex>,
+                             sets: &mut BlockSets<BorrowIndex>,
                              local: &rustc::mir::Local)
     {
-        if let Some(borrow_indexes) = self.local_map.get(local) {
-            sets.kill_all(borrow_indexes.iter()
-                          .map(|b| ReserveOrActivateIndex::reserved(*b)));
-            sets.kill_all(borrow_indexes.iter()
-                          .map(|b| ReserveOrActivateIndex::active(*b)));
-        }
-    }
-
-    /// Performs the activations for a given location
-    fn perform_activations_at_location(&self,
-                                       sets: &mut BlockSets<ReserveOrActivateIndex>,
-                                       location: Location) {
-        // Handle activations
-        match self.activation_map.get(&location) {
-            Some(activations) => {
-                for activated in activations {
-                    debug!("activating borrow {:?}", activated);
-                    sets.gen(&ReserveOrActivateIndex::active(*activated))
-                }
-            }
-            None => {}
+        if let Some(borrow_indexes) = self.borrow_set.local_map.get(local) {
+            sets.kill_all(borrow_indexes);
         }
     }
 }
 
 impl<'a, 'gcx, 'tcx> BitDenotation for Borrows<'a, 'gcx, 'tcx> {
-    type Idx = ReserveOrActivateIndex;
+    type Idx = BorrowIndex;
     fn name() -> &'static str { "borrows" }
     fn bits_per_block(&self) -> usize {
-        self.borrows.len() * 2
+        self.borrow_set.borrows.len() * 2
     }
 
-    fn start_block_effect(&self, _entry_set: &mut IdxSet<ReserveOrActivateIndex>) {
+    fn start_block_effect(&self, _entry_set: &mut IdxSet<BorrowIndex>) {
         // no borrows of code region_scopes have been taken prior to
         // function execution, so this method has no effect on
         // `_sets`.
     }
 
     fn before_statement_effect(&self,
-                               sets: &mut BlockSets<ReserveOrActivateIndex>,
+                               sets: &mut BlockSets<BorrowIndex>,
                                location: Location) {
         debug!("Borrows::before_statement_effect sets: {:?} location: {:?}", sets, location);
         self.kill_loans_out_of_scope_at_location(sets, location);
     }
 
-    fn statement_effect(&self, sets: &mut BlockSets<ReserveOrActivateIndex>, location: Location) {
+    fn statement_effect(&self, sets: &mut BlockSets<BorrowIndex>, location: Location) {
         debug!("Borrows::statement_effect sets: {:?} location: {:?}", sets, location);
 
         let block = &self.mir.basic_blocks().get(location.block).unwrap_or_else(|| {
@@ -585,21 +150,10 @@ fn statement_effect(&self, sets: &mut BlockSets<ReserveOrActivateIndex>, locatio
             panic!("could not find statement at location {:?}");
         });
 
-        self.perform_activations_at_location(sets, location);
         self.kill_loans_out_of_scope_at_location(sets, location);
 
         match stmt.kind {
-            // EndRegion kills any borrows (reservations and active borrows both)
-            mir::StatementKind::EndRegion(region_scope) => {
-                if let Some(borrow_indexes) = self.region_map.get(&ReScope(region_scope)) {
-                    assert!(self.nonlexical_regioncx.is_none());
-                    for idx in borrow_indexes {
-                        sets.kill(&ReserveOrActivateIndex::reserved(*idx));
-                        sets.kill(&ReserveOrActivateIndex::active(*idx));
-                    }
-                } else {
-                    // (if there is no entry, then there are no borrows to be tracked)
-                }
+            mir::StatementKind::EndRegion(_) => {
             }
 
             mir::StatementKind::Assign(ref lhs, ref rhs) => {
@@ -617,22 +171,22 @@ fn statement_effect(&self, sets: &mut BlockSets<ReserveOrActivateIndex>, locatio
                 // propagate_call_return method.
 
                 if let mir::Rvalue::Ref(region, _, ref place) = *rhs {
-                    if is_unsafe_place(self.tcx, self.mir, place) { return; }
-                    let index = self.location_map.get(&location).unwrap_or_else(|| {
+                    if place.is_unsafe_place(self.tcx, self.mir) { return; }
+                    let index = self.borrow_set.location_map.get(&location).unwrap_or_else(|| {
                         panic!("could not find BorrowIndex for location {:?}", location);
                     });
 
                     if let RegionKind::ReEmpty = region {
                         // If the borrowed value dies before the borrow is used, the region for
                         // the borrow can be empty. Don't track the borrow in that case.
-                        sets.kill(&ReserveOrActivateIndex::active(*index));
+                        sets.kill(&index);
                         return
                     }
 
-                    assert!(self.region_map.get(region).unwrap_or_else(|| {
+                    assert!(self.borrow_set.region_map.get(region).unwrap_or_else(|| {
                         panic!("could not find BorrowIndexs for region {:?}", region);
                     }).contains(&index));
-                    sets.gen(&ReserveOrActivateIndex::reserved(*index));
+                    sets.gen(&index);
 
                     // Issue #46746: Two-phase borrows handles
                     // stmts of form `Tmp = &mut Borrow` ...
@@ -643,7 +197,7 @@ fn statement_effect(&self, sets: &mut BlockSets<ReserveOrActivateIndex>, locatio
                             // e.g. `box (&mut _)`. Current
                             // conservative solution: force
                             // immediate activation here.
-                            sets.gen(&ReserveOrActivateIndex::active(*index));
+                            sets.gen(&index);
                         }
                     }
                 }
@@ -679,13 +233,13 @@ fn statement_effect(&self, sets: &mut BlockSets<ReserveOrActivateIndex>, locatio
     }
 
     fn before_terminator_effect(&self,
-                                sets: &mut BlockSets<ReserveOrActivateIndex>,
+                                sets: &mut BlockSets<BorrowIndex>,
                                 location: Location) {
         debug!("Borrows::before_terminator_effect sets: {:?} location: {:?}", sets, location);
         self.kill_loans_out_of_scope_at_location(sets, location);
     }
 
-    fn terminator_effect(&self, sets: &mut BlockSets<ReserveOrActivateIndex>, location: Location) {
+    fn terminator_effect(&self, sets: &mut BlockSets<BorrowIndex>, location: Location) {
         debug!("Borrows::terminator_effect sets: {:?} location: {:?}", sets, location);
 
         let block = &self.mir.basic_blocks().get(location.block).unwrap_or_else(|| {
@@ -693,7 +247,6 @@ fn terminator_effect(&self, sets: &mut BlockSets<ReserveOrActivateIndex>, locati
         });
 
         let term = block.terminator();
-        self.perform_activations_at_location(sets, location);
         self.kill_loans_out_of_scope_at_location(sets, location);
 
 
@@ -707,7 +260,7 @@ fn terminator_effect(&self, sets: &mut BlockSets<ReserveOrActivateIndex>, locati
                 // and hence most of these loans will already be dead -- but, in some cases
                 // like unwind paths, we do not always emit `EndRegion` statements, so we
                 // add some kills here as a "backup" and to avoid spurious error messages.
-                for (borrow_index, borrow_data) in self.borrows.iter_enumerated() {
+                for (borrow_index, borrow_data) in self.borrow_set.borrows.iter_enumerated() {
                     if let ReScope(scope) = borrow_data.region {
                         // Check that the scope is not actually a scope from a function that is
                         // a parent of our closure. Note that the CallSite scope itself is
@@ -716,8 +269,7 @@ fn terminator_effect(&self, sets: &mut BlockSets<ReserveOrActivateIndex>, locati
                             if *scope != root_scope &&
                                 self.scope_tree.is_subscope_of(*scope, root_scope)
                             {
-                                sets.kill(&ReserveOrActivateIndex::reserved(borrow_index));
-                                sets.kill(&ReserveOrActivateIndex::active(borrow_index));
+                                sets.kill(&borrow_index);
                             }
                         }
                     }
@@ -738,7 +290,7 @@ fn terminator_effect(&self, sets: &mut BlockSets<ReserveOrActivateIndex>, locati
     }
 
     fn propagate_call_return(&self,
-                             _in_out: &mut IdxSet<ReserveOrActivateIndex>,
+                             _in_out: &mut IdxSet<BorrowIndex>,
                              _call_bb: mir::BasicBlock,
                              _dest_bb: mir::BasicBlock,
                              _dest_place: &mir::Place) {
@@ -764,34 +316,3 @@ fn bottom_value() -> bool {
     }
 }
 
-fn is_unsafe_place<'a, 'gcx: 'tcx, 'tcx: 'a>(
-    tcx: TyCtxt<'a, 'gcx, 'tcx>,
-    mir: &'a Mir<'tcx>,
-    place: &mir::Place<'tcx>
-) -> bool {
-    use self::mir::Place::*;
-    use self::mir::ProjectionElem;
-
-    match *place {
-        Local(_) => false,
-        Static(ref static_) => tcx.is_static(static_.def_id) == Some(hir::Mutability::MutMutable),
-        Projection(ref proj) => {
-            match proj.elem {
-                ProjectionElem::Field(..) |
-                ProjectionElem::Downcast(..) |
-                ProjectionElem::Subslice { .. } |
-                ProjectionElem::ConstantIndex { .. } |
-                ProjectionElem::Index(_) => {
-                    is_unsafe_place(tcx, mir, &proj.base)
-                }
-                ProjectionElem::Deref => {
-                    let ty = proj.base.ty(mir, tcx).to_ty(tcx);
-                    match ty.sty {
-                        ty::TyRawPtr(..) => true,
-                        _ => is_unsafe_place(tcx, mir, &proj.base),
-                    }
-                }
-            }
-        }
-    }
-}
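
With `BorrowSet` factored out and the reserved/active split gone, the transfer functions of `Borrows` reduce to: kill every borrow whose region no longer contains the current location, then gen the borrow introduced by an `Rvalue::Ref`. A toy sketch of that gen/kill shape (not the rustc dataflow framework; `BlockSets` here is a hypothetical stand-in built on `HashSet`):

use std::collections::HashSet;

struct BlockSets {
    gen_set: HashSet<usize>,
    kill_set: HashSet<usize>,
}

impl BlockSets {
    fn generate(&mut self, idx: usize) {
        self.kill_set.remove(&idx);
        self.gen_set.insert(idx);
    }
    fn kill(&mut self, idx: usize) {
        self.gen_set.remove(&idx);
        self.kill_set.insert(idx);
    }
}

// Kills are applied before gens, so a statement that reintroduces a loan of
// the same region overrides the kill, as the NOTE in
// `kill_loans_out_of_scope_at_location` explains.
fn statement_effect(sets: &mut BlockSets, out_of_scope: &[usize], new_borrow: Option<usize>) {
    for &idx in out_of_scope {
        sets.kill(idx);
    }
    if let Some(idx) = new_borrow {
        sets.generate(idx);
    }
}
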
index 287640439c0e8f9ab39216066e8e64eb38bc64d6..f64fd64b283ea5ea63ad992123d702695f28bffa 100644 (file)
@@ -19,6 +19,7 @@
 use rustc_data_structures::indexed_vec::Idx;
 
 use super::MoveDataParamEnv;
+
 use util::elaborate_drops::DropFlagState;
 
 use super::move_paths::{HasMoveData, MoveData, MoveOutIndex, MovePathIndex, InitIndex};
@@ -37,7 +38,6 @@
 
 pub use self::borrowed_locals::*;
 
-#[allow(dead_code)]
 pub(super) mod borrows;
 
 /// `MaybeInitializedPlaces` tracks all places that might be
index 454b6cbd27df1e93b20952c3964b32ef8507d02a..85458c7d6848848174a9eab94fb1ea47bf610a8a 100644 (file)
@@ -29,9 +29,8 @@
 pub use self::impls::{MaybeInitializedPlaces, MaybeUninitializedPlaces};
 pub use self::impls::{DefinitelyInitializedPlaces, MovingOutStatements};
 pub use self::impls::EverInitializedPlaces;
-pub use self::impls::borrows::{Borrows, BorrowData};
+pub use self::impls::borrows::Borrows;
 pub use self::impls::HaveBeenBorrowedLocals;
-pub(crate) use self::impls::borrows::{ReserveOrActivateIndex};
 pub use self::at_location::{FlowAtLocation, FlowsAtLocation};
 pub(crate) use self::drop_flag_effects::*;
 
index 9f6cf8c036e19a686e44febe3a783e252f8dd44c..3a49e28f04187dbaf71b8e4e1bf7cd0cd1421f45 100644 (file)
@@ -65,9 +65,6 @@ fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
 
     /// Index into Borrows.locations
     new_index!(BorrowIndex, "bw");
-
-    /// Index into Reservations/Activations bitvector
-    new_index!(ReserveOrActivateIndex, "ra");
 }
 
 pub use self::indexes::MovePathIndex;
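
The deleted `ReserveOrActivateIndex` interleaved two dataflow bits per borrow in one index space: even slots for reservations, odd slots for activations. Since the borrows dataflow above now tracks a single bit per `BorrowIndex`, that encoding disappears; reduced to free functions for illustration (names are ad hoc):

fn reserved(borrow_index: usize) -> usize { borrow_index * 2 }
fn active(borrow_index: usize) -> usize { borrow_index * 2 + 1 }
fn is_reservation(idx: usize) -> bool { idx % 2 == 0 }
fn is_activation(idx: usize) -> bool { idx % 2 == 1 }
fn borrow_index(idx: usize) -> usize { idx / 2 }
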
index 57977b6201a61095e384f81ab1e758890651bdbf..954a3dbe5b9ab0e78224cb1a508925810b4606a5 100644 (file)
@@ -5,7 +5,6 @@
 use rustc::ty::{self, TyCtxt, Ty, Instance};
 use rustc::ty::layout::{self, LayoutOf};
 use rustc::ty::subst::Subst;
-use rustc::util::nodemap::FxHashSet;
 
 use syntax::ast::Mutability;
 use syntax::codemap::Span;
@@ -110,53 +109,38 @@ fn eval_body_and_ecx<'a, 'mir, 'tcx>(
         }
         span = mir.span;
         let layout = ecx.layout_of(mir.return_ty().subst(tcx, cid.instance.substs))?;
-        let alloc = tcx.interpret_interner.get_cached(cid.instance.def_id());
-        let is_static = tcx.is_static(cid.instance.def_id()).is_some();
-        let alloc = match alloc {
-            Some(alloc) => {
-                assert!(cid.promoted.is_none());
-                assert!(param_env.caller_bounds.is_empty());
-                alloc
-            },
-            None => {
-                assert!(!layout.is_unsized());
-                let ptr = ecx.memory.allocate(
-                    layout.size.bytes(),
-                    layout.align,
-                    None,
-                )?;
-                if is_static {
-                    tcx.interpret_interner.cache(cid.instance.def_id(), ptr.alloc_id);
-                }
-                let internally_mutable = !layout.ty.is_freeze(tcx, param_env, mir.span);
-                let mutability = tcx.is_static(cid.instance.def_id());
-                let mutability = if mutability == Some(hir::Mutability::MutMutable) || internally_mutable {
-                    Mutability::Mutable
-                } else {
-                    Mutability::Immutable
-                };
-                let cleanup = StackPopCleanup::MarkStatic(mutability);
-                let name = ty::tls::with(|tcx| tcx.item_path_str(cid.instance.def_id()));
-                let prom = cid.promoted.map_or(String::new(), |p| format!("::promoted[{:?}]", p));
-                trace!("const_eval: pushing stack frame for global: {}{}", name, prom);
-                assert!(mir.arg_count == 0);
-                ecx.push_stack_frame(
-                    cid.instance,
-                    mir.span,
-                    mir,
-                    Place::from_ptr(ptr, layout.align),
-                    cleanup,
-                )?;
-
-                while ecx.step()? {}
-                ptr.alloc_id
-            }
+        assert!(!layout.is_unsized());
+        let ptr = ecx.memory.allocate(
+            layout.size.bytes(),
+            layout.align,
+            None,
+        )?;
+        let internally_mutable = !layout.ty.is_freeze(tcx, param_env, mir.span);
+        let mutability = tcx.is_static(cid.instance.def_id());
+        let mutability = if mutability == Some(hir::Mutability::MutMutable) || internally_mutable {
+            Mutability::Mutable
+        } else {
+            Mutability::Immutable
         };
-        let ptr = MemoryPointer::new(alloc, 0).into();
+        let cleanup = StackPopCleanup::MarkStatic(mutability);
+        let name = ty::tls::with(|tcx| tcx.item_path_str(cid.instance.def_id()));
+        let prom = cid.promoted.map_or(String::new(), |p| format!("::promoted[{:?}]", p));
+        trace!("const_eval: pushing stack frame for global: {}{}", name, prom);
+        assert!(mir.arg_count == 0);
+        ecx.push_stack_frame(
+            cid.instance,
+            mir.span,
+            mir,
+            Place::from_ptr(ptr, layout.align),
+            cleanup,
+        )?;
+
+        while ecx.step()? {}
+        let ptr = ptr.into();
         // always try to read the value and report errors
         let value = match ecx.try_read_value(ptr, layout.align, layout.ty)? {
             // if it's a constant (so it needs no address, directly compute its value)
-            Some(val) if !is_static => val,
+            Some(val) if tcx.is_static(cid.instance.def_id()).is_none() => val,
             // point at the allocation
             _ => Value::ByRef(ptr, layout.align),
         };
@@ -340,21 +324,10 @@ fn init_static<'a>(
         ecx: &mut EvalContext<'a, 'mir, 'tcx, Self>,
         cid: GlobalId<'tcx>,
     ) -> EvalResult<'tcx, AllocId> {
-        let alloc = ecx
-                    .tcx
-                    .interpret_interner
-                    .get_cached(cid.instance.def_id());
-        // Don't evaluate when already cached to prevent cycles
-        if let Some(alloc) = alloc {
-            return Ok(alloc)
-        }
-        // ensure the static is computed
-        ecx.const_eval(cid)?;
         Ok(ecx
             .tcx
             .interpret_interner
-            .get_cached(cid.instance.def_id())
-            .expect("uncached static"))
+            .cache_static(cid.instance.def_id()))
     }
 
     fn box_alloc<'a>(
@@ -460,16 +433,7 @@ pub fn const_eval_provider<'a, 'tcx>(
     let def_id = cid.instance.def.def_id();
 
     if tcx.is_foreign_item(def_id) {
-        let id = tcx.interpret_interner.get_cached(def_id);
-        let id = match id {
-            // FIXME: due to caches this shouldn't happen, add some assertions
-            Some(id) => id,
-            None => {
-                let id = tcx.interpret_interner.reserve();
-                tcx.interpret_interner.cache(def_id, id);
-                id
-            },
-        };
+        let id = tcx.interpret_interner.cache_static(def_id);
         let ty = tcx.type_of(def_id);
         let layout = tcx.layout_of(key.param_env.and(ty)).unwrap();
         let ptr = MemoryPointer::new(id, 0);
@@ -505,13 +469,7 @@ pub fn const_eval_provider<'a, 'tcx>(
     };
 
     let (res, ecx) = eval_body_and_ecx(tcx, cid, None, key.param_env);
-    res.map(|(miri_value, ptr, miri_ty)| {
-        if tcx.is_static(def_id).is_some() {
-            if let Ok(ptr) = ptr.primval.to_ptr() {
-                let mut seen = FxHashSet::default();
-                create_depgraph_edges(tcx, ptr.alloc_id, &mut seen);
-            }
-        }
+    res.map(|(miri_value, _, miri_ty)| {
         tcx.mk_const(ty::Const {
             val: ConstVal::Value(miri_value),
             ty: miri_ty,
@@ -528,35 +486,3 @@ pub fn const_eval_provider<'a, 'tcx>(
         }
     })
 }
-
-// This function creates dep graph edges from statics to all referred to statics.
-// This is necessary, because the `const_eval` query cannot directly call itself
-// for other statics, because we cannot prevent recursion in queries.
-//
-// see test/incremental/static_refering_to_other_static2/issue.rs for an example
-// where not creating those edges would cause static A, which refers to static B
-// to point to the old allocation of static B, even though B has changed.
-//
-// In the future we will want to remove this function in favour of a system that
-// makes sure that statics don't need to have edges to other statics as long as
-// they are only referring by reference and not inspecting the other static's body.
-fn create_depgraph_edges<'a, 'tcx>(
-    tcx: TyCtxt<'a, 'tcx, 'tcx>,
-    alloc_id: AllocId,
-    seen: &mut FxHashSet<AllocId>,
-) {
-    trace!("create_depgraph_edges: {:?}, {:?}", alloc_id, seen);
-    if seen.insert(alloc_id) {
-        trace!("seen: {:?}, {:?}", alloc_id, seen);
-        if let Some(alloc) = tcx.interpret_interner.get_alloc(alloc_id) {
-            trace!("get_alloc: {:?}, {:?}, {:?}", alloc_id, seen, alloc);
-            for (_, &reloc) in &alloc.relocations {
-                if let Some(did) = tcx.interpret_interner.get_corresponding_static_def_id(reloc) {
-                    trace!("get_corresponding: {:?}, {:?}, {:?}, {:?}, {:?}", alloc_id, seen, alloc, did, reloc);
-                    let _ = tcx.maybe_optimized_mir(did);
-                }
-                create_depgraph_edges(tcx, reloc, seen);
-            }
-        }
-    }
-}
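
The `get_cached` / `reserve` / `cache` sequences above collapse into a single `cache_static` call. The real interner lives inside rustc, but the pattern it replaces is an ordinary get-or-allocate lookup; a standalone sketch under that assumption (the `Interner` type and `u64` ids here are made up for illustration):

use std::collections::HashMap;

struct Interner {
    next_id: u64,
    static_ids: HashMap<u64, u64>,
}

impl Interner {
    /// Return the allocation id for a static's def id, allocating one on first use.
    fn cache_static(&mut self, def_id: u64) -> u64 {
        let next_id = &mut self.next_id;
        *self.static_ids.entry(def_id).or_insert_with(|| {
            let id = *next_id;
            *next_id += 1;
            id
        })
    }
}
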
index 58ea8d48e97114fb23ac49b6243d8b78fe527aa3..03785f9623b8c0b6925d15f77556cd3f6b57add1 100644 (file)
@@ -45,7 +45,7 @@ pub struct EvalContext<'a, 'mir, 'tcx: 'a + 'mir, M: Machine<'mir, 'tcx>> {
     /// The maximum number of terminators that may be evaluated.
     /// This prevents infinite loops and huge computations from freezing up const eval.
     /// Remove once halting problem is solved.
-    pub(crate) steps_remaining: usize,
+    pub(crate) terminators_remaining: usize,
 }
 
 /// A stack frame.
@@ -195,7 +195,7 @@ pub fn new(
             memory: Memory::new(tcx, memory_data),
             stack: Vec::new(),
             stack_limit: tcx.sess.const_eval_stack_frame_limit,
-            steps_remaining: tcx.sess.const_eval_step_limit,
+            terminators_remaining: 1_000_000,
         }
     }
 
@@ -538,7 +538,7 @@ pub(super) fn eval_rvalue_into_place(
             }
 
             Aggregate(ref kind, ref operands) => {
-                self.inc_step_counter_and_check_limit(operands.len())?;
+                self.inc_step_counter_and_check_limit(operands.len());
 
                 let (dest, active_field_index) = match **kind {
                     mir::AggregateKind::Adt(adt_def, variant_index, _, active_field_index) => {
@@ -938,16 +938,14 @@ pub fn write_discriminant_value(
     }
 
     pub fn read_global_as_value(&self, gid: GlobalId<'tcx>, ty: Ty<'tcx>) -> EvalResult<'tcx, Value> {
-        if gid.promoted.is_none() {
-            let cached = self
+        if self.tcx.is_static(gid.instance.def_id()).is_some() {
+            let alloc_id = self
                 .tcx
                 .interpret_interner
-                .get_cached(gid.instance.def_id());
-            if let Some(alloc_id) = cached {
-                let layout = self.layout_of(ty)?;
-                let ptr = MemoryPointer::new(alloc_id, 0);
-                return Ok(Value::ByRef(ptr.into(), layout.align))
-            }
+                .cache_static(gid.instance.def_id());
+            let layout = self.layout_of(ty)?;
+            let ptr = MemoryPointer::new(alloc_id, 0);
+            return Ok(Value::ByRef(ptr.into(), layout.align))
         }
         let cv = self.const_eval(gid)?;
         self.const_to_value(&cv.val, ty)
index a22572ec687c3cf05a90177aa0b41d96cb585143..554d87a04e2f815f5da33775b42588d653602f7e 100644 (file)
@@ -8,12 +8,12 @@
 use super::{EvalContext, Machine};
 
 impl<'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M> {
-    pub fn inc_step_counter_and_check_limit(&mut self, n: usize) -> EvalResult<'tcx> {
-        self.steps_remaining = self.steps_remaining.saturating_sub(n);
-        if self.steps_remaining > 0 {
-            Ok(())
-        } else {
-            err!(ExecutionTimeLimitReached)
+    pub fn inc_step_counter_and_check_limit(&mut self, n: usize) {
+        self.terminators_remaining = self.terminators_remaining.saturating_sub(n);
+        if self.terminators_remaining == 0 {
+            // FIXME(#49980): make this warning a lint
+            self.tcx.sess.span_warn(self.frame().span, "Constant evaluating a complex constant, this might take some time");
+            self.terminators_remaining = 1_000_000;
         }
     }
 
@@ -36,7 +36,7 @@ pub fn step(&mut self) -> EvalResult<'tcx, bool> {
             return Ok(true);
         }
 
-        self.inc_step_counter_and_check_limit(1)?;
+        self.inc_step_counter_and_check_limit(1);
 
         let terminator = basic_block.terminator();
         assert_eq!(old_frames, self.cur_frame());
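
The limit handling in this file changes from a hard `ExecutionTimeLimitReached` error to a periodic warning: the counter saturates toward zero, warns when it reaches zero, then refills. Reduced to a standalone sketch, with `eprintln!` standing in for the session diagnostics:

struct StepCounter {
    terminators_remaining: usize,
}

impl StepCounter {
    fn inc_step_counter_and_check_limit(&mut self, n: usize) {
        self.terminators_remaining = self.terminators_remaining.saturating_sub(n);
        if self.terminators_remaining == 0 {
            // Formerly an err!(ExecutionTimeLimitReached); now only a warning.
            eprintln!("warning: constant evaluating a complex constant, this might take some time");
            self.terminators_remaining = 1_000_000;
        }
    }
}
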
index a701fe3144266766d32bb8212a29f969605e274d..de3063a575673bc8b6f88e5f7fe5d78b5624f014 100644 (file)
@@ -20,6 +20,7 @@
 #![feature(box_patterns)]
 #![feature(box_syntax)]
 #![feature(catch_expr)]
+#![feature(crate_visibility_modifier)]
 #![feature(const_fn)]
 #![feature(core_intrinsics)]
 #![feature(decl_macro)]
index 83ef28e4f156cad110a93310e480be7f8be5fecf..008165f33b2bbc69f23fffa1c9a7aac0ae907c5b 100644 (file)
@@ -1142,7 +1142,7 @@ fn collect_miri<'a, 'tcx>(
     alloc_id: AllocId,
     output: &mut Vec<MonoItem<'tcx>>,
 ) {
-    if let Some(did) = tcx.interpret_interner.get_corresponding_static_def_id(alloc_id) {
+    if let Some(did) = tcx.interpret_interner.get_static(alloc_id) {
         let instance = Instance::mono(tcx, did);
         if should_monomorphize_locally(tcx, &instance) {
             trace!("collecting static {:?}", did);
index 0542ca6fb24c271c18fa282b459e5a0e6af78140..f4e2136a5a1153db3ab74c61294c719543c695dd 100644 (file)
@@ -252,10 +252,7 @@ fn build_reduced_graph_for_item(&mut self, item: &Item, expansion: Mark) {
             }
 
             ItemKind::ExternCrate(orig_name) => {
-                self.crate_loader.process_item(item, &self.definitions);
-
-                // n.b. we don't need to look at the path option here, because cstore already did
-                let crate_id = self.cstore.extern_mod_stmt_cnum_untracked(item.id).unwrap();
+                let crate_id = self.crate_loader.process_extern_crate(item, &self.definitions);
                 let module =
                     self.get_module(DefId { krate: crate_id, index: CRATE_DEF_INDEX });
                 self.populate_module_if_necessary(module);
@@ -302,7 +299,8 @@ fn build_reduced_graph_for_item(&mut self, item: &Item, expansion: Mark) {
                 self.current_module = module;
             }
 
-            ItemKind::ForeignMod(..) => self.crate_loader.process_item(item, &self.definitions),
+            // Handled in `rustc_metadata::{native_libs,link_args}`
+            ItemKind::ForeignMod(..) => {}
 
             // These items live in the value namespace.
             ItemKind::Static(_, m, _) => {
index d32d853c18bf5d45f1e767c1bdaaeb1e732a3d56..671856c4e549e34507dc8dbf18d5eaaf83a50e30 100644 (file)
@@ -962,38 +962,38 @@ enum TypeParameters<'a, 'b> {
                       RibKind<'a>),
 }
 
-// The rib kind controls the translation of local
-// definitions (`Def::Local`) to upvars (`Def::Upvar`).
+/// The rib kind controls the translation of local
+/// definitions (`Def::Local`) to upvars (`Def::Upvar`).
 #[derive(Copy, Clone, Debug)]
 enum RibKind<'a> {
-    // No translation needs to be applied.
+    /// No translation needs to be applied.
     NormalRibKind,
 
-    // We passed through a closure scope at the given node ID.
-    // Translate upvars as appropriate.
+    /// We passed through a closure scope at the given node ID.
+    /// Translate upvars as appropriate.
     ClosureRibKind(NodeId /* func id */),
 
-    // We passed through an impl or trait and are now in one of its
-    // methods or associated types. Allow references to ty params that impl or trait
-    // binds. Disallow any other upvars (including other ty params that are
-    // upvars).
+    /// We passed through an impl or trait and are now in one of its
+    /// methods or associated types. Allow references to ty params that impl or trait
+    /// binds. Disallow any other upvars (including other ty params that are
+    /// upvars).
     TraitOrImplItemRibKind,
 
-    // We passed through an item scope. Disallow upvars.
+    /// We passed through an item scope. Disallow upvars.
     ItemRibKind,
 
-    // We're in a constant item. Can't refer to dynamic stuff.
+    /// We're in a constant item. Can't refer to dynamic stuff.
     ConstantItemRibKind,
 
-    // We passed through a module.
+    /// We passed through a module.
     ModuleRibKind(Module<'a>),
 
-    // We passed through a `macro_rules!` statement
+    /// We passed through a `macro_rules!` statement
     MacroDefinition(DefId),
 
-    // All bindings in this rib are type parameters that can't be used
-    // from the default of a type parameter because they're not declared
-    // before said type parameter. Also see the `visit_generics` override.
+    /// All bindings in this rib are type parameters that can't be used
+    /// from the default of a type parameter because they're not declared
+    /// before said type parameter. Also see the `visit_generics` override.
     ForwardTyParamBanRibKind,
 }
 
@@ -1198,7 +1198,7 @@ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
     }
 }
 
-// Records a possibly-private value, type, or module definition.
+/// Records a possibly-private value, type, or module definition.
 #[derive(Clone, Debug)]
 pub struct NameBinding<'a> {
     kind: NameBindingKind<'a>,
@@ -1408,36 +1408,36 @@ pub struct Resolver<'a> {
 
     prelude: Option<Module<'a>>,
 
-    // n.b. This is used only for better diagnostics, not name resolution itself.
+    /// n.b. This is used only for better diagnostics, not name resolution itself.
     has_self: FxHashSet<DefId>,
 
-    // Names of fields of an item `DefId` accessible with dot syntax.
-    // Used for hints during error reporting.
+    /// Names of fields of an item `DefId` accessible with dot syntax.
+    /// Used for hints during error reporting.
     field_names: FxHashMap<DefId, Vec<Name>>,
 
-    // All imports known to succeed or fail.
+    /// All imports known to succeed or fail.
     determined_imports: Vec<&'a ImportDirective<'a>>,
 
-    // All non-determined imports.
+    /// All non-determined imports.
     indeterminate_imports: Vec<&'a ImportDirective<'a>>,
 
-    // The module that represents the current item scope.
+    /// The module that represents the current item scope.
     current_module: Module<'a>,
 
-    // The current set of local scopes for types and values.
-    // FIXME #4948: Reuse ribs to avoid allocation.
+    /// The current set of local scopes for types and values.
+    /// FIXME #4948: Reuse ribs to avoid allocation.
     ribs: PerNS<Vec<Rib<'a>>>,
 
-    // The current set of local scopes, for labels.
+    /// The current set of local scopes, for labels.
     label_ribs: Vec<Rib<'a>>,
 
-    // The trait that the current context can refer to.
+    /// The trait that the current context can refer to.
     current_trait_ref: Option<(Module<'a>, TraitRef)>,
 
-    // The current self type if inside an impl (used for better errors).
+    /// The current self type if inside an impl (used for better errors).
     current_self_type: Option<Ty>,
 
-    // The idents for the primitive types.
+    /// The idents for the primitive types.
     primitive_type_table: PrimitiveTypeTable,
 
     def_map: DefMap,
@@ -1446,20 +1446,20 @@ pub struct Resolver<'a> {
     pub export_map: ExportMap,
     pub trait_map: TraitMap,
 
-    // A map from nodes to anonymous modules.
-    // Anonymous modules are pseudo-modules that are implicitly created around items
-    // contained within blocks.
-    //
-    // For example, if we have this:
-    //
-    //  fn f() {
-    //      fn g() {
-    //          ...
-    //      }
-    //  }
-    //
-    // There will be an anonymous module created around `g` with the ID of the
-    // entry block for `f`.
+    /// A map from nodes to anonymous modules.
+    /// Anonymous modules are pseudo-modules that are implicitly created around items
+    /// contained within blocks.
+    ///
+    /// For example, if we have this:
+    ///
+    ///  fn f() {
+    ///      fn g() {
+    ///          ...
+    ///      }
+    ///  }
+    ///
+    /// There will be an anonymous module created around `g` with the ID of the
+    /// entry block for `f`.
     block_map: NodeMap<Module<'a>>,
     module_map: FxHashMap<DefId, Module<'a>>,
     extern_module_map: FxHashMap<(DefId, bool /* MacrosOnly? */), Module<'a>>,
@@ -1487,7 +1487,8 @@ pub struct Resolver<'a> {
 
     arenas: &'a ResolverArenas<'a>,
     dummy_binding: &'a NameBinding<'a>,
-    use_extern_macros: bool, // true if `#![feature(use_extern_macros)]`
+    /// true if `#![feature(use_extern_macros)]`
+    use_extern_macros: bool,
 
     crate_loader: &'a mut CrateLoader,
     macro_names: FxHashSet<Ident>,
@@ -1501,29 +1502,29 @@ pub struct Resolver<'a> {
     pub whitelisted_legacy_custom_derives: Vec<Name>,
     pub found_unresolved_macro: bool,
 
-    // List of crate local macros that we need to warn about as being unused.
-    // Right now this only includes macro_rules! macros, and macros 2.0.
+    /// List of crate local macros that we need to warn about as being unused.
+    /// Right now this only includes macro_rules! macros, and macros 2.0.
     unused_macros: FxHashSet<DefId>,
 
-    // Maps the `Mark` of an expansion to its containing module or block.
+    /// Maps the `Mark` of an expansion to its containing module or block.
     invocations: FxHashMap<Mark, &'a InvocationData<'a>>,
 
-    // Avoid duplicated errors for "name already defined".
+    /// Avoid duplicated errors for "name already defined".
     name_already_seen: FxHashMap<Name, Span>,
 
-    // If `#![feature(proc_macro)]` is set
+    /// If `#![feature(proc_macro)]` is set
     proc_macro_enabled: bool,
 
-    // A set of procedural macros imported by `#[macro_use]` that have already been warned about
+    /// A set of procedural macros imported by `#[macro_use]` that have already been warned about
     warned_proc_macros: FxHashSet<Name>,
 
     potentially_unused_imports: Vec<&'a ImportDirective<'a>>,
 
-    // This table maps struct IDs into struct constructor IDs,
-    // it's not used during normal resolution, only for better error reporting.
+    /// This table maps struct IDs into struct constructor IDs; it's not used
+    /// during normal resolution, only for better error reporting.
     struct_constructors: DefIdMap<(Def, ty::Visibility)>,
 
-    // Only used for better errors on `fn(): fn()`
+    /// Only used for better errors on `fn(): fn()`
     current_type_ascription: Vec<Span>,
 
     injected_crate: Option<Module<'a>>,
@@ -3254,7 +3255,7 @@ fn resolve_path(&mut self,
                        prev_name == keywords::CrateRoot.name() &&
                        self.session.features_untracked().extern_absolute_paths {
                         // `::extern_crate::a::b`
-                        let crate_id = self.crate_loader.resolve_crate_from_path(name, ident.span);
+                        let crate_id = self.crate_loader.process_path_extern(name, ident.span);
                         let crate_root =
                             self.get_module(DefId { krate: crate_id, index: CRATE_DEF_INDEX });
                         self.populate_module_if_necessary(crate_root);
index 87738f7b79be530e6ecb482d10a4a26a5da90ca9..37c62a7b0b45b276ab49270c3987195583e32123 100644 (file)
@@ -627,7 +627,12 @@ fn finalize_import(&mut self, directive: &'b ImportDirective<'b>) -> Option<(Spa
                         }
                     } else if is_extern && !token::is_path_segment_keyword(source) {
                         let crate_id =
-                            self.crate_loader.resolve_crate_from_path(source.name, directive.span);
+                            self.resolver.crate_loader.process_use_extern(
+                                source.name,
+                                directive.span,
+                                directive.id,
+                                &self.resolver.definitions,
+                            );
                         let crate_root =
                             self.get_module(DefId { krate: crate_id, index: CRATE_DEF_INDEX });
                         self.populate_module_if_necessary(crate_root);
index ca19ed0df67d1ab9efa4657867dff7a861d844e3..6a747decbd368f4ce71f43ce1c39beed29e37baf 100644 (file)
@@ -41,6 +41,7 @@
 use rustc::hir::def::Def as HirDef;
 use rustc::hir::map::{Node, NodeItem};
 use rustc::hir::def_id::{DefId, LOCAL_CRATE};
+use rustc::middle::cstore::ExternCrate;
 use rustc::session::config::CrateType::CrateTypeExecutable;
 use rustc::ty::{self, TyCtxt};
 use rustc_typeck::hir_ty_to_ty;
@@ -111,7 +112,7 @@ pub fn get_external_crates(&self) -> Vec<ExternalCrateData> {
 
         for &n in self.tcx.crates().iter() {
             let span = match *self.tcx.extern_crate(n.as_def_id()) {
-                Some(ref c) => c.span,
+                Some(ExternCrate { span, .. }) => span,
                 None => {
                     debug!("Skipping crate {}, no data", n);
                     continue;
index df6793e8a604c2e0ba43cf42d21d4e388aacaae5..36e60cee788dc6ea2f04ad9e947b09d84843804e 100644 (file)
@@ -108,6 +108,7 @@ fn into_from_env_goal(self) -> DomainGoal<'tcx> {
             FromEnv(..) |
             WellFormedTy(..) |
             FromEnvTy(..) |
+            Normalize(..) |
             RegionOutlives(..) |
             TypeOutlives(..) => self,
         }
@@ -118,10 +119,20 @@ fn into_from_env_goal(self) -> DomainGoal<'tcx> {
                                        -> Lrc<&'tcx Slice<Clause<'tcx>>>
 {
     let node_id = tcx.hir.as_local_node_id(def_id).unwrap();
-    let item = tcx.hir.expect_item(node_id);
-    match item.node {
-        hir::ItemTrait(..) => program_clauses_for_trait(tcx, def_id),
-        hir::ItemImpl(..) => program_clauses_for_impl(tcx, def_id),
+    let node = tcx.hir.find(node_id).unwrap();
+    match node {
+        hir::map::Node::NodeItem(item) => match item.node {
+            hir::ItemTrait(..) => program_clauses_for_trait(tcx, def_id),
+            hir::ItemImpl(..) => program_clauses_for_impl(tcx, def_id),
+            _ => Lrc::new(tcx.mk_clauses(iter::empty::<Clause>())),
+        }
+        hir::map::Node::NodeImplItem(item) => {
+            if let hir::ImplItemKind::Type(..) = item.node {
+                program_clauses_for_associated_type_value(tcx, def_id)
+            } else {
+                Lrc::new(tcx.mk_clauses(iter::empty::<Clause>()))
+            }
+        },
 
         // FIXME: other constructions e.g. traits, associated types...
         _ => Lrc::new(tcx.mk_clauses(iter::empty::<Clause>())),
@@ -233,6 +244,58 @@ fn program_clauses_for_impl<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, def_id: DefId
     Lrc::new(tcx.mk_clauses(iter::once(Clause::ForAll(ty::Binder::dummy(clause)))))
 }
 
+pub fn program_clauses_for_associated_type_value<'a, 'tcx>(
+    tcx: TyCtxt<'a, 'tcx, 'tcx>,
+    item_id: DefId,
+) -> Lrc<&'tcx Slice<Clause<'tcx>>> {
+    // Rule Normalize-From-Impl (see rustc guide)
+    //
+    // ```impl<P0..Pn> Trait<A1..An> for A0
+    // {
+    //     type AssocType<Pn+1..Pm> where WC = T;
+    // }```
+    //
+    // ```
+    // forall<P0..Pm> {
+    //   forall<Pn+1..Pm> {
+    //     Normalize(<A0 as Trait<A1..An>>::AssocType<Pn+1..Pm> -> T) :-
+    //       Implemented(A0: Trait<A1..An>) && WC
+    //   }
+    // }
+    // ```
+
+    let item = tcx.associated_item(item_id);
+    debug_assert_eq!(item.kind, ty::AssociatedKind::Type);
+    let impl_id = if let ty::AssociatedItemContainer::ImplContainer(impl_id) = item.container {
+        impl_id
+    } else {
+        bug!()
+    };
+    // `A0 as Trait<A1..An>`
+    let trait_ref = tcx.impl_trait_ref(impl_id).unwrap();
+    // `T`
+    let ty = tcx.type_of(item_id);
+    // `Implemented(A0: Trait<A1..An>)`
+    let trait_implemented = ty::Binder::dummy(ty::TraitPredicate { trait_ref }.lower());
+    // `WC`
+    let item_where_clauses = tcx.predicates_of(item_id).predicates.lower();
+    // `Implemented(A0: Trait<A1..An>) && WC`
+    let mut where_clauses = vec![trait_implemented];
+    where_clauses.extend(item_where_clauses);
+    // `<A0 as Trait<A1..An>>::AssocType<Pn+1..Pm>`
+    let projection_ty = ty::ProjectionTy::from_ref_and_name(tcx, trait_ref, item.name);
+    // `Normalize(<A0 as Trait<A1..An>>::AssocType<Pn+1..Pm> -> T)`
+    let normalize_goal = DomainGoal::Normalize(ty::ProjectionPredicate { projection_ty, ty });
+    // `Normalize(... -> T) :- ...`
+    let clause = ProgramClause {
+        goal: normalize_goal,
+        hypotheses: tcx.mk_goals(
+            where_clauses.into_iter().map(|wc| Goal::from_poly_domain_goal(wc, tcx))
+        ),
+    };
+    Lrc::new(tcx.mk_clauses(iter::once(Clause::ForAll(ty::Binder::dummy(clause)))))
+}
+
 pub fn dump_program_clauses<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>) {
     if !tcx.features().rustc_attrs {
         return;
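
To make the Normalize-From-Impl rule above concrete, the following hedged sketch shows an impl of the shape the schema describes and, in comments, the program clause its lowering would informally produce (trait and type names are invented for illustration):

    // A hypothetical impl matching the rule's schema.
    trait Container {
        type Item;
    }

    struct Bucket;

    impl Container for Bucket {
        type Item = u32; // plays the role of `T` in the rule
    }

    // Informally, lowering yields:
    //   Normalize(<Bucket as Container>::Item -> u32) :- Implemented(Bucket: Container)

    fn main() {
        // The projection below normalizes to `u32` via exactly that clause.
        let value: <Bucket as Container>::Item = 7;
        println!("{}", value);
    }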
index c968b8525a5b1c90784cffbb335192e20a0ff611..eb5c7396ae055606df48efe378bafe04fc417ba7 100644 (file)
@@ -174,12 +174,12 @@ pub fn provide(providers: &mut Providers) {
             // rustdoc needs to be able to document functions that use all the features, so
             // whitelist them all
             Lrc::new(llvm_util::all_known_features()
-                .map(|c| c.to_string())
+                .map(|(a, b)| (a.to_string(), b.map(|s| s.to_string())))
                 .collect())
         } else {
             Lrc::new(llvm_util::target_feature_whitelist(tcx.sess)
                 .iter()
-                .map(|c| c.to_string())
+                .map(|&(a, b)| (a.to_string(), b.map(|s| s.to_string())))
                 .collect())
         }
     };
index f3d95cf794babc101891650882d441cafbea29b5..eb550d7a605c5c0b774819868e092f8ef76fbd2f 100644 (file)
@@ -23,6 +23,7 @@
 use llvm::debuginfo::{DIType, DIFile, DIScope, DIDescriptor,
                       DICompositeType, DILexicalBlock, DIFlags};
 
+use rustc::hir::TransFnAttrFlags;
 use rustc::hir::def::CtorKind;
 use rustc::hir::def_id::{DefId, CrateNum, LOCAL_CRATE};
 use rustc::ty::fold::TypeVisitor;
@@ -41,7 +42,7 @@
 use std::fmt::Write;
 use std::ptr;
 use std::path::{Path, PathBuf};
-use syntax::{ast, attr};
+use syntax::ast;
 use syntax::symbol::{Interner, InternedString, Symbol};
 use syntax_pos::{self, Span, FileName};
 
@@ -1644,11 +1645,17 @@ pub fn create_global_var_metadata(cx: &CodegenCx,
     }
 
     let tcx = cx.tcx;
-    let no_mangle = attr::contains_name(&tcx.get_attrs(def_id), "no_mangle");
+    let attrs = tcx.trans_fn_attrs(def_id);
+
+    if attrs.flags.contains(TransFnAttrFlags::NO_DEBUG) {
+        return;
+    }
+
+    let no_mangle = attrs.flags.contains(TransFnAttrFlags::NO_MANGLE);
     // We may want to remove the namespace scope if we're in an extern block, see:
     // https://github.com/rust-lang/rust/pull/46457#issuecomment-351750952
     let var_scope = get_namespace_for_item(cx, def_id);
-    let span = cx.tcx.def_span(def_id);
+    let span = tcx.def_span(def_id);
 
     let (file_metadata, line_number) = if span != syntax_pos::DUMMY_SP {
         let loc = span_start(cx, span);
index 28311018ee7d7d3e3ecbed32d7042d5239c12fda..706dc3dca8a6143c2b10dc6faeb26d4dcd031f35 100644 (file)
@@ -23,6 +23,7 @@
 use llvm;
 use llvm::{ModuleRef, ContextRef, ValueRef};
 use llvm::debuginfo::{DIFile, DIType, DIScope, DIBuilderRef, DISubprogram, DIArray, DIFlags};
+use rustc::hir::TransFnAttrFlags;
 use rustc::hir::def_id::{DefId, CrateNum};
 use rustc::ty::subst::Substs;
 
@@ -30,7 +31,7 @@
 use common::CodegenCx;
 use builder::Builder;
 use monomorphize::Instance;
-use rustc::ty::{self, ParamEnv, Ty};
+use rustc::ty::{self, ParamEnv, Ty, InstanceDef};
 use rustc::mir;
 use rustc::session::config::{self, FullDebugInfo, LimitedDebugInfo, NoDebugInfo};
 use rustc::util::nodemap::{DefIdMap, FxHashMap, FxHashSet};
@@ -210,13 +211,12 @@ pub fn create_function_debug_context<'a, 'tcx>(cx: &CodegenCx<'a, 'tcx>,
         return FunctionDebugContext::DebugInfoDisabled;
     }
 
-    for attr in instance.def.attrs(cx.tcx).iter() {
-        if attr.check_name("no_debug") {
+    if let InstanceDef::Item(def_id) = instance.def {
+        if cx.tcx.trans_fn_attrs(def_id).flags.contains(TransFnAttrFlags::NO_DEBUG) {
             return FunctionDebugContext::FunctionWithoutDebugInfo;
         }
     }
 
-    let containing_scope = get_containing_scope(cx, instance);
     let span = mir.span;
 
     // This can be the case for functions inlined from another crate
@@ -226,6 +226,7 @@ pub fn create_function_debug_context<'a, 'tcx>(cx: &CodegenCx<'a, 'tcx>,
     }
 
     let def_id = instance.def_id();
+    let containing_scope = get_containing_scope(cx, instance);
     let loc = span_start(cx, span);
     let file_metadata = file_metadata(cx, &loc.file.name, def_id.krate);
 
index a38d51e754670dd96d8d4cb55cac1fe572d0b493..49d0f638f206137c2685d65c15d110f67383588c 100644 (file)
@@ -29,7 +29,6 @@
 #![feature(slice_sort_by_cached_key)]
 #![feature(optin_builtin_traits)]
 #![feature(inclusive_range_fields)]
-#![feature(underscore_lifetimes)]
 
 use rustc::dep_graph::WorkProduct;
 use syntax_pos::symbol::Symbol;
index fa3ecb1cc1199810fe3093a488ba908c3b4b34d5..bbd1c39a19e0eb95d534cd4cda6e0cebc9d0f7ec 100644 (file)
@@ -15,6 +15,7 @@
 use rustc::session::config::PrintRequest;
 use libc::c_int;
 use std::ffi::CString;
+use syntax::feature_gate::UnstableFeatures;
 
 use std::sync::atomic::{AtomicBool, Ordering};
 use std::sync::Once;
@@ -82,40 +83,95 @@ unsafe fn configure_llvm(sess: &Session) {
 // to LLVM or the feature detection code will walk past the end of the feature
 // array, leading to crashes.
 
-const ARM_WHITELIST: &'static [&'static str] = &["neon", "v7", "vfp2", "vfp3", "vfp4"];
-
-const AARCH64_WHITELIST: &'static [&'static str] = &["fp", "neon", "sve", "crc", "crypto",
-                                                     "ras", "lse", "rdm", "fp16", "rcpc",
-                                                     "dotprod", "v8.1a", "v8.2a", "v8.3a"];
-
-const X86_WHITELIST: &'static [&'static str] = &["aes", "avx", "avx2", "avx512bw",
-                                                 "avx512cd", "avx512dq", "avx512er",
-                                                 "avx512f", "avx512ifma", "avx512pf",
-                                                 "avx512vbmi", "avx512vl", "avx512vpopcntdq",
-                                                 "bmi1", "bmi2", "fma", "fxsr",
-                                                 "lzcnt", "mmx", "pclmulqdq",
-                                                 "popcnt", "rdrand", "rdseed",
-                                                 "sha",
-                                                 "sse", "sse2", "sse3", "sse4.1",
-                                                 "sse4.2", "sse4a", "ssse3",
-                                                 "tbm", "xsave", "xsavec",
-                                                 "xsaveopt", "xsaves"];
-
-const HEXAGON_WHITELIST: &'static [&'static str] = &["hvx", "hvx-double"];
-
-const POWERPC_WHITELIST: &'static [&'static str] = &["altivec",
-                                                     "power8-altivec", "power9-altivec",
-                                                     "power8-vector", "power9-vector",
-                                                     "vsx"];
-
-const MIPS_WHITELIST: &'static [&'static str] = &["fp64", "msa"];
+const ARM_WHITELIST: &[(&str, Option<&str>)] = &[
+    ("neon", Some("arm_target_feature")),
+    ("v7", Some("arm_target_feature")),
+    ("vfp2", Some("arm_target_feature")),
+    ("vfp3", Some("arm_target_feature")),
+    ("vfp4", Some("arm_target_feature")),
+];
+
+const AARCH64_WHITELIST: &[(&str, Option<&str>)] = &[
+    ("fp", Some("aarch64_target_feature")),
+    ("neon", Some("aarch64_target_feature")),
+    ("sve", Some("aarch64_target_feature")),
+    ("crc", Some("aarch64_target_feature")),
+    ("crypto", Some("aarch64_target_feature")),
+    ("ras", Some("aarch64_target_feature")),
+    ("lse", Some("aarch64_target_feature")),
+    ("rdm", Some("aarch64_target_feature")),
+    ("fp16", Some("aarch64_target_feature")),
+    ("rcpc", Some("aarch64_target_feature")),
+    ("dotprod", Some("aarch64_target_feature")),
+    ("v8.1a", Some("aarch64_target_feature")),
+    ("v8.2a", Some("aarch64_target_feature")),
+    ("v8.3a", Some("aarch64_target_feature")),
+];
+
+const X86_WHITELIST: &[(&str, Option<&str>)] = &[
+    ("aes", None),
+    ("avx", None),
+    ("avx2", None),
+    ("avx512bw", Some("avx512_target_feature")),
+    ("avx512cd", Some("avx512_target_feature")),
+    ("avx512dq", Some("avx512_target_feature")),
+    ("avx512er", Some("avx512_target_feature")),
+    ("avx512f", Some("avx512_target_feature")),
+    ("avx512ifma", Some("avx512_target_feature")),
+    ("avx512pf", Some("avx512_target_feature")),
+    ("avx512vbmi", Some("avx512_target_feature")),
+    ("avx512vl", Some("avx512_target_feature")),
+    ("avx512vpopcntdq", Some("avx512_target_feature")),
+    ("bmi1", None),
+    ("bmi2", None),
+    ("fma", None),
+    ("fxsr", None),
+    ("lzcnt", None),
+    ("mmx", Some("mmx_target_feature")),
+    ("pclmulqdq", None),
+    ("popcnt", None),
+    ("rdrand", None),
+    ("rdseed", None),
+    ("sha", None),
+    ("sse", None),
+    ("sse2", None),
+    ("sse3", None),
+    ("sse4.1", None),
+    ("sse4.2", None),
+    ("sse4a", Some("sse4a_target_feature")),
+    ("ssse3", None),
+    ("tbm", Some("tbm_target_feature")),
+    ("xsave", None),
+    ("xsavec", None),
+    ("xsaveopt", None),
+    ("xsaves", None),
+];
+
+const HEXAGON_WHITELIST: &[(&str, Option<&str>)] = &[
+    ("hvx", Some("hexagon_target_feature")),
+    ("hvx-double", Some("hexagon_target_feature")),
+];
+
+const POWERPC_WHITELIST: &[(&str, Option<&str>)] = &[
+    ("altivec", Some("powerpc_target_feature")),
+    ("power8-altivec", Some("powerpc_target_feature")),
+    ("power9-altivec", Some("powerpc_target_feature")),
+    ("power8-vector", Some("powerpc_target_feature")),
+    ("power9-vector", Some("powerpc_target_feature")),
+    ("vsx", Some("powerpc_target_feature")),
+];
+
+const MIPS_WHITELIST: &[(&str, Option<&str>)] = &[
+    ("fp64", Some("mips_target_feature")),
+    ("msa", Some("mips_target_feature")),
+];
 
 /// When rustdoc is running, provide a list of all known features so that all their respective
 /// primitives may be documented.
 ///
 /// IMPORTANT: If you're adding another whitelist to the above lists, make sure to add it to this
 /// iterator!
-pub fn all_known_features() -> impl Iterator<Item=&'static str> {
+pub fn all_known_features() -> impl Iterator<Item=(&'static str, Option<&'static str>)> {
     ARM_WHITELIST.iter().cloned()
         .chain(AARCH64_WHITELIST.iter().cloned())
         .chain(X86_WHITELIST.iter().cloned())
@@ -144,6 +200,13 @@ pub fn target_features(sess: &Session) -> Vec<Symbol> {
     let target_machine = create_target_machine(sess, true);
     target_feature_whitelist(sess)
         .iter()
+        .filter_map(|&(feature, gate)| {
+            if UnstableFeatures::from_environment().is_nightly_build() || gate.is_none() {
+                Some(feature)
+            } else {
+                None
+            }
+        })
         .filter(|feature| {
             let llvm_feature = to_llvm_feature(sess, feature);
             let cstr = CString::new(llvm_feature).unwrap();
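
The shape change above (plain feature names becoming `(feature, optional_gate)` pairs) exists so gated features can be hidden outside nightly, as the new `filter_map` does. A self-contained sketch of that filtering with invented data and a plain `is_nightly` flag instead of the compiler's `UnstableFeatures` check:

    const X86_SKETCH: &[(&str, Option<&str>)] = &[
        ("avx2", None),                             // ungated: always surfaced
        ("avx512f", Some("avx512_target_feature")), // gated: nightly only
    ];

    fn available<'a>(whitelist: &[(&'a str, Option<&str>)], is_nightly: bool) -> Vec<&'a str> {
        whitelist
            .iter()
            .filter_map(|&(feature, gate)| {
                if is_nightly || gate.is_none() { Some(feature) } else { None }
            })
            .collect()
    }

    fn main() {
        assert_eq!(available(X86_SKETCH, false), ["avx2"]);
        assert_eq!(available(X86_SKETCH, true), ["avx2", "avx512f"]);
        println!("ok");
    }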
@@ -152,7 +215,9 @@ pub fn target_features(sess: &Session) -> Vec<Symbol> {
         .map(|feature| Symbol::intern(feature)).collect()
 }
 
-pub fn target_feature_whitelist(sess: &Session) -> &'static [&'static str] {
+pub fn target_feature_whitelist(sess: &Session)
+    -> &'static [(&'static str, Option<&'static str>)]
+{
     match &*sess.target.target.arch {
         "arm" => ARM_WHITELIST,
         "aarch64" => AARCH64_WHITELIST,
index 977c7c983d6f2458dd4eb0e08b54897de84f2837..6e07b8e73ef22c329ea9c8749ede3d649ae14667 100644 (file)
@@ -50,7 +50,7 @@ pub fn primval_to_llvm(cx: &CodegenCx,
                 let static_ = cx
                     .tcx
                     .interpret_interner
-                    .get_corresponding_static_def_id(ptr.alloc_id);
+                    .get_static(ptr.alloc_id);
                 let base_addr = if let Some(def_id) = static_ {
                     assert!(cx.tcx.is_static(def_id).is_some());
                     consts::get_static(cx, def_id)
@@ -126,18 +126,17 @@ pub fn trans_static_initializer<'a, 'tcx>(
         promoted: None
     };
     let param_env = ty::ParamEnv::reveal_all();
-    cx.tcx.const_eval(param_env.and(cid))?;
+    let static_ = cx.tcx.const_eval(param_env.and(cid))?;
 
-    let alloc_id = cx
-        .tcx
-        .interpret_interner
-        .get_cached(def_id)
-        .expect("global not cached");
+    let ptr = match static_.val {
+        ConstVal::Value(MiriValue::ByRef(ptr, _)) => ptr,
+        _ => bug!("static const eval returned {:#?}", static_),
+    };
 
     let alloc = cx
         .tcx
         .interpret_interner
-        .get_alloc(alloc_id)
+        .get_alloc(ptr.primval.to_ptr().expect("static has integer pointer").alloc_id)
         .expect("miri allocation never successfully created");
     Ok(global_initializer(cx, alloc))
 }
index 5cf9819288b5ef7628c1a8b670c05f979aa37b10..b7895631c6092dc75cfa2d9ce18cd9c4d0342e42 100644 (file)
@@ -44,7 +44,7 @@
 use rustc::middle::cstore::MetadataLoader;
 use rustc::dep_graph::DepGraph;
 use rustc_back::target::Target;
-use rustc_data_structures::fx::FxHashSet;
+use rustc_data_structures::fx::FxHashMap;
 use rustc_mir::monomorphize::collector;
 use link::{build_link_meta, out_filename};
 
@@ -203,7 +203,7 @@ fn provide(&self, providers: &mut Providers) {
         ::symbol_names::provide(providers);
 
         providers.target_features_whitelist = |_tcx, _cnum| {
-            Lrc::new(FxHashSet()) // Just a dummy
+            Lrc::new(FxHashMap()) // Just a dummy
         };
     }
     fn provide_extern(&self, _providers: &mut Providers) {}
index ecfe1416050290b3277aa6fbf3da5b97de944fca..c0d6993c7d4dde1c133d3e11257dc3f5ce7951ae 100644 (file)
@@ -502,10 +502,6 @@ fn check_for_cast(&self,
                                             &format!("{}, producing the closest possible value",
                                                      msg),
                                             cast_suggestion);
-                        err.warn("casting here will cause undefined behavior if the value is \
-                                  finite but larger or smaller than the largest or smallest \
-                                  finite value representable by `f32` (this is a bug and will be \
-                                  fixed)");
                     }
                     true
                 }
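
For reference, the removed warning was attached to suggestions for casts like the one below; on current compilers an out-of-range `f64` to `f32` cast is defined to produce the closest possible value, or infinity of the same sign, so this small sketch is well-defined:

    fn main() {
        let huge: f64 = 1.0e40;
        // Out of `f32` range: the cast yields positive infinity rather than UB.
        let narrowed = huge as f32;
        assert!(narrowed.is_infinite());
        println!("{}", narrowed);
    }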
index f386e1d8b825d4d355336e23bfdd6560bc64566e..6bd38244e8caf9956f02233c0b518552e2a69fad 100644 (file)
 use rustc::ty::util::IntTypeExt;
 use rustc::ty::util::Discr;
 use rustc::util::captures::Captures;
-use rustc::util::nodemap::{FxHashSet, FxHashMap};
+use rustc::util::nodemap::FxHashMap;
 
 use syntax::{abi, ast};
 use syntax::ast::MetaItemKind;
 use syntax::attr::{InlineAttr, list_contains_name, mark_used};
 use syntax::codemap::Spanned;
 use syntax::symbol::{Symbol, keywords};
+use syntax::feature_gate;
 use syntax_pos::{Span, DUMMY_SP};
 
 use rustc::hir::{self, map as hir_map, TransFnAttrs, TransFnAttrFlags, Unsafety};
@@ -1682,7 +1683,7 @@ fn is_foreign_item<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
 fn from_target_feature(
     tcx: TyCtxt,
     attr: &ast::Attribute,
-    whitelist: &FxHashSet<String>,
+    whitelist: &FxHashMap<String, Option<String>>,
     target_features: &mut Vec<Symbol>,
 ) {
     let list = match attr.meta_item_list() {
@@ -1694,16 +1695,19 @@ fn from_target_feature(
             return
         }
     };
-
+    let rust_features = tcx.features();
     for item in list {
+        // Only `enable = ...` is accepted in the meta item list
         if !item.check_name("enable") {
             let msg = "#[target_feature(..)] only accepts sub-keys of `enable` \
                        currently";
             tcx.sess.span_err(item.span, &msg);
             continue
         }
+
+        // Must be of the form `enable = "..."` (a string)
         let value = match item.value_str() {
-            Some(list) => list,
+            Some(value) => value,
             None => {
                 let msg = "#[target_feature] attribute must be of the form \
                            #[target_feature(enable = \"..\")]";
@@ -1711,24 +1715,55 @@ fn from_target_feature(
                 continue
             }
         };
-        let value = value.as_str();
-        for feature in value.split(',') {
-            if whitelist.contains(feature) {
-                target_features.push(Symbol::intern(feature));
-                continue
-            }
-
-            let msg = format!("the feature named `{}` is not valid for \
-                               this target", feature);
-            let mut err = tcx.sess.struct_span_err(item.span, &msg);
 
-            if feature.starts_with("+") {
-                let valid = whitelist.contains(&feature[1..]);
-                if valid {
-                    err.help("consider removing the leading `+` in the feature name");
+        // We allow comma separation to enable multiple features
+        for feature in value.as_str().split(',') {
+
+            // Only allow whitelisted features per platform
+            let feature_gate = match whitelist.get(feature) {
+                Some(g) => g,
+                None => {
+                    let msg = format!("the feature named `{}` is not valid for \
+                                       this target", feature);
+                    let mut err = tcx.sess.struct_span_err(item.span, &msg);
+
+                    if feature.starts_with("+") {
+                        let valid = whitelist.contains_key(&feature[1..]);
+                        if valid {
+                            err.help("consider removing the leading `+` in the feature name");
+                        }
+                    }
+                    err.emit();
+                    continue
                 }
+            };
+
+            // Only allow features whose feature gates have been enabled
+            let allowed = match feature_gate.as_ref().map(|s| &**s) {
+                Some("arm_target_feature") => rust_features.arm_target_feature,
+                Some("aarch64_target_feature") => rust_features.aarch64_target_feature,
+                Some("hexagon_target_feature") => rust_features.hexagon_target_feature,
+                Some("powerpc_target_feature") => rust_features.powerpc_target_feature,
+                Some("mips_target_feature") => rust_features.mips_target_feature,
+                Some("avx512_target_feature") => rust_features.avx512_target_feature,
+                Some("mmx_target_feature") => rust_features.mmx_target_feature,
+                Some("sse4a_target_feature") => rust_features.sse4a_target_feature,
+                Some("tbm_target_feature") => rust_features.tbm_target_feature,
+                Some(name) => bug!("unknown target feature gate {}", name),
+                None => true,
+            };
+            if !allowed {
+                feature_gate::emit_feature_err(
+                    &tcx.sess.parse_sess,
+                    feature_gate.as_ref().unwrap(),
+                    item.span,
+                    feature_gate::GateIssue::Language,
+                    &format!("the target feature `{}` is currently unstable",
+                             feature),
+                );
+                continue
             }
-            err.emit();
+            target_features.push(Symbol::intern(feature));
         }
     }
 }
@@ -1790,6 +1825,8 @@ fn trans_fn_attrs<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, id: DefId) -> TransFnAt
             trans_fn_attrs.flags |= TransFnAttrFlags::NO_MANGLE;
         } else if attr.check_name("rustc_std_internal_symbol") {
             trans_fn_attrs.flags |= TransFnAttrFlags::RUSTC_STD_INTERNAL_SYMBOL;
+        } else if attr.check_name("no_debug") {
+            trans_fn_attrs.flags |= TransFnAttrFlags::NO_DEBUG;
         } else if attr.check_name("inline") {
             trans_fn_attrs.inline = attrs.iter().fold(InlineAttr::None, |ia, attr| {
                 if attr.path != "inline" {
@@ -1835,20 +1872,6 @@ fn trans_fn_attrs<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, id: DefId) -> TransFnAt
                     .emit();
             }
         } else if attr.check_name("target_feature") {
-            // handle deprecated #[target_feature = "..."]
-            if let Some(val) = attr.value_str() {
-                for feat in val.as_str().split(",").map(|f| f.trim()) {
-                    if !feat.is_empty() && !feat.contains('\0') {
-                        trans_fn_attrs.target_features.push(Symbol::intern(feat));
-                    }
-                }
-                let msg = "#[target_feature = \"..\"] is deprecated and will \
-                           eventually be removed, use \
-                           #[target_feature(enable = \"..\")] instead";
-                tcx.sess.span_warn(attr.span, &msg);
-                continue
-            }
-
             if tcx.fn_sig(id).unsafety() == Unsafety::Normal {
                 let msg = "#[target_feature(..)] can only be applied to \
                            `unsafe` function";
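
The attribute form that survives this cleanup is `#[target_feature(enable = "..")]` on an `unsafe` fn, with the feature name taken from the platform whitelist. A hedged usage sketch (x86_64 only; `avx2` and the runtime-detection macro are assumed to be available on the toolchain in use):

    #[target_feature(enable = "avx2")]
    unsafe fn sum(xs: &[f32]) -> f32 {
        xs.iter().sum()
    }

    fn main() {
        let data = [1.0_f32, 2.0, 3.0];
        // Callers are expected to check CPU support before entering the function.
        if is_x86_feature_detected!("avx2") {
            println!("{}", unsafe { sum(&data) });
        }
    }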
index a4477e80b988a51003948e8cbdc26e796ab12623..4b66939963ed0a37add319284a78a58ae130fb4e 100644 (file)
@@ -82,7 +82,6 @@
 #![feature(slice_patterns)]
 #![feature(slice_sort_by_cached_key)]
 #![feature(dyn_trait)]
-#![feature(underscore_lifetimes)]
 
 #[macro_use] extern crate log;
 #[macro_use] extern crate syntax;
index da8085d84c3f6729d8cda184f862d1a52b20147b..443caa7618d7414f81896fe56e01cacb021d3d94 100644 (file)
@@ -1178,6 +1178,10 @@ enum PathKind {
     Type,
 }
 
+fn resolution_failure(cx: &DocContext, path_str: &str) {
+    cx.sess().warn(&format!("[{}] cannot be resolved, ignoring it...", path_str));
+}
+
 impl Clean<Attributes> for [ast::Attribute] {
     fn clean(&self, cx: &DocContext) -> Attributes {
         let mut attrs = Attributes::from_ast(cx.sess().diagnostic(), self);
@@ -1228,6 +1232,7 @@ fn clean(&self, cx: &DocContext) -> Attributes {
                             if let Ok(def) = resolve(cx, path_str, true) {
                                 def
                             } else {
+                                resolution_failure(cx, path_str);
                                 // this could just be a normal link or a broken link
                                 // we could potentially check if something is
                                 // "intra-doc-link-like" and warn in that case
@@ -1238,6 +1243,7 @@ fn clean(&self, cx: &DocContext) -> Attributes {
                             if let Ok(def) = resolve(cx, path_str, false) {
                                 def
                             } else {
+                                resolution_failure(cx, path_str);
                                 // this could just be a normal link
                                 continue;
                             }
@@ -1282,6 +1288,7 @@ fn clean(&self, cx: &DocContext) -> Attributes {
                             } else if let Ok(value_def) = resolve(cx, path_str, true) {
                                 value_def
                             } else {
+                                resolution_failure(cx, path_str);
                                 // this could just be a normal link
                                 continue;
                             }
@@ -1290,6 +1297,7 @@ fn clean(&self, cx: &DocContext) -> Attributes {
                             if let Some(def) = macro_resolve(cx, path_str) {
                                 (def, None)
                             } else {
+                                resolution_failure(cx, path_str);
                                 continue
                             }
                         }
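
A hedged illustration of the doc comments the new `resolution_failure` warning targets; the broken link name is invented, and the quoted message matches the `format!` string added above:

    /// A link to a real item such as [String] is expected to resolve, while a
    /// link to a non-existent item like [DoesNotExist] now makes rustdoc warn
    /// "[DoesNotExist] cannot be resolved, ignoring it..." and leaves the text alone.
    pub struct Example;

    fn main() {}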
index 6e2be2610cec072a4001c72d5788ba8049643be8..9fb024fd906099859e86becb2d3136af9fc09736 100644 (file)
@@ -18,6 +18,7 @@
 use rustc::ty::{self, TyCtxt, AllArenas};
 use rustc::hir::map as hir_map;
 use rustc::lint;
+use rustc::session::config::ErrorOutputType;
 use rustc::util::nodemap::{FxHashMap, FxHashSet};
 use rustc_resolve as resolve;
 use rustc_metadata::creader::CrateLoader;
 use syntax::codemap;
 use syntax::edition::Edition;
 use syntax::feature_gate::UnstableFeatures;
+use syntax::json::JsonEmitter;
 use errors;
-use errors::emitter::ColorConfig;
+use errors::emitter::{Emitter, EmitterWriter};
 
 use std::cell::{RefCell, Cell};
 use std::mem;
-use rustc_data_structures::sync::Lrc;
+use rustc_data_structures::sync::{self, Lrc};
 use std::rc::Rc;
 use std::path::PathBuf;
 
@@ -42,7 +44,7 @@
 use clean::Clean;
 use html::render::RenderInfo;
 
-pub use rustc::session::config::Input;
+pub use rustc::session::config::{Input, CodegenOptions};
 pub use rustc::session::search_paths::SearchPaths;
 
 pub type ExternalPaths = FxHashMap<DefId, (Vec<String>, clean::TypeKind)>;
@@ -115,7 +117,6 @@ fn is_doc_reachable(&self, did: DefId) -> bool {
     }
 }
 
-
 pub fn run_core(search_paths: SearchPaths,
                 cfgs: Vec<String>,
                 externs: config::Externs,
@@ -125,7 +126,9 @@ pub fn run_core(search_paths: SearchPaths,
                 allow_warnings: bool,
                 crate_name: Option<String>,
                 force_unstable_if_unmarked: bool,
-                edition: Edition) -> (clean::Crate, RenderInfo)
+                edition: Edition,
+                cg: CodegenOptions,
+                error_format: ErrorOutputType) -> (clean::Crate, RenderInfo)
 {
     // Parse, resolve, and typecheck the given crate.
 
@@ -137,12 +140,14 @@ pub fn run_core(search_paths: SearchPaths,
     let warning_lint = lint::builtin::WARNINGS.name_lower();
 
     let host_triple = TargetTriple::from_triple(config::host_triple());
+    // Note: error output is configured below via `error_format` and the emitter.
     let sessopts = config::Options {
         maybe_sysroot,
         search_paths,
         crate_types: vec![config::CrateTypeRlib],
         lint_opts: if !allow_warnings { vec![(warning_lint, lint::Allow)] } else { vec![] },
         lint_cap: Some(lint::Allow),
+        cg,
         externs,
         target_triple: triple.unwrap_or(host_triple),
         // Ensure that rustdoc works even if rustc is feature-staged
@@ -153,14 +158,42 @@ pub fn run_core(search_paths: SearchPaths,
             edition,
             ..config::basic_debugging_options()
         },
+        error_format,
         ..config::basic_options().clone()
     };
 
     let codemap = Lrc::new(codemap::CodeMap::new(sessopts.file_path_mapping()));
-    let diagnostic_handler = errors::Handler::with_tty_emitter(ColorConfig::Auto,
-                                                               true,
-                                                               false,
-                                                               Some(codemap.clone()));
+    let emitter: Box<dyn Emitter + sync::Send> = match error_format {
+        ErrorOutputType::HumanReadable(color_config) => Box::new(
+            EmitterWriter::stderr(
+                color_config,
+                Some(codemap.clone()),
+                false,
+                sessopts.debugging_opts.teach,
+            ).ui_testing(sessopts.debugging_opts.ui_testing)
+        ),
+        ErrorOutputType::Json(pretty) => Box::new(
+            JsonEmitter::stderr(
+                None,
+                codemap.clone(),
+                pretty,
+                sessopts.debugging_opts.approximate_suggestions,
+            ).ui_testing(sessopts.debugging_opts.ui_testing)
+        ),
+        ErrorOutputType::Short(color_config) => Box::new(
+            EmitterWriter::stderr(color_config, Some(codemap.clone()), true, false)
+        ),
+    };
+
+    let diagnostic_handler = errors::Handler::with_emitter_and_flags(
+        emitter,
+        errors::HandlerFlags {
+            can_emit_warnings: true,
+            treat_err_as_bug: false,
+            external_macro_backtrace: false,
+            ..Default::default()
+        },
+    );
 
     let mut sess = session::build_session_(
         sessopts, cpath, diagnostic_handler, codemap,
index aac5d0d2601deca63ff43b781d06086a83a63a13..583c9f2b671441f0602e83887071add233727089 100644 (file)
@@ -37,128 +37,116 @@ pub fn render<T: fmt::Display, S: fmt::Display>(
     -> io::Result<()>
 {
     write!(dst,
-r##"<!DOCTYPE html>
-<html lang="en">
-<head>
-    <meta charset="utf-8">
-    <meta name="viewport" content="width=device-width, initial-scale=1.0">
-    <meta name="generator" content="rustdoc">
-    <meta name="description" content="{description}">
-    <meta name="keywords" content="{keywords}">
-
-    <title>{title}</title>
-
-    <link rel="stylesheet" type="text/css" href="{root_path}normalize{suffix}.css">
-    <link rel="stylesheet" type="text/css" href="{root_path}rustdoc{suffix}.css"
-          id="mainThemeStyle">
-    {themes}
-    <link rel="stylesheet" type="text/css" href="{root_path}dark{suffix}.css">
-    <link rel="stylesheet" type="text/css" href="{root_path}light{suffix}.css" id="themeStyle">
-    <script src="{root_path}storage{suffix}.js"></script>
-    {css_extension}
-
-    {favicon}
-    {in_header}
-</head>
-<body class="rustdoc {css_class}">
-    <!--[if lte IE 8]>
-    <div class="warning">
-        This old browser is unsupported and will most likely display funky
-        things.
-    </div>
-    <![endif]-->
-
-    {before_content}
-
-    <nav class="sidebar">
-        <div class="sidebar-menu">&#9776;</div>
-        {logo}
-        {sidebar}
-    </nav>
-
-    <div class="theme-picker">
-        <button id="theme-picker" aria-label="Pick another theme!">
-            <img src="{root_path}brush{suffix}.svg" width="18" alt="Pick another theme!">
-        </button>
-        <div id="theme-choices"></div>
-    </div>
-    <script src="{root_path}theme{suffix}.js"></script>
-    <nav class="sub">
-        <form class="search-form js-only">
-            <div class="search-container">
-                <input class="search-input" name="search"
-                       autocomplete="off"
-                       placeholder="Click or press ‘S’ to search, ‘?’ for more options…"
-                       type="search">
-            </div>
-        </form>
-    </nav>
-
-    <section id='main' class="content">{content}</section>
-    <section id='search' class="content hidden"></section>
-
-    <section class="footer"></section>
-
-    <aside id="help" class="hidden">
-        <div>
-            <h1 class="hidden">Help</h1>
-
-            <div class="shortcuts">
-                <h2>Keyboard Shortcuts</h2>
-
-                <dl>
-                    <dt><kbd>?</kbd></dt>
-                    <dd>Show this help dialog</dd>
-                    <dt><kbd>S</kbd></dt>
-                    <dd>Focus the search field</dd>
-                    <dt><kbd>↑</kbd></dt>
-                    <dd>Move up in search results</dd>
-                    <dt><kbd>↓</kbd></dt>
-                    <dd>Move down in search results</dd>
-                    <dt><kbd>↹</kbd></dt>
-                    <dd>Switch tab</dd>
-                    <dt><kbd>&#9166;</kbd></dt>
-                    <dd>Go to active search result</dd>
-                    <dt><kbd>+</kbd></dt>
-                    <dd>Expand all sections</dd>
-                    <dt><kbd>-</kbd></dt>
-                    <dd>Collapse all sections</dd>
-                </dl>
-            </div>
-
-            <div class="infos">
-                <h2>Search Tricks</h2>
-
-                <p>
-                    Prefix searches with a type followed by a colon (e.g.
-                    <code>fn:</code>) to restrict the search to a given type.
-                </p>
-
-                <p>
-                    Accepted types are: <code>fn</code>, <code>mod</code>,
-                    <code>struct</code>, <code>enum</code>,
-                    <code>trait</code>, <code>type</code>, <code>macro</code>,
-                    and <code>const</code>.
-                </p>
-
-                <p>
-                    Search functions by type signature (e.g.
-                    <code>vec -> usize</code> or <code>* -> vec</code>)
-                </p>
-            </div>
-        </div>
-    </aside>
-
-    {after_content}
-
-    <script>
-        window.rootPath = "{root_path}";
-        window.currentCrate = "{krate}";
-    </script>
-    <script src="{root_path}main{suffix}.js"></script>
-    <script defer src="{root_path}search-index.js"></script>
-</body>
-</html>"##,
+"<!DOCTYPE html>\
+<html lang=\"en\">\
+<head>\
+    <meta charset=\"utf-8\">\
+    <meta name=\"viewport\" content=\"width=device-width, initial-scale=1.0\">\
+    <meta name=\"generator\" content=\"rustdoc\">\
+    <meta name=\"description\" content=\"{description}\">\
+    <meta name=\"keywords\" content=\"{keywords}\">\
+    <title>{title}</title>\
+    <link rel=\"stylesheet\" type=\"text/css\" href=\"{root_path}normalize{suffix}.css\">\
+    <link rel=\"stylesheet\" type=\"text/css\" href=\"{root_path}rustdoc{suffix}.css\" \
+          id=\"mainThemeStyle\">\
+    {themes}\
+    <link rel=\"stylesheet\" type=\"text/css\" href=\"{root_path}dark{suffix}.css\">\
+    <link rel=\"stylesheet\" type=\"text/css\" href=\"{root_path}light{suffix}.css\" \
+          id=\"themeStyle\">\
+    <script src=\"{root_path}storage{suffix}.js\"></script>\
+    {css_extension}\
+    {favicon}\
+    {in_header}\
+</head>\
+<body class=\"rustdoc {css_class}\">\
+    <!--[if lte IE 8]>\
+    <div class=\"warning\">\
+        This old browser is unsupported and will most likely display funky \
+        things.\
+    </div>\
+    <![endif]-->\
+    {before_content}\
+    <nav class=\"sidebar\">\
+        <div class=\"sidebar-menu\">&#9776;</div>\
+        {logo}\
+        {sidebar}\
+    </nav>\
+    <div class=\"theme-picker\">\
+        <button id=\"theme-picker\" aria-label=\"Pick another theme!\">\
+            <img src=\"{root_path}brush{suffix}.svg\" width=\"18\" alt=\"Pick another theme!\">\
+        </button>\
+        <div id=\"theme-choices\"></div>\
+    </div>\
+    <script src=\"{root_path}theme{suffix}.js\"></script>\
+    <nav class=\"sub\">\
+        <form class=\"search-form js-only\">\
+            <div class=\"search-container\">\
+                <input class=\"search-input\" name=\"search\" \
+                       autocomplete=\"off\" \
+                       placeholder=\"Click or press ‘S’ to search, ‘?’ for more options…\" \
+                       type=\"search\">\
+            </div>\
+        </form>\
+    </nav>\
+    <section id=\"main\" class=\"content\">{content}</section>\
+    <section id=\"search\" class=\"content hidden\"></section>\
+    <section class=\"footer\"></section>\
+    <aside id=\"help\" class=\"hidden\">\
+        <div>\
+            <h1 class=\"hidden\">Help</h1>\
+            <div class=\"shortcuts\">\
+                <h2>Keyboard Shortcuts</h2>\
+                <dl>\
+                    <dt><kbd>?</kbd></dt>\
+                    <dd>Show this help dialog</dd>\
+                    <dt><kbd>S</kbd></dt>\
+                    <dd>Focus the search field</dd>\
+                    <dt><kbd>↑</kbd></dt>\
+                    <dd>Move up in search results</dd>\
+                    <dt><kbd>↓</kbd></dt>\
+                    <dd>Move down in search results</dd>\
+                    <dt><kbd>↹</kbd></dt>\
+                    <dd>Switch tab</dd>\
+                    <dt><kbd>&#9166;</kbd></dt>\
+                    <dd>Go to active search result</dd>\
+                    <dt><kbd>+</kbd></dt>\
+                    <dd>Expand all sections</dd>\
+                    <dt><kbd>-</kbd></dt>\
+                    <dd>Collapse all sections</dd>\
+                </dl>\
+            </div>\
+            <div class=\"infos\">\
+                <h2>Search Tricks</h2>\
+                <p>\
+                    Prefix searches with a type followed by a colon (e.g. \
+                    <code>fn:</code>) to restrict the search to a given type.\
+                </p>\
+                <p>\
+                    Accepted types are: <code>fn</code>, <code>mod</code>, \
+                    <code>struct</code>, <code>enum</code>, \
+                    <code>trait</code>, <code>type</code>, <code>macro</code>, \
+                    and <code>const</code>.\
+                </p>\
+                <p>\
+                    Search functions by type signature (e.g. \
+                    <code>vec -> usize</code> or <code>* -> vec</code>)\
+                </p>\
+                <p>\
+                    Search multiple things at once by splitting your query with comma (e.g. \
+                    <code>str,u8</code> or <code>String,struct:Vec,test</code>)\
+                </p>\
+            </div>\
+        </div>\
+    </aside>\
+    {after_content}\
+    <script>\
+        window.rootPath = \"{root_path}\";\
+        window.currentCrate = \"{krate}\";\
+    </script>\
+    <script src=\"{root_path}main{suffix}.js\"></script>\
+    <script defer src=\"{root_path}search-index.js\"></script>\
+</body>\
+</html>",
     css_extension = if css_file_extension {
         format!("<link rel=\"stylesheet\" type=\"text/css\" href=\"{root_path}theme{suffix}.css\">",
                 root_path = page.root_path,
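
The mechanic this rewrite relies on is Rust's line-continuation escape in ordinary string literals: a trailing `\` removes the newline and any leading whitespace on the next line, which is what strips the whitespace between tags that the old raw string kept. A minimal sketch:

    fn main() {
        // Trailing `\` in a non-raw literal swallows the newline plus indentation,
        // so no spaces end up between the tags.
        let compact = "<head>\
                       <title>x</title>";
        assert_eq!(compact, "<head><title>x</title>");

        // A raw string keeps every byte, including the newline and indentation.
        let raw = r##"<head>
        <title>x</title>"##;
        assert_ne!(raw, compact);
    }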
index 9e2c7bd7ef1ed6fc808192442760279ac88b7a76..651319743aadde640626ca7103e71c01c8c816ee 100644 (file)
@@ -107,7 +107,7 @@ pub struct SharedContext {
     /// This describes the layout of each page, and is not modified after
     /// creation of the context (contains info like the favicon and added html).
     pub layout: layout::Layout,
-    /// This flag indicates whether [src] links should be generated or not. If
+    /// This flag indicates whether `[src]` links should be generated or not. If
     /// the source files are present in the html rendering, then this will be
     /// `true`.
     pub include_sources: bool,
@@ -1728,7 +1728,9 @@ fn item<F>(&mut self, item: clean::Item, all: &mut AllTypes, mut f: F) -> Result
                 let mut dst = try_err!(File::create(&joint_dst), &joint_dst);
                 try_err!(dst.write_all(&buf), &joint_dst);
 
-                all.append(full_path(self, &item), &item_type);
+                if !self.render_redirect_pages {
+                    all.append(full_path(self, &item), &item_type);
+                }
                 // Redirect from a sane URL using the namespace to Rustdoc's
                 // URL for the page.
                 let redir_name = format!("{}.{}.html", name, item_type.name_space());
index 3a3fa833c238ba569da70ade3fa9444ee0dd9ec0..2546a9410a9f653a44fb12dc37aa80f9ab287a26 100644 (file)
             printTab(currentTab);
         }
 
+        function execSearch(query, searchWords) {
+            var queries = query.raw.split(",");
+            var results = {
+                'in_args': [],
+                'returned': [],
+                'others': [],
+            };
+
+            for (var i = 0; i < queries.length; ++i) {
+                var query = queries[i].trim();
+                if (query.length !== 0) {
+                    var tmp = execQuery(getQuery(query), searchWords);
+
+                    results['in_args'].push(tmp['in_args']);
+                    results['returned'].push(tmp['returned']);
+                    results['others'].push(tmp['others']);
+                }
+            }
+            if (queries.length > 1) {
+                function getSmallest(arrays, positions) {
+                    var start = null;
+
+                    for (var it = 0; it < positions.length; ++it) {
+                        if (arrays[it].length > positions[it] &&
+                            (start === null || start > arrays[it][positions[it]].lev)) {
+                            start = arrays[it][positions[it]].lev;
+                        }
+                    }
+                    return start;
+                }
+
+                function mergeArrays(arrays) {
+                    var ret = [];
+                    var positions = [];
+
+                    for (var x = 0; x < arrays.length; ++x) {
+                        positions.push(0);
+                    }
+                    while (ret.length < MAX_RESULTS) {
+                        var smallest = getSmallest(arrays, positions);
+                        if (smallest === null) {
+                            break;
+                        }
+                        for (x = 0; x < arrays.length && ret.length < MAX_RESULTS; ++x) {
+                            if (arrays[x].length > positions[x] &&
+                                    arrays[x][positions[x]].lev === smallest) {
+                                ret.push(arrays[x][positions[x]]);
+                                positions[x] += 1;
+                            }
+                        }
+                    }
+                    return ret;
+                }
+
+                return {
+                    'in_args': mergeArrays(results['in_args']),
+                    'returned': mergeArrays(results['returned']),
+                    'others': mergeArrays(results['others']),
+                };
+            } else {
+                return {
+                    'in_args': results['in_args'][0],
+                    'returned': results['returned'][0],
+                    'others': results['others'][0],
+                };
+            }
+        }
+
         function search(e) {
-            var query,
-                obj, i, len,
-                results = {"in_args": [], "returned": [], "others": []},
-                resultIndex;
             var params = getQueryStringParams();
+            var query = getQuery(document.getElementsByClassName('search-input')[0].value);
 
-            query = getQuery(document.getElementsByClassName('search-input')[0].value);
             if (e) {
                 e.preventDefault();
             }
                 }
             }
 
-            results = execQuery(query, index);
-            showResults(results);
+            showResults(execSearch(query, index));
         }
 
         function buildIndex(rawSearchIndex) {
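
The new JavaScript above splits the query on commas, runs each piece through `execQuery`, and then interleaves the per-query result lists by ascending Levenshtein score up to `MAX_RESULTS`. A hedged Rust sketch of that merge step (the field names, score type, and cap are assumptions, not rustdoc's actual types):

    const MAX_RESULTS: usize = 200; // assumed cap, mirroring the JS constant's role

    #[derive(Clone)]
    struct Hit {
        name: &'static str,
        lev: u32, // Levenshtein distance to the query: lower is better
    }

    /// Interleave result lists that are each sorted by `lev`, always draining the
    /// currently smallest score first, like `getSmallest` + `mergeArrays` above.
    fn merge(lists: &[Vec<Hit>]) -> Vec<Hit> {
        let mut positions = vec![0usize; lists.len()];
        let mut merged = Vec::new();
        while merged.len() < MAX_RESULTS {
            let smallest = lists
                .iter()
                .zip(&positions)
                .filter_map(|(list, &pos)| list.get(pos).map(|hit| hit.lev))
                .min();
            let smallest = match smallest {
                Some(s) => s,
                None => break, // every list is exhausted
            };
            for (list, pos) in lists.iter().zip(positions.iter_mut()) {
                if merged.len() < MAX_RESULTS && list.get(*pos).map(|h| h.lev) == Some(smallest) {
                    merged.push(list[*pos].clone());
                    *pos += 1;
                }
            }
        }
        merged
    }

    fn main() {
        let per_query = vec![
            vec![Hit { name: "str", lev: 0 }, Hit { name: "String", lev: 2 }],
            vec![Hit { name: "u8", lev: 0 }, Hit { name: "i8", lev: 1 }],
        ];
        for hit in merge(&per_query) {
            println!("{} (lev {})", hit.name, hit.lev);
        }
    }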
index 8463475afabda4b1f0f7138aad9a1879070f409c..60b713f2995e1089ee47478ed4c8a9dd9d58cc48 100644 (file)
@@ -23,6 +23,7 @@
 #![feature(test)]
 #![feature(vec_remove_item)]
 #![feature(entry_and_modify)]
+#![feature(dyn_trait)]
 
 extern crate arena;
 extern crate getopts;
@@ -48,6 +49,8 @@
 
 extern crate serialize as rustc_serialize; // used by deriving
 
+use errors::ColorConfig;
+
 use std::collections::{BTreeMap, BTreeSet};
 use std::default::Default;
 use std::env;
@@ -61,7 +64,8 @@
 use syntax::edition::Edition;
 use externalfiles::ExternalHtml;
 use rustc::session::search_paths::SearchPaths;
-use rustc::session::config::{ErrorOutputType, RustcOptGroup, nightly_options, Externs};
+use rustc::session::config::{ErrorOutputType, RustcOptGroup, Externs, CodegenOptions};
+use rustc::session::config::{nightly_options, build_codegen_options};
 use rustc_back::target::TargetTriple;
 
 #[macro_use]
@@ -99,6 +103,7 @@ struct Output {
 
 pub fn main() {
     const STACK_SIZE: usize = 32_000_000; // 32MB
+    rustc_driver::set_sigpipe_handler();
     env_logger::init();
     let res = std::thread::Builder::new().stack_size(STACK_SIZE).spawn(move || {
         syntax::with_globals(move || {
@@ -155,6 +160,9 @@ pub fn opts() -> Vec<RustcOptGroup> {
         stable("plugin-path", |o| {
             o.optmulti("", "plugin-path", "directory to load plugins from", "DIR")
         }),
+        stable("C", |o| {
+            o.optmulti("C", "codegen", "pass a codegen option to rustc", "OPT[=VALUE]")
+        }),
         stable("passes", |o| {
             o.optmulti("", "passes",
                        "list of passes to also run, you might want \
@@ -274,6 +282,21 @@ pub fn opts() -> Vec<RustcOptGroup> {
                      "edition to use when compiling rust code (default: 2015)",
                      "EDITION")
         }),
+        unstable("color", |o| {
+            o.optopt("",
+                     "color",
+                     "Configure coloring of output:
+                                          auto   = colorize, if output goes to a tty (default);
+                                          always = always colorize output;
+                                          never  = never colorize output",
+                     "auto|always|never")
+        }),
+        unstable("error-format", |o| {
+            o.optopt("",
+                     "error-format",
+                     "How errors and other messages are produced",
+                     "human|json|short")
+        }),
     ]
 }
 
@@ -358,9 +381,33 @@ pub fn main_args(args: &[String]) -> isize {
     }
     let input = &matches.free[0];
 
+    let color = match matches.opt_str("color").as_ref().map(|s| &s[..]) {
+        Some("auto") => ColorConfig::Auto,
+        Some("always") => ColorConfig::Always,
+        Some("never") => ColorConfig::Never,
+        None => ColorConfig::Auto,
+        Some(arg) => {
+            print_error(&format!("argument for --color must be `auto`, `always` or `never` \
+                                  (instead was `{}`)", arg));
+            return 1;
+        }
+    };
+    let error_format = match matches.opt_str("error-format").as_ref().map(|s| &s[..]) {
+        Some("human") => ErrorOutputType::HumanReadable(color),
+        Some("json") => ErrorOutputType::Json(false),
+        Some("pretty-json") => ErrorOutputType::Json(true),
+        Some("short") => ErrorOutputType::Short(color),
+        None => ErrorOutputType::HumanReadable(color),
+        Some(arg) => {
+            print_error(&format!("argument for --error-format must be `human`, `json` or \
+                                  `short` (instead was `{}`)", arg));
+            return 1;
+        }
+    };
+
     let mut libs = SearchPaths::new();
     for s in &matches.opt_strs("L") {
-        libs.add_path(s, ErrorOutputType::default());
+        libs.add_path(s, error_format);
     }
     let externs = match parse_externs(&matches) {
         Ok(ex) => ex,
@@ -441,14 +488,16 @@ pub fn main_args(args: &[String]) -> isize {
         }
     };
 
+    let cg = build_codegen_options(&matches, ErrorOutputType::default());
+
     match (should_test, markdown_input) {
         (true, true) => {
             return markdown::test(input, cfgs, libs, externs, test_args, maybe_sysroot,
-                                  display_warnings, linker, edition)
+                                  display_warnings, linker, edition, cg)
         }
         (true, false) => {
             return test::run(Path::new(input), cfgs, libs, externs, test_args, crate_name,
-                             maybe_sysroot, display_warnings, linker, edition)
+                             maybe_sysroot, display_warnings, linker, edition, cg)
         }
         (false, true) => return markdown::render(Path::new(input),
                                                  output.unwrap_or(PathBuf::from("doc")),
@@ -458,7 +507,9 @@ pub fn main_args(args: &[String]) -> isize {
     }
 
     let output_format = matches.opt_str("w");
-    let res = acquire_input(PathBuf::from(input), externs, edition, &matches, move |out| {
+
+    let res = acquire_input(PathBuf::from(input), externs, edition, cg, &matches, error_format,
+                            move |out| {
         let Output { krate, passes, renderinfo } = out;
         info!("going to format");
         match output_format.as_ref().map(|s| &**s) {
@@ -500,14 +551,16 @@ fn print_error<T>(error_message: T) where T: Display {
 fn acquire_input<R, F>(input: PathBuf,
                        externs: Externs,
                        edition: Edition,
+                       cg: CodegenOptions,
                        matches: &getopts::Matches,
+                       error_format: ErrorOutputType,
                        f: F)
                        -> Result<R, String>
 where R: 'static + Send, F: 'static + Send + FnOnce(Output) -> R {
     match matches.opt_str("r").as_ref().map(|s| &**s) {
-        Some("rust") => Ok(rust_input(input, externs, edition, matches, f)),
+        Some("rust") => Ok(rust_input(input, externs, edition, cg, matches, error_format, f)),
         Some(s) => Err(format!("unknown input format: {}", s)),
-        None => Ok(rust_input(input, externs, edition, matches, f))
+        None => Ok(rust_input(input, externs, edition, cg, matches, error_format, f))
     }
 }
 
@@ -536,7 +589,9 @@ fn parse_externs(matches: &getopts::Matches) -> Result<Externs, String> {
 fn rust_input<R, F>(cratefile: PathBuf,
                     externs: Externs,
                     edition: Edition,
+                    cg: CodegenOptions,
                     matches: &getopts::Matches,
+                    error_format: ErrorOutputType,
                     f: F) -> R
 where R: 'static + Send,
       F: 'static + Send + FnOnce(Output) -> R
@@ -589,7 +644,7 @@ fn rust_input<R, F>(cratefile: PathBuf,
         let (mut krate, renderinfo) =
             core::run_core(paths, cfgs, externs, Input::File(cratefile), triple, maybe_sysroot,
                            display_warnings, crate_name.clone(),
-                           force_unstable_if_unmarked, edition);
+                           force_unstable_if_unmarked, edition, cg, error_format);
 
         info!("finished with rustc");
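
Note on the hunks above (not part of the patch): rustdoc gains its own `--color` and `--error-format` flags, and the parsed `ErrorOutputType` plus the `-C` codegen options are threaded through `acquire_input`/`rust_input` down into `core::run_core` and the doctest paths. As a rough, hedged sketch of the user-facing effect, a test harness could drive rustdoc like this; the source path and the choice to spawn rustdoc via `std::process::Command` are illustrative assumptions, and the unstable `--error-format` option may additionally require a nightly rustdoc.

```rust
use std::process::Command;

fn main() {
    // Hypothetical invocation: ask rustdoc for short, uncolored diagnostics.
    // "src/lib.rs" is a placeholder path.
    let status = Command::new("rustdoc")
        .args(&["--color", "never", "--error-format", "short", "src/lib.rs"])
        .status()
        .expect("failed to run rustdoc");
    assert!(status.success());
}
```
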
 
index f14d4c602d070f58226be672c1db489ec4604275..8ada5ce1a4df9e83f74b068c903f2287038c083b 100644 (file)
@@ -16,7 +16,7 @@
 use getopts;
 use testing;
 use rustc::session::search_paths::SearchPaths;
-use rustc::session::config::Externs;
+use rustc::session::config::{Externs, CodegenOptions};
 use syntax::codemap::DUMMY_SP;
 use syntax::edition::Edition;
 
@@ -140,7 +140,8 @@ pub fn render(input: &Path, mut output: PathBuf, matches: &getopts::Matches,
 /// Run any tests/code examples in the markdown file `input`.
 pub fn test(input: &str, cfgs: Vec<String>, libs: SearchPaths, externs: Externs,
             mut test_args: Vec<String>, maybe_sysroot: Option<PathBuf>,
-            display_warnings: bool, linker: Option<PathBuf>, edition: Edition) -> isize {
+            display_warnings: bool, linker: Option<PathBuf>, edition: Edition,
+            cg: CodegenOptions) -> isize {
     let input_str = match load_string(input) {
         Ok(s) => s,
         Err(LoadStringError::ReadFail) => return 1,
@@ -150,7 +151,7 @@ pub fn test(input: &str, cfgs: Vec<String>, libs: SearchPaths, externs: Externs,
     let mut opts = TestOptions::default();
     opts.no_crate_inject = true;
     opts.display_warnings = display_warnings;
-    let mut collector = Collector::new(input.to_owned(), cfgs, libs, externs,
+    let mut collector = Collector::new(input.to_owned(), cfgs, libs, cg, externs,
                                        true, opts, maybe_sysroot, None,
                                        Some(PathBuf::from(input)),
                                        linker, edition);
index a166bca709edbc232b3b5fdf36d682adc5db0d43..600e9eaa05f1450acbd3f5604241450fce828f0a 100644 (file)
@@ -24,7 +24,7 @@
 use rustc::hir;
 use rustc::hir::intravisit;
 use rustc::session::{self, CompileIncomplete, config};
-use rustc::session::config::{OutputType, OutputTypes, Externs};
+use rustc::session::config::{OutputType, OutputTypes, Externs, CodegenOptions};
 use rustc::session::search_paths::{SearchPaths, PathKind};
 use rustc_metadata::dynamic_lib::DynamicLibrary;
 use tempdir::TempDir;
@@ -64,7 +64,8 @@ pub fn run(input_path: &Path,
            maybe_sysroot: Option<PathBuf>,
            display_warnings: bool,
            linker: Option<PathBuf>,
-           edition: Edition)
+           edition: Edition,
+           cg: CodegenOptions)
            -> isize {
     let input = config::Input::File(input_path.to_owned());
 
@@ -73,6 +74,7 @@ pub fn run(input_path: &Path,
             || Some(env::current_exe().unwrap().parent().unwrap().parent().unwrap().to_path_buf())),
         search_paths: libs.clone(),
         crate_types: vec![config::CrateTypeDylib],
+        cg: cg.clone(),
         externs: externs.clone(),
         unstable_features: UnstableFeatures::from_environment(),
         lint_cap: Some(::rustc::lint::Level::Allow),
@@ -125,6 +127,7 @@ pub fn run(input_path: &Path,
     let mut collector = Collector::new(crate_name,
                                        cfgs,
                                        libs,
+                                       cg,
                                        externs,
                                        false,
                                        opts,
@@ -190,7 +193,7 @@ fn scrape_test_config(krate: &::rustc::hir::Crate) -> TestOptions {
 
 fn run_test(test: &str, cratename: &str, filename: &FileName, line: usize,
             cfgs: Vec<String>, libs: SearchPaths,
-            externs: Externs,
+            cg: CodegenOptions, externs: Externs,
             should_panic: bool, no_run: bool, as_test_harness: bool,
             compile_fail: bool, mut error_codes: Vec<String>, opts: &TestOptions,
             maybe_sysroot: Option<PathBuf>, linker: Option<PathBuf>, edition: Edition) {
@@ -215,7 +218,7 @@ fn run_test(test: &str, cratename: &str, filename: &FileName, line: usize,
         cg: config::CodegenOptions {
             prefer_dynamic: true,
             linker,
-            .. config::basic_codegen_options()
+            ..cg
         },
         test: as_test_harness,
         unstable_features: UnstableFeatures::from_environment(),
@@ -478,6 +481,7 @@ pub struct Collector {
 
     cfgs: Vec<String>,
     libs: SearchPaths,
+    cg: CodegenOptions,
     externs: Externs,
     use_headers: bool,
     cratename: String,
@@ -491,15 +495,16 @@ pub struct Collector {
 }
 
 impl Collector {
-    pub fn new(cratename: String, cfgs: Vec<String>, libs: SearchPaths, externs: Externs,
-               use_headers: bool, opts: TestOptions, maybe_sysroot: Option<PathBuf>,
-               codemap: Option<Lrc<CodeMap>>, filename: Option<PathBuf>,
-               linker: Option<PathBuf>, edition: Edition) -> Collector {
+    pub fn new(cratename: String, cfgs: Vec<String>, libs: SearchPaths, cg: CodegenOptions,
+               externs: Externs, use_headers: bool, opts: TestOptions,
+               maybe_sysroot: Option<PathBuf>, codemap: Option<Lrc<CodeMap>>,
+               filename: Option<PathBuf>, linker: Option<PathBuf>, edition: Edition) -> Collector {
         Collector {
             tests: Vec::new(),
             names: Vec::new(),
             cfgs,
             libs,
+            cg,
             externs,
             use_headers,
             cratename,
@@ -524,6 +529,7 @@ pub fn add_test(&mut self, test: String,
         let name = self.generate_name(line, &filename);
         let cfgs = self.cfgs.clone();
         let libs = self.libs.clone();
+        let cg = self.cg.clone();
         let externs = self.externs.clone();
         let cratename = self.cratename.to_string();
         let opts = self.opts.clone();
@@ -552,6 +558,7 @@ pub fn add_test(&mut self, test: String,
                                  line,
                                  cfgs,
                                  libs,
+                                 cg,
                                  externs,
                                  should_panic,
                                  no_run,
index 93f059076d794ea84074ceb11de91b24e43255f7..115f9628a23cc4e6ca7ec5a307a49fb2cedbf49a 100644 (file)
@@ -1124,7 +1124,7 @@ fn len(&self) -> usize {
 
 impl<'a, K: 'a, V: 'a> Drop for Drain<'a, K, V> {
     fn drop(&mut self) {
-        for _ in self {}
+        self.for_each(drop);
     }
 }
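
The one-line change above swaps an explicit `for _ in self {}` loop for `self.for_each(drop)`: the `Drop` impl still has to exhaust the drain so every remaining entry is removed and dropped, it just drives the iterator through internal iteration. A minimal sketch of the same idiom outside the standard library:

```rust
// A drain-like wrapper that must consume whatever the caller did not take.
struct DrainOnDrop<I: Iterator>(I);

impl<I: Iterator> Drop for DrainOnDrop<I> {
    fn drop(&mut self) {
        // Equivalent to `for _ in &mut self.0 {}`, written via internal iteration.
        (&mut self.0).for_each(drop);
    }
}

fn main() {
    let _d = DrainOnDrop(vec![1, 2, 3].into_iter());
    // `_d` is dropped here; its Drop impl consumes and drops the remaining items.
}
```
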
 
index 7520121a8c2900f51506920814679790e61707ff..4850ed0c5be054fac6cd5bfe4771a7520fab1430 100644 (file)
@@ -61,7 +61,7 @@
 /// # Conversions
 ///
 /// See the [module's toplevel documentation about conversions][conversions] for a discussion on
-/// the traits which `OsString` implements for conversions from/to native representations.
+/// the traits which `OsString` implements for [conversions] from/to native representations.
 ///
 /// [`OsStr`]: struct.OsStr.html
 /// [`&OsStr`]: struct.OsStr.html
@@ -74,6 +74,7 @@
 /// [`new`]: #method.new
 /// [`push`]: #method.push
 /// [`as_os_str`]: #method.as_os_str
+/// [conversions]: index.html#conversions
 #[derive(Clone)]
 #[stable(feature = "rust1", since = "1.0.0")]
 pub struct OsString {
@@ -89,7 +90,7 @@ pub struct OsString {
 /// references; the latter are owned strings.
 ///
 /// See the [module's toplevel documentation about conversions][conversions] for a discussion on
-/// the traits which `OsStr` implements for conversions from/to native representations.
+/// the traits which `OsStr` implements for [conversions] from/to native representations.
 ///
 /// [`OsString`]: struct.OsString.html
 /// [`&str`]: ../primitive.str.html
index a34fcb5a7f98b4221dbf37d44a9f5a49d3417511..f0bca7784d840414bb5bea52b1777d60a83ffc41 100644 (file)
 //!
 //! Once you are familiar with the contents of the standard library you may
 //! begin to find the verbosity of the prose distracting. At this stage in your
-//! development you may want to press the **[-]** button near the top of the
+//! development you may want to press the `[-]` button near the top of the
 //! page to collapse it into a more skimmable view.
 //!
-//! While you are looking at that **[-]** button also notice the **[src]**
+//! While you are looking at that `[-]` button also notice the `[src]`
 //! button. Rust's API documentation comes with the source code and you are
 //! encouraged to read it. The standard library source is generally high
 //! quality and a peek behind the curtains is often enlightening.
 #![feature(rand)]
 #![feature(raw)]
 #![feature(rustc_attrs)]
+#![feature(std_internals)]
 #![feature(stdsimd)]
 #![feature(shrink_to)]
 #![feature(slice_bytes)]
@@ -525,7 +526,7 @@ mod coresimd {
 #[unstable(feature = "stdsimd", issue = "48556")]
 #[cfg(all(not(stage0), not(test)))]
 pub use stdsimd::simd;
-#[unstable(feature = "stdsimd", issue = "48556")]
+#[stable(feature = "simd_arch", since = "1.27.0")]
 #[cfg(all(not(stage0), not(test)))]
 pub use stdsimd::arch;
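
The attribute change above marks the `std::arch` re-export as stable (`simd_arch`, since 1.27.0) instead of unstable. A hedged sketch of what that unlocks on stable, assuming an x86_64 target; the particular intrinsics and the runtime-detection guard are illustrative choices, not taken from this diff.

```rust
#[cfg(target_arch = "x86_64")]
fn add_via_sse2(a: f64, b: f64) -> f64 {
    // SSE2 is part of the x86_64 baseline, but the runtime check shows how
    // feature-gated intrinsics are normally guarded before being called.
    if is_x86_feature_detected!("sse2") {
        unsafe {
            use std::arch::x86_64::{_mm_add_pd, _mm_cvtsd_f64, _mm_set1_pd};
            _mm_cvtsd_f64(_mm_add_pd(_mm_set1_pd(a), _mm_set1_pd(b)))
        }
    } else {
        a + b
    }
}

fn main() {
    #[cfg(target_arch = "x86_64")]
    println!("{}", add_via_sse2(1.5, 2.5));
}
```
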
 
index 5ef7c15965505df76be13fe25edf6f41eb2af87e..6902ec82047d73094259a93eccd16f401cebe179 100644 (file)
@@ -787,13 +787,13 @@ macro_rules! assert {
     }
 }
 
-/// A macro for defining #[cfg] if-else statements.
+/// A macro for defining `#[cfg]` if-else statements.
 ///
 /// This is similar to the `if/elif` C preprocessor macro by allowing definition
 /// of a cascade of `#[cfg]` cases, emitting the implementation which matches
 /// first.
 ///
-/// This allows you to conveniently provide a long list #[cfg]'d blocks of code
+/// This allows you to conveniently provide a long list of `#[cfg]`'d blocks of code
 /// without having to rewrite each clause multiple times.
 macro_rules! cfg_if {
     ($(
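
For readers unfamiliar with this internal macro: `cfg_if!` picks the first branch whose `#[cfg]` predicate holds and emits only that branch's items. The in-tree macro above is not exported from std, so the sketch below assumes the external `cfg-if` crate, which exposes the same interface; the function name is made up for illustration.

```rust
#[macro_use]
extern crate cfg_if;

cfg_if! {
    if #[cfg(unix)] {
        fn platform() -> &'static str { "unix" }
    } else if #[cfg(windows)] {
        fn platform() -> &'static str { "windows" }
    } else {
        fn platform() -> &'static str { "other" }
    }
}

fn main() {
    println!("compiled for: {}", platform());
}
```
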
index fba3269204e90e10dbe32bab39be95c5db9d47e2..24eae6a4c821e1db50005be285519f85510a2eb3 100644 (file)
@@ -17,6 +17,8 @@
 //! * Executing a panic up to doing the actual implementation
 //! * Shims around "try"
 
+use core::panic::BoxMeUp;
+
 use io::prelude::*;
 
 use any::Any;
@@ -27,7 +29,7 @@
 use mem;
 use ptr;
 use raw;
-use sys::stdio::Stderr;
+use sys::stdio::{Stderr, stderr_prints_nothing};
 use sys_common::rwlock::RWLock;
 use sys_common::thread_info;
 use sys_common::util;
@@ -56,7 +58,7 @@ fn __rust_maybe_catch_panic(f: fn(*mut u8),
                                 data_ptr: *mut usize,
                                 vtable_ptr: *mut usize) -> u32;
     #[unwind(allowed)]
-    fn __rust_start_panic(data: usize, vtable: usize) -> u32;
+    fn __rust_start_panic(payload: usize) -> u32;
 }
 
 #[derive(Copy, Clone)]
@@ -177,9 +179,6 @@ fn default_hook(info: &PanicInfo) {
     };
 
     let location = info.location().unwrap();  // The current implementation always returns Some
-    let file = location.file();
-    let line = location.line();
-    let col = location.column();
 
     let msg = match info.payload().downcast_ref::<&'static str>() {
         Some(s) => *s,
@@ -193,8 +192,8 @@ fn default_hook(info: &PanicInfo) {
     let name = thread.as_ref().and_then(|t| t.name()).unwrap_or("<unnamed>");
 
     let write = |err: &mut ::io::Write| {
-        let _ = writeln!(err, "thread '{}' panicked at '{}', {}:{}:{}",
-                         name, msg, file, line, col);
+        let _ = writeln!(err, "thread '{}' panicked at '{}', {}",
+                         name, msg, location);
 
         #[cfg(feature = "backtrace")]
         {
@@ -212,15 +211,15 @@ fn default_hook(info: &PanicInfo) {
 
     let prev = LOCAL_STDERR.with(|s| s.borrow_mut().take());
     match (prev, err.as_mut()) {
-        (Some(mut stderr), _) => {
-            write(&mut *stderr);
-            let mut s = Some(stderr);
-            LOCAL_STDERR.with(|slot| {
-                *slot.borrow_mut() = s.take();
-            });
-        }
-        (None, Some(ref mut err)) => { write(err) }
-        _ => {}
+       (Some(mut stderr), _) => {
+           write(&mut *stderr);
+           let mut s = Some(stderr);
+           LOCAL_STDERR.with(|slot| {
+               *slot.borrow_mut() = s.take();
+           });
+       }
+       (None, Some(ref mut err)) => { write(err) }
+       _ => {}
     }
 }
 
@@ -342,9 +341,38 @@ pub fn begin_panic_fmt(msg: &fmt::Arguments,
     // panic + OOM properly anyway (see comment in begin_panic
     // below).
 
-    let mut s = String::new();
-    let _ = s.write_fmt(*msg);
-    rust_panic_with_hook(Box::new(s), Some(msg), file_line_col)
+    rust_panic_with_hook(&mut PanicPayload::new(msg), Some(msg), file_line_col);
+
+    struct PanicPayload<'a> {
+        inner: &'a fmt::Arguments<'a>,
+        string: Option<String>,
+    }
+
+    impl<'a> PanicPayload<'a> {
+        fn new(inner: &'a fmt::Arguments<'a>) -> PanicPayload<'a> {
+            PanicPayload { inner, string: None }
+        }
+
+        fn fill(&mut self) -> &mut String {
+            let inner = self.inner;
+            self.string.get_or_insert_with(|| {
+                let mut s = String::new();
+                drop(s.write_fmt(*inner));
+                s
+            })
+        }
+    }
+
+    unsafe impl<'a> BoxMeUp for PanicPayload<'a> {
+        fn box_me_up(&mut self) -> *mut (Any + Send) {
+            let contents = mem::replace(self.fill(), String::new());
+            Box::into_raw(Box::new(contents))
+        }
+
+        fn get(&mut self) -> &(Any + Send) {
+            self.fill()
+        }
+    }
 }
 
 /// This is the entry point of panicking for panic!() and assert!().
@@ -360,18 +388,42 @@ pub fn begin_panic<M: Any + Send>(msg: M, file_line_col: &(&'static str, u32, u3
     // be performed in the parent of this thread instead of the thread that's
     // panicking.
 
-    rust_panic_with_hook(Box::new(msg), None, file_line_col)
+    rust_panic_with_hook(&mut PanicPayload::new(msg), None, file_line_col);
+
+    struct PanicPayload<A> {
+        inner: Option<A>,
+    }
+
+    impl<A: Send + 'static> PanicPayload<A> {
+        fn new(inner: A) -> PanicPayload<A> {
+            PanicPayload { inner: Some(inner) }
+        }
+    }
+
+    unsafe impl<A: Send + 'static> BoxMeUp for PanicPayload<A> {
+        fn box_me_up(&mut self) -> *mut (Any + Send) {
+            let data = match self.inner.take() {
+                Some(a) => Box::new(a) as Box<Any + Send>,
+                None => Box::new(()),
+            };
+            Box::into_raw(data)
+        }
+
+        fn get(&mut self) -> &(Any + Send) {
+            match self.inner {
+                Some(ref a) => a,
+                None => &(),
+            }
+        }
+    }
 }
 
-/// Executes the primary logic for a panic, including checking for recursive
-/// panics and panic hooks.
+/// Central point for dispatching panics.
 ///
-/// This is the entry point or panics from libcore, formatted panics, and
-/// `Box<Any>` panics. Here we'll verify that we're not panicking recursively,
-/// run panic hooks, and then delegate to the actual implementation of panics.
-#[inline(never)]
-#[cold]
-fn rust_panic_with_hook(payload: Box<Any + Send>,
+/// Executes the primary logic for a panic, including checking for recursive
+/// panics, panic hooks, and finally dispatching to the panic runtime to either
+/// abort or unwind.
+fn rust_panic_with_hook(payload: &mut BoxMeUp,
                         message: Option<&fmt::Arguments>,
                         file_line_col: &(&'static str, u32, u32)) -> ! {
     let (file, line, col) = *file_line_col;
@@ -390,15 +442,24 @@ fn rust_panic_with_hook(payload: Box<Any + Send>,
     }
 
     unsafe {
-        let info = PanicInfo::internal_constructor(
-            &*payload,
+        let mut info = PanicInfo::internal_constructor(
             message,
             Location::internal_constructor(file, line, col),
         );
         HOOK_LOCK.read();
         match HOOK {
-            Hook::Default => default_hook(&info),
-            Hook::Custom(ptr) => (*ptr)(&info),
+            // Some platforms know that printing to stderr won't ever actually
+            // print anything, and if that's the case we can skip the default
+            // hook.
+            Hook::Default if stderr_prints_nothing() => {}
+            Hook::Default => {
+                info.set_payload(payload.get());
+                default_hook(&info);
+            }
+            Hook::Custom(ptr) => {
+                info.set_payload(payload.get());
+                (*ptr)(&info);
+            }
         }
         HOOK_LOCK.read_unlock();
     }
@@ -419,16 +480,29 @@ fn rust_panic_with_hook(payload: Box<Any + Send>,
 /// Shim around rust_panic. Called by resume_unwind.
 pub fn update_count_then_panic(msg: Box<Any + Send>) -> ! {
     update_panic_count(1);
-    rust_panic(msg)
+
+    struct RewrapBox(Box<Any + Send>);
+
+    unsafe impl BoxMeUp for RewrapBox {
+        fn box_me_up(&mut self) -> *mut (Any + Send) {
+            Box::into_raw(mem::replace(&mut self.0, Box::new(())))
+        }
+
+        fn get(&mut self) -> &(Any + Send) {
+            &*self.0
+        }
+    }
+
+    rust_panic(&mut RewrapBox(msg))
 }
 
 /// A private no-mangle function on which to slap yer breakpoints.
 #[no_mangle]
 #[allow(private_no_mangle_fns)] // yes we get it, but we like breakpoints
-pub fn rust_panic(msg: Box<Any + Send>) -> ! {
+pub fn rust_panic(mut msg: &mut BoxMeUp) -> ! {
     let code = unsafe {
-        let obj = mem::transmute::<_, raw::TraitObject>(msg);
-        __rust_start_panic(obj.data as usize, obj.vtable as usize)
+        let obj = &mut msg as *mut &mut BoxMeUp;
+        __rust_start_panic(obj as usize)
     };
     rtabort!("failed to initiate panic, error {}", code)
 }
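
The panicking rework above replaces the eagerly built `Box<Any + Send>` payload with a `&mut BoxMeUp` handle: `rust_panic_with_hook` only calls `payload.get()` when a hook runs (and skips the default hook entirely when `stderr_prints_nothing()`), and the boxed payload is only materialized via `box_me_up()` once the panic is handed to the panic runtime through the new single-argument `__rust_start_panic`. From the user's side nothing changes; a hook installed with `std::panic::set_hook` still sees the payload through `PanicInfo::payload`. A minimal sketch of that unchanged surface:

```rust
use std::panic;

fn main() {
    panic::set_hook(Box::new(|info| {
        // The payload is set on the PanicInfo right before the hook runs.
        let msg = info
            .payload()
            .downcast_ref::<&str>()
            .map(|s| *s)
            .unwrap_or("<non-string payload>");
        eprintln!("custom hook saw: {}", msg);
    }));

    let _ = panic::catch_unwind(|| panic!("boom"));
}
```
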
index 9519a926471085f92e64217c60a6bb21c5b977e9..1d7344f921c9d564d30bac0c09f86bcf7b682995 100644 (file)
@@ -77,3 +77,7 @@ pub fn is_ebadf(err: &io::Error) -> bool {
 }
 
 pub const STDIN_BUF_SIZE: usize = ::sys_common::io::DEFAULT_BUF_SIZE;
+
+pub fn stderr_prints_nothing() -> bool {
+    false
+}
index 3abb094ac34e3d98af5855f15bbb96f7e63d0982..7a4d11b0ecb9a8c64f2845898755d6883608544e 100644 (file)
@@ -75,3 +75,7 @@ pub fn is_ebadf(err: &io::Error) -> bool {
 }
 
 pub const STDIN_BUF_SIZE: usize = ::sys_common::io::DEFAULT_BUF_SIZE;
+
+pub fn stderr_prints_nothing() -> bool {
+    false
+}
index 9bdea945ea42eacf52a993f91f6912bcbb675a08..c1298e5040dbeec35b3a503c17d1aa2e4aa961c8 100644 (file)
@@ -80,11 +80,11 @@ pub fn init() {
         reset_sigpipe();
     }
 
-    #[cfg(not(any(target_os = "emscripten", target_os="fuchsia")))]
+    #[cfg(not(any(target_os = "emscripten", target_os = "fuchsia")))]
     unsafe fn reset_sigpipe() {
         assert!(signal(libc::SIGPIPE, libc::SIG_IGN) != libc::SIG_ERR);
     }
-    #[cfg(any(target_os = "emscripten", target_os="fuchsia"))]
+    #[cfg(any(target_os = "emscripten", target_os = "fuchsia"))]
     unsafe fn reset_sigpipe() {}
 }
 
index e9b3d4affc7dd6fcb92505e96a99879529676a68..87ba2aef4f1d3dfe1e499f089b55adc4e6df5bba 100644 (file)
@@ -75,3 +75,7 @@ pub fn is_ebadf(err: &io::Error) -> bool {
 }
 
 pub const STDIN_BUF_SIZE: usize = ::sys_common::io::DEFAULT_BUF_SIZE;
+
+pub fn stderr_prints_nothing() -> bool {
+    false
+}
index 8b06f54167487dd6cd38955b5b567efa2d24ed10..6516010af475940f0d2f6cc0c8209db0a5cd4f76 100644 (file)
@@ -30,7 +30,7 @@ pub unsafe fn read(&self) {
         if *mode >= 0 {
             *mode += 1;
         } else {
-            panic!("rwlock locked for writing");
+            rtabort!("rwlock locked for writing");
         }
     }
 
@@ -51,7 +51,7 @@ pub unsafe fn write(&self) {
         if *mode == 0 {
             *mode = -1;
         } else {
-            panic!("rwlock locked for reading")
+            rtabort!("rwlock locked for reading")
         }
     }
 
index beb19c0ed2c1f62bd17b1611db911eb9a04eb7af..023f29576a27d9c4dd667b1a035d9966de5efcfe 100644 (file)
@@ -69,3 +69,7 @@ fn flush(&mut self) -> io::Result<()> {
 pub fn is_ebadf(_err: &io::Error) -> bool {
     true
 }
+
+pub fn stderr_prints_nothing() -> bool {
+    !cfg!(feature = "wasm_syscall")
+}
index 855603685905958de4bf5494a677bc4bc23e4497..9bf9f749d4df252c89182265ccb7fc3d80290631 100644 (file)
@@ -117,7 +117,7 @@ unsafe fn remutex(&self) -> *mut ReentrantMutex {
             0 => {}
             n => return n as *mut _,
         }
-        let mut re = Box::new(ReentrantMutex::uninitialized());
+        let mut re = box ReentrantMutex::uninitialized();
         re.init();
         let re = Box::into_raw(re);
         match self.lock.compare_and_swap(0, re as usize, Ordering::SeqCst) {
index b43df20bddd087144faa8b3626de5a72efed7e67..81b89da21d3c60f0cf9d1574932e48403a12cc83 100644 (file)
@@ -227,3 +227,7 @@ pub fn is_ebadf(err: &io::Error) -> bool {
 // idea is that on windows we use a slightly smaller buffer that's
 // been seen to be acceptable.
 pub const STDIN_BUF_SIZE: usize = 8 * 1024;
+
+pub fn stderr_prints_nothing() -> bool {
+    false
+}
index 1955f3ec9a28fc98f129e9341c0289ae948bedf8..20109d2d0d5aceb5790c6d063c7c6d9b331e72eb 100644 (file)
@@ -139,10 +139,10 @@ pub fn __rust_begin_short_backtrace<F, T>(f: F) -> T
 /// Controls how the backtrace should be formatted.
 #[derive(Debug, Copy, Clone, Eq, PartialEq)]
 pub enum PrintFormat {
-    /// Show all the frames with absolute path for files.
-    Full = 2,
     /// Show only relevant data from the backtrace.
-    Short = 3,
+    Short = 2,
+    /// Show all the frames with absolute path for files.
+    Full = 3,
 }
 
 // For now logging is turned off by default, and this function checks to see
@@ -150,11 +150,10 @@ pub enum PrintFormat {
 pub fn log_enabled() -> Option<PrintFormat> {
     static ENABLED: atomic::AtomicIsize = atomic::AtomicIsize::new(0);
     match ENABLED.load(Ordering::SeqCst) {
-        0 => {},
+        0 => {}
         1 => return None,
-        2 => return Some(PrintFormat::Full),
-        3 => return Some(PrintFormat::Short),
-        _ => unreachable!(),
+        2 => return Some(PrintFormat::Short),
+        _ => return Some(PrintFormat::Full),
     }
 
     let val = match env::var_os("RUST_BACKTRACE") {
index 27504d374ddbfaba19cb09ea52ba382aae13919b..d0c4d6a773746e70cc175de3a0866085532d6609 100644 (file)
 use sync::Once;
 use sys;
 
+macro_rules! rtabort {
+    ($($t:tt)*) => (::sys_common::util::abort(format_args!($($t)*)))
+}
+
+macro_rules! rtassert {
+    ($e:expr) => (if !$e {
+        rtabort!(concat!("assertion failed: ", stringify!($e)));
+    })
+}
+
 pub mod at_exit_imp;
 #[cfg(feature = "backtrace")]
 pub mod backtrace;
@@ -101,10 +111,6 @@ pub fn at_exit<F: FnOnce() + Send + 'static>(f: F) -> Result<(), ()> {
     if at_exit_imp::push(Box::new(f)) {Ok(())} else {Err(())}
 }
 
-macro_rules! rtabort {
-    ($($t:tt)*) => (::sys_common::util::abort(format_args!($($t)*)))
-}
-
 /// One-time runtime cleanup.
 pub fn cleanup() {
     static CLEANUP: Once = Once::new();
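
Moving `rtabort!` (and the new `rtassert!`) above the `pub mod ...` declarations matters because `macro_rules!` scoping is textual: a macro is only visible to code that appears after its definition, so hoisting the definitions makes them usable inside the submodules declared below. A tiny standalone illustration of that scoping rule (names are made up):

```rust
macro_rules! shout {
    ($msg:expr) => {
        eprintln!("!!! {}", $msg)
    };
}

// Because `shout!` is defined above this module, the module body can use it
// without any `#[macro_use]` plumbing.
mod helpers {
    pub fn warn() {
        shout!("textual macro scoping in action");
    }
}

fn main() {
    helpers::warn();
}
```
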
index a4aa3d96d25c00f5c399236b2e0275b595fc6ac0..d0d6224de0a151376ab1e8407a9c68766c52399a 100644 (file)
@@ -169,7 +169,7 @@ unsafe fn lazy_init(&self) -> usize {
                 self.key.store(key, Ordering::SeqCst);
             }
             INIT_LOCK.unlock();
-            assert!(key != 0);
+            rtassert!(key != 0);
             return key
         }
 
@@ -190,7 +190,7 @@ unsafe fn lazy_init(&self) -> usize {
             imp::destroy(key1);
             key2
         };
-        assert!(key != 0);
+        rtassert!(key != 0);
         match self.key.compare_and_swap(0, key as usize, Ordering::SeqCst) {
             // The CAS succeeded, so we've created the actual key
             0 => key as usize,
index a391c7cc6ef0c959c2e25eaf0bddf175cbb59cec..a373e980b970d21263aa8389ddece4cb26442365 100644 (file)
 
 use fmt;
 use io::prelude::*;
-use sys::stdio::Stderr;
+use sys::stdio::{Stderr, stderr_prints_nothing};
 use thread;
 
 pub fn dumb_print(args: fmt::Arguments) {
+    if stderr_prints_nothing() {
+        return
+    }
     let _ = Stderr::new().map(|mut stderr| stderr.write_fmt(args));
 }
 
index 99479bc56eff30ed9388c04d14168ed90174f331..40d3280baa687da385fe43d2f36b52962f67ffc7 100644 (file)
@@ -172,12 +172,16 @@ unsafe fn __getit() -> $crate::option::Option<
                 &'static $crate::cell::UnsafeCell<
                     $crate::option::Option<$t>>>
             {
+                #[cfg(target_arch = "wasm32")]
+                static __KEY: $crate::thread::__StaticLocalKeyInner<$t> =
+                    $crate::thread::__StaticLocalKeyInner::new();
+
                 #[thread_local]
-                #[cfg(target_thread_local)]
+                #[cfg(all(target_thread_local, not(target_arch = "wasm32")))]
                 static __KEY: $crate::thread::__FastLocalKeyInner<$t> =
                     $crate::thread::__FastLocalKeyInner::new();
 
-                #[cfg(not(target_thread_local))]
+                #[cfg(all(not(target_thread_local), not(target_arch = "wasm32")))]
                 static __KEY: $crate::thread::__OsLocalKeyInner<$t> =
                     $crate::thread::__OsLocalKeyInner::new();
 
@@ -295,6 +299,39 @@ pub fn try_with<F, R>(&'static self, f: F) -> Result<R, AccessError>
     }
 }
 
+/// On some platforms, like wasm32, there are no threads, so there is no need
+/// to generate thread locals and we can instead just use plain statics!
+#[doc(hidden)]
+#[cfg(target_arch = "wasm32")]
+pub mod statik {
+    use cell::UnsafeCell;
+    use fmt;
+
+    pub struct Key<T> {
+        inner: UnsafeCell<Option<T>>,
+    }
+
+    unsafe impl<T> ::marker::Sync for Key<T> { }
+
+    impl<T> fmt::Debug for Key<T> {
+        fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+            f.pad("Key { .. }")
+        }
+    }
+
+    impl<T> Key<T> {
+        pub const fn new() -> Key<T> {
+            Key {
+                inner: UnsafeCell::new(None),
+            }
+        }
+
+        pub unsafe fn get(&self) -> Option<&'static UnsafeCell<Option<T>>> {
+            Some(&*(&self.inner as *const _))
+        }
+    }
+}
+
 #[doc(hidden)]
 #[cfg(target_thread_local)]
 pub mod fast {
index 71aee673cfe3ee57e7893a54c1e1823493713693..1b976b79b4c989f8cc63f9470faec10bae0e8c30 100644 (file)
 // where fast TLS was not available; end-user code is compiled with fast TLS
 // where available, but both are needed.
 
+#[unstable(feature = "libstd_thread_internals", issue = "0")]
+#[cfg(target_arch = "wasm32")]
+#[doc(hidden)] pub use self::local::statik::Key as __StaticLocalKeyInner;
 #[unstable(feature = "libstd_thread_internals", issue = "0")]
 #[cfg(target_thread_local)]
 #[doc(hidden)] pub use self::local::fast::Key as __FastLocalKeyInner;
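
The two hunks above make `thread_local!` work on wasm32 by backing it with a plain static (`__StaticLocalKeyInner`) instead of a `#[thread_local]` or OS key, since the target has no threads. User code is unchanged; the same macro invocation simply resolves to the static-backed key on that target. For reference, a typical use looks like:

```rust
use std::cell::Cell;

thread_local!(static COUNTER: Cell<u32> = Cell::new(0));

fn bump() -> u32 {
    COUNTER.with(|c| {
        c.set(c.get() + 1);
        c.get()
    })
}

fn main() {
    assert_eq!(bump(), 1);
    assert_eq!(bump(), 2);
}
```
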
index 01b971976a763c3d49bf72f5283e2303e52f0442..77c6afa1c64a60883af2be338312117888fc1a6b 100644 (file)
@@ -386,72 +386,26 @@ fn parse_sep_and_kleene_op<I>(
 {
     // We basically look at two token trees here, denoted as #1 and #2 below
     let span = match parse_kleene_op(input, span) {
-        // #1 is a `+` or `*` KleeneOp
-        //
-        // `?` is ambiguous: it could be a separator or a Kleene::ZeroOrOne, so we need to look
-        // ahead one more token to be sure.
-        Ok(Ok(op)) if op != KleeneOp::ZeroOrOne => return (None, op),
-
-        // #1 is `?` token, but it could be a Kleene::ZeroOrOne without a separator or it could
-        // be a `?` separator followed by any Kleene operator. We need to look ahead 1 token to
-        // find out which.
-        Ok(Ok(op)) => {
-            assert_eq!(op, KleeneOp::ZeroOrOne);
-
-            // Lookahead at #2. If it is a KleenOp, then #1 is a separator.
-            let is_1_sep = if let Some(&tokenstream::TokenTree::Token(_, ref tok2)) = input.peek() {
-                kleene_op(tok2).is_some()
-            } else {
-                false
-            };
-
-            if is_1_sep {
-                // #1 is a separator and #2 should be a KleepeOp::*
-                // (N.B. We need to advance the input iterator.)
-                match parse_kleene_op(input, span) {
-                    // #2 is a KleeneOp (this is the only valid option) :)
-                    Ok(Ok(op)) if op == KleeneOp::ZeroOrOne => {
-                        if !features.macro_at_most_once_rep
-                            && !attr::contains_name(attrs, "allow_internal_unstable")
-                        {
-                            let explain = feature_gate::EXPLAIN_MACRO_AT_MOST_ONCE_REP;
-                            emit_feature_err(
-                                sess,
-                                "macro_at_most_once_rep",
-                                span,
-                                GateIssue::Language,
-                                explain,
-                            );
-                        }
-                        return (Some(token::Question), op);
-                    }
-                    Ok(Ok(op)) => return (Some(token::Question), op),
-
-                    // #2 is a random token (this is an error) :(
-                    Ok(Err((_, span))) => span,
-
-                    // #2 is not even a token at all :(
-                    Err(span) => span,
-                }
-            } else {
-                if !features.macro_at_most_once_rep
-                    && !attr::contains_name(attrs, "allow_internal_unstable")
-                {
-                    let explain = feature_gate::EXPLAIN_MACRO_AT_MOST_ONCE_REP;
-                    emit_feature_err(
-                        sess,
-                        "macro_at_most_once_rep",
-                        span,
-                        GateIssue::Language,
-                        explain,
-                    );
-                }
-
-                // #2 is a random tree and #1 is KleeneOp::ZeroOrOne
-                return (None, op);
+        // #1 is any KleeneOp (`?`)
+        Ok(Ok(op)) if op == KleeneOp::ZeroOrOne => {
+            if !features.macro_at_most_once_rep
+                && !attr::contains_name(attrs, "allow_internal_unstable")
+            {
+                let explain = feature_gate::EXPLAIN_MACRO_AT_MOST_ONCE_REP;
+                emit_feature_err(
+                    sess,
+                    "macro_at_most_once_rep",
+                    span,
+                    GateIssue::Language,
+                    explain,
+                );
             }
+            return (None, op);
         }
 
+        // #1 is any KleeneOp (`+`, `*`)
+        Ok(Ok(op)) => return (None, op),
+
         // #1 is a separator followed by #2, a KleeneOp
         Ok(Err((tok, span))) => match parse_kleene_op(input, span) {
             // #2 is a KleeneOp :D
@@ -467,8 +421,11 @@ fn parse_sep_and_kleene_op<I>(
                         GateIssue::Language,
                         explain,
                     );
+                } else {
+                    sess.span_diagnostic
+                        .span_err(span, "`?` macro repetition does not allow a separator");
                 }
-                return (Some(tok), op);
+                return (None, op);
             }
             Ok(Ok(op)) => return (Some(tok), op),
 
@@ -483,9 +440,7 @@ fn parse_sep_and_kleene_op<I>(
         Err(span) => span,
     };
 
-    if !features.macro_at_most_once_rep
-        && !attr::contains_name(attrs, "allow_internal_unstable")
-    {
+    if !features.macro_at_most_once_rep && !attr::contains_name(attrs, "allow_internal_unstable") {
         sess.span_diagnostic
             .span_err(span, "expected one of: `*`, `+`, or `?`");
     } else {
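
The rewrite above simplifies `parse_sep_and_kleene_op`: a lone `?` is now always treated as the "at most once" Kleene operator (still gated behind `macro_at_most_once_rep`), and combining `?` with a separator becomes a hard error ("`?` macro repetition does not allow a separator") instead of being accepted. As a sketch of the gated syntax on a nightly of this era (the macro itself is made up for illustration):

```rust
#![feature(macro_at_most_once_rep)]

macro_rules! sum {
    // `$(, $b:expr)?` matches the trailing operand zero or one times;
    // after this change it may not carry a separator of its own.
    ($a:expr $(, $b:expr)?) => {
        $a $(+ $b)?
    };
}

fn main() {
    assert_eq!(sum!(1), 1);
    assert_eq!(sum!(1, 2), 3);
}
```
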
index 73ebfc20876d0679814369d4abe88883207d2570..7b7cfe5eea00b72a945a70e0f560de5084e4aa01 100644 (file)
@@ -231,9 +231,6 @@ pub fn walk_feature_fields<F>(&self, mut f: F)
     // allow `repr(simd)`, and importing the various simd intrinsics
     (active, repr_simd, "1.4.0", Some(27731), None),
 
-    // Allows cfg(target_feature = "...").
-    (active, cfg_target_feature, "1.4.0", Some(29717), None),
-
     // allow `extern "platform-intrinsic" { ... }`
     (active, platform_intrinsics, "1.4.0", Some(27731), None),
 
@@ -293,9 +290,6 @@ pub fn walk_feature_fields<F>(&self, mut f: F)
 
     (active, use_extern_macros, "1.15.0", Some(35896), None),
 
-    // Allows #[target_feature(...)]
-    (active, target_feature, "1.15.0", None, None),
-
     // `extern "ptx-*" fn()`
     (active, abi_ptx, "1.15.0", None, None),
 
@@ -378,12 +372,6 @@ pub fn walk_feature_fields<F>(&self, mut f: F)
     // Future-proofing enums/structs with #[non_exhaustive] attribute (RFC 2008)
     (active, non_exhaustive, "1.22.0", Some(44109), None),
 
-    // allow `'_` placeholder lifetimes
-    (active, underscore_lifetimes, "1.22.0", Some(44524), None),
-
-    // Default match binding modes (RFC 2005)
-    (active, match_default_bindings, "1.22.0", Some(42640), None),
-
     // Trait object syntax with `dyn` prefix
     (active, dyn_trait, "1.22.0", Some(44662), Some(Edition::Edition2018)),
 
@@ -452,6 +440,17 @@ pub fn walk_feature_fields<F>(&self, mut f: F)
 
     // Allows macro invocations in `extern {}` blocks
     (active, macros_in_extern, "1.27.0", Some(49476), None),
+
+    // unstable #[target_feature] directives
+    (active, arm_target_feature, "1.27.0", None, None),
+    (active, aarch64_target_feature, "1.27.0", None, None),
+    (active, hexagon_target_feature, "1.27.0", None, None),
+    (active, powerpc_target_feature, "1.27.0", None, None),
+    (active, mips_target_feature, "1.27.0", None, None),
+    (active, avx512_target_feature, "1.27.0", None, None),
+    (active, mmx_target_feature, "1.27.0", None, None),
+    (active, sse4a_target_feature, "1.27.0", None, None),
+    (active, tbm_target_feature, "1.27.0", None, None),
 );
 
 declare_features! (
@@ -574,6 +573,10 @@ pub fn walk_feature_fields<F>(&self, mut f: F)
     (accepted, underscore_lifetimes, "1.26.0", Some(44524), None),
     // Allows attributes on lifetime/type formal parameters in generics (RFC 1327)
     (accepted, generic_param_attrs, "1.26.0", Some(48848), None),
+    // Allows cfg(target_feature = "...").
+    (accepted, cfg_target_feature, "1.27.0", Some(29717), None),
+    // Allows #[target_feature(...)]
+    (accepted, target_feature, "1.27.0", None, None),
 );
 
 // If you change this, please modify src/doc/unstable-book as well. You must
@@ -918,10 +921,7 @@ pub fn is_builtin_attr(attr: &ast::Attribute) -> bool {
                                  "the `#[naked]` attribute \
                                   is an experimental feature",
                                  cfg_fn!(naked_functions))),
-    ("target_feature", Whitelisted, Gated(
-        Stability::Unstable, "target_feature",
-        "the `#[target_feature]` attribute is an experimental feature",
-        cfg_fn!(target_feature))),
+    ("target_feature", Normal, Ungated),
     ("export_name", Whitelisted, Ungated),
     ("inline", Whitelisted, Ungated),
     ("link", Whitelisted, Ungated),
@@ -1052,7 +1052,6 @@ pub fn is_builtin_attr(attr: &ast::Attribute) -> bool {
 // cfg(...)'s that are feature gated
 const GATED_CFGS: &[(&str, &str, fn(&Features) -> bool)] = &[
     // (name in cfg, feature, function to check if the feature is enabled)
-    ("target_feature", "cfg_target_feature", cfg_fn!(cfg_target_feature)),
     ("target_vendor", "cfg_target_vendor", cfg_fn!(cfg_target_vendor)),
     ("target_thread_local", "cfg_target_thread_local", cfg_fn!(cfg_target_thread_local)),
     ("target_has_atomic", "cfg_target_has_atomic", cfg_fn!(cfg_target_has_atomic)),
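
These feature-table edits stabilize `cfg(target_feature = "...")` and `#[target_feature(...)]` (moving them to the accepted list for 1.27.0) while adding per-architecture gates (`arm_target_feature`, `avx512_target_feature`, and so on) for still-unstable feature names. A hedged sketch of the now-ungated forms, assuming an x86_64 target and a whitelisted feature such as AVX2; the `RUSTFLAGS` example and the function name are illustrative.

```rust
// Compile-time knowledge: true only if the crate is built with AVX2 enabled,
// e.g. RUSTFLAGS="-C target-feature=+avx2".
#[cfg(target_feature = "avx2")]
const BUILT_WITH_AVX2: bool = true;
#[cfg(not(target_feature = "avx2"))]
const BUILT_WITH_AVX2: bool = false;

// Per-function opt-in: callers must prove (for example via runtime detection)
// that AVX2 is actually present, hence the `unsafe`.
#[cfg(target_arch = "x86_64")]
#[target_feature(enable = "avx2")]
unsafe fn uses_avx2() {}

fn main() {
    println!("built with AVX2: {}", BUILT_WITH_AVX2);
}
```
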
index 8cb5776fdeb0167ccc3ea403fa8e8cf7d728db8c..5e96b5ce6733c8d60631458755b37ac4e6ddd32e 100644 (file)
@@ -8,9 +8,9 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
-//! Machinery for hygienic macros, inspired by the MTWT[1] paper.
+//! Machinery for hygienic macros, inspired by the `MTWT[1]` paper.
 //!
-//! [1] Matthew Flatt, Ryan Culpepper, David Darais, and Robert Bruce Findler. 2012.
+//! `[1]` Matthew Flatt, Ryan Culpepper, David Darais, and Robert Bruce Findler. 2012.
 //! *Macros that work together: Compile-time bindings, partial expansion,
 //! and definition contexts*. J. Funct. Program. 22, 2 (March 2012), 181-216.
 //! DOI=10.1017/S0956796812000093 <http://dx.doi.org/10.1017/S0956796812000093>
index 331b0fe5481d5c0d0593626b21dcce8288b241e3..50fac600a978d21ab05dc0ae9174120f25621029 100644 (file)
@@ -336,24 +336,23 @@ pub fn fresh() -> Self {
     (46, Offsetof,           "offsetof")
     (47, Override,           "override")
     (48, Priv,               "priv")
-    (49, Proc,               "proc")
-    (50, Pure,               "pure")
-    (51, Sizeof,             "sizeof")
-    (52, Typeof,             "typeof")
-    (53, Unsized,            "unsized")
-    (54, Virtual,            "virtual")
-    (55, Yield,              "yield")
+    (49, Pure,               "pure")
+    (50, Sizeof,             "sizeof")
+    (51, Typeof,             "typeof")
+    (52, Unsized,            "unsized")
+    (53, Virtual,            "virtual")
+    (54, Yield,              "yield")
 
     // Special lifetime names
-    (56, UnderscoreLifetime, "'_")
-    (57, StaticLifetime,     "'static")
+    (55, UnderscoreLifetime, "'_")
+    (56, StaticLifetime,     "'static")
 
     // Weak keywords, have special meaning only in specific contexts.
-    (58, Auto,               "auto")
-    (59, Catch,              "catch")
-    (60, Default,            "default")
-    (61, Dyn,                "dyn")
-    (62, Union,              "union")
+    (57, Auto,               "auto")
+    (58, Catch,              "catch")
+    (59, Default,            "default")
+    (60, Dyn,                "dyn")
+    (61, Union,              "union")
 }
 
 // If an interner exists, return it. Otherwise, prepare a fresh one.
index 9291eaa910bd744d745935eb295fc6efac275f3b..a4d1797c3ec5baa000daeebb55524950c06b63fe 100644 (file)
@@ -1288,7 +1288,6 @@ fn num_cpus() -> usize {
 
 pub fn filter_tests(opts: &TestOpts, tests: Vec<TestDescAndFn>) -> Vec<TestDescAndFn> {
     let mut filtered = tests;
-
     // Remove tests that don't match the test filter
     filtered = match opts.filter {
         None => filtered,
index 26376a3733f4fce327c5a83902e115ee4979d1f3..a962d5fc4153740e377dd50243969c75543d0562 100644 (file)
@@ -8,13 +8,13 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
-/// A macro for defining #[cfg] if-else statements.
+/// A macro for defining `#[cfg]` if-else statements.
 ///
 /// This is similar to the `if/elif` C preprocessor macro by allowing definition
 /// of a cascade of `#[cfg]` cases, emitting the implementation which matches
 /// first.
 ///
-/// This allows you to conveniently provide a long list #[cfg]'d blocks of code
+/// This allows you to conveniently provide a long list of `#[cfg]`'d blocks of code
 /// without having to rewrite each clause multiple times.
 macro_rules! cfg_if {
     ($(
index a888838ce432cb1fa44e8f738406961ede720674..ab5a7c3f747eb3835aae5144032639e31215db30 100644 (file)
@@ -23,4 +23,7 @@
 
 extern crate rustc_driver;
 
-fn main() { rustc_driver::main() }
+fn main() {
+    rustc_driver::set_sigpipe_handler();
+    rustc_driver::main()
+}
index bcb720e55861c38db47f2ebdf26b7198338cb39d..effdcd0132d17b6c4badc67b4b6d3fdf749a2d22 160000 (submodule)
@@ -1 +1 @@
-Subproject commit bcb720e55861c38db47f2ebdf26b7198338cb39d
+Subproject commit effdcd0132d17b6c4badc67b4b6d3fdf749a2d22
index f7e246e8f0f41383b2e9186f3acffec03450de4b..5643e1ef09146eb1fe6b4daae607938d6c3797b1 100644 (file)
@@ -26,10 +26,12 @@ fn foo(a: &mut i32) {
         inside_closure(a)
     };
     outside_closure_1(a); //[ast]~ ERROR cannot borrow `*a` as mutable because previous closure requires unique access
-                         //[mir]~^ ERROR cannot borrow `*a` as mutable because previous closure requires unique access
+    //[mir]~^ ERROR cannot borrow `*a` as mutable because previous closure requires unique access
 
     outside_closure_2(a); //[ast]~ ERROR cannot borrow `*a` as immutable because previous closure requires unique access
-                         //[mir]~^ ERROR cannot borrow `*a` as immutable because previous closure requires unique access
+    //[mir]~^ ERROR cannot borrow `*a` as immutable because previous closure requires unique access
+
+    drop(bar);
 }
 
 fn main() {
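
This and the following compile-fail tests gain trailing `drop(...)` calls (and several `_x` bindings are renamed to `x`) for a reason that is easy to miss: under the MIR borrow checker with NLL-style, liveness-based regions, a borrow that is never used again can end early, so the line each test expects to error on would no longer conflict with anything. The explicit `drop` keeps the borrow live across that line on both the AST and MIR checkers. A minimal standalone version of the pattern, deliberately rejected by the compiler:

```rust
fn main() {
    let mut a = 3;
    let b = &mut a;
    a = 4;   // ERROR: cannot assign to `a` because it is borrowed
    drop(b); // without this later use, the borrow could end before the assignment
}
```
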
index 9ad3a8f9fc8671e072a93e06d9fb77ccafae32f9..0b230be85ad97c2ab68d97220eb720159c6172d2 100644 (file)
@@ -14,7 +14,7 @@
 // ignore-sparc
 
 // revisions: ast mir
-//[mir]compile-flags: -Z borrowck=mir -Z nll
+//[mir]compile-flags: -Z borrowck=mir
 
 #![feature(asm)]
 
index f498d8d500e649e3562905495320f230324cb3fd..9057ba0790712f36ec808332ab3523acacdcd7c4 100644 (file)
@@ -29,7 +29,8 @@ fn a() {
     let mut x = 3;
     let c1 = || x = 4;
     let c2 = || x * 5; //[ast]~ ERROR cannot borrow `x`
-                       //[mir]~^ ERROR cannot borrow `x` as immutable because it is also borrowed as mutable
+    //[mir]~^ ERROR cannot borrow `x` as immutable because it is also borrowed as mutable
+    drop(c1);
 }
 
 fn b() {
@@ -37,6 +38,7 @@ fn b() {
     let c1 = || set(&mut x);
     let c2 = || get(&x); //[ast]~ ERROR cannot borrow `x`
                          //[mir]~^ ERROR cannot borrow `x` as immutable because it is also borrowed as mutable
+    drop(c1);
 }
 
 fn c() {
@@ -44,6 +46,7 @@ fn c() {
     let c1 = || set(&mut x);
     let c2 = || x * 5; //[ast]~ ERROR cannot borrow `x`
                        //[mir]~^ ERROR cannot borrow `x` as immutable because it is also borrowed as mutable
+    drop(c1);
 }
 
 fn d() {
@@ -51,6 +54,7 @@ fn d() {
     let c2 = || x * 5;
     x = 5; //[ast]~ ERROR cannot assign
            //[mir]~^ ERROR cannot assign to `x` because it is borrowed
+    drop(c2);
 }
 
 fn e() {
@@ -58,6 +62,7 @@ fn e() {
     let c1 = || get(&x);
     x = 5; //[ast]~ ERROR cannot assign
            //[mir]~^ ERROR cannot assign to `x` because it is borrowed
+    drop(c1);
 }
 
 fn f() {
@@ -65,6 +70,7 @@ fn f() {
     let c1 = || get(&*x);
     *x = 5; //[ast]~ ERROR cannot assign to `*x`
             //[mir]~^ ERROR cannot assign to `*x` because it is borrowed
+    drop(c1);
 }
 
 fn g() {
@@ -76,6 +82,7 @@ struct Foo {
     let c1 = || get(&*x.f);
     *x.f = 5; //[ast]~ ERROR cannot assign to `*x.f`
               //[mir]~^ ERROR cannot assign to `*x.f` because it is borrowed
+    drop(c1);
 }
 
 fn h() {
@@ -87,6 +94,7 @@ struct Foo {
     let c1 = || get(&*x.f);
     let c2 = || *x.f = 5; //[ast]~ ERROR cannot borrow `x` as mutable
                           //[mir]~^ ERROR cannot borrow `x` as mutable because it is also borrowed as immutable
+    drop(c1);
 }
 
 fn main() {
index fa475949b36b0e65612e92e4290e4f0686f948fc..3a7e4a13740d070ab1b4afbba2bf2a7283f52968 100644 (file)
@@ -49,83 +49,93 @@ fn main() {
     // Local and field from struct
     {
         let mut f = Foo { x: 22 };
-        let _x = f.x();
+        let x = f.x();
         f.x; //[ast]~ ERROR cannot use `f.x` because it was mutably borrowed
-             //[mir]~^ ERROR cannot use `f.x` because it was mutably borrowed
+        //[mir]~^ ERROR cannot use `f.x` because it was mutably borrowed
+        drop(x);
     }
     // Local and field from tuple-struct
     {
         let mut g = Bar(22);
-        let _0 = g.x();
+        let x = g.x();
         g.0; //[ast]~ ERROR cannot use `g.0` because it was mutably borrowed
              //[mir]~^ ERROR cannot use `g.0` because it was mutably borrowed
+        drop(x);
     }
     // Local and field from tuple
     {
         let mut h = (22, 23);
-        let _0 = &mut h.0;
+        let x = &mut h.0;
         h.0; //[ast]~ ERROR cannot use `h.0` because it was mutably borrowed
              //[mir]~^ ERROR cannot use `h.0` because it was mutably borrowed
+        drop(x);
     }
     // Local and field from enum
     {
         let mut e = Baz::X(2);
-        let _e0 = e.x();
+        let x = e.x();
         match e { //[mir]~ ERROR cannot use `e` because it was mutably borrowed
             Baz::X(value) => value
             //[ast]~^ ERROR cannot use `e.0` because it was mutably borrowed
             //[mir]~^^ ERROR cannot use `e.0` because it was mutably borrowed
         };
+        drop(x);
     }
     // Local and field from union
     unsafe {
         let mut u = U { b: 0 };
-        let _ra = &mut u.a;
+        let x = &mut u.a;
         u.a; //[ast]~ ERROR cannot use `u.a` because it was mutably borrowed
              //[mir]~^ ERROR cannot use `u.a` because it was mutably borrowed
+        drop(x);
     }
     // Deref and field from struct
     {
         let mut f = Box::new(Foo { x: 22 });
-        let _x = f.x();
+        let x = f.x();
         f.x; //[ast]~ ERROR cannot use `f.x` because it was mutably borrowed
              //[mir]~^ ERROR cannot use `f.x` because it was mutably borrowed
+        drop(x);
     }
     // Deref and field from tuple-struct
     {
         let mut g = Box::new(Bar(22));
-        let _0 = g.x();
+        let x = g.x();
         g.0; //[ast]~ ERROR cannot use `g.0` because it was mutably borrowed
              //[mir]~^ ERROR cannot use `g.0` because it was mutably borrowed
+        drop(x);
     }
     // Deref and field from tuple
     {
         let mut h = Box::new((22, 23));
-        let _0 = &mut h.0;
+        let x = &mut h.0;
         h.0; //[ast]~ ERROR cannot use `h.0` because it was mutably borrowed
              //[mir]~^ ERROR cannot use `h.0` because it was mutably borrowed
+        drop(x);
     }
     // Deref and field from enum
     {
         let mut e = Box::new(Baz::X(3));
-        let _e0 = e.x();
+        let x = e.x();
         match *e { //[mir]~ ERROR cannot use `*e` because it was mutably borrowed
             Baz::X(value) => value
             //[ast]~^ ERROR cannot use `e.0` because it was mutably borrowed
             //[mir]~^^ ERROR cannot use `e.0` because it was mutably borrowed
         };
+        drop(x);
     }
     // Deref and field from union
     unsafe {
         let mut u = Box::new(U { b: 0 });
-        let _ra = &mut u.a;
+        let x = &mut u.a;
         u.a; //[ast]~ ERROR cannot use `u.a` because it was mutably borrowed
              //[mir]~^ ERROR cannot use `u.a` because it was mutably borrowed
+        drop(x);
     }
     // Constant index
     {
         let mut v = &[1, 2, 3, 4, 5, 6, 7, 8, 9, 10];
-        let _v = &mut v;
+        let x = &mut v;
         match v { //[mir]~ ERROR cannot use `v` because it was mutably borrowed
             &[x, _, .., _, _] => println!("{}", x),
                 //[ast]~^ ERROR cannot use `v[..]` because it was mutably borrowed
@@ -150,11 +160,12 @@ fn main() {
                 //[mir]~^^ ERROR cannot use `v[..]` because it was mutably borrowed
                             _ => panic!("other case"),
         }
+        drop(x);
     }
     // Subslices
     {
         let mut v = &[1, 2, 3, 4, 5];
-        let _v = &mut v;
+        let x = &mut v;
         match v { //[mir]~ ERROR cannot use `v` because it was mutably borrowed
             &[x..] => println!("{:?}", x),
                 //[ast]~^ ERROR cannot use `v[..]` because it was mutably borrowed
@@ -179,13 +190,14 @@ fn main() {
                 //[mir]~^^ ERROR cannot use `v[..]` because it was mutably borrowed
             _ => panic!("other case"),
         }
+        drop(x);
     }
     // Downcasted field
     {
         enum E<X> { A(X), B { x: X } }
 
         let mut e = E::A(3);
-        let _e = &mut e;
+        let x = &mut e;
         match e { //[mir]~ ERROR cannot use `e` because it was mutably borrowed
             E::A(ref ax) =>
                 //[ast]~^ ERROR cannot borrow `e.0` as immutable because `e` is also borrowed as mutable
@@ -197,13 +209,14 @@ enum E<X> { A(X), B { x: X } }
                 //[mir]~^^ ERROR cannot borrow `e.x` as immutable because it is also borrowed as mutable
                 println!("e.bx: {:?}", bx),
         }
+        drop(x);
     }
     // Field in field
     {
         struct F { x: u32, y: u32 };
         struct S { x: F, y: (u32, u32), };
         let mut s = S { x: F { x: 1, y: 2}, y: (999, 998) };
-        let _s = &mut s;
+        let x = &mut s;
         match s { //[mir]~ ERROR cannot use `s` because it was mutably borrowed
             S  { y: (ref y0, _), .. } =>
                 //[ast]~^ ERROR cannot borrow `s.y.0` as immutable because `s` is also borrowed as mutable
@@ -218,6 +231,7 @@ enum E<X> { A(X), B { x: X } }
                 println!("x0: {:?}", x0),
             _ => panic!("other case"),
         }
+        drop(x);
     }
     // Field of ref
     {
@@ -231,6 +245,7 @@ fn bump<'a>(mut block: &mut Block<'a>) {
             let p: &'a u8 = &*block.current;
             //[mir]~^ ERROR cannot borrow `*block.current` as immutable because it is also borrowed as mutable
             // No errors in AST because of issue rust#38899
+            drop(x);
         }
     }
     // Field of ptr
@@ -245,29 +260,32 @@ unsafe fn bump2(mut block: *mut Block2) {
             let p : *const u8 = &*(*block).current;
             //[mir]~^ ERROR cannot borrow `*block.current` as immutable because it is also borrowed as mutable
             // No errors in AST because of issue rust#38899
+            drop(x);
         }
     }
     // Field of index
     {
         struct F {x: u32, y: u32};
         let mut v = &[F{x: 1, y: 2}, F{x: 3, y: 4}];
-        let _v = &mut v;
+        let x = &mut v;
         v[0].y;
         //[ast]~^ ERROR cannot use `v[..].y` because it was mutably borrowed
         //[mir]~^^ ERROR cannot use `v[..].y` because it was mutably borrowed
         //[mir]~| ERROR cannot use `*v` because it was mutably borrowed
+        drop(x);
     }
     // Field of constant index
     {
         struct F {x: u32, y: u32};
         let mut v = &[F{x: 1, y: 2}, F{x: 3, y: 4}];
-        let _v = &mut v;
+        let x = &mut v;
         match v { //[mir]~ ERROR cannot use `v` because it was mutably borrowed
             &[_, F {x: ref xf, ..}] => println!("{}", xf),
             //[mir]~^ ERROR cannot borrow `v[..].x` as immutable because it is also borrowed as mutable
             // No errors in AST
             _ => panic!("other case")
         }
+        drop(x);
     }
     // Field from upvar
     {
@@ -281,13 +299,15 @@ unsafe fn bump2(mut block: *mut Block2) {
     }
     // Field from upvar nested
     {
+        // FIXME(#49824) -- the free region error below should probably not be there
         let mut x = 0;
            || {
-               || {
-                let y = &mut x;
-                &mut x; //[ast]~ ERROR cannot borrow `**x` as mutable more than once at a time
-                        //[mir]~^ ERROR cannot borrow `x` as mutable more than once at a time
-                *y = 1;
+               || { //[mir]~ ERROR free region `` does not outlive
+                   let y = &mut x;
+                   &mut x; //[ast]~ ERROR cannot borrow `**x` as mutable more than once at a time
+                   //[mir]~^ ERROR cannot borrow `x` as mutable more than once at a time
+                   *y = 1;
+                   drop(y);
                 }
            };
     }
index f09a0c7414ba901267846ed523ed10651afa8557..6f0e0f43f60885bc17c2d2c01b80b7c2ce63a3be 100644 (file)
 
 fn main() {
     let mut _a = 3;
-    let _b = &mut _a;
+    let b = &mut _a;
     {
-        let _c = &*_b;
+        let c = &*b;
         _a = 4; //[ast]~ ERROR cannot assign to `_a`
-                //[mir]~^ ERROR cannot assign to `_a` because it is borrowed
+        //[mir]~^ ERROR cannot assign to `_a` because it is borrowed
+        drop(c);
     }
+    drop(b);
 }
index 2fe764568bc82999e2b518ba87e0051b46812931..f2f3e7914227c788619e6122d3058227ee978e7d 100644 (file)
@@ -24,9 +24,10 @@ fn separate_arms() {
             // fact no outstanding loan of x!
             x = Some(0);
         }
-        Some(ref __isize) => {
+        Some(ref r) => {
             x = Some(1); //[ast]~ ERROR cannot assign
-                         //[mir]~^ ERROR cannot assign to `x` because it is borrowed
+            //[mir]~^ ERROR cannot assign to `x` because it is borrowed
+            drop(r);
         }
     }
     x.clone(); // just to prevent liveness warnings
index 3e57ac0ca1910a11996afa2a0b04ca796e2456a3..c2136e62a7b228afb0993055d0914953ffec9f25 100644 (file)
@@ -25,16 +25,18 @@ fn match_enum() {
         Foo::A(x) => x //[ast]~ ERROR [E0503]
                        //[mir]~^ ERROR [E0503]
     };
+    drop(p);
 }
 
 
 fn main() {
     let mut x = 1;
-    let _x = &mut x;
+    let r = &mut x;
     let _ = match x { //[mir]~ ERROR [E0503]
         x => x + 1, //[ast]~ ERROR [E0503]
                     //[mir]~^ ERROR [E0503]
         y => y + 2, //[ast]~ ERROR [E0503]
                     //[mir]~^ ERROR [E0503]
     };
+    drop(r);
 }
index 63bb04a0e4c3afd38716b3e7de834bbe298a963f..7b0a71815a5f065de4f43d43110ec81790642896 100644 (file)
 
 fn main() {
     let mut x = 1;
-    let mut addr;
+    let mut addr = vec![];
     loop {
         match 1 {
-            1 => { addr = &mut x; } //[ast]~ ERROR [E0499]
+            1 => { addr.push(&mut x); } //[ast]~ ERROR [E0499]
             //[mir]~^ ERROR [E0499]
-            2 => { addr = &mut x; } //[ast]~ ERROR [E0499]
+            2 => { addr.push(&mut x); } //[ast]~ ERROR [E0499]
             //[mir]~^ ERROR [E0499]
-            _ => { addr = &mut x; } //[ast]~ ERROR [E0499]
+            _ => { addr.push(&mut x); } //[ast]~ ERROR [E0499]
             //[mir]~^ ERROR [E0499]
         }
     }
index 3a4c22eb1395a0f6bfb915d48a804d0543aea9a1..109fe3d1128e1990b186e36158e679b864db127d 100644 (file)
@@ -71,4 +71,5 @@ fn main() {
     s[2] = 20;
     //[ast]~^ ERROR cannot assign to immutable indexed content
     //[mir]~^^ ERROR cannot assign to immutable item
+    drop(rs);
 }
index 0f3a841821080a9748b3a12abde46e039108cde3..d917a0abb88d089a1cd20eafa558562dbe3a4dba 100644 (file)
@@ -21,7 +21,8 @@ fn main() {
       Some(ref i) => {
           // But on this branch, `i` is an outstanding borrow
           x = Some(*i+1); //[ast]~ ERROR cannot assign to `x`
-                          //[mir]~^ ERROR cannot assign to `x` because it is borrowed
+          //[mir]~^ ERROR cannot assign to `x` because it is borrowed
+          drop(i);
       }
     }
     x.clone(); // just to prevent liveness warnings
index 0241b3870c7e64554f8a692b5c4bc8722dc60fc5..97193bd0191237ffe735c3f904257746f4f09abe 100644 (file)
@@ -25,82 +25,98 @@ fn main() {
         {
             let ra = &u.a;
             let ra2 = &u.a; // OK
+            drop(ra);
         }
         {
             let ra = &u.a;
             let a = u.a; // OK
+            drop(ra);
         }
         {
             let ra = &u.a;
             let rma = &mut u.a; //[ast]~ ERROR cannot borrow `u.a` as mutable because it is also borrowed as immutable
                                 //[mir]~^ ERROR cannot borrow `u.a` as mutable because it is also borrowed as immutable
+            drop(ra);
         }
         {
             let ra = &u.a;
             u.a = 1; //[ast]~ ERROR cannot assign to `u.a` because it is borrowed
                      //[mir]~^ ERROR cannot assign to `u.a` because it is borrowed
+            drop(ra);
         }
         // Imm borrow, other field
         {
             let ra = &u.a;
             let rb = &u.b; // OK
+            drop(ra);
         }
         {
             let ra = &u.a;
             let b = u.b; // OK
+            drop(ra);
         }
         {
             let ra = &u.a;
             let rmb = &mut u.b; //[ast]~ ERROR cannot borrow `u` (via `u.b`) as mutable because `u` is also borrowed as immutable (via `u.a`)
                                 //[mir]~^ ERROR cannot borrow `u.b` as mutable because it is also borrowed as immutable
+            drop(ra);
         }
         {
             let ra = &u.a;
             u.b = 1; //[ast]~ ERROR cannot assign to `u.b` because it is borrowed
                      //[mir]~^ ERROR cannot assign to `u.b` because it is borrowed
+            drop(ra);
         }
         // Mut borrow, same field
         {
             let rma = &mut u.a;
             let ra = &u.a; //[ast]~ ERROR cannot borrow `u.a` as immutable because it is also borrowed as mutable
                          //[mir]~^ ERROR cannot borrow `u.a` as immutable because it is also borrowed as mutable
+            drop(rma);
         }
         {
             let ra = &mut u.a;
             let a = u.a; //[ast]~ ERROR cannot use `u.a` because it was mutably borrowed
                          //[mir]~^ ERROR cannot use `u.a` because it was mutably borrowed
+            drop(ra);
         }
         {
             let rma = &mut u.a;
             let rma2 = &mut u.a; //[ast]~ ERROR cannot borrow `u.a` as mutable more than once at a time
                                  //[mir]~^ ERROR cannot borrow `u.a` as mutable more than once at a time
+            drop(rma);
         }
         {
             let rma = &mut u.a;
             u.a = 1; //[ast]~ ERROR cannot assign to `u.a` because it is borrowed
                      //[mir]~^ ERROR cannot assign to `u.a` because it is borrowed
+            drop(rma);
         }
         // Mut borrow, other field
         {
             let rma = &mut u.a;
             let rb = &u.b; //[ast]~ ERROR cannot borrow `u` (via `u.b`) as immutable because `u` is also borrowed as mutable (via `u.a`)
                            //[mir]~^ ERROR cannot borrow `u.b` as immutable because it is also borrowed as mutable
+            drop(rma);
         }
         {
             let ra = &mut u.a;
             let b = u.b; //[ast]~ ERROR cannot use `u.b` because it was mutably borrowed
                          //[mir]~^ ERROR cannot use `u.b` because it was mutably borrowed
 
+            drop(ra);
         }
         {
             let rma = &mut u.a;
             let rmb2 = &mut u.b; //[ast]~ ERROR cannot borrow `u` (via `u.b`) as mutable more than once at a time
                                  //[mir]~^ ERROR cannot borrow `u.b` as mutable more than once at a time
+            drop(rma);
         }
         {
             let rma = &mut u.a;
             u.b = 1; //[ast]~ ERROR cannot assign to `u.b` because it is borrowed
                      //[mir]~^ ERROR cannot assign to `u.b` because it is borrowed
+            drop(rma);
         }
     }
 }
diff --git a/src/test/compile-fail/borrowck/two-phase-across-loop.rs b/src/test/compile-fail/borrowck/two-phase-across-loop.rs
new file mode 100644 (file)
index 0000000..e03a035
--- /dev/null
@@ -0,0 +1,34 @@
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// Test that a borrow which starts as a 2-phase borrow and gets
+// carried around a loop winds up conflicting with itself.
+
+#![feature(nll)]
+
+struct Foo { x: String }
+
+impl Foo {
+    fn get_string(&mut self) -> &str {
+        &self.x
+    }
+}
+
+fn main() {
+    let mut foo = Foo { x: format!("Hello, world") };
+    let mut strings = vec![];
+
+    loop {
+        strings.push(foo.get_string()); //~ ERROR cannot borrow `foo` as mutable
+        if strings.len() > 2 { break; }
+    }
+
+    println!("{:?}", strings);
+}
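The new test above stores the `&str` returned by `foo.get_string()` (a `&mut self` method) into `strings`, so the mutable borrow of `foo` stays live across loop iterations and conflicts with itself. For contrast, a minimal compiling sketch, not part of this commit, in which cloning the string ends the borrow before the next iteration:

    struct Foo { x: String }

    impl Foo {
        fn get_string(&mut self) -> &str {
            &self.x
        }
    }

    fn main() {
        let mut foo = Foo { x: String::from("Hello, world") };
        let mut strings: Vec<String> = vec![];
        loop {
            // Cloning produces an owned String, so the &mut borrow of `foo`
            // ends here instead of being carried into the next iteration.
            strings.push(foo.get_string().to_owned());
            if strings.len() > 2 { break; }
        }
        println!("{:?}", strings);
    }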
index 90933c6b31fa868b8ce7bffe37ce552130639be9..77b237e34f2b7d0bc7c3810d5b33b67a12d83304 100644 (file)
 // revisions: nll_target
 
 // The following revisions are disabled due to missing support from two-phase beyond autorefs
-//[lxl_beyond] compile-flags: -Z borrowck=mir -Z two-phase-borrows -Z two-phase-beyond-autoref
-//[nll_beyond] compile-flags: -Z borrowck=mir -Z two-phase-borrows -Z two-phase-beyond-autoref -Z nll
+//[nll_beyond] compile-flags: -Z borrowck=mir -Z two-phase-borrows -Z two-phase-beyond-autoref
 
-//[nll_target] compile-flags: -Z borrowck=mir -Z two-phase-borrows -Z nll
+//[nll_target] compile-flags: -Z borrowck=mir -Z two-phase-borrows
 
 // This is an important corner case pointed out by Niko: one is
 // allowed to initiate a shared borrow during a reservation, but it
index d2f4154433ab152b4c9a8eac7657d97233162788..5deabf9376482ccdcf5c1ece14710ae1c83f8f24 100644 (file)
 // revisions: nll_target
 
 // The following revisions are disabled due to missing support for two_phase_beyond_autoref
-//[lxl_beyond] compile-flags: -Z borrowck=mir -Z two-phase-borrows -Z two_phase_beyond_autoref
-//[nll_beyond] compile-flags: -Z borrowck=mir -Z two-phase-borrows -Z two_phase_beyond_autoref -Z nll
+//[nll_beyond] compile-flags: -Z borrowck=mir -Z two-phase-borrows -Z two_phase_beyond_autoref
 
-
-//[nll_target] compile-flags: -Z borrowck=mir -Z two-phase-borrows -Z nll
+//[nll_target] compile-flags: -Z borrowck=mir -Z two-phase-borrows
 
 // This is the second counter-example from Niko's blog post
 // smallcultfollowing.com/babysteps/blog/2017/03/01/nested-method-calls-via-two-phase-borrowing/
@@ -44,9 +42,8 @@ fn main() {
 
     /*3*/ *p += 1;        // (mutable borrow of `i` starts here, since `p` is used)
 
-    /*4*/ let k = i;      //[lxl_beyond]~   ERROR cannot use `i` because it was mutably borrowed [E0503]
-                          //[nll_beyond]~^  ERROR cannot use `i` because it was mutably borrowed [E0503]
-                          //[nll_target]~^^ ERROR cannot use `i` because it was mutably borrowed [E0503]
+    /*4*/ let k = i;      //[nll_beyond]~  ERROR cannot use `i` because it was mutably borrowed [E0503]
+                          //[nll_target]~^ ERROR cannot use `i` because it was mutably borrowed [E0503]
 
     /*5*/ *p += 1;
 
index 01b04708599c03a489cdfaa2ea65deb1660acd88..15700a1d61a1fa324780195ae44d4ae40fbe10aa 100644 (file)
@@ -8,9 +8,7 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
-// revisions: lxl nll
-//[lxl]compile-flags: -Z borrowck=mir -Z two-phase-borrows
-//[nll]compile-flags: -Z borrowck=mir -Z two-phase-borrows -Z nll
+// compile-flags: -Z borrowck=mir -Z two-phase-borrows
 
 // This is the third counter-example from Niko's blog post
 // smallcultfollowing.com/babysteps/blog/2017/03/01/nested-method-calls-via-two-phase-borrowing/
@@ -26,8 +24,7 @@ fn main() {
     vec.get({
 
         vec.push(2);
-        //[lxl]~^ ERROR cannot borrow `vec` as mutable because it is also borrowed as immutable
-        //[nll]~^^   ERROR cannot borrow `vec` as mutable because it is also borrowed as immutable
+        //~^ ERROR cannot borrow `vec` as mutable because it is also borrowed as immutable
 
         0
     });
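This hunk keeps Niko's third counter-example rejected under plain `-Z two-phase-borrows`: the shared autoref taken for `vec.get(..)` is still live when the inner `push` has to activate its mutable borrow, so two-phase borrows do not help here. A hedged sketch, not taken from the test, contrasting an ordering that compiles with the nested form that does not:

    fn main() {
        let mut vec = vec![0, 1];
        // Accepted: the argument block runs before `get` borrows its receiver.
        let idx = { vec.push(2); 0 };
        assert_eq!(vec.get(idx), Some(&0));
        // Rejected even with two-phase borrows, since the shared borrow for
        // `get` is live while the block mutates `vec`:
        // vec.get({ vec.push(2); 0 });
    }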
index ef39fabda10e62f744b4eb186615c49c036f86da..4303048138d91c4bbdc49f6b01445e4097f01028 100644 (file)
@@ -8,12 +8,11 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
-// revisions: ast lxl nll
+// revisions: ast nll
 //[ast]compile-flags:
-//[lxl]compile-flags: -Z borrowck=mir -Z two-phase-borrows
-//[nll]compile-flags: -Z borrowck=mir -Z two-phase-borrows -Z nll
+//[nll]compile-flags: -Z borrowck=mir -Z two-phase-borrows
 
-//[g2p]compile-flags: -Z borrowck=mir -Z two-phase-borrows -Z nll -Z two-phase-beyond-autoref
+//[g2p]compile-flags: -Z borrowck=mir -Z two-phase-borrows -Z two-phase-beyond-autoref
 // the above revision is disabled until two-phase-beyond-autoref support is better
 
 // This is a test checking that when we limit two-phase borrows to
@@ -69,44 +68,38 @@ fn overloaded_call_traits() {
 
     fn twice_ten_sm<F: FnMut(i32) -> i32>(f: &mut F) {
         f(f(10));
-        //[lxl]~^     ERROR cannot borrow `*f` as mutable more than once at a time
-        //[nll]~^^   ERROR cannot borrow `*f` as mutable more than once at a time
-        //[g2p]~^^^ ERROR cannot borrow `*f` as mutable more than once at a time
-        //[ast]~^^^^ ERROR cannot borrow `*f` as mutable more than once at a time
+        //[nll]~^   ERROR cannot borrow `*f` as mutable more than once at a time
+        //[g2p]~^^ ERROR cannot borrow `*f` as mutable more than once at a time
+        //[ast]~^^^ ERROR cannot borrow `*f` as mutable more than once at a time
     }
     fn twice_ten_si<F: Fn(i32) -> i32>(f: &mut F) {
         f(f(10));
     }
     fn twice_ten_so<F: FnOnce(i32) -> i32>(f: Box<F>) {
         f(f(10));
-        //[lxl]~^    ERROR use of moved value: `*f`
-        //[nll]~^^   ERROR use of moved value: `*f`
-        //[g2p]~^^^  ERROR use of moved value: `*f`
-        //[ast]~^^^^ ERROR use of moved value: `*f`
+        //[nll]~^   ERROR use of moved value: `*f`
+        //[g2p]~^^  ERROR use of moved value: `*f`
+        //[ast]~^^^ ERROR use of moved value: `*f`
     }
 
     fn twice_ten_om(f: &mut FnMut(i32) -> i32) {
         f(f(10));
-        //[lxl]~^    ERROR cannot borrow `*f` as mutable more than once at a time
-        //[nll]~^^   ERROR cannot borrow `*f` as mutable more than once at a time
-        //[g2p]~^^^  ERROR cannot borrow `*f` as mutable more than once at a time
-        //[ast]~^^^^ ERROR cannot borrow `*f` as mutable more than once at a time
+        //[nll]~^   ERROR cannot borrow `*f` as mutable more than once at a time
+        //[g2p]~^^  ERROR cannot borrow `*f` as mutable more than once at a time
+        //[ast]~^^^ ERROR cannot borrow `*f` as mutable more than once at a time
     }
     fn twice_ten_oi(f: &mut Fn(i32) -> i32) {
         f(f(10));
     }
     fn twice_ten_oo(f: Box<FnOnce(i32) -> i32>) {
         f(f(10));
-        //[lxl]~^             ERROR cannot move a value of type
-        //[lxl]~^^            ERROR cannot move a value of type
-        //[lxl]~^^^           ERROR use of moved value: `*f`
-        //[nll]~^^^^          ERROR cannot move a value of type
-        //[nll]~^^^^^         ERROR cannot move a value of type
-        //[nll]~^^^^^^        ERROR use of moved value: `*f`
-        //[g2p]~^^^^^^^       ERROR cannot move a value of type
-        //[g2p]~^^^^^^^^      ERROR cannot move a value of type
-        //[g2p]~^^^^^^^^^     ERROR use of moved value: `*f`
-        //[ast]~^^^^^^^^^^    ERROR use of moved value: `*f`
+        //[nll]~^          ERROR cannot move a value of type
+        //[nll]~^^         ERROR cannot move a value of type
+        //[nll]~^^^        ERROR use of moved value: `*f`
+        //[g2p]~^^^^       ERROR cannot move a value of type
+        //[g2p]~^^^^^      ERROR cannot move a value of type
+        //[g2p]~^^^^^^     ERROR use of moved value: `*f`
+        //[ast]~^^^^^^^    ERROR use of moved value: `*f`
     }
 
     twice_ten_sm(&mut |x| x + 1);
@@ -144,10 +137,9 @@ fn coerce_unsized() {
 
     // This is not okay.
     double_access(&mut a, &a);
-    //[lxl]~^    ERROR cannot borrow `a` as immutable because it is also borrowed as mutable [E0502]
-    //[nll]~^^   ERROR cannot borrow `a` as immutable because it is also borrowed as mutable [E0502]
-    //[g2p]~^^^  ERROR cannot borrow `a` as immutable because it is also borrowed as mutable [E0502]
-    //[ast]~^^^^ ERROR cannot borrow `a` as immutable because it is also borrowed as mutable [E0502]
+    //[nll]~^   ERROR cannot borrow `a` as immutable because it is also borrowed as mutable [E0502]
+    //[g2p]~^^  ERROR cannot borrow `a` as immutable because it is also borrowed as mutable [E0502]
+    //[ast]~^^^ ERROR cannot borrow `a` as immutable because it is also borrowed as mutable [E0502]
 
     // But this is okay.
     a.m(a.i(10));
@@ -173,16 +165,14 @@ fn index_mut(&mut self, _: i32) -> &mut i32 {
 fn coerce_index_op() {
     let mut i = I(10);
     i[i[3]] = 4;
-    //[lxl]~^   ERROR cannot borrow `i` as immutable because it is also borrowed as mutable [E0502]
-    //[nll]~^^  ERROR cannot borrow `i` as immutable because it is also borrowed as mutable [E0502]
-    //[ast]~^^^ ERROR cannot borrow `i` as immutable because it is also borrowed as mutable [E0502]
+    //[nll]~^  ERROR cannot borrow `i` as immutable because it is also borrowed as mutable [E0502]
+    //[ast]~^^ ERROR cannot borrow `i` as immutable because it is also borrowed as mutable [E0502]
 
     i[3] = i[4];
 
     i[i[3]] = i[4];
-    //[lxl]~^   ERROR cannot borrow `i` as immutable because it is also borrowed as mutable [E0502]
-    //[nll]~^^  ERROR cannot borrow `i` as immutable because it is also borrowed as mutable [E0502]
-    //[ast]~^^^ ERROR cannot borrow `i` as immutable because it is also borrowed as mutable [E0502]
+    //[nll]~^  ERROR cannot borrow `i` as immutable because it is also borrowed as mutable [E0502]
+    //[ast]~^^ ERROR cannot borrow `i` as immutable because it is also borrowed as mutable [E0502]
 }
 
 fn main() {
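In the overloaded-call hunks above, `f(f(10))` is flagged because the outer and inner calls each need a mutable borrow of `*f` at the same time. A short compiling sketch, offered as an illustration rather than as part of the diff, of the usual sequencing workaround:

    fn twice_ten<F: FnMut(i32) -> i32>(f: &mut F) {
        // `f(f(10))` would need two overlapping `&mut *f` borrows and is
        // rejected; sequencing the calls gives each borrow a disjoint lifetime.
        let inner = f(10);
        let outer = f(inner);
        assert_eq!(outer, 12);
    }

    fn main() {
        let mut add_one = |x| x + 1;
        twice_ten(&mut add_one);
    }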
index fc9100c8a9a865f493319b2bd9444b82222e844f..f9326d944b8e4220a82896ca77c0032215700ed4 100644 (file)
@@ -8,9 +8,7 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
-// revisions: lxl nll
-//[lxl]compile-flags: -Z borrowck=mir -Z two-phase-borrows
-//[nll]compile-flags: -Z borrowck=mir -Z two-phase-borrows -Z nll
+// compile-flags: -Z borrowck=mir -Z two-phase-borrows
 
 // This is similar to two-phase-reservation-sharing-interference.rs
 // in that it shows a reservation that overlaps with a shared borrow.
 #![feature(rustc_attrs)]
 
 #[rustc_error]
-fn main() { //[nll]~ ERROR compilation successful
+fn main() { //~ ERROR compilation successful
     let mut v = vec![0, 1, 2];
     let shared = &v;
 
     v.push(shared.len());
-    //[lxl]~^  ERROR cannot borrow `v` as mutable because it is also borrowed as immutable [E0502]
 
     assert_eq!(v, [0, 1, 2, 3]);
 }
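The file above marks `main` with `#[rustc_error]`, so successful compilation is itself the expected outcome: the reservation for `push` may overlap the shared borrow `shared` as long as `shared` is dead by the time the reservation is activated. A free-standing sketch of the same pattern under `-Z two-phase-borrows` (or on later compilers where this behaviour is the default), not copied from the test:

    fn main() {
        let mut v = vec![0, 1, 2];
        let shared = &v;
        // The `&mut v` needed by `push` is only reserved while the argument
        // is evaluated; `shared` is no longer live at the activation point.
        v.push(shared.len());
        assert_eq!(v, [0, 1, 2, 3]);
    }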
index 058022ad588e85beb9ceae07b977a9e0bfbf0146..1333167b780a8537f714a571c602046dc4b29ebc 100644 (file)
 
 // ignore-tidy-linelength
 
-// revisions: lxl_beyond nll_beyond nll_target
+// revisions: nll_beyond nll_target
 
 // The following revisions are disabled due to missing support from two-phase beyond autorefs
-//[lxl_beyond]compile-flags: -Z borrowck=mir -Z two-phase-borrows -Z two-phase-beyond-autoref
-//[lxl_beyond] should-fail
-//[nll_beyond]compile-flags: -Z borrowck=mir -Z two-phase-borrows -Z two-phase-beyond-autoref -Z nll
+//[nll_beyond]compile-flags: -Z borrowck=mir -Z two-phase-borrows -Z two-phase-beyond-autoref
 //[nll_beyond] should-fail
 
-//[nll_target]compile-flags: -Z borrowck=mir -Z two-phase-borrows -Z nll
+//[nll_target]compile-flags: -Z borrowck=mir -Z two-phase-borrows
 
 // This is a corner case that the current implementation is (probably)
 // treating more conservatively than is necessary. But it also does
@@ -46,9 +44,8 @@ fn main() {
         // with the shared borrow. But in the current implementation,
         // it's an error.
         delay = &mut vec;
-        //[lxl_beyond]~^   ERROR cannot borrow `vec` as mutable because it is also borrowed as immutable
-        //[nll_beyond]~^^  ERROR cannot borrow `vec` as mutable because it is also borrowed as immutable
-        //[nll_target]~^^^ ERROR cannot borrow `vec` as mutable because it is also borrowed as immutable
+        //[nll_beyond]~^  ERROR cannot borrow `vec` as mutable because it is also borrowed as immutable
+        //[nll_target]~^^ ERROR cannot borrow `vec` as mutable because it is also borrowed as immutable
 
         shared[0];
     }
index 32747407c67f0195ee6516fca8a318a4c9de89df..eec4b470d75f76d3da7418442533b80cc49e0b4f 100644 (file)
@@ -8,9 +8,7 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
-// revisions: lxl nll
-//[lxl]compile-flags: -Z borrowck=mir -Z two-phase-borrows
-//[nll]compile-flags: -Z borrowck=mir -Z two-phase-borrows -Z nll
+// compile-flags: -Z borrowck=mir -Z two-phase-borrows
 
 // This is the first counter-example from Niko's blog post
 // smallcultfollowing.com/babysteps/blog/2017/03/01/nested-method-calls-via-two-phase-borrowing/
@@ -22,8 +20,7 @@ fn main() {
     v[0].push_str({
 
         v.push(format!("foo"));
-        //[lxl]~^ ERROR cannot borrow `v` as mutable more than once at a time [E0499]
-        //[nll]~^^   ERROR cannot borrow `v` as mutable more than once at a time [E0499]
+        //~^   ERROR cannot borrow `v` as mutable more than once at a time [E0499]
 
         "World!"
     });
index 0487b03171adbc2abd1e57fcb155309ac0baea43..5a05f59c6f402303676de7494218b99a260c5aa0 100644 (file)
@@ -22,6 +22,7 @@ fn double_mut_borrow<T>(x: &mut Box<T>) {
     let z = borrow_mut(x);
     //[ast]~^ ERROR cannot borrow `*x` as mutable more than once at a time
     //[mir]~^^ ERROR cannot borrow `*x` as mutable more than once at a time
+    drop((y, z));
 }
 
 fn double_imm_borrow(x: &mut Box<i32>) {
@@ -30,6 +31,7 @@ fn double_imm_borrow(x: &mut Box<i32>) {
     **x += 1;
     //[ast]~^ ERROR cannot assign to `**x` because it is borrowed
     //[mir]~^^ ERROR cannot assign to `**x` because it is borrowed
+    drop((y, z));
 }
 
 fn double_mut_borrow2<T>(x: &mut Box<T>) {
index 2f4d82e2ef514a43868df734d9ebf23677f7d675..5db901b5ba1c202e08570b239df0412eacd73dcc 100644 (file)
@@ -17,7 +17,7 @@
 #![feature(specialization)]
 
 trait Trait<T> { type Assoc; }
-//~^ cyclic dependency detected [E0391]
+//~^ cycle detected
 
 impl<T> Trait<T> for Vec<T> {
     type Assoc = ();
index 6218dcbf5f2c54ec5f86e5cd27db7fd5c105a372..fed8e1885de8a996296b37ce83cb1affc554262f 100644 (file)
@@ -8,7 +8,7 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
-// error-pattern: cyclic dependency detected
+// error-pattern: cycle detected
 
 #![feature(const_fn)]
 
index ee4722c010f1660fe8e01555649da6c38e99cfef..56ad1771e00bd0f2a7403c7c75f7c68c15942720 100644 (file)
@@ -25,7 +25,7 @@
 struct A<T>
     where T : Trait,
           T : Add<T::Item>
-    //~^ ERROR cyclic dependency detected
+    //~^ ERROR cycle detected
     //~| ERROR associated type `Item` not found for `T`
 {
     data: T
index 88672088bcb4c279eb412c6a77f9ed3c77ff7bb5..b8bae2154566e584ca15bed75a0d515b7ea96103 100644 (file)
@@ -12,7 +12,7 @@
 // again references the trait.
 
 trait Foo<X = Box<Foo>> {
-    //~^ ERROR cyclic dependency detected
+    //~^ ERROR cycle detected
 }
 
 fn main() { }
index 626567ccc0ead54616644092ad8e295411cd9153..b802463fcb036dfa47dde3e0d6a7b158210869ad 100644 (file)
@@ -11,7 +11,7 @@
 // Test a supertrait cycle where a trait extends itself.
 
 trait Chromosome: Chromosome {
-    //~^ ERROR cyclic dependency detected
+    //~^ ERROR cycle detected
 }
 
 fn main() { }
index 5f5b70dda5e8170fbe4e18035272a3ee95c102c9..e98cf4eb2abd92f3e09021c1860ef100d40b096c 100644 (file)
@@ -33,6 +33,7 @@ fn call_repeatedly<F>(f: F)
     f.call(&x);
     f.call(&x);
     x = 5;
+    drop(y);
 }
 
 fn main() {
index 25d0590db1b75dbd296bf1e91bf61692346a6639..42c80b54313d0f45217b849e5c91a38bcf4841e6 100644 (file)
@@ -9,6 +9,6 @@
 // except according to those terms.
 
 type x = Vec<x>;
-//~^ ERROR cyclic dependency detected
+//~^ ERROR cycle detected
 
 fn main() { let b: x = Vec::new(); }
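The wording change above lands on the classic self-referential alias: `type x = Vec<x>` can never be expanded, because aliases are substituted eagerly and the definition therefore depends on itself, which the compiler now reports as "cycle detected". As an aside that is not part of the diff, a nominal wrapper type provides the indirection an alias cannot:

    // Recursive types are fine behind a named struct, since the struct is a
    // distinct type rather than an alias that must be expanded in place.
    struct X(Vec<X>);

    fn main() {
        let b = X(Vec::new());
        assert!(b.0.is_empty());
    }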
index 88395e5f1eafa1240f99b1bf099a20829fa82538..d67c76a1525d51b55d280e4483fc43e5bf54d9d3 100644 (file)
@@ -9,7 +9,7 @@
 // except according to those terms.
 
 trait T : Iterator<Item=Self::Item>
-//~^ ERROR cyclic dependency detected
+//~^ ERROR cycle detected
 //~| ERROR associated type `Item` not found for `Self`
 {}
 
index aeb798b382875a5a740297723b9160282a405498..cbb987cd512afb65ce78941a2d793d70cefeb230 100644 (file)
@@ -13,7 +13,7 @@ pub trait Subscriber {
 }
 
 pub trait Processor: Subscriber<Input = Self::Input> {
-    //~^ ERROR cyclic dependency detected [E0391]
+    //~^ ERROR cycle detected
     type Input;
 }
 
index 40c95b98f1264d4f9311cf5364a414c8e8b72a89..9da57877a09dcdfff59837ed46bed958ca682861 100644 (file)
@@ -14,7 +14,7 @@ trait Trait {
 }
 
 fn foo<T: Trait<A = T::B>>() { }
-//~^ ERROR cyclic dependency detected
+//~^ ERROR cycle detected
 //~| ERROR associated type `B` not found for `T`
 
 fn main() { }
index fde2d001542b84a1eda160b7fb62488440189fdb..2396007c3df3ed06ffe717f9132d77ed4ecf701e 100644 (file)
@@ -9,7 +9,7 @@
 // except according to those terms.
 
 trait Expr : PartialEq<Self::Item> {
-    //~^ ERROR: cyclic dependency detected
+    //~^ ERROR: cycle detected
     type Item;
 }
 
index 16a650cc6d88632ed57d1c809377e756036f8773..aab674fbb1a44e11635b4a0adcc6035894164722 100644 (file)
@@ -8,11 +8,9 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
-// error-pattern: cyclic dependency detected
-// note-pattern: the cycle begins when computing layout of
-// note-pattern: ...which then requires computing layout of
-// note-pattern: ...which then again requires computing layout of
-
+// error-pattern: cycle detected when computing layout of
+// note-pattern: ...which requires computing layout of
+// note-pattern: ...which again requires computing layout of
 
 trait Mirror { type It: ?Sized; }
 impl<T: ?Sized> Mirror for T { type It = Self; }
index b18e05af47c97266521065b5711c08e234b64aee..4d7238ad76f75acee3743086feb6639682c8c212 100644 (file)
@@ -14,8 +14,8 @@ trait Trait<T> {
     fn foo(_: T) {}
 }
 
-pub struct Foo<T = Box<Trait<DefaultFoo>>>;
-type DefaultFoo = Foo; //~ ERROR cyclic dependency detected
+pub struct Foo<T = Box<Trait<DefaultFoo>>>;  //~ ERROR cycle detected
+type DefaultFoo = Foo;
 
 fn main() {
 }
index fc3e0633750d8c29ddd7c86092fca49fccbed399..ed6a2f85fbe23a8bb963bf61732b64239649e2d9 100644 (file)
@@ -11,6 +11,8 @@
 // revisions: ast mir
 //[mir]compile-flags: -Z borrowck=mir
 
+// FIXME(#49821) -- No tip about using a let binding
+
 use std::cell::RefCell;
 
 fn main() {
@@ -24,10 +26,9 @@ fn main() {
     //[ast]~| NOTE temporary value does not live long enough
     //[ast]~| NOTE consider using a `let` binding to increase its lifetime
     //[mir]~^^^^^ ERROR borrowed value does not live long enough [E0597]
-    //[mir]~| NOTE temporary value dropped here while still borrowed
     //[mir]~| NOTE temporary value does not live long enough
-    //[mir]~| NOTE consider using a `let` binding to increase its lifetime
+    //[mir]~| NOTE temporary value only lives until here
     println!("{}", val);
+    //[mir]~^ borrow later used here
 }
 //[ast]~^ NOTE temporary value needs to live until here
-//[mir]~^^ NOTE temporary value needs to live until here
index 930a427e9a5e0ec7c58261bfb7516de5bb442b23..ecaf326b0504dde984f16c7f627a6074eb99d108 100644 (file)
@@ -8,6 +8,8 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
+// error-pattern: cycle detected when computing layout of
+
 #![feature(const_fn)]
 #![feature(core_intrinsics)]
 
@@ -15,7 +17,6 @@
 
 struct Foo {
     bytes: [u8; unsafe { intrinsics::size_of::<Foo>() }],
-    //~^ ERROR cyclic dependency detected
     x: usize,
 }
 
index 6562efeb6d893a628b9ad646a2f001ccad2c21b2..d8f5956b585303847cde55e450d7aa96b1835070 100644 (file)
@@ -8,7 +8,7 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
-// compile-flags: -Z borrowck=mir -Z nll
+// compile-flags: -Z borrowck=mir
 
 #![allow(dead_code)]
 
@@ -16,7 +16,7 @@ fn bar<'a, 'b>() -> fn(&'a u32, &'b u32) -> &'a u32 {
     let g: fn(_, _) -> _ = |_x, y| y;
     //~^ ERROR free region `'b` does not outlive free region `'a`
     g
-    //~^ WARNING not reporting region error due to -Znll
+    //~^ WARNING not reporting region error due to nll
 }
 
 fn main() {}
index f6ad2820d17ce4d6e651eb1a88198e277767a61e..1f9174b3574a7bb57d0c5177eff5cc12067da4d7 100644 (file)
@@ -8,7 +8,7 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
-// compile-flags: -Z borrowck=mir -Z nll
+// compile-flags: -Zborrowck=mir
 
 #![allow(dead_code)]
 
@@ -44,7 +44,7 @@ fn bar<'a>(x: &'a u32) -> &'static u32 {
     // The MIR type checker must therefore relate `'?0` to `'?1` and `'?2`
     // as part of checking the `ReifyFnPointer`.
     let f: fn(_) -> _ = foo;
-    //~^ WARNING not reporting region error due to -Znll
+    //~^ WARNING not reporting region error due to nll
     //~| ERROR free region `'a` does not outlive free region `'static`
     f(x)
 }
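The test above exercises the failure path of the `ReifyFnPointer` cast, where relating the reified pointer's regions to `foo`'s signature yields the "free region `'a` does not outlive free region `'static`" error. For contrast, a compiling sketch (an illustration, not from the commit) of the same coercion with regions that agree:

    fn foo<'a>(x: &'a u32) -> &'a u32 { x }

    fn bar<'a>(x: &'a u32) -> &'a u32 {
        // ReifyFnPointer: the function item `foo` is coerced to a function
        // pointer, and the checker relates the regions of the pointer type
        // to those in `foo`'s signature. Here they match, so this passes.
        let f: fn(&'a u32) -> &'a u32 = foo;
        f(x)
    }

    fn main() {
        let v = 22;
        assert_eq!(*bar(&v), 22);
    }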
index c9b378dacd540cb7bfd283efbe2d895a1f5f780c..27ca2728ddfd623a5bb74f2b2710d3531206bba4 100644 (file)
@@ -8,7 +8,7 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
-// compile-flags: -Z borrowck=mir -Z nll
+// compile-flags: -Zborrowck=mir
 
 #![allow(dead_code)]
 
@@ -16,7 +16,7 @@ fn bar<'a>(input: &'a u32, f: fn(&'a u32) -> &'a u32) -> &'static u32 {
     // Here the NLL checker must relate the types in `f` to the types
     // in `g`. These are related via the `UnsafeFnPointer` cast.
     let g: unsafe fn(_) -> _ = f;
-    //~^ WARNING not reporting region error due to -Znll
+    //~^ WARNING not reporting region error due to nll
     //~| ERROR free region `'a` does not outlive free region `'static`
     unsafe { g(input) }
 }
index 1df56793f73bdab5d5c30c07d6b40f56e4253639..e30bed610581503d7bcb0910eb5d539d0e596488 100644 (file)
@@ -8,7 +8,7 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
-// compile-flags: -Z borrowck=mir -Z nll
+// compile-flags: -Z borrowck=mir
 
 #![allow(dead_code)]
 #![feature(dyn_trait)]
@@ -18,7 +18,7 @@
 fn bar<'a>(x: &'a u32) -> &'static dyn Debug {
     //~^ ERROR free region `'a` does not outlive free region `'static`
     x
-    //~^ WARNING not reporting region error due to -Znll
+    //~^ WARNING not reporting region error due to nll
 }
 
 fn main() {}
index 72727cdfe54d2f56b7a9331b8436ef559b3588d7..a05ee64937eb7ede999fa221e564dcb18ce8dd21 100644 (file)
@@ -25,5 +25,6 @@ fn main() {
     // check borrowing is detected successfully
     let &mut ref x = foo;
     *foo += 1; //[ast]~ ERROR cannot assign to `*foo` because it is borrowed
-               //[mir]~^ ERROR cannot assign to `*foo` because it is borrowed
+    //[mir]~^ ERROR cannot assign to `*foo` because it is borrowed
+    drop(x);
 }
index fdc650a072131c4e4f1a46b34b1acab76ee3bec7..97126e98cbf3ad3ab09efb5401d057d15990ad68 100644 (file)
@@ -9,7 +9,7 @@
 // except according to those terms.
 
 
-// compile-flags:-Zborrowck=compare -Znll
+// compile-flags:-Zborrowck=compare
 
 #![allow(warnings)]
 #![feature(rustc_attrs)]
index f22d2fc23e0579eb691d73167a5967797206fa63..b5357d0ee827c100fbb1ebdaf9b2541036dad9a8 100644 (file)
@@ -9,7 +9,7 @@
 // except according to those terms.
 
 
-// compile-flags:-Zborrowck=compare -Znll
+// compile-flags:-Zborrowck=compare
 
 #![allow(warnings)]
 #![feature(rustc_attrs)]
index efa6cc273b6f4a66e32623fbcef67d1f92b73f5c..589a3daa38d853dc344b8bf2faa6602353ee9d33 100644 (file)
@@ -9,7 +9,7 @@
 // except according to those terms.
 
 //revisions: ast mir
-//[mir] compile-flags: -Z borrowck=mir -Z nll
+//[mir] compile-flags: -Z borrowck=mir
 
 #![allow(unused_assignments)]
 
index 1128d65af95c5832139ca934347dff5b4ef226e4..e1c47a6bbff2cea9b0bf3f20c7a16a7a590ab692 100644 (file)
@@ -12,7 +12,7 @@
 // in the type of `p` includes the points after `&v[0]` up to (but not
 // including) the call to `use_x`. The `else` branch is not included.
 
-// compile-flags:-Zborrowck=compare -Znll
+// compile-flags:-Zborrowck=compare
 
 #![allow(warnings)]
 #![feature(rustc_attrs)]
index 7ed59ef2a879b9fdb96fda2292d84a609e96da1f..13f1ca6431b3d8e4adf793ec5310cd16c3494040 100644 (file)
@@ -12,7 +12,7 @@
 // in the type of `p` includes the points after `&v[0]` up to (but not
 // including) the call to `use_x`. The `else` branch is not included.
 
-// compile-flags:-Zborrowck=compare -Znll
+// compile-flags:-Zborrowck=compare
 
 #![allow(warnings)]
 #![feature(rustc_attrs)]
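Several of the NLL tests touched here share the comment quoted above: the inferred region in the type of `p` covers the points from `&v[0]` up to the `use_x` call on one branch only. The test bodies are not shown in these hunks, so the following is only a sketch of the shape they describe, under the assumption that `use_x` simply consumes an `i32`:

    fn use_x(_: i32) {}

    fn main() {
        let v = vec![1, 2, 3];
        let cond = v.len() > 2;
        let p = &v[0];
        if cond {
            use_x(*p); // the borrow `p` must be live up to this use...
        } else {
            use_x(22); // ...but its region does not extend into this branch
        }
    }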
index ecea8756903aeb9f694ef3830f3a1fa7ab20e2ef..1a3dc76005db4629f86595c575aef3e1e5641b04 100644 (file)
@@ -8,7 +8,7 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
-// compile-flags: -Z borrowck=mir -Z nll
+// compile-flags: -Zborrowck=mir
 
 #![allow(dead_code)]
 
@@ -22,7 +22,7 @@ fn foo<'a, 'b>(x: &'a u32, y: &'b u32) -> (&'a u32, &'b u32)
 fn bar<'a, 'b>(x: &'a u32, y: &'b u32) -> (&'a u32, &'b u32) {
     foo(x, y)
     //~^ ERROR lifetime mismatch [E0623]
-    //~| WARNING not reporting region error due to -Znll
+    //~| WARNING not reporting region error due to nll
 }
 
 fn main() {}
index f1a6dc48e13b80ed809652e7cb18c8042626dfcb..69f0f43af13425af0d5942286d4cefc3d1806a51 100644 (file)
@@ -8,7 +8,7 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
-// compile-flags: -Z borrowck=mir -Z nll
+// compile-flags: -Z borrowck=mir
 
 #![allow(dead_code)]
 
@@ -22,7 +22,7 @@ struct Foo<'a: 'b, 'b> {
 fn bar<'a, 'b>(x: Cell<&'a u32>, y: Cell<&'b u32>) {
     Foo { x, y };
     //~^ ERROR lifetime mismatch [E0623]
-    //~| WARNING not reporting region error due to -Znll
+    //~| WARNING not reporting region error due to nll
 }
 
 fn main() {}
index 6fbc65ce6a71f3a01729e240dab2abb9b1b27cbd..1f23dcbb72523baaeb6689d55fd7a144cfde4d58 100644 (file)
@@ -18,7 +18,8 @@ fn main() {
     match (&a1,) {
         (&ref b0,) => {
             a1 = &f; //[ast]~ ERROR cannot assign
-                     //[mir]~^ ERROR cannot assign to `a1` because it is borrowed
+            //[mir]~^ ERROR cannot assign to `a1` because it is borrowed
+            drop(b0);
         }
     }
 }
index 13f93090fbbb4fae5260fe6296d19cca0a8ca8bb..a0097b9f6d7df84ae0e3ce2e8e59cabc5bec13e0 100644 (file)
@@ -9,7 +9,7 @@
 // except according to those terms.
 
 // revisions: ll nll
-//[nll] compile-flags: -Znll -Zborrowck=mir
+//[nll] compile-flags:-Zborrowck=mir
 
 fn static_id<'a,'b>(t: &'a ()) -> &'static ()
     where 'a: 'static { t }
@@ -17,16 +17,16 @@ fn static_id_indirect<'a,'b>(t: &'a ()) -> &'static ()
     where 'a: 'b, 'b: 'static { t }
 fn static_id_wrong_way<'a>(t: &'a ()) -> &'static () where 'static: 'a {
     t //[ll]~ ERROR E0312
-        //[nll]~^ WARNING not reporting region error due to -Znll
+        //[nll]~^ WARNING not reporting region error due to nll
         //[nll]~| ERROR free region `'a` does not outlive free region `'static`
 }
 
 fn error(u: &(), v: &()) {
     static_id(&u); //[ll]~ ERROR explicit lifetime required in the type of `u` [E0621]
-    //[nll]~^ WARNING not reporting region error due to -Znll
+    //[nll]~^ WARNING not reporting region error due to nll
     //[nll]~| ERROR explicit lifetime required in the type of `u` [E0621]
     static_id_indirect(&v); //[ll]~ ERROR explicit lifetime required in the type of `v` [E0621]
-    //[nll]~^ WARNING not reporting region error due to -Znll
+    //[nll]~^ WARNING not reporting region error due to nll
     //[nll]~| ERROR explicit lifetime required in the type of `v` [E0621]
 }
 
index 7210c857125d6d9e026acdc9273fed733dc26d95..2a894c57b8df19c5a83d9b131f7d0de6ffb90d06 100644 (file)
@@ -21,10 +21,10 @@ impl Tr for S where Self: Copy {} // OK
 impl Tr for S where S<Self>: Copy {} // OK
 impl Tr for S where Self::A: Copy {} // OK
 
-impl Tr for Self {} //~ ERROR cyclic dependency detected
-impl Tr for S<Self> {} //~ ERROR cyclic dependency detected
-impl Self {} //~ ERROR cyclic dependency detected
-impl S<Self> {} //~ ERROR cyclic dependency detected
-impl Tr<Self::A> for S {} //~ ERROR cyclic dependency detected
+impl Tr for Self {} //~ ERROR cycle detected
+impl Tr for S<Self> {} //~ ERROR cycle detected
+impl Self {} //~ ERROR cycle detected
+impl S<Self> {} //~ ERROR cycle detected
+impl Tr<Self::A> for S {} //~ ERROR cycle detected
 
 fn main() {}
diff --git a/src/test/incremental/static_cycle/b.rs b/src/test/incremental/static_cycle/b.rs
new file mode 100644 (file)
index 0000000..b659703
--- /dev/null
@@ -0,0 +1,19 @@
+// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// revisions:rpass1 rpass2
+
+#![cfg_attr(rpass2, warn(dead_code))]
+
+pub static mut BAA: *const i8 = unsafe { &BOO as *const _ as *const i8 };
+
+pub static mut BOO: *const i8 = unsafe { &BAA as *const _ as *const i8 };
+
+fn main() {}
index 09288cf69ff586064efc7116894ccf6d5257dc89..f41b39845a579681bf5811aa2e9fcebc4deac32b 100644 (file)
@@ -8,7 +8,7 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
-// compile-flags:-Znll
+// compile-flags:-Zborrowck=mir
 
 fn can_panic() -> Box<usize> {
     Box::new(44)
index 64ffc7446062cb171b7ced9ca4e792e0e16ce304..073b44d6e338763ef9a8a5d63d289469b7012d8f 100644 (file)
@@ -8,7 +8,7 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
-// compile-flags:-Znll
+// compile-flags:-Zborrowck=mir
 
 #![allow(warnings)]
 
index 671f5e5292aa439029b6e98221ed7e51bb9da502..6a8749084068a30fa6afcd518b5a2b0cb1f14a44 100644 (file)
@@ -8,7 +8,7 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
-// compile-flags:-Znll
+// compile-flags:-Zborrowck=mir
 
 fn cond() -> bool { false }
 
index 8feac15d69a82a484b442589d047b18fb95c003b..5cca3e55259d0f874ff7b81fdd0af9a401a64b69 100644 (file)
@@ -13,8 +13,8 @@
 // suitable variables and that we setup the outlives relationship
 // between R0 and R1 properly.
 
-// compile-flags:-Znll -Zverbose
-//                     ^^^^^^^^^ force compiler to dump more region information
+// compile-flags:-Zborrowck=mir -Zverbose
+//                              ^^^^^^^^^ force compiler to dump more region information
 // ignore-tidy-linelength
 
 #![allow(warnings)]
index 92e42a73bbb6a9679fe764e804cf55d30acb6928..b7c8d81b77fdbaf91b0caeefda1d9c578731ebcb 100644 (file)
@@ -12,8 +12,8 @@
 // in the type of `r_a` must outlive the region (`R7`) that appears in
 // the type of `r_b`
 
-// compile-flags:-Znll -Zverbose
-//                     ^^^^^^^^^ force compiler to dump more region information
+// compile-flags:-Zborrowck=mir -Zverbose
+//                              ^^^^^^^^^ force compiler to dump more region information
 
 #![allow(warnings)]
 
index 19d733d4f6b6a8931bdc0a4d2b5617b970b7b85c..75d8a6a4f6ac55ca3b830ecf4a58b3e79a9bade7 100644 (file)
@@ -12,8 +12,8 @@
 // in the type of `p` includes the points after `&v[0]` up to (but not
 // including) the call to `use_x`. The `else` branch is not included.
 
-// compile-flags:-Znll -Zverbose
-//                     ^^^^^^^^^ force compiler to dump more region information
+// compile-flags:-Zborrowck=mir -Zverbose
+//                              ^^^^^^^^^ force compiler to dump more region information
 
 #![allow(warnings)]
 
index 821cd73667193afb145773501869b5755a5ab314..153739133ac82900258866451e2cdf16a1be753d 100644 (file)
@@ -14,8 +14,8 @@
 // but only at a particular point, and hence they wind up including
 // distinct regions.
 
-// compile-flags:-Znll -Zverbose
-//                     ^^^^^^^^^ force compiler to dump more region information
+// compile-flags:-Zborrowck=mir -Zverbose
+//                              ^^^^^^^^^ force compiler to dump more region information
 
 #![allow(warnings)]
 
index a3f68ed5ebf5eecd7f7f17d8bafb2bced9318296..16952143d0a5bc6a294682f139da8c89adbf6795 100644 (file)
@@ -12,8 +12,8 @@
 // in the type of `p` includes the points after `&v[0]` up to (but not
 // including) the call to `use_x`. The `else` branch is not included.
 
-// compile-flags:-Znll -Zverbose
-//                     ^^^^^^^^^ force compiler to dump more region information
+// compile-flags:-Zborrowck=mir -Zverbose
+//                              ^^^^^^^^^ force compiler to dump more region information
 
 #![allow(warnings)]
 
diff --git a/src/test/parse-fail/obsolete-proc.rs b/src/test/parse-fail/obsolete-proc.rs
deleted file mode 100644 (file)
index 648c46d..0000000
+++ /dev/null
@@ -1,19 +0,0 @@
-// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-// compile-flags: -Z parse-only
-
-// Test that we generate obsolete syntax errors around usages of `proc`.
-
-fn foo(p: proc()) { } //~ ERROR expected type, found reserved keyword `proc`
-
-fn bar() { proc() 1; } //~ ERROR expected expression, found reserved keyword `proc`
-
-fn main() { }
diff --git a/src/test/run-make-fulldeps/save-analysis-rfc2126/Makefile b/src/test/run-make-fulldeps/save-analysis-rfc2126/Makefile
new file mode 100644 (file)
index 0000000..a132668
--- /dev/null
@@ -0,0 +1,10 @@
+-include ../tools.mk
+
+all: extern_absolute_paths.rs extern_in_paths.rs krate2
+       $(RUSTC) extern_absolute_paths.rs -Zsave-analysis
+       cat $(TMPDIR)/save-analysis/extern_absolute_paths.json | "$(PYTHON)" validate_json.py
+       $(RUSTC) extern_in_paths.rs -Zsave-analysis
+       cat $(TMPDIR)/save-analysis/extern_in_paths.json | "$(PYTHON)" validate_json.py
+
+krate2: krate2.rs
+       $(RUSTC) $<
diff --git a/src/test/run-make-fulldeps/save-analysis-rfc2126/extern_absolute_paths.rs b/src/test/run-make-fulldeps/save-analysis-rfc2126/extern_absolute_paths.rs
new file mode 100644 (file)
index 0000000..b1fe535
--- /dev/null
@@ -0,0 +1,18 @@
+// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+#![feature(extern_absolute_paths)]
+
+use krate2::hello;
+
+fn main() {
+    hello();
+    ::krate2::hello();
+}
diff --git a/src/test/run-make-fulldeps/save-analysis-rfc2126/extern_in_paths.rs b/src/test/run-make-fulldeps/save-analysis-rfc2126/extern_in_paths.rs
new file mode 100644 (file)
index 0000000..e48627e
--- /dev/null
@@ -0,0 +1,17 @@
+// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+#![feature(extern_in_paths)]
+
+use extern::krate2;
+
+fn main() {
+    extern::krate2::hello();
+}
diff --git a/src/test/run-make-fulldeps/save-analysis-rfc2126/krate2.rs b/src/test/run-make-fulldeps/save-analysis-rfc2126/krate2.rs
new file mode 100644 (file)
index 0000000..21fc57c
--- /dev/null
@@ -0,0 +1,15 @@
+// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+#![crate_name = "krate2"]
+#![crate_type = "lib"]
+
+pub fn hello() {
+}
diff --git a/src/test/run-make-fulldeps/save-analysis-rfc2126/validate_json.py b/src/test/run-make-fulldeps/save-analysis-rfc2126/validate_json.py
new file mode 100644 (file)
index 0000000..caab8d0
--- /dev/null
@@ -0,0 +1,17 @@
+#!/usr/bin/env python
+
+# Copyright 2018 The Rust Project Developers. See the COPYRIGHT
+# file at the top-level directory of this distribution and at
+# http://rust-lang.org/COPYRIGHT.
+#
+# Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+# http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+# <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+# option. This file may not be copied, modified, or distributed
+# except according to those terms.
+
+import sys
+import json
+
+crates = json.loads(sys.stdin.readline().strip())["prelude"]["external_crates"]
+assert any(map(lambda c: c["id"]["name"] == "krate2", crates))
diff --git a/src/test/run-make/wasm-panic-small/Makefile b/src/test/run-make/wasm-panic-small/Makefile
new file mode 100644 (file)
index 0000000..330ae30
--- /dev/null
@@ -0,0 +1,17 @@
+-include ../../run-make-fulldeps/tools.mk
+
+ifeq ($(TARGET),wasm32-unknown-unknown)
+all:
+       $(RUSTC) foo.rs -C lto -O --target wasm32-unknown-unknown --cfg a
+       wc -c < $(TMPDIR)/foo.wasm
+       [ "`wc -c < $(TMPDIR)/foo.wasm`" -lt "1024" ]
+       $(RUSTC) foo.rs -C lto -O --target wasm32-unknown-unknown --cfg b
+       wc -c < $(TMPDIR)/foo.wasm
+       [ "`wc -c < $(TMPDIR)/foo.wasm`" -lt "5120" ]
+       $(RUSTC) foo.rs -C lto -O --target wasm32-unknown-unknown --cfg c
+       wc -c < $(TMPDIR)/foo.wasm
+       [ "`wc -c < $(TMPDIR)/foo.wasm`" -lt "5120" ]
+else
+all:
+endif
+
diff --git a/src/test/run-make/wasm-panic-small/foo.rs b/src/test/run-make/wasm-panic-small/foo.rs
new file mode 100644 (file)
index 0000000..1ea724c
--- /dev/null
@@ -0,0 +1,29 @@
+// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+#![crate_type = "cdylib"]
+
+#[no_mangle]
+#[cfg(a)]
+pub fn foo() {
+    panic!("test");
+}
+
+#[no_mangle]
+#[cfg(b)]
+pub fn foo() {
+    panic!("{}", 1);
+}
+
+#[no_mangle]
+#[cfg(c)]
+pub fn foo() {
+    panic!("{}", "a");
+}
index 66234449263982ad635838fecdbb1da69df100e8..ca15591a10192deee48078ca565dca0c2691d391 100644 (file)
@@ -8,9 +8,7 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
-// revisions: lxl nll
-//[lxl]compile-flags: -Z borrowck=mir -Z two-phase-borrows
-//[nll]compile-flags: -Z borrowck=mir -Z two-phase-borrows -Z nll
+// compile-flags: -Z borrowck=mir -Z two-phase-borrows
 
 // This is the "goto example" for why we want two phase borrows.
 
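The "goto example" the comment refers to is the nested `v.push(v.len())` call. A minimal sketch of it under `-Z two-phase-borrows`, hedged in that it is not copied from the test file itself:

    fn main() {
        let mut v = vec![0, 1];
        // Without two-phase borrows this is rejected: `push` needs `&mut v`
        // while the argument `v.len()` still reads `v`. Splitting the borrow
        // into reservation and activation lets the shared read happen first.
        v.push(v.len());
        assert_eq!(v, [0, 1, 2]);
    }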
index b1db203071788cce72e3acf9e271c41897cdea06..e3b7322e48b6b93ac2b05050dc98417cdb818982 100644 (file)
@@ -9,7 +9,7 @@
 // except according to those terms.
 
 // revisions: normal nll
-//[nll] compile-flags: -Znll -Zborrowck=mir
+//[nll] compile-flags:-Zborrowck=mir
 
 #![feature(fn_traits,
            step_trait,
index fb35ce33157d6ffcbbca6016373c1d839be0434f..ecd8a3390b75316cd47561651694b48acc78f94b 100644 (file)
@@ -8,7 +8,7 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
-//`#[cfg]` on struct field permits empty unusable struct
+// `#[cfg]` on struct field permits empty unusable struct
 
 struct S {
     #[cfg(untrue)]
index b7e942f938321ae8f033a65909cea38975d856e2..c08effe549328705ae5843475b2c4f54c5032033 100644 (file)
@@ -32,25 +32,13 @@ macro_rules! foo {
     } }
 }
 
-macro_rules! baz {
-    ($($a:ident),? ; $num:expr) => { { // comma separator is meaningless for `?`
-        let mut x = 0;
-
-        $(
-            x += $a;
-         )?
-
-        assert_eq!(x, $num);
-    } }
-}
-
 macro_rules! barplus {
     ($($a:ident)?+ ; $num:expr) => { {
         let mut x = 0;
 
         $(
             x += $a;
-         )+
+         )?
 
         assert_eq!(x, $num);
     } }
@@ -62,7 +50,7 @@ macro_rules! barstar {
 
         $(
             x += $a;
-         )*
+         )?
 
         assert_eq!(x, $num);
     } }
@@ -74,15 +62,10 @@ pub fn main() {
     // accept 0 or 1 repetitions
     foo!( ; 0);
     foo!(a ; 1);
-    baz!( ; 0);
-    baz!(a ; 1);
 
     // Make sure using ? as a separator works as before
-    barplus!(a ; 1);
-    barplus!(a?a ; 2);
-    barplus!(a?a?a ; 3);
-    barstar!( ; 0);
-    barstar!(a ; 1);
-    barstar!(a?a ; 2);
-    barstar!(a?a?a ; 3);
+    barplus!(+ ; 0);
+    barplus!(a + ; 1);
+    barstar!(* ; 0);
+    barstar!(a * ; 1);
 }
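The hunks above adapt this test to the new semantics in which `?` matches a fragment at most once and takes no separator, so the old `a?a` invocations disappear and `+` / `*` now appear only as ordinary separator tokens. A standalone sketch of the resulting behaviour, written against the post-change semantics (at this point in the tree the test file also enables the `macro_at_most_once_rep` feature gate, which is assumed here rather than shown):

    macro_rules! at_most_once {
        ($($a:ident)? ; $num:expr) => {{
            let mut x = 0;
            // Expands zero or one time; no separator is permitted before `?`.
            $( x += $a; )?
            assert_eq!(x, $num);
        }}
    }

    fn main() {
        let a = 1;
        at_most_once!( ; 0);
        at_most_once!(a ; 1);
    }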
index 3c54921ac6e02409d3811f65650dacd5e20453e1..139da046452646b77c1193c8f18691b3ae084a6d 100644 (file)
@@ -11,6 +11,7 @@
 // ignore-emscripten
 
 #![feature(repr_simd, target_feature, cfg_target_feature)]
+#![feature(avx512_target_feature)]
 
 use std::process::{Command, ExitStatus};
 use std::env;
diff --git a/src/test/rustdoc-js/multi-query.js b/src/test/rustdoc-js/multi-query.js
new file mode 100644 (file)
index 0000000..3793ca6
--- /dev/null
@@ -0,0 +1,20 @@
+// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+const QUERY = 'str,u8';
+
+const EXPECTED = {
+    'others': [
+        { 'path': 'std', 'name': 'str' },
+        { 'path': 'std', 'name': 'u8' },
+        { 'path': 'std::ffi', 'name': 'CStr' },
+        { 'path': 'std::simd', 'name': 'u8x2' },
+    ],
+};
diff --git a/src/test/rustdoc-ui/intra-links-warning.rs b/src/test/rustdoc-ui/intra-links-warning.rs
new file mode 100644 (file)
index 0000000..2a00d31
--- /dev/null
@@ -0,0 +1,17 @@
+// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// compile-pass
+
+//! Test with [Foo::baz], [Bar::foo], [Uniooon::X]
+
+pub struct Foo {
+    pub bar: usize,
+}
diff --git a/src/test/rustdoc-ui/intra-links-warning.stderr b/src/test/rustdoc-ui/intra-links-warning.stderr
new file mode 100644 (file)
index 0000000..67d7bdd
--- /dev/null
@@ -0,0 +1,6 @@
+warning: [Foo::baz] cannot be resolved, ignoring it...
+
+warning: [Bar::foo] cannot be resolved, ignoring it...
+
+warning: [Uniooon::X] cannot be resolved, ignoring it...
+
index ec391319b183ce48475f7dd1d885e418bf3b9a3f..1969cf859eee4c8e9a50f89b0ea78c5152e341c1 100644 (file)
@@ -28,3 +28,11 @@ pub union Union {
 pub const CONST: u32 = 0;
 pub static STATIC: &str = "baguette";
 pub fn function() {}
+
+mod private_module {
+    pub struct ReexportedStruct;
+}
+
+// @has foo/all.html '//a[@href="struct.ReexportedStruct.html"]' 'ReexportedStruct'
+// @!has foo/all.html 'private_module'
+pub use private_module::ReexportedStruct;
diff --git a/src/test/rustdoc/force-target-feature.rs b/src/test/rustdoc/force-target-feature.rs
new file mode 100644 (file)
index 0000000..08f1f06
--- /dev/null
@@ -0,0 +1,21 @@
+// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// only-x86_64
+// compile-flags:--test -C target-feature=+avx
+// should-fail
+
+/// (written on a spider's web) Some Struct
+///
+/// ```
+/// panic!("oh no");
+/// ```
+#[doc(cfg(target_feature = "avx"))]
+pub struct SomeStruct;
index b6946154fa00a0a5870c495b172305b8cf2657fd..55e73af9e6a1ba06f1a6616b76e5a46ad547d5ec 100644 (file)
@@ -23,6 +23,7 @@ fn a() {
     let c1 = to_fn_mut(|| x = 4);
     let c2 = to_fn_mut(|| x = 5); //~ ERROR cannot borrow `x` as mutable more than once
     //~| ERROR cannot borrow `x` as mutable more than once
+    drop((c1, c2));
 }
 
 fn set(x: &mut isize) {
@@ -34,6 +35,7 @@ fn b() {
     let c1 = to_fn_mut(|| set(&mut x));
     let c2 = to_fn_mut(|| set(&mut x)); //~ ERROR cannot borrow `x` as mutable more than once
     //~| ERROR cannot borrow `x` as mutable more than once
+    drop((c1, c2));
 }
 
 fn c() {
@@ -41,6 +43,7 @@ fn c() {
     let c1 = to_fn_mut(|| x = 5);
     let c2 = to_fn_mut(|| set(&mut x)); //~ ERROR cannot borrow `x` as mutable more than once
     //~| ERROR cannot borrow `x` as mutable more than once
+    drop((c1, c2));
 }
 
 fn d() {
@@ -49,6 +52,7 @@ fn d() {
     let c2 = to_fn_mut(|| { let _y = to_fn_mut(|| set(&mut x)); }); // (nested closure)
     //~^ ERROR cannot borrow `x` as mutable more than once
     //~| ERROR cannot borrow `x` as mutable more than once
+    drop((c1, c2));
 }
 
 fn g() {
@@ -61,6 +65,7 @@ struct Foo {
     let c2 = to_fn_mut(|| set(&mut *x.f));
     //~^ ERROR cannot borrow `x` as mutable more than once
     //~| ERROR cannot borrow `x` as mutable more than once
+    drop((c1, c2));
 }
 
 fn main() {
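The `drop((c1, c2))` lines added above keep both closures live past the second capture, so the double-mutable-borrow errors still fire under MIR borrowck now that borrows end at their last use. Conversely, a compiling sketch, not part of the commit, in which each closure's borrow of `x` ends before the next closure captures it:

    fn to_fn_mut<F: FnMut()>(f: F) -> F { f }

    fn main() {
        let mut x = 3;
        let mut c1 = to_fn_mut(|| x += 1);
        c1(); // last use of `c1`: its mutable borrow of `x` ends here
        let mut c2 = to_fn_mut(|| x += 1);
        c2();
        assert_eq!(x, 5);
    }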
index c739165ddbd3734568b45d54d6170a06531abc81..a4f8e8b408ba501b15e6783764e00a01b8e309f0 100644 (file)
@@ -9,12 +9,12 @@ LL |     let c2 = to_fn_mut(|| x = 5); //~ ERROR cannot borrow `x` as mutable mo
    |                        ^^ - borrow occurs due to use of `x` in closure
    |                        |
    |                        second mutable borrow occurs here
-LL |     //~| ERROR cannot borrow `x` as mutable more than once
+...
 LL | }
    | - first borrow ends here
 
 error[E0499]: cannot borrow `x` as mutable more than once at a time (Ast)
-  --> $DIR/borrowck-closures-two-mut.rs:35:24
+  --> $DIR/borrowck-closures-two-mut.rs:36:24
    |
 LL |     let c1 = to_fn_mut(|| set(&mut x));
    |                        --          - previous borrow occurs due to use of `x` in closure
@@ -24,12 +24,12 @@ LL |     let c2 = to_fn_mut(|| set(&mut x)); //~ ERROR cannot borrow `x` as muta
    |                        ^^          - borrow occurs due to use of `x` in closure
    |                        |
    |                        second mutable borrow occurs here
-LL |     //~| ERROR cannot borrow `x` as mutable more than once
+...
 LL | }
    | - first borrow ends here
 
 error[E0499]: cannot borrow `x` as mutable more than once at a time (Ast)
-  --> $DIR/borrowck-closures-two-mut.rs:42:24
+  --> $DIR/borrowck-closures-two-mut.rs:44:24
    |
 LL |     let c1 = to_fn_mut(|| x = 5);
    |                        -- - previous borrow occurs due to use of `x` in closure
@@ -39,12 +39,12 @@ LL |     let c2 = to_fn_mut(|| set(&mut x)); //~ ERROR cannot borrow `x` as muta
    |                        ^^          - borrow occurs due to use of `x` in closure
    |                        |
    |                        second mutable borrow occurs here
-LL |     //~| ERROR cannot borrow `x` as mutable more than once
+...
 LL | }
    | - first borrow ends here
 
 error[E0499]: cannot borrow `x` as mutable more than once at a time (Ast)
-  --> $DIR/borrowck-closures-two-mut.rs:49:24
+  --> $DIR/borrowck-closures-two-mut.rs:52:24
    |
 LL |     let c1 = to_fn_mut(|| x = 5);
    |                        -- - previous borrow occurs due to use of `x` in closure
@@ -59,7 +59,7 @@ LL | }
    | - first borrow ends here
 
 error[E0499]: cannot borrow `x` as mutable more than once at a time (Ast)
-  --> $DIR/borrowck-closures-two-mut.rs:61:24
+  --> $DIR/borrowck-closures-two-mut.rs:65:24
    |
 LL |     let c1 = to_fn_mut(|| set(&mut *x.f));
    |                        --           - previous borrow occurs due to use of `x` in closure
@@ -85,11 +85,11 @@ LL |     let c2 = to_fn_mut(|| x = 5); //~ ERROR cannot borrow `x` as mutable mo
    |                        |
    |                        second mutable borrow occurs here
 LL |     //~| ERROR cannot borrow `x` as mutable more than once
-LL | }
-   | - first borrow ends here
+LL |     drop((c1, c2));
+   |           -- borrow later used here
 
 error[E0499]: cannot borrow `x` as mutable more than once at a time (Mir)
-  --> $DIR/borrowck-closures-two-mut.rs:35:24
+  --> $DIR/borrowck-closures-two-mut.rs:36:24
    |
 LL |     let c1 = to_fn_mut(|| set(&mut x));
    |                        --          - previous borrow occurs due to use of `x` in closure
@@ -100,11 +100,11 @@ LL |     let c2 = to_fn_mut(|| set(&mut x)); //~ ERROR cannot borrow `x` as muta
    |                        |
    |                        second mutable borrow occurs here
 LL |     //~| ERROR cannot borrow `x` as mutable more than once
-LL | }
-   | - first borrow ends here
+LL |     drop((c1, c2));
+   |           -- borrow later used here
 
 error[E0499]: cannot borrow `x` as mutable more than once at a time (Mir)
-  --> $DIR/borrowck-closures-two-mut.rs:42:24
+  --> $DIR/borrowck-closures-two-mut.rs:44:24
    |
 LL |     let c1 = to_fn_mut(|| x = 5);
    |                        -- - previous borrow occurs due to use of `x` in closure
@@ -115,11 +115,11 @@ LL |     let c2 = to_fn_mut(|| set(&mut x)); //~ ERROR cannot borrow `x` as muta
    |                        |
    |                        second mutable borrow occurs here
 LL |     //~| ERROR cannot borrow `x` as mutable more than once
-LL | }
-   | - first borrow ends here
+LL |     drop((c1, c2));
+   |           -- borrow later used here
 
 error[E0499]: cannot borrow `x` as mutable more than once at a time (Mir)
-  --> $DIR/borrowck-closures-two-mut.rs:49:24
+  --> $DIR/borrowck-closures-two-mut.rs:52:24
    |
 LL |     let c1 = to_fn_mut(|| x = 5);
    |                        -- - previous borrow occurs due to use of `x` in closure
@@ -130,11 +130,11 @@ LL |     let c2 = to_fn_mut(|| { let _y = to_fn_mut(|| set(&mut x)); }); // (nes
    |                        |
    |                        second mutable borrow occurs here
 ...
-LL | }
-   | - first borrow ends here
+LL |     drop((c1, c2));
+   |           -- borrow later used here
 
 error[E0499]: cannot borrow `x` as mutable more than once at a time (Mir)
-  --> $DIR/borrowck-closures-two-mut.rs:61:24
+  --> $DIR/borrowck-closures-two-mut.rs:65:24
    |
 LL |     let c1 = to_fn_mut(|| set(&mut *x.f));
    |                        --           - previous borrow occurs due to use of `x` in closure
@@ -145,8 +145,8 @@ LL |     let c2 = to_fn_mut(|| set(&mut *x.f));
    |                        |
    |                        second mutable borrow occurs here
 ...
-LL | }
-   | - first borrow ends here
+LL |     drop((c1, c2));
+   |           -- borrow later used here
 
 error: aborting due to 10 previous errors
 
index e690263a916f3cf5f42750f229a1fb8f424b25eb..6d7ed61bdd8b550073f324894a7912a07c700e22 100644 (file)
@@ -8,9 +8,7 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
-// revisions: lxl nll
-//[lxl]compile-flags: -Z borrowck=mir -Z two-phase-borrows
-//[nll]compile-flags: -Z borrowck=mir -Z two-phase-borrows -Z nll
+// compile-flags: -Z borrowck=mir -Z two-phase-borrows
 
 // run-pass
 
index e1ed41bfb2c64cb97e9fd3bf92ed672e3cc518ca..9ea9696511b7194ba0e330dfc8e9e69137ad4535 100644 (file)
@@ -8,9 +8,7 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
-// revisions: lxl nll
-//[lxl]compile-flags: -Z borrowck=mir -Z two-phase-borrows
-//[nll]compile-flags: -Z borrowck=mir -Z two-phase-borrows -Z nll
+// compile-flags: -Z borrowck=mir -Z two-phase-borrows
 
 // run-pass
 
index 2083ada6d2de5481b7177e8f246ffceadcdc9cad..671d77efbea3aac7f50850cdfef324cbdb481415 100644 (file)
@@ -15,6 +15,15 @@ trait Foo { }
 #[rustc_dump_program_clauses] //~ ERROR Implemented(T: Foo) :-
 impl<T: 'static> Foo for T where T: Iterator<Item = i32> { }
 
+trait Bar {
+    type Assoc;
+}
+
+impl<T> Bar for T where T: Iterator<Item = i32> {
+    #[rustc_dump_program_clauses] //~ ERROR Normalize(<T as Bar>::Assoc == std::vec::Vec<T>) :-
+    type Assoc = Vec<T>;
+}
+
 fn main() {
     println!("hello");
 }
index b5d791d640ada9dcd79a4c07aff37b2e845f90fa..f253f9847d16294bc9fe39e55bb6c68e9351ee51 100644 (file)
@@ -4,5 +4,11 @@ error: Implemented(T: Foo) :- ProjectionEq(<T as std::iter::Iterator>::Item == i
 LL | #[rustc_dump_program_clauses] //~ ERROR Implemented(T: Foo) :-
    | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
 
-error: aborting due to previous error
+error: Normalize(<T as Bar>::Assoc == std::vec::Vec<T>) :- Implemented(T: Bar).
+  --> $DIR/lower_impl.rs:23:5
+   |
+LL |     #[rustc_dump_program_clauses] //~ ERROR Normalize(<T as Bar>::Assoc == std::vec::Vec<T>) :-
+   |     ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: aborting due to 2 previous errors
 
index 447505e886f81ef07a4684d351d78167fa6f9686..f240a34817db5ff8c20e1247ffd182695fadb78c 100644 (file)
@@ -15,10 +15,9 @@ trait A: B {
 }
 
 trait B: C {
+    //~^ ERROR cycle detected
 }
 
 trait C: B { }
-    //~^ ERROR cyclic dependency detected
-    //~| cyclic reference
 
 fn main() { }
index 68c20df5f72179f8e8dbde89c9f1555c4e6761db..85681b478e21df537afe9fade5f8fe5594836be0 100644 (file)
@@ -1,20 +1,20 @@
-error[E0391]: cyclic dependency detected
-  --> $DIR/cycle-trait-supertrait-indirect.rs:20:1
+error[E0391]: cycle detected when computing the supertraits of `B`
+  --> $DIR/cycle-trait-supertrait-indirect.rs:17:1
    |
-LL | trait C: B { }
-   | ^^^^^^^^^^ cyclic reference
+LL | trait B: C {
+   | ^^^^^^^^^^
    |
-note: the cycle begins when computing the supertraits of `B`...
-  --> $DIR/cycle-trait-supertrait-indirect.rs:14:1
+note: ...which requires computing the supertraits of `C`...
+  --> $DIR/cycle-trait-supertrait-indirect.rs:21:1
    |
-LL | trait A: B {
+LL | trait C: B { }
    | ^^^^^^^^^^
-note: ...which then requires computing the supertraits of `C`...
-  --> $DIR/cycle-trait-supertrait-indirect.rs:17:1
+   = note: ...which again requires computing the supertraits of `B`, completing the cycle
+note: cycle used when computing the supertraits of `A`
+  --> $DIR/cycle-trait-supertrait-indirect.rs:14:1
    |
-LL | trait B: C {
+LL | trait A: B {
    | ^^^^^^^^^^
-   = note: ...which then again requires computing the supertraits of `B`, completing the cycle.
 
 error: aborting due to previous error
 
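For reference, a self-contained sketch (not from this commit) of the kind of supertrait cycle the reworded E0391 diagnostic above describes: computing the supertraits of one trait requires computing the supertraits of the other, which in turn requires the first again, completing the cycle.

trait A: B {} // error[E0391]: cycle detected when computing the supertraits of `A`
trait B: A {}

fn main() {}
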
diff --git a/src/test/ui/feature-gate-cfg-target-feature.rs b/src/test/ui/feature-gate-cfg-target-feature.rs
deleted file mode 100644 (file)
index 7832e1c..0000000
+++ /dev/null
@@ -1,21 +0,0 @@
-// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-#[cfg(target_feature = "x")] //~ ERROR `cfg(target_feature)` is experimental
-#[cfg_attr(target_feature = "x", x)] //~ ERROR `cfg(target_feature)` is experimental
-struct Foo(u64, u64);
-
-#[cfg(not(any(all(target_feature = "x"))))] //~ ERROR `cfg(target_feature)` is experimental
-fn foo() {}
-
-fn main() {
-    cfg!(target_feature = "x");
-    //~^ ERROR `cfg(target_feature)` is experimental and subject to change
-}
diff --git a/src/test/ui/feature-gate-cfg-target-feature.stderr b/src/test/ui/feature-gate-cfg-target-feature.stderr
deleted file mode 100644 (file)
index bf9e596..0000000
+++ /dev/null
@@ -1,35 +0,0 @@
-error[E0658]: `cfg(target_feature)` is experimental and subject to change (see issue #29717)
-  --> $DIR/feature-gate-cfg-target-feature.rs:12:12
-   |
-LL | #[cfg_attr(target_feature = "x", x)] //~ ERROR `cfg(target_feature)` is experimental
-   |            ^^^^^^^^^^^^^^^^^^^^
-   |
-   = help: add #![feature(cfg_target_feature)] to the crate attributes to enable
-
-error[E0658]: `cfg(target_feature)` is experimental and subject to change (see issue #29717)
-  --> $DIR/feature-gate-cfg-target-feature.rs:11:7
-   |
-LL | #[cfg(target_feature = "x")] //~ ERROR `cfg(target_feature)` is experimental
-   |       ^^^^^^^^^^^^^^^^^^^^
-   |
-   = help: add #![feature(cfg_target_feature)] to the crate attributes to enable
-
-error[E0658]: `cfg(target_feature)` is experimental and subject to change (see issue #29717)
-  --> $DIR/feature-gate-cfg-target-feature.rs:15:19
-   |
-LL | #[cfg(not(any(all(target_feature = "x"))))] //~ ERROR `cfg(target_feature)` is experimental
-   |                   ^^^^^^^^^^^^^^^^^^^^
-   |
-   = help: add #![feature(cfg_target_feature)] to the crate attributes to enable
-
-error[E0658]: `cfg(target_feature)` is experimental and subject to change (see issue #29717)
-  --> $DIR/feature-gate-cfg-target-feature.rs:19:10
-   |
-LL |     cfg!(target_feature = "x");
-   |          ^^^^^^^^^^^^^^^^^^^^
-   |
-   = help: add #![feature(cfg_target_feature)] to the crate attributes to enable
-
-error: aborting due to 4 previous errors
-
-For more information about this error, try `rustc --explain E0658`.
diff --git a/src/test/ui/feature-gate-target_feature.rs b/src/test/ui/feature-gate-target_feature.rs
deleted file mode 100644 (file)
index da2e41a..0000000
+++ /dev/null
@@ -1,13 +0,0 @@
-// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-#[target_feature = "+sse2"]
-//~^ the `#[target_feature]` attribute is an experimental feature
-fn foo() {}
diff --git a/src/test/ui/feature-gate-target_feature.stderr b/src/test/ui/feature-gate-target_feature.stderr
deleted file mode 100644 (file)
index 0f31abf..0000000
+++ /dev/null
@@ -1,11 +0,0 @@
-error[E0658]: the `#[target_feature]` attribute is an experimental feature
-  --> $DIR/feature-gate-target_feature.rs:11:1
-   |
-LL | #[target_feature = "+sse2"]
-   | ^^^^^^^^^^^^^^^^^^^^^^^^^^^
-   |
-   = help: add #![feature(target_feature)] to the crate attributes to enable
-
-error: aborting due to previous error
-
-For more information about this error, try `rustc --explain E0658`.
index 99a7dd5e7852b22caa618ac8a78e3ec1a79faee6..54d548757643320c57c973f1359202c5eceaaba7 100644 (file)
@@ -40,8 +40,7 @@ fn after() -> impl Fn(i32) {
 // independently resolved and only require the concrete
 // return type, which can't depend on the obligation.
 fn cycle1() -> impl Clone {
-    //~^ ERROR cyclic dependency detected
-    //~| cyclic reference
+    //~^ ERROR cycle detected
     send(cycle2().clone());
 
     Rc::new(Cell::new(5))
index ca639f1076d3c814ff082ff765d47e81e38c1390..3b20451b10215afd4495b47b8ac21bf110a96e0c 100644 (file)
@@ -28,33 +28,29 @@ note: required by `send`
 LL | fn send<T: Send>(_: T) {}
    | ^^^^^^^^^^^^^^^^^^^^^^
 
-error[E0391]: cyclic dependency detected
-  --> $DIR/auto-trait-leak.rs:42:1
-   |
-LL | fn cycle1() -> impl Clone {
-   | ^^^^^^^^^^^^^^^^^^^^^^^^^ cyclic reference
-   |
-note: the cycle begins when processing `cycle1`...
+error[E0391]: cycle detected when processing `cycle1`
   --> $DIR/auto-trait-leak.rs:42:1
    |
 LL | fn cycle1() -> impl Clone {
    | ^^^^^^^^^^^^^^^^^^^^^^^^^
-note: ...which then requires processing `cycle2::{{impl-Trait}}`...
-  --> $DIR/auto-trait-leak.rs:50:16
+   |
+note: ...which requires processing `cycle2::{{impl-Trait}}`...
+  --> $DIR/auto-trait-leak.rs:49:16
    |
 LL | fn cycle2() -> impl Clone {
    |                ^^^^^^^^^^
-note: ...which then requires processing `cycle2`...
-  --> $DIR/auto-trait-leak.rs:50:1
+note: ...which requires processing `cycle2`...
+  --> $DIR/auto-trait-leak.rs:49:1
    |
 LL | fn cycle2() -> impl Clone {
    | ^^^^^^^^^^^^^^^^^^^^^^^^^
-note: ...which then requires processing `cycle1::{{impl-Trait}}`...
+note: ...which requires processing `cycle1::{{impl-Trait}}`...
   --> $DIR/auto-trait-leak.rs:42:16
    |
 LL | fn cycle1() -> impl Clone {
    |                ^^^^^^^^^^
-   = note: ...which then again requires processing `cycle1`, completing the cycle.
+   = note: ...which again requires processing `cycle1`, completing the cycle
+note: cycle used when type-checking all item bodies
 
 error: aborting due to 3 previous errors
 
index e4d6076868717f0678b6b31924327cae4b2a0e50..83359bf1675d94bd362955044bc416da1fc75342 100644 (file)
@@ -9,11 +9,10 @@
 // except according to those terms.
 
 trait t1 : t2 {
+//~^ ERROR cycle detected
 }
 
 trait t2 : t1 {
-//~^ ERROR cyclic dependency detected
-//~| cyclic reference
 }
 
 fn main() { }
index c1612b8cb67861e169ff37c891bd6c170912281c..1a48e6a6de1c6a8f2d56219f84be9578a9650949 100644 (file)
@@ -1,20 +1,15 @@
-error[E0391]: cyclic dependency detected
-  --> $DIR/issue-12511.rs:14:1
-   |
-LL | trait t2 : t1 {
-   | ^^^^^^^^^^^^^ cyclic reference
-   |
-note: the cycle begins when computing the supertraits of `t1`...
+error[E0391]: cycle detected when computing the supertraits of `t1`
   --> $DIR/issue-12511.rs:11:1
    |
 LL | trait t1 : t2 {
    | ^^^^^^^^^^^^^
-note: ...which then requires computing the supertraits of `t2`...
-  --> $DIR/issue-12511.rs:11:1
    |
-LL | trait t1 : t2 {
+note: ...which requires computing the supertraits of `t2`...
+  --> $DIR/issue-12511.rs:15:1
+   |
+LL | trait t2 : t1 {
    | ^^^^^^^^^^^^^
-   = note: ...which then again requires computing the supertraits of `t1`, completing the cycle.
+   = note: ...which again requires computing the supertraits of `t1`, completing the cycle
 
 error: aborting due to previous error
 
index c587c00279bde4f6634456cb7b412e3c9099b0a9..0fbe2f7a41177a5458b02e45d63c09009e01ee12 100644 (file)
@@ -1,20 +1,11 @@
-error[E0391]: cyclic dependency detected
-  --> $DIR/issue-23302-1.rs:14:9
-   |
-LL |     A = X::A as isize, //~ ERROR E0391
-   |         ^^^^^^^^^^^^^ cyclic reference
-   |
-note: the cycle begins when const-evaluating `X::A::{{initializer}}`...
-  --> $DIR/issue-23302-1.rs:14:9
-   |
-LL |     A = X::A as isize, //~ ERROR E0391
-   |         ^^^^^^^^^^^^^
-note: ...which then requires computing layout of `X`...
+error[E0391]: cycle detected when const-evaluating `X::A::{{initializer}}`
   --> $DIR/issue-23302-1.rs:14:9
    |
 LL |     A = X::A as isize, //~ ERROR E0391
    |         ^^^^
-   = note: ...which then again requires const-evaluating `X::A::{{initializer}}`, completing the cycle.
+   |
+note: ...which requires computing layout of `X`...
+   = note: ...which again requires const-evaluating `X::A::{{initializer}}`, completing the cycle
 
 error: aborting due to previous error
 
index 553ddaa1a8104d9d0429db0e1575e29958feeb2a..313cfa0c16260af9f2a46b6c8125a29c01b3d91d 100644 (file)
@@ -1,20 +1,11 @@
-error[E0391]: cyclic dependency detected
-  --> $DIR/issue-23302-2.rs:14:9
-   |
-LL |     A = Y::B as isize, //~ ERROR E0391
-   |         ^^^^^^^^^^^^^ cyclic reference
-   |
-note: the cycle begins when const-evaluating `Y::A::{{initializer}}`...
-  --> $DIR/issue-23302-2.rs:14:9
-   |
-LL |     A = Y::B as isize, //~ ERROR E0391
-   |         ^^^^^^^^^^^^^
-note: ...which then requires computing layout of `Y`...
+error[E0391]: cycle detected when const-evaluating `Y::A::{{initializer}}`
   --> $DIR/issue-23302-2.rs:14:9
    |
 LL |     A = Y::B as isize, //~ ERROR E0391
    |         ^^^^
-   = note: ...which then again requires const-evaluating `Y::A::{{initializer}}`, completing the cycle.
+   |
+note: ...which requires computing layout of `Y`...
+   = note: ...which again requires const-evaluating `Y::A::{{initializer}}`, completing the cycle
 
 error: aborting due to previous error
 
index 5903acc8b7a6d51c58079b71af97ebed169818eb..c3664e0abe1149664d979e7ac07571a4ef6eafd7 100644 (file)
@@ -8,8 +8,8 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
-const A: i32 = B;
+const A: i32 = B; //~ ERROR cycle detected
 
-const B: i32 = A; //~ ERROR cyclic dependency detected
+const B: i32 = A;
 
 fn main() { }
index 8cf296bc6db6c82f4335e716d258b3b78bc05778..54edf42679aeff372be1589bebb10761135f1738 100644 (file)
@@ -1,30 +1,25 @@
-error[E0391]: cyclic dependency detected
-  --> $DIR/issue-23302-3.rs:13:16
-   |
-LL | const B: i32 = A; //~ ERROR cyclic dependency detected
-   |                ^ cyclic reference
-   |
-note: the cycle begins when const checking if rvalue is promotable to static `A`...
+error[E0391]: cycle detected when const checking if rvalue is promotable to static `A`
   --> $DIR/issue-23302-3.rs:11:1
    |
-LL | const A: i32 = B;
+LL | const A: i32 = B; //~ ERROR cycle detected
    | ^^^^^^^^^^^^^^^^^
-note: ...which then requires checking which parts of `A` are promotable to static...
-  --> $DIR/issue-23302-3.rs:11:1
    |
-LL | const A: i32 = B;
-   | ^^^^^^^^^^^^^^^^^
-note: ...which then requires const checking if rvalue is promotable to static `B`...
+note: ...which requires checking which parts of `A` are promotable to static...
   --> $DIR/issue-23302-3.rs:11:16
    |
-LL | const A: i32 = B;
+LL | const A: i32 = B; //~ ERROR cycle detected
    |                ^
-note: ...which then requires checking which parts of `B` are promotable to static...
+note: ...which requires const checking if rvalue is promotable to static `B`...
   --> $DIR/issue-23302-3.rs:13:1
    |
-LL | const B: i32 = A; //~ ERROR cyclic dependency detected
+LL | const B: i32 = A;
    | ^^^^^^^^^^^^^^^^^
-   = note: ...which then again requires const checking if rvalue is promotable to static `A`, completing the cycle.
+note: ...which requires checking which parts of `B` are promotable to static...
+  --> $DIR/issue-23302-3.rs:13:16
+   |
+LL | const B: i32 = A;
+   |                ^
+   = note: ...which again requires const checking if rvalue is promotable to static `A`, completing the cycle
 
 error: aborting due to previous error
 
index 7ab4bd46ebf46d486b1b9c041107c559e68bf076..541f54ca7689148a1d4dcec6219b3e4583da68db 100644 (file)
@@ -1,30 +1,21 @@
-error[E0391]: cyclic dependency detected
-  --> $DIR/issue-36163.rs:14:9
-   |
-LL |     B = A, //~ ERROR E0391
-   |         ^ cyclic reference
-   |
-note: the cycle begins when const-evaluating `Foo::B::{{initializer}}`...
+error[E0391]: cycle detected when const-evaluating `Foo::B::{{initializer}}`
   --> $DIR/issue-36163.rs:14:9
    |
 LL |     B = A, //~ ERROR E0391
    |         ^
-note: ...which then requires processing `Foo::B::{{initializer}}`...
+   |
+note: ...which requires processing `Foo::B::{{initializer}}`...
   --> $DIR/issue-36163.rs:14:9
    |
 LL |     B = A, //~ ERROR E0391
    |         ^
-note: ...which then requires const-evaluating `A`...
-  --> $DIR/issue-36163.rs:11:1
-   |
-LL | const A: isize = Foo::B as isize;
-   | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-note: ...which then requires computing layout of `Foo`...
+note: ...which requires const-evaluating `A`...
   --> $DIR/issue-36163.rs:11:18
    |
 LL | const A: isize = Foo::B as isize;
    |                  ^^^^^^
-   = note: ...which then again requires const-evaluating `Foo::B::{{initializer}}`, completing the cycle.
+note: ...which requires computing layout of `Foo`...
+   = note: ...which again requires const-evaluating `Foo::B::{{initializer}}`, completing the cycle
 
 error: aborting due to previous error
 
index 7734b14b2ab7b008f3a10d5469f5538dae8389c7..b8be209833a6e15a3c3f1dd5cda254080c6913e0 100644 (file)
@@ -30,6 +30,7 @@ fn main() {
         *y.pointer += 1;
         //~^ ERROR cannot assign to `*y.pointer` because it is borrowed (Ast) [E0506]
         //~| ERROR cannot use `*y.pointer` because it was mutably borrowed (Mir) [E0503]
+        //~| ERROR cannot assign to `*y.pointer` because it is borrowed (Mir) [E0506]
         *z.pointer += 1;
     }
 }
index aa899c5aee9eb429ba4ad2ca36df1193bcac2f3b..cf108691a0e4f65a2034c7ca927a1a79450e00b4 100644 (file)
@@ -13,8 +13,22 @@ LL |         let z = copy_borrowed_ptr(&mut y);
    |                                   ------ borrow of `y` occurs here
 LL |         *y.pointer += 1;
    |         ^^^^^^^^^^^^^^^ use of borrowed `y`
+...
+LL |         *z.pointer += 1;
+   |         --------------- borrow later used here
 
-error: aborting due to 2 previous errors
+error[E0506]: cannot assign to `*y.pointer` because it is borrowed (Mir)
+  --> $DIR/issue-45697-1.rs:30:9
+   |
+LL |         let z = copy_borrowed_ptr(&mut y);
+   |                                   ------ borrow of `*y.pointer` occurs here
+LL |         *y.pointer += 1;
+   |         ^^^^^^^^^^^^^^^ assignment to borrowed `*y.pointer` occurs here
+...
+LL |         *z.pointer += 1;
+   |         --------------- borrow later used here
+
+error: aborting due to 3 previous errors
 
 Some errors occurred: E0503, E0506.
 For more information about an error, try `rustc --explain E0503`.
index 4e93eccd6f6496ee1f4fde63302ed376073117cc..27acc2c89f75d435060f6baa2a32e40abe7b5744 100644 (file)
@@ -30,6 +30,7 @@ fn main() {
         *y.pointer += 1;
         //~^ ERROR cannot assign to `*y.pointer` because it is borrowed (Ast) [E0506]
         //~| ERROR cannot use `*y.pointer` because it was mutably borrowed (Mir) [E0503]
+        //~| ERROR cannot assign to `*y.pointer` because it is borrowed (Mir) [E0506]
         *z.pointer += 1;
     }
 }
index babfc33b94582b7254e834f8e583295b8be1dabc..a85972fcd7a1c05b6e24577a7352cf3ba92a7ca4 100644 (file)
@@ -13,8 +13,22 @@ LL |         let z = copy_borrowed_ptr(&mut y);
    |                                   ------ borrow of `y` occurs here
 LL |         *y.pointer += 1;
    |         ^^^^^^^^^^^^^^^ use of borrowed `y`
+...
+LL |         *z.pointer += 1;
+   |         --------------- borrow later used here
 
-error: aborting due to 2 previous errors
+error[E0506]: cannot assign to `*y.pointer` because it is borrowed (Mir)
+  --> $DIR/issue-45697.rs:30:9
+   |
+LL |         let z = copy_borrowed_ptr(&mut y);
+   |                                   ------ borrow of `*y.pointer` occurs here
+LL |         *y.pointer += 1;
+   |         ^^^^^^^^^^^^^^^ assignment to borrowed `*y.pointer` occurs here
+...
+LL |         *z.pointer += 1;
+   |         --------------- borrow later used here
+
+error: aborting due to 3 previous errors
 
 Some errors occurred: E0503, E0506.
 For more information about an error, try `rustc --explain E0503`.
index bfd5bfa9f7274e5185504f7ae9b7bf913807a074..0108056bc7278265ae61aba0d7c536908bfcd6d8 100644 (file)
@@ -12,13 +12,16 @@ LL | }
 error[E0597]: `z` does not live long enough (Mir)
   --> $DIR/issue-46471-1.rs:16:9
    |
-LL |         &mut z
-   |         ^^^^^^ borrowed value does not live long enough
-LL |     };
-   |     - `z` dropped here while still borrowed
-...
-LL | }
-   | - borrowed value needs to live until here
+LL |       let y = {
+   |  _____________-
+LL | |         let mut z = 0;
+LL | |         &mut z
+   | |         ^^^^^^ borrowed value does not live long enough
+LL | |     };
+   | |     -
+   | |     |
+   | |_____borrowed value only lives until here
+   |       borrow later used here
 
 error: aborting due to 2 previous errors
 
diff --git a/src/test/ui/issue-48803.rs b/src/test/ui/issue-48803.rs
new file mode 100644 (file)
index 0000000..c6d14ac
--- /dev/null
@@ -0,0 +1,25 @@
+// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+#![feature(nll)]
+
+fn flatten<'a, 'b, T>(x: &'a &'b T) -> &'a T {
+    x
+}
+
+fn main() {
+    let mut x = "original";
+    let y = &x;
+    let z = &y;
+    let w = flatten(z);
+    x = "modified";
+    //~^ ERROR cannot assign to `x` because it is borrowed [E0506]
+    println!("{}", w); // prints "modified"
+}
diff --git a/src/test/ui/issue-48803.stderr b/src/test/ui/issue-48803.stderr
new file mode 100644 (file)
index 0000000..b37e2c0
--- /dev/null
@@ -0,0 +1,15 @@
+error[E0506]: cannot assign to `x` because it is borrowed
+  --> $DIR/issue-48803.rs:22:5
+   |
+LL |     let y = &x;
+   |             -- borrow of `x` occurs here
+...
+LL |     x = "modified";
+   |     ^^^^^^^^^^^^^^ assignment to borrowed `x` occurs here
+LL |     //~^ ERROR cannot assign to `x` because it is borrowed [E0506]
+LL |     println!("{}", w); // prints "modified"
+   |                    - borrow later used here
+
+error: aborting due to previous error
+
+For more information about this error, try `rustc --explain E0506`.
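As a counterpart to the new issue-48803 test above, a small sketch (not part of the commit) of the variant that NLL accepts: once the last use of the borrow precedes the assignment, there is no "borrow later used here" and the assignment is allowed.

fn main() {
    let mut x = "original";
    let y = &x;
    // Last use of the borrow happens here...
    println!("{}", y);
    // ...so under NLL the borrow has already ended and the
    // assignment below is not an error.
    x = "modified";
    println!("{}", x); // prints "modified"
}
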
index a5660f8b41f8da43aeeea29deefa8607dc764bf4..e25c3ccfcd980d36b67d63a0e7cb885ef5332792 100644 (file)
@@ -8,30 +8,26 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
-// The logic for parsing Kleene operators in macros has a special case to disambiguate `?`.
-// Specifically, `$(pat)?` is the ZeroOrOne operator whereas `$(pat)?+` or `$(pat)?*` are the
-// ZeroOrMore and OneOrMore operators using `?` as a separator. These tests are intended to
-// exercise that logic in the macro parser.
-//
-// Moreover, we also throw in some tests for using a separator with `?`, which is meaningless but
-// included for consistency with `+` and `*`.
-//
-// This test focuses on error cases.
+// Tests the behavior of various Kleene operators in macros with respect to `?` terminals. In
+// particular, `?` in the position of a separator and of a Kleene operator is tested.
 
 #![feature(macro_at_most_once_rep)]
 
+// should match `` and `a`
 macro_rules! foo {
     ($(a)?) => {}
 }
 
 macro_rules! baz {
-    ($(a),?) => {} // comma separator is meaningless for `?`
+    ($(a),?) => {} //~ ERROR `?` macro repetition does not allow a separator
 }
 
+// should match `+` and `a+`
 macro_rules! barplus {
     ($(a)?+) => {}
 }
 
+// should match `*` and `a*`
 macro_rules! barstar {
     ($(a)?*) => {}
 }
@@ -40,14 +36,14 @@ pub fn main() {
     foo!(a?a?a); //~ ERROR no rules expected the token `?`
     foo!(a?a); //~ ERROR no rules expected the token `?`
     foo!(a?); //~ ERROR no rules expected the token `?`
-    baz!(a?a?a); //~ ERROR no rules expected the token `?`
-    baz!(a?a); //~ ERROR no rules expected the token `?`
-    baz!(a?); //~ ERROR no rules expected the token `?`
-    baz!(a,); //~ ERROR unexpected end of macro invocation
-    baz!(a?a?a,); //~ ERROR no rules expected the token `?`
-    baz!(a?a,); //~ ERROR no rules expected the token `?`
-    baz!(a?,); //~ ERROR no rules expected the token `?`
     barplus!(); //~ ERROR unexpected end of macro invocation
-    barplus!(a?); //~ ERROR unexpected end of macro invocation
-    barstar!(a?); //~ ERROR unexpected end of macro invocation
+    barstar!(); //~ ERROR unexpected end of macro invocation
+    barplus!(a?); //~ ERROR no rules expected the token `?`
+    barplus!(a); //~ ERROR unexpected end of macro invocation
+    barstar!(a?); //~ ERROR no rules expected the token `?`
+    barstar!(a); //~ ERROR unexpected end of macro invocation
+    barplus!(+); // ok
+    barstar!(*); // ok
+    barplus!(a+); // ok
+    barstar!(a*); // ok
 }
index d382082a5758562bb4f6163ef74237c8c0fbc3af..cb1e360471cc8671969a0cb274f3dd67b721f64d 100644 (file)
@@ -1,80 +1,62 @@
+error: `?` macro repetition does not allow a separator
+  --> $DIR/macro-at-most-once-rep-ambig.rs:22:10
+   |
+LL |     ($(a),?) => {} //~ ERROR `?` macro repetition does not allow a separator
+   |          ^
+
 error: no rules expected the token `?`
-  --> $DIR/macro-at-most-once-rep-ambig.rs:40:11
+  --> $DIR/macro-at-most-once-rep-ambig.rs:36:11
    |
 LL |     foo!(a?a?a); //~ ERROR no rules expected the token `?`
    |           ^
 
 error: no rules expected the token `?`
-  --> $DIR/macro-at-most-once-rep-ambig.rs:41:11
+  --> $DIR/macro-at-most-once-rep-ambig.rs:37:11
    |
 LL |     foo!(a?a); //~ ERROR no rules expected the token `?`
    |           ^
 
 error: no rules expected the token `?`
-  --> $DIR/macro-at-most-once-rep-ambig.rs:42:11
+  --> $DIR/macro-at-most-once-rep-ambig.rs:38:11
    |
 LL |     foo!(a?); //~ ERROR no rules expected the token `?`
    |           ^
 
-error: no rules expected the token `?`
-  --> $DIR/macro-at-most-once-rep-ambig.rs:43:11
-   |
-LL |     baz!(a?a?a); //~ ERROR no rules expected the token `?`
-   |           ^
-
-error: no rules expected the token `?`
-  --> $DIR/macro-at-most-once-rep-ambig.rs:44:11
-   |
-LL |     baz!(a?a); //~ ERROR no rules expected the token `?`
-   |           ^
-
-error: no rules expected the token `?`
-  --> $DIR/macro-at-most-once-rep-ambig.rs:45:11
-   |
-LL |     baz!(a?); //~ ERROR no rules expected the token `?`
-   |           ^
-
 error: unexpected end of macro invocation
-  --> $DIR/macro-at-most-once-rep-ambig.rs:46:11
-   |
-LL |     baz!(a,); //~ ERROR unexpected end of macro invocation
-   |           ^
-
-error: no rules expected the token `?`
-  --> $DIR/macro-at-most-once-rep-ambig.rs:47:11
+  --> $DIR/macro-at-most-once-rep-ambig.rs:39:5
    |
-LL |     baz!(a?a?a,); //~ ERROR no rules expected the token `?`
-   |           ^
+LL |     barplus!(); //~ ERROR unexpected end of macro invocation
+   |     ^^^^^^^^^^^
 
-error: no rules expected the token `?`
-  --> $DIR/macro-at-most-once-rep-ambig.rs:48:11
+error: unexpected end of macro invocation
+  --> $DIR/macro-at-most-once-rep-ambig.rs:40:5
    |
-LL |     baz!(a?a,); //~ ERROR no rules expected the token `?`
-   |           ^
+LL |     barstar!(); //~ ERROR unexpected end of macro invocation
+   |     ^^^^^^^^^^^
 
 error: no rules expected the token `?`
-  --> $DIR/macro-at-most-once-rep-ambig.rs:49:11
+  --> $DIR/macro-at-most-once-rep-ambig.rs:41:15
    |
-LL |     baz!(a?,); //~ ERROR no rules expected the token `?`
-   |           ^
+LL |     barplus!(a?); //~ ERROR no rules expected the token `?`
+   |               ^
 
 error: unexpected end of macro invocation
-  --> $DIR/macro-at-most-once-rep-ambig.rs:50:5
+  --> $DIR/macro-at-most-once-rep-ambig.rs:42:14
    |
-LL |     barplus!(); //~ ERROR unexpected end of macro invocation
-   |     ^^^^^^^^^^^
+LL |     barplus!(a); //~ ERROR unexpected end of macro invocation
+   |              ^
 
-error: unexpected end of macro invocation
-  --> $DIR/macro-at-most-once-rep-ambig.rs:51:15
+error: no rules expected the token `?`
+  --> $DIR/macro-at-most-once-rep-ambig.rs:43:15
    |
-LL |     barplus!(a?); //~ ERROR unexpected end of macro invocation
+LL |     barstar!(a?); //~ ERROR no rules expected the token `?`
    |               ^
 
 error: unexpected end of macro invocation
-  --> $DIR/macro-at-most-once-rep-ambig.rs:52:15
+  --> $DIR/macro-at-most-once-rep-ambig.rs:44:14
    |
-LL |     barstar!(a?); //~ ERROR unexpected end of macro invocation
-   |               ^
+LL |     barstar!(a); //~ ERROR unexpected end of macro invocation
+   |              ^
 
-error: aborting due to 13 previous errors
+error: aborting due to 10 previous errors
 
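For context, a minimal sketch (not part of this commit) of the `?` repetition behaviour the reworked test above pins down: `$(a)?` matches zero or one `a`, and, per the new diagnostic, `?` repetition does not allow a separator. On current compilers `?` repetition is stable, so the `macro_at_most_once_rep` feature gate used in the test is no longer required.

macro_rules! maybe_a {
    // `$(a)?` accepts either nothing or a single `a`; a form like
    // `$(a),?` is rejected because `?` takes no separator.
    ($(a)?) => { "zero or one `a`" };
}

fn main() {
    println!("{}", maybe_a!());   // matches the empty case
    println!("{}", maybe_a!(a));  // matches the single `a` case
    // maybe_a!(a a); // would fail: `?` never repeats more than once
}
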
index 41c744fec6e762024430a75c82cc4ed51acc1534..1e168028c7c9a9c48f2be595d7eb6026ae445ccf 100644 (file)
@@ -22,7 +22,7 @@
 // that appear free in its type (hence, we see it before the closure's
 // "external requirements" report).
 
-// compile-flags:-Znll -Zborrowck=mir -Zverbose
+// compile-flags:-Zborrowck=mir -Zverbose
 
 #![feature(rustc_attrs)]
 
@@ -35,7 +35,7 @@ fn test() {
         let y = 22;
         let mut closure = expect_sig(|p, y| *p = y);
         //~^ ERROR does not outlive free region
-        //~| WARNING not reporting region error due to -Znll
+        //~| WARNING not reporting region error due to nll
         closure(&mut p, &y);
     }
 
index 5f84001a8fb99163b294a5158af44e90d9ec1257..d876c751a41d2aa13e1f7c5475cacc320a39496e 100644 (file)
@@ -1,4 +1,4 @@
-warning: not reporting region error due to -Znll
+warning: not reporting region error due to nll
   --> $DIR/escape-argument-callee.rs:36:50
    |
 LL |         let mut closure = expect_sig(|p, y| *p = y);
index 7e918c6431de4450e874558b91bee2001218119e..7a28cb26f3570620998ebdff319b76249e677ac9 100644 (file)
@@ -22,7 +22,7 @@
 // basically checking that the MIR type checker correctly enforces the
 // closure signature.
 
-// compile-flags:-Znll -Zborrowck=mir -Zverbose
+// compile-flags:-Zborrowck=mir -Zverbose
 
 #![feature(rustc_attrs)]
 
index 05700ae00ad4f72900ad96f2cdec06474a528f22..598839f872e01805bc1d896c3f6755754cdc8500 100644 (file)
@@ -15,7 +15,7 @@
 //
 // except that the closure does so via a second closure.
 
-// compile-flags:-Znll -Zborrowck=mir -Zverbose
+// compile-flags:-Zborrowck=mir -Zverbose
 
 #![feature(rustc_attrs)]
 
index 93d8bfafcbaa4ffed82d2b0be51f828a7d2b679b..49d31bbc139d73f0b88aca1551900018681a7e6c 100644 (file)
@@ -19,7 +19,7 @@
 // `'b`.  This relationship is propagated to the closure creator,
 // which reports an error.
 
-// compile-flags:-Znll -Zborrowck=mir -Zverbose
+// compile-flags:-Zborrowck=mir -Zverbose
 
 #![feature(rustc_attrs)]
 
index 30a6dfc5b3edd05bf737f99c8b22c69a633f4427..e7ec0b9684d2c980d785f8b7a6093e6e521a6ae0 100644 (file)
@@ -11,7 +11,7 @@
 // Test where we fail to approximate due to demanding a postdom
 // relationship between our upper bounds.
 
-// compile-flags:-Znll -Zborrowck=mir -Zverbose
+// compile-flags:-Zborrowck=mir -Zverbose
 
 #![feature(rustc_attrs)]
 
@@ -53,7 +53,7 @@ fn supply<'a, 'b, 'c>(cell_a: Cell<&'a u32>, cell_b: Cell<&'b u32>, cell_c: Cell
         |_outlives1, _outlives2, _outlives3, x, y| {
             // Only works if 'x: 'y:
             let p = x.get();
-            //~^ WARN not reporting region error due to -Znll
+            //~^ WARN not reporting region error due to nll
             //~| ERROR does not outlive free region
             demand_y(x, y, p)
         },
index 0c058e40a5086927d99222a31a43566385c060c0..ef5a31e40d445afa5c9c55a615607f66fbf8c075 100644 (file)
@@ -1,4 +1,4 @@
-warning: not reporting region error due to -Znll
+warning: not reporting region error due to nll
   --> $DIR/propagate-approximated-fail-no-postdom.rs:55:21
    |
 LL |             let p = x.get();
@@ -16,7 +16,7 @@ note: No external requirements
 LL | /         |_outlives1, _outlives2, _outlives3, x, y| {
 LL | |             // Only works if 'x: 'y:
 LL | |             let p = x.get();
-LL | |             //~^ WARN not reporting region error due to -Znll
+LL | |             //~^ WARN not reporting region error due to nll
 LL | |             //~| ERROR does not outlive free region
 LL | |             demand_y(x, y, p)
 LL | |         },
index 91128035f3d95162d81615b188c68c3ecde35611..da8ce55162f37e706ce35dbfb4cf9f4386449389 100644 (file)
@@ -22,7 +22,7 @@
 // Note: the use of `Cell` here is to introduce invariance. One less
 // variable.
 
-// compile-flags:-Znll -Zborrowck=mir -Zverbose
+// compile-flags:-Zborrowck=mir -Zverbose
 
 #![feature(rustc_attrs)]
 
@@ -54,7 +54,7 @@ fn supply<'a, 'b>(cell_a: Cell<&'a u32>, cell_b: Cell<&'b u32>) {
         //~^ ERROR lifetime mismatch
 
         // Only works if 'x: 'y:
-        demand_y(x, y, x.get()) //~ WARNING not reporting region error due to -Znll
+        demand_y(x, y, x.get()) //~ WARNING not reporting region error due to nll
     });
 }
 
index 947b95b1c53218887a257a4470b2153f3d91c855..3a3236fd16c49e03b90e7edb007867ec646b131b 100644 (file)
@@ -1,7 +1,7 @@
-warning: not reporting region error due to -Znll
+warning: not reporting region error due to nll
   --> $DIR/propagate-approximated-ref.rs:57:9
    |
-LL |         demand_y(x, y, x.get()) //~ WARNING not reporting region error due to -Znll
+LL |         demand_y(x, y, x.get()) //~ WARNING not reporting region error due to nll
    |         ^^^^^^^^^^^^^^^^^^^^^^^
 
 note: External requirements
@@ -12,7 +12,7 @@ LL |       establish_relationships(&cell_a, &cell_b, |_outlives1, _outlives2, x,
 LL | |         //~^ ERROR lifetime mismatch
 LL | |
 LL | |         // Only works if 'x: 'y:
-LL | |         demand_y(x, y, x.get()) //~ WARNING not reporting region error due to -Znll
+LL | |         demand_y(x, y, x.get()) //~ WARNING not reporting region error due to nll
 LL | |     });
    | |_____^
    |
index f210346a82a675b1f9a5062cbd063f320a56306d..84bfd6ea4f25324cb479b77ce5b01ce333e0468a 100644 (file)
@@ -12,7 +12,7 @@
 // where `'x` is bound in closure type but `'a` is free. This forces
 // us to approximate `'x` one way or the other.
 
-// compile-flags:-Znll -Zborrowck=mir -Zverbose
+// compile-flags:-Zborrowck=mir -Zverbose
 
 #![feature(rustc_attrs)]
 
@@ -29,7 +29,7 @@ fn case1() {
     let a = 0;
     let cell = Cell::new(&a);
     foo(cell, |cell_a, cell_x| {
-        //~^ WARNING not reporting region error due to -Znll
+        //~^ WARNING not reporting region error due to nll
         cell_a.set(cell_x.get()); // forces 'x: 'a, error in closure
         //~^ ERROR does not outlive free region
     })
index d39cdc34471abcf3615064c0614af3d88f117c85..6480cbe44312785885070fecd8df69f1e508e829 100644 (file)
@@ -1,4 +1,4 @@
-warning: not reporting region error due to -Znll
+warning: not reporting region error due to nll
   --> $DIR/propagate-approximated-shorter-to-static-comparing-against-free.rs:31:5
    |
 LL |     foo(cell, |cell_a, cell_x| {
@@ -15,7 +15,7 @@ note: No external requirements
    |
 LL |       foo(cell, |cell_a, cell_x| {
    |  _______________^
-LL | |         //~^ WARNING not reporting region error due to -Znll
+LL | |         //~^ WARNING not reporting region error due to nll
 LL | |         cell_a.set(cell_x.get()); // forces 'x: 'a, error in closure
 LL | |         //~^ ERROR does not outlive free region
 LL | |     })
index c66472d5ce9b1ec1060843ca0532457e56e61e1c..df715c52921a0de5033f0303daa4bb37b1dc33d6 100644 (file)
@@ -16,7 +16,7 @@
 // FIXME(#45827) Because of shortcomings in the MIR type checker,
 // these errors are not (yet) reported.
 
-// compile-flags:-Znll -Zborrowck=mir -Zverbose
+// compile-flags:-Zborrowck=mir -Zverbose
 
 #![feature(rustc_attrs)]
 
@@ -46,7 +46,7 @@ fn supply<'a, 'b>(cell_a: Cell<&'a u32>, cell_b: Cell<&'b u32>) {
         //~^ ERROR does not outlive free region
 
         // Only works if 'x: 'y:
-        demand_y(x, y, x.get()) //~ WARNING not reporting region error due to -Znll
+        demand_y(x, y, x.get()) //~ WARNING not reporting region error due to nll
     });
 }
 
index 3131142ec73bfcda7e0ff840d3ff691c66117d50..6dcc8421177d9cf37cd3b06b2ecc02e984597379 100644 (file)
@@ -1,7 +1,7 @@
-warning: not reporting region error due to -Znll
+warning: not reporting region error due to nll
   --> $DIR/propagate-approximated-shorter-to-static-no-bound.rs:49:9
    |
-LL |         demand_y(x, y, x.get()) //~ WARNING not reporting region error due to -Znll
+LL |         demand_y(x, y, x.get()) //~ WARNING not reporting region error due to nll
    |         ^^^^^^^^^^^^^^^^^^^^^^^
 
 note: External requirements
@@ -12,7 +12,7 @@ LL |       establish_relationships(&cell_a, &cell_b, |_outlives, x, y| {
 LL | |         //~^ ERROR does not outlive free region
 LL | |
 LL | |         // Only works if 'x: 'y:
-LL | |         demand_y(x, y, x.get()) //~ WARNING not reporting region error due to -Znll
+LL | |         demand_y(x, y, x.get()) //~ WARNING not reporting region error due to nll
 LL | |     });
    | |_____^
    |
@@ -31,7 +31,7 @@ LL |       establish_relationships(&cell_a, &cell_b, |_outlives, x, y| {
 LL | |         //~^ ERROR does not outlive free region
 LL | |
 LL | |         // Only works if 'x: 'y:
-LL | |         demand_y(x, y, x.get()) //~ WARNING not reporting region error due to -Znll
+LL | |         demand_y(x, y, x.get()) //~ WARNING not reporting region error due to nll
 LL | |     });
    | |_____^
 
index f4011a0e5335e8fdee25a809af6ef98ef133a545..fdbb312572f894dd3085ffe80490844f864b868e 100644 (file)
@@ -17,7 +17,7 @@
 // FIXME(#45827) Because of shortcomings in the MIR type checker,
 // these errors are not (yet) reported.
 
-// compile-flags:-Znll -Zborrowck=mir -Zverbose
+// compile-flags:-Zborrowck=mir -Zverbose
 
 #![feature(rustc_attrs)]
 
@@ -49,7 +49,7 @@ fn supply<'a, 'b>(cell_a: Cell<&'a u32>, cell_b: Cell<&'b u32>) {
         //~^ ERROR does not outlive free region
         // Only works if 'x: 'y:
         demand_y(x, y, x.get())
-        //~^ WARNING not reporting region error due to -Znll
+        //~^ WARNING not reporting region error due to nll
     });
 }
 
index 5b038653b606837d4cabf8afeb8be70ebd11bd42..1291f2e9901b0e75880cc4245891dd2c5f8e2332 100644 (file)
@@ -1,4 +1,4 @@
-warning: not reporting region error due to -Znll
+warning: not reporting region error due to nll
   --> $DIR/propagate-approximated-shorter-to-static-wrong-bound.rs:51:9
    |
 LL |         demand_y(x, y, x.get())
@@ -12,7 +12,7 @@ LL |       establish_relationships(&cell_a, &cell_b, |_outlives1, _outlives2, x,
 LL | |         //~^ ERROR does not outlive free region
 LL | |         // Only works if 'x: 'y:
 LL | |         demand_y(x, y, x.get())
-LL | |         //~^ WARNING not reporting region error due to -Znll
+LL | |         //~^ WARNING not reporting region error due to nll
 LL | |     });
    | |_____^
    |
@@ -31,7 +31,7 @@ LL |       establish_relationships(&cell_a, &cell_b, |_outlives1, _outlives2, x,
 LL | |         //~^ ERROR does not outlive free region
 LL | |         // Only works if 'x: 'y:
 LL | |         demand_y(x, y, x.get())
-LL | |         //~^ WARNING not reporting region error due to -Znll
+LL | |         //~^ WARNING not reporting region error due to nll
 LL | |     });
    | |_____^
 
index d163f304ae5b17f2509448dd3f1404e8b2aec70c..0449dc1d1a75c2846134f95c52c21993c79f43c1 100644 (file)
@@ -15,7 +15,7 @@
 // relationships. In the 'main' variant, there are a number of
 // anonymous regions as well.
 
-// compile-flags:-Znll -Zborrowck=mir -Zverbose
+// compile-flags:-Zborrowck=mir -Zverbose
 
 #![feature(rustc_attrs)]
 
@@ -47,7 +47,7 @@ fn test<'a, 'b>(cell_a: Cell<&'a u32>, cell_b: Cell<&'b u32>) {
         //~^ ERROR lifetime mismatch
 
         // Only works if 'x: 'y:
-        demand_y(outlives1, outlives2, x.get()) //~ WARNING not reporting region error due to -Znll
+        demand_y(outlives1, outlives2, x.get()) //~ WARNING not reporting region error due to nll
     });
 }
 
index 475fdd947817e4e3e7162deee01c68923799c72f..d1824a941510266033b5750f0fe6f39f4b7cfbdc 100644 (file)
@@ -1,7 +1,7 @@
-warning: not reporting region error due to -Znll
+warning: not reporting region error due to nll
   --> $DIR/propagate-approximated-val.rs:50:9
    |
-LL |         demand_y(outlives1, outlives2, x.get()) //~ WARNING not reporting region error due to -Znll
+LL |         demand_y(outlives1, outlives2, x.get()) //~ WARNING not reporting region error due to nll
    |         ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
 
 note: External requirements
@@ -12,7 +12,7 @@ LL |       establish_relationships(cell_a, cell_b, |outlives1, outlives2, x, y|
 LL | |         //~^ ERROR lifetime mismatch
 LL | |
 LL | |         // Only works if 'x: 'y:
-LL | |         demand_y(outlives1, outlives2, x.get()) //~ WARNING not reporting region error due to -Znll
+LL | |         demand_y(outlives1, outlives2, x.get()) //~ WARNING not reporting region error due to nll
 LL | |     });
    | |_____^
    |
index 1ccfa61f4ce4665b86869b19b0ab57b3476ed096..c21c824b22c3bb093bf74f2c5fda51bcd94cfc73 100644 (file)
@@ -13,7 +13,7 @@
 // need to propagate; but in fact we do because identity of free
 // regions is erased.
 
-// compile-flags:-Znll -Zborrowck=mir -Zverbose
+// compile-flags:-Zborrowck=mir -Zverbose
 // compile-pass
 
 #![feature(rustc_attrs)]
index ab4faaca756331c8f472a1c8ee271b6f18ae26a5..d6eeda881daf26b91920981ded1076cda25aadd9 100644 (file)
@@ -1,4 +1,4 @@
-warning: not reporting region error due to -Znll
+warning: not reporting region error due to nll
   --> $DIR/propagate-despite-same-free-region.rs:54:21
    |
 LL |             let p = x.get();
index eb512a3b9b1fb13eded1a71570b5c7510a9b82ac..7699d101734958122ee2eca1d296521de81ddeae 100644 (file)
@@ -17,7 +17,7 @@
 // as it knows of no relationships between `'x` and any
 // non-higher-ranked regions.
 
-// compile-flags:-Znll -Zborrowck=mir -Zverbose
+// compile-flags:-Zborrowck=mir -Zverbose
 
 #![feature(rustc_attrs)]
 
@@ -45,7 +45,7 @@ fn supply<'a, 'b>(cell_a: Cell<&'a u32>, cell_b: Cell<&'b u32>) {
     establish_relationships(&cell_a, &cell_b, |_outlives, x, y| {
         // Only works if 'x: 'y:
         demand_y(x, y, x.get())
-        //~^ WARN not reporting region error due to -Znll
+        //~^ WARN not reporting region error due to nll
         //~| ERROR does not outlive free region
     });
 }
index ce808f56b42974ee4bf4b8827ead3fe005adea46..ffae47bd081c33119a0ab55ab012ab22e6c2f188 100644 (file)
@@ -1,4 +1,4 @@
-warning: not reporting region error due to -Znll
+warning: not reporting region error due to nll
   --> $DIR/propagate-fail-to-approximate-longer-no-bounds.rs:47:9
    |
 LL |         demand_y(x, y, x.get())
@@ -17,7 +17,7 @@ LL |       establish_relationships(&cell_a, &cell_b, |_outlives, x, y| {
    |  _______________________________________________^
 LL | |         // Only works if 'x: 'y:
 LL | |         demand_y(x, y, x.get())
-LL | |         //~^ WARN not reporting region error due to -Znll
+LL | |         //~^ WARN not reporting region error due to nll
 LL | |         //~| ERROR does not outlive free region
 LL | |     });
    | |_____^
index 930742464297202f9abd7f651fa9189c11935d01..afb61b221be942394a2e011734a7a0ae1fda1bda 100644 (file)
@@ -17,7 +17,7 @@
 // as it only knows of regions that `'x` is outlived by, and none that
 // `'x` outlives.
 
-// compile-flags:-Znll -Zborrowck=mir -Zverbose
+// compile-flags:-Zborrowck=mir -Zverbose
 
 #![feature(rustc_attrs)]
 
@@ -49,7 +49,7 @@ fn supply<'a, 'b>(cell_a: Cell<&'a u32>, cell_b: Cell<&'b u32>) {
     establish_relationships(&cell_a, &cell_b, |_outlives1, _outlives2, x, y| {
         // Only works if 'x: 'y:
         demand_y(x, y, x.get())
-        //~^ WARN not reporting region error due to -Znll
+        //~^ WARN not reporting region error due to nll
         //~| ERROR does not outlive free region
     });
 }
index 547ff75bac62c5d363add30ef27f30262c2da0f0..01af756b8332c639632b9047d6083bcc0162e3e3 100644 (file)
@@ -1,4 +1,4 @@
-warning: not reporting region error due to -Znll
+warning: not reporting region error due to nll
   --> $DIR/propagate-fail-to-approximate-longer-wrong-bounds.rs:51:9
    |
 LL |         demand_y(x, y, x.get())
@@ -17,7 +17,7 @@ LL |       establish_relationships(&cell_a, &cell_b, |_outlives1, _outlives2, x,
    |  _______________________________________________^
 LL | |         // Only works if 'x: 'y:
 LL | |         demand_y(x, y, x.get())
-LL | |         //~^ WARN not reporting region error due to -Znll
+LL | |         //~^ WARN not reporting region error due to nll
 LL | |         //~| ERROR does not outlive free region
 LL | |     });
    | |_____^
index 91796355752a5eb00af3da727479fae8bfbf5bf0..7baf24f88f8fa419b35643e9304b973decd1039e 100644 (file)
@@ -14,7 +14,7 @@
 // the same `'a` for which it implements `Trait`, which can only be the `'a`
 // from the function definition.
 
-// compile-flags:-Znll -Zborrowck=mir -Zverbose
+// compile-flags:-Zborrowck=mir -Zverbose
 
 #![feature(rustc_attrs)]
 #![allow(dead_code)]
@@ -53,7 +53,7 @@ fn supply<'a, T>(value: T)
         // The latter does not hold.
 
         require(value);
-        //~^ WARNING not reporting region error due to -Znll
+        //~^ WARNING not reporting region error due to nll
     });
 }
 
index 5bdfc7e935fea99557e50fece0c83efe90ddb6fc..a8b4ed528015fbf3c9e26a92ce0c6429d51cfd8d 100644 (file)
@@ -1,4 +1,4 @@
-warning: not reporting region error due to -Znll
+warning: not reporting region error due to nll
   --> $DIR/propagate-from-trait-match.rs:55:9
    |
 LL |         require(value);
@@ -13,7 +13,7 @@ LL | |         //~^ ERROR the parameter type `T` may not live long enough
 LL | |
 LL | |         // This function call requires that
 ...  |
-LL | |         //~^ WARNING not reporting region error due to -Znll
+LL | |         //~^ WARNING not reporting region error due to nll
 LL | |     });
    | |_____^
    |
@@ -35,7 +35,7 @@ LL | |         //~^ ERROR the parameter type `T` may not live long enough
 LL | |
 LL | |         // This function call requires that
 ...  |
-LL | |         //~^ WARNING not reporting region error due to -Znll
+LL | |         //~^ WARNING not reporting region error due to nll
 LL | |     });
    | |_____^
    |
index ac21fe25bd112e5a9183bb737012f15b9d06704c..a6b2e531ac28fcf9cb0e4d75a41cef1d921aafa5 100644 (file)
 // a variety of errors from the older, AST-based machinery (notably
 // borrowck), and then we get the NLL error at the end.
 
-// compile-flags:-Znll -Zborrowck=mir -Zverbose
+// compile-flags:-Zborrowck=mir -Zverbose
 
 fn foo(x: &u32) -> &'static u32 {
     &*x
-        //~^ WARN not reporting region error due to -Znll
+        //~^ WARN not reporting region error due to nll
         //~| ERROR explicit lifetime required in the type of `x`
 }
 
index 1e93ae1ee07c6f828810d8cd9c26d9ac68656e15..a823e62d3b843f76b5184a2eeb305dfd46664a7a 100644 (file)
@@ -1,4 +1,4 @@
-warning: not reporting region error due to -Znll
+warning: not reporting region error due to nll
   --> $DIR/region-lbr-anon-does-not-outlive-static.rs:19:5
    |
 LL |     &*x
index a1be8e851851599161c10f5e79b868bede8f43d3..dedbd8df41b130ca8d771fcd71418e092cffa38e 100644 (file)
 // a variety of errors from the older, AST-based machinery (notably
 // borrowck), and then we get the NLL error at the end.
 
-// compile-flags:-Znll -Zborrowck=mir -Zverbose
+// compile-flags:-Zborrowck=mir -Zverbose
 
 fn foo<'a>(x: &'a u32) -> &'static u32 {
     &*x
-        //~^ WARN not reporting region error due to -Znll
+        //~^ WARN not reporting region error due to nll
         //~| ERROR does not outlive free region
 }
 
index ac3bf4b459fec119d7744fed078ec6f147d3e1e2..9520b446303c3fc8c06cef12e97ebb099734f082 100644 (file)
@@ -1,4 +1,4 @@
-warning: not reporting region error due to -Znll
+warning: not reporting region error due to nll
   --> $DIR/region-lbr-named-does-not-outlive-static.rs:19:5
    |
 LL |     &*x
index 00b09e2ab21ada08e8600b54eb510271993ea1c1..8598668bef50ea59002d5466e6d895976a97d7bc 100644 (file)
 // a variety of errors from the older, AST-based machinery (notably
 // borrowck), and then we get the NLL error at the end.
 
-// compile-flags:-Znll -Zborrowck=mir -Zverbose
+// compile-flags:-Zborrowck=mir -Zverbose
 
 fn foo<'a, 'b>(x: &'a u32, y: &'b u32) -> &'b u32 {
     &*x
-        //~^ WARN not reporting region error due to -Znll
+        //~^ WARN not reporting region error due to nll
         //~| ERROR lifetime mismatch
 }
 
index 3af6d7d21f75310c484546931587dd004ac63acf..415aefdeee947a97b91f71c667aa57cc82a78f64 100644 (file)
@@ -1,4 +1,4 @@
-warning: not reporting region error due to -Znll
+warning: not reporting region error due to nll
   --> $DIR/region-lbr1-does-not-outlive-ebr2.rs:19:5
    |
 LL |     &*x
index 7454a8e71f2708c7b1a1a46b8f1bc011e4b1e93a..6d2bb30980236dd42a6655a3bfdbede99de99f4b 100644 (file)
@@ -11,7 +11,7 @@
 // Basic test for free regions in the NLL code. This test does not
 // report an error because of the (implied) bound that `'b: 'a`.
 
-// compile-flags:-Znll -Zborrowck=mir -Zverbose
+// compile-flags:-Zborrowck=mir -Zverbose
 // compile-pass
 
 #![allow(warnings)]
index 754df4f2c5db624b95f6dfdab3b49da248635869..60f82ca0eefb9a9a5add36cfb845cdb9739a6bd5 100644 (file)
 // the first, but actually returns the second. This should fail within
 // the closure.
 
-// compile-flags:-Znll -Zborrowck=mir -Zverbose
+// compile-flags:-Zborrowck=mir -Zverbose
 
 #![feature(rustc_attrs)]
 
 #[rustc_regions]
 fn test() {
     expect_sig(|a, b| b); // ought to return `a`
-    //~^ WARN not reporting region error due to -Znll
+    //~^ WARN not reporting region error due to nll
     //~| ERROR does not outlive free region
 }
 
index b34f4c470df49da99fef7a622cfcc0797619d964..4d021fb545494c9a6ccb7798d7b8a64cab817fcd 100644 (file)
@@ -1,4 +1,4 @@
-warning: not reporting region error due to -Znll
+warning: not reporting region error due to nll
   --> $DIR/return-wrong-bound-region.rs:21:23
    |
 LL |     expect_sig(|a, b| b); // ought to return `a`
@@ -26,7 +26,7 @@ note: No external requirements
    |
 LL | / fn test() {
 LL | |     expect_sig(|a, b| b); // ought to return `a`
-LL | |     //~^ WARN not reporting region error due to -Znll
+LL | |     //~^ WARN not reporting region error due to nll
 LL | |     //~| ERROR does not outlive free region
 LL | | }
    | |_^
index 589d8ffd28f0b3aeeff8b301f2a36270635ec596..10ce0652d43c2f6df54b6b698ff4220d7c9c74ca 100644 (file)
@@ -11,7 +11,7 @@
 // Test that MIR borrowck and NLL analysis can handle constants of
 // arbitrary types without ICEs.
 
-// compile-flags:-Znll -Zborrowck=mir -Zverbose
+// compile-flags:-Zborrowck=mir -Zverbose
 // compile-pass
 
 const HI: &str = "hi";
index 6dcbe0a528d90a3a522defa96ea87a305a3a42f0..b357b3facf9bb4ab46070d7751cbad28cb58f517 100644 (file)
@@ -12,7 +12,7 @@
 // in the type of `p` includes the points after `&v[0]` up to (but not
 // including) the call to `use_x`. The `else` branch is not included.
 
-// compile-flags:-Znll -Zborrowck=mir
+// compile-flags:-Zborrowck=mir
 // compile-pass
 
 #![allow(warnings)]
index e5478e39fecca9e3dddf2a394371d48ea55d8237..513609316311a3e567bd1e7e7b86c36638b5e1bd 100644 (file)
@@ -13,7 +13,7 @@
 // because of destructor. (Note that the stderr also identifies this
 // destructor in the error message.)
 
-// compile-flags:-Znll -Zborrowck=mir
+// compile-flags:-Zborrowck=mir
 
 #![allow(warnings)]
 #![feature(dropck_eyepatch)]
index e5944e75e424177fad5749b7c110b00e7930d19b..728c84695eacf0e3117e8246a75ebb12b18a2c19 100644 (file)
@@ -13,7 +13,7 @@
 // a variety of errors from the older, AST-based machinery (notably
 // borrowck), and then we get the NLL error at the end.
 
-// compile-flags:-Znll -Zborrowck=compare
+// compile-flags:-Zborrowck=compare
 
 struct Map {
 }
index 272cb6510aa3ddc02ab27cf11444b502696ffb74..f3952c49a2a36fecd13ff84996782ed83cff5d39 100644 (file)
@@ -2,7 +2,7 @@ error[E0594]: cannot assign to data in a `&` reference
   --> $DIR/issue-47388.rs:18:5
    |
 LL |     let fancy_ref = &(&mut fancy);
-   |                     ------------- help: consider changing this to be a mutable reference: `&mut`
+   |                     ------------- help: consider changing this to be a mutable reference: `&mut (&mut fancy)`
 LL |     fancy_ref.num = 6; //~ ERROR E0594
    |     ^^^^^^^^^^^^^^^^^ `fancy_ref` is a `&` reference, so the data it refers to cannot be written
 
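For context, a small sketch (not part of this commit, using a hypothetical `Fancy` struct rather than the type from the test) of the E0594 situation whose suggestion text changes above: writes through a shared reference to a mutable reference are rejected, and the help now spells out the full replacement expression.

struct Fancy { num: u8 }

fn main() {
    let mut fancy = Fancy { num: 5 };
    // A `&` reference to a `&mut` reference: reads are fine, but the
    // data cannot be written through `fancy_ref`.
    let fancy_ref = &(&mut fancy);
    // fancy_ref.num = 6; // error[E0594]: cannot assign to data in a `&` reference
    println!("{}", fancy_ref.num);
}
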
index d4df2a01c8143e97a6c4a30df74c8d30be74e158..5538eca362974acc261c0e35fee9d53e8a0297ce 100644 (file)
@@ -8,7 +8,7 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
-//compile-flags: -Z emit-end-regions -Zborrowck=mir -Znll
+//compile-flags: -Z emit-end-regions -Zborrowck=mir
 
 
 #![allow(warnings)]
index 7e8a4e0ec95fc9e313f225da4128c1b6065c868f..ae815a5efe97c7f528ed287d9255156aabc68baf 100644 (file)
@@ -8,7 +8,7 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
-//compile-flags: -Z emit-end-regions -Zborrowck=mir -Z nll
+// compile-flags: -Z emit-end-regions -Zborrowck=mir
 // compile-pass
 
 #![allow(warnings)]
index 2eb90dca7026ec2182d728b9d2c6e8ed7c2f0970..00d146e0f02d6b672099fe1a164e6a378d140586 100644 (file)
@@ -8,7 +8,7 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
-//compile-flags: -Z emit-end-regions -Zborrowck=mir -Znll
+//compile-flags: -Z emit-end-regions -Zborrowck=mir
 
 #![allow(warnings)]
 
index f639d8f243f1423cfeb124a4bdd3267cd6f49b50..cd46014a7f5ca45dfbd2c1ee533f41c9c43ac9ef 100644 (file)
@@ -8,7 +8,7 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
-//compile-flags: -Z emit-end-regions -Zborrowck=mir -Znll
+//compile-flags: -Z emit-end-regions -Zborrowck=mir
 
 #![allow(warnings)]
 
index c2cc479d28e3e78a12a69a2c098ad34449d71378..9a3aca346208d72d4b330b4f40f6b51c60abca63 100644 (file)
@@ -8,7 +8,7 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
-//compile-flags: -Z emit-end-regions -Zborrowck=mir -Znll
+//compile-flags: -Z emit-end-regions -Zborrowck=mir
 
 #![allow(warnings)]
 
index 1515911fe870eb777a47e635fb7ee90e59f4f955..c1abcb434e68f8486cfdcd8742316265ec94266c 100644 (file)
@@ -8,7 +8,7 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
-// compile-flags:-Znll -Zborrowck=mir
+// compile-flags:-Zborrowck=mir
 // compile-pass
 
 #![feature(rustc_attrs)]
index 571bd9fd76e86eb011759b94af1a452c73105a34..f21127064d4c2ee1880b2e22300242ea85d55fa8 100644 (file)
@@ -8,7 +8,7 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
-// compile-flags:-Znll -Zborrowck=mir -Zverbose
+// compile-flags:-Zborrowck=mir -Zverbose
 
 #![allow(warnings)]
 
@@ -19,7 +19,7 @@ impl<'a, T> Foo<'a> for T { }
 
 fn foo<'a, T>(x: &T) -> impl Foo<'a> {
     x
-        //~^ WARNING not reporting region error due to -Znll
+        //~^ WARNING not reporting region error due to nll
         //~| ERROR explicit lifetime required in the type of `x` [E0621]
 }
 
index 92e4f72da3a10ddb0b0ae837cce397de916c10ef..f836960a28cf372d4b492945dd41ab6fcdb1deb7 100644 (file)
@@ -1,4 +1,4 @@
-warning: not reporting region error due to -Znll
+warning: not reporting region error due to nll
   --> $DIR/impl-trait-captures.rs:21:5
    |
 LL |     x
index 2e0671f1a51e84a0b1dec72ef66f33d20164b24c..182e11da082f8d367afccd1b33a8e7784caf196a 100644 (file)
@@ -8,14 +8,14 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
-// compile-flags:-Znll -Zborrowck=mir -Zverbose
+// compile-flags:-Zborrowck=mir -Zverbose
 
 #![allow(warnings)]
 
 use std::fmt::Debug;
 
 fn no_region<'a, T>(x: Box<T>) -> impl Debug + 'a
-    //~^ WARNING not reporting region error due to -Znll
+    //~^ WARNING not reporting region error due to nll
 where
     T: Debug,
 {
@@ -31,7 +31,7 @@ fn correct_region<'a, T>(x: Box<T>) -> impl Debug + 'a
 }
 
 fn wrong_region<'a, 'b, T>(x: Box<T>) -> impl Debug + 'a
-    //~^ WARNING not reporting region error due to -Znll
+    //~^ WARNING not reporting region error due to nll
 where
     T: 'b + Debug,
 {
index 2b90d53774e6515df02bb16b962063ead42c0a6c..50b80282e6241daae24f6a5e1093ab0344359f4f 100644 (file)
@@ -1,10 +1,10 @@
-warning: not reporting region error due to -Znll
+warning: not reporting region error due to nll
   --> $DIR/impl-trait-outlives.rs:17:35
    |
 LL | fn no_region<'a, T>(x: Box<T>) -> impl Debug + 'a
    |                                   ^^^^^^^^^^^^^^^
 
-warning: not reporting region error due to -Znll
+warning: not reporting region error due to nll
   --> $DIR/impl-trait-outlives.rs:33:42
    |
 LL | fn wrong_region<'a, 'b, T>(x: Box<T>) -> impl Debug + 'a
index 0ec6d7b74ad5add6c7f9948d3d2babe23347e9c2..d8f077467d9fcfca2bb4f030aa91974c7a8ec8df 100644 (file)
@@ -8,7 +8,7 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
-// compile-flags:-Znll -Zborrowck=mir -Zverbose
+// compile-flags:-Zborrowck=mir -Zverbose
 
 // Test that we can deduce when projections like `T::Item` outlive the
 // function body. Test that this does not imply that `T: 'a` holds.
@@ -43,7 +43,7 @@ fn invoke1<'a, T>(x: Cell<&'a Option<T>>)
 #[rustc_errors]
 fn generic2<T: Iterator>(value: T) {
     twice(value, |value_ref, item| invoke2(value_ref, item));
-    //~^ WARNING not reporting region error due to -Znll
+    //~^ WARNING not reporting region error due to nll
     //~| ERROR the parameter type `T` may not live long enough
 }
 
index fa53967ed3aca652e207e6b04112f3380d4a9000..0a2bd3247655ae019783391c7b618567e64ffd21 100644 (file)
@@ -1,4 +1,4 @@
-warning: not reporting region error due to -Znll
+warning: not reporting region error due to nll
   --> $DIR/projection-implied-bounds.rs:45:36
    |
 LL |     twice(value, |value_ref, item| invoke2(value_ref, item));
index 0493bd1ea0d9cce01b3602e5332330e275f8cdaf..7b3ed6a94fcbb642951a69d156517a304f03c3fd 100644 (file)
@@ -8,7 +8,7 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
-// compile-flags:-Znll -Zborrowck=mir -Zverbose
+// compile-flags:-Zborrowck=mir -Zverbose
 
 // Tests closures that propagate an outlives relationship to their
 // creator where the subject is a projection with no regions (`<T as
@@ -34,7 +34,7 @@ fn no_region<'a, T>(x: Box<T>) -> Box<dyn Anything + 'a>
     T: Iterator,
 {
     with_signature(x, |mut y| Box::new(y.next()))
-    //~^ WARNING not reporting region error due to -Znll
+    //~^ WARNING not reporting region error due to nll
     //~| ERROR the associated type `<T as std::iter::Iterator>::Item` may not live long enough
 }
 
@@ -52,7 +52,7 @@ fn wrong_region<'a, 'b, T>(x: Box<T>) -> Box<dyn Anything + 'a>
     T: 'b + Iterator,
 {
     with_signature(x, |mut y| Box::new(y.next()))
-    //~^ WARNING not reporting region error due to -Znll
+    //~^ WARNING not reporting region error due to nll
     //~| ERROR the associated type `<T as std::iter::Iterator>::Item` may not live long enough
 }
 
index bcdf984f65a8a8db6889a79cd8401da26dcb23c3..0efbbdff12a3f25627b881adbf3d6f67d8821da6 100644 (file)
@@ -1,10 +1,10 @@
-warning: not reporting region error due to -Znll
+warning: not reporting region error due to nll
   --> $DIR/projection-no-regions-closure.rs:36:31
    |
 LL |     with_signature(x, |mut y| Box::new(y.next()))
    |                               ^^^^^^^^^^^^^^^^^^
 
-warning: not reporting region error due to -Znll
+warning: not reporting region error due to nll
   --> $DIR/projection-no-regions-closure.rs:54:31
    |
 LL |     with_signature(x, |mut y| Box::new(y.next()))
index 5f2e84e247a3c925911b708fabe438d96c73685c..32b73a51e11a527c198893dd30b74ad114270e53 100644 (file)
@@ -8,7 +8,7 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
-// compile-flags:-Znll -Zborrowck=mir -Zverbose
+// compile-flags:-Zborrowck=mir -Zverbose
 
 #![allow(warnings)]
 #![feature(dyn_trait)]
@@ -22,7 +22,7 @@ fn no_region<'a, T>(mut x: T) -> Box<dyn Anything + 'a>
     T: Iterator,
 {
     Box::new(x.next())
-    //~^ WARNING not reporting region error due to -Znll
+    //~^ WARNING not reporting region error due to nll
     //~| the associated type `<T as std::iter::Iterator>::Item` may not live long enough
 }
 
@@ -38,7 +38,7 @@ fn wrong_region<'a, 'b, T>(mut x: T) -> Box<dyn Anything + 'a>
     T: 'b + Iterator,
 {
     Box::new(x.next())
-    //~^ WARNING not reporting region error due to -Znll
+    //~^ WARNING not reporting region error due to nll
     //~| the associated type `<T as std::iter::Iterator>::Item` may not live long enough
 }
 
index 95851e7edc70c62bfb2ba2c1f0c13f177f84f5cf..b2c5f28268db893382fbc78a9741a9cfe5e8c15e 100644 (file)
@@ -1,10 +1,10 @@
-warning: not reporting region error due to -Znll
+warning: not reporting region error due to nll
   --> $DIR/projection-no-regions-fn.rs:24:5
    |
 LL |     Box::new(x.next())
    |     ^^^^^^^^^^^^^^^^^^
 
-warning: not reporting region error due to -Znll
+warning: not reporting region error due to nll
   --> $DIR/projection-no-regions-fn.rs:40:5
    |
 LL |     Box::new(x.next())
index 9a5e04deddfc92ce5a20265c30446027c0047c49..cfe2880bfed476795ff5c84470c3f70444de5362 100644 (file)
@@ -22,7 +22,7 @@
 //
 // Ensuring that both `T: 'a` and `'b: 'a` holds does work (`elements_outlive`).
 
-// compile-flags:-Znll -Zborrowck=mir -Zverbose
+// compile-flags:-Zborrowck=mir -Zverbose
 
 #![allow(warnings)]
 #![feature(dyn_trait)]
@@ -54,7 +54,7 @@ fn no_relationships_late<'a, 'b, T>(cell: Cell<&'a ()>, t: T)
     T: Anything<'b>,
 {
     with_signature(cell, t, |cell, t| require(cell, t));
-    //~^ WARNING not reporting region error due to -Znll
+    //~^ WARNING not reporting region error due to nll
     //~| ERROR the parameter type `T` may not live long enough
     //~| ERROR does not outlive free region
 }
@@ -66,7 +66,7 @@ fn no_relationships_early<'a, 'b, T>(cell: Cell<&'a ()>, t: T)
     'a: 'a,
 {
     with_signature(cell, t, |cell, t| require(cell, t));
-    //~^ WARNING not reporting region error due to -Znll
+    //~^ WARNING not reporting region error due to nll
     //~| ERROR the parameter type `T` may not live long enough
     //~| ERROR does not outlive free region
 }
@@ -88,7 +88,7 @@ fn projection_outlives<'a, 'b, T>(cell: Cell<&'a ()>, t: T)
     // can do better here with a more involved verification step.
 
     with_signature(cell, t, |cell, t| require(cell, t));
-    //~^ WARNING not reporting region error due to -Znll
+    //~^ WARNING not reporting region error due to nll
     //~| ERROR the parameter type `T` may not live long enough
     //~| ERROR free region `ReEarlyBound(1, 'b)` does not outlive free region `ReEarlyBound(0, 'a)`
 }
index aa45cf187010625eeb1ed76697e1c560eae44054..0d5a2dc7c559855b5138e8e9f2b3767bbe0fe48b 100644 (file)
@@ -1,16 +1,16 @@
-warning: not reporting region error due to -Znll
+warning: not reporting region error due to nll
   --> $DIR/projection-one-region-closure.rs:56:39
    |
 LL |     with_signature(cell, t, |cell, t| require(cell, t));
    |                                       ^^^^^^^
 
-warning: not reporting region error due to -Znll
+warning: not reporting region error due to nll
   --> $DIR/projection-one-region-closure.rs:68:39
    |
 LL |     with_signature(cell, t, |cell, t| require(cell, t));
    |                                       ^^^^^^^
 
-warning: not reporting region error due to -Znll
+warning: not reporting region error due to nll
   --> $DIR/projection-one-region-closure.rs:90:39
    |
 LL |     with_signature(cell, t, |cell, t| require(cell, t));
index 232025b57355cf98f5037d52e371b6e1e1358f90..16e91f2708fe94495627d0422a670af279e32d78 100644 (file)
@@ -14,7 +14,7 @@
 // case, the best way to satisfy the trait bound is to show that `'b:
 // 'a`, which can be done in various ways.
 
-// compile-flags:-Znll -Zborrowck=mir -Zverbose
+// compile-flags:-Zborrowck=mir -Zverbose
 
 #![allow(warnings)]
 #![feature(dyn_trait)]
@@ -46,7 +46,7 @@ fn no_relationships_late<'a, 'b, T>(cell: Cell<&'a ()>, t: T)
     T: Anything<'b>,
 {
     with_signature(cell, t, |cell, t| require(cell, t));
-    //~^ WARNING not reporting region error due to -Znll
+    //~^ WARNING not reporting region error due to nll
     //~| ERROR does not outlive free region
 }
 
@@ -57,7 +57,7 @@ fn no_relationships_early<'a, 'b, T>(cell: Cell<&'a ()>, t: T)
     'a: 'a,
 {
     with_signature(cell, t, |cell, t| require(cell, t));
-    //~^ WARNING not reporting region error due to -Znll
+    //~^ WARNING not reporting region error due to nll
     //~| ERROR does not outlive free region
 }
 
@@ -78,7 +78,7 @@ fn projection_outlives<'a, 'b, T>(cell: Cell<&'a ()>, t: T)
     // can do better here with a more involved verification step.
 
     with_signature(cell, t, |cell, t| require(cell, t));
-    //~^ WARNING not reporting region error due to -Znll
+    //~^ WARNING not reporting region error due to nll
     //~| ERROR does not outlive free region
 }
 
index 7a8010ad8e0ea94f7c2d8503df2e2a95b312f4b7..d4aca8380b469f2812d143a79db70f17cf7a4932 100644 (file)
@@ -1,16 +1,16 @@
-warning: not reporting region error due to -Znll
+warning: not reporting region error due to nll
   --> $DIR/projection-one-region-trait-bound-closure.rs:48:39
    |
 LL |     with_signature(cell, t, |cell, t| require(cell, t));
    |                                       ^^^^^^^
 
-warning: not reporting region error due to -Znll
+warning: not reporting region error due to nll
   --> $DIR/projection-one-region-trait-bound-closure.rs:59:39
    |
 LL |     with_signature(cell, t, |cell, t| require(cell, t));
    |                                       ^^^^^^^
 
-warning: not reporting region error due to -Znll
+warning: not reporting region error due to nll
   --> $DIR/projection-one-region-trait-bound-closure.rs:80:39
    |
 LL |     with_signature(cell, t, |cell, t| require(cell, t));
index a1bdd1b89f9a96c06e9eab8daa8e6d8a0135efce..0d42636c844a55acfbe54bc65a8394aa5b7cc3ce 100644 (file)
@@ -12,7 +12,7 @@
 // outlive `'static`. In this case, we don't get any errors, and in fact
 // we don't even propagate constraints from the closures to the callers.
 
-// compile-flags:-Znll -Zborrowck=mir -Zverbose
+// compile-flags:-Zborrowck=mir -Zverbose
 // compile-pass
 
 #![allow(warnings)]
index e3cee00ed4eb3b7539928eb4bdfb31403fa13d10..7c8ef140a29074296ec8cb6f885e12a343fd8090 100644 (file)
@@ -15,7 +15,7 @@
 // the trait bound, and hence we propagate it to the caller as a type
 // test.
 
-// compile-flags:-Znll -Zborrowck=mir -Zverbose
+// compile-flags:-Zborrowck=mir -Zverbose
 
 #![allow(warnings)]
 #![feature(dyn_trait)]
@@ -47,7 +47,7 @@ fn no_relationships_late<'a, 'b, 'c, T>(cell: Cell<&'a ()>, t: T)
     T: Anything<'b, 'c>,
 {
     with_signature(cell, t, |cell, t| require(cell, t));
-    //~^ WARNING not reporting region error due to -Znll
+    //~^ WARNING not reporting region error due to nll
     //~| ERROR associated type `<T as Anything<'_#5r, '_#6r>>::AssocType` may not live long enough
 }
 
@@ -58,7 +58,7 @@ fn no_relationships_early<'a, 'b, 'c, T>(cell: Cell<&'a ()>, t: T)
     'a: 'a,
 {
     with_signature(cell, t, |cell, t| require(cell, t));
-    //~^ WARNING not reporting region error due to -Znll
+    //~^ WARNING not reporting region error due to nll
     //~| ERROR associated type `<T as Anything<'_#6r, '_#7r>>::AssocType` may not live long enough
 }
 
@@ -79,7 +79,7 @@ fn projection_outlives<'a, 'b, 'c, T>(cell: Cell<&'a ()>, t: T)
     // can do better here with a more involved verification step.
 
     with_signature(cell, t, |cell, t| require(cell, t));
-    //~^ WARNING not reporting region error due to -Znll
+    //~^ WARNING not reporting region error due to nll
     //~| ERROR associated type `<T as Anything<'_#6r, '_#7r>>::AssocType` may not live long enough
 }
 
@@ -107,7 +107,7 @@ fn two_regions<'a, 'b, T>(cell: Cell<&'a ()>, t: T)
     T: Anything<'b, 'b>,
 {
     with_signature(cell, t, |cell, t| require(cell, t));
-    //~^ WARNING not reporting region error due to -Znll
+    //~^ WARNING not reporting region error due to nll
     //~| ERROR does not outlive free region
 }
 
index c7cbdaec3395c18174a963744b5a77e2c66ee992..7e36e467e4eba32ac700b48afea06ddae3016521 100644 (file)
@@ -1,22 +1,22 @@
-warning: not reporting region error due to -Znll
+warning: not reporting region error due to nll
   --> $DIR/projection-two-region-trait-bound-closure.rs:49:39
    |
 LL |     with_signature(cell, t, |cell, t| require(cell, t));
    |                                       ^^^^^^^
 
-warning: not reporting region error due to -Znll
+warning: not reporting region error due to nll
   --> $DIR/projection-two-region-trait-bound-closure.rs:60:39
    |
 LL |     with_signature(cell, t, |cell, t| require(cell, t));
    |                                       ^^^^^^^
 
-warning: not reporting region error due to -Znll
+warning: not reporting region error due to nll
   --> $DIR/projection-two-region-trait-bound-closure.rs:81:39
    |
 LL |     with_signature(cell, t, |cell, t| require(cell, t));
    |                                       ^^^^^^^
 
-warning: not reporting region error due to -Znll
+warning: not reporting region error due to nll
   --> $DIR/projection-two-region-trait-bound-closure.rs:109:39
    |
 LL |     with_signature(cell, t, |cell, t| require(cell, t));
index 423747a6bd6cbb927aa271f8d9739d93607ec1c4..80b42c29563f1b5fb579a174117817032412f924 100644 (file)
@@ -8,7 +8,7 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
-// compile-flags:-Znll -Zborrowck=mir -Zverbose
+// compile-flags:-Zborrowck=mir -Zverbose
 
 #![allow(warnings)]
 #![feature(dyn_trait)]
index 997cc57cfa28c48269ed8fc1bedd01df7c10acd1..c8feaddff9382ddf2d335c5e1954445256690ed4 100644 (file)
@@ -1,16 +1,16 @@
-warning: not reporting region error due to -Znll
+warning: not reporting region error due to nll
   --> $DIR/ty-param-closure-approximate-lower-bound.rs:35:31
    |
 LL |     twice(cell, value, |a, b| invoke(a, b));
    |                               ^^^^^^^^^^^^
 
-warning: not reporting region error due to -Znll
+warning: not reporting region error due to nll
   --> $DIR/ty-param-closure-approximate-lower-bound.rs:43:31
    |
 LL |     twice(cell, value, |a, b| invoke(a, b));
    |                               ^^^^^^
 
-warning: not reporting region error due to -Znll
+warning: not reporting region error due to nll
   --> $DIR/ty-param-closure-approximate-lower-bound.rs:43:31
    |
 LL |     twice(cell, value, |a, b| invoke(a, b));
index 95a483b3c355d00058fe1fd36f9c1cb65949c6ef..50763a1d5080808c6572da682d43073cbcf30cdf 100644 (file)
@@ -8,7 +8,7 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
-// compile-flags:-Znll -Zborrowck=mir -Zverbose
+// compile-flags:-Zborrowck=mir -Zverbose
 
 #![allow(warnings)]
 #![feature(dyn_trait)]
@@ -35,7 +35,7 @@ fn no_region<'a, T>(x: Box<T>) -> Box<dyn Debug + 'a>
     // `'a` (and subsequently reports an error).
 
     with_signature(x, |y| y)
-    //~^ WARNING not reporting region error due to -Znll
+    //~^ WARNING not reporting region error due to nll
     //~| ERROR the parameter type `T` may not live long enough
 }
 
@@ -51,7 +51,7 @@ fn wrong_region<'a, 'b, T>(x: Box<T>) -> Box<Debug + 'a>
     T: 'b + Debug,
 {
     x
-    //~^ WARNING not reporting region error due to -Znll
+    //~^ WARNING not reporting region error due to nll
     //~| ERROR the parameter type `T` may not live long enough
 }
 
index ba08bc1ff7b43c3c513d8e94fe4763bc3abac49d..500595e0c5dcaf21e32d80fe80168cd661a86e94 100644 (file)
@@ -1,10 +1,10 @@
-warning: not reporting region error due to -Znll
+warning: not reporting region error due to nll
   --> $DIR/ty-param-closure-outlives-from-return-type.rs:37:27
    |
 LL |     with_signature(x, |y| y)
    |                           ^
 
-warning: not reporting region error due to -Znll
+warning: not reporting region error due to nll
   --> $DIR/ty-param-closure-outlives-from-return-type.rs:53:5
    |
 LL |     x
index 1149f250a46e2f53b7deacba724f8f295033eb40..b70fc2b2ec4b40d6f0ee38e911a7e06afd4716b8 100644 (file)
@@ -12,7 +12,7 @@
 // `correct_region` for an explanation of how this test is setup; it's
 // somewhat intricate.
 
-// compile-flags:-Znll -Zborrowck=mir -Zverbose
+// compile-flags:-Zborrowck=mir -Zverbose
 
 #![allow(warnings)]
 #![feature(dyn_trait)]
@@ -43,7 +43,7 @@ fn no_region<'a, T>(a: Cell<&'a ()>, b: T) {
         // function, there is no where clause *anywhere*, and hence we
         // get an error (but reported by the closure creator).
         require(&x, &y)
-        //~^ WARNING not reporting region error due to -Znll
+        //~^ WARNING not reporting region error due to nll
     })
 }
 
@@ -77,7 +77,7 @@ fn wrong_region<'a, 'b, T>(a: Cell<&'a ()>, b: T)
         //~^ ERROR the parameter type `T` may not live long enough
         // See `correct_region`
         require(&x, &y)
-        //~^ WARNING not reporting region error due to -Znll
+        //~^ WARNING not reporting region error due to nll
     })
 }
 
index fcdb0b0a4a9fea15bbf8034329a2a8220da120e7..4d8a66ba8e1c4cea182584b02fc2fda762a9383b 100644 (file)
@@ -1,10 +1,10 @@
-warning: not reporting region error due to -Znll
+warning: not reporting region error due to nll
   --> $DIR/ty-param-closure-outlives-from-where-clause.rs:45:9
    |
 LL |         require(&x, &y)
    |         ^^^^^^^
 
-warning: not reporting region error due to -Znll
+warning: not reporting region error due to nll
   --> $DIR/ty-param-closure-outlives-from-where-clause.rs:79:9
    |
 LL |         require(&x, &y)
@@ -19,7 +19,7 @@ LL | |         //~^ ERROR the parameter type `T` may not live long enough
 LL | |         //
 LL | |         // See `correct_region`, which explains the point of this
 ...  |
-LL | |         //~^ WARNING not reporting region error due to -Znll
+LL | |         //~^ WARNING not reporting region error due to nll
 LL | |     })
    | |_____^
    |
@@ -40,7 +40,7 @@ LL | |         //~^ ERROR the parameter type `T` may not live long enough
 LL | |         //
 LL | |         // See `correct_region`, which explains the point of this
 ...  |
-LL | |         //~^ WARNING not reporting region error due to -Znll
+LL | |         //~^ WARNING not reporting region error due to nll
 LL | |     })
    | |_____^
    |
@@ -109,7 +109,7 @@ LL |       with_signature(a, b, |x, y| {
 LL | |         //~^ ERROR the parameter type `T` may not live long enough
 LL | |         // See `correct_region`
 LL | |         require(&x, &y)
-LL | |         //~^ WARNING not reporting region error due to -Znll
+LL | |         //~^ WARNING not reporting region error due to nll
 LL | |     })
    | |_____^
    |
@@ -130,7 +130,7 @@ LL |       with_signature(a, b, |x, y| {
 LL | |         //~^ ERROR the parameter type `T` may not live long enough
 LL | |         // See `correct_region`
 LL | |         require(&x, &y)
-LL | |         //~^ WARNING not reporting region error due to -Znll
+LL | |         //~^ WARNING not reporting region error due to nll
 LL | |     })
    | |_____^
    |
index e66c1853b64b9167e73a39b6e8c9f20d2c2ce0da..fb4ea63f8532cd9c24d9d63f52c3c0e40f03d6bd 100644 (file)
@@ -8,7 +8,7 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
-// compile-flags:-Znll -Zborrowck=mir
+// compile-flags:-Zborrowck=mir
 
 // Test that we assume that universal types like `T` outlive the
 // function body.
@@ -28,7 +28,7 @@ fn region_within_body<T>(t: T) {
 // Error here, because T: 'a is not satisfied.
 fn region_static<'a, T>(cell: Cell<&'a usize>, t: T) {
     outlives(cell, t)
-    //~^ WARNING not reporting region error due to -Znll
+    //~^ WARNING not reporting region error due to nll
     //~| ERROR the parameter type `T` may not live long enough
 }
 
index 34ed709a2730e0afae779688e78596601412e3e9..0596861e67b572b8bbf67e9b10a15d9d3c394bdf 100644 (file)
@@ -1,4 +1,4 @@
-warning: not reporting region error due to -Znll
+warning: not reporting region error due to nll
   --> $DIR/ty-param-fn-body.rs:30:5
    |
 LL |     outlives(cell, t)
index aa3a03afa35cdbfd2acb818366c050389ad568d0..42d662e14193ccbce257bdb8ea6da923b8bd9366 100644 (file)
@@ -8,7 +8,7 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
-// compile-flags:-Znll -Zborrowck=mir
+// compile-flags:-Zborrowck=mir
 
 #![allow(warnings)]
 #![feature(dyn_trait)]
@@ -20,7 +20,7 @@ fn no_region<'a, T>(x: Box<T>) -> Box<Debug + 'a>
     T: Debug,
 {
     x
-    //~^ WARNING not reporting region error due to -Znll
+    //~^ WARNING not reporting region error due to nll
     //~| the parameter type `T` may not live long enough
 }
 
@@ -36,7 +36,7 @@ fn wrong_region<'a, 'b, T>(x: Box<T>) -> Box<Debug + 'a>
     T: 'b + Debug,
 {
     x
-    //~^ WARNING not reporting region error due to -Znll
+    //~^ WARNING not reporting region error due to nll
     //~| the parameter type `T` may not live long enough
 }
 
index 98ccfc52029ffbe9dc00f94049978379284411dd..0d09cac8c38513da8abfeffbcdd91ffbb5a16c6a 100644 (file)
@@ -1,10 +1,10 @@
-warning: not reporting region error due to -Znll
+warning: not reporting region error due to nll
   --> $DIR/ty-param-fn.rs:22:5
    |
 LL |     x
    |     ^
 
-warning: not reporting region error due to -Znll
+warning: not reporting region error due to nll
   --> $DIR/ty-param-fn.rs:38:5
    |
 LL |     x
index d4ae9b20e56ff21e15cc3dec621d9a7fffb742fd..51927d353ecc49ce12edd8bbabebd870ae716925 100644 (file)
@@ -8,7 +8,7 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
-// compile-flags:-Znll -Zborrowck=mir -Zverbose
+// compile-flags:-Zborrowck=mir -Zverbose
 // compile-pass
 
 // Test that we assume that universal types like `T` outlive the
index 34f8a0a48431c8f36d3205dcf617ad86e36d6973..5ae4ce1f7ee302e54740282d5ec0a2cb9242e9c0 100644 (file)
@@ -13,6 +13,6 @@ pub trait ToNbt<T> {
 }
 
 impl ToNbt<Self> {}
-//~^ ERROR cyclic dependency detected
+//~^ ERROR cycle detected
 
 fn main() {}
index 63b7ab78b13e0ef9a5d6b8345180d72c832e2e63..d25a072fe0a886bdec4a57bcd6a88d655b1aef88 100644 (file)
@@ -1,15 +1,10 @@
-error[E0391]: cyclic dependency detected
+error[E0391]: cycle detected when processing `<impl at $DIR/issue-23305.rs:15:1: 15:20>`
   --> $DIR/issue-23305.rs:15:12
    |
 LL | impl ToNbt<Self> {}
-   |            ^^^^ cyclic reference
+   |            ^^^^
    |
-note: the cycle begins when processing `<impl at $DIR/issue-23305.rs:15:1: 15:20>`...
-  --> $DIR/issue-23305.rs:15:1
-   |
-LL | impl ToNbt<Self> {}
-   | ^^^^^^^^^^^^^^^^
-   = note: ...which then again requires processing `<impl at $DIR/issue-23305.rs:15:1: 15:20>`, completing the cycle.
+   = note: ...which again requires processing `<impl at $DIR/issue-23305.rs:15:1: 15:20>`, completing the cycle
 
 error: aborting due to previous error
 
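The `issue-23305` change above only tracks a rewording of the E0391 cycle diagnostic. For context, a self-contained version of the offending code looks roughly like this sketch (the trait method is a hypothetical stand-in; only the `impl ToNbt<Self>` line matters):

    pub trait ToNbt<T> {
        // Hypothetical method; the cycle comes from the impl below, not from here.
        fn to_nbt(&self) -> T;
    }

    // `Self` in this bare impl's trait reference names the impl's own self type,
    // so computing that type requires processing the impl itself; rustc now words
    // this as "cycle detected when processing `<impl ...>`".
    impl ToNbt<Self> {}

    fn main() {}
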
diff --git a/src/test/ui/target-feature-gate.rs b/src/test/ui/target-feature-gate.rs
new file mode 100644 (file)
index 0000000..69208f1
--- /dev/null
@@ -0,0 +1,31 @@
+// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// ignore-arm
+// ignore-aarch64
+// ignore-wasm
+// ignore-emscripten
+// gate-test-sse4a_target_feature
+// gate-test-powerpc_target_feature
+// gate-test-avx512_target_feature
+// gate-test-tbm_target_feature
+// gate-test-arm_target_feature
+// gate-test-aarch64_target_feature
+// gate-test-hexagon_target_feature
+// gate-test-mips_target_feature
+// gate-test-mmx_target_feature
+// min-llvm-version 6.0
+
+#[target_feature(enable = "avx512bw")]
+//~^ ERROR: currently unstable
+unsafe fn foo() {
+}
+
+fn main() {}
diff --git a/src/test/ui/target-feature-gate.stderr b/src/test/ui/target-feature-gate.stderr
new file mode 100644 (file)
index 0000000..dc5e174
--- /dev/null
@@ -0,0 +1,11 @@
+error[E0658]: the target feature `avx512bw` is currently unstable
+  --> $DIR/target-feature-gate.rs:26:18
+   |
+LL | #[target_feature(enable = "avx512bw")]
+   |                  ^^^^^^^^^^^^^^^^^^^
+   |
+   = help: add #![feature(avx512_target_feature)] to the crate attributes to enable
+
+error: aborting due to previous error
+
+For more information about this error, try `rustc --explain E0658`.
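
The new `target-feature-gate` test checks that whitelisted but still-unstable target features are rejected when their feature gate is missing. Going by the help text in the expected stderr, opting in would look roughly like the sketch below; it assumes a nightly compiler and an x86/x86-64 target, and is not part of the diff:

    // Nightly-only: this is the gate named in the help message above.
    #![feature(avx512_target_feature)]

    #[target_feature(enable = "avx512bw")]
    unsafe fn uses_avx512bw() {
        // Body elided. Callers must verify at runtime (e.g. with cpuid-based
        // feature detection) that the CPU actually supports AVX-512BW.
    }

    fn main() {}
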
index eb83ee724c785804d5f6332164ae28cdcbc57d39..0edd51ba779ac4eb0009a15ac3f3aa3a58f654fb 100644 (file)
@@ -19,7 +19,7 @@
 #![feature(target_feature)]
 
 #[target_feature = "+sse2"]
-//~^ WARN: deprecated
+//~^ ERROR: must be of the form
 #[target_feature(enable = "foo")]
 //~^ ERROR: not valid for this target
 #[target_feature(bar)]
index b5e650eaf9ac4373acd8cb2284fa1c5665ef261b..ed86687bb2fccbec23171b60c6a03989041a6a84 100644 (file)
@@ -1,4 +1,4 @@
-warning: #[target_feature = ".."] is deprecated and will eventually be removed, use #[target_feature(enable = "..")] instead
+error: #[target_feature] attribute must be of the form #[target_feature(..)]
   --> $DIR/target-feature-wrong.rs:21:1
    |
 LL | #[target_feature = "+sse2"]
@@ -43,5 +43,5 @@ error: cannot use #[inline(always)] with #[target_feature]
 LL | #[inline(always)]
    | ^^^^^^^^^^^^^^^^^
 
-error: aborting due to 6 previous errors
+error: aborting due to 7 previous errors
 
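The last pair of hunks turn the attribute's old key-value spelling into a hard error rather than a deprecation warning, which is why this test now aborts after 7 errors instead of 6. The accepted spelling is the list form already used elsewhere in the test; a tiny sketch (stable `sse2` chosen so the feature name itself is valid on x86):

    // Rejected after this change: #[target_feature = "+sse2"]
    // Accepted form; note the feature name is written without the old leading `+`.
    // On the nightly of this era the #![feature(target_feature)] gate seen at the
    // top of the test is also required.
    #[target_feature(enable = "sse2")]
    unsafe fn uses_sse2() {}

    fn main() {}
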
index 80cab96434ba784fa2679228d56a0c6d390b54e0..ae4f4aa4046099c89e7192e0d698553ac469de60 100644 (file)
@@ -283,6 +283,8 @@ fn make_absolute(path: PathBuf) -> PathBuf {
         ),
     };
 
+    let src_base = opt_path(matches, "src-base");
+    let run_ignored = matches.opt_present("ignored");
     Config {
         compile_lib_path: make_absolute(opt_path(matches, "compile-lib-path")),
         run_lib_path: make_absolute(opt_path(matches, "run-lib-path")),
@@ -293,7 +295,7 @@ fn make_absolute(path: PathBuf) -> PathBuf {
         valgrind_path: matches.opt_str("valgrind-path"),
         force_valgrind: matches.opt_present("force-valgrind"),
         llvm_filecheck: matches.opt_str("llvm-filecheck").map(|s| PathBuf::from(&s)),
-        src_base: opt_path(matches, "src-base"),
+        src_base,
         build_base: opt_path(matches, "build-base"),
         stage_id: matches.opt_str("stage-id").unwrap(),
         mode: matches
@@ -301,7 +303,7 @@ fn make_absolute(path: PathBuf) -> PathBuf {
             .unwrap()
             .parse()
             .expect("invalid mode"),
-        run_ignored: matches.opt_present("ignored"),
+        run_ignored,
         filter: matches.free.first().cloned(),
         filter_exact: matches.opt_present("exact"),
         logfile: matches.opt_str("logfile").map(|s| PathBuf::from(&s)),
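
The two hunks above in compiletest's option handling are a small refactor: the `src-base` and `ignored` options are looked up once into locals, and the `Config` literal then uses field-init shorthand. In miniature (field names copied from the hunk, everything else is a stand-in):

    use std::path::PathBuf;

    struct Config {
        src_base: PathBuf,
        run_ignored: bool,
    }

    fn build_config(src_base: PathBuf, run_ignored: bool) -> Config {
        // Field-init shorthand: a local with the same name as a field initializes it.
        Config { src_base, run_ignored }
    }

    fn main() {
        let config = build_config(PathBuf::from("src/test/ui"), false);
        assert!(!config.run_ignored);
        assert!(config.src_base.ends_with("ui"));
    }
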
index db0ac9279046ca8403f35103e4e98471460c2c39..e79aefb723614afd9af92ff9b51f934d84716c55 100644 (file)
@@ -1288,7 +1288,9 @@ fn compile_test(&self) -> ProcRes {
                 // want to actually assert warnings about all this code. Instead
                 // let's just ignore unused code warnings by defaults and tests
                 // can turn it back on if needed.
-                rustc.args(&["-A", "unused"]);
+                if !self.config.src_base.ends_with("rustdoc-ui") {
+                    rustc.args(&["-A", "unused"]);
+                }
             }
             _ => {}
         }
@@ -1582,7 +1584,12 @@ fn compose_and_run(
     }
 
     fn make_compile_args(&self, input_file: &Path, output_file: TargetLocation) -> Command {
-        let mut rustc = Command::new(&self.config.rustc_path);
+        let is_rustdoc = self.config.src_base.ends_with("rustdoc-ui");
+        let mut rustc = if !is_rustdoc {
+            Command::new(&self.config.rustc_path)
+        } else {
+            Command::new(&self.config.rustdoc_path.clone().expect("no rustdoc built yet"))
+        };
         rustc.arg(input_file).arg("-L").arg(&self.config.build_base);
 
         // Optionally prevent default --target if specified in test compile-flags.
@@ -1605,17 +1612,19 @@ fn make_compile_args(&self, input_file: &Path, output_file: TargetLocation) -> C
             rustc.args(&["--cfg", revision]);
         }
 
-        if let Some(ref incremental_dir) = self.props.incremental_dir {
-            rustc.args(&[
-                "-C",
-                &format!("incremental={}", incremental_dir.display()),
-            ]);
-            rustc.args(&["-Z", "incremental-verify-ich"]);
-            rustc.args(&["-Z", "incremental-queries"]);
-        }
+        if !is_rustdoc {
+            if let Some(ref incremental_dir) = self.props.incremental_dir {
+                rustc.args(&[
+                    "-C",
+                    &format!("incremental={}", incremental_dir.display()),
+                ]);
+                rustc.args(&["-Z", "incremental-verify-ich"]);
+                rustc.args(&["-Z", "incremental-queries"]);
+            }
 
-        if self.config.mode == CodegenUnits {
-            rustc.args(&["-Z", "human_readable_cgu_names"]);
+            if self.config.mode == CodegenUnits {
+                rustc.args(&["-Z", "human_readable_cgu_names"]);
+            }
         }
 
         match self.config.mode {
@@ -1668,11 +1677,12 @@ fn make_compile_args(&self, input_file: &Path, output_file: TargetLocation) -> C
             }
         }
 
-
-        if self.config.target == "wasm32-unknown-unknown" {
-            // rustc.arg("-g"); // get any backtrace at all on errors
-        } else if !self.props.no_prefer_dynamic {
-            rustc.args(&["-C", "prefer-dynamic"]);
+        if !is_rustdoc {
+            if self.config.target == "wasm32-unknown-unknown" {
+                // rustc.arg("-g"); // get any backtrace at all on errors
+            } else if !self.props.no_prefer_dynamic {
+                rustc.args(&["-C", "prefer-dynamic"]);
+            }
         }
 
         match output_file {
@@ -1686,7 +1696,7 @@ fn make_compile_args(&self, input_file: &Path, output_file: TargetLocation) -> C
 
         match self.config.compare_mode {
             Some(CompareMode::Nll) => {
-                rustc.args(&["-Znll", "-Zborrowck=mir", "-Ztwo-phase-borrows"]);
+                rustc.args(&["-Zborrowck=mir", "-Ztwo-phase-borrows"]);
             },
             None => {},
         }
@@ -1696,8 +1706,10 @@ fn make_compile_args(&self, input_file: &Path, output_file: TargetLocation) -> C
         } else {
             rustc.args(self.split_maybe_args(&self.config.target_rustcflags));
         }
-        if let Some(ref linker) = self.config.linker {
-            rustc.arg(format!("-Clinker={}", linker));
+        if !is_rustdoc {
+            if let Some(ref linker) = self.config.linker {
+                rustc.arg(format!("-Clinker={}", linker));
+            }
         }
 
         rustc.args(&self.props.compile_flags);
@@ -2509,7 +2521,6 @@ fn run_ui_test(&self) {
             .compile_flags
             .iter()
             .any(|s| s.contains("--error-format"));
-
         let proc_res = self.compile_test();
         self.check_if_test_should_compile(&proc_res);
 
index 7c9ee2a49430bee0e3d9eabf3976f743ac7a0295..1e6c4336a9e995cf054399bc5cef863d1f67471e 100644 (file)
@@ -157,7 +157,8 @@ function main(argv) {
     // execQuery first parameter is built in getQuery (which takes in the search input).
     // execQuery last parameter is built in buildIndex.
     // buildIndex requires the hashmap from search-index.
-    var functionsToLoad = ["levenshtein", "validateResult", "getQuery", "buildIndex", "execQuery"];
+    var functionsToLoad = ["levenshtein", "validateResult", "getQuery", "buildIndex", "execQuery",
+                           "execSearch"];
 
     finalJS += 'window = { "currentCrate": "std" };\n';
     finalJS += loadThings(arraysToLoad, 'array', extractArrayVariable, mainJs);
@@ -174,7 +175,7 @@ function main(argv) {
                                'exports.QUERY = QUERY;exports.EXPECTED = EXPECTED;');
         const expected = loadedFile.EXPECTED;
         const query = loadedFile.QUERY;
-        var results = loaded.execQuery(loaded.getQuery(query), index);
+        var results = loaded.execSearch(loaded.getQuery(query), index);
         process.stdout.write('Checking "' + file + '" ... ');
         var error_text = [];
         for (var key in expected) {
index 1415a4dc23f28644cb197b6bb69c311245c216e2..dd807e24656c91b4ad22d3cac146edd86315e633 160000 (submodule)
@@ -1 +1 @@
-Subproject commit 1415a4dc23f28644cb197b6bb69c311245c216e2
+Subproject commit dd807e24656c91b4ad22d3cac146edd86315e633