git.lizzy.rs Git - rust.git/commitdiff
Rollup merge of #55510 - bitshifter:repr-feature-gate-fix, r=petrochenkov
author    kennytm <kennytm@gmail.com>    Wed, 7 Nov 2018 10:01:53 +0000 (18:01 +0800)
committer GitHub <noreply@github.com>    Wed, 7 Nov 2018 10:01:53 +0000 (18:01 +0800)
Fix feature gate only being checked on first repr attr.

Reported in https://github.com/rust-lang/rust/issues/33158#issuecomment-412185357.
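
For context, a minimal sketch of the kind of code the fix targets (hypothetical example: the struct name and the particular gated repr, repr(simd), are illustrative and not taken from the PR's tests):

    // Before this fix, rustc ran the feature-gate check only on the first
    // `#[repr]` attribute of an item, so a gated repr placed in a later
    // attribute slipped through on stable. With the fix, every `#[repr]`
    // attribute is checked, and this fails without `#![feature(repr_simd)]`.
    #[repr(C)]
    #[repr(simd)] // now rejected by the feature-gate check
    struct Example(f32, f32, f32, f32);

    fn main() {}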

553 files changed:
.gitmodules
.travis.yml
appveyor.yml
config.toml.example
src/Cargo.lock
src/bootstrap/bin/rustc.rs
src/bootstrap/bootstrap.py
src/bootstrap/builder.rs
src/bootstrap/cache.rs
src/bootstrap/channel.rs
src/bootstrap/compile.rs
src/bootstrap/config.rs
src/bootstrap/configure.py
src/bootstrap/dist.rs
src/bootstrap/doc.rs
src/bootstrap/lib.rs
src/bootstrap/mk/Makefile.in
src/bootstrap/sanity.rs
src/bootstrap/test.rs
src/ci/docker/disabled/dist-powerpcspe-linux/Dockerfile [new file with mode: 0644]
src/ci/docker/dist-i686-linux/Dockerfile
src/ci/docker/dist-various-1/Dockerfile
src/ci/docker/dist-various-2/Dockerfile
src/ci/docker/dist-x86_64-linux/Dockerfile
src/ci/docker/x86_64-gnu/Dockerfile
src/doc/rustdoc/src/unstable-features.md
src/doc/unstable-book/src/language-features/trait-alias.md [new file with mode: 0644]
src/etc/gdb_rust_pretty_printing.py
src/jemalloc [deleted submodule]
src/liballoc/boxed.rs
src/liballoc/lib.rs
src/liballoc/rc.rs
src/liballoc/sync.rs
src/liballoc/tests/heap.rs
src/liballoc/tests/lib.rs
src/liballoc_jemalloc/Cargo.toml [deleted file]
src/liballoc_jemalloc/build.rs [deleted file]
src/liballoc_jemalloc/lib.rs [deleted file]
src/liballoc_jemalloc/pthread_atfork_dummy.c [deleted file]
src/liballoc_system/lib.rs
src/libcompiler_builtins
src/libcore/cell.rs
src/libcore/default.rs
src/libcore/lib.rs
src/libcore/mem.rs
src/libcore/nonzero.rs
src/libcore/num/mod.rs
src/libcore/ops/mod.rs
src/libcore/ops/range.rs
src/libcore/ops/unsize.rs
src/libcore/pin.rs
src/libcore/ptr.rs
src/libcore/sync/atomic.rs
src/libcore/tests/lib.rs
src/libcore/time.rs
src/librustc/hir/lowering.rs
src/librustc/hir/map/mod.rs
src/librustc/ich/impls_mir.rs
src/librustc/ich/impls_ty.rs
src/librustc/infer/canonical/canonicalizer.rs
src/librustc/infer/canonical/mod.rs
src/librustc/infer/canonical/query_response.rs
src/librustc/infer/canonical/substitute.rs
src/librustc/infer/combine.rs
src/librustc/infer/error_reporting/mod.rs
src/librustc/infer/freshen.rs
src/librustc/infer/glb.rs
src/librustc/infer/higher_ranked/mod.rs
src/librustc/infer/lexical_region_resolve/mod.rs
src/librustc/infer/lub.rs
src/librustc/infer/outlives/obligations.rs
src/librustc/infer/outlives/verify.rs
src/librustc/infer/region_constraints/mod.rs
src/librustc/infer/sub.rs
src/librustc/lib.rs
src/librustc/lint/context.rs
src/librustc/middle/dead.rs
src/librustc/middle/intrinsicck.rs
src/librustc/middle/lang_items.rs
src/librustc/middle/resolve_lifetime.rs
src/librustc/mir/interpret/mod.rs
src/librustc/mir/interpret/value.rs
src/librustc/mir/mod.rs
src/librustc/mir/visit.rs
src/librustc/session/config.rs
src/librustc/session/mod.rs
src/librustc/traits/auto_trait.rs
src/librustc/traits/coherence.rs
src/librustc/traits/error_reporting.rs
src/librustc/traits/fulfill.rs
src/librustc/traits/mod.rs
src/librustc/traits/object_safety.rs
src/librustc/traits/project.rs
src/librustc/traits/query/dropck_outlives.rs
src/librustc/traits/query/normalize.rs
src/librustc/traits/query/outlives_bounds.rs
src/librustc/traits/query/type_op/implied_outlives_bounds.rs
src/librustc/traits/query/type_op/outlives.rs
src/librustc/traits/select.rs
src/librustc/traits/structural_impls.rs
src/librustc/traits/util.rs
src/librustc/ty/context.rs
src/librustc/ty/error.rs
src/librustc/ty/fast_reject.rs
src/librustc/ty/flags.rs
src/librustc/ty/fold.rs
src/librustc/ty/instance.rs
src/librustc/ty/item_path.rs
src/librustc/ty/layout.rs
src/librustc/ty/mod.rs
src/librustc/ty/outlives.rs
src/librustc/ty/structural_impls.rs
src/librustc/ty/sty.rs
src/librustc/ty/subst.rs
src/librustc/ty/util.rs
src/librustc/ty/walk.rs
src/librustc/ty/wf.rs
src/librustc/util/ppaux.rs
src/librustc_borrowck/borrowck/check_loans.rs
src/librustc_borrowck/borrowck/gather_loans/mod.rs
src/librustc_codegen_llvm/abi.rs
src/librustc_codegen_llvm/back/archive.rs
src/librustc_codegen_llvm/back/command.rs [deleted file]
src/librustc_codegen_llvm/back/link.rs
src/librustc_codegen_llvm/back/linker.rs [deleted file]
src/librustc_codegen_llvm/back/lto.rs
src/librustc_codegen_llvm/back/symbol_export.rs [deleted file]
src/librustc_codegen_llvm/back/write.rs
src/librustc_codegen_llvm/base.rs
src/librustc_codegen_llvm/builder.rs
src/librustc_codegen_llvm/callee.rs
src/librustc_codegen_llvm/context.rs
src/librustc_codegen_llvm/debuginfo/metadata.rs
src/librustc_codegen_llvm/debuginfo/type_names.rs
src/librustc_codegen_llvm/intrinsic.rs
src/librustc_codegen_llvm/lib.rs
src/librustc_codegen_llvm/llvm/ffi.rs
src/librustc_codegen_llvm/mir/analyze.rs
src/librustc_codegen_llvm/mir/block.rs
src/librustc_codegen_llvm/mir/constant.rs
src/librustc_codegen_llvm/mir/statement.rs
src/librustc_codegen_llvm/type_of.rs
src/librustc_codegen_utils/Cargo.toml
src/librustc_codegen_utils/command.rs [new file with mode: 0644]
src/librustc_codegen_utils/lib.rs
src/librustc_codegen_utils/link.rs
src/librustc_codegen_utils/linker.rs [new file with mode: 0644]
src/librustc_codegen_utils/symbol_export.rs [new file with mode: 0644]
src/librustc_data_structures/lib.rs
src/librustc_data_structures/obligation_forest/mod.rs
src/librustc_data_structures/obligation_forest/test.rs
src/librustc_driver/Cargo.toml
src/librustc_driver/lib.rs
src/librustc_driver/test.rs
src/librustc_errors/diagnostic.rs
src/librustc_errors/diagnostic_builder.rs
src/librustc_lint/types.rs
src/librustc_lint/unused.rs
src/librustc_metadata/Cargo.toml
src/librustc_metadata/creader.rs
src/librustc_metadata/decoder.rs
src/librustc_metadata/lib.rs
src/librustc_metadata_utils/Cargo.toml [deleted file]
src/librustc_metadata_utils/lib.rs [deleted file]
src/librustc_mir/borrow_check/error_reporting.rs
src/librustc_mir/borrow_check/mod.rs
src/librustc_mir/borrow_check/nll/invalidation.rs
src/librustc_mir/borrow_check/nll/mod.rs
src/librustc_mir/borrow_check/nll/region_infer/error_reporting/mod.rs
src/librustc_mir/borrow_check/nll/region_infer/error_reporting/region_name.rs
src/librustc_mir/borrow_check/nll/region_infer/mod.rs
src/librustc_mir/borrow_check/nll/type_check/constraint_conversion.rs
src/librustc_mir/borrow_check/nll/type_check/free_region_relations.rs
src/librustc_mir/borrow_check/nll/type_check/mod.rs
src/librustc_mir/borrow_check/nll/type_check/relate_tys.rs
src/librustc_mir/build/expr/stmt.rs
src/librustc_mir/const_eval.rs
src/librustc_mir/dataflow/impls/borrows.rs
src/librustc_mir/dataflow/move_paths/builder.rs
src/librustc_mir/dataflow/move_paths/mod.rs
src/librustc_mir/hair/cx/mod.rs
src/librustc_mir/hair/pattern/_match.rs
src/librustc_mir/hair/pattern/check_match.rs
src/librustc_mir/hair/pattern/mod.rs
src/librustc_mir/interpret/cast.rs
src/librustc_mir/interpret/eval_context.rs
src/librustc_mir/interpret/intrinsics.rs
src/librustc_mir/interpret/machine.rs
src/librustc_mir/interpret/memory.rs
src/librustc_mir/interpret/mod.rs
src/librustc_mir/interpret/operand.rs
src/librustc_mir/interpret/operator.rs
src/librustc_mir/interpret/place.rs
src/librustc_mir/interpret/snapshot.rs
src/librustc_mir/interpret/step.rs
src/librustc_mir/interpret/terminator.rs
src/librustc_mir/interpret/traits.rs
src/librustc_mir/interpret/validity.rs
src/librustc_mir/lib.rs
src/librustc_mir/monomorphize/collector.rs
src/librustc_mir/monomorphize/item.rs
src/librustc_mir/monomorphize/partitioning.rs
src/librustc_mir/shim.rs
src/librustc_mir/transform/add_retag.rs [new file with mode: 0644]
src/librustc_mir/transform/add_validation.rs [deleted file]
src/librustc_mir/transform/check_unsafety.rs
src/librustc_mir/transform/const_prop.rs
src/librustc_mir/transform/erase_regions.rs
src/librustc_mir/transform/inline.rs
src/librustc_mir/transform/lower_128bit.rs
src/librustc_mir/transform/mod.rs
src/librustc_mir/transform/qualify_consts.rs
src/librustc_mir/transform/qualify_min_const_fn.rs
src/librustc_mir/transform/remove_noop_landing_pads.rs
src/librustc_mir/transform/rustc_peek.rs
src/librustc_mir/util/borrowck_errors.rs
src/librustc_mir/util/liveness.rs
src/librustc_passes/ast_validation.rs
src/librustc_passes/mir_stats.rs
src/librustc_resolve/lib.rs
src/librustc_resolve/macros.rs
src/librustc_resolve/resolve_imports.rs
src/librustc_save_analysis/dump_visitor.rs
src/librustc_save_analysis/json_dumper.rs
src/librustc_target/Cargo.toml
src/librustc_target/abi/call/aarch64.rs
src/librustc_target/abi/call/amdgpu.rs
src/librustc_target/abi/call/arm.rs
src/librustc_target/abi/call/asmjs.rs
src/librustc_target/abi/call/mips.rs
src/librustc_target/abi/call/mips64.rs
src/librustc_target/abi/call/mod.rs
src/librustc_target/abi/call/powerpc.rs
src/librustc_target/abi/call/powerpc64.rs
src/librustc_target/abi/call/s390x.rs
src/librustc_target/abi/call/sparc.rs
src/librustc_target/abi/call/sparc64.rs
src/librustc_target/abi/call/x86.rs
src/librustc_target/abi/call/x86_64.rs
src/librustc_target/abi/mod.rs
src/librustc_target/lib.rs
src/librustc_target/spec/aarch64_fuchsia.rs
src/librustc_target/spec/apple_base.rs
src/librustc_target/spec/apple_ios_base.rs
src/librustc_target/spec/arm_linux_androideabi.rs
src/librustc_target/spec/cloudabi_base.rs
src/librustc_target/spec/dragonfly_base.rs
src/librustc_target/spec/freebsd_base.rs
src/librustc_target/spec/fuchsia_base.rs
src/librustc_target/spec/linux_base.rs
src/librustc_target/spec/mod.rs
src/librustc_target/spec/solaris_base.rs
src/librustc_target/spec/x86_64_fuchsia.rs
src/librustc_traits/chalk_context.rs
src/librustc_traits/dropck_outlives.rs
src/librustc_traits/implied_outlives_bounds.rs
src/librustc_traits/lowering/environment.rs
src/librustc_typeck/astconv.rs
src/librustc_typeck/check/_match.rs
src/librustc_typeck/check/cast.rs
src/librustc_typeck/check/closure.rs
src/librustc_typeck/check/demand.rs
src/librustc_typeck/check/intrinsic.rs
src/librustc_typeck/check/method/mod.rs
src/librustc_typeck/check/method/probe.rs
src/librustc_typeck/check/method/suggest.rs
src/librustc_typeck/check/mod.rs
src/librustc_typeck/check/wfcheck.rs
src/librustc_typeck/coherence/builtin.rs
src/librustc_typeck/collect.rs
src/librustc_typeck/diagnostics.rs
src/librustc_typeck/outlives/utils.rs
src/librustc_typeck/variance/constraints.rs
src/librustdoc/clean/mod.rs
src/librustdoc/config.rs [new file with mode: 0644]
src/librustdoc/core.rs
src/librustdoc/externalfiles.rs
src/librustdoc/html/markdown.rs
src/librustdoc/html/render.rs
src/librustdoc/html/static/main.js
src/librustdoc/html/static/rustdoc.css
src/librustdoc/html/static/storage.js
src/librustdoc/lib.rs
src/librustdoc/markdown.rs
src/librustdoc/test.rs
src/libstd/Cargo.toml
src/libstd/alloc.rs
src/libstd/collections/hash/map.rs
src/libstd/fs.rs
src/libstd/lib.rs
src/libstd/panic.rs
src/libstd/sys/unix/process/process_common.rs
src/libstd/sys/unix/process/process_unix.rs
src/libstd/thread/local.rs
src/libstd/thread/mod.rs
src/libsyntax/ast.rs
src/libsyntax/ext/expand.rs
src/libsyntax/ext/quote.rs
src/libsyntax/ext/tt/macro_parser.rs
src/libsyntax/feature_gate.rs
src/libsyntax/lib.rs
src/libsyntax/parse/lexer/mod.rs
src/libsyntax/parse/mod.rs
src/libsyntax/parse/parser.rs
src/libsyntax/util/parser_testing.rs
src/libsyntax_ext/deriving/generic/mod.rs
src/llvm
src/rustc/Cargo.toml
src/rustllvm/ArchiveWrapper.cpp
src/rustllvm/Linker.cpp
src/rustllvm/PassWrapper.cpp
src/rustllvm/RustWrapper.cpp
src/rustllvm/rustllvm.h
src/stage0.txt
src/stdsimd
src/test/codegen/enum-debug-clike.rs [new file with mode: 0644]
src/test/codegen/enum-debug-niche.rs [new file with mode: 0644]
src/test/codegen/enum-debug-tagged.rs [new file with mode: 0644]
src/test/debuginfo/basic-types.rs
src/test/debuginfo/borrowed-enum-legacy.rs [new file with mode: 0644]
src/test/debuginfo/borrowed-enum.rs
src/test/debuginfo/cross-crate-spans.rs
src/test/debuginfo/destructured-for-loop-variable.rs
src/test/debuginfo/generic-enum-with-different-disr-sizes-legacy.rs [new file with mode: 0644]
src/test/debuginfo/generic-enum-with-different-disr-sizes.rs
src/test/debuginfo/generic-struct-style-enum-legacy.rs [new file with mode: 0644]
src/test/debuginfo/generic-struct-style-enum.rs
src/test/debuginfo/generic-tuple-style-enum-legacy.rs [new file with mode: 0644]
src/test/debuginfo/generic-tuple-style-enum.rs
src/test/debuginfo/method-on-tuple-struct.rs
src/test/debuginfo/nil-enum.rs
src/test/debuginfo/recursive-struct-legacy.rs [new file with mode: 0644]
src/test/debuginfo/recursive-struct.rs
src/test/debuginfo/struct-style-enum-legacy.rs [new file with mode: 0644]
src/test/debuginfo/struct-style-enum.rs
src/test/debuginfo/tuple-style-enum-legacy.rs [new file with mode: 0644]
src/test/debuginfo/tuple-style-enum.rs
src/test/debuginfo/unique-enum-legacy.rs [new file with mode: 0644]
src/test/debuginfo/unique-enum.rs
src/test/debuginfo/vec-slices.rs
src/test/mir-opt/basic_assignment.rs
src/test/mir-opt/inline-retag.rs [new file with mode: 0644]
src/test/mir-opt/retag.rs [new file with mode: 0644]
src/test/mir-opt/validate_1.rs [deleted file]
src/test/mir-opt/validate_2.rs [deleted file]
src/test/mir-opt/validate_3.rs [deleted file]
src/test/mir-opt/validate_4.rs [deleted file]
src/test/mir-opt/validate_5.rs [deleted file]
src/test/run-make-fulldeps/atomic-lock-free/atomic_lock_free.rs
src/test/run-make/thumb-none-qemu/Makefile [new file with mode: 0644]
src/test/run-make/thumb-none-qemu/example/.cargo/config [new file with mode: 0644]
src/test/run-make/thumb-none-qemu/example/Cargo.toml [new file with mode: 0644]
src/test/run-make/thumb-none-qemu/example/memory.x [new file with mode: 0644]
src/test/run-make/thumb-none-qemu/example/src/main.rs [new file with mode: 0644]
src/test/run-make/thumb-none-qemu/script.sh [new file with mode: 0644]
src/test/run-make/wasm-panic-small/Makefile
src/test/run-make/wasm-panic-small/foo.rs
src/test/run-make/wasm-symbols-not-imported/foo.rs
src/test/run-pass/arbitrary_self_types_pointers_and_wrappers.rs [new file with mode: 0644]
src/test/run-pass/arbitrary_self_types_stdlib_pointers.rs [new file with mode: 0644]
src/test/run-pass/atomic-alignment.rs [new file with mode: 0644]
src/test/run-pass/command-exec.rs
src/test/run-pass/impl-trait/example-calendar.rs
src/test/run-pass/invalid_const_promotion.rs
src/test/run-pass/issues/issue-54477-reduced-2.rs [new file with mode: 0644]
src/test/run-pass/macros/colorful-write-macros.rs
src/test/run-pass/optimization-fuel-0.rs
src/test/run-pass/optimization-fuel-0.stderr [new file with mode: 0644]
src/test/run-pass/optimization-fuel-0.stdout [deleted file]
src/test/run-pass/optimization-fuel-1.rs
src/test/run-pass/optimization-fuel-1.stderr [new file with mode: 0644]
src/test/run-pass/optimization-fuel-1.stdout [deleted file]
src/test/run-pass/thread-local-not-in-prelude.rs [new file with mode: 0644]
src/test/run-pass/traits/trait-alias-bounds.rs [new file with mode: 0644]
src/test/run-pass/traits/trait-alias-object-type.rs [new file with mode: 0644]
src/test/run-pass/traits/trait-alias-syntax.rs [new file with mode: 0644]
src/test/rustdoc-ui/failed-doctest-output.stdout
src/test/rustdoc/assoc-consts.rs
src/test/rustdoc/auxiliary/enum_primitive.rs [new file with mode: 0644]
src/test/rustdoc/index-page.rs [new file with mode: 0644]
src/test/rustdoc/issue-54705.rs [new file with mode: 0644]
src/test/rustdoc/manual_impl.rs
src/test/rustdoc/no-stack-overflow-25295.rs [new file with mode: 0644]
src/test/ui/arbitrary-self-types-not-object-safe.rs
src/test/ui/arbitrary-self-types-not-object-safe.stderr
src/test/ui/binop/binop-move-semantics.nll.stderr
src/test/ui/binop/binop-move-semantics.rs
src/test/ui/borrowck/borrowck-closures-mut-of-imm.nll.stderr
src/test/ui/borrowck/borrowck-closures-mut-of-imm.rs
src/test/ui/borrowck/borrowck-closures-mut-of-imm.stderr
src/test/ui/borrowck/borrowck-closures-mut-of-mut.nll.stderr [new file with mode: 0644]
src/test/ui/borrowck/borrowck-closures-mut-of-mut.rs [new file with mode: 0644]
src/test/ui/borrowck/borrowck-closures-mut-of-mut.stderr [new file with mode: 0644]
src/test/ui/borrowck/borrowck-lend-flow-loop.ast.stderr [new file with mode: 0644]
src/test/ui/borrowck/borrowck-lend-flow-loop.nll.stderr
src/test/ui/borrowck/borrowck-lend-flow-loop.rs
src/test/ui/borrowck/borrowck-lend-flow-loop.stderr [deleted file]
src/test/ui/borrowck/borrowck-overloaded-call.nll.stderr
src/test/ui/borrowck/borrowck-overloaded-call.rs
src/test/ui/borrowck/borrowck-overloaded-call.stderr
src/test/ui/borrowck/borrowck-overloaded-index-move-index.nll.stderr
src/test/ui/borrowck/borrowck-overloaded-index-move-index.rs
src/test/ui/borrowck/borrowck-reborrow-from-mut.nll.stderr
src/test/ui/borrowck/borrowck-reborrow-from-mut.rs
src/test/ui/borrowck/borrowck-reborrow-from-mut.stderr
src/test/ui/borrowck/borrowck-unboxed-closures.nll.stderr
src/test/ui/borrowck/borrowck-unboxed-closures.rs
src/test/ui/borrowck/borrowck-unboxed-closures.stderr
src/test/ui/codemap_tests/overlapping_spans.nll.stderr [deleted file]
src/test/ui/codemap_tests/overlapping_spans.rs [deleted file]
src/test/ui/codemap_tests/overlapping_spans.stderr [deleted file]
src/test/ui/codemap_tests/two_files.stderr
src/test/ui/consts/const-eval/double_check.rs
src/test/ui/consts/const-eval/double_check2.rs
src/test/ui/consts/const-eval/double_check2.stderr
src/test/ui/consts/dangling-alloc-id-ice.rs [new file with mode: 0644]
src/test/ui/consts/dangling-alloc-id-ice.stderr [new file with mode: 0644]
src/test/ui/consts/dangling_raw_ptr.rs [new file with mode: 0644]
src/test/ui/consts/dangling_raw_ptr.stderr [new file with mode: 0644]
src/test/ui/derive-uninhabited-enum-38885.rs [new file with mode: 0644]
src/test/ui/derive-uninhabited-enum-38885.stderr [new file with mode: 0644]
src/test/ui/did_you_mean/issue-46836-identifier-not-instead-of-negation.stderr
src/test/ui/dropck/dropck-eyepatch-extern-crate.ast.stderr [new file with mode: 0644]
src/test/ui/dropck/dropck-eyepatch-extern-crate.rs
src/test/ui/dropck/dropck-eyepatch-extern-crate.stderr [deleted file]
src/test/ui/dropck/dropck-eyepatch-reorder.ast.stderr [new file with mode: 0644]
src/test/ui/dropck/dropck-eyepatch-reorder.rs
src/test/ui/dropck/dropck-eyepatch-reorder.stderr [deleted file]
src/test/ui/dropck/dropck-eyepatch.ast.stderr [new file with mode: 0644]
src/test/ui/dropck/dropck-eyepatch.rs
src/test/ui/dropck/dropck-eyepatch.stderr [deleted file]
src/test/ui/error-codes/E0004-2.stderr
src/test/ui/error-codes/E0375.stderr
src/test/ui/feature-gates/feature-gate-allow_fail.stderr
src/test/ui/feature-gates/feature-gate-cfg-target-has-atomic.rs
src/test/ui/feature-gates/feature-gate-cfg-target-has-atomic.stderr
src/test/ui/feature-gates/feature-gate-crate_visibility_modifier.stderr
src/test/ui/feature-gates/feature-gate-extern_crate_item_prelude.stderr
src/test/ui/feature-gates/feature-gate-extern_in_paths.stderr
src/test/ui/feature-gates/feature-gate-nll.nll.stderr [deleted file]
src/test/ui/feature-gates/feature-gate-nll.rs
src/test/ui/feature-gates/feature-gate-panic-implementation.rs [deleted file]
src/test/ui/feature-gates/feature-gate-panic-implementation.stderr [deleted file]
src/test/ui/feature-gates/feature-gate-trait-alias.rs [new file with mode: 0644]
src/test/ui/feature-gates/feature-gate-trait-alias.stderr [new file with mode: 0644]
src/test/ui/if/if-without-block.rs
src/test/ui/if/if-without-block.stderr
src/test/ui/imports/issue-55457.rs [new file with mode: 0644]
src/test/ui/imports/issue-55457.stderr [new file with mode: 0644]
src/test/ui/inline-asm-bad-operand.rs
src/test/ui/inline-asm-bad-operand.stderr
src/test/ui/invalid_dispatch_from_dyn_impls.rs [new file with mode: 0644]
src/test/ui/invalid_dispatch_from_dyn_impls.stderr [new file with mode: 0644]
src/test/ui/issue-51602.stderr
src/test/ui/issues/issue-17263.ast.stderr [new file with mode: 0644]
src/test/ui/issues/issue-17263.nll.stderr
src/test/ui/issues/issue-17263.rs
src/test/ui/issues/issue-17263.stderr [deleted file]
src/test/ui/issues/issue-26905.stderr
src/test/ui/issues/issue-3096-1.stderr
src/test/ui/issues/issue-3096-2.stderr
src/test/ui/issues/issue-31076.rs [new file with mode: 0644]
src/test/ui/issues/issue-31076.stderr [new file with mode: 0644]
src/test/ui/issues/issue-39848.stderr
src/test/ui/issues/issue-52126-assign-op-invariance.nll.stderr
src/test/ui/issues/issue-52126-assign-op-invariance.rs
src/test/ui/label/label_break_value_illegal_uses.stderr
src/test/ui/lub-glb/old-lub-glb-hr.stderr
src/test/ui/lub-glb/old-lub-glb-object.stderr
src/test/ui/macros/must-use-in-macro-55516.rs [new file with mode: 0644]
src/test/ui/macros/must-use-in-macro-55516.stderr [new file with mode: 0644]
src/test/ui/missing/missing-block-hint.stderr
src/test/ui/missing/missing-semicolon-warning.stderr
src/test/ui/nll/user-annotations/dump-adt-brace-struct.stderr
src/test/ui/nll/user-annotations/dump-fn-method.stderr
src/test/ui/panic-implementation/panic-implementation-deprecated.rs [deleted file]
src/test/ui/panic-implementation/panic-implementation-deprecated.stderr [deleted file]
src/test/ui/parser/doc-before-identifier.rs
src/test/ui/parser/doc-before-identifier.stderr
src/test/ui/parser/doc-comment-in-if-statement.rs [new file with mode: 0644]
src/test/ui/parser/doc-comment-in-if-statement.stderr [new file with mode: 0644]
src/test/ui/parser/fn-arg-doc-comment.rs [new file with mode: 0644]
src/test/ui/parser/fn-arg-doc-comment.stderr [new file with mode: 0644]
src/test/ui/parser/import-from-rename.stderr
src/test/ui/parser/import-glob-rename.stderr
src/test/ui/parser/issue-17904-2.rs
src/test/ui/parser/issue-17904-2.stderr
src/test/ui/parser/issue-33413.rs
src/test/ui/parser/issue-33413.stderr
src/test/ui/parser/match-refactor-to-expr.rs
src/test/ui/parser/match-refactor-to-expr.stderr
src/test/ui/parser/removed-syntax-mode.rs
src/test/ui/parser/removed-syntax-mode.stderr
src/test/ui/parser/unsized.rs
src/test/ui/parser/unsized.stderr
src/test/ui/parser/virtual-structs.rs
src/test/ui/parser/virtual-structs.stderr
src/test/ui/print-fuel/print-fuel.rs
src/test/ui/print-fuel/print-fuel.stderr [new file with mode: 0644]
src/test/ui/print-fuel/print-fuel.stdout [deleted file]
src/test/ui/resolve/issue-3907.stderr
src/test/ui/resolve/issue-5035.stderr
src/test/ui/resolve/unboxed-closure-sugar-nonexistent-trait.stderr
src/test/ui/single-use-lifetime/one-use-in-fn-argument-in-band.fixed [new file with mode: 0644]
src/test/ui/single-use-lifetime/one-use-in-fn-argument-in-band.rs
src/test/ui/single-use-lifetime/one-use-in-fn-argument-in-band.stderr
src/test/ui/single-use-lifetime/one-use-in-fn-argument.rs
src/test/ui/single-use-lifetime/one-use-in-fn-argument.stderr
src/test/ui/single-use-lifetime/one-use-in-inherent-impl-header.rs
src/test/ui/single-use-lifetime/one-use-in-inherent-impl-header.stderr
src/test/ui/single-use-lifetime/one-use-in-inherent-method-argument.rs
src/test/ui/single-use-lifetime/one-use-in-inherent-method-argument.stderr
src/test/ui/single-use-lifetime/one-use-in-trait-method-argument.rs
src/test/ui/single-use-lifetime/one-use-in-trait-method-argument.stderr
src/test/ui/single-use-lifetime/zero-uses-in-fn.fixed
src/test/ui/single-use-lifetime/zero-uses-in-fn.rs
src/test/ui/single-use-lifetime/zero-uses-in-fn.stderr
src/test/ui/single-use-lifetime/zero-uses-in-impl.stderr
src/test/ui/span/borrowck-borrow-overloaded-auto-deref-mut.nll.stderr
src/test/ui/span/borrowck-borrow-overloaded-auto-deref-mut.rs
src/test/ui/span/borrowck-borrow-overloaded-auto-deref-mut.stderr
src/test/ui/trait-alias-fail.rs [deleted file]
src/test/ui/trait-alias-fail.stderr [deleted file]
src/test/ui/traits/trait-alias-impl.rs [new file with mode: 0644]
src/test/ui/traits/trait-alias-impl.stderr [new file with mode: 0644]
src/test/ui/traits/trait-alias-objects.rs [new file with mode: 0644]
src/test/ui/traits/trait-alias-objects.stderr [new file with mode: 0644]
src/test/ui/traits/trait-alias-wf.rs [new file with mode: 0644]
src/test/ui/traits/trait-alias-wf.stderr [new file with mode: 0644]
src/test/ui/traits/trait-alias.rs [deleted file]
src/test/ui/traits/trait-alias.stderr [deleted file]
src/test/ui/traits/trait-item-privacy.rs
src/test/ui/traits/trait-item-privacy.stderr
src/test/ui/traits/trait-test-2.rs
src/test/ui/traits/trait-test-2.stderr
src/test/ui/try-block/try-block-in-match.stderr
src/test/ui/uninhabited/uninhabited-matches-feature-gated.stderr
src/test/ui/union/union-borrow-move-parent-sibling.nll.stderr
src/test/ui/union/union-borrow-move-parent-sibling.rs
src/test/ui/union/union-borrow-move-parent-sibling.stderr
src/test/ui/unop-move-semantics.nll.stderr
src/test/ui/unop-move-semantics.rs
src/tools/cargo
src/tools/clippy
src/tools/compiletest/src/header.rs
src/tools/compiletest/src/main.rs
src/tools/error_index_generator/main.rs
src/tools/linkchecker/main.rs
src/tools/lldb
src/tools/publish_toolstate.py
src/tools/tidy/src/cargo.rs
src/tools/tidy/src/lib.rs
src/tools/tidy/src/pal.rs

diff --git a/.gitmodules b/.gitmodules
index 038237aa179a9b421aab40eb989f03d1679a073f..bf9bdd9a5b4b0a25fce4a97707f1a875ec1d97f0 100644 (file)
@@ -2,9 +2,6 @@
        path = src/llvm
        url = https://github.com/rust-lang/llvm.git
        branch = master
-[submodule "src/jemalloc"]
-       path = src/jemalloc
-       url = https://github.com/rust-lang/jemalloc.git
 [submodule "src/rust-installer"]
        path = src/tools/rust-installer
        url = https://github.com/rust-lang/rust-installer.git
@@ -64,4 +61,4 @@
        path = src/tools/clang
        url = https://github.com/rust-lang-nursery/clang.git
        branch = rust-release-80-v1
-  
\ No newline at end of file
+
diff --git a/.travis.yml b/.travis.yml
index ec8060b9f56aaae8c934c60f9e11ae613306dd74..b208e760d95c672b1a60891970bd7d0cb7a77f01 100644 (file)
@@ -30,7 +30,7 @@ matrix:
 
     - env: >
         RUST_CHECK_TARGET=dist
-        RUST_CONFIGURE_ARGS="--enable-extended --enable-profiler --enable-lldb"
+        RUST_CONFIGURE_ARGS="--enable-extended --enable-profiler --enable-lldb --set rust.jemalloc"
         SRC=.
         DEPLOY_ALT=1
         RUSTC_RETRY_LINKER_ON_SEGFAULT=1
@@ -53,7 +53,7 @@ matrix:
     # version that we're using, 8.2, cannot compile LLVM for OSX 10.7.
     - env: >
         RUST_CHECK_TARGET=check
-        RUST_CONFIGURE_ARGS="--build=x86_64-apple-darwin --enable-sanitizers --enable-profiler"
+        RUST_CONFIGURE_ARGS="--build=x86_64-apple-darwin --enable-sanitizers --enable-profiler --set rust.jemalloc"
         SRC=.
         RUSTC_RETRY_LINKER_ON_SEGFAULT=1
         MACOSX_DEPLOYMENT_TARGET=10.8
@@ -67,7 +67,7 @@ matrix:
 
     - env: >
         RUST_CHECK_TARGET=check
-        RUST_CONFIGURE_ARGS=--build=i686-apple-darwin
+        RUST_CONFIGURE_ARGS="--build=i686-apple-darwin --set rust.jemalloc"
         SRC=.
         RUSTC_RETRY_LINKER_ON_SEGFAULT=1
         MACOSX_DEPLOYMENT_TARGET=10.8
@@ -87,7 +87,7 @@ matrix:
     # OSX 10.7 and `xcode7` is the latest Xcode able to compile LLVM for 10.7.
     - env: >
         RUST_CHECK_TARGET=dist
-        RUST_CONFIGURE_ARGS="--build=i686-apple-darwin --enable-full-tools --enable-profiler --enable-lldb"
+        RUST_CONFIGURE_ARGS="--build=i686-apple-darwin --enable-full-tools --enable-profiler --enable-lldb --set rust.jemalloc"
         SRC=.
         DEPLOY=1
         RUSTC_RETRY_LINKER_ON_SEGFAULT=1
@@ -102,7 +102,7 @@ matrix:
 
     - env: >
         RUST_CHECK_TARGET=dist
-        RUST_CONFIGURE_ARGS="--target=aarch64-apple-ios,armv7-apple-ios,armv7s-apple-ios,i386-apple-ios,x86_64-apple-ios --enable-full-tools --enable-sanitizers --enable-profiler --enable-lldb"
+        RUST_CONFIGURE_ARGS="--target=aarch64-apple-ios,armv7-apple-ios,armv7s-apple-ios,i386-apple-ios,x86_64-apple-ios --enable-full-tools --enable-sanitizers --enable-profiler --enable-lldb --set rust.jemalloc"
         SRC=.
         DEPLOY=1
         RUSTC_RETRY_LINKER_ON_SEGFAULT=1
@@ -197,23 +197,10 @@ matrix:
         . src/ci/docker/x86_64-gnu-tools/repo.sh;
         commit_toolstate_change "$MESSAGE_FILE" "$TRAVIS_BUILD_DIR/src/tools/publish_toolstate.py" "$(git rev-parse HEAD)" "$(git log --format=%s -n1 HEAD)" "$MESSAGE_FILE" "$TOOLSTATE_REPO_ACCESS_TOKEN";
 
-env:
-  global:
-    - SCCACHE_BUCKET=rust-lang-ci-sccache2
-    - SCCACHE_REGION=us-west-1
-    - AWS_ACCESS_KEY_ID=AKIAJAMV3QAMMA6AXHFQ
-    # AWS_SECRET_ACCESS_KEY=...
-    - secure: "j96XxTVOSUf4s4r4htIxn/fvIa5DWbMgLqWl7r8z2QfgUwscmkMXAwXuFNc7s7bGTpV/+CgDiMFFM6BAFLGKutytIF6oA02s9b+usQYnM0th7YQ2AIgm9GtMTJCJp4AoyfFmh8F2faUICBZlfVLUJ34udHEe35vOklix+0k4WDo="
-    # TOOLSTATE_REPO_ACCESS_TOKEN=...
-    - secure: "ESfcXqv4N2VMhqi2iIyw6da9VrsA78I4iR1asouCaq4hzTTrkB4WNRrfURy6xg72gQ4nMhtRJbB0/2jmc9Cu1+g2CzXtyiL223aJ5CKrXdcvbitopQSDfp07dMWm+UED+hNFEanpErKAeU/6FM3A+J+60PMk8MCF1h9tqNRISJw="
-
 before_install:
-  # We'll use the AWS cli to download/upload cached docker layers, so install
-  # that here.
-  - if [ "$TRAVIS_OS_NAME" = linux ]; then
-      pip install --user awscli;
-      export PATH=$PATH:$HOME/.local/bin;
-    fi
+  # We'll use the AWS cli to download/upload cached docker layers as well as
+  # push our deployments, so download that here.
+  - pip install --user awscli; export PATH=$PATH:$HOME/.local/bin:$HOME/Library/Python/2.7/bin/
   - mkdir -p $HOME/rustsrc
   # FIXME(#46924): these two commands are required to enable IPv6,
   # they shouldn't exist, please revert once more official solutions appeared.
@@ -276,6 +263,23 @@ after_success:
       echo "#### Build successful; Disk usage after running script:";
       df -h;
       du . | sort -nr | head -n100
+  - >
+      if [ "$DEPLOY$DEPLOY_ALT" == "1" ]; then
+        mkdir -p deploy/$TRAVIS_COMMIT;
+        if [ "$TRAVIS_OS_NAME" == "osx" ]; then
+            rm -rf build/dist/doc &&
+            cp -r build/dist/* deploy/$TRAVIS_COMMIT;
+        else
+            rm -rf obj/build/dist/doc &&
+            cp -r obj/build/dist/* deploy/$TRAVIS_COMMIT;
+        fi;
+        ls -la deploy/$TRAVIS_COMMIT;
+        deploy_dir=rustc-builds;
+        if [ "$DEPLOY_ALT" == "1" ]; then
+            deploy_dir=rustc-builds-alt;
+        fi;
+        travis_retry aws s3 cp --no-progress --recursive --acl public-read ./deploy s3://rust-lang-ci2/$deploy_dir
+      fi
 
 after_failure:
   - >
@@ -322,77 +326,3 @@ after_failure:
 
 notifications:
   email: false
-
-before_deploy:
-  - mkdir -p deploy/$TRAVIS_COMMIT
-  - >
-      if [ "$TRAVIS_OS_NAME" == "osx" ]; then
-          rm -rf build/dist/doc &&
-          cp -r build/dist/* deploy/$TRAVIS_COMMIT;
-      else
-          rm -rf obj/build/dist/doc &&
-          cp -r obj/build/dist/* deploy/$TRAVIS_COMMIT;
-      fi
-  - ls -la deploy/$TRAVIS_COMMIT
-
-deploy:
-  - provider: s3
-    bucket: rust-lang-ci2
-    skip_cleanup: true
-    local_dir: deploy
-    upload_dir: rustc-builds
-    acl: public_read
-    region: us-west-1
-    access_key_id: AKIAJVBODR3IA4O72THQ
-    secret_access_key:
-      secure: "kUGd3t7JcVWFESgIlzvsM8viZgCA9Encs3creW0xLJaLSeI1iVjlJK4h/2/nO6y224AFrh/GUfsNr4/4AlxPuYb8OU5oC5Lv+Ff2JiRDYtuNpyQSKAQp+bRYytWMtrmhja91h118Mbm90cUfcLPwkdiINgJNTXhPKg5Cqu3VYn0="
-    on:
-      branch: auto
-      condition: $DEPLOY = 1
-
-  # this is the same as the above deployment provider except that it uploads to
-  # a slightly different directory and has a different trigger
-  - provider: s3
-    bucket: rust-lang-ci2
-    skip_cleanup: true
-    local_dir: deploy
-    upload_dir: rustc-builds-alt
-    acl: public_read
-    region: us-west-1
-    access_key_id: AKIAJVBODR3IA4O72THQ
-    secret_access_key:
-      secure: "kUGd3t7JcVWFESgIlzvsM8viZgCA9Encs3creW0xLJaLSeI1iVjlJK4h/2/nO6y224AFrh/GUfsNr4/4AlxPuYb8OU5oC5Lv+Ff2JiRDYtuNpyQSKAQp+bRYytWMtrmhja91h118Mbm90cUfcLPwkdiINgJNTXhPKg5Cqu3VYn0="
-    on:
-      branch: auto
-      condition: $DEPLOY_ALT = 1
-
-  # These two providers are the same as the two above, except deploy on the
-  # try branch. Travis does not appear to provide a way to use "or" in these
-  # conditions.
-  - provider: s3
-    bucket: rust-lang-ci2
-    skip_cleanup: true
-    local_dir: deploy
-    upload_dir: rustc-builds
-    acl: public_read
-    region: us-west-1
-    access_key_id: AKIAJVBODR3IA4O72THQ
-    secret_access_key:
-      secure: "kUGd3t7JcVWFESgIlzvsM8viZgCA9Encs3creW0xLJaLSeI1iVjlJK4h/2/nO6y224AFrh/GUfsNr4/4AlxPuYb8OU5oC5Lv+Ff2JiRDYtuNpyQSKAQp+bRYytWMtrmhja91h118Mbm90cUfcLPwkdiINgJNTXhPKg5Cqu3VYn0="
-    on:
-      branch: try
-      condition: $DEPLOY = 1
-
-  - provider: s3
-    bucket: rust-lang-ci2
-    skip_cleanup: true
-    local_dir: deploy
-    upload_dir: rustc-builds-alt
-    acl: public_read
-    region: us-west-1
-    access_key_id: AKIAJVBODR3IA4O72THQ
-    secret_access_key:
-      secure: "kUGd3t7JcVWFESgIlzvsM8viZgCA9Encs3creW0xLJaLSeI1iVjlJK4h/2/nO6y224AFrh/GUfsNr4/4AlxPuYb8OU5oC5Lv+Ff2JiRDYtuNpyQSKAQp+bRYytWMtrmhja91h118Mbm90cUfcLPwkdiINgJNTXhPKg5Cqu3VYn0="
-    on:
-      branch: try
-      condition: $DEPLOY_ALT = 1
diff --git a/appveyor.yml b/appveyor.yml
index 04951454c29e1316fa7a08fcd5a5cbf3cc9c760d..743e615c3f3cadfbdc4634c7ebae70c40e121bd6 100644 (file)
@@ -1,12 +1,5 @@
 environment:
-  SCCACHE_BUCKET: rust-lang-ci-sccache2
-  SCCACHE_REGION: us-west-1
-  AWS_ACCESS_KEY_ID: AKIAJAMV3QAMMA6AXHFQ
-  AWS_SECRET_ACCESS_KEY:
-    secure: 7Y+JiquYedOAgnUU26uL0DPzrxmTtR+qIwG6rNKSuWDffqU3vVZxbGXim9QpTO80
   SCCACHE_DIGEST: f808afabb4a4eb1d7112bcb3fa6be03b61e93412890c88e177c667eb37f46353d7ec294e559b16f9f4b5e894f2185fe7670a0df15fd064889ecbd80f0c34166c
-  TOOLSTATE_REPO_ACCESS_TOKEN:
-    secure: gKGlVktr7iuqCoYSxHxDE9ltLOKU0nYDEuQxvWbNxUIW7ri5ppn8L06jQzN0GGzN
 
   # By default schannel checks revocation of certificates unlike some other SSL
   # backends, but we've historically had problems on CI where a revocation
@@ -82,12 +75,14 @@ environment:
   # 32/64 bit MSVC and GNU deployment
   - RUST_CONFIGURE_ARGS: >
       --build=x86_64-pc-windows-msvc
+      --target=x86_64-pc-windows-msvc,aarch64-pc-windows-msvc
       --enable-full-tools
       --enable-profiler
     SCRIPT: python x.py dist
     DIST_REQUIRE_ALL_TOOLS: 1
     DEPLOY: 1
     CI_JOB_NAME: dist-x86_64-msvc
+    APPVEYOR_BUILD_WORKER_IMAGE: Visual Studio 2017 Preview
   - RUST_CONFIGURE_ARGS: >
       --build=i686-pc-windows-msvc
       --target=i586-pc-windows-msvc
@@ -151,9 +146,9 @@ install:
   # Note that the LLVM installer is an NSIS installer
   #
   # Original downloaded here came from
-  # http://releases.llvm.org/6.0.0/LLVM-6.0.0-win64.exe
-  - if NOT defined MINGW_URL appveyor-retry appveyor DownloadFile https://s3-us-west-1.amazonaws.com/rust-lang-ci2/rust-ci-mirror/LLVM-6.0.0-win64.exe
-  - if NOT defined MINGW_URL .\LLVM-6.0.0-win64.exe /S /NCRC /D=C:\clang-rust
+  # http://releases.llvm.org/7.0.0/LLVM-7.0.0-win64.exe
+  - if NOT defined MINGW_URL appveyor-retry appveyor DownloadFile https://s3-us-west-1.amazonaws.com/rust-lang-ci2/rust-ci-mirror/LLVM-7.0.0-win64.exe
+  - if NOT defined MINGW_URL .\LLVM-7.0.0-win64.exe /S /NCRC /D=C:\clang-rust
   - if NOT defined MINGW_URL set RUST_CONFIGURE_ARGS=%RUST_CONFIGURE_ARGS% --set llvm.clang-cl=C:\clang-rust\bin\clang-cl.exe
 
   # Here we do a pretty heinous thing which is to mangle the MinGW installation
@@ -235,10 +230,8 @@ before_deploy:
 
 deploy:
   - provider: S3
-    skip_cleanup: true
-    access_key_id: AKIAJVBODR3IA4O72THQ
-    secret_access_key:
-      secure: tQWIE+DJHjXaV4np/3YeETkEmXngtIuIgAO/LYKQaUshGLgN8cBCFGG3cHx5lKLt
+    access_key_id: $(AWS_ACCESS_KEY_ID)
+    secret_access_key: $(AWS_SECRET_ACCESS_KEY)
     bucket: rust-lang-ci2
     set_public: true
     region: us-west-1
@@ -252,10 +245,8 @@ deploy:
   # This provider is the same as the one above except that it has a slightly
   # different upload directory and a slightly different trigger
   - provider: S3
-    skip_cleanup: true
-    access_key_id: AKIAJVBODR3IA4O72THQ
-    secret_access_key:
-      secure: tQWIE+DJHjXaV4np/3YeETkEmXngtIuIgAO/LYKQaUshGLgN8cBCFGG3cHx5lKLt
+    access_key_id: $(AWS_ACCESS_KEY_ID)
+    secret_access_key: $(AWS_SECRET_ACCESS_KEY)
     bucket: rust-lang-ci2
     set_public: true
     region: us-west-1
diff --git a/config.toml.example b/config.toml.example
index 093b8f9e526eca8778e43293a3614e0bc70f1fae..8b11014edae795c1a4638680d475e16247c5b056 100644 (file)
 # Adding debuginfo makes them several times larger.
 #debuginfo-tools = false
 
-# Whether or not jemalloc is built and enabled
-#use-jemalloc = true
-
-# Whether or not jemalloc is built with its debug option set
-#debug-jemalloc = false
-
 # Whether or not `panic!`s generate backtraces (RUST_BACKTRACE)
 #backtrace = true
 
 # generally only set for releases
 #remap-debuginfo = false
 
+# Link the compiler against `jemalloc`, where on Linux and OSX it should
+# override the default allocator for rustc and LLVM.
+#jemalloc = false
+
 # =============================================================================
 # Options for specific targets
 #
 # not, you can specify an explicit file name for it.
 #llvm-filecheck = "/path/to/FileCheck"
 
-# Path to the custom jemalloc static library to link into the standard library
-# by default. This is only used if jemalloc is still enabled above
-#jemalloc = "/path/to/jemalloc/libjemalloc_pic.a"
-
 # If this target is for Android, this option will be required to specify where
 # the NDK for the target lives. This is used to find the C compiler to link and
 # build native code.
diff --git a/src/Cargo.lock b/src/Cargo.lock
index 8ef815842df727009f6d5916ba88d61b708b3583..b4317864502cee0fd48979f54fb6f057b47c0bcf 100644 (file)
@@ -15,17 +15,6 @@ dependencies = [
  "rand 0.5.5 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 
-[[package]]
-name = "alloc_jemalloc"
-version = "0.0.0"
-dependencies = [
- "build_helper 0.1.0",
- "cc 1.0.25 (registry+https://github.com/rust-lang/crates.io-index)",
- "compiler_builtins 0.0.0",
- "core 0.0.0",
- "libc 0.0.0",
-]
-
 [[package]]
 name = "alloc_system"
 version = "0.0.0"
@@ -198,14 +187,14 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 
 [[package]]
 name = "cargo"
-version = "0.32.0"
+version = "0.33.0"
 dependencies = [
  "atty 0.2.11 (registry+https://github.com/rust-lang/crates.io-index)",
  "bufstream 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)",
  "bytesize 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
  "clap 2.32.0 (registry+https://github.com/rust-lang/crates.io-index)",
  "core-foundation 0.6.1 (registry+https://github.com/rust-lang/crates.io-index)",
- "crates-io 0.20.0",
+ "crates-io 0.21.0",
  "crossbeam-utils 0.5.0 (registry+https://github.com/rust-lang/crates.io-index)",
  "crypto-hash 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)",
  "curl 0.4.18 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -483,7 +472,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 
 [[package]]
 name = "crates-io"
-version = "0.20.0"
+version = "0.21.0"
 dependencies = [
  "curl 0.4.18 (registry+https://github.com/rust-lang/crates.io-index)",
  "failure 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -785,6 +774,11 @@ dependencies = [
  "winapi 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 
+[[package]]
+name = "fs_extra"
+version = "1.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
 [[package]]
 name = "fst"
 version = "0.3.0"
@@ -996,6 +990,16 @@ name = "itoa"
 version = "0.4.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 
+[[package]]
+name = "jemalloc-sys"
+version = "0.1.8"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "cc 1.0.25 (registry+https://github.com/rust-lang/crates.io-index)",
+ "fs_extra 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "libc 0.2.43 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
 [[package]]
 name = "jobserver"
 version = "0.1.11"
@@ -1784,7 +1788,7 @@ dependencies = [
 name = "rls"
 version = "0.130.5"
 dependencies = [
- "cargo 0.32.0",
+ "cargo 0.33.0",
  "cargo_metadata 0.6.0 (registry+https://github.com/rust-lang/crates.io-index)",
  "clippy_lints 0.0.212",
  "crossbeam-channel 0.2.6 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -2137,11 +2141,13 @@ dependencies = [
  "flate2 1.0.3 (registry+https://github.com/rust-lang/crates.io-index)",
  "log 0.4.5 (registry+https://github.com/rust-lang/crates.io-index)",
  "rustc 0.0.0",
+ "rustc_allocator 0.0.0",
  "rustc_data_structures 0.0.0",
  "rustc_incremental 0.0.0",
- "rustc_metadata_utils 0.0.0",
+ "rustc_metadata 0.0.0",
  "rustc_mir 0.0.0",
  "rustc_target 0.0.0",
+ "serialize 0.0.0",
  "syntax 0.0.0",
  "syntax_pos 0.0.0",
 ]
@@ -2181,6 +2187,7 @@ dependencies = [
  "arena 0.0.0",
  "env_logger 0.5.12 (registry+https://github.com/rust-lang/crates.io-index)",
  "graphviz 0.0.0",
+ "jemalloc-sys 0.1.8 (registry+https://github.com/rust-lang/crates.io-index)",
  "log 0.4.5 (registry+https://github.com/rust-lang/crates.io-index)",
  "rustc 0.0.0",
  "rustc-rayon 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -2284,7 +2291,6 @@ dependencies = [
  "rustc 0.0.0",
  "rustc_data_structures 0.0.0",
  "rustc_errors 0.0.0",
- "rustc_metadata_utils 0.0.0",
  "rustc_target 0.0.0",
  "serialize 0.0.0",
  "syntax 0.0.0",
@@ -2292,15 +2298,6 @@ dependencies = [
  "syntax_pos 0.0.0",
 ]
 
-[[package]]
-name = "rustc_metadata_utils"
-version = "0.0.0"
-dependencies = [
- "rustc 0.0.0",
- "syntax 0.0.0",
- "syntax_pos 0.0.0",
-]
-
 [[package]]
 name = "rustc_mir"
 version = "0.0.0"
@@ -2680,7 +2677,6 @@ name = "std"
 version = "0.0.0"
 dependencies = [
  "alloc 0.0.0",
- "alloc_jemalloc 0.0.0",
  "alloc_system 0.0.0",
  "build_helper 0.1.0",
  "cc 1.0.25 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -3237,6 +3233,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 "checksum foreign-types 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)" = "f6f339eb8adc052cd2ca78910fda869aefa38d22d5cb648e6485e4d3fc06f3b1"
 "checksum foreign-types-shared 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "00b0228411908ca8685dba7fc2cdd70ec9990a6e753e89b6ac91a84c40fbaf4b"
 "checksum fs2 0.4.3 (registry+https://github.com/rust-lang/crates.io-index)" = "9564fc758e15025b46aa6643b1b77d047d1a56a1aea6e01002ac0c7026876213"
+"checksum fs_extra 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "5f2a4a2034423744d2cc7ca2068453168dcdb82c438419e639a26bd87839c674"
 "checksum fst 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "d94485a00b1827b861dd9d1a2cc9764f9044d4c535514c0760a5a2012ef3399f"
 "checksum fuchsia-zircon 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)" = "2e9763c69ebaae630ba35f74888db465e49e259ba1bc0eda7d06f4a067615d82"
 "checksum fuchsia-zircon-sys 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)" = "3dcaa9ae7725d12cdb85b3ad99a434db70b468c09ded17e012d86b5c1010f7a7"
@@ -3259,6 +3256,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 "checksum is-match 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "7e5b386aef33a1c677be65237cb9d32c3f3ef56bd035949710c4bb13083eb053"
 "checksum itertools 0.7.8 (registry+https://github.com/rust-lang/crates.io-index)" = "f58856976b776fedd95533137617a02fb25719f40e7d9b01c7043cd65474f450"
 "checksum itoa 0.4.3 (registry+https://github.com/rust-lang/crates.io-index)" = "1306f3464951f30e30d12373d31c79fbd52d236e5e896fd92f96ec7babbbe60b"
+"checksum jemalloc-sys 0.1.8 (registry+https://github.com/rust-lang/crates.io-index)" = "bfc62c8e50e381768ce8ee0428ee53741929f7ebd73e4d83f669bcf7693e00ae"
 "checksum jobserver 0.1.11 (registry+https://github.com/rust-lang/crates.io-index)" = "60af5f849e1981434e4a31d3d782c4774ae9b434ce55b101a96ecfd09147e8be"
 "checksum json 0.11.13 (registry+https://github.com/rust-lang/crates.io-index)" = "9ad0485404155f45cce53a40d4b2d6ac356418300daed05273d9e26f91c390be"
 "checksum jsonrpc-core 8.0.1 (registry+https://github.com/rust-lang/crates.io-index)" = "ddf83704f4e79979a424d1082dd2c1e52683058056c9280efa19ac5f6bc9033c"
diff --git a/src/bootstrap/bin/rustc.rs b/src/bootstrap/bin/rustc.rs
index b6764c1aaeab6a22c3ef8af857c626b01529a601..afe1b4c42eaef8f198d682539a3a7d60ec7a6625 100644 (file)
@@ -232,7 +232,9 @@ fn main() {
                 // flesh out rpath support more fully in the future.
                 cmd.arg("-Z").arg("osx-rpath-install-name");
                 Some("-Wl,-rpath,@loader_path/../lib")
-            } else if !target.contains("windows") && !target.contains("wasm32") {
+            } else if !target.contains("windows") &&
+                      !target.contains("wasm32") &&
+                      !target.contains("fuchsia") {
                 Some("-Wl,-rpath,$ORIGIN/../lib")
             } else {
                 None
@@ -253,8 +255,15 @@ fn main() {
 
         // When running miri tests, we need to generate MIR for all libraries
         if env::var("TEST_MIRI").ok().map_or(false, |val| val == "true") {
+            // The flags here should be kept in sync with `add_miri_default_args`
+            // in miri's `src/lib.rs`.
             cmd.arg("-Zalways-encode-mir");
-            cmd.arg("-Zmir-emit-validate=1");
+            // These options are preferred by miri, to be able to perform better validation,
+            // but the bootstrap compiler might not understand them.
+            if stage != "0" {
+                cmd.arg("-Zmir-emit-retag");
+                cmd.arg("-Zmir-opt-level=0");
+            }
         }
 
         // Force all crates compiled by this compiler to (a) be unstable and (b)
diff --git a/src/bootstrap/bootstrap.py b/src/bootstrap/bootstrap.py
index b5dc0090c8b9ce2652fe5862ce2d2103b3b06cb4..fdd8784453bdb8c509b8bf4824921e327fb2246e 100644 (file)
@@ -715,11 +715,6 @@ class RustBuild(object):
                 backends = self.get_toml('codegen-backends')
                 if backends is None or not 'emscripten' in backends:
                     continue
-            if module.endswith("jemalloc"):
-                if self.get_toml('use-jemalloc') == 'false':
-                    continue
-                if self.get_toml('jemalloc'):
-                    continue
             if module.endswith("lld"):
                 config = self.get_toml('lld')
                 if config is None or config == 'false':
diff --git a/src/bootstrap/builder.rs b/src/bootstrap/builder.rs
index 5abc0455b587186d67292fee50ade8874e91ee2b..900f336ef8cdc1dd06f8c9bc1dd42c7f225ecf6e 100644 (file)
@@ -130,7 +130,7 @@ fn one<P: Into<PathBuf>>(path: P) -> PathSet {
     fn has(&self, needle: &Path) -> bool {
         match self {
             PathSet::Set(set) => set.iter().any(|p| p.ends_with(needle)),
-            PathSet::Suite(_) => false,
+            PathSet::Suite(suite) => suite.ends_with(needle),
         }
     }
 
@@ -713,7 +713,7 @@ pub fn cargo(
             "build" => self.cargo_out(compiler, mode, target),
 
             // This is the intended out directory for crate documentation.
-            "doc" =>  self.crate_doc_out(target),
+            "doc" | "rustdoc" =>  self.crate_doc_out(target),
 
             _ => self.stage_out(compiler, mode),
         };
@@ -742,7 +742,7 @@ pub fn cargo(
             _ => compile::librustc_stamp(self, cmp, target),
         };
 
-        if cmd == "doc" {
+        if cmd == "doc" || cmd == "rustdoc" {
             if mode == Mode::Rustc || mode == Mode::ToolRustc || mode == Mode::Codegen {
                 // This is the intended out directory for compiler documentation.
                 my_out = self.compiler_doc_out(target);
@@ -882,7 +882,7 @@ pub fn cargo(
             .env("RUSTDOC", self.out.join("bootstrap/debug/rustdoc"))
             .env(
                 "RUSTDOC_REAL",
-                if cmd == "doc" || (cmd == "test" && want_rustdoc) {
+                if cmd == "doc" || cmd == "rustdoc" || (cmd == "test" && want_rustdoc) {
                     self.rustdoc(compiler.host)
                 } else {
                     PathBuf::from("/path/to/nowhere/rustdoc/not/required")
@@ -1849,7 +1849,7 @@ fn test_with_no_doc_stage0() {
         );
 
         // Ensure we don't build any compiler artifacts.
-        assert!(builder.cache.all::<compile::Rustc>().is_empty());
+        assert!(!builder.cache.contains::<compile::Rustc>());
         assert_eq!(
             first(builder.cache.all::<test::Crate>()),
             &[test::Crate {
@@ -1861,4 +1861,34 @@ fn test_with_no_doc_stage0() {
             },]
         );
     }
+
+    #[test]
+    fn test_exclude() {
+        let mut config = configure(&[], &[]);
+        config.exclude = vec![
+            "src/test/run-pass".into(),
+            "src/tools/tidy".into(),
+        ];
+        config.cmd = Subcommand::Test {
+            paths: Vec::new(),
+            test_args: Vec::new(),
+            rustc_args: Vec::new(),
+            fail_fast: true,
+            doc_tests: DocTests::No,
+            bless: false,
+            compare_mode: None,
+        };
+
+        let build = Build::new(config);
+        let builder = Builder::new(&build);
+        builder.run_step_descriptions(&Builder::get_step_descriptions(Kind::Test), &[]);
+
+        // Ensure we have really excluded run-pass & tidy
+        assert!(!builder.cache.contains::<test::RunPass>());
+        assert!(!builder.cache.contains::<test::Tidy>());
+
+        // Ensure other tests are not affected.
+        assert!(builder.cache.contains::<test::RunPassFullDeps>());
+        assert!(builder.cache.contains::<test::RustdocUi>());
+    }
 }
diff --git a/src/bootstrap/cache.rs b/src/bootstrap/cache.rs
index fd9a1be207280f788e98f0bacfebbe861c40679d..0b561a3523f2b483460d8549b75f4913d7a6bcd0 100644 (file)
@@ -286,4 +286,9 @@ pub fn all<S: Ord + Copy + Step>(&mut self) -> Vec<(S, S::Output)> {
         v.sort_by_key(|&(a, _)| a);
         v
     }
+
+    #[cfg(test)]
+    pub fn contains<S: Step>(&self) -> bool {
+        self.0.borrow().contains_key(&TypeId::of::<S>())
+    }
 }
diff --git a/src/bootstrap/channel.rs b/src/bootstrap/channel.rs
index 91bec69cfa401913105aa44418b6b86a88e28fbd..88b6925b2b1e72b445c5cb06fd67e1704ccb19ea 100644 (file)
@@ -24,7 +24,7 @@
 use config::Config;
 
 // The version number
-pub const CFG_RELEASE_NUM: &str = "1.31.0";
+pub const CFG_RELEASE_NUM: &str = "1.32.0";
 
 pub struct GitInfo {
     inner: Option<Info>,
diff --git a/src/bootstrap/compile.rs b/src/bootstrap/compile.rs
index d009d9645c35aa388ebf7f639f533984491b7de5..885ad07e0873604adb3b420b6eff5cee74f6ad98 100644 (file)
@@ -158,16 +158,7 @@ pub fn std_cargo(builder: &Builder,
             .arg("--manifest-path")
             .arg(builder.src.join("src/rustc/compiler_builtins_shim/Cargo.toml"));
     } else {
-        let mut features = builder.std_features();
-
-        // When doing a local rebuild we tell cargo that we're stage1 rather than
-        // stage0. This works fine if the local rust and being-built rust have the
-        // same view of what the default allocator is, but fails otherwise. Since
-        // we don't have a way to express an allocator preference yet, work
-        // around the issue in the case of a local rebuild with jemalloc disabled.
-        if compiler.stage == 0 && builder.local_rebuild && !builder.config.use_jemalloc {
-            features.push_str(" force_alloc_system");
-        }
+        let features = builder.std_features();
 
         if compiler.stage != 0 && builder.config.sanitizers {
             // This variable is used by the sanitizer runtime crates, e.g.
@@ -188,11 +179,6 @@ pub fn std_cargo(builder: &Builder,
             .arg("--manifest-path")
             .arg(builder.src.join("src/libstd/Cargo.toml"));
 
-        if let Some(target) = builder.config.target_config.get(&target) {
-            if let Some(ref jemalloc) = target.jemalloc {
-                cargo.env("JEMALLOC_OVERRIDE", jemalloc);
-            }
-        }
         if target.contains("musl") {
             if let Some(p) = builder.musl_root(target) {
                 cargo.env("MUSL_ROOT", p);
diff --git a/src/bootstrap/config.rs b/src/bootstrap/config.rs
index 3eb6e8d84e877fdfe8a4665b72718a7141b8498d..0f249eee000224a9f0b8d1ff48878dae20e0551b 100644 (file)
@@ -116,6 +116,7 @@ pub struct Config {
     pub hosts: Vec<Interned<String>>,
     pub targets: Vec<Interned<String>>,
     pub local_rebuild: bool,
+    pub jemalloc: bool,
 
     // dist misc
     pub dist_sign_folder: Option<PathBuf>,
@@ -123,8 +124,6 @@ pub struct Config {
     pub dist_gpg_password_file: Option<PathBuf>,
 
     // libstd features
-    pub debug_jemalloc: bool,
-    pub use_jemalloc: bool,
     pub backtrace: bool, // support for RUST_BACKTRACE
     pub wasm_syscall: bool,
 
@@ -166,7 +165,6 @@ pub struct Target {
     pub llvm_config: Option<PathBuf>,
     /// Some(path to FileCheck) if one was specified.
     pub llvm_filecheck: Option<PathBuf>,
-    pub jemalloc: Option<PathBuf>,
     pub cc: Option<PathBuf>,
     pub cxx: Option<PathBuf>,
     pub ar: Option<PathBuf>,
@@ -263,7 +261,7 @@ struct Llvm {
     link_jobs: Option<u32>,
     link_shared: Option<bool>,
     version_suffix: Option<String>,
-    clang_cl: Option<String>
+    clang_cl: Option<String>,
 }
 
 #[derive(Deserialize, Default, Clone)]
@@ -302,8 +300,6 @@ struct Rust {
     debuginfo_only_std: Option<bool>,
     debuginfo_tools: Option<bool>,
     experimental_parallel_queries: Option<bool>,
-    debug_jemalloc: Option<bool>,
-    use_jemalloc: Option<bool>,
     backtrace: Option<bool>,
     default_linker: Option<String>,
     channel: Option<String>,
@@ -329,6 +325,7 @@ struct Rust {
     backtrace_on_ice: Option<bool>,
     verify_llvm_ir: Option<bool>,
     remap_debuginfo: Option<bool>,
+    jemalloc: Option<bool>,
 }
 
 /// TOML representation of how each build target is configured.
@@ -337,7 +334,6 @@ struct Rust {
 struct TomlTarget {
     llvm_config: Option<String>,
     llvm_filecheck: Option<String>,
-    jemalloc: Option<String>,
     cc: Option<String>,
     cxx: Option<String>,
     ar: Option<String>,
@@ -363,7 +359,6 @@ pub fn default_opts() -> Config {
         config.llvm_enabled = true;
         config.llvm_optimize = true;
         config.llvm_version_check = true;
-        config.use_jemalloc = true;
         config.backtrace = true;
         config.rust_optimize = true;
         config.rust_optimize_tests = true;
@@ -499,7 +494,6 @@ pub fn parse(args: &[String]) -> Config {
         let mut debuginfo_only_std = None;
         let mut debuginfo_tools = None;
         let mut debug = None;
-        let mut debug_jemalloc = None;
         let mut debuginfo = None;
         let mut debug_assertions = None;
         let mut optimize = None;
@@ -541,12 +535,11 @@ pub fn parse(args: &[String]) -> Config {
             debuginfo_tools = rust.debuginfo_tools;
             optimize = rust.optimize;
             ignore_git = rust.ignore_git;
-            debug_jemalloc = rust.debug_jemalloc;
             set(&mut config.rust_optimize_tests, rust.optimize_tests);
             set(&mut config.rust_debuginfo_tests, rust.debuginfo_tests);
             set(&mut config.codegen_tests, rust.codegen_tests);
             set(&mut config.rust_rpath, rust.rpath);
-            set(&mut config.use_jemalloc, rust.use_jemalloc);
+            set(&mut config.jemalloc, rust.jemalloc);
             set(&mut config.backtrace, rust.backtrace);
             set(&mut config.channel, rust.channel.clone());
             set(&mut config.rust_dist_src, rust.dist_src);
@@ -596,9 +589,6 @@ pub fn parse(args: &[String]) -> Config {
                 if let Some(ref s) = cfg.llvm_filecheck {
                     target.llvm_filecheck = Some(config.src.join(s));
                 }
-                if let Some(ref s) = cfg.jemalloc {
-                    target.jemalloc = Some(config.src.join(s));
-                }
                 if let Some(ref s) = cfg.android_ndk {
                     target.ndk = Some(config.src.join(s));
                 }
@@ -644,7 +634,6 @@ pub fn parse(args: &[String]) -> Config {
         config.rust_debuginfo_tools = debuginfo_tools.unwrap_or(false);
 
         let default = debug == Some(true);
-        config.debug_jemalloc = debug_jemalloc.unwrap_or(default);
         config.rust_debuginfo = debuginfo.unwrap_or(default);
         config.rust_debug_assertions = debug_assertions.unwrap_or(default);
 
diff --git a/src/bootstrap/configure.py b/src/bootstrap/configure.py
index d5f8d9d27d95632b8d6fc9fc799323fcb5423b8c..5467c9f9d5bf914499deaefcbfa4913c393cdaf5 100755 (executable)
@@ -40,7 +40,7 @@ def v(*args):
     options.append(Option(*args, value=True))
 
 
-o("debug", "rust.debug", "debug mode; disables optimization unless `--enable-optimize` given")
+o("debug", "rust.debug", "enables debugging environment; does not affect optimization of bootstrapped code (use `--disable-optimize` for that)")
 o("docs", "build.docs", "build standard library documentation")
 o("compiler-docs", "build.compiler-docs", "build compiler documentation")
 o("optimize-tests", "rust.optimize-tests", "build tests with optimizations")
@@ -68,6 +68,7 @@ o("cargo-native-static", "build.cargo-native-static", "static native libraries i
 o("profiler", "build.profiler", "build the profiler runtime")
 o("emscripten", None, "compile the emscripten backend as well as LLVM")
 o("full-tools", None, "enable all tools")
+o("lld", "rust.lld", "build lld")
 o("lldb", "rust.lldb", "build lldb")
 o("missing-tools", "dist.missing-tools", "allow failures when building tools")
 
@@ -82,7 +83,6 @@ o("debuginfo", "rust.debuginfo", "build with debugger metadata")
 o("debuginfo-lines", "rust.debuginfo-lines", "build with line number debugger metadata")
 o("debuginfo-only-std", "rust.debuginfo-only-std", "build only libstd with debugging information")
 o("debuginfo-tools", "rust.debuginfo-tools", "build extended tools with debugging information")
-o("debug-jemalloc", "rust.debug-jemalloc", "build jemalloc with --enable-debug --enable-fill")
 v("save-toolstates", "rust.save-toolstates", "save build and test status of external tools into this file")
 
 v("prefix", "install.prefix", "set installation prefix")
@@ -99,7 +99,6 @@ v("llvm-root", None, "set LLVM root")
 v("llvm-config", None, "set path to llvm-config")
 v("llvm-filecheck", None, "set path to LLVM's FileCheck utility")
 v("python", "build.python", "set path to python")
-v("jemalloc-root", None, "set directory where libjemalloc_pic.a is located")
 v("android-cross-path", "target.arm-linux-androideabi.android-ndk",
   "Android NDK standalone path (deprecated)")
 v("i686-linux-android-ndk", "target.i686-linux-android.android-ndk",
@@ -148,7 +147,6 @@ v("default-linker", "rust.default-linker", "the default linker")
 # Many of these are saved below during the "writing configuration" step
 # (others are conditionally saved).
 o("manage-submodules", "build.submodules", "let the build manage the git submodules")
-o("jemalloc", "rust.use-jemalloc", "build liballoc with jemalloc")
 o("full-bootstrap", "build.full-bootstrap", "build three compilers instead of two")
 o("extended", "build.extended", "build an extended rust tool set")
 
@@ -330,8 +328,6 @@ for key in known_args:
         set('target.{}.llvm-config'.format(build()), value)
     elif option.name == 'llvm-filecheck':
         set('target.{}.llvm-filecheck'.format(build()), value)
-    elif option.name == 'jemalloc-root':
-        set('target.{}.jemalloc'.format(build()), value + '/libjemalloc_pic.a')
     elif option.name == 'tools':
         set('build.tools', value.split(','))
     elif option.name == 'host':
index fea6302d0a119e6a5f0916dfefa4628bc040eebe..567b47a70a12cc52143c3c534e2ce45d1dff4529 100644 (file)
@@ -859,7 +859,6 @@ fn run(self, builder: &Builder) -> PathBuf {
             "src/build_helper",
             "src/dlmalloc",
             "src/liballoc",
-            "src/liballoc_jemalloc",
             "src/liballoc_system",
             "src/libbacktrace",
             "src/libcompiler_builtins",
@@ -878,13 +877,12 @@ fn run(self, builder: &Builder) -> PathBuf {
             "src/rustc/dlmalloc_shim",
             "src/libtest",
             "src/libterm",
-            "src/jemalloc",
             "src/libprofiler_builtins",
             "src/stdsimd",
+            "src/libproc_macro",
         ];
         let std_src_dirs_exclude = [
             "src/libcompiler_builtins/compiler-rt/test",
-            "src/jemalloc/test/unit",
         ];
 
         copy_src_dirs(builder, &std_src_dirs[..], &std_src_dirs_exclude[..], &dst_src);
@@ -911,7 +909,7 @@ fn run(self, builder: &Builder) -> PathBuf {
     }
 }
 
-const CARGO_VENDOR_VERSION: &str = "0.1.4";
+const CARGO_VENDOR_VERSION: &str = "0.1.19";
 
 #[derive(Debug, PartialOrd, Ord, Copy, Clone, Hash, PartialEq, Eq)]
 pub struct PlainSourceTarball;
index ed4805b8ea330de7ac5f847d180ab43be19cac09..7623ca1e27ea6cdbc691fa4b91678413f80daec5 100644 (file)
@@ -405,14 +405,15 @@ fn run(self, builder: &Builder) {
             cmd.arg("--html-after-content").arg(&footer)
                .arg("--html-before-content").arg(&version_info)
                .arg("--html-in-header").arg(&favicon)
+               .arg("--markdown-no-toc")
+               .arg("--index-page").arg(&builder.src.join("src/doc/index.md"))
                .arg("--markdown-playground-url")
                .arg("https://play.rust-lang.org/")
                .arg("-o").arg(&out)
                .arg(&path);
 
             if filename == "not_found.md" {
-                cmd.arg("--markdown-no-toc")
-                   .arg("--markdown-css")
+                cmd.arg("--markdown-css")
                    .arg("https://doc.rust-lang.org/rust.css");
             } else {
                 cmd.arg("--markdown-css").arg("rust.css");
@@ -480,23 +481,31 @@ fn run(self, builder: &Builder) {
         // will also directly handle merging.
         let my_out = builder.crate_doc_out(target);
         t!(symlink_dir_force(&builder.config, &my_out, &out_dir));
+        t!(fs::copy(builder.src.join("src/doc/rust.css"), out.join("rust.css")));
 
-        let mut cargo = builder.cargo(compiler, Mode::Std, target, "doc");
-        compile::std_cargo(builder, &compiler, target, &mut cargo);
+        let run_cargo_rustdoc_for = |package: &str| {
+            let mut cargo = builder.cargo(compiler, Mode::Std, target, "rustdoc");
+            compile::std_cargo(builder, &compiler, target, &mut cargo);
 
-        // Keep a whitelist so we do not build internal stdlib crates, these will be
-        // build by the rustc step later if enabled.
-        cargo.arg("--no-deps");
-        for krate in &["alloc", "core", "std"] {
-            cargo.arg("-p").arg(krate);
+            // Keep a whitelist so we do not build internal stdlib crates; these will be
+            // built by the rustc step later if enabled.
+            cargo.arg("-Z").arg("unstable-options")
+                 .arg("-p").arg(package);
             // Create all crate output directories first to make sure rustdoc uses
             // relative links.
             // FIXME: Cargo should probably do this itself.
-            t!(fs::create_dir_all(out_dir.join(krate)));
+            t!(fs::create_dir_all(out_dir.join(package)));
+            cargo.arg("--")
+                 .arg("--markdown-css").arg("rust.css")
+                 .arg("--markdown-no-toc")
+                 .arg("--index-page").arg(&builder.src.join("src/doc/index.md"));
+
+            builder.run(&mut cargo);
+            builder.cp_r(&my_out, &out);
+        };
+        for krate in &["alloc", "core", "std"] {
+            run_cargo_rustdoc_for(krate);
         }
-
-        builder.run(&mut cargo);
-        builder.cp_r(&my_out, &out);
     }
 }
 
index ba601249ea895fb7f43afd08322d06270f111288..76697e482d3a8b898e23380777f6752a3c5f6c16 100644 (file)
@@ -516,12 +516,6 @@ fn clear_if_dirty(&self, dir: &Path, input: &Path) -> bool {
     fn std_features(&self) -> String {
         let mut features = "panic-unwind".to_string();
 
-        if self.config.debug_jemalloc {
-            features.push_str(" debug-jemalloc");
-        }
-        if self.config.use_jemalloc {
-            features.push_str(" jemalloc");
-        }
         if self.config.backtrace {
             features.push_str(" backtrace");
         }
@@ -537,8 +531,8 @@ fn std_features(&self) -> String {
     /// Get the space-separated set of activated features for the compiler.
     fn rustc_features(&self) -> String {
         let mut features = String::new();
-        if self.config.use_jemalloc {
-            features.push_str(" jemalloc");
+        if self.config.jemalloc {
+            features.push_str("jemalloc");
         }
         features
     }
@@ -791,7 +785,7 @@ fn cflags(&self, target: Interned<String>, which: GitRepo) -> Vec<String> {
         // If we're compiling on macOS then we add a few unconditional flags
         // indicating that we want libc++ (more filled out than libstdc++) and
         // we want to compile for 10.7. This way we can ensure that
-        // LLVM/jemalloc/etc are all properly compiled.
+        // LLVM/etc are all properly compiled.
         if target.contains("apple-darwin") {
             base.push("-stdlib=libc++".into());
         }
@@ -843,7 +837,8 @@ fn linker(&self, target: Interned<String>) -> Option<&Path> {
         } else if target != self.config.build &&
                   !target.contains("msvc") &&
                   !target.contains("emscripten") &&
-                  !target.contains("wasm32") {
+                  !target.contains("wasm32") &&
+                  !target.contains("fuchsia") {
             Some(self.cc(target))
         } else {
             None
index bcf2f6a675e02715340fcd22a0350c9d264703ef..862fbbf1f286bce28a9d4e65415e3e7b3aeceed0 100644 (file)
@@ -85,7 +85,12 @@ check-stage2-T-arm-linux-androideabi-H-x86_64-unknown-linux-gnu:
 check-stage2-T-x86_64-unknown-linux-musl-H-x86_64-unknown-linux-gnu:
        $(Q)$(BOOTSTRAP) test --target x86_64-unknown-linux-musl
 
-TESTS_IN_2 := src/test/run-pass src/test/compile-fail src/test/run-pass-fulldeps
+TESTS_IN_2 := \
+       src/test/ui \
+       src/test/run-pass \
+       src/test/compile-fail \
+       src/test/run-pass-fulldeps \
+       src/tools/linkchecker
 
 appveyor-subset-1:
        $(Q)$(BOOTSTRAP) test $(TESTS_IN_2:%=--exclude %)
index 2eb11b06e4e08acaa0e614495643c50b9f2b3fb1..15d3bccba09cc6e448b63240d0cefc0259a79460 100644 (file)
@@ -152,12 +152,6 @@ pub fn check(build: &mut Build) {
         if !build.config.dry_run {
             cmd_finder.must_have(build.cxx(*host).unwrap());
         }
-
-        // The msvc hosts don't use jemalloc, turn it off globally to
-        // avoid packaging the dummy liballoc_jemalloc on that platform.
-        if host.contains("msvc") {
-            build.config.use_jemalloc = false;
-        }
     }
 
     // Externally configured LLVM requires FileCheck to exist
index e411e0d17342ef66e092c6aaead0bd95e1510349..e55773011df8eb41f157e3959fbbc5254ac33308 100644 (file)
@@ -1504,8 +1504,7 @@ impl Step for CrateNotDefault {
     type Output = ();
 
     fn should_run(run: ShouldRun) -> ShouldRun {
-        run.path("src/liballoc_jemalloc")
-            .path("src/librustc_asan")
+        run.path("src/librustc_asan")
             .path("src/librustc_lsan")
             .path("src/librustc_msan")
             .path("src/librustc_tsan")
@@ -1522,7 +1521,6 @@ fn make_run(run: RunConfig) {
             target: run.target,
             test_kind,
             krate: match run.path {
-                _ if run.path.ends_with("src/liballoc_jemalloc") => "alloc_jemalloc",
                 _ if run.path.ends_with("src/librustc_asan") => "rustc_asan",
                 _ if run.path.ends_with("src/librustc_lsan") => "rustc_lsan",
                 _ if run.path.ends_with("src/librustc_msan") => "rustc_msan",
@@ -1561,7 +1559,6 @@ fn should_run(mut run: ShouldRun) -> ShouldRun {
         run = run.krate("test");
         for krate in run.builder.in_tree_crates("std") {
             if krate.is_local(&run.builder)
-                && !krate.name.contains("jemalloc")
                 && !(krate.name.starts_with("rustc_") && krate.name.ends_with("san"))
                 && krate.name != "dlmalloc"
             {
diff --git a/src/ci/docker/disabled/dist-powerpcspe-linux/Dockerfile b/src/ci/docker/disabled/dist-powerpcspe-linux/Dockerfile
new file mode 100644 (file)
index 0000000..3227819
--- /dev/null
@@ -0,0 +1,26 @@
+FROM ubuntu:16.04
+
+RUN apt-get update && apt-get install -y --no-install-recommends \
+  g++ \
+  make \
+  file \
+  curl \
+  ca-certificates \
+  python2.7 \
+  git \
+  cmake \
+  sudo \
+  gdb \
+  xz-utils \
+  g++-powerpc-linux-gnuspe \
+  libssl-dev \
+  pkg-config
+
+
+COPY scripts/sccache.sh /scripts/
+RUN sh /scripts/sccache.sh
+
+ENV HOSTS=powerpc-unknown-linux-gnuspe
+
+ENV RUST_CONFIGURE_ARGS --enable-extended --disable-docs
+ENV SCRIPT python2.7 ../x.py dist --host $HOSTS --target $HOSTS
index d99e409e42671f3acafbecc4f52c8726fbf0df48..8df49f364a372eb50d6b9edbd7d9e850652ffc37 100644 (file)
@@ -98,7 +98,8 @@ ENV RUST_CONFIGURE_ARGS \
       --enable-sanitizers \
       --enable-profiler \
       --set target.i686-unknown-linux-gnu.linker=clang \
-      --build=i686-unknown-linux-gnu
+      --build=i686-unknown-linux-gnu \
+      --set rust.jemalloc
 ENV SCRIPT python2.7 ../x.py dist --build $HOSTS --host $HOSTS --target $HOSTS
 ENV CARGO_TARGET_I686_UNKNOWN_LINUX_GNU_LINKER=clang
 
index e2484b7224b26fb96decc794a86464ee6fb3a495..c7e6af28f9d4fbb385c4d8f21394eea12f492ac8 100644 (file)
@@ -22,7 +22,8 @@ RUN apt-get update && apt-get install -y --no-install-recommends \
   libssl-dev \
   pkg-config \
   gcc-arm-none-eabi \
-  libnewlib-arm-none-eabi
+  libnewlib-arm-none-eabi \
+  qemu-system-arm
 
 WORKDIR /build
 
index 7adb32efa1d414ef8daa1f1030fd7d92bcd51f24..944c2a51b8d1fb4c1939328438cd96df4e498091 100644 (file)
@@ -47,6 +47,17 @@ ENV \
     CC_x86_64_sun_solaris=x86_64-sun-solaris2.10-gcc \
     CXX_x86_64_sun_solaris=x86_64-sun-solaris2.10-g++
 
+ENV CARGO_TARGET_X86_64_FUCHSIA_AR /usr/local/bin/llvm-ar
+ENV CARGO_TARGET_X86_64_FUCHSIA_RUSTFLAGS \
+-C link-arg=--sysroot=/usr/local/x86_64-fuchsia \
+-C link-arg=-L/usr/local/x86_64-fuchsia/lib \
+-C link-arg=-L/usr/local/lib/x86_64-fuchsia/lib
+ENV CARGO_TARGET_AARCH64_FUCHSIA_AR /usr/local/bin/llvm-ar
+ENV CARGO_TARGET_AARCH64_FUCHSIA_RUSTFLAGS \
+-C link-arg=--sysroot=/usr/local/aarch64-fuchsia \
+-C link-arg=-L/usr/local/aarch64-fuchsia/lib \
+-C link-arg=-L/usr/local/lib/aarch64-fuchsia/lib
+
 ENV TARGETS=x86_64-fuchsia
 ENV TARGETS=$TARGETS,aarch64-fuchsia
 ENV TARGETS=$TARGETS,sparcv9-sun-solaris
@@ -55,5 +66,5 @@ ENV TARGETS=$TARGETS,x86_64-sun-solaris
 ENV TARGETS=$TARGETS,x86_64-unknown-linux-gnux32
 ENV TARGETS=$TARGETS,x86_64-unknown-cloudabi
 
-ENV RUST_CONFIGURE_ARGS --enable-extended --disable-docs
+ENV RUST_CONFIGURE_ARGS --enable-extended --enable-lld --disable-docs
 ENV SCRIPT python2.7 ../x.py dist --target $TARGETS
index 8696f72e0e3881004fcaa77b4a576c714a1d30ed..0a2dae72f73828d7497625f6de96c1efdf2a67c8 100644 (file)
@@ -101,7 +101,8 @@ ENV RUST_CONFIGURE_ARGS \
       --set target.x86_64-unknown-linux-gnu.linker=clang \
       --set target.x86_64-unknown-linux-gnu.ar=/rustroot/bin/llvm-ar \
       --set target.x86_64-unknown-linux-gnu.ranlib=/rustroot/bin/llvm-ranlib \
-      --set llvm.thin-lto=true
+      --set llvm.thin-lto=true \
+      --set rust.jemalloc
 ENV SCRIPT python2.7 ../x.py dist --host $HOSTS --target $HOSTS
 ENV CARGO_TARGET_X86_64_UNKNOWN_LINUX_GNU_LINKER=clang
 
index 444a8fe5da87ada787299721764de0589d7cd722..dd94f2652b4c9a86a9145cec5114dacdaec428d9 100644 (file)
@@ -1,4 +1,4 @@
-FROM ubuntu:16.04
+FROM ubuntu:18.10
 
 RUN apt-get update && apt-get install -y --no-install-recommends \
   g++ \
index 071575b1fc094e7c48de83422ed53e7c6497e165..43cdab27e9daee3d159cb77b270386dffbfc9ffd 100644 (file)
@@ -197,6 +197,22 @@ issue][issue-include].
 [unstable-include]: ../unstable-book/language-features/external-doc.html
 [issue-include]: https://github.com/rust-lang/rust/issues/44732
 
+### Add aliases for an item in documentation search
+
+This feature allows you to add alias(es) to an item when using the `rustdoc` search through the
+`doc(alias)` attribute. Example:
+
+```rust,no_run
+#![feature(doc_alias)]
+
+#[doc(alias = "x")]
+#[doc(alias = "big")]
+pub struct BigX;
+```
+
+Then, when looking for it through the `rustdoc` search, if you enter "x" or
+"big", search will show the `BigX` struct first.
+
 ## Unstable command-line arguments
 
 These features are enabled by passing a command-line flag to Rustdoc, but the flags in question are
@@ -374,18 +390,15 @@ This is an internal flag intended for the standard library and compiler that app
 allows `rustdoc` to be able to generate documentation for the compiler crates and the standard
 library, as an equivalent command-line argument is provided to `rustc` when building those crates.
 
-### `doc_alias` feature
+### `--index-page`: provide a top-level landing page for docs
 
-This feature allows you to add alias(es) to an item when using the `rustdoc` search through the
-`doc(alias)` attribute. Example:
+This feature allows you to generate an index page from a given Markdown file. A good example of it
+is the [Rust documentation index](https://doc.rust-lang.org/index.html).
 
-```rust,no_run
-#![feature(doc_alias)]
+With this, you'll have a landing page for your crates which you can customize as much as you want.
 
-#[doc(alias = "x")]
-#[doc(alias = "big")]
-pub struct BigX;
-```
+Using the `index-page` option also enables the `enable-index-page` option.
 
-Then, when looking for it through the `rustdoc` search, if you enter "x" or
-"big", search will show the `BigX` struct first.
+### `--enable-index-page`: generate a default index page for docs
+
+This feature allows the generation of a default index-page which lists the generated crates.
diff --git a/src/doc/unstable-book/src/language-features/trait-alias.md b/src/doc/unstable-book/src/language-features/trait-alias.md
new file mode 100644 (file)
index 0000000..4f2db04
--- /dev/null
@@ -0,0 +1,34 @@
+# `trait_alias`
+
+The tracking issue for this feature is: [#41517]
+
+[#41517]: https://github.com/rust-lang/rust/issues/41517
+
+------------------------
+
+The `trait_alias` feature adds support for trait aliases. These allow aliases
+to be created for one or more traits (currently just a single regular trait plus
+any number of auto-traits), and used wherever traits would normally be used as
+either bounds or trait objects.
+
+```rust
+#![feature(trait_alias)]
+
+trait Foo = std::fmt::Debug + Send;
+trait Bar = Foo + Sync;
+
+// Use trait alias as bound on type parameter.
+fn foo<T: Foo>(v: &T) {
+    println!("{:?}", v);
+}
+
+pub fn main() {
+    foo(&1);
+
+    // Use trait alias for trait objects.
+    let a: &Bar = &123;
+    println!("{:?}", a);
+    let b = Box::new(456) as Box<dyn Foo>;
+    println!("{:?}", b);
+}
+```
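
(The new page above shows aliases used as bounds and as trait objects. As a further hedged sketch, not part of the patch, aliases can also fix associated types, assuming the nightly `trait_alias` feature accepts associated-type bindings as its RFC describes.)

```rust
#![feature(trait_alias)]

// Alias that fixes an associated type; it can then be used as a plain bound.
trait U32Iter = Iterator<Item = u32>;

fn sum_all<I: U32Iter>(iter: I) -> u32 {
    iter.sum()
}

fn main() {
    assert_eq!(sum_all(vec![1u32, 2, 3].into_iter()), 6);
}
```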
index b1252f386df364dcd18708dc5bc6139fe465e0dd..e6d5ef1a23ff786890e14b28c81029ff1ce20ea6 100755 (executable)
@@ -9,7 +9,6 @@
 # except according to those terms.
 
 import gdb
-import re
 import sys
 import debugger_pretty_printers_common as rustpp
 
diff --git a/src/jemalloc b/src/jemalloc
deleted file mode 160000 (submodule)
index 1f5a287..0000000
+++ /dev/null
@@ -1 +0,0 @@
-Subproject commit 1f5a28755e301ac581e2048011e4e0ff3da482ef
index f989e701913a573750578d0a32bab6a6598f60fc..74354f605e5370d1824dc58d03d0b4266e4ce23a 100644 (file)
@@ -77,7 +77,7 @@
 use core::marker::{Unpin, Unsize};
 use core::mem;
 use core::pin::Pin;
-use core::ops::{CoerceUnsized, Deref, DerefMut, Generator, GeneratorState};
+use core::ops::{CoerceUnsized, DispatchFromDyn, Deref, DerefMut, Generator, GeneratorState};
 use core::ptr::{self, NonNull, Unique};
 use core::task::{LocalWaker, Poll};
 
@@ -696,6 +696,9 @@ extern "rust-call" fn call_once(self, args: A) -> R {
 #[unstable(feature = "coerce_unsized", issue = "27732")]
 impl<T: ?Sized + Unsize<U>, U: ?Sized> CoerceUnsized<Box<U>> for Box<T> {}
 
+#[unstable(feature = "dispatch_from_dyn", issue = "0")]
+impl<T: ?Sized + Unsize<U>, U: ?Sized> DispatchFromDyn<Box<U>> for Box<T> {}
+
 #[stable(feature = "box_slice_clone", since = "1.3.0")]
 impl<T: Clone> Clone for Box<[T]> {
     fn clone(&self) -> Self {
index 84ca7c4fec9e5d4f154b4fe98a6aaea545887844..ad6e594c884af9674c7311ffcd0200bac1fd79da 100644 (file)
@@ -86,7 +86,7 @@
 #![feature(box_syntax)]
 #![feature(cfg_target_has_atomic)]
 #![feature(coerce_unsized)]
-#![cfg_attr(stage0, feature(min_const_fn))]
+#![feature(dispatch_from_dyn)]
 #![feature(core_intrinsics)]
 #![feature(custom_attribute)]
 #![feature(dropck_eyepatch)]
index 40bb2faa3623b0f357b983fd3ac0d7d3a9461eca..45f035ad04f8ff812de4c3bd4e1862bb5d47c65a 100644 (file)
 use core::marker::{Unpin, Unsize, PhantomData};
 use core::mem::{self, align_of_val, forget, size_of_val};
 use core::ops::Deref;
-use core::ops::CoerceUnsized;
+use core::ops::{CoerceUnsized, DispatchFromDyn};
 use core::pin::Pin;
 use core::ptr::{self, NonNull};
 use core::convert::From;
@@ -282,7 +282,7 @@ struct RcBox<T: ?Sized> {
 /// type `T`.
 ///
 /// [get_mut]: #method.get_mut
-#[cfg_attr(all(not(stage0), not(test)), lang = "rc")]
+#[cfg_attr(not(test), lang = "rc")]
 #[stable(feature = "rust1", since = "1.0.0")]
 pub struct Rc<T: ?Sized> {
     ptr: NonNull<RcBox<T>>,
@@ -297,6 +297,9 @@ impl<T: ?Sized> !marker::Sync for Rc<T> {}
 #[unstable(feature = "coerce_unsized", issue = "27732")]
 impl<T: ?Sized + Unsize<U>, U: ?Sized> CoerceUnsized<Rc<U>> for Rc<T> {}
 
+#[unstable(feature = "dispatch_from_dyn", issue = "0")]
+impl<T: ?Sized + Unsize<U>, U: ?Sized> DispatchFromDyn<Rc<U>> for Rc<T> {}
+
 impl<T> Rc<T> {
     /// Constructs a new `Rc<T>`.
     ///
@@ -665,15 +668,17 @@ pub fn downcast<T: Any>(self) -> Result<Rc<T>, Rc<dyn Any>> {
 impl<T: ?Sized> Rc<T> {
     // Allocates an `RcBox<T>` with sufficient space for an unsized value
     unsafe fn allocate_for_ptr(ptr: *const T) -> *mut RcBox<T> {
-        // Create a fake RcBox to find allocation size and alignment
-        let fake_ptr = ptr as *mut RcBox<T>;
-
-        let layout = Layout::for_value(&*fake_ptr);
+        // Calculate layout using the given value.
+        // Previously, layout was calculated on the expression
+        // `&*(ptr as *const RcBox<T>)`, but this created a misaligned
+        // reference (see #54908).
+        let (layout, _) = Layout::new::<RcBox<()>>()
+            .extend(Layout::for_value(&*ptr)).unwrap();
 
         let mem = Global.alloc(layout)
             .unwrap_or_else(|_| handle_alloc_error(layout));
 
-        // Initialize the real RcBox
+        // Initialize the RcBox
         let inner = set_data_ptr(ptr as *mut T, mem.as_ptr() as *mut u8) as *mut RcBox<T>;
 
         ptr::write(&mut (*inner).strong, Cell::new(1));
@@ -1176,6 +1181,9 @@ impl<T: ?Sized> !marker::Sync for Weak<T> {}
 #[unstable(feature = "coerce_unsized", issue = "27732")]
 impl<T: ?Sized + Unsize<U>, U: ?Sized> CoerceUnsized<Weak<U>> for Weak<T> {}
 
+#[unstable(feature = "dispatch_from_dyn", issue = "0")]
+impl<T: ?Sized + Unsize<U>, U: ?Sized> DispatchFromDyn<Weak<U>> for Weak<T> {}
+
 impl<T> Weak<T> {
     /// Constructs a new `Weak<T>`, without allocating any memory.
     /// Calling [`upgrade`][Weak::upgrade] on the return value always gives [`None`].
index 35935861fb18297ef11646d7e26feada3b2c8356..2c396b3b06bda34351046554a73a3f41e79844c9 100644 (file)
@@ -25,7 +25,7 @@
 use core::intrinsics::abort;
 use core::mem::{self, align_of_val, size_of_val};
 use core::ops::Deref;
-use core::ops::CoerceUnsized;
+use core::ops::{CoerceUnsized, DispatchFromDyn};
 use core::pin::Pin;
 use core::ptr::{self, NonNull};
 use core::marker::{Unpin, Unsize, PhantomData};
 /// counting in general.
 ///
 /// [rc_examples]: ../../std/rc/index.html#examples
-#[cfg_attr(all(not(stage0), not(test)), lang = "arc")]
+#[cfg_attr(not(test), lang = "arc")]
 #[stable(feature = "rust1", since = "1.0.0")]
 pub struct Arc<T: ?Sized> {
     ptr: NonNull<ArcInner<T>>,
@@ -214,6 +214,9 @@ unsafe impl<T: ?Sized + Sync + Send> Sync for Arc<T> {}
 #[unstable(feature = "coerce_unsized", issue = "27732")]
 impl<T: ?Sized + Unsize<U>, U: ?Sized> CoerceUnsized<Arc<U>> for Arc<T> {}
 
+#[unstable(feature = "dispatch_from_dyn", issue = "0")]
+impl<T: ?Sized + Unsize<U>, U: ?Sized> DispatchFromDyn<Arc<U>> for Arc<T> {}
+
 /// `Weak` is a version of [`Arc`] that holds a non-owning reference to the
 /// managed value. The value is accessed by calling [`upgrade`] on the `Weak`
 /// pointer, which returns an [`Option`]`<`[`Arc`]`<T>>`.
@@ -254,6 +257,8 @@ unsafe impl<T: ?Sized + Sync + Send> Sync for Weak<T> {}
 
 #[unstable(feature = "coerce_unsized", issue = "27732")]
 impl<T: ?Sized + Unsize<U>, U: ?Sized> CoerceUnsized<Weak<U>> for Weak<T> {}
+#[unstable(feature = "dispatch_from_dyn", issue = "0")]
+impl<T: ?Sized + Unsize<U>, U: ?Sized> DispatchFromDyn<Weak<U>> for Weak<T> {}
 
 #[stable(feature = "arc_weak", since = "1.4.0")]
 impl<T: ?Sized + fmt::Debug> fmt::Debug for Weak<T> {
@@ -566,15 +571,17 @@ pub fn ptr_eq(this: &Self, other: &Self) -> bool {
 impl<T: ?Sized> Arc<T> {
     // Allocates an `ArcInner<T>` with sufficient space for an unsized value
     unsafe fn allocate_for_ptr(ptr: *const T) -> *mut ArcInner<T> {
-        // Create a fake ArcInner to find allocation size and alignment
-        let fake_ptr = ptr as *mut ArcInner<T>;
-
-        let layout = Layout::for_value(&*fake_ptr);
+        // Calculate layout using the given value.
+        // Previously, layout was calculated on the expression
+        // `&*(ptr as *const ArcInner<T>)`, but this created a misaligned
+        // reference (see #54908).
+        let (layout, _) = Layout::new::<ArcInner<()>>()
+            .extend(Layout::for_value(&*ptr)).unwrap();
 
         let mem = Global.alloc(layout)
             .unwrap_or_else(|_| handle_alloc_error(layout));
 
-        // Initialize the real ArcInner
+        // Initialize the ArcInner
         let inner = set_data_ptr(ptr as *mut T, mem.as_ptr() as *mut u8) as *mut ArcInner<T>;
 
         ptr::write(&mut (*inner).strong, atomic::AtomicUsize::new(1));
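
(The `Rc` and `Arc` hunks above both replace the fake-box pointer cast with a header-then-value layout computation to avoid the misaligned reference from #54908. Below is a self-contained sketch of that computation with a stand-in `Header` instead of the private `RcBox`/`ArcInner`, assuming a toolchain where `Layout::extend` is available.)

```rust
use std::alloc::Layout;
use std::mem;

// Stand-in for the reference-count header that precedes the value.
#[allow(dead_code)]
#[repr(C)]
struct Header {
    strong: usize,
    weak: usize,
}

fn main() {
    let value = [1u64, 2, 3]; // the (possibly unsized) payload
    // Header layout first, then extend it with the payload layout; `extend`
    // returns the combined layout and the byte offset of the payload.
    let (layout, offset) = Layout::new::<Header>()
        .extend(Layout::for_value(&value))
        .unwrap();
    assert_eq!(offset, mem::size_of::<Header>());
    assert!(layout.size() >= offset + mem::size_of_val(&value));
    // No misaligned `&RcBox<T>`-style reference is ever created along the way.
}
```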
index 6fa88ce969a0e4866941b80337bd2c5841d31a06..b6be38107da7f33eefd4607b3cd6c3f145a5de7f 100644 (file)
@@ -12,9 +12,6 @@
 use std::alloc::{Global, Alloc, Layout};
 
 /// https://github.com/rust-lang/rust/issues/45955
-///
-/// Note that `#[global_allocator]` is not used,
-/// so `liballoc_jemalloc` is linked (on some platforms).
 #[test]
 fn alloc_system_overaligned_request() {
     check_overalign_requests(System)
index 6d1cfb10859d4bc4ee433c9247c66dbcbad1c088..3294837cb91c4ff2fa92defccc08e410ccae84a4 100644 (file)
@@ -11,7 +11,6 @@
 #![feature(allocator_api)]
 #![feature(alloc_system)]
 #![feature(box_syntax)]
-#![cfg_attr(stage0, feature(min_const_fn))]
 #![feature(drain_filter)]
 #![feature(exact_size_is_empty)]
 #![feature(pattern)]
diff --git a/src/liballoc_jemalloc/Cargo.toml b/src/liballoc_jemalloc/Cargo.toml
deleted file mode 100644 (file)
index 7986d5d..0000000
+++ /dev/null
@@ -1,24 +0,0 @@
-[package]
-authors = ["The Rust Project Developers"]
-name = "alloc_jemalloc"
-version = "0.0.0"
-build = "build.rs"
-links = "jemalloc"
-
-[lib]
-name = "alloc_jemalloc"
-path = "lib.rs"
-test = false
-doc = false
-
-[dependencies]
-core = { path = "../libcore" }
-libc = { path = "../rustc/libc_shim" }
-compiler_builtins = { path = "../rustc/compiler_builtins_shim" }
-
-[build-dependencies]
-build_helper = { path = "../build_helper" }
-cc = "1.0.1"
-
-[features]
-debug = []
diff --git a/src/liballoc_jemalloc/build.rs b/src/liballoc_jemalloc/build.rs
deleted file mode 100644 (file)
index fbda425..0000000
+++ /dev/null
@@ -1,151 +0,0 @@
-// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-#![deny(warnings)]
-
-extern crate build_helper;
-extern crate cc;
-
-use std::env;
-use std::path::PathBuf;
-use std::process::Command;
-use build_helper::{run, native_lib_boilerplate};
-
-fn main() {
-    // FIXME: This is a hack to support building targets that don't
-    // support jemalloc alongside hosts that do. The jemalloc build is
-    // controlled by a feature of the std crate, and if that feature
-    // changes between targets, it invalidates the fingerprint of
-    // std's build script (this is a cargo bug); so we must ensure
-    // that the feature set used by std is the same across all
-    // targets, which means we have to build the alloc_jemalloc crate
-    // for targets like emscripten, even if we don't use it.
-    let target = env::var("TARGET").expect("TARGET was not set");
-    let host = env::var("HOST").expect("HOST was not set");
-    if target.contains("bitrig") || target.contains("emscripten") || target.contains("fuchsia") ||
-       target.contains("msvc") || target.contains("openbsd") || target.contains("redox") ||
-       target.contains("rumprun") || target.contains("wasm32") {
-        println!("cargo:rustc-cfg=dummy_jemalloc");
-        return;
-    }
-
-    // CloudABI ships with a copy of jemalloc that has been patched to
-    // work well with sandboxing. Don't attempt to build our own copy,
-    // as it won't build.
-    if target.contains("cloudabi") {
-        return;
-    }
-
-    if target.contains("android") {
-        println!("cargo:rustc-link-lib=gcc");
-    } else if !target.contains("windows") && !target.contains("musl") {
-        println!("cargo:rustc-link-lib=pthread");
-    }
-
-    if let Some(jemalloc) = env::var_os("JEMALLOC_OVERRIDE") {
-        let jemalloc = PathBuf::from(jemalloc);
-        println!("cargo:rustc-link-search=native={}",
-                 jemalloc.parent().unwrap().display());
-        let stem = jemalloc.file_stem().unwrap().to_str().unwrap();
-        let name = jemalloc.file_name().unwrap().to_str().unwrap();
-        let kind = if name.ends_with(".a") {
-            "static"
-        } else {
-            "dylib"
-        };
-        println!("cargo:rustc-link-lib={}={}", kind, &stem[3..]);
-        return;
-    }
-
-    let link_name = if target.contains("windows") { "jemalloc" } else { "jemalloc_pic" };
-    let native = match native_lib_boilerplate("jemalloc", "jemalloc", link_name, "lib") {
-        Ok(native) => native,
-        _ => return,
-    };
-
-    let mut cmd = Command::new("sh");
-    cmd.arg(native.src_dir.join("configure")
-                          .to_str()
-                          .unwrap()
-                          .replace("C:\\", "/c/")
-                          .replace("\\", "/"))
-       .current_dir(&native.out_dir)
-       // jemalloc generates Makefile deps using GCC's "-MM" flag. This means
-       // that GCC will run the preprocessor, and only the preprocessor, over
-       // jemalloc's source files. If we don't specify CPPFLAGS, then at least
-       // on ARM that step fails with a "Missing implementation for 32-bit
-       // atomic operations" error. This is because no "-march" flag will be
-       // passed to GCC, and then GCC won't define the
-       // "__GCC_HAVE_SYNC_COMPARE_AND_SWAP_4" macro that jemalloc needs to
-       // select an atomic operation implementation.
-       .env("CPPFLAGS", env::var_os("CFLAGS").unwrap_or_default());
-
-    if target.contains("ios") {
-        cmd.arg("--disable-tls");
-    } else if target.contains("android") {
-        // We force android to have prefixed symbols because apparently
-        // replacement of the libc allocator doesn't quite work. When this was
-        // tested (unprefixed symbols), it was found that the `realpath`
-        // function in libc would allocate with libc malloc (not jemalloc
-        // malloc), and then the standard library would free with jemalloc free,
-        // causing a segfault.
-        //
-        // If the test suite passes, however, without symbol prefixes then we
-        // should be good to go!
-        cmd.arg("--with-jemalloc-prefix=je_");
-        cmd.arg("--disable-tls");
-    } else if target.contains("dragonfly") || target.contains("musl") {
-        cmd.arg("--with-jemalloc-prefix=je_");
-    }
-
-    if cfg!(feature = "debug") {
-        // Enable jemalloc assertions.
-        cmd.arg("--enable-debug");
-    }
-
-    cmd.arg(format!("--host={}", build_helper::gnu_target(&target)));
-    cmd.arg(format!("--build={}", build_helper::gnu_target(&host)));
-
-    // for some reason, jemalloc configure doesn't detect this value
-    // automatically for this target
-    if target == "sparc64-unknown-linux-gnu" {
-        cmd.arg("--with-lg-quantum=4");
-    }
-
-    run(&mut cmd);
-
-    let mut make = Command::new(build_helper::make(&host));
-    make.current_dir(&native.out_dir)
-        .arg("build_lib_static");
-
-    // These are intended for mingw32-make which we don't use
-    if cfg!(windows) {
-        make.env_remove("MAKEFLAGS").env_remove("MFLAGS");
-    }
-
-    // mingw make seems... buggy? unclear...
-    if !host.contains("windows") {
-        make.arg("-j")
-            .arg(env::var("NUM_JOBS").expect("NUM_JOBS was not set"));
-    }
-
-    run(&mut make);
-
-    // The pthread_atfork symbols is used by jemalloc on android but the really
-    // old android we're building on doesn't have them defined, so just make
-    // sure the symbols are available.
-    if target.contains("androideabi") {
-        println!("cargo:rerun-if-changed=pthread_atfork_dummy.c");
-        cc::Build::new()
-            .flag("-fvisibility=hidden")
-            .file("pthread_atfork_dummy.c")
-            .compile("pthread_atfork_dummy");
-    }
-}
diff --git a/src/liballoc_jemalloc/lib.rs b/src/liballoc_jemalloc/lib.rs
deleted file mode 100644 (file)
index 0065e84..0000000
+++ /dev/null
@@ -1,127 +0,0 @@
-// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-#![no_std]
-#![allow(unused_attributes)]
-#![unstable(feature = "alloc_jemalloc",
-            reason = "implementation detail of std, does not provide any public API",
-            issue = "0")]
-#![feature(core_intrinsics)]
-#![feature(libc)]
-#![feature(linkage)]
-#![feature(nll)]
-#![feature(staged_api)]
-#![feature(rustc_attrs)]
-#![cfg_attr(dummy_jemalloc, allow(dead_code, unused_extern_crates))]
-#![cfg_attr(not(dummy_jemalloc), feature(allocator_api))]
-#![rustc_alloc_kind = "exe"]
-
-extern crate libc;
-
-#[cfg(not(dummy_jemalloc))]
-pub use contents::*;
-#[cfg(not(dummy_jemalloc))]
-mod contents {
-    use libc::{c_int, c_void, size_t};
-
-    // Note that the symbols here are prefixed by default on macOS and Windows (we
-    // don't explicitly request it), and on Android and DragonFly we explicitly
-    // request it as unprefixing cause segfaults (mismatches in allocators).
-    extern "C" {
-        #[cfg_attr(any(target_os = "macos", target_os = "android", target_os = "ios",
-                       target_os = "dragonfly", target_os = "windows", target_env = "musl"),
-                   link_name = "je_mallocx")]
-        fn mallocx(size: size_t, flags: c_int) -> *mut c_void;
-        #[cfg_attr(any(target_os = "macos", target_os = "android", target_os = "ios",
-                       target_os = "dragonfly", target_os = "windows", target_env = "musl"),
-                   link_name = "je_calloc")]
-        fn calloc(size: size_t, flags: c_int) -> *mut c_void;
-        #[cfg_attr(any(target_os = "macos", target_os = "android", target_os = "ios",
-                       target_os = "dragonfly", target_os = "windows", target_env = "musl"),
-                   link_name = "je_rallocx")]
-        fn rallocx(ptr: *mut c_void, size: size_t, flags: c_int) -> *mut c_void;
-        #[cfg_attr(any(target_os = "macos", target_os = "android", target_os = "ios",
-                       target_os = "dragonfly", target_os = "windows", target_env = "musl"),
-                   link_name = "je_sdallocx")]
-        fn sdallocx(ptr: *mut c_void, size: size_t, flags: c_int);
-    }
-
-    const MALLOCX_ZERO: c_int = 0x40;
-
-    // The minimum alignment guaranteed by the architecture. This value is used to
-    // add fast paths for low alignment values.
-    #[cfg(all(any(target_arch = "arm",
-                  target_arch = "mips",
-                  target_arch = "powerpc")))]
-    const MIN_ALIGN: usize = 8;
-    #[cfg(all(any(target_arch = "x86",
-                  target_arch = "x86_64",
-                  target_arch = "aarch64",
-                  target_arch = "powerpc64",
-                  target_arch = "mips64",
-                  target_arch = "s390x",
-                  target_arch = "sparc64")))]
-    const MIN_ALIGN: usize = 16;
-
-    // MALLOCX_ALIGN(a) macro
-    fn mallocx_align(a: usize) -> c_int {
-        a.trailing_zeros() as c_int
-    }
-
-    fn align_to_flags(align: usize, size: usize) -> c_int {
-        if align <= MIN_ALIGN && align <= size {
-            0
-        } else {
-            mallocx_align(align)
-        }
-    }
-
-    // for symbol names src/librustc/middle/allocator.rs
-    // for signatures src/librustc_allocator/lib.rs
-
-    // linkage directives are provided as part of the current compiler allocator
-    // ABI
-
-    #[rustc_std_internal_symbol]
-    pub unsafe extern fn __rde_alloc(size: usize, align: usize) -> *mut u8 {
-        let flags = align_to_flags(align, size);
-        let ptr = mallocx(size as size_t, flags) as *mut u8;
-        ptr
-    }
-
-    #[rustc_std_internal_symbol]
-    pub unsafe extern fn __rde_dealloc(ptr: *mut u8,
-                                       size: usize,
-                                       align: usize) {
-        let flags = align_to_flags(align, size);
-        sdallocx(ptr as *mut c_void, size, flags);
-    }
-
-    #[rustc_std_internal_symbol]
-    pub unsafe extern fn __rde_realloc(ptr: *mut u8,
-                                       _old_size: usize,
-                                       align: usize,
-                                       new_size: usize) -> *mut u8 {
-        let flags = align_to_flags(align, new_size);
-        let ptr = rallocx(ptr as *mut c_void, new_size, flags) as *mut u8;
-        ptr
-    }
-
-    #[rustc_std_internal_symbol]
-    pub unsafe extern fn __rde_alloc_zeroed(size: usize, align: usize) -> *mut u8 {
-        let ptr = if align <= MIN_ALIGN && align <= size {
-            calloc(size as size_t, 1) as *mut u8
-        } else {
-            let flags = align_to_flags(align, size) | MALLOCX_ZERO;
-            mallocx(size as size_t, flags) as *mut u8
-        };
-        ptr
-    }
-}
diff --git a/src/liballoc_jemalloc/pthread_atfork_dummy.c b/src/liballoc_jemalloc/pthread_atfork_dummy.c
deleted file mode 100644 (file)
index 4e3df0a..0000000
+++ /dev/null
@@ -1,16 +0,0 @@
-// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-// See comments in build.rs for why this exists
-int pthread_atfork(void* prefork,
-                   void* postfork_parent,
-                   void* postfork_child) {
-  return 0;
-}
index 15283036bb49522d35aec181804831dba3fb4dce..0e6887a508223331201cca2a0363240d99a312dd 100644 (file)
@@ -405,6 +405,7 @@ fn drop(&mut self) {
 
     #[cfg(not(target_feature = "atomics"))]
     mod lock {
+        #[inline]
         pub fn lock() {} // no atomics, no threads, that's easy!
     }
 }
index 0703bfa72524e01e414477657ca9b64794c5c1c3..939cbca6e9d829265d6cf006d3532142a4061cd3 160000 (submodule)
@@ -1 +1 @@
-Subproject commit 0703bfa72524e01e414477657ca9b64794c5c1c3
+Subproject commit 939cbca6e9d829265d6cf006d3532142a4061cd3
index ec7d366c3f5ce90fbd24cb2d3be6af7a2d352feb..689cf319bd750bd1c710e135c4ee0bd2f54d157d 100644 (file)
 ///
 /// # Examples
 ///
-/// Here you can see how using `Cell<T>` allows to use mutable field inside
-/// immutable struct (which is also called 'interior mutability').
+/// In this example, you can see that `Cell<T>` enables mutation inside an
+/// immutable struct. In other words, it enables "interior mutability".
 ///
 /// ```
 /// use std::cell::Cell;
 ///
 /// let new_value = 100;
 ///
-/// // ERROR, because my_struct is immutable
+/// // ERROR: `my_struct` is immutable
 /// // my_struct.regular_field = new_value;
 ///
-/// // WORKS, although `my_struct` is immutable, field `special_field` is mutable because it is Cell
+/// // WORKS: although `my_struct` is immutable, `special_field` is a `Cell`,
+/// // which can always be mutated
 /// my_struct.special_field.set(new_value);
 /// assert_eq!(my_struct.special_field.get(), new_value);
 /// ```
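
(The cell.rs hunk above rewrites the doc comment but only shows a fragment of its example. Here is a runnable version of what the new wording describes; the struct definition sits outside the hunk, so its exact shape here is an assumption.)

```rust
use std::cell::Cell;

struct SomeStruct {
    regular_field: u8,
    special_field: Cell<u8>,
}

fn main() {
    let my_struct = SomeStruct {
        regular_field: 0,
        special_field: Cell::new(1),
    };
    let new_value = 100;

    // ERROR: `my_struct` is immutable
    // my_struct.regular_field = new_value;

    // WORKS: although `my_struct` is immutable, `special_field` is a `Cell`,
    // which can always be mutated
    my_struct.special_field.set(new_value);
    assert_eq!(my_struct.special_field.get(), new_value);
    assert_eq!(my_struct.regular_field, 0);
}
```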
index ab36e29b1e1d4238cc4778754e605e0178c92d40..638acebd617bf873033e13d019a88f05074118c0 100644 (file)
@@ -76,7 +76,7 @@
 /// }
 ///
 /// impl Default for Kind {
-///     fn default() -> Kind { Kind::A }
+///     fn default() -> Self { Kind::A }
 /// }
 /// ```
 ///
@@ -118,7 +118,7 @@ pub trait Default: Sized {
     /// }
     ///
     /// impl Default for Kind {
-    ///     fn default() -> Kind { Kind::A }
+    ///     fn default() -> Self { Kind::A }
     /// }
     /// ```
     #[stable(feature = "rust1", since = "1.0.0")]
index 06727d8292d367e422f89d87542a82413d80cc0e..1bbc7892c6bef664ca4c91524252b436d1c36083 100644 (file)
@@ -88,7 +88,6 @@
 #![feature(doc_spotlight)]
 #![feature(extern_types)]
 #![feature(fundamental)]
-#![cfg_attr(stage0, feature(impl_header_lifetime_elision))]
 #![feature(intrinsics)]
 #![feature(lang_items)]
 #![feature(link_llvm_intrinsics)]
index 22016e8cf41742612d1a1e62665ac03f93aec27f..1d0b194487e68d9bc9d65ef6841421544dffe5c9 100644 (file)
@@ -285,7 +285,7 @@ pub fn forget<T>(t: T) {
 /// [alignment]: ./fn.align_of.html
 #[inline]
 #[stable(feature = "rust1", since = "1.0.0")]
-#[cfg_attr(not(stage0), rustc_promotable)]
+#[rustc_promotable]
 pub const fn size_of<T>() -> usize {
     intrinsics::size_of::<T>()
 }
@@ -377,7 +377,7 @@ pub fn min_align_of_val<T: ?Sized>(val: &T) -> usize {
 /// ```
 #[inline]
 #[stable(feature = "rust1", since = "1.0.0")]
-#[cfg_attr(not(stage0), rustc_promotable)]
+#[rustc_promotable]
 pub const fn align_of<T>() -> usize {
     intrinsics::min_align_of::<T>()
 }
@@ -458,19 +458,10 @@ pub fn align_of_val<T: ?Sized>(val: &T) -> usize {
 #[inline]
 #[stable(feature = "needs_drop", since = "1.21.0")]
 #[rustc_const_unstable(feature = "const_needs_drop")]
-#[cfg(not(stage0))]
 pub const fn needs_drop<T>() -> bool {
     intrinsics::needs_drop::<T>()
 }
 
-#[inline]
-#[stable(feature = "needs_drop", since = "1.21.0")]
-#[cfg(stage0)]
-/// Ceci n'est pas la documentation
-pub fn needs_drop<T>() -> bool {
-    unsafe { intrinsics::needs_drop::<T>() }
-}
-
 /// Creates a value whose bytes are all zero.
 ///
 /// This has the same effect as allocating space with
index 118e75e1ee70469bb275185650ab904a0effc251..436cd1fc0572837e408407e9d0bb7b310eea92b2 100644 (file)
@@ -10,7 +10,7 @@
 
 //! Exposes the NonZero lang item which provides optimization hints.
 
-use ops::CoerceUnsized;
+use ops::{CoerceUnsized, DispatchFromDyn};
 
 /// A wrapper type for raw pointers and integers that will never be
 /// NULL or 0 that might allow certain optimizations.
@@ -20,3 +20,5 @@
 pub(crate) struct NonZero<T>(pub(crate) T);
 
 impl<T: CoerceUnsized<U>, U> CoerceUnsized<NonZero<U>> for NonZero<T> {}
+
+impl<T: DispatchFromDyn<U>, U> DispatchFromDyn<NonZero<U>> for NonZero<T> {}
index 772502cc800e868c4ca230328d548a0aabb21402..c6cbeea5a0ea61788979c6689e5a910bf9bd2b3e 100644 (file)
@@ -216,7 +216,7 @@ macro_rules! int_impl {
 ```"),
             #[stable(feature = "rust1", since = "1.0.0")]
             #[inline]
-            #[cfg_attr(not(stage0), rustc_promotable)]
+            #[rustc_promotable]
             pub const fn min_value() -> Self {
                 !0 ^ ((!0 as $UnsignedT) >> 1) as Self
             }
@@ -235,7 +235,7 @@ pub const fn min_value() -> Self {
 ```"),
             #[stable(feature = "rust1", since = "1.0.0")]
             #[inline]
-            #[cfg_attr(not(stage0), rustc_promotable)]
+            #[rustc_promotable]
             pub const fn max_value() -> Self {
                 !Self::min_value()
             }
index ce4f45762de48d3571a3f8e68660262c6d844a75..edfa6df11aceb6e958273084271ccf87762d20e9 100644 (file)
 
 #[unstable(feature = "coerce_unsized", issue = "27732")]
 pub use self::unsize::CoerceUnsized;
+
+#[unstable(feature = "dispatch_from_dyn", issue = "0")]
+pub use self::unsize::DispatchFromDyn;
index fd3e50998fe8c5e5909f7d4fb60f63a7c1f96551..6cfb1005325ba644223311f773bcdcfc4cd0033c 100644 (file)
@@ -391,7 +391,7 @@ impl<Idx> RangeInclusive<Idx> {
     /// ```
     #[stable(feature = "inclusive_range_methods", since = "1.27.0")]
     #[inline]
-    #[cfg_attr(not(stage0), rustc_promotable)]
+    #[rustc_promotable]
     pub const fn new(start: Idx, end: Idx) -> Self {
         Self { start, end, is_empty: None }
     }
index da72f3748425d9f1e77b54c837dddf9a339b9a28..4d9a40a1b9089b078ee08dc453d9d16870ada099 100644 (file)
@@ -43,7 +43,7 @@
 /// [nomicon-coerce]: ../../nomicon/coercions.html
 #[unstable(feature = "coerce_unsized", issue = "27732")]
 #[lang = "coerce_unsized"]
-pub trait CoerceUnsized<T> {
+pub trait CoerceUnsized<T: ?Sized> {
     // Empty.
 }
 
@@ -77,3 +77,37 @@ impl<T: ?Sized+Unsize<U>, U: ?Sized> CoerceUnsized<*const U> for *mut T {}
 // *const T -> *const U
 #[unstable(feature = "coerce_unsized", issue = "27732")]
 impl<T: ?Sized+Unsize<U>, U: ?Sized> CoerceUnsized<*const U> for *const T {}
+
+
+/// This is used for object safety, to check that a method's receiver type can be dispatched on.
+///
+/// example impl:
+///
+/// ```
+/// # #![feature(dispatch_from_dyn, unsize)]
+/// # use std::{ops::DispatchFromDyn, marker::Unsize};
+/// # struct Rc<T: ?Sized>(::std::rc::Rc<T>);
+/// impl<T: ?Sized, U: ?Sized> DispatchFromDyn<Rc<U>> for Rc<T>
+/// where
+///     T: Unsize<U>,
+/// {}
+/// ```
+#[unstable(feature = "dispatch_from_dyn", issue = "0")]
+#[cfg_attr(not(stage0), lang = "dispatch_from_dyn")]
+pub trait DispatchFromDyn<T> {
+    // Empty.
+}
+
+// &T -> &U
+#[unstable(feature = "dispatch_from_dyn", issue = "0")]
+impl<'a, T: ?Sized+Unsize<U>, U: ?Sized> DispatchFromDyn<&'a U> for &'a T {}
+// &mut T -> &mut U
+#[unstable(feature = "dispatch_from_dyn", issue = "0")]
+impl<'a, T: ?Sized+Unsize<U>, U: ?Sized> DispatchFromDyn<&'a mut U> for &'a mut T {}
+// *const T -> *const U
+#[unstable(feature = "dispatch_from_dyn", issue = "0")]
+impl<T: ?Sized+Unsize<U>, U: ?Sized> DispatchFromDyn<*const U> for *const T {}
+// *mut T -> *mut U
+#[unstable(feature = "dispatch_from_dyn", issue = "0")]
+impl<T: ?Sized+Unsize<U>, U: ?Sized> DispatchFromDyn<*mut U> for *mut T {}
+
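
(To make the purpose of `DispatchFromDyn` concrete: together with the `Box`/`Rc`/`Arc`/`Pin` impls added elsewhere in this patch, it is the object-safety check that lets non-reference receivers be dispatched dynamically. A hedged sketch, written against a later toolchain where `self: Rc<Self>` receivers compile without extra feature gates:)

```rust
use std::rc::Rc;

trait Greet {
    // An `Rc<Self>` receiver; calling it through `Rc<dyn Greet>` is exactly
    // the coercion that DispatchFromDyn is checking for.
    fn greet(self: Rc<Self>) -> String;
}

struct World;

impl Greet for World {
    fn greet(self: Rc<Self>) -> String {
        "hello, world".to_string()
    }
}

fn main() {
    let greeter: Rc<dyn Greet> = Rc::new(World);
    assert_eq!(greeter.greet(), "hello, world");
}
```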
index a03c080fb3f3418fad92dba5a06c1390f168de3a..68de82d294529e5cefa0d3eae3159a00674849f3 100644 (file)
@@ -91,7 +91,7 @@
 
 use fmt;
 use marker::Sized;
-use ops::{Deref, DerefMut, CoerceUnsized};
+use ops::{Deref, DerefMut, CoerceUnsized, DispatchFromDyn};
 
 #[doc(inline)]
 pub use marker::Unpin;
@@ -324,5 +324,11 @@ impl<P, U> CoerceUnsized<Pin<U>> for Pin<P>
     P: CoerceUnsized<U>,
 {}
 
+#[unstable(feature = "pin", issue = "49150")]
+impl<'a, P, U> DispatchFromDyn<Pin<U>> for Pin<P>
+where
+    P: DispatchFromDyn<U>,
+{}
+
 #[unstable(feature = "pin", issue = "49150")]
 impl<P> Unpin for Pin<P> {}
index 0fe82b93ff7a14bca4c961ca03d8ab68dd38ca44..62ccf6c865cd96735a92bf30969e5be5de08f815 100644 (file)
@@ -75,7 +75,7 @@
 
 use convert::From;
 use intrinsics;
-use ops::CoerceUnsized;
+use ops::{CoerceUnsized, DispatchFromDyn};
 use fmt;
 use hash;
 use marker::{PhantomData, Unsize};
@@ -209,7 +209,7 @@ pub unsafe fn drop_in_place<T: ?Sized>(to_drop: *mut T) {
 /// ```
 #[inline]
 #[stable(feature = "rust1", since = "1.0.0")]
-#[cfg_attr(not(stage0), rustc_promotable)]
+#[rustc_promotable]
 pub const fn null<T>() -> *const T { 0 as *const T }
 
 /// Creates a null mutable raw pointer.
@@ -224,7 +224,7 @@ pub const fn null<T>() -> *const T { 0 as *const T }
 /// ```
 #[inline]
 #[stable(feature = "rust1", since = "1.0.0")]
-#[cfg_attr(not(stage0), rustc_promotable)]
+#[rustc_promotable]
 pub const fn null_mut<T>() -> *mut T { 0 as *mut T }
 
 /// Swaps the values at two mutable locations of the same type, without
@@ -2795,6 +2795,9 @@ impl<T: ?Sized> Copy for Unique<T> { }
 #[unstable(feature = "ptr_internals", issue = "0")]
 impl<T: ?Sized, U: ?Sized> CoerceUnsized<Unique<U>> for Unique<T> where T: Unsize<U> { }
 
+#[unstable(feature = "ptr_internals", issue = "0")]
+impl<T: ?Sized, U: ?Sized> DispatchFromDyn<Unique<U>> for Unique<T> where T: Unsize<U> { }
+
 #[unstable(feature = "ptr_internals", issue = "0")]
 impl<T: ?Sized> fmt::Pointer for Unique<T> {
     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
@@ -2951,6 +2954,9 @@ impl<T: ?Sized> Copy for NonNull<T> { }
 #[unstable(feature = "coerce_unsized", issue = "27732")]
 impl<T: ?Sized, U: ?Sized> CoerceUnsized<NonNull<U>> for NonNull<T> where T: Unsize<U> { }
 
+#[unstable(feature = "dispatch_from_dyn", issue = "0")]
+impl<T: ?Sized, U: ?Sized> DispatchFromDyn<NonNull<U>> for NonNull<T> where T: Unsize<U> { }
+
 #[stable(feature = "nonnull", since = "1.25.0")]
 impl<T: ?Sized> fmt::Debug for NonNull<T> {
     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
index f130dbfb0e3dfea57259d2f89df93f6be11c00ab..56d3b429fdb44ef92779180d8f71e35875e3736c 100644 (file)
@@ -124,6 +124,7 @@ pub fn spin_loop_hint() {
 /// [`bool`]: ../../../std/primitive.bool.html
 #[cfg(target_has_atomic = "8")]
 #[stable(feature = "rust1", since = "1.0.0")]
+#[repr(C, align(1))]
 pub struct AtomicBool {
     v: UnsafeCell<u8>,
 }
@@ -147,6 +148,9 @@ unsafe impl Sync for AtomicBool {}
 /// This type has the same in-memory representation as a `*mut T`.
 #[cfg(target_has_atomic = "ptr")]
 #[stable(feature = "rust1", since = "1.0.0")]
+#[cfg_attr(target_pointer_width = "16", repr(C, align(2)))]
+#[cfg_attr(target_pointer_width = "32", repr(C, align(4)))]
+#[cfg_attr(target_pointer_width = "64", repr(C, align(8)))]
 pub struct AtomicPtr<T> {
     p: UnsafeCell<*mut T>,
 }
@@ -1088,6 +1092,7 @@ macro_rules! atomic_int {
      $s_int_type:expr, $int_ref:expr,
      $extra_feature:expr,
      $min_fn:ident, $max_fn:ident,
+     $align:expr,
      $int_type:ident $atomic_type:ident $atomic_init:ident) => {
         /// An integer type which can be safely shared between threads.
         ///
@@ -1101,6 +1106,7 @@ macro_rules! atomic_int {
         ///
         /// [module-level documentation]: index.html
         #[$stable]
+        #[repr(C, align($align))]
         pub struct $atomic_type {
             v: UnsafeCell<$int_type>,
         }
@@ -1831,6 +1837,7 @@ pub fn fetch_min(&self, val: $int_type, order: Ordering) -> $int_type {
     "i8", "../../../std/primitive.i8.html",
     "#![feature(integer_atomics)]\n\n",
     atomic_min, atomic_max,
+    1,
     i8 AtomicI8 ATOMIC_I8_INIT
 }
 #[cfg(target_has_atomic = "8")]
@@ -1844,6 +1851,7 @@ pub fn fetch_min(&self, val: $int_type, order: Ordering) -> $int_type {
     "u8", "../../../std/primitive.u8.html",
     "#![feature(integer_atomics)]\n\n",
     atomic_umin, atomic_umax,
+    1,
     u8 AtomicU8 ATOMIC_U8_INIT
 }
 #[cfg(target_has_atomic = "16")]
@@ -1857,6 +1865,7 @@ pub fn fetch_min(&self, val: $int_type, order: Ordering) -> $int_type {
     "i16", "../../../std/primitive.i16.html",
     "#![feature(integer_atomics)]\n\n",
     atomic_min, atomic_max,
+    2,
     i16 AtomicI16 ATOMIC_I16_INIT
 }
 #[cfg(target_has_atomic = "16")]
@@ -1870,6 +1879,7 @@ pub fn fetch_min(&self, val: $int_type, order: Ordering) -> $int_type {
     "u16", "../../../std/primitive.u16.html",
     "#![feature(integer_atomics)]\n\n",
     atomic_umin, atomic_umax,
+    2,
     u16 AtomicU16 ATOMIC_U16_INIT
 }
 #[cfg(target_has_atomic = "32")]
@@ -1883,6 +1893,7 @@ pub fn fetch_min(&self, val: $int_type, order: Ordering) -> $int_type {
     "i32", "../../../std/primitive.i32.html",
     "#![feature(integer_atomics)]\n\n",
     atomic_min, atomic_max,
+    4,
     i32 AtomicI32 ATOMIC_I32_INIT
 }
 #[cfg(target_has_atomic = "32")]
@@ -1896,6 +1907,7 @@ pub fn fetch_min(&self, val: $int_type, order: Ordering) -> $int_type {
     "u32", "../../../std/primitive.u32.html",
     "#![feature(integer_atomics)]\n\n",
     atomic_umin, atomic_umax,
+    4,
     u32 AtomicU32 ATOMIC_U32_INIT
 }
 #[cfg(target_has_atomic = "64")]
@@ -1909,6 +1921,7 @@ pub fn fetch_min(&self, val: $int_type, order: Ordering) -> $int_type {
     "i64", "../../../std/primitive.i64.html",
     "#![feature(integer_atomics)]\n\n",
     atomic_min, atomic_max,
+    8,
     i64 AtomicI64 ATOMIC_I64_INIT
 }
 #[cfg(target_has_atomic = "64")]
@@ -1922,8 +1935,49 @@ pub fn fetch_min(&self, val: $int_type, order: Ordering) -> $int_type {
     "u64", "../../../std/primitive.u64.html",
     "#![feature(integer_atomics)]\n\n",
     atomic_umin, atomic_umax,
+    8,
     u64 AtomicU64 ATOMIC_U64_INIT
 }
+#[cfg(all(not(stage0), target_has_atomic = "128"))]
+atomic_int! {
+    unstable(feature = "integer_atomics", issue = "32976"),
+    unstable(feature = "integer_atomics", issue = "32976"),
+    unstable(feature = "integer_atomics", issue = "32976"),
+    unstable(feature = "integer_atomics", issue = "32976"),
+    unstable(feature = "integer_atomics", issue = "32976"),
+    unstable(feature = "integer_atomics", issue = "32976"),
+    "i128", "../../../std/primitive.i128.html",
+    "#![feature(integer_atomics)]\n\n",
+    atomic_min, atomic_max,
+    16,
+    i128 AtomicI128 ATOMIC_I128_INIT
+}
+#[cfg(all(not(stage0), target_has_atomic = "128"))]
+atomic_int! {
+    unstable(feature = "integer_atomics", issue = "32976"),
+    unstable(feature = "integer_atomics", issue = "32976"),
+    unstable(feature = "integer_atomics", issue = "32976"),
+    unstable(feature = "integer_atomics", issue = "32976"),
+    unstable(feature = "integer_atomics", issue = "32976"),
+    unstable(feature = "integer_atomics", issue = "32976"),
+    "u128", "../../../std/primitive.u128.html",
+    "#![feature(integer_atomics)]\n\n",
+    atomic_umin, atomic_umax,
+    16,
+    u128 AtomicU128 ATOMIC_U128_INIT
+}
+#[cfg(target_pointer_width = "16")]
+macro_rules! ptr_width {
+    () => { 2 }
+}
+#[cfg(target_pointer_width = "32")]
+macro_rules! ptr_width {
+    () => { 4 }
+}
+#[cfg(target_pointer_width = "64")]
+macro_rules! ptr_width {
+    () => { 8 }
+}
 #[cfg(target_has_atomic = "ptr")]
 atomic_int!{
     stable(feature = "rust1", since = "1.0.0"),
@@ -1935,6 +1989,7 @@ pub fn fetch_min(&self, val: $int_type, order: Ordering) -> $int_type {
     "isize", "../../../std/primitive.isize.html",
     "",
     atomic_min, atomic_max,
+    ptr_width!(),
     isize AtomicIsize ATOMIC_ISIZE_INIT
 }
 #[cfg(target_has_atomic = "ptr")]
@@ -1948,6 +2003,7 @@ pub fn fetch_min(&self, val: $int_type, order: Ordering) -> $int_type {
     "usize", "../../../std/primitive.usize.html",
     "",
     atomic_umin, atomic_umax,
+    ptr_width!(),
     usize AtomicUsize ATOMIC_USIZE_INIT
 }
 
index d340924aab1db7b602ef44070b11a1f7eaa3327f..5ac89912268986d0e75f7940f333e289dac51d1d 100644 (file)
@@ -19,7 +19,6 @@
 #![feature(flt2dec)]
 #![feature(fmt_internals)]
 #![feature(hashmap_internals)]
-#![cfg_attr(stage0, feature(impl_header_lifetime_elision))]
 #![feature(pattern)]
 #![feature(range_is_empty)]
 #![feature(raw)]
index 81ae8ade12d93146afc32e15859fc96dcded4551..cfbd431aef0a00e87d5d9f0bf82cfbf15cf90919 100644 (file)
@@ -109,7 +109,7 @@ pub fn new(secs: u64, nanos: u32) -> Duration {
     /// ```
     #[stable(feature = "duration", since = "1.3.0")]
     #[inline]
-    #[cfg_attr(not(stage0), rustc_promotable)]
+    #[rustc_promotable]
     pub const fn from_secs(secs: u64) -> Duration {
         Duration { secs, nanos: 0 }
     }
@@ -128,7 +128,7 @@ pub const fn from_secs(secs: u64) -> Duration {
     /// ```
     #[stable(feature = "duration", since = "1.3.0")]
     #[inline]
-    #[cfg_attr(not(stage0), rustc_promotable)]
+    #[rustc_promotable]
     pub const fn from_millis(millis: u64) -> Duration {
         Duration {
             secs: millis / MILLIS_PER_SEC,
@@ -150,7 +150,7 @@ pub const fn from_millis(millis: u64) -> Duration {
     /// ```
     #[stable(feature = "duration_from_micros", since = "1.27.0")]
     #[inline]
-    #[cfg_attr(not(stage0), rustc_promotable)]
+    #[rustc_promotable]
     pub const fn from_micros(micros: u64) -> Duration {
         Duration {
             secs: micros / MICROS_PER_SEC,
@@ -172,7 +172,7 @@ pub const fn from_micros(micros: u64) -> Duration {
     /// ```
     #[stable(feature = "duration_extras", since = "1.27.0")]
     #[inline]
-    #[cfg_attr(not(stage0), rustc_promotable)]
+    #[rustc_promotable]
     pub const fn from_nanos(nanos: u64) -> Duration {
         Duration {
             secs: nanos / (NANOS_PER_SEC as u64),
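
(With the stage0 `cfg_attr` shims gone, `rustc_promotable` applies unconditionally to these constructors, and to `mem::size_of`, `ptr::null`, and `RangeInclusive::new` above. That attribute is what lets a call like `Duration::from_millis(250)` be promoted to a `'static` temporary when only a reference to it is needed. A small hedged illustration, assuming a toolchain where these `const fn`s are stable:)

```rust
use std::time::Duration;

// In a const item the call is simply evaluated at compile time.
const TIMEOUT: &Duration = &Duration::from_secs(5);

fn main() {
    // In a let binding, promotion of the `rustc_promotable` call is what
    // allows borrowing the temporary for `'static`.
    let forever: &'static Duration = &Duration::from_millis(250);
    assert_eq!(TIMEOUT.as_secs(), 5);
    assert_eq!(forever.as_millis(), 250);
}
```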
index c3c65816b2615309e7080a187f043f2c16be7cab..e558d945516713d2f690d4b698cbadc6a3c6a886 100644 (file)
@@ -1754,7 +1754,6 @@ fn lower_path_extra(
         &mut self,
         def: Def,
         p: &Path,
-        ident: Option<Ident>,
         param_mode: ParamMode,
         explicit_owner: Option<NodeId>,
     ) -> hir::Path {
@@ -1773,7 +1772,6 @@ fn lower_path_extra(
                         explicit_owner,
                     )
                 })
-                .chain(ident.map(|ident| hir::PathSegment::from_ident(ident)))
                 .collect(),
             span: p.span,
         }
@@ -1781,7 +1779,7 @@ fn lower_path_extra(
 
     fn lower_path(&mut self, id: NodeId, p: &Path, param_mode: ParamMode) -> hir::Path {
         let def = self.expect_full_def(id);
-        self.lower_path_extra(def, p, None, param_mode, None)
+        self.lower_path_extra(def, p, param_mode, None)
     }
 
     fn lower_path_segment(
@@ -3014,7 +3012,7 @@ fn lower_use_tree(
                     self.with_hir_id_owner(new_node_id, |this| {
                         let new_id = this.lower_node_id(new_node_id);
                         let path =
-                            this.lower_path_extra(def, &path, None, ParamMode::Explicit, None);
+                            this.lower_path_extra(def, &path, ParamMode::Explicit, None);
                         let item = hir::ItemKind::Use(P(path), hir::UseKind::Single);
                         let vis_kind = match vis.node {
                             hir::VisibilityKind::Public => hir::VisibilityKind::Public,
@@ -3053,7 +3051,7 @@ fn lower_use_tree(
                 }
 
                 let path =
-                    P(self.lower_path_extra(ret_def, &path, None, ParamMode::Explicit, None));
+                    P(self.lower_path_extra(ret_def, &path, ParamMode::Explicit, None));
                 hir::ItemKind::Use(path, hir::UseKind::Single)
             }
             UseTreeKind::Glob => {
@@ -3140,7 +3138,7 @@ fn lower_use_tree(
                 // the stability of `use a::{};`, to avoid it showing up as
                 // a re-export by accident when `pub`, e.g. in documentation.
                 let def = self.expect_full_def_from_use(id).next().unwrap_or(Def::Err);
-                let path = P(self.lower_path_extra(def, &prefix, None, ParamMode::Explicit, None));
+                let path = P(self.lower_path_extra(def, &prefix, ParamMode::Explicit, None));
                 *vis = respan(prefix.span.shrink_to_lo(), hir::VisibilityKind::Inherited);
                 hir::ItemKind::Use(path, hir::UseKind::ListStem)
             }
@@ -4550,7 +4548,6 @@ fn lower_visibility(
                     path: P(self.lower_path_extra(
                         def,
                         path,
-                        None,
                         ParamMode::Explicit,
                         explicit_owner,
                     )),
@@ -4878,23 +4875,24 @@ fn ty_path(&mut self, id: LoweredNodeId, span: Span, qpath: hir::QPath) -> hir::
         let node = match qpath {
             hir::QPath::Resolved(None, path) => {
                 // Turn trait object paths into `TyKind::TraitObject` instead.
-                if let Def::Trait(_) = path.def {
-                    let principal = hir::PolyTraitRef {
-                        bound_generic_params: hir::HirVec::new(),
-                        trait_ref: hir::TraitRef {
-                            path: path.and_then(|path| path),
-                            ref_id: id.node_id,
-                            hir_ref_id: id.hir_id,
-                        },
-                        span,
-                    };
+                match path.def {
+                    Def::Trait(_) | Def::TraitAlias(_) => {
+                        let principal = hir::PolyTraitRef {
+                            bound_generic_params: hir::HirVec::new(),
+                            trait_ref: hir::TraitRef {
+                                path: path.and_then(|path| path),
+                                ref_id: id.node_id,
+                                hir_ref_id: id.hir_id,
+                            },
+                            span,
+                        };
 
-                    // The original ID is taken by the `PolyTraitRef`,
-                    // so the `Ty` itself needs a different one.
-                    id = self.next_id();
-                    hir::TyKind::TraitObject(hir_vec![principal], self.elided_dyn_bound(span))
-                } else {
-                    hir::TyKind::Path(hir::QPath::Resolved(None, path))
+                        // The original ID is taken by the `PolyTraitRef`,
+                        // so the `Ty` itself needs a different one.
+                        id = self.next_id();
+                        hir::TyKind::TraitObject(hir_vec![principal], self.elided_dyn_bound(span))
+                    }
+                    _ => hir::TyKind::Path(hir::QPath::Resolved(None, path)),
                 }
             }
             _ => hir::TyKind::Path(qpath),
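The ty_path hunk above is what lets a path resolving to Def::TraitAlias lower to a trait object, the same way a plain trait path does. On a nightly compiler with #![feature(trait_alias)], the user-facing shape is roughly the following (illustrative sketch, not taken from the patch's test suite):

#![feature(trait_alias)]

use std::fmt::Display;

// A trait alias for a single trait...
trait Printable = Display;

// ...can now appear where a trait object type is expected: `dyn Printable`
// lowers to the same TraitObject as `dyn Display`.
fn show(x: &dyn Printable) {
    println!("{}", x);
}

fn main() {
    show(&42);
    show(&"hello");
}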
index 7a20146130d9480e640e0d1ba328bd980eed2b38..cf7a7abf95a6c55956d041461c18f04c615dc93d 100644 (file)
@@ -301,9 +301,7 @@ pub fn describe_def(&self, node_id: NodeId) -> Option<Def> {
                     ItemKind::Struct(..) => Some(Def::Struct(def_id())),
                     ItemKind::Union(..) => Some(Def::Union(def_id())),
                     ItemKind::Trait(..) => Some(Def::Trait(def_id())),
-                    ItemKind::TraitAlias(..) => {
-                        bug!("trait aliases are not yet implemented (see issue #41517)")
-                    },
+                    ItemKind::TraitAlias(..) => Some(Def::TraitAlias(def_id())),
                     ItemKind::ExternCrate(_) |
                     ItemKind::Use(..) |
                     ItemKind::ForeignMod(..) |
@@ -1254,7 +1252,7 @@ fn node_id_to_string(map: &Map<'_>, id: NodeId, include_id: bool) -> String {
         Some(Node::MacroDef(_)) => {
             format!("macro {}{}",  path_str(), id_str)
         }
-        Some(Node::Crate) => format!("root_crate"),
+        Some(Node::Crate) => String::from("root_crate"),
         None => format!("unknown node{}", id_str),
     }
 }
index 274a2df283cbd05aa0eac6e978b84ffb044ba21f..a73fe2b8a1ab31e6535a7059bce266a338d6cba1 100644 (file)
@@ -257,9 +257,9 @@ fn hash_stable<W: StableHasherResult>(&self,
             mir::StatementKind::EndRegion(ref region_scope) => {
                 region_scope.hash_stable(hcx, hasher);
             }
-            mir::StatementKind::Validate(ref op, ref places) => {
-                op.hash_stable(hcx, hasher);
-                places.hash_stable(hcx, hasher);
+            mir::StatementKind::Retag { fn_entry, ref place } => {
+                fn_entry.hash_stable(hcx, hasher);
+                place.hash_stable(hcx, hasher);
             }
             mir::StatementKind::AscribeUserType(ref place, ref variance, ref c_ty) => {
                 place.hash_stable(hcx, hasher);
@@ -278,23 +278,6 @@ fn hash_stable<W: StableHasherResult>(&self,
 
 impl_stable_hash_for!(enum mir::FakeReadCause { ForMatchGuard, ForMatchedPlace, ForLet });
 
-impl<'a, 'gcx, T> HashStable<StableHashingContext<'a>>
-    for mir::ValidationOperand<'gcx, T>
-    where T: HashStable<StableHashingContext<'a>>
-{
-    fn hash_stable<W: StableHasherResult>(&self,
-                                          hcx: &mut StableHashingContext<'a>,
-                                          hasher: &mut StableHasher<W>)
-    {
-        self.place.hash_stable(hcx, hasher);
-        self.ty.hash_stable(hcx, hasher);
-        self.re.hash_stable(hcx, hasher);
-        self.mutbl.hash_stable(hcx, hasher);
-    }
-}
-
-impl_stable_hash_for!(enum mir::ValidationOp { Acquire, Release, Suspend(region_scope) });
-
 impl<'a, 'gcx> HashStable<StableHashingContext<'a>> for mir::Place<'gcx> {
     fn hash_stable<W: StableHasherResult>(&self,
                                           hcx: &mut StableHashingContext<'a>,
index 8f837327ddb151d5dbf2891fa8c7f0abeb8f83a9..7d25ecedb4e046de7389bc32359f47dbd45dfc5f 100644 (file)
@@ -100,9 +100,6 @@ fn hash_stable<W: StableHasherResult>(&self,
             ty::ReEmpty => {
                 // No variant fields to hash for these ...
             }
-            ty::ReCanonical(c) => {
-                c.hash_stable(hcx, hasher);
-            }
             ty::ReLateBound(db, ty::BrAnon(i)) => {
                 db.hash_stable(hcx, hasher);
                 i.hash_stable(hcx, hasher);
@@ -147,7 +144,7 @@ fn hash_stable<W: StableHasherResult>(&self,
     }
 }
 
-impl<'gcx> HashStable<StableHashingContext<'gcx>> for ty::BoundTyIndex {
+impl<'gcx> HashStable<StableHashingContext<'gcx>> for ty::BoundVar {
     #[inline]
     fn hash_stable<W: StableHasherResult>(&self,
                                           hcx: &mut StableHashingContext<'gcx>,
@@ -852,6 +849,9 @@ fn hash_stable<W: StableHasherResult>(&self,
             Param(param_ty) => {
                 param_ty.hash_stable(hcx, hasher);
             }
+            Bound(bound_ty) => {
+                bound_ty.hash_stable(hcx, hasher);
+            }
             Foreign(def_id) => {
                 def_id.hash_stable(hcx, hasher);
             }
@@ -869,7 +869,6 @@ fn hash_stable<W: StableHasherResult>(&self,
     FreshTy(a),
     FreshIntTy(a),
     FreshFloatTy(a),
-    BoundTy(a),
 });
 
 impl<'a, 'gcx> HashStable<StableHashingContext<'a>>
@@ -1119,6 +1118,7 @@ fn hash_stable<W: StableHasherResult>(&self,
             &VtableClosure(ref table_closure) => table_closure.hash_stable(hcx, hasher),
             &VtableFnPointer(ref table_fn_pointer) => table_fn_pointer.hash_stable(hcx, hasher),
             &VtableGenerator(ref table_generator) => table_generator.hash_stable(hcx, hasher),
+            &VtableTraitAlias(ref table_alias) => table_alias.hash_stable(hcx, hasher),
         }
     }
 }
@@ -1227,9 +1227,25 @@ fn hash_stable<W: StableHasherResult>(&self,
     }
 }
 
+impl<'a, 'gcx, N> HashStable<StableHashingContext<'a>>
+for traits::VtableTraitAliasData<'gcx, N> where N: HashStable<StableHashingContext<'a>> {
+    fn hash_stable<W: StableHasherResult>(&self,
+                                          hcx: &mut StableHashingContext<'a>,
+                                          hasher: &mut StableHasher<W>) {
+        let traits::VtableTraitAliasData {
+            alias_def_id,
+            substs,
+            ref nested,
+        } = *self;
+        alias_def_id.hash_stable(hcx, hasher);
+        substs.hash_stable(hcx, hasher);
+        nested.hash_stable(hcx, hasher);
+    }
+}
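The new VtableTraitAliasData impl above follows the usual pattern in these hashing impls: destructure *self into every named field before hashing, so that adding a field later fails to compile until it is hashed as well. A tiny standalone illustration of the idea (toy struct and the std hasher, not rustc's StableHasher):

use std::collections::hash_map::DefaultHasher;
use std::hash::{Hash, Hasher};

struct AliasData {
    alias_def_id: u32,
    nested: Vec<u32>,
}

fn hash_stable(data: &AliasData, hasher: &mut DefaultHasher) {
    // Exhaustive destructuring: a newly added field makes this pattern
    // non-exhaustive, so the impl cannot silently forget to hash it.
    let AliasData { alias_def_id, ref nested } = *data;
    alias_def_id.hash(hasher);
    nested.hash(hasher);
}

fn main() {
    let mut h = DefaultHasher::new();
    hash_stable(&AliasData { alias_def_id: 7, nested: vec![1, 2, 3] }, &mut h);
    println!("hash = {:x}", h.finish());
}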
+
 impl_stable_hash_for!(
     impl<'tcx, V> for struct infer::canonical::Canonical<'tcx, V> {
-        variables, value
+        max_universe, variables, value
     }
 );
 
@@ -1245,7 +1261,8 @@ impl<'tcx> for struct infer::canonical::CanonicalVarValues<'tcx> {
 
 impl_stable_hash_for!(enum infer::canonical::CanonicalVarKind {
     Ty(k),
-    Region
+    Region(ui),
+    PlaceholderRegion(placeholder),
 });
 
 impl_stable_hash_for!(enum infer::canonical::CanonicalTyVarKind {
index 2b085a3407ccc40f453da1af6f17ab6d2fd370ac..61a861a8a1cd8b193a08c3a269c1641973498d31 100644 (file)
@@ -23,7 +23,7 @@
 use std::sync::atomic::Ordering;
 use ty::fold::{TypeFoldable, TypeFolder};
 use ty::subst::Kind;
-use ty::{self, BoundTy, BoundTyIndex, Lift, List, Ty, TyCtxt, TypeFlags};
+use ty::{self, BoundTy, BoundVar, Lift, List, Ty, TyCtxt, TypeFlags};
 
 use rustc_data_structures::fx::FxHashMap;
 use rustc_data_structures::indexed_vec::Idx;
@@ -107,6 +107,20 @@ pub fn canonicalize_response<V>(&self, value: &V) -> Canonicalized<'gcx, V>
         )
     }
 
+    pub fn canonicalize_user_type_annotation<V>(&self, value: &V) -> Canonicalized<'gcx, V>
+    where
+        V: TypeFoldable<'tcx> + Lift<'gcx>,
+    {
+        let mut query_state = OriginalQueryValues::default();
+        Canonicalizer::canonicalize(
+            value,
+            Some(self),
+            self.tcx,
+            &CanonicalizeUserTypeAnnotation,
+            &mut query_state,
+        )
+    }
+
     /// A hacky variant of `canonicalize_query` that does not
     /// canonicalize `'static`.  Unfortunately, the existing leak
     /// check treats `'static` differently in some cases (see also
@@ -162,11 +176,26 @@ fn canonicalize_free_region(
 impl CanonicalizeRegionMode for CanonicalizeQueryResponse {
     fn canonicalize_free_region(
         &self,
-        _canonicalizer: &mut Canonicalizer<'_, '_, 'tcx>,
+        canonicalizer: &mut Canonicalizer<'_, '_, 'tcx>,
         r: ty::Region<'tcx>,
     ) -> ty::Region<'tcx> {
         match r {
             ty::ReFree(_) | ty::ReEmpty | ty::ReErased | ty::ReStatic | ty::ReEarlyBound(..) => r,
+            ty::RePlaceholder(placeholder) => canonicalizer.canonical_var_for_region(
+                CanonicalVarInfo {
+                    kind: CanonicalVarKind::PlaceholderRegion(*placeholder),
+                },
+                r,
+            ),
+            ty::ReVar(vid) => {
+                let universe = canonicalizer.region_var_universe(*vid);
+                canonicalizer.canonical_var_for_region(
+                    CanonicalVarInfo {
+                        kind: CanonicalVarKind::Region(universe),
+                    },
+                    r,
+                )
+            }
             _ => {
                 // Other than `'static` or `'empty`, the query
                 // response should be executing in a fully
@@ -182,6 +211,29 @@ fn any(&self) -> bool {
     }
 }
 
+struct CanonicalizeUserTypeAnnotation;
+
+impl CanonicalizeRegionMode for CanonicalizeUserTypeAnnotation {
+    fn canonicalize_free_region(
+        &self,
+        canonicalizer: &mut Canonicalizer<'_, '_, 'tcx>,
+        r: ty::Region<'tcx>,
+    ) -> ty::Region<'tcx> {
+        match r {
+            ty::ReEarlyBound(_) | ty::ReFree(_) | ty::ReErased | ty::ReEmpty | ty::ReStatic => r,
+            ty::ReVar(_) => canonicalizer.canonical_var_for_region_in_root_universe(r),
+            _ => {
+                // We only expect region names that the user can type.
+                bug!("unexpected region in query response: `{:?}`", r)
+            }
+        }
+    }
+
+    fn any(&self) -> bool {
+        false
+    }
+}
+
 struct CanonicalizeAllFreeRegions;
 
 impl CanonicalizeRegionMode for CanonicalizeAllFreeRegions {
@@ -190,7 +242,7 @@ fn canonicalize_free_region(
         canonicalizer: &mut Canonicalizer<'_, '_, 'tcx>,
         r: ty::Region<'tcx>,
     ) -> ty::Region<'tcx> {
-        canonicalizer.canonical_var_for_region(r)
+        canonicalizer.canonical_var_for_region_in_root_universe(r)
     }
 
     fn any(&self) -> bool {
@@ -209,7 +261,7 @@ fn canonicalize_free_region(
         if let ty::ReStatic = r {
             r
         } else {
-            canonicalizer.canonical_var_for_region(r)
+            canonicalizer.canonical_var_for_region_in_root_universe(r)
         }
     }
 
@@ -225,9 +277,11 @@ struct Canonicalizer<'cx, 'gcx: 'tcx, 'tcx: 'cx> {
     query_state: &'cx mut OriginalQueryValues<'tcx>,
     // Note that indices is only used once `var_values` is big enough to be
     // heap-allocated.
-    indices: FxHashMap<Kind<'tcx>, BoundTyIndex>,
+    indices: FxHashMap<Kind<'tcx>, BoundVar>,
     canonicalize_region_mode: &'cx dyn CanonicalizeRegionMode,
     needs_canonical_flags: TypeFlags,
+
+    binder_index: ty::DebruijnIndex,
 }
 
 impl<'cx, 'gcx, 'tcx> TypeFolder<'gcx, 'tcx> for Canonicalizer<'cx, 'gcx, 'tcx> {
@@ -235,11 +289,23 @@ fn tcx<'b>(&'b self) -> TyCtxt<'b, 'gcx, 'tcx> {
         self.tcx
     }
 
+    fn fold_binder<T>(&mut self, t: &ty::Binder<T>) -> ty::Binder<T>
+        where T: TypeFoldable<'tcx>
+    {
+        self.binder_index.shift_in(1);
+        let t = t.super_fold_with(self);
+        self.binder_index.shift_out(1);
+        t
+    }
+
     fn fold_region(&mut self, r: ty::Region<'tcx>) -> ty::Region<'tcx> {
         match *r {
-            ty::ReLateBound(..) => {
-                // leave bound regions alone
-                r
+            ty::ReLateBound(index, ..) => {
+                if index >= self.binder_index {
+                    bug!("escaping late bound region during canonicalization")
+                } else {
+                    r
+                }
             }
 
             ty::ReVar(vid) => {
@@ -252,7 +318,8 @@ fn fold_region(&mut self, r: ty::Region<'tcx>) -> ty::Region<'tcx> {
                      opportunistically resolved to {:?}",
                     vid, r
                 );
-                self.canonical_var_for_region(r)
+                self.canonicalize_region_mode
+                    .canonicalize_free_region(self, r)
             }
 
             ty::ReStatic
@@ -261,10 +328,11 @@ fn fold_region(&mut self, r: ty::Region<'tcx>) -> ty::Region<'tcx> {
             | ty::ReScope(_)
             | ty::RePlaceholder(..)
             | ty::ReEmpty
-            | ty::ReErased => self.canonicalize_region_mode.canonicalize_free_region(self, r),
+            | ty::ReErased => self.canonicalize_region_mode
+                .canonicalize_free_region(self, r),
 
-            ty::ReClosureBound(..) | ty::ReCanonical(_) => {
-                bug!("canonical region encountered during canonicalization")
+            ty::ReClosureBound(..) => {
+                bug!("closure bound region encountered during canonicalization")
             }
         }
     }
@@ -283,8 +351,12 @@ fn fold_ty(&mut self, t: Ty<'tcx>) -> Ty<'tcx> {
                 bug!("encountered a fresh type during canonicalization")
             }
 
-            ty::Infer(ty::BoundTy(_)) => {
-                bug!("encountered a canonical type during canonicalization")
+            ty::Bound(bound_ty) => {
+                if bound_ty.index >= self.binder_index {
+                    bug!("escaping bound type during canonicalization")
+                } else {
+                    t
+                }
             }
 
             ty::Closure(..)
@@ -335,12 +407,6 @@ fn canonicalize<V>(
     where
         V: TypeFoldable<'tcx> + Lift<'gcx>,
     {
-        debug_assert!(
-            !value.has_type_flags(TypeFlags::HAS_CANONICAL_VARS),
-            "canonicalizing a canonical value: {:?}",
-            value,
-        );
-
         let needs_canonical_flags = if canonicalize_region_mode.any() {
             TypeFlags::HAS_FREE_REGIONS | TypeFlags::KEEP_IN_LOCAL_TCX
         } else {
@@ -353,6 +419,7 @@ fn canonicalize<V>(
         if !value.has_type_flags(needs_canonical_flags) {
             let out_value = gcx.lift(value).unwrap();
             let canon_value = Canonical {
+                max_universe: ty::UniverseIndex::ROOT,
                 variables: List::empty(),
                 value: out_value,
             };
@@ -367,6 +434,7 @@ fn canonicalize<V>(
             variables: SmallVec::new(),
             query_state,
             indices: FxHashMap::default(),
+            binder_index: ty::INNERMOST,
         };
         let out_value = value.fold_with(&mut canonicalizer);
 
@@ -383,7 +451,14 @@ fn canonicalize<V>(
 
         let canonical_variables = tcx.intern_canonical_var_infos(&canonicalizer.variables);
 
+        let max_universe = canonical_variables
+            .iter()
+            .map(|cvar| cvar.universe())
+            .max()
+            .unwrap_or(ty::UniverseIndex::ROOT);
+
         Canonical {
+            max_universe,
             variables: canonical_variables,
             value: out_value,
         }
@@ -393,7 +468,7 @@ fn canonicalize<V>(
     /// or returns an existing variable if `kind` has already been
     /// seen. `kind` is expected to be an unbound variable (or
     /// potentially a free region).
-    fn canonical_var(&mut self, info: CanonicalVarInfo, kind: Kind<'tcx>) -> BoundTy {
+    fn canonical_var(&mut self, info: CanonicalVarInfo, kind: Kind<'tcx>) -> BoundVar {
         let Canonicalizer {
             variables,
             query_state,
@@ -413,7 +488,7 @@ fn canonical_var(&mut self, info: CanonicalVarInfo, kind: Kind<'tcx>) -> BoundTy
             // direct linear search of `var_values`.
             if let Some(idx) = var_values.iter().position(|&k| k == kind) {
                 // `kind` is already present in `var_values`.
-                BoundTyIndex::new(idx)
+                BoundVar::new(idx)
             } else {
                 // `kind` isn't present in `var_values`. Append it. Likewise
                 // for `info` and `variables`.
@@ -428,11 +503,11 @@ fn canonical_var(&mut self, info: CanonicalVarInfo, kind: Kind<'tcx>) -> BoundTy
                     *indices = var_values
                         .iter()
                         .enumerate()
-                        .map(|(i, &kind)| (kind, BoundTyIndex::new(i)))
+                        .map(|(i, &kind)| (kind, BoundVar::new(i)))
                         .collect();
                 }
                 // The cv is the index of the appended element.
-                BoundTyIndex::new(var_values.len() - 1)
+                BoundVar::new(var_values.len() - 1)
             }
         } else {
             // `var_values` is large. Do a hashmap search via `indices`.
@@ -440,23 +515,59 @@ fn canonical_var(&mut self, info: CanonicalVarInfo, kind: Kind<'tcx>) -> BoundTy
                 variables.push(info);
                 var_values.push(kind);
                 assert_eq!(variables.len(), var_values.len());
-                BoundTyIndex::new(variables.len() - 1)
+                BoundVar::new(variables.len() - 1)
             })
         };
 
-        BoundTy {
-            level: ty::INNERMOST,
-            var,
-        }
+        var
     }
 
-    fn canonical_var_for_region(&mut self, r: ty::Region<'tcx>) -> ty::Region<'tcx> {
-        let info = CanonicalVarInfo {
-            kind: CanonicalVarKind::Region,
-        };
-        let b = self.canonical_var(info, r.into());
-        debug_assert_eq!(ty::INNERMOST, b.level);
-        self.tcx().mk_region(ty::ReCanonical(b.var))
+    /// Shorthand helper that creates a canonical region variable for
+    /// `r` (always in the root universe). The reason that we always
+    /// put these variables into the root universe is because this
+    /// method is used during **query construction:** in that case, we
+    /// are taking all the regions and just putting them into the most
+    /// generic context we can. This may generate solutions that don't
+    /// fit (e.g., that equate some region variable with a placeholder
+    /// it can't name) on the caller side, but that's ok, the caller
+    /// can figure that out. In the meantime, it maximizes our
+    /// caching.
+    ///
+    /// (This works because unification never fails -- and hence trait
+    /// selection is never affected -- due to a universe mismatch.)
+    fn canonical_var_for_region_in_root_universe(
+        &mut self,
+        r: ty::Region<'tcx>,
+    ) -> ty::Region<'tcx> {
+        self.canonical_var_for_region(
+            CanonicalVarInfo {
+                kind: CanonicalVarKind::Region(ty::UniverseIndex::ROOT),
+            },
+            r,
+        )
+    }
+
+    /// Returns the universe in which `vid` is defined.
+    fn region_var_universe(&self, vid: ty::RegionVid) -> ty::UniverseIndex {
+        self.infcx
+            .unwrap()
+            .borrow_region_constraints()
+            .var_universe(vid)
+    }
+
+    /// Create a canonical variable (with the given `info`)
+    /// representing the region `r`; return a region referencing it.
+    fn canonical_var_for_region(
+        &mut self,
+        info: CanonicalVarInfo,
+        r: ty::Region<'tcx>,
+    ) -> ty::Region<'tcx> {
+        let var = self.canonical_var(info, r.into());
+        let region = ty::ReLateBound(
+            self.binder_index,
+            ty::BoundRegion::BrAnon(var.as_u32())
+        );
+        self.tcx().mk_region(region)
     }
 
     /// Given a type variable `ty_var` of the given kind, first check
@@ -472,9 +583,8 @@ fn canonicalize_ty_var(&mut self, ty_kind: CanonicalTyVarKind, ty_var: Ty<'tcx>)
             let info = CanonicalVarInfo {
                 kind: CanonicalVarKind::Ty(ty_kind),
             };
-            let b = self.canonical_var(info, ty_var.into());
-            debug_assert_eq!(ty::INNERMOST, b.level);
-            self.tcx().mk_infer(ty::InferTy::BoundTy(b))
+            let var = self.canonical_var(info, ty_var.into());
+            self.tcx().mk_ty(ty::Bound(BoundTy::new(self.binder_index, var)))
         }
     }
 }
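To make the canonical_var bookkeeping above easier to follow: each distinct inference variable or free region gets the next BoundVar index in order of first appearance (deduplicated, with a linear scan that upgrades to the `indices` hash map once var_values grows). A self-contained toy version of that numbering, using strings in place of Kind<'tcx>:

use std::collections::HashMap;

/// Toy stand-in for ty::BoundVar: just an index into var_values.
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
struct BoundVar(usize);

/// Mirrors the first-appearance numbering done by Canonicalizer::canonical_var.
struct MiniCanonicalizer<'a> {
    var_values: Vec<&'a str>,
    indices: HashMap<&'a str, BoundVar>,
}

impl<'a> MiniCanonicalizer<'a> {
    fn canonical_var(&mut self, kind: &'a str) -> BoundVar {
        if let Some(&var) = self.indices.get(kind) {
            return var; // already canonicalized: reuse the same index
        }
        self.var_values.push(kind);
        let var = BoundVar(self.var_values.len() - 1);
        self.indices.insert(kind, var);
        var
    }
}

fn main() {
    let mut c = MiniCanonicalizer { var_values: Vec::new(), indices: HashMap::new() };
    // `?T` and `'?a` get fresh indices; the second `?T` reuses index 0.
    assert_eq!(c.canonical_var("?T"), BoundVar(0));
    assert_eq!(c.canonical_var("'?a"), BoundVar(1));
    assert_eq!(c.canonical_var("?T"), BoundVar(0));
    assert_eq!(c.var_values, vec!["?T", "'?a"]);
}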
index e3bd407d17a90785a27ff6da5fc02dbda78211ed..f7eb7118f412f6f0c55676c6966eff30628f8104 100644 (file)
@@ -20,7 +20,7 @@
 //! - a map M (of type `CanonicalVarValues`) from those canonical
 //!   variables back to the original.
 //!
-//! We can then do queries using T2. These will give back constriants
+//! We can then do queries using T2. These will give back constraints
 //! on the canonical variables which can be translated, using the map
 //! M, into constraints in our source context. This process of
 //! translating the results back is done by the
 
 use infer::{InferCtxt, RegionVariableOrigin, TypeVariableOrigin};
 use rustc_data_structures::indexed_vec::IndexVec;
-use smallvec::SmallVec;
 use rustc_data_structures::sync::Lrc;
 use serialize::UseSpecializedDecodable;
+use smallvec::SmallVec;
 use std::ops::Index;
 use syntax::source_map::Span;
 use ty::fold::TypeFoldable;
 use ty::subst::Kind;
-use ty::{self, BoundTyIndex, Lift, Region, List, TyCtxt};
+use ty::{self, BoundVar, Lift, List, Region, TyCtxt};
 
 mod canonicalizer;
 
@@ -53,6 +53,7 @@
 /// numbered starting from 0 in order of first appearance.
 #[derive(Copy, Clone, Debug, PartialEq, Eq, Hash, RustcDecodable, RustcEncodable)]
 pub struct Canonical<'gcx, V> {
+    pub max_universe: ty::UniverseIndex,
     pub variables: CanonicalVarInfos<'gcx>,
     pub value: V,
 }
@@ -72,20 +73,38 @@ impl<'gcx> UseSpecializedDecodable for CanonicalVarInfos<'gcx> {}
 /// canonicalized query response.
 #[derive(Clone, Debug, PartialEq, Eq, Hash, RustcDecodable, RustcEncodable)]
 pub struct CanonicalVarValues<'tcx> {
-    pub var_values: IndexVec<BoundTyIndex, Kind<'tcx>>,
+    pub var_values: IndexVec<BoundVar, Kind<'tcx>>,
 }
 
 /// When we canonicalize a value to form a query, we wind up replacing
 /// various parts of it with canonical variables. This struct stores
 /// those replaced bits to remember for when we process the query
 /// result.
-#[derive(Clone, Debug, Default, PartialEq, Eq, Hash, RustcDecodable, RustcEncodable)]
+#[derive(Clone, Debug, PartialEq, Eq, Hash, RustcDecodable, RustcEncodable)]
 pub struct OriginalQueryValues<'tcx> {
+    /// Map from the universes that appear in the query to the
+    /// universes in the caller context. For the time being, we only
+    /// ever put ROOT values into the query, so this map is very
+    /// simple.
+    pub universe_map: SmallVec<[ty::UniverseIndex; 4]>,
+
     /// This is equivalent to `CanonicalVarValues`, but using a
     /// `SmallVec` yields a significant performance win.
     pub var_values: SmallVec<[Kind<'tcx>; 8]>,
 }
 
+impl Default for OriginalQueryValues<'tcx> {
+    fn default() -> Self {
+        let mut universe_map = SmallVec::default();
+        universe_map.push(ty::UniverseIndex::ROOT);
+
+        Self {
+            universe_map,
+            var_values: SmallVec::default(),
+        }
+    }
+}
+
 /// Information about a canonical variable that is included with the
 /// canonical value. This is sufficient information for code to create
 /// a copy of the canonical value in some other inference context,
@@ -95,6 +114,20 @@ pub struct CanonicalVarInfo {
     pub kind: CanonicalVarKind,
 }
 
+impl CanonicalVarInfo {
+    pub fn universe(&self) -> ty::UniverseIndex {
+        self.kind.universe()
+    }
+
+    pub fn is_existential(&self) -> bool {
+        match self.kind {
+            CanonicalVarKind::Ty(_) => true,
+            CanonicalVarKind::Region(_) => true,
+            CanonicalVarKind::PlaceholderRegion(..) => false,
+        }
+    }
+}
+
 /// Describes the "kind" of the canonical variable. This is a "kind"
 /// in the type-theory sense of the term -- i.e., a "meta" type system
 /// that analyzes type-like values.
@@ -104,7 +137,27 @@ pub enum CanonicalVarKind {
     Ty(CanonicalTyVarKind),
 
     /// Region variable `'?R`.
-    Region,
+    Region(ty::UniverseIndex),
+
+    /// A "placeholder" that represents "any region". Created when you
+    /// are solving a goal like `for<'a> T: Foo<'a>` to represent the
+    /// bound region `'a`.
+    PlaceholderRegion(ty::Placeholder),
+}
+
+impl CanonicalVarKind {
+    pub fn universe(self) -> ty::UniverseIndex {
+        match self {
+            // At present, we don't support higher-ranked
+            // quantification over types, so all type variables are in
+            // the root universe.
+            CanonicalVarKind::Ty(_) => ty::UniverseIndex::ROOT,
+
+            // Region variables can be created in sub-universes.
+            CanonicalVarKind::Region(ui) => ui,
+            CanonicalVarKind::PlaceholderRegion(placeholder) => placeholder.universe,
+        }
+    }
 }
 
 /// Rust actually has more than one category of type variables;
@@ -220,8 +273,16 @@ impl<'gcx, V> Canonical<'gcx, V> {
     /// let b: Canonical<'tcx, (T, Ty<'tcx>)> = a.unchecked_map(|v| (v, ty));
     /// ```
     pub fn unchecked_map<W>(self, map_op: impl FnOnce(V) -> W) -> Canonical<'gcx, W> {
-        let Canonical { variables, value } = self;
-        Canonical { variables, value: map_op(value) }
+        let Canonical {
+            max_universe,
+            variables,
+            value,
+        } = self;
+        Canonical {
+            max_universe,
+            variables,
+            value: map_op(value),
+        }
     }
 }
 
@@ -249,35 +310,50 @@ pub fn instantiate_canonical_with_fresh_inference_vars<T>(
     where
         T: TypeFoldable<'tcx>,
     {
+        // For each universe that is referred to in the incoming
+        // query, create a universe in our local inference context. In
+        // practice, as of this writing, all queries have no universes
+        // in them, so this code has no effect, but it is looking
+        // forward to the day when we *do* want to carry universes
+        // through into queries.
+        let universes: IndexVec<ty::UniverseIndex, _> = std::iter::once(ty::UniverseIndex::ROOT)
+            .chain((0..canonical.max_universe.as_u32()).map(|_| self.create_next_universe()))
+            .collect();
+
         let canonical_inference_vars =
-            self.fresh_inference_vars_for_canonical_vars(span, canonical.variables);
+            self.instantiate_canonical_vars(span, canonical.variables, |ui| universes[ui]);
         let result = canonical.substitute(self.tcx, &canonical_inference_vars);
         (result, canonical_inference_vars)
     }
 
     /// Given the "infos" about the canonical variables from some
-    /// canonical, creates fresh inference variables with the same
-    /// characteristics. You can then use `substitute` to instantiate
-    /// the canonical variable with these inference variables.
-    fn fresh_inference_vars_for_canonical_vars(
+    /// canonical, creates fresh variables with the same
+    /// characteristics (see `instantiate_canonical_var` for
+    /// details). You can then use `substitute` to instantiate the
+    /// canonical variable with these inference variables.
+    fn instantiate_canonical_vars(
         &self,
         span: Span,
         variables: &List<CanonicalVarInfo>,
+        universe_map: impl Fn(ty::UniverseIndex) -> ty::UniverseIndex,
     ) -> CanonicalVarValues<'tcx> {
-        let var_values: IndexVec<BoundTyIndex, Kind<'tcx>> = variables
+        let var_values: IndexVec<BoundVar, Kind<'tcx>> = variables
             .iter()
-            .map(|info| self.fresh_inference_var_for_canonical_var(span, *info))
+            .map(|info| self.instantiate_canonical_var(span, *info, &universe_map))
             .collect();
 
         CanonicalVarValues { var_values }
     }
 
     /// Given the "info" about a canonical variable, creates a fresh
-    /// inference variable with the same characteristics.
-    fn fresh_inference_var_for_canonical_var(
+    /// variable for it. If this is an existentially quantified
+    /// variable, then you'll get a new inference variable; if it is a
+    /// universally quantified variable, you get a placeholder.
+    fn instantiate_canonical_var(
         &self,
         span: Span,
         cv_info: CanonicalVarInfo,
+        universe_map: impl Fn(ty::UniverseIndex) -> ty::UniverseIndex,
     ) -> Kind<'tcx> {
         match cv_info.kind {
             CanonicalVarKind::Ty(ty_kind) => {
@@ -293,9 +369,21 @@ fn fresh_inference_var_for_canonical_var(
                 ty.into()
             }
 
-            CanonicalVarKind::Region => self
-                .next_region_var(RegionVariableOrigin::MiscVariable(span))
-                .into(),
+            CanonicalVarKind::Region(ui) => self.next_region_var_in_universe(
+                RegionVariableOrigin::MiscVariable(span),
+                universe_map(ui),
+            ).into(),
+
+            CanonicalVarKind::PlaceholderRegion(ty::Placeholder { universe, name }) => {
+                let universe_mapped = universe_map(universe);
+                let placeholder_mapped = ty::Placeholder {
+                    universe: universe_mapped,
+                    name,
+                };
+                self.tcx
+                    .mk_region(ty::RePlaceholder(placeholder_mapped))
+                    .into()
+            }
         }
     }
 }
@@ -314,6 +402,7 @@ fn fresh_inference_var_for_canonical_var(
 
 BraceStructTypeFoldableImpl! {
     impl<'tcx, C> TypeFoldable<'tcx> for Canonical<'tcx, C> {
+        max_universe,
         variables,
         value,
     } where C: TypeFoldable<'tcx>
@@ -322,7 +411,7 @@ impl<'tcx, C> TypeFoldable<'tcx> for Canonical<'tcx, C> {
 BraceStructLiftImpl! {
     impl<'a, 'tcx, T> Lift<'tcx> for Canonical<'a, T> {
         type Lifted = Canonical<'tcx, T::Lifted>;
-        variables, value
+        max_universe, variables, value
     } where T: Lift<'tcx>
 }
 
@@ -367,10 +456,10 @@ impl<'a, 'tcx, R> Lift<'tcx> for QueryResponse<'a, R> {
     } where R: Lift<'tcx>
 }
 
-impl<'tcx> Index<BoundTyIndex> for CanonicalVarValues<'tcx> {
+impl<'tcx> Index<BoundVar> for CanonicalVarValues<'tcx> {
     type Output = Kind<'tcx>;
 
-    fn index(&self, value: BoundTyIndex) -> &Kind<'tcx> {
+    fn index(&self, value: BoundVar) -> &Kind<'tcx> {
         &self.var_values[value]
     }
 }
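The new max_universe field and the universe() / is_existential() helpers above can be summarized with a small model: type variables always live in the root universe, region variables carry the universe they were created in, and placeholders are the only universally quantified kind. A toy sketch using plain u32 universe indices (ROOT = 0) rather than ty::UniverseIndex:

#[derive(Clone, Copy)]
enum VarKind {
    Ty,                     // type variables are always in the root universe
    Region(u32),            // region variable created in universe `u`
    PlaceholderRegion(u32), // placeholder ("any region") in universe `u`
}

impl VarKind {
    fn universe(self) -> u32 {
        match self {
            VarKind::Ty => 0,
            VarKind::Region(u) | VarKind::PlaceholderRegion(u) => u,
        }
    }

    fn is_existential(self) -> bool {
        // Placeholders are universally quantified; everything else is an
        // existential that gets a fresh inference variable on instantiation.
        match self {
            VarKind::Ty | VarKind::Region(_) => true,
            VarKind::PlaceholderRegion(_) => false,
        }
    }
}

/// Same computation as Canonical::max_universe: the largest universe any
/// canonical variable mentions, or ROOT when there are no variables.
fn max_universe(vars: &[VarKind]) -> u32 {
    vars.iter().map(|v| v.universe()).max().unwrap_or(0)
}

fn main() {
    assert_eq!(max_universe(&[]), 0);
    let vars = [VarKind::Ty, VarKind::Region(2), VarKind::PlaceholderRegion(1)];
    assert_eq!(max_universe(&vars), 2);
    assert!(vars[1].is_existential());
    assert!(!vars[2].is_existential());
}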
index 38788186eb0710ddb6ccbd8262aa58a0cbcf1fb4..f4607f7a9092f4ee01bd6a31b534bc06a88a1d84 100644 (file)
@@ -35,7 +35,7 @@
 use traits::{Obligation, ObligationCause, PredicateObligation};
 use ty::fold::TypeFoldable;
 use ty::subst::{Kind, UnpackedKind};
-use ty::{self, BoundTyIndex, Lift, Ty, TyCtxt};
+use ty::{self, BoundVar, Lift, Ty, TyCtxt};
 
 impl<'cx, 'gcx, 'tcx> InferCtxtBuilder<'cx, 'gcx, 'tcx> {
     /// The "main method" for a canonicalized trait query. Given the
@@ -273,7 +273,7 @@ pub fn instantiate_nll_query_response_and_region_obligations<R>(
         for (index, original_value) in original_values.var_values.iter().enumerate() {
             // ...with the value `v_r` of that variable from the query.
             let result_value = query_response.substitute_projected(self.tcx, &result_subst, |v| {
-                &v.var_values[BoundTyIndex::new(index)]
+                &v.var_values[BoundVar::new(index)]
             });
             match (original_value.unpack(), result_value.unpack()) {
                 (UnpackedKind::Lifetime(ty::ReErased), UnpackedKind::Lifetime(ty::ReErased)) => {
@@ -308,11 +308,14 @@ pub fn instantiate_nll_query_response_and_region_obligations<R>(
         // ...also include the other query region constraints from the query.
         output_query_region_constraints.extend(
             query_response.value.region_constraints.iter().filter_map(|r_c| {
-                let &ty::OutlivesPredicate(k1, r2) = r_c.skip_binder(); // reconstructed below
-                let k1 = substitute_value(self.tcx, &result_subst, &k1);
-                let r2 = substitute_value(self.tcx, &result_subst, &r2);
+                let r_c = substitute_value(self.tcx, &result_subst, r_c);
+
+                // Screen out `'a: 'a` cases -- we skip the binder here but
+                // only compare the inner values to one another, so they are still at
+                // consistent binding levels.
+                let &ty::OutlivesPredicate(k1, r2) = r_c.skip_binder();
                 if k1 != r2.into() {
-                    Some(ty::Binder::bind(ty::OutlivesPredicate(k1, r2)))
+                    Some(r_c)
                 } else {
                     None
                 }
@@ -394,6 +397,21 @@ fn query_response_substitution_guess<R>(
             original_values, query_response,
         );
 
+        // For each new universe created in the query result that did
+        // not appear in the original query, create a local
+        // superuniverse.
+        let mut universe_map = original_values.universe_map.clone();
+        let num_universes_in_query = original_values.universe_map.len();
+        let num_universes_in_response = query_response.max_universe.as_usize() + 1;
+        for _ in num_universes_in_query..num_universes_in_response {
+            universe_map.push(self.create_next_universe());
+        }
+        assert!(universe_map.len() >= 1); // always have the root universe
+        assert_eq!(
+            universe_map[ty::UniverseIndex::ROOT.as_usize()],
+            ty::UniverseIndex::ROOT
+        );
+
         // Every canonical query result includes values for each of
         // the inputs to the query. Therefore, we begin by unifying
         // these values with the original inputs that were
@@ -408,7 +426,7 @@ fn query_response_substitution_guess<R>(
         // is directly equal to one of the canonical variables in the
         // result, then we can type the corresponding value from the
         // input. See the example above.
-        let mut opt_values: IndexVec<BoundTyIndex, Option<Kind<'tcx>>> =
+        let mut opt_values: IndexVec<BoundVar, Option<Kind<'tcx>>> =
             IndexVec::from_elem_n(None, query_response.variables.len());
 
         // In terms of our example above, we are iterating over pairs like:
@@ -417,16 +435,22 @@ fn query_response_substitution_guess<R>(
             match result_value.unpack() {
                 UnpackedKind::Type(result_value) => {
                     // e.g., here `result_value` might be `?0` in the example above...
-                    if let ty::Infer(ty::InferTy::BoundTy(b)) = result_value.sty {
-                        // in which case we would set `canonical_vars[0]` to `Some(?U)`.
+                    if let ty::Bound(b) = result_value.sty {
+                        // ...in which case we would set `canonical_vars[0]` to `Some(?U)`.
+
+                        // We only allow a `ty::INNERMOST` index in substitutions.
+                        assert_eq!(b.index, ty::INNERMOST);
                         opt_values[b.var] = Some(*original_value);
                     }
                 }
                 UnpackedKind::Lifetime(result_value) => {
                     // e.g., here `result_value` might be `'?1` in the example above...
-                    if let &ty::RegionKind::ReCanonical(index) = result_value {
-                        // in which case we would set `canonical_vars[0]` to `Some('static)`.
-                        opt_values[index] = Some(*original_value);
+                    if let &ty::RegionKind::ReLateBound(index, br) = result_value {
+                        // ... in which case we would set `canonical_vars[0]` to `Some('static)`.
+
+                        // We only allow a `ty::INNERMOST` index in substitutions.
+                        assert_eq!(index, ty::INNERMOST);
+                        opt_values[br.assert_bound_var()] = Some(*original_value);
                     }
                 }
             }
@@ -440,9 +464,20 @@ fn query_response_substitution_guess<R>(
                 .variables
                 .iter()
                 .enumerate()
-                .map(|(index, info)| opt_values[BoundTyIndex::new(index)].unwrap_or_else(||
-                    self.fresh_inference_var_for_canonical_var(cause.span, *info)
-                ))
+                .map(|(index, info)| {
+                    if info.is_existential() {
+                        match opt_values[BoundVar::new(index)] {
+                            Some(k) => k,
+                            None => self.instantiate_canonical_var(cause.span, *info, |u| {
+                                universe_map[u.as_usize()]
+                            }),
+                        }
+                    } else {
+                        self.instantiate_canonical_var(cause.span, *info, |u| {
+                            universe_map[u.as_usize()]
+                        })
+                    }
+                })
                 .collect(),
         };
 
@@ -470,7 +505,7 @@ fn unify_query_response_substitution_guess<R>(
         // canonical variable; this is taken from
         // `query_response.var_values` after applying the substitution
         // `result_subst`.
-        let substituted_query_response = |index: BoundTyIndex| -> Kind<'tcx> {
+        let substituted_query_response = |index: BoundVar| -> Kind<'tcx> {
             query_response.substitute_projected(self.tcx, &result_subst, |v| &v.var_values[index])
         };
 
@@ -497,22 +532,23 @@ fn query_region_constraints_into_obligations<'a>(
             unsubstituted_region_constraints
                 .iter()
                 .map(move |constraint| {
-                    let ty::OutlivesPredicate(k1, r2) = constraint.skip_binder(); // restored below
-                    let k1 = substitute_value(self.tcx, result_subst, k1);
-                    let r2 = substitute_value(self.tcx, result_subst, r2);
+                    let constraint = substitute_value(self.tcx, result_subst, constraint);
+                    let &ty::OutlivesPredicate(k1, r2) = constraint.skip_binder(); // restored below
 
                     Obligation::new(
                         cause.clone(),
                         param_env,
                         match k1.unpack() {
                             UnpackedKind::Lifetime(r1) => ty::Predicate::RegionOutlives(
-                                ty::Binder::dummy(
+                                ty::Binder::bind(
                                     ty::OutlivesPredicate(r1, r2)
-                            )),
+                                )
+                            ),
                             UnpackedKind::Type(t1) => ty::Predicate::TypeOutlives(
-                                ty::Binder::dummy(ty::OutlivesPredicate(
-                                    t1, r2
-                            )))
+                                ty::Binder::bind(
+                                    ty::OutlivesPredicate(t1, r2)
+                                )
+                            ),
                         }
                     )
                 })
@@ -526,12 +562,12 @@ fn unify_canonical_vars(
         cause: &ObligationCause<'tcx>,
         param_env: ty::ParamEnv<'tcx>,
         variables1: &OriginalQueryValues<'tcx>,
-        variables2: impl Fn(BoundTyIndex) -> Kind<'tcx>,
+        variables2: impl Fn(BoundVar) -> Kind<'tcx>,
     ) -> InferResult<'tcx, ()> {
         self.commit_if_ok(|_| {
             let mut obligations = vec![];
             for (index, value1) in variables1.var_values.iter().enumerate() {
-                let value2 = variables2(BoundTyIndex::new(index));
+                let value2 = variables2(BoundVar::new(index));
 
                 match (value1.unpack(), value2.unpack()) {
                     (UnpackedKind::Type(v1), UnpackedKind::Type(v2)) => {
@@ -594,11 +630,11 @@ pub fn make_query_outlives<'tcx>(
             }
             Constraint::RegSubReg(r1, r2) => ty::OutlivesPredicate(r2.into(), r1),
         })
-        .map(ty::Binder::dummy) // no bound regions in the code above
+        .map(ty::Binder::dummy) // no bound vars in the code above
         .chain(
             outlives_obligations
                 .map(|(ty, r)| ty::OutlivesPredicate(ty.into(), r))
-                .map(ty::Binder::dummy), // no bound regions in the code above
+                .map(ty::Binder::dummy) // no bound vars in the code above
         )
         .collect();
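Pulling together the universe bookkeeping from the query_response_substitution_guess hunk above: universes already present in the original query map through unchanged, and any extra universes mentioned by the response get fresh universes on the caller side. A toy sketch of that mapping with u32 universe indices (ROOT = 0), not rustc's real types:

/// Extends the caller's universe map so it covers every universe the
/// canonical response refers to (0 ..= response_max_universe).
fn extend_universe_map(
    original: &[u32],           // OriginalQueryValues::universe_map
    response_max_universe: u32, // Canonical::max_universe of the response
    next_fresh: &mut u32,       // caller-side counter for creating universes
) -> Vec<u32> {
    let mut map = original.to_vec();
    while map.len() < response_max_universe as usize + 1 {
        *next_fresh += 1;       // stand-in for InferCtxt::create_next_universe
        map.push(*next_fresh);
    }
    assert!(!map.is_empty());   // always contains the root universe
    assert_eq!(map[0], 0);      // ...and ROOT maps to ROOT
    map
}

fn main() {
    // The query only mentioned ROOT; the response used universes 1 and 2,
    // which become fresh caller-side universes 4 and 5.
    let mut next_fresh = 3;
    assert_eq!(extend_universe_map(&[0], 2, &mut next_fresh), vec![0, 4, 5]);
}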
 
index 03441c3dee35e2ba40c49fb6b7e12bbe19fa3afa..b8c1ed236c0ba17aededa8aff24c46641052eb22 100644 (file)
@@ -17,9 +17,9 @@
 //! [c]: https://rust-lang-nursery.github.io/rustc-guide/traits/canonicalization.html
 
 use infer::canonical::{Canonical, CanonicalVarValues};
-use ty::fold::{TypeFoldable, TypeFolder};
+use ty::fold::TypeFoldable;
 use ty::subst::UnpackedKind;
-use ty::{self, Ty, TyCtxt, TypeFlags};
+use ty::{self, TyCtxt};
 
 impl<'tcx, V> Canonical<'tcx, V> {
     /// Instantiate the wrapped value, replacing each canonical value
@@ -64,51 +64,22 @@ pub(super) fn substitute_value<'a, 'tcx, T>(
     T: TypeFoldable<'tcx>,
 {
     if var_values.var_values.is_empty() {
-        debug_assert!(!value.has_type_flags(TypeFlags::HAS_CANONICAL_VARS));
-        value.clone()
-    } else if !value.has_type_flags(TypeFlags::HAS_CANONICAL_VARS) {
         value.clone()
     } else {
-        value.fold_with(&mut CanonicalVarValuesSubst { tcx, var_values })
-    }
-}
-
-struct CanonicalVarValuesSubst<'cx, 'gcx: 'tcx, 'tcx: 'cx> {
-    tcx: TyCtxt<'cx, 'gcx, 'tcx>,
-    var_values: &'cx CanonicalVarValues<'tcx>,
-}
-
-impl<'cx, 'gcx, 'tcx> TypeFolder<'gcx, 'tcx> for CanonicalVarValuesSubst<'cx, 'gcx, 'tcx> {
-    fn tcx(&self) -> TyCtxt<'_, 'gcx, 'tcx> {
-        self.tcx
-    }
-
-    fn fold_ty(&mut self, t: Ty<'tcx>) -> Ty<'tcx> {
-        match t.sty {
-            ty::Infer(ty::InferTy::BoundTy(b)) => {
-                debug_assert_eq!(ty::INNERMOST, b.level);
-                match self.var_values.var_values[b.var].unpack() {
-                    UnpackedKind::Type(ty) => ty,
-                    r => bug!("{:?} is a type but value is {:?}", b, r),
-                }
+        let fld_r = |br: ty::BoundRegion| {
+            match var_values.var_values[br.assert_bound_var()].unpack() {
+                UnpackedKind::Lifetime(l) => l,
+                r => bug!("{:?} is a region but value is {:?}", br, r),
             }
-            _ => {
-                if !t.has_type_flags(TypeFlags::HAS_CANONICAL_VARS) {
-                    t
-                } else {
-                    t.super_fold_with(self)
-                }
+        };
+
+        let fld_t = |bound_ty: ty::BoundTy| {
+            match var_values.var_values[bound_ty.var].unpack() {
+                UnpackedKind::Type(ty) => ty,
+                r => bug!("{:?} is a type but value is {:?}", bound_ty, r),
             }
-        }
-    }
+        };
 
-    fn fold_region(&mut self, r: ty::Region<'tcx>) -> ty::Region<'tcx> {
-        match r {
-            ty::RegionKind::ReCanonical(c) => match self.var_values.var_values[*c].unpack() {
-                UnpackedKind::Lifetime(l) => l,
-                r => bug!("{:?} is a region but value is {:?}", c, r),
-            },
-            _ => r.super_fold_with(self),
-        }
+        tcx.replace_escaping_bound_vars(value, fld_r, fld_t)
     }
 }
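The rewritten substitute_value above no longer needs its own TypeFolder: it hands replace_escaping_bound_vars two closures that look a bound region or bound type up in var_values by its BoundVar index. The effect, reduced to a toy term language (own types, not rustc's):

/// A canonical value refers to its variables by index; substitution just
/// replaces each Bound(i) with var_values[i], recursing through the rest.
#[derive(Clone, Debug, PartialEq)]
enum Term {
    Bound(usize),                 // a canonical variable, by BoundVar index
    App(&'static str, Vec<Term>), // any other constructor, e.g. Vec<...>
}

fn substitute(t: &Term, var_values: &[Term]) -> Term {
    match t {
        Term::Bound(i) => var_values[*i].clone(),
        Term::App(name, args) => Term::App(
            *name,
            args.iter().map(|a| substitute(a, var_values)).collect(),
        ),
    }
}

fn main() {
    // Canonical value `Vec<^0>` with var_values = [u32] becomes `Vec<u32>`.
    let canonical = Term::App("Vec", vec![Term::Bound(0)]);
    let var_values = [Term::App("u32", vec![])];
    assert_eq!(
        substitute(&canonical, &var_values),
        Term::App("Vec", vec![Term::App("u32", vec![])])
    );
}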
index 0ee03bc4c6e00902d0026a78e23b8be1c63a0a5d..f13210926a79b77361e21c83c2d5f3b814b7d88f 100644 (file)
@@ -485,7 +485,6 @@ fn regions(&mut self, r: ty::Region<'tcx>, r2: ty::Region<'tcx>)
                 }
             }
 
-            ty::ReCanonical(..) |
             ty::ReClosureBound(..) => {
                 span_bug!(
                     self.span,
index d19c495af3b9685c11240ace5d55bfc4dcc9b4ad..1963d366e7a66d5d5daa5ba3111852663416bd6c 100644 (file)
@@ -152,7 +152,7 @@ pub fn note_and_explain_region(
             }
 
             // We shouldn't encounter an error message with ReClosureBound.
-            ty::ReCanonical(..) | ty::ReClosureBound(..) => {
+            ty::ReClosureBound(..) => {
                 bug!("encountered unexpected ReClosureBound: {:?}", region,);
             }
         };
index 1647f259db9fb27e855ae0db4fe08cdb39480104..b53444992fa216972d4e32b36bbdabd7d36e27d4 100644 (file)
@@ -114,7 +114,6 @@ fn fold_region(&mut self, r: ty::Region<'tcx>) -> ty::Region<'tcx> {
                 self.tcx().types.re_erased
             }
 
-            ty::ReCanonical(..) |
             ty::ReClosureBound(..) => {
                 bug!(
                     "encountered unexpected region: {:?}",
@@ -171,8 +170,8 @@ fn fold_ty(&mut self, t: Ty<'tcx>) -> Ty<'tcx> {
                 t
             }
 
-            ty::Infer(ty::BoundTy(..)) =>
-                bug!("encountered canonical ty during freshening"),
+            ty::Bound(..) =>
+                bug!("encountered bound ty during freshening"),
 
             ty::Generator(..) |
             ty::Bool |
index fd14e0e40e234985629e587307fa0083faa4b86d..8968c5949b617b8ccb8c525e547d78e6826c5b7c 100644 (file)
@@ -15,7 +15,6 @@
 
 use traits::ObligationCause;
 use ty::{self, Ty, TyCtxt};
-use ty::error::TypeError;
 use ty::relate::{Relate, RelateResult, TypeRelation};
 
 /// "Greatest lower bound" (common subtype)
@@ -76,31 +75,12 @@ fn binders<T>(&mut self, a: &ty::Binder<T>, b: &ty::Binder<T>)
         where T: Relate<'tcx>
     {
         debug!("binders(a={:?}, b={:?})", a, b);
-        let was_error = self.infcx().probe(|_snapshot| {
-            // Subtle: use a fresh combine-fields here because we recover
-            // from Err. Doing otherwise could propagate obligations out
-            // through our `self.obligations` field.
-            self.infcx()
-                .combine_fields(self.fields.trace.clone(), self.fields.param_env)
-                .higher_ranked_glb(a, b, self.a_is_expected)
-                .is_err()
-        });
-        debug!("binders: was_error={:?}", was_error);
 
         // When higher-ranked types are involved, computing the LUB is
         // very challenging, switch to invariance. This is obviously
         // overly conservative but works ok in practice.
-        match self.relate_with_variance(ty::Variance::Invariant, a, b) {
-            Ok(_) => Ok(a.clone()),
-            Err(err) => {
-                debug!("binders: error occurred, was_error={:?}", was_error);
-                if !was_error {
-                    Err(TypeError::OldStyleLUB(Box::new(err)))
-                } else {
-                    Err(err)
-                }
-            }
-        }
+        self.relate_with_variance(ty::Variance::Invariant, a, b)?;
+        Ok(a.clone())
     }
 }
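The simplified binders above drops the speculative higher_ranked_glb probe and the OldStyleLUB error wrapping: when binders are involved it simply relates the two sides invariantly and returns the left one. A deliberately tiny model of that shape (plain equality standing in for relate_with_variance(Invariant, ..)):

/// Stand-in for Glb::binders: require the two binders to relate
/// invariantly (modelled here as equality) and return the left side.
fn glb_binders<T: PartialEq + Clone>(a: &T, b: &T) -> Result<T, &'static str> {
    if a == b {
        Ok(a.clone())
    } else {
        Err("no common bound under invariance")
    }
}

fn main() {
    assert!(glb_binders(&"for<'a> fn(&'a u8)", &"for<'a> fn(&'a u8)").is_ok());
    assert!(glb_binders(&"for<'a> fn(&'a u8)", &"fn(&'static u8)").is_err());
}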
 
index d85a3e84f857a85584931db18333814ac0e3f1c5..3e08a4e021aedb5660041fbed31271699adc7781 100644 (file)
@@ -22,7 +22,6 @@
 use ty::{self, TyCtxt, Binder, TypeFoldable};
 use ty::error::TypeError;
 use ty::relate::{Relate, RelateResult, TypeRelation};
-use std::collections::BTreeMap;
 use syntax_pos::Span;
 use util::nodemap::{FxHashMap, FxHashSet};
 
@@ -202,261 +201,6 @@ pub fn higher_ranked_match<T, U>(&mut self,
             Ok(HrMatchResult { value: a_value })
         });
     }
-
-    pub fn higher_ranked_lub<T>(&mut self, a: &Binder<T>, b: &Binder<T>, a_is_expected: bool)
-                                -> RelateResult<'tcx, Binder<T>>
-        where T: Relate<'tcx>
-    {
-        // Start a snapshot so we can examine "all bindings that were
-        // created as part of this type comparison".
-        return self.infcx.commit_if_ok(|snapshot| {
-            // Instantiate each bound region with a fresh region variable.
-            let span = self.trace.cause.span;
-            let (a_with_fresh, a_map) =
-                self.infcx.replace_late_bound_regions_with_fresh_var(
-                    span, HigherRankedType, a);
-            let (b_with_fresh, _) =
-                self.infcx.replace_late_bound_regions_with_fresh_var(
-                    span, HigherRankedType, b);
-
-            // Collect constraints.
-            let result0 =
-                self.lub(a_is_expected).relate(&a_with_fresh, &b_with_fresh)?;
-            let result0 =
-                self.infcx.resolve_type_vars_if_possible(&result0);
-            debug!("lub result0 = {:?}", result0);
-
-            // Generalize the regions appearing in result0 if possible
-            let new_vars = self.infcx.region_vars_confined_to_snapshot(snapshot);
-            let span = self.trace.cause.span;
-            let result1 =
-                fold_regions_in(
-                    self.tcx(),
-                    &result0,
-                    |r, debruijn| generalize_region(self.infcx, span, snapshot, debruijn,
-                                                    &new_vars, &a_map, r));
-
-            debug!("lub({:?},{:?}) = {:?}",
-                   a,
-                   b,
-                   result1);
-
-            Ok(ty::Binder::bind(result1))
-        });
-
-        fn generalize_region<'a, 'gcx, 'tcx>(infcx: &InferCtxt<'a, 'gcx, 'tcx>,
-                                             span: Span,
-                                             snapshot: &CombinedSnapshot<'a, 'tcx>,
-                                             debruijn: ty::DebruijnIndex,
-                                             new_vars: &[ty::RegionVid],
-                                             a_map: &BTreeMap<ty::BoundRegion, ty::Region<'tcx>>,
-                                             r0: ty::Region<'tcx>)
-                                             -> ty::Region<'tcx> {
-            // Regions that pre-dated the LUB computation stay as they are.
-            if !is_var_in_set(new_vars, r0) {
-                assert!(!r0.is_late_bound());
-                debug!("generalize_region(r0={:?}): not new variable", r0);
-                return r0;
-            }
-
-            let tainted = infcx.tainted_regions(snapshot, r0, TaintDirections::both());
-
-            // Variables created during LUB computation which are
-            // *related* to regions that pre-date the LUB computation
-            // stay as they are.
-            if !tainted.iter().all(|&r| is_var_in_set(new_vars, r)) {
-                debug!("generalize_region(r0={:?}): \
-                        non-new-variables found in {:?}",
-                       r0, tainted);
-                assert!(!r0.is_late_bound());
-                return r0;
-            }
-
-            // Otherwise, the variable must be associated with at
-            // least one of the variables representing bound regions
-            // in both A and B.  Replace the variable with the "first"
-            // bound region from A that we find it to be associated
-            // with.
-            for (a_br, a_r) in a_map {
-                if tainted.iter().any(|x| x == a_r) {
-                    debug!("generalize_region(r0={:?}): \
-                            replacing with {:?}, tainted={:?}",
-                           r0, *a_br, tainted);
-                    return infcx.tcx.mk_region(ty::ReLateBound(debruijn, *a_br));
-                }
-            }
-
-            span_bug!(
-                span,
-                "region {:?} is not associated with any bound region from A!",
-                r0)
-        }
-    }
-
-    pub fn higher_ranked_glb<T>(&mut self, a: &Binder<T>, b: &Binder<T>, a_is_expected: bool)
-                                -> RelateResult<'tcx, Binder<T>>
-        where T: Relate<'tcx>
-    {
-        debug!("higher_ranked_glb({:?}, {:?})",
-               a, b);
-
-        // Make a snapshot so we can examine "all bindings that were
-        // created as part of this type comparison".
-        return self.infcx.commit_if_ok(|snapshot| {
-            // Instantiate each bound region with a fresh region variable.
-            let (a_with_fresh, a_map) =
-                self.infcx.replace_late_bound_regions_with_fresh_var(
-                    self.trace.cause.span, HigherRankedType, a);
-            let (b_with_fresh, b_map) =
-                self.infcx.replace_late_bound_regions_with_fresh_var(
-                    self.trace.cause.span, HigherRankedType, b);
-            let a_vars = var_ids(self, &a_map);
-            let b_vars = var_ids(self, &b_map);
-
-            // Collect constraints.
-            let result0 =
-                self.glb(a_is_expected).relate(&a_with_fresh, &b_with_fresh)?;
-            let result0 =
-                self.infcx.resolve_type_vars_if_possible(&result0);
-            debug!("glb result0 = {:?}", result0);
-
-            // Generalize the regions appearing in result0 if possible
-            let new_vars = self.infcx.region_vars_confined_to_snapshot(snapshot);
-            let span = self.trace.cause.span;
-            let result1 =
-                fold_regions_in(
-                    self.tcx(),
-                    &result0,
-                    |r, debruijn| generalize_region(self.infcx, span, snapshot, debruijn,
-                                                    &new_vars,
-                                                    &a_map, &a_vars, &b_vars,
-                                                    r));
-
-            debug!("glb({:?},{:?}) = {:?}",
-                   a,
-                   b,
-                   result1);
-
-            Ok(ty::Binder::bind(result1))
-        });
-
-        fn generalize_region<'a, 'gcx, 'tcx>(infcx: &InferCtxt<'a, 'gcx, 'tcx>,
-                                             span: Span,
-                                             snapshot: &CombinedSnapshot<'a, 'tcx>,
-                                             debruijn: ty::DebruijnIndex,
-                                             new_vars: &[ty::RegionVid],
-                                             a_map: &BTreeMap<ty::BoundRegion, ty::Region<'tcx>>,
-                                             a_vars: &[ty::RegionVid],
-                                             b_vars: &[ty::RegionVid],
-                                             r0: ty::Region<'tcx>)
-                                             -> ty::Region<'tcx> {
-            if !is_var_in_set(new_vars, r0) {
-                assert!(!r0.is_late_bound());
-                return r0;
-            }
-
-            let tainted = infcx.tainted_regions(snapshot, r0, TaintDirections::both());
-
-            let mut a_r = None;
-            let mut b_r = None;
-            let mut only_new_vars = true;
-            for r in &tainted {
-                if is_var_in_set(a_vars, *r) {
-                    if a_r.is_some() {
-                        return fresh_bound_variable(infcx, debruijn);
-                    } else {
-                        a_r = Some(*r);
-                    }
-                } else if is_var_in_set(b_vars, *r) {
-                    if b_r.is_some() {
-                        return fresh_bound_variable(infcx, debruijn);
-                    } else {
-                        b_r = Some(*r);
-                    }
-                } else if !is_var_in_set(new_vars, *r) {
-                    only_new_vars = false;
-                }
-            }
-
-            // NB---I do not believe this algorithm computes
-            // (necessarily) the GLB.  As written it can
-            // spuriously fail. In particular, if there is a case
-            // like: |fn(&a)| and fn(fn(&b)), where a and b are
-            // free, it will return fn(&c) where c = GLB(a,b).  If
-            // however this GLB is not defined, then the result is
-            // an error, even though something like
-            // "fn<X>(fn(&X))" where X is bound would be a
-            // subtype of both of those.
-            //
-            // The problem is that if we were to return a bound
-            // variable, we'd be computing a lower-bound, but not
-            // necessarily the *greatest* lower-bound.
-            //
-            // Unfortunately, this problem is non-trivial to solve,
-            // because we do not know at the time of computing the GLB
-            // whether a GLB(a,b) exists or not, because we haven't
-            // run region inference (or indeed, even fully computed
-            // the region hierarchy!). The current algorithm seems to
-            // works ok in practice.
-
-            if a_r.is_some() && b_r.is_some() && only_new_vars {
-                // Related to exactly one bound variable from each fn:
-                return rev_lookup(infcx, span, a_map, a_r.unwrap());
-            } else if a_r.is_none() && b_r.is_none() {
-                // Not related to bound variables from either fn:
-                assert!(!r0.is_late_bound());
-                return r0;
-            } else {
-                // Other:
-                return fresh_bound_variable(infcx, debruijn);
-            }
-        }
-
-        fn rev_lookup<'a, 'gcx, 'tcx>(infcx: &InferCtxt<'a, 'gcx, 'tcx>,
-                                      span: Span,
-                                      a_map: &BTreeMap<ty::BoundRegion, ty::Region<'tcx>>,
-                                      r: ty::Region<'tcx>) -> ty::Region<'tcx>
-        {
-            for (a_br, a_r) in a_map {
-                if *a_r == r {
-                    return infcx.tcx.mk_region(ty::ReLateBound(ty::INNERMOST, *a_br));
-                }
-            }
-            span_bug!(
-                span,
-                "could not find original bound region for {:?}",
-                r);
-        }
-
-        fn fresh_bound_variable<'a, 'gcx, 'tcx>(infcx: &InferCtxt<'a, 'gcx, 'tcx>,
-                                                debruijn: ty::DebruijnIndex)
-                                                -> ty::Region<'tcx> {
-            infcx.borrow_region_constraints().new_bound(infcx.tcx, debruijn)
-        }
-    }
-}
-
-fn var_ids<'a, 'gcx, 'tcx>(fields: &CombineFields<'a, 'gcx, 'tcx>,
-                           map: &BTreeMap<ty::BoundRegion, ty::Region<'tcx>>)
-                           -> Vec<ty::RegionVid> {
-    map.iter()
-       .map(|(_, &r)| match *r {
-           ty::ReVar(r) => { r }
-           _ => {
-               span_bug!(
-                   fields.trace.cause.span,
-                   "found non-region-vid: {:?}",
-                   r);
-           }
-       })
-       .collect()
-}
-
-fn is_var_in_set(new_vars: &[ty::RegionVid], r: ty::Region<'_>) -> bool {
-    match *r {
-        ty::ReVar(ref v) => new_vars.iter().any(|x| x == v),
-        _ => false
-    }
 }
 
 fn fold_regions_in<'a, 'gcx, 'tcx, T, F>(tcx: TyCtxt<'a, 'gcx, 'tcx>,
index 8f28e9a320df24854af4fb9088387a458310da74..75f503d3bcfb4b331780bcd1e50cc6177f564c9b 100644 (file)
@@ -260,9 +260,7 @@ fn expand_node(
     fn lub_concrete_regions(&self, a: Region<'tcx>, b: Region<'tcx>) -> Region<'tcx> {
         let tcx = self.tcx();
         match (a, b) {
-            (&ty::ReCanonical(..), _)
-            | (_, &ty::ReCanonical(..))
-            | (&ty::ReClosureBound(..), _)
+            (&ty::ReClosureBound(..), _)
             | (_, &ty::ReClosureBound(..))
             | (&ReLateBound(..), _)
             | (_, &ReLateBound(..))
index 55c7eef607bbe41fc6a7d7e31f6b379d8d2d7afe..8875b4169dd6e796028916f558fc9ae009e3858d 100644 (file)
@@ -15,7 +15,6 @@
 
 use traits::ObligationCause;
 use ty::{self, Ty, TyCtxt};
-use ty::error::TypeError;
 use ty::relate::{Relate, RelateResult, TypeRelation};
 
 /// "Least upper bound" (common supertype)
@@ -76,31 +75,12 @@ fn binders<T>(&mut self, a: &ty::Binder<T>, b: &ty::Binder<T>)
         where T: Relate<'tcx>
     {
         debug!("binders(a={:?}, b={:?})", a, b);
-        let was_error = self.infcx().probe(|_snapshot| {
-            // Subtle: use a fresh combine-fields here because we recover
-            // from Err. Doing otherwise could propagate obligations out
-            // through our `self.obligations` field.
-            self.infcx()
-                .combine_fields(self.fields.trace.clone(), self.fields.param_env)
-                .higher_ranked_lub(a, b, self.a_is_expected)
-                .is_err()
-        });
-        debug!("binders: was_error={:?}", was_error);
 
         // When higher-ranked types are involved, computing the LUB is
         // very challenging, switch to invariance. This is obviously
         // overly conservative but works ok in practice.
-        match self.relate_with_variance(ty::Variance::Invariant, a, b) {
-            Ok(_) => Ok(a.clone()),
-            Err(err) => {
-                debug!("binders: error occurred, was_error={:?}", was_error);
-                if !was_error {
-                    Err(TypeError::OldStyleLUB(Box::new(err)))
-                } else {
-                    Err(err)
-                }
-            }
-        }
+        self.relate_with_variance(ty::Variance::Invariant, a, b)?;
+        Ok(a.clone())
     }
 }
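
(Aside, not part of the patch: a standalone sketch of the kind of situation the comment above is about, namely finding one common type for two function types whose lifetimes are bound differently. The example relies only on ordinary fn-pointer coercion and the names are invented; it is here for orientation, not as a test of this code path.)

    // One higher-ranked fn pointer, one fixed to a caller-chosen lifetime.
    fn pick<'a>(
        flag: bool,
        f: for<'x> fn(&'x u32) -> &'x u32,
        g: fn(&'a u32) -> &'a u32,
    ) -> fn(&'a u32) -> &'a u32 {
        // Both arms must end up with a single common type; the higher-ranked
        // `f` can stand in for `g`'s more specific type, so that specific
        // type is what the whole `if` expression takes on.
        if flag { f } else { g }
    }

    fn main() {
        fn id(x: &u32) -> &u32 { x }
        let n = 5;
        let h = pick(true, id, id);
        println!("{}", h(&n));
    }
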
 
index 5db850f1588b6a24bd4c784d56a28e64751a95a6..523f03c2cfc4773afc81f9c008a0fa7294bb5f92 100644 (file)
@@ -305,7 +305,7 @@ pub fn type_must_outlive(
             ty, region, origin
         );
 
-        assert!(!ty.has_escaping_regions());
+        assert!(!ty.has_escaping_bound_vars());
 
         let components = self.tcx.outlives_components(ty);
         self.components_must_outlive(origin, components, region);
index e1db295b7e14d6e10dcfa3fd006ccbb5ff3b1403..88d45671b9afdfab86538d6a1b6d6345cfeed3ba 100644 (file)
@@ -323,7 +323,7 @@ fn collect_outlives_from_predicate_list(
         predicates
             .into_iter()
             .filter_map(|p| p.as_ref().to_opt_type_outlives())
-            .filter_map(|p| p.no_late_bound_regions())
+            .filter_map(|p| p.no_bound_vars())
             .filter(move |p| compare_ty(p.0))
     }
 }
index c82603bf5605788cbd6db75fb9708df35087c6cc..46b12d01829e7089835c2090715878efd39a79f9 100644 (file)
@@ -833,10 +833,6 @@ fn universe(&self, region: Region<'tcx>) -> ty::UniverseIndex {
             ty::RePlaceholder(placeholder) => placeholder.universe,
             ty::ReClosureBound(vid) | ty::ReVar(vid) => self.var_universe(vid),
             ty::ReLateBound(..) => bug!("universe(): encountered bound region {:?}", region),
-            ty::ReCanonical(..) => bug!(
-                "region_universe(): encountered canonical region {:?}",
-                region
-            ),
         }
     }
 
index 048810c042722f552de6844b3e5862f1d13b9a53..3b0f9a5e545fde48e157985c952b8b510f9cb83a 100644 (file)
@@ -84,8 +84,8 @@ fn tys(&mut self, a: Ty<'tcx>, b: Ty<'tcx>) -> RelateResult<'tcx, Ty<'tcx>> {
                 // Shouldn't have any LBR here, so we can safely put
                 // this under a binder below without fear of accidental
                 // capture.
-                assert!(!a.has_escaping_regions());
-                assert!(!b.has_escaping_regions());
+                assert!(!a.has_escaping_bound_vars());
+                assert!(!b.has_escaping_bound_vars());
 
                 // can't make progress on `A <: B` if both A and B are
                 // type variables, so record an obligation. We also
index d8defabd3fe6616f90802efe8d956f438952c158..0aa964a44fd2cf962d5d8fe83d6cfef765af0f6e 100644 (file)
@@ -42,7 +42,6 @@
 
 #![feature(box_patterns)]
 #![feature(box_syntax)]
-#![cfg_attr(stage0, feature(min_const_fn))]
 #![feature(core_intrinsics)]
 #![feature(drain_filter)]
 #![cfg_attr(windows, feature(libc))]
@@ -67,7 +66,6 @@
 #![feature(step_trait)]
 #![feature(integer_atomics)]
 #![feature(test)]
-#![cfg_attr(stage0, feature(impl_header_lifetime_elision))]
 #![feature(in_band_lifetimes)]
 #![feature(macro_at_most_once_rep)]
 #![feature(crate_visibility_modifier)]
index 7e2532bb1c4e868aef9508d8db2e9b307d253a44..5470aff77f8a43abac0cca195716e28665c3e246 100644 (file)
@@ -783,11 +783,11 @@ pub fn current_lint_root(&self) -> ast::NodeId {
     }
 }
 
-impl<'a, 'tcx> LayoutOf for &'a LateContext<'a, 'tcx> {
+impl<'a, 'tcx> LayoutOf for LateContext<'a, 'tcx> {
     type Ty = Ty<'tcx>;
     type TyLayout = Result<TyLayout<'tcx>, LayoutError<'tcx>>;
 
-    fn layout_of(self, ty: Ty<'tcx>) -> Self::TyLayout {
+    fn layout_of(&self, ty: Ty<'tcx>) -> Self::TyLayout {
         self.tcx.layout_of(self.param_env.and(ty))
     }
 }
index dc5f736172503be4805b680731c79dd72105aac1..c5bcfd48cf39a5dbcc46fc2b527f0900031acf70 100644 (file)
@@ -291,10 +291,8 @@ fn has_allow_dead_code_or_lang_attr(tcx: TyCtxt<'_, '_, '_>,
         return true;
     }
 
-    // (To be) stable attribute for #[lang = "panic_impl"]
-    if attr::contains_name(attrs, "panic_implementation") ||
-        attr::contains_name(attrs, "panic_handler")
-    {
+    // Stable attribute for #[lang = "panic_impl"]
+    if attr::contains_name(attrs, "panic_handler") {
         return true;
     }
 
index 9d54e798469f7f36fa33ccdae179cee9e895572d..0a5b1e184deec4ff98f7ecbd8210cd3643f3cac7 100644 (file)
@@ -84,7 +84,7 @@ fn check_transmute(&self, span: Span, from: Ty<'tcx>, to: Ty<'tcx>) {
             // `Option<typeof(function)>` to present a clearer error.
             let from = unpack_option_like(self.tcx.global_tcx(), from);
             if let (&ty::FnDef(..), SizeSkeleton::Known(size_to)) = (&from.sty, sk_to) {
-                if size_to == Pointer.size(self.tcx) {
+                if size_to == Pointer.size(&self.tcx) {
                     struct_span_err!(self.tcx.sess, span, E0591,
                                      "can't transmute zero-sized type")
                         .note(&format!("source type: {}", from))
index 45de958e72eba419259b67436915d0e1a2b41d71..55ffa50e7c896b0600390f434b4c8599d2a62083 100644 (file)
@@ -204,9 +204,7 @@ pub fn extract(attrs: &[ast::Attribute]) -> Option<(Symbol, Span)> {
             if let Some(value) = attribute.value_str() {
                 return Some((value, attribute.span));
             }
-        } else if attribute.check_name("panic_implementation") ||
-            attribute.check_name("panic_handler")
-        {
+        } else if attribute.check_name("panic_handler") {
             return Some((Symbol::intern("panic_impl"), attribute.span))
         } else if attribute.check_name("alloc_error_handler") {
             return Some((Symbol::intern("oom"), attribute.span))
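
(For orientation, not part of the patch: after this change only the stable `#[panic_handler]` attribute maps to the `panic_impl` lang item; the pre-stabilization `#[panic_implementation]` spelling is no longer recognized. A minimal crate root using the stable attribute looks like the sketch below; it only makes sense in a `#![no_std]` crate, so build it as a library.)

    #![no_std]

    use core::panic::PanicInfo;

    // Registers this function as the panic handler (the `panic_impl` lang item).
    #[panic_handler]
    fn panic(_info: &PanicInfo) -> ! {
        loop {}
    }
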
@@ -271,6 +269,7 @@ pub fn collect<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>) -> LanguageItems {
     DropTraitLangItem,           "drop",               drop_trait,              Target::Trait;
 
     CoerceUnsizedTraitLangItem,  "coerce_unsized",     coerce_unsized_trait,    Target::Trait;
+    DispatchFromDynTraitLangItem,"dispatch_from_dyn",  dispatch_from_dyn_trait, Target::Trait;
 
     AddTraitLangItem,            "add",                add_trait,               Target::Trait;
     SubTraitLangItem,            "sub",                sub_trait,               Target::Trait;
index 361abb1689619e1cd92ee0557b54cfef1f17b292..79cd8b21f1b6aa7d7fd68499f4f8f604c4ca3553 100644 (file)
@@ -447,6 +447,17 @@ fn krate<'tcx>(tcx: TyCtxt<'_, 'tcx, 'tcx>) -> NamedRegionMap {
     map
 }
 
+/// In traits, there is an implicit `Self` type parameter which comes before the generics.
+/// We have to account for this when computing the index of the other generic parameters.
+/// This function returns whether there is such an implicit parameter defined on the given item.
+fn sub_items_have_self_param(node: &hir::ItemKind) -> bool {
+    match *node {
+        hir::ItemKind::Trait(..) |
+        hir::ItemKind::TraitAlias(..) => true,
+        _ => false,
+    }
+}
+
 impl<'a, 'tcx> Visitor<'tcx> for LifetimeContext<'a, 'tcx> {
     fn nested_visit_map<'this>(&'this mut self) -> NestedVisitorMap<'this, 'tcx> {
         NestedVisitorMap::All(&self.tcx.hir)
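
(Illustration only, not rustc code: the reason both `Trait` and `TraitAlias` answer `true` above is that each introduces an implicit `Self` type parameter ahead of its explicit generics, so the explicitly declared parameters start at index 1. Trait aliases are still feature-gated, so this sketch needs a nightly toolchain.)

    #![feature(trait_alias)]

    trait Greet<'a> {
        // `Self` is implicit and comes before `'a` in the parameter list.
        fn greet(&self) -> &'a str;
    }

    // A trait alias gets the same treatment: implicit `Self`, then `'a`.
    trait GreetAlias<'a> = Greet<'a>;

    fn main() {}
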
@@ -522,8 +533,8 @@ fn visit_item(&mut self, item: &'tcx hir::Item) {
                     hir::ItemKind::Impl(..) => true,
                     _ => false,
                 };
-                // These kinds of items have only early bound lifetime parameters.
-                let mut index = if let hir::ItemKind::Trait(..) = item.node {
+                // These kinds of items have only early-bound lifetime parameters.
+                let mut index = if sub_items_have_self_param(&item.node) {
                     1 // Self comes before lifetimes
                 } else {
                     0
@@ -1443,23 +1454,101 @@ fn with<F>(&mut self, wrap_scope: Scope<'_>, f: F)
     /// helper method to determine the span to remove when suggesting the
     /// deletion of a lifetime
     fn lifetime_deletion_span(&self, name: ast::Ident, generics: &hir::Generics) -> Option<Span> {
-        if generics.params.len() == 1 {
-            // if sole lifetime, remove the `<>` brackets
-            Some(generics.span)
-        } else {
-            generics.params.iter().enumerate().find_map(|(i, param)| {
-                if param.name.ident() == name {
-                    // We also want to delete a leading or trailing comma
-                    // as appropriate
-                    if i >= generics.params.len() - 1 {
-                        Some(generics.params[i - 1].span.shrink_to_hi().to(param.span))
-                    } else {
-                        Some(param.span.to(generics.params[i + 1].span.shrink_to_lo()))
+        generics.params.iter().enumerate().find_map(|(i, param)| {
+            if param.name.ident() == name {
+                let mut in_band = false;
+                if let hir::GenericParamKind::Lifetime { kind } = param.kind {
+                    if let hir::LifetimeParamKind::InBand = kind {
+                        in_band = true;
                     }
+                }
+                if in_band {
+                    Some(param.span)
                 } else {
-                    None
+                    if generics.params.len() == 1 {
+                        // if sole lifetime, remove the entire `<>` brackets
+                        Some(generics.span)
+                    } else {
+                        // if removing within `<>` brackets, we also want to
+                        // delete a leading or trailing comma as appropriate
+                        if i >= generics.params.len() - 1 {
+                            Some(generics.params[i - 1].span.shrink_to_hi().to(param.span))
+                        } else {
+                            Some(param.span.to(generics.params[i + 1].span.shrink_to_lo()))
+                        }
+                    }
                 }
-            })
+            } else {
+                None
+            }
+        })
+    }
+
+    // helper method to issue suggestions from `fn rah<'a>(&'a T)` to `fn rah(&T)`
+    fn suggest_eliding_single_use_lifetime(
+        &self, err: &mut DiagnosticBuilder<'_>, def_id: DefId, lifetime: &hir::Lifetime
+    ) {
+        // FIXME: future work: also suggest `impl Foo<'_>` for `impl<'a> Foo<'a>`
+        let name = lifetime.name.ident();
+        let mut remove_decl = None;
+        if let Some(parent_def_id) = self.tcx.parent(def_id) {
+            if let Some(generics) = self.tcx.hir.get_generics(parent_def_id) {
+                remove_decl = self.lifetime_deletion_span(name, generics);
+            }
+        }
+
+        let mut remove_use = None;
+        let mut find_arg_use_span = |inputs: &hir::HirVec<hir::Ty>| {
+            for input in inputs {
+                if let hir::TyKind::Rptr(lt, _) = input.node {
+                    if lt.name.ident() == name {
+                        // include the trailing whitespace between the ampersand and the type name
+                        let lt_through_ty_span = lifetime.span.to(input.span.shrink_to_hi());
+                        remove_use = Some(
+                            self.tcx.sess.source_map()
+                                .span_until_non_whitespace(lt_through_ty_span)
+                        );
+                        break;
+                    }
+                }
+            }
+        };
+        if let Node::Lifetime(hir_lifetime) = self.tcx.hir.get(lifetime.id) {
+            if let Some(parent) = self.tcx.hir.find(self.tcx.hir.get_parent(hir_lifetime.id)) {
+                match parent {
+                    Node::Item(item) => {
+                        if let hir::ItemKind::Fn(decl, _, _, _) = &item.node {
+                            find_arg_use_span(&decl.inputs);
+                        }
+                    },
+                    Node::ImplItem(impl_item) => {
+                        if let hir::ImplItemKind::Method(sig, _) = &impl_item.node {
+                            find_arg_use_span(&sig.decl.inputs);
+                        }
+                    }
+                    _ => {}
+                }
+            }
+        }
+
+        if let (Some(decl_span), Some(use_span)) = (remove_decl, remove_use) {
+            // if both declaration and use deletion spans start at the same
+            // place ("start at" because the latter includes trailing
+            // whitespace), then this is an in-band lifetime
+            if decl_span.shrink_to_lo() == use_span.shrink_to_lo() {
+                err.span_suggestion_with_applicability(
+                    use_span,
+                    "elide the single-use lifetime",
+                    String::new(),
+                    Applicability::MachineApplicable,
+                );
+            } else {
+                err.multipart_suggestion_with_applicability(
+                    "elide the single-use lifetime",
+                    vec![(decl_span, String::new()), (use_span, String::new())],
+                    Applicability::MachineApplicable,
+                );
+            }
         }
     }
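
(A sketch of the input and output this helper machinery aims at, not a test from the patch: with the `single_use_lifetimes` lint enabled, the first declaration is flagged as "lifetime parameter `'a` only used once" and the machine-applicable suggestion is the elided form shown after it.)

    #![warn(single_use_lifetimes)]

    // warning: lifetime parameter `'a` only used once
    fn rah<'a>(x: &'a u32) -> u32 {
        *x
    }

    // The suggested fix: elide the single-use lifetime.
    fn rah_elided(x: &u32) -> u32 {
        *x
    }

    fn main() {
        println!("{}", rah(&1) + rah_elided(&2));
    }
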
 
@@ -1515,14 +1604,26 @@ fn check_uses_for_lifetimes_defined_by_scope(&mut self) {
                         _ => None,
                     } {
                         debug!("id = {:?} span = {:?} name = {:?}", node_id, span, name);
+
+                        if name == keywords::UnderscoreLifetime.ident() {
+                            continue;
+                        }
+
                         let mut err = self.tcx.struct_span_lint_node(
                             lint::builtin::SINGLE_USE_LIFETIMES,
                             id,
                             span,
                             &format!("lifetime parameter `{}` only used once", name),
                         );
-                        err.span_label(span, "this lifetime...");
-                        err.span_label(lifetime.span, "...is used only here");
+
+                        if span == lifetime.span {
+                            // spans are the same for in-band lifetime declarations
+                            err.span_label(span, "this lifetime is only used here");
+                        } else {
+                            err.span_label(span, "this lifetime...");
+                            err.span_label(lifetime.span, "...is used only here");
+                        }
+                        self.suggest_eliding_single_use_lifetime(&mut err, def_id, lifetime);
                         err.emit();
                     }
                 }
@@ -1555,7 +1656,7 @@ fn check_uses_for_lifetimes_defined_by_scope(&mut self) {
                                 if let Some(span) = unused_lt_span {
                                     err.span_suggestion_with_applicability(
                                         span,
-                                        "remove it",
+                                        "elide the unused lifetime",
                                         String::new(),
                                         Applicability::MachineApplicable,
                                     );
@@ -1602,8 +1703,8 @@ fn visit_early_late<F>(
         let mut index = 0;
         if let Some(parent_id) = parent_id {
             let parent = self.tcx.hir.expect_item(parent_id);
-            if let hir::ItemKind::Trait(..) = parent.node {
-                index += 1; // Self comes first.
+            if sub_items_have_self_param(&parent.node) {
+                index += 1; // Self comes before lifetimes
             }
             match parent.node {
                 hir::ItemKind::Trait(_, _, ref generics, ..)
index 62cc3113a3d37f6a9bfb70e70eac0bf91a9c7a56..f8a5dbc6905ca9be31429b042d183fe26926a95e 100644 (file)
@@ -86,18 +86,18 @@ pub trait PointerArithmetic: layout::HasDataLayout {
     // These are not supposed to be overridden.
 
     #[inline(always)]
-    fn pointer_size(self) -> Size {
+    fn pointer_size(&self) -> Size {
         self.data_layout().pointer_size
     }
 
     //// Truncate the given value to the pointer size; also return whether there was an overflow
-    fn truncate_to_ptr(self, val: u128) -> (u64, bool) {
+    fn truncate_to_ptr(&self, val: u128) -> (u64, bool) {
         let max_ptr_plus_1 = 1u128 << self.pointer_size().bits();
         ((val % max_ptr_plus_1) as u64, val >= max_ptr_plus_1)
     }
 
     // Overflow checking only works properly on the range from -u64 to +u64.
-    fn overflowing_signed_offset(self, val: u64, i: i128) -> (u64, bool) {
+    fn overflowing_signed_offset(&self, val: u64, i: i128) -> (u64, bool) {
         // FIXME: is it possible to over/underflow here?
         if i < 0 {
             // trickery to ensure that i64::min_value() works fine
@@ -109,23 +109,23 @@ fn overflowing_signed_offset(self, val: u64, i: i128) -> (u64, bool) {
         }
     }
 
-    fn overflowing_offset(self, val: u64, i: u64) -> (u64, bool) {
+    fn overflowing_offset(&self, val: u64, i: u64) -> (u64, bool) {
         let (res, over1) = val.overflowing_add(i);
         let (res, over2) = self.truncate_to_ptr(res as u128);
         (res, over1 || over2)
     }
 
-    fn signed_offset<'tcx>(self, val: u64, i: i64) -> EvalResult<'tcx, u64> {
+    fn signed_offset<'tcx>(&self, val: u64, i: i64) -> EvalResult<'tcx, u64> {
         let (res, over) = self.overflowing_signed_offset(val, i as i128);
         if over { err!(Overflow(mir::BinOp::Add)) } else { Ok(res) }
     }
 
-    fn offset<'tcx>(self, val: u64, i: u64) -> EvalResult<'tcx, u64> {
+    fn offset<'tcx>(&self, val: u64, i: u64) -> EvalResult<'tcx, u64> {
         let (res, over) = self.overflowing_offset(val, i);
         if over { err!(Overflow(mir::BinOp::Add)) } else { Ok(res) }
     }
 
-    fn wrapping_signed_offset(self, val: u64, i: i64) -> u64 {
+    fn wrapping_signed_offset(&self, val: u64, i: i64) -> u64 {
         self.overflowing_signed_offset(val, i as i128).0
     }
 }
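
(Not from the patch: the same truncate-and-check arithmetic as `truncate_to_ptr`/`overflowing_offset` above, written as a free-standing sketch. The pointer width normally comes from the target's data layout; a 64-bit width is hard-coded here as an assumption so the example runs on its own.)

    const PTR_BITS: u32 = 64; // assumed pointer width

    // Truncate to the pointer size and report whether anything was cut off.
    fn truncate_to_ptr(val: u128) -> (u64, bool) {
        let max_ptr_plus_1 = 1u128 << PTR_BITS;
        ((val % max_ptr_plus_1) as u64, val >= max_ptr_plus_1)
    }

    // An offset overflows if the u64 addition wraps or the sum no longer fits a pointer.
    fn overflowing_offset(val: u64, i: u64) -> (u64, bool) {
        let (res, over1) = val.overflowing_add(i);
        let (res, over2) = truncate_to_ptr(res as u128);
        (res, over1 || over2)
    }

    fn main() {
        assert_eq!(overflowing_offset(40, 2), (42, false));
        assert_eq!(overflowing_offset(u64::max_value(), 1), (0, true));
        println!("ok");
    }
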
@@ -176,7 +176,7 @@ pub fn new_with_tag(alloc_id: AllocId, offset: Size, tag: Tag) -> Self {
         Pointer { alloc_id, offset, tag }
     }
 
-    pub fn wrapping_signed_offset(self, i: i64, cx: impl HasDataLayout) -> Self {
+    pub fn wrapping_signed_offset(self, i: i64, cx: &impl HasDataLayout) -> Self {
         Pointer::new_with_tag(
             self.alloc_id,
             Size::from_bytes(cx.data_layout().wrapping_signed_offset(self.offset.bytes(), i)),
@@ -184,12 +184,12 @@ pub fn wrapping_signed_offset(self, i: i64, cx: impl HasDataLayout) -> Self {
         )
     }
 
-    pub fn overflowing_signed_offset(self, i: i128, cx: impl HasDataLayout) -> (Self, bool) {
+    pub fn overflowing_signed_offset(self, i: i128, cx: &impl HasDataLayout) -> (Self, bool) {
         let (res, over) = cx.data_layout().overflowing_signed_offset(self.offset.bytes(), i);
         (Pointer::new_with_tag(self.alloc_id, Size::from_bytes(res), self.tag), over)
     }
 
-    pub fn signed_offset(self, i: i64, cx: impl HasDataLayout) -> EvalResult<'tcx, Self> {
+    pub fn signed_offset(self, i: i64, cx: &impl HasDataLayout) -> EvalResult<'tcx, Self> {
         Ok(Pointer::new_with_tag(
             self.alloc_id,
             Size::from_bytes(cx.data_layout().signed_offset(self.offset.bytes(), i)?),
@@ -197,12 +197,12 @@ pub fn signed_offset(self, i: i64, cx: impl HasDataLayout) -> EvalResult<'tcx, S
         ))
     }
 
-    pub fn overflowing_offset(self, i: Size, cx: impl HasDataLayout) -> (Self, bool) {
+    pub fn overflowing_offset(self, i: Size, cx: &impl HasDataLayout) -> (Self, bool) {
         let (res, over) = cx.data_layout().overflowing_offset(self.offset.bytes(), i.bytes());
         (Pointer::new_with_tag(self.alloc_id, Size::from_bytes(res), self.tag), over)
     }
 
-    pub fn offset(self, i: Size, cx: impl HasDataLayout) -> EvalResult<'tcx, Self> {
+    pub fn offset(self, i: Size, cx: &impl HasDataLayout) -> EvalResult<'tcx, Self> {
         Ok(Pointer::new_with_tag(
             self.alloc_id,
             Size::from_bytes(cx.data_layout().offset(self.offset.bytes(), i.bytes())?),
@@ -295,12 +295,10 @@ pub fn new_decoding_session(&self) -> AllocDecodingSession<'_> {
     }
 
     pub fn new(data_offsets: Vec<u32>) -> AllocDecodingState {
-        let decoding_state: Vec<_> = ::std::iter::repeat(Mutex::new(State::Empty))
-            .take(data_offsets.len())
-            .collect();
+        let decoding_state = vec![Mutex::new(State::Empty); data_offsets.len()];
 
         AllocDecodingState {
-            decoding_state: decoding_state,
+            decoding_state,
             data_offsets,
         }
     }
index 4304f08a78f0c56b757f8fb0c9f8927f2918d52c..3b8e19c6ecaa9b4c1c7b272da17ddc95a1757a5f 100644 (file)
@@ -65,7 +65,7 @@ pub fn try_to_ptr(&self) -> Option<Pointer> {
     pub fn new_slice(
         val: Scalar,
         len: u64,
-        cx: impl HasDataLayout
+        cx: &impl HasDataLayout
     ) -> Self {
         ConstValue::ScalarPair(val, Scalar::Bits {
             bits: len as u128,
@@ -121,7 +121,7 @@ pub fn erase_tag(self) -> Scalar {
     }
 
     #[inline]
-    pub fn ptr_null(cx: impl HasDataLayout) -> Self {
+    pub fn ptr_null(cx: &impl HasDataLayout) -> Self {
         Scalar::Bits {
             bits: 0,
             size: cx.data_layout().pointer_size.bytes() as u8,
@@ -134,52 +134,52 @@ pub fn zst() -> Self {
     }
 
     #[inline]
-    pub fn ptr_signed_offset(self, i: i64, cx: impl HasDataLayout) -> EvalResult<'tcx, Self> {
-        let layout = cx.data_layout();
+    pub fn ptr_signed_offset(self, i: i64, cx: &impl HasDataLayout) -> EvalResult<'tcx, Self> {
+        let dl = cx.data_layout();
         match self {
             Scalar::Bits { bits, size } => {
-                assert_eq!(size as u64, layout.pointer_size.bytes());
+                assert_eq!(size as u64, dl.pointer_size.bytes());
                 Ok(Scalar::Bits {
-                    bits: layout.signed_offset(bits as u64, i)? as u128,
+                    bits: dl.signed_offset(bits as u64, i)? as u128,
                     size,
                 })
             }
-            Scalar::Ptr(ptr) => ptr.signed_offset(i, layout).map(Scalar::Ptr),
+            Scalar::Ptr(ptr) => ptr.signed_offset(i, dl).map(Scalar::Ptr),
         }
     }
 
     #[inline]
-    pub fn ptr_offset(self, i: Size, cx: impl HasDataLayout) -> EvalResult<'tcx, Self> {
-        let layout = cx.data_layout();
+    pub fn ptr_offset(self, i: Size, cx: &impl HasDataLayout) -> EvalResult<'tcx, Self> {
+        let dl = cx.data_layout();
         match self {
             Scalar::Bits { bits, size } => {
-                assert_eq!(size as u64, layout.pointer_size.bytes());
+                assert_eq!(size as u64, dl.pointer_size.bytes());
                 Ok(Scalar::Bits {
-                    bits: layout.offset(bits as u64, i.bytes())? as u128,
+                    bits: dl.offset(bits as u64, i.bytes())? as u128,
                     size,
                 })
             }
-            Scalar::Ptr(ptr) => ptr.offset(i, layout).map(Scalar::Ptr),
+            Scalar::Ptr(ptr) => ptr.offset(i, dl).map(Scalar::Ptr),
         }
     }
 
     #[inline]
-    pub fn ptr_wrapping_signed_offset(self, i: i64, cx: impl HasDataLayout) -> Self {
-        let layout = cx.data_layout();
+    pub fn ptr_wrapping_signed_offset(self, i: i64, cx: &impl HasDataLayout) -> Self {
+        let dl = cx.data_layout();
         match self {
             Scalar::Bits { bits, size } => {
-                assert_eq!(size as u64, layout.pointer_size.bytes());
+                assert_eq!(size as u64, dl.pointer_size.bytes());
                 Scalar::Bits {
-                    bits: layout.wrapping_signed_offset(bits as u64, i) as u128,
+                    bits: dl.wrapping_signed_offset(bits as u64, i) as u128,
                     size,
                 }
             }
-            Scalar::Ptr(ptr) => Scalar::Ptr(ptr.wrapping_signed_offset(i, layout)),
+            Scalar::Ptr(ptr) => Scalar::Ptr(ptr.wrapping_signed_offset(i, dl)),
         }
     }
 
     #[inline]
-    pub fn is_null_ptr(self, cx: impl HasDataLayout) -> bool {
+    pub fn is_null_ptr(self, cx: &impl HasDataLayout) -> bool {
         match self {
             Scalar::Bits { bits, size } => {
                 assert_eq!(size as u64, cx.data_layout().pointer_size.bytes());
@@ -301,7 +301,7 @@ pub fn to_u64(self) -> EvalResult<'static, u64> {
         Ok(b as u64)
     }
 
-    pub fn to_usize(self, cx: impl HasDataLayout) -> EvalResult<'static, u64> {
+    pub fn to_usize(self, cx: &impl HasDataLayout) -> EvalResult<'static, u64> {
         let b = self.to_bits(cx.data_layout().pointer_size)?;
         assert_eq!(b as u64 as u128, b);
         Ok(b as u64)
@@ -331,7 +331,7 @@ pub fn to_i64(self) -> EvalResult<'static, i64> {
         Ok(b as i64)
     }
 
-    pub fn to_isize(self, cx: impl HasDataLayout) -> EvalResult<'static, i64> {
+    pub fn to_isize(self, cx: &impl HasDataLayout) -> EvalResult<'static, i64> {
         let b = self.to_bits(cx.data_layout().pointer_size)?;
         let b = sign_extend(b, cx.data_layout().pointer_size) as i128;
         assert_eq!(b as i64 as i128, b);
index 4fea07011ccfbb65268e7d443d4e7174732a602c..36bc2edcf584eb6293b686f6c3a7ab4b5b7720dc 100644 (file)
@@ -506,25 +506,25 @@ pub enum BorrowKind {
     /// implicit closure bindings. It is needed when the closure is
     /// borrowing or mutating a mutable referent, e.g.:
     ///
-    ///    let x: &mut isize = ...;
-    ///    let y = || *x += 5;
+    ///     let x: &mut isize = ...;
+    ///     let y = || *x += 5;
     ///
     /// If we were to try to translate this closure into a more explicit
     /// form, we'd encounter an error with the code as written:
     ///
-    ///    struct Env { x: & &mut isize }
-    ///    let x: &mut isize = ...;
-    ///    let y = (&mut Env { &x }, fn_ptr);  // Closure is pair of env and fn
-    ///    fn fn_ptr(env: &mut Env) { **env.x += 5; }
+    ///     struct Env { x: & &mut isize }
+    ///     let x: &mut isize = ...;
+    ///     let y = (&mut Env { &x }, fn_ptr);  // Closure is pair of env and fn
+    ///     fn fn_ptr(env: &mut Env) { **env.x += 5; }
     ///
     /// This is then illegal because you cannot mutate an `&mut` found
     /// in an aliasable location. To solve, you'd have to translate with
     /// an `&mut` borrow:
     ///
-    ///    struct Env { x: & &mut isize }
-    ///    let x: &mut isize = ...;
-    ///    let y = (&mut Env { &mut x }, fn_ptr); // changed from &x to &mut x
-    ///    fn fn_ptr(env: &mut Env) { **env.x += 5; }
+    ///     struct Env { x: & &mut isize }
+    ///     let x: &mut isize = ...;
+    ///     let y = (&mut Env { &mut x }, fn_ptr); // changed from &x to &mut x
+    ///     fn fn_ptr(env: &mut Env) { **env.x += 5; }
     ///
     /// Now the assignment to `**env.x` is legal, but creating a
     /// mutable pointer to `x` is not because `x` is not mutable. We
@@ -1751,13 +1751,20 @@ pub enum StatementKind<'tcx> {
     InlineAsm {
         asm: Box<InlineAsm>,
         outputs: Box<[Place<'tcx>]>,
-        inputs: Box<[Operand<'tcx>]>,
+        inputs: Box<[(Span, Operand<'tcx>)]>,
     },
 
-    /// Assert the given places to be valid inhabitants of their type.  These statements are
-    /// currently only interpreted by miri and only generated when "-Z mir-emit-validate" is passed.
-    /// See <https://internals.rust-lang.org/t/types-as-contracts/5562/73> for more details.
-    Validate(ValidationOp, Vec<ValidationOperand<'tcx, Place<'tcx>>>),
+    /// Retag references in the given place, ensuring they get fresh tags.  This is
+    /// part of the Stacked Borrows model. These statements are currently only interpreted
+    /// by miri and only generated when "-Z mir-emit-retag" is passed.
+    /// See <https://internals.rust-lang.org/t/stacked-borrows-an-aliasing-model-for-rust/8153/>
+    /// for more details.
+    Retag {
+        /// `fn_entry` indicates whether this is the initial retag that happens in the
+        /// function prolog.
+        fn_entry: bool,
+        place: Place<'tcx>,
+    },
 
     /// Mark one terminating point of a region scope (i.e. static region).
     /// (The starting point(s) arise implicitly from borrows.)
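
(Orientation only, not from the patch: the kind of surface code the new `Retag` statement is about. With `-Z mir-emit-retag`, the MIR built for a function like `bump` would carry a `Retag { fn_entry: true, .. }` for the reference argument in the function prolog, so that an interpreter such as miri can hand it a fresh Stacked Borrows tag; exactly where further retags are placed is up to the MIR pass, so treat this purely as a sketch.)

    fn bump(r: &mut i32) {
        // `r` is the kind of place a fn-entry retag would cover.
        let s = &mut *r; // a reborrow; under the aliasing model it gets its own tag
        *s += 1;
    }

    fn main() {
        let mut x = 41;
        bump(&mut x);
        println!("{}", x); // prints 42
    }
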
@@ -1810,57 +1817,6 @@ pub enum FakeReadCause {
     ForLet,
 }
 
-/// The `ValidationOp` describes what happens with each of the operands of a
-/// `Validate` statement.
-#[derive(Copy, Clone, RustcEncodable, RustcDecodable, PartialEq, Eq)]
-pub enum ValidationOp {
-    /// Recursively traverse the place following the type and validate that all type
-    /// invariants are maintained.  Furthermore, acquire exclusive/read-only access to the
-    /// memory reachable from the place.
-    Acquire,
-    /// Recursive traverse the *mutable* part of the type and relinquish all exclusive
-    /// access.
-    Release,
-    /// Recursive traverse the *mutable* part of the type and relinquish all exclusive
-    /// access *until* the given region ends.  Then, access will be recovered.
-    Suspend(region::Scope),
-}
-
-impl Debug for ValidationOp {
-    fn fmt(&self, fmt: &mut Formatter<'_>) -> fmt::Result {
-        use self::ValidationOp::*;
-        match *self {
-            Acquire => write!(fmt, "Acquire"),
-            Release => write!(fmt, "Release"),
-            // (reuse lifetime rendering policy from ppaux.)
-            Suspend(ref ce) => write!(fmt, "Suspend({})", ty::ReScope(*ce)),
-        }
-    }
-}
-
-// This is generic so that it can be reused by miri
-#[derive(Clone, Hash, PartialEq, Eq, RustcEncodable, RustcDecodable)]
-pub struct ValidationOperand<'tcx, T> {
-    pub place: T,
-    pub ty: Ty<'tcx>,
-    pub re: Option<region::Scope>,
-    pub mutbl: hir::Mutability,
-}
-
-impl<'tcx, T: Debug> Debug for ValidationOperand<'tcx, T> {
-    fn fmt(&self, fmt: &mut Formatter<'_>) -> fmt::Result {
-        write!(fmt, "{:?}: {:?}", self.place, self.ty)?;
-        if let Some(ce) = self.re {
-            // (reuse lifetime rendering policy from ppaux.)
-            write!(fmt, "/{}", ty::ReScope(ce))?;
-        }
-        if let hir::MutImmutable = self.mutbl {
-            write!(fmt, " (imm)")?;
-        }
-        Ok(())
-    }
-}
-
 impl<'tcx> Debug for Statement<'tcx> {
     fn fmt(&self, fmt: &mut Formatter<'_>) -> fmt::Result {
         use self::StatementKind::*;
@@ -1869,7 +1825,8 @@ fn fmt(&self, fmt: &mut Formatter<'_>) -> fmt::Result {
             FakeRead(ref cause, ref place) => write!(fmt, "FakeRead({:?}, {:?})", cause, place),
             // (reuse lifetime rendering policy from ppaux.)
             EndRegion(ref ce) => write!(fmt, "EndRegion({})", ty::ReScope(*ce)),
-            Validate(ref op, ref places) => write!(fmt, "Validate({:?}, {:?})", op, places),
+            Retag { fn_entry, ref place } =>
+                write!(fmt, "Retag({}{:?})", if fn_entry { "[fn entry] " } else { "" }, place),
             StorageLive(ref place) => write!(fmt, "StorageLive({:?})", place),
             StorageDead(ref place) => write!(fmt, "StorageDead({:?})", place),
             SetDiscriminant {
@@ -2944,7 +2901,6 @@ pub enum ClosureOutlivesSubject<'tcx> {
     SourceInfo,
     UpvarDecl,
     FakeReadCause,
-    ValidationOp,
     SourceScope,
     SourceScopeData,
     SourceScopeLocalData,
@@ -2997,12 +2953,6 @@ impl<'tcx> TypeFoldable<'tcx> for BasicBlockData<'tcx> {
     }
 }
 
-BraceStructTypeFoldableImpl! {
-    impl<'tcx> TypeFoldable<'tcx> for ValidationOperand<'tcx, Place<'tcx>> {
-        place, ty, re, mutbl
-    }
-}
-
 BraceStructTypeFoldableImpl! {
     impl<'tcx> TypeFoldable<'tcx> for Statement<'tcx> {
         source_info, kind
@@ -3017,7 +2967,7 @@ impl<'tcx> TypeFoldable<'tcx> for StatementKind<'tcx> {
         (StatementKind::StorageLive)(a),
         (StatementKind::StorageDead)(a),
         (StatementKind::InlineAsm) { asm, outputs, inputs },
-        (StatementKind::Validate)(a, b),
+        (StatementKind::Retag) { fn_entry, place },
         (StatementKind::EndRegion)(a),
         (StatementKind::AscribeUserType)(a, v, b),
         (StatementKind::Nop),
index caa627441ceb6107b5caacb8092004c42466deea..c130e047e47d11e19ca1ae3b0cf3a6f89b8bf13e 100644 (file)
@@ -152,6 +152,13 @@ fn visit_ascribe_user_ty(&mut self,
                 self.super_ascribe_user_ty(place, variance, user_ty, location);
             }
 
+            fn visit_retag(&mut self,
+                           fn_entry: & $($mutability)* bool,
+                           place: & $($mutability)* Place<'tcx>,
+                           location: Location) {
+                self.super_retag(fn_entry, place, location);
+            }
+
             fn visit_place(&mut self,
                             place: & $($mutability)* Place<'tcx>,
                             context: PlaceContext<'tcx>,
@@ -371,17 +378,6 @@ fn super_statement(&mut self,
                         );
                     }
                     StatementKind::EndRegion(_) => {}
-                    StatementKind::Validate(_, ref $($mutability)* places) => {
-                        for operand in places {
-                            self.visit_place(
-                                & $($mutability)* operand.place,
-                                PlaceContext::NonUse(NonUseContext::Validate),
-                                location
-                            );
-                            self.visit_ty(& $($mutability)* operand.ty,
-                                          TyContext::Location(location));
-                        }
-                    }
                     StatementKind::SetDiscriminant{ ref $($mutability)* place, .. } => {
                         self.visit_place(
                             place,
@@ -413,10 +409,15 @@ fn super_statement(&mut self,
                                 location
                             );
                         }
-                        for input in & $($mutability)* inputs[..] {
+                        for (span, input) in & $($mutability)* inputs[..] {
+                            self.visit_span(span);
                             self.visit_operand(input, location);
                         }
                     }
+                    StatementKind::Retag { ref $($mutability)* fn_entry,
+                                           ref $($mutability)* place } => {
+                        self.visit_retag(fn_entry, place, location);
+                    }
                     StatementKind::AscribeUserType(
                         ref $($mutability)* place,
                         ref $($mutability)* variance,
@@ -719,6 +720,17 @@ fn super_ascribe_user_ty(&mut self,
                 self.visit_user_type_projection(user_ty);
             }
 
+            fn super_retag(&mut self,
+                           _fn_entry: & $($mutability)* bool,
+                           place: & $($mutability)* Place<'tcx>,
+                           location: Location) {
+                self.visit_place(
+                    place,
+                    PlaceContext::MutatingUse(MutatingUseContext::Retag),
+                    location,
+                );
+            }
+
             fn super_place(&mut self,
                             place: & $($mutability)* Place<'tcx>,
                             context: PlaceContext<'tcx>,
@@ -1010,6 +1022,8 @@ pub enum MutatingUseContext<'tcx> {
     ///     f(&mut x.y);
     ///
     Projection,
+    /// Retagging (updating the "Stacked Borrows" tag)
+    Retag,
 }
 
 #[derive(Copy, Clone, Debug, PartialEq, Eq)]
@@ -1020,8 +1034,6 @@ pub enum NonUseContext {
     StorageDead,
     /// User type annotation assertions for NLL.
     AscribeUserTy,
-    /// Validation command.
-    Validate,
 }
 
 #[derive(Copy, Clone, Debug, PartialEq, Eq)]
index 569e7a24d2353ce8652d5112b6f3738b109c5023..76200777584012a60666a7d654ba7bd4e1dafbf5 100644 (file)
@@ -1282,9 +1282,8 @@ fn parse_cross_lang_lto(slot: &mut CrossLangLto, v: Option<&str>) -> bool {
         "in addition to `.mir` files, create graphviz `.dot` files"),
     dump_mir_exclude_pass_number: bool = (false, parse_bool, [UNTRACKED],
         "if set, exclude the pass number when dumping MIR (used in tests)"),
-    mir_emit_validate: usize = (0, parse_uint, [TRACKED],
-        "emit Validate MIR statements, interpreted e.g. by miri (0: do not emit; 1: if function \
-         contains unsafe block, only validate arguments; 2: always emit full validation)"),
+    mir_emit_retag: bool = (false, parse_bool, [TRACKED],
+        "emit Retagging MIR statements, interpreted e.g. by miri; implies -Zmir-opt-level=0"),
     perf_stats: bool = (false, parse_bool, [UNTRACKED],
         "print some performance-related statistics"),
     hir_stats: bool = (false, parse_bool, [UNTRACKED],
index a17825a877d88cb45ecaba6e31d2cc6a15e0860a..fe94b62ef19e266c6e0989daba86a1992fd46d45 100644 (file)
@@ -868,7 +868,7 @@ pub fn consider_optimizing<T: Fn() -> String>(&self, crate_name: &str, msg: T) -
                 let fuel = self.optimization_fuel_limit.get();
                 ret = fuel != 0;
                 if fuel == 0 && !self.out_of_fuel.get() {
-                    println!("optimization-fuel-exhausted: {}", msg());
+                    eprintln!("optimization-fuel-exhausted: {}", msg());
                     self.out_of_fuel.set(true);
                 } else if fuel > 0 {
                     self.optimization_fuel_limit.set(fuel - 1);
index 50ca6ca78ab3aa5b81dd85670bf025e8101673e2..8239e5ac56e44c4a5d5eab84ce07ff0cac543755 100644 (file)
@@ -447,27 +447,51 @@ fn add_user_pred<'c>(
                                     ty::RegionKind::ReLateBound(_, _),
                                 ) => {}
 
-                                (ty::RegionKind::ReLateBound(_, _), _) => {
+                                (ty::RegionKind::ReLateBound(_, _), _) |
+                                (_, ty::RegionKind::ReVar(_)) => {
+                                    // One of these is true:
                                     // The new predicate has a HRTB in a spot where the old
                                     // predicate does not (if they both had a HRTB, the previous
-                                    // match arm would have executed).
+                                    // match arm would have executed). An HRTB is a 'stricter'
+                                    // bound than anything else, so we want to keep the newer
+                                    // predicate (with the HRTB) in place of the old predicate.
                                     //
-                                    // The means we want to remove the older predicate from
-                                    // user_computed_preds, since having both it and the new
+                                    // OR
+                                    //
+                                    // The old predicate has a region variable where the new
+                                    // predicate has some other kind of region. A region
+                                    // variable isn't something we can actually display to a user,
+                                    // so we choose the new predicate (which doesn't have a region
+                                    // variable).
+                                    //
+                                    // In both cases, we want to remove the old predicate
+                                    // from user_computed_preds and replace it with the new
+                                    // one. Having both the old and the new
                                     // predicate in a ParamEnv would confuse SelectionContext
+                                    //
                                     // We're currently in the predicate passed to 'retain',
                                     // so we return 'false' to remove the old predicate from
                                     // user_computed_preds
                                     return false;
                                 }
-                                (_, ty::RegionKind::ReLateBound(_, _)) => {
-                                    // This is the opposite situation as the previous arm - the
-                                    // old predicate has a HRTB lifetime in a place where the
-                                    // new predicate does not. We want to leave the old
+                                (_, ty::RegionKind::ReLateBound(_, _)) |
+                                (ty::RegionKind::ReVar(_), _) => {
+                                    // This is the opposite situation from the previous arm.
+                                    // One of these is true:
+                                    //
+                                    // The old predicate has a HRTB lifetime in a place where the
+                                    // new predicate does not.
+                                    //
+                                    // OR
+                                    //
+                                    // The new predicate has a region variable where the old
+                                    // predicate has some other type of region.
+                                    //
+                                    // We want to leave the old
                                     // predicate in user_computed_preds, and skip adding
                                     // new_pred to user_computed_params.
                                     should_add_new = false
-                                }
+                                },
                                 _ => {}
                             }
                         }
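
(A toy restatement of the rule spelled out in the comments above, not rustc code: between two otherwise-matching predicates that differ only in a region, keep the one carrying the late-bound (HRTB) region, and never keep one whose region is an inference variable that cannot be shown to a user. The enum and function below are invented for the illustration.)

    #[derive(Clone, Copy, Debug)]
    enum Region {
        LateBound, // `for<'a> ...`: the stricter, displayable form
        Var,       // an inference variable: nothing we could show a user
        Named,     // any other region
    }

    // Should the new predicate replace the old one in the user-visible set?
    fn prefer_new(old: Region, new: Region) -> bool {
        match (new, old) {
            (Region::LateBound, _) => true, // the new side carries the HRTB
            (_, Region::Var) => true,       // the old side is an inference variable
            _ => false,                     // otherwise keep the old predicate
        }
    }

    fn main() {
        assert!(prefer_new(Region::Var, Region::Named));         // drop the variable
        assert!(prefer_new(Region::Named, Region::LateBound));   // prefer the HRTB
        assert!(!prefer_new(Region::LateBound, Region::Named));  // keep the HRTB
        println!("ok");
    }
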
@@ -683,8 +707,8 @@ pub fn evaluate_nested_obligations<
                 }
                 &ty::Predicate::TypeOutlives(ref binder) => {
                     match (
-                        binder.no_late_bound_regions(),
-                        binder.map_bound_ref(|pred| pred.0).no_late_bound_regions(),
+                        binder.no_bound_vars(),
+                        binder.map_bound_ref(|pred| pred.0).no_bound_vars(),
                     ) {
                         (None, Some(t_a)) => {
                             select.infcx().register_region_obligation_with_cause(
index 817e9ffcbb55d786bbcb9ab7f56269472241d038..71b77909b82a8e3de49c1cfbdc483a1c9822232b 100644 (file)
@@ -455,7 +455,7 @@ fn ty_is_local_constructor(ty: Ty<'_>, in_crate: InCrate) -> bool {
             false
         }
 
-        ty::Infer(..) => match in_crate {
+        ty::Bound(..) | ty::Infer(..) => match in_crate {
             InCrate::Local => false,
             // The inference variable might be unified with a local
             // type in that remote crate.
index 15a0adc3c06928ad8ebbe9542c61bc56011b40a2..fbada789956be660c0e10ae04b213cc81993e45d 100644 (file)
@@ -34,7 +34,6 @@
 use infer::{self, InferCtxt};
 use infer::type_variable::TypeVariableOrigin;
 use std::fmt;
-use std::iter;
 use syntax::ast;
 use session::DiagnosticMessageId;
 use ty::{self, AdtKind, ToPredicate, ToPolyTraitRef, Ty, TyCtxt, TypeFoldable};
@@ -281,7 +280,7 @@ fn type_category<'tcx>(t: Ty<'tcx>) -> Option<u32> {
                 ty::Generator(..) => Some(18),
                 ty::Foreign(..) => Some(19),
                 ty::GeneratorWitness(..) => Some(20),
-                ty::Infer(..) | ty::Error => None,
+                ty::Bound(..) | ty::Infer(..) | ty::Error => None,
                 ty::UnnormalizedProjection(..) => bug!("only used with chalk-engine"),
             }
         }
@@ -429,7 +428,7 @@ fn on_unimplemented_note(
                 ));
                 let tcx = self.tcx;
                 if let Some(len) = len.val.try_to_scalar().and_then(|scalar| {
-                    scalar.to_usize(tcx).ok()
+                    scalar.to_usize(&tcx).ok()
                 }) {
                     flags.push((
                         "_Self".to_owned(),
@@ -755,7 +754,8 @@ pub fn report_selection_error(&self,
                     }
 
                     ty::Predicate::ObjectSafe(trait_def_id) => {
-                        let violations = self.tcx.object_safety_violations(trait_def_id);
+                        let violations = self.tcx.global_tcx()
+                            .object_safety_violations(trait_def_id);
                         self.tcx.report_object_safety_error(span,
                                                             trait_def_id,
                                                             violations)
@@ -876,7 +876,7 @@ pub fn report_selection_error(&self,
             }
 
             TraitNotObjectSafe(did) => {
-                let violations = self.tcx.object_safety_violations(did);
+                let violations = self.tcx.global_tcx().object_safety_violations(did);
                 self.tcx.report_object_safety_error(span, did, violations)
             }
 
@@ -1095,10 +1095,7 @@ pub fn report_arg_count_mismatch(
             // found arguments is empty (assume the user just wants to ignore args in this case).
             // For example, if `expected_args_length` is 2, suggest `|_, _|`.
             if found_args.is_empty() && is_closure {
-                let underscores = iter::repeat("_")
-                                      .take(expected_args.len())
-                                      .collect::<Vec<_>>()
-                                      .join(", ");
+                let underscores = vec!["_"; expected_args.len()].join(", ");
                 err.span_suggestion_with_applicability(
                     found_span,
                     &format!(
index cfa77b210e857769f2a3e8f58640bc11367bffab..bc091a4e7e0848e0ebd81fd2f0e3dde53ab44ec7 100644 (file)
@@ -12,8 +12,9 @@
 use mir::interpret::{GlobalId, ErrorHandled};
 use ty::{self, Ty, TypeFoldable, ToPolyTraitRef, ToPredicate};
 use ty::error::ExpectedFound;
-use rustc_data_structures::obligation_forest::{Error, ForestObligation, ObligationForest};
-use rustc_data_structures::obligation_forest::{ObligationProcessor, ProcessResult};
+use rustc_data_structures::obligation_forest::{DoCompleted, Error, ForestObligation};
+use rustc_data_structures::obligation_forest::{ObligationForest, ObligationProcessor};
+use rustc_data_structures::obligation_forest::{ProcessResult};
 use std::marker::PhantomData;
 use hir::def_id::DefId;
 
@@ -98,7 +99,7 @@ fn select(&mut self, selcx: &mut SelectionContext<'a, 'gcx, 'tcx>)
             let outcome = self.predicates.process_obligations(&mut FulfillProcessor {
                 selcx,
                 register_region_obligations: self.register_region_obligations
-            });
+            }, DoCompleted::No);
             debug!("select: outcome={:#?}", outcome);
 
             // FIXME: if we kept the original cache key, we could mark projection
@@ -143,7 +144,7 @@ fn normalize_projection_type<'a, 'gcx>(&mut self,
         debug!("normalize_projection_type(projection_ty={:?})",
                projection_ty);
 
-        debug_assert!(!projection_ty.has_escaping_regions());
+        debug_assert!(!projection_ty.has_escaping_bound_vars());
 
         // FIXME(#20304) -- cache
 
@@ -349,15 +350,15 @@ fn process_obligation(&mut self,
             }
 
             ty::Predicate::TypeOutlives(ref binder) => {
-                // Check if there are higher-ranked regions.
-                match binder.no_late_bound_regions() {
+                // Check if there are higher-ranked vars.
+                match binder.no_bound_vars() {
                     // If there are, inspect the underlying type further.
                     None => {
                         // Convert from `Binder<OutlivesPredicate<Ty, Region>>` to `Binder<Ty>`.
                         let binder = binder.map_bound_ref(|pred| pred.0);
 
-                        // Check if the type has any bound regions.
-                        match binder.no_late_bound_regions() {
+                        // Check if the type has any bound vars.
+                        match binder.no_bound_vars() {
                             // If so, this obligation is an error (for now). Eventually we should be
                             // able to support additional cases here, like `for<'a> &'a str: 'a`.
                             // NOTE: this is duplicate-implemented between here and fulfillment.
index 6b2ec64668e9bbb006e1bbce46a7392ff0e9976a..b7512790bfb6913414c2e4d34c618b56c9012f19 100644 (file)
@@ -352,7 +352,7 @@ pub fn from_poly_domain_goal<'a>(
         domain_goal: PolyDomainGoal<'tcx>,
         tcx: TyCtxt<'a, 'tcx, 'tcx>,
     ) -> GoalKind<'tcx> {
-        match domain_goal.no_late_bound_regions() {
+        match domain_goal.no_bound_vars() {
             Some(p) => p.into_goal(),
             None => GoalKind::Quantified(
                 QuantifierKind::Universal,
@@ -534,8 +534,11 @@ pub enum Vtable<'tcx, N> {
     /// Same as above, but for a fn pointer type with the given signature.
     VtableFnPointer(VtableFnPointerData<'tcx, N>),
 
-    /// Vtable automatically generated for a generator
+    /// Vtable automatically generated for a generator.
     VtableGenerator(VtableGeneratorData<'tcx, N>),
+
+    /// Vtable for a trait alias.
+    VtableTraitAlias(VtableTraitAliasData<'tcx, N>),
 }
 
 /// Identifies a particular impl in the source, along with a set of
@@ -605,6 +608,13 @@ pub struct VtableFnPointerData<'tcx, N> {
     pub nested: Vec<N>
 }
 
+#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable)]
+pub struct VtableTraitAliasData<'tcx, N> {
+    pub alias_def_id: DefId,
+    pub substs: &'tcx Substs<'tcx>,
+    pub nested: Vec<N>,
+}
+
 /// Creates predicate obligations from the generic bounds.
 pub fn predicates_for_generics<'tcx>(cause: ObligationCause<'tcx>,
                                      param_env: ty::ParamEnv<'tcx>,
@@ -1067,6 +1077,7 @@ pub fn nested_obligations(self) -> Vec<N> {
             VtableGenerator(c) => c.nested,
             VtableObject(d) => d.nested,
             VtableFnPointer(d) => d.nested,
+            VtableTraitAlias(d) => d.nested,
         }
     }
 
@@ -1090,20 +1101,25 @@ pub fn map<M, F>(self, f: F) -> Vtable<'tcx, M> where F: FnMut(N) -> M {
                 trait_def_id: d.trait_def_id,
                 nested: d.nested.into_iter().map(f).collect(),
             }),
-            VtableFnPointer(p) => VtableFnPointer(VtableFnPointerData {
-                fn_ty: p.fn_ty,
-                nested: p.nested.into_iter().map(f).collect(),
+            VtableClosure(c) => VtableClosure(VtableClosureData {
+                closure_def_id: c.closure_def_id,
+                substs: c.substs,
+                nested: c.nested.into_iter().map(f).collect(),
             }),
             VtableGenerator(c) => VtableGenerator(VtableGeneratorData {
                 generator_def_id: c.generator_def_id,
                 substs: c.substs,
                 nested: c.nested.into_iter().map(f).collect(),
             }),
-            VtableClosure(c) => VtableClosure(VtableClosureData {
-                closure_def_id: c.closure_def_id,
-                substs: c.substs,
-                nested: c.nested.into_iter().map(f).collect(),
-            })
+            VtableFnPointer(p) => VtableFnPointer(VtableFnPointerData {
+                fn_ty: p.fn_ty,
+                nested: p.nested.into_iter().map(f).collect(),
+            }),
+            VtableTraitAlias(d) => VtableTraitAlias(VtableTraitAliasData {
+                alias_def_id: d.alias_def_id,
+                substs: d.substs,
+                nested: d.nested.into_iter().map(f).collect(),
+            }),
         }
     }
 }
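
A minimal sketch of the kind of bound the new `VtableTraitAlias`/`VtableTraitAliasData` entries describe, assuming a nightly toolchain with the unstable `trait_alias` feature enabled; the alias and function names below are illustrative only, not part of the patch:

#![feature(trait_alias)]

use std::fmt::Debug;

// `T: DebugDefault` behaves like `T: Debug + Default`; resolving such a bound
// is what the trait-alias vtable data above records.
trait DebugDefault = Debug + Default;

fn describe<T: DebugDefault>() -> String {
    format!("{:?}", T::default())
}

fn main() {
    assert_eq!(describe::<u8>(), "0");
}
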
index d5942e738fdd9ad6a334e9b9ed69eea92b10defd..1d76ccdca3161b33a2f6b7cb6b447159f955314d 100644 (file)
@@ -13,7 +13,8 @@
 //! object if all of their methods meet certain criteria. In particular,
 //! they must:
 //!
-//!   - have a suitable receiver from which we can extract a vtable;
+//!   - have a suitable receiver from which we can extract a vtable and coerce to a "thin" version
+//!     that doesn't contain the vtable;
 //!   - not reference the erased type `Self` except for in this receiver;
 //!   - not have generic type parameters
 
 
 use hir::def_id::DefId;
 use lint;
-use traits;
-use ty::{self, Ty, TyCtxt, TypeFoldable};
-use ty::util::ExplicitSelf;
+use traits::{self, Obligation, ObligationCause};
+use ty::{self, Ty, TyCtxt, TypeFoldable, Predicate, ToPredicate};
+use ty::subst::{Subst, Substs};
 use std::borrow::Cow;
-use syntax::ast;
+use std::iter::{self};
+use syntax::ast::{self, Name};
 use syntax_pos::Span;
 
 #[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
@@ -62,8 +64,8 @@ pub fn error_msg(&self) -> Cow<'static, str> {
                 format!("method `{}` references the `Self` type in where clauses", name).into(),
             ObjectSafetyViolation::Method(name, MethodViolationCode::Generic) =>
                 format!("method `{}` has generic type parameters", name).into(),
-            ObjectSafetyViolation::Method(name, MethodViolationCode::NonStandardSelfType) =>
-                format!("method `{}` has a non-standard `self` type", name).into(),
+            ObjectSafetyViolation::Method(name, MethodViolationCode::UndispatchableReceiver) =>
+                format!("method `{}`'s receiver cannot be dispatched on", name).into(),
             ObjectSafetyViolation::AssociatedConst(name) =>
                 format!("the trait cannot contain associated consts like `{}`", name).into(),
         }
@@ -85,11 +87,11 @@ pub enum MethodViolationCode {
     /// e.g., `fn foo<A>()`
     Generic,
 
-    /// arbitrary `self` type, e.g. `self: Rc<Self>`
-    NonStandardSelfType,
+    /// the method's receiver (`self` argument) can't be dispatched on
+    UndispatchableReceiver,
 }
 
-impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> {
+impl<'a, 'tcx> TyCtxt<'a, 'tcx, 'tcx> {
 
     /// Returns the object safety violations that affect
     /// astconv - currently, Self in supertraits. This is needed
@@ -113,6 +115,8 @@ pub fn astconv_object_safety_violations(self, trait_def_id: DefId)
     pub fn object_safety_violations(self, trait_def_id: DefId)
                                     -> Vec<ObjectSafetyViolation>
     {
+        debug!("object_safety_violations: {:?}", trait_def_id);
+
         traits::supertrait_def_ids(self, trait_def_id)
             .flat_map(|def_id| self.object_safety_violations_for_trait(def_id))
             .collect()
@@ -277,23 +281,13 @@ fn virtual_call_violation_for_method(self,
                                          method: &ty::AssociatedItem)
                                          -> Option<MethodViolationCode>
     {
-        // The method's first parameter must be something that derefs (or
-        // autorefs) to `&self`. For now, we only accept `self`, `&self`
-        // and `Box<Self>`.
+        // The method's first parameter must be named `self`.
         if !method.method_has_self_argument {
             return Some(MethodViolationCode::StaticMethod);
         }
 
         let sig = self.fn_sig(method.def_id);
 
-        let self_ty = self.mk_self_type();
-        let self_arg_ty = sig.skip_binder().inputs()[0];
-        if let ExplicitSelf::Other = ExplicitSelf::determine(self_arg_ty, |ty| ty == self_ty) {
-            return Some(MethodViolationCode::NonStandardSelfType);
-        }
-
-        // The `Self` type is erased, so it should not appear in list of
-        // arguments or return type apart from the receiver.
         for input_ty in &sig.skip_binder().inputs()[1..] {
             if self.contains_illegal_self_type_reference(trait_def_id, input_ty) {
                 return Some(MethodViolationCode::ReferencesSelf);
@@ -320,9 +314,254 @@ fn virtual_call_violation_for_method(self,
             return Some(MethodViolationCode::WhereClauseReferencesSelf(span));
         }
 
+        let receiver_ty = self.liberate_late_bound_regions(
+            method.def_id,
+            &sig.map_bound(|sig| sig.inputs()[0]),
+        );
+
+        // until `unsized_locals` is fully implemented, `self: Self` can't be dispatched on.
+        // However, this is already considered object-safe. We allow it as a special case here.
+        // FIXME(mikeyhew) get rid of this `if` statement once `receiver_is_dispatchable` allows
+        // `Receiver: Unsize<Receiver[Self => dyn Trait]>`
+        if receiver_ty != self.mk_self_type() {
+            if !self.receiver_is_dispatchable(method, receiver_ty) {
+                return Some(MethodViolationCode::UndispatchableReceiver);
+            } else {
+                // sanity check to make sure the receiver actually has the layout of a pointer
+
+                use ty::layout::Abi;
+
+                let param_env = self.param_env(method.def_id);
+
+                let abi_of_ty = |ty: Ty<'tcx>| -> &Abi {
+                    match self.layout_of(param_env.and(ty)) {
+                        Ok(layout) => &layout.abi,
+                        Err(err) => bug!(
+                            "Error: {}\n while computing layout for type {:?}", err, ty
+                        )
+                    }
+                };
+
+                // e.g. Rc<()>
+                let unit_receiver_ty = self.receiver_for_self_ty(
+                    receiver_ty, self.mk_unit(), method.def_id
+                );
+
+                match abi_of_ty(unit_receiver_ty) {
+                    &Abi::Scalar(..) => (),
+                    abi => bug!("Receiver when Self = () should have a Scalar ABI, found {:?}", abi)
+                }
+
+                let trait_object_ty = self.object_ty_for_trait(
+                    trait_def_id, self.mk_region(ty::ReStatic)
+                );
+
+                // e.g. Rc<dyn Trait>
+                let trait_object_receiver = self.receiver_for_self_ty(
+                    receiver_ty, trait_object_ty, method.def_id
+                );
+
+                match abi_of_ty(trait_object_receiver) {
+                    &Abi::ScalarPair(..) => (),
+                    abi => bug!(
+                        "Receiver when Self = {} should have a ScalarPair ABI, found {:?}",
+                        trait_object_ty, abi
+                    )
+                }
+            }
+        }
+
         None
     }
 
+    /// Performs a type substitution to produce the version of `receiver_ty` when `Self = self_ty`.
+    /// For example, for `receiver_ty = Rc<Self>` and `self_ty = Foo`, returns `Rc<Foo>`.
+    fn receiver_for_self_ty(
+        self, receiver_ty: Ty<'tcx>, self_ty: Ty<'tcx>, method_def_id: DefId
+    ) -> Ty<'tcx> {
+        let substs = Substs::for_item(self, method_def_id, |param, _| {
+            if param.index == 0 {
+                self_ty.into()
+            } else {
+                self.mk_param_from_def(param)
+            }
+        });
+
+        receiver_ty.subst(self, substs)
+    }
+
+    /// Creates the object type for the current trait. For example,
+    /// if the current trait is `Deref`, then this will be
+    /// `dyn Deref<Target=Self::Target> + 'static`.
+    fn object_ty_for_trait(self, trait_def_id: DefId, lifetime: ty::Region<'tcx>) -> Ty<'tcx> {
+        debug!("object_ty_for_trait: trait_def_id={:?}", trait_def_id);
+
+        let trait_ref = ty::TraitRef::identity(self, trait_def_id);
+
+        let trait_predicate = ty::ExistentialPredicate::Trait(
+            ty::ExistentialTraitRef::erase_self_ty(self, trait_ref)
+        );
+
+        let mut associated_types = traits::supertraits(self, ty::Binder::dummy(trait_ref))
+            .flat_map(|trait_ref| self.associated_items(trait_ref.def_id()))
+            .filter(|item| item.kind == ty::AssociatedKind::Type)
+            .collect::<Vec<_>>();
+
+        // existential predicates need to be in a specific order
+        associated_types.sort_by_key(|item| self.def_path_hash(item.def_id));
+
+        let projection_predicates = associated_types.into_iter().map(|item| {
+            ty::ExistentialPredicate::Projection(ty::ExistentialProjection {
+                ty: self.mk_projection(item.def_id, trait_ref.substs),
+                item_def_id: item.def_id,
+                substs: trait_ref.substs,
+            })
+        });
+
+        let existential_predicates = self.mk_existential_predicates(
+            iter::once(trait_predicate).chain(projection_predicates)
+        );
+
+        let object_ty = self.mk_dynamic(
+            ty::Binder::dummy(existential_predicates),
+            lifetime,
+        );
+
+        debug!("object_ty_for_trait: object_ty=`{}`", object_ty);
+
+        object_ty
+    }
+
+    /// Checks that the method's receiver (the `self` argument) can be dispatched on when `Self` is
+    /// a trait object. We require that `DispatchFromDyn` be implemented for the receiver type
+    /// in the following way:
+    /// - let `Receiver` be the type of the `self` argument, i.e. `Self`, `&Self`, `Rc<Self>`
+    /// - require the following bound:
+    ///
+    ///        Receiver[Self => T]: DispatchFromDyn<Receiver[Self => dyn Trait]>
+    ///
+    ///    where `Foo[X => Y]` means "the same type as `Foo`, but with `X` replaced with `Y`"
+    ///   (substitution notation).
+    ///
+    /// Some examples of receiver types and their required obligations:
+    /// - `&'a mut self` requires `&'a mut Self: DispatchFromDyn<&'a mut dyn Trait>`
+    /// - `self: Rc<Self>` requires `Rc<Self>: DispatchFromDyn<Rc<dyn Trait>>`
+    /// - `self: Pin<Box<Self>>` requires `Pin<Box<Self>>: DispatchFromDyn<Pin<Box<dyn Trait>>>`
+    ///
+    /// The only case where the receiver is not dispatchable, but is still a valid receiver
+    /// type (just not object-safe), is when there is more than one level of pointer indirection.
+    /// e.g. `self: &&Self`, `self: &Rc<Self>`, `self: Box<Box<Self>>`. In these cases, there
+    /// is no way, or at least no inexpensive way, to coerce the receiver from the version where
+    /// `Self = dyn Trait` to the version where `Self = T`, where `T` is the unknown erased type
+    /// contained by the trait object, because the object that needs to be coerced is behind
+    /// a pointer.
+    ///
+    /// In practice, we cannot use `dyn Trait` explicitly in the obligation because it would result
+    /// in a new check that `Trait` is object safe, creating a cycle. So instead, we fudge a little
+    /// by introducing a new type parameter `U` such that `Self: Unsize<U>` and `U: Trait + ?Sized`,
+    /// and use `U` in place of `dyn Trait`. Written as a chalk-style query:
+    ///
+    ///     forall (U: Trait + ?Sized) {
+    ///         if (Self: Unsize<U>) {
+    ///             Receiver: DispatchFromDyn<Receiver[Self => U]>
+    ///         }
+    ///     }
+    ///
+    /// for `self: &'a mut Self`, this means `&'a mut Self: DispatchFromDyn<&'a mut U>`
+    /// for `self: Rc<Self>`, this means `Rc<Self>: DispatchFromDyn<Rc<U>>`
+    /// for `self: Pin<Box<Self>>`, this means `Pin<Box<Self>>: DispatchFromDyn<Pin<Box<U>>>`
+    //
+    // FIXME(mikeyhew) when unsized receivers are implemented as part of unsized rvalues, add this
+    // fallback query: `Receiver: Unsize<Receiver[Self => U]>` to support receivers like
+    // `self: Wrapper<Self>`.
+    #[allow(dead_code)]
+    fn receiver_is_dispatchable(
+        self,
+        method: &ty::AssociatedItem,
+        receiver_ty: Ty<'tcx>,
+    ) -> bool {
+        debug!("receiver_is_dispatchable: method = {:?}, receiver_ty = {:?}", method, receiver_ty);
+
+        let traits = (self.lang_items().unsize_trait(),
+                      self.lang_items().dispatch_from_dyn_trait());
+        let (unsize_did, dispatch_from_dyn_did) = if let (Some(u), Some(cu)) = traits {
+            (u, cu)
+        } else {
+            debug!("receiver_is_dispatchable: Missing Unsize or DispatchFromDyn traits");
+            return false;
+        };
+
+        // the type `U` in the query
+        // use a bogus type parameter to mimic a forall(U) query using u32::MAX for now.
+        // FIXME(mikeyhew) this is a total hack, and we should replace it when real forall queries
+        // are implemented
+        let unsized_self_ty: Ty<'tcx> = self.mk_ty_param(
+            ::std::u32::MAX,
+            Name::intern("RustaceansAreAwesome").as_interned_str(),
+        );
+
+        // `Receiver[Self => U]`
+        let unsized_receiver_ty = self.receiver_for_self_ty(
+            receiver_ty, unsized_self_ty, method.def_id
+        );
+
+        // create a modified param env, with `Self: Unsize<U>` and `U: Trait` added to caller bounds
+        // `U: ?Sized` is already implied here
+        let param_env = {
+            let mut param_env = self.param_env(method.def_id);
+
+            // Self: Unsize<U>
+            let unsize_predicate = ty::TraitRef {
+                def_id: unsize_did,
+                substs: self.mk_substs_trait(self.mk_self_type(), &[unsized_self_ty.into()]),
+            }.to_predicate();
+
+            // U: Trait<Arg1, ..., ArgN>
+            let trait_predicate = {
+                let substs = Substs::for_item(self, method.container.assert_trait(), |param, _| {
+                    if param.index == 0 {
+                        unsized_self_ty.into()
+                    } else {
+                        self.mk_param_from_def(param)
+                    }
+                });
+
+                ty::TraitRef {
+                    def_id: method.container.assert_trait(),
+                    substs,
+                }.to_predicate()
+            };
+
+            let caller_bounds: Vec<Predicate<'tcx>> = param_env.caller_bounds.iter().cloned()
+                .chain(iter::once(unsize_predicate))
+                .chain(iter::once(trait_predicate))
+                .collect();
+
+            param_env.caller_bounds = self.intern_predicates(&caller_bounds);
+
+            param_env
+        };
+
+        // Receiver: DispatchFromDyn<Receiver[Self => U]>
+        let obligation = {
+            let predicate = ty::TraitRef {
+                def_id: dispatch_from_dyn_did,
+                substs: self.mk_substs_trait(receiver_ty, &[unsized_receiver_ty.into()]),
+            }.to_predicate();
+
+            Obligation::new(
+                ObligationCause::dummy(),
+                param_env,
+                predicate,
+            )
+        };
+
+        self.infer_ctxt().enter(|ref infcx| {
+            // the receiver is dispatchable iff the obligation holds
+            infcx.predicate_must_hold(&obligation)
+        })
+    }
+
     fn contains_illegal_self_type_reference(self,
                                             trait_def_id: DefId,
                                             ty: Ty<'tcx>)
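
A minimal sketch of the receiver rule documented above, assuming a compiler that accepts `Rc<Self>` receivers and method calls on `Rc<dyn Trait>` (these still sat behind unstable gates at the time of this commit); the trait and type names are illustrative only. One level of indirection, as in `self: Rc<Self>`, satisfies `Rc<Self>: DispatchFromDyn<Rc<dyn Speak>>` and keeps the trait object-safe, whereas a doubly indirect receiver such as `self: &Rc<Self>` would be reported as `UndispatchableReceiver`:

use std::rc::Rc;

trait Speak {
    // Dispatchable: one level of indirection between the caller and `Self`.
    fn say(self: Rc<Self>) -> String;
    // A receiver like `self: &Rc<Self>` here would make `Speak` not object-safe,
    // because the erased object would sit behind a second pointer.
}

struct Dog;

impl Speak for Dog {
    fn say(self: Rc<Self>) -> String {
        "woof".to_string()
    }
}

fn main() {
    let speaker: Rc<dyn Speak> = Rc::new(Dog);
    assert_eq!(speaker.say(), "woof");
}
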
index 4eda47d31ebb54cd11f5ecec427378ef3c7c0b56..a388c7eeb7e49ede897bbe130ffc07ffd4071d93 100644 (file)
 use super::Selection;
 use super::SelectionContext;
 use super::SelectionError;
-use super::VtableClosureData;
-use super::VtableGeneratorData;
-use super::VtableFnPointerData;
-use super::VtableImplData;
+use super::{VtableImplData, VtableClosureData, VtableGeneratorData, VtableFnPointerData};
 use super::util;
 
 use hir::def_id::DefId;
@@ -366,7 +363,7 @@ fn fold_ty(&mut self, ty: Ty<'tcx>) -> Ty<'tcx> {
 
         let ty = ty.super_fold_with(self);
         match ty.sty {
-            ty::Opaque(def_id, substs) if !substs.has_escaping_regions() => { // (*)
+            ty::Opaque(def_id, substs) if !substs.has_escaping_bound_vars() => { // (*)
                 // Only normalize `impl Trait` after type-checking, usually in codegen.
                 match self.param_env.reveal {
                     Reveal::UserFacing => ty,
@@ -393,7 +390,7 @@ fn fold_ty(&mut self, ty: Ty<'tcx>) -> Ty<'tcx> {
                 }
             }
 
-            ty::Projection(ref data) if !data.has_escaping_regions() => { // (*)
+            ty::Projection(ref data) if !data.has_escaping_bound_vars() => { // (*)
 
                 // (*) This is kind of hacky -- we need to be able to
                 // handle normalization within binders because
@@ -1073,7 +1070,8 @@ fn assemble_candidates_from_impls<'cx, 'gcx, 'tcx>(
             super::VtableClosure(_) |
             super::VtableGenerator(_) |
             super::VtableFnPointer(_) |
-            super::VtableObject(_) => {
+            super::VtableObject(_) |
+            super::VtableTraitAlias(_) => {
                 debug!("assemble_candidates_from_impls: vtable={:?}",
                        vtable);
                 true
@@ -1235,7 +1233,8 @@ fn confirm_select_candidate<'cx, 'gcx, 'tcx>(
             confirm_object_candidate(selcx, obligation, obligation_trait_ref),
         super::VtableAutoImpl(..) |
         super::VtableParam(..) |
-        super::VtableBuiltin(..) =>
+        super::VtableBuiltin(..) |
+        super::VtableTraitAlias(..) =>
             // we don't create Select candidates with this kind of resolution
             span_bug!(
                 obligation.cause.span,
@@ -1486,7 +1485,7 @@ fn confirm_impl_candidate<'cx, 'gcx, 'tcx>(
     impl_vtable: VtableImplData<'tcx, PredicateObligation<'tcx>>)
     -> Progress<'tcx>
 {
-    let VtableImplData { substs, nested, impl_def_id } = impl_vtable;
+    let VtableImplData { impl_def_id, substs, nested } = impl_vtable;
 
     let tcx = selcx.tcx();
     let param_env = obligation.param_env;
@@ -1619,7 +1618,7 @@ pub fn from_poly_projection_predicate(selcx: &mut SelectionContext<'cx, 'gcx, 't
         let infcx = selcx.infcx();
         // We don't do cross-snapshot caching of obligations with escaping regions,
         // so there's no cache key to use
-        predicate.no_late_bound_regions()
+        predicate.no_bound_vars()
             .map(|predicate| ProjectionCacheKey {
                 // We don't attempt to match up with a specific type-variable state
                 // from a specific call to `opt_normalize_projection_type` - if
index 8f7b0df8b95aa74f157d7cab5b1e42da2bb915d9..62317f074764f67248c11feff2203734d46e45a4 100644 (file)
@@ -252,6 +252,7 @@ pub fn trivial_dropck_outlives<'tcx>(tcx: TyCtxt<'_, '_, 'tcx>, ty: Ty<'tcx>) ->
         | ty::Param(_)
         | ty::Opaque(..)
         | ty::Infer(_)
+        | ty::Bound(..)
         | ty::Generator(..) => false,
 
         ty::UnnormalizedProjection(..) => bug!("only used with chalk-engine"),
index 4adb65dc58d917de255d7779c109a241ad676b12..59b086e35de310a0ae189d841b8e7270c007fec7 100644 (file)
@@ -100,7 +100,7 @@ fn tcx<'c>(&'c self) -> TyCtxt<'c, 'gcx, 'tcx> {
     fn fold_ty(&mut self, ty: Ty<'tcx>) -> Ty<'tcx> {
         let ty = ty.super_fold_with(self);
         match ty.sty {
-            ty::Opaque(def_id, substs) if !substs.has_escaping_regions() => {
+            ty::Opaque(def_id, substs) if !substs.has_escaping_bound_vars() => {
                 // (*)
                 // Only normalize `impl Trait` after type-checking, usually in codegen.
                 match self.param_env.reveal {
@@ -138,7 +138,7 @@ fn fold_ty(&mut self, ty: Ty<'tcx>) -> Ty<'tcx> {
                 }
             }
 
-            ty::Projection(ref data) if !data.has_escaping_regions() => {
+            ty::Projection(ref data) if !data.has_escaping_bound_vars() => {
                 // (*)
                 // (*) This is kind of hacky -- we need to be able to
                 // handle normalization within binders because
index 99f557d44d9aa4e085cad074ed573aa4db34ad72..b3fae3bab347199ad0d95dd3d9d326725d53f056 100644 (file)
@@ -164,7 +164,7 @@ pub fn explicit_outlives_bounds<'tcx>(
             ty::Predicate::ClosureKind(..) |
             ty::Predicate::TypeOutlives(..) |
             ty::Predicate::ConstEvaluatable(..) => None,
-            ty::Predicate::RegionOutlives(ref data) => data.no_late_bound_regions().map(
+            ty::Predicate::RegionOutlives(ref data) => data.no_bound_vars().map(
                 |ty::OutlivesPredicate(r_a, r_b)| OutlivesBound::RegionSubRegion(r_b, r_a),
             ),
         })
index b113a322d370ac91db2d0eb1bea264ff563dc488..d5233851db8c6dcb2bdec7e3e74f9e6448687e2d 100644 (file)
@@ -38,19 +38,13 @@ fn perform_query(
         tcx: TyCtxt<'_, 'gcx, 'tcx>,
         canonicalized: Canonicalized<'gcx, ParamEnvAnd<'tcx, Self>>,
     ) -> Fallible<CanonicalizedQueryResponse<'gcx, Self::QueryResponse>> {
-        // FIXME the query should take a `ImpliedOutlivesBounds`
-        let Canonical {
-            variables,
-            value:
-                ParamEnvAnd {
-                    param_env,
-                    value: ImpliedOutlivesBounds { ty },
-                },
-        } = canonicalized;
-        let canonicalized = Canonical {
-            variables,
-            value: param_env.and(ty),
-        };
+        // FIXME this `unchecked_map` is only necessary because the
+        // query is defined as taking a `ParamEnvAnd<Ty>`; it should
+        // take a `ImpliedOutlivesBounds` instead
+        let canonicalized = canonicalized.unchecked_map(|ParamEnvAnd { param_env, value }| {
+            let ImpliedOutlivesBounds { ty } = value;
+            param_env.and(ty)
+        });
 
         tcx.implied_outlives_bounds(canonicalized)
     }
index a36c5accd2a681ae2a491bc3675cb8568ae4cc6e..cd7c6d76eab5782ce49dc647b3c48c5c5dae7c5e 100644 (file)
@@ -59,18 +59,10 @@ fn perform_query(
         // FIXME convert to the type expected by the `dropck_outlives`
         // query. This should eventually be fixed by changing the
         // *underlying query*.
-        let Canonical {
-            variables,
-            value:
-                ParamEnvAnd {
-                    param_env,
-                    value: DropckOutlives { dropped_ty },
-                },
-        } = canonicalized;
-        let canonicalized = Canonical {
-            variables,
-            value: param_env.and(dropped_ty),
-        };
+        let canonicalized = canonicalized.unchecked_map(|ParamEnvAnd { param_env, value }| {
+            let DropckOutlives { dropped_ty } = value;
+            param_env.and(dropped_ty)
+        });
 
         tcx.dropck_outlives(canonicalized)
     }
index 2ea16823cc65dadd3ef12f06860045dc75d73c9a..312cd66dcc75c229b78b1d9d7a4d2b537b7eb55b 100644 (file)
 use super::{OutputTypeParameterMismatch, Overflow, SelectionError, Unimplemented};
 use super::{
     VtableAutoImpl, VtableBuiltin, VtableClosure, VtableFnPointer, VtableGenerator, VtableImpl,
-    VtableObject, VtableParam,
+    VtableObject, VtableParam, VtableTraitAlias,
 };
 use super::{
     VtableAutoImplData, VtableBuiltinData, VtableClosureData, VtableFnPointerData,
-    VtableGeneratorData, VtableImplData, VtableObjectData,
+    VtableGeneratorData, VtableImplData, VtableObjectData, VtableTraitAliasData,
 };
 
 use dep_graph::{DepKind, DepNodeIndex};
@@ -271,6 +271,8 @@ enum SelectionCandidate<'tcx> {
     /// types generated for a fn pointer type (e.g., `fn(int)->int`)
     FnPointerCandidate,
 
+    TraitAliasCandidate(DefId),
+
     ObjectCandidate,
 
     BuiltinObjectCandidate,
@@ -286,12 +288,13 @@ fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>) -> Option<Self::Lif
             ImplCandidate(def_id) => ImplCandidate(def_id),
             AutoImplCandidate(def_id) => AutoImplCandidate(def_id),
             ProjectionCandidate => ProjectionCandidate,
+            ClosureCandidate => ClosureCandidate,
+            GeneratorCandidate => GeneratorCandidate,
             FnPointerCandidate => FnPointerCandidate,
+            TraitAliasCandidate(def_id) => TraitAliasCandidate(def_id),
             ObjectCandidate => ObjectCandidate,
             BuiltinObjectCandidate => BuiltinObjectCandidate,
             BuiltinUnsizeCandidate => BuiltinUnsizeCandidate,
-            ClosureCandidate => ClosureCandidate,
-            GeneratorCandidate => GeneratorCandidate,
 
             ParamCandidate(ref trait_ref) => {
                 return tcx.lift(trait_ref).map(ParamCandidate);
@@ -587,7 +590,7 @@ pub fn select(
         obligation: &TraitObligation<'tcx>,
     ) -> SelectionResult<'tcx, Selection<'tcx>> {
         debug!("select({:?})", obligation);
-        debug_assert!(!obligation.predicate.has_escaping_regions());
+        debug_assert!(!obligation.predicate.has_escaping_bound_vars());
 
         let stack = self.push_stack(TraitObligationStackList::empty(), obligation);
 
@@ -690,7 +693,7 @@ fn evaluate_predicate_recursively<'o>(
 
         match obligation.predicate {
             ty::Predicate::Trait(ref t) => {
-                debug_assert!(!t.has_escaping_regions());
+                debug_assert!(!t.has_escaping_bound_vars());
                 let obligation = obligation.with(t.clone());
                 self.evaluate_trait_predicate_recursively(previous_stack, obligation)
             }
@@ -722,9 +725,9 @@ fn evaluate_predicate_recursively<'o>(
             },
 
             ty::Predicate::TypeOutlives(ref binder) => {
-                assert!(!binder.has_escaping_regions());
-                // Check if the type has higher-ranked regions.
-                if binder.skip_binder().0.has_escaping_regions() {
+                assert!(!binder.has_escaping_bound_vars());
+                // Check if the type has higher-ranked vars.
+                if binder.skip_binder().0.has_escaping_bound_vars() {
                     // If so, this obligation is an error (for now). Eventually we should be
                     // able to support additional cases here, like `for<'a> &'a str: 'a`.
 
@@ -740,7 +743,7 @@ fn evaluate_predicate_recursively<'o>(
                         Ok(EvaluatedToErr)
                     }
                 } else {
-                    // If the type has no late bound regions, then if we assign all
+                    // If the type has no late bound vars, then if we assign all
                     // the inference variables in it to be 'static, then the type
                     // will be 'static itself.
                     //
@@ -1199,7 +1202,7 @@ fn candidate_from_obligation<'o>(
             "candidate_from_obligation(cache_fresh_trait_pred={:?}, obligation={:?})",
             cache_fresh_trait_pred, stack
         );
-        debug_assert!(!stack.obligation.predicate.has_escaping_regions());
+        debug_assert!(!stack.obligation.predicate.has_escaping_bound_vars());
 
         if let Some(c) =
             self.check_candidate_cache(stack.obligation.param_env, &cache_fresh_trait_pred)
@@ -1368,7 +1371,8 @@ fn candidate_from_obligation_no_cache<'o>(
 
         // Winnow, but record the exact outcome of evaluation, which
         // is needed for specialization. Propagate overflow if it occurs.
-        let mut candidates = candidates.into_iter()
+        let mut candidates = candidates
+            .into_iter()
             .map(|c| match self.evaluate_candidate(stack, &c) {
                 Ok(eval) if eval.may_apply() => Ok(Some(EvaluatedCandidate {
                     candidate: c,
@@ -1377,8 +1381,8 @@ fn candidate_from_obligation_no_cache<'o>(
                 Ok(_) => Ok(None),
                 Err(OverflowError) => Err(Overflow),
             })
-           .flat_map(Result::transpose)
-           .collect::<Result<Vec<_>, _>>()?;
+            .flat_map(Result::transpose)
+            .collect::<Result<Vec<_>, _>>()?;
 
         debug!(
             "winnowed to {} candidates for {:?}: {:?}",
@@ -1451,7 +1455,7 @@ fn is_knowable<'o>(&mut self, stack: &TraitObligationStack<'o, 'tcx>) -> Option<
         let predicate = self.infcx()
             .resolve_type_vars_if_possible(&obligation.predicate);
 
-        // ok to skip binder because of the nature of the
+        // OK to skip binder because of the nature of the
         // trait-ref-is-knowable check, which does not care about
         // bound regions
         let trait_ref = predicate.skip_binder().trait_ref;
@@ -1631,6 +1635,8 @@ fn assemble_candidates<'o>(
             ambiguous: false,
         };
 
+        self.assemble_candidates_for_trait_alias(obligation, &mut candidates)?;
+
         // Other bounds. Consider both in-scope bounds from fn decl
         // and applicable impls. There is a certain set of precedence rules here.
         let def_id = obligation.predicate.def_id();
@@ -1800,7 +1806,7 @@ fn match_projection(
         placeholder_map: &infer::PlaceholderMap<'tcx>,
         snapshot: &infer::CombinedSnapshot<'cx, 'tcx>,
     ) -> bool {
-        debug_assert!(!skol_trait_ref.has_escaping_regions());
+        debug_assert!(!skol_trait_ref.has_escaping_bound_vars());
         if self.infcx
             .at(&obligation.cause, obligation.param_env)
             .sup(ty::Binder::dummy(skol_trait_ref), trait_bound)
@@ -1878,7 +1884,7 @@ fn assemble_generator_candidates(
             return Ok(());
         }
 
-        // ok to skip binder because the substs on generator types never
+        // OK to skip binder because the substs on generator types never
         // touch bound regions, they just capture the in-scope
         // type/region parameters
         let self_ty = *obligation.self_ty().skip_binder();
@@ -1922,7 +1928,7 @@ fn assemble_closure_candidates(
             }
         };
 
-        // ok to skip binder because the substs on closure types never
+        // OK to skip binder because the substs on closure types never
         // touch bound regions, they just capture the in-scope
         // type/region parameters
         match obligation.self_ty().skip_binder().sty {
@@ -1972,7 +1978,7 @@ fn assemble_fn_pointer_candidates(
             return Ok(());
         }
 
-        // ok to skip binder because what we are inspecting doesn't involve bound regions
+        // OK to skip binder because what we are inspecting doesn't involve bound regions
         let self_ty = *obligation.self_ty().skip_binder();
         match self_ty.sty {
             ty::Infer(ty::TyVar(_)) => {
@@ -2090,18 +2096,6 @@ fn assemble_candidates_from_object_ty(
             obligation.self_ty().skip_binder()
         );
 
-        // Object-safety candidates are only applicable to object-safe
-        // traits. Including this check is useful because it helps
-        // inference in cases of traits like `BorrowFrom`, which are
-        // not object-safe, and which rely on being able to infer the
-        // self-type from one of the other inputs. Without this check,
-        // these cases wind up being considered ambiguous due to a
-        // (spurious) ambiguity introduced here.
-        let predicate_trait_ref = obligation.predicate.to_poly_trait_ref();
-        if !self.tcx().is_object_safe(predicate_trait_ref.def_id()) {
-            return;
-        }
-
         self.probe(|this, _snapshot| {
             // the code below doesn't care about regions, and the
             // self-ty here doesn't escape this probe, so just erase
@@ -2179,7 +2173,7 @@ fn assemble_candidates_for_unsizing(
         //     T: Trait
         // so it seems ok if we (conservatively) fail to accept that `Unsize`
         // obligation above. Should be possible to extend this in the future.
-        let source = match obligation.self_ty().no_late_bound_regions() {
+        let source = match obligation.self_ty().no_bound_vars() {
             Some(t) => t,
             None => {
                 // Don't add any candidates if there are bound regions.
@@ -2249,6 +2243,24 @@ fn assemble_candidates_for_unsizing(
         }
     }
 
+    fn assemble_candidates_for_trait_alias(
+        &mut self,
+        obligation: &TraitObligation<'tcx>,
+        candidates: &mut SelectionCandidateSet<'tcx>,
+    ) -> Result<(), SelectionError<'tcx>> {
+        // OK to skip binder here because the tests we do below do not involve bound regions
+        let self_ty = *obligation.self_ty().skip_binder();
+        debug!("assemble_candidates_for_trait_alias(self_ty={:?})", self_ty);
+
+        let def_id = obligation.predicate.def_id();
+
+        if ty::is_trait_alias(self.tcx(), def_id) {
+            candidates.vec.push(TraitAliasCandidate(def_id));
+        }
+
+        Ok(())
+    }
+
     ///////////////////////////////////////////////////////////////////////////
     // WINNOW
     //
@@ -2299,7 +2311,8 @@ fn candidate_should_be_dropped_in_favor_of<'o>(
                 | FnPointerCandidate
                 | BuiltinObjectCandidate
                 | BuiltinUnsizeCandidate
-                | BuiltinCandidate { .. } => {
+                | BuiltinCandidate { .. }
+                | TraitAliasCandidate(..) => {
                     // Global bounds from the where clause should be ignored
                     // here (see issue #50825). Otherwise, we have a where
                     // clause so don't go around looking for impls.
@@ -2329,7 +2342,8 @@ fn candidate_should_be_dropped_in_favor_of<'o>(
                 | FnPointerCandidate
                 | BuiltinObjectCandidate
                 | BuiltinUnsizeCandidate
-                | BuiltinCandidate { .. } => true,
+                | BuiltinCandidate { .. }
+                | TraitAliasCandidate(..) => true,
                 ObjectCandidate | ProjectionCandidate => {
                     // Arbitrarily give param candidates priority
                     // over projection and object candidates.
@@ -2456,7 +2470,7 @@ fn sized_conditions(
             ty::Infer(ty::TyVar(_)) => Ambiguous,
 
             ty::UnnormalizedProjection(..)
-            | ty::Infer(ty::BoundTy(_))
+            | ty::Bound(_)
             | ty::Infer(ty::FreshTy(_))
             | ty::Infer(ty::FreshIntTy(_))
             | ty::Infer(ty::FreshFloatTy(_)) => {
@@ -2541,7 +2555,7 @@ fn copy_clone_conditions(
             }
 
             ty::UnnormalizedProjection(..)
-            | ty::Infer(ty::BoundTy(_))
+            | ty::Bound(_)
             | ty::Infer(ty::FreshTy(_))
             | ty::Infer(ty::FreshIntTy(_))
             | ty::Infer(ty::FreshFloatTy(_)) => {
@@ -2584,7 +2598,7 @@ fn constituent_types_for_ty(&self, t: Ty<'tcx>) -> Vec<Ty<'tcx>> {
             | ty::Param(..)
             | ty::Foreign(..)
             | ty::Projection(..)
-            | ty::Infer(ty::BoundTy(_))
+            | ty::Bound(_)
             | ty::Infer(ty::TyVar(_))
             | ty::Infer(ty::FreshTy(_))
             | ty::Infer(ty::FreshIntTy(_))
@@ -2723,15 +2737,20 @@ fn confirm_candidate(
                 Ok(VtableParam(obligations))
             }
 
+            ImplCandidate(impl_def_id) => Ok(VtableImpl(self.confirm_impl_candidate(
+                obligation,
+                impl_def_id,
+            ))),
+
             AutoImplCandidate(trait_def_id) => {
                 let data = self.confirm_auto_impl_candidate(obligation, trait_def_id);
                 Ok(VtableAutoImpl(data))
             }
 
-            ImplCandidate(impl_def_id) => Ok(VtableImpl(self.confirm_impl_candidate(
-                obligation,
-                impl_def_id,
-            ))),
+            ProjectionCandidate => {
+                self.confirm_projection_candidate(obligation);
+                Ok(VtableParam(Vec::new()))
+            }
 
             ClosureCandidate => {
                 let vtable_closure = self.confirm_closure_candidate(obligation)?;
@@ -2743,13 +2762,14 @@ fn confirm_candidate(
                 Ok(VtableGenerator(vtable_generator))
             }
 
-            BuiltinObjectCandidate => {
-                // This indicates something like `(Trait+Send) :
-                // Send`. In this case, we know that this holds
-                // because that's what the object type is telling us,
-                // and there's really no additional obligations to
-                // prove and no types in particular to unify etc.
-                Ok(VtableParam(Vec::new()))
+            FnPointerCandidate => {
+                let data = self.confirm_fn_pointer_candidate(obligation)?;
+                Ok(VtableFnPointer(data))
+            }
+
+            TraitAliasCandidate(alias_def_id) => {
+                let data = self.confirm_trait_alias_candidate(obligation, alias_def_id);
+                Ok(VtableTraitAlias(data))
             }
 
             ObjectCandidate => {
@@ -2757,13 +2777,12 @@ fn confirm_candidate(
                 Ok(VtableObject(data))
             }
 
-            FnPointerCandidate => {
-                let data = self.confirm_fn_pointer_candidate(obligation)?;
-                Ok(VtableFnPointer(data))
-            }
-
-            ProjectionCandidate => {
-                self.confirm_projection_candidate(obligation);
+            BuiltinObjectCandidate => {
+                // This indicates something like `(Trait+Send) :
+                // Send`. In this case, we know that this holds
+                // because that's what the object type is telling us,
+                // and there's really no additional obligations to
+                // prove and no types in particular to unify etc.
                 Ok(VtableParam(Vec::new()))
             }
 
@@ -2876,7 +2895,7 @@ fn confirm_auto_impl_candidate(
         self.vtable_auto_impl(obligation, trait_def_id, types)
     }
 
-    /// See `confirm_auto_impl_candidate`
+    /// See `confirm_auto_impl_candidate`.
     fn vtable_auto_impl(
         &mut self,
         obligation: &TraitObligation<'tcx>,
@@ -2933,7 +2952,7 @@ fn confirm_impl_candidate(
         // this time not in a probe.
         self.in_snapshot(|this, snapshot| {
             let (substs, placeholder_map) = this.rematch_impl(impl_def_id, obligation, snapshot);
-            debug!("confirm_impl_candidate substs={:?}", substs);
+            debug!("confirm_impl_candidate: substs={:?}", substs);
             let cause = obligation.derived_cause(ImplDerivedObligation);
             this.vtable_impl(
                 impl_def_id,
@@ -2997,16 +3016,14 @@ fn confirm_object_candidate(
     ) -> VtableObjectData<'tcx, PredicateObligation<'tcx>> {
         debug!("confirm_object_candidate({:?})", obligation);
 
-        // FIXME skipping binder here seems wrong -- we should
-        // probably flatten the binder from the obligation and the
-        // binder from the object. Have to try to make a broken test
-        // case that results. -nmatsakis
+        // FIXME(nmatsakis) skipping binder here seems wrong -- we should
+        // probably flatten the binder from the obligation and the binder
+        // from the object. Have to try to make a broken test case that
+        // results.
         let self_ty = self.infcx
             .shallow_resolve(*obligation.self_ty().skip_binder());
         let poly_trait_ref = match self_ty.sty {
-            ty::Dynamic(ref data, ..) => {
-                data.principal().with_self_ty(self.tcx(), self_ty)
-            }
+            ty::Dynamic(ref data, ..) => data.principal().with_self_ty(self.tcx(), self_ty),
             _ => span_bug!(obligation.cause.span, "object candidate with non-object"),
         };
 
@@ -3054,7 +3071,7 @@ fn confirm_fn_pointer_candidate(
     ) -> Result<VtableFnPointerData<'tcx, PredicateObligation<'tcx>>, SelectionError<'tcx>> {
         debug!("confirm_fn_pointer_candidate({:?})", obligation);
 
-        // ok to skip binder; it is reintroduced below
+        // OK to skip binder; it is reintroduced below
         let self_ty = self.infcx
             .shallow_resolve(*obligation.self_ty().skip_binder());
         let sig = self_ty.fn_sig(self.tcx());
@@ -3090,11 +3107,51 @@ fn confirm_fn_pointer_candidate(
         })
     }
 
+    fn confirm_trait_alias_candidate(
+        &mut self,
+        obligation: &TraitObligation<'tcx>,
+        alias_def_id: DefId,
+    ) -> VtableTraitAliasData<'tcx, PredicateObligation<'tcx>> {
+        debug!(
+            "confirm_trait_alias_candidate({:?}, {:?})",
+            obligation, alias_def_id
+        );
+
+        self.in_snapshot(|this, snapshot| {
+            let (predicate, placeholder_map) = this.infcx()
+                .replace_late_bound_regions_with_placeholders(&obligation.predicate);
+            let trait_ref = predicate.trait_ref;
+            let trait_def_id = trait_ref.def_id;
+            let substs = trait_ref.substs;
+
+            let trait_obligations = this.impl_or_trait_obligations(
+                obligation.cause.clone(),
+                obligation.recursion_depth,
+                obligation.param_env,
+                trait_def_id,
+                &substs,
+                placeholder_map,
+                snapshot,
+            );
+
+            debug!(
+                "confirm_trait_alias_candidate: trait_def_id={:?} trait_obligations={:?}",
+                trait_def_id, trait_obligations
+            );
+
+            VtableTraitAliasData {
+                alias_def_id,
+                substs,
+                nested: trait_obligations,
+            }
+        })
+    }
+
     fn confirm_generator_candidate(
         &mut self,
         obligation: &TraitObligation<'tcx>,
     ) -> Result<VtableGeneratorData<'tcx, PredicateObligation<'tcx>>, SelectionError<'tcx>> {
-        // ok to skip binder because the substs on generator types never
+        // OK to skip binder because the substs on generator types never
         // touch bound regions, they just capture the in-scope
         // type/region parameters
         let self_ty = self.infcx
@@ -3152,7 +3209,7 @@ fn confirm_closure_candidate(
             .fn_trait_kind(obligation.predicate.def_id())
             .unwrap_or_else(|| bug!("closure candidate for non-fn trait {:?}", obligation));
 
-        // ok to skip binder because the substs on closure types never
+        // OK to skip binder because the substs on closure types never
         // touch bound regions, they just capture the in-scope
         // type/region parameters
         let self_ty = self.infcx
@@ -3248,7 +3305,7 @@ fn confirm_builtin_unsize_candidate(
         // assemble_candidates_for_unsizing should ensure there are no late bound
         // regions here. See the comment there for more details.
         let source = self.infcx
-            .shallow_resolve(obligation.self_ty().no_late_bound_regions().unwrap());
+            .shallow_resolve(obligation.self_ty().no_bound_vars().unwrap());
         let target = obligation
             .predicate
             .skip_binder()
@@ -3666,8 +3723,17 @@ fn closure_trait_ref_unnormalized(
         closure_def_id: DefId,
         substs: ty::ClosureSubsts<'tcx>,
     ) -> ty::PolyTraitRef<'tcx> {
+        debug!(
+            "closure_trait_ref_unnormalized(obligation={:?}, closure_def_id={:?}, substs={:?})",
+            obligation, closure_def_id, substs,
+        );
         let closure_type = self.infcx.closure_sig(closure_def_id, substs);
 
+        debug!(
+            "closure_trait_ref_unnormalized: closure_type = {:?}",
+            closure_type
+        );
+
         // (1) Feels icky to skip the binder here, but OTOH we know
         // that the self-type is an unboxed closure type and hence is
         // in fact unparameterized (or at least does not reference any
index 24779a350d74bdfcf5382244a26db480dfd44560..e83d085971caa32ef52c6b2fbab491d53c5cc58d 100644 (file)
@@ -62,6 +62,8 @@ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
             super::VtableParam(ref n) => write!(f, "VtableParam({:?})", n),
 
             super::VtableBuiltin(ref d) => write!(f, "{:?}", d),
+
+            super::VtableTraitAlias(ref d) => write!(f, "{:?}", d),
         }
     }
 }
@@ -70,7 +72,7 @@ impl<'tcx, N: fmt::Debug> fmt::Debug for traits::VtableImplData<'tcx, N> {
     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
         write!(
             f,
-            "VtableImpl(impl_def_id={:?}, substs={:?}, nested={:?})",
+            "VtableImplData(impl_def_id={:?}, substs={:?}, nested={:?})",
             self.impl_def_id, self.substs, self.nested
         )
     }
@@ -80,7 +82,7 @@ impl<'tcx, N: fmt::Debug> fmt::Debug for traits::VtableGeneratorData<'tcx, N> {
     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
         write!(
             f,
-            "VtableGenerator(generator_def_id={:?}, substs={:?}, nested={:?})",
+            "VtableGeneratorData(generator_def_id={:?}, substs={:?}, nested={:?})",
             self.generator_def_id, self.substs, self.nested
         )
     }
@@ -90,7 +92,7 @@ impl<'tcx, N: fmt::Debug> fmt::Debug for traits::VtableClosureData<'tcx, N> {
     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
         write!(
             f,
-            "VtableClosure(closure_def_id={:?}, substs={:?}, nested={:?})",
+            "VtableClosureData(closure_def_id={:?}, substs={:?}, nested={:?})",
             self.closure_def_id, self.substs, self.nested
         )
     }
@@ -98,7 +100,7 @@ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
 
 impl<'tcx, N: fmt::Debug> fmt::Debug for traits::VtableBuiltinData<N> {
     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
-        write!(f, "VtableBuiltin(nested={:?})", self.nested)
+        write!(f, "VtableBuiltinData(nested={:?})", self.nested)
     }
 }
 
@@ -116,7 +118,7 @@ impl<'tcx, N: fmt::Debug> fmt::Debug for traits::VtableObjectData<'tcx, N> {
     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
         write!(
             f,
-            "VtableObject(upcast={:?}, vtable_base={}, nested={:?})",
+            "VtableObjectData(upcast={:?}, vtable_base={}, nested={:?})",
             self.upcast_trait_ref, self.vtable_base, self.nested
         )
     }
@@ -126,12 +128,22 @@ impl<'tcx, N: fmt::Debug> fmt::Debug for traits::VtableFnPointerData<'tcx, N> {
     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
         write!(
             f,
-            "VtableFnPointer(fn_ty={:?}, nested={:?})",
+            "VtableFnPointerData(fn_ty={:?}, nested={:?})",
             self.fn_ty, self.nested
         )
     }
 }
 
+impl<'tcx, N: fmt::Debug> fmt::Debug for traits::VtableTraitAliasData<'tcx, N> {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        write!(
+            f,
+            "VtableTraitAlias(alias_def_id={:?}, substs={:?}, nested={:?})",
+            self.alias_def_id, self.substs, self.nested
+        )
+    }
+}
+
 impl<'tcx> fmt::Debug for traits::FulfillmentError<'tcx> {
     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
         write!(f, "FulfillmentError({:?},{:?})", self.obligation, self.code)
@@ -321,6 +333,17 @@ fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>) -> Option<Self::Lif
                     nested,
                 })
             ),
+            traits::VtableTraitAlias(traits::VtableTraitAliasData {
+                alias_def_id,
+                substs,
+                nested,
+            }) => tcx.lift(&substs).map(|substs|
+                traits::VtableTraitAlias(traits::VtableTraitAliasData {
+                    alias_def_id,
+                    substs,
+                    nested,
+                })
+            ),
         }
     }
 }
@@ -386,6 +409,12 @@ impl<'tcx, N> TypeFoldable<'tcx> for traits::VtableFnPointerData<'tcx, N> {
     } where N: TypeFoldable<'tcx>
 }
 
+BraceStructTypeFoldableImpl! {
+    impl<'tcx, N> TypeFoldable<'tcx> for traits::VtableTraitAliasData<'tcx, N> {
+        alias_def_id, substs, nested
+    } where N: TypeFoldable<'tcx>
+}
+
 EnumTypeFoldableImpl! {
     impl<'tcx, N> TypeFoldable<'tcx> for traits::Vtable<'tcx, N> {
         (traits::VtableImpl)(a),
@@ -396,6 +425,7 @@ impl<'tcx, N> TypeFoldable<'tcx> for traits::Vtable<'tcx, N> {
         (traits::VtableParam)(a),
         (traits::VtableBuiltin)(a),
         (traits::VtableObject)(a),
+        (traits::VtableTraitAlias)(a),
     } where N: TypeFoldable<'tcx>
 }
 
index 940cf736012ec0c0e2a9bde0ffdce2c34b5a0270..74f8d67ce04846a97c309a0573037530a4b98225 100644 (file)
@@ -333,7 +333,7 @@ fn new(base: I) -> FilterToTraits<I> {
     }
 }
 
-impl<'tcx,I:Iterator<Item=ty::Predicate<'tcx>>> Iterator for FilterToTraits<I> {
+impl<'tcx,I:Iterator<Item = ty::Predicate<'tcx>>> Iterator for FilterToTraits<I> {
     type Item = ty::PolyTraitRef<'tcx>;
 
     fn next(&mut self) -> Option<ty::PolyTraitRef<'tcx>> {
index d4b47db608163ed9ee293c46b35c723de274af18..05d9d4bc37d79f540e3c887feb5fcc82ae099be7 100644 (file)
@@ -1547,11 +1547,9 @@ pub fn borrowck_mode(&self) -> BorrowckMode {
     }
 
     /// Should we emit EndRegion MIR statements? These are consumed by
-    /// MIR borrowck, but not when NLL is used. They are also consumed
-    /// by the validation stuff.
+    /// MIR borrowck, but not when NLL is used.
     pub fn emit_end_regions(self) -> bool {
         self.sess.opts.debugging_opts.emit_end_regions ||
-            self.sess.opts.debugging_opts.mir_emit_validate > 0 ||
             self.use_mir_borrowck()
     }
 
@@ -2245,7 +2243,7 @@ pub fn print_debug_stats(self) {
         sty_debug_print!(
             self,
             Adt, Array, Slice, RawPtr, Ref, FnDef, FnPtr,
-            Generator, GeneratorWitness, Dynamic, Closure, Tuple,
+            Generator, GeneratorWitness, Dynamic, Closure, Tuple, Bound,
             Param, Infer, UnnormalizedProjection, Projection, Opaque, Foreign);
 
         println!("Substs interner: #{}", self.interners.substs.borrow().len());
index ed6e372fe7637d21427081b9994c1d10f970c44a..4737c72b1ef0f75bcc2c0ca382ef69c4b769ea54 100644 (file)
@@ -53,8 +53,6 @@ pub enum TypeError<'tcx> {
     ProjectionMismatched(ExpectedFound<DefId>),
     ProjectionBoundsLength(ExpectedFound<usize>),
     ExistentialMismatch(ExpectedFound<&'tcx ty::List<ty::ExistentialPredicate<'tcx>>>),
-
-    OldStyleLUB(Box<TypeError<'tcx>>),
 }
 
 #[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Eq, Hash, Debug, Copy)]
@@ -166,9 +164,6 @@ fn report_maybe_different(f: &mut fmt::Formatter<'_>,
                 report_maybe_different(f, &format!("trait `{}`", values.expected),
                                        &format!("trait `{}`", values.found))
             }
-            OldStyleLUB(ref err) => {
-                write!(f, "{}", err)
-            }
         }
     }
 }
@@ -217,7 +212,7 @@ pub fn sort_string(&self, tcx: TyCtxt<'a, 'gcx, 'lcx>) -> Cow<'static, str> {
             ty::Infer(ty::TyVar(_)) => "inferred type".into(),
             ty::Infer(ty::IntVar(_)) => "integral variable".into(),
             ty::Infer(ty::FloatVar(_)) => "floating-point variable".into(),
-            ty::Infer(ty::BoundTy(_)) |
+            ty::Bound(_) |
             ty::Infer(ty::FreshTy(_)) => "fresh type".into(),
             ty::Infer(ty::FreshIntTy(_)) => "fresh integral type".into(),
             ty::Infer(ty::FreshFloatTy(_)) => "fresh floating-point type".into(),
@@ -266,12 +261,6 @@ pub fn note_and_explain_type_err(self,
                     }
                 }
             },
-            OldStyleLUB(err) => {
-                db.note("this was previously accepted by the compiler but has been phased out");
-                db.note("for more information, see https://github.com/rust-lang/rust/issues/45852");
-
-                self.note_and_explain_type_err(db, &err, sp);
-            }
             CyclicTy(ty) => {
                 // Watch out for various cases of cyclic types and try to explain.
                 if ty.is_closure() || ty.is_generator() {
index e6aaf8b1bb20604a7046b991d1f520b0afccc27a..380f95993f8fbab345f51c0c1a93e55ec70b425b 100644 (file)
@@ -122,7 +122,7 @@ pub fn simplify_type<'a, 'gcx, 'tcx>(tcx: TyCtxt<'a, 'gcx, 'tcx>,
         ty::Foreign(def_id) => {
             Some(ForeignSimplifiedType(def_id))
         }
-        ty::Infer(_) | ty::Error => None,
+        ty::Bound(..) | ty::Infer(_) | ty::Error => None,
     }
 }
 
index a7b21688fbeb3ab1f0e3a4dc6db32a9da5318e39..0764f363250dd8be56b0b7d846ff529fa0b2caca 100644 (file)
@@ -115,15 +115,17 @@ fn add_sty(&mut self, st: &ty::TyKind<'_>) {
                 self.add_substs(&substs.substs);
             }
 
+            &ty::Bound(bound_ty) => {
+                self.add_binder(bound_ty.index);
+            }
+
             &ty::Infer(infer) => {
                 self.add_flags(TypeFlags::HAS_FREE_LOCAL_NAMES); // it might, right?
                 self.add_flags(TypeFlags::HAS_TY_INFER);
                 match infer {
                     ty::FreshTy(_) |
                     ty::FreshIntTy(_) |
-                    ty::FreshFloatTy(_) |
-                    ty::BoundTy(_) => {
-                        self.add_flags(TypeFlags::HAS_CANONICAL_VARS);
+                    ty::FreshFloatTy(_) => {
                     }
 
                     ty::TyVar(_) |
@@ -141,7 +143,7 @@ fn add_sty(&mut self, st: &ty::TyKind<'_>) {
             &ty::Projection(ref data) => {
                 // currently we can't normalize projections that
                 // include bound regions, so track those separately.
-                if !data.has_escaping_regions() {
+                if !data.has_escaping_bound_vars() {
                     self.add_flags(TypeFlags::HAS_NORMALIZABLE_PROJECTION);
                 }
                 self.add_flags(TypeFlags::HAS_PROJECTION);
index 80dfd263af9af99e6dfe8d6753dc70e10d94f47b..8c822adf7b0236bd39b829d1cae1335f7a8c7169 100644 (file)
@@ -67,18 +67,18 @@ fn visit_with<V: TypeVisitor<'tcx>>(&self, visitor: &mut V) -> bool {
     /// bound by `binder` or bound by some binder outside of `binder`.
     /// If `binder` is `ty::INNERMOST`, this indicates whether
     /// there are any late-bound regions that appear free.
-    fn has_regions_bound_at_or_above(&self, binder: ty::DebruijnIndex) -> bool {
-        self.visit_with(&mut HasEscapingRegionsVisitor { outer_index: binder })
+    fn has_vars_bound_at_or_above(&self, binder: ty::DebruijnIndex) -> bool {
+        self.visit_with(&mut HasEscapingVarsVisitor { outer_index: binder })
     }
 
     /// True if this `self` has any regions that escape `binder` (and
     /// hence are not bound by it).
-    fn has_regions_bound_above(&self, binder: ty::DebruijnIndex) -> bool {
-        self.has_regions_bound_at_or_above(binder.shifted_in(1))
+    fn has_vars_bound_above(&self, binder: ty::DebruijnIndex) -> bool {
+        self.has_vars_bound_at_or_above(binder.shifted_in(1))
     }
 
-    fn has_escaping_regions(&self) -> bool {
-        self.has_regions_bound_at_or_above(ty::INNERMOST)
+    fn has_escaping_bound_vars(&self) -> bool {
+        self.has_vars_bound_at_or_above(ty::INNERMOST)
     }
 
     fn has_type_flags(&self, flags: TypeFlags) -> bool {
@@ -416,11 +416,10 @@ fn fold_region(&mut self, r: ty::Region<'tcx>) -> ty::Region<'tcx> {
 }
 
 ///////////////////////////////////////////////////////////////////////////
-// Late-bound region replacer
+// Bound vars replacer
 
-// Replaces the escaping regions in a type.
-
-struct RegionReplacer<'a, 'gcx: 'a+'tcx, 'tcx: 'a> {
+/// Replaces the escaping bound vars (late bound regions or bound types) in a type.
+struct BoundVarReplacer<'a, 'gcx: 'a + 'tcx, 'tcx: 'a> {
     tcx: TyCtxt<'a, 'gcx, 'tcx>,
 
     /// As with `RegionFolder`, represents the index of a binder *just outside*
@@ -428,7 +427,82 @@ struct RegionReplacer<'a, 'gcx: 'a+'tcx, 'tcx: 'a> {
     current_index: ty::DebruijnIndex,
 
     fld_r: &'a mut (dyn FnMut(ty::BoundRegion) -> ty::Region<'tcx> + 'a),
-    map: BTreeMap<ty::BoundRegion, ty::Region<'tcx>>
+    fld_t: &'a mut (dyn FnMut(ty::BoundTy) -> ty::Ty<'tcx> + 'a),
+}
+
+impl<'a, 'gcx, 'tcx> BoundVarReplacer<'a, 'gcx, 'tcx> {
+    fn new<F, G>(
+        tcx: TyCtxt<'a, 'gcx, 'tcx>,
+        fld_r: &'a mut F,
+        fld_t: &'a mut G
+    ) -> Self
+        where F: FnMut(ty::BoundRegion) -> ty::Region<'tcx>,
+              G: FnMut(ty::BoundTy) -> ty::Ty<'tcx>
+    {
+        BoundVarReplacer {
+            tcx,
+            current_index: ty::INNERMOST,
+            fld_r,
+            fld_t,
+        }
+    }
+}
+
+impl<'a, 'gcx, 'tcx> TypeFolder<'gcx, 'tcx> for BoundVarReplacer<'a, 'gcx, 'tcx> {
+    fn tcx<'b>(&'b self) -> TyCtxt<'b, 'gcx, 'tcx> { self.tcx }
+
+    fn fold_binder<T: TypeFoldable<'tcx>>(&mut self, t: &ty::Binder<T>) -> ty::Binder<T> {
+        self.current_index.shift_in(1);
+        let t = t.super_fold_with(self);
+        self.current_index.shift_out(1);
+        t
+    }
+
+    fn fold_ty(&mut self, t: Ty<'tcx>) -> Ty<'tcx> {
+        match t.sty {
+            ty::Bound(bound_ty) => {
+                if bound_ty.index == self.current_index {
+                    let fld_t = &mut self.fld_t;
+                    let ty = fld_t(bound_ty);
+                    ty::fold::shift_vars(
+                        self.tcx,
+                        &ty,
+                        self.current_index.as_u32()
+                    )
+                } else {
+                    t
+                }
+            }
+            _ => {
+                if !t.has_vars_bound_at_or_above(self.current_index) {
+                    // Nothing more to substitute.
+                    t
+                } else {
+                    t.super_fold_with(self)
+                }
+            }
+        }
+    }
+
+    fn fold_region(&mut self, r: ty::Region<'tcx>) -> ty::Region<'tcx> {
+        match *r {
+            ty::ReLateBound(debruijn, br) if debruijn == self.current_index => {
+                let fld_r = &mut self.fld_r;
+                let region = fld_r(br);
+                if let ty::ReLateBound(debruijn1, br) = *region {
+                    // If the callback returns a late-bound region,
+                    // that region should always use the INNERMOST
+                    // debruijn index. Then we adjust it to the
+                    // correct depth.
+                    assert_eq!(debruijn1, ty::INNERMOST);
+                    self.tcx.mk_region(ty::ReLateBound(debruijn, br))
+                } else {
+                    region
+                }
+            }
+            _ => r
+        }
+    }
 }
 
 impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> {
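
A quick aside for readers following the bound-variable refactor: the `BoundVarReplacer` above generalizes the old region-only replacer by carrying two callbacks, `fld_r` for escaping bound regions and `fld_t` for escaping bound types, while `current_index` is shifted in and out around every binder. The following is a self-contained sketch of that shape over a made-up mini type language; `MiniTy`, `replace_bound_vars`, and everything else in it are invented for illustration and are not rustc code.

```rust
// Illustrative only: the two-callback replacement pattern over a toy type AST.

#[derive(Clone, Debug, PartialEq)]
enum MiniTy {
    BoundTy { index: u32, var: u32 }, // like ty::Bound(BoundTy): De Bruijn index + var
    BoundRe { index: u32, var: u32 }, // like ty::ReLateBound(debruijn, br)
    Ref(Box<MiniTy>, Box<MiniTy>),    // a "region" plus a pointee, roughly &'r T
    Binder(Box<MiniTy>),              // like ty::Binder<T>: binds index 0 inside
    Unit,
}

fn replace_bound_vars(
    ty: &MiniTy,
    depth: u32,                           // current binder depth ("current_index")
    fld_r: &mut dyn FnMut(u32) -> MiniTy, // replacement for bound regions at `depth`
    fld_t: &mut dyn FnMut(u32) -> MiniTy, // replacement for bound types at `depth`
) -> MiniTy {
    match ty {
        // Only vars bound exactly at the current depth are replaced; the real
        // folder additionally shifts the callback's result back to this depth.
        MiniTy::BoundTy { index, var } if *index == depth => fld_t(*var),
        MiniTy::BoundRe { index, var } if *index == depth => fld_r(*var),
        MiniTy::Ref(r, t) => MiniTy::Ref(
            Box::new(replace_bound_vars(r, depth, fld_r, fld_t)),
            Box::new(replace_bound_vars(t, depth, fld_r, fld_t)),
        ),
        // Entering a binder shifts the depth in, mirroring `fold_binder`.
        MiniTy::Binder(inner) => {
            MiniTy::Binder(Box::new(replace_bound_vars(inner, depth + 1, fld_r, fld_t)))
        }
        other => other.clone(),
    }
}

fn main() {
    // A binder whose body mentions a bound region and a bound type, both at index 0.
    let bound = MiniTy::Binder(Box::new(MiniTy::Ref(
        Box::new(MiniTy::BoundRe { index: 0, var: 0 }),
        Box::new(MiniTy::BoundTy { index: 0, var: 1 }),
    )));

    // Peel the binder off, then replace the now-escaping vars at depth 0.
    let body = match bound {
        MiniTy::Binder(inner) => *inner,
        _ => unreachable!(),
    };
    let replaced = replace_bound_vars(
        &body,
        0,
        &mut |_br| MiniTy::Unit, // stand-in for "make a fresh region"
        &mut |_bt| MiniTy::Unit, // stand-in for "make a fresh type"
    );
    assert_eq!(
        replaced,
        MiniTy::Ref(Box::new(MiniTy::Unit), Box::new(MiniTy::Unit))
    );
    println!("{:?}", replaced);
}
```

The "peel, then replace at depth 0" sequence in `main` is the same shape as the `replace_bound_vars` method added in the next hunk, which forwards to `replace_escaping_bound_vars(value.skip_binder(), fld_r, fld_t)`.
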
@@ -440,16 +514,65 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> {
     /// same `BoundRegion` will reuse the previous result.  A map is
     /// returned at the end with each bound region and the free region
     /// that replaced it.
-    pub fn replace_late_bound_regions<T,F>(self,
+    ///
+    /// This method only replaces late bound regions and the result may still
+    /// contain escaping bound types.
+    pub fn replace_late_bound_regions<T, F>(
+        self,
         value: &Binder<T>,
-        mut f: F)
-        -> (T, BTreeMap<ty::BoundRegion, ty::Region<'tcx>>)
-        where F : FnMut(ty::BoundRegion) -> ty::Region<'tcx>,
-              T : TypeFoldable<'tcx>,
+        mut fld_r: F
+    ) -> (T, BTreeMap<ty::BoundRegion, ty::Region<'tcx>>)
+        where F: FnMut(ty::BoundRegion) -> ty::Region<'tcx>,
+              T: TypeFoldable<'tcx>
     {
-        let mut replacer = RegionReplacer::new(self, &mut f);
+        let mut map = BTreeMap::new();
+        let mut real_fldr = |br| {
+            *map.entry(br).or_insert_with(|| fld_r(br))
+        };
+
+        // identity for bound types
+        let mut fld_t = |bound_ty| self.mk_ty(ty::Bound(bound_ty));
+
+        let mut replacer = BoundVarReplacer::new(self, &mut real_fldr, &mut fld_t);
         let result = value.skip_binder().fold_with(&mut replacer);
-        (result, replacer.map)
+        (result, map)
+    }
+
+    /// Replace all escaping bound vars. The `fld_r` closure replaces escaping
+    /// bound regions while the `fld_t` closure replaces escaping bound types.
+    pub fn replace_escaping_bound_vars<T, F, G>(
+        self,
+        value: &T,
+        mut fld_r: F,
+        mut fld_t: G
+    ) -> T
+        where F: FnMut(ty::BoundRegion) -> ty::Region<'tcx>,
+              G: FnMut(ty::BoundTy) -> ty::Ty<'tcx>,
+              T: TypeFoldable<'tcx>
+    {
+        if !value.has_escaping_bound_vars() {
+            value.clone()
+        } else {
+            let mut replacer = BoundVarReplacer::new(self, &mut fld_r, &mut fld_t);
+            let result = value.fold_with(&mut replacer);
+            result
+        }
+    }
+
+    /// Replace all types or regions bound by the given `Binder`. The `fld_r`
+    /// closure replaces bound regions while the `fld_t` closure replaces bound
+    /// types.
+    pub fn replace_bound_vars<T, F, G>(
+        self,
+        value: &Binder<T>,
+        fld_r: F,
+        fld_t: G
+    ) -> T
+        where F: FnMut(ty::BoundRegion) -> ty::Region<'tcx>,
+              G: FnMut(ty::BoundTy) -> ty::Ty<'tcx>,
+              T: TypeFoldable<'tcx>
+    {
+        self.replace_escaping_bound_vars(value.skip_binder(), fld_r, fld_t)
     }
 
     /// Replace any late-bound regions bound in `value` with
@@ -549,21 +672,33 @@ pub fn anonymize_late_bound_regions<T>(self, sig: &Binder<T>) -> Binder<T>
     }
 }
 
-impl<'a, 'gcx, 'tcx> RegionReplacer<'a, 'gcx, 'tcx> {
-    fn new<F>(tcx: TyCtxt<'a, 'gcx, 'tcx>, fld_r: &'a mut F)
-              -> RegionReplacer<'a, 'gcx, 'tcx>
-        where F : FnMut(ty::BoundRegion) -> ty::Region<'tcx>
-    {
-        RegionReplacer {
+///////////////////////////////////////////////////////////////////////////
+// Shifter
+//
+// Shifts the De Bruijn indices on all escaping bound vars by a
+// fixed amount. Useful in substitution or when otherwise introducing
+// a binding level that is not intended to capture the existing bound
+// vars. See comment on `shift_vars_through_binders` method in
+// `subst.rs` for more details.
+
+struct Shifter<'a, 'gcx: 'a+'tcx, 'tcx: 'a> {
+    tcx: TyCtxt<'a, 'gcx, 'tcx>,
+
+    current_index: ty::DebruijnIndex,
+    amount: u32,
+}
+
+impl Shifter<'a, 'gcx, 'tcx> {
+    pub fn new(tcx: TyCtxt<'a, 'gcx, 'tcx>, amount: u32) -> Self {
+        Shifter {
             tcx,
             current_index: ty::INNERMOST,
-            fld_r,
-            map: BTreeMap::default()
+            amount,
         }
     }
 }
 
-impl<'a, 'gcx, 'tcx> TypeFolder<'gcx, 'tcx> for RegionReplacer<'a, 'gcx, 'tcx> {
+impl<'a, 'gcx, 'tcx> TypeFolder<'gcx, 'tcx> for Shifter<'a, 'gcx, 'tcx> {
     fn tcx<'b>(&'b self) -> TyCtxt<'b, 'gcx, 'tcx> { self.tcx }
 
     fn fold_binder<T: TypeFoldable<'tcx>>(&mut self, t: &ty::Binder<T>) -> ty::Binder<T> {
@@ -573,64 +708,48 @@ fn fold_binder<T: TypeFoldable<'tcx>>(&mut self, t: &ty::Binder<T>) -> ty::Binde
         t
     }
 
-    fn fold_ty(&mut self, t: Ty<'tcx>) -> Ty<'tcx> {
-        if !t.has_regions_bound_at_or_above(self.current_index) {
-            return t;
-        }
-
-        t.super_fold_with(self)
-    }
-
     fn fold_region(&mut self, r: ty::Region<'tcx>) -> ty::Region<'tcx> {
         match *r {
-            ty::ReLateBound(debruijn, br) if debruijn == self.current_index => {
-                let fld_r = &mut self.fld_r;
-                let region = *self.map.entry(br).or_insert_with(|| fld_r(br));
-                if let ty::ReLateBound(debruijn1, br) = *region {
-                    // If the callback returns a late-bound region,
-                    // that region should always use the INNERMOST
-                    // debruijn index. Then we adjust it to the
-                    // correct depth.
-                    assert_eq!(debruijn1, ty::INNERMOST);
-                    self.tcx.mk_region(ty::ReLateBound(debruijn, br))
+            ty::ReLateBound(debruijn, br) => {
+                if self.amount == 0 || debruijn < self.current_index {
+                    r
                 } else {
-                    region
+                    let shifted = ty::ReLateBound(debruijn.shifted_in(self.amount), br);
+                    self.tcx.mk_region(shifted)
                 }
             }
             _ => r
         }
     }
-}
 
-///////////////////////////////////////////////////////////////////////////
-// Region shifter
-//
-// Shifts the De Bruijn indices on all escaping bound regions by a
-// fixed amount. Useful in substitution or when otherwise introducing
-// a binding level that is not intended to capture the existing bound
-// regions. See comment on `shift_regions_through_binders` method in
-// `subst.rs` for more details.
+    fn fold_ty(&mut self, ty: ty::Ty<'tcx>) -> ty::Ty<'tcx> {
+        match ty.sty {
+            ty::Bound(bound_ty) => {
+                if self.amount == 0 || bound_ty.index < self.current_index {
+                    ty
+                } else {
+                    let shifted = ty::BoundTy {
+                        index: bound_ty.index.shifted_in(self.amount),
+                        var: bound_ty.var,
+                        kind: bound_ty.kind,
+                    };
+                    self.tcx.mk_ty(ty::Bound(shifted))
+                }
+            }
 
-pub fn shift_region(region: ty::RegionKind, amount: u32) -> ty::RegionKind {
-    match region {
-        ty::ReLateBound(debruijn, br) => {
-            ty::ReLateBound(debruijn.shifted_in(amount), br)
-        }
-        _ => {
-            region
+            _ => ty.super_fold_with(self),
         }
     }
 }
 
-pub fn shift_region_ref<'a, 'gcx, 'tcx>(
+pub fn shift_region<'a, 'gcx, 'tcx>(
     tcx: TyCtxt<'a, 'gcx, 'tcx>,
     region: ty::Region<'tcx>,
-    amount: u32)
-    -> ty::Region<'tcx>
-{
+    amount: u32
+) -> ty::Region<'tcx> {
     match region {
-        &ty::ReLateBound(debruijn, br) if amount > 0 => {
-            tcx.mk_region(ty::ReLateBound(debruijn.shifted_in(amount), br))
+        ty::ReLateBound(debruijn, br) if amount > 0 => {
+            tcx.mk_region(ty::ReLateBound(debruijn.shifted_in(amount), *br))
         }
         _ => {
             region
@@ -638,20 +757,19 @@ pub fn shift_region_ref<'a, 'gcx, 'tcx>(
     }
 }
 
-pub fn shift_regions<'a, 'gcx, 'tcx, T>(tcx: TyCtxt<'a, 'gcx, 'tcx>,
-                                        amount: u32,
-                                        value: &T) -> T
-    where T: TypeFoldable<'tcx>
-{
-    debug!("shift_regions(value={:?}, amount={})",
+pub fn shift_vars<'a, 'gcx, 'tcx, T>(
+    tcx: TyCtxt<'a, 'gcx, 'tcx>,
+    value: &T,
+    amount: u32
+) -> T where T: TypeFoldable<'tcx> {
+    debug!("shift_vars(value={:?}, amount={})",
            value, amount);
 
-    value.fold_with(&mut RegionFolder::new(tcx, &mut false, &mut |region, _current_depth| {
-        shift_region_ref(tcx, region, amount)
-    }))
+    value.fold_with(&mut Shifter::new(tcx, amount))
 }
 
-/// An "escaping region" is a bound region whose binder is not part of `t`.
+/// An "escaping var" is a bound var whose binder is not part of `t`. A bound var can be a
+/// bound region or a bound type.
 ///
 /// So, for example, consider a type like the following, which has two binders:
 ///
@@ -663,24 +781,24 @@ pub fn shift_regions<'a, 'gcx, 'tcx, T>(tcx: TyCtxt<'a, 'gcx, 'tcx>,
 /// binders of both `'a` and `'b` are part of the type itself. However, if we consider the *inner
 /// fn type*, that type has an escaping region: `'a`.
 ///
-/// Note that what I'm calling an "escaping region" is often just called a "free region". However,
-/// we already use the term "free region". It refers to the regions that we use to represent bound
-/// regions on a fn definition while we are typechecking its body.
+/// Note that what I'm calling an "escaping var" is often just called a "free var". However,
+/// we already use the term "free var". It refers to the regions or types that we use to represent
+/// bound regions or type params on a fn definition while we are typechecking its body.
 ///
 /// To clarify, conceptually there is no particular difference between
-/// an "escaping" region and a "free" region. However, there is a big
+/// an "escaping" var and a "free" var. However, there is a big
 /// difference in practice. Basically, when "entering" a binding
 /// level, one is generally required to do some sort of processing to
-/// a bound region, such as replacing it with a fresh/placeholder
-/// region, or making an entry in the environment to represent the
-/// scope to which it is attached, etc. An escaping region represents
-/// a bound region for which this processing has not yet been done.
-struct HasEscapingRegionsVisitor {
+/// a bound var, such as replacing it with a fresh/placeholder
+/// var, or making an entry in the environment to represent the
+/// scope to which it is attached, etc. An escaping var represents
+/// a bound var for which this processing has not yet been done.
+struct HasEscapingVarsVisitor {
     /// Anything bound by `outer_index` or "above" is escaping
     outer_index: ty::DebruijnIndex,
 }
 
-impl<'tcx> TypeVisitor<'tcx> for HasEscapingRegionsVisitor {
+impl<'tcx> TypeVisitor<'tcx> for HasEscapingVarsVisitor {
     fn visit_binder<T: TypeFoldable<'tcx>>(&mut self, t: &Binder<T>) -> bool {
         self.outer_index.shift_in(1);
         let result = t.super_visit_with(self);
@@ -693,7 +811,7 @@ fn visit_ty(&mut self, t: Ty<'tcx>) -> bool {
         // `outer_index`, that means that `t` contains some content
         // bound at `outer_index` or above (because
         // `outer_exclusive_binder` is always 1 higher than the
-        // content in `t`). Therefore, `t` has some escaping regions.
+        // content in `t`). Therefore, `t` has some escaping vars.
         t.outer_exclusive_binder > self.outer_index
     }
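
The renamed `HasEscapingVarsVisitor` reduces to a depth comparison: walk the value, bump a counter at each binder, and flag any bound var whose De Bruijn index reaches that counter. Below is a toy version of the same check over an invented `Term` type; none of it is rustc code.

```rust
// Illustrative only: an "escaping bound var" check over a toy term language.

#[derive(Clone, Debug)]
enum Term {
    Bound(u32),                // De Bruijn index: 0 = innermost enclosing binder
    Binder(Box<Term>),         // introduces one binding level
    App(Box<Term>, Box<Term>), // any other structure to recurse through
}

/// True if `t` mentions a var bound at `outer_index` or above, i.e. a var
/// whose binder is not contained in `t` once `outer_index` levels are assumed.
fn has_vars_bound_at_or_above(t: &Term, outer_index: u32) -> bool {
    match t {
        Term::Bound(ix) => *ix >= outer_index,
        // Inside a binder, indices refer one level further out.
        Term::Binder(body) => has_vars_bound_at_or_above(body, outer_index + 1),
        Term::App(a, b) => {
            has_vars_bound_at_or_above(a, outer_index)
                || has_vars_bound_at_or_above(b, outer_index)
        }
    }
}

fn has_escaping_bound_vars(t: &Term) -> bool {
    has_vars_bound_at_or_above(t, 0) // 0 plays the role of ty::INNERMOST
}

fn main() {
    // \x. x  -- closed: nothing escapes.
    let closed = Term::Binder(Box::new(Term::Bound(0)));
    // \x. y  -- index 1 under the binder points outside the whole term.
    let open = Term::Binder(Box::new(Term::Bound(1)));

    assert!(!has_escaping_bound_vars(&closed));
    assert!(has_escaping_bound_vars(&open));

    // Peeling the binder off `closed` exposes an escaping var, which is why
    // skipping a binder has to be handled with care.
    assert!(has_escaping_bound_vars(&Term::Bound(0)));

    // Escaping vars anywhere in the structure are found.
    let app = Term::App(Box::new(closed.clone()), Box::new(Term::Bound(3)));
    assert!(has_escaping_bound_vars(&app));
    println!("ok");
}
```
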
 
index 041565c8b5a07ba6834e2e5bebbabf52373adf44..b6691df39c120ccebeee7eef6692e609802447ab 100644 (file)
@@ -213,7 +213,7 @@ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
 impl<'a, 'b, 'tcx> Instance<'tcx> {
     pub fn new(def_id: DefId, substs: &'tcx Substs<'tcx>)
                -> Instance<'tcx> {
-        assert!(!substs.has_escaping_regions(),
+        assert!(!substs.has_escaping_bound_vars(),
                 "substs of instance {:?} not normalized for codegen: {:?}",
                 def_id, substs);
         Instance { def: InstanceDef::Item(def_id), substs: substs }
@@ -400,7 +400,9 @@ fn resolve_associated_item<'a, 'tcx>(
                 None
             }
         }
-        traits::VtableAutoImpl(..) | traits::VtableParam(..) => None
+        traits::VtableAutoImpl(..) |
+        traits::VtableParam(..) |
+        traits::VtableTraitAlias(..) => None
     }
 }
 
index 7153c729d1542b439d8dcec805ca5a8cccf0a37c..d44ba03084159ebe6a320e1e41d8f75a77f6b421 100644 (file)
@@ -519,6 +519,7 @@ pub fn characteristic_def_id_of_type(ty: Ty<'_>) -> Option<DefId> {
         ty::Param(_) |
         ty::Opaque(..) |
         ty::Infer(_) |
+        ty::Bound(..) |
         ty::Error |
         ty::GeneratorWitness(..) |
         ty::Never |
index 05d4aeb6ddec4b0fa2964e409f52860a50d623bf..5aeba652f3228e9988abd367b72adc80440eb691 100644 (file)
@@ -30,7 +30,7 @@
 
 pub trait IntegerExt {
     fn to_ty<'a, 'tcx>(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>, signed: bool) -> Ty<'tcx>;
-    fn from_attr<C: HasDataLayout>(cx: C, ity: attr::IntType) -> Integer;
+    fn from_attr<C: HasDataLayout>(cx: &C, ity: attr::IntType) -> Integer;
     fn repr_discr<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
                             ty: Ty<'tcx>,
                             repr: &ReprOptions,
@@ -56,7 +56,7 @@ fn to_ty<'a, 'tcx>(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>, signed: bool) -> Ty<'tcx>
     }
 
     /// Get the Integer type from an attr::IntType.
-    fn from_attr<C: HasDataLayout>(cx: C, ity: attr::IntType) -> Integer {
+    fn from_attr<C: HasDataLayout>(cx: &C, ity: attr::IntType) -> Integer {
         let dl = cx.data_layout();
 
         match ity {
@@ -92,7 +92,7 @@ fn repr_discr<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
         let min_default = I8;
 
         if let Some(ity) = repr.int {
-            let discr = Integer::from_attr(tcx, ity);
+            let discr = Integer::from_attr(&tcx, ity);
             let fit = if ity.is_signed() { signed_fit } else { unsigned_fit };
             if discr < fit {
                 bug!("Integer::repr_discr: `#[repr]` hint too small for \
@@ -202,14 +202,13 @@ pub fn provide(providers: &mut ty::query::Providers<'_>) {
     };
 }
 
-#[derive(Copy, Clone)]
 pub struct LayoutCx<'tcx, C> {
     pub tcx: C,
     pub param_env: ty::ParamEnv<'tcx>
 }
 
 impl<'a, 'tcx> LayoutCx<'tcx, TyCtxt<'a, 'tcx, 'tcx>> {
-    fn layout_raw_uncached(self, ty: Ty<'tcx>)
+    fn layout_raw_uncached(&self, ty: Ty<'tcx>)
                            -> Result<&'tcx LayoutDetails, LayoutError<'tcx>> {
         let tcx = self.tcx;
         let param_env = self.param_env;
@@ -899,7 +898,7 @@ enum StructKind {
 
                 let (mut min, mut max) = (i128::max_value(), i128::min_value());
                 let discr_type = def.repr.discr_type();
-                let bits = Integer::from_attr(tcx, discr_type).size().bits();
+                let bits = Integer::from_attr(self, discr_type).size().bits();
                 for (i, discr) in def.discriminants(tcx).enumerate() {
                     if variants[i].iter().any(|f| f.abi.is_uninhabited()) {
                         continue;
@@ -1124,9 +1123,14 @@ enum StructKind {
                 }
                 tcx.layout_raw(param_env.and(normalized))?
             }
-            ty::UnnormalizedProjection(..) | ty::GeneratorWitness(..) | ty::Infer(_) => {
+
+            ty::Bound(..) |
+            ty::UnnormalizedProjection(..) |
+            ty::GeneratorWitness(..) |
+            ty::Infer(_) => {
                 bug!("LayoutDetails::compute: unexpected type `{}`", ty)
             }
+
             ty::Param(_) | ty::Error => {
                 return Err(LayoutError::Unknown(ty));
             }
@@ -1136,7 +1140,7 @@ enum StructKind {
     /// This is invoked by the `layout_raw` query to record the final
     /// layout of each type.
     #[inline]
-    fn record_layout_for_printing(self, layout: TyLayout<'tcx>) {
+    fn record_layout_for_printing(&self, layout: TyLayout<'tcx>) {
         // If we are running with `-Zprint-type-sizes`, record layouts for
         // dumping later. Ignore layouts that are done with non-empty
         // environments or non-monomorphic layouts, as the user only wants
@@ -1153,7 +1157,7 @@ fn record_layout_for_printing(self, layout: TyLayout<'tcx>) {
         self.record_layout_for_printing_outlined(layout)
     }
 
-    fn record_layout_for_printing_outlined(self, layout: TyLayout<'tcx>) {
+    fn record_layout_for_printing_outlined(&self, layout: TyLayout<'tcx>) {
         // (delay format until we actually need it)
         let record = |kind, packed, opt_discr_size, variants| {
             let type_desc = format!("{:?}", layout.ty);
@@ -1473,7 +1477,7 @@ impl<'a, 'tcx> LayoutOf for LayoutCx<'tcx, TyCtxt<'a, 'tcx, 'tcx>> {
 
     /// Computes the layout of a type. Note that this implicitly
     /// executes in "reveal all" mode.
-    fn layout_of(self, ty: Ty<'tcx>) -> Self::TyLayout {
+    fn layout_of(&self, ty: Ty<'tcx>) -> Self::TyLayout {
         let param_env = self.param_env.with_reveal_all();
         let ty = self.tcx.normalize_erasing_regions(param_env, ty);
         let details = self.tcx.layout_raw(param_env.and(ty))?;
@@ -1500,7 +1504,7 @@ impl<'a, 'tcx> LayoutOf for LayoutCx<'tcx, ty::query::TyCtxtAt<'a, 'tcx, 'tcx>>
 
     /// Computes the layout of a type. Note that this implicitly
     /// executes in "reveal all" mode.
-    fn layout_of(self, ty: Ty<'tcx>) -> Self::TyLayout {
+    fn layout_of(&self, ty: Ty<'tcx>) -> Self::TyLayout {
         let param_env = self.param_env.with_reveal_all();
         let ty = self.tcx.normalize_erasing_regions(param_env, ty);
         let details = self.tcx.layout_raw(param_env.and(ty))?;
@@ -1558,7 +1562,7 @@ impl<'a, 'tcx, C> TyLayoutMethods<'tcx, C> for Ty<'tcx>
     where C: LayoutOf<Ty = Ty<'tcx>> + HasTyCtxt<'tcx>,
           C::TyLayout: MaybeResult<TyLayout<'tcx>>
 {
-    fn for_variant(this: TyLayout<'tcx>, cx: C, variant_index: usize) -> TyLayout<'tcx> {
+    fn for_variant(this: TyLayout<'tcx>, cx: &C, variant_index: usize) -> TyLayout<'tcx> {
         let details = match this.variants {
             Variants::Single { index } if index == variant_index => this.details,
 
@@ -1597,7 +1601,7 @@ fn for_variant(this: TyLayout<'tcx>, cx: C, variant_index: usize) -> TyLayout<'t
         }
     }
 
-    fn field(this: TyLayout<'tcx>, cx: C, i: usize) -> C::TyLayout {
+    fn field(this: TyLayout<'tcx>, cx: &C, i: usize) -> C::TyLayout {
         let tcx = cx.tcx();
         cx.layout_of(match this.ty.sty {
             ty::Bool |
@@ -1694,7 +1698,7 @@ fn field(this: TyLayout<'tcx>, cx: C, i: usize) -> C::TyLayout {
                     Variants::Tagged { tag: ref discr, .. } |
                     Variants::NicheFilling { niche: ref discr, .. } => {
                         assert_eq!(i, 0);
-                        let layout = LayoutDetails::scalar(tcx, discr.clone());
+                        let layout = LayoutDetails::scalar(cx, discr.clone());
                         return MaybeResult::from_ok(TyLayout {
                             details: tcx.intern_layout(layout),
                             ty: discr.value.to_ty(tcx)
@@ -1703,7 +1707,7 @@ fn field(this: TyLayout<'tcx>, cx: C, i: usize) -> C::TyLayout {
                 }
             }
 
-            ty::Projection(_) | ty::UnnormalizedProjection(..) |
+            ty::Projection(_) | ty::UnnormalizedProjection(..) | ty::Bound(..) |
             ty::Opaque(..) | ty::Param(_) | ty::Infer(_) | ty::Error => {
                 bug!("TyLayout::field_type: unexpected type `{}`", this.ty)
             }
@@ -1720,7 +1724,7 @@ struct Niche {
 impl Niche {
     fn reserve<'a, 'tcx>(
         &self,
-        cx: LayoutCx<'tcx, TyCtxt<'a, 'tcx, 'tcx>>,
+        cx: &LayoutCx<'tcx, TyCtxt<'a, 'tcx, 'tcx>>,
         count: u128,
     ) -> Option<(u128, Scalar)> {
         if count > self.available {
@@ -1740,7 +1744,7 @@ impl<'a, 'tcx> LayoutCx<'tcx, TyCtxt<'a, 'tcx, 'tcx>> {
     /// Find the offset of a niche leaf field, starting from
     /// the given type and recursing through aggregates.
     // FIXME(eddyb) traverse already optimized enums.
-    fn find_niche(self, layout: TyLayout<'tcx>) -> Result<Option<Niche>, LayoutError<'tcx>> {
+    fn find_niche(&self, layout: TyLayout<'tcx>) -> Result<Option<Niche>, LayoutError<'tcx>> {
         let scalar_niche = |scalar: &Scalar, offset| {
             let Scalar { value, valid_range: ref v } = *scalar;
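
The layout hunks in this file are mechanical fallout of dropping `#[derive(Copy, Clone)]` from `LayoutCx`: once the context is no longer freely copyable, `layout_of`, `for_variant`, `field`, `Niche::reserve`, and `Integer::from_attr` all take it by reference rather than by value. The general Rust pattern looks like the sketch below; `Ctx`, `HasName`, and the method bodies are invented for illustration.

```rust
// Illustrative only: switching a non-Copy context from by-value to by-reference.

struct Ctx {
    // Imagine non-Copy state here (caches, interners, ...), which is what
    // makes a by-value `self` receiver inconvenient once `Copy` is gone.
    cache: Vec<String>,
}

trait HasName {
    fn name(&self) -> &str;
}

impl HasName for Ctx {
    fn name(&self) -> &str {
        "Ctx"
    }
}

impl Ctx {
    // Before: `fn layout_of(self, ...)` was fine while `Ctx: Copy`.
    // After: take `&self`, so callers keep ownership of the context.
    fn layout_of(&self, ty: &str) -> usize {
        self.cache.iter().filter(|t| t.as_str() == ty).count() + ty.len()
    }
}

// Generic helpers follow suit: accept `cx: &C` instead of `cx: C`.
fn describe<C: HasName>(cx: &C) -> String {
    format!("layout computed by {}", cx.name())
}

fn main() {
    let cx = Ctx { cache: vec!["u32".to_string()] };
    // The context can be used repeatedly because every call only borrows it.
    let a = cx.layout_of("u32");
    let b = cx.layout_of("bool");
    println!("{} {} {}", a, b, describe(&cx));
}
```
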
 
index f8fc2cc83034fb418d2df9ef0406d72aa47930be..c7c197d11c03beb112b95d85933d45b9d6acc1ef 100644 (file)
@@ -63,7 +63,7 @@
 
 use hir;
 
-pub use self::sty::{Binder, BoundTy, BoundTyIndex, DebruijnIndex, INNERMOST};
+pub use self::sty::{Binder, BoundTy, BoundTyKind, BoundVar, DebruijnIndex, INNERMOST};
 pub use self::sty::{FnSig, GenSig, CanonicalPolyFnSig, PolyFnSig, PolyGenSig};
 pub use self::sty::{InferTy, ParamTy, ProjectionTy, ExistentialPredicate};
 pub use self::sty::{ClosureSubsts, GeneratorSubsts, UpvarSubsts, TypeAndMut};
@@ -463,13 +463,9 @@ pub struct TypeFlags: u32 {
         // Currently we can't normalize projections w/ bound regions.
         const HAS_NORMALIZABLE_PROJECTION = 1 << 12;
 
-        // Set if this includes a "canonical" type or region var --
-        // ought to be true only for the results of canonicalization.
-        const HAS_CANONICAL_VARS = 1 << 13;
-
         /// Does this have any `ReLateBound` regions? Used to check
         /// if a global bound is safe to evaluate.
-        const HAS_RE_LATE_BOUND = 1 << 14;
+        const HAS_RE_LATE_BOUND = 1 << 13;
 
         const NEEDS_SUBST        = TypeFlags::HAS_PARAMS.bits |
                                    TypeFlags::HAS_SELF.bits |
@@ -490,7 +486,6 @@ pub struct TypeFlags: u32 {
                                   TypeFlags::HAS_TY_CLOSURE.bits |
                                   TypeFlags::HAS_FREE_LOCAL_NAMES.bits |
                                   TypeFlags::KEEP_IN_LOCAL_TCX.bits |
-                                  TypeFlags::HAS_CANONICAL_VARS.bits |
                                   TypeFlags::HAS_RE_LATE_BOUND.bits;
     }
 }
@@ -1051,24 +1046,24 @@ pub enum Predicate<'tcx> {
     /// would be the type parameters.
     Trait(PolyTraitPredicate<'tcx>),
 
-    /// where 'a : 'b
+    /// where `'a : 'b`
     RegionOutlives(PolyRegionOutlivesPredicate<'tcx>),
 
-    /// where T : 'a
+    /// where `T : 'a`
     TypeOutlives(PolyTypeOutlivesPredicate<'tcx>),
 
-    /// where <T as TraitRef>::Name == X, approximately.
-    /// See `ProjectionPredicate` struct for details.
+    /// where `<T as TraitRef>::Name == X`, approximately.
+    /// See the `ProjectionPredicate` struct for details.
     Projection(PolyProjectionPredicate<'tcx>),
 
-    /// no syntax: T WF
+    /// no syntax: `T` well-formed
     WellFormed(Ty<'tcx>),
 
     /// trait must be object-safe
     ObjectSafe(DefId),
 
     /// No direct syntax. May be thought of as `where T : FnFoo<...>`
-    /// for some substitutions `...` and T being a closure type.
+    /// for some substitutions `...` and `T` being a closure type.
     /// Satisfied (or refuted) once we know the closure's kind.
     ClosureKind(DefId, ClosureSubsts<'tcx>, ClosureKind),
 
@@ -1522,10 +1517,17 @@ pub fn next_universe(self) -> UniverseIndex {
 
     /// True if `self` can name a name from `other` -- in other words,
     /// if the set of names in `self` is a superset of those in
-    /// `other`.
+    /// `other` (`self >= other`).
     pub fn can_name(self, other: UniverseIndex) -> bool {
         self.private >= other.private
     }
+
+    /// True if `self` cannot name some names from `other` -- in other
+    /// words, if the set of names in `self` is a strict subset of
+    /// those in `other` (`self < other`).
+    pub fn cannot_name(self, other: UniverseIndex) -> bool {
+        self.private < other.private
+    }
 }
 
 /// The "placeholder index" fully defines a placeholder region.
@@ -1540,6 +1542,8 @@ pub struct Placeholder {
     pub name: BoundRegion,
 }
 
+impl_stable_hash_for!(struct Placeholder { universe, name });
+
 /// When type checking, we use the `ParamEnv` to track
 /// details about the set of where-clauses that are in scope at this
 /// particular point.
@@ -2369,6 +2373,7 @@ fn sized_constraint_for_ty(&self,
                 }
             }
 
+            Bound(..) |
             Infer(..) => {
                 bug!("unexpected type `{:?}` in sized_constraint_for_ty",
                      ty)
@@ -2792,7 +2797,7 @@ pub fn get_attrs(self, did: DefId) -> Attributes<'gcx> {
         }
     }
 
-    /// Determine whether an item is annotated with an attribute
+    /// Determine whether an item is annotated with an attribute.
     pub fn has_attr(self, did: DefId, attr: &str) -> bool {
         attr::contains_name(&self.get_attrs(did), attr)
     }
@@ -2806,14 +2811,14 @@ pub fn generator_layout(self, def_id: DefId) -> &'tcx GeneratorLayout<'tcx> {
         self.optimized_mir(def_id).generator_layout.as_ref().unwrap()
     }
 
-    /// Given the def_id of an impl, return the def_id of the trait it implements.
+    /// Given the def-id of an impl, return the def-id of the trait it implements.
     /// If it implements no trait, return `None`.
     pub fn trait_id_of_impl(self, def_id: DefId) -> Option<DefId> {
         self.impl_trait_ref(def_id).map(|tr| tr.def_id)
     }
 
-    /// If the given def ID describes a method belonging to an impl, return the
-    /// ID of the impl that the method belongs to. Otherwise, return `None`.
+    /// If the given def-id describes a method belonging to an impl, return the
+    /// def-id of the impl that the method belongs to. Otherwise, return `None`.
     pub fn impl_of_method(self, def_id: DefId) -> Option<DefId> {
         let item = if def_id.krate != LOCAL_CRATE {
             if let Some(Def::Method(_)) = self.describe_def(def_id) {
@@ -2978,7 +2983,7 @@ fn trait_of_item<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, def_id: DefId) -> Option
         })
 }
 
-/// Yields the parent function's `DefId` if `def_id` is an `impl Trait` definition
+/// Yields the parent function's `DefId` if `def_id` is an `impl Trait` definition.
 pub fn is_impl_trait_defn(tcx: TyCtxt<'_, '_, '_>, def_id: DefId) -> Option<DefId> {
     if let Some(node_id) = tcx.hir.as_local_node_id(def_id) {
         if let Node::Item(item) = tcx.hir.get(node_id) {
@@ -2990,7 +2995,19 @@ pub fn is_impl_trait_defn(tcx: TyCtxt<'_, '_, '_>, def_id: DefId) -> Option<DefI
     None
 }
 
-/// See `ParamEnv` struct def'n for details.
+/// Returns `true` if `def_id` is a trait alias.
+pub fn is_trait_alias(tcx: TyCtxt<'_, '_, '_>, def_id: DefId) -> bool {
+    if let Some(node_id) = tcx.hir.as_local_node_id(def_id) {
+        if let Node::Item(item) = tcx.hir.get(node_id) {
+            if let hir::ItemKind::TraitAlias(..) = item.node {
+                return true;
+            }
+        }
+    }
+    false
+}
+
+/// See `ParamEnv` struct definition for details.
 fn param_env<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
                        def_id: DefId)
                        -> ParamEnv<'tcx>
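
`is_trait_alias` above, together with the `VtableTraitAlias` arm earlier in the diff, is plumbing for the unstable trait-alias feature. On a nightly toolchain with the feature gate enabled, the surface syntax being detected looks like this minimal sketch (the alias and function names are made up):

```rust
// Nightly-only illustration: trait aliases are behind a feature gate.
#![feature(trait_alias)]

use std::fmt::Debug;

// A trait alias: `Printable` is not a new trait, just a name for a set of bounds.
trait Printable = Debug + Clone;

fn show<T: Printable>(value: T) {
    // Both `Debug` and `Clone` are usable through the alias.
    let copy = value.clone();
    println!("{:?}", copy);
}

fn main() {
    show(vec![1, 2, 3]);
    show("hello".to_string());
}
```
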
index b49664b6247332a16e7c799a43428ad2fd6bd8ac..449730c9d0601d6c991e7705131b3eb71ed122fe 100644 (file)
@@ -106,7 +106,7 @@ fn compute_components(&self, ty: Ty<'tcx>, out: &mut Vec<Component<'tcx>>) {
             // we simply fallback to the most restrictive rule, which
             // requires that `Pi: 'a` for all `i`.
             ty::Projection(ref data) => {
-                if !data.has_escaping_regions() {
+                if !data.has_escaping_bound_vars() {
                     // best case: no escaping regions, so push the
                     // projection and skip the subtree (thus generating no
                     // constraints for Pi). This defers the choice between
@@ -156,6 +156,7 @@ fn compute_components(&self, ty: Ty<'tcx>, out: &mut Vec<Component<'tcx>>) {
             ty::FnDef(..) |       // OutlivesFunction (*)
             ty::FnPtr(_) |        // OutlivesFunction (*)
             ty::Dynamic(..) |       // OutlivesObject, OutlivesFragment (*)
+            ty::Bound(..) |
             ty::Error => {
                 // (*) Bare functions and traits are both binders. In the
                 // RFC, this means we would add the bound regions to the
index ba5b714a0e7d90f41ca36e1ceebb5d80c79e3f9e..59a66513eef0558a274c023b34e3169eeb183bd0 100644 (file)
@@ -455,7 +455,6 @@ fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>) -> Option<Self::Lif
             ProjectionMismatched(x) => ProjectionMismatched(x),
             ProjectionBoundsLength(x) => ProjectionBoundsLength(x),
             Sorts(ref x) => return tcx.lift(x).map(Sorts),
-            OldStyleLUB(ref x) => return tcx.lift(x).map(OldStyleLUB),
             ExistentialMismatch(ref x) => return tcx.lift(x).map(ExistentialMismatch)
         })
     }
@@ -735,9 +734,19 @@ fn super_fold_with<'gcx: 'tcx, F: TypeFolder<'gcx, 'tcx>>(&self, folder: &mut F)
                 ty::UnnormalizedProjection(data.fold_with(folder))
             }
             ty::Opaque(did, substs) => ty::Opaque(did, substs.fold_with(folder)),
-            ty::Bool | ty::Char | ty::Str | ty::Int(_) |
-            ty::Uint(_) | ty::Float(_) | ty::Error | ty::Infer(_) |
-            ty::Param(..) | ty::Never | ty::Foreign(..) => return self
+
+            ty::Bool |
+            ty::Char |
+            ty::Str |
+            ty::Int(_) |
+            ty::Uint(_) |
+            ty::Float(_) |
+            ty::Error |
+            ty::Infer(_) |
+            ty::Param(..) |
+            ty::Bound(..) |
+            ty::Never |
+            ty::Foreign(..) => return self
         };
 
         if self.sty == sty {
@@ -772,9 +781,19 @@ fn super_visit_with<V: TypeVisitor<'tcx>>(&self, visitor: &mut V) -> bool {
                 data.visit_with(visitor)
             }
             ty::Opaque(_, ref substs) => substs.visit_with(visitor),
-            ty::Bool | ty::Char | ty::Str | ty::Int(_) |
-            ty::Uint(_) | ty::Float(_) | ty::Error | ty::Infer(_) |
-            ty::Param(..) | ty::Never | ty::Foreign(..) => false,
+
+            ty::Bool |
+            ty::Char |
+            ty::Str |
+            ty::Int(_) |
+            ty::Uint(_) |
+            ty::Float(_) |
+            ty::Error |
+            ty::Infer(_) |
+            ty::Bound(..) |
+            ty::Param(..) |
+            ty::Never |
+            ty::Foreign(..) => false,
         }
     }
 
@@ -1003,7 +1022,6 @@ impl<'tcx> TypeFoldable<'tcx> for ty::error::TypeError<'tcx> {
         (ty::error::TypeError::ProjectionBoundsLength)(x),
         (ty::error::TypeError::Sorts)(x),
         (ty::error::TypeError::ExistentialMismatch)(x),
-        (ty::error::TypeError::OldStyleLUB)(x),
     }
 }
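
One detail worth noticing in the `super_fold_with` arm above: leaf kinds, now including `ty::Bound(..)`, simply `return self`, and even for composite kinds a new type is only built when the folded `sty` actually differs (`if self.sty == sty { ... }` just below the added lines). That copy-on-change shape is easy to mimic; the toy tree and names below are invented, not rustc's interner.

```rust
// Illustrative only: a copy-on-change fold that reuses unchanged subtrees.

use std::rc::Rc;

#[derive(Debug)]
enum Tree {
    Leaf(i32),
    Node(Rc<Tree>, Rc<Tree>),
}

/// Apply `f` to every leaf, but hand back the existing allocation whenever
/// the fold did not actually change anything underneath a node.
fn fold(tree: &Rc<Tree>, f: &dyn Fn(i32) -> i32) -> Rc<Tree> {
    match &**tree {
        Tree::Leaf(n) => {
            let new = f(*n);
            if new == *n {
                Rc::clone(tree) // unchanged: reuse the old leaf
            } else {
                Rc::new(Tree::Leaf(new))
            }
        }
        Tree::Node(l, r) => {
            let nl = fold(l, f);
            let nr = fold(r, f);
            if Rc::ptr_eq(&nl, l) && Rc::ptr_eq(&nr, r) {
                Rc::clone(tree) // both children untouched: keep the old node
            } else {
                Rc::new(Tree::Node(nl, nr))
            }
        }
    }
}

fn main() {
    let tree = Rc::new(Tree::Node(
        Rc::new(Tree::Leaf(1)),
        Rc::new(Tree::Leaf(2)),
    ));

    // An identity fold changes nothing, so the very same tree comes back.
    let same = fold(&tree, &|n| n);
    assert!(Rc::ptr_eq(&same, &tree));

    // A real change rebuilds only the spine that changed.
    let bumped = fold(&tree, &|n| if n == 2 { 20 } else { n });
    assert!(!Rc::ptr_eq(&bumped, &tree));
    println!("{:?}", bumped);
}
```
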
 
index 62e38ad9bfa662a84ecd25a2356fce2828332e74..28b58d62175bc21f270ef57a38d478c2df832c54 100644 (file)
@@ -77,6 +77,17 @@ pub fn is_named(&self) -> bool {
             _ => false,
         }
     }
+
+    /// When canonicalizing, we replace unbound inference variables and free
+    /// regions with anonymous late bound regions. This method asserts that
+    /// we have an anonymous late bound region, which hence may refer to
+    /// a canonical variable.
+    pub fn assert_bound_var(&self) -> BoundVar {
+        match *self {
+            BoundRegion::BrAnon(var) => BoundVar::from_u32(var),
+            _ => bug!("bound region is not anonymous"),
+        }
+    }
 }
 
 /// N.B., If you change this, you'll probably want to change the corresponding
@@ -188,6 +199,9 @@ pub enum TyKind<'tcx> {
     /// A type parameter; for example, `T` in `fn f<T>(x: T) {}`.
     Param(ParamTy),
 
+    /// Bound type variable, used only when preparing a trait query.
+    Bound(BoundTy),
+
     /// A type variable used during type checking.
     Infer(InferTy),
 
@@ -636,7 +650,7 @@ pub fn new(def_id: DefId, substs: &'tcx Substs<'tcx>) -> TraitRef<'tcx> {
         TraitRef { def_id: def_id, substs: substs }
     }
 
-    /// Returns a TraitRef of the form `P0: Foo<P1..Pn>` where `Pi`
+    /// Returns a `TraitRef` of the form `P0: Foo<P1..Pn>` where `Pi`
     /// are the parameters defined on trait.
     pub fn identity<'a, 'gcx>(tcx: TyCtxt<'a, 'gcx, 'tcx>, def_id: DefId) -> TraitRef<'tcx> {
         TraitRef {
@@ -727,8 +741,8 @@ pub fn erase_self_ty(tcx: TyCtxt<'a, 'gcx, 'tcx>,
     /// or some placeholder type.
     pub fn with_self_ty(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>, self_ty: Ty<'tcx>)
         -> ty::TraitRef<'tcx>  {
-        // otherwise the escaping regions would be captured by the binder
-        // debug_assert!(!self_ty.has_escaping_regions());
+        // otherwise the escaping vars would be captured by the binder
+        // debug_assert!(!self_ty.has_escaping_bound_vars());
 
         ty::TraitRef {
             def_id: self.def_id,
@@ -755,11 +769,11 @@ pub fn with_self_ty(&self, tcx: TyCtxt<'_, '_, 'tcx>,
     }
 }
 
-/// Binder is a binder for higher-ranked lifetimes. It is part of the
+/// Binder is a binder for higher-ranked lifetimes or types. It is part of the
 /// compiler's representation for things like `for<'a> Fn(&'a isize)`
 /// (which would be represented by the type `PolyTraitRef ==
 /// Binder<TraitRef>`). Note that when we instantiate,
-/// erase, or otherwise "discharge" these bound regions, we change the
+/// erase, or otherwise "discharge" these bound vars, we change the
 /// type from `Binder<T>` to just `T` (see
 /// e.g. `liberate_late_bound_regions`).
 #[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Debug, RustcEncodable, RustcDecodable)]
@@ -767,29 +781,28 @@ pub fn with_self_ty(&self, tcx: TyCtxt<'_, '_, 'tcx>,
 
 impl<T> Binder<T> {
     /// Wraps `value` in a binder, asserting that `value` does not
-    /// contain any bound regions that would be bound by the
+    /// contain any bound vars that would be bound by the
     /// binder. This is commonly used to 'inject' a value T into a
     /// different binding level.
     pub fn dummy<'tcx>(value: T) -> Binder<T>
         where T: TypeFoldable<'tcx>
     {
-        debug_assert!(!value.has_escaping_regions());
+        debug_assert!(!value.has_escaping_bound_vars());
         Binder(value)
     }
 
-    /// Wraps `value` in a binder, binding late-bound regions (if any).
-    pub fn bind<'tcx>(value: T) -> Binder<T>
-    {
+    /// Wraps `value` in a binder, binding higher-ranked vars (if any).
+    pub fn bind<'tcx>(value: T) -> Binder<T> {
         Binder(value)
     }
 
     /// Skips the binder and returns the "bound" value. This is a
     /// risky thing to do because it's easy to get confused about
     /// debruijn indices and the like. It is usually better to
-    /// discharge the binder using `no_late_bound_regions` or
+    /// discharge the binder using `no_bound_vars` or
     /// `replace_late_bound_regions` or something like
     /// that. `skip_binder` is only valid when you are either
-    /// extracting data that has nothing to do with bound regions, you
+    /// extracting data that has nothing to do with bound vars, you
     /// are doing some sort of test that does not involve bound
     /// regions, or you are being very careful about your depth
     /// accounting.
@@ -798,7 +811,7 @@ pub fn bind<'tcx>(value: T) -> Binder<T>
     ///
     /// - extracting the def-id from a PolyTraitRef;
     /// - comparing the self type of a PolyTraitRef to see if it is equal to
-    ///   a type parameter `X`, since the type `X`  does not reference any regions
+    ///   a type parameter `X`, since the type `X` does not reference any regions
     pub fn skip_binder(&self) -> &T {
         &self.0
     }
@@ -820,19 +833,19 @@ pub fn map_bound<F, U>(self, f: F) -> Binder<U>
     }
 
     /// Unwraps and returns the value within, but only if it contains
-    /// no bound regions at all. (In other words, if this binder --
+    /// no bound vars at all. (In other words, if this binder --
     /// and indeed any enclosing binder -- doesn't bind anything at
     /// all.) Otherwise, returns `None`.
     ///
     /// (One could imagine having a method that just unwraps a single
-    /// binder, but permits late-bound regions bound by enclosing
+    /// binder, but permits late-bound vars bound by enclosing
     /// binders, but that would require adjusting the debruijn
     /// indices, and given the shallow binding structure we often use,
     /// would not be that useful.)
-    pub fn no_late_bound_regions<'tcx>(self) -> Option<T>
-        where T : TypeFoldable<'tcx>
+    pub fn no_bound_vars<'tcx>(self) -> Option<T>
+        where T: TypeFoldable<'tcx>
     {
-        if self.skip_binder().has_escaping_regions() {
+        if self.skip_binder().has_escaping_bound_vars() {
             None
         } else {
             Some(self.skip_binder().clone())
@@ -1166,9 +1179,6 @@ pub enum RegionKind {
     /// `ClosureRegionRequirements` that are produced by MIR borrowck.
     /// See `ClosureRegionRequirements` for more details.
     ReClosureBound(RegionVid),
-
-    /// Canonicalized region, used only when preparing a trait query.
-    ReCanonical(BoundTyIndex),
 }
 
 impl<'tcx> serialize::UseSpecializedDecodable for Region<'tcx> {}
@@ -1219,22 +1229,37 @@ pub enum InferTy {
     FreshTy(u32),
     FreshIntTy(u32),
     FreshFloatTy(u32),
-
-    /// Bound type variable, used only when preparing a trait query.
-    BoundTy(BoundTy),
 }
 
 newtype_index! {
-    pub struct BoundTyIndex { .. }
+    pub struct BoundVar { .. }
 }
 
 #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Debug, RustcEncodable, RustcDecodable)]
 pub struct BoundTy {
-    pub level: DebruijnIndex,
-    pub var: BoundTyIndex,
+    pub index: DebruijnIndex,
+    pub var: BoundVar,
+    pub kind: BoundTyKind,
 }
 
-impl_stable_hash_for!(struct BoundTy { level, var });
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Debug, RustcEncodable, RustcDecodable)]
+pub enum BoundTyKind {
+    Anon,
+    Param(InternedString),
+}
+
+impl_stable_hash_for!(struct BoundTy { index, var, kind });
+impl_stable_hash_for!(enum self::BoundTyKind { Anon, Param(a) });
+
+impl BoundTy {
+    pub fn new(index: DebruijnIndex, var: BoundVar) -> Self {
+        BoundTy {
+            index,
+            var,
+            kind: BoundTyKind::Anon,
+        }
+    }
+}
 
 /// A `ProjectionPredicate` for an `ExistentialTraitRef`.
 #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Debug, RustcEncodable, RustcDecodable)]
@@ -1264,7 +1289,7 @@ pub fn with_self_ty(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>,
                         -> ty::ProjectionPredicate<'tcx>
     {
         // otherwise the escaping regions would be captured by the binders
-        debug_assert!(!self_ty.has_escaping_regions());
+        debug_assert!(!self_ty.has_escaping_bound_vars());
 
         ty::ProjectionPredicate {
             projection_ty: ty::ProjectionTy {
@@ -1363,7 +1388,6 @@ pub fn has_name(&self) -> bool {
             RegionKind::ReEmpty => false,
             RegionKind::ReErased => false,
             RegionKind::ReClosureBound(..) => false,
-            RegionKind::ReCanonical(..) => false,
         }
     }
 
@@ -1450,10 +1474,6 @@ pub fn type_flags(&self) -> TypeFlags {
             }
             ty::ReErased => {
             }
-            ty::ReCanonical(..) => {
-                flags = flags | TypeFlags::HAS_FREE_REGIONS;
-                flags = flags | TypeFlags::HAS_CANONICAL_VARS;
-            }
             ty::ReClosureBound(..) => {
                 flags = flags | TypeFlags::HAS_FREE_REGIONS;
             }
@@ -1865,6 +1885,7 @@ pub fn regions(&self) -> Vec<ty::Region<'tcx>> {
             Tuple(..) |
             Foreign(..) |
             Param(_) |
+            Bound(..) |
             Infer(_) |
             Error => {
                 vec![]
@@ -1930,7 +1951,7 @@ pub fn is_trivially_sized(&self, tcx: TyCtxt<'_, '_, 'tcx>) -> bool {
 
             ty::Infer(ty::TyVar(_)) => false,
 
-            ty::Infer(ty::BoundTy(_)) |
+            ty::Bound(_) |
             ty::Infer(ty::FreshTy(_)) |
             ty::Infer(ty::FreshIntTy(_)) |
             ty::Infer(ty::FreshFloatTy(_)) =>
index 02b5d36ecce6ea0dab9088b61e202d5438116a64..c1aed36c92ddf3bfd60170781ea85bf88ff9bb82 100644 (file)
@@ -12,7 +12,7 @@
 
 use hir::def_id::DefId;
 use infer::canonical::Canonical;
-use ty::{self, BoundTyIndex, Lift, List, Ty, TyCtxt};
+use ty::{self, BoundVar, Lift, List, Ty, TyCtxt};
 use ty::fold::{TypeFoldable, TypeFolder, TypeVisitor};
 
 use serialize::{self, Encodable, Encoder, Decodable, Decoder};
@@ -355,7 +355,7 @@ fn subst_spanned<'a, 'gcx>(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>,
                                        span,
                                        root_ty: None,
                                        ty_stack_depth: 0,
-                                       region_binders_passed: 0 };
+                                       binders_passed: 0 };
         (*self).fold_with(&mut folder)
     }
 }
@@ -377,16 +377,16 @@ struct SubstFolder<'a, 'gcx: 'a+'tcx, 'tcx: 'a> {
     ty_stack_depth: usize,
 
     // Number of binders we have passed through while doing the substitution
-    region_binders_passed: u32,
+    binders_passed: u32,
 }
 
 impl<'a, 'gcx, 'tcx> TypeFolder<'gcx, 'tcx> for SubstFolder<'a, 'gcx, 'tcx> {
     fn tcx<'b>(&'b self) -> TyCtxt<'b, 'gcx, 'tcx> { self.tcx }
 
     fn fold_binder<T: TypeFoldable<'tcx>>(&mut self, t: &ty::Binder<T>) -> ty::Binder<T> {
-        self.region_binders_passed += 1;
+        self.binders_passed += 1;
         let t = t.super_fold_with(self);
-        self.region_binders_passed -= 1;
+        self.binders_passed -= 1;
         t
     }
 
@@ -471,12 +471,12 @@ fn ty_for_param(&self, p: ty::ParamTy, source_ty: Ty<'tcx>) -> Ty<'tcx> {
             }
         };
 
-        self.shift_regions_through_binders(ty)
+        self.shift_vars_through_binders(ty)
     }
 
     /// It is sometimes necessary to adjust the debruijn indices during substitution. This occurs
-    /// when we are substituting a type with escaping regions into a context where we have passed
-    /// through region binders. That's quite a mouthful. Let's see an example:
+    /// when we are substituting a type with escaping bound vars into a context where we have
+    /// passed through binders. That's quite a mouthful. Let's see an example:
     ///
     /// ```
     /// type Func<A> = fn(A);
@@ -516,25 +516,25 @@ fn ty_for_param(&self, p: ty::ParamTy, source_ty: Ty<'tcx>) -> Ty<'tcx> {
     /// As indicated in the diagram, here the same type `&'a int` is substituted once, but in the
     /// first case we do not increase the Debruijn index and in the second case we do. The reason
     /// is that only in the second case have we passed through a fn binder.
-    fn shift_regions_through_binders(&self, ty: Ty<'tcx>) -> Ty<'tcx> {
-        debug!("shift_regions(ty={:?}, region_binders_passed={:?}, has_escaping_regions={:?})",
-               ty, self.region_binders_passed, ty.has_escaping_regions());
+    fn shift_vars_through_binders(&self, ty: Ty<'tcx>) -> Ty<'tcx> {
+        debug!("shift_vars(ty={:?}, binders_passed={:?}, has_escaping_bound_vars={:?})",
+               ty, self.binders_passed, ty.has_escaping_bound_vars());
 
-        if self.region_binders_passed == 0 || !ty.has_escaping_regions() {
+        if self.binders_passed == 0 || !ty.has_escaping_bound_vars() {
             return ty;
         }
 
-        let result = ty::fold::shift_regions(self.tcx(), self.region_binders_passed, &ty);
-        debug!("shift_regions: shifted result = {:?}", result);
+        let result = ty::fold::shift_vars(self.tcx(), &ty, self.binders_passed);
+        debug!("shift_vars: shifted result = {:?}", result);
 
         result
     }
 
     fn shift_region_through_binders(&self, region: ty::Region<'tcx>) -> ty::Region<'tcx> {
-        if self.region_binders_passed == 0 || !region.has_escaping_regions() {
+        if self.binders_passed == 0 || !region.has_escaping_bound_vars() {
             return region;
         }
-        self.tcx().mk_region(ty::fold::shift_region(*region, self.region_binders_passed))
+        ty::fold::shift_region(self.tcx, region, self.binders_passed)
     }
 }
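
The `shift_vars_through_binders` comment above is the crux of this file's change: when a value that still contains escaping bound vars is substituted underneath additional binders, those vars' De Bruijn indices must be bumped by `binders_passed`, or the new binders would capture them. A toy shifter over an invented term type (illustrative only, not rustc code):

```rust
// Illustrative only: De Bruijn shifting of escaping bound vars.

#[derive(Debug, PartialEq)]
enum Term {
    Bound(u32),                 // De Bruijn index
    Binder(Box<Term>),          // one binding level
    Pair(Box<Term>, Box<Term>), // any other structure to recurse through
}

/// Bump by `amount` every bound var whose index is >= `depth`,
/// i.e. every var still escaping at the current binding depth.
fn shift_vars(t: &Term, depth: u32, amount: u32) -> Term {
    match t {
        Term::Bound(ix) if *ix >= depth => Term::Bound(*ix + amount),
        Term::Bound(ix) => Term::Bound(*ix),
        Term::Binder(body) => Term::Binder(Box::new(shift_vars(body, depth + 1, amount))),
        Term::Pair(a, b) => Term::Pair(
            Box::new(shift_vars(a, depth, amount)),
            Box::new(shift_vars(b, depth, amount)),
        ),
    }
}

fn main() {
    // Left half mentions a var bound outside the whole value (index 0 escapes);
    // the right half is closed.
    let value = Term::Pair(
        Box::new(Term::Bound(0)),
        Box::new(Term::Binder(Box::new(Term::Bound(0)))),
    );

    // Substituting `value` underneath one extra binder (binders_passed == 1)
    // must shift the escaping var, or the new binder would capture it.
    let shifted = shift_vars(&value, 0, 1);
    let expected = Term::Pair(
        Box::new(Term::Bound(1)),                         // escaping var: bumped
        Box::new(Term::Binder(Box::new(Term::Bound(0)))), // locally bound: untouched
    );
    assert_eq!(shifted, expected);

    // With nothing escaping, shifting is a no-op; the fast path in the code
    // above (`binders_passed == 0 || !has_escaping_bound_vars`) skips it entirely.
    let closed = Term::Binder(Box::new(Term::Bound(0)));
    assert_eq!(shift_vars(&closed, 0, 5), closed);
    println!("ok");
}
```
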
 
@@ -553,15 +553,23 @@ pub fn is_identity(&self) -> bool {
             return false;
         }
 
-        self.value.substs.iter().zip(BoundTyIndex::new(0)..).all(|(kind, cvar)| {
+        self.value.substs.iter().zip(BoundVar::new(0)..).all(|(kind, cvar)| {
             match kind.unpack() {
                 UnpackedKind::Type(ty) => match ty.sty {
-                    ty::Infer(ty::BoundTy(ref b)) => cvar == b.var,
+                    ty::Bound(b) => {
+                        // We only allow a `ty::INNERMOST` index in substitutions.
+                        assert_eq!(b.index, ty::INNERMOST);
+                        cvar == b.var
+                    }
                     _ => false,
                 },
 
                 UnpackedKind::Lifetime(r) => match r {
-                    ty::ReCanonical(cvar1) => cvar == *cvar1,
+                    ty::ReLateBound(index, br) => {
+                        // We only allow a `ty::INNERMOST` index in substitutions.
+                        assert_eq!(*index, ty::INNERMOST);
+                        cvar == br.assert_bound_var()
+                    }
                     _ => false,
                 },
             }
index 00a1bfaacd781affb2dd9971d219f9052c97e2d7..3d0c54d6b0a5b44f5165c4ce7ff9c8e9d9b35097 100644 (file)
@@ -43,7 +43,7 @@ fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
         match self.ty.sty {
             ty::Int(ity) => {
                 let bits = ty::tls::with(|tcx| {
-                    Integer::from_attr(tcx, SignedInt(ity)).size().bits()
+                    Integer::from_attr(&tcx, SignedInt(ity)).size().bits()
                 });
                 let x = self.val as i128;
                 // sign extend the raw representation to be an i128
@@ -62,8 +62,8 @@ pub fn wrap_incr<'a, 'gcx>(self, tcx: TyCtxt<'a, 'gcx, 'tcx>) -> Self {
     }
     pub fn checked_add<'a, 'gcx>(self, tcx: TyCtxt<'a, 'gcx, 'tcx>, n: u128) -> (Self, bool) {
         let (int, signed) = match self.ty.sty {
-            Int(ity) => (Integer::from_attr(tcx, SignedInt(ity)), true),
-            Uint(uty) => (Integer::from_attr(tcx, UnsignedInt(uty)), false),
+            Int(ity) => (Integer::from_attr(&tcx, SignedInt(ity)), true),
+            Uint(uty) => (Integer::from_attr(&tcx, UnsignedInt(uty)), false),
             _ => bug!("non integer discriminant"),
         };
 
@@ -363,7 +363,7 @@ pub fn required_region_bounds(self,
                erased_self_ty,
                predicates);
 
-        assert!(!erased_self_ty.has_escaping_regions());
+        assert!(!erased_self_ty.has_escaping_bound_vars());
 
         traits::elaborate_predicates(self, predicates)
             .filter_map(|predicate| {
@@ -389,7 +389,7 @@ pub fn required_region_bounds(self,
                         // construct such an object, but this seems
                         // correct even if that code changes).
                         let ty::OutlivesPredicate(ref t, ref r) = predicate.skip_binder();
-                        if t == &erased_self_ty && !r.has_escaping_regions() {
+                        if t == &erased_self_ty && !r.has_escaping_bound_vars() {
                             Some(*r)
                         } else {
                             None
@@ -527,7 +527,7 @@ pub fn is_closure(self, def_id: DefId) -> bool {
         self.def_key(def_id).disambiguated_data.data == DefPathData::ClosureExpr
     }
 
-    /// True if `def_id` refers to a trait (e.g., `trait Foo { ... }`).
+    /// True if `def_id` refers to a trait (i.e., `trait Foo { ... }`).
     pub fn is_trait(self, def_id: DefId) -> bool {
         if let DefPathData::Trait(_) = self.def_key(def_id).disambiguated_data.data {
             true
@@ -951,7 +951,7 @@ fn needs_drop_raw<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
 
         // Can refer to a type which may drop.
         // FIXME(eddyb) check this against a ParamEnv.
-        ty::Dynamic(..) | ty::Projection(..) | ty::Param(_) |
+        ty::Dynamic(..) | ty::Projection(..) | ty::Param(_) | ty::Bound(..) |
         ty::Opaque(..) | ty::Infer(_) | ty::Error => true,
 
         ty::UnnormalizedProjection(..) => bug!("only used with chalk-engine"),
index 47fbfba87748ba073270d06b01e64e0db7ce2cb2..284c595ee2d965ed6177317bf611106f97725934 100644 (file)
@@ -82,7 +82,7 @@ fn push_subtypes<'tcx>(stack: &mut TypeWalkerStack<'tcx>, parent_ty: Ty<'tcx>) {
     match parent_ty.sty {
         ty::Bool | ty::Char | ty::Int(_) | ty::Uint(_) | ty::Float(_) |
         ty::Str | ty::Infer(_) | ty::Param(_) | ty::Never | ty::Error |
-        ty::Foreign(..) => {
+        ty::Bound(..) | ty::Foreign(..) => {
         }
         ty::Array(ty, len) => {
             push_const(stack, len);
index 27747970f76b2ca74caf955f9198ba8a85baa647..1336eac63f880514902cac8abb1e8b298beb40a7 100644 (file)
@@ -158,7 +158,7 @@ fn normalize(&mut self) -> Vec<traits::PredicateObligation<'tcx>> {
         let infcx = &mut self.infcx;
         let param_env = self.param_env;
         self.out.iter()
-                .inspect(|pred| assert!(!pred.has_escaping_regions()))
+                .inspect(|pred| assert!(!pred.has_escaping_bound_vars()))
                 .flat_map(|pred| {
                     let mut selcx = traits::SelectionContext::new(infcx);
                     let pred = traits::normalize(&mut selcx, param_env, cause.clone(), pred);
@@ -190,7 +190,7 @@ fn compute_trait_ref(&mut self, trait_ref: &ty::TraitRef<'tcx>, elaborate: Elabo
 
         self.out.extend(
             trait_ref.substs.types()
-                            .filter(|ty| !ty.has_escaping_regions())
+                            .filter(|ty| !ty.has_escaping_bound_vars())
                             .map(|ty| traits::Obligation::new(cause.clone(),
                                                               param_env,
                                                               ty::Predicate::WellFormed(ty))));
@@ -205,7 +205,7 @@ fn compute_projection(&mut self, data: ty::ProjectionTy<'tcx>) {
         let trait_ref = data.trait_ref(self.infcx.tcx);
         self.compute_trait_ref(&trait_ref, Elaborate::None);
 
-        if !data.has_escaping_regions() {
+        if !data.has_escaping_bound_vars() {
             let predicate = trait_ref.to_predicate();
             let cause = self.cause(traits::ProjectionWf(data));
             self.out.push(traits::Obligation::new(cause, self.param_env, predicate));
@@ -229,7 +229,7 @@ fn compute_const(&mut self, constant: &'tcx ty::Const<'tcx>) {
     }
 
     fn require_sized(&mut self, subty: Ty<'tcx>, cause: traits::ObligationCauseCode<'tcx>) {
-        if !subty.has_escaping_regions() {
+        if !subty.has_escaping_bound_vars() {
             let cause = self.cause(cause);
             let trait_ref = ty::TraitRef {
                 def_id: self.infcx.tcx.require_lang_item(lang_items::SizedTraitLangItem),
@@ -258,6 +258,7 @@ fn compute(&mut self, ty0: Ty<'tcx>) -> bool {
                 ty::GeneratorWitness(..) |
                 ty::Never |
                 ty::Param(_) |
+                ty::Bound(..) |
                 ty::Foreign(..) => {
                     // WfScalar, WfParameter, etc
                 }
@@ -299,7 +300,7 @@ fn compute(&mut self, ty0: Ty<'tcx>) -> bool {
 
                 ty::Ref(r, rty, _) => {
                     // WfReference
-                    if !r.has_escaping_regions() && !rty.has_escaping_regions() {
+                    if !r.has_escaping_bound_vars() && !rty.has_escaping_bound_vars() {
                         let cause = self.cause(traits::ReferenceOutlivesReferent(ty));
                         self.out.push(
                             traits::Obligation::new(
@@ -450,7 +451,7 @@ fn nominal_obligations(&mut self,
                   .map(|pred| traits::Obligation::new(cause.clone(),
                                                       self.param_env,
                                                       pred))
-                  .filter(|pred| !pred.has_escaping_regions())
+                  .filter(|pred| !pred.has_escaping_bound_vars())
                   .collect()
     }
 
@@ -489,7 +490,7 @@ fn from_object_ty(&mut self, ty: Ty<'tcx>,
         // Note: in fact we only permit builtin traits, not `Bar<'d>`, I
         // am looking forward to the future here.
 
-        if !data.has_escaping_regions() {
+        if !data.has_escaping_bound_vars() {
             let implicit_bounds =
                 object_region_bounds(self.infcx.tcx, data);
 
index abdd7fd8d40bf0d220ad2022d1fa47d6f12eb243..5ec4f55b142ebbfcac4c74ae4f0eb63de550c5dd 100644 (file)
@@ -16,7 +16,7 @@
 use ty::{BrAnon, BrEnv, BrFresh, BrNamed};
 use ty::{Bool, Char, Adt};
 use ty::{Error, Str, Array, Slice, Float, FnDef, FnPtr};
-use ty::{Param, RawPtr, Ref, Never, Tuple};
+use ty::{Param, Bound, RawPtr, Ref, Never, Tuple};
 use ty::{Closure, Generator, GeneratorWitness, Foreign, Projection, Opaque};
 use ty::{UnnormalizedProjection, Dynamic, Int, Uint, Infer};
 use ty::{self, RegionVid, Ty, TyCtxt, TypeFoldable, GenericParamCount, GenericParamDefKind};
@@ -790,9 +790,6 @@ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
                 ty::ReEarlyBound(ref data) => {
                     write!(f, "{}", data.name)
                 }
-                ty::ReCanonical(_) => {
-                    write!(f, "'_")
-                }
                 ty::ReLateBound(_, br) |
                 ty::ReFree(ty::FreeRegion { bound_region: br, .. }) |
                 ty::RePlaceholder(ty::Placeholder { name: br, .. }) => {
@@ -860,10 +857,6 @@ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
                     write!(f, "{:?}", vid)
                 }
 
-                ty::ReCanonical(c) => {
-                    write!(f, "'?{}", c.index())
-                }
-
                 ty::RePlaceholder(placeholder) => {
                     write!(f, "RePlaceholder({:?})", placeholder)
                 }
@@ -976,7 +969,6 @@ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
                     ty::TyVar(_) => write!(f, "_"),
                     ty::IntVar(_) => write!(f, "{}", "{integer}"),
                     ty::FloatVar(_) => write!(f, "{}", "{float}"),
-                    ty::BoundTy(_) => write!(f, "_"),
                     ty::FreshTy(v) => write!(f, "FreshTy({})", v),
                     ty::FreshIntTy(v) => write!(f, "FreshIntTy({})", v),
                     ty::FreshFloatTy(v) => write!(f, "FreshFloatTy({})", v)
@@ -988,7 +980,6 @@ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
                 ty::TyVar(ref v) => write!(f, "{:?}", v),
                 ty::IntVar(ref v) => write!(f, "{:?}", v),
                 ty::FloatVar(ref v) => write!(f, "{:?}", v),
-                ty::BoundTy(v) => write!(f, "?{:?}", v.var.index()),
                 ty::FreshTy(v) => write!(f, "FreshTy({:?})", v),
                 ty::FreshIntTy(v) => write!(f, "FreshIntTy({:?})", v),
                 ty::FreshFloatTy(v) => write!(f, "FreshFloatTy({:?})", v)
@@ -1119,6 +1110,19 @@ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
                 Infer(infer_ty) => write!(f, "{}", infer_ty),
                 Error => write!(f, "[type error]"),
                 Param(ref param_ty) => write!(f, "{}", param_ty),
+                Bound(bound_ty) => {
+                    match bound_ty.kind {
+                        ty::BoundTyKind::Anon => {
+                            if bound_ty.index == ty::INNERMOST {
+                                write!(f, "?{}", bound_ty.var.index())
+                            } else {
+                                write!(f, "?{}_{}", bound_ty.index.index(), bound_ty.var.index())
+                            }
+                        }
+
+                        ty::BoundTyKind::Param(p) => write!(f, "{}", p),
+                    }
+                }
                 Adt(def, substs) => cx.parameterized(f, substs, def.did, &[]),
                 Dynamic(data, r) => {
                     let r = r.print_to_string(cx);
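
The new `Bound(bound_ty)` arm in the formatting impls above prints an anonymous bound type variable as `?var` when it belongs to the innermost binder and as `?index_var` otherwise, falling back to the parameter name for `BoundTyKind::Param`. A minimal, self-contained `Display` impl with the same formatting rule (a toy `BoundTy` and `INNERMOST`, not the rustc types) looks like this:

use std::fmt;

// Illustrative stand-ins for `ty::BoundTy` / `ty::INNERMOST`, only to show the
// formatting rule added above.
const INNERMOST: u32 = 0;

struct BoundTy {
    index: u32, // de Bruijn index of the binder
    var: u32,   // which variable under that binder
}

impl fmt::Display for BoundTy {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        if self.index == INNERMOST {
            write!(f, "?{}", self.var)
        } else {
            write!(f, "?{}_{}", self.index, self.var)
        }
    }
}

fn main() {
    println!("{}", BoundTy { index: 0, var: 2 }); // ?2
    println!("{}", BoundTy { index: 1, var: 2 }); // ?1_2
}
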
index 1fc9ee07a1ae49e03aac74689c59cfa858871359..a802729e3fbdbfe24317565dc98ffc9d35e51d80 100644 (file)
@@ -426,7 +426,6 @@ pub fn borrow_of_local_data<'tcx>(cmt: &mc::cmt_<'tcx>) -> bool {
 
             // These cannot exist in borrowck
             RegionKind::ReVar(..) |
-            RegionKind::ReCanonical(..) |
             RegionKind::RePlaceholder(..) |
             RegionKind::ReClosureBound(..) |
             RegionKind::ReErased => span_bug!(borrow_span,
index 1f83c30a3876a215b7a9c8fa584bc334bea2496a..78a31ed668fcae090b32f31f3fee1e1bd71fbeea 100644 (file)
@@ -363,7 +363,6 @@ fn guarantee_valid(&mut self,
 
                     ty::ReStatic => self.item_ub,
 
-                    ty::ReCanonical(_) |
                     ty::ReEmpty |
                     ty::ReClosureBound(..) |
                     ty::ReLateBound(..) |
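
These two one-line removals (and the earlier ones) exist because `RegionKind` is matched exhaustively: deleting the `ReCanonical` variant turns every match arm that still names it into a compile error, which is how all of these call sites are found. A toy analogue, not rustc's enum:

#[allow(dead_code)]
enum RegionKind {
    ReStatic,
    ReEmpty,
    ReErased,
    // ReCanonical used to live here; after its removal, any `match` arm that
    // still mentions it fails to compile and has to be deleted, as above.
}

fn describe(r: &RegionKind) -> &'static str {
    match r {
        RegionKind::ReStatic => "'static",
        RegionKind::ReEmpty => "empty region",
        RegionKind::ReErased => "erased region",
    }
}

fn main() {
    println!("{}", describe(&RegionKind::ReStatic));
}
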
index 7c7662a88de535a1af92a71c40a0617a185fa058..e50534a4e1dc95fc7d19eaf5f3c6d4f0e691046c 100644 (file)
@@ -19,7 +19,7 @@
 use type_of::{LayoutLlvmExt, PointerKind};
 use value::Value;
 
-use rustc_target::abi::{LayoutOf, Size, TyLayout};
+use rustc_target::abi::{LayoutOf, Size, TyLayout, Abi as LayoutAbi};
 use rustc::ty::{self, Ty};
 use rustc::ty::layout;
 
@@ -302,21 +302,49 @@ fn new_vtable(cx: &CodegenCx<'ll, 'tcx>,
         FnType::new_internal(cx, sig, extra_args, |ty, arg_idx| {
             let mut layout = cx.layout_of(ty);
             // Don't pass the vtable, it's not an argument of the virtual fn.
-            // Instead, pass just the (thin pointer) first field of `*dyn Trait`.
+            // Instead, pass just the data pointer, but give it the type `*const/mut dyn Trait`
+            // or `&/&mut dyn Trait` because this is special-cased elsewhere in codegen
             if arg_idx == Some(0) {
-                // FIXME(eddyb) `layout.field(cx, 0)` is not enough because e.g.
-                // `Box<dyn Trait>` has a few newtype wrappers around the raw
-                // pointer, so we'd have to "dig down" to find `*dyn Trait`.
-                let pointee = if layout.is_unsized() {
-                    layout.ty
+                let fat_pointer_ty = if layout.is_unsized() {
+                    // unsized `self` is passed as a pointer to `self`
+                    // FIXME (mikeyhew) change this to use &own if it is ever added to the language
+                    cx.tcx.mk_mut_ptr(layout.ty)
                 } else {
-                    layout.ty.builtin_deref(true)
-                        .unwrap_or_else(|| {
-                            bug!("FnType::new_vtable: non-pointer self {:?}", layout)
-                        }).ty
+                    match layout.abi {
+                        LayoutAbi::ScalarPair(..) => (),
+                        _ => bug!("receiver type has unsupported layout: {:?}", layout)
+                    }
+
+                    // In the case of Rc<Self>, we need to explicitly pass a *mut RcBox<Self>
+                    // with a Scalar (not ScalarPair) ABI. This is a hack that is understood
+                    // elsewhere in the compiler as a method on a `dyn Trait`.
+                    // To get the type `*mut RcBox<Self>`, we just keep unwrapping newtypes until we
+                    // get a built-in pointer type
+                    let mut fat_pointer_layout = layout;
+                    'descend_newtypes: while !fat_pointer_layout.ty.is_unsafe_ptr()
+                        && !fat_pointer_layout.ty.is_region_ptr()
+                    {
+                        'iter_fields: for i in 0..fat_pointer_layout.fields.count() {
+                            let field_layout = fat_pointer_layout.field(cx, i);
+
+                            if !field_layout.is_zst() {
+                                fat_pointer_layout = field_layout;
+                                continue 'descend_newtypes
+                            }
+                        }
+
+                        bug!("receiver has no non-zero-sized fields {:?}", fat_pointer_layout);
+                    }
+
+                    fat_pointer_layout.ty
                 };
-                let fat_ptr_ty = cx.tcx.mk_mut_ptr(pointee);
-                layout = cx.layout_of(fat_ptr_ty).field(cx, 0);
+
+                // we now have a type like `*mut RcBox<dyn Trait>`
+                // change its layout to that of `*mut ()`, a thin pointer, but keep the same type
+                // this is understood as a special case elsewhere in the compiler
+                let unit_pointer_ty = cx.tcx.mk_mut_ptr(cx.tcx.mk_unit());
+                layout = cx.layout_of(unit_pointer_ty);
+                layout.ty = fat_pointer_ty;
             }
             ArgType::new(layout)
         })
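
The rewritten `new_vtable` above no longer calls `builtin_deref` on the receiver; it keeps the receiver's own type (`&dyn Trait`, `*mut dyn Trait`, `Rc<Self>`, ...) and descends through newtype wrappers until it reaches a built-in pointer, then gives that type the layout of a thin pointer. The control flow hinges on a labelled outer loop so that stepping into a field restarts the descent. Below is a standalone toy of just that loop shape; `Layout` here is a hand-rolled stand-in for rustc's `TyLayout`, invented for illustration.

#[derive(Clone, Debug)]
struct Layout {
    name: &'static str,
    is_pointer: bool,
    is_zst: bool,
    fields: Vec<Layout>,
}

fn descend_to_pointer(mut layout: Layout) -> Layout {
    'descend_newtypes: while !layout.is_pointer {
        for i in 0..layout.fields.len() {
            if !layout.fields[i].is_zst {
                // Step into the first non-zero-sized field and restart the
                // outer loop from there.
                layout = layout.fields[i].clone();
                continue 'descend_newtypes;
            }
        }
        panic!("no non-zero-sized fields in {}", layout.name);
    }
    layout
}

fn main() {
    // Shaped roughly like Rc<dyn Trait> -> NonNull<RcBox<..>> -> *mut RcBox<..>.
    let rc = Layout {
        name: "Rc<dyn Trait>",
        is_pointer: false,
        is_zst: false,
        fields: vec![
            Layout { name: "PhantomData", is_pointer: false, is_zst: true, fields: vec![] },
            Layout {
                name: "NonNull<RcBox<dyn Trait>>",
                is_pointer: false,
                is_zst: false,
                fields: vec![Layout {
                    name: "*mut RcBox<dyn Trait>",
                    is_pointer: true,
                    is_zst: false,
                    fields: vec![],
                }],
            },
        ],
    };
    println!("{}", descend_to_pointer(rc).name); // *mut RcBox<dyn Trait>
}
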
index af9efc6d7c4173f905254795be1d431a9255258e..ce4cb1ea3a0423cac94695cb53dba75ab751be8d 100644 (file)
@@ -52,28 +52,6 @@ enum Addition {
     },
 }
 
-pub fn find_library(name: &str, search_paths: &[PathBuf], sess: &Session)
-                    -> PathBuf {
-    // On Windows, static libraries sometimes show up as libfoo.a and other
-    // times show up as foo.lib
-    let oslibname = format!("{}{}{}",
-                            sess.target.target.options.staticlib_prefix,
-                            name,
-                            sess.target.target.options.staticlib_suffix);
-    let unixlibname = format!("lib{}.a", name);
-
-    for path in search_paths {
-        debug!("looking for {} inside {:?}", name, path);
-        let test = path.join(&oslibname);
-        if test.exists() { return test }
-        if oslibname != unixlibname {
-            let test = path.join(&unixlibname);
-            if test.exists() { return test }
-        }
-    }
-    sess.fatal(&format!("could not find native static library `{}`, \
-                         perhaps an -L flag is missing?", name));
-}
 
 fn is_relevant_child(c: &Child) -> bool {
     match c.name() {
@@ -128,7 +106,7 @@ fn src_archive(&mut self) -> Option<&ArchiveRO> {
     /// Adds all of the contents of a native library to this archive. This will
     /// search in the relevant locations for a library named `name`.
     pub fn add_native_library(&mut self, name: &str) {
-        let location = find_library(name, &self.config.lib_search_paths,
+        let location = ::rustc_codegen_utils::find_library(name, &self.config.lib_search_paths,
                                     self.config.sess);
         self.add_archive(&location, |_| false).unwrap_or_else(|e| {
             self.config.sess.fatal(&format!("failed to add native library {}: {}",
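
`find_library` disappears from the archive builder and its call site now reaches for `::rustc_codegen_utils::find_library`, part of the wider move of linker helpers into `rustc_codegen_utils` seen below. The lookup itself is simple: try the target's native static-library name first, then the unix `libNAME.a` spelling, in each search path. A self-contained sketch of that search, with plain arguments standing in for the `Session` and target options the real helper uses:

use std::path::PathBuf;

// Standalone sketch of the lookup performed by `find_library` (the real helper
// takes a `Session` and calls `sess.fatal` when nothing is found).
fn find_static_lib(name: &str, prefix: &str, suffix: &str, search_paths: &[PathBuf]) -> Option<PathBuf> {
    // On Windows, static libraries sometimes show up as libfoo.a and other
    // times as foo.lib, so both spellings are tried.
    let oslibname = format!("{}{}{}", prefix, name, suffix);
    let unixlibname = format!("lib{}.a", name);

    for path in search_paths {
        let candidate = path.join(&oslibname);
        if candidate.exists() {
            return Some(candidate);
        }
        if oslibname != unixlibname {
            let candidate = path.join(&unixlibname);
            if candidate.exists() {
                return Some(candidate);
            }
        }
    }
    None
}

fn main() {
    let paths = [PathBuf::from("/usr/lib"), PathBuf::from("/usr/lib/x86_64-linux-gnu")];
    match find_static_lib("z", "lib", ".a", &paths) {
        Some(p) => println!("found {}", p.display()),
        None => println!("could not find native static library `z`, perhaps an -L flag is missing?"),
    }
}
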
diff --git a/src/librustc_codegen_llvm/back/command.rs b/src/librustc_codegen_llvm/back/command.rs
deleted file mode 100644 (file)
index 9ebbdd7..0000000
+++ /dev/null
@@ -1,175 +0,0 @@
-// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-//! A thin wrapper around `Command` in the standard library which allows us to
-//! read the arguments that are built up.
-
-use std::ffi::{OsStr, OsString};
-use std::fmt;
-use std::io;
-use std::mem;
-use std::process::{self, Output};
-
-use rustc_target::spec::LldFlavor;
-
-#[derive(Clone)]
-pub struct Command {
-    program: Program,
-    args: Vec<OsString>,
-    env: Vec<(OsString, OsString)>,
-}
-
-#[derive(Clone)]
-enum Program {
-    Normal(OsString),
-    CmdBatScript(OsString),
-    Lld(OsString, LldFlavor)
-}
-
-impl Command {
-    pub fn new<P: AsRef<OsStr>>(program: P) -> Command {
-        Command::_new(Program::Normal(program.as_ref().to_owned()))
-    }
-
-    pub fn bat_script<P: AsRef<OsStr>>(program: P) -> Command {
-        Command::_new(Program::CmdBatScript(program.as_ref().to_owned()))
-    }
-
-    pub fn lld<P: AsRef<OsStr>>(program: P, flavor: LldFlavor) -> Command {
-        Command::_new(Program::Lld(program.as_ref().to_owned(), flavor))
-    }
-
-    fn _new(program: Program) -> Command {
-        Command {
-            program,
-            args: Vec::new(),
-            env: Vec::new(),
-        }
-    }
-
-    pub fn arg<P: AsRef<OsStr>>(&mut self, arg: P) -> &mut Command {
-        self._arg(arg.as_ref());
-        self
-    }
-
-    pub fn args<I>(&mut self, args: I) -> &mut Command
-        where I: IntoIterator,
-              I::Item: AsRef<OsStr>,
-    {
-        for arg in args {
-            self._arg(arg.as_ref());
-        }
-        self
-    }
-
-    fn _arg(&mut self, arg: &OsStr) {
-        self.args.push(arg.to_owned());
-    }
-
-    pub fn env<K, V>(&mut self, key: K, value: V) -> &mut Command
-        where K: AsRef<OsStr>,
-              V: AsRef<OsStr>
-    {
-        self._env(key.as_ref(), value.as_ref());
-        self
-    }
-
-    fn _env(&mut self, key: &OsStr, value: &OsStr) {
-        self.env.push((key.to_owned(), value.to_owned()));
-    }
-
-    pub fn output(&mut self) -> io::Result<Output> {
-        self.command().output()
-    }
-
-    pub fn command(&self) -> process::Command {
-        let mut ret = match self.program {
-            Program::Normal(ref p) => process::Command::new(p),
-            Program::CmdBatScript(ref p) => {
-                let mut c = process::Command::new("cmd");
-                c.arg("/c").arg(p);
-                c
-            }
-            Program::Lld(ref p, flavor) => {
-                let mut c = process::Command::new(p);
-                c.arg("-flavor").arg(match flavor {
-                    LldFlavor::Wasm => "wasm",
-                    LldFlavor::Ld => "gnu",
-                    LldFlavor::Link => "link",
-                    LldFlavor::Ld64 => "darwin",
-                });
-                c
-            }
-        };
-        ret.args(&self.args);
-        ret.envs(self.env.clone());
-        return ret
-    }
-
-    // extensions
-
-    pub fn get_args(&self) -> &[OsString] {
-        &self.args
-    }
-
-    pub fn take_args(&mut self) -> Vec<OsString> {
-        mem::replace(&mut self.args, Vec::new())
-    }
-
-    /// Returns `true` if we're pretty sure that this'll blow OS spawn limits,
-    /// or `false` if we should attempt to spawn and see what the OS says.
-    pub fn very_likely_to_exceed_some_spawn_limit(&self) -> bool {
-        // We mostly only care about Windows in this method, on Unix the limits
-        // can be gargantuan anyway so we're pretty unlikely to hit them
-        if cfg!(unix) {
-            return false
-        }
-
-        // Right now LLD doesn't support the `@` syntax of passing an argument
-        // through files, so regardless of the platform we try to go to the OS
-        // on this one.
-        if let Program::Lld(..) = self.program {
-            return false
-        }
-
-        // Ok so on Windows the command line of a spawned process is limited to
-        // 32,768 characters [1]. Unfortunately we don't actually have access to that
-        // as it's calculated just before spawning. Instead we perform a
-        // poor-man's guess as to how long our command line will be. We're
-        // assuming here that we don't have to escape every character...
-        //
-        // Turns out though that `cmd.exe` has even smaller limits, 8192
-        // characters [2]. Linkers can often be batch scripts (for example
-        // Emscripten, Gecko's current build system) which means that we're
-        // running through batch scripts. These linkers often just forward
-        // arguments elsewhere (and maybe tack on more), so if we blow 8192
-        // bytes we'll typically cause them to blow as well.
-        //
-        // Basically as a result just perform an inflated estimate of what our
-        // command line will look like and test if it's > 8192 (we actually
-        // test against 6k to artificially inflate our estimate). If all else
-        // fails we'll fall back to the normal unix logic of testing the OS
-        // error code if we fail to spawn and automatically re-spawning the
-        // linker with smaller arguments.
-        //
-        // [1]: https://msdn.microsoft.com/en-us/library/windows/desktop/ms682425(v=vs.85).aspx
-        // [2]: https://blogs.msdn.microsoft.com/oldnewthing/20031210-00/?p=41553
-
-        let estimated_command_line_len =
-            self.args.iter().map(|a| a.len()).sum::<usize>();
-        estimated_command_line_len > 1024 * 6
-    }
-}
-
-impl fmt::Debug for Command {
-    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
-        self.command().fmt(f)
-    }
-}
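
`back/command.rs` is deleted here because the wrapper now lives in `rustc_codegen_utils::command` (see the import changes just below); the idea is unchanged: record the program and arguments ourselves so they can be inspected or length-estimated, and only materialize a real `std::process::Command` when spawning. A trimmed-down, compilable sketch of that idea (it omits the cmd.exe and LLD-flavor handling, so it is not a drop-in replacement):

use std::ffi::{OsStr, OsString};
use std::process;

// Minimal sketch of the removed wrapper; names mirror the deleted file.
struct Command {
    program: OsString,
    args: Vec<OsString>,
}

impl Command {
    fn new<P: AsRef<OsStr>>(program: P) -> Command {
        Command { program: program.as_ref().to_owned(), args: Vec::new() }
    }

    fn arg<A: AsRef<OsStr>>(&mut self, arg: A) -> &mut Command {
        self.args.push(arg.as_ref().to_owned());
        self
    }

    // Poor-man's length estimate, as in `very_likely_to_exceed_some_spawn_limit`
    // above: sum the raw argument lengths and compare against a deflated limit.
    fn estimated_command_line_len(&self) -> usize {
        self.args.iter().map(|a| a.len()).sum()
    }

    // Only here do we build the real std::process::Command.
    fn command(&self) -> process::Command {
        let mut ret = process::Command::new(&self.program);
        ret.args(&self.args);
        ret
    }
}

fn main() {
    let mut cmd = Command::new("cc");
    cmd.arg("-o").arg("hello").arg("hello.o");
    println!("estimated length: {}", cmd.estimated_command_line_len());
    println!("{:?}", cmd.command());
}
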
index 86c6a5e65b0e9c4dc2dc9633c0ffa196c9b6d935..dd95c3d986299491466baebddfd2d84a1c39e6b9 100644 (file)
@@ -12,8 +12,6 @@
 use cc::windows_registry;
 use super::archive::{ArchiveBuilder, ArchiveConfig};
 use super::bytecode::RLIB_BYTECODE_EXTENSION;
-use super::linker::Linker;
-use super::command::Command;
 use super::rpath::RPathConfig;
 use super::rpath;
 use metadata::METADATA_FILENAME;
@@ -31,6 +29,8 @@
 use tempfile::{Builder as TempFileBuilder, TempDir};
 use rustc_target::spec::{PanicStrategy, RelroLevel, LinkerFlavor};
 use rustc_data_structures::fx::FxHashSet;
+use rustc_codegen_utils::linker::Linker;
+use rustc_codegen_utils::command::Command;
 use context::get_reloc_model;
 use llvm;
 
@@ -701,7 +701,8 @@ fn link_natively(sess: &Session,
     }
 
     {
-        let mut linker = codegen_results.linker_info.to_linker(cmd, &sess, flavor);
+        let target_cpu = ::llvm_util::target_cpu(sess);
+        let mut linker = codegen_results.linker_info.to_linker(cmd, &sess, flavor, target_cpu);
         link_args(&mut *linker, flavor, sess, crate_type, tmpdir,
                   out_filename, codegen_results);
         cmd = linker.finalize();
diff --git a/src/librustc_codegen_llvm/back/linker.rs b/src/librustc_codegen_llvm/back/linker.rs
deleted file mode 100644 (file)
index e18c8b9..0000000
+++ /dev/null
@@ -1,1095 +0,0 @@
-// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-use rustc_data_structures::fx::FxHashMap;
-use std::ffi::{OsStr, OsString};
-use std::fs::{self, File};
-use std::io::prelude::*;
-use std::io::{self, BufWriter};
-use std::path::{Path, PathBuf};
-
-use back::archive;
-use back::command::Command;
-use back::symbol_export;
-use rustc::hir::def_id::{LOCAL_CRATE, CrateNum};
-use rustc::middle::dependency_format::Linkage;
-use rustc::session::Session;
-use rustc::session::config::{self, CrateType, OptLevel, DebugInfo,
-                             CrossLangLto};
-use rustc::ty::TyCtxt;
-use rustc_target::spec::{LinkerFlavor, LldFlavor};
-use serialize::{json, Encoder};
-use llvm_util;
-
-/// For all the linkers we support, and information they might
-/// need out of the shared crate context before we get rid of it.
-pub struct LinkerInfo {
-    exports: FxHashMap<CrateType, Vec<String>>,
-}
-
-impl LinkerInfo {
-    pub fn new(tcx: TyCtxt) -> LinkerInfo {
-        LinkerInfo {
-            exports: tcx.sess.crate_types.borrow().iter().map(|&c| {
-                (c, exported_symbols(tcx, c))
-            }).collect(),
-        }
-    }
-
-    pub fn to_linker<'a>(&'a self,
-                         cmd: Command,
-                         sess: &'a Session,
-                         flavor: LinkerFlavor) -> Box<dyn Linker+'a> {
-        match flavor {
-            LinkerFlavor::Lld(LldFlavor::Link) |
-            LinkerFlavor::Msvc => {
-                Box::new(MsvcLinker {
-                    cmd,
-                    sess,
-                    info: self
-                }) as Box<dyn Linker>
-            }
-            LinkerFlavor::Em =>  {
-                Box::new(EmLinker {
-                    cmd,
-                    sess,
-                    info: self
-                }) as Box<dyn Linker>
-            }
-            LinkerFlavor::Gcc =>  {
-                Box::new(GccLinker {
-                    cmd,
-                    sess,
-                    info: self,
-                    hinted_static: false,
-                    is_ld: false,
-                }) as Box<dyn Linker>
-            }
-
-            LinkerFlavor::Lld(LldFlavor::Ld) |
-            LinkerFlavor::Lld(LldFlavor::Ld64) |
-            LinkerFlavor::Ld => {
-                Box::new(GccLinker {
-                    cmd,
-                    sess,
-                    info: self,
-                    hinted_static: false,
-                    is_ld: true,
-                }) as Box<dyn Linker>
-            }
-
-            LinkerFlavor::Lld(LldFlavor::Wasm) => {
-                Box::new(WasmLd {
-                    cmd,
-                    sess,
-                    info: self
-                }) as Box<dyn Linker>
-            }
-        }
-    }
-}
-
-/// Linker abstraction used by back::link to build up the command to invoke a
-/// linker.
-///
-/// This trait is the total list of requirements needed by `back::link` and
-/// represents the meaning of each option being passed down. This trait is then
-/// used to dispatch on whether a GNU-like linker (generally `ld.exe`) or an
-/// MSVC linker (e.g. `link.exe`) is being used.
-pub trait Linker {
-    fn link_dylib(&mut self, lib: &str);
-    fn link_rust_dylib(&mut self, lib: &str, path: &Path);
-    fn link_framework(&mut self, framework: &str);
-    fn link_staticlib(&mut self, lib: &str);
-    fn link_rlib(&mut self, lib: &Path);
-    fn link_whole_rlib(&mut self, lib: &Path);
-    fn link_whole_staticlib(&mut self, lib: &str, search_path: &[PathBuf]);
-    fn include_path(&mut self, path: &Path);
-    fn framework_path(&mut self, path: &Path);
-    fn output_filename(&mut self, path: &Path);
-    fn add_object(&mut self, path: &Path);
-    fn gc_sections(&mut self, keep_metadata: bool);
-    fn position_independent_executable(&mut self);
-    fn no_position_independent_executable(&mut self);
-    fn full_relro(&mut self);
-    fn partial_relro(&mut self);
-    fn no_relro(&mut self);
-    fn optimize(&mut self);
-    fn pgo_gen(&mut self);
-    fn debuginfo(&mut self);
-    fn no_default_libraries(&mut self);
-    fn build_dylib(&mut self, out_filename: &Path);
-    fn build_static_executable(&mut self);
-    fn args(&mut self, args: &[String]);
-    fn export_symbols(&mut self, tmpdir: &Path, crate_type: CrateType);
-    fn subsystem(&mut self, subsystem: &str);
-    fn group_start(&mut self);
-    fn group_end(&mut self);
-    fn cross_lang_lto(&mut self);
-    // Should have been finalize(self), but we don't support self-by-value on trait objects (yet?).
-    fn finalize(&mut self) -> Command;
-}
-
-pub struct GccLinker<'a> {
-    cmd: Command,
-    sess: &'a Session,
-    info: &'a LinkerInfo,
-    hinted_static: bool, // Keeps track of the current hinting mode.
-    // Link as ld
-    is_ld: bool,
-}
-
-impl<'a> GccLinker<'a> {
-    /// Argument that must be passed *directly* to the linker
-    ///
-    /// These arguments need to be prepended with '-Wl,' when a gcc-style linker is used
-    fn linker_arg<S>(&mut self, arg: S) -> &mut Self
-        where S: AsRef<OsStr>
-    {
-        if !self.is_ld {
-            let mut os = OsString::from("-Wl,");
-            os.push(arg.as_ref());
-            self.cmd.arg(os);
-        } else {
-            self.cmd.arg(arg);
-        }
-        self
-    }
-
-    fn takes_hints(&self) -> bool {
-        !self.sess.target.target.options.is_like_osx
-    }
-
-    // Some platforms take hints about whether a library is static or dynamic.
-    // For those that support this, we ensure we pass the option if the library
-    // was flagged "static" (most defaults are dynamic) to ensure that if
-    // libfoo.a and libfoo.so both exist that the right one is chosen.
-    fn hint_static(&mut self) {
-        if !self.takes_hints() { return }
-        if !self.hinted_static {
-            self.linker_arg("-Bstatic");
-            self.hinted_static = true;
-        }
-    }
-
-    fn hint_dynamic(&mut self) {
-        if !self.takes_hints() { return }
-        if self.hinted_static {
-            self.linker_arg("-Bdynamic");
-            self.hinted_static = false;
-        }
-    }
-
-    fn push_cross_lang_lto_args(&mut self, plugin_path: Option<&OsStr>) {
-        if let Some(plugin_path) = plugin_path {
-            let mut arg = OsString::from("-plugin=");
-            arg.push(plugin_path);
-            self.linker_arg(&arg);
-        }
-
-        let opt_level = match self.sess.opts.optimize {
-            config::OptLevel::No => "O0",
-            config::OptLevel::Less => "O1",
-            config::OptLevel::Default => "O2",
-            config::OptLevel::Aggressive => "O3",
-            config::OptLevel::Size => "Os",
-            config::OptLevel::SizeMin => "Oz",
-        };
-
-        self.linker_arg(&format!("-plugin-opt={}", opt_level));
-        self.linker_arg(&format!("-plugin-opt=mcpu={}", llvm_util::target_cpu(self.sess)));
-
-        match self.sess.lto() {
-            config::Lto::Thin |
-            config::Lto::ThinLocal => {
-                self.linker_arg("-plugin-opt=thin");
-            }
-            config::Lto::Fat |
-            config::Lto::No => {
-                // default to regular LTO
-            }
-        }
-    }
-}
-
-impl<'a> Linker for GccLinker<'a> {
-    fn link_dylib(&mut self, lib: &str) { self.hint_dynamic(); self.cmd.arg(format!("-l{}",lib)); }
-    fn link_staticlib(&mut self, lib: &str) {
-        self.hint_static(); self.cmd.arg(format!("-l{}",lib));
-    }
-    fn link_rlib(&mut self, lib: &Path) { self.hint_static(); self.cmd.arg(lib); }
-    fn include_path(&mut self, path: &Path) { self.cmd.arg("-L").arg(path); }
-    fn framework_path(&mut self, path: &Path) { self.cmd.arg("-F").arg(path); }
-    fn output_filename(&mut self, path: &Path) { self.cmd.arg("-o").arg(path); }
-    fn add_object(&mut self, path: &Path) { self.cmd.arg(path); }
-    fn position_independent_executable(&mut self) { self.cmd.arg("-pie"); }
-    fn no_position_independent_executable(&mut self) { self.cmd.arg("-no-pie"); }
-    fn full_relro(&mut self) { self.linker_arg("-zrelro"); self.linker_arg("-znow"); }
-    fn partial_relro(&mut self) { self.linker_arg("-zrelro"); }
-    fn no_relro(&mut self) { self.linker_arg("-znorelro"); }
-    fn build_static_executable(&mut self) { self.cmd.arg("-static"); }
-    fn args(&mut self, args: &[String]) { self.cmd.args(args); }
-
-    fn link_rust_dylib(&mut self, lib: &str, _path: &Path) {
-        self.hint_dynamic();
-        self.cmd.arg(format!("-l{}",lib));
-    }
-
-    fn link_framework(&mut self, framework: &str) {
-        self.hint_dynamic();
-        self.cmd.arg("-framework").arg(framework);
-    }
-
-    // Here we explicitly ask that the entire archive is included into the
-    // result artifact. For more details see #15460, but the gist is that
-    // the linker will strip away any unused objects in the archive if we
-    // don't otherwise explicitly reference them. This can occur for
-    // libraries which are just providing bindings, libraries with generic
-    // functions, etc.
-    fn link_whole_staticlib(&mut self, lib: &str, search_path: &[PathBuf]) {
-        self.hint_static();
-        let target = &self.sess.target.target;
-        if !target.options.is_like_osx {
-            self.linker_arg("--whole-archive").cmd.arg(format!("-l{}",lib));
-            self.linker_arg("--no-whole-archive");
-        } else {
-            // -force_load is the macOS equivalent of --whole-archive, but it
-            // involves passing the full path to the library to link.
-            self.linker_arg("-force_load");
-            let lib = archive::find_library(lib, search_path, &self.sess);
-            self.linker_arg(&lib);
-        }
-    }
-
-    fn link_whole_rlib(&mut self, lib: &Path) {
-        self.hint_static();
-        if self.sess.target.target.options.is_like_osx {
-            self.linker_arg("-force_load");
-            self.linker_arg(&lib);
-        } else {
-            self.linker_arg("--whole-archive").cmd.arg(lib);
-            self.linker_arg("--no-whole-archive");
-        }
-    }
-
-    fn gc_sections(&mut self, keep_metadata: bool) {
-        // The dead_strip option to the linker specifies that functions and data
-        // unreachable by the entry point will be removed. This is quite useful
-        // with Rust's compilation model of compiling one library at a time into
-        // one object file. For example, this brings hello world from 1.7MB to
-        // 458K.
-        //
-        // Note that this is done for both executables and dynamic libraries. We
-        // won't get much benefit from dylibs because LLVM will have already
-        // stripped away as much as it could. This has not been seen to impact
-        // link times negatively.
-        //
-        // -dead_strip can't be part of the pre_link_args because it's also used
-        // for partial linking when using multiple codegen units (-r).  So we
-        // insert it here.
-        if self.sess.target.target.options.is_like_osx {
-            self.linker_arg("-dead_strip");
-        } else if self.sess.target.target.options.is_like_solaris {
-            self.linker_arg("-zignore");
-
-        // If we're building a dylib, we don't use --gc-sections because LLVM
-        // has already done the best it can do, and we also don't want to
-        // eliminate the metadata. If we're building an executable, however,
-        // --gc-sections drops the size of hello world from 1.8MB to 597K, a 67%
-        // reduction.
-        } else if !keep_metadata {
-            self.linker_arg("--gc-sections");
-        }
-    }
-
-    fn optimize(&mut self) {
-        if !self.sess.target.target.options.linker_is_gnu { return }
-
-        // GNU-style linkers support optimization with -O. GNU ld doesn't
-        // need a numeric argument, but other linkers do.
-        if self.sess.opts.optimize == config::OptLevel::Default ||
-           self.sess.opts.optimize == config::OptLevel::Aggressive {
-            self.linker_arg("-O1");
-        }
-    }
-
-    fn pgo_gen(&mut self) {
-        if !self.sess.target.target.options.linker_is_gnu { return }
-
-        // If we're doing PGO generation stuff and on a GNU-like linker, use the
-        // "-u" flag to properly pull in the profiler runtime bits.
-        //
-        // This is because LLVM otherwise won't add the needed initialization
-        // for us on Linux (though the extra flag should be harmless if it
-        // does).
-        //
-        // See https://reviews.llvm.org/D14033 and https://reviews.llvm.org/D14030.
-        //
-        // Though it may be worth to try to revert those changes upstream, since
-        // the overhead of the initialization should be minor.
-        self.cmd.arg("-u");
-        self.cmd.arg("__llvm_profile_runtime");
-    }
-
-    fn debuginfo(&mut self) {
-        match self.sess.opts.debuginfo {
-            DebugInfo::None => {
-                // If we are building without debuginfo enabled and we were called with
-                // `-Zstrip-debuginfo-if-disabled=yes`, tell the linker to strip any debuginfo
-                // found when linking to get rid of symbols from libstd.
-                match self.sess.opts.debugging_opts.strip_debuginfo_if_disabled {
-                    Some(true) => { self.linker_arg("-S"); },
-                    _ => {},
-                }
-            },
-            _ => {},
-        };
-    }
-
-    fn no_default_libraries(&mut self) {
-        if !self.is_ld {
-            self.cmd.arg("-nodefaultlibs");
-        }
-    }
-
-    fn build_dylib(&mut self, out_filename: &Path) {
-        // On mac we need to tell the linker to let this library be rpathed
-        if self.sess.target.target.options.is_like_osx {
-            self.cmd.arg("-dynamiclib");
-            self.linker_arg("-dylib");
-
-            // Note that the `osx_rpath_install_name` option here is a hack
-            // purely to support rustbuild right now, we should get a more
-            // principled solution at some point to force the compiler to pass
-            // the right `-Wl,-install_name` with an `@rpath` in it.
-            if self.sess.opts.cg.rpath ||
-               self.sess.opts.debugging_opts.osx_rpath_install_name {
-                self.linker_arg("-install_name");
-                let mut v = OsString::from("@rpath/");
-                v.push(out_filename.file_name().unwrap());
-                self.linker_arg(&v);
-            }
-        } else {
-            self.cmd.arg("-shared");
-        }
-    }
-
-    fn export_symbols(&mut self, tmpdir: &Path, crate_type: CrateType) {
-        // If we're compiling a dylib, then we let symbol visibility in object
-        // files to take care of whether they're exported or not.
-        //
-        // If we're compiling a cdylib, however, we manually create a list of
-        // exported symbols to ensure we don't expose any more. The object files
-        // have far more public symbols than we actually want to export, so we
-        // hide them all here.
-        if crate_type == CrateType::Dylib ||
-           crate_type == CrateType::ProcMacro {
-            return
-        }
-
-        let mut arg = OsString::new();
-        let path = tmpdir.join("list");
-
-        debug!("EXPORTED SYMBOLS:");
-
-        if self.sess.target.target.options.is_like_osx {
-            // Write a plain, newline-separated list of symbols
-            let res = (|| -> io::Result<()> {
-                let mut f = BufWriter::new(File::create(&path)?);
-                for sym in self.info.exports[&crate_type].iter() {
-                    debug!("  _{}", sym);
-                    writeln!(f, "_{}", sym)?;
-                }
-                Ok(())
-            })();
-            if let Err(e) = res {
-                self.sess.fatal(&format!("failed to write lib.def file: {}", e));
-            }
-        } else {
-            // Write an LD version script
-            let res = (|| -> io::Result<()> {
-                let mut f = BufWriter::new(File::create(&path)?);
-                writeln!(f, "{{\n  global:")?;
-                for sym in self.info.exports[&crate_type].iter() {
-                    debug!("    {};", sym);
-                    writeln!(f, "    {};", sym)?;
-                }
-                writeln!(f, "\n  local:\n    *;\n}};")?;
-                Ok(())
-            })();
-            if let Err(e) = res {
-                self.sess.fatal(&format!("failed to write version script: {}", e));
-            }
-        }
-
-        if self.sess.target.target.options.is_like_osx {
-            if !self.is_ld {
-                arg.push("-Wl,")
-            }
-            arg.push("-exported_symbols_list,");
-        } else if self.sess.target.target.options.is_like_solaris {
-            if !self.is_ld {
-                arg.push("-Wl,")
-            }
-            arg.push("-M,");
-        } else {
-            if !self.is_ld {
-                arg.push("-Wl,")
-            }
-            arg.push("--version-script=");
-        }
-
-        arg.push(&path);
-        self.cmd.arg(arg);
-    }
-
-    fn subsystem(&mut self, subsystem: &str) {
-        self.linker_arg("--subsystem");
-        self.linker_arg(&subsystem);
-    }
-
-    fn finalize(&mut self) -> Command {
-        self.hint_dynamic(); // Reset to default before returning the composed command line.
-        let mut cmd = Command::new("");
-        ::std::mem::swap(&mut cmd, &mut self.cmd);
-        cmd
-    }
-
-    fn group_start(&mut self) {
-        if !self.sess.target.target.options.is_like_osx {
-            self.linker_arg("--start-group");
-        }
-    }
-
-    fn group_end(&mut self) {
-        if !self.sess.target.target.options.is_like_osx {
-            self.linker_arg("--end-group");
-        }
-    }
-
-    fn cross_lang_lto(&mut self) {
-        match self.sess.opts.debugging_opts.cross_lang_lto {
-            CrossLangLto::Disabled => {
-                // Nothing to do
-            }
-            CrossLangLto::LinkerPluginAuto => {
-                self.push_cross_lang_lto_args(None);
-            }
-            CrossLangLto::LinkerPlugin(ref path) => {
-                self.push_cross_lang_lto_args(Some(path.as_os_str()));
-            }
-        }
-    }
-}
-
-pub struct MsvcLinker<'a> {
-    cmd: Command,
-    sess: &'a Session,
-    info: &'a LinkerInfo
-}
-
-impl<'a> Linker for MsvcLinker<'a> {
-    fn link_rlib(&mut self, lib: &Path) { self.cmd.arg(lib); }
-    fn add_object(&mut self, path: &Path) { self.cmd.arg(path); }
-    fn args(&mut self, args: &[String]) { self.cmd.args(args); }
-
-    fn build_dylib(&mut self, out_filename: &Path) {
-        self.cmd.arg("/DLL");
-        let mut arg: OsString = "/IMPLIB:".into();
-        arg.push(out_filename.with_extension("dll.lib"));
-        self.cmd.arg(arg);
-    }
-
-    fn build_static_executable(&mut self) {
-        // noop
-    }
-
-    fn gc_sections(&mut self, _keep_metadata: bool) {
-        // MSVC's ICF (Identical COMDAT Folding) link optimization is
-        // slow for Rust and thus we disable it by default when not doing an
-        // optimized build.
-        if self.sess.opts.optimize != config::OptLevel::No {
-            self.cmd.arg("/OPT:REF,ICF");
-        } else {
-            // It is necessary to specify NOICF here, because /OPT:REF
-            // implies ICF by default.
-            self.cmd.arg("/OPT:REF,NOICF");
-        }
-    }
-
-    fn link_dylib(&mut self, lib: &str) {
-        self.cmd.arg(&format!("{}.lib", lib));
-    }
-
-    fn link_rust_dylib(&mut self, lib: &str, path: &Path) {
-        // When producing a dll, the MSVC linker may not actually emit a
-        // `foo.lib` file if the dll doesn't actually export any symbols, so we
-        // check to see if the file is there and just omit linking to it if it's
-        // not present.
-        let name = format!("{}.dll.lib", lib);
-        if fs::metadata(&path.join(&name)).is_ok() {
-            self.cmd.arg(name);
-        }
-    }
-
-    fn link_staticlib(&mut self, lib: &str) {
-        self.cmd.arg(&format!("{}.lib", lib));
-    }
-
-    fn position_independent_executable(&mut self) {
-        // noop
-    }
-
-    fn no_position_independent_executable(&mut self) {
-        // noop
-    }
-
-    fn full_relro(&mut self) {
-        // noop
-    }
-
-    fn partial_relro(&mut self) {
-        // noop
-    }
-
-    fn no_relro(&mut self) {
-        // noop
-    }
-
-    fn no_default_libraries(&mut self) {
-        // Currently we don't pass the /NODEFAULTLIB flag to the linker on MSVC
-        // as there's been trouble in the past of linking the C++ standard
-        // library required by LLVM. This likely needs to happen one day, but
-        // in general Windows is also a more controlled environment than
-        // Unix, so it's not necessarily as critical that this be implemented.
-        //
-        // Note that there are also some licensing worries about statically
-        // linking some libraries which require a specific agreement, so it may
-        // not ever be possible for us to pass this flag.
-    }
-
-    fn include_path(&mut self, path: &Path) {
-        let mut arg = OsString::from("/LIBPATH:");
-        arg.push(path);
-        self.cmd.arg(&arg);
-    }
-
-    fn output_filename(&mut self, path: &Path) {
-        let mut arg = OsString::from("/OUT:");
-        arg.push(path);
-        self.cmd.arg(&arg);
-    }
-
-    fn framework_path(&mut self, _path: &Path) {
-        bug!("frameworks are not supported on windows")
-    }
-    fn link_framework(&mut self, _framework: &str) {
-        bug!("frameworks are not supported on windows")
-    }
-
-    fn link_whole_staticlib(&mut self, lib: &str, _search_path: &[PathBuf]) {
-        // not supported?
-        self.link_staticlib(lib);
-    }
-    fn link_whole_rlib(&mut self, path: &Path) {
-        // not supported?
-        self.link_rlib(path);
-    }
-    fn optimize(&mut self) {
-        // Needs more investigation of `/OPT` arguments
-    }
-
-    fn pgo_gen(&mut self) {
-        // Nothing needed here.
-    }
-
-    fn debuginfo(&mut self) {
-        // This will cause the Microsoft linker to generate a PDB file
-        // from the CodeView line tables in the object files.
-        self.cmd.arg("/DEBUG");
-
-        // This will cause the Microsoft linker to embed .natvis info into the PDB file
-        let sysroot = self.sess.sysroot();
-        let natvis_dir_path = sysroot.join("lib\\rustlib\\etc");
-        if let Ok(natvis_dir) = fs::read_dir(&natvis_dir_path) {
-            // LLVM 5.0.0's lld-link frontend doesn't yet recognize, and chokes
-            // on, the /NATVIS:... flags.  LLVM 6 (or earlier) should at worst ignore
-            // them, eventually mooting this workaround, per this landed patch:
-            // https://github.com/llvm-mirror/lld/commit/27b9c4285364d8d76bb43839daa100
-            if let Some(ref linker_path) = self.sess.opts.cg.linker {
-                if let Some(linker_name) = Path::new(&linker_path).file_stem() {
-                    if linker_name.to_str().unwrap().to_lowercase() == "lld-link" {
-                        self.sess.warn("not embedding natvis: lld-link may not support the flag");
-                        return;
-                    }
-                }
-            }
-            for entry in natvis_dir {
-                match entry {
-                    Ok(entry) => {
-                        let path = entry.path();
-                        if path.extension() == Some("natvis".as_ref()) {
-                            let mut arg = OsString::from("/NATVIS:");
-                            arg.push(path);
-                            self.cmd.arg(arg);
-                        }
-                    },
-                    Err(err) => {
-                        self.sess.warn(&format!("error enumerating natvis directory: {}", err));
-                    },
-                }
-            }
-        }
-    }
-
-    // Currently the compiler doesn't use `dllexport` (an LLVM attribute) to
-    // export symbols from a dynamic library. When building a dynamic library,
-    // however, we're going to want some symbols exported, so this function
-    // generates a DEF file which lists all the symbols.
-    //
-    // The linker will read this `*.def` file and export all the symbols from
-    // the dynamic library. Note that this is not as simple as just exporting
-    // all the symbols in the current crate (as specified by `codegen.reachable`)
-    // but rather we also need to possibly export the symbols of upstream
-    // crates. Upstream rlibs may be linked statically to this dynamic library,
-    // in which case they may continue to transitively be used and hence need
-    // their symbols exported.
-    fn export_symbols(&mut self,
-                      tmpdir: &Path,
-                      crate_type: CrateType) {
-        let path = tmpdir.join("lib.def");
-        let res = (|| -> io::Result<()> {
-            let mut f = BufWriter::new(File::create(&path)?);
-
-            // Start off with the standard module name header and then go
-            // straight to exports.
-            writeln!(f, "LIBRARY")?;
-            writeln!(f, "EXPORTS")?;
-            for symbol in self.info.exports[&crate_type].iter() {
-                debug!("  _{}", symbol);
-                writeln!(f, "  {}", symbol)?;
-            }
-            Ok(())
-        })();
-        if let Err(e) = res {
-            self.sess.fatal(&format!("failed to write lib.def file: {}", e));
-        }
-        let mut arg = OsString::from("/DEF:");
-        arg.push(path);
-        self.cmd.arg(&arg);
-    }
-
-    fn subsystem(&mut self, subsystem: &str) {
-        // Note that previous passes of the compiler validated this subsystem,
-        // so we just blindly pass it to the linker.
-        self.cmd.arg(&format!("/SUBSYSTEM:{}", subsystem));
-
-        // Windows has two subsystems we're interested in right now, the console
-        // and windows subsystems. These both implicitly have different entry
-        // points (starting symbols). The console entry point starts with
-        // `mainCRTStartup` and the windows entry point starts with
-        // `WinMainCRTStartup`. These entry points, defined in system libraries,
-        // will then later probe for either `main` or `WinMain`, respectively to
-        // start the application.
-        //
-        // In Rust we just always generate a `main` function so we want control
-        // to always start there, so we force the entry point on the windows
-        // subsystem to be `mainCRTStartup` to get everything booted up
-        // correctly.
-        //
-        // For more information see RFC #1665
-        if subsystem == "windows" {
-            self.cmd.arg("/ENTRY:mainCRTStartup");
-        }
-    }
-
-    fn finalize(&mut self) -> Command {
-        let mut cmd = Command::new("");
-        ::std::mem::swap(&mut cmd, &mut self.cmd);
-        cmd
-    }
-
-    // MSVC doesn't need group indicators
-    fn group_start(&mut self) {}
-    fn group_end(&mut self) {}
-
-    fn cross_lang_lto(&mut self) {
-        // Do nothing
-    }
-}
-
-pub struct EmLinker<'a> {
-    cmd: Command,
-    sess: &'a Session,
-    info: &'a LinkerInfo
-}
-
-impl<'a> Linker for EmLinker<'a> {
-    fn include_path(&mut self, path: &Path) {
-        self.cmd.arg("-L").arg(path);
-    }
-
-    fn link_staticlib(&mut self, lib: &str) {
-        self.cmd.arg("-l").arg(lib);
-    }
-
-    fn output_filename(&mut self, path: &Path) {
-        self.cmd.arg("-o").arg(path);
-    }
-
-    fn add_object(&mut self, path: &Path) {
-        self.cmd.arg(path);
-    }
-
-    fn link_dylib(&mut self, lib: &str) {
-        // Emscripten always links statically
-        self.link_staticlib(lib);
-    }
-
-    fn link_whole_staticlib(&mut self, lib: &str, _search_path: &[PathBuf]) {
-        // not supported?
-        self.link_staticlib(lib);
-    }
-
-    fn link_whole_rlib(&mut self, lib: &Path) {
-        // not supported?
-        self.link_rlib(lib);
-    }
-
-    fn link_rust_dylib(&mut self, lib: &str, _path: &Path) {
-        self.link_dylib(lib);
-    }
-
-    fn link_rlib(&mut self, lib: &Path) {
-        self.add_object(lib);
-    }
-
-    fn position_independent_executable(&mut self) {
-        // noop
-    }
-
-    fn no_position_independent_executable(&mut self) {
-        // noop
-    }
-
-    fn full_relro(&mut self) {
-        // noop
-    }
-
-    fn partial_relro(&mut self) {
-        // noop
-    }
-
-    fn no_relro(&mut self) {
-        // noop
-    }
-
-    fn args(&mut self, args: &[String]) {
-        self.cmd.args(args);
-    }
-
-    fn framework_path(&mut self, _path: &Path) {
-        bug!("frameworks are not supported on Emscripten")
-    }
-
-    fn link_framework(&mut self, _framework: &str) {
-        bug!("frameworks are not supported on Emscripten")
-    }
-
-    fn gc_sections(&mut self, _keep_metadata: bool) {
-        // noop
-    }
-
-    fn optimize(&mut self) {
-        // Emscripten performs its own optimizations
-        self.cmd.arg(match self.sess.opts.optimize {
-            OptLevel::No => "-O0",
-            OptLevel::Less => "-O1",
-            OptLevel::Default => "-O2",
-            OptLevel::Aggressive => "-O3",
-            OptLevel::Size => "-Os",
-            OptLevel::SizeMin => "-Oz"
-        });
-        // Unusable until https://github.com/rust-lang/rust/issues/38454 is resolved
-        self.cmd.args(&["--memory-init-file", "0"]);
-    }
-
-    fn pgo_gen(&mut self) {
-        // noop, but maybe we need something like the gnu linker?
-    }
-
-    fn debuginfo(&mut self) {
-        // Preserve names or generate source maps depending on debug info
-        self.cmd.arg(match self.sess.opts.debuginfo {
-            DebugInfo::None => "-g0",
-            DebugInfo::Limited => "-g3",
-            DebugInfo::Full => "-g4"
-        });
-    }
-
-    fn no_default_libraries(&mut self) {
-        self.cmd.args(&["-s", "DEFAULT_LIBRARY_FUNCS_TO_INCLUDE=[]"]);
-    }
-
-    fn build_dylib(&mut self, _out_filename: &Path) {
-        bug!("building dynamic library is unsupported on Emscripten")
-    }
-
-    fn build_static_executable(&mut self) {
-        // noop
-    }
-
-    fn export_symbols(&mut self, _tmpdir: &Path, crate_type: CrateType) {
-        let symbols = &self.info.exports[&crate_type];
-
-        debug!("EXPORTED SYMBOLS:");
-
-        self.cmd.arg("-s");
-
-        let mut arg = OsString::from("EXPORTED_FUNCTIONS=");
-        let mut encoded = String::new();
-
-        {
-            let mut encoder = json::Encoder::new(&mut encoded);
-            let res = encoder.emit_seq(symbols.len(), |encoder| {
-                for (i, sym) in symbols.iter().enumerate() {
-                    encoder.emit_seq_elt(i, |encoder| {
-                        encoder.emit_str(&("_".to_string() + sym))
-                    })?;
-                }
-                Ok(())
-            });
-            if let Err(e) = res {
-                self.sess.fatal(&format!("failed to encode exported symbols: {}", e));
-            }
-        }
-        debug!("{}", encoded);
-        arg.push(encoded);
-
-        self.cmd.arg(arg);
-    }
-
-    fn subsystem(&mut self, _subsystem: &str) {
-        // noop
-    }
-
-    fn finalize(&mut self) -> Command {
-        let mut cmd = Command::new("");
-        ::std::mem::swap(&mut cmd, &mut self.cmd);
-        cmd
-    }
-
-    // Appears not necessary on Emscripten
-    fn group_start(&mut self) {}
-    fn group_end(&mut self) {}
-
-    fn cross_lang_lto(&mut self) {
-        // Do nothing
-    }
-}
-
-fn exported_symbols(tcx: TyCtxt, crate_type: CrateType) -> Vec<String> {
-    let mut symbols = Vec::new();
-
-    let export_threshold = symbol_export::crates_export_threshold(&[crate_type]);
-    for &(symbol, level) in tcx.exported_symbols(LOCAL_CRATE).iter() {
-        if level.is_below_threshold(export_threshold) {
-            symbols.push(symbol.symbol_name(tcx).to_string());
-        }
-    }
-
-    let formats = tcx.sess.dependency_formats.borrow();
-    let deps = formats[&crate_type].iter();
-
-    for (index, dep_format) in deps.enumerate() {
-        let cnum = CrateNum::new(index + 1);
-        // For each dependency that we are linking to statically ...
-        if *dep_format == Linkage::Static {
-            // ... we add its symbol list to our export list.
-            for &(symbol, level) in tcx.exported_symbols(cnum).iter() {
-                if level.is_below_threshold(export_threshold) {
-                    symbols.push(symbol.symbol_name(tcx).to_string());
-                }
-            }
-        }
-    }
-
-    symbols
-}
-
-pub struct WasmLd<'a> {
-    cmd: Command,
-    sess: &'a Session,
-    info: &'a LinkerInfo,
-}
-
-impl<'a> Linker for WasmLd<'a> {
-    fn link_dylib(&mut self, lib: &str) {
-        self.cmd.arg("-l").arg(lib);
-    }
-
-    fn link_staticlib(&mut self, lib: &str) {
-        self.cmd.arg("-l").arg(lib);
-    }
-
-    fn link_rlib(&mut self, lib: &Path) {
-        self.cmd.arg(lib);
-    }
-
-    fn include_path(&mut self, path: &Path) {
-        self.cmd.arg("-L").arg(path);
-    }
-
-    fn framework_path(&mut self, _path: &Path) {
-        panic!("frameworks not supported")
-    }
-
-    fn output_filename(&mut self, path: &Path) {
-        self.cmd.arg("-o").arg(path);
-    }
-
-    fn add_object(&mut self, path: &Path) {
-        self.cmd.arg(path);
-    }
-
-    fn position_independent_executable(&mut self) {
-    }
-
-    fn full_relro(&mut self) {
-    }
-
-    fn partial_relro(&mut self) {
-    }
-
-    fn no_relro(&mut self) {
-    }
-
-    fn build_static_executable(&mut self) {
-    }
-
-    fn args(&mut self, args: &[String]) {
-        self.cmd.args(args);
-    }
-
-    fn link_rust_dylib(&mut self, lib: &str, _path: &Path) {
-        self.cmd.arg("-l").arg(lib);
-    }
-
-    fn link_framework(&mut self, _framework: &str) {
-        panic!("frameworks not supported")
-    }
-
-    fn link_whole_staticlib(&mut self, lib: &str, _search_path: &[PathBuf]) {
-        self.cmd.arg("-l").arg(lib);
-    }
-
-    fn link_whole_rlib(&mut self, lib: &Path) {
-        self.cmd.arg(lib);
-    }
-
-    fn gc_sections(&mut self, _keep_metadata: bool) {
-        self.cmd.arg("--gc-sections");
-    }
-
-    fn optimize(&mut self) {
-        self.cmd.arg(match self.sess.opts.optimize {
-            OptLevel::No => "-O0",
-            OptLevel::Less => "-O1",
-            OptLevel::Default => "-O2",
-            OptLevel::Aggressive => "-O3",
-            // Currently LLD doesn't support `Os` and `Oz`, so pass through `O2`
-            // instead.
-            OptLevel::Size => "-O2",
-            OptLevel::SizeMin => "-O2"
-        });
-    }
-
-    fn pgo_gen(&mut self) {
-    }
-
-    fn debuginfo(&mut self) {
-    }
-
-    fn no_default_libraries(&mut self) {
-    }
-
-    fn build_dylib(&mut self, _out_filename: &Path) {
-    }
-
-    fn export_symbols(&mut self, _tmpdir: &Path, crate_type: CrateType) {
-        for sym in self.info.exports[&crate_type].iter() {
-            self.cmd.arg("--export").arg(&sym);
-        }
-    }
-
-    fn subsystem(&mut self, _subsystem: &str) {
-    }
-
-    fn no_position_independent_executable(&mut self) {
-    }
-
-    fn finalize(&mut self) -> Command {
-        // There have been reports in the wild (rustwasm/wasm-bindgen#119) of
-        // using threads causing weird hangs and bugs. Disable it entirely as
-        // this isn't yet the bottleneck of compilation at all anyway.
-        self.cmd.arg("--no-threads");
-
-        // By default LLD only gives us one page of stack (64k) which is a
-        // little small. Default to a larger stack closer to other PC platforms
-        // (1MB) and users can always inject their own link-args to override this.
-        self.cmd.arg("-z").arg("stack-size=1048576");
-
-        // By default LLD's memory layout is:
-        //
-        // 1. First, a blank page
-        // 2. Next, all static data
-        // 3. Finally, the main stack (which grows down)
-        //
-        // This has the unfortunate consequence that on stack overflows you
-        // corrupt static data and can cause some exceedingly weird bugs. To
-        // help detect this a little sooner we instead request that the stack is
-        // placed before static data.
-        //
-        // This means that we'll generate slightly larger binaries as references
-        // to static data will take more bytes in the ULEB128 encoding, but
-        // stack overflow will be guaranteed to trap as it underflows instead of
-        // corrupting static data.
-        self.cmd.arg("--stack-first");
-
-        // FIXME we probably shouldn't pass this but instead pass an explicit
-        // whitelist of symbols we'll allow to be undefined. Unfortunately
-        // though we can't handle symbols like `log10` that LLVM injects at a
-        // super late date without actually parsing object files. For now let's
-        // stick to this and hopefully fix it before stabilization happens.
-        self.cmd.arg("--allow-undefined");
-
-        // For now we just never have an entry symbol
-        self.cmd.arg("--no-entry");
-
-        // Make the default table accessible
-        self.cmd.arg("--export-table");
-
-        // Rust code should never have warnings, and warnings are often
-        // indicative of bugs, let's prevent them.
-        self.cmd.arg("--fatal-warnings");
-
-        let mut cmd = Command::new("");
-        ::std::mem::swap(&mut cmd, &mut self.cmd);
-        cmd
-    }
-
-    // Not needed for now with LLD
-    fn group_start(&mut self) {}
-    fn group_end(&mut self) {}
-
-    fn cross_lang_lto(&mut self) {
-        // Do nothing for now
-    }
-}
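
With `linker.rs` deleted here and consumed through `rustc_codegen_utils::linker` instead (see the import change in `link.rs` above), the shape of the abstraction is worth keeping in view: one `Linker` trait describing everything linking needs, several concrete linkers (GNU-style, MSVC, Emscripten, wasm-ld), and a factory that picks the implementation from the linker flavor. The sketch below compresses that dispatch into something that compiles on its own; the types and string arguments are toys, not the real `Command`-based API.

#[derive(Clone, Copy, Debug)]
enum LinkerFlavor { Gcc, Msvc }

trait Linker {
    fn link_staticlib(&mut self, lib: &str);
    fn output_filename(&mut self, path: &str);
    fn finalize(&mut self) -> Vec<String>;
}

struct GccLinker { args: Vec<String> }
struct MsvcLinker { args: Vec<String> }

impl Linker for GccLinker {
    fn link_staticlib(&mut self, lib: &str) { self.args.push(format!("-l{}", lib)); }
    fn output_filename(&mut self, path: &str) { self.args.push("-o".into()); self.args.push(path.into()); }
    fn finalize(&mut self) -> Vec<String> { std::mem::replace(&mut self.args, Vec::new()) }
}

impl Linker for MsvcLinker {
    fn link_staticlib(&mut self, lib: &str) { self.args.push(format!("{}.lib", lib)); }
    fn output_filename(&mut self, path: &str) { self.args.push(format!("/OUT:{}", path)); }
    fn finalize(&mut self) -> Vec<String> { std::mem::replace(&mut self.args, Vec::new()) }
}

// Analogue of `LinkerInfo::to_linker`: pick the implementation from the flavor
// and hand back a trait object so the rest of linking stays flavor-agnostic.
fn to_linker(flavor: LinkerFlavor) -> Box<dyn Linker> {
    match flavor {
        LinkerFlavor::Gcc => Box::new(GccLinker { args: Vec::new() }),
        LinkerFlavor::Msvc => Box::new(MsvcLinker { args: Vec::new() }),
    }
}

fn main() {
    for flavor in [LinkerFlavor::Gcc, LinkerFlavor::Msvc].iter().copied() {
        let mut linker = to_linker(flavor);
        linker.link_staticlib("foo");
        linker.output_filename("out");
        println!("{:?}: {:?}", flavor, linker.finalize());
    }
}
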
index 61856236a149141294fc6395d1f2f90e6d00ce84..8f940e0d22a83db3000da3eea32c9824e3919133 100644 (file)
@@ -9,7 +9,6 @@
 // except according to those terms.
 
 use back::bytecode::{DecodedBytecode, RLIB_BYTECODE_EXTENSION};
-use back::symbol_export;
 use back::write::{ModuleConfig, with_llvm_pmb, CodegenContext};
 use back::write::{self, DiagnosticHandlers, pre_lto_bitcode_filename};
 use errors::{FatalError, Handler};
@@ -24,6 +23,7 @@
 use rustc::session::config::{self, Lto};
 use rustc::util::common::time_ext;
 use rustc_data_structures::fx::FxHashMap;
+use rustc_codegen_utils::symbol_export;
 use time_graph::Timeline;
 use {ModuleCodegen, ModuleLlvm, ModuleKind};
 
diff --git a/src/librustc_codegen_llvm/back/symbol_export.rs b/src/librustc_codegen_llvm/back/symbol_export.rs
deleted file mode 100644 (file)
index 6b1b0b9..0000000
+++ /dev/null
@@ -1,395 +0,0 @@
-// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-use rustc_data_structures::sync::Lrc;
-use std::sync::Arc;
-
-use monomorphize::Instance;
-use rustc::hir;
-use rustc::hir::Node;
-use rustc::hir::CodegenFnAttrFlags;
-use rustc::hir::def_id::{CrateNum, DefId, LOCAL_CRATE, CRATE_DEF_INDEX};
-use rustc_data_structures::fingerprint::Fingerprint;
-use rustc::middle::exported_symbols::{SymbolExportLevel, ExportedSymbol, metadata_symbol_name};
-use rustc::session::config;
-use rustc::ty::{TyCtxt, SymbolName};
-use rustc::ty::query::Providers;
-use rustc::ty::subst::Substs;
-use rustc::util::nodemap::{FxHashMap, DefIdMap};
-use rustc_allocator::ALLOCATOR_METHODS;
-use rustc_data_structures::indexed_vec::IndexVec;
-use std::collections::hash_map::Entry::*;
-
-pub type ExportedSymbols = FxHashMap<
-    CrateNum,
-    Arc<Vec<(String, SymbolExportLevel)>>,
->;
-
-pub fn threshold(tcx: TyCtxt) -> SymbolExportLevel {
-    crates_export_threshold(&tcx.sess.crate_types.borrow())
-}
-
-fn crate_export_threshold(crate_type: config::CrateType) -> SymbolExportLevel {
-    match crate_type {
-        config::CrateType::Executable |
-        config::CrateType::Staticlib  |
-        config::CrateType::ProcMacro  |
-        config::CrateType::Cdylib     => SymbolExportLevel::C,
-        config::CrateType::Rlib       |
-        config::CrateType::Dylib      => SymbolExportLevel::Rust,
-    }
-}
-
-pub fn crates_export_threshold(crate_types: &[config::CrateType])
-                                      -> SymbolExportLevel {
-    if crate_types.iter().any(|&crate_type| {
-        crate_export_threshold(crate_type) == SymbolExportLevel::Rust
-    }) {
-        SymbolExportLevel::Rust
-    } else {
-        SymbolExportLevel::C
-    }
-}
-
-fn reachable_non_generics_provider<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
-                                             cnum: CrateNum)
-                                             -> Lrc<DefIdMap<SymbolExportLevel>>
-{
-    assert_eq!(cnum, LOCAL_CRATE);
-
-    if !tcx.sess.opts.output_types.should_codegen() {
-        return Lrc::new(DefIdMap())
-    }
-
-    // Check to see if this crate is a "special runtime crate". These
-    // crates, implementation details of the standard library, typically
-    // have a bunch of `pub extern` and `#[no_mangle]` functions as the
-    // ABI between them. We don't want their symbols to have a `C`
-    // export level, however, as they're just implementation details.
-    // Down below we'll hardwire all of the symbols to the `Rust` export
-    // level instead.
-    let special_runtime_crate = tcx.is_panic_runtime(LOCAL_CRATE) ||
-        tcx.is_compiler_builtins(LOCAL_CRATE);
-
-    let mut reachable_non_generics: DefIdMap<_> = tcx.reachable_set(LOCAL_CRATE).0
-        .iter()
-        .filter_map(|&node_id| {
-            // We want to ignore some FFI functions that are not exposed from
-            // this crate. Reachable FFI functions can be lumped into two
-            // categories:
-            //
-            // 1. Those that are included statically via a static library
-            // 2. Those included otherwise (e.g. dynamically or via a framework)
-            //
-            // Although our LLVM module is not literally emitting code for the
-            // statically included symbols, it's an export of our library which
-            // needs to be passed on to the linker and encoded in the metadata.
-            //
-            // As a result, if this id is an FFI item (foreign item) then we only
-            // let it through if it's included statically.
-            match tcx.hir.get(node_id) {
-                Node::ForeignItem(..) => {
-                    let def_id = tcx.hir.local_def_id(node_id);
-                    if tcx.is_statically_included_foreign_item(def_id) {
-                        Some(def_id)
-                    } else {
-                        None
-                    }
-                }
-
-                // Only consider nodes that actually have exported symbols.
-                Node::Item(&hir::Item {
-                    node: hir::ItemKind::Static(..),
-                    ..
-                }) |
-                Node::Item(&hir::Item {
-                    node: hir::ItemKind::Fn(..), ..
-                }) |
-                Node::ImplItem(&hir::ImplItem {
-                    node: hir::ImplItemKind::Method(..),
-                    ..
-                }) => {
-                    let def_id = tcx.hir.local_def_id(node_id);
-                    let generics = tcx.generics_of(def_id);
-                    if !generics.requires_monomorphization(tcx) &&
-                        // Functions marked with #[inline] are only ever codegened
-                        // with "internal" linkage and are never exported.
-                        !Instance::mono(tcx, def_id).def.requires_local(tcx) {
-                        Some(def_id)
-                    } else {
-                        None
-                    }
-                }
-
-                _ => None
-            }
-        })
-        .map(|def_id| {
-            let export_level = if special_runtime_crate {
-                let name = tcx.symbol_name(Instance::mono(tcx, def_id)).as_str();
-                // We can probably do better here by just ensuring that
-                // it has hidden visibility rather than public
-                // visibility, as this is primarily here to ensure it's
-                // not stripped during LTO.
-                //
-                // In general though we won't link right if these
-                // symbols are stripped, and LTO currently strips them.
-                if &*name == "rust_eh_personality" ||
-                   &*name == "rust_eh_register_frames" ||
-                   &*name == "rust_eh_unregister_frames" {
-                    SymbolExportLevel::C
-                } else {
-                    SymbolExportLevel::Rust
-                }
-            } else {
-                symbol_export_level(tcx, def_id)
-            };
-            debug!("EXPORTED SYMBOL (local): {} ({:?})",
-                   tcx.symbol_name(Instance::mono(tcx, def_id)),
-                   export_level);
-            (def_id, export_level)
-        })
-        .collect();
-
-    if let Some(id) = *tcx.sess.derive_registrar_fn.get() {
-        let def_id = tcx.hir.local_def_id(id);
-        reachable_non_generics.insert(def_id, SymbolExportLevel::C);
-    }
-
-    if let Some(id) = *tcx.sess.plugin_registrar_fn.get() {
-        let def_id = tcx.hir.local_def_id(id);
-        reachable_non_generics.insert(def_id, SymbolExportLevel::C);
-    }
-
-    Lrc::new(reachable_non_generics)
-}
-
-fn is_reachable_non_generic_provider_local<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
-                                                     def_id: DefId)
-                                                     -> bool {
-    let export_threshold = threshold(tcx);
-
-    if let Some(&level) = tcx.reachable_non_generics(def_id.krate).get(&def_id) {
-        level.is_below_threshold(export_threshold)
-    } else {
-        false
-    }
-}
-
-fn is_reachable_non_generic_provider_extern<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
-                                                      def_id: DefId)
-                                                      -> bool {
-    tcx.reachable_non_generics(def_id.krate).contains_key(&def_id)
-}
-
-fn exported_symbols_provider_local<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
-                                             cnum: CrateNum)
-                                             -> Arc<Vec<(ExportedSymbol<'tcx>,
-                                                         SymbolExportLevel)>>
-{
-    assert_eq!(cnum, LOCAL_CRATE);
-
-    if !tcx.sess.opts.output_types.should_codegen() {
-        return Arc::new(vec![])
-    }
-
-    let mut symbols: Vec<_> = tcx.reachable_non_generics(LOCAL_CRATE)
-                                 .iter()
-                                 .map(|(&def_id, &level)| {
-                                    (ExportedSymbol::NonGeneric(def_id), level)
-                                 })
-                                 .collect();
-
-    if tcx.sess.entry_fn.borrow().is_some() {
-        let exported_symbol = ExportedSymbol::NoDefId(SymbolName::new("main"));
-
-        symbols.push((exported_symbol, SymbolExportLevel::C));
-    }
-
-    if tcx.sess.allocator_kind.get().is_some() {
-        for method in ALLOCATOR_METHODS {
-            let symbol_name = format!("__rust_{}", method.name);
-            let exported_symbol = ExportedSymbol::NoDefId(SymbolName::new(&symbol_name));
-
-            symbols.push((exported_symbol, SymbolExportLevel::Rust));
-        }
-    }
-
-    if tcx.sess.opts.debugging_opts.pgo_gen.is_some() {
-        // These are weak symbols that point to the profile version and the
-        // profile name, which need to be treated as exported so LTO doesn't nix
-        // them.
-        const PROFILER_WEAK_SYMBOLS: [&'static str; 2] = [
-            "__llvm_profile_raw_version",
-            "__llvm_profile_filename",
-        ];
-        for sym in &PROFILER_WEAK_SYMBOLS {
-            let exported_symbol = ExportedSymbol::NoDefId(SymbolName::new(sym));
-            symbols.push((exported_symbol, SymbolExportLevel::C));
-        }
-    }
-
-    if tcx.sess.crate_types.borrow().contains(&config::CrateType::Dylib) {
-        let symbol_name = metadata_symbol_name(tcx);
-        let exported_symbol = ExportedSymbol::NoDefId(SymbolName::new(&symbol_name));
-
-        symbols.push((exported_symbol, SymbolExportLevel::Rust));
-    }
-
-    if tcx.sess.opts.share_generics() && tcx.local_crate_exports_generics() {
-        use rustc::mir::mono::{Linkage, Visibility, MonoItem};
-        use rustc::ty::InstanceDef;
-
-        // Normally, we require that shared monomorphizations are not hidden,
-        // because if we want to re-use a monomorphization from a Rust dylib, it
-        // needs to be exported.
-        // However, on platforms that don't allow for Rust dylibs, having
-        // external linkage is enough for monomorphization to be linked to.
-        let need_visibility = tcx.sess.target.target.options.dynamic_linking &&
-                              !tcx.sess.target.target.options.only_cdylib;
-
-        let (_, cgus) = tcx.collect_and_partition_mono_items(LOCAL_CRATE);
-
-        for (mono_item, &(linkage, visibility)) in cgus.iter()
-                                                       .flat_map(|cgu| cgu.items().iter()) {
-            if linkage != Linkage::External {
-                // We can only re-use things with external linkage, otherwise
-                // we'll get a linker error
-                continue
-            }
-
-            if need_visibility && visibility == Visibility::Hidden {
-                // If we potentially share things from Rust dylibs, they must
-                // not be hidden
-                continue
-            }
-
-            if let &MonoItem::Fn(Instance {
-                def: InstanceDef::Item(def_id),
-                substs,
-            }) = mono_item {
-                if substs.types().next().is_some() {
-                    symbols.push((ExportedSymbol::Generic(def_id, substs),
-                                  SymbolExportLevel::Rust));
-                }
-            }
-        }
-    }
-
-    // Sort so we get a stable incr. comp. hash.
-    symbols.sort_unstable_by(|&(ref symbol1, ..), &(ref symbol2, ..)| {
-        symbol1.compare_stable(tcx, symbol2)
-    });
-
-    Arc::new(symbols)
-}
-
-fn upstream_monomorphizations_provider<'a, 'tcx>(
-    tcx: TyCtxt<'a, 'tcx, 'tcx>,
-    cnum: CrateNum)
-    -> Lrc<DefIdMap<Lrc<FxHashMap<&'tcx Substs<'tcx>, CrateNum>>>>
-{
-    debug_assert!(cnum == LOCAL_CRATE);
-
-    let cnums = tcx.all_crate_nums(LOCAL_CRATE);
-
-    let mut instances: DefIdMap<FxHashMap<_, _>> = DefIdMap();
-
-    let cnum_stable_ids: IndexVec<CrateNum, Fingerprint> = {
-        let mut cnum_stable_ids = IndexVec::from_elem_n(Fingerprint::ZERO,
-                                                        cnums.len() + 1);
-
-        for &cnum in cnums.iter() {
-            cnum_stable_ids[cnum] = tcx.def_path_hash(DefId {
-                krate: cnum,
-                index: CRATE_DEF_INDEX,
-            }).0;
-        }
-
-        cnum_stable_ids
-    };
-
-    for &cnum in cnums.iter() {
-        for &(ref exported_symbol, _) in tcx.exported_symbols(cnum).iter() {
-            if let &ExportedSymbol::Generic(def_id, substs) = exported_symbol {
-                let substs_map = instances.entry(def_id).or_default();
-
-                match substs_map.entry(substs) {
-                    Occupied(mut e) => {
-                        // If there are multiple monomorphizations available,
-                        // we select one deterministically.
-                        let other_cnum = *e.get();
-                        if cnum_stable_ids[other_cnum] > cnum_stable_ids[cnum] {
-                            e.insert(cnum);
-                        }
-                    }
-                    Vacant(e) => {
-                        e.insert(cnum);
-                    }
-                }
-            }
-        }
-    }
-
-    Lrc::new(instances.into_iter()
-                      .map(|(key, value)| (key, Lrc::new(value)))
-                      .collect())
-}
-
-fn upstream_monomorphizations_for_provider<'a, 'tcx>(
-    tcx: TyCtxt<'a, 'tcx, 'tcx>,
-    def_id: DefId)
-    -> Option<Lrc<FxHashMap<&'tcx Substs<'tcx>, CrateNum>>>
-{
-    debug_assert!(!def_id.is_local());
-    tcx.upstream_monomorphizations(LOCAL_CRATE)
-       .get(&def_id)
-       .cloned()
-}
-
-fn is_unreachable_local_definition_provider(tcx: TyCtxt, def_id: DefId) -> bool {
-    if let Some(node_id) = tcx.hir.as_local_node_id(def_id) {
-        !tcx.reachable_set(LOCAL_CRATE).0.contains(&node_id)
-    } else {
-        bug!("is_unreachable_local_definition called with non-local DefId: {:?}",
-              def_id)
-    }
-}
-
-pub fn provide(providers: &mut Providers) {
-    providers.reachable_non_generics = reachable_non_generics_provider;
-    providers.is_reachable_non_generic = is_reachable_non_generic_provider_local;
-    providers.exported_symbols = exported_symbols_provider_local;
-    providers.upstream_monomorphizations = upstream_monomorphizations_provider;
-    providers.is_unreachable_local_definition = is_unreachable_local_definition_provider;
-}
-
-pub fn provide_extern(providers: &mut Providers) {
-    providers.is_reachable_non_generic = is_reachable_non_generic_provider_extern;
-    providers.upstream_monomorphizations_for = upstream_monomorphizations_for_provider;
-}
-
-fn symbol_export_level(tcx: TyCtxt, sym_def_id: DefId) -> SymbolExportLevel {
-    // We export anything that's not mangled at the "C" layer as it probably has
-    // to do with ABI concerns. We do not, however, apply such treatment to
-    // special symbols in the standard library for various plumbing between
-    // core/std/allocators/etc. For example symbols used to hook up allocation
-    // are not considered for export
-    let codegen_fn_attrs = tcx.codegen_fn_attrs(sym_def_id);
-    let is_extern = codegen_fn_attrs.contains_extern_indicator();
-    let std_internal =
-        codegen_fn_attrs.flags.contains(CodegenFnAttrFlags::RUSTC_STD_INTERNAL_SYMBOL);
-
-    if is_extern && !std_internal {
-        SymbolExportLevel::C
-    } else {
-        SymbolExportLevel::Rust
-    }
-}
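
The `crates_export_threshold` logic removed above (it moves to `rustc_codegen_utils`) picks the `Rust` export level as soon as any requested crate type needs Rust-level symbols. A small runnable sketch with simplified stand-in types, plus one worked case (`rlib` together with `cdylib` resolves to `Rust`):

    #[derive(Clone, Copy)]
    #[allow(dead_code)]
    enum CrateType { Executable, Staticlib, ProcMacro, Cdylib, Rlib, Dylib }

    #[derive(Debug, PartialEq)]
    enum ExportLevel { C, Rust }

    fn crate_export_threshold(ct: CrateType) -> ExportLevel {
        match ct {
            CrateType::Executable | CrateType::Staticlib
            | CrateType::ProcMacro | CrateType::Cdylib => ExportLevel::C,
            CrateType::Rlib | CrateType::Dylib => ExportLevel::Rust,
        }
    }

    fn crates_export_threshold(crate_types: &[CrateType]) -> ExportLevel {
        // `Rust` wins if any single crate type demands it.
        if crate_types.iter().any(|&ct| crate_export_threshold(ct) == ExportLevel::Rust) {
            ExportLevel::Rust
        } else {
            ExportLevel::C
        }
    }

    fn main() {
        // rlib forces the Rust-level threshold even though cdylib alone would be C.
        assert_eq!(crates_export_threshold(&[CrateType::Rlib, CrateType::Cdylib]),
                   ExportLevel::Rust);
    }
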
index 81619c219757b4a11a43e627e7b5cca698b1b8c1..d04e80195f054afcb155a8823a099767c11e0e94 100644 (file)
 
 use attributes;
 use back::bytecode::{self, RLIB_BYTECODE_EXTENSION};
-use back::lto::{self, ModuleBuffer, ThinBuffer, SerializedModule};
+use back::lto::{self, ThinBuffer, SerializedModule};
 use back::link::{self, get_linker, remove};
-use back::command::Command;
-use back::linker::LinkerInfo;
-use back::symbol_export::ExportedSymbols;
 use base;
 use consts;
 use memmap;
@@ -38,6 +35,9 @@
 use rustc_fs_util::{path2cstr, link_or_copy};
 use rustc_data_structures::small_c_str::SmallCStr;
 use rustc_data_structures::svh::Svh;
+use rustc_codegen_utils::command::Command;
+use rustc_codegen_utils::linker::LinkerInfo;
+use rustc_codegen_utils::symbol_export::ExportedSymbols;
 use errors::{self, Handler, Level, DiagnosticBuilder, FatalError, DiagnosticId};
 use errors::emitter::{Emitter};
 use syntax::attr;
@@ -564,8 +564,8 @@ unsafe fn optimize(cgcx: &CodegenContext,
             // Some options cause LLVM bitcode to be emitted, which uses ThinLTOBuffers, so we need
             // to make sure we run LLVM's NameAnonGlobals pass when emitting bitcode; otherwise
             // we'll get errors in LLVM.
-            let using_thin_buffers = llvm::LLVMRustThinLTOAvailable() && (config.emit_bc
-                || config.obj_is_bitcode || config.emit_bc_compressed || config.embed_bitcode);
+            let using_thin_buffers = config.emit_bc || config.obj_is_bitcode
+                || config.emit_bc_compressed || config.embed_bitcode;
             let mut have_name_anon_globals_pass = false;
             if !config.no_prepopulate_passes {
                 llvm::LLVMRustAddAnalysisPasses(tm, fpm, llmod);
@@ -729,15 +729,8 @@ unsafe fn with_codegen<'ll, F, R>(tm: &'ll llvm::TargetMachine,
 
 
         if write_bc || config.emit_bc_compressed || config.embed_bitcode {
-            let thin;
-            let old;
-            let data = if llvm::LLVMRustThinLTOAvailable() {
-                thin = ThinBuffer::new(llmod);
-                thin.data()
-            } else {
-                old = ModuleBuffer::new(llmod);
-                old.data()
-            };
+            let thin = ThinBuffer::new(llmod);
+            let data = thin.data();
             timeline.record("make-bc");
 
             if write_bc {
@@ -1385,12 +1378,8 @@ fn execute_optimize_work_item(cgcx: &CodegenContext,
         // builds we don't actually want to LTO the allocator modules if
         // it shows up. This is due to various linker shenanigans that
         // we'll encounter later.
-        //
-        // Additionally here's where we also factor in the current LLVM
-        // version. If it doesn't support ThinLTO we skip this.
         Lto::ThinLocal => {
-            module.kind != ModuleKind::Allocator &&
-                unsafe { llvm::LLVMRustThinLTOAvailable() }
+            module.kind != ModuleKind::Allocator
         }
     };
 
@@ -1508,6 +1497,7 @@ enum Message {
     },
     CodegenComplete,
     CodegenItem,
+    CodegenAborted,
 }
 
 struct Diagnostic {
@@ -1788,6 +1778,7 @@ fn start_executing_work(tcx: TyCtxt,
         let mut needs_lto = Vec::new();
         let mut lto_import_only_modules = Vec::new();
         let mut started_lto = false;
+        let mut codegen_aborted = false;
 
         // This flag tracks whether all items have gone through codegens
         let mut codegen_done = false;
@@ -1805,13 +1796,19 @@ fn start_executing_work(tcx: TyCtxt,
         let mut llvm_start_time = None;
 
         // Run the message loop while there's still anything that needs message
-        // processing:
+        // processing. Note that as soon as codegen is aborted we simply want to
+        // wait for all existing work to finish, so many of the conditions here
+        // only apply if codegen hasn't been aborted as they represent pending
+        // work to be done.
         while !codegen_done ||
-              work_items.len() > 0 ||
               running > 0 ||
-              needs_lto.len() > 0 ||
-              lto_import_only_modules.len() > 0 ||
-              main_thread_worker_state != MainThreadWorkerState::Idle {
+              (!codegen_aborted && (
+                  work_items.len() > 0 ||
+                  needs_lto.len() > 0 ||
+                  lto_import_only_modules.len() > 0 ||
+                  main_thread_worker_state != MainThreadWorkerState::Idle
+              ))
+        {
 
             // While there are still CGUs to be codegened, the coordinator has
             // to decide how to utilize the compiler processes implicit Token:
@@ -1840,6 +1837,9 @@ fn start_executing_work(tcx: TyCtxt,
                         spawn_work(cgcx, item);
                     }
                 }
+            } else if codegen_aborted {
+                // don't queue up any more work if codegen was aborted, we're
+                // just waiting for our existing children to finish
             } else {
                 // If we've finished everything related to normal codegen
                 // then it must be the case that we've got some LTO work to do.
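
A boiled-down view of the new loop guard above, with the three work queues collapsed into a single `pending_work` flag (all names here are illustrative): once `codegen_aborted` is set, queued work no longer keeps the coordinator alive, only in-flight jobs do.

    fn keep_looping(codegen_done: bool,
                    running: usize,
                    codegen_aborted: bool,
                    pending_work: bool,
                    main_thread_busy: bool) -> bool {
        !codegen_done
            || running > 0
            || (!codegen_aborted && (pending_work || main_thread_busy))
    }

    fn main() {
        // Aborted with nothing in flight: queued work no longer keeps the loop alive.
        assert!(!keep_looping(true, 0, true, true, false));
        // Aborted but workers still running: keep spinning until they finish.
        assert!(keep_looping(true, 3, true, true, false));
    }
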
@@ -1904,7 +1904,7 @@ fn start_executing_work(tcx: TyCtxt,
 
             // Spin up what work we can, only doing this while we've got available
             // parallelism slots and work left to spawn.
-            while work_items.len() > 0 && running < tokens.len() {
+            while !codegen_aborted && work_items.len() > 0 && running < tokens.len() {
                 let (item, _) = work_items.pop().unwrap();
 
                 maybe_start_llvm_timer(cgcx.config(item.module_kind()),
@@ -1969,6 +1969,7 @@ fn start_executing_work(tcx: TyCtxt,
                     if !cgcx.opts.debugging_opts.no_parallel_llvm {
                         helper.request_token();
                     }
+                    assert!(!codegen_aborted);
                     assert_eq!(main_thread_worker_state,
                                MainThreadWorkerState::Codegenning);
                     main_thread_worker_state = MainThreadWorkerState::Idle;
@@ -1976,11 +1977,26 @@ fn start_executing_work(tcx: TyCtxt,
 
                 Message::CodegenComplete => {
                     codegen_done = true;
+                    assert!(!codegen_aborted);
                     assert_eq!(main_thread_worker_state,
                                MainThreadWorkerState::Codegenning);
                     main_thread_worker_state = MainThreadWorkerState::Idle;
                 }
 
+                // If codegen is aborted that means translation was aborted due
+                // to some normal-ish compiler error. In this situation we want
+                // to exit as soon as possible, but we want to make sure all
+                // existing work has finished. Flag codegen as being done, and
+                // then conditions above will ensure no more work is spawned but
+                // we'll keep executing this loop until `running` hits 0.
+                Message::CodegenAborted => {
+                    assert!(!codegen_aborted);
+                    codegen_done = true;
+                    codegen_aborted = true;
+                    assert_eq!(main_thread_worker_state,
+                               MainThreadWorkerState::Codegenning);
+                }
+
                 // If a thread exits successfully then we drop a token associated
                 // with that worker and update our `running` count. We may later
                 // re-acquire a token to continue running more work. We may also not
@@ -2446,6 +2462,19 @@ pub fn codegen_finished(&self, tcx: TyCtxt) {
         drop(self.coordinator_send.send(Box::new(Message::CodegenComplete)));
     }
 
+    /// Consume this context indicating that codegen was entirely aborted, and
+    /// we need to exit as quickly as possible.
+    ///
+    /// This method blocks the current thread until all worker threads have
+    /// finished, and all worker threads should have exited or be real close to
+    /// exiting at this point.
+    pub fn codegen_aborted(self) {
+        // Signal to the coordinator it should spawn no more work and start
+        // shutdown.
+        drop(self.coordinator_send.send(Box::new(Message::CodegenAborted)));
+        drop(self.future.join());
+    }
+
     pub fn check_for_errors(&self, sess: &Session) {
         self.shared_emitter_main.check(sess, false);
     }
@@ -2464,6 +2493,11 @@ pub fn wait_for_signal_to_codegen_item(&self) {
     }
 }
 
+// impl Drop for OngoingCodegen {
+//     fn drop(&mut self) {
+//     }
+// }
+
 pub(crate) fn submit_codegened_module_to_llvm(tcx: TyCtxt,
                                               module: ModuleCodegen,
                                               cost: u64) {
index a4c7a7123b9647d1745728eadc1edcfce50a3f76..fb33fe85b005441782f340c8f880a687273bbf3e 100644 (file)
@@ -54,7 +54,6 @@
 use builder::{Builder, MemFlags};
 use callee;
 use common::{C_bool, C_bytes_in_context, C_i32, C_usize};
-use rustc_mir::monomorphize::collector::{self, MonoItemCollectionMode};
 use rustc_mir::monomorphize::item::DefPathBasedNames;
 use common::{C_struct_in_context, C_array, val_ty};
 use consts;
 use meth;
 use mir;
 use monomorphize::Instance;
-use monomorphize::partitioning::{self, PartitioningStrategy, CodegenUnit, CodegenUnitExt};
+use monomorphize::partitioning::{CodegenUnit, CodegenUnitExt};
 use rustc_codegen_utils::symbol_names_test;
 use time_graph;
-use mono_item::{MonoItem, BaseMonoItemExt, MonoItemExt};
+use mono_item::{MonoItem, MonoItemExt};
 use type_::Type;
 use type_of::LayoutLlvmExt;
-use rustc::util::nodemap::{FxHashMap, DefIdSet};
+use rustc::util::nodemap::FxHashMap;
 use CrateInfo;
 use rustc_data_structures::small_c_str::SmallCStr;
 use rustc_data_structures::sync::Lrc;
 
 use std::any::Any;
+use std::cmp;
 use std::ffi::CString;
-use std::sync::Arc;
-use std::time::{Instant, Duration};
 use std::i32;
-use std::cmp;
+use std::ops::{Deref, DerefMut};
 use std::sync::mpsc;
+use std::time::{Instant, Duration};
 use syntax_pos::Span;
 use syntax_pos::symbol::InternedString;
 use syntax::attr;
@@ -557,7 +556,7 @@ fn create_entry_fn(
         // regions must appear in the argument
         // listing.
         let main_ret_ty = cx.tcx.erase_regions(
-            &main_ret_ty.no_late_bound_regions().unwrap(),
+            &main_ret_ty.no_bound_vars().unwrap(),
         );
 
         if declare::get_defined_value(cx, "main").is_some() {
@@ -738,19 +737,6 @@ pub fn codegen_crate<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
 {
     check_for_rustc_errors_attr(tcx);
 
-    if let Some(true) = tcx.sess.opts.debugging_opts.thinlto {
-        if unsafe { !llvm::LLVMRustThinLTOAvailable() } {
-            tcx.sess.fatal("this compiler's LLVM does not support ThinLTO");
-        }
-    }
-
-    if (tcx.sess.opts.debugging_opts.pgo_gen.is_some() ||
-        !tcx.sess.opts.debugging_opts.pgo_use.is_empty()) &&
-        unsafe { !llvm::LLVMRustPGOAvailable() }
-    {
-        tcx.sess.fatal("this compiler's LLVM does not support PGO");
-    }
-
     let cgu_name_builder = &mut CodegenUnitNameBuilder::new(tcx);
 
     // Codegen the metadata.
@@ -820,6 +806,7 @@ pub fn codegen_crate<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
         metadata,
         rx,
         codegen_units.len());
+    let ongoing_codegen = AbortCodegenOnDrop(Some(ongoing_codegen));
 
     // Codegen an allocator shim, if necessary.
     //
@@ -949,139 +936,64 @@ pub fn codegen_crate<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
     ongoing_codegen.check_for_errors(tcx.sess);
 
     assert_and_save_dep_graph(tcx);
-    ongoing_codegen
+    ongoing_codegen.into_inner()
 }
 
-fn assert_and_save_dep_graph<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>) {
-    time(tcx.sess,
-         "assert dep graph",
-         || rustc_incremental::assert_dep_graph(tcx));
-
-    time(tcx.sess,
-         "serialize dep graph",
-         || rustc_incremental::save_dep_graph(tcx));
+/// A curious wrapper structure whose only purpose is to call `codegen_aborted`
+/// when it's dropped abnormally.
+///
+/// In the process of working on rust-lang/rust#55238 a mysterious segfault was
+/// stumbled upon. The segfault was never reproduced locally, but it was
+/// suspected to be related to the fact that codegen worker threads were
+/// sticking around by the time the main thread was exiting, causing issues.
+///
+/// This structure is an attempt to fix that issue where the `codegen_aborted`
+/// message will block until all workers have finished. This should ensure that
+/// even if the main codegen thread panics we'll wait for pending work to
+/// complete before returning from the main thread, hopefully avoiding
+/// segfaults.
+///
+/// If you see this comment in the code, then it means that this workaround
+/// worked! We may yet one day track down the mysterious cause of that
+/// segfault...
+struct AbortCodegenOnDrop(Option<OngoingCodegen>);
+
+impl AbortCodegenOnDrop {
+    fn into_inner(mut self) -> OngoingCodegen {
+        self.0.take().unwrap()
+    }
 }
 
-fn collect_and_partition_mono_items<'a, 'tcx>(
-    tcx: TyCtxt<'a, 'tcx, 'tcx>,
-    cnum: CrateNum,
-) -> (Arc<DefIdSet>, Arc<Vec<Arc<CodegenUnit<'tcx>>>>)
-{
-    assert_eq!(cnum, LOCAL_CRATE);
-
-    let collection_mode = match tcx.sess.opts.debugging_opts.print_mono_items {
-        Some(ref s) => {
-            let mode_string = s.to_lowercase();
-            let mode_string = mode_string.trim();
-            if mode_string == "eager" {
-                MonoItemCollectionMode::Eager
-            } else {
-                if mode_string != "lazy" {
-                    let message = format!("Unknown codegen-item collection mode '{}'. \
-                                           Falling back to 'lazy' mode.",
-                                          mode_string);
-                    tcx.sess.warn(&message);
-                }
-
-                MonoItemCollectionMode::Lazy
-            }
-        }
-        None => {
-            if tcx.sess.opts.cg.link_dead_code {
-                MonoItemCollectionMode::Eager
-            } else {
-                MonoItemCollectionMode::Lazy
-            }
-        }
-    };
-
-    let (items, inlining_map) =
-        time(tcx.sess, "monomorphization collection", || {
-            collector::collect_crate_mono_items(tcx, collection_mode)
-    });
-
-    tcx.sess.abort_if_errors();
-
-    ::rustc_mir::monomorphize::assert_symbols_are_distinct(tcx, items.iter());
-
-    let strategy = if tcx.sess.opts.incremental.is_some() {
-        PartitioningStrategy::PerModule
-    } else {
-        PartitioningStrategy::FixedUnitCount(tcx.sess.codegen_units())
-    };
-
-    let codegen_units = time(tcx.sess, "codegen unit partitioning", || {
-        partitioning::partition(tcx,
-                                items.iter().cloned(),
-                                strategy,
-                                &inlining_map)
-            .into_iter()
-            .map(Arc::new)
-            .collect::<Vec<_>>()
-    });
-
-    let mono_items: DefIdSet = items.iter().filter_map(|mono_item| {
-        match *mono_item {
-            MonoItem::Fn(ref instance) => Some(instance.def_id()),
-            MonoItem::Static(def_id) => Some(def_id),
-            _ => None,
-        }
-    }).collect();
-
-    if tcx.sess.opts.debugging_opts.print_mono_items.is_some() {
-        let mut item_to_cgus: FxHashMap<_, Vec<_>> = Default::default();
-
-        for cgu in &codegen_units {
-            for (&mono_item, &linkage) in cgu.items() {
-                item_to_cgus.entry(mono_item)
-                            .or_default()
-                            .push((cgu.name().clone(), linkage));
-            }
-        }
+impl Deref for AbortCodegenOnDrop {
+    type Target = OngoingCodegen;
 
-        let mut item_keys: Vec<_> = items
-            .iter()
-            .map(|i| {
-                let mut output = i.to_string(tcx);
-                output.push_str(" @@");
-                let mut empty = Vec::new();
-                let cgus = item_to_cgus.get_mut(i).unwrap_or(&mut empty);
-                cgus.as_mut_slice().sort_by_key(|&(ref name, _)| name.clone());
-                cgus.dedup();
-                for &(ref cgu_name, (linkage, _)) in cgus.iter() {
-                    output.push_str(" ");
-                    output.push_str(&cgu_name.as_str());
-
-                    let linkage_abbrev = match linkage {
-                        Linkage::External => "External",
-                        Linkage::AvailableExternally => "Available",
-                        Linkage::LinkOnceAny => "OnceAny",
-                        Linkage::LinkOnceODR => "OnceODR",
-                        Linkage::WeakAny => "WeakAny",
-                        Linkage::WeakODR => "WeakODR",
-                        Linkage::Appending => "Appending",
-                        Linkage::Internal => "Internal",
-                        Linkage::Private => "Private",
-                        Linkage::ExternalWeak => "ExternalWeak",
-                        Linkage::Common => "Common",
-                    };
-
-                    output.push_str("[");
-                    output.push_str(linkage_abbrev);
-                    output.push_str("]");
-                }
-                output
-            })
-            .collect();
+    fn deref(&self) -> &OngoingCodegen {
+        self.0.as_ref().unwrap()
+    }
+}
 
-        item_keys.sort();
+impl DerefMut for AbortCodegenOnDrop {
+    fn deref_mut(&mut self) -> &mut OngoingCodegen {
+        self.0.as_mut().unwrap()
+    }
+}
 
-        for item in item_keys {
-            println!("MONO_ITEM {}", item);
+impl Drop for AbortCodegenOnDrop {
+    fn drop(&mut self) {
+        if let Some(codegen) = self.0.take() {
+            codegen.codegen_aborted();
         }
     }
+}
 
-    (Arc::new(mono_items), Arc::new(codegen_units))
+fn assert_and_save_dep_graph<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>) {
+    time(tcx.sess,
+         "assert dep graph",
+         || rustc_incremental::assert_dep_graph(tcx));
+
+    time(tcx.sess,
+         "serialize dep graph",
+         || rustc_incremental::save_dep_graph(tcx));
 }
 
 impl CrateInfo {
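
A self-contained sketch of the abort-on-drop guard pattern that `AbortCodegenOnDrop` introduces above (the types below are simplified stand-ins, not the compiler's): the wrapper owns the value in an `Option`, forwards access via `Deref`/`DerefMut`, and only runs the abort path if `into_inner` was never reached, i.e. on a panic or early return.

    use std::ops::{Deref, DerefMut};

    struct Ongoing; // stand-in for OngoingCodegen

    impl Ongoing {
        fn aborted(self) {
            println!("aborted: waiting for in-flight work to finish");
        }
    }

    struct AbortOnDrop(Option<Ongoing>);

    impl AbortOnDrop {
        fn into_inner(mut self) -> Ongoing {
            // Taking the value out leaves `None` behind, so the Drop below
            // becomes a no-op on the normal path.
            self.0.take().unwrap()
        }
    }

    impl Deref for AbortOnDrop {
        type Target = Ongoing;
        fn deref(&self) -> &Ongoing { self.0.as_ref().unwrap() }
    }

    impl DerefMut for AbortOnDrop {
        fn deref_mut(&mut self) -> &mut Ongoing { self.0.as_mut().unwrap() }
    }

    impl Drop for AbortOnDrop {
        fn drop(&mut self) {
            // Only reached if `into_inner` was skipped, e.g. after a panic.
            if let Some(inner) = self.0.take() {
                inner.aborted();
            }
        }
    }

    fn main() {
        let guard = AbortOnDrop(Some(Ongoing));
        let _ongoing = guard.into_inner(); // normal path: Drop does nothing
    }
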
@@ -1173,12 +1085,6 @@ fn load_wasm_imports(&mut self, tcx: TyCtxt, cnum: CrateNum) {
     }
 }
 
-fn is_codegened_item(tcx: TyCtxt, id: DefId) -> bool {
-    let (all_mono_items, _) =
-        tcx.collect_and_partition_mono_items(LOCAL_CRATE);
-    all_mono_items.contains(&id)
-}
-
 fn compile_codegen_unit<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
                                   cgu_name: InternedString)
                                   -> Stats {
@@ -1269,24 +1175,7 @@ fn module_codegen<'a, 'tcx>(
     }
 }
 
-pub fn provide(providers: &mut Providers) {
-    providers.collect_and_partition_mono_items =
-        collect_and_partition_mono_items;
-
-    providers.is_codegened_item = is_codegened_item;
-
-    providers.codegen_unit = |tcx, name| {
-        let (_, all) = tcx.collect_and_partition_mono_items(LOCAL_CRATE);
-        all.iter()
-            .find(|cgu| *cgu.name() == name)
-            .cloned()
-            .unwrap_or_else(|| panic!("failed to find cgu with name {:?}", name))
-    };
-
-    provide_extern(providers);
-}
-
-pub fn provide_extern(providers: &mut Providers) {
+pub fn provide_both(providers: &mut Providers) {
     providers.dllimport_foreign_items = |tcx, krate| {
         let module_map = tcx.foreign_modules(krate);
         let module_map = module_map.iter()
index 2fe6a0377f81b14803ab93feae7a03795b3795bb..f70a68c72489a457df330d85794677f40ce912ca 100644 (file)
@@ -482,14 +482,12 @@ pub fn volatile_load(&self, ptr: &'ll Value) -> &'ll Value {
         }
     }
 
-    pub fn atomic_load(&self, ptr: &'ll Value, order: AtomicOrdering, align: Align) -> &'ll Value {
+    pub fn atomic_load(&self, ptr: &'ll Value, order: AtomicOrdering, size: Size) -> &'ll Value {
         self.count_insn("load.atomic");
         unsafe {
             let load = llvm::LLVMRustBuildAtomicLoad(self.llbuilder, ptr, noname(), order);
-            // FIXME(eddyb) Isn't it UB to use `pref` instead of `abi` here?
-            // However, 64-bit atomic loads on `i686-apple-darwin` appear to
-            // require `___atomic_load` with ABI-alignment, so it's staying.
-            llvm::LLVMSetAlignment(load, align.pref() as c_uint);
+            // LLVM requires the alignment of atomic loads to be at least the size of the type.
+            llvm::LLVMSetAlignment(load, size.bytes() as c_uint);
             load
         }
     }
@@ -564,15 +562,14 @@ pub fn store_with_flags(
     }
 
     pub fn atomic_store(&self, val: &'ll Value, ptr: &'ll Value,
-                        order: AtomicOrdering, align: Align) {
+                        order: AtomicOrdering, size: Size) {
         debug!("Store {:?} -> {:?}", val, ptr);
         self.count_insn("store.atomic");
         let ptr = self.check_store(val, ptr);
         unsafe {
             let store = llvm::LLVMRustBuildAtomicStore(self.llbuilder, val, ptr, order);
-            // FIXME(eddyb) Isn't it UB to use `pref` instead of `abi` here?
-            // Also see `atomic_load` for more context.
-            llvm::LLVMSetAlignment(store, align.pref() as c_uint);
+            // LLVM requires the alignment of atomic stores to be at least the size of the type.
+            llvm::LLVMSetAlignment(store, size.bytes() as c_uint);
         }
     }
 
index 7300bac96182b53bbdd6ddfc9e6d48e05dc9d1a4..c8c693257d52f410261bd223c7d2872dc50b475a 100644 (file)
@@ -44,7 +44,7 @@ pub fn get_fn(
     debug!("get_fn(instance={:?})", instance);
 
     assert!(!instance.substs.needs_infer());
-    assert!(!instance.substs.has_escaping_regions());
+    assert!(!instance.substs.has_escaping_bound_vars());
     assert!(!instance.substs.has_param_types());
 
     let sig = instance.fn_sig(cx.tcx);
index 241f7989e168179c8c21de67061f7fdec0a5d496..120449ae2af4c2518294656eaac3aa6fb68370d5 100644 (file)
@@ -446,29 +446,29 @@ pub fn type_has_metadata(&self, ty: Ty<'tcx>) -> bool {
     }
 }
 
-impl ty::layout::HasDataLayout for &'a CodegenCx<'ll, 'tcx> {
+impl ty::layout::HasDataLayout for CodegenCx<'ll, 'tcx> {
     fn data_layout(&self) -> &ty::layout::TargetDataLayout {
         &self.tcx.data_layout
     }
 }
 
-impl HasTargetSpec for &'a CodegenCx<'ll, 'tcx> {
+impl HasTargetSpec for CodegenCx<'ll, 'tcx> {
     fn target_spec(&self) -> &Target {
         &self.tcx.sess.target.target
     }
 }
 
-impl ty::layout::HasTyCtxt<'tcx> for &'a CodegenCx<'ll, 'tcx> {
-    fn tcx<'b>(&'b self) -> TyCtxt<'b, 'tcx, 'tcx> {
+impl ty::layout::HasTyCtxt<'tcx> for CodegenCx<'ll, 'tcx> {
+    fn tcx<'a>(&'a self) -> TyCtxt<'a, 'tcx, 'tcx> {
         self.tcx
     }
 }
 
-impl LayoutOf for &'a CodegenCx<'ll, 'tcx> {
+impl LayoutOf for CodegenCx<'ll, 'tcx> {
     type Ty = Ty<'tcx>;
     type TyLayout = TyLayout<'tcx>;
 
-    fn layout_of(self, ty: Ty<'tcx>) -> Self::TyLayout {
+    fn layout_of(&self, ty: Ty<'tcx>) -> Self::TyLayout {
         self.tcx.layout_of(ty::ParamEnv::reveal_all().and(ty))
             .unwrap_or_else(|e| if let LayoutError::SizeOverflow(_) = e {
                 self.sess().fatal(&e.to_string())
index 846d505641103dd864de097c137b2d19165fad62..ba1e3f5960c8596b0d4c484aff8f703fd986d8ef 100644 (file)
 use rustc::ich::NodeIdHashingMode;
 use rustc_data_structures::fingerprint::Fingerprint;
 use rustc::ty::Instance;
-use common::CodegenCx;
+use common::{CodegenCx, C_u64};
 use rustc::ty::{self, AdtKind, ParamEnv, Ty, TyCtxt};
-use rustc::ty::layout::{self, Align, LayoutOf, PrimitiveExt, Size, TyLayout};
+use rustc::ty::layout::{self, Align, HasDataLayout, Integer, IntegerExt, LayoutOf,
+                        PrimitiveExt, Size, TyLayout};
 use rustc::session::config;
 use rustc::util::nodemap::FxHashMap;
 use rustc_fs_util::path2cstr;
@@ -205,6 +206,7 @@ enum RecursiveTypeDescription<'ll, 'tcx> {
         unfinished_type: Ty<'tcx>,
         unique_type_id: UniqueTypeId,
         metadata_stub: &'ll DICompositeType,
+        member_holding_stub: &'ll DICompositeType,
         member_description_factory: MemberDescriptionFactory<'ll, 'tcx>,
     },
     FinalMetadata(&'ll DICompositeType)
@@ -215,6 +217,7 @@ fn create_and_register_recursive_type_forward_declaration(
     unfinished_type: Ty<'tcx>,
     unique_type_id: UniqueTypeId,
     metadata_stub: &'ll DICompositeType,
+    member_holding_stub: &'ll DICompositeType,
     member_description_factory: MemberDescriptionFactory<'ll, 'tcx>,
 ) -> RecursiveTypeDescription<'ll, 'tcx> {
 
@@ -227,6 +230,7 @@ fn create_and_register_recursive_type_forward_declaration(
         unfinished_type,
         unique_type_id,
         metadata_stub,
+        member_holding_stub,
         member_description_factory,
     }
 }
@@ -242,6 +246,7 @@ fn finalize(&self, cx: &CodegenCx<'ll, 'tcx>) -> MetadataCreationResult<'ll> {
                 unfinished_type,
                 unique_type_id,
                 metadata_stub,
+                member_holding_stub,
                 ref member_description_factory,
             } => {
                 // Make sure that we have a forward declaration of the type in
@@ -266,7 +271,7 @@ fn finalize(&self, cx: &CodegenCx<'ll, 'tcx>) -> MetadataCreationResult<'ll> {
 
                 // ... and attach them to the stub to complete it.
                 set_members_of_composite_type(cx,
-                                              metadata_stub,
+                                              member_holding_stub,
                                               member_descriptions);
                 return MetadataCreationResult::new(metadata_stub, true);
             }
@@ -350,6 +355,7 @@ fn vec_slice_metadata(
             size: pointer_size,
             align: pointer_align,
             flags: DIFlags::FlagZero,
+            discriminant: None,
         },
         MemberDescription {
             name: "length".to_owned(),
@@ -358,6 +364,7 @@ fn vec_slice_metadata(
             size: usize_size,
             align: usize_align,
             flags: DIFlags::FlagZero,
+            discriminant: None,
         },
     ];
 
@@ -458,6 +465,7 @@ fn trait_pointer_metadata(
             size: data_ptr_field.size,
             align: data_ptr_field.align,
             flags: DIFlags::FlagArtificial,
+            discriminant: None,
         },
         MemberDescription {
             name: "vtable".to_owned(),
@@ -466,6 +474,7 @@ fn trait_pointer_metadata(
             size: vtable_field.size,
             align: vtable_field.align,
             flags: DIFlags::FlagArtificial,
+            discriminant: None,
         },
     ];
 
@@ -914,6 +923,7 @@ struct MemberDescription<'ll> {
     size: Size,
     align: Align,
     flags: DIFlags,
+    discriminant: Option<u64>,
 }
 
 // A factory for MemberDescriptions. It produces a list of member descriptions
@@ -981,6 +991,7 @@ fn create_member_descriptions(&self, cx: &CodegenCx<'ll, 'tcx>)
                 size,
                 align,
                 flags: DIFlags::FlagZero,
+                discriminant: None,
             }
         }).collect()
     }
@@ -1013,6 +1024,7 @@ fn prepare_struct_metadata(
         struct_type,
         unique_type_id,
         struct_metadata_stub,
+        struct_metadata_stub,
         StructMDF(StructMemberDescriptionFactory {
             ty: struct_type,
             variant,
@@ -1045,6 +1057,7 @@ fn create_member_descriptions(&self, cx: &CodegenCx<'ll, 'tcx>)
                 size,
                 align,
                 flags: DIFlags::FlagZero,
+                discriminant: None,
             }
         }).collect()
     }
@@ -1059,15 +1072,18 @@ fn prepare_tuple_metadata(
 ) -> RecursiveTypeDescription<'ll, 'tcx> {
     let tuple_name = compute_debuginfo_type_name(cx, tuple_type, false);
 
+    let struct_stub = create_struct_stub(cx,
+                                         tuple_type,
+                                         &tuple_name[..],
+                                         unique_type_id,
+                                         NO_SCOPE_METADATA);
+
     create_and_register_recursive_type_forward_declaration(
         cx,
         tuple_type,
         unique_type_id,
-        create_struct_stub(cx,
-                           tuple_type,
-                           &tuple_name[..],
-                           unique_type_id,
-                           NO_SCOPE_METADATA),
+        struct_stub,
+        struct_stub,
         TupleMDF(TupleMemberDescriptionFactory {
             ty: tuple_type,
             component_types: component_types.to_vec(),
@@ -1099,6 +1115,7 @@ fn create_member_descriptions(&self, cx: &CodegenCx<'ll, 'tcx>)
                 size,
                 align,
                 flags: DIFlags::FlagZero,
+                discriminant: None,
             }
         }).collect()
     }
@@ -1130,6 +1147,7 @@ fn prepare_union_metadata(
         union_type,
         unique_type_id,
         union_metadata_stub,
+        union_metadata_stub,
         UnionMDF(UnionMemberDescriptionFactory {
             layout: cx.layout_of(union_type),
             variant,
@@ -1142,6 +1160,20 @@ fn prepare_union_metadata(
 // Enums
 //=-----------------------------------------------------------------------------
 
+// DWARF variant support is only available starting in LLVM 7.
+// Although the earlier enum debug info output did not work properly
+// in all situations, it is better for the time being to continue to
+// sometimes emit the old style rather than emit something completely
+// useless when rust is compiled against LLVM 6 or older.  This
+// function decides which representation will be emitted.
+fn use_enum_fallback(cx: &CodegenCx) -> bool {
+    // On MSVC we have to use the fallback mode, because LLVM doesn't
+    // lower variant parts to PDB.
+    return cx.sess().target.target.options.is_like_msvc || unsafe {
+        llvm::LLVMRustVersionMajor() < 7
+    };
+}
+
 // Describes the members of an enum value: An enum is described as a union of
 // structs in DWARF. This MemberDescriptionFactory provides the description for
 // the members of this union; so for every variant of the given enum, this
@@ -1159,6 +1191,15 @@ impl EnumMemberDescriptionFactory<'ll, 'tcx> {
     fn create_member_descriptions(&self, cx: &CodegenCx<'ll, 'tcx>)
                                   -> Vec<MemberDescription<'ll>> {
         let adt = &self.enum_type.ty_adt_def().unwrap();
+
+        // This will always find the metadata in the type map.
+        let fallback = use_enum_fallback(cx);
+        let self_metadata = if fallback {
+            self.containing_scope
+        } else {
+            type_metadata(cx, self.enum_type, self.span)
+        };
+
         match self.layout.variants {
             layout::Variants::Single { .. } if adt.variants.is_empty() => vec![],
             layout::Variants::Single { index } => {
@@ -1167,7 +1208,7 @@ fn create_member_descriptions(&self, cx: &CodegenCx<'ll, 'tcx>)
                                           self.layout,
                                           &adt.variants[index],
                                           NoDiscriminant,
-                                          self.containing_scope,
+                                          self_metadata,
                                           self.span);
 
                 let member_descriptions =
@@ -1178,18 +1219,28 @@ fn create_member_descriptions(&self, cx: &CodegenCx<'ll, 'tcx>)
                                               member_descriptions);
                 vec![
                     MemberDescription {
-                        name: String::new(),
+                        name: if fallback {
+                            String::new()
+                        } else {
+                            adt.variants[index].name.as_str().to_string()
+                        },
                         type_metadata: variant_type_metadata,
                         offset: Size::ZERO,
                         size: self.layout.size,
                         align: self.layout.align,
-                        flags: DIFlags::FlagZero
+                        flags: DIFlags::FlagZero,
+                        discriminant: None,
                     }
                 ]
             }
             layout::Variants::Tagged { ref variants, .. } => {
-                let discriminant_info = RegularDiscriminant(self.discriminant_type_metadata
-                    .expect(""));
+                let discriminant_info = if fallback {
+                    RegularDiscriminant(self.discriminant_type_metadata
+                                        .expect(""))
+                } else {
+                    // This doesn't matter in this case.
+                    NoDiscriminant
+                };
                 (0..variants.len()).map(|i| {
                     let variant = self.layout.for_variant(cx, i);
                     let (variant_type_metadata, member_desc_factory) =
@@ -1197,7 +1248,7 @@ fn create_member_descriptions(&self, cx: &CodegenCx<'ll, 'tcx>)
                                               variant,
                                               &adt.variants[i],
                                               discriminant_info,
-                                              self.containing_scope,
+                                              self_metadata,
                                               self.span);
 
                     let member_descriptions = member_desc_factory
@@ -1207,75 +1258,127 @@ fn create_member_descriptions(&self, cx: &CodegenCx<'ll, 'tcx>)
                                                   variant_type_metadata,
                                                   member_descriptions);
                     MemberDescription {
-                        name: String::new(),
+                        name: if fallback {
+                            String::new()
+                        } else {
+                            adt.variants[i].name.as_str().to_string()
+                        },
                         type_metadata: variant_type_metadata,
                         offset: Size::ZERO,
-                        size: variant.size,
-                        align: variant.align,
-                        flags: DIFlags::FlagZero
+                        size: self.layout.size,
+                        align: self.layout.align,
+                        flags: DIFlags::FlagZero,
+                        discriminant: Some(self.layout.ty.ty_adt_def().unwrap()
+                                           .discriminant_for_variant(cx.tcx, i)
+                                           .val as u64),
                     }
                 }).collect()
             }
-            layout::Variants::NicheFilling { dataful_variant, ref niche_variants, .. } => {
-                let variant = self.layout.for_variant(cx, dataful_variant);
-                // Create a description of the non-null variant
-                let (variant_type_metadata, member_description_factory) =
-                    describe_enum_variant(cx,
-                                          variant,
-                                          &adt.variants[dataful_variant],
-                                          OptimizedDiscriminant,
-                                          self.containing_scope,
-                                          self.span);
+            layout::Variants::NicheFilling {
+                ref niche_variants,
+                niche_start,
+                ref variants,
+                dataful_variant,
+                ..
+            } => {
+                if fallback {
+                    let variant = self.layout.for_variant(cx, dataful_variant);
+                    // Create a description of the non-null variant
+                    let (variant_type_metadata, member_description_factory) =
+                        describe_enum_variant(cx,
+                                              variant,
+                                              &adt.variants[dataful_variant],
+                                              OptimizedDiscriminant,
+                                              self.containing_scope,
+                                              self.span);
 
-                let variant_member_descriptions =
-                    member_description_factory.create_member_descriptions(cx);
+                    let variant_member_descriptions =
+                        member_description_factory.create_member_descriptions(cx);
 
-                set_members_of_composite_type(cx,
-                                              variant_type_metadata,
-                                              variant_member_descriptions);
-
-                // Encode the information about the null variant in the union
-                // member's name.
-                let mut name = String::from("RUST$ENCODED$ENUM$");
-                // HACK(eddyb) the debuggers should just handle offset+size
-                // of discriminant instead of us having to recover its path.
-                // Right now it's not even going to work for `niche_start > 0`,
-                // and for multiple niche variants it only supports the first.
-                fn compute_field_path<'a, 'tcx>(cx: &CodegenCx<'a, 'tcx>,
-                                                name: &mut String,
-                                                layout: TyLayout<'tcx>,
-                                                offset: Size,
-                                                size: Size) {
-                    for i in 0..layout.fields.count() {
-                        let field_offset = layout.fields.offset(i);
-                        if field_offset > offset {
-                            continue;
-                        }
-                        let inner_offset = offset - field_offset;
-                        let field = layout.field(cx, i);
-                        if inner_offset + size <= field.size {
-                            write!(name, "{}$", i).unwrap();
-                            compute_field_path(cx, name, field, inner_offset, size);
+                    set_members_of_composite_type(cx,
+                                                  variant_type_metadata,
+                                                  variant_member_descriptions);
+
+                    // Encode the information about the null variant in the union
+                    // member's name.
+                    let mut name = String::from("RUST$ENCODED$ENUM$");
+                    // Right now it's not even going to work for `niche_start > 0`,
+                    // and for multiple niche variants it only supports the first.
+                    fn compute_field_path<'a, 'tcx>(cx: &CodegenCx<'a, 'tcx>,
+                                                    name: &mut String,
+                                                    layout: TyLayout<'tcx>,
+                                                    offset: Size,
+                                                    size: Size) {
+                        for i in 0..layout.fields.count() {
+                            let field_offset = layout.fields.offset(i);
+                            if field_offset > offset {
+                                continue;
+                            }
+                            let inner_offset = offset - field_offset;
+                            let field = layout.field(cx, i);
+                            if inner_offset + size <= field.size {
+                                write!(name, "{}$", i).unwrap();
+                                compute_field_path(cx, name, field, inner_offset, size);
+                            }
                         }
                     }
+                    compute_field_path(cx, &mut name,
+                                       self.layout,
+                                       self.layout.fields.offset(0),
+                                       self.layout.field(cx, 0).size);
+                    name.push_str(&adt.variants[*niche_variants.start()].name.as_str());
+
+                    // Create the (singleton) list of descriptions of union members.
+                    vec![
+                        MemberDescription {
+                            name,
+                            type_metadata: variant_type_metadata,
+                            offset: Size::ZERO,
+                            size: variant.size,
+                            align: variant.align,
+                            flags: DIFlags::FlagZero,
+                            discriminant: None,
+                        }
+                    ]
+                } else {
+                    (0..variants.len()).map(|i| {
+                        let variant = self.layout.for_variant(cx, i);
+                        let (variant_type_metadata, member_desc_factory) =
+                            describe_enum_variant(cx,
+                                                  variant,
+                                                  &adt.variants[i],
+                                                  OptimizedDiscriminant,
+                                                  self_metadata,
+                                                  self.span);
+
+                        let member_descriptions = member_desc_factory
+                            .create_member_descriptions(cx);
+
+                        set_members_of_composite_type(cx,
+                                                      variant_type_metadata,
+                                                      member_descriptions);
+
+                        let niche_value = if i == dataful_variant {
+                            None
+                        } else {
+                            let niche = (i as u128)
+                                .wrapping_sub(*niche_variants.start() as u128)
+                                .wrapping_add(niche_start);
+                            assert_eq!(niche as u64 as u128, niche);
+                            Some(niche as u64)
+                        };
+
+                        MemberDescription {
+                            name: adt.variants[i].name.as_str().to_string(),
+                            type_metadata: variant_type_metadata,
+                            offset: Size::ZERO,
+                            size: self.layout.size,
+                            align: self.layout.align,
+                            flags: DIFlags::FlagZero,
+                            discriminant: niche_value,
+                        }
+                    }).collect()
                 }
-                compute_field_path(cx, &mut name,
-                                   self.layout,
-                                   self.layout.fields.offset(0),
-                                   self.layout.field(cx, 0).size);
-                name.push_str(&adt.variants[*niche_variants.start()].name.as_str());
-
-                // Create the (singleton) list of descriptions of union members.
-                vec![
-                    MemberDescription {
-                        name,
-                        type_metadata: variant_type_metadata,
-                        offset: Size::ZERO,
-                        size: variant.size,
-                        align: variant.align,
-                        flags: DIFlags::FlagZero
-                    }
-                ]
             }
         }
     }
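The fallback branch above does not describe the niche variant with real debuginfo; it encodes the location of the niche field and the name of the first niche variant into the union member's name, which debugger support code (e.g. the GDB pretty printers) parses back out. A minimal sketch of what that encoding ends up looking like, for an assumed example enum rather than anything emitted verbatim by this code:

    // compute_field_path appends the '$'-separated indices of the fields on the
    // way to the niche, and the niche variant's name is appended last.
    enum E<'a> {
        A(&'a u32), // dataful variant: the reference supplies the niche
        B,          // niche ("null") variant
    }
    // The single union member would get a name along the lines of
    //     RUST$ENCODED$ENUM$0$B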
@@ -1297,14 +1400,19 @@ fn create_member_descriptions(&self, cx: &CodegenCx<'ll, 'tcx>)
             let (size, align) = cx.size_and_align_of(ty);
             MemberDescription {
                 name: name.to_string(),
-                type_metadata: match self.discriminant_type_metadata {
-                    Some(metadata) if i == 0 => metadata,
-                    _ => type_metadata(cx, ty, self.span)
+                type_metadata: if use_enum_fallback(cx) {
+                    match self.discriminant_type_metadata {
+                        Some(metadata) if i == 0 => metadata,
+                        _ => type_metadata(cx, ty, self.span)
+                    }
+                } else {
+                    type_metadata(cx, ty, self.span)
                 },
                 offset: self.offsets[i],
                 size,
                 align,
-                flags: DIFlags::FlagZero
+                flags: DIFlags::FlagZero,
+                discriminant: None,
             }
         }).collect()
     }
@@ -1317,10 +1425,10 @@ enum EnumDiscriminantInfo<'ll> {
     NoDiscriminant
 }
 
-// Returns a tuple of (1) type_metadata_stub of the variant, (2) the llvm_type
-// of the variant, and (3) a MemberDescriptionFactory for producing the
-// descriptions of the fields of the variant. This is a rudimentary version of a
-// full RecursiveTypeDescription.
+// Returns a tuple of (1) type_metadata_stub of the variant, (2) a
+// MemberDescriptionFactory for producing the descriptions of the
+// fields of the variant. This is a rudimentary version of a full
+// RecursiveTypeDescription.
 fn describe_enum_variant(
     cx: &CodegenCx<'ll, 'tcx>,
     layout: layout::TyLayout<'tcx>,
@@ -1343,29 +1451,46 @@ fn describe_enum_variant(
                                            unique_type_id,
                                            Some(containing_scope));
 
-    // If this is not a univariant enum, there is also the discriminant field.
-    let (discr_offset, discr_arg) = match discriminant_info {
-        RegularDiscriminant(_) => {
-            // We have the layout of an enum variant, we need the layout of the outer enum
-            let enum_layout = cx.layout_of(layout.ty);
-            (Some(enum_layout.fields.offset(0)),
-             Some(("RUST$ENUM$DISR".to_owned(), enum_layout.field(cx, 0).ty)))
-        }
-        _ => (None, None),
-    };
-    let offsets = discr_offset.into_iter().chain((0..layout.fields.count()).map(|i| {
-        layout.fields.offset(i)
-    })).collect();
-
     // Build an array of (field name, field type) pairs to be captured in the factory closure.
-    let args = discr_arg.into_iter().chain((0..layout.fields.count()).map(|i| {
-        let name = if variant.ctor_kind == CtorKind::Fn {
-            format!("__{}", i)
-        } else {
-            variant.fields[i].ident.to_string()
+    let (offsets, args) = if use_enum_fallback(cx) {
+        // If this is not a univariant enum, there is also the discriminant field.
+        let (discr_offset, discr_arg) = match discriminant_info {
+            RegularDiscriminant(_) => {
+                // We have the layout of an enum variant, we need the layout of the outer enum
+                let enum_layout = cx.layout_of(layout.ty);
+                (Some(enum_layout.fields.offset(0)),
+                 Some(("RUST$ENUM$DISR".to_owned(), enum_layout.field(cx, 0).ty)))
+            }
+            _ => (None, None),
         };
-        (name, layout.field(cx, i).ty)
-    })).collect();
+        (
+            discr_offset.into_iter().chain((0..layout.fields.count()).map(|i| {
+                layout.fields.offset(i)
+            })).collect(),
+            discr_arg.into_iter().chain((0..layout.fields.count()).map(|i| {
+                let name = if variant.ctor_kind == CtorKind::Fn {
+                    format!("__{}", i)
+                } else {
+                    variant.fields[i].ident.to_string()
+                };
+                (name, layout.field(cx, i).ty)
+            })).collect()
+        )
+    } else {
+        (
+            (0..layout.fields.count()).map(|i| {
+                layout.fields.offset(i)
+            }).collect(),
+            (0..layout.fields.count()).map(|i| {
+                let name = if variant.ctor_kind == CtorKind::Fn {
+                    format!("__{}", i)
+                } else {
+                    variant.fields[i].ident.to_string()
+                };
+                (name, layout.field(cx, i).ty)
+            }).collect()
+        )
+    };
 
     let member_description_factory =
         VariantMDF(VariantMemberDescriptionFactory {
@@ -1401,22 +1526,22 @@ fn prepare_enum_metadata(
     // <unknown>
     let file_metadata = unknown_file_metadata(cx);
 
-    let def = enum_type.ty_adt_def().unwrap();
-    let enumerators_metadata: Vec<_> = def.discriminants(cx.tcx)
-        .zip(&def.variants)
-        .map(|(discr, v)| {
-            let name = SmallCStr::new(&v.name.as_str());
-            unsafe {
-                Some(llvm::LLVMRustDIBuilderCreateEnumerator(
-                    DIB(cx),
-                    name.as_ptr(),
-                    // FIXME: what if enumeration has i128 discriminant?
-                    discr.val as u64))
-            }
-        })
-        .collect();
-
     let discriminant_type_metadata = |discr: layout::Primitive| {
+        let def = enum_type.ty_adt_def().unwrap();
+        let enumerators_metadata: Vec<_> = def.discriminants(cx.tcx)
+            .zip(&def.variants)
+            .map(|(discr, v)| {
+                let name = SmallCStr::new(&v.name.as_str());
+                unsafe {
+                    Some(llvm::LLVMRustDIBuilderCreateEnumerator(
+                        DIB(cx),
+                        name.as_ptr(),
+                        // FIXME: what if enumeration has i128 discriminant?
+                        discr.val as u64))
+                }
+            })
+            .collect();
+
         let disr_type_key = (enum_def_id, discr);
         let cached_discriminant_type_metadata = debug_context(cx).created_enum_disr_types
                                                                  .borrow()
@@ -1441,7 +1566,7 @@ fn prepare_enum_metadata(
                         discriminant_size.bits(),
                         discriminant_align.abi_bits() as u32,
                         create_DIArray(DIB(cx), &enumerators_metadata),
-                        discriminant_base_type_metadata)
+                        discriminant_base_type_metadata, true)
                 };
 
                 debug_context(cx).created_enum_disr_types
@@ -1455,16 +1580,10 @@ fn prepare_enum_metadata(
 
     let layout = cx.layout_of(enum_type);
 
-    let discriminant_type_metadata = match layout.variants {
-        layout::Variants::Single { .. } |
-        layout::Variants::NicheFilling { .. } => None,
-        layout::Variants::Tagged { ref tag, .. } => {
-            Some(discriminant_type_metadata(tag.value))
-        }
-    };
-
-    if let (&layout::Abi::Scalar(_), Some(discr)) = (&layout.abi, discriminant_type_metadata) {
-        return FinalMetadata(discr);
+    match (&layout.abi, &layout.variants) {
+        (&layout::Abi::Scalar(_), &layout::Variants::Tagged {ref tag, .. }) =>
+            return FinalMetadata(discriminant_type_metadata(tag.value)),
+        _ => {}
     }
 
     let (enum_type_size, enum_type_align) = layout.size_and_align();
@@ -1473,30 +1592,145 @@ fn prepare_enum_metadata(
     let unique_type_id_str = SmallCStr::new(
         debug_context(cx).type_map.borrow().get_unique_type_id_as_string(unique_type_id)
     );
-    let enum_metadata = unsafe {
-        llvm::LLVMRustDIBuilderCreateUnionType(
-        DIB(cx),
-        containing_scope,
-        enum_name.as_ptr(),
-        file_metadata,
-        UNKNOWN_LINE_NUMBER,
-        enum_type_size.bits(),
-        enum_type_align.abi_bits() as u32,
-        DIFlags::FlagZero,
-        None,
-        0, // RuntimeLang
-        unique_type_id_str.as_ptr())
+
+    if use_enum_fallback(cx) {
+        let discriminant_type_metadata = match layout.variants {
+            layout::Variants::Single { .. } |
+            layout::Variants::NicheFilling { .. } => None,
+            layout::Variants::Tagged { ref tag, .. } => {
+                Some(discriminant_type_metadata(tag.value))
+            }
+        };
+
+        let enum_metadata = unsafe {
+            llvm::LLVMRustDIBuilderCreateUnionType(
+                DIB(cx),
+                containing_scope,
+                enum_name.as_ptr(),
+                file_metadata,
+                UNKNOWN_LINE_NUMBER,
+                enum_type_size.bits(),
+                enum_type_align.abi_bits() as u32,
+                DIFlags::FlagZero,
+                None,
+                0, // RuntimeLang
+                unique_type_id_str.as_ptr())
+        };
+
+        return create_and_register_recursive_type_forward_declaration(
+            cx,
+            enum_type,
+            unique_type_id,
+            enum_metadata,
+            enum_metadata,
+            EnumMDF(EnumMemberDescriptionFactory {
+                enum_type,
+                layout,
+                discriminant_type_metadata,
+                containing_scope,
+                span,
+            }),
+        );
+    }
+
+    let discriminator_metadata = match &layout.variants {
+        // A single-variant enum has no discriminant.
+        &layout::Variants::Single { .. } => None,
+
+        &layout::Variants::NicheFilling { ref niche, .. } => {
+            // Find the integer type of the correct size.
+            let size = niche.value.size(cx);
+            let align = niche.value.align(cx);
+
+            let discr_type = match niche.value {
+                layout::Int(t, _) => t,
+                layout::Float(layout::FloatTy::F32) => Integer::I32,
+                layout::Float(layout::FloatTy::F64) => Integer::I64,
+                layout::Pointer => cx.data_layout().ptr_sized_integer(),
+            }.to_ty(cx.tcx, false);
+
+            let discr_metadata = basic_type_metadata(cx, discr_type);
+            unsafe {
+                Some(llvm::LLVMRustDIBuilderCreateMemberType(
+                    DIB(cx),
+                    containing_scope,
+                    ptr::null_mut(),
+                    file_metadata,
+                    UNKNOWN_LINE_NUMBER,
+                    size.bits(),
+                    align.abi_bits() as u32,
+                    layout.fields.offset(0).bits(),
+                    DIFlags::FlagArtificial,
+                    discr_metadata))
+            }
+        },
+
+        &layout::Variants::Tagged { ref tag, .. } => {
+            let discr_type = tag.value.to_ty(cx.tcx);
+            let (size, align) = cx.size_and_align_of(discr_type);
+
+            let discr_metadata = basic_type_metadata(cx, discr_type);
+            unsafe {
+                Some(llvm::LLVMRustDIBuilderCreateMemberType(
+                    DIB(cx),
+                    containing_scope,
+                    ptr::null_mut(),
+                    file_metadata,
+                    UNKNOWN_LINE_NUMBER,
+                    size.bits(),
+                    align.abi_bits() as u32,
+                    layout.fields.offset(0).bits(),
+                    DIFlags::FlagArtificial,
+                    discr_metadata))
+            }
+        },
+    };
+
+    let empty_array = create_DIArray(DIB(cx), &[]);
+    let variant_part = unsafe {
+        llvm::LLVMRustDIBuilderCreateVariantPart(
+            DIB(cx),
+            containing_scope,
+            ptr::null_mut(),
+            file_metadata,
+            UNKNOWN_LINE_NUMBER,
+            enum_type_size.bits(),
+            enum_type_align.abi_bits() as u32,
+            DIFlags::FlagZero,
+            discriminator_metadata,
+            empty_array,
+            unique_type_id_str.as_ptr())
+    };
+
+    // The variant part must be wrapped in a struct according to DWARF.
+    let type_array = create_DIArray(DIB(cx), &[Some(variant_part)]);
+    let struct_wrapper = unsafe {
+        llvm::LLVMRustDIBuilderCreateStructType(
+            DIB(cx),
+            Some(containing_scope),
+            enum_name.as_ptr(),
+            file_metadata,
+            UNKNOWN_LINE_NUMBER,
+            enum_type_size.bits(),
+            enum_type_align.abi_bits() as u32,
+            DIFlags::FlagZero,
+            None,
+            type_array,
+            0,
+            None,
+            unique_type_id_str.as_ptr())
     };
 
     return create_and_register_recursive_type_forward_declaration(
         cx,
         enum_type,
         unique_type_id,
-        enum_metadata,
+        struct_wrapper,
+        variant_part,
         EnumMDF(EnumMemberDescriptionFactory {
             enum_type,
             layout,
-            discriminant_type_metadata,
+            discriminant_type_metadata: None,
             containing_scope,
             span,
         }),
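When the fallback is not needed, the code above instead builds the DWARF 5 style representation: an artificial discriminator member, a variant part holding one member per variant together with its discriminant value, and a wrapping struct. A hedged sketch of the resulting shape for an assumed example enum (approximate, not literal DIBuilder output):

    enum Shape {
        Circle { radius: f64 },
        Square { side: f64 },
    }
    // DW_TAG_structure_type "Shape"
    //   DW_TAG_variant_part
    //     discriminator: artificial DW_TAG_member (the tag, or the niche field)
    //     DW_TAG_variant (discriminant value 0) -> member "Circle"
    //     DW_TAG_variant (discriminant value 1) -> member "Square"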
@@ -1565,7 +1799,7 @@ fn set_members_of_composite_type(cx: &CodegenCx<'ll, '_>,
         .map(|member_description| {
             let member_name = CString::new(member_description.name).unwrap();
             unsafe {
-                Some(llvm::LLVMRustDIBuilderCreateMemberType(
+                Some(llvm::LLVMRustDIBuilderCreateVariantMemberType(
                     DIB(cx),
                     composite_type_metadata,
                     member_name.as_ptr(),
@@ -1574,6 +1808,10 @@ fn set_members_of_composite_type(cx: &CodegenCx<'ll, '_>,
                     member_description.size.bits(),
                     member_description.align.abi_bits() as u32,
                     member_description.offset.bits(),
+                    match member_description.discriminant {
+                        None => None,
+                        Some(value) => Some(C_u64(cx, value)),
+                    },
                     member_description.flags,
                     member_description.type_metadata))
             }
index f5abb527e430fa91c41a6ce4f6c12c9c3a9b21d1..eb5ae81b2184024ad6191c52580dc348466a87a2 100644 (file)
@@ -173,6 +173,7 @@ pub fn push_debuginfo_type_name<'a, 'tcx>(cx: &CodegenCx<'a, 'tcx>,
         ty::Infer(_) |
         ty::UnnormalizedProjection(..) |
         ty::Projection(..) |
+        ty::Bound(..) |
         ty::Opaque(..) |
         ty::GeneratorWitness(..) |
         ty::Param(_) => {
index 03244c18ac3e4f3d15e6be674228431fe4a91ccf..e44bd2d23672fcb371facfb0459b2a351595414d 100644 (file)
@@ -477,8 +477,8 @@ pub fn codegen_intrinsic_call(
                 "load" => {
                     let ty = substs.type_at(0);
                     if int_type_width_signed(ty, cx).is_some() {
-                        let align = cx.align_of(ty);
-                        bx.atomic_load(args[0].immediate(), order, align)
+                        let size = cx.size_of(ty);
+                        bx.atomic_load(args[0].immediate(), order, size)
                     } else {
                         return invalid_monomorphization(ty);
                     }
@@ -487,8 +487,8 @@ pub fn codegen_intrinsic_call(
                 "store" => {
                     let ty = substs.type_at(0);
                     if int_type_width_signed(ty, cx).is_some() {
-                        let align = cx.align_of(ty);
-                        bx.atomic_store(args[1].immediate(), args[0].immediate(), order, align);
+                        let size = cx.size_of(ty);
+                        bx.atomic_store(args[1].immediate(), args[0].immediate(), order, size);
                         return;
                     } else {
                         return invalid_monomorphization(ty);
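The two hunks above change the "load" and "store" atomic intrinsics to pass the operand type's size rather than its alignment, presumably because the LLVM atomic operation is keyed on the access size. For orientation only, this is the kind of stable code that bottoms out in these intrinsics (a generic sketch, not specific to this commit):

    use std::sync::atomic::{AtomicUsize, Ordering};

    static FLAG: AtomicUsize = AtomicUsize::new(0);

    fn set_and_read() -> usize {
        FLAG.store(1, Ordering::SeqCst); // lowers to the "store" intrinsic above
        FLAG.load(Ordering::SeqCst)      // lowers to the "load" intrinsic above
    }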
index 63a8ab077e5ae494c90d00c14ce0b24da08288a0..5d9bae5412e1a38fbe9db46214311b51355ea81b 100644 (file)
@@ -71,7 +71,6 @@
 
 pub use llvm_util::target_features;
 use std::any::Any;
-use std::path::{PathBuf};
 use std::sync::mpsc;
 use rustc_data_structures::sync::Lrc;
 
 use rustc::util::nodemap::{FxHashSet, FxHashMap};
 use rustc::util::profiling::ProfileCategory;
 use rustc_mir::monomorphize;
+use rustc_codegen_utils::{CompiledModule, ModuleKind};
 use rustc_codegen_utils::codegen_backend::CodegenBackend;
 use rustc_data_structures::svh::Svh;
 
 mod diagnostics;
 
 mod back {
-    pub use rustc_codegen_utils::symbol_names;
     mod archive;
     pub mod bytecode;
-    mod command;
-    pub mod linker;
     pub mod link;
     pub mod lto;
-    pub mod symbol_export;
     pub mod write;
     mod rpath;
     pub mod wasm;
@@ -194,15 +190,15 @@ fn metadata_loader(&self) -> Box<dyn MetadataLoader + Sync> {
     }
 
     fn provide(&self, providers: &mut ty::query::Providers) {
-        back::symbol_names::provide(providers);
-        back::symbol_export::provide(providers);
-        base::provide(providers);
+        rustc_codegen_utils::symbol_export::provide(providers);
+        rustc_codegen_utils::symbol_names::provide(providers);
+        base::provide_both(providers);
         attributes::provide(providers);
     }
 
     fn provide_extern(&self, providers: &mut ty::query::Providers) {
-        back::symbol_export::provide_extern(providers);
-        base::provide_extern(providers);
+        rustc_codegen_utils::symbol_export::provide_extern(providers);
+        base::provide_both(providers);
         attributes::provide_extern(providers);
     }
 
@@ -281,13 +277,6 @@ struct CachedModuleCodegen {
     source: WorkProduct,
 }
 
-#[derive(Copy, Clone, Debug, PartialEq)]
-enum ModuleKind {
-    Regular,
-    Metadata,
-    Allocator,
-}
-
 impl ModuleCodegen {
     fn into_compiled_module(self,
                             emit_obj: bool,
@@ -321,15 +310,6 @@ fn into_compiled_module(self,
     }
 }
 
-#[derive(Debug)]
-struct CompiledModule {
-    name: String,
-    kind: ModuleKind,
-    object: Option<PathBuf>,
-    bytecode: Option<PathBuf>,
-    bytecode_compressed: Option<PathBuf>,
-}
-
 struct ModuleLlvm {
     llcx: &'static mut llvm::Context,
     llmod_raw: *const llvm::Module,
@@ -377,7 +357,7 @@ struct CodegenResults {
     crate_hash: Svh,
     metadata: rustc::middle::cstore::EncodedMetadata,
     windows_subsystem: Option<String>,
-    linker_info: back::linker::LinkerInfo,
+    linker_info: rustc_codegen_utils::linker::LinkerInfo,
     crate_info: CrateInfo,
 }
 
index 0b98fa4eaf55139ebce5967de1a234a26379b759..12d4670e4be4bed52973273fdaf37e5b3bb2b598 100644 (file)
@@ -1307,6 +1307,19 @@ pub fn LLVMRustDIBuilderCreateMemberType(Builder: &DIBuilder<'a>,
                                              Ty: &'a DIType)
                                              -> &'a DIDerivedType;
 
+    pub fn LLVMRustDIBuilderCreateVariantMemberType(Builder: &DIBuilder<'a>,
+                                                    Scope: &'a DIScope,
+                                                    Name: *const c_char,
+                                                    File: &'a DIFile,
+                                                    LineNumber: c_uint,
+                                                    SizeInBits: u64,
+                                                    AlignInBits: u32,
+                                                    OffsetInBits: u64,
+                                                    Discriminant: Option<&'a Value>,
+                                                    Flags: DIFlags,
+                                                    Ty: &'a DIType)
+                                                    -> &'a DIType;
+
     pub fn LLVMRustDIBuilderCreateLexicalBlock(Builder: &DIBuilder<'a>,
                                                Scope: &'a DIScope,
                                                File: &'a DIFile,
@@ -1384,7 +1397,8 @@ pub fn LLVMRustDIBuilderCreateEnumerationType(Builder: &DIBuilder<'a>,
                                                   SizeInBits: u64,
                                                   AlignInBits: u32,
                                                   Elements: &'a DIArray,
-                                                  ClassType: &'a DIType)
+                                                  ClassType: &'a DIType,
+                                                  IsFixed: bool)
                                                   -> &'a DIType;
 
     pub fn LLVMRustDIBuilderCreateUnionType(Builder: &DIBuilder<'a>,
@@ -1400,6 +1414,19 @@ pub fn LLVMRustDIBuilderCreateUnionType(Builder: &DIBuilder<'a>,
                                             UniqueId: *const c_char)
                                             -> &'a DIType;
 
+    pub fn LLVMRustDIBuilderCreateVariantPart(Builder: &DIBuilder<'a>,
+                                              Scope: &'a DIScope,
+                                              Name: *const c_char,
+                                              File: &'a DIFile,
+                                              LineNo: c_uint,
+                                              SizeInBits: u64,
+                                              AlignInBits: u32,
+                                              Flags: DIFlags,
+                                              Discriminator: Option<&'a DIDerivedType>,
+                                              Elements: &'a DIArray,
+                                              UniqueId: *const c_char)
+                                              -> &'a DIDerivedType;
+
     pub fn LLVMSetUnnamedAddr(GlobalVar: &Value, UnnamedAddr: Bool);
 
     pub fn LLVMRustDIBuilderCreateTemplateTypeParameter(Builder: &DIBuilder<'a>,
@@ -1599,8 +1626,6 @@ pub fn LLVMRustBuildOperandBundleDef(Name: *const c_char,
     pub fn LLVMRustModuleBufferFree(p: &'static mut ModuleBuffer);
     pub fn LLVMRustModuleCost(M: &Module) -> u64;
 
-    pub fn LLVMRustThinLTOAvailable() -> bool;
-    pub fn LLVMRustPGOAvailable() -> bool;
     pub fn LLVMRustThinLTOBufferCreate(M: &Module) -> &'static mut ThinLTOBuffer;
     pub fn LLVMRustThinLTOBufferFree(M: &'static mut ThinLTOBuffer);
     pub fn LLVMRustThinLTOBufferPtr(M: &ThinLTOBuffer) -> *const c_char;
index a63cbe70df611a0983ea38bf741a2ad916412387..a93c6faaf7ba927658a0c6c030bc1b59d24e8ccf 100644 (file)
@@ -219,7 +219,8 @@ fn visit_local(&mut self,
                 self.assign(local, location);
             }
 
-            PlaceContext::NonUse(_) => {}
+            PlaceContext::NonUse(_) |
+            PlaceContext::MutatingUse(MutatingUseContext::Retag) => {}
 
             PlaceContext::NonMutatingUse(NonMutatingUseContext::Copy) |
             PlaceContext::NonMutatingUse(NonMutatingUseContext::Move) => {
index d98b7869ae98e3dfa9a54b8895a5390559db503f..a7f4c48c89bd6c99ef8e7a559e702f14d0033b26 100644 (file)
@@ -642,14 +642,46 @@ fn codegen_terminator(&mut self,
                     (&args[..], None)
                 };
 
-                for (i, arg) in first_args.iter().enumerate() {
+                'make_args: for (i, arg) in first_args.iter().enumerate() {
                     let mut op = self.codegen_operand(&bx, arg);
+
                     if let (0, Some(ty::InstanceDef::Virtual(_, idx))) = (i, def) {
-                        if let Pair(data_ptr, meta) = op.val {
-                            llfn = Some(meth::VirtualIndex::from_index(idx)
-                                .get_fn(&bx, meta, &fn_ty));
-                            llargs.push(data_ptr);
-                            continue;
+                        if let Pair(..) = op.val {
+                            // In the case of Rc<Self>, we need to explicitly pass a
+                            // *mut RcBox<Self> with a Scalar (not ScalarPair) ABI. This is a hack
+                            // that is understood elsewhere in the compiler as a method on
+                            // `dyn Trait`.
+                            // To get a `*mut RcBox<Self>`, we just keep unwrapping newtypes until
+                            // we get a value of a built-in pointer type
+                            'descend_newtypes: while !op.layout.ty.is_unsafe_ptr()
+                                            && !op.layout.ty.is_region_ptr()
+                            {
+                                'iter_fields: for i in 0..op.layout.fields.count() {
+                                    let field = op.extract_field(&bx, i);
+                                    if !field.layout.is_zst() {
+                                        // we found the one non-zero-sized field that is allowed
+                                        // now find *its* non-zero-sized field, or stop if it's a
+                                        // pointer
+                                        op = field;
+                                        continue 'descend_newtypes
+                                    }
+                                }
+
+                                span_bug!(span, "receiver has no non-zero-sized fields {:?}", op);
+                            }
+
+                            // now that we have `*dyn Trait` or `&dyn Trait`, split it up into its
+                            // data pointer and vtable. Look up the method in the vtable, and pass
+                            // the data pointer as the first argument
+                            match op.val {
+                                Pair(data_ptr, meta) => {
+                                    llfn = Some(meth::VirtualIndex::from_index(idx)
+                                        .get_fn(&bx, meta, &fn_ty));
+                                    llargs.push(data_ptr);
+                                    continue 'make_args
+                                }
+                                other => bug!("expected a Pair, got {:?}", other)
+                            }
                         } else if let Ref(data_ptr, Some(meta), _) = op.val {
                             // by-value dynamic dispatch
                             llfn = Some(meth::VirtualIndex::from_index(idx)
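The 'descend_newtypes' loop above exists so that a by-value Rc<Self>/Arc<Self> receiver on a trait object can be peeled down to its single non-zero-sized field (ultimately a raw pointer) before the vtable lookup. A minimal sketch of the kind of call this enables; on the nightly toolchain of this era the receiver may still have required a feature gate, so treat the snippet as illustrative rather than tied to this commit:

    use std::rc::Rc;

    trait Greeter {
        fn greet(self: Rc<Self>);
    }

    struct English;

    impl Greeter for English {
        fn greet(self: Rc<Self>) {
            println!("hello");
        }
    }

    fn main() {
        let g: Rc<dyn Greeter> = Rc::new(English);
        g.greet(); // virtual call: codegen unwraps Rc's newtypes down to the data pointer
    }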
index 1f66ee36170dc510d119e536ba3fa0925930f107..586a490774023f9f000a68b8c63da9eb2b4636c1 100644 (file)
@@ -87,8 +87,8 @@ pub fn scalar_to_llvm(
 
 pub fn const_alloc_to_llvm(cx: &CodegenCx<'ll, '_>, alloc: &Allocation) -> &'ll Value {
     let mut llvals = Vec::with_capacity(alloc.relocations.len() + 1);
-    let layout = cx.data_layout();
-    let pointer_size = layout.pointer_size.bytes() as usize;
+    let dl = cx.data_layout();
+    let pointer_size = dl.pointer_size.bytes() as usize;
 
     let mut next_offset = 0;
     for &(offset, ((), alloc_id)) in alloc.relocations.iter() {
@@ -99,7 +99,7 @@ pub fn const_alloc_to_llvm(cx: &CodegenCx<'ll, '_>, alloc: &Allocation) -> &'ll
             llvals.push(C_bytes(cx, &alloc.bytes[next_offset..offset]));
         }
         let ptr_offset = read_target_uint(
-            layout.endian,
+            dl.endian,
             &alloc.bytes[offset..(offset + pointer_size)],
         ).expect("const_alloc_to_llvm: could not read relocation pointer") as u64;
         llvals.push(scalar_to_llvm(
index 93be0074f6e9b57cd24992958ad84a72af72576d..c8c8e02bf05f731134d9819de833404becd339dc 100644 (file)
@@ -84,21 +84,18 @@ pub fn codegen_statement(&mut self,
                 }).collect();
 
                 let input_vals = inputs.iter()
-                    .try_fold(Vec::with_capacity(inputs.len()), |mut acc, input| {
+                    .fold(Vec::with_capacity(inputs.len()), |mut acc, (span, input)| {
                         let op = self.codegen_operand(&bx, input);
                         if let OperandValue::Immediate(_) = op.val {
                             acc.push(op.immediate());
-                            Ok(acc)
                         } else {
-                            Err(op)
+                            span_err!(bx.sess(), span.to_owned(), E0669,
+                                     "invalid value for constraint in inline assembly");
                         }
+                        acc
                 });
 
-                if input_vals.is_err() {
-                   span_err!(bx.sess(), statement.source_info.span, E0669,
-                             "invalid value for constraint in inline assembly");
-                } else {
-                    let input_vals = input_vals.unwrap();
+                if input_vals.len() == inputs.len() {
                     let res = asm::codegen_inline_asm(&bx, asm, outputs, input_vals);
                     if !res {
                         span_err!(bx.sess(), statement.source_info.span, E0668,
@@ -109,7 +106,7 @@ pub fn codegen_statement(&mut self,
             }
             mir::StatementKind::FakeRead(..) |
             mir::StatementKind::EndRegion(_) |
-            mir::StatementKind::Validate(..) |
+            mir::StatementKind::Retag { .. } |
             mir::StatementKind::AscribeUserType(..) |
             mir::StatementKind::Nop => bx,
         }
index 03ded64e642355706e8c7b5471028a0ec8a5e30e..b01d7e3a776f7145b791bfe7670db6f55636f6f4 100644 (file)
@@ -285,7 +285,7 @@ fn llvm_type<'a>(&self, cx: &CodegenCx<'a, 'tcx>) -> &'a Type {
 
         debug!("llvm_type({:#?})", self);
 
-        assert!(!self.ty.has_escaping_regions(), "{:?} has escaping regions", self.ty);
+        assert!(!self.ty.has_escaping_bound_vars(), "{:?} has escaping bound vars", self.ty);
 
         // Make sure lifetimes are erased, to avoid generating distinct LLVM
         // types for Rust types that only differ in the choice of lifetimes.
index a1f4a323f849ee3feecf5ec2f11526aee2bdb6b3..4c57e97841409a0d8a344eb797b823ce87177510 100644 (file)
@@ -13,11 +13,13 @@ test = false
 flate2 = "1.0"
 log = "0.4"
 
+serialize = { path = "../libserialize" }
 syntax = { path = "../libsyntax" }
 syntax_pos = { path = "../libsyntax_pos" }
 rustc = { path = "../librustc" }
+rustc_allocator = { path = "../librustc_allocator" }
 rustc_target = { path = "../librustc_target" }
 rustc_data_structures = { path = "../librustc_data_structures" }
+rustc_metadata = { path = "../librustc_metadata" }
 rustc_mir = { path = "../librustc_mir" }
 rustc_incremental = { path = "../librustc_incremental" }
-rustc_metadata_utils = { path = "../librustc_metadata_utils" }
diff --git a/src/librustc_codegen_utils/command.rs b/src/librustc_codegen_utils/command.rs
new file mode 100644 (file)
index 0000000..9ebbdd7
--- /dev/null
@@ -0,0 +1,175 @@
+// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+//! A thin wrapper around `Command` in the standard library which allows us to
+//! read the arguments that are built up.
+
+use std::ffi::{OsStr, OsString};
+use std::fmt;
+use std::io;
+use std::mem;
+use std::process::{self, Output};
+
+use rustc_target::spec::LldFlavor;
+
+#[derive(Clone)]
+pub struct Command {
+    program: Program,
+    args: Vec<OsString>,
+    env: Vec<(OsString, OsString)>,
+}
+
+#[derive(Clone)]
+enum Program {
+    Normal(OsString),
+    CmdBatScript(OsString),
+    Lld(OsString, LldFlavor)
+}
+
+impl Command {
+    pub fn new<P: AsRef<OsStr>>(program: P) -> Command {
+        Command::_new(Program::Normal(program.as_ref().to_owned()))
+    }
+
+    pub fn bat_script<P: AsRef<OsStr>>(program: P) -> Command {
+        Command::_new(Program::CmdBatScript(program.as_ref().to_owned()))
+    }
+
+    pub fn lld<P: AsRef<OsStr>>(program: P, flavor: LldFlavor) -> Command {
+        Command::_new(Program::Lld(program.as_ref().to_owned(), flavor))
+    }
+
+    fn _new(program: Program) -> Command {
+        Command {
+            program,
+            args: Vec::new(),
+            env: Vec::new(),
+        }
+    }
+
+    pub fn arg<P: AsRef<OsStr>>(&mut self, arg: P) -> &mut Command {
+        self._arg(arg.as_ref());
+        self
+    }
+
+    pub fn args<I>(&mut self, args: I) -> &mut Command
+        where I: IntoIterator,
+              I::Item: AsRef<OsStr>,
+    {
+        for arg in args {
+            self._arg(arg.as_ref());
+        }
+        self
+    }
+
+    fn _arg(&mut self, arg: &OsStr) {
+        self.args.push(arg.to_owned());
+    }
+
+    pub fn env<K, V>(&mut self, key: K, value: V) -> &mut Command
+        where K: AsRef<OsStr>,
+              V: AsRef<OsStr>
+    {
+        self._env(key.as_ref(), value.as_ref());
+        self
+    }
+
+    fn _env(&mut self, key: &OsStr, value: &OsStr) {
+        self.env.push((key.to_owned(), value.to_owned()));
+    }
+
+    pub fn output(&mut self) -> io::Result<Output> {
+        self.command().output()
+    }
+
+    pub fn command(&self) -> process::Command {
+        let mut ret = match self.program {
+            Program::Normal(ref p) => process::Command::new(p),
+            Program::CmdBatScript(ref p) => {
+                let mut c = process::Command::new("cmd");
+                c.arg("/c").arg(p);
+                c
+            }
+            Program::Lld(ref p, flavor) => {
+                let mut c = process::Command::new(p);
+                c.arg("-flavor").arg(match flavor {
+                    LldFlavor::Wasm => "wasm",
+                    LldFlavor::Ld => "gnu",
+                    LldFlavor::Link => "link",
+                    LldFlavor::Ld64 => "darwin",
+                });
+                c
+            }
+        };
+        ret.args(&self.args);
+        ret.envs(self.env.clone());
+        return ret
+    }
+
+    // extensions
+
+    pub fn get_args(&self) -> &[OsString] {
+        &self.args
+    }
+
+    pub fn take_args(&mut self) -> Vec<OsString> {
+        mem::replace(&mut self.args, Vec::new())
+    }
+
+    /// Returns `true` if we're pretty sure that this'll blow OS spawn limits,

+    /// or `false` if we should attempt to spawn and see what the OS says.
+    pub fn very_likely_to_exceed_some_spawn_limit(&self) -> bool {
+        // We mostly only care about Windows in this method, on Unix the limits
+        // can be gargantuan anyway so we're pretty unlikely to hit them
+        if cfg!(unix) {
+            return false
+        }
+
+        // Right now LLD doesn't support the `@` syntax of passing an argument
+        // through files, so regardless of the platform we try to go to the OS
+        // on this one.
+        if let Program::Lld(..) = self.program {
+            return false
+        }
+
+        // Ok so on Windows the command line used to spawn a process is limited
+        // to 32,768 characters [1]. Unfortunately we don't actually have access
+        // to that limit as it's calculated just before spawning. Instead we perform a
+        // poor-man's guess as to how long our command line will be. We're
+        // assuming here that we don't have to escape every character...
+        //
+        // Turns out though that `cmd.exe` has even smaller limits, 8192
+        // characters [2]. Linkers can often be batch scripts (for example
+        // Emscripten, Gecko's current build system) which means that we're
+        // running through batch scripts. These linkers often just forward
+        // arguments elsewhere (and maybe tack on more), so if we blow 8192
+        // bytes we'll typically cause them to blow as well.
+        //
+        // Basically as a result just perform an inflated estimate of what our
+        // command line will look like and test if it's > 8192 (we actually
+        // test against 6k to artificially inflate our estimate). If all else
+        // fails we'll fall back to the normal unix logic of testing the OS
+        // error code if we fail to spawn and automatically re-spawning the
+        // linker with smaller arguments.
+        //
+        // [1]: https://msdn.microsoft.com/en-us/library/windows/desktop/ms682425(v=vs.85).aspx
+        // [2]: https://blogs.msdn.microsoft.com/oldnewthing/20031210-00/?p=41553
+
+        let estimated_command_line_len =
+            self.args.iter().map(|a| a.len()).sum::<usize>();
+        estimated_command_line_len > 1024 * 6
+    }
+}
+
+impl fmt::Debug for Command {
+    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        self.command().fmt(f)
+    }
+}
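The wrapper keeps its own copy of the arguments and environment so that callers can inspect or re-split them (e.g. when falling back to response files) before spawning. A small usage sketch against the API defined above; the argument values and the surrounding function are made up:

    use std::io;

    fn invoke_linker(mut cmd: Command) -> io::Result<()> {
        cmd.arg("-o").arg("out").args(&["a.o", "b.o"]).env("LC_ALL", "C");

        if cmd.very_likely_to_exceed_some_spawn_limit() {
            // a real caller would switch to a shorter invocation (e.g. an @-file
            // where the linker supports it) instead of spawning directly
        }

        let output = cmd.output()?; // spawns via std::process::Command
        eprintln!("linker exited with {:?}", output.status);
        Ok(())
    }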
index 03b3b20a4e772cd6467d4a00f45a44ce71c9a614..f0ce1e9b0efab2021377b8f52f57b2e18f724b47 100644 (file)
 #[macro_use]
 extern crate log;
 
+extern crate serialize;
 #[macro_use]
 extern crate rustc;
+extern crate rustc_allocator;
 extern crate rustc_target;
+extern crate rustc_metadata;
 extern crate rustc_mir;
 extern crate rustc_incremental;
 extern crate syntax;
 extern crate syntax_pos;
 #[macro_use] extern crate rustc_data_structures;
-extern crate rustc_metadata_utils;
 
+use std::path::PathBuf;
+
+use rustc::session::Session;
 use rustc::ty::TyCtxt;
 
+pub mod command;
 pub mod link;
+pub mod linker;
 pub mod codegen_backend;
+pub mod symbol_export;
 pub mod symbol_names;
 pub mod symbol_names_test;
 
@@ -61,4 +69,43 @@ pub fn check_for_rustc_errors_attr(tcx: TyCtxt) {
     }
 }
 
+#[derive(Copy, Clone, Debug, PartialEq)]
+pub enum ModuleKind {
+    Regular,
+    Metadata,
+    Allocator,
+}
+
+#[derive(Debug)]
+pub struct CompiledModule {
+    pub name: String,
+    pub kind: ModuleKind,
+    pub object: Option<PathBuf>,
+    pub bytecode: Option<PathBuf>,
+    pub bytecode_compressed: Option<PathBuf>,
+}
+
+pub fn find_library(name: &str, search_paths: &[PathBuf], sess: &Session)
+                    -> PathBuf {
+    // On Windows, static libraries sometimes show up as libfoo.a and other
+    // times show up as foo.lib
+    let oslibname = format!("{}{}{}",
+                            sess.target.target.options.staticlib_prefix,
+                            name,
+                            sess.target.target.options.staticlib_suffix);
+    let unixlibname = format!("lib{}.a", name);
+
+    for path in search_paths {
+        debug!("looking for {} inside {:?}", name, path);
+        let test = path.join(&oslibname);
+        if test.exists() { return test }
+        if oslibname != unixlibname {
+            let test = path.join(&unixlibname);
+            if test.exists() { return test }
+        }
+    }
+    sess.fatal(&format!("could not find native static library `{}`, \
+                         perhaps an -L flag is missing?", name));
+}
+
 __build_diagnostic_array! { librustc_codegen_utils, DIAGNOSTICS }
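find_library probes each search directory for both naming conventions before aborting with a fatal error. A hedged sketch of a call site, written as if it lived inside this module (the search-path slice is assumed to have been collected by the caller, e.g. from -L paths):

    fn locate_zlib(sess: &Session, search_dirs: &[PathBuf]) -> PathBuf {
        // Tries "<staticlib_prefix>z<staticlib_suffix>" and then "libz.a" in each
        // directory; emits "could not find native static library `z`" otherwise.
        find_library("z", search_dirs, sess)
    }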
index 66e98793f420d737954619b61010e4c45648473b..b11aa687326f2536b846d10f79270bf009ab8467 100644 (file)
@@ -13,7 +13,6 @@
 use std::path::{Path, PathBuf};
 use syntax::{ast, attr};
 use syntax_pos::Span;
-use rustc_metadata_utils::validate_crate_name;
 
 pub fn out_filename(sess: &Session,
                 crate_type: config::CrateType,
@@ -52,7 +51,7 @@ pub fn find_crate_name(sess: Option<&Session>,
                        attrs: &[ast::Attribute],
                        input: &Input) -> String {
     let validate = |s: String, span: Option<Span>| {
-        validate_crate_name(sess, &s, span);
+        ::rustc_metadata::validate_crate_name(sess, &s, span);
         s
     };
 
diff --git a/src/librustc_codegen_utils/linker.rs b/src/librustc_codegen_utils/linker.rs
new file mode 100644 (file)
index 0000000..c1f41fd
--- /dev/null
@@ -0,0 +1,1100 @@
+// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+use rustc_data_structures::fx::FxHashMap;
+use std::ffi::{OsStr, OsString};
+use std::fs::{self, File};
+use std::io::prelude::*;
+use std::io::{self, BufWriter};
+use std::path::{Path, PathBuf};
+
+use command::Command;
+use rustc::hir::def_id::{LOCAL_CRATE, CrateNum};
+use rustc::middle::dependency_format::Linkage;
+use rustc::session::Session;
+use rustc::session::config::{self, CrateType, OptLevel, DebugInfo,
+                             CrossLangLto};
+use rustc::ty::TyCtxt;
+use rustc_target::spec::{LinkerFlavor, LldFlavor};
+use serialize::{json, Encoder};
+
+/// For all the linkers we support, the information they might
+/// need out of the shared crate context before we get rid of it.
+pub struct LinkerInfo {
+    exports: FxHashMap<CrateType, Vec<String>>,
+}
+
+impl LinkerInfo {
+    pub fn new(tcx: TyCtxt) -> LinkerInfo {
+        LinkerInfo {
+            exports: tcx.sess.crate_types.borrow().iter().map(|&c| {
+                (c, exported_symbols(tcx, c))
+            }).collect(),
+        }
+    }
+
+    pub fn to_linker<'a>(
+        &'a self,
+        cmd: Command,
+        sess: &'a Session,
+        flavor: LinkerFlavor,
+        target_cpu: &'a str,
+    ) -> Box<dyn Linker+'a> {
+        match flavor {
+            LinkerFlavor::Lld(LldFlavor::Link) |
+            LinkerFlavor::Msvc => {
+                Box::new(MsvcLinker {
+                    cmd,
+                    sess,
+                    info: self
+                }) as Box<dyn Linker>
+            }
+            LinkerFlavor::Em =>  {
+                Box::new(EmLinker {
+                    cmd,
+                    sess,
+                    info: self
+                }) as Box<dyn Linker>
+            }
+            LinkerFlavor::Gcc =>  {
+                Box::new(GccLinker {
+                    cmd,
+                    sess,
+                    info: self,
+                    hinted_static: false,
+                    is_ld: false,
+                    target_cpu,
+                }) as Box<dyn Linker>
+            }
+
+            LinkerFlavor::Lld(LldFlavor::Ld) |
+            LinkerFlavor::Lld(LldFlavor::Ld64) |
+            LinkerFlavor::Ld => {
+                Box::new(GccLinker {
+                    cmd,
+                    sess,
+                    info: self,
+                    hinted_static: false,
+                    is_ld: true,
+                    target_cpu,
+                }) as Box<dyn Linker>
+            }
+
+            LinkerFlavor::Lld(LldFlavor::Wasm) => {
+                Box::new(WasmLd {
+                    cmd,
+                    sess,
+                    info: self
+                }) as Box<dyn Linker>
+            }
+        }
+    }
+}
+
+/// Linker abstraction used by back::link to build up the command to invoke a
+/// linker.
+///
+/// This trait is the total list of requirements needed by `back::link` and
+/// represents the meaning of each option being passed down. This trait is then
+/// used to dispatch on whether a GNU-like linker (generally `ld.exe`) or an
+/// MSVC linker (e.g. `link.exe`) is being used.
+pub trait Linker {
+    fn link_dylib(&mut self, lib: &str);
+    fn link_rust_dylib(&mut self, lib: &str, path: &Path);
+    fn link_framework(&mut self, framework: &str);
+    fn link_staticlib(&mut self, lib: &str);
+    fn link_rlib(&mut self, lib: &Path);
+    fn link_whole_rlib(&mut self, lib: &Path);
+    fn link_whole_staticlib(&mut self, lib: &str, search_path: &[PathBuf]);
+    fn include_path(&mut self, path: &Path);
+    fn framework_path(&mut self, path: &Path);
+    fn output_filename(&mut self, path: &Path);
+    fn add_object(&mut self, path: &Path);
+    fn gc_sections(&mut self, keep_metadata: bool);
+    fn position_independent_executable(&mut self);
+    fn no_position_independent_executable(&mut self);
+    fn full_relro(&mut self);
+    fn partial_relro(&mut self);
+    fn no_relro(&mut self);
+    fn optimize(&mut self);
+    fn pgo_gen(&mut self);
+    fn debuginfo(&mut self);
+    fn no_default_libraries(&mut self);
+    fn build_dylib(&mut self, out_filename: &Path);
+    fn build_static_executable(&mut self);
+    fn args(&mut self, args: &[String]);
+    fn export_symbols(&mut self, tmpdir: &Path, crate_type: CrateType);
+    fn subsystem(&mut self, subsystem: &str);
+    fn group_start(&mut self);
+    fn group_end(&mut self);
+    fn cross_lang_lto(&mut self);
+    // Should have been finalize(self), but we don't support self-by-value on trait objects (yet?).
+    fn finalize(&mut self) -> Command;
+}
+
+pub struct GccLinker<'a> {
+    cmd: Command,
+    sess: &'a Session,
+    info: &'a LinkerInfo,
+    hinted_static: bool, // Keeps track of the current hinting mode.
+    // Link as ld
+    is_ld: bool,
+    target_cpu: &'a str,
+}
+
+impl<'a> GccLinker<'a> {
+    /// Argument that must be passed *directly* to the linker
+    ///
+    /// These arguments need to be prepended with '-Wl,' when a gcc-style linker is used
+    fn linker_arg<S>(&mut self, arg: S) -> &mut Self
+        where S: AsRef<OsStr>
+    {
+        if !self.is_ld {
+            let mut os = OsString::from("-Wl,");
+            os.push(arg.as_ref());
+            self.cmd.arg(os);
+        } else {
+            self.cmd.arg(arg);
+        }
+        self
+    }
+
+    fn takes_hints(&self) -> bool {
+        !self.sess.target.target.options.is_like_osx
+    }
+
+    // Some platforms take hints about whether a library is static or dynamic.
+    // For those that support this, we ensure we pass the option if the library
+    // was flagged "static" (most defaults are dynamic) to ensure that if
+    // libfoo.a and libfoo.so both exist that the right one is chosen.
+    fn hint_static(&mut self) {
+        if !self.takes_hints() { return }
+        if !self.hinted_static {
+            self.linker_arg("-Bstatic");
+            self.hinted_static = true;
+        }
+    }
+
+    fn hint_dynamic(&mut self) {
+        if !self.takes_hints() { return }
+        if self.hinted_static {
+            self.linker_arg("-Bdynamic");
+            self.hinted_static = false;
+        }
+    }
+
+    fn push_cross_lang_lto_args(&mut self, plugin_path: Option<&OsStr>) {
+        if let Some(plugin_path) = plugin_path {
+            let mut arg = OsString::from("-plugin=");
+            arg.push(plugin_path);
+            self.linker_arg(&arg);
+        }
+
+        let opt_level = match self.sess.opts.optimize {
+            config::OptLevel::No => "O0",
+            config::OptLevel::Less => "O1",
+            config::OptLevel::Default => "O2",
+            config::OptLevel::Aggressive => "O3",
+            config::OptLevel::Size => "Os",
+            config::OptLevel::SizeMin => "Oz",
+        };
+
+        self.linker_arg(&format!("-plugin-opt={}", opt_level));
+        let target_cpu = self.target_cpu;
+        self.linker_arg(&format!("-plugin-opt=mcpu={}", target_cpu));
+
+        match self.sess.lto() {
+            config::Lto::Thin |
+            config::Lto::ThinLocal => {
+                self.linker_arg("-plugin-opt=thin");
+            }
+            config::Lto::Fat |
+            config::Lto::No => {
+                // default to regular LTO
+            }
+        }
+    }
+}
+
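// Illustration of `linker_arg` above (an assumed call, not code from this file):
// with `is_ld == false` (a gcc-style driver), a call such as
//
//     self.linker_arg("-Bstatic");
//
// pushes the single argument "-Wl,-Bstatic" so the driver forwards it to the
// underlying linker, whereas with `is_ld == true` the same call passes
// "-Bstatic" through unchanged.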
+impl<'a> Linker for GccLinker<'a> {
+    fn link_dylib(&mut self, lib: &str) { self.hint_dynamic(); self.cmd.arg(format!("-l{}",lib)); }
+    fn link_staticlib(&mut self, lib: &str) {
+        self.hint_static(); self.cmd.arg(format!("-l{}",lib));
+    }
+    fn link_rlib(&mut self, lib: &Path) { self.hint_static(); self.cmd.arg(lib); }
+    fn include_path(&mut self, path: &Path) { self.cmd.arg("-L").arg(path); }
+    fn framework_path(&mut self, path: &Path) { self.cmd.arg("-F").arg(path); }
+    fn output_filename(&mut self, path: &Path) { self.cmd.arg("-o").arg(path); }
+    fn add_object(&mut self, path: &Path) { self.cmd.arg(path); }
+    fn position_independent_executable(&mut self) { self.cmd.arg("-pie"); }
+    fn no_position_independent_executable(&mut self) { self.cmd.arg("-no-pie"); }
+    fn full_relro(&mut self) { self.linker_arg("-zrelro"); self.linker_arg("-znow"); }
+    fn partial_relro(&mut self) { self.linker_arg("-zrelro"); }
+    fn no_relro(&mut self) { self.linker_arg("-znorelro"); }
+    fn build_static_executable(&mut self) { self.cmd.arg("-static"); }
+    fn args(&mut self, args: &[String]) { self.cmd.args(args); }
+
+    fn link_rust_dylib(&mut self, lib: &str, _path: &Path) {
+        self.hint_dynamic();
+        self.cmd.arg(format!("-l{}",lib));
+    }
+
+    fn link_framework(&mut self, framework: &str) {
+        self.hint_dynamic();
+        self.cmd.arg("-framework").arg(framework);
+    }
+
+    // Here we explicitly ask that the entire archive is included into the
+    // result artifact. For more details see #15460, but the gist is that
+    // the linker will strip away any unused objects in the archive if we
+    // don't otherwise explicitly reference them. This can occur for
+    // libraries which are just providing bindings, libraries with generic
+    // functions, etc.
+    fn link_whole_staticlib(&mut self, lib: &str, search_path: &[PathBuf]) {
+        self.hint_static();
+        let target = &self.sess.target.target;
+        if !target.options.is_like_osx {
+            self.linker_arg("--whole-archive").cmd.arg(format!("-l{}",lib));
+            self.linker_arg("--no-whole-archive");
+        } else {
+            // -force_load is the macOS equivalent of --whole-archive, but it
+            // involves passing the full path to the library to link.
+            self.linker_arg("-force_load");
+            let lib = ::find_library(lib, search_path, &self.sess);
+            self.linker_arg(&lib);
+        }
+    }
+
+    fn link_whole_rlib(&mut self, lib: &Path) {
+        self.hint_static();
+        if self.sess.target.target.options.is_like_osx {
+            self.linker_arg("-force_load");
+            self.linker_arg(&lib);
+        } else {
+            self.linker_arg("--whole-archive").cmd.arg(lib);
+            self.linker_arg("--no-whole-archive");
+        }
+    }
+
+    fn gc_sections(&mut self, keep_metadata: bool) {
+        // The dead_strip option to the linker specifies that functions and data
+        // unreachable by the entry point will be removed. This is quite useful
+        // with Rust's compilation model of compiling whole libraries at a time into
+        // one object file. For example, this brings hello world from 1.7MB to
+        // 458K.
+        //
+        // Note that this is done for both executables and dynamic libraries. We
+        // won't get much benefit from dylibs because LLVM will have already
+        // stripped away as much as it could. This has not been seen to impact
+        // link times negatively.
+        //
+        // -dead_strip can't be part of the pre_link_args because it's also used
+        // for partial linking when using multiple codegen units (-r).  So we
+        // insert it here.
+        if self.sess.target.target.options.is_like_osx {
+            self.linker_arg("-dead_strip");
+        } else if self.sess.target.target.options.is_like_solaris {
+            self.linker_arg("-zignore");
+
+        // If we're building a dylib, we don't use --gc-sections because LLVM
+        // has already done the best it can do, and we also don't want to
+        // eliminate the metadata. If we're building an executable, however,
+        // --gc-sections drops the size of hello world from 1.8MB to 597K, a 67%
+        // reduction.
+        } else if !keep_metadata {
+            self.linker_arg("--gc-sections");
+        }
+    }
+
+    fn optimize(&mut self) {
+        if !self.sess.target.target.options.linker_is_gnu { return }
+
+        // GNU-style linkers support optimization with -O. GNU ld doesn't
+        // need a numeric argument, but other linkers do.
+        if self.sess.opts.optimize == config::OptLevel::Default ||
+           self.sess.opts.optimize == config::OptLevel::Aggressive {
+            self.linker_arg("-O1");
+        }
+    }
+
+    fn pgo_gen(&mut self) {
+        if !self.sess.target.target.options.linker_is_gnu { return }
+
+        // If we're doing PGO generation stuff and on a GNU-like linker, use the
+        // "-u" flag to properly pull in the profiler runtime bits.
+        //
+        // This is because LLVM otherwise won't add the needed initialization
+        // for us on Linux (though the extra flag should be harmless if it
+        // does).
+        //
+        // See https://reviews.llvm.org/D14033 and https://reviews.llvm.org/D14030.
+        //
+        // Though it may be worth trying to revert those changes upstream, since
+        // the overhead of the initialization should be minor.
+        self.cmd.arg("-u");
+        self.cmd.arg("__llvm_profile_runtime");
+    }
+
+    fn debuginfo(&mut self) {
+        match self.sess.opts.debuginfo {
+            DebugInfo::None => {
+                // If we are building without debuginfo enabled and we were called with
+                // `-Zstrip-debuginfo-if-disabled=yes`, tell the linker to strip any debuginfo
+                // found when linking to get rid of symbols from libstd.
+                match self.sess.opts.debugging_opts.strip_debuginfo_if_disabled {
+                    Some(true) => { self.linker_arg("-S"); },
+                    _ => {},
+                }
+            },
+            _ => {},
+        };
+    }
+
+    fn no_default_libraries(&mut self) {
+        if !self.is_ld {
+            self.cmd.arg("-nodefaultlibs");
+        }
+    }
+
+    fn build_dylib(&mut self, out_filename: &Path) {
+        // On mac we need to tell the linker to let this library be rpathed
+        if self.sess.target.target.options.is_like_osx {
+            self.cmd.arg("-dynamiclib");
+            self.linker_arg("-dylib");
+
+            // Note that the `osx_rpath_install_name` option here is a hack
+            // purely to support rustbuild right now, we should get a more
+            // principled solution at some point to force the compiler to pass
+            // the right `-Wl,-install_name` with an `@rpath` in it.
+            if self.sess.opts.cg.rpath ||
+               self.sess.opts.debugging_opts.osx_rpath_install_name {
+                self.linker_arg("-install_name");
+                let mut v = OsString::from("@rpath/");
+                v.push(out_filename.file_name().unwrap());
+                self.linker_arg(&v);
+            }
+        } else {
+            self.cmd.arg("-shared");
+        }
+    }
+
+    fn export_symbols(&mut self, tmpdir: &Path, crate_type: CrateType) {
+        // If we're compiling a dylib, then we let the symbol visibility in the
+        // object files take care of whether symbols are exported or not.
+        //
+        // If we're compiling a cdylib, however, we manually create a list of
+        // exported symbols to ensure we don't expose anything extra. The object
+        // files have far more public symbols than we actually want to export,
+        // so we hide them all here.
+        if crate_type == CrateType::Dylib ||
+           crate_type == CrateType::ProcMacro {
+            return
+        }
+
+        let mut arg = OsString::new();
+        let path = tmpdir.join("list");
+
+        debug!("EXPORTED SYMBOLS:");
+
+        if self.sess.target.target.options.is_like_osx {
+            // Write a plain, newline-separated list of symbols
+            let res = (|| -> io::Result<()> {
+                let mut f = BufWriter::new(File::create(&path)?);
+                for sym in self.info.exports[&crate_type].iter() {
+                    debug!("  _{}", sym);
+                    writeln!(f, "_{}", sym)?;
+                }
+                Ok(())
+            })();
+            if let Err(e) = res {
+                self.sess.fatal(&format!("failed to write lib.def file: {}", e));
+            }
+        } else {
+            // Write an LD version script
+            let res = (|| -> io::Result<()> {
+                let mut f = BufWriter::new(File::create(&path)?);
+                writeln!(f, "{{\n  global:")?;
+                for sym in self.info.exports[&crate_type].iter() {
+                    debug!("    {};", sym);
+                    writeln!(f, "    {};", sym)?;
+                }
+                writeln!(f, "\n  local:\n    *;\n}};")?;
+                Ok(())
+            })();
+            if let Err(e) = res {
+                self.sess.fatal(&format!("failed to write version script: {}", e));
+            }
+        }
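+
+        // As a concrete sketch, a crate exporting `foo` and `bar` gets a
+        // version script shaped like:
+        //
+        //     {
+        //       global:
+        //         foo;
+        //         bar;
+        //
+        //       local:
+        //         *;
+        //     };
+        //
+        // while on macOS the file is simply a newline-separated list of the
+        // same names with a leading underscore (`_foo`, `_bar`).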
+
+        if self.sess.target.target.options.is_like_osx {
+            if !self.is_ld {
+                arg.push("-Wl,")
+            }
+            arg.push("-exported_symbols_list,");
+        } else if self.sess.target.target.options.is_like_solaris {
+            if !self.is_ld {
+                arg.push("-Wl,")
+            }
+            arg.push("-M,");
+        } else {
+            if !self.is_ld {
+                arg.push("-Wl,")
+            }
+            arg.push("--version-script=");
+        }
+
+        arg.push(&path);
+        self.cmd.arg(arg);
+    }
+
+    fn subsystem(&mut self, subsystem: &str) {
+        self.linker_arg("--subsystem");
+        self.linker_arg(&subsystem);
+    }
+
+    fn finalize(&mut self) -> Command {
+        self.hint_dynamic(); // Reset to default before returning the composed command line.
+        let mut cmd = Command::new("");
+        ::std::mem::swap(&mut cmd, &mut self.cmd);
+        cmd
+    }
+
+    fn group_start(&mut self) {
+        if !self.sess.target.target.options.is_like_osx {
+            self.linker_arg("--start-group");
+        }
+    }
+
+    fn group_end(&mut self) {
+        if !self.sess.target.target.options.is_like_osx {
+            self.linker_arg("--end-group");
+        }
+    }
+
+    fn cross_lang_lto(&mut self) {
+        match self.sess.opts.debugging_opts.cross_lang_lto {
+            CrossLangLto::Disabled => {
+                // Nothing to do
+            }
+            CrossLangLto::LinkerPluginAuto => {
+                self.push_cross_lang_lto_args(None);
+            }
+            CrossLangLto::LinkerPlugin(ref path) => {
+                self.push_cross_lang_lto_args(Some(path.as_os_str()));
+            }
+        }
+    }
+}
+
+pub struct MsvcLinker<'a> {
+    cmd: Command,
+    sess: &'a Session,
+    info: &'a LinkerInfo
+}
+
+impl<'a> Linker for MsvcLinker<'a> {
+    fn link_rlib(&mut self, lib: &Path) { self.cmd.arg(lib); }
+    fn add_object(&mut self, path: &Path) { self.cmd.arg(path); }
+    fn args(&mut self, args: &[String]) { self.cmd.args(args); }
+
+    fn build_dylib(&mut self, out_filename: &Path) {
+        self.cmd.arg("/DLL");
+        let mut arg: OsString = "/IMPLIB:".into();
+        arg.push(out_filename.with_extension("dll.lib"));
+        self.cmd.arg(arg);
+    }
+
+    fn build_static_executable(&mut self) {
+        // noop
+    }
+
+    fn gc_sections(&mut self, _keep_metadata: bool) {
+        // MSVC's ICF (Identical COMDAT Folding) link optimization is
+        // slow for Rust, so we disable it by default in non-optimized
+        // builds.
+        if self.sess.opts.optimize != config::OptLevel::No {
+            self.cmd.arg("/OPT:REF,ICF");
+        } else {
+            // It is necessary to specify NOICF here, because /OPT:REF
+            // implies ICF by default.
+            self.cmd.arg("/OPT:REF,NOICF");
+        }
+    }
+
+    fn link_dylib(&mut self, lib: &str) {
+        self.cmd.arg(&format!("{}.lib", lib));
+    }
+
+    fn link_rust_dylib(&mut self, lib: &str, path: &Path) {
+        // When producing a dll, the MSVC linker may not actually emit a
+        // `foo.lib` file if the dll doesn't export any symbols, so we check
+        // whether the file exists and simply skip linking to it if it's not
+        // present.
+        let name = format!("{}.dll.lib", lib);
+        if fs::metadata(&path.join(&name)).is_ok() {
+            self.cmd.arg(name);
+        }
+    }
+
+    fn link_staticlib(&mut self, lib: &str) {
+        self.cmd.arg(&format!("{}.lib", lib));
+    }
+
+    fn position_independent_executable(&mut self) {
+        // noop
+    }
+
+    fn no_position_independent_executable(&mut self) {
+        // noop
+    }
+
+    fn full_relro(&mut self) {
+        // noop
+    }
+
+    fn partial_relro(&mut self) {
+        // noop
+    }
+
+    fn no_relro(&mut self) {
+        // noop
+    }
+
+    fn no_default_libraries(&mut self) {
+        // Currently we don't pass the /NODEFAULTLIB flag to the linker on MSVC
+        // as there's been trouble in the past with linking the C++ standard
+        // library required by LLVM. This likely needs to happen one day, but
+        // in general Windows is also a more controlled environment than
+        // Unix, so it's not necessarily as critical that this be implemented.
+        //
+        // Note that there are also some licensing worries about statically
+        // linking some libraries which require a specific agreement, so it may
+        // not ever be possible for us to pass this flag.
+    }
+
+    fn include_path(&mut self, path: &Path) {
+        let mut arg = OsString::from("/LIBPATH:");
+        arg.push(path);
+        self.cmd.arg(&arg);
+    }
+
+    fn output_filename(&mut self, path: &Path) {
+        let mut arg = OsString::from("/OUT:");
+        arg.push(path);
+        self.cmd.arg(&arg);
+    }
+
+    fn framework_path(&mut self, _path: &Path) {
+        bug!("frameworks are not supported on windows")
+    }
+    fn link_framework(&mut self, _framework: &str) {
+        bug!("frameworks are not supported on windows")
+    }
+
+    fn link_whole_staticlib(&mut self, lib: &str, _search_path: &[PathBuf]) {
+        // not supported?
+        self.link_staticlib(lib);
+    }
+    fn link_whole_rlib(&mut self, path: &Path) {
+        // not supported?
+        self.link_rlib(path);
+    }
+    fn optimize(&mut self) {
+        // Needs more investigation of `/OPT` arguments
+    }
+
+    fn pgo_gen(&mut self) {
+        // Nothing needed here.
+    }
+
+    fn debuginfo(&mut self) {
+        // This will cause the Microsoft linker to generate a PDB file
+        // from the CodeView line tables in the object files.
+        self.cmd.arg("/DEBUG");
+
+        // This will cause the Microsoft linker to embed .natvis info into the PDB file.
+        let sysroot = self.sess.sysroot();
+        let natvis_dir_path = sysroot.join("lib\\rustlib\\etc");
+        if let Ok(natvis_dir) = fs::read_dir(&natvis_dir_path) {
+            // LLVM 5.0.0's lld-link frontend doesn't yet recognize, and chokes
+            // on, the /NATVIS:... flags. LLVM 6 (or later) should at worst ignore
+            // them, eventually mooting this workaround, per this landed patch:
+            // https://github.com/llvm-mirror/lld/commit/27b9c4285364d8d76bb43839daa100
+            if let Some(ref linker_path) = self.sess.opts.cg.linker {
+                if let Some(linker_name) = Path::new(&linker_path).file_stem() {
+                    if linker_name.to_str().unwrap().to_lowercase() == "lld-link" {
+                        self.sess.warn("not embedding natvis: lld-link may not support the flag");
+                        return;
+                    }
+                }
+            }
+            for entry in natvis_dir {
+                match entry {
+                    Ok(entry) => {
+                        let path = entry.path();
+                        if path.extension() == Some("natvis".as_ref()) {
+                            let mut arg = OsString::from("/NATVIS:");
+                            arg.push(path);
+                            self.cmd.arg(arg);
+                        }
+                    },
+                    Err(err) => {
+                        self.sess.warn(&format!("error enumerating natvis directory: {}", err));
+                    },
+                }
+            }
+        }
+    }
+
+    // Currently the compiler doesn't use `dllexport` (an LLVM attribute) to
+    // export symbols from a dynamic library. When building a dynamic library,
+    // however, we're going to want some symbols exported, so this function
+    // generates a DEF file which lists all the symbols.
+    //
+    // The linker will read this `*.def` file and export all the symbols from
+    // the dynamic library. Note that this is not as simple as just exporting
+    // all the symbols in the current crate (as specified by `codegen.reachable`)
+    // but rather we also need to possibly export the symbols of upstream
+    // crates. Upstream rlibs may be linked statically to this dynamic library,
+    // in which case they may continue to transitively be used and hence need
+    // their symbols exported.
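+    //
+    // As a sketch, a cdylib exporting `foo` and `bar` gets a `lib.def` of:
+    //
+    //     LIBRARY
+    //     EXPORTS
+    //       foo
+    //       bar
+    //
+    // which is then handed to the linker via `/DEF:<tmpdir>\lib.def`.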
+    fn export_symbols(&mut self,
+                      tmpdir: &Path,
+                      crate_type: CrateType) {
+        let path = tmpdir.join("lib.def");
+        let res = (|| -> io::Result<()> {
+            let mut f = BufWriter::new(File::create(&path)?);
+
+            // Start off with the standard module name header and then go
+            // straight to exports.
+            writeln!(f, "LIBRARY")?;
+            writeln!(f, "EXPORTS")?;
+            for symbol in self.info.exports[&crate_type].iter() {
+                debug!("  _{}", symbol);
+                writeln!(f, "  {}", symbol)?;
+            }
+            Ok(())
+        })();
+        if let Err(e) = res {
+            self.sess.fatal(&format!("failed to write lib.def file: {}", e));
+        }
+        let mut arg = OsString::from("/DEF:");
+        arg.push(path);
+        self.cmd.arg(&arg);
+    }
+
+    fn subsystem(&mut self, subsystem: &str) {
+        // Note that previous passes of the compiler validated this subsystem,
+        // so we just blindly pass it to the linker.
+        self.cmd.arg(&format!("/SUBSYSTEM:{}", subsystem));
+
+        // Windows has two subsystems we're interested in right now: the console
+        // and windows subsystems. These implicitly have different entry points
+        // (starting symbols). The console entry point starts at `mainCRTStartup`
+        // and the windows entry point starts at `WinMainCRTStartup`. These entry
+        // points, defined in system libraries, will then probe for `main` or
+        // `WinMain`, respectively, to start the application.
+        //
+        // In Rust we always generate a `main` function, so we want control to
+        // start there; we therefore force the entry point on the windows
+        // subsystem to be `mainCRTStartup` so everything boots up correctly.
+        //
+        // For more information see RFC #1665
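+        //
+        // Concretely, for the windows subsystem the linker ends up seeing
+        // `/SUBSYSTEM:windows /ENTRY:mainCRTStartup`, while the console
+        // subsystem only gets `/SUBSYSTEM:console`.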
+        if subsystem == "windows" {
+            self.cmd.arg("/ENTRY:mainCRTStartup");
+        }
+    }
+
+    fn finalize(&mut self) -> Command {
+        let mut cmd = Command::new("");
+        ::std::mem::swap(&mut cmd, &mut self.cmd);
+        cmd
+    }
+
+    // MSVC doesn't need group indicators
+    fn group_start(&mut self) {}
+    fn group_end(&mut self) {}
+
+    fn cross_lang_lto(&mut self) {
+        // Do nothing
+    }
+}
+
+pub struct EmLinker<'a> {
+    cmd: Command,
+    sess: &'a Session,
+    info: &'a LinkerInfo
+}
+
+impl<'a> Linker for EmLinker<'a> {
+    fn include_path(&mut self, path: &Path) {
+        self.cmd.arg("-L").arg(path);
+    }
+
+    fn link_staticlib(&mut self, lib: &str) {
+        self.cmd.arg("-l").arg(lib);
+    }
+
+    fn output_filename(&mut self, path: &Path) {
+        self.cmd.arg("-o").arg(path);
+    }
+
+    fn add_object(&mut self, path: &Path) {
+        self.cmd.arg(path);
+    }
+
+    fn link_dylib(&mut self, lib: &str) {
+        // Emscripten always links statically
+        self.link_staticlib(lib);
+    }
+
+    fn link_whole_staticlib(&mut self, lib: &str, _search_path: &[PathBuf]) {
+        // not supported?
+        self.link_staticlib(lib);
+    }
+
+    fn link_whole_rlib(&mut self, lib: &Path) {
+        // not supported?
+        self.link_rlib(lib);
+    }
+
+    fn link_rust_dylib(&mut self, lib: &str, _path: &Path) {
+        self.link_dylib(lib);
+    }
+
+    fn link_rlib(&mut self, lib: &Path) {
+        self.add_object(lib);
+    }
+
+    fn position_independent_executable(&mut self) {
+        // noop
+    }
+
+    fn no_position_independent_executable(&mut self) {
+        // noop
+    }
+
+    fn full_relro(&mut self) {
+        // noop
+    }
+
+    fn partial_relro(&mut self) {
+        // noop
+    }
+
+    fn no_relro(&mut self) {
+        // noop
+    }
+
+    fn args(&mut self, args: &[String]) {
+        self.cmd.args(args);
+    }
+
+    fn framework_path(&mut self, _path: &Path) {
+        bug!("frameworks are not supported on Emscripten")
+    }
+
+    fn link_framework(&mut self, _framework: &str) {
+        bug!("frameworks are not supported on Emscripten")
+    }
+
+    fn gc_sections(&mut self, _keep_metadata: bool) {
+        // noop
+    }
+
+    fn optimize(&mut self) {
+        // Emscripten performs its own optimizations
+        self.cmd.arg(match self.sess.opts.optimize {
+            OptLevel::No => "-O0",
+            OptLevel::Less => "-O1",
+            OptLevel::Default => "-O2",
+            OptLevel::Aggressive => "-O3",
+            OptLevel::Size => "-Os",
+            OptLevel::SizeMin => "-Oz"
+        });
+        // Unusable until https://github.com/rust-lang/rust/issues/38454 is resolved
+        self.cmd.args(&["--memory-init-file", "0"]);
+    }
+
+    fn pgo_gen(&mut self) {
+        // noop, but maybe we need something like the gnu linker?
+    }
+
+    fn debuginfo(&mut self) {
+        // Preserve names or generate source maps depending on debug info
+        self.cmd.arg(match self.sess.opts.debuginfo {
+            DebugInfo::None => "-g0",
+            DebugInfo::Limited => "-g3",
+            DebugInfo::Full => "-g4"
+        });
+    }
+
+    fn no_default_libraries(&mut self) {
+        self.cmd.args(&["-s", "DEFAULT_LIBRARY_FUNCS_TO_INCLUDE=[]"]);
+    }
+
+    fn build_dylib(&mut self, _out_filename: &Path) {
+        bug!("building dynamic library is unsupported on Emscripten")
+    }
+
+    fn build_static_executable(&mut self) {
+        // noop
+    }
+
+    fn export_symbols(&mut self, _tmpdir: &Path, crate_type: CrateType) {
+        let symbols = &self.info.exports[&crate_type];
+
+        debug!("EXPORTED SYMBOLS:");
+
+        self.cmd.arg("-s");
+
+        let mut arg = OsString::from("EXPORTED_FUNCTIONS=");
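+        // The symbol list is JSON-encoded below, so for exports `foo` and
+        // `bar` the final argument looks (roughly) like:
+        //
+        //     -s EXPORTED_FUNCTIONS=["_foo","_bar"]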
+        let mut encoded = String::new();
+
+        {
+            let mut encoder = json::Encoder::new(&mut encoded);
+            let res = encoder.emit_seq(symbols.len(), |encoder| {
+                for (i, sym) in symbols.iter().enumerate() {
+                    encoder.emit_seq_elt(i, |encoder| {
+                        encoder.emit_str(&("_".to_string() + sym))
+                    })?;
+                }
+                Ok(())
+            });
+            if let Err(e) = res {
+                self.sess.fatal(&format!("failed to encode exported symbols: {}", e));
+            }
+        }
+        debug!("{}", encoded);
+        arg.push(encoded);
+
+        self.cmd.arg(arg);
+    }
+
+    fn subsystem(&mut self, _subsystem: &str) {
+        // noop
+    }
+
+    fn finalize(&mut self) -> Command {
+        let mut cmd = Command::new("");
+        ::std::mem::swap(&mut cmd, &mut self.cmd);
+        cmd
+    }
+
+    // Appears not necessary on Emscripten
+    fn group_start(&mut self) {}
+    fn group_end(&mut self) {}
+
+    fn cross_lang_lto(&mut self) {
+        // Do nothing
+    }
+}
+
+fn exported_symbols(tcx: TyCtxt, crate_type: CrateType) -> Vec<String> {
+    let mut symbols = Vec::new();
+
+    let export_threshold =
+        ::symbol_export::crates_export_threshold(&[crate_type]);
+    for &(symbol, level) in tcx.exported_symbols(LOCAL_CRATE).iter() {
+        if level.is_below_threshold(export_threshold) {
+            symbols.push(symbol.symbol_name(tcx).to_string());
+        }
+    }
+
+    let formats = tcx.sess.dependency_formats.borrow();
+    let deps = formats[&crate_type].iter();
+
+    for (index, dep_format) in deps.enumerate() {
+        let cnum = CrateNum::new(index + 1);
+        // For each dependency that we are linking to statically ...
+        if *dep_format == Linkage::Static {
+            // ... we add its symbol list to our export list.
+            for &(symbol, level) in tcx.exported_symbols(cnum).iter() {
+                if level.is_below_threshold(export_threshold) {
+                    symbols.push(symbol.symbol_name(tcx).to_string());
+                }
+            }
+        }
+    }
+
+    symbols
+}
+
+pub struct WasmLd<'a> {
+    cmd: Command,
+    sess: &'a Session,
+    info: &'a LinkerInfo,
+}
+
+impl<'a> Linker for WasmLd<'a> {
+    fn link_dylib(&mut self, lib: &str) {
+        self.cmd.arg("-l").arg(lib);
+    }
+
+    fn link_staticlib(&mut self, lib: &str) {
+        self.cmd.arg("-l").arg(lib);
+    }
+
+    fn link_rlib(&mut self, lib: &Path) {
+        self.cmd.arg(lib);
+    }
+
+    fn include_path(&mut self, path: &Path) {
+        self.cmd.arg("-L").arg(path);
+    }
+
+    fn framework_path(&mut self, _path: &Path) {
+        panic!("frameworks not supported")
+    }
+
+    fn output_filename(&mut self, path: &Path) {
+        self.cmd.arg("-o").arg(path);
+    }
+
+    fn add_object(&mut self, path: &Path) {
+        self.cmd.arg(path);
+    }
+
+    fn position_independent_executable(&mut self) {
+    }
+
+    fn full_relro(&mut self) {
+    }
+
+    fn partial_relro(&mut self) {
+    }
+
+    fn no_relro(&mut self) {
+    }
+
+    fn build_static_executable(&mut self) {
+    }
+
+    fn args(&mut self, args: &[String]) {
+        self.cmd.args(args);
+    }
+
+    fn link_rust_dylib(&mut self, lib: &str, _path: &Path) {
+        self.cmd.arg("-l").arg(lib);
+    }
+
+    fn link_framework(&mut self, _framework: &str) {
+        panic!("frameworks not supported")
+    }
+
+    fn link_whole_staticlib(&mut self, lib: &str, _search_path: &[PathBuf]) {
+        self.cmd.arg("-l").arg(lib);
+    }
+
+    fn link_whole_rlib(&mut self, lib: &Path) {
+        self.cmd.arg(lib);
+    }
+
+    fn gc_sections(&mut self, _keep_metadata: bool) {
+        self.cmd.arg("--gc-sections");
+    }
+
+    fn optimize(&mut self) {
+        self.cmd.arg(match self.sess.opts.optimize {
+            OptLevel::No => "-O0",
+            OptLevel::Less => "-O1",
+            OptLevel::Default => "-O2",
+            OptLevel::Aggressive => "-O3",
+            // Currently LLD doesn't support `Os` and `Oz`, so pass through `O2`
+            // instead.
+            OptLevel::Size => "-O2",
+            OptLevel::SizeMin => "-O2"
+        });
+    }
+
+    fn pgo_gen(&mut self) {
+    }
+
+    fn debuginfo(&mut self) {
+    }
+
+    fn no_default_libraries(&mut self) {
+    }
+
+    fn build_dylib(&mut self, _out_filename: &Path) {
+    }
+
+    fn export_symbols(&mut self, _tmpdir: &Path, crate_type: CrateType) {
+        for sym in self.info.exports[&crate_type].iter() {
+            self.cmd.arg("--export").arg(&sym);
+        }
+    }
+
+    fn subsystem(&mut self, _subsystem: &str) {
+    }
+
+    fn no_position_independent_executable(&mut self) {
+    }
+
+    fn finalize(&mut self) -> Command {
+        // There have been reports in the wild (rustwasm/wasm-bindgen#119) of
+        // threads causing weird hangs and bugs, so disable threading entirely;
+        // linking isn't yet the bottleneck of compilation anyway.
+        self.cmd.arg("--no-threads");
+
+        // By default LLD only gives us one page of stack (64k), which is a
+        // little small. Default to a larger stack closer to other PC platforms
+        // (1MB); users can always inject their own link-args to override this.
+        self.cmd.arg("-z").arg("stack-size=1048576");
+
+        // By default LLD's memory layout is:
+        //
+        // 1. First, a blank page
+        // 2. Next, all static data
+        // 3. Finally, the main stack (which grows down)
+        //
+        // This has the unfortunate consequence that on stack overflows you
+        // corrupt static data and can cause some exceedingly weird bugs. To
+        // help detect this a little sooner we instead request that the stack is
+        // placed before static data.
+        //
+        // This means that we'll generate slightly larger binaries as references
+        // to static data will take more bytes in the ULEB128 encoding, but
+        // stack overflow will be guaranteed to trap as it underflows instead of
+        // corrupting static data.
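+        //
+        // With `--stack-first` the intended layout is therefore, roughly:
+        //
+        //     1. The main stack (still growing down, so an overflow now
+        //        underflows past the start of memory and traps instead of
+        //        silently scribbling over static data)
+        //     2. All static data, with the heap and other allocations after it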
+        self.cmd.arg("--stack-first");
+
+        // FIXME we probably shouldn't pass this but instead pass an explicit
+        // whitelist of symbols we'll allow to be undefined. Unfortunately
+        // though we can't handle symbols like `log10` that LLVM injects at a
+        // super late date without actually parsing object files. For now let's
+        // stick to this and hopefully fix it before stabilization happens.
+        self.cmd.arg("--allow-undefined");
+
+        // For now we just never have an entry symbol
+        self.cmd.arg("--no-entry");
+
+        // Make the default table accessible
+        self.cmd.arg("--export-table");
+
+        // Rust code should never have warnings, and warnings are often
+        // indicative of bugs, so make any linker warnings fatal.
+        self.cmd.arg("--fatal-warnings");
+
+        let mut cmd = Command::new("");
+        ::std::mem::swap(&mut cmd, &mut self.cmd);
+        cmd
+    }
+
+    // Not needed for now with LLD
+    fn group_start(&mut self) {}
+    fn group_end(&mut self) {}
+
+    fn cross_lang_lto(&mut self) {
+        // Do nothing for now
+    }
+}
diff --git a/src/librustc_codegen_utils/symbol_export.rs b/src/librustc_codegen_utils/symbol_export.rs
new file mode 100644 (file)
index 0000000..2d650f7
--- /dev/null
@@ -0,0 +1,395 @@
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+use rustc_data_structures::sync::Lrc;
+use std::sync::Arc;
+
+use rustc::ty::Instance;
+use rustc::hir;
+use rustc::hir::Node;
+use rustc::hir::CodegenFnAttrFlags;
+use rustc::hir::def_id::{CrateNum, DefId, LOCAL_CRATE, CRATE_DEF_INDEX};
+use rustc_data_structures::fingerprint::Fingerprint;
+use rustc::middle::exported_symbols::{SymbolExportLevel, ExportedSymbol, metadata_symbol_name};
+use rustc::session::config;
+use rustc::ty::{TyCtxt, SymbolName};
+use rustc::ty::query::Providers;
+use rustc::ty::subst::Substs;
+use rustc::util::nodemap::{FxHashMap, DefIdMap};
+use rustc_allocator::ALLOCATOR_METHODS;
+use rustc_data_structures::indexed_vec::IndexVec;
+use std::collections::hash_map::Entry::*;
+
+pub type ExportedSymbols = FxHashMap<
+    CrateNum,
+    Arc<Vec<(String, SymbolExportLevel)>>,
+>;
+
+pub fn threshold(tcx: TyCtxt) -> SymbolExportLevel {
+    crates_export_threshold(&tcx.sess.crate_types.borrow())
+}
+
+fn crate_export_threshold(crate_type: config::CrateType) -> SymbolExportLevel {
+    match crate_type {
+        config::CrateType::Executable |
+        config::CrateType::Staticlib  |
+        config::CrateType::ProcMacro  |
+        config::CrateType::Cdylib     => SymbolExportLevel::C,
+        config::CrateType::Rlib       |
+        config::CrateType::Dylib      => SymbolExportLevel::Rust,
+    }
+}
+
+pub fn crates_export_threshold(crate_types: &[config::CrateType])
+                                      -> SymbolExportLevel {
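+    // For example, building with `--crate-type rlib,cdylib` yields
+    // SymbolExportLevel::Rust: the rlib needs Rust-level symbols even though
+    // the cdylib alone would only need the C-level ones.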
+    if crate_types.iter().any(|&crate_type| {
+        crate_export_threshold(crate_type) == SymbolExportLevel::Rust
+    }) {
+        SymbolExportLevel::Rust
+    } else {
+        SymbolExportLevel::C
+    }
+}
+
+fn reachable_non_generics_provider<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
+                                             cnum: CrateNum)
+                                             -> Lrc<DefIdMap<SymbolExportLevel>>
+{
+    assert_eq!(cnum, LOCAL_CRATE);
+
+    if !tcx.sess.opts.output_types.should_codegen() {
+        return Lrc::new(DefIdMap())
+    }
+
+    // Check to see if this crate is a "special runtime crate". These
+    // crates, implementation details of the standard library, typically
+    // have a bunch of `pub extern` and `#[no_mangle]` functions as the
+    // ABI between them. We don't want their symbols to have a `C`
+    // export level, however, as they're just implementation details.
+    // Down below we'll hardwire all of the symbols to the `Rust` export
+    // level instead.
+    let special_runtime_crate = tcx.is_panic_runtime(LOCAL_CRATE) ||
+        tcx.is_compiler_builtins(LOCAL_CRATE);
+
+    let mut reachable_non_generics: DefIdMap<_> = tcx.reachable_set(LOCAL_CRATE).0
+        .iter()
+        .filter_map(|&node_id| {
+            // We want to ignore some FFI functions that are not exposed from
+            // this crate. Reachable FFI functions can be lumped into two
+            // categories:
+            //
+            // 1. Those that are included statically via a static library
+            // 2. Those included otherwise (e.g. dynamically or via a framework)
+            //
+            // Although our LLVM module is not literally emitting code for the
+            // statically included symbols, they are exports of our library that
+            // need to be passed on to the linker and encoded in the metadata.
+            //
+            // As a result, if this id is an FFI item (foreign item) then we only
+            // let it through if it's included statically.
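+            //
+            // For example (a sketch): an `extern` fn coming from a native
+            // library declared with `#[link(kind = "static")]` stays in the
+            // set, while one that will be resolved dynamically at load time is
+            // filtered out here.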
+            match tcx.hir.get(node_id) {
+                Node::ForeignItem(..) => {
+                    let def_id = tcx.hir.local_def_id(node_id);
+                    if tcx.is_statically_included_foreign_item(def_id) {
+                        Some(def_id)
+                    } else {
+                        None
+                    }
+                }
+
+                // Only consider nodes that actually have exported symbols.
+                Node::Item(&hir::Item {
+                    node: hir::ItemKind::Static(..),
+                    ..
+                }) |
+                Node::Item(&hir::Item {
+                    node: hir::ItemKind::Fn(..), ..
+                }) |
+                Node::ImplItem(&hir::ImplItem {
+                    node: hir::ImplItemKind::Method(..),
+                    ..
+                }) => {
+                    let def_id = tcx.hir.local_def_id(node_id);
+                    let generics = tcx.generics_of(def_id);
+                    if !generics.requires_monomorphization(tcx) &&
+                        // Functions marked with #[inline] are only ever codegened
+                        // with "internal" linkage and are never exported.
+                        !Instance::mono(tcx, def_id).def.requires_local(tcx) {
+                        Some(def_id)
+                    } else {
+                        None
+                    }
+                }
+
+                _ => None
+            }
+        })
+        .map(|def_id| {
+            let export_level = if special_runtime_crate {
+                let name = tcx.symbol_name(Instance::mono(tcx, def_id)).as_str();
+                // We can probably do better here by just ensuring that
+                // it has hidden visibility rather than public
+                // visibility, as this is primarily here to ensure it's
+                // not stripped during LTO.
+                //
+                // In general though we won't link right if these
+                // symbols are stripped, and LTO currently strips them.
+                if &*name == "rust_eh_personality" ||
+                   &*name == "rust_eh_register_frames" ||
+                   &*name == "rust_eh_unregister_frames" {
+                    SymbolExportLevel::C
+                } else {
+                    SymbolExportLevel::Rust
+                }
+            } else {
+                symbol_export_level(tcx, def_id)
+            };
+            debug!("EXPORTED SYMBOL (local): {} ({:?})",
+                   tcx.symbol_name(Instance::mono(tcx, def_id)),
+                   export_level);
+            (def_id, export_level)
+        })
+        .collect();
+
+    if let Some(id) = *tcx.sess.derive_registrar_fn.get() {
+        let def_id = tcx.hir.local_def_id(id);
+        reachable_non_generics.insert(def_id, SymbolExportLevel::C);
+    }
+
+    if let Some(id) = *tcx.sess.plugin_registrar_fn.get() {
+        let def_id = tcx.hir.local_def_id(id);
+        reachable_non_generics.insert(def_id, SymbolExportLevel::C);
+    }
+
+    Lrc::new(reachable_non_generics)
+}
+
+fn is_reachable_non_generic_provider_local<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
+                                                     def_id: DefId)
+                                                     -> bool {
+    let export_threshold = threshold(tcx);
+
+    if let Some(&level) = tcx.reachable_non_generics(def_id.krate).get(&def_id) {
+        level.is_below_threshold(export_threshold)
+    } else {
+        false
+    }
+}
+
+fn is_reachable_non_generic_provider_extern<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
+                                                      def_id: DefId)
+                                                      -> bool {
+    tcx.reachable_non_generics(def_id.krate).contains_key(&def_id)
+}
+
+fn exported_symbols_provider_local<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
+                                             cnum: CrateNum)
+                                             -> Arc<Vec<(ExportedSymbol<'tcx>,
+                                                         SymbolExportLevel)>>
+{
+    assert_eq!(cnum, LOCAL_CRATE);
+
+    if !tcx.sess.opts.output_types.should_codegen() {
+        return Arc::new(vec![])
+    }
+
+    let mut symbols: Vec<_> = tcx.reachable_non_generics(LOCAL_CRATE)
+                                 .iter()
+                                 .map(|(&def_id, &level)| {
+                                    (ExportedSymbol::NonGeneric(def_id), level)
+                                 })
+                                 .collect();
+
+    if tcx.sess.entry_fn.borrow().is_some() {
+        let exported_symbol = ExportedSymbol::NoDefId(SymbolName::new("main"));
+
+        symbols.push((exported_symbol, SymbolExportLevel::C));
+    }
+
+    if tcx.sess.allocator_kind.get().is_some() {
+        for method in ALLOCATOR_METHODS {
+            let symbol_name = format!("__rust_{}", method.name);
+            let exported_symbol = ExportedSymbol::NoDefId(SymbolName::new(&symbol_name));
+
+            symbols.push((exported_symbol, SymbolExportLevel::Rust));
+        }
+    }
+
+    if tcx.sess.opts.debugging_opts.pgo_gen.is_some() {
+        // These are weak symbols that point to the profile version and the
+        // profile name, which need to be treated as exported so LTO doesn't nix
+        // them.
+        const PROFILER_WEAK_SYMBOLS: [&'static str; 2] = [
+            "__llvm_profile_raw_version",
+            "__llvm_profile_filename",
+        ];
+        for sym in &PROFILER_WEAK_SYMBOLS {
+            let exported_symbol = ExportedSymbol::NoDefId(SymbolName::new(sym));
+            symbols.push((exported_symbol, SymbolExportLevel::C));
+        }
+    }
+
+    if tcx.sess.crate_types.borrow().contains(&config::CrateType::Dylib) {
+        let symbol_name = metadata_symbol_name(tcx);
+        let exported_symbol = ExportedSymbol::NoDefId(SymbolName::new(&symbol_name));
+
+        symbols.push((exported_symbol, SymbolExportLevel::Rust));
+    }
+
+    if tcx.sess.opts.share_generics() && tcx.local_crate_exports_generics() {
+        use rustc::mir::mono::{Linkage, Visibility, MonoItem};
+        use rustc::ty::InstanceDef;
+
+        // Normally, we require that shared monomorphizations are not hidden,
+        // because if we want to re-use a monomorphization from a Rust dylib, it
+        // needs to be exported.
+        // However, on platforms that don't allow for Rust dylibs, having
+        // external linkage is enough for a monomorphization to be linked against.
+        let need_visibility = tcx.sess.target.target.options.dynamic_linking &&
+                              !tcx.sess.target.target.options.only_cdylib;
+
+        let (_, cgus) = tcx.collect_and_partition_mono_items(LOCAL_CRATE);
+
+        for (mono_item, &(linkage, visibility)) in cgus.iter()
+                                                       .flat_map(|cgu| cgu.items().iter()) {
+            if linkage != Linkage::External {
+                // We can only re-use things with external linkage; otherwise
+                // we'll get a linker error.
+                continue
+            }
+
+            if need_visibility && visibility == Visibility::Hidden {
+                // If we potentially share things from Rust dylibs, they must
+                // not be hidden
+                continue
+            }
+
+            if let &MonoItem::Fn(Instance {
+                def: InstanceDef::Item(def_id),
+                substs,
+            }) = mono_item {
+                if substs.types().next().is_some() {
+                    symbols.push((ExportedSymbol::Generic(def_id, substs),
+                                  SymbolExportLevel::Rust));
+                }
+            }
+        }
+    }
+
+    // Sort so we get a stable incr. comp. hash.
+    symbols.sort_unstable_by(|&(ref symbol1, ..), &(ref symbol2, ..)| {
+        symbol1.compare_stable(tcx, symbol2)
+    });
+
+    Arc::new(symbols)
+}
+
+fn upstream_monomorphizations_provider<'a, 'tcx>(
+    tcx: TyCtxt<'a, 'tcx, 'tcx>,
+    cnum: CrateNum)
+    -> Lrc<DefIdMap<Lrc<FxHashMap<&'tcx Substs<'tcx>, CrateNum>>>>
+{
+    debug_assert!(cnum == LOCAL_CRATE);
+
+    let cnums = tcx.all_crate_nums(LOCAL_CRATE);
+
+    let mut instances: DefIdMap<FxHashMap<_, _>> = DefIdMap();
+
+    let cnum_stable_ids: IndexVec<CrateNum, Fingerprint> = {
+        let mut cnum_stable_ids = IndexVec::from_elem_n(Fingerprint::ZERO,
+                                                        cnums.len() + 1);
+
+        for &cnum in cnums.iter() {
+            cnum_stable_ids[cnum] = tcx.def_path_hash(DefId {
+                krate: cnum,
+                index: CRATE_DEF_INDEX,
+            }).0;
+        }
+
+        cnum_stable_ids
+    };
+
+    for &cnum in cnums.iter() {
+        for &(ref exported_symbol, _) in tcx.exported_symbols(cnum).iter() {
+            if let &ExportedSymbol::Generic(def_id, substs) = exported_symbol {
+                let substs_map = instances.entry(def_id).or_default();
+
+                match substs_map.entry(substs) {
+                    Occupied(mut e) => {
+                        // If there are multiple monomorphizations available,
+                        // we select one deterministically.
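+                        // (The candidate whose crate has the smaller stable
+                        // root DefPathHash fingerprint wins, so the choice
+                        // doesn't depend on crate numbering or load order.)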
+                        let other_cnum = *e.get();
+                        if cnum_stable_ids[other_cnum] > cnum_stable_ids[cnum] {
+                            e.insert(cnum);
+                        }
+                    }
+                    Vacant(e) => {
+                        e.insert(cnum);
+                    }
+                }
+            }
+        }
+    }
+
+    Lrc::new(instances.into_iter()
+                      .map(|(key, value)| (key, Lrc::new(value)))
+                      .collect())
+}
+
+fn upstream_monomorphizations_for_provider<'a, 'tcx>(
+    tcx: TyCtxt<'a, 'tcx, 'tcx>,
+    def_id: DefId)
+    -> Option<Lrc<FxHashMap<&'tcx Substs<'tcx>, CrateNum>>>
+{
+    debug_assert!(!def_id.is_local());
+    tcx.upstream_monomorphizations(LOCAL_CRATE)
+       .get(&def_id)
+       .cloned()
+}
+
+fn is_unreachable_local_definition_provider(tcx: TyCtxt, def_id: DefId) -> bool {
+    if let Some(node_id) = tcx.hir.as_local_node_id(def_id) {
+        !tcx.reachable_set(LOCAL_CRATE).0.contains(&node_id)
+    } else {
+        bug!("is_unreachable_local_definition called with non-local DefId: {:?}",
+              def_id)
+    }
+}
+
+pub fn provide(providers: &mut Providers) {
+    providers.reachable_non_generics = reachable_non_generics_provider;
+    providers.is_reachable_non_generic = is_reachable_non_generic_provider_local;
+    providers.exported_symbols = exported_symbols_provider_local;
+    providers.upstream_monomorphizations = upstream_monomorphizations_provider;
+    providers.is_unreachable_local_definition = is_unreachable_local_definition_provider;
+}
+
+pub fn provide_extern(providers: &mut Providers) {
+    providers.is_reachable_non_generic = is_reachable_non_generic_provider_extern;
+    providers.upstream_monomorphizations_for = upstream_monomorphizations_for_provider;
+}
+
+fn symbol_export_level(tcx: TyCtxt, sym_def_id: DefId) -> SymbolExportLevel {
+    // We export anything that's not mangled at the "C" layer, as it probably
+    // has to do with ABI concerns. We do not, however, apply such treatment to
+    // special symbols in the standard library used for plumbing between
+    // core/std/allocators/etc. For example, symbols used to hook up allocation
+    // are not considered for export at the C level.
+    let codegen_fn_attrs = tcx.codegen_fn_attrs(sym_def_id);
+    let is_extern = codegen_fn_attrs.contains_extern_indicator();
+    let std_internal =
+        codegen_fn_attrs.flags.contains(CodegenFnAttrFlags::RUSTC_STD_INTERNAL_SYMBOL);
+
+    if is_extern && !std_internal {
+        SymbolExportLevel::C
+    } else {
+        SymbolExportLevel::Rust
+    }
+}
index b197d29b184de11f6adec4e559c466290d270f06..07e5548216f3cee30dcf4fa17f434961053016e2 100644 (file)
@@ -21,7 +21,6 @@
       html_root_url = "https://doc.rust-lang.org/nightly/")]
 
 #![feature(in_band_lifetimes)]
-#![cfg_attr(stage0, feature(impl_header_lifetime_elision))]
 #![feature(unboxed_closures)]
 #![feature(fn_traits)]
 #![feature(unsize)]
index ccf2a7f81590e94b182fc92f631879c8c4fe3689..c211d888df131b5b62f29fde6d1b98a5cc4496d5 100644 (file)
@@ -162,8 +162,8 @@ enum NodeState {
 #[derive(Debug)]
 pub struct Outcome<O, E> {
     /// Obligations that were completely evaluated, including all
-    /// (transitive) subobligations.
-    pub completed: Vec<O>,
+    /// (transitive) subobligations. Only computed if requested.
+    pub completed: Option<Vec<O>>,
 
     /// Backtrace of obligations that were found to be in error.
     pub errors: Vec<Error<O, E>>,
@@ -177,6 +177,14 @@ pub struct Outcome<O, E> {
     pub stalled: bool,
 }
 
+/// Should `process_obligations` compute the `Outcome::completed` field of its
+/// result?
+#[derive(PartialEq)]
+pub enum DoCompleted {
+    No,
+    Yes,
+}
+
 #[derive(Debug, PartialEq, Eq)]
 pub struct Error<O, E> {
     pub error: E,
@@ -282,8 +290,8 @@ pub fn to_errors<E: Clone>(&mut self, error: E) -> Vec<Error<O, E>> {
                 });
             }
         }
-        let successful_obligations = self.compress();
-        assert!(successful_obligations.is_empty());
+        let successful_obligations = self.compress(DoCompleted::Yes);
+        assert!(successful_obligations.unwrap().is_empty());
         errors
     }
 
@@ -311,7 +319,8 @@ fn insert_into_error_cache(&mut self, node_index: usize) {
     /// be called in a loop until `outcome.stalled` is false.
     ///
     /// This CANNOT be unrolled (presently, at least).
-    pub fn process_obligations<P>(&mut self, processor: &mut P) -> Outcome<O, P::Error>
+    pub fn process_obligations<P>(&mut self, processor: &mut P, do_completed: DoCompleted)
+                                  -> Outcome<O, P::Error>
         where P: ObligationProcessor<Obligation=O>
     {
         debug!("process_obligations(len={})", self.nodes.len());
@@ -366,7 +375,7 @@ pub fn process_obligations<P>(&mut self, processor: &mut P) -> Outcome<O, P::Err
             // There's no need to perform marking, cycle processing and compression when nothing
             // changed.
             return Outcome {
-                completed: vec![],
+                completed: if do_completed == DoCompleted::Yes { Some(vec![]) } else { None },
                 errors,
                 stalled,
             };
@@ -376,12 +385,12 @@ pub fn process_obligations<P>(&mut self, processor: &mut P) -> Outcome<O, P::Err
         self.process_cycles(processor);
 
         // Now we have to compress the result
-        let completed_obligations = self.compress();
+        let completed = self.compress(do_completed);
 
         debug!("process_obligations: complete");
 
         Outcome {
-            completed: completed_obligations,
+            completed,
             errors,
             stalled,
         }
@@ -524,7 +533,7 @@ fn mark_as_waiting_from(&self, node: &Node<O>) {
     /// Beforehand, all nodes must be marked as `Done` and no cycles
     /// on these nodes may be present. This is done by e.g. `process_cycles`.
     #[inline(never)]
-    fn compress(&mut self) -> Vec<O> {
+    fn compress(&mut self, do_completed: DoCompleted) -> Option<Vec<O>> {
         let nodes_len = self.nodes.len();
         let mut node_rewrites: Vec<_> = self.scratch.take().unwrap();
         node_rewrites.extend(0..nodes_len);
@@ -573,21 +582,26 @@ fn compress(&mut self) -> Vec<O> {
         if dead_nodes == 0 {
             node_rewrites.truncate(0);
             self.scratch = Some(node_rewrites);
-            return vec![];
+            return if do_completed == DoCompleted::Yes { Some(vec![]) } else { None };
         }
 
         // Pop off all the nodes we killed and extract the success
         // stories.
-        let successful = (0..dead_nodes)
-                             .map(|_| self.nodes.pop().unwrap())
-                             .flat_map(|node| {
-                                 match node.state.get() {
-                                     NodeState::Error => None,
-                                     NodeState::Done => Some(node.obligation),
-                                     _ => unreachable!()
-                                 }
-                             })
-            .collect();
+        let successful = if do_completed == DoCompleted::Yes {
+            Some((0..dead_nodes)
+                .map(|_| self.nodes.pop().unwrap())
+                .flat_map(|node| {
+                    match node.state.get() {
+                        NodeState::Error => None,
+                        NodeState::Done => Some(node.obligation),
+                        _ => unreachable!()
+                    }
+                })
+                .collect())
+        } else {
+            self.nodes.truncate(self.nodes.len() - dead_nodes);
+            None
+        };
         self.apply_rewrites(&node_rewrites);
 
         node_rewrites.truncate(0);
index c27a65e34310ff19e774a6d0029366d5ffeaa8f2..2a418973fbda2010d37613bd78458123470bff5f 100644 (file)
@@ -10,7 +10,7 @@
 
 #![cfg(test)]
 
-use super::{Error, ObligationForest, ObligationProcessor, Outcome, ProcessResult};
+use super::{Error, DoCompleted, ObligationForest, ObligationProcessor, Outcome, ProcessResult};
 
 use std::fmt;
 use std::marker::PhantomData;
@@ -84,8 +84,8 @@ fn push_pop() {
                 "C" => ProcessResult::Changed(vec![]),
                 _ => unreachable!(),
             }
-        }, |_| {}));
-    assert_eq!(ok, vec!["C"]);
+        }, |_| {}), DoCompleted::Yes);
+    assert_eq!(ok.unwrap(), vec!["C"]);
     assert_eq!(err,
                vec![Error {
                         error: "B is for broken",
@@ -108,8 +108,8 @@ fn push_pop() {
                 "D" => ProcessResult::Changed(vec!["D.1", "D.2"]),
                 _ => unreachable!(),
             }
-        }, |_| {}));
-    assert_eq!(ok, Vec::<&'static str>::new());
+        }, |_| {}), DoCompleted::Yes);
+    assert_eq!(ok.unwrap(), Vec::<&'static str>::new());
     assert_eq!(err, Vec::new());
 
 
@@ -127,8 +127,8 @@ fn push_pop() {
                 "D.2" => ProcessResult::Changed(vec!["D.2.i"]),
                 _ => unreachable!(),
             }
-        }, |_| {}));
-    assert_eq!(ok, vec!["A.3", "A.1", "A.3.i"]);
+        }, |_| {}), DoCompleted::Yes);
+    assert_eq!(ok.unwrap(), vec!["A.3", "A.1", "A.3.i"]);
     assert_eq!(err,
                vec![Error {
                         error: "A is for apple",
@@ -143,8 +143,8 @@ fn push_pop() {
                 "D.2.i" => ProcessResult::Changed(vec![]),
                 _ => panic!("unexpected obligation {:?}", obligation),
             }
-        }, |_| {}));
-    assert_eq!(ok, vec!["D.2.i", "D.2"]);
+        }, |_| {}), DoCompleted::Yes);
+    assert_eq!(ok.unwrap(), vec!["D.2.i", "D.2"]);
     assert_eq!(err,
                vec![Error {
                         error: "D is for dumb",
@@ -171,8 +171,8 @@ fn success_in_grandchildren() {
                 "A" => ProcessResult::Changed(vec!["A.1", "A.2", "A.3"]),
                 _ => unreachable!(),
             }
-        }, |_| {}));
-    assert!(ok.is_empty());
+        }, |_| {}), DoCompleted::Yes);
+    assert!(ok.unwrap().is_empty());
     assert!(err.is_empty());
 
     let Outcome { completed: ok, errors: err, .. } =
@@ -183,8 +183,8 @@ fn success_in_grandchildren() {
                 "A.3" => ProcessResult::Changed(vec![]),
                 _ => unreachable!(),
             }
-        }, |_| {}));
-    assert_eq!(ok, vec!["A.3", "A.1"]);
+        }, |_| {}), DoCompleted::Yes);
+    assert_eq!(ok.unwrap(), vec!["A.3", "A.1"]);
     assert!(err.is_empty());
 
     let Outcome { completed: ok, errors: err, .. } =
@@ -194,8 +194,8 @@ fn success_in_grandchildren() {
                 "A.2.ii" => ProcessResult::Changed(vec![]),
                 _ => unreachable!(),
             }
-        }, |_| {}));
-    assert_eq!(ok, vec!["A.2.ii"]);
+        }, |_| {}), DoCompleted::Yes);
+    assert_eq!(ok.unwrap(), vec!["A.2.ii"]);
     assert!(err.is_empty());
 
     let Outcome { completed: ok, errors: err, .. } =
@@ -204,14 +204,15 @@ fn success_in_grandchildren() {
                 "A.2.i.a" => ProcessResult::Changed(vec![]),
                 _ => unreachable!(),
             }
-        }, |_| {}));
-    assert_eq!(ok, vec!["A.2.i.a", "A.2.i", "A.2", "A"]);
+        }, |_| {}), DoCompleted::Yes);
+    assert_eq!(ok.unwrap(), vec!["A.2.i.a", "A.2.i", "A.2", "A"]);
     assert!(err.is_empty());
 
     let Outcome { completed: ok, errors: err, .. } =
-        forest.process_obligations(&mut C(|_| unreachable!(), |_| {}));
+        forest.process_obligations(&mut C(|_| unreachable!(), |_| {}),
+        DoCompleted::Yes);
 
-    assert!(ok.is_empty());
+    assert!(ok.unwrap().is_empty());
     assert!(err.is_empty());
 }
 
@@ -227,8 +228,8 @@ fn to_errors_no_throw() {
                 "A" => ProcessResult::Changed(vec!["A.1", "A.2", "A.3"]),
                 _ => unreachable!(),
             }
-        }, |_|{}));
-    assert_eq!(ok.len(), 0);
+        }, |_|{}), DoCompleted::Yes);
+    assert_eq!(ok.unwrap().len(), 0);
     assert_eq!(err.len(), 0);
     let errors = forest.to_errors(());
     assert_eq!(errors[0].backtrace, vec!["A.1", "A"]);
@@ -248,8 +249,8 @@ fn diamond() {
                 "A" => ProcessResult::Changed(vec!["A.1", "A.2"]),
                 _ => unreachable!(),
             }
-        }, |_|{}));
-    assert_eq!(ok.len(), 0);
+        }, |_|{}), DoCompleted::Yes);
+    assert_eq!(ok.unwrap().len(), 0);
     assert_eq!(err.len(), 0);
 
     let Outcome { completed: ok, errors: err, .. } =
@@ -259,8 +260,8 @@ fn diamond() {
                 "A.2" => ProcessResult::Changed(vec!["D"]),
                 _ => unreachable!(),
             }
-        }, |_|{}));
-    assert_eq!(ok.len(), 0);
+        }, |_|{}), DoCompleted::Yes);
+    assert_eq!(ok.unwrap().len(), 0);
     assert_eq!(err.len(), 0);
 
     let mut d_count = 0;
@@ -270,9 +271,9 @@ fn diamond() {
                 "D" => { d_count += 1; ProcessResult::Changed(vec![]) },
                 _ => unreachable!(),
             }
-        }, |_|{}));
+        }, |_|{}), DoCompleted::Yes);
     assert_eq!(d_count, 1);
-    assert_eq!(ok, vec!["D", "A.2", "A.1", "A"]);
+    assert_eq!(ok.unwrap(), vec!["D", "A.2", "A.1", "A"]);
     assert_eq!(err.len(), 0);
 
     let errors = forest.to_errors(());
@@ -285,8 +286,8 @@ fn diamond() {
                 "A'" => ProcessResult::Changed(vec!["A'.1", "A'.2"]),
                 _ => unreachable!(),
             }
-        }, |_|{}));
-    assert_eq!(ok.len(), 0);
+        }, |_|{}), DoCompleted::Yes);
+    assert_eq!(ok.unwrap().len(), 0);
     assert_eq!(err.len(), 0);
 
     let Outcome { completed: ok, errors: err, .. } =
@@ -296,8 +297,8 @@ fn diamond() {
                 "A'.2" => ProcessResult::Changed(vec!["D'"]),
                 _ => unreachable!(),
             }
-        }, |_|{}));
-    assert_eq!(ok.len(), 0);
+        }, |_|{}), DoCompleted::Yes);
+    assert_eq!(ok.unwrap().len(), 0);
     assert_eq!(err.len(), 0);
 
     let mut d_count = 0;
@@ -307,9 +308,9 @@ fn diamond() {
                 "D'" => { d_count += 1; ProcessResult::Error("operation failed") },
                 _ => unreachable!(),
             }
-        }, |_|{}));
+        }, |_|{}), DoCompleted::Yes);
     assert_eq!(d_count, 1);
-    assert_eq!(ok.len(), 0);
+    assert_eq!(ok.unwrap().len(), 0);
     assert_eq!(err, vec![super::Error {
         error: "operation failed",
         backtrace: vec!["D'", "A'.1", "A'"]
@@ -333,8 +334,8 @@ fn done_dependency() {
                 "A: Sized" | "B: Sized" | "C: Sized" => ProcessResult::Changed(vec![]),
                 _ => unreachable!(),
             }
-        }, |_|{}));
-    assert_eq!(ok, vec!["C: Sized", "B: Sized", "A: Sized"]);
+        }, |_|{}), DoCompleted::Yes);
+    assert_eq!(ok.unwrap(), vec!["C: Sized", "B: Sized", "A: Sized"]);
     assert_eq!(err.len(), 0);
 
     forest.register_obligation("(A,B,C): Sized");
@@ -348,8 +349,8 @@ fn done_dependency() {
                         ]),
                 _ => unreachable!(),
             }
-        }, |_|{}));
-    assert_eq!(ok, vec!["(A,B,C): Sized"]);
+        }, |_|{}), DoCompleted::Yes);
+    assert_eq!(ok.unwrap(), vec!["(A,B,C): Sized"]);
     assert_eq!(err.len(), 0);
 }
 
@@ -371,8 +372,8 @@ fn orphan() {
                 "C2" => ProcessResult::Changed(vec![]),
                 _ => unreachable!(),
             }
-        }, |_|{}));
-    assert_eq!(ok, vec!["C2", "C1"]);
+        }, |_|{}), DoCompleted::Yes);
+    assert_eq!(ok.unwrap(), vec!["C2", "C1"]);
     assert_eq!(err.len(), 0);
 
     let Outcome { completed: ok, errors: err, .. } =
@@ -382,8 +383,8 @@ fn orphan() {
                 "B" => ProcessResult::Changed(vec!["D"]),
                 _ => unreachable!(),
             }
-        }, |_|{}));
-    assert_eq!(ok.len(), 0);
+        }, |_|{}), DoCompleted::Yes);
+    assert_eq!(ok.unwrap().len(), 0);
     assert_eq!(err.len(), 0);
 
     let Outcome { completed: ok, errors: err, .. } =
@@ -393,8 +394,8 @@ fn orphan() {
                 "E" => ProcessResult::Error("E is for error"),
                 _ => unreachable!(),
             }
-        }, |_|{}));
-    assert_eq!(ok.len(), 0);
+        }, |_|{}), DoCompleted::Yes);
+    assert_eq!(ok.unwrap().len(), 0);
     assert_eq!(err, vec![super::Error {
         error: "E is for error",
         backtrace: vec!["E", "A"]
@@ -406,8 +407,8 @@ fn orphan() {
                 "D" => ProcessResult::Error("D is dead"),
                 _ => unreachable!(),
             }
-        }, |_|{}));
-    assert_eq!(ok.len(), 0);
+        }, |_|{}), DoCompleted::Yes);
+    assert_eq!(ok.unwrap().len(), 0);
     assert_eq!(err, vec![super::Error {
         error: "D is dead",
         backtrace: vec!["D"]
@@ -431,8 +432,8 @@ fn simultaneous_register_and_error() {
                 "B" => ProcessResult::Changed(vec!["A"]),
                 _ => unreachable!(),
             }
-        }, |_|{}));
-    assert_eq!(ok.len(), 0);
+        }, |_|{}), DoCompleted::Yes);
+    assert_eq!(ok.unwrap().len(), 0);
     assert_eq!(err, vec![super::Error {
         error: "An error",
         backtrace: vec!["A"]
@@ -449,8 +450,8 @@ fn simultaneous_register_and_error() {
                 "B" => ProcessResult::Changed(vec!["A"]),
                 _ => unreachable!(),
             }
-        }, |_|{}));
-    assert_eq!(ok.len(), 0);
+        }, |_|{}), DoCompleted::Yes);
+    assert_eq!(ok.unwrap().len(), 0);
     assert_eq!(err, vec![super::Error {
         error: "An error",
         backtrace: vec!["A"]
index 470c8b03d0bca212282d8bc9c4185eb58418f5e7..1e32f5ef6f0b61c82a92249497fa2c40aa86048c 100644 (file)
@@ -38,3 +38,8 @@ syntax = { path = "../libsyntax" }
 smallvec = { version = "0.6.5", features = ["union"] }
 syntax_ext = { path = "../libsyntax_ext" }
 syntax_pos = { path = "../libsyntax_pos" }
+
+[dependencies.jemalloc-sys]
+version = '0.1.8'
+optional = true
+features = ['unprefixed_malloc_on_supported_platforms']
index 276b7290c2ef012de4ac7e0c83b48a1a77702232..6c7982242bfada275219bc82d01eac49235190b6 100644 (file)
 extern crate syntax_ext;
 extern crate syntax_pos;
 
+// Note that the linkage here should be all that we need, on Linux we're not
+// prefixing the symbols here so this should naturally override our default
+// allocator. On OSX it should override via the zone allocator. We shouldn't
+// enable this by default on other platforms, so other platforms aren't handled
+// here yet.
+#[cfg(feature = "jemalloc-sys")]
+extern crate jemalloc_sys;
+
 use driver::CompileController;
 use pretty::{PpMode, UserIdentifiedItem};
 
@@ -944,7 +952,7 @@ fn build_controller(self: Box<Self>,
             control.compilation_done.callback = box move |state| {
                 old_callback(state);
                 let sess = state.session;
-                println!("Fuel used by {}: {}",
+                eprintln!("Fuel used by {}: {}",
                     sess.print_fuel_crate.as_ref().unwrap(),
                     sess.print_fuel.get());
             }
index f18f40bf7a1448851a6bc44b398673d86a8eb76b..28b7c610a91c0f8a9d2ccf728c9d1f854d9b6ee9 100644 (file)
@@ -616,22 +616,22 @@ fn escaping() {
         // Theta = [A -> &'a foo]
         env.create_simple_region_hierarchy();
 
-        assert!(!env.t_nil().has_escaping_regions());
+        assert!(!env.t_nil().has_escaping_bound_vars());
 
         let t_rptr_free1 = env.t_rptr_free(1);
-        assert!(!t_rptr_free1.has_escaping_regions());
+        assert!(!t_rptr_free1.has_escaping_bound_vars());
 
         let t_rptr_bound1 = env.t_rptr_late_bound_with_debruijn(1, d1());
-        assert!(t_rptr_bound1.has_escaping_regions());
+        assert!(t_rptr_bound1.has_escaping_bound_vars());
 
         let t_rptr_bound2 = env.t_rptr_late_bound_with_debruijn(1, d2());
-        assert!(t_rptr_bound2.has_escaping_regions());
+        assert!(t_rptr_bound2.has_escaping_bound_vars());
 
         // t_fn = fn(A)
         let t_param = env.t_param(0);
-        assert!(!t_param.has_escaping_regions());
+        assert!(!t_param.has_escaping_bound_vars());
         let t_fn = env.t_fn(&[t_param], env.t_nil());
-        assert!(!t_fn.has_escaping_regions());
+        assert!(!t_fn.has_escaping_bound_vars());
     })
 }
 
index 870eeadc081e76b856087c8d2b84c638eaaed545..a323282f2335325896a68fafa95d3bc2c5bd54b3 100644 (file)
@@ -350,10 +350,10 @@ pub fn span_suggestion_with_applicability(&mut self, sp: Span, msg: &str,
     }
 
     pub fn span_suggestions_with_applicability(&mut self, sp: Span, msg: &str,
-                                        suggestions: Vec<String>,
-                                        applicability: Applicability) -> &mut Self {
+        suggestions: impl Iterator<Item = String>, applicability: Applicability) -> &mut Self
+    {
         self.suggestions.push(CodeSuggestion {
-            substitutions: suggestions.into_iter().map(|snippet| Substitution {
+            substitutions: suggestions.map(|snippet| Substitution {
                 parts: vec![SubstitutionPart {
                     snippet,
                     span: sp,
index f4289ea2d4b26e59b2d3ac2006edc7212d420b6c..2f16470530e4374581750e89c1f2afef8dc6c4fc 100644 (file)
@@ -253,7 +253,7 @@ pub fn span_suggestion_with_applicability(&mut self,
     pub fn span_suggestions_with_applicability(&mut self,
                                                sp: Span,
                                                msg: &str,
-                                               suggestions: Vec<String>,
+                                               suggestions: impl Iterator<Item = String>,
                                                applicability: Applicability)
                                                -> &mut Self {
         if !self.allow_suggestions {
index 5197876f921973ce7d38770eeea3beb2ede7e744..0b788a8fd97630523487e41bc6a172a52500169d 100644 (file)
@@ -377,13 +377,13 @@ fn report_bin_hex_error(
             let (t, actually) = match ty {
                 ty::Int(t) => {
                     let ity = attr::IntType::SignedInt(t);
-                    let bits = layout::Integer::from_attr(cx.tcx, ity).size().bits();
+                    let bits = layout::Integer::from_attr(&cx.tcx, ity).size().bits();
                     let actually = (val << (128 - bits)) as i128 >> (128 - bits);
                     (format!("{:?}", t), actually.to_string())
                 }
                 ty::Uint(t) => {
                     let ity = attr::IntType::UnsignedInt(t);
-                    let bits = layout::Integer::from_attr(cx.tcx, ity).size().bits();
+                    let bits = layout::Integer::from_attr(&cx.tcx, ity).size().bits();
                     let actually = (val << (128 - bits)) >> (128 - bits);
                     (format!("{:?}", t), actually.to_string())
                 }
@@ -718,6 +718,7 @@ fn check_type_for_ffi(&self,
 
             ty::Param(..) |
             ty::Infer(..) |
+            ty::Bound(..) |
             ty::Error |
             ty::Closure(..) |
             ty::Generator(..) |
@@ -828,7 +829,7 @@ fn check_item(&mut self, cx: &LateContext, it: &hir::Item) {
                 Ok(layout) => {
                     let variants = &layout.variants;
                     if let layout::Variants::Tagged { ref variants, ref tag, .. } = variants {
-                        let discr_size = tag.value.size(cx.tcx).bytes();
+                        let discr_size = tag.value.size(&cx.tcx).bytes();
 
                         debug!("enum `{}` is {} bytes large with layout:\n{:#?}",
                                t, layout.size.bytes(), layout);
index 5950e19b0ee1d449562e8007ead16b09df8603dc..6d365e6d1ecbf1c7d0066b342df471aed461454d 100644 (file)
@@ -29,7 +29,8 @@
 declare_lint! {
     pub UNUSED_MUST_USE,
     Warn,
-    "unused result of a type flagged as #[must_use]"
+    "unused result of a type flagged as #[must_use]",
+    report_in_external_macro: true
 }
 
 declare_lint! {
index 6142fe78149ce219c5715d8c43f629d0c07d71a9..338824d5efe4caa862256121941b11a7e38bdbce 100644 (file)
@@ -20,4 +20,3 @@ serialize = { path = "../libserialize" }
 syntax = { path = "../libsyntax" }
 syntax_ext = { path = "../libsyntax_ext" }
 syntax_pos = { path = "../libsyntax_pos" }
-rustc_metadata_utils = { path = "../librustc_metadata_utils" }
index 4b96735eb77b4945f13bf2f5769f1ea5cb0206cc..7733ab2e246d1135fa3fac808b73ce98c0375852 100644 (file)
@@ -30,8 +30,6 @@
 use rustc::util::nodemap::FxHashSet;
 use rustc::hir::map::Definitions;
 
-use rustc_metadata_utils::validate_crate_name;
-
 use std::ops::Deref;
 use std::path::PathBuf;
 use std::{cmp, fs};
@@ -1106,7 +1104,7 @@ pub fn process_extern_crate(
                        item.ident, orig_name);
                 let orig_name = match orig_name {
                     Some(orig_name) => {
-                        validate_crate_name(Some(self.sess), &orig_name.as_str(),
+                        ::validate_crate_name(Some(self.sess), &orig_name.as_str(),
                                             Some(item.span));
                         orig_name
                     }
index 9864c1f3d7c686dca4c69c3b88c2a68be731138f..0854df5d1269f36679846fd387347d1c1c59128b 100644 (file)
@@ -400,7 +400,7 @@ pub fn list_crate_metadata(&self,
         for (i, dep) in root.crate_deps
                             .decode(self)
                             .enumerate() {
-            write!(out, "{} {}-{}\n", i + 1, dep.name, dep.hash)?;
+            write!(out, "{} {}{}\n", i + 1, dep.name, dep.extra_filename)?;
         }
         write!(out, "\n")?;
         Ok(())
index 7008166b9035d7248c9f29997315a768c97e1a3a..0cc0707a3a51f263bcdf0f5ca347ed71c41f56af 100644 (file)
@@ -38,7 +38,6 @@
 extern crate rustc_errors as errors;
 extern crate syntax_ext;
 extern crate proc_macro;
-extern crate rustc_metadata_utils;
 
 #[macro_use]
 extern crate rustc;
 pub mod dynamic_lib;
 pub mod locator;
 
+pub fn validate_crate_name(
+    sess: Option<&rustc::session::Session>,
+    s: &str,
+    sp: Option<syntax_pos::Span>
+) {
+    let mut err_count = 0;
+    {
+        let mut say = |s: &str| {
+            match (sp, sess) {
+                (_, None) => bug!("{}", s),
+                (Some(sp), Some(sess)) => sess.span_err(sp, s),
+                (None, Some(sess)) => sess.err(s),
+            }
+            err_count += 1;
+        };
+        if s.is_empty() {
+            say("crate name must not be empty");
+        }
+        for c in s.chars() {
+            if c.is_alphanumeric() { continue }
+            if c == '_'  { continue }
+            say(&format!("invalid character `{}` in crate name: `{}`", c, s));
+        }
+    }
+
+    if err_count > 0 {
+        sess.unwrap().abort_if_errors();
+    }
+}
+
 __build_diagnostic_array! { librustc_metadata, DIAGNOSTICS }
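
The `validate_crate_name` function moved above accepts exactly the non-empty names whose characters are all alphanumeric or `_`; anything else is reported as an error. As a quick illustration only (not part of this commit; the helper name below is invented), the same rule as a standalone predicate:

// Hypothetical distillation of the rule enforced by `validate_crate_name` above.
fn is_valid_crate_name(s: &str) -> bool {
    !s.is_empty() && s.chars().all(|c| c.is_alphanumeric() || c == '_')
}

fn main() {
    assert!(is_valid_crate_name("serde_json"));
    assert!(!is_valid_crate_name(""));         // empty names are rejected
    assert!(!is_valid_crate_name("my-crate")); // `-` is neither alphanumeric nor `_`
}
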
diff --git a/src/librustc_metadata_utils/Cargo.toml b/src/librustc_metadata_utils/Cargo.toml
deleted file mode 100644 (file)
index 4a5e203..0000000
+++ /dev/null
@@ -1,14 +0,0 @@
-[package]
-authors = ["The Rust Project Developers"]
-name = "rustc_metadata_utils"
-version = "0.0.0"
-
-[lib]
-name = "rustc_metadata_utils"
-path = "lib.rs"
-crate-type = ["dylib"]
-
-[dependencies]
-rustc = { path = "../librustc" }
-syntax = { path = "../libsyntax" }
-syntax_pos = { path = "../libsyntax_pos" }
diff --git a/src/librustc_metadata_utils/lib.rs b/src/librustc_metadata_utils/lib.rs
deleted file mode 100644 (file)
index a1e5150..0000000
+++ /dev/null
@@ -1,42 +0,0 @@
-// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-#[macro_use]
-extern crate rustc;
-extern crate syntax_pos;
-
-use rustc::session::Session;
-use syntax_pos::Span;
-
-pub fn validate_crate_name(sess: Option<&Session>, s: &str, sp: Option<Span>) {
-    let mut err_count = 0;
-    {
-        let mut say = |s: &str| {
-            match (sp, sess) {
-                (_, None) => bug!("{}", s),
-                (Some(sp), Some(sess)) => sess.span_err(sp, s),
-                (None, Some(sess)) => sess.err(s),
-            }
-            err_count += 1;
-        };
-        if s.is_empty() {
-            say("crate name must not be empty");
-        }
-        for c in s.chars() {
-            if c.is_alphanumeric() { continue }
-            if c == '_'  { continue }
-            say(&format!("invalid character `{}` in crate name: `{}`", c, s));
-        }
-    }
-
-    if err_count > 0 {
-        sess.unwrap().abort_if_errors();
-    }
-}
index b2b92a6f857845ef8d626147cc6fc9cf0f4b8df6..3c4d8e09fc166b00018e7888b108579b3161c668 100644 (file)
@@ -788,7 +788,7 @@ fn report_borrow_conflicts_with_destructor(
 
         let what_was_dropped = match self.describe_place(place) {
             Some(name) => format!("`{}`", name.as_str()),
-            None => format!("temporary value"),
+            None => String::from("temporary value"),
         };
 
         let label = match self.describe_place(&borrow.borrowed_place) {
@@ -1028,7 +1028,7 @@ fn report_escaping_closure_capture(
 
         match category {
             ConstraintCategory::Return => {
-                err.span_note(constraint_span, &format!("closure is returned here"));
+                err.span_note(constraint_span, "closure is returned here");
             }
             ConstraintCategory::CallArgument => {
                 fr_name.highlight_region_name(&mut err);
index 8d10c0395f5083fac7316a777e154ad5421cbf78..d4f00ab3bb91a217bfdf7f8738f01455ae8f6409 100644 (file)
@@ -590,7 +590,7 @@ fn visit_statement_entry(
                         );
                     }
                 }
-                for input in inputs.iter() {
+                for (_, input) in inputs.iter() {
                     self.consume_operand(context, (input, span), flow_state);
                 }
             }
@@ -600,9 +600,9 @@ fn visit_statement_entry(
             }
             StatementKind::Nop
             | StatementKind::AscribeUserType(..)
-            | StatementKind::Validate(..)
+            | StatementKind::Retag { .. }
             | StatementKind::StorageLive(..) => {
-                // `Nop`, `AscribeUserType`, `Validate`, and `StorageLive` are irrelevant
+                // `Nop`, `AscribeUserType`, `Retag`, and `StorageLive` are irrelevant
                 // to borrow check.
             }
             StatementKind::StorageDead(local) => {
index 002f35880ae6b92d1dda52012b3f8cab6e747c89..cfe03c2d1c71e593a00c2021d368ca2e89de5cf0 100644 (file)
@@ -128,7 +128,7 @@ fn visit_statement(&mut self,
                         );
                     }
                 }
-                for input in inputs.iter() {
+                for (_, input) in inputs.iter() {
                     self.consume_operand(context, input);
                 }
             }
@@ -136,9 +136,9 @@ fn visit_statement(&mut self,
             StatementKind::EndRegion(..) |
             StatementKind::Nop |
             StatementKind::AscribeUserType(..) |
-            StatementKind::Validate(..) |
+            StatementKind::Retag { .. } |
             StatementKind::StorageLive(..) => {
-                // `Nop`, `AscribeUserType`, `Validate`, and `StorageLive` are irrelevant
+                // `Nop`, `AscribeUserType`, `Retag`, and `StorageLive` are irrelevant
                 // to borrow check.
             }
             StatementKind::StorageDead(local) => {
index 8fc54b6ff92a3e2b0099aade09ff191e0fe7f6b2..0c4140caee86f8e85542955b50e9cf4ccbb59127 100644 (file)
@@ -107,7 +107,6 @@ pub(in borrow_check) fn compute_regions<'cx, 'gcx, 'tcx>(
     // Run the MIR type-checker.
     let MirTypeckResults {
         constraints,
-        placeholder_indices,
         universal_region_relations,
     } = type_check::type_check(
         infcx,
@@ -123,8 +122,6 @@ pub(in borrow_check) fn compute_regions<'cx, 'gcx, 'tcx>(
         elements,
     );
 
-    let placeholder_indices = Rc::new(placeholder_indices);
-
     if let Some(all_facts) = &mut all_facts {
         all_facts
             .universal_region
@@ -136,11 +133,14 @@ pub(in borrow_check) fn compute_regions<'cx, 'gcx, 'tcx>(
     // base constraints generated by the type-check.
     let var_origins = infcx.take_region_var_origins();
     let MirTypeckRegionConstraints {
+        placeholder_indices,
+        placeholder_index_to_region: _,
         mut liveness_constraints,
         outlives_constraints,
         closure_bounds_mapping,
         type_tests,
     } = constraints;
+    let placeholder_indices = Rc::new(placeholder_indices);
 
     constraint_generation::generate_constraints(
         infcx,
index ccb44c670f72fd9873469655ddbacfbcdd2b3a67..3358e5851f939d10c7da2d079c8ee0fc92839f47 100644 (file)
@@ -8,23 +8,24 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
-use borrow_check::nll::ConstraintDescription;
-use borrow_check::nll::constraints::{OutlivesConstraint};
+use borrow_check::nll::constraints::OutlivesConstraint;
 use borrow_check::nll::region_infer::RegionInferenceContext;
 use borrow_check::nll::type_check::Locations;
 use borrow_check::nll::universal_regions::DefiningTy;
-use util::borrowck_errors::{BorrowckErrors, Origin};
+use borrow_check::nll::ConstraintDescription;
 use rustc::hir::def_id::DefId;
 use rustc::infer::error_reporting::nice_region_error::NiceRegionError;
 use rustc::infer::InferCtxt;
+use rustc::infer::NLLRegionVariableOrigin;
 use rustc::mir::{ConstraintCategory, Location, Mir};
 use rustc::ty::{self, RegionVid};
 use rustc_data_structures::indexed_vec::IndexVec;
 use rustc_errors::{Diagnostic, DiagnosticBuilder};
 use std::collections::VecDeque;
+use syntax::errors::Applicability;
 use syntax::symbol::keywords;
 use syntax_pos::Span;
-use syntax::errors::Applicability;
+use util::borrowck_errors::{BorrowckErrors, Origin};
 
 mod region_name;
 mod var_name;
@@ -76,9 +77,9 @@ fn best_blame_constraint(
         debug!("best_blame_constraint(from_region={:?})", from_region);
 
         // Find all paths
-        let (path, target_region) = self
-            .find_constraint_paths_between_regions(from_region, target_test)
-            .unwrap();
+        let (path, target_region) =
+            self.find_constraint_paths_between_regions(from_region, target_test)
+                .unwrap();
         debug!(
             "best_blame_constraint: path={:#?}",
             path.iter()
@@ -92,8 +93,7 @@ fn best_blame_constraint(
         );
 
         // Classify each of the constraints along the path.
-        let mut categorized_path: Vec<(ConstraintCategory, bool, Span)> = path
-            .iter()
+        let mut categorized_path: Vec<(ConstraintCategory, bool, Span)> = path.iter()
             .map(|constraint| {
                 if constraint.category == ConstraintCategory::ClosureBounds {
                     self.retrieve_closure_constraint_info(mir, &constraint)
@@ -137,13 +137,12 @@ fn best_blame_constraint(
                 | ConstraintCategory::Boring
                 | ConstraintCategory::BoringNoLocation
                 | ConstraintCategory::Internal => false,
-                ConstraintCategory::TypeAnnotation
-                | ConstraintCategory::Return => true,
+                ConstraintCategory::TypeAnnotation | ConstraintCategory::Return => true,
                 _ => constraint_sup_scc != target_scc,
             }
         });
         if let Some(i) = best_choice {
-            return categorized_path[i]
+            return categorized_path[i];
         }
 
         // If that search fails, that is.. unusual. Maybe everything
@@ -179,6 +178,13 @@ fn find_constraint_paths_between_regions(
         deque.push_back(from_region);
 
         while let Some(r) = deque.pop_front() {
+            debug!(
+                "find_constraint_paths_between_regions: from_region={:?} r={:?} value={}",
+                from_region,
+                r,
+                self.region_value_str(r),
+            );
+
             // Check if we reached the region we were looking for. If so,
             // we can reconstruct the path that led to it and return it.
             if target_test(r) {
@@ -206,9 +212,9 @@ fn find_constraint_paths_between_regions(
             // enqueue any regions we find, keeping track of how we
             // reached them.
             let fr_static = self.universal_regions.fr_static;
-            for constraint in self.constraint_graph.outgoing_edges(r,
-                                                                   &self.constraints,
-                                                                   fr_static) {
+            for constraint in self.constraint_graph
+                .outgoing_edges(r, &self.constraints, fr_static)
+            {
                 assert_eq!(constraint.sup, r);
                 let sub_region = constraint.sub;
                 if let Trace::NotVisited = context[sub_region] {
@@ -240,11 +246,9 @@ pub(super) fn report_error(
     ) {
         debug!("report_error(fr={:?}, outlived_fr={:?})", fr, outlived_fr);
 
-        let (category, _, span) = self.best_blame_constraint(
-            mir,
-            fr,
-            |r| r == outlived_fr
-        );
+        let (category, _, span) = self.best_blame_constraint(mir, fr, |r| {
+            self.provides_universal_region(r, fr, outlived_fr)
+        });
 
         // Check if we can use one of the "nice region errors".
         if let (Some(f), Some(o)) = (self.to_error_region(fr), self.to_error_region(outlived_fr)) {
@@ -260,23 +264,75 @@ pub(super) fn report_error(
             self.universal_regions.is_local_free_region(outlived_fr),
         );
 
-        debug!("report_error: fr_is_local={:?} outlived_fr_is_local={:?} category={:?}",
-               fr_is_local, outlived_fr_is_local, category);
+        debug!(
+            "report_error: fr_is_local={:?} outlived_fr_is_local={:?} category={:?}",
+            fr_is_local, outlived_fr_is_local, category
+        );
         match (category, fr_is_local, outlived_fr_is_local) {
-            (ConstraintCategory::Return, true, false) if self.is_closure_fn_mut(infcx, fr) =>
-                self.report_fnmut_error(mir, infcx, mir_def_id, fr, outlived_fr, span,
-                                        errors_buffer),
-            (ConstraintCategory::Assignment, true, false) |
-            (ConstraintCategory::CallArgument, true, false) =>
-                self.report_escaping_data_error(mir, infcx, mir_def_id, fr, outlived_fr,
-                                                category, span, errors_buffer),
-            _ =>
-                self.report_general_error(mir, infcx, mir_def_id, fr, fr_is_local,
-                                          outlived_fr, outlived_fr_is_local,
-                                          category, span, errors_buffer),
+            (ConstraintCategory::Return, true, false) if self.is_closure_fn_mut(infcx, fr) => {
+                self.report_fnmut_error(
+                    mir,
+                    infcx,
+                    mir_def_id,
+                    fr,
+                    outlived_fr,
+                    span,
+                    errors_buffer,
+                )
+            }
+            (ConstraintCategory::Assignment, true, false)
+            | (ConstraintCategory::CallArgument, true, false) => self.report_escaping_data_error(
+                mir,
+                infcx,
+                mir_def_id,
+                fr,
+                outlived_fr,
+                category,
+                span,
+                errors_buffer,
+            ),
+            _ => self.report_general_error(
+                mir,
+                infcx,
+                mir_def_id,
+                fr,
+                fr_is_local,
+                outlived_fr,
+                outlived_fr_is_local,
+                category,
+                span,
+                errors_buffer,
+            ),
         };
     }
 
+    /// We have a constraint `fr1: fr2` that is not satisfied, where
+    /// `fr2` represents some universal region. Here, `r` is some
+    /// region where we know that `fr1: r` and this function has the
+    /// job of determining whether `r` is "to blame" for the fact that
+    /// `fr1: fr2` is required.
+    ///
+    /// This is true under two conditions:
+    ///
+    /// - `r == fr2`
+    /// - `fr2` is `'static` and `r` is some placeholder in a universe
+    ///   that cannot be named by `fr1`; in that case, we will require
+    ///   that `fr1: 'static` because it is the only way to `fr1: r` to
+    ///   be satisfied. (See `add_incompatible_universe`.)
+    fn provides_universal_region(&self, r: RegionVid, fr1: RegionVid, fr2: RegionVid) -> bool {
+        debug!(
+            "provides_universal_region(r={:?}, fr1={:?}, fr2={:?})",
+            r, fr1, fr2
+        );
+        let result = {
+            r == fr2 || {
+                fr2 == self.universal_regions.fr_static && self.cannot_name_placeholder(fr1, r)
+            }
+        };
+        debug!("provides_universal_region: result = {:?}", result);
+        result
+    }
+
     /// Report a specialized error when `FnMut` closures return a reference to a captured variable.
     /// This function expects `fr` to be local and `outlived_fr` to not be local.
     ///
@@ -303,10 +359,10 @@ fn report_fnmut_error(
         span: Span,
         errors_buffer: &mut Vec<Diagnostic>,
     ) {
-        let mut diag = infcx.tcx.sess.struct_span_err(
-            span,
-            "captured variable cannot escape `FnMut` closure body",
-        );
+        let mut diag = infcx
+            .tcx
+            .sess
+            .struct_span_err(span, "captured variable cannot escape `FnMut` closure body");
 
         // We should check if the return type of this closure is in fact a closure - in that
         // case, we can special case the error further.
@@ -318,27 +374,28 @@ fn report_fnmut_error(
             "returns a reference to a captured variable which escapes the closure body"
         };
 
-        diag.span_label(
-            span,
-            message,
-        );
+        diag.span_label(span, message);
 
-        match self.give_region_a_name(infcx, mir, mir_def_id, outlived_fr, &mut 1).source {
-            RegionNameSource::NamedEarlyBoundRegion(fr_span) |
-            RegionNameSource::NamedFreeRegion(fr_span) |
-            RegionNameSource::SynthesizedFreeEnvRegion(fr_span, _) |
-            RegionNameSource::CannotMatchHirTy(fr_span, _) |
-            RegionNameSource::MatchedHirTy(fr_span) |
-            RegionNameSource::MatchedAdtAndSegment(fr_span) |
-            RegionNameSource::AnonRegionFromUpvar(fr_span, _) |
-            RegionNameSource::AnonRegionFromOutput(fr_span, _, _) => {
+        match self.give_region_a_name(infcx, mir, mir_def_id, outlived_fr, &mut 1)
+            .source
+        {
+            RegionNameSource::NamedEarlyBoundRegion(fr_span)
+            | RegionNameSource::NamedFreeRegion(fr_span)
+            | RegionNameSource::SynthesizedFreeEnvRegion(fr_span, _)
+            | RegionNameSource::CannotMatchHirTy(fr_span, _)
+            | RegionNameSource::MatchedHirTy(fr_span)
+            | RegionNameSource::MatchedAdtAndSegment(fr_span)
+            | RegionNameSource::AnonRegionFromUpvar(fr_span, _)
+            | RegionNameSource::AnonRegionFromOutput(fr_span, _, _) => {
                 diag.span_label(fr_span, "inferred to be a `FnMut` closure");
-            },
-            _ => {},
+            }
+            _ => {}
         }
 
-        diag.note("`FnMut` closures only have access to their captured variables while they are \
-                   executing...");
+        diag.note(
+            "`FnMut` closures only have access to their captured variables while they are \
+             executing...",
+        );
         diag.note("...therefore, they cannot allow references to captured variables to escape");
 
         diag.buffer(errors_buffer);
@@ -375,7 +432,7 @@ fn report_escaping_data_error(
             DefiningTy::Closure(..) => "closure",
             DefiningTy::Generator(..) => "generator",
             DefiningTy::FnDef(..) => "function",
-            DefiningTy::Const(..) => "const"
+            DefiningTy::Const(..) => "const",
         };
 
         // Revert to the normal error in these cases.
@@ -384,12 +441,23 @@ fn report_escaping_data_error(
             || (category == ConstraintCategory::Assignment && escapes_from == "function")
             || escapes_from == "const"
         {
-            return self.report_general_error(mir, infcx, mir_def_id,
-                                             fr, true, outlived_fr, false,
-                                             category, span, errors_buffer);
+            return self.report_general_error(
+                mir,
+                infcx,
+                mir_def_id,
+                fr,
+                true,
+                outlived_fr,
+                false,
+                category,
+                span,
+                errors_buffer,
+            );
         }
 
-        let mut diag = infcx.tcx.borrowed_data_escapes_closure(span, escapes_from, Origin::Mir);
+        let mut diag = infcx
+            .tcx
+            .borrowed_data_escapes_closure(span, escapes_from, Origin::Mir);
 
         if let Some((Some(outlived_fr_name), outlived_fr_span)) = outlived_fr_name_and_span {
             diag.span_label(
@@ -410,7 +478,10 @@ fn report_escaping_data_error(
                 ),
             );
 
-            diag.span_label(span, format!("`{}` escapes the {} body here", fr_name, escapes_from));
+            diag.span_label(
+                span,
+                format!("`{}` escapes the {} body here", fr_name, escapes_from),
+            );
         }
 
         diag.buffer(errors_buffer);
@@ -452,31 +523,41 @@ fn report_general_error(
         let counter = &mut 1;
         let fr_name = self.give_region_a_name(infcx, mir, mir_def_id, fr, counter);
         fr_name.highlight_region_name(&mut diag);
-        let outlived_fr_name = self.give_region_a_name(
-            infcx, mir, mir_def_id, outlived_fr, counter);
+        let outlived_fr_name =
+            self.give_region_a_name(infcx, mir, mir_def_id, outlived_fr, counter);
         outlived_fr_name.highlight_region_name(&mut diag);
 
-        let mir_def_name = if infcx.tcx.is_closure(mir_def_id) { "closure" } else { "function" };
+        let mir_def_name = if infcx.tcx.is_closure(mir_def_id) {
+            "closure"
+        } else {
+            "function"
+        };
 
         match (category, outlived_fr_is_local, fr_is_local) {
             (ConstraintCategory::Return, true, _) => {
-                diag.span_label(span, format!(
-                    "{} was supposed to return data with lifetime `{}` but it is returning \
-                    data with lifetime `{}`",
-                    mir_def_name, outlived_fr_name, fr_name
-                ));
-            },
+                diag.span_label(
+                    span,
+                    format!(
+                        "{} was supposed to return data with lifetime `{}` but it is returning \
+                         data with lifetime `{}`",
+                        mir_def_name, outlived_fr_name, fr_name
+                    ),
+                );
+            }
             _ => {
-                diag.span_label(span, format!(
-                    "{}requires that `{}` must outlive `{}`",
-                    category.description(), fr_name, outlived_fr_name,
-                ));
-            },
+                diag.span_label(
+                    span,
+                    format!(
+                        "{}requires that `{}` must outlive `{}`",
+                        category.description(),
+                        fr_name,
+                        outlived_fr_name,
+                    ),
+                );
+            }
         }
 
-        self.add_static_impl_trait_suggestion(
-            infcx, &mut diag, fr, fr_name, outlived_fr,
-        );
+        self.add_static_impl_trait_suggestion(infcx, &mut diag, fr, fr_name, outlived_fr);
 
         diag.buffer(errors_buffer);
     }
@@ -499,17 +580,18 @@ fn add_static_impl_trait_suggestion(
         fr_name: RegionName,
         outlived_fr: RegionVid,
     ) {
-        if let (
-            Some(f),
-            Some(ty::RegionKind::ReStatic)
-        ) = (self.to_error_region(fr), self.to_error_region(outlived_fr)) {
+        if let (Some(f), Some(ty::RegionKind::ReStatic)) =
+            (self.to_error_region(fr), self.to_error_region(outlived_fr))
+        {
             if let Some(ty::TyS {
                 sty: ty::TyKind::Opaque(did, substs),
                 ..
-            }) = infcx.tcx.is_suitable_region(f)
-                    .map(|r| r.def_id)
-                    .map(|id| infcx.tcx.return_type_impl_trait(id))
-                    .unwrap_or(None)
+            }) = infcx
+                .tcx
+                .is_suitable_region(f)
+                .map(|r| r.def_id)
+                .map(|id| infcx.tcx.return_type_impl_trait(id))
+                .unwrap_or(None)
             {
                 // Check whether or not the impl trait return type is intended to capture
                 // data with the static lifetime.
@@ -522,10 +604,9 @@ fn add_static_impl_trait_suggestion(
                     let mut found = false;
                     for predicate in bounds.predicates {
                         if let ty::Predicate::TypeOutlives(binder) = predicate {
-                            if let ty::OutlivesPredicate(
-                                _,
-                                ty::RegionKind::ReStatic
-                            ) = binder.skip_binder() {
+                            if let ty::OutlivesPredicate(_, ty::RegionKind::ReStatic) =
+                                binder.skip_binder()
+                            {
                                 found = true;
                                 break;
                             }
@@ -535,18 +616,18 @@ fn add_static_impl_trait_suggestion(
                     found
                 };
 
-                debug!("add_static_impl_trait_suggestion: has_static_predicate={:?}",
-                       has_static_predicate);
+                debug!(
+                    "add_static_impl_trait_suggestion: has_static_predicate={:?}",
+                    has_static_predicate
+                );
                 let static_str = keywords::StaticLifetime.name();
                 // If there is a static predicate, then the only sensible suggestion is to replace
                 // fr with `'static`.
                 if has_static_predicate {
-                    diag.help(
-                        &format!(
-                            "consider replacing `{}` with `{}`",
-                            fr_name, static_str,
-                        ),
-                    );
+                    diag.help(&format!(
+                        "consider replacing `{}` with `{}`",
+                        fr_name, static_str,
+                    ));
                 } else {
                     // Otherwise, we should suggest adding a constraint on the return type.
                     let span = infcx.tcx.def_span(*did);
@@ -581,25 +662,48 @@ fn add_static_impl_trait_suggestion(
         borrow_region: RegionVid,
         outlived_region: RegionVid,
     ) -> (ConstraintCategory, bool, Span, RegionName) {
-        let (category, from_closure, span) = self.best_blame_constraint(
-            mir,
-            borrow_region,
-            |r| r == outlived_region
-        );
-        let outlived_fr_name = self.give_region_a_name(
-            infcx, mir, mir_def_id, outlived_region, &mut 1);
+        let (category, from_closure, span) =
+            self.best_blame_constraint(mir, borrow_region, |r| r == outlived_region);
+        let outlived_fr_name =
+            self.give_region_a_name(infcx, mir, mir_def_id, outlived_region, &mut 1);
         (category, from_closure, span, outlived_fr_name)
     }
 
     // Finds some region R such that `fr1: R` and `R` is live at
     // `elem`.
     crate fn find_sub_region_live_at(&self, fr1: RegionVid, elem: Location) -> RegionVid {
-        // Find all paths
-        let (_path, r) =
-            self.find_constraint_paths_between_regions(fr1, |r| {
-                self.liveness_constraints.contains(r, elem)
-            }).unwrap();
-        r
+        debug!("find_sub_region_live_at(fr1={:?}, elem={:?})", fr1, elem);
+        self.find_constraint_paths_between_regions(fr1, |r| {
+            // First look for some `r` such that `fr1: r` and `r` is live at `elem`
+            debug!(
+                "find_sub_region_live_at: liveness_constraints for {:?} are {:?}",
+                r,
+                self.liveness_constraints.region_value_str(r),
+            );
+            self.liveness_constraints.contains(r, elem)
+        }).or_else(|| {
+                // If we fail to find that, we may find some `r` such that
+                // `fr1: r` and `r` is a placeholder from some universe
+                // `fr1` cannot name. This would force `fr1` to be
+                // `'static`.
+                self.find_constraint_paths_between_regions(fr1, |r| {
+                    self.cannot_name_placeholder(fr1, r)
+                })
+            })
+            .or_else(|| {
+                // If we fail to find THAT, it may be that `fr1` is a
+                // placeholder that cannot "fit" into its SCC. In that
+                // case, there should be some `r` where `fr1: r`, both
+                // `fr1` and `r` are in the same SCC, and `fr1` is a
+                // placeholder that `r` cannot name. We can blame that
+                // edge.
+                self.find_constraint_paths_between_regions(fr1, |r| {
+                    self.constraint_sccs.scc(fr1) == self.constraint_sccs.scc(r)
+                        && self.cannot_name_placeholder(r, fr1)
+                })
+            })
+            .map(|(_path, r)| r)
+            .unwrap()
     }
 
     // Finds a good span to blame for the fact that `fr1` outlives `fr2`.
@@ -609,34 +713,30 @@ fn add_static_impl_trait_suggestion(
         fr1: RegionVid,
         fr2: RegionVid,
     ) -> (ConstraintCategory, Span) {
-        let (category, _, span) = self.best_blame_constraint(mir, fr1, |r| r == fr2);
+        let (category, _, span) =
+            self.best_blame_constraint(mir, fr1, |r| self.provides_universal_region(r, fr1, fr2));
         (category, span)
     }
 
     fn retrieve_closure_constraint_info(
         &self,
         mir: &Mir<'tcx>,
-        constraint: &OutlivesConstraint
+        constraint: &OutlivesConstraint,
     ) -> (ConstraintCategory, bool, Span) {
         let loc = match constraint.locations {
             Locations::All(span) => return (constraint.category, false, span),
             Locations::Single(loc) => loc,
         };
 
-        let opt_span_category = self
-            .closure_bounds_mapping[&loc]
-            .get(&(constraint.sup, constraint.sub));
+        let opt_span_category =
+            self.closure_bounds_mapping[&loc].get(&(constraint.sup, constraint.sub));
         opt_span_category
             .map(|&(category, span)| (category, true, span))
             .unwrap_or((constraint.category, false, mir.source_info(loc).span))
     }
 
     /// Returns `true` if a closure is inferred to be an `FnMut` closure.
-    crate fn is_closure_fn_mut(
-        &self,
-        infcx: &InferCtxt<'_, '_, 'tcx>,
-        fr: RegionVid,
-    ) -> bool {
+    crate fn is_closure_fn_mut(&self, infcx: &InferCtxt<'_, '_, 'tcx>, fr: RegionVid) -> bool {
         if let Some(ty::ReFree(free_region)) = self.to_error_region(fr) {
             if let ty::BoundRegion::BrEnv = free_region.bound_region {
                 if let DefiningTy::Closure(def_id, substs) = self.universal_regions.defining_ty {
@@ -648,4 +748,24 @@ fn retrieve_closure_constraint_info(
 
         false
     }
+
+    /// If `r2` represents a placeholder region, then this returns
+    /// true if `r1` cannot name that placeholder in its
+    /// value. Otherwise, returns false.
+    fn cannot_name_placeholder(&self, r1: RegionVid, r2: RegionVid) -> bool {
+        debug!("cannot_name_value_of(r1={:?}, r2={:?})", r1, r2);
+
+        match self.definitions[r2].origin {
+            NLLRegionVariableOrigin::Placeholder(placeholder) => {
+                let universe1 = self.definitions[r1].universe;
+                debug!(
+                    "cannot_name_value_of: universe1={:?} placeholder={:?}",
+                    universe1, placeholder
+                );
+                universe1.cannot_name(placeholder.universe)
+            }
+
+            NLLRegionVariableOrigin::FreeRegion | NLLRegionVariableOrigin::Existential => false,
+        }
+    }
 }
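
The new `provides_universal_region` and `cannot_name_placeholder` methods above hinge on universe nesting: a region can only name placeholders created in its own universe or an outer one, so when `fr1`'s universe cannot name the universe that created the placeholder `r`, the only way to satisfy `fr1: r` is `fr1: 'static`. A toy model of that check, using plain integers for universes (illustrative sketch only, not part of this commit):

// Universes modeled as nesting depth: a smaller index is an outer universe.
#[derive(Clone, Copy, PartialEq, PartialOrd)]
struct Universe(u32);

impl Universe {
    // An outer universe cannot name a placeholder from a more deeply
    // nested (later-created) universe.
    fn cannot_name(self, placeholder_universe: Universe) -> bool {
        self.0 < placeholder_universe.0
    }
}

fn main() {
    let fr1_universe = Universe(0);         // e.g. the root universe of `fr1`
    let placeholder_universe = Universe(1); // universe that created placeholder `r`
    // `fr1` cannot name `r`, so requiring `fr1: r` forces `fr1: 'static`.
    assert!(fr1_universe.cannot_name(placeholder_universe));
}
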
index 2b671891fca6dd00736c2b1257fda7f1b467300b..99372a511a9de5e2c2307b83255c68c7d38bff93 100644 (file)
@@ -277,8 +277,7 @@ fn give_name_from_error_region(
             | ty::RePlaceholder(..)
             | ty::ReEmpty
             | ty::ReErased
-            | ty::ReClosureBound(..)
-            | ty::ReCanonical(..) => None,
+            | ty::ReClosureBound(..) => None,
         }
     }
 
index 50fd4afcd7ecaed09c916d27910b64c2de45c8cf..376f445924270b2bd130dae6f0a26cc50e3b998f 100644 (file)
@@ -345,6 +345,13 @@ fn init_free_and_bound_regions(&mut self) {
                     if scc_universe.can_name(placeholder.universe) {
                         self.scc_values.add_element(scc, placeholder);
                     } else {
+                        debug!(
+                            "init_free_and_bound_regions: placeholder {:?} is \
+                             not compatible with universe {:?} of its SCC {:?}",
+                            placeholder,
+                            scc_universe,
+                            scc,
+                        );
                         self.add_incompatible_universe(scc);
                     }
                 }
@@ -471,6 +478,9 @@ fn propagate_constraints(&mut self, _mir: &Mir<'tcx>) {
             let mut constraints: Vec<_> = self.constraints.iter().collect();
             constraints.sort();
             constraints
+                .into_iter()
+                .map(|c| (c, self.constraint_sccs.scc(c.sup), self.constraint_sccs.scc(c.sub)))
+                .collect::<Vec<_>>()
         });
 
         // To propagate constraints, we walk the DAG induced by the
@@ -560,6 +570,8 @@ fn universe_compatible(&self, scc_b: ConstraintSccIndex, scc_a: ConstraintSccInd
     /// `'a` with `'b` and not `'static`. But it will have to do for
     /// now.
     fn add_incompatible_universe(&mut self, scc: ConstraintSccIndex) {
+        debug!("add_incompatible_universe(scc={:?})", scc);
+
         let fr_static = self.universal_regions.fr_static;
         self.scc_values.add_all_points(scc);
         self.scc_values.add_element(scc, fr_static);
@@ -1226,6 +1238,10 @@ fn check_bound_universal_region<'gcx>(
         );
 
         let longer_fr_scc = self.constraint_sccs.scc(longer_fr);
+        debug!(
+            "check_bound_universal_region: longer_fr_scc={:?}",
+            longer_fr_scc,
+        );
 
         // If we have some bound universal region `'a`, then the only
         // elements it can contain is itself -- we don't know anything
@@ -1242,6 +1258,7 @@ fn check_bound_universal_region<'gcx>(
             Some(v) => v,
             None => return,
         };
+        debug!("check_bound_universal_region: error_element = {:?}", error_element);
 
         // Find the region that introduced this `error_element`.
         let error_region = match error_element {
index 994f20a011d650541e2fd4ec893aee6c1327f660..35ec478143546d90177a81c7f41efb2eebdcf5f8 100644 (file)
@@ -8,21 +8,23 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
-use borrow_check::nll::constraints::{ConstraintSet, OutlivesConstraint};
+use borrow_check::nll::constraints::OutlivesConstraint;
 use borrow_check::nll::region_infer::TypeTest;
-use borrow_check::nll::type_check::Locations;
+use borrow_check::nll::type_check::{Locations, MirTypeckRegionConstraints};
 use borrow_check::nll::universal_regions::UniversalRegions;
+use borrow_check::nll::ToRegionVid;
 use rustc::infer::canonical::QueryRegionConstraint;
 use rustc::infer::outlives::env::RegionBoundPairs;
 use rustc::infer::outlives::obligations::{TypeOutlives, TypeOutlivesDelegate};
 use rustc::infer::region_constraints::{GenericKind, VerifyBound};
-use rustc::infer::{self, SubregionOrigin};
+use rustc::infer::{self, InferCtxt, SubregionOrigin};
 use rustc::mir::ConstraintCategory;
 use rustc::ty::subst::UnpackedKind;
 use rustc::ty::{self, TyCtxt};
 use syntax_pos::DUMMY_SP;
 
 crate struct ConstraintConversion<'a, 'gcx: 'tcx, 'tcx: 'a> {
+    infcx: &'a InferCtxt<'a, 'gcx, 'tcx>,
     tcx: TyCtxt<'a, 'gcx, 'tcx>,
     universal_regions: &'a UniversalRegions<'tcx>,
     region_bound_pairs: &'a RegionBoundPairs<'tcx>,
     param_env: ty::ParamEnv<'tcx>,
     locations: Locations,
     category: ConstraintCategory,
-    outlives_constraints: &'a mut ConstraintSet,
-    type_tests: &'a mut Vec<TypeTest<'tcx>>,
+    constraints: &'a mut MirTypeckRegionConstraints<'tcx>,
 }
 
 impl<'a, 'gcx, 'tcx> ConstraintConversion<'a, 'gcx, 'tcx> {
     crate fn new(
-        tcx: TyCtxt<'a, 'gcx, 'tcx>,
+        infcx: &'a InferCtxt<'a, 'gcx, 'tcx>,
         universal_regions: &'a UniversalRegions<'tcx>,
         region_bound_pairs: &'a RegionBoundPairs<'tcx>,
         implicit_region_bound: Option<ty::Region<'tcx>>,
         param_env: ty::ParamEnv<'tcx>,
         locations: Locations,
         category: ConstraintCategory,
-        outlives_constraints: &'a mut ConstraintSet,
-        type_tests: &'a mut Vec<TypeTest<'tcx>>,
+        constraints: &'a mut MirTypeckRegionConstraints<'tcx>,
     ) -> Self {
         Self {
-            tcx,
+            infcx,
+            tcx: infcx.tcx,
             universal_regions,
             region_bound_pairs,
             implicit_region_bound,
             param_env,
             locations,
             category,
-            outlives_constraints,
-            type_tests,
+            constraints,
         }
     }
 
@@ -82,9 +82,9 @@ pub(super) fn convert(&mut self, query_constraint: &QueryRegionConstraint<'tcx>)
         // when we move to universes, we will, and this assertion
         // will start to fail.
         let ty::OutlivesPredicate(k1, r2) =
-            query_constraint.no_late_bound_regions().unwrap_or_else(|| {
+            query_constraint.no_bound_vars().unwrap_or_else(|| {
                 bug!(
-                    "query_constraint {:?} contained bound regions",
+                    "query_constraint {:?} contained bound vars",
                     query_constraint,
                 );
             });
@@ -113,7 +113,7 @@ pub(super) fn convert(&mut self, query_constraint: &QueryRegionConstraint<'tcx>)
     }
 
     fn verify_to_type_test(
-        &self,
+        &mut self,
         generic_kind: GenericKind<'tcx>,
         region: ty::Region<'tcx>,
         verify_bound: VerifyBound<'tcx>,
@@ -128,22 +128,30 @@ fn verify_to_type_test(
         }
     }
 
-    fn to_region_vid(&self, r: ty::Region<'tcx>) -> ty::RegionVid {
-        self.universal_regions.to_region_vid(r)
+    fn to_region_vid(&mut self, r: ty::Region<'tcx>) -> ty::RegionVid {
+        if let ty::RePlaceholder(placeholder) = r {
+            self.constraints
+                .placeholder_region(self.infcx, *placeholder)
+                .to_region_vid()
+        } else {
+            self.universal_regions.to_region_vid(r)
+        }
     }
 
     fn add_outlives(&mut self, sup: ty::RegionVid, sub: ty::RegionVid) {
-        self.outlives_constraints.push(OutlivesConstraint {
-            locations: self.locations,
-            category: self.category,
-            sub,
-            sup,
-        });
+        self.constraints
+            .outlives_constraints
+            .push(OutlivesConstraint {
+                locations: self.locations,
+                category: self.category,
+                sub,
+                sup,
+            });
     }
 
     fn add_type_test(&mut self, type_test: TypeTest<'tcx>) {
         debug!("add_type_test(type_test={:?})", type_test);
-        self.type_tests.push(type_test);
+        self.constraints.type_tests.push(type_test);
     }
 }
 
@@ -156,8 +164,8 @@ fn push_sub_region_constraint(
         a: ty::Region<'tcx>,
         b: ty::Region<'tcx>,
     ) {
-        let b = self.universal_regions.to_region_vid(b);
-        let a = self.universal_regions.to_region_vid(a);
+        let b = self.to_region_vid(b);
+        let a = self.to_region_vid(a);
         self.add_outlives(b, a);
     }
 
index f8c839e4d3f88f697f2bdac6a3d1dc1b4a946cc0..3d0f3d9fc7d8d1ee8cffb99c9f357a0f9f206b89 100644 (file)
@@ -271,15 +271,14 @@ impl UniversalRegionRelationsBuilder<'cx, 'gcx, 'tcx> {
 
         for data in constraint_sets {
             constraint_conversion::ConstraintConversion::new(
-                self.infcx.tcx,
+                self.infcx,
                 &self.universal_regions,
                 &self.region_bound_pairs,
                 self.implicit_region_bound,
                 self.param_env,
                 Locations::All(DUMMY_SP),
                 ConstraintCategory::Internal,
-                &mut self.constraints.outlives_constraints,
-                &mut self.constraints.type_tests,
+                &mut self.constraints,
             ).convert_all(&data);
         }
 
index 3098acffa23dc0f0138616a2c6f1898880b2f0fe..734ddbc3ab9a72af39babca2f7080aac3e3577c9 100644 (file)
@@ -16,6 +16,7 @@
 use borrow_check::nll::constraints::{ConstraintSet, OutlivesConstraint};
 use borrow_check::nll::facts::AllFacts;
 use borrow_check::nll::region_infer::values::LivenessValues;
+use borrow_check::nll::region_infer::values::PlaceholderIndex;
 use borrow_check::nll::region_infer::values::PlaceholderIndices;
 use borrow_check::nll::region_infer::values::RegionValueElements;
 use borrow_check::nll::region_infer::{ClosureRegionRequirementsExt, TypeTest};
 use dataflow::move_paths::MoveData;
 use dataflow::FlowAtLocation;
 use dataflow::MaybeInitializedPlaces;
+use either::Either;
 use rustc::hir;
 use rustc::hir::def_id::DefId;
 use rustc::infer::canonical::QueryRegionConstraint;
 use rustc::infer::outlives::env::RegionBoundPairs;
-use rustc::infer::{InferCtxt, InferOk, LateBoundRegionConversionTime};
+use rustc::infer::{InferCtxt, InferOk, LateBoundRegionConversionTime, NLLRegionVariableOrigin};
 use rustc::mir::interpret::EvalErrorKind::BoundsCheck;
 use rustc::mir::tcx::PlaceTy;
 use rustc::mir::visit::{PlaceContext, Visitor, MutatingUseContext, NonMutatingUseContext};
 use rustc::ty::fold::TypeFoldable;
 use rustc::ty::subst::{Subst, Substs, UnpackedKind};
 use rustc::ty::{self, RegionVid, ToPolyTraitRef, Ty, TyCtxt, TyKind};
+use rustc_data_structures::fx::{FxHashMap, FxHashSet};
+use rustc_data_structures::indexed_vec::IndexVec;
 use std::rc::Rc;
 use std::{fmt, iter};
 use syntax_pos::{Span, DUMMY_SP};
 use transform::{MirPass, MirSource};
 
-use either::Either;
-use rustc_data_structures::fx::{FxHashMap, FxHashSet};
-
 macro_rules! span_mirbug {
     ($context:expr, $elem:expr, $($message:tt)*) => ({
         $crate::borrow_check::nll::type_check::mirbug(
@@ -126,12 +127,13 @@ pub(crate) fn type_check<'gcx, 'tcx>(
 ) -> MirTypeckResults<'tcx> {
     let implicit_region_bound = infcx.tcx.mk_region(ty::ReVar(universal_regions.fr_fn_body));
     let mut constraints = MirTypeckRegionConstraints {
+        placeholder_indices: PlaceholderIndices::default(),
+        placeholder_index_to_region: IndexVec::default(),
         liveness_constraints: LivenessValues::new(elements),
         outlives_constraints: ConstraintSet::default(),
         closure_bounds_mapping: Default::default(),
         type_tests: Vec::default(),
     };
-    let mut placeholder_indices = PlaceholderIndices::default();
 
     let CreateResult {
         universal_region_relations,
@@ -151,7 +153,6 @@ pub(crate) fn type_check<'gcx, 'tcx>(
         borrow_set,
         all_facts,
         constraints: &mut constraints,
-        placeholder_indices: &mut placeholder_indices,
     };
 
     type_check_internal(
@@ -175,7 +176,6 @@ pub(crate) fn type_check<'gcx, 'tcx>(
 
     MirTypeckResults {
         constraints,
-        placeholder_indices,
         universal_region_relations,
     }
 }
@@ -359,7 +359,7 @@ fn tcx(&self) -> TyCtxt<'a, 'gcx, 'tcx> {
     }
 
     fn sanitize_type(&mut self, parent: &dyn fmt::Debug, ty: Ty<'tcx>) -> Ty<'tcx> {
-        if ty.has_escaping_regions() || ty.references_error() {
+        if ty.has_escaping_bound_vars() || ty.references_error() {
             span_mirbug_and_err!(self, parent, "bad type {:?}", ty)
         } else {
             ty
@@ -730,18 +730,30 @@ struct BorrowCheckContext<'a, 'tcx: 'a> {
     all_facts: &'a mut Option<AllFacts>,
     borrow_set: &'a BorrowSet<'tcx>,
     constraints: &'a mut MirTypeckRegionConstraints<'tcx>,
-    placeholder_indices: &'a mut PlaceholderIndices,
 }
 
 crate struct MirTypeckResults<'tcx> {
     crate constraints: MirTypeckRegionConstraints<'tcx>,
-    crate placeholder_indices: PlaceholderIndices,
     crate universal_region_relations: Rc<UniversalRegionRelations<'tcx>>,
 }
 
 /// A collection of region constraints that must be satisfied for the
 /// program to be considered well-typed.
 crate struct MirTypeckRegionConstraints<'tcx> {
+    /// Maps from a `ty::Placeholder` to the corresponding
+    /// `PlaceholderIndex` bit that we will use for it.
+    ///
+    /// To keep everything in sync, do not insert this set
+    /// directly. Instead, use the `placeholder_region` helper.
+    crate placeholder_indices: PlaceholderIndices,
+
+    /// Each time we add a placeholder to `placeholder_indices`, we
+    /// also create a corresponding "representative" region vid for
+    /// that wraps it. This vector tracks those. This way, when we
+    /// convert the same `ty::RePlaceholder(p)` twice, we can map to
+    /// the same underlying `RegionVid`.
+    crate placeholder_index_to_region: IndexVec<PlaceholderIndex, ty::Region<'tcx>>,
+
     /// In general, the type-checker is not responsible for enforcing
     /// liveness constraints; this job falls to the region inferencer,
     /// which performs a liveness analysis. However, in some limited
@@ -759,6 +771,25 @@ struct BorrowCheckContext<'a, 'tcx: 'a> {
     crate type_tests: Vec<TypeTest<'tcx>>,
 }
 
+impl MirTypeckRegionConstraints<'tcx> {
+    fn placeholder_region(
+        &mut self,
+        infcx: &InferCtxt<'_, '_, 'tcx>,
+        placeholder: ty::Placeholder,
+    ) -> ty::Region<'tcx> {
+        let placeholder_index = self.placeholder_indices.insert(placeholder);
+        match self.placeholder_index_to_region.get(placeholder_index) {
+            Some(&v) => v,
+            None => {
+                let origin = NLLRegionVariableOrigin::Placeholder(placeholder);
+                let region = infcx.next_nll_region_var_in_universe(origin, placeholder.universe);
+                self.placeholder_index_to_region.push(region);
+                region
+            }
+        }
+    }
+}
+
 /// The `Locations` type summarizes *where* region constraints are
 /// required to hold. Normally, this is at a particular point which
 /// created the obligation, but for constraints that the user gave, we
@@ -888,15 +919,14 @@ fn push_region_constraints(
 
         if let Some(ref mut borrowck_context) = self.borrowck_context {
             constraint_conversion::ConstraintConversion::new(
-                self.infcx.tcx,
+                self.infcx,
                 borrowck_context.universal_regions,
                 self.region_bound_pairs,
                 self.implicit_region_bound,
                 self.param_env,
                 locations,
                 category,
-                &mut borrowck_context.constraints.outlives_constraints,
-                &mut borrowck_context.constraints.type_tests,
+                &mut borrowck_context.constraints,
             ).convert_all(&data);
         }
     }
@@ -1264,7 +1294,7 @@ fn check_stmt(&mut self, mir: &Mir<'tcx>, stmt: &Statement<'tcx>, location: Loca
             | StatementKind::StorageDead(_)
             | StatementKind::InlineAsm { .. }
             | StatementKind::EndRegion(_)
-            | StatementKind::Validate(..)
+            | StatementKind::Retag { .. }
             | StatementKind::Nop => {}
         }
     }
@@ -2184,8 +2214,8 @@ fn prove_closure_bounds(
                     .enumerate()
                     .filter_map(|(idx, constraint)| {
                         let ty::OutlivesPredicate(k1, r2) =
-                            constraint.no_late_bound_regions().unwrap_or_else(|| {
-                                bug!("query_constraint {:?} contained bound regions", constraint,);
+                            constraint.no_bound_vars().unwrap_or_else(|| {
+                                bug!("query_constraint {:?} contained bound vars", constraint,);
                             });
 
                         match k1.unpack() {
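
The `placeholder_region` helper added above follows a small interning pattern: the first time a `ty::Placeholder` is seen it gets a fresh representative region, and every later conversion of the same placeholder returns that same representative, keeping `placeholder_indices` and `placeholder_index_to_region` in sync. Reduced to its essentials (illustrative sketch only, not part of this commit; the names below are invented):

use std::collections::HashMap;

// A minimal "insert once, reuse the representative" interner.
struct PlaceholderInterner {
    index_of: HashMap<String, usize>, // key stands in for `ty::Placeholder`
    representatives: Vec<String>,     // stands in for `placeholder_index_to_region`
}

impl PlaceholderInterner {
    fn new() -> Self {
        PlaceholderInterner { index_of: HashMap::new(), representatives: Vec::new() }
    }

    fn representative(&mut self, placeholder: &str) -> String {
        if let Some(&i) = self.index_of.get(placeholder) {
            return self.representatives[i].clone();
        }
        let i = self.representatives.len();
        let fresh = format!("'r{}", i); // a fresh region variable for this placeholder
        self.index_of.insert(placeholder.to_string(), i);
        self.representatives.push(fresh.clone());
        fresh
    }
}

fn main() {
    let mut interner = PlaceholderInterner::new();
    let a = interner.representative("U1.p0");
    let b = interner.representative("U1.p0");
    assert_eq!(a, b); // converting the same placeholder twice yields the same region
}
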
index 13ebf46bdb14904fa56eb17bf9182e52668d376f..b82efb29f6e56a8a0cf0bc2a1745f5f54cd6b4d6 100644 (file)
@@ -76,16 +76,20 @@ fn create_next_universe(&mut self) -> ty::UniverseIndex {
     }
 
     fn next_existential_region_var(&mut self) -> ty::Region<'tcx> {
-        let origin = NLLRegionVariableOrigin::Existential;
-        self.infcx.next_nll_region_var(origin)
+        if let Some(_) = &mut self.borrowck_context {
+            let origin = NLLRegionVariableOrigin::Existential;
+            self.infcx.next_nll_region_var(origin)
+        } else {
+            self.infcx.tcx.types.re_erased
+        }
     }
 
     fn next_placeholder_region(&mut self, placeholder: ty::Placeholder) -> ty::Region<'tcx> {
-        let origin = NLLRegionVariableOrigin::Placeholder(placeholder);
         if let Some(borrowck_context) = &mut self.borrowck_context {
-            borrowck_context.placeholder_indices.insert(placeholder);
+            borrowck_context.constraints.placeholder_region(self.infcx, placeholder)
+        } else {
+            self.infcx.tcx.types.re_erased
         }
-        self.infcx.next_nll_region_var(origin)
     }
 
     fn generalize_existential(&mut self, universe: ty::UniverseIndex) -> ty::Region<'tcx> {
index d2b39f088b65e8c401ae5a35dd01214a82d6acaa..8f52499124ab7580cb60f7c4fc71c3139fc96840 100644 (file)
@@ -167,8 +167,12 @@ pub fn stmt_expr(&mut self, mut block: BasicBlock, expr: Expr<'tcx>) -> BlockAnd
                     .into_boxed_slice();
                 let inputs = inputs
                     .into_iter()
-                    .map(|input| unpack!(block = this.as_local_operand(block, input)))
-                    .collect::<Vec<_>>()
+                    .map(|input| {
+                        (
+                            input.span(),
+                            unpack!(block = this.as_local_operand(block, input)),
+                        )
+                    }).collect::<Vec<_>>()
                     .into_boxed_slice();
                 this.cfg.push(
                     block,
index 38ab21b7c8c85bcf5728b46f2dc4ec4ab19162d8..2658d7f59a07f40f240bb0f3ac694366cb0febe1 100644 (file)
@@ -32,7 +32,7 @@
 use syntax::source_map::{Span, DUMMY_SP};
 
 use interpret::{self,
-    PlaceTy, MemPlace, OpTy, Operand, Value, Scalar, ConstValue, Pointer,
+    PlaceTy, MemPlace, OpTy, Operand, Immediate, Scalar, ConstValue, Pointer,
     EvalResult, EvalError, EvalErrorKind, GlobalId, EvalContext, StackPopCleanup,
     Allocation, AllocId, MemoryKind,
     snapshot, RefTracking,
@@ -115,7 +115,7 @@ pub fn op_to_const<'tcx>(
             _ => false,
         };
     let normalized_op = if normalize {
-        ecx.try_read_value(op)?
+        ecx.try_read_immediate(op)?
     } else {
         match op.op {
             Operand::Indirect(mplace) => Err(mplace),
@@ -137,9 +137,9 @@ pub fn op_to_const<'tcx>(
             let alloc = ecx.tcx.intern_const_alloc(alloc);
             ConstValue::ByRef(ptr.alloc_id, alloc, ptr.offset)
         },
-        Ok(Value::Scalar(x)) =>
+        Ok(Immediate::Scalar(x)) =>
             ConstValue::Scalar(x.not_undef()?),
-        Ok(Value::ScalarPair(a, b)) =>
+        Ok(Immediate::ScalarPair(a, b)) =>
             ConstValue::ScalarPair(a.not_undef()?, b.not_undef()?),
     };
     Ok(ty::Const::from_const_value(ecx.tcx.tcx, val, op.layout.ty))
index cfccb950e8276a64a8a1c998b76f06fae68b9561..69d2a89b5f237216732664bcfa033788c2275dae 100644 (file)
@@ -338,7 +338,7 @@ fn statement_effect(&self, sets: &mut BlockSets<BorrowIndex>, location: Location
             mir::StatementKind::FakeRead(..) |
             mir::StatementKind::SetDiscriminant { .. } |
             mir::StatementKind::StorageLive(..) |
-            mir::StatementKind::Validate(..) |
+            mir::StatementKind::Retag { .. } |
             mir::StatementKind::AscribeUserType(..) |
             mir::StatementKind::Nop => {}
 
index 08696dc098e00398c86a11ccbe954e963e1fc470..874e862de238b7e1e8e253e0af83caac4eb71189 100644 (file)
 use rustc::mir::*;
 use rustc::mir::tcx::RvalueInitializationState;
 use rustc_data_structures::indexed_vec::{IndexVec};
+use smallvec::{SmallVec, smallvec};
 
 use std::collections::hash_map::Entry;
 use std::mem;
 
 use super::abs_domain::Lift;
-
 use super::{LocationMap, MoveData, MovePath, MovePathLookup, MovePathIndex, MoveOut, MoveOutIndex};
 use super::{MoveError, InitIndex, Init, InitLocation, LookupResult, InitKind};
 use super::IllegalMoveOriginKind::*;
@@ -64,8 +64,8 @@ fn new(mir: &'a Mir<'tcx>, tcx: TyCtxt<'a, 'gcx, 'tcx>) -> Self {
     }
 
     fn new_move_path(move_paths: &mut IndexVec<MovePathIndex, MovePath<'tcx>>,
-                     path_map: &mut IndexVec<MovePathIndex, Vec<MoveOutIndex>>,
-                     init_path_map: &mut IndexVec<MovePathIndex, Vec<InitIndex>>,
+                     path_map: &mut IndexVec<MovePathIndex, SmallVec<[MoveOutIndex; 4]>>,
+                     init_path_map: &mut IndexVec<MovePathIndex, SmallVec<[InitIndex; 4]>>,
                      parent: Option<MovePathIndex>,
                      place: Place<'tcx>)
                      -> MovePathIndex
@@ -83,10 +83,10 @@ fn new_move_path(move_paths: &mut IndexVec<MovePathIndex, MovePath<'tcx>>,
             move_paths[move_path].next_sibling = next_sibling;
         }
 
-        let path_map_ent = path_map.push(vec![]);
+        let path_map_ent = path_map.push(smallvec![]);
         assert_eq!(path_map_ent, move_path);
 
-        let init_path_map_ent = init_path_map.push(vec![]);
+        let init_path_map_ent = init_path_map.push(smallvec![]);
         assert_eq!(init_path_map_ent, move_path);
 
         move_path
@@ -289,7 +289,7 @@ fn gather_statement(&mut self, stmt: &Statement<'tcx>) {
                         self.gather_init(output, InitKind::Deep);
                     }
                 }
-                for input in inputs.iter() {
+                for (_, input) in inputs.iter() {
                     self.gather_operand(input);
                 }
             }
@@ -302,7 +302,7 @@ fn gather_statement(&mut self, stmt: &Statement<'tcx>) {
                           "SetDiscriminant should not exist during borrowck");
             }
             StatementKind::EndRegion(_) |
-            StatementKind::Validate(..) |
+            StatementKind::Retag { .. } |
             StatementKind::AscribeUserType(..) |
             StatementKind::Nop => {}
         }
index 58a2b9361032e579ac3767cf5ecab10ab11762ee..2a026b8f52c2abad37515a7be6e86f396cd093b0 100644 (file)
@@ -13,6 +13,7 @@
 use rustc::mir::*;
 use rustc::util::nodemap::FxHashMap;
 use rustc_data_structures::indexed_vec::{IndexVec};
+use smallvec::SmallVec;
 use syntax_pos::{Span};
 
 use std::fmt;
@@ -141,14 +142,14 @@ pub struct MoveData<'tcx> {
     /// of executing the code at `l`. (There can be multiple MoveOut's
     /// for a given `l` because each MoveOut is associated with one
     /// particular path being moved.)
-    pub loc_map: LocationMap<Vec<MoveOutIndex>>,
-    pub path_map: IndexVec<MovePathIndex, Vec<MoveOutIndex>>,
+    pub loc_map: LocationMap<SmallVec<[MoveOutIndex; 4]>>,
+    pub path_map: IndexVec<MovePathIndex, SmallVec<[MoveOutIndex; 4]>>,
     pub rev_lookup: MovePathLookup<'tcx>,
     pub inits: IndexVec<InitIndex, Init>,
     /// Each Location `l` is mapped to the Inits that are effects
     /// of executing the code at `l`.
-    pub init_loc_map: LocationMap<Vec<InitIndex>>,
-    pub init_path_map: IndexVec<MovePathIndex, Vec<InitIndex>>,
+    pub init_loc_map: LocationMap<SmallVec<[InitIndex; 4]>>,
+    pub init_path_map: IndexVec<MovePathIndex, SmallVec<[InitIndex; 4]>>,
 }
 
 pub trait HasMoveData<'tcx> {
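The hunks above switch `path_map` and `init_path_map` from `Vec` to `SmallVec<[_; 4]>`, which keeps up to four elements inline before spilling to the heap. A minimal standalone sketch of that trade-off (requires the `smallvec` crate; the element type here is an arbitrary stand-in):

use smallvec::{smallvec, SmallVec};

fn main() {
    // Up to 4 elements live inline in the SmallVec itself: no heap allocation.
    let mut indices: SmallVec<[u32; 4]> = smallvec![0, 1, 2];
    assert!(!indices.spilled());

    // Pushing past the inline capacity moves the elements to the heap.
    indices.extend(3..8);
    assert!(indices.spilled());
    assert_eq!(indices.len(), 8);
}

Since most move paths have only a handful of moves or inits, the common case avoids an allocation per path.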
index 5f798135966d49c25068313a533e628e215dc51f..3a56d261111eee4ec0e46de7883944dc5358fde3 100644 (file)
@@ -165,7 +165,7 @@ pub fn const_eval_literal(
             LitKind::Str(ref s, _) => {
                 let s = s.as_str();
                 let id = self.tcx.allocate_bytes(s.as_bytes());
-                ConstValue::new_slice(Scalar::Ptr(id.into()), s.len() as u64, self.tcx)
+                ConstValue::new_slice(Scalar::Ptr(id.into()), s.len() as u64, &self.tcx)
             },
             LitKind::ByteStr(ref data) => {
                 let id = self.tcx.allocate_bytes(data);
index d53bb1dc4d63a255e12ee4add8dcd8200e605339..69ca21c56242f954a76cca7bd849d21f593bd41c 100644 (file)
@@ -669,14 +669,14 @@ fn all_constructors<'a, 'tcx: 'a>(cx: &mut MatchCheckCtxt<'a, 'tcx>,
         }
         ty::Int(ity) if exhaustive_integer_patterns => {
             // FIXME(49937): refactor these bit manipulations into interpret.
-            let bits = Integer::from_attr(cx.tcx, SignedInt(ity)).size().bits() as u128;
+            let bits = Integer::from_attr(&cx.tcx, SignedInt(ity)).size().bits() as u128;
             let min = 1u128 << (bits - 1);
             let max = (1u128 << (bits - 1)) - 1;
             vec![ConstantRange(min, max, pcx.ty, RangeEnd::Included)]
         }
         ty::Uint(uty) if exhaustive_integer_patterns => {
             // FIXME(49937): refactor these bit manipulations into interpret.
-            let bits = Integer::from_attr(cx.tcx, UnsignedInt(uty)).size().bits() as u128;
+            let bits = Integer::from_attr(&cx.tcx, UnsignedInt(uty)).size().bits() as u128;
             let max = !0u128 >> (128 - bits);
             vec![ConstantRange(0, max, pcx.ty, RangeEnd::Included)]
         }
@@ -862,7 +862,7 @@ fn from_pat(tcx: TyCtxt<'_, 'tcx, 'tcx>,
     fn signed_bias(tcx: TyCtxt<'_, 'tcx, 'tcx>, ty: Ty<'tcx>) -> u128 {
         match ty.sty {
             ty::Int(ity) => {
-                let bits = Integer::from_attr(tcx, SignedInt(ity)).size().bits() as u128;
+                let bits = Integer::from_attr(&tcx, SignedInt(ity)).size().bits() as u128;
                 1u128 << (bits - 1)
             }
             _ => 0
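Several call sites above now pass `&cx.tcx` or `&tcx` because the callee takes a reference to an `impl HasDataLayout`. A minimal standalone sketch of that parameter style (the trait, blanket impl, and `Ctx` type here are simplified stand-ins for illustration, not rustc's definitions):

trait HasDataLayout {
    fn pointer_size(&self) -> u64;
}

// Hypothetical blanket impl so `&T` also satisfies the bound, mirroring why
// call sites can simply borrow the context they already have.
impl<'a, T: HasDataLayout + ?Sized> HasDataLayout for &'a T {
    fn pointer_size(&self) -> u64 {
        (**self).pointer_size()
    }
}

struct Ctx {
    ptr_size: u64,
}

impl HasDataLayout for Ctx {
    fn pointer_size(&self) -> u64 {
        self.ptr_size
    }
}

// Callees now borrow the context instead of consuming it.
fn max_pointer_value(cx: &impl HasDataLayout) -> u128 {
    (1u128 << (cx.pointer_size() * 8)) - 1
}

fn main() {
    let cx = Ctx { ptr_size: 8 };
    assert_eq!(max_pointer_value(&cx), u64::MAX as u128);
    // Thanks to the blanket impl, a reference-to-reference works too.
    assert_eq!(max_pointer_value(&&cx), u64::MAX as u128);
}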
index f2ae5774da875f8eb014635cbcef5e05ba42f894..735ceef229a22f36ba4986e5d28931a98e6da968 100644 (file)
@@ -238,8 +238,8 @@ fn check_match(
                                                         is non-empty",
                                                        pat_ty));
                     span_help!(&mut err, scrut.span,
-                               "Please ensure that all possible cases are being handled; \
-                                possibly adding wildcards or more match arms.");
+                               "ensure that all possible cases are being handled, \
+                                possibly by adding wildcards or more match arms");
                     err.emit();
                 }
                 // If the type *is* uninhabited, it's vacuously exhaustive
index bff87da9c771d5070906a2e082d512dee935d7de..0e1f8d1d32816ace04d2eb9e3189d9d4bfd49af9 100644 (file)
@@ -1313,7 +1313,7 @@ fn lit_to_const<'a, 'tcx>(lit: &'tcx ast::LitKind,
         LitKind::Str(ref s, _) => {
             let s = s.as_str();
             let id = tcx.allocate_bytes(s.as_bytes());
-            ConstValue::new_slice(Scalar::Ptr(id.into()), s.len() as u64, tcx)
+            ConstValue::new_slice(Scalar::Ptr(id.into()), s.len() as u64, &tcx)
         },
         LitKind::ByteStr(ref data) => {
             let id = tcx.allocate_bytes(data);
index b2c8cba480259d6bf4a83c940a425b69c487c039..06748d60e45837233da8c75aaf4760f905b7ab83 100644 (file)
@@ -19,7 +19,7 @@
 use rustc::mir::CastKind;
 use rustc_apfloat::Float;
 
-use super::{EvalContext, Machine, PlaceTy, OpTy, Value};
+use super::{EvalContext, Machine, PlaceTy, OpTy, Immediate};
 
 impl<'a, 'mir, 'tcx, M: Machine<'a, 'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M> {
     fn type_is_fat_ptr(&self, ty: Ty<'tcx>) -> bool {
@@ -45,7 +45,7 @@ pub fn cast(
 
             Misc => {
                 let src_layout = src.layout;
-                let src = self.read_value(src)?;
+                let src = self.read_immediate(src)?;
 
                 let src = if M::ENABLE_PTR_TRACKING_HOOKS && src_layout.ty.is_region_ptr() {
                     // The only `Misc` casts on references are those creating raw pointers.
@@ -61,14 +61,14 @@ pub fn cast(
                 if self.type_is_fat_ptr(src_layout.ty) {
                     match (src, self.type_is_fat_ptr(dest.layout.ty)) {
                         // pointers to extern types
-                        (Value::Scalar(_),_) |
+                        (Immediate::Scalar(_),_) |
                         // slices and trait objects to other slices/trait objects
-                        (Value::ScalarPair(..), true) => {
-                            // No change to value
-                            self.write_value(src, dest)?;
+                        (Immediate::ScalarPair(..), true) => {
+                            // No change to immediate
+                            self.write_immediate(src, dest)?;
                         }
                         // slices and trait objects to thin pointers (dropping the metadata)
-                        (Value::ScalarPair(data, _), false) => {
+                        (Immediate::ScalarPair(data, _), false) => {
                             self.write_scalar(data, dest)?;
                         }
                     }
@@ -118,11 +118,11 @@ pub fn cast(
             }
 
             UnsafeFnPointer => {
-                let src = self.read_value(src)?;
+                let src = self.read_immediate(src)?;
                 match dest.layout.ty.sty {
                     ty::FnPtr(_) => {
                         // No change to value
-                        self.write_value(*src, dest)?;
+                        self.write_immediate(*src, dest)?;
                     }
                     ref other => bug!("fn to unsafe fn cast on {:?}", other),
                 }
@@ -144,8 +144,8 @@ pub fn cast(
                             ty::ClosureKind::FnOnce,
                         );
                         let fn_ptr = self.memory.create_fn_alloc(instance).with_default_tag();
-                        let val = Value::Scalar(Scalar::Ptr(fn_ptr.into()).into());
-                        self.write_value(val, dest)?;
+                        let val = Immediate::Scalar(Scalar::Ptr(fn_ptr.into()).into());
+                        self.write_immediate(val, dest)?;
                     }
                     ref other => bug!("closure fn pointer on {:?}", other),
                 }
@@ -326,24 +326,28 @@ fn unsize_into_ptr(
 
         match (&src_pointee_ty.sty, &dest_pointee_ty.sty) {
             (&ty::Array(_, length), &ty::Slice(_)) => {
-                let ptr = self.read_value(src)?.to_scalar_ptr()?;
+                let ptr = self.read_immediate(src)?.to_scalar_ptr()?;
                 // u64 cast is from usize to u64, which is always good
-                let val = Value::new_slice(ptr, length.unwrap_usize(self.tcx.tcx), self.tcx.tcx);
-                self.write_value(val, dest)
+                let val = Immediate::new_slice(
+                    ptr,
+                    length.unwrap_usize(self.tcx.tcx),
+                    self,
+                );
+                self.write_immediate(val, dest)
             }
             (&ty::Dynamic(..), &ty::Dynamic(..)) => {
                 // For now, upcasts are limited to changes in marker
                 // traits, and hence never actually require an actual
                 // change to the vtable.
-                let val = self.read_value(src)?;
-                self.write_value(*val, dest)
+                let val = self.read_immediate(src)?;
+                self.write_immediate(*val, dest)
             }
             (_, &ty::Dynamic(ref data, _)) => {
                 // Initial cast from sized to dyn trait
                 let vtable = self.get_vtable(src_pointee_ty, data.principal())?;
-                let ptr = self.read_value(src)?.to_scalar_ptr()?;
-                let val = Value::new_dyn_trait(ptr, vtable);
-                self.write_value(val, dest)
+                let ptr = self.read_immediate(src)?.to_scalar_ptr()?;
+                let val = Immediate::new_dyn_trait(ptr, vtable);
+                self.write_immediate(val, dest)
             }
 
             _ => bug!("invalid unsizing {:?} -> {:?}", src.layout.ty, dest.layout.ty),
@@ -390,7 +394,7 @@ fn unsize_into(
                             src_field.into()
                         }
                         Err(..) => {
-                            let src_field_layout = src.layout.field(&self, i)?;
+                            let src_field_layout = src.layout.field(self, i)?;
                             // this must be a field covering the entire thing
                             assert_eq!(src.layout.fields.offset(i).bytes(), 0);
                             assert_eq!(src_field_layout.size, src.layout.size);
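The unsizing hunks above build slice and trait-object pointers as `Immediate::ScalarPair`s, i.e. fat pointers carrying a data pointer plus metadata. A plain surface-Rust illustration of the same (pointer, length) representation, independent of the interpreter's types:

fn main() {
    let array: [u8; 4] = [1, 2, 3, 4];
    // Unsizing coercion: &[u8; 4] -> &[u8]. The slice reference carries the
    // element pointer plus the length, matching the (data, meta) pair above.
    let slice: &[u8] = &array;
    assert_eq!(slice.len(), 4);
    // On the targets rustc supports, that fat pointer is two words wide.
    assert_eq!(
        std::mem::size_of::<&[u8]>(),
        std::mem::size_of::<*const u8>() + std::mem::size_of::<usize>()
    );
}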
index bc7ad16dc97bc7b59caee2cd1b65736c9652f362..fc13c5fef2dda2a599fd5b5835348a0aacd91cb7 100644 (file)
@@ -32,7 +32,7 @@
 use rustc_data_structures::fx::FxHashMap;
 
 use super::{
-    Value, Operand, MemPlace, MPlaceTy, Place, PlaceTy, ScalarMaybeUndef,
+    Immediate, Operand, MemPlace, MPlaceTy, Place, PlaceTy, ScalarMaybeUndef,
     Memory, Machine
 };
 
@@ -139,8 +139,8 @@ pub fn access_mut(&mut self) -> EvalResult<'tcx, &mut Operand<Tag>> {
     }
 }
 
-impl<'b, 'a, 'mir, 'tcx, M: Machine<'a, 'mir, 'tcx>> HasDataLayout
-    for &'b EvalContext<'a, 'mir, 'tcx, M>
+impl<'a, 'mir, 'tcx, M: Machine<'a, 'mir, 'tcx>> HasDataLayout
+    for EvalContext<'a, 'mir, 'tcx, M>
 {
     #[inline]
     fn data_layout(&self) -> &layout::TargetDataLayout {
@@ -148,16 +148,7 @@ fn data_layout(&self) -> &layout::TargetDataLayout {
     }
 }
 
-impl<'c, 'b, 'a, 'mir, 'tcx, M: Machine<'a, 'mir, 'tcx>> HasDataLayout
-    for &'c &'b mut EvalContext<'a, 'mir, 'tcx, M>
-{
-    #[inline]
-    fn data_layout(&self) -> &layout::TargetDataLayout {
-        &self.tcx.data_layout
-    }
-}
-
-impl<'b, 'a, 'mir, 'tcx, M> layout::HasTyCtxt<'tcx> for &'b EvalContext<'a, 'mir, 'tcx, M>
+impl<'a, 'mir, 'tcx, M> layout::HasTyCtxt<'tcx> for EvalContext<'a, 'mir, 'tcx, M>
     where M: Machine<'a, 'mir, 'tcx>
 {
     #[inline]
@@ -166,40 +157,19 @@ fn tcx<'d>(&'d self) -> TyCtxt<'d, 'tcx, 'tcx> {
     }
 }
 
-impl<'c, 'b, 'a, 'mir, 'tcx, M: Machine<'a, 'mir, 'tcx>> layout::HasTyCtxt<'tcx>
-    for &'c &'b mut EvalContext<'a, 'mir, 'tcx, M>
-{
-    #[inline]
-    fn tcx<'d>(&'d self) -> TyCtxt<'d, 'tcx, 'tcx> {
-        *self.tcx
-    }
-}
-
-impl<'b, 'a, 'mir, 'tcx, M: Machine<'a, 'mir, 'tcx>> LayoutOf
-    for &'b EvalContext<'a, 'mir, 'tcx, M>
+impl<'a, 'mir, 'tcx, M: Machine<'a, 'mir, 'tcx>> LayoutOf
+    for EvalContext<'a, 'mir, 'tcx, M>
 {
     type Ty = Ty<'tcx>;
     type TyLayout = EvalResult<'tcx, TyLayout<'tcx>>;
 
     #[inline]
-    fn layout_of(self, ty: Ty<'tcx>) -> Self::TyLayout {
+    fn layout_of(&self, ty: Ty<'tcx>) -> Self::TyLayout {
         self.tcx.layout_of(self.param_env.and(ty))
             .map_err(|layout| EvalErrorKind::Layout(layout).into())
     }
 }
 
-impl<'c, 'b, 'a, 'mir, 'tcx, M: Machine<'a, 'mir, 'tcx>> LayoutOf
-    for &'c &'b mut EvalContext<'a, 'mir, 'tcx, M>
-{
-    type Ty = Ty<'tcx>;
-    type TyLayout = EvalResult<'tcx, TyLayout<'tcx>>;
-
-    #[inline]
-    fn layout_of(self, ty: Ty<'tcx>) -> Self::TyLayout {
-        (&**self).layout_of(ty)
-    }
-}
-
 impl<'a, 'mir, 'tcx: 'mir, M: Machine<'a, 'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M> {
     pub fn new(
         tcx: TyCtxtAt<'a, 'tcx, 'tcx>,
@@ -333,9 +303,9 @@ pub fn layout_of_local(
         self.layout_of(local_ty)
     }
 
-    pub fn str_to_value(&mut self, s: &str) -> EvalResult<'tcx, Value<M::PointerTag>> {
+    pub fn str_to_immediate(&mut self, s: &str) -> EvalResult<'tcx, Immediate<M::PointerTag>> {
         let ptr = self.memory.allocate_static_bytes(s.as_bytes()).with_default_tag();
-        Ok(Value::new_slice(Scalar::Ptr(ptr), s.len() as u64, self.tcx.tcx))
+        Ok(Immediate::new_slice(Scalar::Ptr(ptr), s.len() as u64, self))
     }
 
     /// Return the actual dynamic size and alignment of the place at the given type.
@@ -457,11 +427,11 @@ pub fn push_stack_frame(
 
         // don't allocate at all for trivial constants
         if mir.local_decls.len() > 1 {
-            // We put some marker value into the locals that we later want to initialize.
+            // We put some marker immediate into the locals that we later want to initialize.
             // This can be anything except for LocalValue::Dead -- because *that* is the
             // value we use for things that we know are initially dead.
             let dummy =
-                LocalValue::Live(Operand::Immediate(Value::Scalar(ScalarMaybeUndef::Undef)));
+                LocalValue::Live(Operand::Immediate(Immediate::Scalar(ScalarMaybeUndef::Undef)));
             let mut locals = IndexVec::from_elem(dummy, &mir.local_decls);
             // Return place is handled specially by the `eval_place` functions, and the
             // entry in `locals` should never be used. Make it dead, to be sure.
@@ -652,13 +622,13 @@ pub fn dump_place(&self, place: Place<M::PointerTag>) {
                             ptr => write!(msg, " by integral ref: {:?}", ptr).unwrap(),
                         }
                     }
-                    Ok(Operand::Immediate(Value::Scalar(val))) => {
+                    Ok(Operand::Immediate(Immediate::Scalar(val))) => {
                         write!(msg, " {:?}", val).unwrap();
                         if let ScalarMaybeUndef::Scalar(Scalar::Ptr(ptr)) = val {
                             allocs.push(ptr.alloc_id);
                         }
                     }
-                    Ok(Operand::Immediate(Value::ScalarPair(val1, val2))) => {
+                    Ok(Operand::Immediate(Immediate::ScalarPair(val1, val2))) => {
                         write!(msg, " ({:?}, {:?})", val1, val2).unwrap();
                         if let ScalarMaybeUndef::Scalar(Scalar::Ptr(ptr)) = val1 {
                             allocs.push(ptr.alloc_id);
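The impl changes above replace separate `HasDataLayout`/`LayoutOf` impls on `&EvalContext` and `&&mut EvalContext` with a single impl on `EvalContext` whose methods take `&self`. A standalone sketch (hypothetical trait and context, not the real ones) of why one `&self` impl covers all those call sites through auto-ref:

// Hypothetical, simplified stand-ins for LayoutOf / EvalContext.
trait LayoutOf {
    fn layout_of(&self, size: u64) -> u64;
}

struct Ctx {
    pointer_size: u64,
}

impl LayoutOf for Ctx {
    fn layout_of(&self, size: u64) -> u64 {
        // Toy "layout": round every request up to at least a pointer.
        size.max(self.pointer_size)
    }
}

// Both call sites work with the single `&self` impl; no extra impls on
// `&Ctx` or `&&mut Ctx` are needed.
fn use_shared(cx: &Ctx) -> u64 {
    cx.layout_of(2)
}

fn use_mutable(cx: &mut Ctx) -> u64 {
    cx.layout_of(2)
}

fn main() {
    let mut cx = Ctx { pointer_size: 8 };
    assert_eq!(use_shared(&cx), 8);
    assert_eq!(use_mutable(&mut cx), 8);
}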
index 5fa0fef36935df82138ccb5b87358c2fc4626d63..cb2a750f4e3b6cc4a987a5fe88ee6ac31ff9833c 100644 (file)
@@ -115,8 +115,8 @@ pub fn emulate_intrinsic(
             | "add_with_overflow"
             | "sub_with_overflow"
             | "mul_with_overflow" => {
-                let lhs = self.read_value(args[0])?;
-                let rhs = self.read_value(args[1])?;
+                let lhs = self.read_immediate(args[0])?;
+                let rhs = self.read_immediate(args[1])?;
                 let (bin_op, ignore_overflow) = match intrinsic_name {
                     "overflowing_add" => (BinOp::Add, true),
                     "overflowing_sub" => (BinOp::Sub, true),
@@ -133,14 +133,14 @@ pub fn emulate_intrinsic(
                 }
             }
             "unchecked_shl" | "unchecked_shr" => {
-                let l = self.read_value(args[0])?;
-                let r = self.read_value(args[1])?;
+                let l = self.read_immediate(args[0])?;
+                let r = self.read_immediate(args[1])?;
                 let bin_op = match intrinsic_name {
                     "unchecked_shl" => BinOp::Shl,
                     "unchecked_shr" => BinOp::Shr,
                     _ => bug!("Already checked for int ops")
                 };
-                let (val, overflowed) = self.binary_op_val(bin_op, l, r)?;
+                let (val, overflowed) = self.binary_op_imm(bin_op, l, r)?;
                 if overflowed {
                     let layout = self.layout_of(substs.type_at(0))?;
                     let r_val =  r.to_scalar()?.to_bits(layout.size)?;
@@ -172,8 +172,8 @@ pub fn hook_fn(
         // Some fn calls are actually BinOp intrinsics
         if let Some((op, oflo)) = self.tcx.is_binop_lang_item(def_id) {
             let dest = dest.expect("128 lowerings can't diverge");
-            let l = self.read_value(args[0])?;
-            let r = self.read_value(args[1])?;
+            let l = self.read_immediate(args[0])?;
+            let r = self.read_immediate(args[1])?;
             if oflo {
                 self.binop_with_overflow(op, l, r, dest)?;
             } else {
@@ -183,7 +183,7 @@ pub fn hook_fn(
         } else if Some(def_id) == self.tcx.lang_items().panic_fn() {
             assert!(args.len() == 1);
             // &(&'static str, &'static str, u32, u32)
-            let ptr = self.read_value(args[0])?;
+            let ptr = self.read_immediate(args[0])?;
             let place = self.ref_to_mplace(ptr)?;
             let (msg, file, line, col) = (
                 self.mplace_field(place, 0)?,
@@ -192,9 +192,9 @@ pub fn hook_fn(
                 self.mplace_field(place, 3)?,
             );
 
-            let msg_place = self.ref_to_mplace(self.read_value(msg.into())?)?;
+            let msg_place = self.ref_to_mplace(self.read_immediate(msg.into())?)?;
             let msg = Symbol::intern(self.read_str(msg_place)?);
-            let file_place = self.ref_to_mplace(self.read_value(file.into())?)?;
+            let file_place = self.ref_to_mplace(self.read_immediate(file.into())?)?;
             let file = Symbol::intern(self.read_str(file_place)?);
             let line = self.read_scalar(line.into())?.to_u32()?;
             let col = self.read_scalar(col.into())?.to_u32()?;
@@ -203,7 +203,7 @@ pub fn hook_fn(
             assert!(args.len() == 2);
             // &'static str, &(&'static str, u32, u32)
             let msg = args[0];
-            let ptr = self.read_value(args[1])?;
+            let ptr = self.read_immediate(args[1])?;
             let place = self.ref_to_mplace(ptr)?;
             let (file, line, col) = (
                 self.mplace_field(place, 0)?,
@@ -211,9 +211,9 @@ pub fn hook_fn(
                 self.mplace_field(place, 2)?,
             );
 
-            let msg_place = self.ref_to_mplace(self.read_value(msg.into())?)?;
+            let msg_place = self.ref_to_mplace(self.read_immediate(msg.into())?)?;
             let msg = Symbol::intern(self.read_str(msg_place)?);
-            let file_place = self.ref_to_mplace(self.read_value(file.into())?)?;
+            let file_place = self.ref_to_mplace(self.read_immediate(file.into())?)?;
             let file = Symbol::intern(self.read_str(file_place)?);
             let line = self.read_scalar(line.into())?.to_u32()?;
             let col = self.read_scalar(col.into())?.to_u32()?;
index 7811dcb0663d5249d4d48bf088b43d5b2ea6ada0..e9d181479e52e9962735625b392a825d6d05a8e6 100644 (file)
     EvalContext, PlaceTy, OpTy, Pointer, MemPlace, MemoryKind,
 };
 
-/// Classifying memory accesses
-#[derive(Clone, Copy, Debug, PartialEq, Eq)]
-pub enum MemoryAccess {
-    Read,
-    Write,
-}
-
 /// Whether this kind of memory is allowed to leak
 pub trait MayLeak: Copy {
     fn may_leak(self) -> bool;
@@ -181,17 +174,22 @@ fn box_alloc(
         dest: PlaceTy<'tcx, Self::PointerTag>,
     ) -> EvalResult<'tcx>;
 
-    /// Hook for performing extra checks on a memory access.
-    ///
-    /// Takes read-only access to the allocation so we can keep all the memory read
-    /// operations take `&self`.  Use a `RefCell` in `AllocExtra` if you
-    /// need to mutate.
+    /// Hook for performing extra checks on a memory read access.
     #[inline]
-    fn memory_accessed(
+    fn memory_read(
         _alloc: &Allocation<Self::PointerTag, Self::AllocExtra>,
         _ptr: Pointer<Self::PointerTag>,
         _size: Size,
-        _access: MemoryAccess,
+    ) -> EvalResult<'tcx> {
+        Ok(())
+    }
+
+    /// Hook for performing extra checks on a memory write access.
+    #[inline]
+    fn memory_written(
+        _alloc: &mut Allocation<Self::PointerTag, Self::AllocExtra>,
+        _ptr: Pointer<Self::PointerTag>,
+        _size: Size,
     ) -> EvalResult<'tcx> {
         Ok(())
     }
@@ -201,6 +199,7 @@ fn memory_accessed(
     fn memory_deallocated(
         _alloc: &mut Allocation<Self::PointerTag, Self::AllocExtra>,
         _ptr: Pointer<Self::PointerTag>,
+        _size: Size,
     ) -> EvalResult<'tcx> {
         Ok(())
     }
@@ -242,10 +241,10 @@ fn tag_dereference(
 
     /// Execute a validation operation
     #[inline]
-    fn validation_op(
+    fn retag(
         _ecx: &mut EvalContext<'a, 'mir, 'tcx, Self>,
-        _op: ::rustc::mir::ValidationOp,
-        _operand: &::rustc::mir::ValidationOperand<'tcx, ::rustc::mir::Place<'tcx>>,
+        _fn_entry: bool,
+        _place: PlaceTy<'tcx, Self::PointerTag>,
     ) -> EvalResult<'tcx> {
         Ok(())
     }
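The trait changes above split one access hook into `memory_read` and `memory_written` (the latter with mutable access to the allocation) and replace the validation hook with `retag`. A simplified standalone sketch of that default-hook pattern (hypothetical `Machine` and `Allocation` types, not rustc's):

type EvalResult = Result<(), String>;

struct Allocation {
    bytes: Vec<u8>,
}

// Separate read/write hooks with no-op defaults; only the write hook gets
// `&mut` access to the allocation.
trait Machine {
    fn memory_read(_alloc: &Allocation, _offset: u64, _size: u64) -> EvalResult {
        Ok(())
    }
    fn memory_written(_alloc: &mut Allocation, _offset: u64, _size: u64) -> EvalResult {
        Ok(())
    }
}

// A machine that opts into extra checks only on writes.
struct CheckingMachine;

impl Machine for CheckingMachine {
    fn memory_written(alloc: &mut Allocation, offset: u64, size: u64) -> EvalResult {
        if offset + size > alloc.bytes.len() as u64 {
            return Err("write past the end of the allocation".to_string());
        }
        Ok(())
    }
}

fn main() {
    let mut alloc = Allocation { bytes: vec![0; 8] };
    assert!(CheckingMachine::memory_read(&alloc, 0, 8).is_ok()); // default hook
    assert!(CheckingMachine::memory_written(&mut alloc, 0, 8).is_ok());
    assert!(CheckingMachine::memory_written(&mut alloc, 4, 8).is_err());
}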
index 689a29cff6e9e465554829b05194cf14da53f332..6a109efe3c43e2cc3ae6dcc9b61e90852653b0c4 100644 (file)
@@ -30,7 +30,7 @@
 use super::{
     Pointer, AllocId, Allocation, ConstValue, GlobalId,
     EvalResult, Scalar, EvalErrorKind, AllocType, PointerArithmetic,
-    Machine, MemoryAccess, AllocMap, MayLeak, ScalarMaybeUndef, ErrorHandled,
+    Machine, AllocMap, MayLeak, ScalarMaybeUndef, ErrorHandled,
 };
 
 #[derive(Debug, PartialEq, Eq, Copy, Clone, Hash)]
@@ -77,16 +77,8 @@ pub struct Memory<'a, 'mir, 'tcx: 'a + 'mir, M: Machine<'a, 'mir, 'tcx>> {
     pub(super) tcx: TyCtxtAt<'a, 'tcx, 'tcx>,
 }
 
-impl<'b, 'a, 'mir, 'tcx, M: Machine<'a, 'mir, 'tcx>> HasDataLayout
-    for &'b Memory<'a, 'mir, 'tcx, M>
-{
-    #[inline]
-    fn data_layout(&self) -> &TargetDataLayout {
-        &self.tcx.data_layout
-    }
-}
-impl<'a, 'b, 'c, 'mir, 'tcx, M: Machine<'a, 'mir, 'tcx>> HasDataLayout
-    for &'b &'c mut Memory<'a, 'mir, 'tcx, M>
+impl<'a, 'mir, 'tcx, M: Machine<'a, 'mir, 'tcx>> HasDataLayout
+    for Memory<'a, 'mir, 'tcx, M>
 {
     #[inline]
     fn data_layout(&self) -> &TargetDataLayout {
@@ -232,7 +224,8 @@ pub fn deallocate(
         }
 
         // Let the machine take some extra action
-        M::memory_deallocated(&mut alloc, ptr)?;
+        let size = Size::from_bytes(alloc.bytes.len() as u64);
+        M::memory_deallocated(&mut alloc, ptr, size)?;
 
         // Don't forget to remember size and align of this now-dead allocation
         let old = self.dead_alloc_map.insert(
@@ -644,7 +637,7 @@ fn get_bytes_internal(
         }
 
         let alloc = self.get(ptr.alloc_id)?;
-        M::memory_accessed(alloc, ptr, size, MemoryAccess::Read)?;
+        M::memory_read(alloc, ptr, size)?;
 
         assert_eq!(ptr.offset.bytes() as usize as u64, ptr.offset.bytes());
         assert_eq!(size.bytes() as usize as u64, size.bytes());
@@ -690,7 +683,7 @@ fn get_bytes_mut(
         self.clear_relocations(ptr, size)?;
 
         let alloc = self.get_mut(ptr.alloc_id)?;
-        M::memory_accessed(alloc, ptr, size, MemoryAccess::Write)?;
+        M::memory_written(alloc, ptr, size)?;
 
         assert_eq!(ptr.offset.bytes() as usize as u64, ptr.offset.bytes());
         assert_eq!(size.bytes() as usize as u64, size.bytes());
@@ -737,6 +730,11 @@ pub fn intern_static(
             if self.alloc_map.contains_key(&alloc) {
                 // Not yet interned, so proceed recursively
                 self.intern_static(alloc, mutability)?;
+            } else if self.dead_alloc_map.contains_key(&alloc) {
+                // dangling pointer
+                return err!(ValidationFailure(
+                    "encountered dangling pointer in final constant".into(),
+                ))
             }
         }
         Ok(())
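The new branch above rejects constants whose final value still points into an allocation that was deallocated during evaluation. A hypothetical, much-simplified sketch of that check, where a set of dead allocation ids stands in for the interpreter's bookkeeping:

use std::collections::HashSet;

fn check_final_constant(
    referenced_allocs: &[u64],
    dead_alloc_map: &HashSet<u64>,
) -> Result<(), String> {
    for alloc_id in referenced_allocs {
        if dead_alloc_map.contains(alloc_id) {
            // Mirrors the ValidationFailure error in the hunk above.
            return Err("encountered dangling pointer in final constant".to_string());
        }
    }
    Ok(())
}

fn main() {
    let dead: HashSet<u64> = [7u64].iter().cloned().collect();
    assert!(check_final_constant(&[1, 2], &dead).is_ok());
    assert!(check_final_constant(&[1, 7], &dead).is_err());
}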
index 55037a99e0124827ce4adffb000862d856e30bcd..6b31c675cc7d6a1a5ce9bfd881c8df696fc01372 100644 (file)
@@ -34,8 +34,8 @@
 
 pub use self::memory::{Memory, MemoryKind};
 
-pub use self::machine::{Machine, AllocMap, MemoryAccess, MayLeak};
+pub use self::machine::{Machine, AllocMap, MayLeak};
 
-pub use self::operand::{ScalarMaybeUndef, Value, ValTy, Operand, OpTy};
+pub use self::operand::{ScalarMaybeUndef, Immediate, ImmTy, Operand, OpTy};
 
 pub use self::validity::RefTracking;
index d0a32161485b4218e50b2834e96f7e01226b94cf..6f66dd1e70a55bc0b53d75a5473fe38b0739ea3f 100644 (file)
@@ -112,7 +112,7 @@ pub fn to_u64(self) -> EvalResult<'tcx, u64> {
     }
 
     #[inline(always)]
-    pub fn to_usize(self, cx: impl HasDataLayout) -> EvalResult<'tcx, u64> {
+    pub fn to_usize(self, cx: &impl HasDataLayout) -> EvalResult<'tcx, u64> {
         self.not_undef()?.to_usize(cx)
     }
 
@@ -132,7 +132,7 @@ pub fn to_i64(self) -> EvalResult<'tcx, i64> {
     }
 
     #[inline(always)]
-    pub fn to_isize(self, cx: impl HasDataLayout) -> EvalResult<'tcx, i64> {
+    pub fn to_isize(self, cx: &impl HasDataLayout) -> EvalResult<'tcx, i64> {
         self.not_undef()?.to_isize(cx)
     }
 }
@@ -144,54 +144,57 @@ pub fn to_isize(self, cx: impl HasDataLayout) -> EvalResult<'tcx, i64> {
 /// primitive values (`ScalarPair`). It allows Miri to avoid making allocations for checked binary
 /// operations and fat pointers. This idea was taken from rustc's codegen.
 /// In particular, thanks to `ScalarPair`, arithmetic operations and casts can be entirely
-/// defined on `Value`, and do not have to work with a `Place`.
+/// defined on `Immediate`, and do not have to work with a `Place`.
 #[derive(Copy, Clone, Debug, Hash, PartialEq, Eq)]
-pub enum Value<Tag=(), Id=AllocId> {
+pub enum Immediate<Tag=(), Id=AllocId> {
     Scalar(ScalarMaybeUndef<Tag, Id>),
     ScalarPair(ScalarMaybeUndef<Tag, Id>, ScalarMaybeUndef<Tag, Id>),
 }
 
-impl Value {
+impl Immediate {
     #[inline]
-    pub fn with_default_tag<Tag>(self) -> Value<Tag>
+    pub fn with_default_tag<Tag>(self) -> Immediate<Tag>
         where Tag: Default
     {
         match self {
-            Value::Scalar(x) => Value::Scalar(x.with_default_tag()),
-            Value::ScalarPair(x, y) =>
-                Value::ScalarPair(x.with_default_tag(), y.with_default_tag()),
+            Immediate::Scalar(x) => Immediate::Scalar(x.with_default_tag()),
+            Immediate::ScalarPair(x, y) =>
+                Immediate::ScalarPair(x.with_default_tag(), y.with_default_tag()),
         }
     }
 }
 
-impl<'tcx, Tag> Value<Tag> {
+impl<'tcx, Tag> Immediate<Tag> {
     #[inline]
-    pub fn erase_tag(self) -> Value
+    pub fn erase_tag(self) -> Immediate
     {
         match self {
-            Value::Scalar(x) => Value::Scalar(x.erase_tag()),
-            Value::ScalarPair(x, y) =>
-                Value::ScalarPair(x.erase_tag(), y.erase_tag()),
+            Immediate::Scalar(x) => Immediate::Scalar(x.erase_tag()),
+            Immediate::ScalarPair(x, y) =>
+                Immediate::ScalarPair(x.erase_tag(), y.erase_tag()),
         }
     }
 
     pub fn new_slice(
         val: Scalar<Tag>,
         len: u64,
-        cx: impl HasDataLayout
+        cx: &impl HasDataLayout
     ) -> Self {
-        Value::ScalarPair(val.into(), Scalar::from_uint(len, cx.data_layout().pointer_size).into())
+        Immediate::ScalarPair(
+            val.into(),
+            Scalar::from_uint(len, cx.data_layout().pointer_size).into(),
+        )
     }
 
     pub fn new_dyn_trait(val: Scalar<Tag>, vtable: Pointer<Tag>) -> Self {
-        Value::ScalarPair(val.into(), Scalar::Ptr(vtable).into())
+        Immediate::ScalarPair(val.into(), Scalar::Ptr(vtable).into())
     }
 
     #[inline]
     pub fn to_scalar_or_undef(self) -> ScalarMaybeUndef<Tag> {
         match self {
-            Value::Scalar(val) => val,
-            Value::ScalarPair(..) => bug!("Got a fat pointer where a scalar was expected"),
+            Immediate::Scalar(val) => val,
+            Immediate::ScalarPair(..) => bug!("Got a fat pointer where a scalar was expected"),
         }
     }
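The hunks above rename the interpreter's register-like value type from `Value` to `Immediate`: either a single scalar or a scalar pair, the latter used for fat pointers and checked-arithmetic results. A much-simplified standalone model of that shape (the `Scalar` type here is a placeholder, not the real interpreter type):

#[derive(Copy, Clone, Debug)]
struct Scalar(u128);

// A value small enough to keep "in registers": one scalar or a pair.
#[derive(Copy, Clone, Debug)]
enum Immediate {
    Scalar(Scalar),
    ScalarPair(Scalar, Scalar),
}

impl Immediate {
    // Mirrors `Immediate::new_slice` above: a slice is (data pointer, length).
    fn new_slice(ptr: u128, len: u64) -> Self {
        Immediate::ScalarPair(Scalar(ptr), Scalar(u128::from(len)))
    }

    // Mirrors `to_scalar_pair`: only a pair can be split into two scalars.
    fn to_scalar_pair(self) -> Option<(Scalar, Scalar)> {
        match self {
            Immediate::Scalar(_) => None,
            Immediate::ScalarPair(a, b) => Some((a, b)),
        }
    }
}

fn main() {
    let slice = Immediate::new_slice(0x1000, 3);
    let (ptr, len) = slice.to_scalar_pair().unwrap();
    println!("slice immediate: ptr = {:?}, len = {:?}", ptr, len);

    let unit = Immediate::Scalar(Scalar(42));
    assert!(unit.to_scalar_pair().is_none());
}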
 
@@ -203,18 +206,18 @@ pub fn to_scalar(self) -> EvalResult<'tcx, Scalar<Tag>> {
     #[inline]
     pub fn to_scalar_pair(self) -> EvalResult<'tcx, (Scalar<Tag>, Scalar<Tag>)> {
         match self {
-            Value::Scalar(..) => bug!("Got a thin pointer where a scalar pair was expected"),
-            Value::ScalarPair(a, b) => Ok((a.not_undef()?, b.not_undef()?))
+            Immediate::Scalar(..) => bug!("Got a thin pointer where a scalar pair was expected"),
+            Immediate::ScalarPair(a, b) => Ok((a.not_undef()?, b.not_undef()?))
         }
     }
 
-    /// Convert the value into a pointer (or a pointer-sized integer).
+    /// Convert the immediate into a pointer (or a pointer-sized integer).
     /// Throws away the second half of a ScalarPair!
     #[inline]
     pub fn to_scalar_ptr(self) -> EvalResult<'tcx, Scalar<Tag>> {
         match self {
-            Value::Scalar(ptr) |
-            Value::ScalarPair(ptr, _) => ptr.not_undef(),
+            Immediate::Scalar(ptr) |
+            Immediate::ScalarPair(ptr, _) => ptr.not_undef(),
         }
     }
 
@@ -223,25 +226,25 @@ pub fn to_scalar_ptr(self) -> EvalResult<'tcx, Scalar<Tag>> {
     #[inline]
     pub fn to_meta(self) -> EvalResult<'tcx, Option<Scalar<Tag>>> {
         Ok(match self {
-            Value::Scalar(_) => None,
-            Value::ScalarPair(_, meta) => Some(meta.not_undef()?),
+            Immediate::Scalar(_) => None,
+            Immediate::ScalarPair(_, meta) => Some(meta.not_undef()?),
         })
     }
 }
 
-// ScalarPair needs a type to interpret, so we often have a value and a type together
+// ScalarPair needs a type to interpret, so we often have an immediate and a type together
 // as input for binary and cast operations.
 #[derive(Copy, Clone, Debug)]
-pub struct ValTy<'tcx, Tag=()> {
-    value: Value<Tag>,
+pub struct ImmTy<'tcx, Tag=()> {
+    immediate: Immediate<Tag>,
     pub layout: TyLayout<'tcx>,
 }
 
-impl<'tcx, Tag> ::std::ops::Deref for ValTy<'tcx, Tag> {
-    type Target = Value<Tag>;
+impl<'tcx, Tag> ::std::ops::Deref for ImmTy<'tcx, Tag> {
+    type Target = Immediate<Tag>;
     #[inline(always)]
-    fn deref(&self) -> &Value<Tag> {
-        &self.value
+    fn deref(&self) -> &Immediate<Tag> {
+        &self.immediate
     }
 }
 
@@ -250,7 +253,7 @@ fn deref(&self) -> &Value<Tag> {
 /// memory and to avoid having to store arbitrary-sized data here.
 #[derive(Copy, Clone, Debug, Hash, PartialEq, Eq)]
 pub enum Operand<Tag=(), Id=AllocId> {
-    Immediate(Value<Tag, Id>),
+    Immediate(Immediate<Tag, Id>),
     Indirect(MemPlace<Tag, Id>),
 }
 
@@ -288,11 +291,11 @@ pub fn to_mem_place(self) -> MemPlace<Tag>
     }
 
     #[inline]
-    pub fn to_immediate(self) -> Value<Tag>
+    pub fn to_immediate(self) -> Immediate<Tag>
         where Tag: ::std::fmt::Debug
     {
         match self {
-            Operand::Immediate(val) => val,
+            Operand::Immediate(imm) => imm,
             _ => bug!("to_immediate: expected Operand::Immediate, got {:?}", self),
 
         }
@@ -323,11 +326,11 @@ fn from(mplace: MPlaceTy<'tcx, Tag>) -> Self {
     }
 }
 
-impl<'tcx, Tag> From<ValTy<'tcx, Tag>> for OpTy<'tcx, Tag> {
+impl<'tcx, Tag> From<ImmTy<'tcx, Tag>> for OpTy<'tcx, Tag> {
     #[inline(always)]
-    fn from(val: ValTy<'tcx, Tag>) -> Self {
+    fn from(val: ImmTy<'tcx, Tag>) -> Self {
         OpTy {
-            op: Operand::Immediate(val.value),
+            op: Operand::Immediate(val.immediate),
             layout: val.layout
         }
     }
@@ -367,12 +370,12 @@ fn from_known_layout<'tcx>(
 }
 
 impl<'a, 'mir, 'tcx, M: Machine<'a, 'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M> {
-    /// Try reading a value in memory; this is interesting particularly for ScalarPair.
+    /// Try reading an immediate in memory; this is interesting particularly for ScalarPair.
     /// Return None if the layout does not permit loading this as a value.
-    pub(super) fn try_read_value_from_mplace(
+    pub(super) fn try_read_immediate_from_mplace(
         &self,
         mplace: MPlaceTy<'tcx, M::PointerTag>,
-    ) -> EvalResult<'tcx, Option<Value<M::PointerTag>>> {
+    ) -> EvalResult<'tcx, Option<Immediate<M::PointerTag>>> {
         if mplace.layout.is_unsized() {
             // Don't touch unsized
             return Ok(None);
@@ -383,14 +386,14 @@ pub(super) fn try_read_value_from_mplace(
             // Not all ZSTs have a layout we would handle below, so just short-circuit them
             // all here.
             self.memory.check_align(ptr, ptr_align)?;
-            return Ok(Some(Value::Scalar(Scalar::zst().into())));
+            return Ok(Some(Immediate::Scalar(Scalar::zst().into())));
         }
 
         let ptr = ptr.to_ptr()?;
         match mplace.layout.abi {
             layout::Abi::Scalar(..) => {
                 let scalar = self.memory.read_scalar(ptr, ptr_align, mplace.layout.size)?;
-                Ok(Some(Value::Scalar(scalar)))
+                Ok(Some(Immediate::Scalar(scalar)))
             }
             layout::Abi::ScalarPair(ref a, ref b) => {
                 let (a, b) = (&a.value, &b.value);
@@ -401,25 +404,25 @@ pub(super) fn try_read_value_from_mplace(
                 let b_ptr = ptr.offset(b_offset, self)?.into();
                 let a_val = self.memory.read_scalar(a_ptr, ptr_align, a_size)?;
                 let b_val = self.memory.read_scalar(b_ptr, ptr_align, b_size)?;
-                Ok(Some(Value::ScalarPair(a_val, b_val)))
+                Ok(Some(Immediate::ScalarPair(a_val, b_val)))
             }
             _ => Ok(None),
         }
     }
 
-    /// Try returning an immediate value for the operand.
-    /// If the layout does not permit loading this as a value, return where in memory
+    /// Try returning an immediate for the operand.
+    /// If the layout does not permit loading this as an immediate, return where in memory
     /// we can find the data.
     /// Note that for a given layout, this operation will either always fail or always
     /// succeed!  Whether it succeeds depends on whether the layout can be represented
-    /// in a `Value`, not on which data is stored there currently.
-    pub(crate) fn try_read_value(
+    /// in an `Immediate`, not on which data is stored there currently.
+    pub(crate) fn try_read_immediate(
         &self,
         src: OpTy<'tcx, M::PointerTag>,
-    ) -> EvalResult<'tcx, Result<Value<M::PointerTag>, MemPlace<M::PointerTag>>> {
+    ) -> EvalResult<'tcx, Result<Immediate<M::PointerTag>, MemPlace<M::PointerTag>>> {
         Ok(match src.try_as_mplace() {
             Ok(mplace) => {
-                if let Some(val) = self.try_read_value_from_mplace(mplace)? {
+                if let Some(val) = self.try_read_immediate_from_mplace(mplace)? {
                     Ok(val)
                 } else {
                     Err(*mplace)
@@ -429,14 +432,14 @@ pub(crate) fn try_read_value(
         })
     }
 
-    /// Read a value from a place, asserting that that is possible with the given layout.
+    /// Read an immediate from a place, asserting that that is possible with the given layout.
     #[inline(always)]
-    pub fn read_value(
+    pub fn read_immediate(
         &self,
         op: OpTy<'tcx, M::PointerTag>
-    ) -> EvalResult<'tcx, ValTy<'tcx, M::PointerTag>> {
-        if let Ok(value) = self.try_read_value(op)? {
-            Ok(ValTy { value, layout: op.layout })
+    ) -> EvalResult<'tcx, ImmTy<'tcx, M::PointerTag>> {
+        if let Ok(immediate) = self.try_read_immediate(op)? {
+            Ok(ImmTy { immediate, layout: op.layout })
         } else {
             bug!("primitive read failed for type: {:?}", op.layout.ty);
         }
@@ -447,10 +450,7 @@ pub fn read_scalar(
         &self,
         op: OpTy<'tcx, M::PointerTag>
     ) -> EvalResult<'tcx, ScalarMaybeUndef<M::PointerTag>> {
-        match *self.read_value(op)? {
-            Value::ScalarPair(..) => bug!("got ScalarPair for type: {:?}", op.layout.ty),
-            Value::Scalar(val) => Ok(val),
-        }
+        Ok(self.read_immediate(op)?.to_scalar_or_undef())
     }
 
     // Turn the MPlace into a string (must already be dereferenced!)
@@ -470,16 +470,16 @@ pub fn uninit_operand(
         layout: TyLayout<'tcx>
     ) -> EvalResult<'tcx, Operand<M::PointerTag>> {
         // This decides which types we will use the Immediate optimization for, and hence should
-        // match what `try_read_value` and `eval_place_to_op` support.
+        // match what `try_read_immediate` and `eval_place_to_op` support.
         if layout.is_zst() {
-            return Ok(Operand::Immediate(Value::Scalar(Scalar::zst().into())));
+            return Ok(Operand::Immediate(Immediate::Scalar(Scalar::zst().into())));
         }
 
         Ok(match layout.abi {
             layout::Abi::Scalar(..) =>
-                Operand::Immediate(Value::Scalar(ScalarMaybeUndef::Undef)),
+                Operand::Immediate(Immediate::Scalar(ScalarMaybeUndef::Undef)),
             layout::Abi::ScalarPair(..) =>
-                Operand::Immediate(Value::ScalarPair(
+                Operand::Immediate(Immediate::ScalarPair(
                     ScalarMaybeUndef::Undef,
                     ScalarMaybeUndef::Undef,
                 )),
@@ -510,22 +510,22 @@ pub fn operand_field(
         let field = field.try_into().unwrap();
         let field_layout = op.layout.field(self, field)?;
         if field_layout.is_zst() {
-            let val = Value::Scalar(Scalar::zst().into());
-            return Ok(OpTy { op: Operand::Immediate(val), layout: field_layout });
+            let immediate = Immediate::Scalar(Scalar::zst().into());
+            return Ok(OpTy { op: Operand::Immediate(immediate), layout: field_layout });
         }
         let offset = op.layout.fields.offset(field);
-        let value = match base {
+        let immediate = match base {
             // the field covers the entire type
             _ if offset.bytes() == 0 && field_layout.size == op.layout.size => base,
             // extract fields from types with `ScalarPair` ABI
-            Value::ScalarPair(a, b) => {
+            Immediate::ScalarPair(a, b) => {
                 let val = if offset.bytes() == 0 { a } else { b };
-                Value::Scalar(val)
+                Immediate::Scalar(val)
             },
-            Value::Scalar(val) =>
+            Immediate::Scalar(val) =>
                 bug!("field access on non aggregate {:#?}, {:#?}", val, op.layout),
         };
-        Ok(OpTy { op: Operand::Immediate(value), layout: field_layout })
+        Ok(OpTy { op: Operand::Immediate(immediate), layout: field_layout })
     }
 
     pub fn operand_downcast(
@@ -551,7 +551,7 @@ pub(super) fn deref_operand(
         &self,
         src: OpTy<'tcx, M::PointerTag>,
     ) -> EvalResult<'tcx, MPlaceTy<'tcx, M::PointerTag>> {
-        let val = self.read_value(src)?;
+        let val = self.read_immediate(src)?;
         trace!("deref to {} on {:?}", val.layout.ty, *val);
         Ok(self.ref_to_mplace(val)?)
     }
@@ -568,7 +568,7 @@ pub fn operand_projection(
             Deref => self.deref_operand(base)?.into(),
             Subslice { .. } | ConstantIndex { .. } | Index(_) => if base.layout.is_zst() {
                 OpTy {
-                    op: Operand::Immediate(Value::Scalar(Scalar::zst().into())),
+                    op: Operand::Immediate(Immediate::Scalar(Scalar::zst().into())),
                     // the actual index doesn't matter, so we just pick a convenient one like 0
                     layout: base.layout.field(self, 0)?,
                 }
@@ -682,9 +682,12 @@ pub(super) fn const_value_to_op(
                 ).with_default_tag())
             },
             ConstValue::ScalarPair(a, b) =>
-                Ok(Operand::Immediate(Value::ScalarPair(a.into(), b.into())).with_default_tag()),
+                Ok(Operand::Immediate(Immediate::ScalarPair(
+                    a.into(),
+                    b.into(),
+                )).with_default_tag()),
             ConstValue::Scalar(x) =>
-                Ok(Operand::Immediate(Value::Scalar(x.into())).with_default_tag()),
+                Ok(Operand::Immediate(Immediate::Scalar(x.into())).with_default_tag()),
         }
     }
     pub fn const_to_op(
@@ -722,7 +725,7 @@ pub fn read_discriminant(
         }
         // read raw discriminant value
         let discr_op = self.operand_field(rval, 0)?;
-        let discr_val = self.read_value(discr_op)?;
+        let discr_val = self.read_immediate(discr_op)?;
         let raw_discr = discr_val.to_scalar()?;
         trace!("discr value: {:?}", raw_discr);
         // post-process
@@ -740,7 +743,7 @@ pub fn read_discriminant(
                         .ty_adt_def().expect("tagged layout corresponds to adt")
                         .repr
                         .discr_type();
-                    let discr_ty = layout::Integer::from_attr(self.tcx.tcx, discr_ty);
+                    let discr_ty = layout::Integer::from_attr(self, discr_ty);
                     let shift = 128 - discr_ty.size().bits();
                     let truncatee = sexted as u128;
                     (truncatee << shift) >> shift
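The discriminant handling above relies on the `<< shift` / `>> shift` pair on `u128` to keep only the low `bits` bits of a sign-extended value. A small worked example of that truncation trick:

fn truncate(value: u128, bits: u32) -> u128 {
    let shift = 128 - bits;
    // Logical shifts on u128 zero-fill, so this drops everything above `bits`.
    (value << shift) >> shift
}

fn main() {
    // -1 sign-extended to 128 bits, truncated back to 8 bits, is 0xff.
    let sexted = -1i128 as u128;
    assert_eq!(truncate(sexted, 8), 0xff);
    assert_eq!(truncate(0x1234, 8), 0x34);
}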
index 5f4bafc39f3deec8aa57efb12157d67c36fd7a14..31824d5ec4a93380b2ad428361f36505e682c903 100644 (file)
@@ -15,7 +15,7 @@
 use rustc_apfloat::Float;
 use rustc::mir::interpret::{EvalResult, Scalar};
 
-use super::{EvalContext, PlaceTy, Value, Machine, ValTy};
+use super::{EvalContext, PlaceTy, Immediate, Machine, ImmTy};
 
 
 impl<'a, 'mir, 'tcx, M: Machine<'a, 'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M> {
@@ -24,13 +24,13 @@ impl<'a, 'mir, 'tcx, M: Machine<'a, 'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M>
     pub fn binop_with_overflow(
         &mut self,
         op: mir::BinOp,
-        left: ValTy<'tcx, M::PointerTag>,
-        right: ValTy<'tcx, M::PointerTag>,
+        left: ImmTy<'tcx, M::PointerTag>,
+        right: ImmTy<'tcx, M::PointerTag>,
         dest: PlaceTy<'tcx, M::PointerTag>,
     ) -> EvalResult<'tcx> {
-        let (val, overflowed) = self.binary_op_val(op, left, right)?;
-        let val = Value::ScalarPair(val.into(), Scalar::from_bool(overflowed).into());
-        self.write_value(val, dest)
+        let (val, overflowed) = self.binary_op_imm(op, left, right)?;
+        let val = Immediate::ScalarPair(val.into(), Scalar::from_bool(overflowed).into());
+        self.write_immediate(val, dest)
     }
 
     /// Applies the binary operation `op` to the arguments and writes the result to the
@@ -38,11 +38,11 @@ pub fn binop_with_overflow(
     pub fn binop_ignore_overflow(
         &mut self,
         op: mir::BinOp,
-        left: ValTy<'tcx, M::PointerTag>,
-        right: ValTy<'tcx, M::PointerTag>,
+        left: ImmTy<'tcx, M::PointerTag>,
+        right: ImmTy<'tcx, M::PointerTag>,
         dest: PlaceTy<'tcx, M::PointerTag>,
     ) -> EvalResult<'tcx> {
-        let (val, _overflowed) = self.binary_op_val(op, left, right)?;
+        let (val, _overflowed) = self.binary_op_imm(op, left, right)?;
         self.write_scalar(val, dest)
     }
 }
@@ -283,13 +283,13 @@ fn binary_int_op(
     }
 
     /// Convenience wrapper that's useful when keeping the layout together with the
-    /// value.
+    /// immediate value.
     #[inline]
-    pub fn binary_op_val(
+    pub fn binary_op_imm(
         &self,
         bin_op: mir::BinOp,
-        left: ValTy<'tcx, M::PointerTag>,
-        right: ValTy<'tcx, M::PointerTag>,
+        left: ImmTy<'tcx, M::PointerTag>,
+        right: ImmTy<'tcx, M::PointerTag>,
     ) -> EvalResult<'tcx, (Scalar<M::PointerTag>, bool)> {
         self.binary_op(
             bin_op,
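The intrinsic and operator hunks above funnel the `*_with_overflow` operations into `binop_with_overflow`, which writes the (result, did-overflow) pair as a `ScalarPair`. Plain Rust exposes the same shape directly, as this tiny example shows:

fn main() {
    // 200 + 100 = 300 wraps to 44 in u8 and reports overflow.
    let (sum, overflowed) = 200u8.overflowing_add(100);
    assert_eq!((sum, overflowed), (44, true));

    let (sum, overflowed) = 20u8.overflowing_add(100);
    assert_eq!((sum, overflowed), (120, false));
}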
index 0eae2bfb226c614fa47c7f7549e032895d1985fa..fa4d31846df4a3de276c2fa8799a248c5c09e316 100644 (file)
@@ -25,7 +25,7 @@
 };
 use super::{
     EvalContext, Machine, AllocMap,
-    Value, ValTy, ScalarMaybeUndef, Operand, OpTy, MemoryKind
+    Immediate, ImmTy, ScalarMaybeUndef, Operand, OpTy, MemoryKind
 };
 
 #[derive(Copy, Clone, Debug, Hash, PartialEq, Eq)]
@@ -128,7 +128,7 @@ pub fn from_scalar_ptr(ptr: Scalar<Tag>, align: Align) -> Self {
 
     /// Produces a Place that will error if attempted to be read from or written to
     #[inline(always)]
-    pub fn null(cx: impl HasDataLayout) -> Self {
+    pub fn null(cx: &impl HasDataLayout) -> Self {
         Self::from_scalar_ptr(Scalar::ptr_null(cx), Align::from_bytes(1, 1).unwrap())
     }
 
@@ -156,7 +156,7 @@ pub fn to_ptr(self) -> EvalResult<'tcx, Pointer<Tag>> {
 impl<'tcx, Tag> MPlaceTy<'tcx, Tag> {
     /// Produces a MemPlace that works for ZST but nothing else
     #[inline]
-    pub fn dangling(layout: TyLayout<'tcx>, cx: impl HasDataLayout) -> Self {
+    pub fn dangling(layout: TyLayout<'tcx>, cx: &impl HasDataLayout) -> Self {
         MPlaceTy {
             mplace: MemPlace::from_scalar_ptr(
                 Scalar::from_uint(layout.align.abi(), cx.pointer_size()),
@@ -172,7 +172,7 @@ fn from_aligned_ptr(ptr: Pointer<Tag>, layout: TyLayout<'tcx>) -> Self {
     }
 
     #[inline]
-    pub(super) fn len(self, cx: impl HasDataLayout) -> EvalResult<'tcx, u64> {
+    pub(super) fn len(self, cx: &impl HasDataLayout) -> EvalResult<'tcx, u64> {
         if self.layout.is_unsized() {
             // We need to consult `meta` metadata
             match self.layout.ty.sty {
@@ -201,10 +201,10 @@ pub(super) fn vtable(self) -> EvalResult<'tcx, Pointer<Tag>> {
 
 impl<'tcx, Tag: ::std::fmt::Debug> OpTy<'tcx, Tag> {
     #[inline(always)]
-    pub fn try_as_mplace(self) -> Result<MPlaceTy<'tcx, Tag>, Value<Tag>> {
+    pub fn try_as_mplace(self) -> Result<MPlaceTy<'tcx, Tag>, Immediate<Tag>> {
         match self.op {
             Operand::Indirect(mplace) => Ok(MPlaceTy { mplace, layout: self.layout }),
-            Operand::Immediate(value) => Err(value),
+            Operand::Immediate(imm) => Err(imm),
         }
     }
 
@@ -217,7 +217,7 @@ pub fn to_mem_place(self) -> MPlaceTy<'tcx, Tag> {
 impl<'tcx, Tag: ::std::fmt::Debug> Place<Tag> {
     /// Produces a Place that will error if attempted to be read from or written to
     #[inline(always)]
-    pub fn null(cx: impl HasDataLayout) -> Self {
+    pub fn null(cx: &impl HasDataLayout) -> Self {
         Place::Ptr(MemPlace::null(cx))
     }
 
@@ -269,7 +269,7 @@ impl<'a, 'mir, 'tcx, Tag, M> EvalContext<'a, 'mir, 'tcx, M>
     /// Alignment is just based on the type.  This is the inverse of `create_ref`.
     pub fn ref_to_mplace(
         &self,
-        val: ValTy<'tcx, M::PointerTag>,
+        val: ImmTy<'tcx, M::PointerTag>,
     ) -> EvalResult<'tcx, MPlaceTy<'tcx, M::PointerTag>> {
         let pointee_type = val.layout.ty.builtin_deref(true).unwrap().ty;
         let layout = self.layout_of(pointee_type)?;
@@ -299,30 +299,24 @@ pub fn ref_to_mplace(
 
     /// Turn a mplace into a (thin or fat) pointer, as a reference, pointing to the same space.
     /// This is the inverse of `ref_to_mplace`.
+    /// `mutbl` indicates whether we are creating a shared or mutable ref, or a raw pointer (`None`).
     pub fn create_ref(
         &mut self,
         place: MPlaceTy<'tcx, M::PointerTag>,
-        borrow_kind: Option<mir::BorrowKind>,
-    ) -> EvalResult<'tcx, Value<M::PointerTag>> {
+        mutbl: Option<hir::Mutability>,
+    ) -> EvalResult<'tcx, Immediate<M::PointerTag>> {
         // Pointer tag tracking might want to adjust the tag
         let place = if M::ENABLE_PTR_TRACKING_HOOKS {
             let (size, _) = self.size_and_align_of_mplace(place)?
                 // for extern types, just cover what we can
                 .unwrap_or_else(|| place.layout.size_and_align());
-            let mutbl = match borrow_kind {
-                Some(mir::BorrowKind::Mut { .. }) |
-                Some(mir::BorrowKind::Unique) =>
-                    Some(hir::MutMutable),
-                Some(_) => Some(hir::MutImmutable),
-                None => None,
-            };
             M::tag_reference(self, *place, place.layout.ty, size, mutbl)?
         } else {
             *place
         };
         Ok(match place.meta {
-            None => Value::Scalar(place.ptr.into()),
-            Some(meta) => Value::ScalarPair(place.ptr.into(), meta.into()),
+            None => Immediate::Scalar(place.ptr.into()),
+            Some(meta) => Immediate::ScalarPair(place.ptr.into(), meta.into()),
         })
     }
 
@@ -516,7 +510,7 @@ pub fn place_downcast(
             Place::Ptr(mplace) =>
                 self.mplace_downcast(MPlaceTy { mplace, layout: base.layout }, variant)?.into(),
             Place::Local { .. } => {
-                let layout = base.layout.for_variant(&self, variant);
+                let layout = base.layout.for_variant(self, variant);
                 PlaceTy { layout, ..base }
             }
         })
@@ -635,17 +629,17 @@ pub fn write_scalar(
         val: impl Into<ScalarMaybeUndef<M::PointerTag>>,
         dest: PlaceTy<'tcx, M::PointerTag>,
     ) -> EvalResult<'tcx> {
-        self.write_value(Value::Scalar(val.into()), dest)
+        self.write_immediate(Immediate::Scalar(val.into()), dest)
     }
 
-    /// Write a value to a place
+    /// Write an immediate to a place
     #[inline(always)]
-    pub fn write_value(
+    pub fn write_immediate(
         &mut self,
-        src_val: Value<M::PointerTag>,
+        src: Immediate<M::PointerTag>,
         dest: PlaceTy<'tcx, M::PointerTag>,
     ) -> EvalResult<'tcx> {
-        self.write_value_no_validate(src_val, dest)?;
+        self.write_immediate_no_validate(src, dest)?;
 
         if M::enforce_validity(self) {
             // Data got changed, better make sure it matches the type!
@@ -655,40 +649,40 @@ pub fn write_value(
         Ok(())
     }
 
-    /// Write a value to a place.
+    /// Write an immediate to a place.
     /// If you use this you are responsible for validating that things got copied at the
     /// right type.
-    fn write_value_no_validate(
+    fn write_immediate_no_validate(
         &mut self,
-        src_val: Value<M::PointerTag>,
+        src: Immediate<M::PointerTag>,
         dest: PlaceTy<'tcx, M::PointerTag>,
     ) -> EvalResult<'tcx> {
         if cfg!(debug_assertions) {
             // This is a very common path, avoid some checks in release mode
             assert!(!dest.layout.is_unsized(), "Cannot write unsized data");
-            match src_val {
-                Value::Scalar(ScalarMaybeUndef::Scalar(Scalar::Ptr(_))) =>
+            match src {
+                Immediate::Scalar(ScalarMaybeUndef::Scalar(Scalar::Ptr(_))) =>
                     assert_eq!(self.pointer_size(), dest.layout.size,
                         "Size mismatch when writing pointer"),
-                Value::Scalar(ScalarMaybeUndef::Scalar(Scalar::Bits { size, .. })) =>
+                Immediate::Scalar(ScalarMaybeUndef::Scalar(Scalar::Bits { size, .. })) =>
                     assert_eq!(Size::from_bytes(size.into()), dest.layout.size,
                         "Size mismatch when writing bits"),
-                Value::Scalar(ScalarMaybeUndef::Undef) => {}, // undef can have any size
-                Value::ScalarPair(_, _) => {
+                Immediate::Scalar(ScalarMaybeUndef::Undef) => {}, // undef can have any size
+                Immediate::ScalarPair(_, _) => {
                     // FIXME: Can we check anything here?
                 }
             }
         }
-        trace!("write_value: {:?} <- {:?}: {}", *dest, src_val, dest.layout.ty);
+        trace!("write_immediate: {:?} <- {:?}: {}", *dest, src, dest.layout.ty);
 
-        // See if we can avoid an allocation. This is the counterpart to `try_read_value`,
+        // See if we can avoid an allocation. This is the counterpart to `try_read_immediate`,
         // but not factored as a separate function.
         let mplace = match dest.place {
             Place::Local { frame, local } => {
                 match *self.stack[frame].locals[local].access_mut()? {
                     Operand::Immediate(ref mut dest_val) => {
                         // Yay, we can just change the local directly.
-                        *dest_val = src_val;
+                        *dest_val = src;
                         return Ok(());
                     },
                     Operand::Indirect(mplace) => mplace, // already in memory
@@ -699,15 +693,15 @@ fn write_value_no_validate(
         let dest = MPlaceTy { mplace, layout: dest.layout };
 
         // This is already in memory, write there.
-        self.write_value_to_mplace_no_validate(src_val, dest)
+        self.write_immediate_to_mplace_no_validate(src, dest)
     }
 
-    /// Write a value to memory.
+    /// Write an immediate to memory.
     /// If you use this you are responsible for validating that things got copied at the
     /// right type.
-    fn write_value_to_mplace_no_validate(
+    fn write_immediate_to_mplace_no_validate(
         &mut self,
-        value: Value<M::PointerTag>,
+        value: Immediate<M::PointerTag>,
         dest: MPlaceTy<'tcx, M::PointerTag>,
     ) -> EvalResult<'tcx> {
         let (ptr, ptr_align) = dest.to_scalar_ptr_align();
@@ -727,10 +721,10 @@ fn write_value_to_mplace_no_validate(
         // memory.  The code below is not sufficient, with enough padding it might not
         // cover all the bytes!
         match value {
-            Value::Scalar(scalar) => {
+            Immediate::Scalar(scalar) => {
                 match dest.layout.abi {
                     layout::Abi::Scalar(_) => {}, // fine
-                    _ => bug!("write_value_to_mplace: invalid Scalar layout: {:#?}",
+                    _ => bug!("write_immediate_to_mplace: invalid Scalar layout: {:#?}",
                             dest.layout)
                 }
 
@@ -738,16 +732,16 @@ fn write_value_to_mplace_no_validate(
                     ptr, ptr_align.min(dest.layout.align), scalar, dest.layout.size
                 )
             }
-            Value::ScalarPair(a_val, b_val) => {
+            Immediate::ScalarPair(a_val, b_val) => {
                 let (a, b) = match dest.layout.abi {
                     layout::Abi::ScalarPair(ref a, ref b) => (&a.value, &b.value),
-                    _ => bug!("write_value_to_mplace: invalid ScalarPair layout: {:#?}",
+                    _ => bug!("write_immediate_to_mplace: invalid ScalarPair layout: {:#?}",
                               dest.layout)
                 };
-                let (a_size, b_size) = (a.size(&self), b.size(&self));
-                let (a_align, b_align) = (a.align(&self), b.align(&self));
+                let (a_size, b_size) = (a.size(self), b.size(self));
+                let (a_align, b_align) = (a.align(self), b.align(self));
                 let b_offset = a_size.abi_align(b_align);
-                let b_ptr = ptr.offset(b_offset, &self)?.into();
+                let b_ptr = ptr.offset(b_offset, self)?.into();
 
                 // It is tempting to verify `b_offset` against `layout.fields.offset(1)`,
                 // but that does not work: We could be a newtype around a pair, then the
@@ -794,10 +788,10 @@ fn copy_op_no_validate(
             "Layout mismatch when copying!\nsrc: {:#?}\ndest: {:#?}", src, dest);
 
         // Let us see if the layout is simple so we take a shortcut, avoid force_allocation.
-        let src = match self.try_read_value(src)? {
+        let src = match self.try_read_immediate(src)? {
             Ok(src_val) => {
                 // Yay, we got a value that we can write directly.
-                return self.write_value_no_validate(src_val, dest);
+                return self.write_immediate_no_validate(src_val, dest);
             }
             Err(mplace) => mplace,
         };
@@ -879,7 +873,7 @@ pub fn force_allocation(
                         let ptr = self.allocate(local_layout, MemoryKind::Stack)?;
                         // We don't have to validate as we can assume the local
                         // was already valid for its type.
-                        self.write_value_to_mplace_no_validate(value, ptr)?;
+                        self.write_immediate_to_mplace_no_validate(value, ptr)?;
                         let mplace = ptr.mplace;
                         // Update the local
                         *self.stack[frame].locals[local].access_mut()? =
@@ -902,7 +896,7 @@ pub fn allocate(
         if layout.is_unsized() {
             assert!(self.tcx.features().unsized_locals, "cannot alloc memory for unsized type");
             // FIXME: What should we do here? We should definitely also tag!
-            Ok(MPlaceTy::dangling(layout, &self))
+            Ok(MPlaceTy::dangling(layout, self))
         } else {
             let ptr = self.memory.allocate(layout.size, layout.align, kind)?;
             let ptr = M::tag_new_allocation(self, ptr, kind)?;
@@ -929,7 +923,7 @@ pub fn write_discriminant_index(
                 // raw discriminants for enums are isize or bigger during
                 // their computation, but the in-memory tag is the smallest possible
                 // representation
-                let size = tag.value.size(self.tcx.tcx);
+                let size = tag.value.size(self);
                 let shift = 128 - size.bits();
                 let discr_val = (discr_val << shift) >> shift;
 
index cff2288fd8720c3ac04cd97045a105d8617d8d29..0926ddb59665995aafc943f63d40e256fd205ee4 100644 (file)
@@ -24,7 +24,7 @@
 use syntax::source_map::Span;
 
 use super::eval_context::{LocalValue, StackPopCleanup};
-use super::{Frame, Memory, Operand, MemPlace, Place, Value, ScalarMaybeUndef};
+use super::{Frame, Memory, Operand, MemPlace, Place, Immediate, ScalarMaybeUndef};
 use const_eval::CompileTimeInterpreter;
 
 #[derive(Default)]
@@ -237,11 +237,11 @@ fn snapshot(&self, ctx: &'a Ctx) -> Self::Item {
     }
 }
 
-impl_stable_hash_for!(enum ::interpret::Value {
+impl_stable_hash_for!(enum ::interpret::Immediate {
     Scalar(x),
     ScalarPair(x, y),
 });
-impl_snapshot_for!(enum Value {
+impl_snapshot_for!(enum Immediate {
     Scalar(s),
     ScalarPair(s, t),
 });
index 1bab536e3e0f079d2d21eac24984f5c3e1a80b36..db055204c0a1986600a83cc1a42b4b0f296d1bc4 100644 (file)
@@ -12,7 +12,7 @@
 //!
 //! The main entry point is the `step` method.
 
-use rustc::mir;
+use rustc::{hir, mir};
 use rustc::ty::layout::LayoutOf;
 use rustc::mir::interpret::{EvalResult, Scalar, PointerArithmetic};
 
@@ -118,11 +118,10 @@ fn statement(&mut self, stmt: &mir::Statement<'tcx>) -> EvalResult<'tcx> {
             // interpreter is solely intended for borrowck'ed code.
             FakeRead(..) => {}
 
-            // Validity checks.
-            Validate(op, ref places) => {
-                for operand in places {
-                    M::validation_op(self, op, operand)?;
-                }
+            // Retagging.
+            Retag { fn_entry, ref place } => {
+                let dest = self.eval_place(place)?;
+                M::retag(self, fn_entry, dest)?;
             }
 
             EndRegion(..) => {}
@@ -160,9 +159,9 @@ fn eval_rvalue_into_place(
 
             BinaryOp(bin_op, ref left, ref right) => {
                 let layout = if binop_left_homogeneous(bin_op) { Some(dest.layout) } else { None };
-                let left = self.read_value(self.eval_operand(left, layout)?)?;
+                let left = self.read_immediate(self.eval_operand(left, layout)?)?;
                 let layout = if binop_right_homogeneous(bin_op) { Some(left.layout) } else { None };
-                let right = self.read_value(self.eval_operand(right, layout)?)?;
+                let right = self.read_immediate(self.eval_operand(right, layout)?)?;
                 self.binop_ignore_overflow(
                     bin_op,
                     left,
@@ -173,9 +172,9 @@ fn eval_rvalue_into_place(
 
             CheckedBinaryOp(bin_op, ref left, ref right) => {
                 // Due to the extra boolean in the result, we can never reuse the `dest.layout`.
-                let left = self.read_value(self.eval_operand(left, None)?)?;
+                let left = self.read_immediate(self.eval_operand(left, None)?)?;
                 let layout = if binop_right_homogeneous(bin_op) { Some(left.layout) } else { None };
-                let right = self.read_value(self.eval_operand(right, layout)?)?;
+                let right = self.read_immediate(self.eval_operand(right, layout)?)?;
                 self.binop_with_overflow(
                     bin_op,
                     left,
@@ -186,7 +185,7 @@ fn eval_rvalue_into_place(
 
             UnaryOp(un_op, ref operand) => {
                 // The operand always has the same type as the result.
-                let val = self.read_value(self.eval_operand(operand, Some(dest.layout))?)?;
+                let val = self.read_immediate(self.eval_operand(operand, Some(dest.layout))?)?;
                 let val = self.unary_op(un_op, val.to_scalar()?, dest.layout)?;
                 self.write_scalar(val, dest)?;
             }
@@ -218,7 +217,7 @@ fn eval_rvalue_into_place(
             Repeat(ref operand, _) => {
                 let op = self.eval_operand(operand, None)?;
                 let dest = self.force_allocation(dest)?;
-                let length = dest.len(&self)?;
+                let length = dest.len(self)?;
 
                 if length > 0 {
                     // write the first
@@ -228,7 +227,7 @@ fn eval_rvalue_into_place(
                     if length > 1 {
                         // copy the rest
                         let (dest, dest_align) = first.to_scalar_ptr_align();
-                        let rest = dest.ptr_offset(first.layout.size, &self)?;
+                        let rest = dest.ptr_offset(first.layout.size, self)?;
                         self.memory.copy_repeatedly(
                             dest, dest_align, rest, dest_align, first.layout.size, length - 1, true
                         )?;
@@ -240,7 +239,7 @@ fn eval_rvalue_into_place(
                 // FIXME(CTFE): don't allow computing the length of arrays in const eval
                 let src = self.eval_place(place)?;
                 let mplace = self.force_allocation(src)?;
-                let len = mplace.len(&self)?;
+                let len = mplace.len(self)?;
                 let size = self.pointer_size();
                 self.write_scalar(
                     Scalar::from_uint(len, size),
@@ -251,8 +250,16 @@ fn eval_rvalue_into_place(
             Ref(_, borrow_kind, ref place) => {
                 let src = self.eval_place(place)?;
                 let val = self.force_allocation(src)?;
-                let val = self.create_ref(val, Some(borrow_kind))?;
-                self.write_value(val, dest)?;
+                let mutbl = match borrow_kind {
+                    mir::BorrowKind::Mut { .. } |
+                    mir::BorrowKind::Unique =>
+                        hir::MutMutable,
+                    mir::BorrowKind::Shared |
+                    mir::BorrowKind::Shallow =>
+                        hir::MutImmutable,
+                };
+                let val = self.create_ref(val, Some(mutbl))?;
+                self.write_immediate(val, dest)?;
             }
 
             NullaryOp(mir::NullOp::Box, _) => {
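
The `Ref` arm above now derives the reference's mutability from the MIR borrow kind before calling `create_ref`. A toy sketch of that mapping (standalone enums, not the rustc `mir`/`hir` types):

// Mutable and unique borrows produce a mutable reference; shared and
// shallow borrows produce an immutable one.
#[derive(Debug, PartialEq)]
enum BorrowKind { Shared, Shallow, Unique, Mut }

#[derive(Debug, PartialEq)]
enum Mutability { Mutable, Immutable }

fn ref_mutability(kind: &BorrowKind) -> Mutability {
    match kind {
        BorrowKind::Mut | BorrowKind::Unique => Mutability::Mutable,
        BorrowKind::Shared | BorrowKind::Shallow => Mutability::Immutable,
    }
}

fn main() {
    assert_eq!(ref_mutability(&BorrowKind::Mut), Mutability::Mutable);
    assert_eq!(ref_mutability(&BorrowKind::Unique), Mutability::Mutable);
    assert_eq!(ref_mutability(&BorrowKind::Shared), Mutability::Immutable);
    assert_eq!(ref_mutability(&BorrowKind::Shallow), Mutability::Immutable);
}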
index ee84e49022f1b9cfe6cafd12f93078dbe4ce4b00..b5df625b3028bb33d1a411384003db85d7680280 100644 (file)
@@ -17,7 +17,7 @@
 
 use rustc::mir::interpret::{EvalResult, PointerArithmetic, EvalErrorKind, Scalar};
 use super::{
-    EvalContext, Machine, Value, OpTy, PlaceTy, MPlaceTy, Operand, StackPopCleanup
+    EvalContext, Machine, Immediate, OpTy, PlaceTy, MPlaceTy, Operand, StackPopCleanup
 };
 
 impl<'a, 'mir, 'tcx, M: Machine<'a, 'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M> {
@@ -51,7 +51,7 @@ pub(super) fn eval_terminator(
                 ref targets,
                 ..
             } => {
-                let discr = self.read_value(self.eval_operand(discr, None)?)?;
+                let discr = self.read_immediate(self.eval_operand(discr, None)?)?;
                 trace!("SwitchInt({:?})", *discr);
 
                 // Branch to the `otherwise` case by default, if no match is found.
@@ -138,7 +138,7 @@ pub(super) fn eval_terminator(
                 target,
                 ..
             } => {
-                let cond_val = self.read_value(self.eval_operand(cond, None)?)?
+                let cond_val = self.read_immediate(self.eval_operand(cond, None)?)?
                     .to_scalar()?.to_bool()?;
                 if expected == cond_val {
                     self.goto_block(Some(target))?;
@@ -147,10 +147,10 @@ pub(super) fn eval_terminator(
                     use rustc::mir::interpret::EvalErrorKind::*;
                     return match *msg {
                         BoundsCheck { ref len, ref index } => {
-                            let len = self.read_value(self.eval_operand(len, None)?)
+                            let len = self.read_immediate(self.eval_operand(len, None)?)
                                 .expect("can't eval len").to_scalar()?
                                 .to_bits(self.memory().pointer_size())? as u64;
-                            let index = self.read_value(self.eval_operand(index, None)?)
+                            let index = self.read_immediate(self.eval_operand(index, None)?)
                                 .expect("can't eval index").to_scalar()?
                                 .to_bits(self.memory().pointer_size())? as u64;
                             err!(BoundsCheck { len, index })
@@ -402,10 +402,10 @@ fn eval_fn_call(
             ty::InstanceDef::Virtual(_, idx) => {
                 let ptr_size = self.pointer_size();
                 let ptr_align = self.tcx.data_layout.pointer_align;
-                let ptr = self.ref_to_mplace(self.read_value(args[0])?)?;
+                let ptr = self.ref_to_mplace(self.read_immediate(args[0])?)?;
                 let vtable = ptr.vtable()?;
                 let fn_ptr = self.memory.read_ptr_sized(
-                    vtable.offset(ptr_size * (idx as u64 + 3), &self)?,
+                    vtable.offset(ptr_size * (idx as u64 + 3), self)?,
                     ptr_align
                 )?.to_ptr()?;
                 let instance = self.memory.get_fn(fn_ptr)?;
@@ -416,8 +416,8 @@ fn eval_fn_call(
                 let mut args = args.to_vec();
                 let pointee = args[0].layout.ty.builtin_deref(true).unwrap().ty;
                 let fake_fat_ptr_ty = self.tcx.mk_mut_ptr(pointee);
-                args[0].layout = self.layout_of(fake_fat_ptr_ty)?.field(&self, 0)?;
-                args[0].op = Operand::Immediate(Value::Scalar(ptr.ptr.into())); // strip vtable
+                args[0].layout = self.layout_of(fake_fat_ptr_ty)?.field(self, 0)?;
+                args[0].op = Operand::Immediate(Immediate::Scalar(ptr.ptr.into())); // strip vtable
                 trace!("Patched self operand to {:#?}", args[0]);
                 // recurse with concrete function
                 self.eval_fn_call(instance, span, caller_abi, &args, dest, ret)
@@ -455,7 +455,7 @@ fn drop_in_place(
         };
 
         let ty = self.tcx.mk_unit(); // return type is ()
-        let dest = MPlaceTy::dangling(self.layout_of(ty)?, &self);
+        let dest = MPlaceTy::dangling(self.layout_of(ty)?, self);
 
         self.eval_fn_call(
             instance,
index c189ec0ca5c7717231defc15f5b7d33678c8a1b5..c5366a5ce6a4d472fca82937378a7cae20b00c4e 100644 (file)
@@ -60,9 +60,9 @@ pub fn get_vtable(
         let drop = self.memory.create_fn_alloc(drop).with_default_tag();
         self.memory.write_ptr_sized(vtable, ptr_align, Scalar::Ptr(drop).into())?;
 
-        let size_ptr = vtable.offset(ptr_size, &self)?;
+        let size_ptr = vtable.offset(ptr_size, self)?;
         self.memory.write_ptr_sized(size_ptr, ptr_align, Scalar::from_uint(size, ptr_size).into())?;
-        let align_ptr = vtable.offset(ptr_size * 2, &self)?;
+        let align_ptr = vtable.offset(ptr_size * 2, self)?;
         self.memory.write_ptr_sized(align_ptr, ptr_align,
             Scalar::from_uint(align, ptr_size).into())?;
 
@@ -70,7 +70,7 @@ pub fn get_vtable(
             if let Some((def_id, substs)) = *method {
                 let instance = self.resolve(def_id, substs)?;
                 let fn_ptr = self.memory.create_fn_alloc(instance).with_default_tag();
-                let method_ptr = vtable.offset(ptr_size * (3 + i as u64), &self)?;
+                let method_ptr = vtable.offset(ptr_size * (3 + i as u64), self)?;
                 self.memory.write_ptr_sized(method_ptr, ptr_align, Scalar::Ptr(fn_ptr).into())?;
             }
         }
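
The offsets written by `get_vtable` fix the vtable layout: slot 0 is the drop glue, slot 1 the size, slot 2 the alignment, and trait methods start at slot 3, which is also why the virtual-call path earlier reads from `ptr_size * (idx + 3)`. A small sketch of that offset arithmetic, assuming an 8-byte pointer:

// Byte offsets of vtable slots as laid out above:
// [drop glue][size][align][method 0][method 1]...
fn vtable_slot_offset(ptr_size: u64, slot: u64) -> u64 {
    ptr_size * slot
}

fn method_slot_offset(ptr_size: u64, method_index: u64) -> u64 {
    // trait methods start after the three header slots
    vtable_slot_offset(ptr_size, 3 + method_index)
}

fn main() {
    let ptr_size = 8; // assumed 64-bit target
    assert_eq!(vtable_slot_offset(ptr_size, 1), 8);   // size slot
    assert_eq!(vtable_slot_offset(ptr_size, 2), 16);  // align slot
    assert_eq!(method_slot_offset(ptr_size, 0), 24);  // first trait method
}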
index 9d86e737dd5b6aa8acd953ce895f6f055beb1896..4dbae3c8c3d280ab465ff57318951287f7b74b7b 100644 (file)
@@ -20,7 +20,7 @@
 };
 
 use super::{
-    ValTy, OpTy, MPlaceTy, Machine, EvalContext, ScalarMaybeUndef
+    ImmTy, OpTy, MPlaceTy, Machine, EvalContext, ScalarMaybeUndef
 };
 
 macro_rules! validation_failure {
@@ -144,7 +144,7 @@ impl<'a, 'mir, 'tcx, M: Machine<'a, 'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M>
     /// Make sure that `value` is valid for `ty`, *assuming* `ty` is a primitive type.
     fn validate_primitive_type(
         &self,
-        value: ValTy<'tcx, M::PointerTag>,
+        value: ImmTy<'tcx, M::PointerTag>,
         path: &Vec<PathElem>,
         ref_tracking: Option<&mut RefTracking<'tcx, M::PointerTag>>,
         const_mode: bool,
@@ -408,7 +408,7 @@ pub fn validate_operand(
                             ),
                         _ =>
                             return validation_failure!(
-                                format!("non-integer enum discriminant"), path
+                                String::from("non-integer enum discriminant"), path
                             ),
                     }
                 };
@@ -465,7 +465,7 @@ pub fn validate_operand(
             _ => dest.layout.ty.builtin_deref(true).is_some(),
         };
         if primitive {
-            let value = try_validation!(self.read_value(dest),
+            let value = try_validation!(self.read_immediate(dest),
                 "uninitialized or unrepresentable data", path);
             return self.validate_primitive_type(
                 value,
index aaa97e337265331ca88cea0774cf109902206553..1a35f4da20bf158a1fddab082043edcf30e28cc0 100644 (file)
@@ -16,7 +16,6 @@
 
 #![feature(nll)]
 #![feature(in_band_lifetimes)]
-#![cfg_attr(stage0, feature(impl_header_lifetime_elision))]
 #![feature(slice_patterns)]
 #![feature(slice_sort_by_cached_key)]
 #![feature(box_patterns)]
@@ -94,6 +93,7 @@ pub fn provide(providers: &mut Providers) {
     borrow_check::provide(providers);
     shim::provide(providers);
     transform::provide(providers);
+    monomorphize::partitioning::provide(providers);
     providers.const_eval = const_eval::const_eval_provider;
     providers.const_eval_raw = const_eval::const_eval_raw_provider;
     providers.check_match = hair::pattern::check_match;
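
The added `monomorphize::partitioning::provide(providers)` call registers the queries defined later in this commit with the crate-wide provider table. A toy sketch of that registration pattern (standalone types, not the rustc query machinery):

// Each module exposes a `provide` function that fills its slots in a
// shared Providers table.
struct Providers {
    collect_and_partition_mono_items: fn() -> &'static str,
}

mod partitioning {
    pub fn provide(providers: &mut super::Providers) {
        providers.collect_and_partition_mono_items = || "partitioned";
    }
}

fn main() {
    let mut providers = Providers {
        collect_and_partition_mono_items: || "unset",
    };
    partitioning::provide(&mut providers);
    println!("{}", (providers.collect_and_partition_mono_items)());
}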
index 8c6966691328a44029a83cf9378dec343482d07d..8e27635dee8c13868c3d5e9a04cd1bf3289a83f0 100644 (file)
@@ -905,12 +905,12 @@ fn create_mono_items_for_vtable_methods<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
                                                   trait_ty: Ty<'tcx>,
                                                   impl_ty: Ty<'tcx>,
                                                   output: &mut Vec<MonoItem<'tcx>>) {
-    assert!(!trait_ty.needs_subst() && !trait_ty.has_escaping_regions() &&
-            !impl_ty.needs_subst() && !impl_ty.has_escaping_regions());
+    assert!(!trait_ty.needs_subst() && !trait_ty.has_escaping_bound_vars() &&
+            !impl_ty.needs_subst() && !impl_ty.has_escaping_bound_vars());
 
     if let ty::Dynamic(ref trait_ty, ..) = trait_ty.sty {
         let poly_trait_ref = trait_ty.principal().with_self_ty(tcx, impl_ty);
-        assert!(!poly_trait_ref.has_escaping_regions());
+        assert!(!poly_trait_ref.has_escaping_bound_vars());
 
         // Walk all methods of the trait, including those of its supertraits
         let methods = tcx.vtable_methods(poly_trait_ref);
@@ -1082,7 +1082,7 @@ fn push_extra_entry_roots(&mut self) {
         // regions must appear in the argument
         // listing.
         let main_ret_ty = self.tcx.erase_regions(
-            &main_ret_ty.no_late_bound_regions().unwrap(),
+            &main_ret_ty.no_bound_vars().unwrap(),
         );
 
         let start_instance = Instance::resolve(
index 4c4d56c8938386af83b716168b4c2ea82f12f9cf..9d69a5669b1c0a85cd8971579b93c849d359371a 100644 (file)
@@ -382,6 +382,7 @@ pub fn push_type_name(&self, t: Ty<'tcx>, output: &mut String) {
                 self.push_type_params(substs, iter::empty(), output);
             }
             ty::Error |
+            ty::Bound(..) |
             ty::Infer(_) |
             ty::UnnormalizedProjection(..) |
             ty::Projection(..) |
index f0a35ca7adbd21a02f15b97316bab98d80988dc8..6dba020120f848385ac0a3bcb5a1cd39f818bcd1 100644 (file)
 //! source-level module, functions from the same module will be available for
 //! inlining, even when they are not marked #[inline].
 
-use monomorphize::collector::InliningMap;
+use std::collections::hash_map::Entry;
+use std::cmp;
+use std::sync::Arc;
+
+use syntax::ast::NodeId;
+use syntax::symbol::InternedString;
 use rustc::dep_graph::{WorkProductId, WorkProduct, DepNode, DepConstructor};
 use rustc::hir::CodegenFnAttrFlags;
-use rustc::hir::def_id::{DefId, LOCAL_CRATE, CRATE_DEF_INDEX};
+use rustc::hir::def_id::{CrateNum, DefId, LOCAL_CRATE, CRATE_DEF_INDEX};
 use rustc::hir::map::DefPathData;
 use rustc::mir::mono::{Linkage, Visibility, CodegenUnitNameBuilder};
 use rustc::middle::exported_symbols::SymbolExportLevel;
 use rustc::ty::{self, TyCtxt, InstanceDef};
 use rustc::ty::item_path::characteristic_def_id_of_type;
-use rustc::util::nodemap::{FxHashMap, FxHashSet};
-use std::collections::hash_map::Entry;
-use std::cmp;
-use syntax::ast::NodeId;
-use syntax::symbol::InternedString;
+use rustc::ty::query::Providers;
+use rustc::util::common::time;
+use rustc::util::nodemap::{DefIdSet, FxHashMap, FxHashSet};
 use rustc::mir::mono::MonoItem;
+
+use monomorphize::collector::InliningMap;
+use monomorphize::collector::{self, MonoItemCollectionMode};
 use monomorphize::item::{MonoItemExt, InstantiationMode};
 
 pub use rustc::mir::mono::CodegenUnit;
@@ -505,7 +511,7 @@ fn mono_item_visibility(
         //
         // * First is weak lang items. These are basically mechanisms for
         //   libcore to forward-reference symbols defined later in crates like
-        //   the standard library or `#[panic_implementation]` definitions. The
+        //   the standard library or `#[panic_handler]` definitions. The
         //   definition of these weak lang items needs to be referenceable by
         //   libcore, so we're no longer a candidate for internalization.
         //   Removal of these functions can't be done by LLVM but rather must be
@@ -892,3 +898,146 @@ fn debug_dump<'a, 'b, 'tcx, I>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
         }
     }
 }
+
+fn collect_and_partition_mono_items<'a, 'tcx>(
+    tcx: TyCtxt<'a, 'tcx, 'tcx>,
+    cnum: CrateNum,
+) -> (Arc<DefIdSet>, Arc<Vec<Arc<CodegenUnit<'tcx>>>>)
+{
+    assert_eq!(cnum, LOCAL_CRATE);
+
+    let collection_mode = match tcx.sess.opts.debugging_opts.print_mono_items {
+        Some(ref s) => {
+            let mode_string = s.to_lowercase();
+            let mode_string = mode_string.trim();
+            if mode_string == "eager" {
+                MonoItemCollectionMode::Eager
+            } else {
+                if mode_string != "lazy" {
+                    let message = format!("Unknown codegen-item collection mode '{}'. \
+                                           Falling back to 'lazy' mode.",
+                                          mode_string);
+                    tcx.sess.warn(&message);
+                }
+
+                MonoItemCollectionMode::Lazy
+            }
+        }
+        None => {
+            if tcx.sess.opts.cg.link_dead_code {
+                MonoItemCollectionMode::Eager
+            } else {
+                MonoItemCollectionMode::Lazy
+            }
+        }
+    };
+
+    let (items, inlining_map) =
+        time(tcx.sess, "monomorphization collection", || {
+            collector::collect_crate_mono_items(tcx, collection_mode)
+    });
+
+    tcx.sess.abort_if_errors();
+
+    ::monomorphize::assert_symbols_are_distinct(tcx, items.iter());
+
+    let strategy = if tcx.sess.opts.incremental.is_some() {
+        PartitioningStrategy::PerModule
+    } else {
+        PartitioningStrategy::FixedUnitCount(tcx.sess.codegen_units())
+    };
+
+    let codegen_units = time(tcx.sess, "codegen unit partitioning", || {
+        partition(
+            tcx,
+            items.iter().cloned(),
+            strategy,
+            &inlining_map
+        )
+            .into_iter()
+            .map(Arc::new)
+            .collect::<Vec<_>>()
+    });
+
+    let mono_items: DefIdSet = items.iter().filter_map(|mono_item| {
+        match *mono_item {
+            MonoItem::Fn(ref instance) => Some(instance.def_id()),
+            MonoItem::Static(def_id) => Some(def_id),
+            _ => None,
+        }
+    }).collect();
+
+    if tcx.sess.opts.debugging_opts.print_mono_items.is_some() {
+        let mut item_to_cgus: FxHashMap<_, Vec<_>> = Default::default();
+
+        for cgu in &codegen_units {
+            for (&mono_item, &linkage) in cgu.items() {
+                item_to_cgus.entry(mono_item)
+                            .or_default()
+                            .push((cgu.name().clone(), linkage));
+            }
+        }
+
+        let mut item_keys: Vec<_> = items
+            .iter()
+            .map(|i| {
+                let mut output = i.to_string(tcx);
+                output.push_str(" @@");
+                let mut empty = Vec::new();
+                let cgus = item_to_cgus.get_mut(i).unwrap_or(&mut empty);
+                cgus.as_mut_slice().sort_by_key(|&(ref name, _)| name.clone());
+                cgus.dedup();
+                for &(ref cgu_name, (linkage, _)) in cgus.iter() {
+                    output.push_str(" ");
+                    output.push_str(&cgu_name.as_str());
+
+                    let linkage_abbrev = match linkage {
+                        Linkage::External => "External",
+                        Linkage::AvailableExternally => "Available",
+                        Linkage::LinkOnceAny => "OnceAny",
+                        Linkage::LinkOnceODR => "OnceODR",
+                        Linkage::WeakAny => "WeakAny",
+                        Linkage::WeakODR => "WeakODR",
+                        Linkage::Appending => "Appending",
+                        Linkage::Internal => "Internal",
+                        Linkage::Private => "Private",
+                        Linkage::ExternalWeak => "ExternalWeak",
+                        Linkage::Common => "Common",
+                    };
+
+                    output.push_str("[");
+                    output.push_str(linkage_abbrev);
+                    output.push_str("]");
+                }
+                output
+            })
+            .collect();
+
+        item_keys.sort();
+
+        for item in item_keys {
+            println!("MONO_ITEM {}", item);
+        }
+    }
+
+    (Arc::new(mono_items), Arc::new(codegen_units))
+}
+
+pub fn provide(providers: &mut Providers) {
+    providers.collect_and_partition_mono_items =
+        collect_and_partition_mono_items;
+
+    providers.is_codegened_item = |tcx, def_id| {
+        let (all_mono_items, _) =
+            tcx.collect_and_partition_mono_items(LOCAL_CRATE);
+        all_mono_items.contains(&def_id)
+    };
+
+    providers.codegen_unit = |tcx, name| {
+        let (_, all) = tcx.collect_and_partition_mono_items(LOCAL_CRATE);
+        all.iter()
+            .find(|cgu| *cgu.name() == name)
+            .cloned()
+            .unwrap_or_else(|| panic!("failed to find cgu with name {:?}", name))
+    };
+}
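
When the `print_mono_items` debugging option is set, the code above emits one `MONO_ITEM` line per item, listing each codegen unit it was assigned to together with an abbreviated linkage. A sketch of that line format (hypothetical item and CGU names):

// Builds a string in the same shape the function above prints:
// "MONO_ITEM <item> @@ <cgu>[<linkage>] <cgu>[<linkage>] ..."
fn mono_item_line(item: &str, cgus: &[(&str, &str)]) -> String {
    let mut output = String::from("MONO_ITEM ");
    output.push_str(item);
    output.push_str(" @@");
    for &(cgu_name, linkage_abbrev) in cgus {
        output.push(' ');
        output.push_str(cgu_name);
        output.push('[');
        output.push_str(linkage_abbrev);
        output.push(']');
    }
    output
}

fn main() {
    // Hypothetical names, for illustration only.
    println!("{}", mono_item_line("fn example::helper", &[("example-cgu.0", "Internal")]));
}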
index 7061504cd0ae74b7708565116e645667d2dca047..76a8501fb177a89c4375e3c99fa175a8da097aaf 100644 (file)
@@ -844,7 +844,9 @@ pub fn build_adt_ctor<'a, 'gcx, 'tcx>(infcx: &infer::InferCtxt<'a, 'gcx, 'tcx>,
     let param_env = gcx.param_env(def_id);
 
     // Normalize the sig.
-    let sig = gcx.fn_sig(def_id).no_late_bound_regions().expect("LBR in ADT constructor signature");
+    let sig = gcx.fn_sig(def_id)
+        .no_bound_vars()
+        .expect("LBR in ADT constructor signature");
     let sig = gcx.normalize_erasing_regions(param_env, sig);
 
     let (adt_def, substs) = match sig.output().sty {
diff --git a/src/librustc_mir/transform/add_retag.rs b/src/librustc_mir/transform/add_retag.rs
new file mode 100644 (file)
index 0000000..a50011c
--- /dev/null
@@ -0,0 +1,174 @@
+// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+//! This pass adds `Retag` statements where appropriate.
+//! It has to be run really early, before transformations like inlining, because
+//! introducing these statements *adds* UB -- so, conceptually, this pass is actually
+//! part of MIR building, and only after this pass do we think of the program as having
+//! the normal MIR semantics.
+
+use rustc::ty::{self, Ty, TyCtxt};
+use rustc::mir::*;
+use transform::{MirPass, MirSource};
+
+pub struct AddRetag;
+
+/// Determines whether this place is local: If it is part of a local variable.
+/// We do not consider writes to pointers local, only writes that immediately assign
+/// to a local variable.
+/// One important property here is that evaluating the place immediately after
+/// the assignment must produce the same place as what was used during the assignment.
+fn is_local<'tcx>(
+    place: &Place<'tcx>,
+) -> bool {
+    use rustc::mir::Place::*;
+
+    match *place {
+        Local { .. } => true,
+        Promoted(_) |
+        Static(_) => false,
+        Projection(ref proj) => {
+            match proj.elem {
+                ProjectionElem::Deref |
+                ProjectionElem::Index(_) =>
+                    // Which place these point to depends on external circumstances
+                    // (a local storing the array index, the current value of
+                    // the projection base), so we stop tracking here.
+                    false,
+                ProjectionElem::Field { .. } |
+                ProjectionElem::ConstantIndex { .. } |
+                ProjectionElem::Subslice { .. } |
+                ProjectionElem::Downcast { .. } =>
+                    // These just offset by a constant, entirely independent of everything else.
+                    is_local(&proj.base),
+            }
+        }
+    }
+}
+
+/// Determine whether this type has a reference in it, recursing below compound types but
+/// not below references.
+fn has_reference<'a, 'gcx, 'tcx>(ty: Ty<'tcx>, tcx: TyCtxt<'a, 'gcx, 'tcx>) -> bool {
+    match ty.sty {
+        // Primitive types that are not references
+        ty::Bool | ty::Char |
+        ty::Float(_) | ty::Int(_) | ty::Uint(_) |
+        ty::RawPtr(..) | ty::FnPtr(..) |
+        ty::Str | ty::FnDef(..) | ty::Never =>
+            false,
+        // References
+        ty::Ref(..) => true,
+        ty::Adt(..) if ty.is_box() => true,
+        // Compound types
+        ty::Array(ty, ..) | ty::Slice(ty) =>
+            has_reference(ty, tcx),
+        ty::Tuple(tys) =>
+            tys.iter().any(|ty| has_reference(ty, tcx)),
+        ty::Adt(adt, substs) =>
+            adt.variants.iter().any(|v| v.fields.iter().any(|f|
+                has_reference(f.ty(tcx, substs), tcx)
+            )),
+        // Conservative fallback
+        _ => true,
+    }
+}
+
+impl MirPass for AddRetag {
+    fn run_pass<'a, 'tcx>(&self,
+                          tcx: TyCtxt<'a, 'tcx, 'tcx>,
+                          _src: MirSource,
+                          mir: &mut Mir<'tcx>)
+    {
+        if !tcx.sess.opts.debugging_opts.mir_emit_retag {
+            return;
+        }
+        let (span, arg_count) = (mir.span, mir.arg_count);
+        let (basic_blocks, local_decls) = mir.basic_blocks_and_local_decls_mut();
+        let needs_retag = |place: &Place<'tcx>| {
+            is_local(place) && has_reference(place.ty(&*local_decls, tcx).to_ty(tcx), tcx)
+        };
+
+        // PART 1
+        // Retag arguments at the beginning of the start block.
+        {
+            let source_info = SourceInfo {
+                scope: OUTERMOST_SOURCE_SCOPE,
+                span: span, // FIXME: Consider using just the span covering the function
+                            // argument declaration.
+            };
+            // Gather all arguments, skip return value.
+            let places = local_decls.iter_enumerated().skip(1).take(arg_count)
+                    .map(|(local, _)| Place::Local(local))
+                    .filter(needs_retag)
+                    .collect::<Vec<_>>();
+            // Emit their retags.
+            basic_blocks[START_BLOCK].statements.splice(0..0,
+                places.into_iter().map(|place| Statement {
+                    source_info,
+                    kind: StatementKind::Retag { fn_entry: true, place },
+                })
+            );
+        }
+
+        // PART 2
+        // Retag return values of functions.
+        // We collect the return destinations because we cannot mutate while iterating.
+        let mut returns: Vec<(SourceInfo, Place<'tcx>, BasicBlock)> = Vec::new();
+        for block_data in basic_blocks.iter_mut() {
+            match block_data.terminator {
+                Some(Terminator { kind: TerminatorKind::Call { ref destination, .. },
+                                  source_info }) => {
+                    // Remember the return destination for later
+                    if let Some(ref destination) = destination {
+                        if needs_retag(&destination.0) {
+                            returns.push((source_info, destination.0.clone(), destination.1));
+                        }
+                    }
+                }
+                _ => {
+                    // Not a block ending in a Call -> ignore.
+                    // `Drop` is also a call, but it doesn't return anything so we are good.
+                }
+            }
+        }
+        // Now we go over the returns we collected to retag the return values.
+        for (source_info, dest_place, dest_block) in returns {
+            basic_blocks[dest_block].statements.insert(0, Statement {
+                source_info,
+                kind: StatementKind::Retag { fn_entry: false, place: dest_place },
+            });
+        }
+
+        // PART 3
+        // Add retag after assignment.
+        for block_data in basic_blocks {
+            // We want to insert statements as we iterate.  To this end, we
+            // iterate backwards using indices.
+            for i in (0..block_data.statements.len()).rev() {
+                match block_data.statements[i].kind {
+                    // Assignments can make values obtained elsewhere "local".
+                    // We could try to be smart here and e.g. only retag if the assignment
+                    // loaded from memory, but that seems risky: We might miss a subtle corner
+                    // case.
+                    StatementKind::Assign(ref place, box Rvalue::Use(..))
+                    if needs_retag(place) => {
+                        // Insert a retag after the assignment.
+                        let source_info = block_data.statements[i].source_info;
+                        block_data.statements.insert(i+1,Statement {
+                            source_info,
+                            kind: StatementKind::Retag { fn_entry: false, place: place.clone() },
+                        });
+                    }
+                    _ => {},
+                }
+            }
+        }
+    }
+}
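
The pass only runs when `debugging_opts.mir_emit_retag` is set, and PART 3 deliberately iterates statement indices in reverse so that inserting at `i + 1` never shifts the indices still to be visited. A standalone sketch of that insert-while-iterating pattern:

// Insert a new element after every match, walking indices backwards so the
// insertions do not move the positions we still have to examine.
fn insert_after_matches<T>(
    items: &mut Vec<T>,
    is_match: impl Fn(&T) -> bool,
    make_new: impl Fn(&T) -> T,
) {
    for i in (0..items.len()).rev() {
        if is_match(&items[i]) {
            let new = make_new(&items[i]);
            items.insert(i + 1, new);
        }
    }
}

fn main() {
    let mut v = vec![1, 2, 2, 3];
    insert_after_matches(&mut v, |&x| x == 2, |&x| x * 10);
    assert_eq!(v, vec![1, 2, 20, 2, 20, 3]);
}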
diff --git a/src/librustc_mir/transform/add_validation.rs b/src/librustc_mir/transform/add_validation.rs
deleted file mode 100644 (file)
index 5b489b5..0000000
+++ /dev/null
@@ -1,395 +0,0 @@
-// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-//! This pass adds validation calls (AcquireValid, ReleaseValid) where appropriate.
-//! It has to be run really early, before transformations like inlining, because
-//! introducing these calls *adds* UB -- so, conceptually, this pass is actually part
-//! of MIR building, and only after this pass we think of the program has having the
-//! normal MIR semantics.
-
-use rustc::ty::{self, TyCtxt, RegionKind};
-use rustc::hir;
-use rustc::mir::*;
-use rustc::middle::region;
-use transform::{MirPass, MirSource};
-
-pub struct AddValidation;
-
-/// Determine the "context" of the place: Mutability and region.
-fn place_context<'a, 'tcx, D>(
-    place: &Place<'tcx>,
-    local_decls: &D,
-    tcx: TyCtxt<'a, 'tcx, 'tcx>
-) -> (Option<region::Scope>, hir::Mutability)
-    where D: HasLocalDecls<'tcx>
-{
-    use rustc::mir::Place::*;
-
-    match *place {
-        Local { .. } => (None, hir::MutMutable),
-        Promoted(_) |
-        Static(_) => (None, hir::MutImmutable),
-        Projection(ref proj) => {
-            match proj.elem {
-                ProjectionElem::Deref => {
-                    // Computing the inside the recursion makes this quadratic.
-                    // We don't expect deep paths though.
-                    let ty = proj.base.ty(local_decls, tcx).to_ty(tcx);
-                    // A Deref projection may restrict the context, this depends on the type
-                    // being deref'd.
-                    let context = match ty.sty {
-                        ty::Ref(re, _, mutbl) => {
-                            let re = match re {
-                                &RegionKind::ReScope(ce) => Some(ce),
-                                &RegionKind::ReErased =>
-                                    bug!("AddValidation pass must be run before erasing lifetimes"),
-                                _ => None
-                            };
-                            (re, mutbl)
-                        }
-                        ty::RawPtr(_) =>
-                            // There is no guarantee behind even a mutable raw pointer,
-                            // no write locks are acquired there, so we also don't want to
-                            // release any.
-                            (None, hir::MutImmutable),
-                        ty::Adt(adt, _) if adt.is_box() => (None, hir::MutMutable),
-                        _ => bug!("Deref on a non-pointer type {:?}", ty),
-                    };
-                    // "Intersect" this restriction with proj.base.
-                    if let (Some(_), hir::MutImmutable) = context {
-                        // This is already as restricted as it gets, no need to even recurse
-                        context
-                    } else {
-                        let base_context = place_context(&proj.base, local_decls, tcx);
-                        // The region of the outermost Deref is always most restrictive.
-                        let re = context.0.or(base_context.0);
-                        let mutbl = context.1.and(base_context.1);
-                        (re, mutbl)
-                    }
-
-                }
-                _ => place_context(&proj.base, local_decls, tcx),
-            }
-        }
-    }
-}
-
-/// Check if this function contains an unsafe block or is an unsafe function.
-fn fn_contains_unsafe<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, src: MirSource) -> bool {
-    use rustc::hir::intravisit::{self, Visitor, FnKind};
-    use rustc::hir::map::blocks::FnLikeNode;
-    use rustc::hir::Node;
-
-    /// Decide if this is an unsafe block
-    fn block_is_unsafe(block: &hir::Block) -> bool {
-        use rustc::hir::BlockCheckMode::*;
-
-        match block.rules {
-            UnsafeBlock(_) | PushUnsafeBlock(_) => true,
-            // For PopUnsafeBlock, we don't actually know -- but we will always also check all
-            // parent blocks, so we can safely declare the PopUnsafeBlock to not be unsafe.
-            DefaultBlock | PopUnsafeBlock(_) => false,
-        }
-    }
-
-    /// Decide if this FnLike is a closure
-    fn fn_is_closure<'a>(fn_like: FnLikeNode<'a>) -> bool {
-        match fn_like.kind() {
-            FnKind::Closure(_) => true,
-            FnKind::Method(..) | FnKind::ItemFn(..) => false,
-        }
-    }
-
-    let node_id = tcx.hir.as_local_node_id(src.def_id).unwrap();
-    let fn_like = match tcx.hir.body_owner_kind(node_id) {
-        hir::BodyOwnerKind::Fn => {
-            match FnLikeNode::from_node(tcx.hir.get(node_id)) {
-                Some(fn_like) => fn_like,
-                None => return false, // e.g. struct ctor shims -- such auto-generated code cannot
-                                      // contain unsafe.
-            }
-        },
-        _ => return false, // only functions can have unsafe
-    };
-
-    // Test if the function is marked unsafe.
-    if fn_like.unsafety() == hir::Unsafety::Unsafe {
-        return true;
-    }
-
-    // For closures, we need to walk up the parents and see if we are inside an unsafe fn or
-    // unsafe block.
-    if fn_is_closure(fn_like) {
-        let mut cur = fn_like.id();
-        loop {
-            // Go further upwards.
-            cur = tcx.hir.get_parent_node(cur);
-            let node = tcx.hir.get(cur);
-            // Check if this is an unsafe function
-            if let Some(fn_like) = FnLikeNode::from_node(node) {
-                if !fn_is_closure(fn_like) {
-                    if fn_like.unsafety() == hir::Unsafety::Unsafe {
-                        return true;
-                    }
-                }
-            }
-            // Check if this is an unsafe block, or an item
-            match node {
-                Node::Expr(&hir::Expr { node: hir::ExprKind::Block(ref block, _), ..}) => {
-                    if block_is_unsafe(&*block) {
-                        // Found an unsafe block, we can bail out here.
-                        return true;
-                    }
-                }
-                Node::Item(..) => {
-                    // No walking up beyond items.  This makes sure the loop always terminates.
-                    break;
-                }
-                _ => {},
-            }
-        }
-    }
-
-    // Visit the entire body of the function and check for unsafe blocks in there
-    struct FindUnsafe {
-        found_unsafe: bool,
-    }
-    let mut finder = FindUnsafe { found_unsafe: false };
-    // Run the visitor on the NodeId we got.  Seems like there is no uniform way to do that.
-    finder.visit_body(tcx.hir.body(fn_like.body()));
-
-    impl<'tcx> Visitor<'tcx> for FindUnsafe {
-        fn nested_visit_map<'this>(&'this mut self) -> intravisit::NestedVisitorMap<'this, 'tcx> {
-            intravisit::NestedVisitorMap::None
-        }
-
-        fn visit_block(&mut self, b: &'tcx hir::Block) {
-            if self.found_unsafe { return; } // short-circuit
-
-            if block_is_unsafe(b) {
-                // We found an unsafe block.  We can stop searching.
-                self.found_unsafe = true;
-            } else {
-                // No unsafe block here, go on searching.
-                intravisit::walk_block(self, b);
-            }
-        }
-    }
-
-    finder.found_unsafe
-}
-
-impl MirPass for AddValidation {
-    fn run_pass<'a, 'tcx>(&self,
-                          tcx: TyCtxt<'a, 'tcx, 'tcx>,
-                          src: MirSource,
-                          mir: &mut Mir<'tcx>)
-    {
-        let emit_validate = tcx.sess.opts.debugging_opts.mir_emit_validate;
-        if emit_validate == 0 {
-            return;
-        }
-        let restricted_validation = emit_validate == 1 && fn_contains_unsafe(tcx, src);
-        let (span, arg_count) = (mir.span, mir.arg_count);
-        let (basic_blocks, local_decls) = mir.basic_blocks_and_local_decls_mut();
-
-        // Convert a place to a validation operand.
-        let place_to_operand = |place: Place<'tcx>| -> ValidationOperand<'tcx, Place<'tcx>> {
-            let (re, mutbl) = place_context(&place, local_decls, tcx);
-            let ty = place.ty(local_decls, tcx).to_ty(tcx);
-            ValidationOperand { place, ty, re, mutbl }
-        };
-
-        // Emit an Acquire at the beginning of the given block.  If we are in restricted emission
-        // mode (mir_emit_validate=1), also emit a Release immediately after the Acquire.
-        let emit_acquire = |block: &mut BasicBlockData<'tcx>, source_info, operands: Vec<_>| {
-            if operands.len() == 0 {
-                return; // Nothing to do
-            }
-            // Emit the release first, to avoid cloning if we do not emit it
-            if restricted_validation {
-                let release_stmt = Statement {
-                    source_info,
-                    kind: StatementKind::Validate(ValidationOp::Release, operands.clone()),
-                };
-                block.statements.insert(0, release_stmt);
-            }
-            // Now, the acquire
-            let acquire_stmt = Statement {
-                source_info,
-                kind: StatementKind::Validate(ValidationOp::Acquire, operands),
-            };
-            block.statements.insert(0, acquire_stmt);
-        };
-
-        // PART 1
-        // Add an AcquireValid at the beginning of the start block.
-        {
-            let source_info = SourceInfo {
-                scope: OUTERMOST_SOURCE_SCOPE,
-                span: span, // FIXME: Consider using just the span covering the function
-                            // argument declaration.
-            };
-            // Gather all arguments, skip return value.
-            let operands = local_decls.iter_enumerated().skip(1).take(arg_count)
-                    .map(|(local, _)| place_to_operand(Place::Local(local))).collect();
-            emit_acquire(&mut basic_blocks[START_BLOCK], source_info, operands);
-        }
-
-        // PART 2
-        // Add ReleaseValid/AcquireValid around function call terminators.  We don't use a visitor
-        // because we need to access the block that a Call jumps to.
-        let mut returns : Vec<(SourceInfo, Place<'tcx>, BasicBlock)> = Vec::new();
-        for block_data in basic_blocks.iter_mut() {
-            match block_data.terminator {
-                Some(Terminator { kind: TerminatorKind::Call { ref args, ref destination, .. },
-                                  source_info }) => {
-                    // Before the call: Release all arguments *and* the return value.
-                    // The callee may write into the return value!  Note that this relies
-                    // on "release of uninitialized" to be a NOP.
-                    if !restricted_validation {
-                        let release_stmt = Statement {
-                            source_info,
-                            kind: StatementKind::Validate(ValidationOp::Release,
-                                destination.iter().map(|dest| place_to_operand(dest.0.clone()))
-                                .chain(
-                                    args.iter().filter_map(|op| {
-                                        match op {
-                                            &Operand::Copy(ref place) |
-                                            &Operand::Move(ref place) =>
-                                                Some(place_to_operand(place.clone())),
-                                            &Operand::Constant(..) => { None },
-                                        }
-                                    })
-                                ).collect())
-                        };
-                        block_data.statements.push(release_stmt);
-                    }
-                    // Remember the return destination for later
-                    if let &Some(ref destination) = destination {
-                        returns.push((source_info, destination.0.clone(), destination.1));
-                    }
-                }
-                Some(Terminator { kind: TerminatorKind::Drop { location: ref place, .. },
-                                  source_info }) |
-                Some(Terminator { kind: TerminatorKind::DropAndReplace { location: ref place, .. },
-                                  source_info }) => {
-                    // Before the call: Release all arguments
-                    if !restricted_validation {
-                        let release_stmt = Statement {
-                            source_info,
-                            kind: StatementKind::Validate(ValidationOp::Release,
-                                    vec![place_to_operand(place.clone())]),
-                        };
-                        block_data.statements.push(release_stmt);
-                    }
-                    // drop doesn't return anything, so we need no acquire.
-                }
-                _ => {
-                    // Not a block ending in a Call -> ignore.
-                }
-            }
-        }
-        // Now we go over the returns we collected to acquire the return values.
-        for (source_info, dest_place, dest_block) in returns {
-            emit_acquire(
-                &mut basic_blocks[dest_block],
-                source_info,
-                vec![place_to_operand(dest_place)]
-            );
-        }
-
-        if restricted_validation {
-            // No part 3 for us.
-            return;
-        }
-
-        // PART 3
-        // Add ReleaseValid/AcquireValid around Ref and Cast.  Again an iterator does not seem very
-        // suited as we need to add new statements before and after each Ref.
-        for block_data in basic_blocks {
-            // We want to insert statements around Ref commands as we iterate.  To this end, we
-            // iterate backwards using indices.
-            for i in (0..block_data.statements.len()).rev() {
-                match block_data.statements[i].kind {
-                    // When the borrow of this ref expires, we need to recover validation.
-                    StatementKind::Assign(_, box Rvalue::Ref(_, _, _)) => {
-                        // Due to a lack of NLL; we can't capture anything directly here.
-                        // Instead, we have to re-match and clone there.
-                        let (dest_place, re, src_place) = match block_data.statements[i].kind {
-                            StatementKind::Assign(ref dest_place,
-                                                  box Rvalue::Ref(re, _, ref src_place)) => {
-                                (dest_place.clone(), re, src_place.clone())
-                            },
-                            _ => bug!("We already matched this."),
-                        };
-                        // So this is a ref, and we got all the data we wanted.
-                        // Do an acquire of the result -- but only what it points to, so add a Deref
-                        // projection.
-                        let acquire_stmt = Statement {
-                            source_info: block_data.statements[i].source_info,
-                            kind: StatementKind::Validate(ValidationOp::Acquire,
-                                    vec![place_to_operand(dest_place.deref())]),
-                        };
-                        block_data.statements.insert(i+1, acquire_stmt);
-
-                        // The source is released until the region of the borrow ends.
-                        let op = match re {
-                            &RegionKind::ReScope(ce) => ValidationOp::Suspend(ce),
-                            &RegionKind::ReErased =>
-                                bug!("AddValidation pass must be run before erasing lifetimes"),
-                            _ => ValidationOp::Release,
-                        };
-                        let release_stmt = Statement {
-                            source_info: block_data.statements[i].source_info,
-                            kind: StatementKind::Validate(op, vec![place_to_operand(src_place)]),
-                        };
-                        block_data.statements.insert(i, release_stmt);
-                    }
-                    // Casts can change what validation does (e.g. unsizing)
-                    StatementKind::Assign(_, box Rvalue::Cast(kind, Operand::Copy(_), _)) |
-                    StatementKind::Assign(_, box Rvalue::Cast(kind, Operand::Move(_), _))
-                        if kind != CastKind::Misc =>
-                    {
-                        // Due to a lack of NLL; we can't capture anything directly here.
-                        // Instead, we have to re-match and clone there.
-                        let (dest_place, src_place) = match block_data.statements[i].kind {
-                            StatementKind::Assign(ref dest_place,
-                                    box Rvalue::Cast(_, Operand::Copy(ref src_place), _)) |
-                            StatementKind::Assign(ref dest_place,
-                                    box Rvalue::Cast(_, Operand::Move(ref src_place), _)) =>
-                            {
-                                (dest_place.clone(), src_place.clone())
-                            },
-                            _ => bug!("We already matched this."),
-                        };
-
-                        // Acquire of the result
-                        let acquire_stmt = Statement {
-                            source_info: block_data.statements[i].source_info,
-                            kind: StatementKind::Validate(ValidationOp::Acquire,
-                                    vec![place_to_operand(dest_place)]),
-                        };
-                        block_data.statements.insert(i+1, acquire_stmt);
-
-                        // Release of the input
-                        let release_stmt = Statement {
-                            source_info: block_data.statements[i].source_info,
-                            kind: StatementKind::Validate(ValidationOp::Release,
-                                                            vec![place_to_operand(src_place)]),
-                        };
-                        block_data.statements.insert(i, release_stmt);
-                    }
-                    _ => {},
-                }
-            }
-        }
-    }
-}
index edd15c39fed3e03ab4ab68d0040058cdf3180e2d..c28bb0ca35704b56d93378ec61474c32473d832b 100644 (file)
@@ -113,7 +113,7 @@ fn visit_statement(&mut self,
             StatementKind::StorageLive(..) |
             StatementKind::StorageDead(..) |
             StatementKind::EndRegion(..) |
-            StatementKind::Validate(..) |
+            StatementKind::Retag { .. } |
             StatementKind::AscribeUserType(..) |
             StatementKind::Nop => {
                 // safe (at least as emitted during MIR construction)
index 8ee009db023f0beb304ebedb23e43130aa31b587..4f92ba400481bfc2759e4f1fab4ba40cb3efd934 100644 (file)
@@ -28,7 +28,7 @@
     HasTyCtxt, TargetDataLayout, HasDataLayout,
 };
 
-use interpret::{self, EvalContext, ScalarMaybeUndef, Value, OpTy, MemoryKind};
+use interpret::{self, EvalContext, ScalarMaybeUndef, Immediate, OpTy, MemoryKind};
 use const_eval::{CompileTimeInterpreter, error_to_const_error, eval_promoted, mk_borrowck_eval_cx};
 use transform::{MirPass, MirSource};
 
@@ -87,23 +87,23 @@ struct ConstPropagator<'a, 'mir, 'tcx:'a+'mir> {
     param_env: ParamEnv<'tcx>,
 }
 
-impl<'a, 'b, 'tcx> LayoutOf for &'a ConstPropagator<'a, 'b, 'tcx> {
+impl<'a, 'b, 'tcx> LayoutOf for ConstPropagator<'a, 'b, 'tcx> {
     type Ty = ty::Ty<'tcx>;
     type TyLayout = Result<TyLayout<'tcx>, LayoutError<'tcx>>;
 
-    fn layout_of(self, ty: ty::Ty<'tcx>) -> Self::TyLayout {
+    fn layout_of(&self, ty: ty::Ty<'tcx>) -> Self::TyLayout {
         self.tcx.layout_of(self.param_env.and(ty))
     }
 }
 
-impl<'a, 'b, 'tcx> HasDataLayout for &'a ConstPropagator<'a, 'b, 'tcx> {
+impl<'a, 'b, 'tcx> HasDataLayout for ConstPropagator<'a, 'b, 'tcx> {
     #[inline]
     fn data_layout(&self) -> &TargetDataLayout {
         &self.tcx.data_layout
     }
 }
 
-impl<'a, 'b, 'tcx> HasTyCtxt<'tcx> for &'a ConstPropagator<'a, 'b, 'tcx> {
+impl<'a, 'b, 'tcx> HasTyCtxt<'tcx> for ConstPropagator<'a, 'b, 'tcx> {
     #[inline]
     fn tcx<'c>(&'c self) -> TyCtxt<'c, 'tcx, 'tcx> {
         self.tcx
@@ -354,7 +354,7 @@ fn const_prop(
             Rvalue::NullaryOp(NullOp::SizeOf, ty) => {
                 type_size_of(self.tcx, self.param_env, ty).and_then(|n| Some((
                     OpTy {
-                        op: interpret::Operand::Immediate(Value::Scalar(
+                        op: interpret::Operand::Immediate(Immediate::Scalar(
                             Scalar::Bits {
                                 bits: n as u128,
                                 size: self.tcx.data_layout.pointer_size.bytes() as u8,
@@ -397,7 +397,7 @@ fn const_prop(
                     this.ecx.unary_op(op, prim, arg.layout)
                 })?;
                 let res = OpTy {
-                    op: interpret::Operand::Immediate(Value::Scalar(val.into())),
+                    op: interpret::Operand::Immediate(Immediate::Scalar(val.into())),
                     layout: place_layout,
                 };
                 Some((res, span))
@@ -418,7 +418,7 @@ fn const_prop(
                 }
 
                 let r = self.use_ecx(source_info, |this| {
-                    this.ecx.read_value(right.0)
+                    this.ecx.read_immediate(right.0)
                 })?;
                 if op == BinOp::Shr || op == BinOp::Shl {
                     let left_ty = left.ty(self.mir, self.tcx);
@@ -451,14 +451,14 @@ fn const_prop(
                 }
                 let left = self.eval_operand(left, source_info)?;
                 let l = self.use_ecx(source_info, |this| {
-                    this.ecx.read_value(left.0)
+                    this.ecx.read_immediate(left.0)
                 })?;
                 trace!("const evaluating {:?} for {:?} and {:?}", op, left, right);
                 let (val, overflow) = self.use_ecx(source_info, |this| {
-                    this.ecx.binary_op_val(op, l, r)
+                    this.ecx.binary_op_imm(op, l, r)
                 })?;
                 let val = if let Rvalue::CheckedBinaryOp(..) = *rvalue {
-                    Value::ScalarPair(
+                    Immediate::ScalarPair(
                         val.into(),
                         Scalar::from_bool(overflow).into(),
                     )
@@ -468,7 +468,7 @@ fn const_prop(
                         let _: Option<()> = self.use_ecx(source_info, |_| Err(err));
                         return None;
                     }
-                    Value::Scalar(val.into())
+                    Immediate::Scalar(val.into())
                 };
                 let res = OpTy {
                     op: interpret::Operand::Immediate(val),
@@ -591,7 +591,7 @@ fn visit_terminator_kind(
         if let TerminatorKind::Assert { expected, msg, cond, .. } = kind {
             if let Some(value) = self.eval_operand(cond, source_info) {
                 trace!("assertion on {:?} should be {:?}", value, expected);
-                let expected = Value::Scalar(Scalar::from_bool(*expected).into());
+                let expected = Immediate::Scalar(Scalar::from_bool(*expected).into());
                 if expected != value.0.to_immediate() {
                     // poison all places this operand references so that further code
                     // doesn't use the invalid value
@@ -629,7 +629,7 @@ fn visit_terminator_kind(
                                 .eval_operand(len, source_info)
                                 .expect("len must be const");
                             let len = match len.0.to_immediate() {
-                                Value::Scalar(ScalarMaybeUndef::Scalar(Scalar::Bits {
+                                Immediate::Scalar(ScalarMaybeUndef::Scalar(Scalar::Bits {
                                     bits, ..
                                 })) => bits,
                                 _ => bug!("const len not primitive: {:?}", len),
@@ -638,7 +638,7 @@ fn visit_terminator_kind(
                                 .eval_operand(index, source_info)
                                 .expect("index must be const");
                             let index = match index.0.to_immediate() {
-                                Value::Scalar(ScalarMaybeUndef::Scalar(Scalar::Bits {
+                                Immediate::Scalar(ScalarMaybeUndef::Scalar(Scalar::Bits {
                                     bits, ..
                                 })) => bits,
                                 _ => bug!("const index not primitive: {:?}", index),
index c697391d86776aa5b0bbe064a82e46f57b85b791..6351a6b40cb03a9db2aad442e38cdffb55f778b2 100644 (file)
 
 struct EraseRegionsVisitor<'a, 'tcx: 'a> {
     tcx: TyCtxt<'a, 'tcx, 'tcx>,
-    in_validation_statement: bool,
 }
 
 impl<'a, 'tcx> EraseRegionsVisitor<'a, 'tcx> {
     pub fn new(tcx: TyCtxt<'a, 'tcx, 'tcx>) -> Self {
         EraseRegionsVisitor {
             tcx,
-            in_validation_statement: false,
         }
     }
 }
 
 impl<'a, 'tcx> MutVisitor<'tcx> for EraseRegionsVisitor<'a, 'tcx> {
     fn visit_ty(&mut self, ty: &mut Ty<'tcx>, _: TyContext) {
-        if !self.in_validation_statement {
-            *ty = self.tcx.erase_regions(ty);
-        }
+        *ty = self.tcx.erase_regions(ty);
         self.super_ty(ty);
     }
 
@@ -58,20 +54,11 @@ fn visit_statement(&mut self,
                        block: BasicBlock,
                        statement: &mut Statement<'tcx>,
                        location: Location) {
-        // Do NOT delete EndRegion if validation statements are emitted.
-        // Validation needs EndRegion.
-        if self.tcx.sess.opts.debugging_opts.mir_emit_validate == 0 {
-            if let StatementKind::EndRegion(_) = statement.kind {
-                statement.kind = StatementKind::Nop;
-            }
+        if let StatementKind::EndRegion(_) = statement.kind {
+            statement.kind = StatementKind::Nop;
         }
 
-        self.in_validation_statement = match statement.kind {
-            StatementKind::Validate(..) => true,
-            _ => false,
-        };
         self.super_statement(block, statement, location);
-        self.in_validation_statement = false;
     }
 }
 
index 5963f1a481c659942fca679b6ce4d13ea7696718..199cf5650fda8c2f6f5f82d2f239e26d56a1f560 100644 (file)
@@ -691,6 +691,14 @@ fn visit_basic_block_data(&mut self, block: BasicBlock, data: &mut BasicBlockDat
         self.in_cleanup_block = false;
     }
 
+    fn visit_retag(&mut self, fn_entry: &mut bool, place: &mut Place<'tcx>, loc: Location) {
+        self.super_retag(fn_entry, place, loc);
+
+        // We have to patch all inlined retags to be aware that they are no longer
+        // happening on function entry.
+        *fn_entry = false;
+    }
+
     fn visit_terminator_kind(&mut self, block: BasicBlock,
                              kind: &mut TerminatorKind<'tcx>, loc: Location) {
         self.super_terminator_kind(block, kind, loc);
index bd7d9d367618be73f1bc77ce12513fae40d64087..80072153167f3c2eb6115cbfaef6905cc4b66220 100644 (file)
@@ -143,7 +143,7 @@ fn check_lang_item_type<'a, 'tcx, D>(
 {
     let did = tcx.require_lang_item(lang_item);
     let poly_sig = tcx.fn_sig(did);
-    let sig = poly_sig.no_late_bound_regions().unwrap();
+    let sig = poly_sig.no_bound_vars().unwrap();
     let lhs_ty = lhs.ty(local_decls, tcx);
     let rhs_ty = rhs.ty(local_decls, tcx);
     let place_ty = place.ty(local_decls, tcx).to_ty(tcx);
index 46c73c27fe10d547548bef204d4217cf2f08fcf6..92cfcb3fd56cb4d3f04898c972206ff9e07349fb 100644 (file)
@@ -23,7 +23,7 @@
 use syntax::ast;
 use syntax_pos::Span;
 
-pub mod add_validation;
+pub mod add_retag;
 pub mod add_moves_for_packed_drops;
 pub mod cleanup_post_borrowck;
 pub mod check_unsafety;
@@ -258,19 +258,21 @@ fn optimized_mir<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, def_id: DefId) -> &'tcx
         // Remove all `FakeRead` statements and the borrows that are only
         // used for checking matches
         &cleanup_post_borrowck::CleanFakeReadsAndBorrows,
+
         &simplify::SimplifyCfg::new("early-opt"),
 
         // These next passes must be executed together
         &add_call_guards::CriticalCallEdges,
         &elaborate_drops::ElaborateDrops,
         &no_landing_pads::NoLandingPads,
-        // AddValidation needs to run after ElaborateDrops and before EraseRegions, and it needs
-        // an AllCallEdges pass right before it.
-        &add_call_guards::AllCallEdges,
-        &add_validation::AddValidation,
         // AddMovesForPackedDrops needs to run after drop
         // elaboration.
         &add_moves_for_packed_drops::AddMovesForPackedDrops,
+        // AddRetag needs to run after ElaborateDrops, and it needs
+        // an AllCallEdges pass right before it.  Otherwise it should
+        // run fairly late, but before optimizations begin.
+        &add_call_guards::AllCallEdges,
+        &add_retag::AddRetag,
 
         &simplify::SimplifyCfg::new("elaborate-drops"),
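
The comment block added for AddRetag encodes ordering constraints that nothing in the type system enforces: the pipeline is just a list of passes run front to back, so "after ElaborateDrops, with AllCallEdges right before it" means only "put it later in this vector". A stripped-down model of that, with made-up pass names and a hypothetical Pass trait instead of the real MirPass machinery:

trait Pass {
    fn name(&self) -> &'static str;
    fn run(&self, log: &mut Vec<&'static str>) {
        // A real pass would transform the MIR here; we just record the order.
        log.push(self.name());
    }
}

struct ElaborateDrops;
struct AllCallEdges;
struct AddRetag;

impl Pass for ElaborateDrops { fn name(&self) -> &'static str { "elaborate-drops" } }
impl Pass for AllCallEdges   { fn name(&self) -> &'static str { "all-call-edges" } }
impl Pass for AddRetag       { fn name(&self) -> &'static str { "add-retag" } }

fn main() {
    // The ordering constraint lives entirely in the position within this list.
    let passes: Vec<Box<dyn Pass>> = vec![
        Box::new(ElaborateDrops),
        Box::new(AllCallEdges), // must come right before AddRetag
        Box::new(AddRetag),
    ];

    let mut log = Vec::new();
    for pass in &passes {
        pass.run(&mut log);
    }
    assert_eq!(log, ["elaborate-drops", "all-call-edges", "add-retag"]);
}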
 
index a232176eacc8fd8a814a520f422601f2537ba8bd..ca9c4eb9b8bb971f3d8bca1046c98d2afb4c30ee 100644 (file)
@@ -1167,7 +1167,7 @@ fn visit_statement(&mut self, bb: BasicBlock, statement: &Statement<'tcx>, locat
                 StatementKind::StorageDead(_) |
                 StatementKind::InlineAsm {..} |
                 StatementKind::EndRegion(_) |
-                StatementKind::Validate(..) |
+                StatementKind::Retag { .. } |
                 StatementKind::AscribeUserType(..) |
                 StatementKind::Nop => {}
             }
index 6ab68789c027ba7bbdeb1a1aea580d676979b47d..1e193485950579bcae7671ca912c7b12cddad674 100644 (file)
@@ -241,7 +241,7 @@ fn check_statement(
         // These are all NOPs
         | StatementKind::StorageLive(_)
         | StatementKind::StorageDead(_)
-        | StatementKind::Validate(..)
+        | StatementKind::Retag { .. }
         | StatementKind::EndRegion(_)
         | StatementKind::AscribeUserType(..)
         | StatementKind::Nop => Ok(()),
index 4b4b284b02cd575a7a80bcd9420524bb8b0d3bd6..c1c127fa8d6484979f00ed3e383929c21f6d3a7c 100644 (file)
@@ -68,7 +68,7 @@ fn is_nop_landing_pad(
                 StatementKind::Assign(_, _) |
                 StatementKind::SetDiscriminant { .. } |
                 StatementKind::InlineAsm { .. } |
-                StatementKind::Validate { .. } => {
+                StatementKind::Retag { .. } => {
                     return false;
                 }
             }
index 05044574e5ca3a4f1485edacf093d1eff821d41d..a5a19f04b7e8e49b1e61617ace060e2a29c07e94 100644 (file)
@@ -162,7 +162,7 @@ fn each_block<'a, 'tcx, O>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
             mir::StatementKind::StorageDead(_) |
             mir::StatementKind::InlineAsm { .. } |
             mir::StatementKind::EndRegion(_) |
-            mir::StatementKind::Validate(..) |
+            mir::StatementKind::Retag { .. } |
             mir::StatementKind::AscribeUserType(..) |
             mir::StatementKind::Nop => continue,
             mir::StatementKind::SetDiscriminant{ .. } =>
index 1715086686c4224dfb70900f34c14a9cf37a0a16..ae0483e3c140c8dcc810702a2018b9c3d347d09f 100644 (file)
@@ -575,7 +575,7 @@ fn cannot_mutate_in_match_guard(
             OGN = o
         );
         err.span_label(mutate_span, format!("cannot {}", action));
-        err.span_label(match_span, format!("value is immutable in match guard"));
+        err.span_label(match_span, String::from("value is immutable in match guard"));
 
         self.cancel_if_wrong_origin(err, o)
     }
index d16094e8238deba487d4b1d67e0d14d8ecb8e8ae..12c13b8f81531b330bd08c1225cad594ba7d4f95 100644 (file)
@@ -204,7 +204,7 @@ pub fn categorize<'tcx>(context: PlaceContext<'tcx>) -> Option<DefUse> {
         PlaceContext::NonMutatingUse(NonMutatingUseContext::Copy) |
         PlaceContext::NonMutatingUse(NonMutatingUseContext::Move) |
         PlaceContext::NonUse(NonUseContext::AscribeUserTy) |
-        PlaceContext::NonUse(NonUseContext::Validate) =>
+        PlaceContext::MutatingUse(MutatingUseContext::Retag) =>
             Some(DefUse::Use),
 
         ///////////////////////////////////////////////////////////////////////////
index 0e9596244cd581cb818c89f33d681002edde192f..61861da62f759134b1c6858ab4167d169b70453c 100644 (file)
@@ -403,25 +403,6 @@ fn visit_item(&mut self, item: &'a Item) {
                     }
                 }
             }
-            ItemKind::TraitAlias(Generics { ref params, .. }, ..) => {
-                for param in params {
-                    match param.kind {
-                        GenericParamKind::Lifetime { .. } => {}
-                        GenericParamKind::Type { ref default, .. } => {
-                            if !param.bounds.is_empty() {
-                                self.err_handler()
-                                    .span_err(param.ident.span, "type parameters on the left \
-                                        side of a trait alias cannot be bounded");
-                            }
-                            if !default.is_none() {
-                                self.err_handler()
-                                    .span_err(param.ident.span, "type parameters on the left \
-                                        side of a trait alias cannot have defaults");
-                            }
-                        }
-                    }
-                }
-            }
             ItemKind::Mod(_) => {
                 // Ensure that `path` attributes on modules are recorded as used (c.f. #35584).
                 attr::first_attr_value_str_by_name(&item.attrs, "path");
index 06c8545aacfd84fdb1e6f4ab0c9cb7b7b3810dbd..ecfe7d13782de3dc2c0daa875d9e731dbcc0d44c 100644 (file)
@@ -84,7 +84,7 @@ fn visit_statement(&mut self,
             StatementKind::Assign(..) => "StatementKind::Assign",
             StatementKind::FakeRead(..) => "StatementKind::FakeRead",
             StatementKind::EndRegion(..) => "StatementKind::EndRegion",
-            StatementKind::Validate(..) => "StatementKind::Validate",
+            StatementKind::Retag { .. } => "StatementKind::Retag",
             StatementKind::SetDiscriminant { .. } => "StatementKind::SetDiscriminant",
             StatementKind::StorageLive(..) => "StatementKind::StorageLive",
             StatementKind::StorageDead(..) => "StatementKind::StorageDead",
index ebd87e87ff60a4c2a72adefc9073d73ac31b5580..17ca8c275c3cb39e48c20be846f1914f11e09f10 100644 (file)
@@ -538,9 +538,9 @@ fn is_expected(self, def: Def) -> bool {
         match self {
             PathSource::Type => match def {
                 Def::Struct(..) | Def::Union(..) | Def::Enum(..) |
-                Def::Trait(..) | Def::TyAlias(..) | Def::AssociatedTy(..) |
-                Def::PrimTy(..) | Def::TyParam(..) | Def::SelfTy(..) |
-                Def::Existential(..) |
+                Def::Trait(..) | Def::TraitAlias(..) | Def::TyAlias(..) |
+                Def::AssociatedTy(..) | Def::PrimTy(..) | Def::TyParam(..) |
+                Def::SelfTy(..) | Def::Existential(..) |
                 Def::ForeignTy(..) => true,
                 _ => false,
             },
@@ -3122,7 +3122,10 @@ fn smart_resolve_path_fragment(&mut self,
                         return (err, candidates);
                     }
                     (Def::TyAlias(..), PathSource::Trait(_)) => {
-                        err.span_label(span, "type aliases cannot be used for traits");
+                        err.span_label(span, "type aliases cannot be used as traits");
+                        if nightly_options::is_nightly_build() {
+                            err.note("did you mean to use a trait alias?");
+                        }
                         return (err, candidates);
                     }
                     (Def::Mod(..), PathSource::Expr(Some(parent))) => match parent.node {
@@ -3589,7 +3592,17 @@ fn resolve_path(
         );
 
         for (i, &Segment { ident, id }) in path.iter().enumerate() {
-            debug!("resolve_path ident {} {:?}", i, ident);
+            debug!("resolve_path ident {} {:?} {:?}", i, ident, id);
+            let record_segment_def = |this: &mut Self, def| {
+                if record_used {
+                    if let Some(id) = id {
+                        if !this.def_map.contains_key(&id) {
+                            assert!(id != ast::DUMMY_NODE_ID, "Trying to resolve dummy id");
+                            this.record_def(id, PathResolution::new(def));
+                        }
+                    }
+                }
+            };
 
             let is_last = i == path.len() - 1;
             let ns = if is_last { opt_ns.unwrap_or(TypeNS) } else { TypeNS };
@@ -3673,6 +3686,7 @@ fn resolve_path(
                     // we found a local variable or type param
                     Some(LexicalScopeBinding::Def(def))
                             if opt_ns == Some(TypeNS) || opt_ns == Some(ValueNS) => {
+                        record_segment_def(self, def);
                         return PathResult::NonModule(PathResolution::with_unresolved_segments(
                             def, path.len() - 1
                         ));
@@ -3690,14 +3704,7 @@ fn resolve_path(
                     let maybe_assoc = opt_ns != Some(MacroNS) && PathSource::Type.is_expected(def);
                     if let Some(next_module) = binding.module() {
                         module = Some(ModuleOrUniformRoot::Module(next_module));
-                        if record_used {
-                            if let Some(id) = id {
-                                if !self.def_map.contains_key(&id) {
-                                    assert!(id != ast::DUMMY_NODE_ID, "Trying to resolve dummy id");
-                                    self.record_def(id, PathResolution::new(def));
-                                }
-                            }
-                        }
+                        record_segment_def(self, def);
                     } else if def == Def::ToolMod && i + 1 != path.len() {
                         let def = Def::NonMacroAttr(NonMacroAttrKind::Tool);
                         return PathResult::NonModule(PathResolution::new(def));
@@ -3884,7 +3891,7 @@ fn adjust_local_def(&mut self,
                             // report an error.
                             if record_used {
                                 resolve_error(self, span,
-                                        ResolutionError::CannotCaptureDynamicEnvironmentInFnItem);
+                                    ResolutionError::CannotCaptureDynamicEnvironmentInFnItem);
                             }
                             return Def::Err;
                         }
@@ -3892,7 +3899,7 @@ fn adjust_local_def(&mut self,
                             // Still doesn't deal with upvars
                             if record_used {
                                 resolve_error(self, span,
-                                        ResolutionError::AttemptToUseNonConstantValueInConstant);
+                                    ResolutionError::AttemptToUseNonConstantValueInConstant);
                             }
                             return Def::Err;
                         }
@@ -4944,7 +4951,7 @@ fn show_candidates(err: &mut DiagnosticBuilder,
         err.span_suggestions_with_applicability(
             span,
             &msg,
-            path_strings,
+            path_strings.into_iter(),
             Applicability::Unspecified,
         );
     } else {
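
The record_segment_def closure introduced above factors out the "record this resolution once, unless it's a dummy id" logic so both resolution paths in the loop can share it, and it takes the resolver as an explicit this: &mut Self argument rather than capturing self, presumably so the closure does not hold a mutable borrow of the resolver across the rest of the loop body. The pattern in isolation, with a made-up resolver type just to show the shape:

use std::collections::HashMap;

struct Resolver {
    def_map: HashMap<u32, &'static str>,
}

fn resolve_path(resolver: &mut Resolver, segments: &[(u32, &'static str)]) {
    // The closure captures nothing; the &mut borrow lasts only for each call,
    // so `resolver` stays freely usable elsewhere in the loop body.
    let record_segment_def = |this: &mut Resolver, id: u32, def: &'static str| {
        this.def_map.entry(id).or_insert(def);
    };

    for &(id, def) in segments {
        // ...resolution branches elided; more than one of them can now share
        // the helper instead of repeating the bookkeeping inline.
        record_segment_def(resolver, id, def);
    }
}

fn main() {
    let mut resolver = Resolver { def_map: HashMap::new() };
    resolve_path(&mut resolver, &[(1, "mod a"), (2, "fn f")]);
    assert_eq!(resolver.def_map.get(&2), Some(&"fn f"));
    assert_eq!(resolver.def_map.len(), 2);
}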
index d5f344346c2d17238fe4bd89eec7322b4bb72d5a..43a5fdb7a025ff2977da7a3666cc132ec80661d6 100644 (file)
@@ -449,6 +449,9 @@ fn resolve_macro_to_def(
                     return Err(Determinacy::Determined);
                 }
             }
+            Def::Err => {
+                return Err(Determinacy::Determined);
+            }
             _ => panic!("expected `Def::Macro` or `Def::NonMacroAttr`"),
         }
 
index 359640ccda2b1eb6360e868f45c0dd69c42df2b6..a3694cd73ad53e7411f57e9aff07e2d4b20b737e 100644 (file)
@@ -36,7 +36,6 @@
 
 use std::cell::{Cell, RefCell};
 use std::collections::BTreeMap;
-use std::fmt::Write;
 use std::{mem, ptr};
 
 /// Contains data for specific types of import directives.
@@ -780,17 +779,14 @@ struct UniformPathsCanaryResults<'a> {
 
             let msg = format!("`{}` import is ambiguous", name);
             let mut err = self.session.struct_span_err(span, &msg);
-            let mut suggestion_choices = String::new();
+            let mut suggestion_choices = vec![];
             if external_crate.is_some() {
-                write!(suggestion_choices, "`::{}`", name);
+                suggestion_choices.push(format!("`::{}`", name));
                 err.span_label(span,
                     format!("can refer to external crate `::{}`", name));
             }
             if let Some(result) = results.module_scope {
-                if !suggestion_choices.is_empty() {
-                    suggestion_choices.push_str(" or ");
-                }
-                write!(suggestion_choices, "`self::{}`", name);
+                suggestion_choices.push(format!("`self::{}`", name));
                 if uniform_paths_feature {
                     err.span_label(result.span,
                         format!("can refer to `self::{}`", name));
@@ -803,7 +799,7 @@ struct UniformPathsCanaryResults<'a> {
                 err.span_label(result.span,
                     format!("shadowed by block-scoped `{}`", name));
             }
-            err.help(&format!("write {} explicitly instead", suggestion_choices));
+            err.help(&format!("write {} explicitly instead", suggestion_choices.join(" or ")));
             if uniform_paths_feature {
                 err.note("relative `use` paths enabled by `#![feature(uniform_paths)]`");
             } else {
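
Collecting the suggestions into a Vec and joining with " or " replaces the earlier manual bookkeeping (an is_empty check before appending the separator around write!). The same simplification in isolation, with plain strings and nothing resolver-specific:

fn main() {
    let name = "foo";
    let external_crate = true;
    let module_scope = true;

    let mut suggestion_choices = Vec::new();
    if external_crate {
        suggestion_choices.push(format!("`::{}`", name));
    }
    if module_scope {
        suggestion_choices.push(format!("`self::{}`", name));
    }

    // `join` only inserts the separator between entries, so a single
    // suggestion renders without a stray " or ".
    let help = format!("write {} explicitly instead", suggestion_choices.join(" or "));
    assert_eq!(help, "write `::foo` or `self::foo` explicitly instead");
}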
index c0b718e4863fc05afaf06e649433194f7dddea5f..9bc3fbe7c245a9ef3238439977640aabd963dba7 100644 (file)
@@ -92,7 +92,7 @@ pub struct DumpVisitor<'l, 'tcx: 'l, 'll, O: DumpOutput + 'll> {
     // we only write one macro def per unique macro definition, and
     // one macro use per unique callsite span.
     // mac_defs: FxHashSet<Span>,
-    macro_calls: FxHashSet<Span>,
+    // macro_calls: FxHashSet<Span>,
 }
 
 impl<'l, 'tcx: 'l, 'll, O: DumpOutput + 'll> DumpVisitor<'l, 'tcx, 'll, O> {
@@ -108,7 +108,7 @@ pub fn new(
             span: span_utils,
             cur_scope: CRATE_NODE_ID,
             // mac_defs: FxHashSet::default(),
-            macro_calls: FxHashSet::default(),
+            // macro_calls: FxHashSet::default(),
         }
     }
 
@@ -771,8 +771,7 @@ fn dump_path_ref(&mut self, id: NodeId, path: &ast::Path) {
     }
 
     fn process_path(&mut self, id: NodeId, path: &'l ast::Path) {
-        debug!("process_path {:?}", path);
-        if generated_code(path.span) {
+        if self.span.filter_generated(path.span) {
             return;
         }
         self.dump_path_ref(id, path);
@@ -1031,18 +1030,20 @@ fn process_var_decl(&mut self, p: &'l ast::Pat, value: String) {
     /// If the span is not macro-generated, do nothing, else use callee and
     /// callsite spans to record macro definition and use data, using the
     /// mac_uses and mac_defs sets to prevent multiples.
-    fn process_macro_use(&mut self, span: Span) {
-        let source_span = span.source_callsite();
-        if !self.macro_calls.insert(source_span) {
-            return;
-        }
+    fn process_macro_use(&mut self, _span: Span) {
+        // FIXME if we're not dumping the defs (see below), there is no point
+        // dumping refs either.
+        // let source_span = span.source_callsite();
+        // if !self.macro_calls.insert(source_span) {
+        //     return;
+        // }
 
-        let data = match self.save_ctxt.get_macro_use_data(span) {
-            None => return,
-            Some(data) => data,
-        };
+        // let data = match self.save_ctxt.get_macro_use_data(span) {
+        //     None => return,
+        //     Some(data) => data,
+        // };
 
-        self.dumper.macro_use(data);
+        // self.dumper.macro_use(data);
 
         // FIXME write the macro def
         // let mut hasher = DefaultHasher::new();
index e14ac73ee10202b1eb450fdf5de59d5511f810c9..d2354f38e2685e90c55105952795a3133211034f 100644 (file)
@@ -93,7 +93,7 @@ pub fn compilation_opts(&mut self, data: CompilationOptions) {
         self.result.compilation = Some(data);
     }
 
-    pub fn macro_use(&mut self, data: MacroRef) {
+    pub fn _macro_use(&mut self, data: MacroRef) {
         if self.config.pub_only || self.config.reachable_only {
             return;
         }
index bb686e914a0486c098e6fea02d4541ecb6c24717..684ea4c78978cc2b9a79f5a45c448bf4508d8318 100644 (file)
@@ -13,6 +13,3 @@ bitflags = "1.0"
 log = "0.4"
 rustc_cratesio_shim = { path = "../librustc_cratesio_shim" }
 serialize = { path = "../libserialize" }
-
-[features]
-jemalloc = []
index 90b5b97b51ee2da68a4008a725433df62c82f266..b4d393749c4869173e689ff7fd5f734e7248b137 100644 (file)
@@ -11,7 +11,7 @@
 use abi::call::{FnType, ArgType, Reg, RegKind, Uniform};
 use abi::{HasDataLayout, LayoutOf, TyLayout, TyLayoutMethods};
 
-fn is_homogeneous_aggregate<'a, Ty, C>(cx: C, arg: &mut ArgType<'a, Ty>)
+fn is_homogeneous_aggregate<'a, Ty, C>(cx: &C, arg: &mut ArgType<'a, Ty>)
                                      -> Option<Uniform>
     where Ty: TyLayoutMethods<'a, C> + Copy,
           C: LayoutOf<Ty = Ty, TyLayout = TyLayout<'a, Ty>> + HasDataLayout
@@ -41,7 +41,7 @@ fn is_homogeneous_aggregate<'a, Ty, C>(cx: C, arg: &mut ArgType<'a, Ty>)
     })
 }
 
-fn classify_ret_ty<'a, Ty, C>(cx: C, ret: &mut ArgType<'a, Ty>)
+fn classify_ret_ty<'a, Ty, C>(cx: &C, ret: &mut ArgType<'a, Ty>)
     where Ty: TyLayoutMethods<'a, C> + Copy,
           C: LayoutOf<Ty = Ty, TyLayout = TyLayout<'a, Ty>> + HasDataLayout
 {
@@ -75,7 +75,7 @@ fn classify_ret_ty<'a, Ty, C>(cx: C, ret: &mut ArgType<'a, Ty>)
     ret.make_indirect();
 }
 
-fn classify_arg_ty<'a, Ty, C>(cx: C, arg: &mut ArgType<'a, Ty>)
+fn classify_arg_ty<'a, Ty, C>(cx: &C, arg: &mut ArgType<'a, Ty>)
     where Ty: TyLayoutMethods<'a, C> + Copy,
           C: LayoutOf<Ty = Ty, TyLayout = TyLayout<'a, Ty>> + HasDataLayout
 {
@@ -109,7 +109,7 @@ fn classify_arg_ty<'a, Ty, C>(cx: C, arg: &mut ArgType<'a, Ty>)
     arg.make_indirect();
 }
 
-pub fn compute_abi_info<'a, Ty, C>(cx: C, fty: &mut FnType<'a, Ty>)
+pub fn compute_abi_info<'a, Ty, C>(cx: &C, fty: &mut FnType<'a, Ty>)
     where Ty: TyLayoutMethods<'a, C> + Copy,
           C: LayoutOf<Ty = Ty, TyLayout = TyLayout<'a, Ty>> + HasDataLayout
 {
index 62462f04d8f56574706e040b8daba5aa2988fde3..85789d7d4d7454d1d4c01c7c5f35629ce2abcb2b 100644 (file)
 use abi::call::{ArgType, FnType, };
 use abi::{HasDataLayout, LayoutOf, TyLayout, TyLayoutMethods};
 
-fn classify_ret_ty<'a, Ty, C>(_tuncx: C, ret: &mut ArgType<'a, Ty>)
+fn classify_ret_ty<'a, Ty, C>(_cx: &C, ret: &mut ArgType<'a, Ty>)
   where Ty: TyLayoutMethods<'a, C> + Copy,
         C: LayoutOf<Ty = Ty, TyLayout = TyLayout<'a, Ty>> + HasDataLayout
 {
   ret.extend_integer_width_to(32);
 }
 
-fn classify_arg_ty<'a, Ty, C>(_cx: C, arg: &mut ArgType<'a, Ty>)
+fn classify_arg_ty<'a, Ty, C>(_cx: &C, arg: &mut ArgType<'a, Ty>)
   where Ty: TyLayoutMethods<'a, C> + Copy,
         C: LayoutOf<Ty = Ty, TyLayout = TyLayout<'a, Ty>> + HasDataLayout
 {
   arg.extend_integer_width_to(32);
 }
 
-pub fn compute_abi_info<'a, Ty, C>(cx: C, fty: &mut FnType<'a, Ty>)
+pub fn compute_abi_info<'a, Ty, C>(cx: &C, fty: &mut FnType<'a, Ty>)
   where Ty: TyLayoutMethods<'a, C> + Copy,
         C: LayoutOf<Ty = Ty, TyLayout = TyLayout<'a, Ty>> + HasDataLayout
 {
index 249aad2d937ea27606587ad9ea58311c6c70cae7..b4ffae7385aa78b00a4f269a58901319336290d8 100644 (file)
@@ -12,7 +12,7 @@
 use abi::{HasDataLayout, LayoutOf, TyLayout, TyLayoutMethods};
 use spec::HasTargetSpec;
 
-fn is_homogeneous_aggregate<'a, Ty, C>(cx: C, arg: &mut ArgType<'a, Ty>)
+fn is_homogeneous_aggregate<'a, Ty, C>(cx: &C, arg: &mut ArgType<'a, Ty>)
                                      -> Option<Uniform>
     where Ty: TyLayoutMethods<'a, C> + Copy,
           C: LayoutOf<Ty = Ty, TyLayout = TyLayout<'a, Ty>> + HasDataLayout
@@ -42,7 +42,7 @@ fn is_homogeneous_aggregate<'a, Ty, C>(cx: C, arg: &mut ArgType<'a, Ty>)
     })
 }
 
-fn classify_ret_ty<'a, Ty, C>(cx: C, ret: &mut ArgType<'a, Ty>, vfp: bool)
+fn classify_ret_ty<'a, Ty, C>(cx: &C, ret: &mut ArgType<'a, Ty>, vfp: bool)
     where Ty: TyLayoutMethods<'a, C> + Copy,
           C: LayoutOf<Ty = Ty, TyLayout = TyLayout<'a, Ty>> + HasDataLayout
 {
@@ -77,7 +77,7 @@ fn classify_ret_ty<'a, Ty, C>(cx: C, ret: &mut ArgType<'a, Ty>, vfp: bool)
     ret.make_indirect();
 }
 
-fn classify_arg_ty<'a, Ty, C>(cx: C, arg: &mut ArgType<'a, Ty>, vfp: bool)
+fn classify_arg_ty<'a, Ty, C>(cx: &C, arg: &mut ArgType<'a, Ty>, vfp: bool)
     where Ty: TyLayoutMethods<'a, C> + Copy,
           C: LayoutOf<Ty = Ty, TyLayout = TyLayout<'a, Ty>> + HasDataLayout
 {
@@ -101,7 +101,7 @@ fn classify_arg_ty<'a, Ty, C>(cx: C, arg: &mut ArgType<'a, Ty>, vfp: bool)
     });
 }
 
-pub fn compute_abi_info<'a, Ty, C>(cx: C, fty: &mut FnType<'a, Ty>)
+pub fn compute_abi_info<'a, Ty, C>(cx: &C, fty: &mut FnType<'a, Ty>)
     where Ty: TyLayoutMethods<'a, C> + Copy,
           C: LayoutOf<Ty = Ty, TyLayout = TyLayout<'a, Ty>> + HasDataLayout + HasTargetSpec
 {
index 81d6f7b134b3e126bb8d86116ccf8b8de532a0c6..3bd2594bdb97220b801cb9f3d1b22984b79c0f02 100644 (file)
@@ -16,7 +16,7 @@
 // See the https://github.com/kripken/emscripten-fastcomp-clang repository.
 // The class `EmscriptenABIInfo` in `/lib/CodeGen/TargetInfo.cpp` contains the ABI definitions.
 
-fn classify_ret_ty<'a, Ty, C>(cx: C, ret: &mut ArgType<'a, Ty>)
+fn classify_ret_ty<'a, Ty, C>(cx: &C, ret: &mut ArgType<'a, Ty>)
     where Ty: TyLayoutMethods<'a, C> + Copy,
           C: LayoutOf<Ty = Ty, TyLayout = TyLayout<'a, Ty>> + HasDataLayout
 {
@@ -42,7 +42,7 @@ fn classify_arg_ty<Ty>(arg: &mut ArgType<Ty>) {
     }
 }
 
-pub fn compute_abi_info<'a, Ty, C>(cx: C, fty: &mut FnType<'a, Ty>)
+pub fn compute_abi_info<'a, Ty, C>(cx: &C, fty: &mut FnType<'a, Ty>)
     where Ty: TyLayoutMethods<'a, C> + Copy,
           C: LayoutOf<Ty = Ty, TyLayout = TyLayout<'a, Ty>> + HasDataLayout
 {
index 1e8af52e3e8192c3f45054442b71182774e77371..a40cb6c76f0cd3fc67b04faf20f56acedae71c6f 100644 (file)
@@ -11,7 +11,7 @@
 use abi::call::{ArgType, FnType, Reg, Uniform};
 use abi::{HasDataLayout, LayoutOf, Size, TyLayoutMethods};
 
-fn classify_ret_ty<'a, Ty, C>(cx: C, ret: &mut ArgType<Ty>, offset: &mut Size)
+fn classify_ret_ty<'a, Ty, C>(cx: &C, ret: &mut ArgType<Ty>, offset: &mut Size)
     where Ty: TyLayoutMethods<'a, C>, C: LayoutOf<Ty = Ty> + HasDataLayout
 {
     if !ret.layout.is_aggregate() {
@@ -22,7 +22,7 @@ fn classify_ret_ty<'a, Ty, C>(cx: C, ret: &mut ArgType<Ty>, offset: &mut Size)
     }
 }
 
-fn classify_arg_ty<'a, Ty, C>(cx: C, arg: &mut ArgType<Ty>, offset: &mut Size)
+fn classify_arg_ty<'a, Ty, C>(cx: &C, arg: &mut ArgType<Ty>, offset: &mut Size)
     where Ty: TyLayoutMethods<'a, C>, C: LayoutOf<Ty = Ty> + HasDataLayout
 {
     let dl = cx.data_layout();
@@ -44,7 +44,7 @@ fn classify_arg_ty<'a, Ty, C>(cx: C, arg: &mut ArgType<Ty>, offset: &mut Size)
     *offset = offset.abi_align(align) + size.abi_align(align);
 }
 
-pub fn compute_abi_info<'a, Ty, C>(cx: C, fty: &mut FnType<Ty>)
+pub fn compute_abi_info<'a, Ty, C>(cx: &C, fty: &mut FnType<Ty>)
     where Ty: TyLayoutMethods<'a, C>, C: LayoutOf<Ty = Ty> + HasDataLayout
 {
     let mut offset = Size::ZERO;
index 8e2dd99696e176d3e78c1833cdce0ef1189a7bea..adf5a3c94ea013267608d1f078216e966811e6c0 100644 (file)
@@ -27,7 +27,7 @@ fn extend_integer_width_mips<Ty>(arg: &mut ArgType<Ty>, bits: u64) {
     arg.extend_integer_width_to(bits);
 }
 
-fn float_reg<'a, Ty, C>(cx: C, ret: &ArgType<'a, Ty>, i: usize) -> Option<Reg>
+fn float_reg<'a, Ty, C>(cx: &C, ret: &ArgType<'a, Ty>, i: usize) -> Option<Reg>
     where Ty: TyLayoutMethods<'a, C> + Copy,
           C: LayoutOf<Ty = Ty, TyLayout = TyLayout<'a, Ty>> + HasDataLayout
 {
@@ -41,7 +41,7 @@ fn float_reg<'a, Ty, C>(cx: C, ret: &ArgType<'a, Ty>, i: usize) -> Option<Reg>
     }
 }
 
-fn classify_ret_ty<'a, Ty, C>(cx: C, ret: &mut ArgType<'a, Ty>)
+fn classify_ret_ty<'a, Ty, C>(cx: &C, ret: &mut ArgType<'a, Ty>)
     where Ty: TyLayoutMethods<'a, C> + Copy,
           C: LayoutOf<Ty = Ty, TyLayout = TyLayout<'a, Ty>> + HasDataLayout
 {
@@ -83,7 +83,7 @@ fn classify_ret_ty<'a, Ty, C>(cx: C, ret: &mut ArgType<'a, Ty>)
     }
 }
 
-fn classify_arg_ty<'a, Ty, C>(cx: C, arg: &mut ArgType<'a, Ty>)
+fn classify_arg_ty<'a, Ty, C>(cx: &C, arg: &mut ArgType<'a, Ty>)
     where Ty: TyLayoutMethods<'a, C> + Copy,
           C: LayoutOf<Ty = Ty, TyLayout = TyLayout<'a, Ty>> + HasDataLayout
 {
@@ -151,7 +151,7 @@ fn classify_arg_ty<'a, Ty, C>(cx: C, arg: &mut ArgType<'a, Ty>)
     });
 }
 
-pub fn compute_abi_info<'a, Ty, C>(cx: C, fty: &mut FnType<'a, Ty>)
+pub fn compute_abi_info<'a, Ty, C>(cx: &C, fty: &mut FnType<'a, Ty>)
     where Ty: TyLayoutMethods<'a, C> + Copy,
           C: LayoutOf<Ty = Ty, TyLayout = TyLayout<'a, Ty>> + HasDataLayout
 {
index 788497a378fe9ee549874dfd2063b3fe84a23d56..8f9ef2544e6020fb1d0873aba4f3c56eb6736820 100644 (file)
@@ -137,7 +137,7 @@ impl Reg {
 }
 
 impl Reg {
-    pub fn align<C: HasDataLayout>(&self, cx: C) -> Align {
+    pub fn align<C: HasDataLayout>(&self, cx: &C) -> Align {
         let dl = cx.data_layout();
         match self.kind {
             RegKind::Integer => {
@@ -188,7 +188,7 @@ fn from(unit: Reg) -> Uniform {
 }
 
 impl Uniform {
-    pub fn align<C: HasDataLayout>(&self, cx: C) -> Align {
+    pub fn align<C: HasDataLayout>(&self, cx: &C) -> Align {
         self.unit.align(cx)
     }
 }
@@ -225,12 +225,12 @@ pub fn pair(a: Reg, b: Reg) -> CastTarget {
         }
     }
 
-    pub fn size<C: HasDataLayout>(&self, cx: C) -> Size {
+    pub fn size<C: HasDataLayout>(&self, cx: &C) -> Size {
         (self.prefix_chunk * self.prefix.iter().filter(|x| x.is_some()).count() as u64)
              .abi_align(self.rest.align(cx)) + self.rest.total
     }
 
-    pub fn align<C: HasDataLayout>(&self, cx: C) -> Align {
+    pub fn align<C: HasDataLayout>(&self, cx: &C) -> Align {
         self.prefix.iter()
             .filter_map(|x| x.map(|kind| Reg { kind, size: self.prefix_chunk }.align(cx)))
             .fold(cx.data_layout().aggregate_align.max(self.rest.align(cx)),
@@ -249,8 +249,8 @@ fn is_aggregate(&self) -> bool {
         }
     }
 
-    fn homogeneous_aggregate<C>(&self, cx: C) -> Option<Reg>
-        where Ty: TyLayoutMethods<'a, C> + Copy, C: LayoutOf<Ty = Ty, TyLayout = Self> + Copy
+    fn homogeneous_aggregate<C>(&self, cx: &C) -> Option<Reg>
+        where Ty: TyLayoutMethods<'a, C> + Copy, C: LayoutOf<Ty = Ty, TyLayout = Self>
     {
         match self.abi {
             Abi::Uninhabited => None,
@@ -483,7 +483,7 @@ pub struct FnType<'a, Ty> {
 }
 
 impl<'a, Ty> FnType<'a, Ty> {
-    pub fn adjust_for_cabi<C>(&mut self, cx: C, abi: ::spec::abi::Abi) -> Result<(), String>
+    pub fn adjust_for_cabi<C>(&mut self, cx: &C, abi: ::spec::abi::Abi) -> Result<(), String>
         where Ty: TyLayoutMethods<'a, C> + Copy,
               C: LayoutOf<Ty = Ty, TyLayout = TyLayout<'a, Ty>> + HasDataLayout + HasTargetSpec
     {
index 3be3034143a3473ef798d53222129cfbf4f29a7b..b9b012020b7f4b6914324e49d69c220f4038c0ed 100644 (file)
@@ -11,7 +11,7 @@
 use abi::call::{ArgType, FnType, Reg, Uniform};
 use abi::{HasDataLayout, LayoutOf, Size, TyLayoutMethods};
 
-fn classify_ret_ty<'a, Ty, C>(cx: C, ret: &mut ArgType<Ty>, offset: &mut Size)
+fn classify_ret_ty<'a, Ty, C>(cx: &C, ret: &mut ArgType<Ty>, offset: &mut Size)
     where Ty: TyLayoutMethods<'a, C>, C: LayoutOf<Ty = Ty> + HasDataLayout
 {
     if !ret.layout.is_aggregate() {
@@ -22,7 +22,7 @@ fn classify_ret_ty<'a, Ty, C>(cx: C, ret: &mut ArgType<Ty>, offset: &mut Size)
     }
 }
 
-fn classify_arg_ty<'a, Ty, C>(cx: C, arg: &mut ArgType<Ty>, offset: &mut Size)
+fn classify_arg_ty<'a, Ty, C>(cx: &C, arg: &mut ArgType<Ty>, offset: &mut Size)
     where Ty: TyLayoutMethods<'a, C>, C: LayoutOf<Ty = Ty> + HasDataLayout
 {
     let dl = cx.data_layout();
@@ -44,7 +44,7 @@ fn classify_arg_ty<'a, Ty, C>(cx: C, arg: &mut ArgType<Ty>, offset: &mut Size)
     *offset = offset.abi_align(align) + size.abi_align(align);
 }
 
-pub fn compute_abi_info<'a, Ty, C>(cx: C, fty: &mut FnType<Ty>)
+pub fn compute_abi_info<'a, Ty, C>(cx: &C, fty: &mut FnType<Ty>)
     where Ty: TyLayoutMethods<'a, C>, C: LayoutOf<Ty = Ty> + HasDataLayout
 {
     let mut offset = Size::ZERO;
index 0c5ec77a3982d418045ca79dca502c1b892020f3..80a4d693dc36bb248f9f17b849ac49eab56272fc 100644 (file)
@@ -22,7 +22,7 @@ enum ABI {
 }
 use self::ABI::*;
 
-fn is_homogeneous_aggregate<'a, Ty, C>(cx: C, arg: &mut ArgType<'a, Ty>, abi: ABI)
+fn is_homogeneous_aggregate<'a, Ty, C>(cx: &C, arg: &mut ArgType<'a, Ty>, abi: ABI)
                                        -> Option<Uniform>
     where Ty: TyLayoutMethods<'a, C> + Copy,
           C: LayoutOf<Ty = Ty, TyLayout = TyLayout<'a, Ty>> + HasDataLayout
@@ -52,7 +52,7 @@ fn is_homogeneous_aggregate<'a, Ty, C>(cx: C, arg: &mut ArgType<'a, Ty>, abi: AB
     })
 }
 
-fn classify_ret_ty<'a, Ty, C>(cx: C, ret: &mut ArgType<'a, Ty>, abi: ABI)
+fn classify_ret_ty<'a, Ty, C>(cx: &C, ret: &mut ArgType<'a, Ty>, abi: ABI)
     where Ty: TyLayoutMethods<'a, C> + Copy,
           C: LayoutOf<Ty = Ty, TyLayout = TyLayout<'a, Ty>> + HasDataLayout
 {
@@ -95,7 +95,7 @@ fn classify_ret_ty<'a, Ty, C>(cx: C, ret: &mut ArgType<'a, Ty>, abi: ABI)
     ret.make_indirect();
 }
 
-fn classify_arg_ty<'a, Ty, C>(cx: C, arg: &mut ArgType<'a, Ty>, abi: ABI)
+fn classify_arg_ty<'a, Ty, C>(cx: &C, arg: &mut ArgType<'a, Ty>, abi: ABI)
     where Ty: TyLayoutMethods<'a, C> + Copy,
           C: LayoutOf<Ty = Ty, TyLayout = TyLayout<'a, Ty>> + HasDataLayout
 {
@@ -134,7 +134,7 @@ fn classify_arg_ty<'a, Ty, C>(cx: C, arg: &mut ArgType<'a, Ty>, abi: ABI)
     });
 }
 
-pub fn compute_abi_info<'a, Ty, C>(cx: C, fty: &mut FnType<'a, Ty>)
+pub fn compute_abi_info<'a, Ty, C>(cx: &C, fty: &mut FnType<'a, Ty>)
     where Ty: TyLayoutMethods<'a, C> + Copy,
           C: LayoutOf<Ty = Ty, TyLayout = TyLayout<'a, Ty>> + HasDataLayout
 {
index 37be6ea41c63a091c1744d9d3486918308c77e3f..d6d8ea719180a20722cfaf405b21fe36ae297bf5 100644 (file)
@@ -24,7 +24,7 @@ fn classify_ret_ty<'a, Ty, C>(ret: &mut ArgType<Ty>)
     }
 }
 
-fn is_single_fp_element<'a, Ty, C>(cx: C, layout: TyLayout<'a, Ty>) -> bool
+fn is_single_fp_element<'a, Ty, C>(cx: &C, layout: TyLayout<'a, Ty>) -> bool
     where Ty: TyLayoutMethods<'a, C>,
           C: LayoutOf<Ty = Ty, TyLayout = TyLayout<'a, Ty>> + HasDataLayout
 {
@@ -41,7 +41,7 @@ fn is_single_fp_element<'a, Ty, C>(cx: C, layout: TyLayout<'a, Ty>) -> bool
     }
 }
 
-fn classify_arg_ty<'a, Ty, C>(cx: C, arg: &mut ArgType<'a, Ty>)
+fn classify_arg_ty<'a, Ty, C>(cx: &C, arg: &mut ArgType<'a, Ty>)
     where Ty: TyLayoutMethods<'a, C> + Copy,
           C: LayoutOf<Ty = Ty, TyLayout = TyLayout<'a, Ty>> + HasDataLayout
 {
@@ -67,7 +67,7 @@ fn classify_arg_ty<'a, Ty, C>(cx: C, arg: &mut ArgType<'a, Ty>)
     }
 }
 
-pub fn compute_abi_info<'a, Ty, C>(cx: C, fty: &mut FnType<'a, Ty>)
+pub fn compute_abi_info<'a, Ty, C>(cx: &C, fty: &mut FnType<'a, Ty>)
     where Ty: TyLayoutMethods<'a, C> + Copy,
           C: LayoutOf<Ty = Ty, TyLayout = TyLayout<'a, Ty>> + HasDataLayout
 {
index 1e8af52e3e8192c3f45054442b71182774e77371..a40cb6c76f0cd3fc67b04faf20f56acedae71c6f 100644 (file)
@@ -11,7 +11,7 @@
 use abi::call::{ArgType, FnType, Reg, Uniform};
 use abi::{HasDataLayout, LayoutOf, Size, TyLayoutMethods};
 
-fn classify_ret_ty<'a, Ty, C>(cx: C, ret: &mut ArgType<Ty>, offset: &mut Size)
+fn classify_ret_ty<'a, Ty, C>(cx: &C, ret: &mut ArgType<Ty>, offset: &mut Size)
     where Ty: TyLayoutMethods<'a, C>, C: LayoutOf<Ty = Ty> + HasDataLayout
 {
     if !ret.layout.is_aggregate() {
@@ -22,7 +22,7 @@ fn classify_ret_ty<'a, Ty, C>(cx: C, ret: &mut ArgType<Ty>, offset: &mut Size)
     }
 }
 
-fn classify_arg_ty<'a, Ty, C>(cx: C, arg: &mut ArgType<Ty>, offset: &mut Size)
+fn classify_arg_ty<'a, Ty, C>(cx: &C, arg: &mut ArgType<Ty>, offset: &mut Size)
     where Ty: TyLayoutMethods<'a, C>, C: LayoutOf<Ty = Ty> + HasDataLayout
 {
     let dl = cx.data_layout();
@@ -44,7 +44,7 @@ fn classify_arg_ty<'a, Ty, C>(cx: C, arg: &mut ArgType<Ty>, offset: &mut Size)
     *offset = offset.abi_align(align) + size.abi_align(align);
 }
 
-pub fn compute_abi_info<'a, Ty, C>(cx: C, fty: &mut FnType<Ty>)
+pub fn compute_abi_info<'a, Ty, C>(cx: &C, fty: &mut FnType<Ty>)
     where Ty: TyLayoutMethods<'a, C>, C: LayoutOf<Ty = Ty> + HasDataLayout
 {
     let mut offset = Size::ZERO;
index a58aebc2ff063f29cde42dbff574888f4f23bc37..a609feb3f57ec8f273b69f7778f2c80e0b23db11 100644 (file)
@@ -13,7 +13,7 @@
 use abi::call::{FnType, ArgType, Reg, RegKind, Uniform};
 use abi::{HasDataLayout, LayoutOf, TyLayout, TyLayoutMethods};
 
-fn is_homogeneous_aggregate<'a, Ty, C>(cx: C, arg: &mut ArgType<'a, Ty>)
+fn is_homogeneous_aggregate<'a, Ty, C>(cx: &C, arg: &mut ArgType<'a, Ty>)
                                      -> Option<Uniform>
     where Ty: TyLayoutMethods<'a, C> + Copy,
           C: LayoutOf<Ty = Ty, TyLayout = TyLayout<'a, Ty>> + HasDataLayout
@@ -41,7 +41,7 @@ fn is_homogeneous_aggregate<'a, Ty, C>(cx: C, arg: &mut ArgType<'a, Ty>)
     })
 }
 
-fn classify_ret_ty<'a, Ty, C>(cx: C, ret: &mut ArgType<'a, Ty>)
+fn classify_ret_ty<'a, Ty, C>(cx: &C, ret: &mut ArgType<'a, Ty>)
     where Ty: TyLayoutMethods<'a, C> + Copy,
           C: LayoutOf<Ty = Ty, TyLayout = TyLayout<'a, Ty>> + HasDataLayout
 {
@@ -69,7 +69,7 @@ fn classify_ret_ty<'a, Ty, C>(cx: C, ret: &mut ArgType<'a, Ty>)
     ret.make_indirect();
 }
 
-fn classify_arg_ty<'a, Ty, C>(cx: C, arg: &mut ArgType<'a, Ty>)
+fn classify_arg_ty<'a, Ty, C>(cx: &C, arg: &mut ArgType<'a, Ty>)
     where Ty: TyLayoutMethods<'a, C> + Copy,
           C: LayoutOf<Ty = Ty, TyLayout = TyLayout<'a, Ty>> + HasDataLayout
 {
@@ -95,7 +95,7 @@ fn classify_arg_ty<'a, Ty, C>(cx: C, arg: &mut ArgType<'a, Ty>)
     });
 }
 
-pub fn compute_abi_info<'a, Ty, C>(cx: C, fty: &mut FnType<'a, Ty>)
+pub fn compute_abi_info<'a, Ty, C>(cx: &C, fty: &mut FnType<'a, Ty>)
     where Ty: TyLayoutMethods<'a, C> + Copy,
           C: LayoutOf<Ty = Ty, TyLayout = TyLayout<'a, Ty>> + HasDataLayout
 {
index 1dcaafcf77fcf77527a45c51674ec5133c8ea52b..9a95e5b192b2135b109a2f446240d440ea5ce2aa 100644 (file)
@@ -18,7 +18,7 @@ pub enum Flavor {
     Fastcall
 }
 
-fn is_single_fp_element<'a, Ty, C>(cx: C, layout: TyLayout<'a, Ty>) -> bool
+fn is_single_fp_element<'a, Ty, C>(cx: &C, layout: TyLayout<'a, Ty>) -> bool
     where Ty: TyLayoutMethods<'a, C> + Copy,
           C: LayoutOf<Ty = Ty, TyLayout = TyLayout<'a, Ty>> + HasDataLayout
 {
@@ -35,7 +35,7 @@ fn is_single_fp_element<'a, Ty, C>(cx: C, layout: TyLayout<'a, Ty>) -> bool
     }
 }
 
-pub fn compute_abi_info<'a, Ty, C>(cx: C, fty: &mut FnType<'a, Ty>, flavor: Flavor)
+pub fn compute_abi_info<'a, Ty, C>(cx: &C, fty: &mut FnType<'a, Ty>, flavor: Flavor)
     where Ty: TyLayoutMethods<'a, C> + Copy,
           C: LayoutOf<Ty = Ty, TyLayout = TyLayout<'a, Ty>> + HasDataLayout + HasTargetSpec
 {
index eade086ec48eca0bfba54a6fecd70f8fab072476..4c9446508939e21082bbacb0f310b77ccecfb03a 100644 (file)
@@ -31,12 +31,12 @@ enum Class {
 const LARGEST_VECTOR_SIZE: usize = 512;
 const MAX_EIGHTBYTES: usize = LARGEST_VECTOR_SIZE / 64;
 
-fn classify_arg<'a, Ty, C>(cx: C, arg: &ArgType<'a, Ty>)
+fn classify_arg<'a, Ty, C>(cx: &C, arg: &ArgType<'a, Ty>)
                           -> Result<[Option<Class>; MAX_EIGHTBYTES], Memory>
     where Ty: TyLayoutMethods<'a, C> + Copy,
           C: LayoutOf<Ty = Ty, TyLayout = TyLayout<'a, Ty>> + HasDataLayout
 {
-    fn classify<'a, Ty, C>(cx: C, layout: TyLayout<'a, Ty>,
+    fn classify<'a, Ty, C>(cx: &C, layout: TyLayout<'a, Ty>,
                           cls: &mut [Option<Class>], off: Size) -> Result<(), Memory>
         where Ty: TyLayoutMethods<'a, C> + Copy,
             C: LayoutOf<Ty = Ty, TyLayout = TyLayout<'a, Ty>> + HasDataLayout
@@ -178,7 +178,7 @@ fn cast_target(cls: &[Option<Class>], size: Size) -> CastTarget {
     target
 }
 
-pub fn compute_abi_info<'a, Ty, C>(cx: C, fty: &mut FnType<'a, Ty>)
+pub fn compute_abi_info<'a, Ty, C>(cx: &C, fty: &mut FnType<'a, Ty>)
     where Ty: TyLayoutMethods<'a, C> + Copy,
           C: LayoutOf<Ty = Ty, TyLayout = TyLayout<'a, Ty>> + HasDataLayout
 {
index 1a5d2801af0c5d1b2a6c7be416c99d0368c43494..c7d0469e556a75cf15127c71c153ff21d414d94a 100644 (file)
@@ -203,11 +203,11 @@ pub fn vector_align(&self, vec_size: Size) -> Align {
     }
 }
 
-pub trait HasDataLayout: Copy {
+pub trait HasDataLayout {
     fn data_layout(&self) -> &TargetDataLayout;
 }
 
-impl<'a> HasDataLayout for &'a TargetDataLayout {
+impl HasDataLayout for TargetDataLayout {
     fn data_layout(&self) -> &TargetDataLayout {
         self
     }
@@ -267,7 +267,7 @@ pub fn is_abi_aligned(self, align: Align) -> bool {
     }
 
     #[inline]
-    pub fn checked_add<C: HasDataLayout>(self, offset: Size, cx: C) -> Option<Size> {
+    pub fn checked_add<C: HasDataLayout>(self, offset: Size, cx: &C) -> Option<Size> {
         let dl = cx.data_layout();
 
         let bytes = self.bytes().checked_add(offset.bytes())?;
@@ -280,7 +280,7 @@ pub fn checked_add<C: HasDataLayout>(self, offset: Size, cx: C) -> Option<Size>
     }
 
     #[inline]
-    pub fn checked_mul<C: HasDataLayout>(self, count: u64, cx: C) -> Option<Size> {
+    pub fn checked_mul<C: HasDataLayout>(self, count: u64, cx: &C) -> Option<Size> {
         let dl = cx.data_layout();
 
         let bytes = self.bytes().checked_mul(count)?;
@@ -457,7 +457,7 @@ pub fn size(self) -> Size {
         }
     }
 
-    pub fn align<C: HasDataLayout>(self, cx: C) -> Align {
+    pub fn align<C: HasDataLayout>(self, cx: &C) -> Align {
         let dl = cx.data_layout();
 
         match self {
@@ -492,7 +492,7 @@ pub fn fit_unsigned(x: u128) -> Integer {
     }
 
     /// Find the smallest integer with the given alignment.
-    pub fn for_abi_align<C: HasDataLayout>(cx: C, align: Align) -> Option<Integer> {
+    pub fn for_abi_align<C: HasDataLayout>(cx: &C, align: Align) -> Option<Integer> {
         let dl = cx.data_layout();
 
         let wanted = align.abi();
@@ -505,7 +505,7 @@ pub fn for_abi_align<C: HasDataLayout>(cx: C, align: Align) -> Option<Integer> {
     }
 
     /// Find the largest integer with the given alignment or less.
-    pub fn approximate_abi_align<C: HasDataLayout>(cx: C, align: Align) -> Integer {
+    pub fn approximate_abi_align<C: HasDataLayout>(cx: &C, align: Align) -> Integer {
         let dl = cx.data_layout();
 
         let wanted = align.abi();
@@ -571,7 +571,7 @@ pub enum Primitive {
 }
 
 impl<'a, 'tcx> Primitive {
-    pub fn size<C: HasDataLayout>(self, cx: C) -> Size {
+    pub fn size<C: HasDataLayout>(self, cx: &C) -> Size {
         let dl = cx.data_layout();
 
         match self {
@@ -582,7 +582,7 @@ pub fn size<C: HasDataLayout>(self, cx: C) -> Size {
         }
     }
 
-    pub fn align<C: HasDataLayout>(self, cx: C) -> Align {
+    pub fn align<C: HasDataLayout>(self, cx: &C) -> Align {
         let dl = cx.data_layout();
 
         match self {
@@ -642,7 +642,7 @@ pub fn is_bool(&self) -> bool {
     /// Returns the valid range as a `x..y` range.
     ///
     /// If `x` and `y` are equal, the range is full, not empty.
-    pub fn valid_range_exclusive<C: HasDataLayout>(&self, cx: C) -> Range<u128> {
+    pub fn valid_range_exclusive<C: HasDataLayout>(&self, cx: &C) -> Range<u128> {
         // For a (max) value of -1, max will be `-1 as usize`, which overflows.
         // However, that is fine here (it would still represent the full range),
         // i.e., if the range is everything.
@@ -854,7 +854,7 @@ pub struct LayoutDetails {
 }
 
 impl LayoutDetails {
-    pub fn scalar<C: HasDataLayout>(cx: C, scalar: Scalar) -> Self {
+    pub fn scalar<C: HasDataLayout>(cx: &C, scalar: Scalar) -> Self {
         let size = scalar.value.size(cx);
         let align = scalar.value.align(cx);
         LayoutDetails {
@@ -891,20 +891,20 @@ pub trait LayoutOf {
     type Ty;
     type TyLayout;
 
-    fn layout_of(self, ty: Self::Ty) -> Self::TyLayout;
+    fn layout_of(&self, ty: Self::Ty) -> Self::TyLayout;
 }
 
 pub trait TyLayoutMethods<'a, C: LayoutOf<Ty = Self>>: Sized {
-    fn for_variant(this: TyLayout<'a, Self>, cx: C, variant_index: usize) -> TyLayout<'a, Self>;
-    fn field(this: TyLayout<'a, Self>, cx: C, i: usize) -> C::TyLayout;
+    fn for_variant(this: TyLayout<'a, Self>, cx: &C, variant_index: usize) -> TyLayout<'a, Self>;
+    fn field(this: TyLayout<'a, Self>, cx: &C, i: usize) -> C::TyLayout;
 }
 
 impl<'a, Ty> TyLayout<'a, Ty> {
-    pub fn for_variant<C>(self, cx: C, variant_index: usize) -> Self
+    pub fn for_variant<C>(self, cx: &C, variant_index: usize) -> Self
     where Ty: TyLayoutMethods<'a, C>, C: LayoutOf<Ty = Ty> {
         Ty::for_variant(self, cx, variant_index)
     }
-    pub fn field<C>(self, cx: C, i: usize) -> C::TyLayout
+    pub fn field<C>(self, cx: &C, i: usize) -> C::TyLayout
     where Ty: TyLayoutMethods<'a, C>, C: LayoutOf<Ty = Ty> {
         Ty::field(self, cx, i)
     }
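
The cx: C to cx: &C changes threaded through every ABI module above bottom out in this file: layout_of and data_layout now take &self, the blanket impl moves from &'a TargetDataLayout to TargetDataLayout itself, and HasDataLayout loses its Copy supertrait, so layout contexts can be borrowed instead of copied through the call graph. A minimal sketch of the new shape, using a made-up context type rather than the real compiler plumbing:

struct TargetDataLayout {
    pointer_size_bytes: u64,
}

trait HasDataLayout {
    fn data_layout(&self) -> &TargetDataLayout;
}

// The context type no longer needs to be Copy, and the impl can live on the
// owning type instead of on `&'a Ctx`.
struct Ctx {
    dl: TargetDataLayout,
}

impl HasDataLayout for Ctx {
    fn data_layout(&self) -> &TargetDataLayout {
        &self.dl
    }
}

// Helpers now borrow the context (`cx: &C`) instead of taking it by value.
fn pointer_size<C: HasDataLayout>(cx: &C) -> u64 {
    cx.data_layout().pointer_size_bytes
}

fn main() {
    let cx = Ctx { dl: TargetDataLayout { pointer_size_bytes: 8 } };
    assert_eq!(pointer_size(&cx), 8);
    // Still usable afterwards: it was only borrowed, never moved or copied.
    assert_eq!(cx.data_layout().pointer_size_bytes, 8);
}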
index 10ba27e38f47c7dc0e1662310dcb27648b678e51..e60c9922d467a5193a43ab78977fb40d41cddf57 100644 (file)
@@ -22,7 +22,6 @@
       html_root_url = "https://doc.rust-lang.org/nightly/")]
 
 #![feature(box_syntax)]
-#![cfg_attr(stage0, feature(min_const_fn))]
 #![feature(nll)]
 #![feature(slice_patterns)]
 
index 8f7ee11d575eee8412d08292704091a715149ad2..9ef4fe3b3af4b4d380f20f9d6ea3fe8d5c2b701a 100644 (file)
@@ -8,7 +8,7 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
-use spec::{LinkerFlavor, Target, TargetOptions, TargetResult};
+use spec::{LldFlavor, LinkerFlavor, Target, TargetOptions, TargetResult};
 
 pub fn target() -> TargetResult {
     let mut base = super::fuchsia_base::opts();
@@ -24,7 +24,7 @@ pub fn target() -> TargetResult {
         target_os: "fuchsia".to_string(),
         target_env: String::new(),
         target_vendor: String::new(),
-        linker_flavor: LinkerFlavor::Gcc,
+        linker_flavor: LinkerFlavor::Lld(LldFlavor::Ld),
         options: TargetOptions {
             abi_blacklist: super::arm_base::abi_blacklist(),
             .. base
index 38b3f2528fe8694bb922d8657760173a935444e7..8774c15ff01211b1dc6f30b95fcaa23f7fef057f 100644 (file)
@@ -44,7 +44,6 @@ pub fn opts() -> TargetOptions {
         dll_suffix: ".dylib".to_string(),
         archive_format: "bsd".to_string(),
         pre_link_args: LinkArgs::new(),
-        exe_allocation_crate: super::maybe_jemalloc(),
         has_elf_tls: version >= (10, 7),
         abi_return_struct_as_int: true,
         emit_debug_gdb_scripts: false,
index 296eaca7c7df013dac7c7cd23cf7cc6a74a2484a..e926e4913d634801a0b1fac235f461fb01ddc0fb 100644 (file)
@@ -99,10 +99,6 @@ pub fn opts(arch: Arch) -> Result<TargetOptions, String> {
         pre_link_args,
         has_elf_tls: false,
         eliminate_frame_pointer: false,
-        // The following line is a workaround for jemalloc 4.5 being broken on
-        // ios. jemalloc 5.0 is supposed to fix this.
-        // see https://github.com/rust-lang/rust/issues/45262
-        exe_allocation_crate: None,
         .. super::apple_base::opts()
     })
 }
index c5e3385a91ca1bd70853c4fd1ba4414e78c7f4d4..055bca6c34848621d11685de781c6c8db554b216 100644 (file)
@@ -14,7 +14,7 @@ pub fn target() -> TargetResult {
     let mut base = super::android_base::opts();
     // https://developer.android.com/ndk/guides/abis.html#armeabi
     base.features = "+strict-align,+v5te".to_string();
-    base.max_atomic_width = Some(64);
+    base.max_atomic_width = Some(32);
 
     Ok(Target {
         llvm_target: "arm-linux-androideabi".to_string(),
index 2ffa74e737fd5244e664dfa9c3b98b2bce67c511..fb78cf495e22ac2caf3db616db59cbfc531c0e07 100644 (file)
@@ -38,7 +38,6 @@ pub fn opts() -> TargetOptions {
         // dynamic linking.
         tls_model: "local-exec".to_string(),
         relro_level: RelroLevel::Full,
-        exe_allocation_crate: super::maybe_jemalloc(),
         .. Default::default()
     }
 }
index 32eac8663afac78f84b975b55884a3705368cfca..a9e317b7cb8a74d5f8ab907b8c8c2bcd9ab552fd 100644 (file)
@@ -33,7 +33,6 @@ pub fn opts() -> TargetOptions {
         pre_link_args: args,
         position_independent_executables: true,
         relro_level: RelroLevel::Full,
-        exe_allocation_crate: super::maybe_jemalloc(),
         .. Default::default()
     }
 }
index 04b8a6e706064f1a570364528e076e686a74dc46..c8a2946da50a50f7582f12faac6fd296862c303e 100644 (file)
@@ -34,7 +34,6 @@ pub fn opts() -> TargetOptions {
         position_independent_executables: true,
         eliminate_frame_pointer: false, // FIXME 43575
         relro_level: RelroLevel::Full,
-        exe_allocation_crate: super::maybe_jemalloc(),
         abi_return_struct_as_int: true,
         .. Default::default()
     }
index b593b83532614b4801a0f9ecc8755aafb36d0588..8c20755492e31569f67b086d12e69839e2949fbb 100644 (file)
@@ -8,27 +8,19 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
-use spec::{LinkArgs, LinkerFlavor, TargetOptions};
+use spec::{LldFlavor, LinkArgs, LinkerFlavor, TargetOptions};
 use std::default::Default;
 
 pub fn opts() -> TargetOptions {
     let mut args = LinkArgs::new();
-    args.insert(LinkerFlavor::Gcc, vec![
-        // We want to be able to strip as much executable code as possible
-        // from the linker command line, and this flag indicates to the
-        // linker that it can avoid linking in dynamic libraries that don't
-        // actually satisfy any symbols up to that point (as with many other
-        // resolutions the linker does). This option only applies to all
-        // following libraries so we're sure to pass it as one of the first
-        // arguments.
-        // FIXME: figure out whether these linker args are desirable
-        //"-Wl,--as-needed".to_string(),
-
-        // Always enable NX protection when it is available
-        //"-Wl,-z,noexecstack".to_string(),
+    args.insert(LinkerFlavor::Lld(LldFlavor::Ld), vec![
+        "--build-id".to_string(), "--hash-style=gnu".to_string(),
+        "-z".to_string(), "rodynamic".to_string(),
     ]);
 
     TargetOptions {
+        linker: Some("rust-lld".to_owned()),
+        lld_flavor: LldFlavor::Ld,
         dynamic_linking: true,
         executables: true,
         target_family: Some("unix".to_string()),
index 4a9cd9e2f3233614e91537026f261ec275234c93..01f65d573632242ea0cbf2eab54343716efa00cb 100644 (file)
@@ -36,7 +36,6 @@ pub fn opts() -> TargetOptions {
         pre_link_args: args,
         position_independent_executables: true,
         relro_level: RelroLevel::Full,
-        exe_allocation_crate: super::maybe_jemalloc(),
         has_elf_tls: true,
         .. Default::default()
     }
index d43d45f64a55c8fc57b46218d79cfa9abbe2a275..c8af81c02ea4a48b5a8a72cd7e1307272532fd63 100644 (file)
@@ -444,11 +444,11 @@ pub struct Target {
     pub options: TargetOptions,
 }
 
-pub trait HasTargetSpec: Copy {
+pub trait HasTargetSpec {
     fn target_spec(&self) -> &Target;
 }
 
-impl<'a> HasTargetSpec for &'a Target {
+impl HasTargetSpec for Target {
     fn target_spec(&self) -> &Target {
         self
     }
@@ -1270,14 +1270,6 @@ macro_rules! target_option_val {
     }
 }
 
-fn maybe_jemalloc() -> Option<String> {
-    if cfg!(feature = "jemalloc") {
-        Some("alloc_jemalloc".to_string())
-    } else {
-        None
-    }
-}
-
 /// Either a target triple string or a path to a JSON file.
 #[derive(PartialEq, Clone, Debug, Hash, RustcEncodable, RustcDecodable)]
 pub enum TargetTriple {
index c14cc3f5bc3befd3420d50edcf9d531807271e97..93b889d5d399ee12032fd31a1343b4ba40ad85c7 100644 (file)
@@ -18,7 +18,6 @@ pub fn opts() -> TargetOptions {
         has_rpath: true,
         target_family: Some("unix".to_string()),
         is_like_solaris: true,
-        exe_allocation_crate: super::maybe_jemalloc(),
 
         .. Default::default()
     }
index 62148a740dff56833b1495b1f823d7652c429d96..08df78d0db05d906e231941176b10e3ed6248da6 100644 (file)
@@ -8,13 +8,12 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
-use spec::{LinkerFlavor, Target, TargetResult};
+use spec::{LldFlavor, LinkerFlavor, Target, TargetResult};
 
 pub fn target() -> TargetResult {
     let mut base = super::fuchsia_base::opts();
     base.cpu = "x86-64".to_string();
     base.max_atomic_width = Some(64);
-    base.pre_link_args.get_mut(&LinkerFlavor::Gcc).unwrap().push("-m64".to_string());
     base.stack_probes = true;
 
     Ok(Target {
@@ -27,7 +26,7 @@ pub fn target() -> TargetResult {
         target_os: "fuchsia".to_string(),
         target_env: String::new(),
         target_vendor: String::new(),
-        linker_flavor: LinkerFlavor::Gcc,
+        linker_flavor: LinkerFlavor::Lld(LldFlavor::Ld),
         options: base,
     })
 }
index 5d6badf120286c42d5be864cae36f6efb33cc71c..bf252053199f8df143035e1b6bfa4c8dcfd4fe0d 100644 (file)
@@ -506,6 +506,7 @@ fn assemble_clauses_from_assoc_ty_values<'tcx>(
                     ty::GeneratorWitness(..) |
                     ty::UnnormalizedProjection(..) |
                     ty::Infer(..) |
+                    ty::Bound(..) |
                     ty::Error => {
                         bug!("unexpected type {:?}", ty)
                     }
index 2ad7ab7c4d92753bb4c073f61933743cf3370980..af64522f18398a4a646f88d460e8489263380f41 100644 (file)
@@ -274,7 +274,7 @@ fn dtorck_constraint_for_ty<'a, 'gcx, 'tcx>(
 
         ty::UnnormalizedProjection(..) => bug!("only used with chalk-engine"),
 
-        ty::Infer(..) | ty::Error => {
+        ty::Bound(..) | ty::Infer(..) | ty::Error => {
             // By the time this code runs, all type variables ought to
             // be fully resolved.
             Err(NoSolution)
index cde08f6832ace274442b89c18e51996c782ed491..7cc064f9c3d3d81906ad96caaa92a9e4c5e79a23 100644 (file)
@@ -108,7 +108,7 @@ fn compute_implied_outlives_bounds<'tcx>(
         // From the full set of obligations, just filter down to the
         // region relationships.
         implied_bounds.extend(obligations.into_iter().flat_map(|obligation| {
-            assert!(!obligation.has_escaping_regions());
+            assert!(!obligation.has_escaping_bound_vars());
             match obligation.predicate {
                 ty::Predicate::Trait(..) |
                 ty::Predicate::Subtype(..) |
@@ -122,14 +122,14 @@ fn compute_implied_outlives_bounds<'tcx>(
                     vec![]
                 }
 
-                ty::Predicate::RegionOutlives(ref data) => match data.no_late_bound_regions() {
+                ty::Predicate::RegionOutlives(ref data) => match data.no_bound_vars() {
                     None => vec![],
                     Some(ty::OutlivesPredicate(r_a, r_b)) => {
                         vec![OutlivesBound::RegionSubRegion(r_b, r_a)]
                     }
                 },
 
-                ty::Predicate::TypeOutlives(ref data) => match data.no_late_bound_regions() {
+                ty::Predicate::TypeOutlives(ref data) => match data.no_bound_vars() {
                     None => vec![],
                     Some(ty::OutlivesPredicate(ty_a, r_b)) => {
                         let ty_a = infcx.resolve_type_vars_if_possible(&ty_a);
index c71898f73ecad69267aa9f0b395ba479d47f411a..052ca37b313717fa7123762068df759a37a3558c 100644 (file)
@@ -93,6 +93,7 @@ fn visit_ty(&mut self, ty: Ty<'tcx>) {
             ty::GeneratorWitness(..) |
             ty::UnnormalizedProjection(..) |
             ty::Infer(..) |
+            ty::Bound(..) |
             ty::Error => {
                 bug!("unexpected type {:?}", ty);
             }
index 7ddc56974d816157abbf594c8c15c24f7d52765e..18f8473b5b56d7d88fb600878aaa16ff8fbbe0b3 100644 (file)
@@ -8,7 +8,7 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
-//! Conversion from AST representation of types to the ty.rs
+//! Conversion from AST representation of types to the `ty.rs`
 //! representation.  The main routine here is `ast_ty_to_ty()`: each use
 //! is parameterized by an instance of `AstConv`.
 
@@ -181,7 +181,6 @@ pub fn ast_path_substs_for_ty(&self,
         item_segment: &hir::PathSegment)
         -> &'tcx Substs<'tcx>
     {
-
         let (substs, assoc_bindings) = item_segment.with_generic_args(|generic_args| {
             self.create_substs_for_ast_path(
                 span,
@@ -545,7 +544,7 @@ pub fn create_substs_for_generic_args<'a, 'b>(
     }
 
     /// Given the type/region arguments provided to some path (along with
-    /// an implicit Self, if this is a trait reference) returns the complete
+    /// an implicit `Self`, if this is a trait reference) returns the complete
     /// set of substitutions. This may involve applying defaulted type parameters.
     ///
     /// Note that the type listing given here is *exactly* what the user provided.
@@ -722,7 +721,7 @@ pub(super) fn instantiate_poly_trait_ref_inner(&self,
     {
         let trait_def_id = self.trait_def_id(trait_ref);
 
-        debug!("ast_path_to_poly_trait_ref({:?}, def_id={:?})", trait_ref, trait_def_id);
+        debug!("instantiate_poly_trait_ref({:?}, def_id={:?})", trait_ref, trait_def_id);
 
         self.prohibit_generics(trait_ref.path.segments.split_last().unwrap().1);
 
@@ -739,11 +738,11 @@ pub(super) fn instantiate_poly_trait_ref_inner(&self,
             let predicate: Result<_, ErrorReported> =
                 self.ast_type_binding_to_poly_projection_predicate(
                     trait_ref.ref_id, poly_trait_ref, binding, speculative, &mut dup_bindings);
-            // ok to ignore Err() because ErrorReported (see above)
+            // ok to ignore Err because ErrorReported (see above)
             Some((predicate.ok()?, binding.span))
         }));
 
-        debug!("ast_path_to_poly_trait_ref({:?}, projections={:?}) -> {:?}",
+        debug!("instantiate_poly_trait_ref({:?}, projections={:?}) -> {:?}",
                trait_ref, poly_projections, poly_trait_ref);
         poly_trait_ref
     }
@@ -948,8 +947,8 @@ fn ast_path_to_ty(&self,
         )
     }
 
-    /// Transform a PolyTraitRef into a PolyExistentialTraitRef by
-    /// removing the dummy Self type (TRAIT_OBJECT_DUMMY_SELF).
+    /// Transform a `PolyTraitRef` into a `PolyExistentialTraitRef` by
+    /// removing the dummy `Self` type (`TRAIT_OBJECT_DUMMY_SELF`).
     fn trait_ref_to_existential(&self, trait_ref: ty::TraitRef<'tcx>)
                                 -> ty::ExistentialTraitRef<'tcx> {
         assert_eq!(trait_ref.self_ty().sty, TRAIT_OBJECT_DUMMY_SELF);
@@ -975,9 +974,10 @@ fn conv_object_ty_poly_trait_ref(&self,
         let principal = self.instantiate_poly_trait_ref(&trait_bounds[0],
                                                         dummy_self,
                                                         &mut projection_bounds);
+        debug!("principal: {:?}", principal);
 
         for trait_bound in trait_bounds[1..].iter() {
-            // Sanity check for non-principal trait bounds
+            // sanity check for non-principal trait bounds
             self.instantiate_poly_trait_ref(trait_bound,
                                             dummy_self,
                                             &mut vec![]);
@@ -1009,11 +1009,11 @@ fn conv_object_ty_poly_trait_ref(&self,
             })
         });
 
-        // check that there are no gross object safety violations,
+        // Check that there are no gross object safety violations;
         // most importantly, that the supertraits don't contain Self,
-        // to avoid ICE-s.
+        // to avoid ICEs.
         let object_safety_violations =
-            tcx.astconv_object_safety_violations(principal.def_id());
+            tcx.global_tcx().astconv_object_safety_violations(principal.def_id());
         if !object_safety_violations.is_empty() {
             tcx.report_object_safety_error(
                 span, principal.def_id(), object_safety_violations)
@@ -1021,7 +1021,7 @@ fn conv_object_ty_poly_trait_ref(&self,
             return tcx.types.err;
         }
 
-        // use a btreeset to keep output in a more consistent order
+        // Use a BTreeSet to keep output in a more consistent order.
         let mut associated_types = BTreeSet::default();
 
         for tr in traits::supertraits(tcx, principal) {
@@ -1060,7 +1060,7 @@ fn conv_object_ty_poly_trait_ref(&self,
         v.sort_by(|a, b| a.stable_cmp(tcx, b));
         let existential_predicates = ty::Binder::bind(tcx.mk_existential_predicates(v.into_iter()));
 
-        // Explicitly specified region bound. Use that.
+        // Use explicitly-specified region bound.
         let region_bound = if !lifetime.is_elided() {
             self.ast_region_to_region(lifetime, None)
         } else {
@@ -1347,7 +1347,7 @@ pub fn prohibit_assoc_ty_binding(tcx: TyCtxt, span: Span) {
         err.span_label(span, "associated type not allowed here").emit();
     }
 
-    // Check a type Path and convert it to a Ty.
+    // Check a type `Path` and convert it to a `Ty`.
     pub fn def_to_ty(&self,
                      opt_self_ty: Option<Ty<'tcx>>,
                      path: &hir::Path,
@@ -1442,8 +1442,8 @@ pub fn def_to_ty(&self,
     /// Parses the programmer's textual representation of a type into our
     /// internal notion of a type.
     pub fn ast_ty_to_ty(&self, ast_ty: &hir::Ty) -> Ty<'tcx> {
-        debug!("ast_ty_to_ty(id={:?}, ast_ty={:?})",
-               ast_ty.id, ast_ty);
+        debug!("ast_ty_to_ty(id={:?}, ast_ty={:?} ty_ty={:?})",
+               ast_ty.id, ast_ty, ast_ty.node);
 
         let tcx = self.tcx();
 
@@ -1748,7 +1748,7 @@ pub fn predicates(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>, param_ty: Ty<'tcx>)
             self.region_bounds.iter().map(|&(region_bound, span)| {
                 // account for the binder being introduced below; no need to shift `param_ty`
                 // because, at present at least, it can only refer to early-bound regions
-                let region_bound = tcx.mk_region(ty::fold::shift_region(*region_bound, 1));
+                let region_bound = ty::fold::shift_region(tcx, region_bound, 1);
                 let outlives = ty::OutlivesPredicate(param_ty, region_bound);
                 (ty::Binder::dummy(outlives).to_predicate(), span)
             }).chain(
index 3204ef556f5ddc3b1beac7f5e897407bf3ded8e7..40f2072079a5a1ba203536b642b3f3d14ea8c53c 100644 (file)
@@ -816,7 +816,7 @@ fn check_pat_tuple_struct(&self,
         }
         // Replace constructor type with constructed type for tuple struct patterns.
         let pat_ty = pat_ty.fn_sig(tcx).output();
-        let pat_ty = pat_ty.no_late_bound_regions().expect("expected fn type");
+        let pat_ty = pat_ty.no_bound_vars().expect("expected fn type");
 
         self.demand_eqtype(pat.span, expected, pat_ty);
 
index e0ee26cba082842aab75d16aa133f5b5f0a903d3..3f0a3531244424c68a7e2b99e4adcb9e2f0ae564 100644 (file)
@@ -128,7 +128,7 @@ fn pointer_kind(&self, t: Ty<'tcx>, span: Span) ->
             ty::Opaque(def_id, substs) => Some(PointerKind::OfOpaque(def_id, substs)),
             ty::Param(ref p) => Some(PointerKind::OfParam(p)),
             // Insufficient type information.
-            ty::Infer(_) => None,
+            ty::Bound(..) | ty::Infer(_) => None,
 
             ty::Bool | ty::Char | ty::Int(..) | ty::Uint(..) |
             ty::Float(_) | ty::Array(..) | ty::GeneratorWitness(..) |
index 3f4d187813d5d087185bf8a51a5cfa733bc99297..010561d1001e5ad61cc4cf0af5da187681e0a8a6 100644 (file)
 use super::{check_fn, Expectation, FnCtxt, GeneratorTypes};
 
 use astconv::AstConv;
+use middle::region;
 use rustc::hir::def_id::DefId;
 use rustc::infer::{InferOk, InferResult};
 use rustc::infer::LateBoundRegionConversionTime;
 use rustc::infer::type_variable::TypeVariableOrigin;
+use rustc::traits::Obligation;
 use rustc::traits::error_reporting::ArgKind;
 use rustc::ty::{self, ToPolyTraitRef, Ty, GenericParamDefKind};
 use rustc::ty::fold::TypeFoldable;
@@ -458,7 +460,7 @@ fn sig_of_closure_with_expectation(
         // Create a `PolyFnSig`. Note the oddity that late bound
         // regions appearing free in `expected_sig` are now bound up
         // in this binder we are creating.
-        assert!(!expected_sig.sig.has_regions_bound_above(ty::INNERMOST));
+        assert!(!expected_sig.sig.has_vars_bound_above(ty::INNERMOST));
         let bound_sig = ty::Binder::bind(self.tcx.mk_fn_sig(
             expected_sig.sig.inputs().iter().cloned(),
             expected_sig.sig.output(),
@@ -479,7 +481,7 @@ fn sig_of_closure_with_expectation(
         // Along the way, it also writes out entries for types that the user
         // wrote into our tables, which are then later used by the privacy
         // check.
-        match self.check_supplied_sig_against_expectation(expr_def_id, decl, &closure_sigs) {
+        match self.check_supplied_sig_against_expectation(expr_def_id, decl, body, &closure_sigs) {
             Ok(infer_ok) => self.register_infer_ok_obligations(infer_ok),
             Err(_) => return self.sig_of_closure_no_expectation(expr_def_id, decl, body),
         }
@@ -523,6 +525,7 @@ fn check_supplied_sig_against_expectation(
         &self,
         expr_def_id: DefId,
         decl: &hir::FnDecl,
+        body: &hir::Body,
         expected_sigs: &ClosureSignatures<'tcx>,
     ) -> InferResult<'tcx, ()> {
         // Get the signature S that the user gave.
@@ -575,6 +578,31 @@ fn check_supplied_sig_against_expectation(
                 } = self.at(cause, self.param_env)
                     .eq(*expected_ty, supplied_ty)?;
                 all_obligations.extend(obligations);
+
+                // Also, require that the supplied type must outlive
+                // the closure body.
+                let closure_body_region = self.tcx.mk_region(
+                    ty::ReScope(
+                        region::Scope {
+                            id: body.value.hir_id.local_id,
+                            data: region::ScopeData::Node,
+                        },
+                    ),
+                );
+                all_obligations.push(
+                    Obligation::new(
+                        cause.clone(),
+                        self.param_env,
+                        ty::Predicate::TypeOutlives(
+                            ty::Binder::dummy(
+                                ty::OutlivesPredicate(
+                                    supplied_ty,
+                                    closure_body_region,
+                                ),
+                            ),
+                        ),
+                    ),
+                );
             }
 
             let (supplied_output_ty, _) = self.infcx.replace_late_bound_regions_with_fresh_var(
index 7773e2d570844b892d2157b0941d05580c7e7e26..0a196834cb49442635af0176299d45aa6f90dd80 100644 (file)
@@ -132,7 +132,7 @@ pub fn demand_coerce_diag(&self,
                 if compatible_variants.peek().is_some() {
                     let expr_text = print::to_string(print::NO_ANN, |s| s.print_expr(expr));
                     let suggestions = compatible_variants
-                        .map(|v| format!("{}({})", v, expr_text)).collect::<Vec<_>>();
+                        .map(|v| format!("{}({})", v, expr_text));
                     err.span_suggestions_with_applicability(
                         expr.span,
                         "try using a variant of the expected type",
index da96d4f0cba42ce97df75969fcd74e060dd7d655..3156458b4aa4abe0690057c9cbfccfadb98e72a0 100644 (file)
@@ -419,7 +419,7 @@ pub fn check_platform_intrinsic_type<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
                     let mut structural_to_nomimal = FxHashMap::default();
 
                     let sig = tcx.fn_sig(def_id);
-                    let sig = sig.no_late_bound_regions().unwrap();
+                    let sig = sig.no_bound_vars().unwrap();
                     if intr.inputs.len() != sig.inputs().len() {
                         span_err!(tcx.sess, it.span, E0444,
                                   "platform-specific intrinsic has invalid number of \
index 04c32fa88271ad24c7a4c45bafeac4c63c222951..637f3eaae9a6ac91e0f3e5874cd420354699f697 100644 (file)
@@ -289,8 +289,14 @@ pub fn lookup_method_in_trait(&self,
         // Trait must have a method named `m_name` and it should not have
         // type parameters or early-bound regions.
         let tcx = self.tcx;
-        let method_item =
-            self.associated_item(trait_def_id, m_name, Namespace::Value).unwrap();
+        let method_item = match self.associated_item(trait_def_id, m_name, Namespace::Value) {
+            Some(method_item) => method_item,
+            None => {
+                tcx.sess.delay_span_bug(span,
+                    "operator trait does not have corresponding operator method");
+                return None;
+            }
+        };
         let def_id = method_item.def_id;
         let generics = tcx.generics_of(def_id);
         assert_eq!(generics.params.len(), 0);
@@ -331,7 +337,7 @@ pub fn lookup_method_in_trait(&self,
                 value
             }
         };
-        assert!(!bounds.has_escaping_regions());
+        assert!(!bounds.has_escaping_bound_vars());
 
         let cause = traits::ObligationCause::misc(span, self.body_id);
         obligations.extend(traits::predicates_for_generics(cause.clone(),
index 5a8a9632350f2621959bd8d811a3a36b98574c24..305efd0d75af6ed89b0fc26cbb2cac3d10733c48 100644 (file)
@@ -1374,7 +1374,7 @@ fn xform_method_sig(&self,
                fn_sig,
                substs);
 
-        assert!(!substs.has_escaping_regions());
+        assert!(!substs.has_escaping_bound_vars());
 
         // It is possible for type parameters or early-bound lifetimes
         // to appear in the signature of `self`. The substitutions we
index 5a63a2971e49deb7f9f159a1d92cd0075b19b90c..cd243d414439447d978d9559591b346a4c499512 100644 (file)
@@ -521,7 +521,7 @@ fn suggest_use_candidates(&self,
                     with_crate_prefix(|| self.tcx.item_path_str(*did)),
                     additional_newline
                 )
-            }).collect();
+            });
 
             err.span_suggestions_with_applicability(
                                                     span,
index 4851938653b53d33eeebd38b78dd083c3a76234d..7dfdb926c60ef63a0b9f8fe8d71fb0bfe451e660 100644 (file)
@@ -653,8 +653,8 @@ fn new(infcx: InferCtxt<'a, 'gcx, 'tcx>, def_id: DefId) -> Self {
 
     fn register_predicate(&self, obligation: traits::PredicateObligation<'tcx>) {
         debug!("register_predicate({:?})", obligation);
-        if obligation.has_escaping_regions() {
-            span_bug!(obligation.cause.span, "escaping regions in predicate {:?}",
+        if obligation.has_escaping_bound_vars() {
+            span_bug!(obligation.cause.span, "escaping bound vars in predicate {:?}",
                       obligation);
         }
         self.fulfillment_cx
@@ -976,7 +976,7 @@ fn visit_local(&mut self, local: &'gcx hir::Local) {
                     o_ty
                 };
 
-                let c_ty = self.fcx.inh.infcx.canonicalize_response(&revealed_ty);
+                let c_ty = self.fcx.inh.infcx.canonicalize_user_type_annotation(&revealed_ty);
                 debug!("visit_local: ty.hir_id={:?} o_ty={:?} revealed_ty={:?} c_ty={:?}",
                        ty.hir_id, o_ty, revealed_ty, c_ty);
                 self.fcx.tables.borrow_mut().user_provided_tys_mut().insert(ty.hir_id, c_ty);
@@ -1167,7 +1167,7 @@ fn check_fn<'a, 'gcx, 'tcx>(inherited: &'a Inherited<'a, 'gcx, 'tcx>,
         }
     }
 
-    // Check that a function marked as `#[panic_implementation]` has signature `fn(&PanicInfo) -> !`
+    // Check that a function marked as `#[panic_handler]` has signature `fn(&PanicInfo) -> !`
     if let Some(panic_impl_did) = fcx.tcx.lang_items().panic_impl() {
         if panic_impl_did == fcx.tcx.hir.local_def_id(fn_id) {
             if let Some(panic_info_did) = fcx.tcx.lang_items().panic_info() {
@@ -1928,7 +1928,7 @@ fn projected_ty_from_poly_trait_ref(&self,
     }
 
     fn normalize_ty(&self, span: Span, ty: Ty<'tcx>) -> Ty<'tcx> {
-        if ty.has_escaping_regions() {
+        if ty.has_escaping_bound_vars() {
             ty // FIXME: normalization and escaping regions
         } else {
             self.normalize_associated_types_in(span, &ty)
@@ -2137,7 +2137,7 @@ pub fn write_method_call(&self,
                             method.substs[i]
                         }
                     });
-                    self.infcx.canonicalize_response(&UserSubsts {
+                    self.infcx.canonicalize_user_type_annotation(&UserSubsts {
                         substs: just_method_substs,
                         user_self_ty: None, // not relevant here
                     })
@@ -2181,7 +2181,7 @@ pub fn write_user_substs_from_substs(
         );
 
         if !substs.is_noop() {
-            let user_substs = self.infcx.canonicalize_response(&UserSubsts {
+            let user_substs = self.infcx.canonicalize_user_type_annotation(&UserSubsts {
                 substs,
                 user_self_ty,
             });
@@ -2431,7 +2431,7 @@ pub fn add_obligations_for_parameters(&self,
                                           cause: traits::ObligationCause<'tcx>,
                                           predicates: &ty::InstantiatedPredicates<'tcx>)
     {
-        assert!(!predicates.has_escaping_regions());
+        assert!(!predicates.has_escaping_bound_vars());
 
         debug!("add_obligations_for_parameters(predicates={:?})",
                predicates);
@@ -4744,7 +4744,7 @@ pub fn suggest_ref_or_into(
         } else if !self.check_for_cast(err, expr, found, expected) {
             let methods = self.get_conversion_methods(expr.span, expected, found);
             if let Ok(expr_text) = self.sess().source_map().span_to_snippet(expr.span) {
-                let suggestions = iter::repeat(&expr_text).zip(methods.iter())
+                let mut suggestions = iter::repeat(&expr_text).zip(methods.iter())
                     .filter_map(|(receiver, method)| {
                         let method_call = format!(".{}()", method.ident);
                         if receiver.ends_with(&method_call) {
@@ -4760,8 +4760,8 @@ pub fn suggest_ref_or_into(
                                 Some(format!("{}{}", receiver, method_call))
                             }
                         }
-                    }).collect::<Vec<_>>();
-                if !suggestions.is_empty() {
+                    }).peekable();
+                if suggestions.peek().is_some() {
                     err.span_suggestions_with_applicability(
                         expr.span,
                         "try using a conversion method",
@@ -5188,8 +5188,8 @@ pub fn instantiate_value_path(&self,
                 }
             },
         );
-        assert!(!substs.has_escaping_regions());
-        assert!(!ty.has_escaping_regions());
+        assert!(!substs.has_escaping_bound_vars());
+        assert!(!ty.has_escaping_bound_vars());
 
         // Write the "user substs" down first thing for later.
         let hir_id = self.tcx.hir.node_to_hir_id(node_id);
index ea84e874b1a5b3b9dc6abb2a7f5bad9ff37f3bc5..527ba276de2732556ea7c4211facc15e06a6db0e 100644 (file)
@@ -153,6 +153,9 @@ pub fn check_item_well_formed<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, def_id: Def
         hir::ItemKind::Trait(..) => {
             check_trait(tcx, item);
         }
+        hir::ItemKind::TraitAlias(..) => {
+            check_trait(tcx, item);
+        }
         _ => {}
     }
 }
index 05a83dd307c38047e7bb2e76a6f7dab2603f6388..a5ad31e0b6b5f0de5e1abc1821522935186383ff 100644 (file)
@@ -31,8 +31,9 @@ pub fn check_trait<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, trait_def_id: DefId) {
     Checker { tcx, trait_def_id }
         .check(tcx.lang_items().drop_trait(), visit_implementation_of_drop)
         .check(tcx.lang_items().copy_trait(), visit_implementation_of_copy)
-        .check(tcx.lang_items().coerce_unsized_trait(),
-               visit_implementation_of_coerce_unsized);
+        .check(tcx.lang_items().coerce_unsized_trait(), visit_implementation_of_coerce_unsized)
+        .check(tcx.lang_items().dispatch_from_dyn_trait(),
+            visit_implementation_of_dispatch_from_dyn);
 }
 
 struct Checker<'a, 'tcx: 'a> {
@@ -98,7 +99,7 @@ fn visit_implementation_of_copy<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, impl_did:
 
     let span = tcx.hir.span(impl_node_id);
     let param_env = tcx.param_env(impl_did);
-    assert!(!self_type.has_escaping_regions());
+    assert!(!self_type.has_escaping_bound_vars());
 
     debug!("visit_implementation_of_copy: self_type={:?} (free)",
            self_type);
@@ -162,6 +163,174 @@ fn visit_implementation_of_coerce_unsized<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
     }
 }
 
+fn visit_implementation_of_dispatch_from_dyn<'a, 'tcx>(
+    tcx: TyCtxt<'a, 'tcx, 'tcx>,
+    impl_did: DefId,
+) {
+    debug!("visit_implementation_of_dispatch_from_dyn: impl_did={:?}",
+           impl_did);
+    if impl_did.is_local() {
+        let dispatch_from_dyn_trait = tcx.lang_items().dispatch_from_dyn_trait().unwrap();
+
+        let impl_node_id = tcx.hir.as_local_node_id(impl_did).unwrap();
+        let span = tcx.hir.span(impl_node_id);
+
+        let source = tcx.type_of(impl_did);
+        assert!(!source.has_escaping_bound_vars());
+        let target = {
+            let trait_ref = tcx.impl_trait_ref(impl_did).unwrap();
+            assert_eq!(trait_ref.def_id, dispatch_from_dyn_trait);
+
+            trait_ref.substs.type_at(1)
+        };
+
+        debug!("visit_implementation_of_dispatch_from_dyn: {:?} -> {:?}",
+            source,
+            target);
+
+        let param_env = tcx.param_env(impl_did);
+
+        let create_err = |msg: &str| {
+            struct_span_err!(tcx.sess, span, E0378, "{}", msg)
+        };
+
+        tcx.infer_ctxt().enter(|infcx| {
+            let cause = ObligationCause::misc(span, impl_node_id);
+
+            use ty::TyKind::*;
+            match (&source.sty, &target.sty) {
+                (&Ref(r_a, _, mutbl_a), Ref(r_b, _, mutbl_b))
+                    if infcx.at(&cause, param_env).eq(r_a, r_b).is_ok()
+                    && mutbl_a == *mutbl_b => (),
+                (&RawPtr(tm_a), &RawPtr(tm_b))
+                    if tm_a.mutbl == tm_b.mutbl => (),
+                (&Adt(def_a, substs_a), &Adt(def_b, substs_b))
+                    if def_a.is_struct() && def_b.is_struct() =>
+                {
+                    if def_a != def_b {
+                        let source_path = tcx.item_path_str(def_a.did);
+                        let target_path = tcx.item_path_str(def_b.did);
+
+                        create_err(
+                            &format!(
+                                "the trait `DispatchFromDyn` may only be implemented \
+                                for a coercion between structures with the same \
+                                definition; expected `{}`, found `{}`",
+                                source_path, target_path,
+                            )
+                        ).emit();
+
+                        return
+                    }
+
+                    if def_a.repr.c() || def_a.repr.packed() {
+                        create_err(
+                            "structs implementing `DispatchFromDyn` may not have \
+                             `#[repr(packed)]` or `#[repr(C)]`"
+                        ).emit();
+                    }
+
+                    let fields = &def_a.non_enum_variant().fields;
+
+                    let coerced_fields = fields.iter().filter_map(|field| {
+                        if tcx.type_of(field.did).is_phantom_data() {
+                            // ignore PhantomData fields
+                            return None
+                        }
+
+                        let ty_a = field.ty(tcx, substs_a);
+                        let ty_b = field.ty(tcx, substs_b);
+                        if let Ok(ok) = infcx.at(&cause, param_env).eq(ty_a, ty_b) {
+                            if ok.obligations.is_empty() {
+                                create_err(
+                                    "the trait `DispatchFromDyn` may only be implemented \
+                                     for structs containing the field being coerced, \
+                                     `PhantomData` fields, and nothing else"
+                                ).note(
+                                    &format!(
+                                        "extra field `{}` of type `{}` is not allowed",
+                                        field.ident, ty_a,
+                                    )
+                                ).emit();
+
+                                return None;
+                            }
+                        }
+
+                        Some(field)
+                    }).collect::<Vec<_>>();
+
+                    if coerced_fields.is_empty() {
+                        create_err(
+                            "the trait `DispatchFromDyn` may only be implemented \
+                            for a coercion between structures with a single field \
+                            being coerced, none found"
+                        ).emit();
+                    } else if coerced_fields.len() > 1 {
+                        create_err(
+                            "implementing the `DispatchFromDyn` trait requires multiple coercions",
+                        ).note(
+                            "the trait `DispatchFromDyn` may only be implemented \
+                                for a coercion between structures with a single field \
+                                being coerced"
+                        ).note(
+                            &format!(
+                                "currently, {} fields need coercions: {}",
+                                coerced_fields.len(),
+                                coerced_fields.iter().map(|field| {
+                                    format!("`{}` (`{}` to `{}`)",
+                                        field.ident,
+                                        field.ty(tcx, substs_a),
+                                        field.ty(tcx, substs_b),
+                                    )
+                                }).collect::<Vec<_>>()
+                                .join(", ")
+                            )
+                        ).emit();
+                    } else {
+                        let mut fulfill_cx = TraitEngine::new(infcx.tcx);
+
+                        for field in coerced_fields {
+
+                            let predicate = tcx.predicate_for_trait_def(
+                                param_env,
+                                cause.clone(),
+                                dispatch_from_dyn_trait,
+                                0,
+                                field.ty(tcx, substs_a),
+                                &[field.ty(tcx, substs_b).into()]
+                            );
+
+                            fulfill_cx.register_predicate_obligation(&infcx, predicate);
+                        }
+
+                        // Check that all transitive obligations are satisfied.
+                        if let Err(errors) = fulfill_cx.select_all_or_error(&infcx) {
+                            infcx.report_fulfillment_errors(&errors, None, false);
+                        }
+
+                        // Finally, resolve all regions.
+                        let region_scope_tree = region::ScopeTree::default();
+                        let outlives_env = OutlivesEnvironment::new(param_env);
+                        infcx.resolve_regions_and_report_errors(
+                            impl_did,
+                            &region_scope_tree,
+                            &outlives_env,
+                            SuppressRegionErrors::default(),
+                        );
+                    }
+                }
+                _ => {
+                    create_err(
+                        "the trait `DispatchFromDyn` may only be implemented \
+                        for a coercion between structures"
+                    ).emit();
+                }
+            }
+        })
+    }
+}
+
 pub fn coerce_unsized_info<'a, 'gcx>(gcx: TyCtxt<'a, 'gcx, 'gcx>,
                                      impl_did: DefId)
                                      -> CoerceUnsizedInfo {
@@ -187,7 +356,7 @@ pub fn coerce_unsized_info<'a, 'gcx>(gcx: TyCtxt<'a, 'gcx, 'gcx>,
 
     let span = gcx.hir.span(impl_node_id);
     let param_env = gcx.param_env(impl_did);
-    assert!(!source.has_escaping_regions());
+    assert!(!source.has_escaping_bound_vars());
 
     let err_info = CoerceUnsizedInfo { custom_kind: None };
 
@@ -236,7 +405,7 @@ pub fn coerce_unsized_info<'a, 'gcx>(gcx: TyCtxt<'a, 'gcx, 'gcx>,
                               E0377,
                               "the trait `CoerceUnsized` may only be implemented \
                                for a coercion between structures with the same \
-                               definition; expected {}, found {}",
+                               definition; expected `{}`, found `{}`",
                               source_path,
                               target_path);
                     return err_info;
@@ -341,7 +510,7 @@ pub fn coerce_unsized_info<'a, 'gcx>(gcx: TyCtxt<'a, 'gcx, 'gcx>,
                                       diff_fields.len(),
                                       diff_fields.iter()
                                           .map(|&(i, a, b)| {
-                                              format!("{} ({} to {})", fields[i].ident, a, b)
+                                              format!("`{}` (`{}` to `{}`)", fields[i].ident, a, b)
                                           })
                                           .collect::<Vec<_>>()
                                           .join(", ")));
index eb52a013b0566ef649d9b075596554e0e7f55f82..74dea7fe411ad0e710a016afd5fabc6bb39089e3 100644 (file)
@@ -58,6 +58,8 @@
 
 use std::iter;
 
+struct OnlySelfBounds(bool);
+
 ///////////////////////////////////////////////////////////////////////////
 // Main entry point
 
@@ -208,7 +210,7 @@ fn projected_ty_from_poly_trait_ref(
         item_def_id: DefId,
         poly_trait_ref: ty::PolyTraitRef<'tcx>,
     ) -> Ty<'tcx> {
-        if let Some(trait_ref) = poly_trait_ref.no_late_bound_regions() {
+        if let Some(trait_ref) = poly_trait_ref.no_bound_vars() {
             self.tcx().mk_projection(item_def_id, trait_ref.substs)
         } else {
             // no late-bound regions, we can just ignore the binder
@@ -245,8 +247,8 @@ fn type_param_predicates<'a, 'tcx>(
     use rustc::hir::*;
 
     // In the AST, bounds can derive from two places. Either
-    // written inline like `<T:Foo>` or in a where clause like
-    // `where T:Foo`.
+    // written inline like `<T : Foo>` or in a where clause like
+    // `where T : Foo`.
 
     let param_id = tcx.hir.as_local_node_id(def_id).unwrap();
     let param_owner = tcx.hir.ty_param_owner(param_id);
@@ -317,12 +319,13 @@ fn type_param_predicates<'a, 'tcx>(
     let icx = ItemCtxt::new(tcx, item_def_id);
     result
         .predicates
-        .extend(icx.type_parameter_bounds_in_generics(ast_generics, param_id, ty));
+        .extend(icx.type_parameter_bounds_in_generics(ast_generics, param_id, ty,
+            OnlySelfBounds(true)));
     result
 }
 
 impl<'a, 'tcx> ItemCtxt<'a, 'tcx> {
-    /// Find bounds from hir::Generics. This requires scanning through the
+    /// Find bounds from `hir::Generics`. This requires scanning through the
     /// AST. We do this to avoid having to convert *all* the bounds, which
     /// would create artificial cycles. Instead we can only convert the
     /// bounds for a type parameter `X` if `X::Foo` is used.
@@ -331,6 +334,7 @@ fn type_parameter_bounds_in_generics(
         ast_generics: &hir::Generics,
         param_id: ast::NodeId,
         ty: Ty<'tcx>,
+        only_self_bounds: OnlySelfBounds,
     ) -> Vec<(ty::Predicate<'tcx>, Span)> {
         let from_ty_params = ast_generics
             .params
@@ -350,9 +354,17 @@ fn type_parameter_bounds_in_generics(
                 hir::WherePredicate::BoundPredicate(ref bp) => Some(bp),
                 _ => None,
             })
-            .filter(|bp| is_param(self.tcx, &bp.bounded_ty, param_id))
-            .flat_map(|bp| bp.bounds.iter())
-            .flat_map(|b| predicates_from_bound(self, ty, b));
+            .flat_map(|bp| {
+                let bt = if is_param(self.tcx, &bp.bounded_ty, param_id) {
+                    Some(ty)
+                } else if !only_self_bounds.0 {
+                    Some(self.to_ty(&bp.bounded_ty))
+                } else {
+                    None
+                };
+                bp.bounds.iter().filter_map(move |b| bt.map(|bt| (bt, b)))
+            })
+            .flat_map(|(bt, b)| predicates_from_bound(self, bt, b));
 
         from_ty_params.chain(from_where_clauses).collect()
     }
@@ -419,12 +431,9 @@ fn convert_item<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, item_id: ast::NodeId) {
             tcx.predicates_of(def_id);
         }
         hir::ItemKind::TraitAlias(..) => {
-            span_err!(
-                tcx.sess,
-                it.span,
-                E0645,
-                "trait aliases are not yet implemented (see issue #41517)"
-            );
+            tcx.generics_of(def_id);
+            tcx.at(it.span).super_predicates_of(def_id);
+            tcx.predicates_of(def_id);
         }
         hir::ItemKind::Struct(ref struct_def, _) | hir::ItemKind::Union(ref struct_def, _) => {
             tcx.generics_of(def_id);
@@ -693,15 +702,20 @@ fn super_predicates_of<'a, 'tcx>(
 
     let icx = ItemCtxt::new(tcx, trait_def_id);
 
-    // Convert the bounds that follow the colon, e.g. `Bar+Zed` in `trait Foo : Bar+Zed`.
+    // Convert the bounds that follow the colon, e.g. `Bar + Zed` in `trait Foo : Bar + Zed`.
     let self_param_ty = tcx.mk_self_type();
     let superbounds1 = compute_bounds(&icx, self_param_ty, bounds, SizedByDefault::No, item.span);
 
     let superbounds1 = superbounds1.predicates(tcx, self_param_ty);
 
     // Convert any explicit superbounds in the where clause,
-    // e.g. `trait Foo where Self : Bar`:
-    let superbounds2 = icx.type_parameter_bounds_in_generics(generics, item.id, self_param_ty);
+    // e.g. `trait Foo where Self : Bar`.
+    // In the case of trait aliases, however, we include all bounds in the where clause,
+    // so e.g. `trait Foo = where u32: PartialEq<Self>` would include `u32: PartialEq<Self>`
+    // as one of its "superpredicates".
+    let is_trait_alias = ty::is_trait_alias(tcx, trait_def_id);
+    let superbounds2 = icx.type_parameter_bounds_in_generics(
+        generics, item.id, self_param_ty, OnlySelfBounds(!is_trait_alias));
 
     // Combine the two lists to form the complete set of superbounds:
     let superbounds: Vec<_> = superbounds1.into_iter().chain(superbounds2).collect();
@@ -709,6 +723,7 @@ fn super_predicates_of<'a, 'tcx>(
     // Now require that immediate supertraits are converted,
     // which will, in turn, reach indirect supertraits.
     for &(pred, span) in &superbounds {
+        debug!("superbound: {:?}", pred);
         if let ty::Predicate::Trait(bound) = pred {
             tcx.at(span).super_predicates_of(bound.def_id());
         }
@@ -1678,6 +1693,7 @@ fn extend<I: IntoIterator<Item = (ty::Predicate<'tcx>, Span)>>(&mut self, iter:
 
     let icx = ItemCtxt::new(tcx, def_id);
     let no_generics = hir::Generics::empty();
+    let empty_trait_items = HirVec::new();
 
     let mut predicates = UniquePredicates::new();
 
@@ -1722,6 +1738,10 @@ fn extend<I: IntoIterator<Item = (ty::Predicate<'tcx>, Span)>>(&mut self, iter:
                     is_trait = Some((ty::TraitRef::identity(tcx, def_id), items));
                     generics
                 }
+                ItemKind::TraitAlias(ref generics, _) => {
+                    is_trait = Some((ty::TraitRef::identity(tcx, def_id), &empty_trait_items));
+                    generics
+                }
                 ItemKind::Existential(ExistTy {
                     ref bounds,
                     impl_trait_fn,
@@ -2010,10 +2030,10 @@ pub fn compute_bounds<'gcx: 'tcx, 'tcx>(
     }
 }
 
-/// Converts a specific GenericBound from the AST into a set of
+/// Converts a specific `GenericBound` from the AST into a set of
 /// predicates that apply to the self-type. A vector is returned
-/// because this can be anywhere from 0 predicates (`T:?Sized` adds no
-/// predicates) to 1 (`T:Foo`) to many (`T:Bar<X=i32>` adds `T:Bar`
+/// because this can be anywhere from zero predicates (`T : ?Sized` adds no
+/// predicates) to one (`T : Foo`) to many (`T : Bar<X=i32>` adds `T : Bar`
 /// and `<T as Bar>::X == i32`).
 fn predicates_from_bound<'tcx>(
     astconv: &dyn AstConv<'tcx, 'tcx>,
index f57d050fa2d77641e9cdef52b34d0d4cb75182ac..c81aea2465b7be40b39d5840be6c07bbeb915061 100644 (file)
@@ -3084,6 +3084,66 @@ impl<T, U> CoerceUnsized<Foo<U>> for Foo<T> where T: CoerceUnsized<U> {}
 struct.
 "##,
 
+E0378: r##"
+The `DispatchFromDyn` trait currently can only be implemented for
+builtin pointer types and structs that are newtype wrappers around them
+— that is, the struct must have only one field (except for `PhantomData`),
+and that field must itself implement `DispatchFromDyn`.
+
+Examples:
+
+```
+#![feature(dispatch_from_dyn, unsize)]
+use std::{
+    marker::Unsize,
+    ops::DispatchFromDyn,
+};
+
+struct Ptr<T: ?Sized>(*const T);
+
+impl<T: ?Sized, U: ?Sized> DispatchFromDyn<Ptr<U>> for Ptr<T>
+where
+    T: Unsize<U>,
+{}
+```
+
+```
+#![feature(dispatch_from_dyn)]
+use std::{
+    ops::DispatchFromDyn,
+    marker::PhantomData,
+};
+
+struct Wrapper<T> {
+    ptr: T,
+    _phantom: PhantomData<()>,
+}
+
+impl<T, U> DispatchFromDyn<Wrapper<U>> for Wrapper<T>
+where
+    T: DispatchFromDyn<U>,
+{}
+```
+
+An example of an illegal `DispatchFromDyn` implementation
+(illegal because of the extra field):
+
+```compile-fail,E0378
+#![feature(dispatch_from_dyn)]
+use std::ops::DispatchFromDyn;
+
+struct WrapperExtraField<T> {
+    ptr: T,
+    extra_stuff: i32,
+}
+
+impl<T, U> DispatchFromDyn<WrapperExtraField<U>> for WrapperExtraField<T>
+where
+    T: DispatchFromDyn<U>,
+{}
+```
+"##,
+
 E0390: r##"
 You tried to implement methods for a primitive type. Erroneous code example:
 
index 96b75c4792d759486f8966789592149271d3a2d3..d748d93d8988e63470b142ac6381cf7f01f9285b 100644 (file)
@@ -167,7 +167,6 @@ fn is_free_region<'tcx>(tcx: TyCtxt<'_, 'tcx, 'tcx>, region: Region<'_>) -> bool
         RegionKind::ReEmpty
         | RegionKind::ReErased
         | RegionKind::ReClosureBound(..)
-        | RegionKind::ReCanonical(..)
         | RegionKind::ReScope(..)
         | RegionKind::ReVar(..)
         | RegionKind::RePlaceholder(..)
index 3e523c0c7f559c1481e400ea98c9c2f4a72fa011..47d34c909961e16e53a4a0d79e12a40b4a2e5aac 100644 (file)
@@ -338,6 +338,7 @@ fn add_constraints_from_ty(&mut self,
 
             ty::UnnormalizedProjection(..) |
             ty::GeneratorWitness(..) |
+            ty::Bound(..) |
             ty::Infer(..) => {
                 bug!("unexpected type encountered in \
                       variance inference: {}",
@@ -426,7 +427,6 @@ fn add_constraints_from_region(&mut self,
                 // way early-bound regions do, so we skip them here.
             }
 
-            ty::ReCanonical(_) |
             ty::ReFree(..) |
             ty::ReClosureBound(..) |
             ty::ReScope(..) |
index 88240e844edc23887fb5f66d4b4416854b8937f4..9f68fd56c5e04512d023436a9f7b0f5c47479914 100644 (file)
@@ -552,6 +552,14 @@ pub fn generics(&self) -> Option<&Generics> {
             _ => return None,
         })
     }
+
+    pub fn is_associated(&self) -> bool {
+        match *self {
+            ItemEnum::TypedefItem(_, _) |
+            ItemEnum::AssociatedTypeItem(_, _) => true,
+            _ => false,
+        }
+    }
 }
 
 #[derive(Clone, RustcEncodable, RustcDecodable, Debug)]
@@ -1260,7 +1268,6 @@ fn clean(&self, cx: &DocContext) -> Option<Lifetime> {
             ty::RePlaceholder(..) |
             ty::ReEmpty |
             ty::ReClosureBound(_) |
-            ty::ReCanonical(_) |
             ty::ReErased => None
         }
     }
@@ -2733,6 +2740,7 @@ fn clean(&self, cx: &DocContext) -> Type {
 
             ty::Closure(..) | ty::Generator(..) => Tuple(vec![]), // FIXME(pcwalton)
 
+            ty::Bound(..) => panic!("Bound"),
             ty::UnnormalizedProjection(..) => panic!("UnnormalizedProjection"),
             ty::GeneratorWitness(..) => panic!("GeneratorWitness"),
             ty::Infer(..) => panic!("Infer"),
diff --git a/src/librustdoc/config.rs b/src/librustdoc/config.rs
new file mode 100644 (file)
index 0000000..903aafe
--- /dev/null
@@ -0,0 +1,560 @@
+// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+use std::collections::{BTreeMap, BTreeSet};
+use std::fmt;
+use std::path::PathBuf;
+
+use errors;
+use errors::emitter::ColorConfig;
+use getopts;
+use rustc::lint::Level;
+use rustc::session::early_error;
+use rustc::session::config::{CodegenOptions, DebuggingOptions, ErrorOutputType, Externs};
+use rustc::session::config::{nightly_options, build_codegen_options, build_debugging_options,
+                             get_cmd_lint_options};
+use rustc::session::search_paths::SearchPaths;
+use rustc_driver;
+use rustc_target::spec::TargetTriple;
+use syntax::edition::Edition;
+
+use core::new_handler;
+use externalfiles::ExternalHtml;
+use html;
+use html::markdown::IdMap;
+use opts;
+use passes::{self, DefaultPassOption};
+use theme;
+
+/// Configuration options for rustdoc.
+#[derive(Clone)]
+pub struct Options {
+    // Basic options / Options passed directly to rustc
+
+    /// The crate root or Markdown file to load.
+    pub input: PathBuf,
+    /// The name of the crate being documented.
+    pub crate_name: Option<String>,
+    /// How to format errors and warnings.
+    pub error_format: ErrorOutputType,
+    /// Library search paths to hand to the compiler.
+    pub libs: SearchPaths,
+    /// The list of external crates to link against.
+    pub externs: Externs,
+    /// List of `cfg` flags to hand to the compiler. Always includes `rustdoc`.
+    pub cfgs: Vec<String>,
+    /// Codegen options to hand to the compiler.
+    pub codegen_options: CodegenOptions,
+    /// Debugging (`-Z`) options to pass to the compiler.
+    pub debugging_options: DebuggingOptions,
+    /// The target used to compile the crate against.
+    pub target: Option<TargetTriple>,
+    /// Edition used when reading the crate. Defaults to "2015". Also used by default when
+    /// compiling doctests from the crate.
+    pub edition: Edition,
+    /// The path to the sysroot. Used during the compilation process.
+    pub maybe_sysroot: Option<PathBuf>,
+    /// Linker to use when building doctests.
+    pub linker: Option<PathBuf>,
+    /// Lint information passed over the command-line.
+    pub lint_opts: Vec<(String, Level)>,
+    /// Whether to ask rustc to describe the lints it knows. Practically speaking, this will not be
+    /// used, since we abort if we have no input file, but it's included for completeness.
+    pub describe_lints: bool,
+    /// What level to cap lints at.
+    pub lint_cap: Option<Level>,
+
+    // Options specific to running doctests
+
+    /// Whether we should run doctests instead of generating docs.
+    pub should_test: bool,
+    /// List of arguments to pass to the test harness, if running tests.
+    pub test_args: Vec<String>,
+
+    // Options that affect the documentation process
+
+    /// The selected default set of passes to use.
+    ///
+    /// Be aware: This option can come both from the CLI and from crate attributes!
+    pub default_passes: DefaultPassOption,
+    /// Any passes manually selected by the user.
+    ///
+    /// Be aware: This option can come both from the CLI and from crate attributes!
+    pub manual_passes: Vec<String>,
+    /// Whether to display warnings during doc generation or while gathering doctests. By default,
+    /// all non-rustdoc-specific lints are allowed when generating docs.
+    pub display_warnings: bool,
+
+    // Options that alter generated documentation pages
+
+    /// Crate version to note on the sidebar of generated docs.
+    pub crate_version: Option<String>,
+    /// Collected options specific to outputting final pages.
+    pub render_options: RenderOptions,
+}
+
+impl fmt::Debug for Options {
+    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        struct FmtExterns<'a>(&'a Externs);
+
+        impl<'a> fmt::Debug for FmtExterns<'a> {
+            fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+                f.debug_map()
+                    .entries(self.0.iter())
+                    .finish()
+            }
+        }
+
+        f.debug_struct("Options")
+            .field("input", &self.input)
+            .field("crate_name", &self.crate_name)
+            .field("error_format", &self.error_format)
+            .field("libs", &self.libs)
+            .field("externs", &FmtExterns(&self.externs))
+            .field("cfgs", &self.cfgs)
+            .field("codegen_options", &"...")
+            .field("debugging_options", &"...")
+            .field("target", &self.target)
+            .field("edition", &self.edition)
+            .field("maybe_sysroot", &self.maybe_sysroot)
+            .field("linker", &self.linker)
+            .field("lint_opts", &self.lint_opts)
+            .field("describe_lints", &self.describe_lints)
+            .field("lint_cap", &self.lint_cap)
+            .field("should_test", &self.should_test)
+            .field("test_args", &self.test_args)
+            .field("default_passes", &self.default_passes)
+            .field("manual_passes", &self.manual_passes)
+            .field("display_warnings", &self.display_warnings)
+            .field("crate_version", &self.crate_version)
+            .field("render_options", &self.render_options)
+            .finish()
+    }
+}
+
+/// Configuration options for the HTML page-creation process.
+#[derive(Clone, Debug)]
+pub struct RenderOptions {
+    /// Output directory to generate docs into. Defaults to `doc`.
+    pub output: PathBuf,
+    /// External files to insert into generated pages.
+    pub external_html: ExternalHtml,
+    /// A pre-populated `IdMap` with the default headings and any headings added by Markdown files
+    /// processed by `external_html`.
+    pub id_map: IdMap,
+    /// If present, playground URL to use in the "Run" button added to code samples.
+    ///
+    /// Be aware: This option can come both from the CLI and from crate attributes!
+    pub playground_url: Option<String>,
+    /// Whether to sort modules alphabetically on a module page instead of using declaration order.
+    /// `true` by default.
+    ///
+    /// FIXME(misdreavus): the flag name is `--sort-modules-by-appearance` but the meaning is
+    /// inverted once read
+    pub sort_modules_alphabetically: bool,
+    /// List of themes to extend the docs with. Original argument name is included to assist in
+    /// displaying errors if it fails a theme check.
+    pub themes: Vec<PathBuf>,
+    /// If present, CSS file that contains rules to add to the default CSS.
+    pub extension_css: Option<PathBuf>,
+    /// A map of crate names to the URL to use instead of querying the crate's `html_root_url`.
+    pub extern_html_root_urls: BTreeMap<String, String>,
+    /// If present, suffix added to CSS/JavaScript files when referencing them in generated pages.
+    pub resource_suffix: String,
+    /// Whether to run the static CSS/JavaScript through a minifier when outputting them. `true` by
+    /// default.
+    ///
+    /// FIXME(misdreavus): the flag name is `--disable-minification` but the meaning is inverted
+    /// once read
+    pub enable_minification: bool,
+    /// Whether to create an index page in the root of the output directory. If this is true but
+    /// `index_page` is None, generate a static listing of crates instead.
+    pub enable_index_page: bool,
+    /// A file to use as the index page at the root of the output directory. Overrides
+    /// `enable_index_page` to be true if set.
+    pub index_page: Option<PathBuf>,
+
+    // Options specific to reading standalone Markdown files
+
+    /// Whether to generate a table of contents on the output file when reading a standalone
+    /// Markdown file.
+    pub markdown_no_toc: bool,
+    /// Additional CSS files to link in pages generated from standalone Markdown files.
+    pub markdown_css: Vec<String>,
+    /// If present, playground URL to use in the "Run" button added to code samples generated from
+    /// standalone Markdown files. If not present, `playground_url` is used.
+    pub markdown_playground_url: Option<String>,
+}
+
+impl Options {
+    /// Parses the given command-line for options. If an error message or other early-return has
+    /// been printed, returns `Err` with the exit code.
+    pub fn from_matches(matches: &getopts::Matches) -> Result<Options, isize> {
+        // Check for unstable options.
+        nightly_options::check_nightly_options(&matches, &opts());
+
+        if matches.opt_present("h") || matches.opt_present("help") {
+            ::usage("rustdoc");
+            return Err(0);
+        } else if matches.opt_present("version") {
+            rustc_driver::version("rustdoc", &matches);
+            return Err(0);
+        }
+
+        if matches.opt_strs("passes") == ["list"] {
+            println!("Available passes for running rustdoc:");
+            for pass in passes::PASSES {
+                println!("{:>20} - {}", pass.name(), pass.description());
+            }
+            println!("\nDefault passes for rustdoc:");
+            for &name in passes::DEFAULT_PASSES {
+                println!("{:>20}", name);
+            }
+            println!("\nPasses run with `--document-private-items`:");
+            for &name in passes::DEFAULT_PRIVATE_PASSES {
+                println!("{:>20}", name);
+            }
+            return Err(0);
+        }
+
+        let color = match matches.opt_str("color").as_ref().map(|s| &s[..]) {
+            Some("auto") => ColorConfig::Auto,
+            Some("always") => ColorConfig::Always,
+            Some("never") => ColorConfig::Never,
+            None => ColorConfig::Auto,
+            Some(arg) => {
+                early_error(ErrorOutputType::default(),
+                            &format!("argument for --color must be `auto`, `always` or `never` \
+                                      (instead was `{}`)", arg));
+            }
+        };
+        let error_format = match matches.opt_str("error-format").as_ref().map(|s| &s[..]) {
+            Some("human") => ErrorOutputType::HumanReadable(color),
+            Some("json") => ErrorOutputType::Json(false),
+            Some("pretty-json") => ErrorOutputType::Json(true),
+            Some("short") => ErrorOutputType::Short(color),
+            None => ErrorOutputType::HumanReadable(color),
+            Some(arg) => {
+                early_error(ErrorOutputType::default(),
+                            &format!("argument for --error-format must be `human`, `json` or \
+                                      `short` (instead was `{}`)", arg));
+            }
+        };
+
+        let codegen_options = build_codegen_options(matches, error_format);
+        let debugging_options = build_debugging_options(matches, error_format);
+
+        let diag = new_handler(error_format,
+                               None,
+                               debugging_options.treat_err_as_bug,
+                               debugging_options.ui_testing);
+
+        // check for deprecated options
+        check_deprecated_options(&matches, &diag);
+
+        let to_check = matches.opt_strs("theme-checker");
+        if !to_check.is_empty() {
+            let paths = theme::load_css_paths(include_bytes!("html/static/themes/light.css"));
+            let mut errors = 0;
+
+            println!("rustdoc: [theme-checker] Starting tests!");
+            for theme_file in to_check.iter() {
+                print!(" - Checking \"{}\"...", theme_file);
+                let (success, differences) = theme::test_theme_against(theme_file, &paths, &diag);
+                if !differences.is_empty() || !success {
+                    println!(" FAILED");
+                    errors += 1;
+                    if !differences.is_empty() {
+                        println!("{}", differences.join("\n"));
+                    }
+                } else {
+                    println!(" OK");
+                }
+            }
+            if errors != 0 {
+                return Err(1);
+            }
+            return Err(0);
+        }
+
+        if matches.free.is_empty() {
+            diag.struct_err("missing file operand").emit();
+            return Err(1);
+        }
+        if matches.free.len() > 1 {
+            diag.struct_err("too many file operands").emit();
+            return Err(1);
+        }
+        let input = PathBuf::from(&matches.free[0]);
+
+        let mut libs = SearchPaths::new();
+        for s in &matches.opt_strs("L") {
+            libs.add_path(s, error_format);
+        }
+        let externs = match parse_externs(&matches) {
+            Ok(ex) => ex,
+            Err(err) => {
+                diag.struct_err(&err).emit();
+                return Err(1);
+            }
+        };
+        let extern_html_root_urls = match parse_extern_html_roots(&matches) {
+            Ok(ex) => ex,
+            Err(err) => {
+                diag.struct_err(err).emit();
+                return Err(1);
+            }
+        };
+
+        let test_args = matches.opt_strs("test-args");
+        let test_args: Vec<String> = test_args.iter()
+                                              .flat_map(|s| s.split_whitespace())
+                                              .map(|s| s.to_string())
+                                              .collect();
+
+        let should_test = matches.opt_present("test");
+
+        let output = matches.opt_str("o")
+                            .map(|s| PathBuf::from(&s))
+                            .unwrap_or_else(|| PathBuf::from("doc"));
+        let mut cfgs = matches.opt_strs("cfg");
+        cfgs.push("rustdoc".to_string());
+
+        let extension_css = matches.opt_str("e").map(|s| PathBuf::from(&s));
+
+        if let Some(ref p) = extension_css {
+            if !p.is_file() {
+                diag.struct_err("option --extend-css argument must be a file").emit();
+                return Err(1);
+            }
+        }
+
+        let mut themes = Vec::new();
+        if matches.opt_present("themes") {
+            let paths = theme::load_css_paths(include_bytes!("html/static/themes/light.css"));
+
+            for (theme_file, theme_s) in matches.opt_strs("themes")
+                                                .iter()
+                                                .map(|s| (PathBuf::from(&s), s.to_owned())) {
+                if !theme_file.is_file() {
+                    diag.struct_err("option --themes arguments must all be files").emit();
+                    return Err(1);
+                }
+                let (success, ret) = theme::test_theme_against(&theme_file, &paths, &diag);
+                if !success || !ret.is_empty() {
+                    diag.struct_err(&format!("invalid theme: \"{}\"", theme_s))
+                        .help("use the --theme-checker option to see what is wrong")
+                        .emit();
+                    return Err(1);
+                }
+                themes.push(theme_file);
+            }
+        }
+
+        let mut id_map = html::markdown::IdMap::new();
+        id_map.populate(html::render::initial_ids());
+        let external_html = match ExternalHtml::load(
+                &matches.opt_strs("html-in-header"),
+                &matches.opt_strs("html-before-content"),
+                &matches.opt_strs("html-after-content"),
+                &matches.opt_strs("markdown-before-content"),
+                &matches.opt_strs("markdown-after-content"), &diag, &mut id_map) {
+            Some(eh) => eh,
+            None => return Err(3),
+        };
+
+        let edition = matches.opt_str("edition").unwrap_or("2015".to_string());
+        let edition = match edition.parse() {
+            Ok(e) => e,
+            Err(_) => {
+                diag.struct_err("could not parse edition").emit();
+                return Err(1);
+            }
+        };
+
+        match matches.opt_str("r").as_ref().map(|s| &**s) {
+            Some("rust") | None => {}
+            Some(s) => {
+                diag.struct_err(&format!("unknown input format: {}", s)).emit();
+                return Err(1);
+            }
+        }
+
+        match matches.opt_str("w").as_ref().map(|s| &**s) {
+            Some("html") | None => {}
+            Some(s) => {
+                diag.struct_err(&format!("unknown output format: {}", s)).emit();
+                return Err(1);
+            }
+        }
+
+        let index_page = matches.opt_str("index-page").map(|s| PathBuf::from(&s));
+        if let Some(ref index_page) = index_page {
+            if !index_page.is_file() {
+                diag.struct_err("option `--index-page` argument must be a file").emit();
+                return Err(1);
+            }
+        }
+
+        let target = matches.opt_str("target").map(|target| {
+            if target.ends_with(".json") {
+                TargetTriple::TargetPath(PathBuf::from(target))
+            } else {
+                TargetTriple::TargetTriple(target)
+            }
+        });
+
+        let default_passes = if matches.opt_present("no-defaults") {
+            passes::DefaultPassOption::None
+        } else if matches.opt_present("document-private-items") {
+            passes::DefaultPassOption::Private
+        } else {
+            passes::DefaultPassOption::Default
+        };
+        let manual_passes = matches.opt_strs("passes");
+
+        let crate_name = matches.opt_str("crate-name");
+        let playground_url = matches.opt_str("playground-url");
+        let maybe_sysroot = matches.opt_str("sysroot").map(PathBuf::from);
+        let display_warnings = matches.opt_present("display-warnings");
+        let linker = matches.opt_str("linker").map(PathBuf::from);
+        let sort_modules_alphabetically = !matches.opt_present("sort-modules-by-appearance");
+        let resource_suffix = matches.opt_str("resource-suffix").unwrap_or_default();
+        let enable_minification = !matches.opt_present("disable-minification");
+        let markdown_no_toc = matches.opt_present("markdown-no-toc");
+        let markdown_css = matches.opt_strs("markdown-css");
+        let markdown_playground_url = matches.opt_str("markdown-playground-url");
+        let crate_version = matches.opt_str("crate-version");
+        let enable_index_page = matches.opt_present("enable-index-page") || index_page.is_some();
+
+        let (lint_opts, describe_lints, lint_cap) = get_cmd_lint_options(matches, error_format);
+
+        Ok(Options {
+            input,
+            crate_name,
+            error_format,
+            libs,
+            externs,
+            cfgs,
+            codegen_options,
+            debugging_options,
+            target,
+            edition,
+            maybe_sysroot,
+            linker,
+            lint_opts,
+            describe_lints,
+            lint_cap,
+            should_test,
+            test_args,
+            default_passes,
+            manual_passes,
+            display_warnings,
+            crate_version,
+            render_options: RenderOptions {
+                output,
+                external_html,
+                id_map,
+                playground_url,
+                sort_modules_alphabetically,
+                themes,
+                extension_css,
+                extern_html_root_urls,
+                resource_suffix,
+                enable_minification,
+                enable_index_page,
+                index_page,
+                markdown_no_toc,
+                markdown_css,
+                markdown_playground_url,
+            }
+        })
+    }
+
+    /// Returns whether the file given as `self.input` is a Markdown file.
+    pub fn markdown_input(&self) -> bool {
+        self.input.extension()
+            .map_or(false, |e| e == "md" || e == "markdown")
+    }
+}
+
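As a usage sketch, the `Err` side of `from_matches` carries the process exit code directly, which is exactly how the call site in src/librustdoc/lib.rs further down in this patch consumes it:

    // Caller-side sketch mirroring `main_args` below: an Err value is returned
    // as the rustdoc exit code (0 for --help/--version, 1 for argument errors,
    // 3 when loading external HTML fails).
    let options = match config::Options::from_matches(&matches) {
        Ok(opts) => opts,
        Err(code) => return code,
    };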
+/// Prints a warning for each deprecated or removed option present in `matches`.
+fn check_deprecated_options(matches: &getopts::Matches, diag: &errors::Handler) {
+    let deprecated_flags = [
+       "input-format",
+       "output-format",
+       "no-defaults",
+       "passes",
+    ];
+
+    for flag in deprecated_flags.into_iter() {
+        if matches.opt_present(flag) {
+            let mut err = diag.struct_warn(&format!("the '{}' flag is considered deprecated",
+                                                    flag));
+            err.warn("please see https://github.com/rust-lang/rust/issues/44136");
+
+            if *flag == "no-defaults" {
+                err.help("you may want to use --document-private-items");
+            }
+
+            err.emit();
+        }
+    }
+
+    let removed_flags = [
+        "plugins",
+        "plugin-path",
+    ];
+
+    for &flag in removed_flags.iter() {
+        if matches.opt_present(flag) {
+            diag.struct_warn(&format!("the '{}' flag no longer functions", flag))
+                .warn("see CVE-2018-1000622")
+                .emit();
+        }
+    }
+}
+
+/// Extracts `--extern-html-root-url` arguments from `matches` and returns a map of crate names to
+/// the given URLs. If an `--extern-html-root-url` argument was ill-formed, returns an error
+/// describing the issue.
+fn parse_extern_html_roots(
+    matches: &getopts::Matches,
+) -> Result<BTreeMap<String, String>, &'static str> {
+    let mut externs = BTreeMap::new();
+    for arg in &matches.opt_strs("extern-html-root-url") {
+        let mut parts = arg.splitn(2, '=');
+        let name = parts.next().ok_or("--extern-html-root-url must not be empty")?;
+        let url = parts.next().ok_or("--extern-html-root-url must be of the form name=url")?;
+        externs.insert(name.to_string(), url.to_string());
+    }
+
+    Ok(externs)
+}
+
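A small, self-contained illustration of the `name=url` parsing above; `splitn(2, '=')` produces at most two pieces, so any further `=` characters stay inside the URL part:

    // Illustration only: the crate name is everything before the first '=',
    // the URL is the rest of the argument, '=' signs included.
    let arg = "std=https://doc.rust-lang.org/nightly";
    let mut parts = arg.splitn(2, '=');
    assert_eq!(parts.next(), Some("std"));
    assert_eq!(parts.next(), Some("https://doc.rust-lang.org/nightly"));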
+/// Extracts `--extern CRATE=PATH` arguments from `matches` and
+/// returns a map mapping crate names to their paths or else an
+/// error message.
+// FIXME(eddyb) This shouldn't be duplicated with `rustc::session`.
+fn parse_externs(matches: &getopts::Matches) -> Result<Externs, String> {
+    let mut externs: BTreeMap<_, BTreeSet<_>> = BTreeMap::new();
+    for arg in &matches.opt_strs("extern") {
+        let mut parts = arg.splitn(2, '=');
+        let name = parts.next().ok_or("--extern value must not be empty".to_string())?;
+        let location = parts.next().map(|s| s.to_string());
+        if location.is_none() && !nightly_options::is_unstable_enabled(matches) {
+            return Err("the `-Z unstable-options` flag must also be passed to \
+                        enable `--extern crate_name` without `=path`".to_string());
+        }
+        let name = name.to_string();
+        externs.entry(name).or_default().insert(location);
+    }
+    Ok(Externs::new(externs))
+}
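The shape of the resulting `Externs` map can be sketched as follows; the crate name and paths are made up for illustration, but the grouping mirrors the `entry(..).or_default().insert(..)` call above:

    use std::collections::{BTreeMap, BTreeSet};

    // Repeated `--extern foo=...` flags collapse into one entry whose set holds
    // every location; a bare `--extern foo` inserts `None` and is only accepted
    // together with `-Z unstable-options`.
    let mut externs: BTreeMap<String, BTreeSet<Option<String>>> = BTreeMap::new();
    externs.entry("foo".to_string()).or_default().insert(Some("/path/a.rlib".to_string()));
    externs.entry("foo".to_string()).or_default().insert(Some("/path/b.rlib".to_string()));
    assert_eq!(externs["foo"].len(), 2);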
index d6b0127e44d019e02faf3de39f5b359b9808b9c6..0bd6f6bf8a2f42495640ebfee3ad3676a1783a26 100644 (file)
@@ -28,7 +28,6 @@
 
 use syntax::ast::{self, Ident, NodeId};
 use syntax::source_map;
-use syntax::edition::Edition;
 use syntax::feature_gate::UnstableFeatures;
 use syntax::json::JsonEmitter;
 use syntax::ptr::P;
@@ -43,9 +42,9 @@
 use rustc_data_structures::sync::{self, Lrc};
 use std::rc::Rc;
 use std::sync::Arc;
-use std::path::PathBuf;
 
 use visit_ast::RustdocVisitor;
+use config::{Options as RustdocOptions, RenderOptions};
 use clean;
 use clean::{get_path_for_type, Clean, MAX_DEF_ID, AttributesExt};
 use html::render::RenderInfo;
@@ -320,32 +319,33 @@ pub fn new_handler(error_format: ErrorOutputType,
     )
 }
 
-pub fn run_core(search_paths: SearchPaths,
-                cfgs: Vec<String>,
-                externs: config::Externs,
-                input: Input,
-                triple: Option<TargetTriple>,
-                maybe_sysroot: Option<PathBuf>,
-                allow_warnings: bool,
-                crate_name: Option<String>,
-                force_unstable_if_unmarked: bool,
-                edition: Edition,
-                cg: CodegenOptions,
-                error_format: ErrorOutputType,
-                cmd_lints: Vec<(String, lint::Level)>,
-                lint_cap: Option<lint::Level>,
-                describe_lints: bool,
-                mut manual_passes: Vec<String>,
-                mut default_passes: passes::DefaultPassOption,
-                treat_err_as_bug: bool,
-                ui_testing: bool,
-) -> (clean::Crate, RenderInfo, Vec<String>) {
+pub fn run_core(options: RustdocOptions) -> (clean::Crate, RenderInfo, RenderOptions, Vec<String>) {
     // Parse, resolve, and typecheck the given crate.
 
-    let cpath = match input {
-        Input::File(ref p) => Some(p.clone()),
-        _ => None
-    };
+    let RustdocOptions {
+        input,
+        crate_name,
+        error_format,
+        libs,
+        externs,
+        cfgs,
+        codegen_options,
+        debugging_options,
+        target,
+        edition,
+        maybe_sysroot,
+        lint_opts,
+        describe_lints,
+        lint_cap,
+        mut default_passes,
+        mut manual_passes,
+        display_warnings,
+        render_options,
+        ..
+    } = options;
+
+    let cpath = Some(input.clone());
+    let input = Input::File(input);
 
     let intra_link_resolution_failure_name = lint::builtin::INTRA_DOC_LINK_RESOLUTION_FAILURE.name;
     let warnings_lint_name = lint::builtin::WARNINGS.name;
@@ -359,7 +359,7 @@ pub fn run_core(search_paths: SearchPaths,
                                      missing_docs.to_owned(),
                                      missing_doc_example.to_owned()];
 
-    whitelisted_lints.extend(cmd_lints.iter().map(|(lint, _)| lint).cloned());
+    whitelisted_lints.extend(lint_opts.iter().map(|(lint, _)| lint).cloned());
 
     let lints = lint::builtin::HardwiredLints.get_lints()
                     .into_iter()
@@ -372,33 +372,28 @@ pub fn run_core(search_paths: SearchPaths,
                             Some((lint.name_lower(), lint::Allow))
                         }
                     })
-                    .chain(cmd_lints.into_iter())
+                    .chain(lint_opts.into_iter())
                     .collect::<Vec<_>>();
 
     let host_triple = TargetTriple::from_triple(config::host_triple());
     // plays with error output here!
     let sessopts = config::Options {
         maybe_sysroot,
-        search_paths,
+        search_paths: libs,
         crate_types: vec![config::CrateType::Rlib],
-        lint_opts: if !allow_warnings {
+        lint_opts: if !display_warnings {
             lints
         } else {
             vec![]
         },
         lint_cap: Some(lint_cap.unwrap_or_else(|| lint::Forbid)),
-        cg,
+        cg: codegen_options,
         externs,
-        target_triple: triple.unwrap_or(host_triple),
+        target_triple: target.unwrap_or(host_triple),
         // Ensure that rustdoc works even if rustc is feature-staged
         unstable_features: UnstableFeatures::Allow,
         actually_rustdoc: true,
-        debugging_opts: config::DebuggingOptions {
-            force_unstable_if_unmarked,
-            treat_err_as_bug,
-            ui_testing,
-            ..config::basic_debugging_options()
-        },
+        debugging_opts: debugging_options.clone(),
         error_format,
         edition,
         describe_lints,
@@ -408,8 +403,8 @@ pub fn run_core(search_paths: SearchPaths,
         let source_map = Lrc::new(source_map::SourceMap::new(sessopts.file_path_mapping()));
         let diagnostic_handler = new_handler(error_format,
                                              Some(source_map.clone()),
-                                             treat_err_as_bug,
-                                             ui_testing);
+                                             debugging_options.treat_err_as_bug,
+                                             debugging_options.ui_testing);
 
         let mut sess = session::build_session_(
             sessopts, cpath, diagnostic_handler, source_map,
@@ -621,7 +616,7 @@ fn report_deprecated_attr(name: &str, diag: &errors::Handler) {
 
             ctxt.sess().abort_if_errors();
 
-            (krate, ctxt.renderinfo.into_inner(), passes)
+            (krate, ctxt.renderinfo.into_inner(), render_options, passes)
         }), &sess)
     })
 }
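With the long parameter list folded into `Options`, the call shape becomes a single struct in and a tuple out; a hedged sketch of the new entry point (the actual call site inside `rust_input` is not shown in this hunk):

    // Sketch only: one parsed Options value replaces ~19 positional arguments,
    // and the RenderOptions are threaded back out so the HTML backend can run
    // without touching the command line again.
    let (krate, renderinfo, render_opts, passes) = run_core(options);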
index 9631ea059cc4300f0337ac755716024033e3133a..c7a2dd6da3f754126b6861eef4e2a0d6cdf33f20 100644 (file)
@@ -16,7 +16,7 @@
 use html::markdown::{IdMap, ErrorCodes, Markdown};
 use std::cell::RefCell;
 
-#[derive(Clone)]
+#[derive(Clone, Debug)]
 pub struct ExternalHtml {
     /// Content that will be included inline in the <head> section of a
     /// rendered Markdown file or generated documentation
index 22fa887c358145865fc03c5ece73b93eb2644472..649a5c7ff33b698f7e6912bf5ca93e5476296ab2 100644 (file)
@@ -905,7 +905,7 @@ pub fn markdown_links(md: &str) -> Vec<(String, Option<Range<usize>>)> {
     links
 }
 
-#[derive(Default)]
+#[derive(Clone, Default, Debug)]
 pub struct IdMap {
     map: FxHashMap<String, usize>,
 }
index 0fc2473725a16c95db172dd25d4c5217d223cf41..efd71ad0763e0e61abcbeb638f1e15c6118952d7 100644 (file)
@@ -52,8 +52,7 @@
 use std::sync::Arc;
 use std::rc::Rc;
 
-use externalfiles::ExternalHtml;
-
+use errors;
 use serialize::json::{ToJson, Json, as_json};
 use syntax::ast;
 use syntax::ext::base::MacroKind;
@@ -67,6 +66,7 @@
 use rustc_data_structures::flock;
 
 use clean::{self, AttributesExt, GetDefId, SelfTy, Mutability};
+use config::RenderOptions;
 use doctree;
 use fold::DocFolder;
 use html::escape::Escape;
@@ -490,18 +490,25 @@ pub fn initial_ids() -> Vec<String> {
 
 /// Generates the documentation for `crate` into the directory `dst`
 pub fn run(mut krate: clean::Crate,
-           extern_urls: BTreeMap<String, String>,
-           external_html: &ExternalHtml,
-           playground_url: Option<String>,
-           dst: PathBuf,
-           resource_suffix: String,
+           options: RenderOptions,
            passes: FxHashSet<String>,
-           css_file_extension: Option<PathBuf>,
            renderinfo: RenderInfo,
-           sort_modules_alphabetically: bool,
-           themes: Vec<PathBuf>,
-           enable_minification: bool,
-           id_map: IdMap) -> Result<(), Error> {
+           diag: &errors::Handler) -> Result<(), Error> {
+    // need to save a copy of the options for rendering the index page
+    let md_opts = options.clone();
+    let RenderOptions {
+        output,
+        external_html,
+        id_map,
+        playground_url,
+        sort_modules_alphabetically,
+        themes,
+        extension_css,
+        extern_html_root_urls,
+        resource_suffix,
+        ..
+    } = options;
+
     let src_root = match krate.src {
         FileName::Real(ref p) => match p.parent() {
             Some(p) => p.to_path_buf(),
@@ -518,10 +525,10 @@ pub fn run(mut krate: clean::Crate,
         layout: layout::Layout {
             logo: String::new(),
             favicon: String::new(),
-            external_html: external_html.clone(),
+            external_html,
             krate: krate.name.clone(),
         },
-        css_file_extension,
+        css_file_extension: extension_css,
         created_dirs: Default::default(),
         sort_modules_alphabetically,
         themes,
@@ -563,6 +570,7 @@ pub fn run(mut krate: clean::Crate,
             }
         }
     }
+    let dst = output;
     try_err!(fs::create_dir_all(&dst), &dst);
     krate = render_sources(&dst, &mut scx, krate)?;
     let cx = Context {
@@ -625,7 +633,7 @@ pub fn run(mut krate: clean::Crate,
             },
             _ => PathBuf::new(),
         };
-        let extern_url = extern_urls.get(&e.name).map(|u| &**u);
+        let extern_url = extern_html_root_urls.get(&e.name).map(|u| &**u);
         cache.extern_locations.insert(n, (e.name.clone(), src_root,
                                           extern_location(e, extern_url, &cx.dst)));
 
@@ -666,7 +674,7 @@ pub fn run(mut krate: clean::Crate,
     CACHE_KEY.with(|v| *v.borrow_mut() = cache.clone());
     CURRENT_LOCATION_KEY.with(|s| s.borrow_mut().clear());
 
-    write_shared(&cx, &krate, &*cache, index, enable_minification)?;
+    write_shared(&cx, &krate, &*cache, index, &md_opts, diag)?;
 
     // And finally render the whole crate's documentation
     cx.krate(krate)
@@ -742,11 +750,14 @@ fn build_index(krate: &clean::Crate, cache: &mut Cache) -> String {
             Json::Object(crate_data))
 }
 
-fn write_shared(cx: &Context,
-                krate: &clean::Crate,
-                cache: &Cache,
-                search_index: String,
-                enable_minification: bool) -> Result<(), Error> {
+fn write_shared(
+    cx: &Context,
+    krate: &clean::Crate,
+    cache: &Cache,
+    search_index: String,
+    options: &RenderOptions,
+    diag: &errors::Handler,
+) -> Result<(), Error> {
     // Write out the shared files. Note that these are shared among all rustdoc
     // docs placed in the output directory, so this needs to be a synchronized
     // operation with respect to all other rustdocs running around.
@@ -757,10 +768,10 @@ fn write_shared(cx: &Context,
 
     write_minify(cx.dst.join(&format!("rustdoc{}.css", cx.shared.resource_suffix)),
                  include_str!("static/rustdoc.css"),
-                 enable_minification)?;
+                 options.enable_minification)?;
     write_minify(cx.dst.join(&format!("settings{}.css", cx.shared.resource_suffix)),
                  include_str!("static/settings.css"),
-                 enable_minification)?;
+                 options.enable_minification)?;
 
     // To avoid "light.css" to be overwritten, we'll first run over the received themes and only
     // then we'll run over the "official" styles.
@@ -784,11 +795,11 @@ fn write_shared(cx: &Context,
           include_bytes!("static/wheel.svg"))?;
     write_minify(cx.dst.join(&format!("light{}.css", cx.shared.resource_suffix)),
                  include_str!("static/themes/light.css"),
-                 enable_minification)?;
+                 options.enable_minification)?;
     themes.insert("light".to_owned());
     write_minify(cx.dst.join(&format!("dark{}.css", cx.shared.resource_suffix)),
                  include_str!("static/themes/dark.css"),
-                 enable_minification)?;
+                 options.enable_minification)?;
     themes.insert("dark".to_owned());
 
     let mut themes: Vec<&String> = themes.iter().collect();
@@ -844,10 +855,10 @@ fn write_shared(cx: &Context,
 
     write_minify(cx.dst.join(&format!("main{}.js", cx.shared.resource_suffix)),
                  include_str!("static/main.js"),
-                 enable_minification)?;
+                 options.enable_minification)?;
     write_minify(cx.dst.join(&format!("settings{}.js", cx.shared.resource_suffix)),
                  include_str!("static/settings.js"),
-                 enable_minification)?;
+                 options.enable_minification)?;
 
     {
         let mut data = format!("var resourcesSuffix = \"{}\";\n",
@@ -855,24 +866,24 @@ fn write_shared(cx: &Context,
         data.push_str(include_str!("static/storage.js"));
         write_minify(cx.dst.join(&format!("storage{}.js", cx.shared.resource_suffix)),
                      &data,
-                     enable_minification)?;
+                     options.enable_minification)?;
     }
 
     if let Some(ref css) = cx.shared.css_file_extension {
         let out = cx.dst.join(&format!("theme{}.css", cx.shared.resource_suffix));
-        if !enable_minification {
+        if !options.enable_minification {
             try_err!(fs::copy(css, out), css);
         } else {
             let mut f = try_err!(File::open(css), css);
             let mut buffer = String::with_capacity(1000);
 
             try_err!(f.read_to_string(&mut buffer), css);
-            write_minify(out, &buffer, enable_minification)?;
+            write_minify(out, &buffer, options.enable_minification)?;
         }
     }
     write_minify(cx.dst.join(&format!("normalize{}.css", cx.shared.resource_suffix)),
                  include_str!("static/normalize.css"),
-                 enable_minification)?;
+                 options.enable_minification)?;
     write(cx.dst.join("FiraSans-Regular.woff"),
           include_bytes!("static/FiraSans-Regular.woff"))?;
     write(cx.dst.join("FiraSans-Medium.woff"),
@@ -902,8 +913,9 @@ fn write_shared(cx: &Context,
     write(cx.dst.join("COPYRIGHT.txt"),
           include_bytes!("static/COPYRIGHT.txt"))?;
 
-    fn collect(path: &Path, krate: &str, key: &str) -> io::Result<Vec<String>> {
+    fn collect(path: &Path, krate: &str, key: &str) -> io::Result<(Vec<String>, Vec<String>)> {
         let mut ret = Vec::new();
+        let mut krates = Vec::new();
         if path.exists() {
             for line in BufReader::new(File::open(path)?).lines() {
                 let line = line?;
@@ -914,9 +926,13 @@ fn collect(path: &Path, krate: &str, key: &str) -> io::Result<Vec<String>> {
                     continue;
                 }
                 ret.push(line.to_string());
+                krates.push(line[key.len() + 2..].split('"')
+                                                 .next()
+                                                 .map(|s| s.to_owned())
+                                                 .unwrap_or_default());
             }
         }
-        Ok(ret)
+        Ok((ret, krates))
     }
 
     fn show_item(item: &IndexItem, krate: &str) -> String {
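A short illustration of the crate-name extraction added to `collect` above, assuming each saved line has the shape `searchIndex["name"] = {...};` (the exact on-disk format is not spelled out in this hunk):

    // Skip `searchIndex["` (key.len() + 2 bytes), then read up to the closing
    // quote to recover the crate name the line belongs to.
    let key = "searchIndex";
    let line = r#"searchIndex["serde"] = {"doc":"..."};"#;
    let name = line[key.len() + 2..].split('"').next().unwrap_or("");
    assert_eq!(name, "serde");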
@@ -931,7 +947,7 @@ fn show_item(item: &IndexItem, krate: &str) -> String {
 
     let dst = cx.dst.join("aliases.js");
     {
-        let mut all_aliases = try_err!(collect(&dst, &krate.name, "ALIASES"), &dst);
+        let (mut all_aliases, _) = try_err!(collect(&dst, &krate.name, "ALIASES"), &dst);
         let mut w = try_err!(File::create(&dst), &dst);
         let mut output = String::with_capacity(100);
         for (alias, items) in &cache.aliases {
@@ -955,7 +971,7 @@ fn show_item(item: &IndexItem, krate: &str) -> String {
 
     // Update the search index
     let dst = cx.dst.join("search-index.js");
-    let mut all_indexes = try_err!(collect(&dst, &krate.name, "searchIndex"), &dst);
+    let (mut all_indexes, mut krates) = try_err!(collect(&dst, &krate.name, "searchIndex"), &dst);
     all_indexes.push(search_index);
     // Sort the indexes by crate so the file will be generated identically even
     // with rustdoc running in parallel.
@@ -963,12 +979,52 @@ fn show_item(item: &IndexItem, krate: &str) -> String {
     let mut w = try_err!(File::create(&dst), &dst);
     try_err!(writeln!(&mut w, "var N = null;var searchIndex = {{}};"), &dst);
     for index in &all_indexes {
-        try_err!(write_minify_replacer(&mut w, &*index, enable_minification,
+        try_err!(write_minify_replacer(&mut w, &*index, options.enable_minification,
                                        &[(minifier::js::Keyword::Null, "N")]),
                  &dst);
     }
     try_err!(writeln!(&mut w, "initSearch(searchIndex);"), &dst);
 
+    if options.enable_index_page {
+        if let Some(index_page) = options.index_page.clone() {
+            let mut md_opts = options.clone();
+            md_opts.output = cx.dst.clone();
+            md_opts.external_html = (*cx.shared).layout.external_html.clone();
+
+            ::markdown::render(index_page, md_opts, diag);
+        } else {
+            let dst = cx.dst.join("index.html");
+            let mut w = BufWriter::new(try_err!(File::create(&dst), &dst));
+            let page = layout::Page {
+                title: "Index of crates",
+                css_class: "mod",
+                root_path: "./",
+                description: "List of crates",
+                keywords: BASIC_KEYWORDS,
+                resource_suffix: &cx.shared.resource_suffix,
+            };
+            krates.push(krate.name.clone());
+            krates.sort();
+            krates.dedup();
+
+            let content = format!(
+"<h1 class='fqn'>\
+     <span class='in-band'>List of all crates</span>\
+</h1><ul class='mod'>{}</ul>",
+                                  krates
+                                    .iter()
+                                    .map(|s| {
+                                        format!("<li><a href=\"{}/index.html\">{}</li>", s, s)
+                                    })
+                                    .collect::<String>());
+            try_err!(layout::render(&mut w, &cx.shared.layout,
+                                    &page, &(""), &content,
+                                    cx.shared.css_file_extension.is_some(),
+                                    &cx.shared.themes), &dst);
+            try_err!(w.flush(), &dst);
+        }
+    }
+
     // Update the list of all implementors for traits
     let dst = cx.dst.join("implementors");
     for (&did, imps) in &cache.implementors {
@@ -1022,7 +1078,8 @@ fn show_item(item: &IndexItem, krate: &str) -> String {
                             remote_item_type.css_class(),
                             remote_path[remote_path.len() - 1]));
 
-        let mut all_implementors = try_err!(collect(&mydst, &krate.name, "implementors"), &mydst);
+        let (mut all_implementors, _) = try_err!(collect(&mydst, &krate.name, "implementors"),
+                                                 &mydst);
         all_implementors.push(implementors);
         // Sort the implementors by crate so the file will be generated
         // identically even with rustdoc running in parallel.
@@ -2260,8 +2317,8 @@ fn document(w: &mut fmt::Formatter, cx: &Context, item: &clean::Item) -> fmt::Re
     if let Some(ref name) = item.name {
         info!("Documenting {}", name);
     }
-    document_stability(w, cx, item)?;
-    document_full(w, item, cx, "")?;
+    document_stability(w, cx, item, false)?;
+    document_full(w, item, cx, "", false)?;
     Ok(())
 }
 
@@ -2270,15 +2327,19 @@ fn render_markdown(w: &mut fmt::Formatter,
                    cx: &Context,
                    md_text: &str,
                    links: Vec<(String, String)>,
-                   prefix: &str)
+                   prefix: &str,
+                   is_hidden: bool)
                    -> fmt::Result {
     let mut ids = cx.id_map.borrow_mut();
-    write!(w, "<div class='docblock'>{}{}</div>",
-        prefix, Markdown(md_text, &links, RefCell::new(&mut ids), cx.codes))
+    write!(w, "<div class='docblock{}'>{}{}</div>",
+           if is_hidden { " hidden" } else { "" },
+           prefix,
+           Markdown(md_text, &links, RefCell::new(&mut ids),
+           cx.codes))
 }
 
 fn document_short(w: &mut fmt::Formatter, cx: &Context, item: &clean::Item, link: AssocItemLink,
-                  prefix: &str) -> fmt::Result {
+                  prefix: &str, is_hidden: bool) -> fmt::Result {
     if let Some(s) = item.doc_value() {
         let markdown = if s.contains('\n') {
             format!("{} [Read more]({})",
@@ -2286,28 +2347,33 @@ fn document_short(w: &mut fmt::Formatter, cx: &Context, item: &clean::Item, link
         } else {
             plain_summary_line(Some(s))
         };
-        render_markdown(w, cx, &markdown, item.links(), prefix)?;
+        render_markdown(w, cx, &markdown, item.links(), prefix, is_hidden)?;
     } else if !prefix.is_empty() {
-        write!(w, "<div class='docblock'>{}</div>", prefix)?;
+        write!(w, "<div class='docblock{}'>{}</div>",
+               if is_hidden { " hidden" } else { "" },
+               prefix)?;
     }
     Ok(())
 }
 
 fn document_full(w: &mut fmt::Formatter, item: &clean::Item,
-                 cx: &Context, prefix: &str) -> fmt::Result {
+                 cx: &Context, prefix: &str, is_hidden: bool) -> fmt::Result {
     if let Some(s) = cx.shared.maybe_collapsed_doc_value(item) {
         debug!("Doc block: =====\n{}\n=====", s);
-        render_markdown(w, cx, &*s, item.links(), prefix)?;
+        render_markdown(w, cx, &*s, item.links(), prefix, is_hidden)?;
     } else if !prefix.is_empty() {
-        write!(w, "<div class='docblock'>{}</div>", prefix)?;
+        write!(w, "<div class='docblock{}'>{}</div>",
+               if is_hidden { " hidden" } else { "" },
+               prefix)?;
     }
     Ok(())
 }
 
-fn document_stability(w: &mut fmt::Formatter, cx: &Context, item: &clean::Item) -> fmt::Result {
+fn document_stability(w: &mut fmt::Formatter, cx: &Context, item: &clean::Item,
+                      is_hidden: bool) -> fmt::Result {
     let stabilities = short_stability(item, cx, true);
     if !stabilities.is_empty() {
-        write!(w, "<div class='stability'>")?;
+        write!(w, "<div class='stability{}'>", if is_hidden { " hidden" } else { "" })?;
         for stability in stabilities {
             write!(w, "{}", stability)?;
         }
@@ -3872,14 +3938,21 @@ fn doc_impl_item(w: &mut fmt::Formatter, cx: &Context, item: &clean::Item,
             RenderMode::ForDeref { mut_: deref_mut_ } => should_render_item(&item, deref_mut_),
         };
 
+        let (is_hidden, extra_class) = if trait_.is_none() ||
+                                          item.doc_value().is_some() ||
+                                          item.inner.is_associated() {
+            (false, "")
+        } else {
+            (true, " hidden")
+        };
         match item.inner {
             clean::MethodItem(clean::Method { ref decl, .. }) |
-            clean::TyMethodItem(clean::TyMethod{ ref decl, .. }) => {
+            clean::TyMethodItem(clean::TyMethod { ref decl, .. }) => {
                 // Only render when the method is not static or we allow static methods
                 if render_method_item {
                     let id = cx.derive_id(format!("{}.{}", item_type, name));
                     let ns_id = cx.derive_id(format!("{}.{}", name, item_type.name_space()));
-                    write!(w, "<h4 id='{}' class=\"{}\">", id, item_type)?;
+                    write!(w, "<h4 id='{}' class=\"{}{}\">", id, item_type, extra_class)?;
                     write!(w, "{}", spotlight_decl(decl)?)?;
                     write!(w, "<span id='{}' class='invisible'>", ns_id)?;
                     write!(w, "<table class='table-display'><tbody><tr><td><code>")?;
@@ -3901,7 +3974,7 @@ fn doc_impl_item(w: &mut fmt::Formatter, cx: &Context, item: &clean::Item,
             clean::TypedefItem(ref tydef, _) => {
                 let id = cx.derive_id(format!("{}.{}", ItemType::AssociatedType, name));
                 let ns_id = cx.derive_id(format!("{}.{}", name, item_type.name_space()));
-                write!(w, "<h4 id='{}' class=\"{}\">", id, item_type)?;
+                write!(w, "<h4 id='{}' class=\"{}{}\">", id, item_type, extra_class)?;
                 write!(w, "<span id='{}' class='invisible'><code>", ns_id)?;
                 assoc_type(w, item, &Vec::new(), Some(&tydef.type_), link.anchor(&id))?;
                 write!(w, "</code></span></h4>\n")?;
@@ -3909,7 +3982,7 @@ fn doc_impl_item(w: &mut fmt::Formatter, cx: &Context, item: &clean::Item,
             clean::AssociatedConstItem(ref ty, ref default) => {
                 let id = cx.derive_id(format!("{}.{}", item_type, name));
                 let ns_id = cx.derive_id(format!("{}.{}", name, item_type.name_space()));
-                write!(w, "<h4 id='{}' class=\"{}\">", id, item_type)?;
+                write!(w, "<h4 id='{}' class=\"{}{}\">", id, item_type, extra_class)?;
                 write!(w, "<span id='{}' class='invisible'><code>", ns_id)?;
                 assoc_const(w, item, ty, default.as_ref(), link.anchor(&id))?;
                 let src = if let Some(l) = (Item { cx, item }).src_href() {
@@ -3923,7 +3996,7 @@ fn doc_impl_item(w: &mut fmt::Formatter, cx: &Context, item: &clean::Item,
             clean::AssociatedTypeItem(ref bounds, ref default) => {
                 let id = cx.derive_id(format!("{}.{}", item_type, name));
                 let ns_id = cx.derive_id(format!("{}.{}", name, item_type.name_space()));
-                write!(w, "<h4 id='{}' class=\"{}\">", id, item_type)?;
+                write!(w, "<h4 id='{}' class=\"{}{}\">", id, item_type, extra_class)?;
                 write!(w, "<span id='{}' class='invisible'><code>", ns_id)?;
                 assoc_type(w, item, bounds, default.as_ref(), link.anchor(&id))?;
                 write!(w, "</code></span></h4>\n")?;
@@ -3940,25 +4013,25 @@ fn doc_impl_item(w: &mut fmt::Formatter, cx: &Context, item: &clean::Item,
                     if let Some(it) = t.items.iter().find(|i| i.name == item.name) {
                         // We need the stability of the item from the trait
                         // because impls can't have a stability.
-                        document_stability(w, cx, it)?;
+                        document_stability(w, cx, it, is_hidden)?;
                         if item.doc_value().is_some() {
-                            document_full(w, item, cx, "")?;
+                            document_full(w, item, cx, "", is_hidden)?;
                         } else if show_def_docs {
                             // In case the item isn't documented,
                             // provide short documentation from the trait.
-                            document_short(w, cx, it, link, "")?;
+                            document_short(w, cx, it, link, "", is_hidden)?;
                         }
                     }
                 } else {
-                    document_stability(w, cx, item)?;
+                    document_stability(w, cx, item, is_hidden)?;
                     if show_def_docs {
-                        document_full(w, item, cx, "")?;
+                        document_full(w, item, cx, "", is_hidden)?;
                     }
                 }
             } else {
-                document_stability(w, cx, item)?;
+                document_stability(w, cx, item, is_hidden)?;
                 if show_def_docs {
-                    document_short(w, cx, item, link, "")?;
+                    document_short(w, cx, item, link, "", is_hidden)?;
                 }
             }
         }
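Tying this file's changes to the JavaScript below: the renderer only marks undocumented trait-impl items with an extra `hidden` class, and main.js then adds a toggle that reveals them. The selection rule from `doc_impl_item` above, restated as a standalone sketch:

    // Sketch of the rule above: inherent-impl items, items with their own docs,
    // and associated consts/types stay visible; other trait-impl items get the
    // extra " hidden" class and are shown on demand by the JS toggle.
    fn extra_class(in_trait_impl: bool, has_docs: bool, is_associated: bool) -> &'static str {
        if !in_trait_impl || has_docs || is_associated { "" } else { " hidden" }
    }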
index b31100716397b81e581e0b94ac195a82967953b4..4425712eed7a15aaf9fecc4ae761453769f31aac 100644 (file)
     onEach(document.getElementsByClassName('method'), func);
     onEach(document.getElementsByClassName('associatedconstant'), func);
     onEach(document.getElementsByClassName('impl'), func);
+    onEach(document.getElementsByClassName('impl-items'), function(e) {
+        onEach(e.getElementsByClassName('associatedconstant'), func);
+        var hiddenElems = e.getElementsByClassName('hidden');
+        var needToggle = false;
+
+        for (var i = 0; i < hiddenElems.length; ++i) {
+            if (hasClass(hiddenElems[i], "content") === false &&
+                hasClass(hiddenElems[i], "docblock") === false) {
+                needToggle = true;
+                break;
+            }
+        }
+        if (needToggle === true) {
+            var newToggle = document.createElement('a');
+            newToggle.href = 'javascript:void(0)';
+            newToggle.className = 'collapse-toggle hidden-default collapsed';
+            newToggle.innerHTML = "[<span class='inner'>" + labelForToggleButton(true) + "</span>" +
+                                  "] Show hidden undocumented items";
+            newToggle.onclick = function() {
+                if (hasClass(this, "collapsed")) {
+                    removeClass(this, "collapsed");
+                    onEach(this.parentNode.getElementsByClassName("hidden"), function(x) {
+                        if (hasClass(x, "content") === false) {
+                            removeClass(x, "hidden");
+                            addClass(x, "x");
+                        }
+                    }, true);
+                    this.innerHTML = "[<span class='inner'>" + labelForToggleButton(false) +
+                                     "</span>] Hide undocumented items"
+                } else {
+                    addClass(this, "collapsed");
+                    onEach(this.parentNode.getElementsByClassName("x"), function(x) {
+                        if (hasClass(x, "content") === false) {
+                            addClass(x, "hidden");
+                            removeClass(x, "x");
+                        }
+                    }, true);
+                    this.innerHTML = "[<span class='inner'>" + labelForToggleButton(true) +
+                                     "</span>] Show hidden undocumented items";
+                }
+            };
+            e.insertBefore(newToggle, e.firstChild);
+        }
+    });
 
     function createToggle(otherMessage, fontSize, extraClass, show) {
         var span = document.createElement('span');
index ad6cdfd3e733b131b13f7c9891629f490c40ebbc..8f679b4d22b25d51f292e0b278f7ae7578e66e2f 100644 (file)
@@ -479,17 +479,6 @@ h4 > code, h3 > code, .invisible > code {
        margin-bottom: 15px;
 }
 
-.content .impl-items .method, .content .impl-items > .type, .impl-items > .associatedconstant {
-       margin-left: 20px;
-}
-.content .impl-items .docblock, .content .impl-items .stability {
-       margin-bottom: .6em;
-}
-
-.content .impl-items > .stability {
-       margin-left: 40px;
-}
-
 .content .docblock > .impl-items {
        margin-left: 20px;
        margin-top: -34px;
@@ -531,7 +520,20 @@ h4 > code, h3 > code, .invisible > code {
        top: -9px;
        left: -13px;
 }
-.methods > .stability {
+
+.content .impl-items .method, .content .impl-items > .type, .impl-items > .associatedconstant {
+       margin-left: 20px;
+}
+
+.content .impl-items .docblock, .content .impl-items .stability {
+       margin-bottom: .6em;
+}
+
+.content .impl-items > .stability {
+       margin-left: 40px;
+}
+
+.methods > .stability, .content .impl-items > .stability {
        margin-top: -8px;
 }
 
@@ -839,6 +841,11 @@ h3 > .collapse-toggle, h4 > .collapse-toggle {
        text-align: center;
 }
 
+.collapse-toggle.hidden-default {
+       position: relative;
+       margin-left: 20px;
+}
+
 .ghost {
        display: none;
 }
index e10e330402f5edf3ca56fae7f527ab7bad8cdcb2..5f7a8c75d3c5f6973775c2a09fd98b1af6e4693c 100644 (file)
@@ -15,11 +15,19 @@ var mainTheme = document.getElementById("mainThemeStyle");
 
 var savedHref = [];
 
-function onEach(arr, func) {
+function onEach(arr, func, reversed) {
     if (arr && arr.length > 0 && func) {
-        for (var i = 0; i < arr.length; i++) {
-            if (func(arr[i]) === true) {
-                return true;
+        if (reversed !== true) {
+            for (var i = 0; i < arr.length; ++i) {
+                if (func(arr[i]) === true) {
+                    return true;
+                }
+            }
+        } else {
+            for (var i = arr.length - 1; i >= 0; --i) {
+                if (func(arr[i]) === true) {
+                    return true;
+                }
             }
         }
     }
index 45a0494849bfa4fcb3529054343d525692a88d84..f0f36f0355ed6bef960b767854a52b5b8abf9fae 100644 (file)
 
 extern crate serialize as rustc_serialize; // used by deriving
 
-use errors::ColorConfig;
-
-use std::collections::{BTreeMap, BTreeSet};
 use std::default::Default;
 use std::env;
 use std::panic;
-use std::path::{Path, PathBuf};
 use std::process;
 use std::sync::mpsc::channel;
 
-use syntax::edition::Edition;
-use externalfiles::ExternalHtml;
 use rustc::session::{early_warn, early_error};
-use rustc::session::search_paths::SearchPaths;
-use rustc::session::config::{ErrorOutputType, RustcOptGroup, Externs, CodegenOptions};
-use rustc::session::config::{nightly_options, build_codegen_options};
-use rustc_target::spec::TargetTriple;
-use rustc::session::config::get_cmd_lint_options;
+use rustc::session::config::{ErrorOutputType, RustcOptGroup};
 
 #[macro_use]
 mod externalfiles;
 
 mod clean;
+mod config;
 mod core;
 mod doctree;
 mod fold;
@@ -99,6 +90,7 @@ pub mod html {
 struct Output {
     krate: clean::Crate,
     renderinfo: html::render::RenderInfo,
+    renderopts: config::RenderOptions,
     passes: Vec<String>,
 }
 
@@ -334,6 +326,17 @@ fn opts() -> Vec<RustcOptGroup> {
                 "LEVEL",
             )
         }),
+        unstable("index-page", |o| {
+             o.optopt("",
+                      "index-page",
+                      "Markdown file to be used as index page",
+                      "PATH")
+        }),
+        unstable("enable-index-page", |o| {
+             o.optflag("",
+                       "enable-index-page",
+                       "To enable generation of the index page")
+        }),
     ]
 }
 
@@ -356,372 +359,57 @@ fn main_args(args: &[String]) -> isize {
             early_error(ErrorOutputType::default(), &err.to_string());
         }
     };
-    // Check for unstable options.
-    nightly_options::check_nightly_options(&matches, &opts());
-
-    if matches.opt_present("h") || matches.opt_present("help") {
-        usage("rustdoc");
-        return 0;
-    } else if matches.opt_present("version") {
-        rustc_driver::version("rustdoc", &matches);
-        return 0;
-    }
-
-    if matches.opt_strs("passes") == ["list"] {
-        println!("Available passes for running rustdoc:");
-        for pass in passes::PASSES {
-            println!("{:>20} - {}", pass.name(), pass.description());
-        }
-        println!("\nDefault passes for rustdoc:");
-        for &name in passes::DEFAULT_PASSES {
-            println!("{:>20}", name);
-        }
-        println!("\nPasses run with `--document-private-items`:");
-        for &name in passes::DEFAULT_PRIVATE_PASSES {
-            println!("{:>20}", name);
-        }
-        return 0;
-    }
-
-    let color = match matches.opt_str("color").as_ref().map(|s| &s[..]) {
-        Some("auto") => ColorConfig::Auto,
-        Some("always") => ColorConfig::Always,
-        Some("never") => ColorConfig::Never,
-        None => ColorConfig::Auto,
-        Some(arg) => {
-            early_error(ErrorOutputType::default(),
-                        &format!("argument for --color must be `auto`, `always` or `never` \
-                                  (instead was `{}`)", arg));
-        }
-    };
-    let error_format = match matches.opt_str("error-format").as_ref().map(|s| &s[..]) {
-        Some("human") => ErrorOutputType::HumanReadable(color),
-        Some("json") => ErrorOutputType::Json(false),
-        Some("pretty-json") => ErrorOutputType::Json(true),
-        Some("short") => ErrorOutputType::Short(color),
-        None => ErrorOutputType::HumanReadable(color),
-        Some(arg) => {
-            early_error(ErrorOutputType::default(),
-                        &format!("argument for --error-format must be `human`, `json` or \
-                                  `short` (instead was `{}`)", arg));
-        }
-    };
-    let treat_err_as_bug = matches.opt_strs("Z").iter().any(|x| {
-        *x == "treat-err-as-bug"
-    });
-    let ui_testing = matches.opt_strs("Z").iter().any(|x| {
-        *x == "ui-testing"
-    });
-
-    let diag = core::new_handler(error_format, None, treat_err_as_bug, ui_testing);
-
-    // check for deprecated options
-    check_deprecated_options(&matches, &diag);
-
-    let to_check = matches.opt_strs("theme-checker");
-    if !to_check.is_empty() {
-        let paths = theme::load_css_paths(include_bytes!("html/static/themes/light.css"));
-        let mut errors = 0;
-
-        println!("rustdoc: [theme-checker] Starting tests!");
-        for theme_file in to_check.iter() {
-            print!(" - Checking \"{}\"...", theme_file);
-            let (success, differences) = theme::test_theme_against(theme_file, &paths, &diag);
-            if !differences.is_empty() || !success {
-                println!(" FAILED");
-                errors += 1;
-                if !differences.is_empty() {
-                    println!("{}", differences.join("\n"));
-                }
-            } else {
-                println!(" OK");
-            }
-        }
-        if errors != 0 {
-            return 1;
-        }
-        return 0;
-    }
-
-    if matches.free.is_empty() {
-        diag.struct_err("missing file operand").emit();
-        return 1;
-    }
-    if matches.free.len() > 1 {
-        diag.struct_err("too many file operands").emit();
-        return 1;
-    }
-    let input = &matches.free[0];
-
-    let mut libs = SearchPaths::new();
-    for s in &matches.opt_strs("L") {
-        libs.add_path(s, error_format);
-    }
-    let externs = match parse_externs(&matches) {
-        Ok(ex) => ex,
-        Err(err) => {
-            diag.struct_err(&err).emit();
-            return 1;
-        }
-    };
-    let extern_urls = match parse_extern_html_roots(&matches) {
-        Ok(ex) => ex,
-        Err(err) => {
-            diag.struct_err(err).emit();
-            return 1;
-        }
+    let options = match config::Options::from_matches(&matches) {
+        Ok(opts) => opts,
+        Err(code) => return code,
     };
 
-    let test_args = matches.opt_strs("test-args");
-    let test_args: Vec<String> = test_args.iter()
-                                          .flat_map(|s| s.split_whitespace())
-                                          .map(|s| s.to_string())
-                                          .collect();
-
-    let should_test = matches.opt_present("test");
-    let markdown_input = Path::new(input).extension()
-        .map_or(false, |e| e == "md" || e == "markdown");
-
-    let output = matches.opt_str("o").map(|s| PathBuf::from(&s));
-    let css_file_extension = matches.opt_str("e").map(|s| PathBuf::from(&s));
-    let mut cfgs = matches.opt_strs("cfg");
-    cfgs.push("rustdoc".to_string());
-
-    if let Some(ref p) = css_file_extension {
-        if !p.is_file() {
-            diag.struct_err("option --extend-css argument must be a file").emit();
-            return 1;
-        }
-    }
-
-    let mut themes = Vec::new();
-    if matches.opt_present("themes") {
-        let paths = theme::load_css_paths(include_bytes!("html/static/themes/light.css"));
-
-        for (theme_file, theme_s) in matches.opt_strs("themes")
-                                            .iter()
-                                            .map(|s| (PathBuf::from(&s), s.to_owned())) {
-            if !theme_file.is_file() {
-                diag.struct_err("option --themes arguments must all be files").emit();
-                return 1;
-            }
-            let (success, ret) = theme::test_theme_against(&theme_file, &paths, &diag);
-            if !success || !ret.is_empty() {
-                diag.struct_err(&format!("invalid theme: \"{}\"", theme_s))
-                    .help("check what's wrong with the --theme-checker option")
-                    .emit();
-                return 1;
-            }
-            themes.push(theme_file);
-        }
-    }
-
-    let mut id_map = html::markdown::IdMap::new();
-    id_map.populate(html::render::initial_ids());
-    let external_html = match ExternalHtml::load(
-            &matches.opt_strs("html-in-header"),
-            &matches.opt_strs("html-before-content"),
-            &matches.opt_strs("html-after-content"),
-            &matches.opt_strs("markdown-before-content"),
-            &matches.opt_strs("markdown-after-content"), &diag, &mut id_map) {
-        Some(eh) => eh,
-        None => return 3,
-    };
-    let crate_name = matches.opt_str("crate-name");
-    let playground_url = matches.opt_str("playground-url");
-    let maybe_sysroot = matches.opt_str("sysroot").map(PathBuf::from);
-    let display_warnings = matches.opt_present("display-warnings");
-    let linker = matches.opt_str("linker").map(PathBuf::from);
-    let sort_modules_alphabetically = !matches.opt_present("sort-modules-by-appearance");
-    let resource_suffix = matches.opt_str("resource-suffix");
-    let enable_minification = !matches.opt_present("disable-minification");
-
-    let edition = matches.opt_str("edition").unwrap_or("2015".to_string());
-    let edition = match edition.parse() {
-        Ok(e) => e,
-        Err(_) => {
-            diag.struct_err("could not parse edition").emit();
-            return 1;
-        }
-    };
+    let diag = core::new_handler(options.error_format,
+                                 None,
+                                 options.debugging_options.treat_err_as_bug,
+                                 options.debugging_options.ui_testing);
 
-    let cg = build_codegen_options(&matches, ErrorOutputType::default());
-
-    match (should_test, markdown_input) {
-        (true, true) => {
-            return markdown::test(input, cfgs, libs, externs, test_args, maybe_sysroot,
-                                  display_warnings, linker, edition, cg, &diag)
-        }
-        (true, false) => {
-            return test::run(Path::new(input), cfgs, libs, externs, test_args, crate_name,
-                             maybe_sysroot, display_warnings, linker, edition, cg)
-        }
-        (false, true) => return markdown::render(Path::new(input),
-                                                 output.unwrap_or(PathBuf::from("doc")),
-                                                 &matches, &external_html,
-                                                 !matches.opt_present("markdown-no-toc"), &diag),
+    match (options.should_test, options.markdown_input()) {
+        (true, true) => return markdown::test(options, &diag),
+        (true, false) => return test::run(options),
+        (false, true) => return markdown::render(options.input, options.render_options, &diag),
         (false, false) => {}
     }
 
-    let output_format = matches.opt_str("w");
-
-    let res = acquire_input(PathBuf::from(input), externs, edition, cg, &matches, error_format,
-                            move |out| {
-        let Output { krate, passes, renderinfo } = out;
-        let diag = core::new_handler(error_format, None, treat_err_as_bug, ui_testing);
+    // need to move these items separately because we lose them by the time the closure is called,
+    // but we can't create the Handler ahead of time because it's not Send
+    let diag_opts = (options.error_format,
+                     options.debugging_options.treat_err_as_bug,
+                     options.debugging_options.ui_testing);
+    rust_input(options, move |out| {
+        let Output { krate, passes, renderinfo, renderopts } = out;
         info!("going to format");
-        match output_format.as_ref().map(|s| &**s) {
-            Some("html") | None => {
-                html::render::run(krate, extern_urls, &external_html, playground_url,
-                                  output.unwrap_or(PathBuf::from("doc")),
-                                  resource_suffix.unwrap_or(String::new()),
-                                  passes.into_iter().collect(),
-                                  css_file_extension,
-                                  renderinfo,
-                                  sort_modules_alphabetically,
-                                  themes,
-                                  enable_minification, id_map)
-                    .expect("failed to generate documentation");
-                0
-            }
-            Some(s) => {
-                diag.struct_err(&format!("unknown output format: {}", s)).emit();
-                1
-            }
-        }
-    });
-    res.unwrap_or_else(|s| {
-        diag.struct_err(&format!("input error: {}", s)).emit();
-        1
+        let (error_format, treat_err_as_bug, ui_testing) = diag_opts;
+        let diag = core::new_handler(error_format, None, treat_err_as_bug, ui_testing);
+        html::render::run(krate, renderopts, passes.into_iter().collect(), renderinfo, &diag)
+            .expect("failed to generate documentation");
+        0
     })
 }
 
-/// Looks inside the command line arguments to extract the relevant input format
-/// and files and then generates the necessary rustdoc output for formatting.
-fn acquire_input<R, F>(input: PathBuf,
-                       externs: Externs,
-                       edition: Edition,
-                       cg: CodegenOptions,
-                       matches: &getopts::Matches,
-                       error_format: ErrorOutputType,
-                       f: F)
-                       -> Result<R, String>
-where R: 'static + Send, F: 'static + Send + FnOnce(Output) -> R {
-    match matches.opt_str("r").as_ref().map(|s| &**s) {
-        Some("rust") => Ok(rust_input(input, externs, edition, cg, matches, error_format, f)),
-        Some(s) => Err(format!("unknown input format: {}", s)),
-        None => Ok(rust_input(input, externs, edition, cg, matches, error_format, f))
-    }
-}
-
-/// Extracts `--extern CRATE=PATH` arguments from `matches` and
-/// returns a map mapping crate names to their paths or else an
-/// error message.
-// FIXME(eddyb) This shouldn't be duplicated with `rustc::session`.
-fn parse_externs(matches: &getopts::Matches) -> Result<Externs, String> {
-    let mut externs: BTreeMap<_, BTreeSet<_>> = BTreeMap::new();
-    for arg in &matches.opt_strs("extern") {
-        let mut parts = arg.splitn(2, '=');
-        let name = parts.next().ok_or("--extern value must not be empty".to_string())?;
-        let location = parts.next().map(|s| s.to_string());
-        if location.is_none() && !nightly_options::is_unstable_enabled(matches) {
-            return Err("the `-Z unstable-options` flag must also be passed to \
-                        enable `--extern crate_name` without `=path`".to_string());
-        }
-        let name = name.to_string();
-        externs.entry(name).or_default().insert(location);
-    }
-    Ok(Externs::new(externs))
-}
-
-/// Extracts `--extern-html-root-url` arguments from `matches` and returns a map of crate names to
-/// the given URLs. If an `--extern-html-root-url` argument was ill-formed, returns an error
-/// describing the issue.
-fn parse_extern_html_roots(matches: &getopts::Matches)
-    -> Result<BTreeMap<String, String>, &'static str>
-{
-    let mut externs = BTreeMap::new();
-    for arg in &matches.opt_strs("extern-html-root-url") {
-        let mut parts = arg.splitn(2, '=');
-        let name = parts.next().ok_or("--extern-html-root-url must not be empty")?;
-        let url = parts.next().ok_or("--extern-html-root-url must be of the form name=url")?;
-        externs.insert(name.to_string(), url.to_string());
-    }
-
-    Ok(externs)
-}
-
 /// Interprets the input file as a rust source file, passing it through the
 /// compiler all the way through the analysis passes. The rustdoc output is then
 /// generated from the cleaned AST of the crate.
 ///
 /// This form of input will run all of the plugin/cleaning passes
-fn rust_input<R, F>(cratefile: PathBuf,
-                    externs: Externs,
-                    edition: Edition,
-                    cg: CodegenOptions,
-                    matches: &getopts::Matches,
-                    error_format: ErrorOutputType,
-                    f: F) -> R
+fn rust_input<R, F>(options: config::Options, f: F) -> R
 where R: 'static + Send,
       F: 'static + Send + FnOnce(Output) -> R
 {
-    let default_passes = if matches.opt_present("no-defaults") {
-        passes::DefaultPassOption::None
-    } else if matches.opt_present("document-private-items") {
-        passes::DefaultPassOption::Private
-    } else {
-        passes::DefaultPassOption::Default
-    };
-
-    let manual_passes = matches.opt_strs("passes");
-    let plugins = matches.opt_strs("plugins");
-
     // First, parse the crate and extract all relevant information.
-    let mut paths = SearchPaths::new();
-    for s in &matches.opt_strs("L") {
-        paths.add_path(s, ErrorOutputType::default());
-    }
-    let mut cfgs = matches.opt_strs("cfg");
-    cfgs.push("rustdoc".to_string());
-    let triple = matches.opt_str("target").map(|target| {
-        if target.ends_with(".json") {
-            TargetTriple::TargetPath(PathBuf::from(target))
-        } else {
-            TargetTriple::TargetTriple(target)
-        }
-    });
-    let maybe_sysroot = matches.opt_str("sysroot").map(PathBuf::from);
-    let crate_name = matches.opt_str("crate-name");
-    let crate_version = matches.opt_str("crate-version");
-    let plugin_path = matches.opt_str("plugin-path");
-
     info!("starting to run rustc");
-    let display_warnings = matches.opt_present("display-warnings");
-
-    let force_unstable_if_unmarked = matches.opt_strs("Z").iter().any(|x| {
-        *x == "force-unstable-if-unmarked"
-    });
-    let treat_err_as_bug = matches.opt_strs("Z").iter().any(|x| {
-        *x == "treat-err-as-bug"
-    });
-    let ui_testing = matches.opt_strs("Z").iter().any(|x| {
-        *x == "ui-testing"
-    });
-
-    let (lint_opts, describe_lints, lint_cap) = get_cmd_lint_options(matches, error_format);
 
     let (tx, rx) = channel();
 
     let result = rustc_driver::monitor(move || syntax::with_globals(move || {
-        use rustc::session::config::Input;
-
-        let (mut krate, renderinfo, passes) =
-            core::run_core(paths, cfgs, externs, Input::File(cratefile), triple, maybe_sysroot,
-                           display_warnings, crate_name.clone(),
-                           force_unstable_if_unmarked, edition, cg, error_format,
-                           lint_opts, lint_cap, describe_lints, manual_passes, default_passes,
-                           treat_err_as_bug, ui_testing);
+        let crate_name = options.crate_name.clone();
+        let crate_version = options.crate_version.clone();
+        let (mut krate, renderinfo, renderopts, passes) = core::run_core(options);
 
         info!("finished with rustc");
 
@@ -731,14 +419,6 @@ fn rust_input<R, F>(cratefile: PathBuf,
 
         krate.version = crate_version;
 
-        if !plugins.is_empty() {
-            eprintln!("WARNING: --plugins no longer functions; see CVE-2018-1000622");
-        }
-
-        if !plugin_path.is_none() {
-            eprintln!("WARNING: --plugin-path no longer functions; see CVE-2018-1000622");
-        }
-
         info!("Executing passes");
 
         for pass in &passes {
@@ -761,7 +441,12 @@ fn rust_input<R, F>(cratefile: PathBuf,
             krate = pass(krate);
         }
 
-        tx.send(f(Output { krate: krate, renderinfo: renderinfo, passes: passes })).unwrap();
+        tx.send(f(Output {
+            krate,
+            renderinfo,
+            renderopts,
+            passes,
+        })).unwrap();
     }));
 
     match result {
@@ -769,27 +454,3 @@ fn rust_input<R, F>(cratefile: PathBuf,
         Err(_) => panic::resume_unwind(Box::new(errors::FatalErrorMarker)),
     }
 }
-
-/// Prints deprecation warnings for deprecated options
-fn check_deprecated_options(matches: &getopts::Matches, diag: &errors::Handler) {
-    let deprecated_flags = [
-       "input-format",
-       "output-format",
-       "no-defaults",
-       "passes",
-    ];
-
-    for flag in deprecated_flags.into_iter() {
-        if matches.opt_present(flag) {
-            let mut err = diag.struct_warn(&format!("the '{}' flag is considered deprecated",
-                                                    flag));
-            err.warn("please see https://github.com/rust-lang/rust/issues/44136");
-
-            if *flag == "no-defaults" {
-                err.help("you may want to use --document-private-items");
-            }
-
-            err.emit();
-        }
-    }
-}
index a3ae953e6ee9b99cdf19705905c2534e86617756..8008f8848d45d054f0c80b68820350005f8a6e98 100644 (file)
 use std::default::Default;
 use std::fs::File;
 use std::io::prelude::*;
-use std::path::{PathBuf, Path};
+use std::path::PathBuf;
 use std::cell::RefCell;
 
 use errors;
-use getopts;
 use testing;
-use rustc::session::search_paths::SearchPaths;
-use rustc::session::config::{Externs, CodegenOptions};
 use syntax::source_map::DUMMY_SP;
 use syntax::feature_gate::UnstableFeatures;
-use syntax::edition::Edition;
 
-use externalfiles::{ExternalHtml, LoadStringError, load_string};
+use externalfiles::{LoadStringError, load_string};
 
+use config::{Options, RenderOptions};
 use html::escape::Escape;
 use html::markdown;
 use html::markdown::{ErrorCodes, IdMap, Markdown, MarkdownWithToc, find_testable_code};
@@ -51,24 +48,25 @@ fn extract_leading_metadata<'a>(s: &'a str) -> (Vec<&'a str>, &'a str) {
 
 /// Render `input` (e.g. "foo.md") into an HTML file in `output`
 /// (e.g. output = "bar" => "bar/foo.html").
-pub fn render(input: &Path, mut output: PathBuf, matches: &getopts::Matches,
-              external_html: &ExternalHtml, include_toc: bool, diag: &errors::Handler) -> isize {
+pub fn render(input: PathBuf, options: RenderOptions, diag: &errors::Handler) -> isize {
+    let mut output = options.output;
     output.push(input.file_stem().unwrap());
     output.set_extension("html");
 
     let mut css = String::new();
-    for name in &matches.opt_strs("markdown-css") {
+    for name in &options.markdown_css {
         let s = format!("<link rel=\"stylesheet\" type=\"text/css\" href=\"{}\">\n", name);
         css.push_str(&s)
     }
 
-    let input_str = match load_string(input, diag) {
+    let input_str = match load_string(&input, diag) {
         Ok(s) => s,
         Err(LoadStringError::ReadFail) => return 1,
         Err(LoadStringError::BadUtf8) => return 2,
     };
-    if let Some(playground) = matches.opt_str("markdown-playground-url").or(
-                              matches.opt_str("playground-url")) {
+    let playground_url = options.markdown_playground_url
+                            .or(options.playground_url);
+    if let Some(playground) = playground_url {
         markdown::PLAYGROUND.with(|s| { *s.borrow_mut() = Some((None, playground)); });
     }
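
A minimal sketch (not part of the diff): the output-path derivation documented at the top of `render` above ("foo.md" rendered into "bar" becomes "bar/foo.html"), reproduced with plain `std::path` calls so it can be run on its own.

    use std::path::PathBuf;

    fn main() {
        let input = PathBuf::from("foo.md");
        let mut output = PathBuf::from("bar");
        output.push(input.file_stem().unwrap()); // "bar/foo"
        output.set_extension("html");            // "bar/foo.html"
        assert_eq!(output, PathBuf::from("bar/foo.html"));
    }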
 
@@ -77,7 +75,7 @@ pub fn render(input: &Path, mut output: PathBuf, matches: &getopts::Matches,
             diag.struct_err(&format!("{}: {}", output.display(), e)).emit();
             return 4;
         }
-        Ok(f) => f
+        Ok(f) => f,
     };
 
     let (metadata, text) = extract_leading_metadata(&input_str);
@@ -89,7 +87,7 @@ pub fn render(input: &Path, mut output: PathBuf, matches: &getopts::Matches,
 
     let mut ids = IdMap::new();
     let error_codes = ErrorCodes::from(UnstableFeatures::from_environment().is_nightly_build());
-    let text = if include_toc {
+    let text = if !options.markdown_no_toc {
         MarkdownWithToc(text, RefCell::new(&mut ids), error_codes).to_string()
     } else {
         Markdown(text, &[], RefCell::new(&mut ids), error_codes).to_string()
@@ -124,10 +122,10 @@ pub fn render(input: &Path, mut output: PathBuf, matches: &getopts::Matches,
 </html>"#,
         title = Escape(title),
         css = css,
-        in_header = external_html.in_header,
-        before_content = external_html.before_content,
+        in_header = options.external_html.in_header,
+        before_content = options.external_html.before_content,
         text = text,
-        after_content = external_html.after_content,
+        after_content = options.external_html.after_content,
     );
 
     match err {
@@ -140,11 +138,8 @@ pub fn render(input: &Path, mut output: PathBuf, matches: &getopts::Matches,
 }
 
 /// Run any tests/code examples in the markdown file `input`.
-pub fn test(input: &str, cfgs: Vec<String>, libs: SearchPaths, externs: Externs,
-            mut test_args: Vec<String>, maybe_sysroot: Option<PathBuf>,
-            display_warnings: bool, linker: Option<PathBuf>, edition: Edition,
-            cg: CodegenOptions, diag: &errors::Handler) -> isize {
-    let input_str = match load_string(input, diag) {
+pub fn test(mut options: Options, diag: &errors::Handler) -> isize {
+    let input_str = match load_string(&options.input, diag) {
         Ok(s) => s,
         Err(LoadStringError::ReadFail) => return 1,
         Err(LoadStringError::BadUtf8) => return 2,
@@ -152,19 +147,20 @@ pub fn test(input: &str, cfgs: Vec<String>, libs: SearchPaths, externs: Externs,
 
     let mut opts = TestOptions::default();
     opts.no_crate_inject = true;
-    opts.display_warnings = display_warnings;
-    let mut collector = Collector::new(input.to_owned(), cfgs, libs, cg, externs,
-                                       true, opts, maybe_sysroot, None,
-                                       Some(PathBuf::from(input)),
-                                       linker, edition);
+    opts.display_warnings = options.display_warnings;
+    let mut collector = Collector::new(options.input.display().to_string(), options.cfgs,
+                                       options.libs, options.codegen_options, options.externs,
+                                       true, opts, options.maybe_sysroot, None,
+                                       Some(options.input),
+                                       options.linker, options.edition);
     collector.set_position(DUMMY_SP);
     let codes = ErrorCodes::from(UnstableFeatures::from_environment().is_nightly_build());
     let res = find_testable_code(&input_str, &mut collector, codes);
     if let Err(err) = res {
         diag.span_warn(DUMMY_SP, &err.to_string());
     }
-    test_args.insert(0, "rustdoctest".to_string());
-    testing::test_main(&test_args, collector.tests,
-                       testing::Options::new().display_output(display_warnings));
+    options.test_args.insert(0, "rustdoctest".to_string());
+    testing::test_main(&options.test_args, collector.tests,
+                       testing::Options::new().display_output(options.display_warnings));
     0
 }
index 2e6e76b5a4039ea4a0b1fe2c3aacf4cb243c2364..d9bab91fd0c7892b8b79e70333f7a16b84035e90 100644 (file)
@@ -12,7 +12,7 @@
 use std::ffi::OsString;
 use std::io::prelude::*;
 use std::io;
-use std::path::{Path, PathBuf};
+use std::path::PathBuf;
 use std::panic::{self, AssertUnwindSafe};
 use std::process::Command;
 use std::str;
@@ -42,6 +42,7 @@
 use errors::emitter::ColorConfig;
 
 use clean::Attributes;
+use config::Options;
 use html::markdown::{self, ErrorCodes, LangString};
 
 #[derive(Clone, Default)]
@@ -55,34 +56,23 @@ pub struct TestOptions {
     pub attrs: Vec<String>,
 }
 
-pub fn run(input_path: &Path,
-           cfgs: Vec<String>,
-           libs: SearchPaths,
-           externs: Externs,
-           mut test_args: Vec<String>,
-           crate_name: Option<String>,
-           maybe_sysroot: Option<PathBuf>,
-           display_warnings: bool,
-           linker: Option<PathBuf>,
-           edition: Edition,
-           cg: CodegenOptions)
-           -> isize {
-    let input = config::Input::File(input_path.to_owned());
+pub fn run(mut options: Options) -> isize {
+    let input = config::Input::File(options.input.clone());
 
     let sessopts = config::Options {
-        maybe_sysroot: maybe_sysroot.clone().or_else(
+        maybe_sysroot: options.maybe_sysroot.clone().or_else(
             || Some(env::current_exe().unwrap().parent().unwrap().parent().unwrap().to_path_buf())),
-        search_paths: libs.clone(),
+        search_paths: options.libs.clone(),
         crate_types: vec![config::CrateType::Dylib],
-        cg: cg.clone(),
-        externs: externs.clone(),
+        cg: options.codegen_options.clone(),
+        externs: options.externs.clone(),
         unstable_features: UnstableFeatures::from_environment(),
         lint_cap: Some(::rustc::lint::Level::Allow),
         actually_rustdoc: true,
         debugging_opts: config::DebuggingOptions {
             ..config::basic_debugging_options()
         },
-        edition,
+        edition: options.edition,
         ..config::Options::default()
     };
     driver::spawn_thread_pool(sessopts, |sessopts| {
@@ -93,13 +83,14 @@ pub fn run(input_path: &Path,
                                             Some(source_map.clone()));
 
         let mut sess = session::build_session_(
-            sessopts, Some(input_path.to_owned()), handler, source_map.clone(),
+            sessopts, Some(options.input), handler, source_map.clone(),
         );
         let codegen_backend = rustc_driver::get_codegen_backend(&sess);
         let cstore = CStore::new(codegen_backend.metadata_loader());
         rustc_lint::register_builtins(&mut sess.lint_store.borrow_mut(), Some(&sess));
 
-        let mut cfg = config::build_configuration(&sess, config::parse_cfgspecs(cfgs.clone()));
+        let mut cfg = config::build_configuration(&sess,
+                                                  config::parse_cfgspecs(options.cfgs.clone()));
         target_features::add_configuration(&mut cfg, &sess, &*codegen_backend);
         sess.parse_sess.config = cfg;
 
@@ -119,24 +110,24 @@ pub fn run(input_path: &Path,
             ).expect("phase_2_configure_and_expand aborted in rustdoc!")
         };
 
-        let crate_name = crate_name.unwrap_or_else(|| {
+        let crate_name = options.crate_name.unwrap_or_else(|| {
             ::rustc_codegen_utils::link::find_crate_name(None, &hir_forest.krate().attrs, &input)
         });
         let mut opts = scrape_test_config(hir_forest.krate());
-        opts.display_warnings |= display_warnings;
+        opts.display_warnings |= options.display_warnings;
         let mut collector = Collector::new(
             crate_name,
-            cfgs,
-            libs,
-            cg,
-            externs,
+            options.cfgs,
+            options.libs,
+            options.codegen_options,
+            options.externs,
             false,
             opts,
-            maybe_sysroot,
+            options.maybe_sysroot,
             Some(source_map),
-             None,
-            linker,
-            edition
+            None,
+            options.linker,
+            options.edition
         );
 
         {
@@ -153,11 +144,11 @@ pub fn run(input_path: &Path,
             });
         }
 
-        test_args.insert(0, "rustdoctest".to_string());
+        options.test_args.insert(0, "rustdoctest".to_string());
 
-        testing::test_main(&test_args,
+        testing::test_main(&options.test_args,
                         collector.tests.into_iter().collect(),
-                        testing::Options::new().display_output(display_warnings));
+                        testing::Options::new().display_output(options.display_warnings));
         0
     })
 }
@@ -378,7 +369,7 @@ pub fn make_test(s: &str,
                  dont_insert_main: bool,
                  opts: &TestOptions)
                  -> (String, usize) {
-    let (crate_attrs, everything_else) = partition_source(s);
+    let (crate_attrs, everything_else, crates) = partition_source(s);
     let everything_else = everything_else.trim();
     let mut line_offset = 0;
     let mut prog = String::new();
@@ -402,10 +393,84 @@ pub fn make_test(s: &str,
     // are intended to be crate attributes.
     prog.push_str(&crate_attrs);
 
+    // Uses libsyntax to parse the doctest and find whether there's a `main` fn and whether
+    // the extern crate is already included.
+    let (already_has_main, already_has_extern_crate) = crate::syntax::with_globals(|| {
+        use crate::syntax::{ast, parse::{self, ParseSess}, source_map::FilePathMapping};
+        use crate::syntax_pos::FileName;
+        use errors::emitter::EmitterWriter;
+        use errors::Handler;
+
+        let filename = FileName::Anon;
+        let source = crates + &everything_else;
+
+        // any errors in parsing should also appear when the doctest is compiled for real, so just
+        // send all the errors that libsyntax emits directly into a Sink instead of stderr
+        let cm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
+        let emitter = EmitterWriter::new(box io::sink(), None, false, false);
+        let handler = Handler::with_emitter(false, false, box emitter);
+        let sess = ParseSess::with_span_handler(handler, cm);
+
+        debug!("about to parse: \n{}", source);
+
+        let mut found_main = false;
+        let mut found_extern_crate = cratename.is_none();
+
+        let mut parser = match parse::maybe_new_parser_from_source_str(&sess, filename, source) {
+            Ok(p) => p,
+            Err(errs) => {
+                for mut err in errs {
+                    err.cancel();
+                }
+
+                return (found_main, found_extern_crate);
+            }
+        };
+
+        loop {
+            match parser.parse_item() {
+                Ok(Some(item)) => {
+                    if !found_main {
+                        if let ast::ItemKind::Fn(..) = item.node {
+                            if item.ident.as_str() == "main" {
+                                found_main = true;
+                            }
+                        }
+                    }
+
+                    if !found_extern_crate {
+                        if let ast::ItemKind::ExternCrate(original) = item.node {
+                            // This code will never be reached if `cratename` is `None` because
+                            // `found_extern_crate` is initialized to `true` if it is `None`.
+                            let cratename = cratename.unwrap();
+
+                            match original {
+                                Some(name) => found_extern_crate = name.as_str() == cratename,
+                                None => found_extern_crate = item.ident.as_str() == cratename,
+                            }
+                        }
+                    }
+
+                    if found_main && found_extern_crate {
+                        break;
+                    }
+                }
+                Ok(None) => break,
+                Err(mut e) => {
+                    e.cancel();
+                    break;
+                }
+            }
+        }
+
+        (found_main, found_extern_crate)
+    });
+
     // Don't inject `extern crate std` because it's already injected by the
     // compiler.
-    if !s.contains("extern crate") && !opts.no_crate_inject && cratename != Some("std") {
+    if !already_has_extern_crate && !opts.no_crate_inject && cratename != Some("std") {
         if let Some(cratename) = cratename {
+            // Make sure it's actually used if not already included.
             if s.contains(cratename) {
                 prog.push_str(&format!("extern crate {};\n", cratename));
                 line_offset += 1;
@@ -413,19 +478,6 @@ pub fn make_test(s: &str,
         }
     }
 
-    // FIXME (#21299): prefer libsyntax or some other actual parser over this
-    // best-effort ad hoc approach
-    let already_has_main = s.lines()
-        .map(|line| {
-            let comment = line.find("//");
-            if let Some(comment_begins) = comment {
-                &line[0..comment_begins]
-            } else {
-                line
-            }
-        })
-        .any(|code| code.contains("fn main"));
-
     if dont_insert_main || already_has_main {
         prog.push_str(everything_else);
     } else {
@@ -441,9 +493,10 @@ pub fn make_test(s: &str,
 }
 
 // FIXME(aburka): use a real parser to deal with multiline attributes
-fn partition_source(s: &str) -> (String, String) {
+fn partition_source(s: &str) -> (String, String, String) {
     let mut after_header = false;
     let mut before = String::new();
+    let mut crates = String::new();
     let mut after = String::new();
 
     for line in s.lines() {
@@ -457,12 +510,17 @@ fn partition_source(s: &str) -> (String, String) {
             after.push_str(line);
             after.push_str("\n");
         } else {
+            if trimline.starts_with("#[macro_use] extern crate")
+                || trimline.starts_with("extern crate") {
+                crates.push_str(line);
+                crates.push_str("\n");
+            }
             before.push_str(line);
             before.push_str("\n");
         }
     }
 
-    (before, after)
+    (before, after, crates)
 }
 
 pub trait Tester {
@@ -1014,4 +1072,38 @@ fn make_test_display_warnings() {
         let output = make_test(input, None, false, &opts);
         assert_eq!(output, (expected, 1));
     }
+
+    #[test]
+    fn make_test_issues_21299_33731() {
+        let opts = TestOptions::default();
+
+        let input =
+"// fn main
+assert_eq!(2+2, 4);";
+
+        let expected =
+"#![allow(unused)]
+fn main() {
+// fn main
+assert_eq!(2+2, 4);
+}".to_string();
+
+        let output = make_test(input, None, false, &opts);
+        assert_eq!(output, (expected, 2));
+
+        let input =
+"extern crate hella_qwop;
+assert_eq!(asdf::foo, 4);";
+
+        let expected =
+"#![allow(unused)]
+extern crate hella_qwop;
+extern crate asdf;
+fn main() {
+assert_eq!(asdf::foo, 4);
+}".to_string();
+
+        let output = make_test(input, Some("asdf"), false, &opts);
+        assert_eq!(output, (expected, 3));
+    }
 }
index cd1e3438fc372f1d6726344e3510bbdb1ede821f..0f22459b34349fe79a22a8a3f9e5a33e4aa03cf7 100644 (file)
@@ -14,7 +14,6 @@ crate-type = ["dylib", "rlib"]
 
 [dependencies]
 alloc = { path = "../liballoc" }
-alloc_jemalloc = { path = "../liballoc_jemalloc", optional = true }
 alloc_system = { path = "../liballoc_system" }
 panic_unwind = { path = "../libpanic_unwind", optional = true }
 panic_abort = { path = "../libpanic_abort" }
@@ -43,9 +42,6 @@ build_helper = { path = "../build_helper" }
 
 [features]
 backtrace = []
-debug-jemalloc = ["alloc_jemalloc/debug"]
-jemalloc = ["alloc_jemalloc"]
-force_alloc_system = []
 panic-unwind = ["panic_unwind"]
 profiler = ["profiler_builtins"]
 
index 31fc9ed3f772d3e254a4f103866a072a9ae1d098..1ff342fa7a7bea25213c1bb5bf4d4ea8a374dc90 100644 (file)
 //! In a given program, the standard library has one “global” memory allocator
 //! that is used for example by `Box<T>` and `Vec<T>`.
 //!
-//! Currently the default global allocator is unspecified.
-//! The compiler may link to a version of [jemalloc] on some platforms,
-//! but this is not guaranteed.
-//! Libraries, however, like `cdylib`s and `staticlib`s are guaranteed
-//! to use the [`System`] by default.
+//! Currently the default global allocator is unspecified. Libraries, however,
+//! like `cdylib`s and `staticlib`s are guaranteed to use the [`System`] by
+//! default.
 //!
-//! [jemalloc]: https://github.com/jemalloc/jemalloc
 //! [`System`]: struct.System.html
 //!
 //! # The `#[global_allocator]` attribute
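 //!
A minimal sketch (not part of the diff) of the attribute named in the heading above: pinning the system allocator explicitly, in line with the rewritten paragraph on the default allocator; `GLOBAL` is just an illustrative name.

    use std::alloc::System;

    #[global_allocator]
    static GLOBAL: System = System;

    fn main() {
        // Every heap allocation below goes through the platform's system allocator.
        let v = vec![1, 2, 3];
        println!("{:?}", v);
    }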
index ef5dae724b247eef53a77abd52376d286f76c621..aea4522892cba3839ae2188df0fcbd195ec8799a 100644 (file)
@@ -20,7 +20,7 @@
 use hash::{Hash, Hasher, BuildHasher, SipHasher13};
 use iter::{FromIterator, FusedIterator};
 use mem::{self, replace};
-use ops::{Deref, Index};
+use ops::{Deref, DerefMut, Index};
 use sys;
 
 use super::table::{self, Bucket, EmptyBucket, Fallibility, FullBucket, FullBucketMut, RawTable,
@@ -435,12 +435,13 @@ fn search_hashed<K, V, M, F>(table: M, hash: SafeHash, is_match: F) -> InternalE
         return InternalEntry::TableIsEmpty;
     }
 
-    search_hashed_nonempty(table, hash, is_match)
+    search_hashed_nonempty(table, hash, is_match, true)
 }
 
 /// Search for a pre-hashed key when the hash map is known to be non-empty.
 #[inline]
-fn search_hashed_nonempty<K, V, M, F>(table: M, hash: SafeHash, mut is_match: F)
+fn search_hashed_nonempty<K, V, M, F>(table: M, hash: SafeHash, mut is_match: F,
+                                      compare_hashes: bool)
     -> InternalEntry<K, V, M>
     where M: Deref<Target = RawTable<K, V>>,
           F: FnMut(&K) -> bool
@@ -476,7 +477,7 @@ fn search_hashed_nonempty<K, V, M, F>(table: M, hash: SafeHash, mut is_match: F)
         }
 
         // If the hash doesn't match, it can't be this one..
-        if hash == full.hash() {
+        if !compare_hashes || hash == full.hash() {
             // If the key doesn't match, it can't be this one..
             if is_match(full.read().0) {
                 return InternalEntry::Occupied { elem: full };
@@ -488,6 +489,57 @@ fn search_hashed_nonempty<K, V, M, F>(table: M, hash: SafeHash, mut is_match: F)
     }
 }
 
+/// Same as `search_hashed_nonempty` but for mutable access.
+#[inline]
+fn search_hashed_nonempty_mut<K, V, M, F>(table: M, hash: SafeHash, mut is_match: F,
+                                          compare_hashes: bool)
+    -> InternalEntry<K, V, M>
+    where M: DerefMut<Target = RawTable<K, V>>,
+          F: FnMut(&K) -> bool
+{
+    // Do not check the capacity as an extra branch could slow the lookup.
+
+    let size = table.size();
+    let mut probe = Bucket::new(table, hash);
+    let mut displacement = 0;
+
+    loop {
+        let mut full = match probe.peek() {
+            Empty(bucket) => {
+                // Found a hole!
+                return InternalEntry::Vacant {
+                    hash,
+                    elem: NoElem(bucket, displacement),
+                };
+            }
+            Full(bucket) => bucket,
+        };
+
+        let probe_displacement = full.displacement();
+
+        if probe_displacement < displacement {
+            // Found a luckier bucket than me.
+            // We can finish the search early if we hit any bucket
+            // with a lower distance to initial bucket than we've probed.
+            return InternalEntry::Vacant {
+                hash,
+                elem: NeqElem(full, probe_displacement),
+            };
+        }
+
+        // If the hash doesn't match, it can't be this one..
+        if hash == full.hash() || !compare_hashes {
+            // If the key doesn't match, it can't be this one..
+            if is_match(full.read_mut().0) {
+                return InternalEntry::Occupied { elem: full };
+            }
+        }
+        displacement += 1;
+        probe = full.next();
+        debug_assert!(displacement <= size);
+    }
+}
+
 fn pop_internal<K, V>(starting_bucket: FullBucketMut<K, V>)
     -> (K, V, &mut RawTable<K, V>)
 {
@@ -593,7 +645,7 @@ fn search<'a, Q: ?Sized>(&'a self, q: &Q)
         }
 
         let hash = self.make_hash(q);
-        search_hashed_nonempty(&self.table, hash, |k| q.eq(k.borrow()))
+        search_hashed_nonempty(&self.table, hash, |k| q.eq(k.borrow()), true)
             .into_occupied_bucket()
     }
 
@@ -608,7 +660,7 @@ fn search_mut<'a, Q: ?Sized>(&'a mut self, q: &Q)
         }
 
         let hash = self.make_hash(q);
-        search_hashed_nonempty(&mut self.table, hash, |k| q.eq(k.borrow()))
+        search_hashed_nonempty(&mut self.table, hash, |k| q.eq(k.borrow()), true)
             .into_occupied_bucket()
     }
 
@@ -1484,6 +1536,68 @@ pub fn retain<F>(&mut self, mut f: F)
     }
 }
 
+impl<K, V, S> HashMap<K, V, S>
+    where K: Eq + Hash,
+          S: BuildHasher
+{
+    /// Creates a raw entry builder for the HashMap.
+    ///
+    /// Raw entries provide the lowest level of control for searching and
+    /// manipulating a map. They must be manually initialized with a hash and
+    /// then manually searched. After this, insertions into a vacant entry
+    /// still require an owned key to be provided.
+    ///
+    /// Raw entries are useful for such exotic situations as:
+    ///
+    /// * Hash memoization
+    /// * Deferring the creation of an owned key until it is known to be required
+    /// * Using a search key that doesn't work with the Borrow trait
+    /// * Using custom comparison logic without newtype wrappers
+    ///
+    /// Because raw entries provide much more low-level control, it's much easier
+    /// to put the HashMap into an inconsistent state which, while memory-safe,
+    /// will cause the map to produce seemingly random results. Higher-level and
+    /// more foolproof APIs like `entry` should be preferred when possible.
+    ///
+    /// In particular, the hash used to initialize the raw entry must still be
+    /// consistent with the hash of the key that is ultimately stored in the entry.
+    /// This is because implementations of HashMap may need to recompute hashes
+    /// when resizing, at which point only the keys are available.
+    ///
+    /// Raw entries give mutable access to the keys. This must not be used
+    /// to modify how the key would compare or hash, as the map will not re-evaluate
+    /// where the key should go, meaning the keys may become "lost" if their
+    /// location does not reflect their state. For instance, if you change a key
+    /// so that the map now contains keys which compare equal, search may start
+    /// acting erratically, with two keys randomly masking each other. Implementations
+    /// are free to assume this doesn't happen (within the limits of memory-safety).
+    #[unstable(feature = "hash_raw_entry", issue = "54043")]
+    pub fn raw_entry_mut(&mut self) -> RawEntryBuilderMut<K, V, S> {
+        self.reserve(1);
+        RawEntryBuilderMut { map: self }
+    }
+
+    /// Creates a raw immutable entry builder for the HashMap.
+    ///
+    /// Raw entries provide the lowest level of control for searching and
+    /// manipulating a map. They must be manually initialized with a hash and
+    /// then manually searched.
+    ///
+    /// This is useful for:
+    /// * Hash memoization
+    /// * Using a search key that doesn't work with the Borrow trait
+    /// * Using custom comparison logic without newtype wrappers
+    ///
+    /// Unless you are in such a situation, higher-level and more foolproof APIs like
+    /// `get` should be preferred.
+    ///
+    /// Immutable raw entries have very limited use; you might instead want `raw_entry_mut`.
+    #[unstable(feature = "hash_raw_entry", issue = "54043")]
+    pub fn raw_entry(&self) -> RawEntryBuilder<K, V, S> {
+        RawEntryBuilder { map: self }
+    }
+}
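
A minimal sketch (not part of the diff) of the hash-memoization use case described in the docs above, written against the nightly `hash_raw_entry` API added here; it mirrors the `compute_hash` helper used in the test further down.

    #![feature(hash_raw_entry)]
    use std::collections::HashMap;
    use std::hash::{BuildHasher, Hash, Hasher};

    fn main() {
        let mut map: HashMap<String, u32> = HashMap::new();
        let key = "poneyland".to_string();

        // Hash the key once and reuse that hash for both the lookup and the insert.
        let mut hasher = map.hasher().build_hasher();
        key.hash(&mut hasher);
        let hash = hasher.finish();

        let (_k, v) = map.raw_entry_mut()
            .from_key_hashed_nocheck(hash, &key)
            .or_insert(key.clone(), 0);
        *v += 1;

        assert_eq!(map["poneyland"], 1);
    }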
+
 #[stable(feature = "rust1", since = "1.0.0")]
 impl<K, V, S> PartialEq for HashMap<K, V, S>
     where K: Eq + Hash,
@@ -1724,6 +1838,456 @@ fn into_entry(self, key: K) -> Option<Entry<'a, K, V>> {
     }
 }
 
+/// A builder for computing where in a HashMap a key-value pair would be stored.
+///
+/// See the [`HashMap::raw_entry_mut`] docs for usage examples.
+///
+/// [`HashMap::raw_entry_mut`]: struct.HashMap.html#method.raw_entry_mut
+
+#[unstable(feature = "hash_raw_entry", issue = "54043")]
+pub struct RawEntryBuilderMut<'a, K: 'a, V: 'a, S: 'a> {
+    map: &'a mut HashMap<K, V, S>,
+}
+
+/// A view into a single entry in a map, which may either be vacant or occupied.
+///
+/// This is a lower-level version of [`Entry`].
+///
+/// This `enum` is constructed from the [`raw_entry`] method on [`HashMap`].
+///
+/// [`HashMap`]: struct.HashMap.html
+/// [`Entry`]: enum.Entry.html
+/// [`raw_entry`]: struct.HashMap.html#method.raw_entry
+#[unstable(feature = "hash_raw_entry", issue = "54043")]
+pub enum RawEntryMut<'a, K: 'a, V: 'a, S: 'a> {
+    /// An occupied entry.
+    Occupied(RawOccupiedEntryMut<'a, K, V>),
+    /// A vacant entry.
+    Vacant(RawVacantEntryMut<'a, K, V, S>),
+}
+
+/// A view into an occupied entry in a `HashMap`.
+/// It is part of the [`RawEntryMut`] enum.
+///
+/// [`RawEntryMut`]: enum.RawEntryMut.html
+#[unstable(feature = "hash_raw_entry", issue = "54043")]
+pub struct RawOccupiedEntryMut<'a, K: 'a, V: 'a> {
+    elem: FullBucket<K, V, &'a mut RawTable<K, V>>,
+}
+
+/// A view into a vacant entry in a `HashMap`.
+/// It is part of the [`RawEntryMut`] enum.
+///
+/// [`RawEntryMut`]: enum.RawEntryMut.html
+#[unstable(feature = "hash_raw_entry", issue = "54043")]
+pub struct RawVacantEntryMut<'a, K: 'a, V: 'a, S: 'a> {
+    elem: VacantEntryState<K, V, &'a mut RawTable<K, V>>,
+    hash_builder: &'a S,
+}
+
+/// A builder for computing where in a HashMap a key-value pair would be stored.
+///
+/// See the [`HashMap::raw_entry`] docs for usage examples.
+///
+/// [`HashMap::raw_entry`]: struct.HashMap.html#method.raw_entry
+#[unstable(feature = "hash_raw_entry", issue = "54043")]
+pub struct RawEntryBuilder<'a, K: 'a, V: 'a, S: 'a> {
+    map: &'a HashMap<K, V, S>,
+}
+
+impl<'a, K, V, S> RawEntryBuilderMut<'a, K, V, S>
+    where S: BuildHasher,
+          K: Eq + Hash,
+{
+    /// Create a `RawEntryMut` from the given key.
+    #[unstable(feature = "hash_raw_entry", issue = "54043")]
+    pub fn from_key<Q: ?Sized>(self, k: &Q) -> RawEntryMut<'a, K, V, S>
+        where K: Borrow<Q>,
+              Q: Hash + Eq
+    {
+        let mut hasher = self.map.hash_builder.build_hasher();
+        k.hash(&mut hasher);
+        self.from_key_hashed_nocheck(hasher.finish(), k)
+    }
+
+    /// Create a `RawEntryMut` from the given key and its hash.
+    #[unstable(feature = "hash_raw_entry", issue = "54043")]
+    pub fn from_key_hashed_nocheck<Q: ?Sized>(self, hash: u64, k: &Q) -> RawEntryMut<'a, K, V, S>
+        where K: Borrow<Q>,
+              Q: Eq
+    {
+        self.from_hash(hash, |q| q.borrow().eq(k))
+    }
+
+    fn search<F>(self, hash: u64, is_match: F, compare_hashes: bool) -> RawEntryMut<'a, K, V, S>
+        where for<'b> F: FnMut(&'b K) -> bool,
+    {
+        match search_hashed_nonempty_mut(&mut self.map.table,
+                                         SafeHash::new(hash),
+                                         is_match,
+                                         compare_hashes) {
+            InternalEntry::Occupied { elem } => {
+                RawEntryMut::Occupied(RawOccupiedEntryMut { elem })
+            }
+            InternalEntry::Vacant { elem, .. } => {
+                RawEntryMut::Vacant(RawVacantEntryMut {
+                    elem,
+                    hash_builder: &self.map.hash_builder,
+                })
+            }
+            InternalEntry::TableIsEmpty => {
+                unreachable!()
+            }
+        }
+    }
+    /// Create a `RawEntryMut` from the given hash.
+    #[unstable(feature = "hash_raw_entry", issue = "54043")]
+    pub fn from_hash<F>(self, hash: u64, is_match: F) -> RawEntryMut<'a, K, V, S>
+        where for<'b> F: FnMut(&'b K) -> bool,
+    {
+        self.search(hash, is_match, true)
+    }
+
+    /// Search possible locations for an element with hash `hash` until `is_match` returns true for
+    /// one of them. There is no guarantee that all keys passed to `is_match` will have the provided
+    /// hash.
+    #[unstable(feature = "hash_raw_entry", issue = "54043")]
+    pub fn search_bucket<F>(self, hash: u64, is_match: F) -> RawEntryMut<'a, K, V, S>
+        where for<'b> F: FnMut(&'b K) -> bool,
+    {
+        self.search(hash, is_match, false)
+    }
+}
+
+impl<'a, K, V, S> RawEntryBuilder<'a, K, V, S>
+    where S: BuildHasher,
+{
+    /// Access an entry by key.
+    #[unstable(feature = "hash_raw_entry", issue = "54043")]
+    pub fn from_key<Q: ?Sized>(self, k: &Q) -> Option<(&'a K, &'a V)>
+        where K: Borrow<Q>,
+              Q: Hash + Eq
+    {
+        let mut hasher = self.map.hash_builder.build_hasher();
+        k.hash(&mut hasher);
+        self.from_key_hashed_nocheck(hasher.finish(), k)
+    }
+
+    /// Access an entry by a key and its hash.
+    #[unstable(feature = "hash_raw_entry", issue = "54043")]
+    pub fn from_key_hashed_nocheck<Q: ?Sized>(self, hash: u64, k: &Q) -> Option<(&'a K, &'a V)>
+        where K: Borrow<Q>,
+              Q: Hash + Eq
+
+    {
+        self.from_hash(hash, |q| q.borrow().eq(k))
+    }
+
+    fn search<F>(self, hash: u64, is_match: F, compare_hashes: bool) -> Option<(&'a K, &'a V)>
+        where F: FnMut(&K) -> bool
+    {
+        match search_hashed_nonempty(&self.map.table,
+                                     SafeHash::new(hash),
+                                     is_match,
+                                     compare_hashes) {
+            InternalEntry::Occupied { elem } => Some(elem.into_refs()),
+            InternalEntry::Vacant { .. } => None,
+            InternalEntry::TableIsEmpty => unreachable!(),
+        }
+    }
+
+    /// Access an entry by hash.
+    #[unstable(feature = "hash_raw_entry", issue = "54043")]
+    pub fn from_hash<F>(self, hash: u64, is_match: F) -> Option<(&'a K, &'a V)>
+        where F: FnMut(&K) -> bool
+    {
+        self.search(hash, is_match, true)
+    }
+
+    /// Search possible locations for an element with hash `hash` until `is_match` returns true for
+    /// one of them. There is no guarantee that all keys passed to `is_match` will have the provided
+    /// hash.
+    #[unstable(feature = "hash_raw_entry", issue = "54043")]
+    pub fn search_bucket<F>(self, hash: u64, is_match: F) -> Option<(&'a K, &'a V)>
+        where F: FnMut(&K) -> bool
+    {
+        self.search(hash, is_match, false)
+    }
+}
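
A minimal sketch (not part of the diff) of the immutable builder above, reusing one precomputed hash across the read-only lookup entry points (nightly-only, feature `hash_raw_entry`).

    #![feature(hash_raw_entry)]
    use std::collections::HashMap;
    use std::hash::{BuildHasher, Hash, Hasher};

    fn main() {
        let map: HashMap<&str, u32> = [("a", 1), ("b", 2)].iter().cloned().collect();

        // Hash "a" once, then feed the same hash to the different lookup methods.
        let mut hasher = map.hasher().build_hasher();
        "a".hash(&mut hasher);
        let hash = hasher.finish();

        assert_eq!(map.raw_entry().from_key_hashed_nocheck(hash, &"a"), Some((&"a", &1)));
        assert_eq!(map.raw_entry().from_hash(hash, |k| *k == "a"), Some((&"a", &1)));
    }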
+
+impl<'a, K, V, S> RawEntryMut<'a, K, V, S> {
+    /// Ensures a value is in the entry by inserting the default if empty, and returns
+    /// mutable references to the key and value in the entry.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// #![feature(hash_raw_entry)]
+    /// use std::collections::HashMap;
+    ///
+    /// let mut map: HashMap<&str, u32> = HashMap::new();
+    /// map.raw_entry_mut().from_key("poneyland").or_insert("poneyland", 12);
+    ///
+    /// assert_eq!(map["poneyland"], 12);
+    ///
+    /// *map.raw_entry_mut().from_key("poneyland").or_insert("poneyland", 12).1 += 10;
+    /// assert_eq!(map["poneyland"], 22);
+    /// ```
+    #[unstable(feature = "hash_raw_entry", issue = "54043")]
+    pub fn or_insert(self, default_key: K, default_val: V) -> (&'a mut K, &'a mut V)
+        where K: Hash,
+              S: BuildHasher,
+    {
+        match self {
+            RawEntryMut::Occupied(entry) => entry.into_key_value(),
+            RawEntryMut::Vacant(entry) => entry.insert(default_key, default_val),
+        }
+    }
+
+    /// Ensures a value is in the entry by inserting the result of the default function if empty,
+    /// and returns mutable references to the key and value in the entry.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// #![feature(hash_raw_entry)]
+    /// use std::collections::HashMap;
+    ///
+    /// let mut map: HashMap<&str, String> = HashMap::new();
+    ///
+    /// map.raw_entry_mut().from_key("poneyland").or_insert_with(|| {
+    ///     ("poneyland", "hoho".to_string())
+    /// });
+    ///
+    /// assert_eq!(map["poneyland"], "hoho".to_string());
+    /// ```
+    #[unstable(feature = "hash_raw_entry", issue = "54043")]
+    pub fn or_insert_with<F>(self, default: F) -> (&'a mut K, &'a mut V)
+        where F: FnOnce() -> (K, V),
+              K: Hash,
+              S: BuildHasher,
+    {
+        match self {
+            RawEntryMut::Occupied(entry) => entry.into_key_value(),
+            RawEntryMut::Vacant(entry) => {
+                let (k, v) = default();
+                entry.insert(k, v)
+            }
+        }
+    }
+
+    /// Provides in-place mutable access to an occupied entry before any
+    /// potential inserts into the map.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// #![feature(hash_raw_entry)]
+    /// use std::collections::HashMap;
+    ///
+    /// let mut map: HashMap<&str, u32> = HashMap::new();
+    ///
+    /// map.raw_entry_mut()
+    ///    .from_key("poneyland")
+    ///    .and_modify(|_k, v| { *v += 1 })
+    ///    .or_insert("poneyland", 42);
+    /// assert_eq!(map["poneyland"], 42);
+    ///
+    /// map.raw_entry_mut()
+    ///    .from_key("poneyland")
+    ///    .and_modify(|_k, v| { *v += 1 })
+    ///    .or_insert("poneyland", 0);
+    /// assert_eq!(map["poneyland"], 43);
+    /// ```
+    #[unstable(feature = "hash_raw_entry", issue = "54043")]
+    pub fn and_modify<F>(self, f: F) -> Self
+        where F: FnOnce(&mut K, &mut V)
+    {
+        match self {
+            RawEntryMut::Occupied(mut entry) => {
+                {
+                    let (k, v) = entry.get_key_value_mut();
+                    f(k, v);
+                }
+                RawEntryMut::Occupied(entry)
+            },
+            RawEntryMut::Vacant(entry) => RawEntryMut::Vacant(entry),
+        }
+    }
+}
+
+impl<'a, K, V> RawOccupiedEntryMut<'a, K, V> {
+    /// Gets a reference to the key in the entry.
+    #[unstable(feature = "hash_raw_entry", issue = "54043")]
+    pub fn key(&self) -> &K {
+        self.elem.read().0
+    }
+
+    /// Gets a mutable reference to the key in the entry.
+    #[unstable(feature = "hash_raw_entry", issue = "54043")]
+    pub fn key_mut(&mut self) -> &mut K {
+        self.elem.read_mut().0
+    }
+
+    /// Converts the entry into a mutable reference to the key in the entry
+    /// with a lifetime bound to the map itself.
+    #[unstable(feature = "hash_raw_entry", issue = "54043")]
+    pub fn into_key(self) -> &'a mut K {
+        self.elem.into_mut_refs().0
+    }
+
+    /// Gets a reference to the value in the entry.
+    #[unstable(feature = "hash_raw_entry", issue = "54043")]
+    pub fn get(&self) -> &V {
+        self.elem.read().1
+    }
+
+    /// Converts the OccupiedEntry into a mutable reference to the value in the entry
+    /// with a lifetime bound to the map itself.
+    #[unstable(feature = "hash_raw_entry", issue = "54043")]
+    pub fn into_mut(self) -> &'a mut V {
+        self.elem.into_mut_refs().1
+    }
+
+    /// Gets a mutable reference to the value in the entry.
+    #[unstable(feature = "hash_raw_entry", issue = "54043")]
+    pub fn get_mut(&mut self) -> &mut V {
+        self.elem.read_mut().1
+    }
+
+    /// Gets a reference to the key and value in the entry.
+    #[unstable(feature = "hash_raw_entry", issue = "54043")]
+    pub fn get_key_value(&mut self) -> (&K, &V) {
+        self.elem.read()
+    }
+
+    /// Gets a mutable reference to the key and value in the entry.
+    #[unstable(feature = "hash_raw_entry", issue = "54043")]
+    pub fn get_key_value_mut(&mut self) -> (&mut K, &mut V) {
+        self.elem.read_mut()
+    }
+
+    /// Converts the OccupiedEntry into a mutable reference to the key and value in the entry
+    /// with a lifetime bound to the map itself.
+    #[unstable(feature = "hash_raw_entry", issue = "54043")]
+    pub fn into_key_value(self) -> (&'a mut K, &'a mut V) {
+        self.elem.into_mut_refs()
+    }
+
+    /// Sets the value of the entry, and returns the entry's old value.
+    #[unstable(feature = "hash_raw_entry", issue = "54043")]
+    pub fn insert(&mut self, value: V) -> V {
+        mem::replace(self.get_mut(), value)
+    }
+
+    /// Sets the key of the entry, and returns the entry's old key.
+    #[unstable(feature = "hash_raw_entry", issue = "54043")]
+    pub fn insert_key(&mut self, key: K) -> K {
+        mem::replace(self.key_mut(), key)
+    }
+
+    /// Takes the value out of the entry, and returns it.
+    #[unstable(feature = "hash_raw_entry", issue = "54043")]
+    pub fn remove(self) -> V {
+        pop_internal(self.elem).1
+    }
+
+    /// Takes ownership of the key and value from the map.
+    #[unstable(feature = "hash_raw_entry", issue = "54043")]
+    pub fn remove_entry(self) -> (K, V) {
+        let (k, v, _) = pop_internal(self.elem);
+        (k, v)
+    }
+}
+
+impl<'a, K, V, S> RawVacantEntryMut<'a, K, V, S> {
+    /// Inserts the given key and value into the map,
+    /// and returns mutable references to them.
+    #[unstable(feature = "hash_raw_entry", issue = "54043")]
+    pub fn insert(self, key: K, value: V) -> (&'a mut K, &'a mut V)
+        where K: Hash,
+              S: BuildHasher,
+    {
+        let mut hasher = self.hash_builder.build_hasher();
+        key.hash(&mut hasher);
+        self.insert_hashed_nocheck(hasher.finish(), key, value)
+    }
+
+    /// Inserts the given key and value into the map, using the provided hash for
+    /// the key, and returns mutable references to them.
+    #[unstable(feature = "hash_raw_entry", issue = "54043")]
+    pub fn insert_hashed_nocheck(self, hash: u64, key: K, value: V) -> (&'a mut K, &'a mut V) {
+        let hash = SafeHash::new(hash);
+        let b = match self.elem {
+            NeqElem(mut bucket, disp) => {
+                if disp >= DISPLACEMENT_THRESHOLD {
+                    bucket.table_mut().set_tag(true);
+                }
+                robin_hood(bucket, disp, hash, key, value)
+            },
+            NoElem(mut bucket, disp) => {
+                if disp >= DISPLACEMENT_THRESHOLD {
+                    bucket.table_mut().set_tag(true);
+                }
+                bucket.put(hash, key, value)
+            },
+        };
+        b.into_mut_refs()
+    }
+}
+
+#[unstable(feature = "hash_raw_entry", issue = "54043")]
+impl<'a, K, V, S> Debug for RawEntryBuilderMut<'a, K, V, S> {
+    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        f.debug_struct("RawEntryBuilder")
+         .finish()
+    }
+}
+
+#[unstable(feature = "hash_raw_entry", issue = "54043")]
+impl<'a, K: Debug, V: Debug, S> Debug for RawEntryMut<'a, K, V, S> {
+    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        match *self {
+            RawEntryMut::Vacant(ref v) => {
+                f.debug_tuple("RawEntry")
+                    .field(v)
+                    .finish()
+            }
+            RawEntryMut::Occupied(ref o) => {
+                f.debug_tuple("RawEntry")
+                    .field(o)
+                    .finish()
+            }
+        }
+    }
+}
+
+#[unstable(feature = "hash_raw_entry", issue = "54043")]
+impl<'a, K: Debug, V: Debug> Debug for RawOccupiedEntryMut<'a, K, V> {
+    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        f.debug_struct("RawOccupiedEntryMut")
+         .field("key", self.key())
+         .field("value", self.get())
+         .finish()
+    }
+}
+
+#[unstable(feature = "hash_raw_entry", issue = "54043")]
+impl<'a, K, V, S> Debug for RawVacantEntryMut<'a, K, V, S> {
+    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        f.debug_struct("RawVacantEntryMut")
+         .finish()
+    }
+}
+
+#[unstable(feature = "hash_raw_entry", issue = "54043")]
+impl<'a, K, V, S> Debug for RawEntryBuilder<'a, K, V, S> {
+    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        f.debug_struct("RawEntryBuilder")
+         .finish()
+    }
+}
+
 /// A view into a single entry in a map, which may either be vacant or occupied.
 ///
 /// This `enum` is constructed from the [`entry`] method on [`HashMap`].
@@ -3681,4 +4245,106 @@ fn test_try_reserve() {
         }
     }
 
+    #[test]
+    fn test_raw_entry() {
+        use super::RawEntryMut::{Occupied, Vacant};
+
+        let xs = [(1i32, 10i32), (2, 20), (3, 30), (4, 40), (5, 50), (6, 60)];
+
+        let mut map: HashMap<_, _> = xs.iter().cloned().collect();
+
+        let compute_hash = |map: &HashMap<i32, i32>, k: i32| -> u64 {
+            use core::hash::{BuildHasher, Hash, Hasher};
+
+            let mut hasher = map.hasher().build_hasher();
+            k.hash(&mut hasher);
+            hasher.finish()
+        };
+
+        // Existing key (insert)
+        match map.raw_entry_mut().from_key(&1) {
+            Vacant(_) => unreachable!(),
+            Occupied(mut view) => {
+                assert_eq!(view.get(), &10);
+                assert_eq!(view.insert(100), 10);
+            }
+        }
+        let hash1 = compute_hash(&map, 1);
+        assert_eq!(map.raw_entry().from_key(&1).unwrap(), (&1, &100));
+        assert_eq!(map.raw_entry().from_hash(hash1, |k| *k == 1).unwrap(), (&1, &100));
+        assert_eq!(map.raw_entry().from_key_hashed_nocheck(hash1, &1).unwrap(), (&1, &100));
+        assert_eq!(map.raw_entry().search_bucket(hash1, |k| *k == 1).unwrap(), (&1, &100));
+        assert_eq!(map.len(), 6);
+
+        // Existing key (update)
+        match map.raw_entry_mut().from_key(&2) {
+            Vacant(_) => unreachable!(),
+            Occupied(mut view) => {
+                let v = view.get_mut();
+                let new_v = (*v) * 10;
+                *v = new_v;
+            }
+        }
+        let hash2 = compute_hash(&map, 2);
+        assert_eq!(map.raw_entry().from_key(&2).unwrap(), (&2, &200));
+        assert_eq!(map.raw_entry().from_hash(hash2, |k| *k == 2).unwrap(), (&2, &200));
+        assert_eq!(map.raw_entry().from_key_hashed_nocheck(hash2, &2).unwrap(), (&2, &200));
+        assert_eq!(map.raw_entry().search_bucket(hash2, |k| *k == 2).unwrap(), (&2, &200));
+        assert_eq!(map.len(), 6);
+
+        // Existing key (take)
+        let hash3 = compute_hash(&map, 3);
+        match map.raw_entry_mut().from_key_hashed_nocheck(hash3, &3) {
+            Vacant(_) => unreachable!(),
+            Occupied(view) => {
+                assert_eq!(view.remove_entry(), (3, 30));
+            }
+        }
+        assert_eq!(map.raw_entry().from_key(&3), None);
+        assert_eq!(map.raw_entry().from_hash(hash3, |k| *k == 3), None);
+        assert_eq!(map.raw_entry().from_key_hashed_nocheck(hash3, &3), None);
+        assert_eq!(map.raw_entry().search_bucket(hash3, |k| *k == 3), None);
+        assert_eq!(map.len(), 5);
+
+
+        // Nonexistent key (insert)
+        match map.raw_entry_mut().from_key(&10) {
+            Occupied(_) => unreachable!(),
+            Vacant(view) => {
+                assert_eq!(view.insert(10, 1000), (&mut 10, &mut 1000));
+            }
+        }
+        assert_eq!(map.raw_entry().from_key(&10).unwrap(), (&10, &1000));
+        assert_eq!(map.len(), 6);
+
+        // Ensure all lookup methods produce equivalent results.
+        for k in 0..12 {
+            let hash = compute_hash(&map, k);
+            let v = map.get(&k).cloned();
+            let kv = v.as_ref().map(|v| (&k, v));
+
+            assert_eq!(map.raw_entry().from_key(&k), kv);
+            assert_eq!(map.raw_entry().from_hash(hash, |q| *q == k), kv);
+            assert_eq!(map.raw_entry().from_key_hashed_nocheck(hash, &k), kv);
+            assert_eq!(map.raw_entry().search_bucket(hash, |q| *q == k), kv);
+
+            match map.raw_entry_mut().from_key(&k) {
+                Occupied(mut o) => assert_eq!(Some(o.get_key_value()), kv),
+                Vacant(_) => assert_eq!(v, None),
+            }
+            match map.raw_entry_mut().from_key_hashed_nocheck(hash, &k) {
+                Occupied(mut o) => assert_eq!(Some(o.get_key_value()), kv),
+                Vacant(_) => assert_eq!(v, None),
+            }
+            match map.raw_entry_mut().from_hash(hash, |q| *q == k) {
+                Occupied(mut o) => assert_eq!(Some(o.get_key_value()), kv),
+                Vacant(_) => assert_eq!(v, None),
+            }
+            match map.raw_entry_mut().search_bucket(hash, |q| *q == k) {
+                Occupied(mut o) => assert_eq!(Some(o.get_key_value()), kv),
+                Vacant(_) => assert_eq!(v, None),
+            }
+        }
+    }
+
 }
index 017949291bcf16e37f200141b9254d9fdbaad544..49012a7d3419604d8ec943ae409619b2aded356a 100644 (file)
@@ -1755,12 +1755,19 @@ pub fn canonicalize<P: AsRef<Path>>(path: P) -> io::Result<PathBuf> {
 ///
 /// [changes]: ../io/index.html#platform-specific-behavior
 ///
+/// **NOTE**: If a parent of the given path doesn't exist, this function will
+/// return an error. To create a directory and all its missing parents at the
+/// same time, use the [`create_dir_all`] function.
+///
 /// # Errors
 ///
 /// This function will return an error in the following situations, but is not
 /// limited to just these cases:
 ///
 /// * User lacks permissions to create directory at `path`.
+/// * A parent of the given path doesn't exist. (To create a directory and all
+///   its missing parents at the same time, use the [`create_dir_all`]
+///   function.)
 /// * `path` already exists.
 ///
 /// # Examples
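
A minimal sketch (not part of the diff) contrasting the two calls mentioned in the note above:

    use std::fs;
    use std::io;

    fn main() -> io::Result<()> {
        // `create_dir` returns an error here if "parent" does not exist yet:
        // fs::create_dir("parent/child")?;

        // `create_dir_all` creates "parent" and then "parent/child" in one call:
        fs::create_dir_all("parent/child")?;
        Ok(())
    }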
index a4db879680566d2a98634e675052e5ba52312b95..b55d12e4c8dc96693dc81493fc9e7d8a94995bfa 100644 (file)
 #![feature(cfg_target_vendor)]
 #![feature(char_error_internals)]
 #![feature(compiler_builtins_lib)]
-#![cfg_attr(stage0, feature(min_const_fn))]
 #![feature(const_int_ops)]
 #![feature(const_ip)]
 #![feature(const_raw_ptr_deref)]
 #![feature(prelude_import)]
 #![feature(ptr_internals)]
 #![feature(raw)]
+#![feature(hash_raw_entry)]
 #![feature(rustc_attrs)]
 #![feature(rustc_const_unstable)]
 #![feature(std_internals)]
 
 #![default_lib_allocator]
 
-// Always use alloc_system during stage0 since we don't know if the alloc_*
-// crate the stage0 compiler will pick by default is enabled (e.g.
-// if the user has disabled jemalloc in `./configure`).
-// `force_alloc_system` is *only* intended as a workaround for local rebuilds
-// with a rustc without jemalloc.
-// FIXME(#44236) shouldn't need MSVC logic
-#[cfg(all(not(target_env = "msvc"),
-          any(all(stage0, not(test)), feature = "force_alloc_system")))]
+#[cfg(stage0)]
 #[global_allocator]
 static ALLOC: alloc_system::System = alloc_system::System;
 
index 5c87035d8e929214e11ced115b76469307e2923a..4bc18a57e92a6a6319743f8553c9f07c1fa582a9 100644 (file)
@@ -264,6 +264,9 @@ impl RefUnwindSafe for atomic::AtomicI32 {}
 #[cfg(target_has_atomic = "64")]
 #[unstable(feature = "integer_atomics", issue = "32976")]
 impl RefUnwindSafe for atomic::AtomicI64 {}
+#[cfg(all(not(stage0), target_has_atomic = "128"))]
+#[unstable(feature = "integer_atomics", issue = "32976")]
+impl RefUnwindSafe for atomic::AtomicI128 {}
 
 #[cfg(target_has_atomic = "ptr")]
 #[stable(feature = "unwind_safe_atomic_refs", since = "1.14.0")]
@@ -280,6 +283,9 @@ impl RefUnwindSafe for atomic::AtomicU32 {}
 #[cfg(target_has_atomic = "64")]
 #[unstable(feature = "integer_atomics", issue = "32976")]
 impl RefUnwindSafe for atomic::AtomicU64 {}
+#[cfg(all(not(stage0), target_has_atomic = "128"))]
+#[unstable(feature = "integer_atomics", issue = "32976")]
+impl RefUnwindSafe for atomic::AtomicU128 {}
 
 #[cfg(target_has_atomic = "8")]
 #[stable(feature = "unwind_safe_atomic_refs", since = "1.14.0")]
index 77f125f3c5b569b2b9597592f09a53a116149cd9..3d5920dfb69ac8aa59f2125e36c1028ea247a89d 100644 (file)
@@ -141,6 +141,10 @@ pub fn saw_nul(&self) -> bool {
     pub fn get_argv(&self) -> &Vec<*const c_char> {
         &self.argv.0
     }
+    #[cfg(not(target_os = "fuchsia"))]
+    pub fn get_program(&self) -> &CString {
+        return &self.program;
+    }
 
     #[allow(dead_code)]
     pub fn get_cwd(&self) -> &Option<CString> {
@@ -244,6 +248,10 @@ pub fn push(&mut self, item: CString) {
     pub fn as_ptr(&self) -> *const *const c_char {
         self.ptrs.as_ptr()
     }
+    #[cfg(not(target_os = "fuchsia"))]
+    pub fn get_items(&self) -> &[CString] {
+        return &self.items;
+    }
 }
 
 fn construct_envp(env: BTreeMap<DefaultEnvKey, OsString>, saw_nul: &mut bool) -> CStringArray {
index 7f1f9353c6d09613e6f406aa018e6d2d160d1bc1..f41bd2c20720a65511c13c4028d53965cd4d0efc 100644 (file)
@@ -8,6 +8,8 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
+use env;
+use ffi::CString;
 use io::{self, Error, ErrorKind};
 use libc::{self, c_int, gid_t, pid_t, uid_t};
 use ptr;
@@ -39,13 +41,15 @@ pub fn spawn(&mut self, default: Stdio, needs_stdin: bool)
             return Ok((ret, ours))
         }
 
+        let possible_paths = self.compute_possible_paths(envp.as_ref());
+
         let (input, output) = sys::pipe::anon_pipe()?;
 
         let pid = unsafe {
             match cvt(libc::fork())? {
                 0 => {
                     drop(input);
-                    let err = self.do_exec(theirs, envp.as_ref());
+                    let err = self.do_exec(theirs, envp.as_ref(), possible_paths);
                     let errno = err.raw_os_error().unwrap_or(libc::EINVAL) as u32;
                     let bytes = [
                         (errno >> 24) as u8,
@@ -113,12 +117,48 @@ pub fn exec(&mut self, default: Stdio) -> io::Error {
                                   "nul byte found in provided data")
         }
 
+        let possible_paths = self.compute_possible_paths(envp.as_ref());
         match self.setup_io(default, true) {
-            Ok((_, theirs)) => unsafe { self.do_exec(theirs, envp.as_ref()) },
+            Ok((_, theirs)) => unsafe { self.do_exec(theirs, envp.as_ref(), possible_paths) },
             Err(e) => e,
         }
     }
 
+    fn compute_possible_paths(&self, maybe_envp: Option<&CStringArray>) -> Option<Vec<CString>> {
+        let program = self.get_program().as_bytes();
+        if program.contains(&b'/') {
+            return None;
+        }
+        // Outside the match so we can borrow it for the lifetime of the function.
+        let parent_path = env::var("PATH").ok();
+        let paths = match maybe_envp {
+            Some(envp) => {
+                match envp.get_items().iter().find(|var| var.as_bytes().starts_with(b"PATH=")) {
+                    Some(p) => &p.as_bytes()[5..],
+                    None => return None,
+                }
+            },
+            // maybe_envp is None if the process isn't changing the parent's env at all.
+            None => {
+                match parent_path.as_ref() {
+                    Some(p) => p.as_bytes(),
+                    None => return None,
+                }
+            },
+        };
+
+        let mut possible_paths = vec![];
+        for path in paths.split(|p| *p == b':') {
+            let mut binary_path = Vec::with_capacity(program.len() + path.len() + 1);
+            binary_path.extend_from_slice(path);
+            binary_path.push(b'/');
+            binary_path.extend_from_slice(program);
+            let c_binary_path = CString::new(binary_path).unwrap();
+            possible_paths.push(c_binary_path);
+        }
+        return Some(possible_paths);
+    }
+
     // And at this point we've reached a special time in the life of the
     // child. The child must now be considered hamstrung and unable to
     // do anything other than syscalls really. Consider the following
@@ -152,7 +192,8 @@ pub fn exec(&mut self, default: Stdio) -> io::Error {
     unsafe fn do_exec(
         &mut self,
         stdio: ChildPipes,
-        maybe_envp: Option<&CStringArray>
+        maybe_envp: Option<&CStringArray>,
+        maybe_possible_paths: Option<Vec<CString>>,
     ) -> io::Error {
         use sys::{self, cvt_r};
 
@@ -193,9 +234,6 @@ macro_rules! t {
         if let Some(ref cwd) = *self.get_cwd() {
             t!(cvt(libc::chdir(cwd.as_ptr())));
         }
-        if let Some(envp) = maybe_envp {
-            *sys::os::environ() = envp.as_ptr();
-        }
 
         // emscripten has no signal support.
         #[cfg(not(any(target_os = "emscripten")))]
@@ -231,8 +269,53 @@ macro_rules! t {
             t!(callback());
         }
 
-        libc::execvp(self.get_argv()[0], self.get_argv().as_ptr());
-        io::Error::last_os_error()
+        // If the program doesn't contain a path separator, and our environment contains a PATH var, then we
+        // implement the PATH traversal ourselves so that it honors the child's PATH instead of the
+        // parent's. This mirrors the logic that exists in glibc's execvpe, except using the
+        // child's env to fetch PATH.
+        match maybe_possible_paths {
+            Some(possible_paths) => {
+                let mut pending_error = None;
+                for path in possible_paths {
+                    libc::execve(
+                        path.as_ptr(),
+                        self.get_argv().as_ptr(),
+                        maybe_envp.map(|envp| envp.as_ptr()).unwrap_or_else(|| *sys::os::environ())
+                    );
+                    let err = io::Error::last_os_error();
+                    match err.kind() {
+                        io::ErrorKind::PermissionDenied => {
+                            // If we saw a PermissionDenied, and none of the other entries in
+                            // $PATH are successful, then we'll return the first EACCES we see.
+                            if pending_error.is_none() {
+                                pending_error = Some(err);
+                            }
+                        },
+                        // Errors which indicate we failed to find a file are ignored and we try
+                        // the next entry in the path.
+                        io::ErrorKind::NotFound | io::ErrorKind::TimedOut => {
+                            continue
+                        },
+                        // Any other error means we found a file and couldn't execute it.
+                        _ => {
+                            return err;
+                        }
+                    }
+                }
+                if let Some(err) = pending_error {
+                    return err;
+                }
+                return io::Error::from_raw_os_error(libc::ENOENT);
+            },
+            _ => {
+                libc::execve(
+                    self.get_argv()[0],
+                    self.get_argv().as_ptr(),
+                    maybe_envp.map(|envp| envp.as_ptr()).unwrap_or_else(|| *sys::os::environ())
+                );
+                return io::Error::last_os_error()
+            }
+        }
     }
 
     #[cfg(not(any(target_os = "macos", target_os = "freebsd",
index 59f100fad1bb9a1eda0aa41d72fa42c6b7c506d6..4df4751117244451299281594de09ec169aa75b4 100644 (file)
@@ -14,6 +14,7 @@
 
 use cell::UnsafeCell;
 use fmt;
+use hint;
 use mem;
 
 /// A thread local storage key which owns its contents.
@@ -145,13 +146,13 @@ macro_rules! thread_local {
 
     // process multiple declarations
     ($(#[$attr:meta])* $vis:vis static $name:ident: $t:ty = $init:expr; $($rest:tt)*) => (
-        __thread_local_inner!($(#[$attr])* $vis $name, $t, $init);
-        thread_local!($($rest)*);
+        $crate::__thread_local_inner!($(#[$attr])* $vis $name, $t, $init);
+        $crate::thread_local!($($rest)*);
     );
 
     // handle a single declaration
     ($(#[$attr:meta])* $vis:vis static $name:ident: $t:ty = $init:expr) => (
-        __thread_local_inner!($(#[$attr])* $vis $name, $t, $init);
+        $crate::__thread_local_inner!($(#[$attr])* $vis $name, $t, $init);
     );
 }
 
@@ -201,7 +202,7 @@ unsafe fn __getit() -> $crate::option::Option<
     };
     ($(#[$attr:meta])* $vis:vis $name:ident, $t:ty, $init:expr) => {
         $(#[$attr])* $vis const $name: $crate::thread::LocalKey<$t> =
-            __thread_local_inner!(@key $(#[$attr])* $vis $name, $t, $init);
+            $crate::__thread_local_inner!(@key $(#[$attr])* $vis $name, $t, $init);
     }
 }
 
@@ -275,7 +276,15 @@ unsafe fn init(&self, slot: &UnsafeCell<Option<T>>) -> &T {
         // operations a little differently and make this safe to call.
         mem::replace(&mut *ptr, Some(value));
 
-        (*ptr).as_ref().unwrap()
+        // After storing `Some` we want to get a reference to the contents of
+        // what we just stored. While we could use `unwrap` here and it should
+        // always work it empirically doesn't seem to always get optimized away,
+        // which means that using something like `try_with` can pull in
+        // panicking code and cause a large size bloat.
+        match *ptr {
+            Some(ref x) => x,
+            None => hint::unreachable_unchecked(),
+        }
     }
 
     /// Acquires a reference to the value in this TLS key.
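
The `$crate::` prefixes above make the expansion self-contained, so downstream code can invoke `thread_local!` without importing the internal `__thread_local_inner` helper. A minimal usage sketch:

    use std::cell::Cell;

    thread_local! {
        static COUNTER: Cell<u32> = Cell::new(0);
    }

    fn main() {
        COUNTER.with(|c| c.set(c.get() + 1));
        COUNTER.with(|c| println!("counter = {}", c.get()));
    }
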
index 3702c91966fdacbddc3fa554ed3d5285aaf8f413..a57b8dc7237678489baf2476a7350cc6f26e7b09 100644 (file)
@@ -1149,8 +1149,18 @@ pub fn unpark(&self) {
             _ => panic!("inconsistent state in unpark"),
         }
 
-        // Coordinate wakeup through the mutex and a condvar notification
-        let _lock = self.inner.lock.lock().unwrap();
+        // There is a period between when the parked thread sets `state` to
+        // `PARKED` (or last checked `state` in the case of a spurious wake
+        // up) and when it actually waits on `cvar`. If we were to notify
+        // during this period it would be ignored and then when the parked
+        // thread went to sleep it would never wake up. Fortunately, it has
+        // `lock` locked at this stage so we can acquire `lock` to wait until
+        // it is ready to receive the notification.
+        //
+        // Releasing `lock` before the call to `notify_one` means that when the
+        // parked thread wakes it doesn't get woken only to have to wait for us
+        // to release `lock`.
+        drop(self.inner.lock.lock().unwrap());
         self.inner.cvar.notify_one()
     }
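
The comment above describes the lost-wakeup race this drop-before-`notify_one` ordering avoids. For context, a minimal sketch of the `park`/`unpark` API whose correctness depends on that handshake:

    use std::thread;
    use std::time::Duration;

    fn main() {
        let parked = thread::spawn(|| {
            // Blocks until another thread calls unpark() on this handle
            // (or until a token was already stored by an earlier unpark).
            thread::park();
            println!("woken up");
        });

        thread::sleep(Duration::from_millis(10));
        parked.thread().unpark();
        parked.join().unwrap();
    }
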
 
index a9acc70d1a5b982f421a7047c13b2412ee2e8a1a..cfedda18a7e22886e2e972cc30d82a36157976a5 100644 (file)
@@ -288,9 +288,9 @@ pub enum TraitBoundModifier {
 }
 
 /// The AST represents all type param bounds as types.
-/// typeck::collect::compute_bounds matches these against
-/// the "special" built-in traits (see middle::lang_items) and
-/// detects Copy, Send and Sync.
+/// `typeck::collect::compute_bounds` matches these against
+/// the "special" built-in traits (see `middle::lang_items`) and
+/// detects `Copy`, `Send` and `Sync`.
 #[derive(Clone, RustcEncodable, RustcDecodable, Debug)]
 pub enum GenericBound {
     Trait(PolyTraitRef, TraitBoundModifier),
index 33b651e1b385403b42f504d7b0ca65c4f255a493..cc8af70a050c9d127588da4155d32ea2b1550c21 100644 (file)
@@ -252,7 +252,7 @@ pub fn span(&self) -> Span {
 
 pub struct MacroExpander<'a, 'b:'a> {
     pub cx: &'a mut ExtCtxt<'b>,
-    monotonic: bool, // c.f. `cx.monotonic_expander()`
+    monotonic: bool, // cf. `cx.monotonic_expander()`
 }
 
 impl<'a, 'b> MacroExpander<'a, 'b> {
@@ -387,6 +387,8 @@ fn expand_fragment(&mut self, input_fragment: AstFragment) -> AstFragment {
                         add_derived_markers(&mut self.cx, item.span(), &traits, item.clone());
                     let derives = derives.entry(invoc.expansion_data.mark).or_default();
 
+                    derives.reserve(traits.len());
+                    invocations.reserve(traits.len());
                     for path in &traits {
                         let mark = Mark::fresh(self.cx.current_expansion.mark);
                         derives.push(mark);
@@ -687,7 +689,7 @@ fn visit_item(&mut self, i: &'ast ast::Item) {
                         "proc_macro_hygiene",
                         self.span,
                         GateIssue::Language,
-                        &format!("procedural macros cannot expand to macro definitions"),
+                        "procedural macros cannot expand to macro definitions",
                     );
                 }
                 visit::walk_item(self, i);
@@ -1030,7 +1032,7 @@ pub fn parse_ast_fragment(&mut self, kind: AstFragmentKind, macro_legacy_warning
                 }
             },
             AstFragmentKind::Ty => AstFragment::Ty(self.parse_ty()?),
-            AstFragmentKind::Pat => AstFragment::Pat(self.parse_pat()?),
+            AstFragmentKind::Pat => AstFragment::Pat(self.parse_pat(None)?),
         })
     }
 
index 62bc9fae3b59af13855894295903953da83f643c..37800a334c6daf7ca4d54efe97e76ef9fb902426 100644 (file)
@@ -419,7 +419,7 @@ pub fn parse_item_panic(parser: &mut Parser) -> Option<P<Item>> {
 }
 
 pub fn parse_pat_panic(parser: &mut Parser) -> P<Pat> {
-    panictry!(parser.parse_pat())
+    panictry!(parser.parse_pat(None))
 }
 
 pub fn parse_arm_panic(parser: &mut Parser) -> Arm {
index 03a8376e7636138ce00524d8618fa8d500616153..06c1d58070e21fe3524dcc9463e51cbb2e3c7d2a 100644 (file)
@@ -143,6 +143,8 @@ struct MatcherTtFrame<'a> {
     idx: usize,
 }
 
+type NamedMatchVec = SmallVec<[NamedMatch; 4]>;
+
 /// Represents a single "position" (aka "matcher position", aka "item"), as described in the module
 /// documentation.
 #[derive(Clone)]
@@ -168,7 +170,7 @@ struct MatcherPos<'a> {
     /// all bound matches from the submatcher into the shared top-level `matches` vector. If `sep`
     /// and `up` are `Some`, then `matches` is _not_ the shared top-level list. Instead, if one
     /// wants the shared `matches`, one should use `up.matches`.
-    matches: Vec<Rc<Vec<NamedMatch>>>,
+    matches: Box<[Rc<NamedMatchVec>]>,
     /// The position in `matches` corresponding to the first metavar in this matcher's sequence of
     /// token trees. In other words, the first metavar in the first token of `top_elts` corresponds
     /// to `matches[match_lo]`.
@@ -278,9 +280,14 @@ pub fn count_names(ms: &[TokenTree]) -> usize {
     })
 }
 
-/// Initialize `len` empty shared `Vec`s to be used to store matches of metavars.
-fn create_matches(len: usize) -> Vec<Rc<Vec<NamedMatch>>> {
-    (0..len).into_iter().map(|_| Rc::new(Vec::new())).collect()
+/// `len` `Vec`s (initially shared and empty) that will store matches of metavars.
+fn create_matches(len: usize) -> Box<[Rc<NamedMatchVec>]> {
+    if len == 0 {
+        vec![]
+    } else {
+        let empty_matches = Rc::new(SmallVec::new());
+        vec![empty_matches.clone(); len]
+    }.into_boxed_slice()
 }
 
 /// Generate the top-level matcher position in which the "dot" is before the first token of the
@@ -332,7 +339,7 @@ fn initial_matcher_pos(ms: &[TokenTree], open: Span) -> MatcherPos {
 /// token tree it was derived from.
 #[derive(Debug, Clone)]
 pub enum NamedMatch {
-    MatchedSeq(Rc<Vec<NamedMatch>>, DelimSpan),
+    MatchedSeq(Rc<NamedMatchVec>, DelimSpan),
     MatchedNonterminal(Rc<Nonterminal>),
 }
 
@@ -540,7 +547,7 @@ fn inner_parse_loop<'a>(
                         new_item.match_cur += seq.num_captures;
                         new_item.idx += 1;
                         for idx in item.match_cur..item.match_cur + seq.num_captures {
-                            new_item.push_match(idx, MatchedSeq(Rc::new(vec![]), sp));
+                            new_item.push_match(idx, MatchedSeq(Rc::new(smallvec![]), sp));
                         }
                         cur_items.push(new_item);
                     }
@@ -881,7 +888,7 @@ fn parse_nt<'a>(p: &mut Parser<'a>, sp: Span, name: &str) -> Nonterminal {
                 FatalError.raise();
             }
         },
-        "pat" => token::NtPat(panictry!(p.parse_pat())),
+        "pat" => token::NtPat(panictry!(p.parse_pat(None))),
         "expr" => token::NtExpr(panictry!(p.parse_expr())),
         "literal" => token::NtLiteral(panictry!(p.parse_literal_maybe_minus())),
         "ty" => token::NtTy(panictry!(p.parse_ty())),
index d4dc1f8b50851d30ce7a2e2c0d5fe61861016188..55652c481bd3f07478d90588229b6c3ae755825a 100644 (file)
@@ -349,7 +349,7 @@ pub fn walk_feature_fields<F>(&self, mut f: F)
     (active, abi_thiscall, "1.19.0", None, None),
 
     // Allows a test to fail without failing the whole suite
-    (active, allow_fail, "1.19.0", Some(42219), None),
+    (active, allow_fail, "1.19.0", Some(46488), None),
 
     // Allows unsized tuple coercion.
     (active, unsized_tuple_coercion, "1.20.0", Some(42877), None),
@@ -376,7 +376,7 @@ pub fn walk_feature_fields<F>(&self, mut f: F)
     (active, non_exhaustive, "1.22.0", Some(44109), None),
 
     // `crate` as visibility modifier, synonymous to `pub(crate)`
-    (active, crate_visibility_modifier, "1.23.0", Some(45388), None),
+    (active, crate_visibility_modifier, "1.23.0", Some(53120), None),
 
     // extern types
     (active, extern_types, "1.23.0", Some(43467), None),
@@ -391,13 +391,13 @@ pub fn walk_feature_fields<F>(&self, mut f: F)
     (active, generic_associated_types, "1.23.0", Some(44265), None),
 
     // `extern` in paths
-    (active, extern_in_paths, "1.23.0", Some(44660), None),
+    (active, extern_in_paths, "1.23.0", Some(55600), None),
 
     // Use `?` as the Kleene "at most one" operator
     (active, macro_at_most_once_rep, "1.25.0", Some(48075), None),
 
     // Infer static outlives requirements; RFC 2093
-    (active, infer_static_outlives_requirements, "1.26.0", Some(44493), None),
+    (active, infer_static_outlives_requirements, "1.26.0", Some(54185), None),
 
     // Multiple patterns with `|` in `if let` and `while let`
     (active, if_while_or_patterns, "1.26.0", Some(48215), None),
@@ -448,9 +448,6 @@ pub fn walk_feature_fields<F>(&self, mut f: F)
     // Integer match exhaustiveness checking
     (active, exhaustive_integer_patterns, "1.30.0", Some(50907), None),
 
-    // RFC 2070: #[panic_implementation] / #[panic_handler]
-    (active, panic_implementation, "1.28.0", Some(44489), None),
-
     // #[doc(keyword = "...")]
     (active, doc_keyword, "1.28.0", Some(51315), None),
 
@@ -466,7 +463,7 @@ pub fn walk_feature_fields<F>(&self, mut f: F)
     (active, test_2018_feature, "1.31.0", Some(0), Some(Edition::Edition2018)),
 
     // Support for arbitrary delimited token streams in non-macro attributes
-    (active, unrestricted_attribute_tokens, "1.30.0", Some(44690), None),
+    (active, unrestricted_attribute_tokens, "1.30.0", Some(55208), None),
 
     // Allows `use x::y;` to resolve through `self::x`, not just `::x`
     (active, uniform_paths, "1.30.0", Some(53130), None),
@@ -503,7 +500,7 @@ pub fn walk_feature_fields<F>(&self, mut f: F)
     (active, underscore_const_names, "1.31.0", Some(54912), None),
 
     // `extern crate foo as bar;` puts `bar` into extern prelude.
-    (active, extern_crate_item_prelude, "1.31.0", Some(54658), None),
+    (active, extern_crate_item_prelude, "1.31.0", Some(55599), None),
 
     // `reason = ` in lint attributes and `expect` lint attribute
     (active, lint_reasons, "1.31.0", Some(54503), None),
@@ -541,6 +538,8 @@ pub fn walk_feature_fields<F>(&self, mut f: F)
      Some("subsumed by `#![feature(proc_macro_hygiene)]`")),
     (removed, proc_macro_gen, "1.27.0", Some(54727), None,
      Some("subsumed by `#![feature(proc_macro_hygiene)]`")),
+    (removed, panic_implementation, "1.28.0", Some(44489), None,
+     Some("subsumed by `#[panic_handler]`")),
 );
 
 declare_features! (
@@ -1160,16 +1159,6 @@ pub fn is_builtin_attr(attr: &ast::Attribute) -> bool {
                                    "infer 'static lifetime requirements",
                                    cfg_fn!(infer_static_outlives_requirements))),
 
-    // RFC 2070 (deprecated attribute name)
-    ("panic_implementation",
-     Normal,
-     Gated(Stability::Deprecated("https://github.com/rust-lang/rust/issues/44489\
-                                  #issuecomment-415140224",
-                                 Some("replace this attribute with `#[panic_handler]`")),
-           "panic_implementation",
-           "this attribute was renamed to `panic_handler`",
-           cfg_fn!(panic_implementation))),
-
     // RFC 2070
     ("panic_handler", Normal, Ungated),
 
@@ -1635,19 +1624,13 @@ fn visit_item(&mut self, i: &'a ast::Item) {
                             if name == "packed" {
                                 gate_feature_post!(&self, repr_packed, attr.span,
                                                    "the `#[repr(packed(n))]` attribute \
-                                                   is experimental");
+                                                    is experimental");
                             }
                         }
                     }
                 }
             }
 
-            ast::ItemKind::TraitAlias(..) => {
-                gate_feature_post!(&self, trait_alias,
-                                   i.span,
-                                   "trait aliases are not yet fully implemented");
-            }
-
             ast::ItemKind::Impl(_, polarity, defaultness, _, _, _, _) => {
                 if polarity == ast::ImplPolarity::Negative {
                     gate_feature_post!(&self, optin_builtin_traits,
@@ -1669,6 +1652,15 @@ fn visit_item(&mut self, i: &'a ast::Item) {
                                    "auto traits are experimental and possibly buggy");
             }
 
+            ast::ItemKind::TraitAlias(..) => {
+                gate_feature_post!(
+                    &self,
+                    trait_alias,
+                    i.span,
+                    "trait aliases are experimental"
+                );
+            }
+
             ast::ItemKind::MacroDef(ast::MacroDef { legacy: false, .. }) => {
                 let msg = "`macro` is experimental";
                 gate_feature_post!(&self, decl_macro, i.span, msg);
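
For reference, the syntax now gated as "trait aliases are experimental" looks like the following nightly-only sketch (the alias name is illustrative; how far aliases can be used beyond bounds depends on the implementation status at this point):

    #![feature(trait_alias)]

    use std::fmt::Debug;

    // A trait alias usable wherever a `Debug + Send` bound would be written.
    trait Printable = Debug + Send;

    fn show<T: Printable>(value: T) {
        println!("{:?}", value);
    }

    fn main() {
        show(42);
    }
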
index 9077eca18215c0010d3e57a52570f97b3e44a1bd..e9a6535cba1d231460068d3bbc4c0a33ae5cdb0b 100644 (file)
@@ -70,6 +70,23 @@ macro_rules! panictry {
     })
 }
 
+// A variant of 'panictry!' that works on a Vec<Diagnostic> instead of a single DiagnosticBuilder.
+macro_rules! panictry_buffer {
+    ($handler:expr, $e:expr) => ({
+        use std::result::Result::{Ok, Err};
+        use errors::{FatalError, DiagnosticBuilder};
+        match $e {
+            Ok(e) => e,
+            Err(errs) => {
+                for e in errs {
+                    DiagnosticBuilder::new_diagnostic($handler, e).emit();
+                }
+                FatalError.raise()
+            }
+        }
+    })
+}
+
 #[macro_export]
 macro_rules! unwrap_or {
     ($opt:expr, $default:expr) => {
index 465ce73e01de20d6dc0097eab8c6db41f8bfc342..590506566dd5f0d40baa1fbb83c95c955a6c6f0c 100644 (file)
@@ -11,7 +11,7 @@
 use ast::{self, Ident};
 use syntax_pos::{self, BytePos, CharPos, Pos, Span, NO_EXPANSION};
 use source_map::{SourceMap, FilePathMapping};
-use errors::{Applicability, FatalError, DiagnosticBuilder};
+use errors::{Applicability, FatalError, Diagnostic, DiagnosticBuilder};
 use parse::{token, ParseSess};
 use str::char_at;
 use symbol::{Symbol, keywords};
@@ -175,6 +175,16 @@ pub fn emit_fatal_errors(&mut self) {
         self.fatal_errs.clear();
     }
 
+    pub fn buffer_fatal_errors(&mut self) -> Vec<Diagnostic> {
+        let mut buffer = Vec::new();
+
+        for err in self.fatal_errs.drain(..) {
+            err.buffer(&mut buffer);
+        }
+
+        buffer
+    }
+
     pub fn peek(&self) -> TokenAndSpan {
         // FIXME(pcwalton): Bad copy!
         TokenAndSpan {
@@ -251,6 +261,17 @@ pub fn new_without_err(sess: &'a ParseSess,
         Ok(sr)
     }
 
+    pub fn new_or_buffered_errs(sess: &'a ParseSess,
+                                source_file: Lrc<syntax_pos::SourceFile>,
+                                override_span: Option<Span>) -> Result<Self, Vec<Diagnostic>> {
+        let mut sr = StringReader::new_raw(sess, source_file, override_span);
+        if sr.advance_token().is_err() {
+            Err(sr.buffer_fatal_errors())
+        } else {
+            Ok(sr)
+        }
+    }
+
     pub fn retokenize(sess: &'a ParseSess, mut span: Span) -> Self {
         let begin = sess.source_map().lookup_byte_offset(span.lo());
         let end = sess.source_map().lookup_byte_offset(span.hi());
index ce32520b8e74603e3f58a25cbe4fb34e4ba1b5f7..fd66bf55a74be8b8d53a0ec4df50260c7198ff3c 100644 (file)
@@ -15,7 +15,7 @@
 use early_buffered_lints::{BufferedEarlyLint, BufferedEarlyLintId};
 use source_map::{SourceMap, FilePathMapping};
 use syntax_pos::{Span, SourceFile, FileName, MultiSpan};
-use errors::{Handler, ColorConfig, DiagnosticBuilder};
+use errors::{Handler, ColorConfig, Diagnostic, DiagnosticBuilder};
 use feature_gate::UnstableFeatures;
 use parse::parser::Parser;
 use ptr::P;
@@ -174,12 +174,21 @@ pub fn parse_stream_from_source_str(name: FileName, source: String, sess: &Parse
     source_file_to_stream(sess, sess.source_map().new_source_file(name, source), override_span)
 }
 
-// Create a new parser from a source string
+/// Create a new parser from a source string
 pub fn new_parser_from_source_str(sess: &ParseSess, name: FileName, source: String)
                                       -> Parser {
-    let mut parser = source_file_to_parser(sess, sess.source_map().new_source_file(name, source));
+    panictry_buffer!(&sess.span_diagnostic, maybe_new_parser_from_source_str(sess, name, source))
+}
+
+/// Create a new parser from a source string. Returns any buffered errors from lexing the initial
+/// token stream.
+pub fn maybe_new_parser_from_source_str(sess: &ParseSess, name: FileName, source: String)
+    -> Result<Parser, Vec<Diagnostic>>
+{
+    let mut parser = maybe_source_file_to_parser(sess,
+                                                 sess.source_map().new_source_file(name, source))?;
     parser.recurse_into_file_modules = false;
-    parser
+    Ok(parser)
 }
 
 /// Create a new parser, handling errors as appropriate
@@ -204,14 +213,23 @@ pub fn new_parser_from_file<'a>(sess: &'a ParseSess, path: &Path) -> Parser<'a>
 
 /// Given a source_file and config, return a parser
 fn source_file_to_parser(sess: & ParseSess, source_file: Lrc<SourceFile>) -> Parser {
+    panictry_buffer!(&sess.span_diagnostic,
+                     maybe_source_file_to_parser(sess, source_file))
+}
+
+/// Given a source_file and config, return a parser. Returns any buffered errors from lexing the
+/// initial token stream.
+fn maybe_source_file_to_parser(sess: &ParseSess, source_file: Lrc<SourceFile>)
+    -> Result<Parser, Vec<Diagnostic>>
+{
     let end_pos = source_file.end_pos;
-    let mut parser = stream_to_parser(sess, source_file_to_stream(sess, source_file, None));
+    let mut parser = stream_to_parser(sess, maybe_file_to_stream(sess, source_file, None)?);
 
     if parser.token == token::Eof && parser.span.is_dummy() {
         parser.span = Span::new(end_pos, end_pos, parser.span.ctxt());
     }
 
-    parser
+    Ok(parser)
 }
 
 // must preserve old name for now, because quote! from the *existing*
@@ -243,9 +261,25 @@ fn file_to_source_file(sess: &ParseSess, path: &Path, spanopt: Option<Span>)
 pub fn source_file_to_stream(sess: &ParseSess,
                              source_file: Lrc<SourceFile>,
                              override_span: Option<Span>) -> TokenStream {
-    let mut srdr = lexer::StringReader::new(sess, source_file, override_span);
+    panictry_buffer!(&sess.span_diagnostic, maybe_file_to_stream(sess, source_file, override_span))
+}
+
+/// Given a source file, produce a sequence of token-trees. Returns any buffered errors from
+/// parsing the token stream.
+pub fn maybe_file_to_stream(sess: &ParseSess,
+                            source_file: Lrc<SourceFile>,
+                            override_span: Option<Span>) -> Result<TokenStream, Vec<Diagnostic>> {
+    let mut srdr = lexer::StringReader::new_or_buffered_errs(sess, source_file, override_span)?;
     srdr.real_token();
-    panictry!(srdr.parse_all_token_trees())
+
+    match srdr.parse_all_token_trees() {
+        Ok(stream) => Ok(stream),
+        Err(err) => {
+            let mut buffer = Vec::with_capacity(1);
+            err.buffer(&mut buffer);
+            Err(buffer)
+        }
+    }
 }
 
 /// Given stream and the `ParseSess`, produce a parser
index be448f960df3fd105310cbe657195a0bb7b5fe70..7e29eaae4e856ea67671694658fad910063d5366 100644 (file)
@@ -611,6 +611,7 @@ fn token_descr(&self) -> Option<&'static str> {
             t if t.is_special_ident() => "reserved identifier",
             t if t.is_used_keyword() => "keyword",
             t if t.is_unused_keyword() => "reserved keyword",
+            token::DocComment(..) => "doc comment",
             _ => return None,
         })
     }
@@ -644,8 +645,8 @@ pub fn expect(&mut self, t: &token::Token) -> PResult<'a,  ()> {
                 Ok(())
             } else {
                 let token_str = pprust::token_to_string(t);
-                let this_token_str = self.this_token_to_string();
-                let mut err = self.fatal(&format!("expected `{}`, found `{}`",
+                let this_token_str = self.this_token_descr();
+                let mut err = self.fatal(&format!("expected `{}`, found {}",
                                                   token_str,
                                                   this_token_str));
 
@@ -1295,7 +1296,7 @@ fn token_is_bare_fn_keyword(&mut self) -> bool {
             self.check_keyword(keywords::Extern) && self.is_extern_non_path()
     }
 
-    /// parse a TyKind::BareFn type:
+    /// parse a `TyKind::BareFn` type:
     fn parse_ty_bare_fn(&mut self, generic_params: Vec<GenericParam>) -> PResult<'a, TyKind> {
         /*
 
@@ -1444,8 +1445,8 @@ fn parse_trait_item_(&mut self,
                             Some(body)
                         }
                         _ => {
-                            let token_str = self.this_token_to_string();
-                            let mut err = self.fatal(&format!("expected `;` or `{{`, found `{}`",
+                            let token_str = self.this_token_descr();
+                            let mut err = self.fatal(&format!("expected `;` or `{{`, found {}",
                                                               token_str));
                             err.span_label(self.span, "expected `;` or `{`");
                             return Err(err);
@@ -1453,8 +1454,8 @@ fn parse_trait_item_(&mut self,
                     }
                 }
                 _ => {
-                    let token_str = self.this_token_to_string();
-                    let mut err = self.fatal(&format!("expected `;` or `{{`, found `{}`",
+                    let token_str = self.this_token_descr();
+                    let mut err = self.fatal(&format!("expected `;` or `{{`, found {}",
                                                       token_str));
                     err.span_label(self.span, "expected `;` or `{`");
                     return Err(err);
@@ -1532,7 +1533,7 @@ fn parse_ty_common(&mut self, allow_plus: bool, allow_qpath_recovery: bool)
                             if maybe_bounds && bounds.len() == 1 && !trailing_plus => {
                         let path = match bounds[0] {
                             GenericBound::Trait(ref pt, ..) => pt.trait_ref.path.clone(),
-                            _ => self.bug("unexpected lifetime bound"),
+                            GenericBound::Outlives(..) => self.bug("unexpected lifetime bound"),
                         };
                         self.parse_remaining_bounds(Vec::new(), path, lo, true)?
                     }
@@ -1789,6 +1790,35 @@ fn is_named_argument(&mut self) -> bool {
         self.look_ahead(offset + 1, |t| t == &token::Colon)
     }
 
+    /// Skip unexpected attributes and doc comments in this position and emit an appropriate error.
+    fn eat_incorrect_doc_comment(&mut self, applied_to: &str) {
+        if let token::DocComment(_) = self.token {
+            let mut err = self.diagnostic().struct_span_err(
+                self.span,
+                &format!("documentation comments cannot be applied to {}", applied_to),
+            );
+            err.span_label(self.span, "doc comments are not allowed here");
+            err.emit();
+            self.bump();
+        } else if self.token == token::Pound && self.look_ahead(1, |t| {
+            *t == token::OpenDelim(token::Bracket)
+        }) {
+            let lo = self.span;
+            // Skip every token until next possible arg.
+            while self.token != token::CloseDelim(token::Bracket) {
+                self.bump();
+            }
+            let sp = lo.to(self.span);
+            self.bump();
+            let mut err = self.diagnostic().struct_span_err(
+                sp,
+                &format!("attributes cannot be applied to {}", applied_to),
+            );
+            err.span_label(sp, "attributes are not allowed here");
+            err.emit();
+        }
+    }
+
     /// This version of parse arg doesn't necessarily require
     /// identifier names.
     fn parse_arg_general(&mut self, require_name: bool) -> PResult<'a, Arg> {
@@ -1797,7 +1827,8 @@ fn parse_arg_general(&mut self, require_name: bool) -> PResult<'a, Arg> {
         let (pat, ty) = if require_name || self.is_named_argument() {
             debug!("parse_arg_general parse_pat (require_name:{})",
                    require_name);
-            let pat = self.parse_pat()?;
+            self.eat_incorrect_doc_comment("method arguments");
+            let pat = self.parse_pat(Some("argument name"))?;
 
             if let Err(mut err) = self.expect(&token::Colon) {
                 // If we find a pattern followed by an identifier, it could be an (incorrect)
@@ -1819,10 +1850,12 @@ fn parse_arg_general(&mut self, require_name: bool) -> PResult<'a, Arg> {
                 return Err(err);
             }
 
+            self.eat_incorrect_doc_comment("a method argument's type");
             (pat, self.parse_ty()?)
         } else {
             debug!("parse_arg_general ident_to_pat");
             let parser_snapshot_before_ty = self.clone();
+            self.eat_incorrect_doc_comment("a method argument's type");
             let mut ty = self.parse_ty();
             if ty.is_ok() && self.token == token::Colon {
                 // This wasn't actually a type, but a pattern looking like a type,
@@ -1844,7 +1877,7 @@ fn parse_arg_general(&mut self, require_name: bool) -> PResult<'a, Arg> {
                     // Recover from attempting to parse the argument as a type without pattern.
                     err.cancel();
                     mem::replace(self, parser_snapshot_before_ty);
-                    let pat = self.parse_pat()?;
+                    let pat = self.parse_pat(Some("argument name"))?;
                     self.expect(&token::Colon)?;
                     let ty = self.parse_ty()?;
 
@@ -1882,7 +1915,7 @@ fn parse_arg_general(&mut self, require_name: bool) -> PResult<'a, Arg> {
 
     /// Parse an argument in a lambda header e.g. |arg, arg|
     fn parse_fn_block_arg(&mut self) -> PResult<'a, Arg> {
-        let pat = self.parse_pat()?;
+        let pat = self.parse_pat(Some("argument name"))?;
         let t = if self.eat(&token::Colon) {
             self.parse_ty()?
         } else {
@@ -2439,7 +2472,11 @@ fn parse_bottom_expr(&mut self) -> PResult<'a, P<Expr>> {
                     return Ok(self.mk_expr(lo.to(hi), ex, attrs));
                 }
                 if self.eat_keyword(keywords::Match) {
-                    return self.parse_match_expr(attrs);
+                    let match_sp = self.prev_span;
+                    return self.parse_match_expr(attrs).map_err(|mut err| {
+                        err.span_label(match_sp, "while parsing this match expression");
+                        err
+                    });
                 }
                 if self.eat_keyword(keywords::Unsafe) {
                     return self.parse_block_expr(
@@ -3745,7 +3782,7 @@ fn parse_pat_list(&mut self) -> PResult<'a, (Vec<P<Pat>>, Option<usize>, bool)>
                                   "`..` can only be used once per tuple or tuple struct pattern");
                 }
             } else if !self.check(&token::CloseDelim(token::Paren)) {
-                fields.push(self.parse_pat()?);
+                fields.push(self.parse_pat(None)?);
             } else {
                 break
             }
@@ -3801,7 +3838,7 @@ fn parse_pat_vec_elements(
                 }
             }
 
-            let subpat = self.parse_pat()?;
+            let subpat = self.parse_pat(None)?;
             if before_slice && self.eat(&token::DotDot) {
                 slice = Some(subpat);
                 before_slice = false;
@@ -3826,7 +3863,7 @@ fn parse_pat_field(
             // Parsing a pattern of the form "fieldname: pat"
             let fieldname = self.parse_field_name()?;
             self.bump();
-            let pat = self.parse_pat()?;
+            let pat = self.parse_pat(None)?;
             hi = pat.span;
             (pat, fieldname, false)
         } else {
@@ -3917,8 +3954,8 @@ fn parse_pat_fields(&mut self) -> PResult<'a, (Vec<source_map::Spanned<ast::Fiel
                     etc_span = Some(etc_sp);
                     break;
                 }
-                let token_str = self.this_token_to_string();
-                let mut err = self.fatal(&format!("expected `}}`, found `{}`", token_str));
+                let token_str = self.this_token_descr();
+                let mut err = self.fatal(&format!("expected `}}`, found {}", token_str));
 
                 err.span_label(self.span, "expected `}`");
                 let mut comma_sp = None;
@@ -4028,7 +4065,7 @@ fn parse_as_ident(&mut self) -> bool {
     /// "top-level" patterns in a match arm, `for` loop, `let`, &c. (in contrast
     /// to subpatterns within such).
     fn parse_top_level_pat(&mut self) -> PResult<'a, P<Pat>> {
-        let pat = self.parse_pat()?;
+        let pat = self.parse_pat(None)?;
         if self.token == token::Comma {
             // An unexpected comma after a top-level pattern is a clue that the
             // user (perhaps more accustomed to some other language) forgot the
@@ -4060,13 +4097,17 @@ fn parse_top_level_pat(&mut self) -> PResult<'a, P<Pat>> {
     }
 
     /// Parse a pattern.
-    pub fn parse_pat(&mut self) -> PResult<'a, P<Pat>> {
-        self.parse_pat_with_range_pat(true)
+    pub fn parse_pat(&mut self, expected: Option<&'static str>) -> PResult<'a, P<Pat>> {
+        self.parse_pat_with_range_pat(true, expected)
     }
 
     /// Parse a pattern, with a setting whether modern range patterns e.g. `a..=b`, `a..b` are
     /// allowed.
-    fn parse_pat_with_range_pat(&mut self, allow_range_pat: bool) -> PResult<'a, P<Pat>> {
+    fn parse_pat_with_range_pat(
+        &mut self,
+        allow_range_pat: bool,
+        expected: Option<&'static str>,
+    ) -> PResult<'a, P<Pat>> {
         maybe_whole!(self, NtPat, |x| x);
 
         let lo = self.span;
@@ -4082,7 +4123,7 @@ fn parse_pat_with_range_pat(&mut self, allow_range_pat: bool) -> PResult<'a, P<P
                     err.span_label(self.span, "unexpected lifetime");
                     return Err(err);
                 }
-                let subpat = self.parse_pat_with_range_pat(false)?;
+                let subpat = self.parse_pat_with_range_pat(false, expected)?;
                 pat = PatKind::Ref(subpat, mutbl);
             }
             token::OpenDelim(token::Paren) => {
@@ -4128,7 +4169,7 @@ fn parse_pat_with_range_pat(&mut self, allow_range_pat: bool) -> PResult<'a, P<P
                 pat = self.parse_pat_ident(BindingMode::ByRef(mutbl))?;
             } else if self.eat_keyword(keywords::Box) {
                 // Parse box pat
-                let subpat = self.parse_pat_with_range_pat(false)?;
+                let subpat = self.parse_pat_with_range_pat(false, None)?;
                 pat = PatKind::Box(subpat);
             } else if self.token.is_ident() && !self.token.is_reserved_ident() &&
                       self.parse_as_ident() {
@@ -4228,9 +4269,14 @@ fn parse_pat_with_range_pat(&mut self, allow_range_pat: bool) -> PResult<'a, P<P
                     }
                     Err(mut err) => {
                         self.cancel(&mut err);
-                        let msg = format!("expected pattern, found {}", self.this_token_descr());
+                        let expected = expected.unwrap_or("pattern");
+                        let msg = format!(
+                            "expected {}, found {}",
+                            expected,
+                            self.this_token_descr(),
+                        );
                         let mut err = self.fatal(&msg);
-                        err.span_label(self.span, "expected pattern");
+                        err.span_label(self.span, format!("expected {}", expected));
                         return Err(err);
                     }
                 }
@@ -4274,7 +4320,7 @@ fn parse_pat_ident(&mut self,
                        -> PResult<'a, PatKind> {
         let ident = self.parse_ident()?;
         let sub = if self.eat(&token::At) {
-            Some(self.parse_pat()?)
+            Some(self.parse_pat(Some("binding pattern"))?)
         } else {
             None
         };
@@ -4680,8 +4726,8 @@ fn parse_stmt_without_recovery(&mut self,
                     } else {
                         ""
                     };
-                    let tok_str = self.this_token_to_string();
-                    let mut err = self.fatal(&format!("expected {}`(` or `{{`, found `{}`",
+                    let tok_str = self.this_token_descr();
+                    let mut err = self.fatal(&format!("expected {}`(` or `{{`, found {}",
                                                       ident_str,
                                                       tok_str));
                     err.span_label(self.span, format!("expected {}`(` or `{{`", ident_str));
@@ -4817,8 +4863,8 @@ pub fn parse_block(&mut self) -> PResult<'a, P<Block>> {
 
         if !self.eat(&token::OpenDelim(token::Brace)) {
             let sp = self.span;
-            let tok = self.this_token_to_string();
-            let mut e = self.span_fatal(sp, &format!("expected `{{`, found `{}`", tok));
+            let tok = self.this_token_descr();
+            let mut e = self.span_fatal(sp, &format!("expected `{{`, found {}", tok));
             let do_not_suggest_help =
                 self.token.is_keyword(keywords::In) || self.token == token::Colon;
 
@@ -4880,6 +4926,7 @@ pub fn parse_block(&mut self) -> PResult<'a, P<Block>> {
                 }
                 _ => ()
             }
+            e.span_label(sp, "expected `{`");
             return Err(e);
         }
 
@@ -4975,7 +5022,7 @@ fn parse_block_tail(&mut self, lo: Span, s: BlockCheckMode) -> PResult<'a, P<Blo
 
     fn warn_missing_semicolon(&self) {
         self.diagnostic().struct_span_warn(self.span, {
-            &format!("expected `;`, found `{}`", self.this_token_to_string())
+            &format!("expected `;`, found {}", self.this_token_descr())
         }).note({
             "This was erroneously allowed and will become a hard error in a future release"
         }).emit();
@@ -5779,7 +5826,7 @@ fn parse_impl_method(&mut self, vis: &Visibility, at_end: &mut bool)
                              ast::ImplItemKind)> {
         // code copied from parse_macro_use_or_failure... abstraction!
         if let Some(mac) = self.parse_assoc_macro_invoc("impl", Some(vis), at_end)? {
-            // Method macro.
+            // method macro
             Ok((keywords::Invalid.ident(), vec![], ast::Generics::default(),
                 ast::ImplItemKind::Macro(mac)))
         } else {
@@ -6014,9 +6061,9 @@ fn parse_item_struct(&mut self) -> PResult<'a, ItemInfo> {
             self.expect(&token::Semi)?;
             body
         } else {
-            let token_str = self.this_token_to_string();
+            let token_str = self.this_token_descr();
             let mut err = self.fatal(&format!(
-                "expected `where`, `{{`, `(`, or `;` after struct name, found `{}`",
+                "expected `where`, `{{`, `(`, or `;` after struct name, found {}",
                 token_str
             ));
             err.span_label(self.span, "expected `where`, `{`, `(`, or `;` after struct name");
@@ -6038,9 +6085,9 @@ fn parse_item_union(&mut self) -> PResult<'a, ItemInfo> {
         } else if self.token == token::OpenDelim(token::Brace) {
             VariantData::Struct(self.parse_record_struct_body()?, ast::DUMMY_NODE_ID)
         } else {
-            let token_str = self.this_token_to_string();
+            let token_str = self.this_token_descr();
             let mut err = self.fatal(&format!(
-                "expected `where` or `{{` after union name, found `{}`", token_str));
+                "expected `where` or `{{` after union name, found {}", token_str));
             err.span_label(self.span, "expected `where` or `{` after union name");
             return Err(err);
         };
@@ -6088,9 +6135,9 @@ fn parse_record_struct_body(&mut self) -> PResult<'a, Vec<StructField>> {
             }
             self.eat(&token::CloseDelim(token::Brace));
         } else {
-            let token_str = self.this_token_to_string();
+            let token_str = self.this_token_descr();
             let mut err = self.fatal(&format!(
-                    "expected `where`, or `{{` after struct name, found `{}`", token_str));
+                    "expected `where`, or `{{` after struct name, found {}", token_str));
             err.span_label(self.span, "expected `where`, or `{` after struct name");
             return Err(err);
         }
@@ -6166,8 +6213,8 @@ fn parse_single_struct_field(&mut self,
             }
             _ => {
                 let sp = self.sess.source_map().next_point(self.prev_span);
-                let mut err = self.struct_span_err(sp, &format!("expected `,`, or `}}`, found `{}`",
-                                                                self.this_token_to_string()));
+                let mut err = self.struct_span_err(sp, &format!("expected `,`, or `}}`, found {}",
+                                                                self.this_token_descr()));
                 if self.token.is_ident() {
                     // This is likely another field; emit the diagnostic and keep going
                     err.span_suggestion_with_applicability(
@@ -6303,9 +6350,9 @@ fn parse_mod_items(&mut self, term: &token::Token, inner_lo: Span) -> PResult<'a
         }
 
         if !self.eat(term) {
-            let token_str = self.this_token_to_string();
-            let mut err = self.fatal(&format!("expected item, found `{}`", token_str));
-            if token_str == ";" {
+            let token_str = self.this_token_descr();
+            let mut err = self.fatal(&format!("expected item, found {}", token_str));
+            if self.token == token::Semi {
                 let msg = "consider removing this semicolon";
                 err.span_suggestion_short_with_applicability(
                     self.span, msg, String::new(), Applicability::MachineApplicable
@@ -6792,11 +6839,11 @@ fn parse_item_foreign_mod(&mut self,
         Ok(self.mk_item(lo.to(prev_span), invalid, ItemKind::ForeignMod(m), visibility, attrs))
     }
 
-    /// Parse type Foo = Bar;
+    /// Parse `type Foo = Bar;`
     /// or
-    /// existential type Foo: Bar;
+    /// `existential type Foo: Bar;`
     /// or
-    /// return None without modifying the parser state
+    /// `return None` without modifying the parser state
     fn eat_type(&mut self) -> Option<PResult<'a, (Ident, AliasKind, ast::Generics)>> {
         // This parses the grammar:
         //     Ident ["<"...">"] ["where" ...] ("=" | ":") Ty ";"
index 374154e63333d221ce0f381cc9dfa7414949ebbd..98e9272e6d8e50c06816aff3301c8d1fdb6ffc41 100644 (file)
@@ -68,7 +68,7 @@ pub fn string_to_item (source_str : String) -> Option<P<ast::Item>> {
 pub fn string_to_pat(source_str: String) -> P<ast::Pat> {
     let ps = ParseSess::new(FilePathMapping::empty());
     with_error_checking_parse(source_str, &ps, |p| {
-        p.parse_pat()
+        p.parse_pat(None)
     })
 }
 
index 002ecce58e6529207d0a7ca6c5b1eab8d42ff321..dd90ef06c39295da68bab608be45b3a0f5937b65 100644 (file)
@@ -1384,7 +1384,7 @@ fn build_enum_match_tuple<'b>(&self,
             // let __self2_vi = unsafe {
             //     std::intrinsics::discriminant_value(&arg2) } as i32;
             // ```
-            let mut index_let_stmts: Vec<ast::Stmt> = Vec::new();
+            let mut index_let_stmts: Vec<ast::Stmt> = Vec::with_capacity(vi_idents.len() + 1);
 
             // We also build an expression which checks whether all discriminants are equal
             // discriminant_test = __self0_vi == __self1_vi && __self0_vi == __self2_vi && ...
index caddcd9b9dc9479a20908d93c3e47c49b021379e..7051ead40a5f825878b59bf08d4e768be9e99a4a 160000 (submodule)
--- a/src/llvm
+++ b/src/llvm
@@ -1 +1 @@
-Subproject commit caddcd9b9dc9479a20908d93c3e47c49b021379e
+Subproject commit 7051ead40a5f825878b59bf08d4e768be9e99a4a
index 9ccd37a6a45924fb1e4ecf88795d62e277311e44..ec822fddef3eba1b2638f82869d131a2b8ec9e46 100644 (file)
@@ -12,4 +12,4 @@ rustc_target = { path = "../librustc_target" }
 rustc_driver = { path = "../librustc_driver" }
 
 [features]
-jemalloc = ["rustc_target/jemalloc"]
+jemalloc = ['rustc_driver/jemalloc-sys']
index 49a4962858ca733d0a65ddd8ad376a25798e1512..390450a520526cfd25b2a76ea5739139f502bbf1 100644 (file)
@@ -145,7 +145,6 @@ extern "C" void LLVMRustArchiveIteratorFree(LLVMRustArchiveIteratorRef RAI) {
 
 extern "C" const char *
 LLVMRustArchiveChildName(LLVMRustArchiveChildConstRef Child, size_t *Size) {
-#if LLVM_VERSION_GE(4, 0)
   Expected<StringRef> NameOrErr = Child->getName();
   if (!NameOrErr) {
     // rustc_codegen_llvm currently doesn't use this error string, but it might be
@@ -154,11 +153,6 @@ LLVMRustArchiveChildName(LLVMRustArchiveChildConstRef Child, size_t *Size) {
     LLVMRustSetLastError(toString(NameOrErr.takeError()).c_str());
     return nullptr;
   }
-#else
-  ErrorOr<StringRef> NameOrErr = Child->getName();
-  if (NameOrErr.getError())
-    return nullptr;
-#endif
   StringRef Name = NameOrErr.get();
   *Size = Name.size();
   return Name.data();
@@ -167,19 +161,11 @@ LLVMRustArchiveChildName(LLVMRustArchiveChildConstRef Child, size_t *Size) {
 extern "C" const char *LLVMRustArchiveChildData(LLVMRustArchiveChildRef Child,
                                                 size_t *Size) {
   StringRef Buf;
-#if LLVM_VERSION_GE(4, 0)
   Expected<StringRef> BufOrErr = Child->getBuffer();
   if (!BufOrErr) {
     LLVMRustSetLastError(toString(BufOrErr.takeError()).c_str());
     return nullptr;
   }
-#else
-  ErrorOr<StringRef> BufOrErr = Child->getBuffer();
-  if (BufOrErr.getError()) {
-    LLVMRustSetLastError(BufOrErr.getError().message().c_str());
-    return nullptr;
-  }
-#endif
   Buf = BufOrErr.get();
   *Size = Buf.size();
   return Buf.data();
index 534e4b910902e89ce567eb364a3bf4b5e2b3600f..59f0b23361e540313f9ef491d79d72cd9e0222e7 100644 (file)
@@ -42,7 +42,6 @@ LLVMRustLinkerAdd(RustLinker *L, char *BC, size_t Len) {
   std::unique_ptr<MemoryBuffer> Buf =
       MemoryBuffer::getMemBufferCopy(StringRef(BC, Len));
 
-#if LLVM_VERSION_GE(4, 0)
   Expected<std::unique_ptr<Module>> SrcOrError =
       llvm::getLazyBitcodeModule(Buf->getMemBufferRef(), L->Ctx);
   if (!SrcOrError) {
@@ -51,20 +50,8 @@ LLVMRustLinkerAdd(RustLinker *L, char *BC, size_t Len) {
   }
 
   auto Src = std::move(*SrcOrError);
-#else
-  ErrorOr<std::unique_ptr<Module>> Src =
-      llvm::getLazyBitcodeModule(std::move(Buf), L->Ctx);
-  if (!Src) {
-    LLVMRustSetLastError(Src.getError().message().c_str());
-    return false;
-  }
-#endif
 
-#if LLVM_VERSION_GE(4, 0)
   if (L->L.linkInModule(std::move(Src))) {
-#else
-  if (L->L.linkInModule(std::move(Src.get()))) {
-#endif
     LLVMRustSetLastError("");
     return false;
   }
index 06f75d981e3d6c8405503942e9da1734636acfce..200175da1bf2a57d7387db7a79e0866a667eb5b2 100644 (file)
@@ -32,7 +32,6 @@
 #include "llvm/Target/TargetSubtargetInfo.h"
 #endif
 
-#if LLVM_VERSION_GE(4, 0)
 #include "llvm/Transforms/IPO/AlwaysInliner.h"
 #include "llvm/Transforms/IPO/FunctionImport.h"
 #include "llvm/Transforms/Utils/FunctionImportUtils.h"
 #if LLVM_VERSION_LE(4, 0)
 #include "llvm/Object/ModuleSummaryIndexObjectFile.h"
 #endif
-#endif
 
 #include "llvm-c/Transforms/PassManagerBuilder.h"
 
-#if LLVM_VERSION_GE(4, 0)
-#define PGO_AVAILABLE
-#endif
-
 using namespace llvm;
 using namespace llvm::legacy;
 
@@ -121,12 +115,8 @@ bool LLVMRustPassManagerBuilderPopulateThinLTOPassManager(
   LLVMPassManagerBuilderRef PMBR,
   LLVMPassManagerRef PMR
 ) {
-#if LLVM_VERSION_GE(4, 0)
   unwrap(PMBR)->populateThinLTOPassManager(*unwrap(PMR));
   return true;
-#else
-  return false;
-#endif
 }
 
 #ifdef LLVM_COMPONENT_X86
@@ -288,17 +278,12 @@ static Optional<Reloc::Model> fromRust(LLVMRustRelocMode RustReloc) {
     return Reloc::PIC_;
   case LLVMRustRelocMode::DynamicNoPic:
     return Reloc::DynamicNoPIC;
-#if LLVM_VERSION_GE(4, 0)
   case LLVMRustRelocMode::ROPI:
     return Reloc::ROPI;
   case LLVMRustRelocMode::RWPI:
     return Reloc::RWPI;
   case LLVMRustRelocMode::ROPIRWPI:
     return Reloc::ROPI_RWPI;
-#else
-  default:
-    break;
-#endif
   }
   report_fatal_error("Bad RelocModel.");
 }
@@ -450,11 +435,8 @@ extern "C" void LLVMRustConfigurePassManagerBuilder(
   unwrap(PMBR)->SLPVectorize = SLPVectorize;
   unwrap(PMBR)->OptLevel = fromRust(OptLevel);
   unwrap(PMBR)->LoopVectorize = LoopVectorize;
-#if LLVM_VERSION_GE(4, 0)
   unwrap(PMBR)->PrepareForThinLTO = PrepareForThinLTO;
-#endif
 
-#ifdef PGO_AVAILABLE
   if (PGOGenPath) {
     assert(!PGOUsePath);
     unwrap(PMBR)->EnablePGOInstrGen = true;
@@ -464,9 +446,6 @@ extern "C" void LLVMRustConfigurePassManagerBuilder(
     assert(!PGOGenPath);
     unwrap(PMBR)->PGOInstrUse = PGOUsePath;
   }
-#else
-  assert(!PGOGenPath && !PGOUsePath && "Should've caught earlier");
-#endif
 }
 
 // Unfortunately, the LLVM C API doesn't provide a way to set the `LibraryInfo`
@@ -716,7 +695,6 @@ extern "C" void LLVMRustPrintPasses() {
   LLVMInitializePasses();
   struct MyListener : PassRegistrationListener {
     void passEnumerate(const PassInfo *Info) {
-#if LLVM_VERSION_GE(4, 0)
       StringRef PassArg = Info->getPassArgument();
       StringRef PassName = Info->getPassName();
       if (!PassArg.empty()) {
@@ -726,11 +704,6 @@ extern "C" void LLVMRustPrintPasses() {
         printf("%15.*s - %.*s\n", (int)PassArg.size(), PassArg.data(),
                (int)PassName.size(), PassName.data());
       }
-#else
-      if (Info->getPassArgument() && *Info->getPassArgument()) {
-        printf("%15s - %s\n", Info->getPassArgument(), Info->getPassName());
-      }
-#endif
     }
   } Listener;
 
@@ -740,11 +713,7 @@ extern "C" void LLVMRustPrintPasses() {
 
 extern "C" void LLVMRustAddAlwaysInlinePass(LLVMPassManagerBuilderRef PMBR,
                                             bool AddLifetimes) {
-#if LLVM_VERSION_GE(4, 0)
   unwrap(PMBR)->Inliner = llvm::createAlwaysInlinerLegacyPass(AddLifetimes);
-#else
-  unwrap(PMBR)->Inliner = createAlwaysInlinerPass(AddLifetimes);
-#endif
 }
 
 extern "C" void LLVMRustRunRestrictionPass(LLVMModuleRef M, char **Symbols,
@@ -795,26 +764,6 @@ extern "C" void LLVMRustSetModulePIELevel(LLVMModuleRef M) {
   unwrap(M)->setPIELevel(PIELevel::Level::Large);
 }
 
-extern "C" bool
-LLVMRustThinLTOAvailable() {
-#if LLVM_VERSION_GE(4, 0)
-  return true;
-#else
-  return false;
-#endif
-}
-
-extern "C" bool
-LLVMRustPGOAvailable() {
-#ifdef PGO_AVAILABLE
-  return true;
-#else
-  return false;
-#endif
-}
-
-#if LLVM_VERSION_GE(4, 0)
-
 // Here you'll find an implementation of ThinLTO as used by the Rust compiler
 // right now. This ThinLTO support is only enabled on "recent ish" versions of
 // LLVM, and otherwise it's just blanket rejected from other compilers.
@@ -1276,94 +1225,3 @@ LLVMRustThinLTOPatchDICompileUnit(LLVMModuleRef Mod, DICompileUnit *Unit) {
   MD->clearOperands();
   MD->addOperand(Unit);
 }
-
-#else
-
-struct LLVMRustThinLTOData {
-};
-
-struct LLVMRustThinLTOModule {
-};
-
-extern "C" LLVMRustThinLTOData*
-LLVMRustCreateThinLTOData(LLVMRustThinLTOModule *modules,
-                          int num_modules,
-                          const char **preserved_symbols,
-                          int num_symbols) {
-  report_fatal_error("ThinLTO not available");
-}
-
-extern "C" bool
-LLVMRustPrepareThinLTORename(const LLVMRustThinLTOData *Data, LLVMModuleRef M) {
-  report_fatal_error("ThinLTO not available");
-}
-
-extern "C" bool
-LLVMRustPrepareThinLTOResolveWeak(const LLVMRustThinLTOData *Data, LLVMModuleRef M) {
-  report_fatal_error("ThinLTO not available");
-}
-
-extern "C" bool
-LLVMRustPrepareThinLTOInternalize(const LLVMRustThinLTOData *Data, LLVMModuleRef M) {
-  report_fatal_error("ThinLTO not available");
-}
-
-extern "C" bool
-LLVMRustPrepareThinLTOImport(const LLVMRustThinLTOData *Data, LLVMModuleRef M) {
-  report_fatal_error("ThinLTO not available");
-}
-
-extern "C" LLVMRustThinLTOModuleImports
-LLVMRustGetLLVMRustThinLTOModuleImports(const LLVMRustThinLTOData *Data) {
-  report_fatal_error("ThinLTO not available");
-}
-
-extern "C" void
-LLVMRustFreeThinLTOData(LLVMRustThinLTOData *Data) {
-  report_fatal_error("ThinLTO not available");
-}
-
-struct LLVMRustThinLTOBuffer {
-};
-
-extern "C" LLVMRustThinLTOBuffer*
-LLVMRustThinLTOBufferCreate(LLVMModuleRef M) {
-  report_fatal_error("ThinLTO not available");
-}
-
-extern "C" void
-LLVMRustThinLTOBufferFree(LLVMRustThinLTOBuffer *Buffer) {
-  report_fatal_error("ThinLTO not available");
-}
-
-extern "C" const void*
-LLVMRustThinLTOBufferPtr(const LLVMRustThinLTOBuffer *Buffer) {
-  report_fatal_error("ThinLTO not available");
-}
-
-extern "C" size_t
-LLVMRustThinLTOBufferLen(const LLVMRustThinLTOBuffer *Buffer) {
-  report_fatal_error("ThinLTO not available");
-}
-
-extern "C" LLVMModuleRef
-LLVMRustParseBitcodeForThinLTO(LLVMContextRef Context,
-                               const char *data,
-                               size_t len,
-                               const char *identifier) {
-  report_fatal_error("ThinLTO not available");
-}
-
-extern "C" void
-LLVMRustThinLTOGetDICompileUnit(LLVMModuleRef Mod,
-                                DICompileUnit **A,
-                                DICompileUnit **B) {
-  report_fatal_error("ThinLTO not available");
-}
-
-extern "C" void
-LLVMRustThinLTOPatchDICompileUnit(LLVMModuleRef Mod) {
-  report_fatal_error("ThinLTO not available");
-}
-
-#endif // LLVM_VERSION_GE(4, 0)
index bf7afa1b6c0684d15f2e19300e114af89c8ce3a4..3dbde46f762411598632ce24b23b7df7c877d0dc 100644 (file)
@@ -713,6 +713,21 @@ extern "C" LLVMMetadataRef LLVMRustDIBuilderCreateStructType(
       unwrapDI<DIType>(VTableHolder), UniqueId));
 }
 
+extern "C" LLVMMetadataRef LLVMRustDIBuilderCreateVariantPart(
+    LLVMRustDIBuilderRef Builder, LLVMMetadataRef Scope, const char *Name,
+    LLVMMetadataRef File, unsigned LineNumber, uint64_t SizeInBits,
+    uint32_t AlignInBits, LLVMRustDIFlags Flags, LLVMMetadataRef Discriminator,
+    LLVMMetadataRef Elements, const char *UniqueId) {
+#if LLVM_VERSION_GE(7, 0)
+  return wrap(Builder->createVariantPart(
+      unwrapDI<DIDescriptor>(Scope), Name, unwrapDI<DIFile>(File), LineNumber,
+      SizeInBits, AlignInBits, fromRust(Flags), unwrapDI<DIDerivedType>(Discriminator),
+      DINodeArray(unwrapDI<MDTuple>(Elements)), UniqueId));
+#else
+  abort();
+#endif
+}
+
 extern "C" LLVMMetadataRef LLVMRustDIBuilderCreateMemberType(
     LLVMRustDIBuilderRef Builder, LLVMMetadataRef Scope, const char *Name,
     LLVMMetadataRef File, unsigned LineNo, uint64_t SizeInBits,
@@ -724,6 +739,28 @@ extern "C" LLVMMetadataRef LLVMRustDIBuilderCreateMemberType(
                                         fromRust(Flags), unwrapDI<DIType>(Ty)));
 }
 
+extern "C" LLVMMetadataRef LLVMRustDIBuilderCreateVariantMemberType(
+    LLVMRustDIBuilderRef Builder, LLVMMetadataRef Scope,
+    const char *Name, LLVMMetadataRef File, unsigned LineNo, uint64_t SizeInBits,
+    uint32_t AlignInBits, uint64_t OffsetInBits, LLVMValueRef Discriminant,
+    LLVMRustDIFlags Flags, LLVMMetadataRef Ty) {
+#if LLVM_VERSION_GE(7, 0)
+  llvm::ConstantInt* D = nullptr;
+  if (Discriminant) {
+    D = unwrap<llvm::ConstantInt>(Discriminant);
+  }
+  return wrap(Builder->createVariantMemberType(unwrapDI<DIDescriptor>(Scope), Name,
+                                               unwrapDI<DIFile>(File), LineNo,
+                                               SizeInBits, AlignInBits, OffsetInBits, D,
+                                               fromRust(Flags), unwrapDI<DIType>(Ty)));
+#else
+  return wrap(Builder->createMemberType(unwrapDI<DIDescriptor>(Scope), Name,
+                                        unwrapDI<DIFile>(File), LineNo,
+                                        SizeInBits, AlignInBits, OffsetInBits,
+                                        fromRust(Flags), unwrapDI<DIType>(Ty)));
+#endif
+}
+
 extern "C" LLVMMetadataRef LLVMRustDIBuilderCreateLexicalBlock(
     LLVMRustDIBuilderRef Builder, LLVMMetadataRef Scope,
     LLVMMetadataRef File, unsigned Line, unsigned Col) {
@@ -826,11 +863,19 @@ extern "C" LLVMMetadataRef LLVMRustDIBuilderCreateEnumerationType(
     LLVMRustDIBuilderRef Builder, LLVMMetadataRef Scope, const char *Name,
     LLVMMetadataRef File, unsigned LineNumber, uint64_t SizeInBits,
     uint32_t AlignInBits, LLVMMetadataRef Elements,
-    LLVMMetadataRef ClassTy) {
+    LLVMMetadataRef ClassTy, bool IsFixed) {
+#if LLVM_VERSION_GE(7, 0)
+  return wrap(Builder->createEnumerationType(
+      unwrapDI<DIDescriptor>(Scope), Name, unwrapDI<DIFile>(File), LineNumber,
+      SizeInBits, AlignInBits, DINodeArray(unwrapDI<MDTuple>(Elements)),
+      unwrapDI<DIType>(ClassTy), "", IsFixed));
+#else
+  // Ignore IsFixed on older LLVM.
   return wrap(Builder->createEnumerationType(
       unwrapDI<DIDescriptor>(Scope), Name, unwrapDI<DIFile>(File), LineNumber,
       SizeInBits, AlignInBits, DINodeArray(unwrapDI<MDTuple>(Elements)),
-      unwrapDI<DIType>(ClassTy)));
+      unwrapDI<DIType>(ClassTy), ""));
+#endif
 }
 
 extern "C" LLVMMetadataRef LLVMRustDIBuilderCreateUnionType(
@@ -862,10 +907,8 @@ LLVMRustDIBuilderCreateNameSpace(LLVMRustDIBuilderRef Builder,
       ,
       unwrapDI<DIFile>(File), LineNo
 #endif
-#if LLVM_VERSION_GE(4, 0)
       ,
       false // ExportSymbols (only relevant for C++ anonymous namespaces)
-#endif
       ));
 }
 
@@ -1502,14 +1545,6 @@ LLVMRustBuildVectorReduceFMax(LLVMBuilderRef, LLVMValueRef, bool) {
 }
 #endif
 
-#if LLVM_VERSION_LT(4, 0)
-extern "C" LLVMValueRef
-LLVMBuildExactUDiv(LLVMBuilderRef B, LLVMValueRef LHS,
-                   LLVMValueRef RHS, const char *Name) {
-  return wrap(unwrap(B)->CreateExactUDiv(unwrap(LHS), unwrap(RHS), Name));
-}
-#endif
-
 #if LLVM_VERSION_GE(6, 0)
 extern "C" LLVMValueRef
 LLVMRustBuildMinNum(LLVMBuilderRef B, LLVMValueRef LHS, LLVMValueRef RHS) {
index b6fa9a2fa950809b5b52dd42bea14f10e1e112fa..2fd72fa749a840390fd811503f20ed2cc221e9aa 100644 (file)
 
 #include "llvm/IR/LegacyPassManager.h"
 
-#if LLVM_VERSION_GE(4, 0)
 #include "llvm/Bitcode/BitcodeReader.h"
 #include "llvm/Bitcode/BitcodeWriter.h"
-#else
-#include "llvm/Bitcode/ReaderWriter.h"
-#endif
 
 #include "llvm/IR/DIBuilder.h"
 #include "llvm/IR/DebugInfo.h"
index 6e931a84bacb9f6f7d5175f6eab3f49043eb55c2..9326e22090c22300101815c99b5a49eadebc5f60 100644 (file)
@@ -12,7 +12,7 @@
 # source tarball for a stable release you'll likely see `1.x.0` for rustc and
 # `0.x.0` for Cargo where they were released on `date`.
 
-date: 2018-10-13
+date: 2018-10-30
 rustc: beta
 cargo: beta
 
index 431766a3fbcfb6dafb2d5a3866c1609bf44ee554..0309be1ade6bf61066f2c69f77ac3567b7dc31b5 160000 (submodule)
@@ -1 +1 @@
-Subproject commit 431766a3fbcfb6dafb2d5a3866c1609bf44ee554
+Subproject commit 0309be1ade6bf61066f2c69f77ac3567b7dc31b5
diff --git a/src/test/codegen/enum-debug-clike.rs b/src/test/codegen/enum-debug-clike.rs
new file mode 100644 (file)
index 0000000..528e84b
--- /dev/null
@@ -0,0 +1,35 @@
+// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// This test depends on a patch that was committed to upstream LLVM
+// before 7.0, then backported to the Rust LLVM fork.  It tests that
+// debug info for "c-like" enums is properly emitted.
+
+// ignore-tidy-linelength
+// ignore-windows
+// min-system-llvm-version 7.0
+
+// compile-flags: -g -C no-prepopulate-passes
+
+// CHECK-LABEL: @main
+// CHECK: {{.*}}DICompositeType{{.*}}tag: DW_TAG_enumeration_type,{{.*}}name: "E",{{.*}}flags: DIFlagFixedEnum,{{.*}}
+// CHECK: {{.*}}DIEnumerator{{.*}}name: "A",{{.*}}value: {{[0-9].*}}
+// CHECK: {{.*}}DIEnumerator{{.*}}name: "B",{{.*}}value: {{[0-9].*}}
+// CHECK: {{.*}}DIEnumerator{{.*}}name: "C",{{.*}}value: {{[0-9].*}}
+
+#![allow(dead_code)]
+#![allow(unused_variables)]
+#![allow(unused_assignments)]
+
+enum E { A, B, C }
+
+pub fn main() {
+    let e = E::C;
+}
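
For readers skimming the diff, a hedged aside (not part of the patch): the FileCheck lines above match LLVM debug metadata emitted for a c-like enum, which at runtime is nothing but its discriminant. A minimal standalone Rust sketch of that shape, using only std APIs:

    // Illustrative sketch only, not from the commit: a c-like enum such as `E`
    // above is represented purely by its discriminant value.
    use std::mem::{discriminant, size_of};

    #[allow(dead_code)]
    enum E { A, B, C }

    fn main() {
        // With rustc's default layout this fits in a single byte.
        println!("size_of::<E>() = {}", size_of::<E>());
        // Each variant carries a distinct discriminant, which is what the
        // DIEnumerator entries checked above describe.
        assert_ne!(discriminant(&E::A), discriminant(&E::C));
    }
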
diff --git a/src/test/codegen/enum-debug-niche.rs b/src/test/codegen/enum-debug-niche.rs
new file mode 100644 (file)
index 0000000..6326ba9
--- /dev/null
@@ -0,0 +1,42 @@
+// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// This test depends on a patch that was committed to upstream LLVM
+// before 7.0, then backported to the Rust LLVM fork.  It tests that
+// optimized enum debug info accurately reflects the enum layout.
+
+// ignore-tidy-linelength
+// ignore-windows
+// min-system-llvm-version 7.0
+
+// compile-flags: -g -C no-prepopulate-passes
+
+// CHECK-LABEL: @main
+// CHECK: {{.*}}DICompositeType{{.*}}tag: DW_TAG_variant_part,{{.*}}discriminator:{{.*}}
+// CHECK: {{.*}}DIDerivedType{{.*}}tag: DW_TAG_member,{{.*}}name: "A",{{.*}}extraData:{{.*}}
+// CHECK: {{.*}}DICompositeType{{.*}}tag: DW_TAG_structure_type,{{.*}}name: "A",{{.*}}
+// CHECK: {{.*}}DIDerivedType{{.*}}tag: DW_TAG_member,{{.*}}name: "B",{{.*}}extraData:{{.*}}
+// CHECK: {{.*}}DICompositeType{{.*}}tag: DW_TAG_structure_type,{{.*}}name: "B",{{.*}}
+// CHECK: {{.*}}DIDerivedType{{.*}}tag: DW_TAG_member,{{.*}}name: "C",{{.*}}extraData:{{.*}}
+// CHECK: {{.*}}DICompositeType{{.*}}tag: DW_TAG_structure_type,{{.*}}name: "C",{{.*}}
+// CHECK-NOT: {{.*}}DIDerivedType{{.*}}name: "D",{{.*}}extraData:{{.*}}
+// CHECK: {{.*}}DIDerivedType{{.*}}tag: DW_TAG_member,{{.*}}name: "D",{{.*}}
+// CHECK: {{.*}}DICompositeType{{.*}}tag: DW_TAG_structure_type,{{.*}}name: "D",{{.*}}
+// CHECK: {{.*}}DIDerivedType{{.*}}tag: DW_TAG_member,{{.*}}flags: DIFlagArtificial{{.*}}
+
+#![allow(dead_code)]
+#![allow(unused_variables)]
+#![allow(unused_assignments)]
+
+enum E { A, B, C, D(bool) }
+
+pub fn main() {
+    let e = E::D(true);
+}
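
A hedged aside (not part of the patch): the layout this test calls a "niche" can be observed with std::mem::size_of. The dataless variants A, B and C are encoded in spare bit patterns of the bool payload, so on current rustc the whole enum stays one byte; the extraData and artificial members checked above describe exactly that encoding.

    // Illustrative sketch only, not from the commit; the sizes are what current
    // rustc produces and are not a stable guarantee.
    use std::mem::size_of;

    #[allow(dead_code)]
    enum E { A, B, C, D(bool) }

    fn main() {
        println!("size_of::<bool>()        = {}", size_of::<bool>());        // 1
        println!("size_of::<E>()           = {}", size_of::<E>());           // also 1: A, B, C live in bool's niche
        println!("size_of::<Option<&u8>>() = {}", size_of::<Option<&u8>>()); // pointer-sized, same idea
    }
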
diff --git a/src/test/codegen/enum-debug-tagged.rs b/src/test/codegen/enum-debug-tagged.rs
new file mode 100644 (file)
index 0000000..e862d29
--- /dev/null
@@ -0,0 +1,40 @@
+// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// This test depends on a patch that was committed to upstream LLVM
+// before 7.0, then backported to the Rust LLVM fork.  It tests that
+// debug info for tagged (ordinary) enums is properly emitted.
+
+// ignore-tidy-linelength
+// ignore-windows
+// min-system-llvm-version 7.0
+
+// compile-flags: -g -C no-prepopulate-passes
+
+// CHECK-LABEL: @main
+// CHECK: {{.*}}DICompositeType{{.*}}tag: DW_TAG_structure_type,{{.*}}name: "E",{{.*}}
+// CHECK: {{.*}}DICompositeType{{.*}}tag: DW_TAG_variant_part,{{.*}}discriminator:{{.*}}
+// CHECK: {{.*}}DIDerivedType{{.*}}tag: DW_TAG_member,{{.*}}name: "A",{{.*}}extraData:{{.*}}
+// CHECK: {{.*}}DICompositeType{{.*}}tag: DW_TAG_structure_type,{{.*}}name: "A",{{.*}}
+// CHECK: {{.*}}DIDerivedType{{.*}}tag: DW_TAG_member,{{.*}}name: "__0",{{.*}}
+// CHECK: {{.*}}DIDerivedType{{.*}}tag: DW_TAG_member,{{.*}}name: "B",{{.*}}extraData:{{.*}}
+// CHECK: {{.*}}DICompositeType{{.*}}tag: DW_TAG_structure_type,{{.*}}name: "B",{{.*}}
+// CHECK: {{.*}}DIDerivedType{{.*}}tag: DW_TAG_member,{{.*}}name: "__0",{{.*}}
+// CHECK: {{.*}}DIDerivedType{{.*}}tag: DW_TAG_member,{{.*}}flags: DIFlagArtificial{{.*}}
+
+#![allow(dead_code)]
+#![allow(unused_variables)]
+#![allow(unused_assignments)]
+
+enum E { A(u32), B(u32) }
+
+pub fn main() {
+    let e = E::A(23);
+}
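
For contrast with the niche case, a hedged aside (not part of the patch): u32 has no invalid bit patterns to borrow, so rustc has to add an explicit tag field, and that tag plus the per-variant structs is what the DW_TAG_variant_part metadata checked above describes.

    // Illustrative sketch only, not from the commit; sizes reflect current
    // rustc layout, which is not a stable guarantee.
    use std::mem::size_of;

    #[allow(dead_code)]
    enum E { A(u32), B(u32) }

    fn main() {
        // 4 bytes of payload plus a separate tag, rounded up to u32 alignment.
        println!("size_of::<u32>() = {}", size_of::<u32>()); // 4
        println!("size_of::<E>()   = {}", size_of::<E>());   // 8
    }
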
index 75737cd6f135e835cdc9337a8a77c1a2fd2eeaae..dcd4588af76c7710b063a0dd63d1da6ec12b5b9b 100644 (file)
 
 // min-lldb-version: 310
 
+// This fails on lldb 6.0.1 on x86-64 Fedora 28, so mark it macOS-only
+// for now.
+// only-macos
+
 // compile-flags:-g
 
 // === GDB TESTS ===================================================================================
diff --git a/src/test/debuginfo/borrowed-enum-legacy.rs b/src/test/debuginfo/borrowed-enum-legacy.rs
new file mode 100644 (file)
index 0000000..a04f7d6
--- /dev/null
@@ -0,0 +1,94 @@
+// Copyright 2013-2014 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// ignore-tidy-linelength
+// min-lldb-version: 310
+
+// As long as LLVM 5 and LLVM 6 are supported, we want to test the
+// enum debuginfo fallback mode.  Once those are desupported, this
+// test can be removed, as there is another (non-"legacy") test that
+// tests the new mode.
+// ignore-llvm-version: 7.0 - 9.9.9
+// ignore-gdb-version: 7.11.90 - 7.12.9
+// ignore-gdb-version: 8.2 - 9.9
+
+// compile-flags:-g
+
+// === GDB TESTS ===================================================================================
+
+// gdb-command:run
+
+// gdb-command:print *the_a_ref
+// gdbg-check:$1 = {{RUST$ENUM$DISR = TheA, x = 0, y = 8970181431921507452}, {RUST$ENUM$DISR = TheA, [...]}}
+// gdbr-check:$1 = borrowed_enum_legacy::ABC::TheA{x: 0, y: 8970181431921507452}
+
+// gdb-command:print *the_b_ref
+// gdbg-check:$2 = {{RUST$ENUM$DISR = TheB, [...]}, {RUST$ENUM$DISR = TheB, __0 = 0, __1 = 286331153, __2 = 286331153}}
+// gdbr-check:$2 = borrowed_enum_legacy::ABC::TheB(0, 286331153, 286331153)
+
+// gdb-command:print *univariant_ref
+// gdbg-check:$3 = {{__0 = 4820353753753434}}
+// gdbr-check:$3 = borrowed_enum_legacy::Univariant::TheOnlyCase(4820353753753434)
+
+
+// === LLDB TESTS ==================================================================================
+
+// lldb-command:run
+
+// lldb-command:print *the_a_ref
+// lldbg-check:[...]$0 = TheA { x: 0, y: 8970181431921507452 }
+// lldbr-check:(borrowed_enum_legacy::ABC::TheA) *the_a_ref = TheA { borrowed_enum_legacy::ABC::TheA: 0, borrowed_enum_legacy::ABC::TheB: 8970181431921507452 }
+// lldb-command:print *the_b_ref
+// lldbg-check:[...]$1 = TheB(0, 286331153, 286331153)
+// lldbr-check:(borrowed_enum_legacy::ABC::TheB) *the_b_ref = { = 0 = 286331153 = 286331153 }
+// lldb-command:print *univariant_ref
+// lldbg-check:[...]$2 = TheOnlyCase(4820353753753434)
+// lldbr-check:(borrowed_enum_legacy::Univariant) *univariant_ref = { borrowed_enum_legacy::TheOnlyCase = { = 4820353753753434 } }
+
+#![allow(unused_variables)]
+#![feature(omit_gdb_pretty_printer_section)]
+#![omit_gdb_pretty_printer_section]
+
+// The first element is to ensure proper alignment, irrespective of the machine's word size. Since
+// the size of the discriminant value is machine dependent, this has to be taken into account when
+// the datatype layout should be predictable, as in this case.
+enum ABC {
+    TheA { x: i64, y: i64 },
+    TheB (i64, i32, i32),
+}
+
+// This is a special case since it does not have the implicit discriminant field.
+enum Univariant {
+    TheOnlyCase(i64)
+}
+
+fn main() {
+
+    // 0b0111110001111100011111000111110001111100011111000111110001111100 = 8970181431921507452
+    // 0b01111100011111000111110001111100 = 2088533116
+    // 0b0111110001111100 = 31868
+    // 0b01111100 = 124
+    let the_a = ABC::TheA { x: 0, y: 8970181431921507452 };
+    let the_a_ref: &ABC = &the_a;
+
+    // 0b0001000100010001000100010001000100010001000100010001000100010001 = 1229782938247303441
+    // 0b00010001000100010001000100010001 = 286331153
+    // 0b0001000100010001 = 4369
+    // 0b00010001 = 17
+    let the_b = ABC::TheB (0, 286331153, 286331153);
+    let the_b_ref: &ABC = &the_b;
+
+    let univariant = Univariant::TheOnlyCase(4820353753753434);
+    let univariant_ref: &Univariant = &univariant;
+
+    zzz(); // #break
+}
+
+fn zzz() {()}
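
A hedged aside (not part of the patch) on the two comments in the test above, namely that the leading i64 field fixes the alignment and that a univariant enum carries no implicit discriminant; both points can be checked directly with std::mem::size_of:

    // Illustrative sketch only, not from the commit; it mirrors the two enums
    // from the test above, and the sizes are what current rustc produces.
    use std::mem::size_of;

    #[allow(dead_code)]
    enum ABC {
        TheA { x: i64, y: i64 },
        TheB(i64, i32, i32),
    }

    #[allow(dead_code)]
    enum Univariant {
        TheOnlyCase(i64),
    }

    fn main() {
        // Two dataful variants with no niche: 16 bytes of payload plus a tag,
        // rounded up to i64 alignment.
        println!("size_of::<ABC>()        = {}", size_of::<ABC>());        // 24
        // A single variant needs no discriminant at all.
        println!("size_of::<Univariant>() = {}", size_of::<Univariant>()); // 8
    }
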
index 9143e83343fb89cddc0ee653302d92e8e4c585c1..8362934166cf56a7abeab87cc76e00929e2906fa 100644 (file)
@@ -9,8 +9,11 @@
 // except according to those terms.
 
 // ignore-tidy-linelength
-// min-lldb-version: 310
-// ignore-gdb-version: 7.11.90 - 7.12.9
+
+// Require LLVM with DW_TAG_variant_part and a gdb or lldb that can read it.
+// min-system-llvm-version: 7.0
+// min-gdb-version: 8.2
+// rust-lldb
 
 // compile-flags:-g
 
 // gdb-command:run
 
 // gdb-command:print *the_a_ref
-// gdbg-check:$1 = {{RUST$ENUM$DISR = TheA, x = 0, y = 8970181431921507452}, {RUST$ENUM$DISR = TheA, [...]}}
 // gdbr-check:$1 = borrowed_enum::ABC::TheA{x: 0, y: 8970181431921507452}
 
 // gdb-command:print *the_b_ref
-// gdbg-check:$2 = {{RUST$ENUM$DISR = TheB, [...]}, {RUST$ENUM$DISR = TheB, __0 = 0, __1 = 286331153, __2 = 286331153}}
 // gdbr-check:$2 = borrowed_enum::ABC::TheB(0, 286331153, 286331153)
 
 // gdb-command:print *univariant_ref
-// gdbg-check:$3 = {{__0 = 4820353753753434}}
 // gdbr-check:$3 = borrowed_enum::Univariant::TheOnlyCase(4820353753753434)
 
 
 // lldb-command:run
 
 // lldb-command:print *the_a_ref
-// lldbg-check:[...]$0 = TheA { x: 0, y: 8970181431921507452 }
-// lldbr-check:(borrowed_enum::ABC::TheA) *the_a_ref = TheA { borrowed_enum::ABC::TheA: 0, borrowed_enum::ABC::TheB: 8970181431921507452 }
+// lldbr-check:(borrowed_enum::ABC::TheA) *the_a_ref = TheA { TheA: 0, TheB: 8970181431921507452 }
 // lldb-command:print *the_b_ref
-// lldbg-check:[...]$1 = TheB(0, 286331153, 286331153)
 // lldbr-check:(borrowed_enum::ABC::TheB) *the_b_ref = { = 0 = 286331153 = 286331153 }
 // lldb-command:print *univariant_ref
-// lldbg-check:[...]$2 = TheOnlyCase(4820353753753434)
-// lldbr-check:(borrowed_enum::Univariant) *univariant_ref = { borrowed_enum::TheOnlyCase = { = 4820353753753434 } }
+// lldbr-check:(borrowed_enum::Univariant) *univariant_ref = { TheOnlyCase = { = 4820353753753434 } }
 
 #![allow(unused_variables)]
 #![feature(omit_gdb_pretty_printer_section)]
index 9002e19ce21baa77b38b903e732393794e475362..715024a2ef9130ba800a9d5a35a167bfe37ce8fd 100644 (file)
 
 // min-lldb-version: 310
 
+// This fails on lldb 6.0.1 on x86-64 Fedora 28, so mark it macOS-only
+// for now.
+// only-macos
+
 // aux-build:cross_crate_spans.rs
 extern crate cross_crate_spans;
 
index 48231a906c902a7032a7187f33dc6c2dac277621..77583ab10377919c75f57508134b781248a0d5fb 100644 (file)
 
 // min-lldb-version: 310
 
+// This fails on lldb 6.0.1 on x86-64 Fedora 28, so mark it macOS-only
+// for now.
+// only-macos
+
 // compile-flags:-g
 
 // === GDB TESTS ===================================================================================
diff --git a/src/test/debuginfo/generic-enum-with-different-disr-sizes-legacy.rs b/src/test/debuginfo/generic-enum-with-different-disr-sizes-legacy.rs
new file mode 100644 (file)
index 0000000..092b31b
--- /dev/null
@@ -0,0 +1,115 @@
+// Copyright 2013-2014 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// ignore-tidy-linelength
+// ignore-lldb: FIXME(#27089)
+// min-lldb-version: 310
+
+// As long as LLVM 5 and LLVM 6 are supported, we want to test the
+// enum debuginfo fallback mode.  Once those are desupported, this
+// test can be removed, as there is another (non-"legacy") test that
+// tests the new mode.
+// ignore-llvm-version: 7.0 - 9.9.9
+// ignore-gdb-version: 8.2 - 9.9
+
+// compile-flags:-g
+
+// === GDB TESTS ===================================================================================
+// gdb-command:run
+
+// gdb-command:print eight_bytes1
+// gdbg-check:$1 = {{RUST$ENUM$DISR = Variant1, __0 = 100}, {RUST$ENUM$DISR = Variant1, __0 = 100}}
+// gdbr-check:$1 = generic_enum_with_different_disr_sizes_legacy::Enum::Variant1(100)
+
+// gdb-command:print four_bytes1
+// gdbg-check:$2 = {{RUST$ENUM$DISR = Variant1, __0 = 101}, {RUST$ENUM$DISR = Variant1, __0 = 101}}
+// gdbr-check:$2 = generic_enum_with_different_disr_sizes_legacy::Enum::Variant1(101)
+
+// gdb-command:print two_bytes1
+// gdbg-check:$3 = {{RUST$ENUM$DISR = Variant1, __0 = 102}, {RUST$ENUM$DISR = Variant1, __0 = 102}}
+// gdbr-check:$3 = generic_enum_with_different_disr_sizes_legacy::Enum::Variant1(102)
+
+// gdb-command:print one_byte1
+// gdbg-check:$4 = {{RUST$ENUM$DISR = Variant1, __0 = 65 'A'}, {RUST$ENUM$DISR = Variant1, __0 = 65 'A'}}
+// gdbr-check:$4 = generic_enum_with_different_disr_sizes_legacy::Enum::Variant1(65)
+
+
+// gdb-command:print eight_bytes2
+// gdbg-check:$5 = {{RUST$ENUM$DISR = Variant2, __0 = 100}, {RUST$ENUM$DISR = Variant2, __0 = 100}}
+// gdbr-check:$5 = generic_enum_with_different_disr_sizes_legacy::Enum::Variant2(100)
+
+// gdb-command:print four_bytes2
+// gdbg-check:$6 = {{RUST$ENUM$DISR = Variant2, __0 = 101}, {RUST$ENUM$DISR = Variant2, __0 = 101}}
+// gdbr-check:$6 = generic_enum_with_different_disr_sizes_legacy::Enum::Variant2(101)
+
+// gdb-command:print two_bytes2
+// gdbg-check:$7 = {{RUST$ENUM$DISR = Variant2, __0 = 102}, {RUST$ENUM$DISR = Variant2, __0 = 102}}
+// gdbr-check:$7 = generic_enum_with_different_disr_sizes_legacy::Enum::Variant2(102)
+
+// gdb-command:print one_byte2
+// gdbg-check:$8 = {{RUST$ENUM$DISR = Variant2, __0 = 65 'A'}, {RUST$ENUM$DISR = Variant2, __0 = 65 'A'}}
+// gdbr-check:$8 = generic_enum_with_different_disr_sizes_legacy::Enum::Variant2(65)
+
+// gdb-command:continue
+
+// === LLDB TESTS ==================================================================================
+// lldb-command:run
+
+// lldb-command:print eight_bytes1
+// lldb-check:[...]$0 = Variant1(100)
+// lldb-command:print four_bytes1
+// lldb-check:[...]$1 = Variant1(101)
+// lldb-command:print two_bytes1
+// lldb-check:[...]$2 = Variant1(102)
+// lldb-command:print one_byte1
+// lldb-check:[...]$3 = Variant1('A')
+
+// lldb-command:print eight_bytes2
+// lldb-check:[...]$4 = Variant2(100)
+// lldb-command:print four_bytes2
+// lldb-check:[...]$5 = Variant2(101)
+// lldb-command:print two_bytes2
+// lldb-check:[...]$6 = Variant2(102)
+// lldb-command:print one_byte2
+// lldb-check:[...]$7 = Variant2('A')
+
+// lldb-command:continue
+
+#![allow(unused_variables)]
+#![allow(dead_code)]
+#![feature(omit_gdb_pretty_printer_section)]
+#![omit_gdb_pretty_printer_section]
+
+// This test case makes sure that we get correct type descriptions for the enum
+// discriminant of different instantiations of the same generic enum type where,
+// depending on the generic type parameter(s), the discriminant has a
+// different size in memory.
+
+enum Enum<T> {
+    Variant1(T),
+    Variant2(T)
+}
+
+fn main() {
+    // These are ordered for descending size on purpose
+    let eight_bytes1 = Enum::Variant1(100.0f64);
+    let four_bytes1 = Enum::Variant1(101i32);
+    let two_bytes1 = Enum::Variant1(102i16);
+    let one_byte1 = Enum::Variant1(65u8);
+
+    let eight_bytes2 = Enum::Variant2(100.0f64);
+    let four_bytes2 = Enum::Variant2(101i32);
+    let two_bytes2 = Enum::Variant2(102i16);
+    let one_byte2 = Enum::Variant2(65u8);
+
+    zzz(); // #break
+}
+
+fn zzz() { () }
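
A hedged aside (not part of the patch) on the comment above: rustc sizes the tag to match the payload's size and alignment, so the same generic enum has a different footprint per instantiation, which is exactly the eight/four/two/one-byte split the test's locals are named after.

    // Illustrative sketch only, not from the commit; the printed sizes are what
    // current rustc produces and are not a stable guarantee.
    use std::mem::size_of;

    #[allow(dead_code)]
    enum Enum<T> {
        Variant1(T),
        Variant2(T),
    }

    fn main() {
        println!("Enum<f64>: {} bytes", size_of::<Enum<f64>>()); // 16
        println!("Enum<i32>: {} bytes", size_of::<Enum<i32>>()); //  8
        println!("Enum<i16>: {} bytes", size_of::<Enum<i16>>()); //  4
        println!("Enum<u8>:  {} bytes", size_of::<Enum<u8>>());  //  2
    }
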
index 1fc05b3752f04126b23fef8707aa46317b546150..988ec4a65f1e2a8e041d82f3727bd92daafd1885 100644 (file)
 // ignore-lldb: FIXME(#27089)
 // min-lldb-version: 310
 
+// Require LLVM with DW_TAG_variant_part and a gdb that can read it.
+// min-system-llvm-version: 7.0
+// min-gdb-version: 8.2
+
 // compile-flags:-g
 
 // === GDB TESTS ===================================================================================
 // gdb-command:run
 
 // gdb-command:print eight_bytes1
-// gdbg-check:$1 = {{RUST$ENUM$DISR = Variant1, __0 = 100}, {RUST$ENUM$DISR = Variant1, __0 = 100}}
-// gdbr-check:$1 = generic_enum_with_different_disr_sizes::Enum::Variant1(100)
+// gdbr-check:$1 = generic_enum_with_different_disr_sizes::Enum<f64>::Variant1(100)
 
 // gdb-command:print four_bytes1
-// gdbg-check:$2 = {{RUST$ENUM$DISR = Variant1, __0 = 101}, {RUST$ENUM$DISR = Variant1, __0 = 101}}
-// gdbr-check:$2 = generic_enum_with_different_disr_sizes::Enum::Variant1(101)
+// gdbr-check:$2 = generic_enum_with_different_disr_sizes::Enum<i32>::Variant1(101)
 
 // gdb-command:print two_bytes1
-// gdbg-check:$3 = {{RUST$ENUM$DISR = Variant1, __0 = 102}, {RUST$ENUM$DISR = Variant1, __0 = 102}}
-// gdbr-check:$3 = generic_enum_with_different_disr_sizes::Enum::Variant1(102)
+// gdbr-check:$3 = generic_enum_with_different_disr_sizes::Enum<i16>::Variant1(102)
 
 // gdb-command:print one_byte1
-// gdbg-check:$4 = {{RUST$ENUM$DISR = Variant1, __0 = 65 'A'}, {RUST$ENUM$DISR = Variant1, __0 = 65 'A'}}
-// gdbr-check:$4 = generic_enum_with_different_disr_sizes::Enum::Variant1(65)
+// gdbr-check:$4 = generic_enum_with_different_disr_sizes::Enum<u8>::Variant1(65)
 
 
 // gdb-command:print eight_bytes2
-// gdbg-check:$5 = {{RUST$ENUM$DISR = Variant2, __0 = 100}, {RUST$ENUM$DISR = Variant2, __0 = 100}}
-// gdbr-check:$5 = generic_enum_with_different_disr_sizes::Enum::Variant2(100)
+// gdbr-check:$5 = generic_enum_with_different_disr_sizes::Enum<f64>::Variant2(100)
 
 // gdb-command:print four_bytes2
-// gdbg-check:$6 = {{RUST$ENUM$DISR = Variant2, __0 = 101}, {RUST$ENUM$DISR = Variant2, __0 = 101}}
-// gdbr-check:$6 = generic_enum_with_different_disr_sizes::Enum::Variant2(101)
+// gdbr-check:$6 = generic_enum_with_different_disr_sizes::Enum<i32>::Variant2(101)
 
 // gdb-command:print two_bytes2
-// gdbg-check:$7 = {{RUST$ENUM$DISR = Variant2, __0 = 102}, {RUST$ENUM$DISR = Variant2, __0 = 102}}
-// gdbr-check:$7 = generic_enum_with_different_disr_sizes::Enum::Variant2(102)
+// gdbr-check:$7 = generic_enum_with_different_disr_sizes::Enum<i16>::Variant2(102)
 
 // gdb-command:print one_byte2
-// gdbg-check:$8 = {{RUST$ENUM$DISR = Variant2, __0 = 65 'A'}, {RUST$ENUM$DISR = Variant2, __0 = 65 'A'}}
-// gdbr-check:$8 = generic_enum_with_different_disr_sizes::Enum::Variant2(65)
+// gdbr-check:$8 = generic_enum_with_different_disr_sizes::Enum<u8>::Variant2(65)
 
 // gdb-command:continue
 
diff --git a/src/test/debuginfo/generic-struct-style-enum-legacy.rs b/src/test/debuginfo/generic-struct-style-enum-legacy.rs
new file mode 100644 (file)
index 0000000..47c4ea7
--- /dev/null
@@ -0,0 +1,96 @@
+// Copyright 2013-2014 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// ignore-tidy-linelength
+// min-lldb-version: 310
+// ignore-gdb-version: 7.11.90 - 7.12.9
+
+// As long as LLVM 5 and LLVM 6 are supported, we want to test the
+// enum debuginfo fallback mode.  Once those are desupported, this
+// test can be removed, as there is another (non-"legacy") test that
+// tests the new mode.
+// ignore-llvm-version: 7.0 - 9.9.9
+// ignore-gdb-version: 8.2 - 9.9
+
+// compile-flags:-g
+
+// gdb-command:set print union on
+// gdb-command:run
+
+// gdb-command:print case1
+// gdbg-check:$1 = {{RUST$ENUM$DISR = Case1, a = 0, b = 31868, c = 31868, d = 31868, e = 31868}, {RUST$ENUM$DISR = Case1, [...]}, {RUST$ENUM$DISR = Case1, [...]}}
+// gdbr-check:$1 = generic_struct_style_enum_legacy::Regular::Case1{a: 0, b: 31868, c: 31868, d: 31868, e: 31868}
+
+// gdb-command:print case2
+// gdbg-check:$2 = {{RUST$ENUM$DISR = Case2, [...]}, {RUST$ENUM$DISR = Case2, a = 0, b = 286331153, c = 286331153}, {RUST$ENUM$DISR = Case2, [...]}}
+// gdbr-check:$2 = generic_struct_style_enum_legacy::Regular::Case2{a: 0, b: 286331153, c: 286331153}
+
+// gdb-command:print case3
+// gdbg-check:$3 = {{RUST$ENUM$DISR = Case3, [...]}, {RUST$ENUM$DISR = Case3, [...]}, {RUST$ENUM$DISR = Case3, a = 0, b = 6438275382588823897}}
+// gdbr-check:$3 = generic_struct_style_enum_legacy::Regular::Case3{a: 0, b: 6438275382588823897}
+
+// gdb-command:print univariant
+// gdbg-check:$4 = {{a = -1}}
+// gdbr-check:$4 = generic_struct_style_enum_legacy::Univariant<i32>::TheOnlyCase{a: -1}
+
+
+#![feature(omit_gdb_pretty_printer_section)]
+#![omit_gdb_pretty_printer_section]
+
+use self::Regular::{Case1, Case2, Case3};
+use self::Univariant::TheOnlyCase;
+
+// NOTE: This is a copy of the non-generic test case. The `Txx` type parameters have to be
+// substituted with something of size `xx` bits and the same alignment as an integer type of the
+// same size.
+
+// The first element is to ensure proper alignment, irrespective of the machine's word size. Since
+// the size of the discriminant value is machine dependent, this has to be taken into account when
+// the datatype layout should be predictable, as in this case.
+enum Regular<T16, T32, T64> {
+    Case1 { a: T64, b: T16, c: T16, d: T16, e: T16},
+    Case2 { a: T64, b: T32, c: T32},
+    Case3 { a: T64, b: T64 }
+}
+
+enum Univariant<T> {
+    TheOnlyCase { a: T }
+}
+
+fn main() {
+
+    // In order to avoid endianness trouble, all of the following test values consist of a single
+    // repeated byte. This way each interpretation of the union should look the same, no matter
+    // whether this is a big- or little-endian machine.
+
+    // 0b0111110001111100011111000111110001111100011111000111110001111100 = 8970181431921507452
+    // 0b01111100011111000111110001111100 = 2088533116
+    // 0b0111110001111100 = 31868
+    // 0b01111100 = 124
+    let case1: Regular<u16, u32, i64> = Case1 { a: 0, b: 31868, c: 31868, d: 31868, e: 31868 };
+
+    // 0b0001000100010001000100010001000100010001000100010001000100010001 = 1229782938247303441
+    // 0b00010001000100010001000100010001 = 286331153
+    // 0b0001000100010001 = 4369
+    // 0b00010001 = 17
+    let case2: Regular<i16, u32, i64>  = Case2 { a: 0, b: 286331153, c: 286331153 };
+
+    // 0b0101100101011001010110010101100101011001010110010101100101011001 = 6438275382588823897
+    // 0b01011001010110010101100101011001 = 1499027801
+    // 0b0101100101011001 = 22873
+    // 0b01011001 = 89
+    let case3: Regular<u16, i32, u64>  = Case3 { a: 0, b: 6438275382588823897 };
+
+    let univariant = TheOnlyCase { a: -1 };
+
+    zzz(); // #break
+}
+
+fn zzz() {()}
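
A hedged aside (not part of the patch): the repeated-byte trick from the comments above is easy to sanity-check. Every byte of 8970181431921507452 is 0b01111100, so byte order cannot matter, and truncating to narrower widths yields the other constants listed in the test.

    // Illustrative sketch only, not from the commit: verify the repeated-byte
    // constants used by the test above.
    fn main() {
        let v: u64 = 8970181431921507452; // 0b01111100 repeated eight times
        for i in 0..8 {
            assert_eq!((v >> (8 * i)) & 0xff, 0x7c); // every byte is 0b01111100
        }
        assert_eq!(v as u32, 2088533116); // 0b01111100 repeated four times
        assert_eq!(v as u16, 31868);      // ...twice
        assert_eq!(v as u8, 124);         // ...once
        println!("repeated-byte identities hold");
    }
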
index 4a1d14ccf6118c862f540777a3b4e49186356226..e08cde03c477b92aa3a91e6dcfeef993d79fc0a5 100644 (file)
 
 // ignore-tidy-linelength
 // min-lldb-version: 310
-// ignore-gdb-version: 7.11.90 - 7.12.9
+
+// Require LLVM with DW_TAG_variant_part and a gdb that can read it.
+// min-system-llvm-version: 7.0
+// min-gdb-version: 8.2
 
 // compile-flags:-g
 
 // gdb-command:run
 
 // gdb-command:print case1
-// gdbg-check:$1 = {{RUST$ENUM$DISR = Case1, a = 0, b = 31868, c = 31868, d = 31868, e = 31868}, {RUST$ENUM$DISR = Case1, [...]}, {RUST$ENUM$DISR = Case1, [...]}}
-// gdbr-check:$1 = generic_struct_style_enum::Regular::Case1{a: 0, b: 31868, c: 31868, d: 31868, e: 31868}
+// gdbr-check:$1 = generic_struct_style_enum::Regular<u16, u32, i64>::Case1{a: 0, b: 31868, c: 31868, d: 31868, e: 31868}
 
 // gdb-command:print case2
-// gdbg-check:$2 = {{RUST$ENUM$DISR = Case2, [...]}, {RUST$ENUM$DISR = Case2, a = 0, b = 286331153, c = 286331153}, {RUST$ENUM$DISR = Case2, [...]}}
-// gdbr-check:$2 = generic_struct_style_enum::Regular::Case2{a: 0, b: 286331153, c: 286331153}
+// gdbr-check:$2 = generic_struct_style_enum::Regular<i16, u32, i64>::Case2{a: 0, b: 286331153, c: 286331153}
 
 // gdb-command:print case3
-// gdbg-check:$3 = {{RUST$ENUM$DISR = Case3, [...]}, {RUST$ENUM$DISR = Case3, [...]}, {RUST$ENUM$DISR = Case3, a = 0, b = 6438275382588823897}}
-// gdbr-check:$3 = generic_struct_style_enum::Regular::Case3{a: 0, b: 6438275382588823897}
+// gdbr-check:$3 = generic_struct_style_enum::Regular<u16, i32, u64>::Case3{a: 0, b: 6438275382588823897}
 
 // gdb-command:print univariant
-// gdbg-check:$4 = {{a = -1}}
 // gdbr-check:$4 = generic_struct_style_enum::Univariant<i32>::TheOnlyCase{a: -1}
 
 
diff --git a/src/test/debuginfo/generic-tuple-style-enum-legacy.rs b/src/test/debuginfo/generic-tuple-style-enum-legacy.rs
new file mode 100644 (file)
index 0000000..ee28968
--- /dev/null
@@ -0,0 +1,118 @@
+// Copyright 2013-2014 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// ignore-tidy-linelength
+// min-lldb-version: 310
+// ignore-gdb-version: 7.11.90 - 7.12.9
+
+// As long as LLVM 5 and LLVM 6 are supported, we want to test the
+// enum debuginfo fallback mode.  Once those are desupported, this
+// test can be removed, as there is another (non-"legacy") test that
+// tests the new mode.
+// ignore-llvm-version: 7.0 - 9.9.9
+// ignore-gdb-version: 8.2 - 9.9
+
+// compile-flags:-g
+
+// === GDB TESTS ===================================================================================
+
+// gdb-command:set print union on
+// gdb-command:run
+
+// gdb-command:print case1
+// gdbg-check:$1 = {{RUST$ENUM$DISR = Case1, __0 = 0, __1 = 31868, __2 = 31868, __3 = 31868, __4 = 31868}, {RUST$ENUM$DISR = Case1, [...]}, {RUST$ENUM$DISR = Case1, [...]}}
+// gdbr-check:$1 = generic_tuple_style_enum_legacy::Regular::Case1(0, 31868, 31868, 31868, 31868)
+
+// gdb-command:print case2
+// gdbg-check:$2 = {{RUST$ENUM$DISR = Case2, [...]}, {RUST$ENUM$DISR = Case2, __0 = 0, __1 = 286331153, __2 = 286331153}, {RUST$ENUM$DISR = Case2, [...]}}
+// gdbr-check:$2 = generic_tuple_style_enum_legacy::Regular::Case2(0, 286331153, 286331153)
+
+// gdb-command:print case3
+// gdbg-check:$3 = {{RUST$ENUM$DISR = Case3, [...]}, {RUST$ENUM$DISR = Case3, [...]}, {RUST$ENUM$DISR = Case3, __0 = 0, __1 = 6438275382588823897}}
+// gdbr-check:$3 = generic_tuple_style_enum_legacy::Regular::Case3(0, 6438275382588823897)
+
+// gdb-command:print univariant
+// gdbg-check:$4 = {{__0 = -1}}
+// gdbr-check:$4 = generic_tuple_style_enum_legacy::Univariant<i64>::TheOnlyCase(-1)
+
+
+// === LLDB TESTS ==================================================================================
+
+// lldb-command:run
+
+// lldb-command:print case1
+// lldbg-check:[...]$0 = Case1(0, 31868, 31868, 31868, 31868)
+// lldbr-check:(generic_tuple_style_enum_legacy::Regular<u16, u32, u64>::Case1) case1 = { = 0 = 31868 = 31868 = 31868 = 31868 }
+
+// lldb-command:print case2
+// lldbg-check:[...]$1 = Case2(0, 286331153, 286331153)
+// lldbr-check:(generic_tuple_style_enum_legacy::Regular<i16, i32, i64>::Case2) case2 = Regular<i16, i32, i64>::Case2 { generic_tuple_style_enum_legacy::Regular<i16, i32, i64>::Case1: 0, generic_tuple_style_enum_legacy::Regular<i16, i32, i64>::Case2: 286331153, generic_tuple_style_enum_legacy::Regular<i16, i32, i64>::Case3: 286331153 }
+
+// lldb-command:print case3
+// lldbg-check:[...]$2 = Case3(0, 6438275382588823897)
+// lldbr-check:(generic_tuple_style_enum_legacy::Regular<i16, i32, i64>::Case3) case3 = Regular<i16, i32, i64>::Case3 { generic_tuple_style_enum_legacy::Regular<i16, i32, i64>::Case1: 0, generic_tuple_style_enum_legacy::Regular<i16, i32, i64>::Case2: 6438275382588823897 }
+
+// lldb-command:print univariant
+// lldbg-check:[...]$3 = TheOnlyCase(-1)
+// lldbr-check:(generic_tuple_style_enum_legacy::Univariant<i64>) univariant = { generic_tuple_style_enum_legacy::TheOnlyCase = { = -1 } }
+
+#![feature(omit_gdb_pretty_printer_section)]
+#![omit_gdb_pretty_printer_section]
+
+use self::Regular::{Case1, Case2, Case3};
+use self::Univariant::TheOnlyCase;
+
+// NOTE: This is a copy of the non-generic test case. The `Txx` type parameters have to be
+// substituted with something of size `xx` bits and the same alignment as an integer type of the
+// same size.
+
+// The first element is to ensure proper alignment, irrespective of the machine's word size. Since
+// the size of the discriminant value is machine dependent, this has to be taken into account when
+// the datatype layout should be predictable, as in this case.
+enum Regular<T16, T32, T64> {
+    Case1(T64, T16, T16, T16, T16),
+    Case2(T64, T32, T32),
+    Case3(T64, T64)
+}
+
+enum Univariant<T64> {
+    TheOnlyCase(T64)
+}
+
+fn main() {
+
+    // In order to avoid endianness trouble, all of the following test values consist of a single
+    // repeated byte. This way each interpretation of the union should look the same, no matter
+    // whether this is a big- or little-endian machine.
+
+    // 0b0111110001111100011111000111110001111100011111000111110001111100 = 8970181431921507452
+    // 0b01111100011111000111110001111100 = 2088533116
+    // 0b0111110001111100 = 31868
+    // 0b01111100 = 124
+    let case1: Regular<u16, u32, u64> = Case1(0_u64, 31868_u16, 31868_u16, 31868_u16, 31868_u16);
+
+    // 0b0001000100010001000100010001000100010001000100010001000100010001 = 1229782938247303441
+    // 0b00010001000100010001000100010001 = 286331153
+    // 0b0001000100010001 = 4369
+    // 0b00010001 = 17
+    let case2: Regular<i16, i32, i64> = Case2(0_i64, 286331153_i32, 286331153_i32);
+
+    // 0b0101100101011001010110010101100101011001010110010101100101011001 = 6438275382588823897
+    // 0b01011001010110010101100101011001 = 1499027801
+    // 0b0101100101011001 = 22873
+    // 0b01011001 = 89
+    let case3: Regular<i16, i32, i64> = Case3(0_i64, 6438275382588823897_i64);
+
+    let univariant = TheOnlyCase(-1_i64);
+
+    zzz(); // #break
+}
+
+fn zzz() { () }
index 62bec28a022af0ab397827956926f5a7edd28fb9..ebd43daf46479f7ecf363d3ef2a0bc245bd99b77 100644 (file)
@@ -9,8 +9,12 @@
 // except according to those terms.
 
 // ignore-tidy-linelength
-// min-lldb-version: 310
-// ignore-gdb-version: 7.11.90 - 7.12.9
+
+// Require LLVM with DW_TAG_variant_part and a gdb and lldb that can
+// read it.
+// min-system-llvm-version: 7.0
+// min-gdb-version: 8.2
+// rust-lldb
 
 // compile-flags:-g
 
 // gdb-command:run
 
 // gdb-command:print case1
-// gdbg-check:$1 = {{RUST$ENUM$DISR = Case1, __0 = 0, __1 = 31868, __2 = 31868, __3 = 31868, __4 = 31868}, {RUST$ENUM$DISR = Case1, [...]}, {RUST$ENUM$DISR = Case1, [...]}}
-// gdbr-check:$1 = generic_tuple_style_enum::Regular::Case1(0, 31868, 31868, 31868, 31868)
+// gdbr-check:$1 = generic_tuple_style_enum::Regular<u16, u32, u64>::Case1(0, 31868, 31868, 31868, 31868)
 
 // gdb-command:print case2
-// gdbg-check:$2 = {{RUST$ENUM$DISR = Case2, [...]}, {RUST$ENUM$DISR = Case2, __0 = 0, __1 = 286331153, __2 = 286331153}, {RUST$ENUM$DISR = Case2, [...]}}
-// gdbr-check:$2 = generic_tuple_style_enum::Regular::Case2(0, 286331153, 286331153)
+// gdbr-check:$2 = generic_tuple_style_enum::Regular<i16, i32, i64>::Case2(0, 286331153, 286331153)
 
 // gdb-command:print case3
-// gdbg-check:$3 = {{RUST$ENUM$DISR = Case3, [...]}, {RUST$ENUM$DISR = Case3, [...]}, {RUST$ENUM$DISR = Case3, __0 = 0, __1 = 6438275382588823897}}
-// gdbr-check:$3 = generic_tuple_style_enum::Regular::Case3(0, 6438275382588823897)
+// gdbr-check:$3 = generic_tuple_style_enum::Regular<i16, i32, i64>::Case3(0, 6438275382588823897)
 
 // gdb-command:print univariant
-// gdbg-check:$4 = {{__0 = -1}}
 // gdbr-check:$4 = generic_tuple_style_enum::Univariant<i64>::TheOnlyCase(-1)
 
 
 // lldb-command:run
 
 // lldb-command:print case1
-// lldbg-check:[...]$0 = Case1(0, 31868, 31868, 31868, 31868)
 // lldbr-check:(generic_tuple_style_enum::Regular<u16, u32, u64>::Case1) case1 = { = 0 = 31868 = 31868 = 31868 = 31868 }
 
 // lldb-command:print case2
-// lldbg-check:[...]$1 = Case2(0, 286331153, 286331153)
-// lldbr-check:(generic_tuple_style_enum::Regular<i16, i32, i64>::Case2) case2 = Regular<i16, i32, i64>::Case2 { generic_tuple_style_enum::Regular<i16, i32, i64>::Case1: 0, generic_tuple_style_enum::Regular<i16, i32, i64>::Case2: 286331153, generic_tuple_style_enum::Regular<i16, i32, i64>::Case3: 286331153 }
+// lldbr-check:(generic_tuple_style_enum::Regular<i16, i32, i64>::Case2) case2 = Regular<i16, i32, i64>::Case2 { Case1: 0, Case2: 286331153, Case3: 286331153 }
 
 // lldb-command:print case3
-// lldbg-check:[...]$2 = Case3(0, 6438275382588823897)
-// lldbr-check:(generic_tuple_style_enum::Regular<i16, i32, i64>::Case3) case3 = Regular<i16, i32, i64>::Case3 { generic_tuple_style_enum::Regular<i16, i32, i64>::Case1: 0, generic_tuple_style_enum::Regular<i16, i32, i64>::Case2: 6438275382588823897 }
+// lldbr-check:(generic_tuple_style_enum::Regular<i16, i32, i64>::Case3) case3 = Regular<i16, i32, i64>::Case3 { Case1: 0, Case2: 6438275382588823897 }
 
 // lldb-command:print univariant
-// lldbg-check:[...]$3 = TheOnlyCase(-1)
-// lldbr-check:(generic_tuple_style_enum::Univariant<i64>) univariant = { generic_tuple_style_enum::TheOnlyCase = { = -1 } }
+// lldbr-check:(generic_tuple_style_enum::Univariant<i64>) univariant = { TheOnlyCase = { = -1 } }
 
 #![feature(omit_gdb_pretty_printer_section)]
 #![omit_gdb_pretty_printer_section]
index cef7a1cbf1b569c5baedf2c2acc3085f426adeaf..ffd402ec9361043f476b206a9e7208f883a1c4e0 100644 (file)
@@ -74,7 +74,7 @@
 // STACK BY REF
 // lldb-command:print *self
 // lldbg-check:[...]$0 = TupleStruct(100, -100.5)
-// lldbr-check:(method_on_tuple_struct::TupleStruct) *self = { = 100 = -100.5 }
+// lldbr-check:(method_on_tuple_struct::TupleStruct) *self = TupleStruct(100, -100.5)
 // lldb-command:print arg1
 // lldbg-check:[...]$1 = -1
 // lldbr-check:(isize) arg1 = -1
@@ -86,7 +86,7 @@
 // STACK BY VAL
 // lldb-command:print self
 // lldbg-check:[...]$3 = TupleStruct(100, -100.5)
-// lldbr-check:(method_on_tuple_struct::TupleStruct) self = { = 100 = -100.5 }
+// lldbr-check:(method_on_tuple_struct::TupleStruct) self = TupleStruct(100, -100.5)
 // lldb-command:print arg1
 // lldbg-check:[...]$4 = -3
 // lldbr-check:(isize) arg1 = -3
@@ -98,7 +98,7 @@
 // OWNED BY REF
 // lldb-command:print *self
 // lldbg-check:[...]$6 = TupleStruct(200, -200.5)
-// lldbr-check:(method_on_tuple_struct::TupleStruct) *self = { = 200 = -200.5 }
+// lldbr-check:(method_on_tuple_struct::TupleStruct) *self = TupleStruct(200, -200.5)
 // lldb-command:print arg1
 // lldbg-check:[...]$7 = -5
 // lldbr-check:(isize) arg1 = -5
 // OWNED BY VAL
 // lldb-command:print self
 // lldbg-check:[...]$9 = TupleStruct(200, -200.5)
-// lldbr-check:(method_on_tuple_struct::TupleStruct) self = { = 200 = -200.5 }
+// lldbr-check:(method_on_tuple_struct::TupleStruct) self = TupleStruct(200, -200.5)
 // lldb-command:print arg1
 // lldbg-check:[...]$10 = -7
 // lldbr-check:(isize) arg1 = -7
 // OWNED MOVED
 // lldb-command:print *self
 // lldbg-check:[...]$12 = TupleStruct(200, -200.5)
-// lldbr-check:(method_on_tuple_struct::TupleStruct) *self = { = 200 = -200.5 }
+// lldbr-check:(method_on_tuple_struct::TupleStruct) *self = TupleStruct(200, -200.5)
 // lldb-command:print arg1
 // lldbg-check:[...]$13 = -9
 // lldbr-check:(isize) arg1 = -9
index ab9c7e2dd2758a5d4dec1827be9c915f7fef0848..ab42b2eff99f858bffac19b006b2e23c015fa4ee 100644 (file)
 // ignore-lldb
 
 
+// Require LLVM with DW_TAG_variant_part and a gdb that can read it.
+// gdb 8.2.0 crashes on this test case, see
+// https://sourceware.org/bugzilla/show_bug.cgi?id=23626
+// This will be fixed in the next release, which will be >= 8.2.1.
+// min-system-llvm-version: 7.0
+// min-gdb-version: 8.2.1
+
 // compile-flags:-g
 // gdb-command:run
 
 // gdb-command:print first
-// gdbg-check:$1 = {<No data fields>}
-// gdbr-check:$1 = <error reading variable>
+// gdbr-check:$1 = nil_enum::ANilEnum {<No data fields>}
 
 // gdb-command:print second
-// gdbg-check:$2 = {<No data fields>}
-// gdbr-check:$2 = <error reading variable>
+// gdbr-check:$2 = nil_enum::AnotherNilEnum {<No data fields>}
 
 #![allow(unused_variables)]
 #![feature(omit_gdb_pretty_printer_section)]
diff --git a/src/test/debuginfo/recursive-struct-legacy.rs b/src/test/debuginfo/recursive-struct-legacy.rs
new file mode 100644 (file)
index 0000000..ac407ce
--- /dev/null
@@ -0,0 +1,245 @@
+// Copyright 2013-2014 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// ignore-tidy-linelength
+// ignore-lldb
+
+// As long as LLVM 5 and LLVM 6 are supported, we want to test the
+// enum debuginfo fallback mode.  Once those are desupported, this
+// test can be removed, as there is another (non-"legacy") test that
+// tests the new mode.
+// ignore-llvm-version: 7.0 - 9.9.9
+// ignore-gdb-version: 7.11.90 - 7.12.9
+// ignore-gdb-version: 8.2 - 9.9
+
+// compile-flags:-g
+
+// gdb-command:run
+
+// gdb-command:print stack_unique.value
+// gdb-check:$1 = 0
+// gdbg-command:print stack_unique.next.RUST$ENCODED$ENUM$0$Empty.val->value
+// gdbr-command:print stack_unique.next.val.value
+// gdb-check:$2 = 1
+
+// gdbg-command:print unique_unique->value
+// gdbr-command:print unique_unique.value
+// gdb-check:$3 = 2
+// gdbg-command:print unique_unique->next.RUST$ENCODED$ENUM$0$Empty.val->value
+// gdbr-command:print unique_unique.next.val.value
+// gdb-check:$4 = 3
+
+// gdb-command:print vec_unique[0].value
+// gdb-check:$5 = 6.5
+// gdbg-command:print vec_unique[0].next.RUST$ENCODED$ENUM$0$Empty.val->value
+// gdbr-command:print vec_unique[0].next.val.value
+// gdb-check:$6 = 7.5
+
+// gdbg-command:print borrowed_unique->value
+// gdbr-command:print borrowed_unique.value
+// gdb-check:$7 = 8.5
+// gdbg-command:print borrowed_unique->next.RUST$ENCODED$ENUM$0$Empty.val->value
+// gdbr-command:print borrowed_unique.next.val.value
+// gdb-check:$8 = 9.5
+
+// LONG CYCLE
+// gdb-command:print long_cycle1.value
+// gdb-check:$9 = 20
+// gdbg-command:print long_cycle1.next->value
+// gdbr-command:print long_cycle1.next.value
+// gdb-check:$10 = 21
+// gdbg-command:print long_cycle1.next->next->value
+// gdbr-command:print long_cycle1.next.next.value
+// gdb-check:$11 = 22
+// gdbg-command:print long_cycle1.next->next->next->value
+// gdbr-command:print long_cycle1.next.next.next.value
+// gdb-check:$12 = 23
+
+// gdb-command:print long_cycle2.value
+// gdb-check:$13 = 24
+// gdbg-command:print long_cycle2.next->value
+// gdbr-command:print long_cycle2.next.value
+// gdb-check:$14 = 25
+// gdbg-command:print long_cycle2.next->next->value
+// gdbr-command:print long_cycle2.next.next.value
+// gdb-check:$15 = 26
+
+// gdb-command:print long_cycle3.value
+// gdb-check:$16 = 27
+// gdbg-command:print long_cycle3.next->value
+// gdbr-command:print long_cycle3.next.value
+// gdb-check:$17 = 28
+
+// gdb-command:print long_cycle4.value
+// gdb-check:$18 = 29.5
+
+// gdbg-command:print (*****long_cycle_w_anonymous_types).value
+// gdbr-command:print long_cycle_w_anonymous_types.value
+// gdb-check:$19 = 30
+
+// gdbg-command:print (*****((*****long_cycle_w_anonymous_types).next.RUST$ENCODED$ENUM$0$Empty.val)).value
+// gdbr-command:print long_cycle_w_anonymous_types.next.val.value
+// gdb-check:$20 = 31
+
+// gdb-command:continue
+
+#![allow(unused_variables)]
+#![feature(box_syntax)]
+#![feature(omit_gdb_pretty_printer_section)]
+#![omit_gdb_pretty_printer_section]
+
+use self::Opt::{Empty, Val};
+
+enum Opt<T> {
+    Empty,
+    Val { val: T }
+}
+
+struct UniqueNode<T> {
+    next: Opt<Box<UniqueNode<T>>>,
+    value: T
+}
+
+struct LongCycle1<T> {
+    next: Box<LongCycle2<T>>,
+    value: T,
+}
+
+struct LongCycle2<T> {
+    next: Box<LongCycle3<T>>,
+    value: T,
+}
+
+struct LongCycle3<T> {
+    next: Box<LongCycle4<T>>,
+    value: T,
+}
+
+struct LongCycle4<T> {
+    next: Option<Box<LongCycle1<T>>>,
+    value: T,
+}
+
+struct LongCycleWithAnonymousTypes {
+    next: Opt<Box<Box<Box<Box<Box<LongCycleWithAnonymousTypes>>>>>>,
+    value: usize,
+}
+
+// This test case makes sure that recursive structs are properly described. The Node structs are
+// generic so that we can have a new type (that newly needs to be described) for the different
+// cases. The potential problem with recursive types is that the DI generation algorithm gets
+// trapped in an endless loop. To make sure we actually test this in the different cases, we have
+// to operate on a new type each time; otherwise we would just hit the DI cache for all but the
+// first case.
+
+// The different cases below (stack_*, unique_*, box_*, etc) are set up so that the type description
+// algorithm will enter the type reference cycle that is created by a recursive definition from a
+// different context each time.
+
+// The "long cycle" cases are constructed to span a longer, indirect recursion cycle between types.
+// The different locals will cause the DI algorithm to enter the type reference cycle at different
+// points.
+
+fn main() {
+    let stack_unique: UniqueNode<u16> = UniqueNode {
+        next: Val {
+            val: box UniqueNode {
+                next: Empty,
+                value: 1,
+            }
+        },
+        value: 0,
+    };
+
+    let unique_unique: Box<UniqueNode<u32>> = box UniqueNode {
+        next: Val {
+            val: box UniqueNode {
+                next: Empty,
+                value: 3,
+            }
+        },
+        value: 2,
+    };
+
+    let vec_unique: [UniqueNode<f32>; 1] = [UniqueNode {
+        next: Val {
+            val: box UniqueNode {
+                next: Empty,
+                value: 7.5,
+            }
+        },
+        value: 6.5,
+    }];
+
+    let borrowed_unique: &UniqueNode<f64> = &UniqueNode {
+        next: Val {
+            val: box UniqueNode {
+                next: Empty,
+                value: 9.5,
+            }
+        },
+        value: 8.5,
+    };
+
+    // LONG CYCLE
+    let long_cycle1: LongCycle1<u16> = LongCycle1 {
+        next: box LongCycle2 {
+            next: box LongCycle3 {
+                next: box LongCycle4 {
+                    next: None,
+                    value: 23,
+                },
+                value: 22,
+            },
+            value: 21
+        },
+        value: 20
+    };
+
+    let long_cycle2: LongCycle2<u32> = LongCycle2 {
+        next: box LongCycle3 {
+            next: box LongCycle4 {
+                next: None,
+                value: 26,
+            },
+            value: 25,
+        },
+        value: 24
+    };
+
+    let long_cycle3: LongCycle3<u64> = LongCycle3 {
+        next: box LongCycle4 {
+            next: None,
+            value: 28,
+        },
+        value: 27,
+    };
+
+    let long_cycle4: LongCycle4<f32> = LongCycle4 {
+        next: None,
+        value: 29.5,
+    };
+
+    // It's important that LongCycleWithAnonymousTypes is encountered only at the end of the
+    // `box` chain.
+    let long_cycle_w_anonymous_types = box box box box box LongCycleWithAnonymousTypes {
+        next: Val {
+            val: box box box box box LongCycleWithAnonymousTypes {
+                next: Empty,
+                value: 31,
+            }
+        },
+        value: 30
+    };
+
+    zzz(); // #break
+}
+
+fn zzz() {()}
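
A hedged aside (not part of the patch, and not rustc's actual code): the comment above notes that the danger with recursive types is that debuginfo generation could loop forever. The usual fix, sketched below as a toy, is to cache a forward declaration for a type before descending into its fields, so a recursive reference finds the cache entry instead of recursing again.

    // Toy sketch only, not from the commit and not rustc's real implementation:
    // it just shows the cycle-breaking idea from the comment above.
    use std::collections::HashMap;

    struct TypeNode {
        name: &'static str,
        fields: Vec<&'static str>,
    }

    fn describe(
        name: &'static str,
        graph: &HashMap<&'static str, TypeNode>,
        cache: &mut HashMap<&'static str, String>,
    ) -> String {
        if let Some(done) = cache.get(name) {
            return done.clone(); // cycle (or repeat) hit: reuse what we have
        }
        cache.insert(name, format!("<fwd {}>", name)); // forward declaration first
        let node = &graph[name];
        let fields: Vec<String> = node
            .fields
            .iter()
            .map(|&f| describe(f, graph, cache))
            .collect();
        let full = format!("{} {{ {} }}", node.name, fields.join(", "));
        cache.insert(name, full.clone());
        full
    }

    fn main() {
        // UniqueNode -> Opt -> UniqueNode: a two-step cycle like in the test.
        let mut graph = HashMap::new();
        graph.insert("UniqueNode", TypeNode { name: "UniqueNode", fields: vec!["Opt"] });
        graph.insert("Opt", TypeNode { name: "Opt", fields: vec!["UniqueNode"] });

        let mut cache = HashMap::new();
        println!("{}", describe("UniqueNode", &graph, &mut cache));
    }
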
index 75c2feb480ede8f32d0aae9055d9a60a0890ac84..647f95197894e52546814dc5c961e383feccea70 100644 (file)
 
 // ignore-tidy-linelength
 // ignore-lldb
-// ignore-gdb-version: 7.11.90 - 7.12.9
+
+// Require LLVM with DW_TAG_variant_part and a gdb that can read it.
+// min-system-llvm-version: 7.0
+// min-gdb-version: 8.2
 
 // compile-flags:-g
 
 
 // gdb-command:print stack_unique.value
 // gdb-check:$1 = 0
-// gdbg-command:print stack_unique.next.RUST$ENCODED$ENUM$0$Empty.val->value
 // gdbr-command:print stack_unique.next.val.value
 // gdb-check:$2 = 1
 
-// gdbg-command:print unique_unique->value
 // gdbr-command:print unique_unique.value
 // gdb-check:$3 = 2
-// gdbg-command:print unique_unique->next.RUST$ENCODED$ENUM$0$Empty.val->value
 // gdbr-command:print unique_unique.next.val.value
 // gdb-check:$4 = 3
 
 // gdb-command:print vec_unique[0].value
 // gdb-check:$5 = 6.5
-// gdbg-command:print vec_unique[0].next.RUST$ENCODED$ENUM$0$Empty.val->value
 // gdbr-command:print vec_unique[0].next.val.value
 // gdb-check:$6 = 7.5
 
-// gdbg-command:print borrowed_unique->value
 // gdbr-command:print borrowed_unique.value
 // gdb-check:$7 = 8.5
-// gdbg-command:print borrowed_unique->next.RUST$ENCODED$ENUM$0$Empty.val->value
 // gdbr-command:print borrowed_unique.next.val.value
 // gdb-check:$8 = 9.5
 
 // LONG CYCLE
 // gdb-command:print long_cycle1.value
 // gdb-check:$9 = 20
-// gdbg-command:print long_cycle1.next->value
 // gdbr-command:print long_cycle1.next.value
 // gdb-check:$10 = 21
-// gdbg-command:print long_cycle1.next->next->value
 // gdbr-command:print long_cycle1.next.next.value
 // gdb-check:$11 = 22
-// gdbg-command:print long_cycle1.next->next->next->value
 // gdbr-command:print long_cycle1.next.next.next.value
 // gdb-check:$12 = 23
 
 // gdb-command:print long_cycle2.value
 // gdb-check:$13 = 24
-// gdbg-command:print long_cycle2.next->value
 // gdbr-command:print long_cycle2.next.value
 // gdb-check:$14 = 25
-// gdbg-command:print long_cycle2.next->next->value
 // gdbr-command:print long_cycle2.next.next.value
 // gdb-check:$15 = 26
 
 // gdb-command:print long_cycle3.value
 // gdb-check:$16 = 27
-// gdbg-command:print long_cycle3.next->value
 // gdbr-command:print long_cycle3.next.value
 // gdb-check:$17 = 28
 
 // gdb-command:print long_cycle4.value
 // gdb-check:$18 = 29.5
 
-// gdbg-command:print (*****long_cycle_w_anonymous_types).value
 // gdbr-command:print long_cycle_w_anonymous_types.value
 // gdb-check:$19 = 30
 
-// gdbg-command:print (*****((*****long_cycle_w_anonymous_types).next.RUST$ENCODED$ENUM$0$Empty.val)).value
 // gdbr-command:print long_cycle_w_anonymous_types.next.val.value
 // gdb-check:$20 = 31
 
diff --git a/src/test/debuginfo/struct-style-enum-legacy.rs b/src/test/debuginfo/struct-style-enum-legacy.rs
new file mode 100644 (file)
index 0000000..fd2c6fa
--- /dev/null
@@ -0,0 +1,115 @@
+// Copyright 2013-2014 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// ignore-tidy-linelength
+// min-lldb-version: 310
+
+// As long as LLVM 5 and LLVM 6 are supported, we want to test the
+// enum debuginfo fallback mode.  Once those are desupported, this
+// test can be removed, as there is another (non-"legacy") test that
+// tests the new mode.
+// ignore-llvm-version: 7.0 - 9.9.9
+// ignore-gdb-version: 7.11.90 - 7.12.9
+// ignore-gdb-version: 8.2 - 9.9
+
+// compile-flags:-g
+
+// === GDB TESTS ===================================================================================
+
+// gdb-command:set print union on
+// gdb-command:run
+
+// gdb-command:print case1
+// gdbg-check:$1 = {{RUST$ENUM$DISR = Case1, a = 0, b = 31868, c = 31868, d = 31868, e = 31868}, {RUST$ENUM$DISR = Case1, [...]}, {RUST$ENUM$DISR = Case1, [...]}}
+// gdbr-check:$1 = struct_style_enum_legacy::Regular::Case1{a: 0, b: 31868, c: 31868, d: 31868, e: 31868}
+
+// gdb-command:print case2
+// gdbg-check:$2 = {{RUST$ENUM$DISR = Case2, [...]}, {RUST$ENUM$DISR = Case2, a = 0, b = 286331153, c = 286331153}, {RUST$ENUM$DISR = Case2, [...]}}
+// gdbr-check:$2 = struct_style_enum_legacy::Regular::Case2{a: 0, b: 286331153, c: 286331153}
+
+// gdb-command:print case3
+// gdbg-check:$3 = {{RUST$ENUM$DISR = Case3, [...]}, {RUST$ENUM$DISR = Case3, [...]}, {RUST$ENUM$DISR = Case3, a = 0, b = 6438275382588823897}}
+// gdbr-check:$3 = struct_style_enum_legacy::Regular::Case3{a: 0, b: 6438275382588823897}
+
+// gdb-command:print univariant
+// gdbg-check:$4 = {{a = -1}}
+// gdbr-check:$4 = struct_style_enum_legacy::Univariant::TheOnlyCase{a: -1}
+
+
+// === LLDB TESTS ==================================================================================
+
+// lldb-command:run
+
+// lldb-command:print case1
+// lldbg-check:[...]$0 = Case1 { a: 0, b: 31868, c: 31868, d: 31868, e: 31868 }
+// lldbr-check:(struct_style_enum_legacy::Regular::Case1) case1 = { a = 0 b = 31868 c = 31868 d = 31868 e = 31868 }
+
+// lldb-command:print case2
+// lldbg-check:[...]$1 = Case2 { a: 0, b: 286331153, c: 286331153 }
+// lldbr-check:(struct_style_enum_legacy::Regular::Case2) case2 = Case2 { struct_style_enum_legacy::Regular::Case1: 0, struct_style_enum_legacy::Regular::Case2: 286331153, struct_style_enum_legacy::Regular::Case3: 286331153 }
+
+// lldb-command:print case3
+// lldbg-check:[...]$2 = Case3 { a: 0, b: 6438275382588823897 }
+// lldbr-check:(struct_style_enum_legacy::Regular::Case3) case3 = Case3 { struct_style_enum_legacy::Regular::Case1: 0, struct_style_enum_legacy::Regular::Case2: 6438275382588823897 }
+
+// lldb-command:print univariant
+// lldbg-check:[...]$3 = TheOnlyCase { a: -1 }
+// lldbr-check:(struct_style_enum_legacy::Univariant) univariant = Univariant { struct_style_enum_legacy::TheOnlyCase: TheOnlyCase { a: -1 } }
+
+#![allow(unused_variables)]
+#![feature(omit_gdb_pretty_printer_section)]
+#![omit_gdb_pretty_printer_section]
+
+use self::Regular::{Case1, Case2, Case3};
+use self::Univariant::TheOnlyCase;
+
+// The first element is to ensure proper alignment, irrespective of the machine's word size. Since
+// the size of the discriminant value is machine dependent, this has to be taken into account when
+// the datatype layout should be predictable, as in this case.
+enum Regular {
+    Case1 { a: u64, b: u16, c: u16, d: u16, e: u16},
+    Case2 { a: u64, b: u32, c: u32},
+    Case3 { a: u64, b: u64 }
+}
+
+enum Univariant {
+    TheOnlyCase { a: i64 }
+}
+
+fn main() {
+
+    // In order to avoid endianness trouble all of the following test values consist of a single
+    // repeated byte. This way each interpretation of the union should look the same, no matter if
+    // this is a big or little endian machine.
+
+    // 0b0111110001111100011111000111110001111100011111000111110001111100 = 8970181431921507452
+    // 0b01111100011111000111110001111100 = 2088533116
+    // 0b0111110001111100 = 31868
+    // 0b01111100 = 124
+    let case1 = Case1 { a: 0, b: 31868, c: 31868, d: 31868, e: 31868 };
+
+    // 0b0001000100010001000100010001000100010001000100010001000100010001 = 1229782938247303441
+    // 0b00010001000100010001000100010001 = 286331153
+    // 0b0001000100010001 = 4369
+    // 0b00010001 = 17
+    let case2 = Case2 { a: 0, b: 286331153, c: 286331153 };
+
+    // 0b0101100101011001010110010101100101011001010110010101100101011001 = 6438275382588823897
+    // 0b01011001010110010101100101011001 = 1499027801
+    // 0b0101100101011001 = 22873
+    // 0b01011001 = 89
+    let case3 = Case3 { a: 0, b: 6438275382588823897 };
+
+    let univariant = TheOnlyCase { a: -1 };
+
+    zzz(); // #break
+}
+
+fn zzz() {()}
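
The legacy test above builds every multi-byte test value out of a single repeated byte so that the raw memory looks identical on big- and little-endian machines. A minimal standalone sketch of that property, not part of this diff and using only the stable `swap_bytes` method:

fn main() {
    // 0b0111110001111100 = 31868: both bytes are 0b01111100 (0x7C), so
    // reversing the byte order leaves the value unchanged.
    let v: u16 = 0b0111110001111100;
    assert_eq!(v, 0x7C7C);
    assert_eq!(v, v.swap_bytes());

    // The same holds for the wider values, e.g. the u32 pattern used in Case2.
    let w: u32 = 0b00010001000100010001000100010001;
    assert_eq!(w, 286331153);
    assert_eq!(w, w.swap_bytes());
}
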
index 36cd85fb4dc6503945c48f94bd5ad0026ad917fd..722ca00e0488908a64c70e6bdeb8c0f2b6d18f6f 100644 (file)
@@ -9,8 +9,12 @@
 // except according to those terms.
 
 // ignore-tidy-linelength
-// min-lldb-version: 310
-// ignore-gdb-version: 7.11.90 - 7.12.9
+
+// Require LLVM with DW_TAG_variant_part and a gdb and lldb that can
+// read it.
+// min-system-llvm-version: 7.0
+// min-gdb-version: 8.2
+// rust-lldb
 
 // compile-flags:-g
 
 // gdb-command:run
 
 // gdb-command:print case1
-// gdbg-check:$1 = {{RUST$ENUM$DISR = Case1, a = 0, b = 31868, c = 31868, d = 31868, e = 31868}, {RUST$ENUM$DISR = Case1, [...]}, {RUST$ENUM$DISR = Case1, [...]}}
 // gdbr-check:$1 = struct_style_enum::Regular::Case1{a: 0, b: 31868, c: 31868, d: 31868, e: 31868}
 
 // gdb-command:print case2
-// gdbg-check:$2 = {{RUST$ENUM$DISR = Case2, [...]}, {RUST$ENUM$DISR = Case2, a = 0, b = 286331153, c = 286331153}, {RUST$ENUM$DISR = Case2, [...]}}
 // gdbr-check:$2 = struct_style_enum::Regular::Case2{a: 0, b: 286331153, c: 286331153}
 
 // gdb-command:print case3
-// gdbg-check:$3 = {{RUST$ENUM$DISR = Case3, [...]}, {RUST$ENUM$DISR = Case3, [...]}, {RUST$ENUM$DISR = Case3, a = 0, b = 6438275382588823897}}
 // gdbr-check:$3 = struct_style_enum::Regular::Case3{a: 0, b: 6438275382588823897}
 
 // gdb-command:print univariant
-// gdbg-check:$4 = {{a = -1}}
 // gdbr-check:$4 = struct_style_enum::Univariant::TheOnlyCase{a: -1}
 
 
 // lldb-command:run
 
 // lldb-command:print case1
-// lldbg-check:[...]$0 = Case1 { a: 0, b: 31868, c: 31868, d: 31868, e: 31868 }
 // lldbr-check:(struct_style_enum::Regular::Case1) case1 = { a = 0 b = 31868 c = 31868 d = 31868 e = 31868 }
 
 // lldb-command:print case2
-// lldbg-check:[...]$1 = Case2 { a: 0, b: 286331153, c: 286331153 }
-// lldbr-check:(struct_style_enum::Regular::Case2) case2 = Case2 { struct_style_enum::Regular::Case1: 0, struct_style_enum::Regular::Case2: 286331153, struct_style_enum::Regular::Case3: 286331153 }
+// lldbr-check:(struct_style_enum::Regular::Case2) case2 = Case2 { Case1: 0, Case2: 286331153, Case3: 286331153 }
 
 // lldb-command:print case3
-// lldbg-check:[...]$2 = Case3 { a: 0, b: 6438275382588823897 }
-// lldbr-check:(struct_style_enum::Regular::Case3) case3 = Case3 { struct_style_enum::Regular::Case1: 0, struct_style_enum::Regular::Case2: 6438275382588823897 }
+// lldbr-check:(struct_style_enum::Regular::Case3) case3 = Case3 { Case1: 0, Case2: 6438275382588823897 }
 
 // lldb-command:print univariant
-// lldbg-check:[...]$3 = TheOnlyCase { a: -1 }
-// lldbr-check:(struct_style_enum::Univariant) univariant = Univariant { struct_style_enum::TheOnlyCase: TheOnlyCase { a: -1 } }
+// lldbr-check:(struct_style_enum::Univariant) univariant = Univariant { TheOnlyCase: TheOnlyCase { a: -1 } }
 
 #![allow(unused_variables)]
 #![feature(omit_gdb_pretty_printer_section)]
diff --git a/src/test/debuginfo/tuple-style-enum-legacy.rs b/src/test/debuginfo/tuple-style-enum-legacy.rs
new file mode 100644 (file)
index 0000000..e33f6db
--- /dev/null
@@ -0,0 +1,115 @@
+// Copyright 2013-2014 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// ignore-tidy-linelength
+// min-lldb-version: 310
+
+// As long as LLVM 5 and LLVM 6 are supported, we want to test the
+// enum debuginfo fallback mode.  Once those are desupported, this
+// test can be removed, as there is another (non-"legacy") test that
+// tests the new mode.
+// ignore-llvm-version: 7.0 - 9.9.9
+// ignore-gdb-version: 7.11.90 - 7.12.9
+// ignore-gdb-version: 8.2 - 9.9
+
+// compile-flags:-g
+
+// === GDB TESTS ===================================================================================
+
+// gdb-command:set print union on
+// gdb-command:run
+
+// gdb-command:print case1
+// gdbg-check:$1 = {{RUST$ENUM$DISR = Case1, __0 = 0, __1 = 31868, __2 = 31868, __3 = 31868, __4 = 31868}, {RUST$ENUM$DISR = Case1, [...]}, {RUST$ENUM$DISR = Case1, [...]}}
+// gdbr-check:$1 = tuple_style_enum_legacy::Regular::Case1(0, 31868, 31868, 31868, 31868)
+
+// gdb-command:print case2
+// gdbg-check:$2 = {{RUST$ENUM$DISR = Case2, [...]}, {RUST$ENUM$DISR = Case2, __0 = 0, __1 = 286331153, __2 = 286331153}, {RUST$ENUM$DISR = Case2, [...]}}
+// gdbr-check:$2 = tuple_style_enum_legacy::Regular::Case2(0, 286331153, 286331153)
+
+// gdb-command:print case3
+// gdbg-check:$3 = {{RUST$ENUM$DISR = Case3, [...]}, {RUST$ENUM$DISR = Case3, [...]}, {RUST$ENUM$DISR = Case3, __0 = 0, __1 = 6438275382588823897}}
+// gdbr-check:$3 = tuple_style_enum_legacy::Regular::Case3(0, 6438275382588823897)
+
+// gdb-command:print univariant
+// gdbg-check:$4 = {{__0 = -1}}
+// gdbr-check:$4 = tuple_style_enum_legacy::Univariant::TheOnlyCase(-1)
+
+
+// === LLDB TESTS ==================================================================================
+
+// lldb-command:run
+
+// lldb-command:print case1
+// lldbg-check:[...]$0 = Case1(0, 31868, 31868, 31868, 31868)
+// lldbr-check:(tuple_style_enum_legacy::Regular::Case1) case1 = { = 0 = 31868 = 31868 = 31868 = 31868 }
+
+// lldb-command:print case2
+// lldbg-check:[...]$1 = Case2(0, 286331153, 286331153)
+// lldbr-check:(tuple_style_enum_legacy::Regular::Case2) case2 = Case2 { tuple_style_enum_legacy::Regular::Case1: 0, tuple_style_enum_legacy::Regular::Case2: 286331153, tuple_style_enum_legacy::Regular::Case3: 286331153 }
+
+// lldb-command:print case3
+// lldbg-check:[...]$2 = Case3(0, 6438275382588823897)
+// lldbr-check:(tuple_style_enum_legacy::Regular::Case3) case3 = Case3 { tuple_style_enum_legacy::Regular::Case1: 0, tuple_style_enum_legacy::Regular::Case2: 6438275382588823897 }
+
+// lldb-command:print univariant
+// lldbg-check:[...]$3 = TheOnlyCase(-1)
+// lldbr-check:(tuple_style_enum_legacy::Univariant) univariant = { tuple_style_enum_legacy::TheOnlyCase = { = -1 } }
+
+#![allow(unused_variables)]
+#![feature(omit_gdb_pretty_printer_section)]
+#![omit_gdb_pretty_printer_section]
+
+use self::Regular::{Case1, Case2, Case3};
+use self::Univariant::TheOnlyCase;
+
+// The first element is to ensure proper alignment, irrespective of the machine's word size. Since
+// the size of the discriminant value is machine dependent, this has to be taken into account when
+// the datatype layout should be predictable, as in this case.
+enum Regular {
+    Case1(u64, u16, u16, u16, u16),
+    Case2(u64, u32, u32),
+    Case3(u64, u64)
+}
+
+enum Univariant {
+    TheOnlyCase(i64)
+}
+
+fn main() {
+
+    // In order to avoid endianness trouble all of the following test values consist of a single
+    // repeated byte. This way each interpretation of the union should look the same, no matter if
+    // this is a big or little endian machine.
+
+    // 0b0111110001111100011111000111110001111100011111000111110001111100 = 8970181431921507452
+    // 0b01111100011111000111110001111100 = 2088533116
+    // 0b0111110001111100 = 31868
+    // 0b01111100 = 124
+    let case1 = Case1(0, 31868, 31868, 31868, 31868);
+
+    // 0b0001000100010001000100010001000100010001000100010001000100010001 = 1229782938247303441
+    // 0b00010001000100010001000100010001 = 286331153
+    // 0b0001000100010001 = 4369
+    // 0b00010001 = 17
+    let case2 = Case2(0, 286331153, 286331153);
+
+    // 0b0101100101011001010110010101100101011001010110010101100101011001 = 6438275382588823897
+    // 0b01011001010110010101100101011001 = 1499027801
+    // 0b0101100101011001 = 22873
+    // 0b01011001 = 89
+    let case3 = Case3(0, 6438275382588823897);
+
+    let univariant = TheOnlyCase(-1);
+
+    zzz(); // #break
+}
+
+fn zzz() {()}
index 682e74601b0a11b7f15f2a3357ec22e175dd8c66..d976839f08c1135ae740d977c34f18a1787d5ea6 100644 (file)
@@ -9,8 +9,12 @@
 // except according to those terms.
 
 // ignore-tidy-linelength
-// min-lldb-version: 310
-// ignore-gdb-version: 7.11.90 - 7.12.9
+
+// Require LLVM with DW_TAG_variant_part and a gdb and lldb that can
+// read it.
+// min-system-llvm-version: 7.0
+// min-gdb-version: 8.2
+// rust-lldb
 
 // compile-flags:-g
 
 // gdb-command:run
 
 // gdb-command:print case1
-// gdbg-check:$1 = {{RUST$ENUM$DISR = Case1, __0 = 0, __1 = 31868, __2 = 31868, __3 = 31868, __4 = 31868}, {RUST$ENUM$DISR = Case1, [...]}, {RUST$ENUM$DISR = Case1, [...]}}
 // gdbr-check:$1 = tuple_style_enum::Regular::Case1(0, 31868, 31868, 31868, 31868)
 
 // gdb-command:print case2
-// gdbg-check:$2 = {{RUST$ENUM$DISR = Case2, [...]}, {RUST$ENUM$DISR = Case2, __0 = 0, __1 = 286331153, __2 = 286331153}, {RUST$ENUM$DISR = Case2, [...]}}
 // gdbr-check:$2 = tuple_style_enum::Regular::Case2(0, 286331153, 286331153)
 
 // gdb-command:print case3
-// gdbg-check:$3 = {{RUST$ENUM$DISR = Case3, [...]}, {RUST$ENUM$DISR = Case3, [...]}, {RUST$ENUM$DISR = Case3, __0 = 0, __1 = 6438275382588823897}}
 // gdbr-check:$3 = tuple_style_enum::Regular::Case3(0, 6438275382588823897)
 
 // gdb-command:print univariant
-// gdbg-check:$4 = {{__0 = -1}}
 // gdbr-check:$4 = tuple_style_enum::Univariant::TheOnlyCase(-1)
 
 
 // lldb-command:run
 
 // lldb-command:print case1
-// lldbg-check:[...]$0 = Case1(0, 31868, 31868, 31868, 31868)
 // lldbr-check:(tuple_style_enum::Regular::Case1) case1 = { = 0 = 31868 = 31868 = 31868 = 31868 }
 
 // lldb-command:print case2
-// lldbg-check:[...]$1 = Case2(0, 286331153, 286331153)
-// lldbr-check:(tuple_style_enum::Regular::Case2) case2 = Case2 { tuple_style_enum::Regular::Case1: 0, tuple_style_enum::Regular::Case2: 286331153, tuple_style_enum::Regular::Case3: 286331153 }
+// lldbr-check:(tuple_style_enum::Regular::Case2) case2 = Case2 { Case1: 0, Case2: 286331153, Case3: 286331153 }
 
 // lldb-command:print case3
-// lldbg-check:[...]$2 = Case3(0, 6438275382588823897)
-// lldbr-check:(tuple_style_enum::Regular::Case3) case3 = Case3 { tuple_style_enum::Regular::Case1: 0, tuple_style_enum::Regular::Case2: 6438275382588823897 }
+// lldbr-check:(tuple_style_enum::Regular::Case3) case3 = Case3 { Case1: 0, Case2: 6438275382588823897 }
 
 // lldb-command:print univariant
-// lldbg-check:[...]$3 = TheOnlyCase(-1)
-// lldbr-check:(tuple_style_enum::Univariant) univariant = { tuple_style_enum::TheOnlyCase = { = -1 } }
+// lldbr-check:(tuple_style_enum::Univariant) univariant = { TheOnlyCase = { = -1 } }
 
 #![allow(unused_variables)]
 #![feature(omit_gdb_pretty_printer_section)]
diff --git a/src/test/debuginfo/unique-enum-legacy.rs b/src/test/debuginfo/unique-enum-legacy.rs
new file mode 100644 (file)
index 0000000..91fece3
--- /dev/null
@@ -0,0 +1,98 @@
+// Copyright 2013-2014 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// ignore-tidy-linelength
+// min-lldb-version: 310
+
+// As long as LLVM 5 and LLVM 6 are supported, we want to test the
+// enum debuginfo fallback mode.  Once those are desupported, this
+// test can be removed, as there is another (non-"legacy") test that
+// tests the new mode.
+// ignore-llvm-version: 7.0 - 9.9.9
+// ignore-gdb-version: 7.11.90 - 7.12.9
+// ignore-gdb-version: 8.2 - 9.9
+
+// compile-flags:-g
+
+// === GDB TESTS ===================================================================================
+
+// gdb-command:run
+
+// gdb-command:print *the_a
+// gdbg-check:$1 = {{RUST$ENUM$DISR = TheA, x = 0, y = 8970181431921507452}, {RUST$ENUM$DISR = TheA, [...]}}
+// gdbr-check:$1 = unique_enum_legacy::ABC::TheA{x: 0, y: 8970181431921507452}
+
+// gdb-command:print *the_b
+// gdbg-check:$2 = {{RUST$ENUM$DISR = TheB, [...]}, {RUST$ENUM$DISR = TheB, __0 = 0, __1 = 286331153, __2 = 286331153}}
+// gdbr-check:$2 = unique_enum_legacy::ABC::TheB(0, 286331153, 286331153)
+
+// gdb-command:print *univariant
+// gdbg-check:$3 = {{__0 = 123234}}
+// gdbr-check:$3 = unique_enum_legacy::Univariant::TheOnlyCase(123234)
+
+
+// === LLDB TESTS ==================================================================================
+
+// lldb-command:run
+
+// lldb-command:print *the_a
+// lldbg-check:[...]$0 = TheA { x: 0, y: 8970181431921507452 }
+// lldbr-check:(unique_enum_legacy::ABC::TheA) *the_a = TheA { unique_enum_legacy::ABC::TheA: 0, unique_enum_legacy::ABC::TheB: 8970181431921507452 }
+
+// lldb-command:print *the_b
+// lldbg-check:[...]$1 = TheB(0, 286331153, 286331153)
+// lldbr-check:(unique_enum_legacy::ABC::TheB) *the_b = { = 0 = 286331153 = 286331153 }
+
+// lldb-command:print *univariant
+// lldbg-check:[...]$2 = TheOnlyCase(123234)
+// lldbr-check:(unique_enum_legacy::Univariant) *univariant = { unique_enum_legacy::TheOnlyCase = { = 123234 } }
+
+#![allow(unused_variables)]
+#![feature(box_syntax)]
+#![feature(omit_gdb_pretty_printer_section)]
+#![omit_gdb_pretty_printer_section]
+
+// The first element is to ensure proper alignment, irrespective of the machine's word size. Since
+// the size of the discriminant value is machine dependent, this has to be taken into account when
+// the datatype layout should be predictable, as in this case.
+enum ABC {
+    TheA { x: i64, y: i64 },
+    TheB (i64, i32, i32),
+}
+
+// This is a special case since it does not have the implicit discriminant field.
+enum Univariant {
+    TheOnlyCase(i64)
+}
+
+fn main() {
+
+    // In order to avoid endianness trouble all of the following test values consist of a single
+    // repeated byte. This way each interpretation of the union should look the same, no matter if
+    // this is a big or little endian machine.
+
+    // 0b0111110001111100011111000111110001111100011111000111110001111100 = 8970181431921507452
+    // 0b01111100011111000111110001111100 = 2088533116
+    // 0b0111110001111100 = 31868
+    // 0b01111100 = 124
+    let the_a: Box<_> = box ABC::TheA { x: 0, y: 8970181431921507452 };
+
+    // 0b0001000100010001000100010001000100010001000100010001000100010001 = 1229782938247303441
+    // 0b00010001000100010001000100010001 = 286331153
+    // 0b0001000100010001 = 4369
+    // 0b00010001 = 17
+    let the_b: Box<_> = box ABC::TheB (0, 286331153, 286331153);
+
+    let univariant: Box<_> = box Univariant::TheOnlyCase(123234);
+
+    zzz(); // #break
+}
+
+fn zzz() {()}
index 6b62c304513944ea0ea0f7abd220493c0258d084..aab8edc55f74c108c2c07db5f4745165e5df27d2 100644 (file)
@@ -9,8 +9,12 @@
 // except according to those terms.
 
 // ignore-tidy-linelength
-// min-lldb-version: 310
-// ignore-gdb-version: 7.11.90 - 7.12.9
+
+// Require LLVM with DW_TAG_variant_part and a gdb and lldb that can
+// read it.
+// min-system-llvm-version: 7.0
+// min-gdb-version: 8.2
+// rust-lldb
 
 // compile-flags:-g
 
 // gdb-command:run
 
 // gdb-command:print *the_a
-// gdbg-check:$1 = {{RUST$ENUM$DISR = TheA, x = 0, y = 8970181431921507452}, {RUST$ENUM$DISR = TheA, [...]}}
 // gdbr-check:$1 = unique_enum::ABC::TheA{x: 0, y: 8970181431921507452}
 
 // gdb-command:print *the_b
-// gdbg-check:$2 = {{RUST$ENUM$DISR = TheB, [...]}, {RUST$ENUM$DISR = TheB, __0 = 0, __1 = 286331153, __2 = 286331153}}
 // gdbr-check:$2 = unique_enum::ABC::TheB(0, 286331153, 286331153)
 
 // gdb-command:print *univariant
-// gdbg-check:$3 = {{__0 = 123234}}
 // gdbr-check:$3 = unique_enum::Univariant::TheOnlyCase(123234)
 
 
 // lldb-command:run
 
 // lldb-command:print *the_a
-// lldbg-check:[...]$0 = TheA { x: 0, y: 8970181431921507452 }
-// lldbr-check:(unique_enum::ABC::TheA) *the_a = TheA { unique_enum::ABC::TheA: 0, unique_enum::ABC::TheB: 8970181431921507452 }
+// lldbr-check:(unique_enum::ABC::TheA) *the_a = TheA { TheA: 0, TheB: 8970181431921507452 }
 
 // lldb-command:print *the_b
-// lldbg-check:[...]$1 = TheB(0, 286331153, 286331153)
 // lldbr-check:(unique_enum::ABC::TheB) *the_b = { = 0 = 286331153 = 286331153 }
 
 // lldb-command:print *univariant
-// lldbg-check:[...]$2 = TheOnlyCase(123234)
-// lldbr-check:(unique_enum::Univariant) *univariant = { unique_enum::TheOnlyCase = { = 123234 } }
+// lldbr-check:(unique_enum::Univariant) *univariant = { TheOnlyCase = { = 123234 } }
 
 #![allow(unused_variables)]
 #![feature(box_syntax)]
index 39bf0c175ebcb7ea41eb7401ee965b3d42888f5a..39267edaac04566b535657a5ec1920928b7a881f 100644 (file)
@@ -99,7 +99,7 @@
 
 // lldb-command:print padded_tuple
 // lldbg-check:[...]$4 = &[(6, 7), (8, 9)]
-// lldbr-check:(&[(i32, i16)]) padded_tuple = { data_ptr = *0x555555554ff0 length = 2 }
+// lldbr-check:(&[(i32, i16)]) padded_tuple = { data_ptr = *0x555555555030 length = 2 }
 
 // lldb-command:print padded_struct
 // lldbg-check:[...]$5 = &[AStruct { x: 10, y: 11, z: 12 }, AStruct { x: 13, y: 14, z: 15 }]
index 7ca1d01f20b7b2787505df39c8a6dc6c330e11c3..75f19d133e0e92cde1234a13865389f6c7da5132 100644 (file)
@@ -56,7 +56,7 @@ fn main() {
 //        StorageLive(_4);
 //        _4 = std::option::Option<std::boxed::Box<u32>>::None;
 //        FakeRead(ForLet, _4);
-//        AscribeUserType(_4, o, UserTypeProjection { base: Ty(Canonical { variables: [], value: std::option::Option<std::boxed::Box<u32>> }), projs: [] });
+//        AscribeUserType(_4, o, UserTypeProjection { base: Ty(Canonical { max_universe: U0, variables: [], value: std::option::Option<std::boxed::Box<u32>> }), projs: [] });
 //        StorageLive(_5);
 //        StorageLive(_6);
 //        _6 = move _4;
diff --git a/src/test/mir-opt/inline-retag.rs b/src/test/mir-opt/inline-retag.rs
new file mode 100644 (file)
index 0000000..4b3280e
--- /dev/null
@@ -0,0 +1,45 @@
+// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// compile-flags: -Z span_free_formats -Z mir-emit-retag
+
+// Tests that the MIR inliner fixes up `Retag`'s `fn_entry` flag
+
+fn main() {
+    println!("{}", bar());
+}
+
+#[inline(always)]
+fn foo(x: &i32, y: &i32) -> bool {
+    *x == *y
+}
+
+fn bar() -> bool {
+    let f = foo;
+    f(&1, &-1)
+}
+
+// END RUST SOURCE
+// START rustc.bar.Inline.after.mir
+// ...
+//     bb0: {
+//         ...
+//         Retag(_3);
+//         Retag(_6);
+//         StorageLive(_9);
+//         _9 = (*_3);
+//         StorageLive(_10);
+//         _10 = (*_6);
+//         _0 = Eq(move _9, move _10);
+//         ...
+//         return;
+//     }
+// ...
+// END rustc.bar.Inline.after.mir
diff --git a/src/test/mir-opt/retag.rs b/src/test/mir-opt/retag.rs
new file mode 100644 (file)
index 0000000..9c01300
--- /dev/null
@@ -0,0 +1,106 @@
+// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// ignore-tidy-linelength
+// compile-flags: -Z mir-emit-retag -Z mir-opt-level=0 -Z span_free_formats
+
+#![allow(unused)]
+
+struct Test(i32);
+
+impl Test {
+    // Make sure we run the pass on a method, not just on bare functions.
+    fn foo<'x>(&self, x: &'x mut i32) -> &'x mut i32 { x }
+    fn foo_shr<'x>(&self, x: &'x i32) -> &'x i32 { x }
+}
+
+fn main() {
+    let mut x = 0;
+    {
+        let v = Test(0).foo(&mut x); // just making sure we do not panic when there is a tuple struct ctor
+        let w = { v }; // assignment
+        let _w = w; // reborrow
+    }
+
+    // Also test closures
+    let c: fn(&i32) -> &i32 = |x: &i32| -> &i32 { let _y = x; x };
+    let _w = c(&x);
+
+    // need to call `foo_shr` or it doesn't even get generated
+    Test(0).foo_shr(&0);
+}
+
+// END RUST SOURCE
+// START rustc.{{impl}}-foo.EraseRegions.after.mir
+//     bb0: {
+//         Retag([fn entry] _1);
+//         Retag([fn entry] _2);
+//         ...
+//         _0 = &mut (*_3);
+//         ...
+//         return;
+//     }
+// END rustc.{{impl}}-foo.EraseRegions.after.mir
+// START rustc.{{impl}}-foo_shr.EraseRegions.after.mir
+//     bb0: {
+//         Retag([fn entry] _1);
+//         Retag([fn entry] _2);
+//         ...
+//         _0 = _2;
+//         Retag(_0);
+//         ...
+//         return;
+//     }
+// END rustc.{{impl}}-foo_shr.EraseRegions.after.mir
+// START rustc.main.EraseRegions.after.mir
+// fn main() -> () {
+//     ...
+//     bb0: {
+//         ...
+//         _3 = const Test::foo(move _4, move _6) -> bb1;
+//     }
+//
+//     bb1: {
+//         Retag(_3);
+//         ...
+//         _9 = move _3;
+//         Retag(_9);
+//         _8 = &mut (*_9);
+//         StorageDead(_9);
+//         StorageLive(_10);
+//         _10 = move _8;
+//         Retag(_10);
+//         ...
+//         _13 = move _14(move _15) -> bb2;
+//     }
+//
+//     bb2: {
+//         Retag(_13);
+//         ...
+//     }
+//     ...
+// }
+// END rustc.main.EraseRegions.after.mir
+// START rustc.main-{{closure}}.EraseRegions.after.mir
+// fn main::{{closure}}(_1: &[closure@NodeId(117)], _2: &i32) -> &i32 {
+//     ...
+//     bb0: {
+//         Retag([fn entry] _1);
+//         Retag([fn entry] _2);
+//         StorageLive(_3);
+//         _3 = _2;
+//         Retag(_3);
+//         _0 = _2;
+//         Retag(_0);
+//         StorageDead(_3);
+//         return;
+//     }
+// }
+// END rustc.main-{{closure}}.EraseRegions.after.mir
diff --git a/src/test/mir-opt/validate_1.rs b/src/test/mir-opt/validate_1.rs
deleted file mode 100644 (file)
index f154496..0000000
+++ /dev/null
@@ -1,76 +0,0 @@
-// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-// ignore-tidy-linelength
-// compile-flags: -Z verbose -Z mir-emit-validate=1 -Z span_free_formats
-
-struct Test(i32);
-
-impl Test {
-    // Make sure we run the pass on a method, not just on bare functions.
-    fn foo(&self, _x: &mut i32) {}
-}
-
-fn main() {
-    let mut x = 0;
-    Test(0).foo(&mut x); // just making sure we do not panic when there is a tuple struct ctor
-
-    // Also test closures
-    let c = |x: &mut i32| { let y = &*x; *y };
-    c(&mut x);
-}
-
-// END RUST SOURCE
-// START rustc.{{impl}}-foo.EraseRegions.after.mir
-//     bb0: {
-//         Validate(Acquire, [_1: &ReFree(DefId(0/0:5 ~ validate_1[317d]::{{impl}}[0]::foo[0]), BrAnon(0)) Test, _2: &ReFree(DefId(0/0:5 ~ validate_1[317d]::{{impl}}[0]::foo[0]), BrAnon(1)) mut i32]);
-//         ...
-//         return;
-//     }
-// END rustc.{{impl}}-foo.EraseRegions.after.mir
-// START rustc.main.EraseRegions.after.mir
-// fn main() -> () {
-//     ...
-//     bb0: {
-//         ...
-//         Validate(Suspend(ReScope(Node(ItemLocalId(13)))), [_1: i32]);
-//         _6 = &ReErased mut _1;
-//         Validate(Acquire, [(*_6): i32/ReScope(Node(ItemLocalId(13)))]);
-//         Validate(Suspend(ReScope(Node(ItemLocalId(13)))), [(*_6): i32/ReScope(Node(ItemLocalId(13)))]);
-//         _5 = &ReErased mut (*_6);
-//         Validate(Acquire, [(*_5): i32/ReScope(Node(ItemLocalId(13)))]);
-//         Validate(Release, [_2: (), _3: &ReScope(Node(ItemLocalId(13))) Test, _5: &ReScope(Node(ItemLocalId(13))) mut i32]);
-//         _2 = const Test::foo(move _3, move _5) -> bb1;
-//     }
-//
-//     bb1: {
-//         Validate(Acquire, [_2: ()]);
-//         EndRegion(ReScope(Node(ItemLocalId(13))));
-//         ...
-//         return;
-//     }
-// }
-// END rustc.main.EraseRegions.after.mir
-// START rustc.main-{{closure}}.EraseRegions.after.mir
-// fn main::{{closure}}(_1: &ReErased [closure@NodeId(65)], _2: &ReErased mut i32) -> i32 {
-//     ...
-//     bb0: {
-//         Validate(Acquire, [_1: &ReFree(DefId(0/1:11 ~ validate_1[317d]::main[0]::{{closure}}[0]), BrEnv) [closure@NodeId(65)], _2: &ReFree(DefId(0/1:11 ~ validate_1[317d]::main[0]::{{closure}}[0]), BrAnon(0)) mut i32]);
-//         StorageLive(_3);
-//         Validate(Suspend(ReScope(Remainder { block: ItemLocalId(31), first_statement_index: 0 })), [(*_2): i32]);
-//         _3 = &ReErased (*_2);
-//         Validate(Acquire, [(*_3): i32/ReScope(Remainder { block: ItemLocalId(31), first_statement_index: 0 }) (imm)]);
-//         _0 = (*_3);
-//         EndRegion(ReScope(Remainder { block: ItemLocalId(31), first_statement_index: 0 }));
-//         StorageDead(_3);
-//         return;
-//     }
-// }
-// END rustc.main-{{closure}}.EraseRegions.after.mir
diff --git a/src/test/mir-opt/validate_2.rs b/src/test/mir-opt/validate_2.rs
deleted file mode 100644 (file)
index 3776a11..0000000
+++ /dev/null
@@ -1,37 +0,0 @@
-// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-// ignore-tidy-linelength
-// ignore-wasm32-bare unwinding being disabled causes differences in output
-// ignore-wasm64-bare unwinding being disabled causes differences in output
-// compile-flags: -Z verbose -Z mir-emit-validate=1
-
-fn main() {
-    let _x : Box<[i32]> = Box::new([1, 2, 3]);
-}
-
-// END RUST SOURCE
-// START rustc.main.EraseRegions.after.mir
-// fn main() -> () {
-//     ...
-//     bb1: {
-//         Validate(Acquire, [_2: std::boxed::Box<[i32; 3]>]);
-//         Validate(Release, [_2: std::boxed::Box<[i32; 3]>]);
-//         _1 = move _2 as std::boxed::Box<[i32]> (Unsize);
-//         Validate(Acquire, [_1: std::boxed::Box<[i32]>]);
-//         StorageDead(_2);
-//         StorageDead(_3);
-//         _0 = ();
-//         Validate(Release, [_1: std::boxed::Box<[i32]>]);
-//         drop(_1) -> [return: bb2, unwind: bb3];
-//     }
-//     ...
-// }
-// END rustc.main.EraseRegions.after.mir
diff --git a/src/test/mir-opt/validate_3.rs b/src/test/mir-opt/validate_3.rs
deleted file mode 100644 (file)
index ce84039..0000000
+++ /dev/null
@@ -1,77 +0,0 @@
-// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-// ignore-tidy-linelength
-// compile-flags: -Z verbose -Z mir-emit-validate=1
-
-struct Test {
-    x: i32
-}
-
-fn foo(_x: &i32) {}
-
-fn main() {
-    // These internal unsafe functions should have no effect on the code generation.
-    unsafe fn _unused1() {}
-    fn _unused2(x: *const i32) -> i32 { unsafe { *x }}
-
-    let t = Test { x: 0 };
-    let t = &t;
-    foo(&t.x);
-}
-
-// END RUST SOURCE
-// START rustc.main.EraseRegions.after.mir
-// fn main() -> (){
-//     let mut _0: ();
-//     scope 1 {
-//         scope 3 {
-//         }
-//         scope 4 {
-//             let _2: &ReErased Test;
-//         }
-//     }
-//     scope 2 {
-//         let _1: Test;
-//     }
-//     let mut _3: ();
-//     let mut _4: &ReErased i32;
-//     let mut _5: &ReErased i32;
-//     bb0: {
-//         StorageLive(_1);
-//         _1 = Test { x: const 0i32 };
-//         StorageLive(_2);
-//         Validate(Suspend(ReScope(Remainder { block: ItemLocalId(24), first_statement_index: 3 })), [_1: Test]);
-//         _2 = &ReErased _1;
-//         Validate(Acquire, [(*_2): Test/ReScope(Remainder { block: ItemLocalId(24), first_statement_index: 3 }) (imm)]);
-//         StorageLive(_4);
-//         StorageLive(_5);
-//         Validate(Suspend(ReScope(Node(ItemLocalId(22)))), [((*_2).0: i32): i32/ReScope(Remainder { block: ItemLocalId(24), first_statement_index: 3 }) (imm)]);
-//         _5 = &ReErased ((*_2).0: i32);
-//         Validate(Acquire, [(*_5): i32/ReScope(Node(ItemLocalId(22))) (imm)]);
-//         Validate(Suspend(ReScope(Node(ItemLocalId(22)))), [(*_5): i32/ReScope(Node(ItemLocalId(22))) (imm)]);
-//         _4 = &ReErased (*_5);
-//         Validate(Acquire, [(*_4): i32/ReScope(Node(ItemLocalId(22))) (imm)]);
-//         Validate(Release, [_3: (), _4: &ReScope(Node(ItemLocalId(22))) i32]);
-//         _3 = const foo(move _4) -> bb1;
-//     }
-//     bb1: {
-//         Validate(Acquire, [_3: ()]);
-//         EndRegion(ReScope(Node(ItemLocalId(22))));
-//         StorageDead(_4);
-//         StorageDead(_5);
-//         _0 = ();
-//         EndRegion(ReScope(Remainder { block: ItemLocalId(24), first_statement_index: 3 }));
-//         StorageDead(_2);
-//         StorageDead(_1);
-//         return;
-//     }
-// }
-// END rustc.main.EraseRegions.after.mir
diff --git a/src/test/mir-opt/validate_4.rs b/src/test/mir-opt/validate_4.rs
deleted file mode 100644 (file)
index 542ac8a..0000000
+++ /dev/null
@@ -1,90 +0,0 @@
-// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-// ignore-tidy-linelength
-// compile-flags: -Z verbose -Z mir-emit-validate=1 -Z span_free_formats
-
-// Make sure unsafe fns and fns with an unsafe block only get restricted validation.
-
-unsafe fn write_42(x: *mut i32) -> bool {
-    let test_closure = |x: *mut i32| *x = 23;
-    test_closure(x);
-    *x = 42;
-    true
-}
-
-fn test(x: &mut i32) {
-    unsafe { write_42(x) };
-}
-
-fn main() {
-    test(&mut 0);
-
-    let test_closure = unsafe { |x: &mut i32| write_42(x) };
-    test_closure(&mut 0);
-}
-
-// FIXME: Also test code generated inside the closure, make sure it only does restricted validation
-// because it is entirely inside an unsafe block.  Unfortunately, the interesting lines of code also
-// contain name of the source file, so we cannot test for it.
-
-// END RUST SOURCE
-// START rustc.write_42.EraseRegions.after.mir
-// fn write_42(_1: *mut i32) -> bool {
-//     ...
-//     bb0: {
-//         Validate(Acquire, [_1: *mut i32]);
-//         Validate(Release, [_1: *mut i32]);
-//         ...
-//         return;
-//     }
-// }
-// END rustc.write_42.EraseRegions.after.mir
-// START rustc.write_42-{{closure}}.EraseRegions.after.mir
-// fn write_42::{{closure}}(_1: &ReErased [closure@NodeId(32)], _2: *mut i32) -> () {
-//     ...
-//     bb0: {
-//         Validate(Acquire, [_1: &ReFree(DefId(0/1:9 ~ validate_4[317d]::write_42[0]::{{closure}}[0]), BrEnv) [closure@NodeId(32)], _2: *mut i32]);
-//         Validate(Release, [_1: &ReFree(DefId(0/1:9 ~ validate_4[317d]::write_42[0]::{{closure}}[0]), BrEnv) [closure@NodeId(32)], _2: *mut i32]);
-//         (*_2) = const 23i32;
-//         _0 = ();
-//         return;
-//     }
-// }
-// END rustc.write_42-{{closure}}.EraseRegions.after.mir
-// START rustc.test.EraseRegions.after.mir
-// fn test(_1: &ReErased mut i32) -> () {
-//     ...
-//     bb0: {
-//         Validate(Acquire, [_1: &ReFree(DefId(0/0:4 ~ validate_4[317d]::test[0]), BrAnon(0)) mut i32]);
-//         Validate(Release, [_1: &ReFree(DefId(0/0:4 ~ validate_4[317d]::test[0]), BrAnon(0)) mut i32]);
-//         ...
-//         _2 = const write_42(move _3) -> bb1;
-//     }
-//     bb1: {
-//         Validate(Acquire, [_2: bool]);
-//         Validate(Release, [_2: bool]);
-//         ...
-//     }
-// }
-// END rustc.test.EraseRegions.after.mir
-// START rustc.main-{{closure}}.EraseRegions.after.mir
-// fn main::{{closure}}(_1: &ReErased [closure@NodeId(80)], _2: &ReErased mut i32) -> bool {
-//     ...
-//     bb0: {
-//         Validate(Acquire, [_1: &ReFree(DefId(0/1:10 ~ validate_4[317d]::main[0]::{{closure}}[0]), BrEnv) [closure@NodeId(80)], _2: &ReFree(DefId(0/1:10 ~ validate_4[317d]::main[0]::{{closure}}[0]), BrAnon(0)) mut i32]);
-//         Validate(Release, [_1: &ReFree(DefId(0/1:10 ~ validate_4[317d]::main[0]::{{closure}}[0]), BrEnv) [closure@NodeId(80)], _2: &ReFree(DefId(0/1:10 ~ validate_4[317d]::main[0]::{{closure}}[0]), BrAnon(0)) mut i32]);
-//         StorageLive(_3);
-//         ...
-//         _0 = const write_42(move _3) -> bb1;
-//     }
-//     ...
-// }
-// END rustc.main-{{closure}}.EraseRegions.after.mir
diff --git a/src/test/mir-opt/validate_5.rs b/src/test/mir-opt/validate_5.rs
deleted file mode 100644 (file)
index 955de0c..0000000
+++ /dev/null
@@ -1,69 +0,0 @@
-// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-// ignore-tidy-linelength
-// compile-flags: -Z verbose -Z mir-emit-validate=2 -Z span_free_formats
-
-// Make sure unsafe fns and fns with an unsafe block still get full validation.
-
-unsafe fn write_42(x: *mut i32) -> bool {
-    *x = 42;
-    true
-}
-
-fn test(x: &mut i32) {
-    unsafe { write_42(x) };
-}
-
-fn main() {
-    test(&mut 0);
-
-    let test_closure = unsafe { |x: &mut i32| write_42(x) };
-    // Note that validation will fail if this is executed: The closure keeps the lock on
-    // x, so the write in write_42 fails.  This test just checks code generation,
-    // so the UB doesn't matter.
-    test_closure(&mut 0);
-}
-
-// END RUST SOURCE
-// START rustc.test.EraseRegions.after.mir
-// fn test(_1: &ReErased mut i32) -> () {
-//     ...
-//     bb0: {
-//         Validate(Acquire, [_1: &ReFree(DefId(0/0:4 ~ validate_5[317d]::test[0]), BrAnon(0)) mut i32]);
-//         ...
-//         Validate(Release, [_2: bool, _3: *mut i32]);
-//         _2 = const write_42(move _3) -> bb1;
-//     }
-//     ...
-// }
-// END rustc.test.EraseRegions.after.mir
-// START rustc.main-{{closure}}.EraseRegions.after.mir
-// fn main::{{closure}}(_1: &ReErased [closure@NodeId(62)], _2: &ReErased mut i32) -> bool {
-//     ...
-//     bb0: {
-//         Validate(Acquire, [_1: &ReFree(DefId(0/1:9 ~ validate_5[317d]::main[0]::{{closure}}[0]), BrEnv) [closure@NodeId(62)], _2: &ReFree(DefId(0/1:9 ~ validate_5[317d]::main[0]::{{closure}}[0]), BrAnon(0)) mut i32]);
-//         StorageLive(_3);
-//         StorageLive(_4);
-//         StorageLive(_5);
-//         Validate(Suspend(ReScope(Node(ItemLocalId(16)))), [(*_2): i32]);
-//         _5 = &ReErased mut (*_2);
-//         Validate(Acquire, [(*_5): i32/ReScope(Node(ItemLocalId(16)))]);
-//         _4 = move _5 as *mut i32 (Misc);
-//         _3 = move _4;
-//         EndRegion(ReScope(Node(ItemLocalId(16))));
-//         StorageDead(_4);
-//         StorageDead(_5);
-//         Validate(Release, [_0: bool, _3: *mut i32]);
-//         _0 = const write_42(move _3) -> bb1;
-//     }
-//     ...
-// }
-// END rustc.main-{{closure}}.EraseRegions.after.mir
index 54f888b3796a17b1763fa2b1ac9134b499a293b5..2c8128b1907b6a8f945da16069321effbbe041a3 100644 (file)
@@ -58,6 +58,14 @@ pub unsafe fn atomic_u64(x: *mut u64) {
 pub unsafe fn atomic_i64(x: *mut i64) {
     atomic_xadd(x, 1);
 }
+#[cfg(target_has_atomic = "128")]
+pub unsafe fn atomic_u128(x: *mut u128) {
+    atomic_xadd(x, 1);
+}
+#[cfg(target_has_atomic = "128")]
+pub unsafe fn atomic_i128(x: *mut i128) {
+    atomic_xadd(x, 1);
+}
 #[cfg(target_has_atomic = "ptr")]
 pub unsafe fn atomic_usize(x: *mut usize) {
     atomic_xadd(x, 1);
diff --git a/src/test/run-make/thumb-none-qemu/Makefile b/src/test/run-make/thumb-none-qemu/Makefile
new file mode 100644 (file)
index 0000000..ffd1772
--- /dev/null
@@ -0,0 +1,30 @@
+-include ../../run-make-fulldeps/tools.mk
+
+# How to run this
+# $ ./x.py clean
+# $ ./x.py test --target thumbv7m-none-eabi src/test/run-make
+
+ifneq (,$(filter $(TARGET),thumbv6m-none-eabi thumbv7m-none-eabi))
+
+# For cargo setting
+export RUSTC := $(RUSTC_ORIGINAL)
+export LD_LIBRARY_PATH := $(HOST_RPATH_DIR)
+# We need to be outside of 'src' dir in order to run cargo
+export WORK_DIR := $(TMPDIR)
+export HERE := $(shell pwd)
+
+## clean up unused env variables which might cause harm.
+unexport RUSTC_LINKER
+unexport RUSTC_BOOTSTRAP
+unexport RUST_BUILD_STAGE
+unexport RUST_TEST_THREADS
+unexport RUST_TEST_TMPDIR
+unexport AR
+unexport CC
+unexport CXX
+
+all:
+       bash script.sh
+else
+all:
+endif
diff --git a/src/test/run-make/thumb-none-qemu/example/.cargo/config b/src/test/run-make/thumb-none-qemu/example/.cargo/config
new file mode 100644 (file)
index 0000000..0d6b19c
--- /dev/null
@@ -0,0 +1,31 @@
+[target.thumbv7m-none-eabi]
+# uncomment this to make `cargo run` execute programs on QEMU
+runner = "qemu-system-arm -cpu cortex-m3 -machine lm3s6965evb -nographic -semihosting-config enable=on,target=native -kernel"
+
+[target.thumbv6m-none-eabi]
+# uncomment this to make `cargo run` execute programs on QEMU
+# For now, we use cortex-m3 instead of cortex-m0, which is not supported by QEMU
+runner = "qemu-system-arm -cpu cortex-m3 -machine lm3s6965evb -nographic -semihosting-config enable=on,target=native -kernel"
+
+[target.'cfg(all(target_arch = "arm", target_os = "none"))']
+# uncomment ONE of these three options to make `cargo run` start a GDB session
+# which option to pick depends on your system
+# runner = "arm-none-eabi-gdb -q -x openocd.gdb"
+# runner = "gdb-multiarch -q -x openocd.gdb"
+# runner = "gdb -q -x openocd.gdb"
+
+rustflags = [
+  # LLD (shipped with the Rust toolchain) is used as the default linker
+  "-C", "link-arg=-Tlink.x",
+
+  # if you run into problems with LLD switch to the GNU linker by commenting out
+  # this line
+  # "-C", "linker=arm-none-eabi-ld",
+
+  # if you need to link to pre-compiled C libraries provided by a C toolchain
+  # use GCC as the linker by commenting out both lines above and then
+  # uncommenting the three lines below
+  # "-C", "linker=arm-none-eabi-gcc",
+  # "-C", "link-arg=-Wl,-Tlink.x",
+  # "-C", "link-arg=-nostartfiles",
+]
\ No newline at end of file
diff --git a/src/test/run-make/thumb-none-qemu/example/Cargo.toml b/src/test/run-make/thumb-none-qemu/example/Cargo.toml
new file mode 100644 (file)
index 0000000..4995533
--- /dev/null
@@ -0,0 +1,11 @@
+[package]
+name = "example"
+version = "0.1.0"
+authors = ["Hideki Sekine <sekineh@me.com>"]
+# edition = "2018"
+
+[dependencies]
+cortex-m = "0.5.4"
+cortex-m-rt = "=0.5.4"
+panic-halt = "0.2.0"
+cortex-m-semihosting = "0.3.1"
diff --git a/src/test/run-make/thumb-none-qemu/example/memory.x b/src/test/run-make/thumb-none-qemu/example/memory.x
new file mode 100644 (file)
index 0000000..dc7ad96
--- /dev/null
@@ -0,0 +1,23 @@
+/* Device specific memory layout */
+
+/* This file is used to build the cortex-m-rt examples,
+   but not other applications using cortex-m-rt. */
+
+MEMORY
+{
+  /* FLASH and RAM are mandatory memory regions */
+  /* Update examples/data_overflow.rs if you change these sizes. */
+  FLASH : ORIGIN = 0x00000000, LENGTH = 256K
+  RAM : ORIGIN = 0x20000000, LENGTH = 64K
+
+  /* More memory regions can be declared: for example, this is a second RAM region */
+  /* CCRAM : ORIGIN = 0x10000000, LENGTH = 8K */
+}
+
+/* The location of the stack can be overridden using the `_stack_start` symbol.
+   By default it will be placed at the end of the RAM region */
+/* _stack_start = ORIGIN(CCRAM) + LENGTH(CCRAM); */
+
+/* The location of the .text section can be overridden using the `_stext` symbol.
+   By default it will be placed after .vector_table */
+/* _stext = ORIGIN(FLASH) + 0x40c; */
\ No newline at end of file
diff --git a/src/test/run-make/thumb-none-qemu/example/src/main.rs b/src/test/run-make/thumb-none-qemu/example/src/main.rs
new file mode 100644 (file)
index 0000000..d88a327
--- /dev/null
@@ -0,0 +1,30 @@
+// #![feature(stdsimd)]
+#![no_main]
+#![no_std]
+
+extern crate cortex_m;
+
+extern crate cortex_m_rt as rt;
+extern crate cortex_m_semihosting as semihosting;
+extern crate panic_halt;
+
+use core::fmt::Write;
+use cortex_m::asm;
+use rt::entry;
+
+entry!(main);
+
+fn main() -> ! {
+    let x = 42;
+
+    loop {
+        asm::nop();
+
+        // write something through semihosting interface
+        let mut hstdout = semihosting::hio::hstdout().unwrap();
+        write!(hstdout, "x = {}\n", x);
+
+        // exit from qemu
+        semihosting::debug::exit(semihosting::debug::EXIT_SUCCESS);
+    }
+}
diff --git a/src/test/run-make/thumb-none-qemu/script.sh b/src/test/run-make/thumb-none-qemu/script.sh
new file mode 100644 (file)
index 0000000..0f1c49f
--- /dev/null
@@ -0,0 +1,16 @@
+set -exuo pipefail
+
+CRATE=example
+
+env | sort
+mkdir -p $WORK_DIR
+pushd $WORK_DIR
+    rm -rf $CRATE || echo OK
+    cp -a $HERE/example .
+    pushd $CRATE
+        env RUSTFLAGS="-C linker=arm-none-eabi-ld -C link-arg=-Tlink.x" \
+            $CARGO run --target $TARGET           | grep "x = 42"
+        env RUSTFLAGS="-C linker=arm-none-eabi-ld -C link-arg=-Tlink.x" \
+            $CARGO run --target $TARGET --release | grep "x = 42"
+    popd
+popd
index 330ae300c445ee4334e67ed60e88ad53fe383243..48b530c9957254528422e007a40075a1954d4f1d 100644 (file)
@@ -11,6 +11,9 @@ all:
        $(RUSTC) foo.rs -C lto -O --target wasm32-unknown-unknown --cfg c
        wc -c < $(TMPDIR)/foo.wasm
        [ "`wc -c < $(TMPDIR)/foo.wasm`" -lt "5120" ]
+       $(RUSTC) foo.rs -C lto -O --target wasm32-unknown-unknown --cfg d
+       wc -c < $(TMPDIR)/foo.wasm
+       [ "`wc -c < $(TMPDIR)/foo.wasm`" -lt "5120" ]
 else
 all:
 endif
index 1ea724ca94d477951407704d3b4ae957d43d0429..441e92976a39671283c142f531172d8cf49504f1 100644 (file)
@@ -27,3 +27,11 @@ pub fn foo() {
 pub fn foo() {
     panic!("{}", "a");
 }
+
+#[no_mangle]
+#[cfg(d)]
+pub fn foo() -> usize {
+    use std::cell::Cell;
+    thread_local!(static A: Cell<Vec<u32>> = Cell::new(Vec::new()));
+    A.try_with(|x| x.replace(Vec::new()).len()).unwrap_or(0)
+}
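
The new `cfg(d)` function above exercises `LocalKey::try_with`, which returns `Err` only once the thread-local has been destroyed (for example during thread teardown), so `unwrap_or(0)` turns that case into a zero length. A small standalone sketch of the same pattern, independent of the wasm size test:

use std::cell::Cell;

thread_local!(static A: Cell<Vec<u32>> = Cell::new(Vec::new()));

fn main() {
    // Swap in a new Vec and report the length of the one that was stored.
    let len = A.try_with(|x| x.replace(vec![1, 2, 3]).len()).unwrap_or(0);
    assert_eq!(len, 0); // first access: the stored Vec starts out empty

    // A second access sees the three elements stored by the previous call.
    let len = A.try_with(|x| x.replace(Vec::new()).len()).unwrap_or(0);
    assert_eq!(len, 3);
}
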
index 156db486a47676f398148bf607f2637ffdb5ddc0..dcc2f4f5223007cd09abe8325f1b37f4510ebec6 100644 (file)
@@ -9,8 +9,6 @@
 // except according to those terms.
 
 #![crate_type = "cdylib"]
-
-#![feature(panic_implementation)]
 #![no_std]
 
 use core::panic::PanicInfo;
@@ -20,7 +18,7 @@
     panic!()
 }
 
-#[panic_implementation]
+#[panic_handler]
 fn panic(_info: &PanicInfo) -> ! {
     loop {}
 }
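
The two hunks above swap the pre-stabilization `#[panic_implementation]` attribute (and its feature gate) for the stabilized `#[panic_handler]`. As a sketch only, independent of the test file itself, a minimal `no_std` library using the stabilized form looks like this when compiled with `--crate-type=lib`:

#![no_std]

use core::panic::PanicInfo;

// A binary must link exactly one #[panic_handler] function; it receives a
// &PanicInfo and never returns. No feature gate is required, which is why
// the hunk above also drops panic_implementation.
#[panic_handler]
fn panic(_info: &PanicInfo) -> ! {
    loop {}
}
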
diff --git a/src/test/run-pass/arbitrary_self_types_pointers_and_wrappers.rs b/src/test/run-pass/arbitrary_self_types_pointers_and_wrappers.rs
new file mode 100644 (file)
index 0000000..e166356
--- /dev/null
@@ -0,0 +1,76 @@
+// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+#![feature(arbitrary_self_types, unsize, coerce_unsized, dispatch_from_dyn)]
+#![feature(rustc_attrs)]
+
+use std::{
+    ops::{Deref, CoerceUnsized, DispatchFromDyn},
+    marker::Unsize,
+};
+
+struct Ptr<T: ?Sized>(Box<T>);
+
+impl<T: ?Sized> Deref for Ptr<T> {
+    type Target = T;
+
+    fn deref(&self) -> &T {
+        &*self.0
+    }
+}
+
+impl<T: Unsize<U> + ?Sized, U: ?Sized> CoerceUnsized<Ptr<U>> for Ptr<T> {}
+impl<T: Unsize<U> + ?Sized, U: ?Sized> DispatchFromDyn<Ptr<U>> for Ptr<T> {}
+
+struct Wrapper<T: ?Sized>(T);
+
+impl<T: ?Sized> Deref for Wrapper<T> {
+    type Target = T;
+
+    fn deref(&self) -> &T {
+        &self.0
+    }
+}
+
+impl<T: CoerceUnsized<U>, U> CoerceUnsized<Wrapper<U>> for Wrapper<T> {}
+impl<T: DispatchFromDyn<U>, U> DispatchFromDyn<Wrapper<U>> for Wrapper<T> {}
+
+
+trait Trait {
+    // This method isn't object-safe yet. Unsized by-value `self` is object-safe (but not callable
+    // without unsized_locals), but wrappers around `Self` currently are not.
+    // FIXME (mikeyhew) uncomment this when unsized rvalues object-safety is implemented
+    // fn wrapper(self: Wrapper<Self>) -> i32;
+    fn ptr_wrapper(self: Ptr<Wrapper<Self>>) -> i32;
+    fn wrapper_ptr(self: Wrapper<Ptr<Self>>) -> i32;
+    fn wrapper_ptr_wrapper(self: Wrapper<Ptr<Wrapper<Self>>>) -> i32;
+}
+
+impl Trait for i32 {
+    fn ptr_wrapper(self: Ptr<Wrapper<Self>>) -> i32 {
+        **self
+    }
+    fn wrapper_ptr(self: Wrapper<Ptr<Self>>) -> i32 {
+        **self
+    }
+    fn wrapper_ptr_wrapper(self: Wrapper<Ptr<Wrapper<Self>>>) -> i32 {
+        ***self
+    }
+}
+
+fn main() {
+    let pw = Ptr(Box::new(Wrapper(5))) as Ptr<Wrapper<dyn Trait>>;
+    assert_eq!(pw.ptr_wrapper(), 5);
+
+    let wp = Wrapper(Ptr(Box::new(6))) as Wrapper<Ptr<dyn Trait>>;
+    assert_eq!(wp.wrapper_ptr(), 6);
+
+    let wpw = Wrapper(Ptr(Box::new(Wrapper(7)))) as Wrapper<Ptr<Wrapper<dyn Trait>>>;
+    assert_eq!(wpw.wrapper_ptr_wrapper(), 7);
+}
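
The custom receivers above generalize what the language already allows for `Box`. For comparison, a much smaller sketch of a non-`&self` receiver that works without `arbitrary_self_types`:

trait Speak {
    fn speak_boxed(self: Box<Self>) -> String;
}

impl Speak for i32 {
    // A Box<Self> receiver is object-safe, so it can be called through
    // Box<dyn Speak> just like ptr_wrapper is called through Ptr above.
    fn speak_boxed(self: Box<Self>) -> String {
        format!("{}", *self)
    }
}

fn main() {
    let b: Box<dyn Speak> = Box::new(7);
    assert_eq!(b.speak_boxed(), "7");
}
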
diff --git a/src/test/run-pass/arbitrary_self_types_stdlib_pointers.rs b/src/test/run-pass/arbitrary_self_types_stdlib_pointers.rs
new file mode 100644 (file)
index 0000000..80a7ce9
--- /dev/null
@@ -0,0 +1,56 @@
+// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+#![feature(arbitrary_self_types)]
+#![feature(pin)]
+#![feature(rustc_attrs)]
+
+use std::{
+    rc::Rc,
+    sync::Arc,
+    pin::Pin,
+};
+
+trait Trait {
+    fn by_rc(self: Rc<Self>) -> i64;
+    fn by_arc(self: Arc<Self>) -> i64;
+    fn by_pin_mut(self: Pin<&mut Self>) -> i64;
+    fn by_pin_box(self: Pin<Box<Self>>) -> i64;
+}
+
+impl Trait for i64 {
+    fn by_rc(self: Rc<Self>) -> i64 {
+        *self
+    }
+    fn by_arc(self: Arc<Self>) -> i64 {
+        *self
+    }
+    fn by_pin_mut(self: Pin<&mut Self>) -> i64 {
+        *self
+    }
+    fn by_pin_box(self: Pin<Box<Self>>) -> i64 {
+        *self
+    }
+}
+
+fn main() {
+    let rc = Rc::new(1i64) as Rc<dyn Trait>;
+    assert_eq!(1, rc.by_rc());
+
+    let arc = Arc::new(2i64) as Arc<dyn Trait>;
+    assert_eq!(2, arc.by_arc());
+
+    let mut value = 3i64;
+    let pin_mut = Pin::new(&mut value) as Pin<&mut dyn Trait>;
+    assert_eq!(3, pin_mut.by_pin_mut());
+
+    let pin_box = Into::<Pin<Box<i64>>>::into(Box::new(4i64)) as Pin<Box<dyn Trait>>;
+    assert_eq!(4, pin_box.by_pin_box());
+}
diff --git a/src/test/run-pass/atomic-alignment.rs b/src/test/run-pass/atomic-alignment.rs
new file mode 100644 (file)
index 0000000..8771765
--- /dev/null
@@ -0,0 +1,46 @@
+// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+#![feature(cfg_target_has_atomic)]
+#![feature(integer_atomics)]
+
+use std::mem::{align_of, size_of};
+use std::sync::atomic::*;
+
+fn main() {
+    #[cfg(target_has_atomic = "8")]
+    assert_eq!(align_of::<AtomicBool>(), size_of::<AtomicBool>());
+    #[cfg(target_has_atomic = "ptr")]
+    assert_eq!(align_of::<AtomicPtr<u8>>(), size_of::<AtomicPtr<u8>>());
+    #[cfg(target_has_atomic = "8")]
+    assert_eq!(align_of::<AtomicU8>(), size_of::<AtomicU8>());
+    #[cfg(target_has_atomic = "8")]
+    assert_eq!(align_of::<AtomicI8>(), size_of::<AtomicI8>());
+    #[cfg(target_has_atomic = "16")]
+    assert_eq!(align_of::<AtomicU16>(), size_of::<AtomicU16>());
+    #[cfg(target_has_atomic = "16")]
+    assert_eq!(align_of::<AtomicI16>(), size_of::<AtomicI16>());
+    #[cfg(target_has_atomic = "32")]
+    assert_eq!(align_of::<AtomicU32>(), size_of::<AtomicU32>());
+    #[cfg(target_has_atomic = "32")]
+    assert_eq!(align_of::<AtomicI32>(), size_of::<AtomicI32>());
+    #[cfg(target_has_atomic = "64")]
+    assert_eq!(align_of::<AtomicU64>(), size_of::<AtomicU64>());
+    #[cfg(target_has_atomic = "64")]
+    assert_eq!(align_of::<AtomicI64>(), size_of::<AtomicI64>());
+    #[cfg(target_has_atomic = "128")]
+    assert_eq!(align_of::<AtomicU128>(), size_of::<AtomicU128>());
+    #[cfg(target_has_atomic = "128")]
+    assert_eq!(align_of::<AtomicI128>(), size_of::<AtomicI128>());
+    #[cfg(target_has_atomic = "ptr")]
+    assert_eq!(align_of::<AtomicUsize>(), size_of::<AtomicUsize>());
+    #[cfg(target_has_atomic = "ptr")]
+    assert_eq!(align_of::<AtomicIsize>(), size_of::<AtomicIsize>());
+}
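
The assertions above pin down that each atomic type is aligned to its own size, i.e. naturally aligned, which some targets require for their atomic instructions. A standalone sketch that just reports the layout of the two atomics available on effectively every hosted target (the test itself cfg-gates the fixed-width types on `target_has_atomic`):

use std::mem::{align_of, size_of};
use std::sync::atomic::{AtomicBool, AtomicUsize};

fn main() {
    println!("AtomicBool:  align = {}, size = {}",
             align_of::<AtomicBool>(), size_of::<AtomicBool>());
    println!("AtomicUsize: align = {}, size = {}",
             align_of::<AtomicUsize>(), size_of::<AtomicUsize>());

    // The same invariants the test checks, for these two types.
    assert_eq!(align_of::<AtomicBool>(), size_of::<AtomicBool>());
    assert_eq!(align_of::<AtomicUsize>(), size_of::<AtomicUsize>());
}
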
index 46b409fb13a84bc94763e528af8a0bc5dc43e6cb..96f9da67790fc7814339eaca87641640e6062481 100644 (file)
@@ -48,6 +48,13 @@ fn main() {
                 println!("passed");
             }
 
+            "exec-test5" => {
+                env::set_var("VARIABLE", "ABC");
+                Command::new("definitely-not-a-real-binary").env("VARIABLE", "XYZ").exec();
+                assert_eq!(env::var("VARIABLE").unwrap(), "ABC");
+                println!("passed");
+            }
+
             _ => panic!("unknown argument: {}", arg),
         }
         return
@@ -72,4 +79,9 @@ fn main() {
     assert!(output.status.success());
     assert!(output.stderr.is_empty());
     assert_eq!(output.stdout, b"passed\n");
+
+    let output = Command::new(&me).arg("exec-test5").output().unwrap();
+    assert!(output.status.success());
+    assert!(output.stderr.is_empty());
+    assert_eq!(output.stdout, b"passed\n");
 }
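
The added exec-test5 case relies on `Command::env` configuring only the child's environment: when `exec()` fails (the binary intentionally does not exist), the parent process keeps running with its own environment unchanged. A standalone sketch of that property, separate from the test harness above; it is Unix-specific and mirrors the test's own calls:

use std::env;
use std::os::unix::process::CommandExt;
use std::process::Command;

fn main() {
    env::set_var("VARIABLE", "ABC");
    // On success `exec` never returns; on failure it returns an io::Error
    // and this process continues with its environment intact.
    let err = Command::new("definitely-not-a-real-binary")
        .env("VARIABLE", "XYZ")
        .exec();
    println!("exec failed as expected: {}", err);
    assert_eq!(env::var("VARIABLE").unwrap(), "ABC");
}
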
index 6cf06d15621040df66db4c8ebaf1703bf6c2591a..e6dd421f48f51dce9e782843955475e66fc6b734 100644 (file)
@@ -310,10 +310,10 @@ fn join(mut self, sep: &str) -> String
     where Self::Item: std::fmt::Display {
         let mut s = String::new();
         if let Some(e) = self.next() {
-            write!(s, "{}", e);
+            write!(s, "{}", e).unwrap();
             for e in self {
                 s.push_str(sep);
-                write!(s, "{}", e);
+                write!(s, "{}", e).unwrap();
             }
         }
         s
@@ -537,7 +537,7 @@ fn format_weeks(it: impl Iterator<Item = impl DateIterator>) -> impl Iterator<It
                 first = false;
             }
 
-            write!(buf, " {:>2}", d.day());
+            write!(buf, " {:>2}", d.day()).unwrap();
         }
 
         // Insert more filler at the end to fill up the remainder of the week,
index ed8c4992417afee758adbb250518fd7d3653eb7e..0f354e1aad6c97c2bd7dda89f53bdc122e3169b0 100644 (file)
@@ -39,6 +39,7 @@ fn check_status(status: std::process::ExitStatus)
     use std::os::unix::process::ExitStatusExt;
 
     assert!(status.signal() == Some(libc::SIGILL)
+            || status.signal() == Some(libc::SIGTRAP)
             || status.signal() == Some(libc::SIGABRT));
 }
 
diff --git a/src/test/run-pass/issues/issue-54477-reduced-2.rs b/src/test/run-pass/issues/issue-54477-reduced-2.rs
new file mode 100644 (file)
index 0000000..28731d4
--- /dev/null
@@ -0,0 +1,25 @@
+// rust-lang/rust#54477: runtime bug in the VecDeque library that was
+// exposed by this test case, derived from test suite of crates.io
+// `collection` crate.
+
+use std::collections::VecDeque;
+
+fn main() {
+    let mut vecdeque_13 = VecDeque::from(vec![ ]);
+    let mut vecdeque_29 = VecDeque::from(vec![ 0 ]);
+    vecdeque_29.insert(0,  30 );
+    vecdeque_29.insert(1,  31 );
+    vecdeque_29.insert(2,  32 );
+    vecdeque_29.insert(3,  33 );
+    vecdeque_29.insert(4,  34 );
+    vecdeque_29.insert(5,  35 );
+    // println!("vecdeque_13: {:?}", vecdeque_13);
+    // println!("vecdeque_29: {:?}", vecdeque_29);
+
+    // println!("Invoking: `vecdeque_13.append(&mut vecdeque_29)`");
+    vecdeque_13.append(&mut vecdeque_29);
+
+    // println!("vecdeque_13: {:?}", vecdeque_13);
+
+    assert_eq!(vecdeque_13, VecDeque::from(vec![30, 31, 32, 33, 34, 35, 0]));
+}
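
The regression test leans on the documented contract of `VecDeque::append`: every element of the other deque is moved to the back of `self` in order, leaving the other deque empty. A tiny sketch of that contract on its own, not taken from the commit and with arbitrary values:

use std::collections::VecDeque;

fn main() {
    let mut dst: VecDeque<i32> = VecDeque::new();
    let mut src: VecDeque<i32> = (1..=3).collect();

    dst.append(&mut src);

    // All elements moved, order preserved, source drained.
    assert_eq!(dst, VecDeque::from(vec![1, 2, 3]));
    assert!(src.is_empty());
}
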
index 7c557eb2bd07973dcd3df8031af7b62ec747c526..ee597f11c6a9de1f355f95326af84b1e3f3a9d1e 100644 (file)
@@ -27,18 +27,18 @@ fn write_str(&mut self, _: &str) -> fmt::Result {
 }
 
 fn borrowing_writer_from_struct_and_formatting_struct_field(foo: Foo) {
-    write!(foo.writer, "{}", foo.other);
+    write!(foo.writer, "{}", foo.other).unwrap();
 }
 
 fn main() {
     let mut w = Vec::new();
-    write!(&mut w as &mut Write, "");
-    write!(&mut w, ""); // should coerce
+    write!(&mut w as &mut Write, "").unwrap();
+    write!(&mut w, "").unwrap(); // should coerce
     println!("ok");
 
     let mut s = Bar;
     {
         use std::fmt::Write;
-        write!(&mut s, "test");
+        write!(&mut s, "test").unwrap();
     }
 }
index 3832c040108f8f0b0ad35306357929331a479255..a12dad4489df92384837f0709350b449c40fc73e 100644 (file)
@@ -12,7 +12,8 @@
 
 use std::mem::size_of;
 
-// compile-flags: -Z fuel=foo=0
+// (#55495: The --error-format is to sidestep an issue in our test harness)
+// compile-flags: --error-format human -Z fuel=foo=0
 
 struct S1(u8, u16, u8);
 struct S2(u8, u16, u8);
diff --git a/src/test/run-pass/optimization-fuel-0.stderr b/src/test/run-pass/optimization-fuel-0.stderr
new file mode 100644 (file)
index 0000000..3ad405b
--- /dev/null
@@ -0,0 +1 @@
+optimization-fuel-exhausted: Reorder fields of "S1"
diff --git a/src/test/run-pass/optimization-fuel-0.stdout b/src/test/run-pass/optimization-fuel-0.stdout
deleted file mode 100644 (file)
index 3ad405b..0000000
+++ /dev/null
@@ -1 +0,0 @@
-optimization-fuel-exhausted: Reorder fields of "S1"
index e3529ebfb0d811fa629df89c0a896aea5730495e..1e76aaa48b761065cfb20c24e0ef6a5bfb7ffeb2 100644 (file)
@@ -12,7 +12,8 @@
 
 use std::mem::size_of;
 
-// compile-flags: -Z fuel=foo=1
+// (#55495: The --error-format is to sidestep an issue in our test harness)
+// compile-flags: --error-format human -Z fuel=foo=1
 
 struct S1(u8, u16, u8);
 struct S2(u8, u16, u8);
diff --git a/src/test/run-pass/optimization-fuel-1.stderr b/src/test/run-pass/optimization-fuel-1.stderr
new file mode 100644 (file)
index 0000000..197e452
--- /dev/null
@@ -0,0 +1 @@
+optimization-fuel-exhausted: Reorder fields of "S2"
diff --git a/src/test/run-pass/optimization-fuel-1.stdout b/src/test/run-pass/optimization-fuel-1.stdout
deleted file mode 100644 (file)
index 197e452..0000000
+++ /dev/null
@@ -1 +0,0 @@
-optimization-fuel-exhausted: Reorder fields of "S2"
diff --git a/src/test/run-pass/thread-local-not-in-prelude.rs b/src/test/run-pass/thread-local-not-in-prelude.rs
new file mode 100644 (file)
index 0000000..0c36559
--- /dev/null
@@ -0,0 +1,18 @@
+// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+#![no_std]
+
+extern crate std;
+
+std::thread_local!(static A: usize = 30);
+
+fn main() {
+}
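
The test above only checks that `std::thread_local!` resolves through its fully qualified path under `#![no_std]` with an explicit `extern crate std`; the static `A` is never read. For completeness, a hedged sketch of how such a thread-local is actually accessed through `LocalKey::with`; the `COUNTER` name is invented and this is not part of the commit:

use std::cell::Cell;

std::thread_local!(static COUNTER: Cell<usize> = Cell::new(0));

fn main() {
    // Each thread gets its own `COUNTER`; `with` borrows it for the closure.
    COUNTER.with(|c| c.set(c.get() + 1));
    COUNTER.with(|c| assert_eq!(c.get(), 1));
}
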
diff --git a/src/test/run-pass/traits/trait-alias-bounds.rs b/src/test/run-pass/traits/trait-alias-bounds.rs
new file mode 100644 (file)
index 0000000..d8ac1a8
--- /dev/null
@@ -0,0 +1,65 @@
+// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+#![feature(trait_alias)]
+
+use std::marker::PhantomData;
+
+trait Empty {}
+trait EmptyAlias = Empty;
+trait CloneDefault = Clone + Default;
+trait SendSyncAlias = Send + Sync;
+trait WhereSendAlias = where Self: Send;
+trait SendEqAlias<T> = Send where T: PartialEq<Self>;
+trait I32Iterator = Iterator<Item = i32>;
+
+#[allow(dead_code)]
+struct Foo<T: SendSyncAlias>(PhantomData<T>);
+#[allow(dead_code)]
+struct Bar<T>(PhantomData<T>) where T: SendSyncAlias;
+
+impl EmptyAlias {}
+
+impl<T: SendSyncAlias> Empty for T {}
+
+fn a<T: CloneDefault>() -> (T, T) {
+    let one = T::default();
+    let two = one.clone();
+    (one, two)
+}
+
+fn b(x: &impl SendEqAlias<i32>) -> bool {
+    22_i32 == *x
+}
+
+fn c<T: I32Iterator>(x: &mut T) -> Option<i32> {
+    x.next()
+}
+
+fn d<T: SendSyncAlias>() {
+    is_send_and_sync::<T>();
+}
+
+fn is_send_and_sync<T: Send + Sync>() {}
+
+fn main() {
+    let both = a::<i32>();
+    assert_eq!(both.0, 0);
+    assert_eq!(both.1, 0);
+    let both: (i32, i32) = a();
+    assert_eq!(both.0, 0);
+    assert_eq!(both.1, 0);
+
+    assert!(b(&22));
+
+    assert_eq!(c(&mut vec![22].into_iter()), Some(22));
+
+    d::<i32>();
+}
diff --git a/src/test/run-pass/traits/trait-alias-object-type.rs b/src/test/run-pass/traits/trait-alias-object-type.rs
new file mode 100644 (file)
index 0000000..17e3092
--- /dev/null
@@ -0,0 +1,27 @@
+// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+#![feature(trait_alias)]
+
+trait Foo = PartialEq<i32> + Send;
+trait Bar = Foo + Sync;
+
+trait I32Iterator = Iterator<Item = i32>;
+
+pub fn main() {
+    let a: &dyn Bar = &123;
+    assert!(*a == 123);
+    let b = Box::new(456) as Box<dyn Foo>;
+    assert!(*b == 456);
+
+    // FIXME(alexreg): associated type should be gotten from trait alias definition
+    // let c: &dyn I32Iterator = &vec![123].into_iter();
+    // assert_eq!(c.next(), Some(123));
+}
diff --git a/src/test/run-pass/traits/trait-alias-syntax.rs b/src/test/run-pass/traits/trait-alias-syntax.rs
new file mode 100644 (file)
index 0000000..a9b7afb
--- /dev/null
@@ -0,0 +1,32 @@
+// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+#![feature(trait_alias)]
+
+trait SimpleAlias = Default;
+trait GenericAlias<T> = Iterator<Item = T>;
+trait Partial<T> = IntoIterator<Item = T>;
+trait SpecificAlias = GenericAlias<i32>;
+trait PartialEqRef<'a, T: 'a> = PartialEq<&'a T>;
+trait StaticAlias = 'static;
+
+trait Things<T> {}
+trait Romeo {}
+#[allow(dead_code)]
+struct The<T>(T);
+#[allow(dead_code)]
+struct Fore<T>(T);
+impl<T, U> Things<T> for The<U> {}
+impl<T> Romeo for Fore<T> {}
+
+trait WithWhere<Art, Thou> = Romeo + Romeo where Fore<(Art, Thou)>: Romeo;
+trait BareWhere<Wild, Are> = where The<Wild>: Things<Are>;
+
+fn main() {}
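
Together, the three new trait-alias tests cover alias bounds, trait-object types, and the accepted syntactic forms. As a rough illustration of the feature's intended use, a sketch that is not part of the commit; the `ByteStream` alias and `checksum` function are invented names, and it assumes a nightly compiler with the `trait_alias` feature:

#![feature(trait_alias)]

// One alias standing in for a bound that would otherwise be repeated on
// every function consuming a byte stream.
trait ByteStream = Iterator<Item = u8> + Send;

fn checksum(stream: impl ByteStream) -> u64 {
    stream.map(|b| b as u64).sum()
}

fn main() {
    assert_eq!(checksum(vec![1u8, 2, 3].into_iter()), 6);
}
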
index 876f6c0a80b1da7485360a9f62dbb118f3759c08..527f1355a9ef716970daf46650e748d219a4a8ba 100644 (file)
@@ -12,7 +12,7 @@ error[E0425]: cannot find value `no` in this scope
 3 | no
   | ^^ not found in this scope
 
-thread '$DIR/failed-doctest-output.rs - OtherStruct (line 27)' panicked at 'couldn't compile the test', librustdoc/test.rs:332:13
+thread '$DIR/failed-doctest-output.rs - OtherStruct (line 27)' panicked at 'couldn't compile the test', librustdoc/test.rs:323:13
 note: Run with `RUST_BACKTRACE=1` for a backtrace.
 
 ---- $DIR/failed-doctest-output.rs - SomeStruct (line 21) stdout ----
@@ -21,7 +21,7 @@ thread '$DIR/failed-doctest-output.rs - SomeStruct (line 21)' panicked at 'test
 thread 'main' panicked at 'oh no', $DIR/failed-doctest-output.rs:3:1
 note: Run with `RUST_BACKTRACE=1` for a backtrace.
 
-', librustdoc/test.rs:367:17
+', librustdoc/test.rs:358:17
 
 
 failures:
index cbb2a00214a5a30458589108ebba6ad49bc05323..9ace8714918248c619b2c7c8a46be0e4f01540dc 100644 (file)
@@ -75,8 +75,8 @@ pub trait Qux {
     /// Docs for QUX1 in trait.
     const QUX1: i8;
     // @has - '//*[@id="associatedconstant.QUX_DEFAULT0"]' 'const QUX_DEFAULT0: u16'
-    // @has - '//*[@class="docblock"]' "Docs for QUX_DEFAULT0 in trait."
-    /// Docs for QUX_DEFAULT0 in trait.
+    // @has - '//*[@class="docblock"]' "Docs for QUX_DEFAULT12 in trait."
+    /// Docs for QUX_DEFAULT12 in trait.
     const QUX_DEFAULT0: u16 = 1;
     // @has - '//*[@id="associatedconstant.QUX_DEFAULT1"]' 'const QUX_DEFAULT1: i16'
     // @has - '//*[@class="docblock"]' "Docs for QUX_DEFAULT1 in trait."
@@ -99,7 +99,7 @@ impl Qux for Bar {
     /// Docs for QUX1 in impl.
     const QUX1: i8 = 5;
     // @has - '//*[@id="associatedconstant.QUX_DEFAULT0"]' 'const QUX_DEFAULT0: u16'
-    // @has - '//*[@class="docblock"]' "Docs for QUX_DEFAULT0 in trait."
+    // @has - '//*[@class="docblock hidden"]' "Docs for QUX_DEFAULT12 in trait."
     const QUX_DEFAULT0: u16 = 6;
     // @has - '//*[@id="associatedconstant.QUX_DEFAULT1"]' 'const QUX_DEFAULT1: i16'
     // @has - '//*[@class="docblock"]' "Docs for QUX_DEFAULT1 in impl."
diff --git a/src/test/rustdoc/auxiliary/enum_primitive.rs b/src/test/rustdoc/auxiliary/enum_primitive.rs
new file mode 100644 (file)
index 0000000..c265ae4
--- /dev/null
@@ -0,0 +1,210 @@
+// Copyright (c) 2015 Anders Kaseorg <andersk@mit.edu>
+
+// Permission is hereby granted, free of charge, to any person obtaining
+// a copy of this software and associated documentation files (the
+// “Software”), to deal in the Software without restriction, including
+// without limitation the rights to use, copy, modify, merge, publish,
+// distribute, sublicense, and/or sell copies of the Software, and to
+// permit persons to whom the Software is furnished to do so, subject to
+// the following conditions:
+
+// The above copyright notice and this permission notice shall be
+// included in all copies or substantial portions of the Software.
+
+// THE SOFTWARE IS PROVIDED “AS IS”, WITHOUT WARRANTY OF ANY KIND,
+// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+// IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+// CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+// TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+// SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+
+
+//! This crate exports a macro `enum_from_primitive!` that wraps an
+//! `enum` declaration and automatically adds an implementation of
+//! `num::FromPrimitive` (reexported here), to allow conversion from
+//! primitive integers to the enum.  It therefore provides an
+//! alternative to the built-in `#[derive(FromPrimitive)]`, which
+//! requires the unstable `std::num::FromPrimitive` and is disabled in
+//! Rust 1.0.
+//!
+//! # Example
+//!
+//! ```
+//! #[macro_use] extern crate enum_primitive;
+//! extern crate num_traits;
+//! use num_traits::FromPrimitive;
+//!
+//! enum_from_primitive! {
+//! #[derive(Debug, PartialEq)]
+//! enum FooBar {
+//!     Foo = 17,
+//!     Bar = 42,
+//!     Baz,
+//! }
+//! }
+//!
+//! fn main() {
+//!     assert_eq!(FooBar::from_i32(17), Some(FooBar::Foo));
+//!     assert_eq!(FooBar::from_i32(42), Some(FooBar::Bar));
+//!     assert_eq!(FooBar::from_i32(43), Some(FooBar::Baz));
+//!     assert_eq!(FooBar::from_i32(91), None);
+//! }
+//! ```
+
+
+pub mod num_traits {
+    pub trait FromPrimitive: Sized {
+        fn from_i64(n: i64) -> Option<Self>;
+        fn from_u64(n: u64) -> Option<Self>;
+    }
+}
+
+pub use std::option::Option;
+pub use num_traits::FromPrimitive;
+
+/// Helper macro for internal use by `enum_from_primitive!`.
+#[macro_export]
+macro_rules! enum_from_primitive_impl_ty {
+    ($meth:ident, $ty:ty, $name:ident, $( $variant:ident )*) => {
+        #[allow(non_upper_case_globals, unused)]
+        fn $meth(n: $ty) -> $crate::Option<Self> {
+            $( if n == $name::$variant as $ty {
+                $crate::Option::Some($name::$variant)
+            } else )* {
+                $crate::Option::None
+            }
+        }
+    };
+}
+
+/// Helper macro for internal use by `enum_from_primitive!`.
+#[macro_export]
+#[macro_use(enum_from_primitive_impl_ty)]
+macro_rules! enum_from_primitive_impl {
+    ($name:ident, $( $variant:ident )*) => {
+        impl $crate::FromPrimitive for $name {
+            enum_from_primitive_impl_ty! { from_i64, i64, $name, $( $variant )* }
+            enum_from_primitive_impl_ty! { from_u64, u64, $name, $( $variant )* }
+        }
+    };
+}
+
+/// Wrap this macro around an `enum` declaration to get an
+/// automatically generated implementation of `num::FromPrimitive`.
+#[macro_export]
+#[macro_use(enum_from_primitive_impl)]
+macro_rules! enum_from_primitive {
+    (
+        $( #[$enum_attr:meta] )*
+        enum $name:ident {
+            $( $( #[$variant_attr:meta] )* $variant:ident ),+
+            $( = $discriminator:expr, $( $( #[$variant_two_attr:meta] )* $variant_two:ident ),+ )*
+        }
+    ) => {
+        $( #[$enum_attr] )*
+        enum $name {
+            $( $( #[$variant_attr] )* $variant ),+
+            $( = $discriminator, $( $( #[$variant_two_attr] )* $variant_two ),+ )*
+        }
+        enum_from_primitive_impl! { $name, $( $variant )+ $( $( $variant_two )+ )* }
+    };
+
+    (
+        $( #[$enum_attr:meta] )*
+        enum $name:ident {
+            $( $( $( #[$variant_attr:meta] )* $variant:ident ),+ = $discriminator:expr ),*
+        }
+    ) => {
+        $( #[$enum_attr] )*
+        enum $name {
+            $( $( $( #[$variant_attr] )* $variant ),+ = $discriminator ),*
+        }
+        enum_from_primitive_impl! { $name, $( $( $variant )+ )* }
+    };
+
+    (
+        $( #[$enum_attr:meta] )*
+        enum $name:ident {
+            $( $( #[$variant_attr:meta] )* $variant:ident ),+
+            $( = $discriminator:expr, $( $( #[$variant_two_attr:meta] )* $variant_two:ident ),+ )*,
+        }
+    ) => {
+        $( #[$enum_attr] )*
+        enum $name {
+            $( $( #[$variant_attr] )* $variant ),+
+            $( = $discriminator, $( $( #[$variant_two_attr] )* $variant_two ),+ )*,
+        }
+        enum_from_primitive_impl! { $name, $( $variant )+ $( $( $variant_two )+ )* }
+    };
+
+    (
+        $( #[$enum_attr:meta] )*
+        enum $name:ident {
+            $( $( $( #[$variant_attr:meta] )* $variant:ident ),+ = $discriminator:expr ),+,
+        }
+    ) => {
+        $( #[$enum_attr] )*
+        enum $name {
+            $( $( $( #[$variant_attr] )* $variant ),+ = $discriminator ),+,
+        }
+        enum_from_primitive_impl! { $name, $( $( $variant )+ )+ }
+    };
+
+    (
+        $( #[$enum_attr:meta] )*
+        pub enum $name:ident {
+            $( $( #[$variant_attr:meta] )* $variant:ident ),+
+            $( = $discriminator:expr, $( $( #[$variant_two_attr:meta] )* $variant_two:ident ),+ )*
+        }
+    ) => {
+        $( #[$enum_attr] )*
+        pub enum $name {
+            $( $( #[$variant_attr] )* $variant ),+
+            $( = $discriminator, $( $( #[$variant_two_attr] )* $variant_two ),+ )*
+        }
+        enum_from_primitive_impl! { $name, $( $variant )+ $( $( $variant_two )+ )* }
+    };
+
+    (
+        $( #[$enum_attr:meta] )*
+        pub enum $name:ident {
+            $( $( $( #[$variant_attr:meta] )* $variant:ident ),+ = $discriminator:expr ),*
+        }
+    ) => {
+        $( #[$enum_attr] )*
+        pub enum $name {
+            $( $( $( #[$variant_attr] )* $variant ),+ = $discriminator ),*
+        }
+        enum_from_primitive_impl! { $name, $( $( $variant )+ )* }
+    };
+
+    (
+        $( #[$enum_attr:meta] )*
+        pub enum $name:ident {
+            $( $( #[$variant_attr:meta] )* $variant:ident ),+
+            $( = $discriminator:expr, $( $( #[$variant_two_attr:meta] )* $variant_two:ident ),+ )*,
+        }
+    ) => {
+        $( #[$enum_attr] )*
+        pub enum $name {
+            $( $( #[$variant_attr] )* $variant ),+
+            $( = $discriminator, $( $( #[$variant_two_attr] )* $variant_two ),+ )*,
+        }
+        enum_from_primitive_impl! { $name, $( $variant )+ $( $( $variant_two )+ )* }
+    };
+
+    (
+        $( #[$enum_attr:meta] )*
+        pub enum $name:ident {
+            $( $( $( #[$variant_attr:meta] )* $variant:ident ),+ = $discriminator:expr ),+,
+        }
+    ) => {
+        $( #[$enum_attr] )*
+        pub enum $name {
+            $( $( $( #[$variant_attr] )* $variant ),+ = $discriminator ),+,
+        }
+        enum_from_primitive_impl! { $name, $( $( $variant )+ )+ }
+    };
+}
+
diff --git a/src/test/rustdoc/index-page.rs b/src/test/rustdoc/index-page.rs
new file mode 100644 (file)
index 0000000..9d35f8a
--- /dev/null
@@ -0,0 +1,18 @@
+// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// compile-flags: -Z unstable-options --enable-index-page
+
+#![crate_name = "foo"]
+
+// @has foo/../index.html
+// @has - '//span[@class="in-band"]' 'List of all crates'
+// @has - '//ul[@class="mod"]//a[@href="foo/index.html"]' 'foo'
+pub struct Foo;
diff --git a/src/test/rustdoc/issue-54705.rs b/src/test/rustdoc/issue-54705.rs
new file mode 100644 (file)
index 0000000..ccc9396
--- /dev/null
@@ -0,0 +1,40 @@
+// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+
+pub trait ScopeHandle<'scope> {}
+
+
+
+// @has issue_54705/struct.ScopeFutureContents.html
+// @has - '//*[@id="synthetic-implementations-list"]/*[@class="impl"]//*/code' "impl<'scope, S> \
+// Send for ScopeFutureContents<'scope, S> where S: Sync"
+//
+// @has - '//*[@id="synthetic-implementations-list"]/*[@class="impl"]//*/code' "impl<'scope, S> \
+// Sync for ScopeFutureContents<'scope, S> where S: Sync"
+pub struct ScopeFutureContents<'scope, S>
+    where S: ScopeHandle<'scope>,
+{
+    dummy: &'scope S,
+    this: Box<ScopeFuture<'scope, S>>,
+}
+
+struct ScopeFuture<'scope, S>
+    where S: ScopeHandle<'scope>,
+{
+    contents: ScopeFutureContents<'scope, S>,
+}
+
+unsafe impl<'scope, S> Send for ScopeFuture<'scope, S>
+    where S: ScopeHandle<'scope>,
+{}
+unsafe impl<'scope, S> Sync for ScopeFuture<'scope, S>
+    where S: ScopeHandle<'scope>,
+{}
index db48a6525230cb8e9cf9a0ff3f4df63801274bee..949ef1182889730f580ce65f67a7e77f6ab818e9 100644 (file)
@@ -73,7 +73,7 @@ fn c_method(&self) -> usize {
 // @has manual_impl/struct.S3.html '//*[@class="trait"]' 'T'
 // @has  - '//*[@class="docblock"]' 'Docs associated with the S3 trait implementation.'
 // @has  - '//*[@class="docblock"]' 'Docs associated with the S3 trait b_method implementation.'
-// @has - '//*[@class="docblock"]' 'Docs associated with the trait a_method definition.'
+// @has - '//*[@class="docblock hidden"]' 'Docs associated with the trait a_method definition.'
 pub struct S3(usize);
 
 /// Docs associated with the S3 trait implementation.
diff --git a/src/test/rustdoc/no-stack-overflow-25295.rs b/src/test/rustdoc/no-stack-overflow-25295.rs
new file mode 100644 (file)
index 0000000..37b0aca
--- /dev/null
@@ -0,0 +1,46 @@
+// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// ensure this code doesn't stack overflow
+// aux-build:enum_primitive.rs
+
+#[macro_use] extern crate enum_primitive;
+
+enum_from_primitive! {
+    pub enum Test {
+        A1,A2,A3,A4,A5,A6,
+        B1,B2,B3,B4,B5,B6,
+        C1,C2,C3,C4,C5,C6,
+        D1,D2,D3,D4,D5,D6,
+        E1,E2,E3,E4,E5,E6,
+        F1,F2,F3,F4,F5,F6,
+        G1,G2,G3,G4,G5,G6,
+        H1,H2,H3,H4,H5,H6,
+        I1,I2,I3,I4,I5,I6,
+        J1,J2,J3,J4,J5,J6,
+        K1,K2,K3,K4,K5,K6,
+        L1,L2,L3,L4,L5,L6,
+        M1,M2,M3,M4,M5,M6,
+        N1,N2,N3,N4,N5,N6,
+        O1,O2,O3,O4,O5,O6,
+        P1,P2,P3,P4,P5,P6,
+        Q1,Q2,Q3,Q4,Q5,Q6,
+        R1,R2,R3,R4,R5,R6,
+        S1,S2,S3,S4,S5,S6,
+        T1,T2,T3,T4,T5,T6,
+        U1,U2,U3,U4,U5,U6,
+        V1,V2,V3,V4,V5,V6,
+        W1,W2,W3,W4,W5,W6,
+        X1,X2,X3,X4,X5,X6,
+        Y1,Y2,Y3,Y4,Y5,Y6,
+        Z1,Z2,Z3,Z4,Z5,Z6,
+    }
+}
+
index 48918b996ef5989aa9e9b67d10a805295fea5d26..4dc481174a45d4ac94a5690a256b5df4bd3f1435 100644 (file)
 use std::rc::Rc;
 
 trait Foo {
-    fn foo(self: Rc<Self>) -> usize;
+    fn foo(self: &Rc<Self>) -> usize;
 }
 
 trait Bar {
-    fn foo(self: Rc<Self>) -> usize where Self: Sized;
-    fn bar(self: Box<Self>) -> usize;
+    fn foo(self: &Rc<Self>) -> usize where Self: Sized;
+    fn bar(self: Rc<Self>) -> usize;
 }
 
 impl Foo for usize {
-    fn foo(self: Rc<Self>) -> usize {
-        *self
+    fn foo(self: &Rc<Self>) -> usize {
+        **self
     }
 }
 
 impl Bar for usize {
-    fn foo(self: Rc<Self>) -> usize {
-        *self
+    fn foo(self: &Rc<Self>) -> usize {
+        **self
     }
 
-    fn bar(self: Box<Self>) -> usize {
+    fn bar(self: Rc<Self>) -> usize {
         *self
     }
 }
 
 fn make_foo() {
-    let x = Box::new(5usize) as Box<Foo>;
+    let x = Rc::new(5usize) as Rc<Foo>;
     //~^ ERROR E0038
     //~| ERROR E0038
 }
 
 fn make_bar() {
-    let x = Box::new(5usize) as Box<Bar>;
+    let x = Rc::new(5usize) as Rc<Bar>;
     x.bar();
 }
 
index ec9e65fc4c62d3fb8717c1886e4a5d13bfdf4a76..77ca118471db5d6d75c506c69902cbce7f35e8f0 100644 (file)
@@ -1,19 +1,19 @@
 error[E0038]: the trait `Foo` cannot be made into an object
-  --> $DIR/arbitrary-self-types-not-object-safe.rs:40:33
+  --> $DIR/arbitrary-self-types-not-object-safe.rs:40:32
    |
-LL |     let x = Box::new(5usize) as Box<Foo>;
-   |                                 ^^^^^^^^ the trait `Foo` cannot be made into an object
+LL |     let x = Rc::new(5usize) as Rc<Foo>;
+   |                                ^^^^^^^ the trait `Foo` cannot be made into an object
    |
-   = note: method `foo` has a non-standard `self` type
+   = note: method `foo`'s receiver cannot be dispatched on
 
 error[E0038]: the trait `Foo` cannot be made into an object
   --> $DIR/arbitrary-self-types-not-object-safe.rs:40:13
    |
-LL |     let x = Box::new(5usize) as Box<Foo>;
-   |             ^^^^^^^^^^^^^^^^ the trait `Foo` cannot be made into an object
+LL |     let x = Rc::new(5usize) as Rc<Foo>;
+   |             ^^^^^^^^^^^^^^^ the trait `Foo` cannot be made into an object
    |
-   = note: method `foo` has a non-standard `self` type
-   = note: required because of the requirements on the impl of `std::ops::CoerceUnsized<std::boxed::Box<dyn Foo>>` for `std::boxed::Box<usize>`
+   = note: method `foo`'s receiver cannot be dispatched on
+   = note: required because of the requirements on the impl of `std::ops::CoerceUnsized<std::rc::Rc<dyn Foo>>` for `std::rc::Rc<usize>`
 
 error: aborting due to 2 previous errors
 
index 545a60f6770d9471d24ffd0230fc4c8c0de8c17a..612375f904783995bc27aec4d96f1e1466402055 100644 (file)
@@ -20,6 +20,29 @@ LL |     x.clone();  //~ ERROR: use of moved value
    |
    = note: move occurs because `x` has type `T`, which does not implement the `Copy` trait
 
+error[E0505]: cannot move out of `x` because it is borrowed
+  --> $DIR/binop-move-semantics.rs:31:5
+   |
+LL |     let m = &x;
+   |             -- borrow of `x` occurs here
+...
+LL |     x  //~ ERROR: cannot move out of `x` because it is borrowed
+   |     ^ move out of `x` occurs here
+...
+LL |     use_mut(n); use_imm(m);
+   |                         - borrow later used here
+
+error[E0505]: cannot move out of `y` because it is borrowed
+  --> $DIR/binop-move-semantics.rs:33:5
+   |
+LL |     let n = &mut y;
+   |             ------ borrow of `y` occurs here
+...
+LL |     y;  //~ ERROR: cannot move out of `y` because it is borrowed
+   |     ^ move out of `y` occurs here
+LL |     use_mut(n); use_imm(m);
+   |             - borrow later used here
+
 error[E0507]: cannot move out of borrowed content
   --> $DIR/binop-move-semantics.rs:40:5
    |
@@ -62,7 +85,7 @@ LL | |     &mut f;  //~ ERROR: cannot borrow `f` as mutable because it is also b
    |       |    immutable borrow later used here
    |       mutable borrow occurs here
 
-error: aborting due to 6 previous errors
+error: aborting due to 8 previous errors
 
-Some errors occurred: E0382, E0502, E0507.
+Some errors occurred: E0382, E0502, E0505, E0507.
 For more information about an error, try `rustc --explain E0382`.
index cff0064497aff34ac814b8be55d4a1b9c4e64b9d..f6fad8b46dd9d4f68ff983ca8959f45924652404 100644 (file)
@@ -31,8 +31,8 @@ fn move_borrowed<T: Add<Output=()>>(x: T, mut y: T) {
     x  //~ ERROR: cannot move out of `x` because it is borrowed
     +
     y;  //~ ERROR: cannot move out of `y` because it is borrowed
+    use_mut(n); use_imm(m);
 }
-
 fn illegal_dereference<T: Add<Output=()>>(mut x: T, y: T) {
     let m = &mut x;
     let n = &y;
@@ -40,8 +40,8 @@ fn illegal_dereference<T: Add<Output=()>>(mut x: T, y: T) {
     *m  //~ ERROR: cannot move out of borrowed content
     +
     *n;  //~ ERROR: cannot move out of borrowed content
+    use_imm(n); use_mut(m);
 }
-
 struct Foo;
 
 impl<'a, 'b> Add<&'b Foo> for &'a mut Foo {
@@ -73,3 +73,6 @@ fn immut_plus_mut() {
 }
 
 fn main() {}
+
+fn use_mut<T>(_: &mut T) { }
+fn use_imm<T>(_: &T) { }
index e8fae63a5d617d1c2ec63cd36e9fea79d9cf6ed2..160a84c480cd31a6f930ed18b7a6bac3a9c52c1b 100644 (file)
@@ -1,15 +1,32 @@
 error[E0596]: cannot borrow `*x` as mutable, as it is behind a `&` reference
-  --> $DIR/borrowck-closures-mut-of-imm.rs:23:21
+  --> $DIR/borrowck-closures-mut-of-imm.rs:23:25
    |
-LL |     let c1 = || set(&mut *x);
-   |                     ^^^^^^^ cannot borrow as mutable
+LL |     let mut c1 = || set(&mut *x);
+   |                         ^^^^^^^ cannot borrow as mutable
 
 error[E0596]: cannot borrow `*x` as mutable, as it is behind a `&` reference
-  --> $DIR/borrowck-closures-mut-of-imm.rs:25:21
+  --> $DIR/borrowck-closures-mut-of-imm.rs:25:25
    |
-LL |     let c2 = || set(&mut *x);
-   |                     ^^^^^^^ cannot borrow as mutable
+LL |     let mut c2 = || set(&mut *x);
+   |                         ^^^^^^^ cannot borrow as mutable
 
-error: aborting due to 2 previous errors
+error[E0524]: two closures require unique access to `x` at the same time
+  --> $DIR/borrowck-closures-mut-of-imm.rs:25:18
+   |
+LL |     let mut c1 = || set(&mut *x);
+   |                  --           - first borrow occurs due to use of `x` in closure
+   |                  |
+   |                  first closure is constructed here
+LL |     //~^ ERROR cannot borrow
+LL |     let mut c2 = || set(&mut *x);
+   |                  ^^           - second borrow occurs due to use of `x` in closure
+   |                  |
+   |                  second closure is constructed here
+...
+LL |     c2(); c1();
+   |           -- first borrow later used here
+
+error: aborting due to 3 previous errors
 
-For more information about this error, try `rustc --explain E0596`.
+Some errors occurred: E0524, E0596.
+For more information about an error, try `rustc --explain E0524`.
index dc2f0e8395f08154172853662d3d83f97d12ac09..3bf4f17fde1a808a997df2ab9575d0d9822ecc7d 100644 (file)
@@ -20,11 +20,12 @@ fn set(x: &mut isize) {
 }
 
 fn a(x: &isize) {
-    let c1 = || set(&mut *x);
+    let mut c1 = || set(&mut *x);
     //~^ ERROR cannot borrow
-    let c2 = || set(&mut *x);
+    let mut c2 = || set(&mut *x);
     //~^ ERROR cannot borrow
     //~| ERROR two closures require unique access to `x` at the same time
+    c2(); c1();
 }
 
 fn main() {
index 87eb52b6aa6052ab13421e9a621bc7bd90aab2d6..c248595d57119c1929283086d1e52ea3c5cbb451 100644 (file)
@@ -1,30 +1,30 @@
 error[E0524]: two closures require unique access to `x` at the same time
-  --> $DIR/borrowck-closures-mut-of-imm.rs:25:14
+  --> $DIR/borrowck-closures-mut-of-imm.rs:25:18
    |
-LL |     let c1 = || set(&mut *x);
-   |              --           - previous borrow occurs due to use of `x` in closure
-   |              |
-   |              first closure is constructed here
+LL |     let mut c1 = || set(&mut *x);
+   |                  --           - previous borrow occurs due to use of `x` in closure
+   |                  |
+   |                  first closure is constructed here
 LL |     //~^ ERROR cannot borrow
-LL |     let c2 = || set(&mut *x);
-   |              ^^           - borrow occurs due to use of `x` in closure
-   |              |
-   |              second closure is constructed here
+LL |     let mut c2 = || set(&mut *x);
+   |                  ^^           - borrow occurs due to use of `x` in closure
+   |                  |
+   |                  second closure is constructed here
 ...
 LL | }
    | - borrow from first closure ends here
 
 error[E0596]: cannot borrow immutable borrowed content `***x` as mutable
-  --> $DIR/borrowck-closures-mut-of-imm.rs:23:26
+  --> $DIR/borrowck-closures-mut-of-imm.rs:23:30
    |
-LL |     let c1 = || set(&mut *x);
-   |                          ^^ cannot borrow as mutable
+LL |     let mut c1 = || set(&mut *x);
+   |                              ^^ cannot borrow as mutable
 
 error[E0596]: cannot borrow immutable borrowed content `***x` as mutable
-  --> $DIR/borrowck-closures-mut-of-imm.rs:25:26
+  --> $DIR/borrowck-closures-mut-of-imm.rs:25:30
    |
-LL |     let c2 = || set(&mut *x);
-   |                          ^^ cannot borrow as mutable
+LL |     let mut c2 = || set(&mut *x);
+   |                              ^^ cannot borrow as mutable
 
 error: aborting due to 3 previous errors
 
diff --git a/src/test/ui/borrowck/borrowck-closures-mut-of-mut.nll.stderr b/src/test/ui/borrowck/borrowck-closures-mut-of-mut.nll.stderr
new file mode 100644 (file)
index 0000000..18f95f2
--- /dev/null
@@ -0,0 +1,18 @@
+error[E0524]: two closures require unique access to `x` at the same time
+  --> $DIR/borrowck-closures-mut-of-mut.rs:14:18
+   |
+LL |     let mut c1 = || set(&mut *x);
+   |                  --           - first borrow occurs due to use of `x` in closure
+   |                  |
+   |                  first closure is constructed here
+LL |     let mut c2 = || set(&mut *x);
+   |                  ^^           - second borrow occurs due to use of `x` in closure
+   |                  |
+   |                  second closure is constructed here
+LL |     //~^ ERROR two closures require unique access to `x` at the same time
+LL |     c2(); c1();
+   |           -- first borrow later used here
+
+error: aborting due to previous error
+
+For more information about this error, try `rustc --explain E0524`.
diff --git a/src/test/ui/borrowck/borrowck-closures-mut-of-mut.rs b/src/test/ui/borrowck/borrowck-closures-mut-of-mut.rs
new file mode 100644 (file)
index 0000000..50c6f2c
--- /dev/null
@@ -0,0 +1,20 @@
+// Tests that two closures cannot simultaneously both have mutable
+// access to the variable. Related to issue #6801.
+
+fn get(x: &isize) -> isize {
+    *x
+}
+
+fn set(x: &mut isize) {
+    *x = 4;
+}
+
+fn a(x: &mut isize) {
+    let mut c1 = || set(&mut *x);
+    let mut c2 = || set(&mut *x);
+    //~^ ERROR two closures require unique access to `x` at the same time
+    c2(); c1();
+}
+
+fn main() {
+}
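
The new test deliberately triggers E0524: both closures capture `x` with a unique borrow and both are called afterwards, so the borrows overlap. A sketch of one common way out, not part of the commit, which scopes the first closure so its borrow ends before the second closure is created:

fn set(x: &mut isize) {
    *x = 4;
}

fn a(x: &mut isize) {
    {
        let mut c1 = || set(&mut *x);
        c1();
    } // c1's unique borrow of `x` ends here
    let mut c2 = || set(&mut *x);
    c2();
}

fn main() {
    let mut value = 0;
    a(&mut value);
    assert_eq!(value, 4);
}
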
diff --git a/src/test/ui/borrowck/borrowck-closures-mut-of-mut.stderr b/src/test/ui/borrowck/borrowck-closures-mut-of-mut.stderr
new file mode 100644 (file)
index 0000000..2c55877
--- /dev/null
@@ -0,0 +1,18 @@
+error[E0524]: two closures require unique access to `x` at the same time
+  --> $DIR/borrowck-closures-mut-of-mut.rs:14:18
+   |
+LL |     let mut c1 = || set(&mut *x);
+   |                  --           - previous borrow occurs due to use of `x` in closure
+   |                  |
+   |                  first closure is constructed here
+LL |     let mut c2 = || set(&mut *x);
+   |                  ^^           - borrow occurs due to use of `x` in closure
+   |                  |
+   |                  second closure is constructed here
+...
+LL | }
+   | - borrow from first closure ends here
+
+error: aborting due to previous error
+
+For more information about this error, try `rustc --explain E0524`.
diff --git a/src/test/ui/borrowck/borrowck-lend-flow-loop.ast.stderr b/src/test/ui/borrowck/borrowck-lend-flow-loop.ast.stderr
new file mode 100644 (file)
index 0000000..1844d82
--- /dev/null
@@ -0,0 +1,93 @@
+error[E0502]: cannot borrow `*v` as immutable because `v` is also borrowed as mutable
+  --> $DIR/borrowck-lend-flow-loop.rs:35:17
+   |
+LL |     let mut x = &mut v;
+   |                      - mutable borrow occurs here
+...
+LL |         borrow(&*v); //[ast]~ ERROR cannot borrow
+   |                 ^^ immutable borrow occurs here
+LL |     }
+LL | }
+   | - mutable borrow ends here
+
+error[E0502]: cannot borrow `*v` as immutable because `v` is also borrowed as mutable
+  --> $DIR/borrowck-lend-flow-loop.rs:45:17
+   |
+LL |     let mut x = &mut v;
+   |                      - mutable borrow occurs here
+LL |     for _ in 0..3 {
+LL |         borrow(&*v); //[ast]~ ERROR cannot borrow
+   |                 ^^ immutable borrow occurs here
+...
+LL | }
+   | - mutable borrow ends here
+
+error[E0502]: cannot borrow `*v` as mutable because `v` is also borrowed as immutable
+  --> $DIR/borrowck-lend-flow-loop.rs:57:25
+   |
+LL |         borrow_mut(&mut *v); //[ast]~ ERROR cannot borrow
+   |                         ^^ mutable borrow occurs here
+LL |         _x = &v;
+   |               - immutable borrow occurs here
+LL |     }
+LL | }
+   | - immutable borrow ends here
+
+error[E0502]: cannot borrow `*v` as mutable because `v` is also borrowed as immutable
+  --> $DIR/borrowck-lend-flow-loop.rs:69:25
+   |
+LL |         borrow_mut(&mut *v); //[ast]~ ERROR cannot borrow
+   |                         ^^ mutable borrow occurs here
+LL |         _x = &v;
+   |               - immutable borrow occurs here
+LL |     }
+LL | }
+   | - immutable borrow ends here
+
+error[E0502]: cannot borrow `*v` as mutable because `v` is also borrowed as immutable
+  --> $DIR/borrowck-lend-flow-loop.rs:86:21
+   |
+LL |         _x = &v;
+   |               - immutable borrow occurs here
+...
+LL |     borrow_mut(&mut *v); //[ast]~ ERROR cannot borrow
+   |                     ^^ mutable borrow occurs here
+LL | }
+   | - immutable borrow ends here
+
+error[E0502]: cannot borrow `*v` as mutable because `v` is also borrowed as immutable
+  --> $DIR/borrowck-lend-flow-loop.rs:100:21
+   |
+LL |         _x = &v;
+   |               - immutable borrow occurs here
+...
+LL |     borrow_mut(&mut *v); //[ast]~ ERROR cannot borrow
+   |                     ^^ mutable borrow occurs here
+LL | }
+   | - immutable borrow ends here
+
+error[E0502]: cannot borrow `*v` as immutable because `v` is also borrowed as mutable
+  --> $DIR/borrowck-lend-flow-loop.rs:109:17
+   |
+LL |         borrow(&*v); //[ast]~ ERROR cannot borrow
+   |                 ^^ immutable borrow occurs here
+...
+LL |             x = &mut v; //[ast]~ ERROR cannot borrow
+   |                      - mutable borrow occurs here
+...
+LL | }
+   | - mutable borrow ends here
+
+error[E0499]: cannot borrow `v` as mutable more than once at a time
+  --> $DIR/borrowck-lend-flow-loop.rs:112:22
+   |
+LL |             x = &mut v; //[ast]~ ERROR cannot borrow
+   |                      ^ mutable borrow starts here in previous iteration of loop
+...
+LL | }
+   | - mutable borrow ends here
+
+error: aborting due to 8 previous errors
+
+Some errors occurred: E0499, E0502.
+For more information about an error, try `rustc --explain E0499`.
index 388fc9c5fa8acd976911a2ebb7180eb76cd7dd23..19de3582c881964372e0b23851fca87a1bd352c0 100644 (file)
@@ -4,9 +4,9 @@ error[E0502]: cannot borrow `*v` as immutable because it is also borrowed as mut
 LL |     let mut x = &mut v;
    |                 ------ mutable borrow occurs here
 LL |     for _ in 0..3 {
-LL |         borrow(&*v); //~ ERROR cannot borrow
+LL |         borrow(&*v); //[ast]~ ERROR cannot borrow
    |                ^^^ immutable borrow occurs here
-LL |     }
+...
 LL |     *x = box 5;
    |     -- mutable borrow used here, in later iteration of loop
 
@@ -15,10 +15,10 @@ error[E0502]: cannot borrow `*v` as immutable because it is also borrowed as mut
    |
 LL |         **x += 1;
    |         -------- mutable borrow used here, in later iteration of loop
-LL |         borrow(&*v); //~ ERROR cannot borrow
+LL |         borrow(&*v); //[ast]~ ERROR cannot borrow
    |                ^^^ immutable borrow occurs here
-LL |         if cond2 {
-LL |             x = &mut v; //~ ERROR cannot borrow
+...
+LL |             x = &mut v; //[ast]~ ERROR cannot borrow
    |                 ------ mutable borrow occurs here
 
 error: aborting due to 2 previous errors
index f09e7ffd7e4b791c4b51472ca9db8013b79e0659..7008e5cef4b7587380ebd06ca5caf538beab8df6 100644 (file)
@@ -1,18 +1,18 @@
-// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-// Note: the borrowck analysis is currently flow-insensitive.
-// Therefore, some of these errors are marked as spurious and could be
-// corrected by a simple change to the analysis.  The others are
-// either genuine or would require more advanced changes.  The latter
-// cases are noted.
+// revisions: ast nll
+
+// Since we are testing nll migration explicitly as a separate
+// revision, don't worry about the --compare-mode=nll on this test.
+
+// ignore-compare-mode-nll
+
+//[ast]compile-flags: -Z borrowck=ast
+//[nll]compile-flags: -Z borrowck=migrate -Z two-phase-borrows
+
+// Note: the borrowck analysis was originally a flow-insensitive pass
+// over the AST. Therefore, some of these (AST) errors are marked as
+// spurious and are corrected by the flow-sensitive (NLL) analysis.
+// The others are either genuine or would require more advanced
+// changes. The latter cases are noted.
 
 #![feature(box_syntax)]
 
@@ -32,7 +32,7 @@ fn loop_overarching_alias_mut() {
     let mut x = &mut v;
     **x += 1;
     loop {
-        borrow(&*v); //~ ERROR cannot borrow
+        borrow(&*v); //[ast]~ ERROR cannot borrow
     }
 }
 
@@ -42,11 +42,11 @@ fn block_overarching_alias_mut() {
     let mut v: Box<_> = box 3;
     let mut x = &mut v;
     for _ in 0..3 {
-        borrow(&*v); //~ ERROR cannot borrow
+        borrow(&*v); //[ast]~ ERROR cannot borrow
+        //[nll]~^ ERROR cannot borrow
     }
     *x = box 5;
 }
-
 fn loop_aliased_mut() {
     // In this instance, the borrow is carried through the loop.
 
@@ -54,7 +54,7 @@ fn loop_aliased_mut() {
     let mut w: Box<_> = box 4;
     let mut _x = &w;
     loop {
-        borrow_mut(&mut *v); //~ ERROR cannot borrow
+        borrow_mut(&mut *v); //[ast]~ ERROR cannot borrow
         _x = &v;
     }
 }
@@ -66,7 +66,7 @@ fn while_aliased_mut() {
     let mut w: Box<_> = box 4;
     let mut _x = &w;
     while cond() {
-        borrow_mut(&mut *v); //~ ERROR cannot borrow
+        borrow_mut(&mut *v); //[ast]~ ERROR cannot borrow
         _x = &v;
     }
 }
@@ -83,7 +83,7 @@ fn loop_aliased_mut_break() {
         _x = &v;
         break;
     }
-    borrow_mut(&mut *v); //~ ERROR cannot borrow
+    borrow_mut(&mut *v); //[ast]~ ERROR cannot borrow
 }
 
 fn while_aliased_mut_break() {
@@ -97,7 +97,7 @@ fn while_aliased_mut_break() {
         _x = &v;
         break;
     }
-    borrow_mut(&mut *v); //~ ERROR cannot borrow
+    borrow_mut(&mut *v); //[ast]~ ERROR cannot borrow
 }
 
 fn while_aliased_mut_cond(cond: bool, cond2: bool) {
@@ -106,13 +106,13 @@ fn while_aliased_mut_cond(cond: bool, cond2: bool) {
     let mut x = &mut w;
     while cond {
         **x += 1;
-        borrow(&*v); //~ ERROR cannot borrow
+        borrow(&*v); //[ast]~ ERROR cannot borrow
+        //[nll]~^ ERROR cannot borrow
         if cond2 {
-            x = &mut v; //~ ERROR cannot borrow
+            x = &mut v; //[ast]~ ERROR cannot borrow
         }
     }
 }
-
 fn loop_break_pops_scopes<'r, F>(_v: &'r mut [usize], mut f: F) where
     F: FnMut(&'r mut usize) -> bool,
 {
diff --git a/src/test/ui/borrowck/borrowck-lend-flow-loop.stderr b/src/test/ui/borrowck/borrowck-lend-flow-loop.stderr
deleted file mode 100644 (file)
index 534e30b..0000000
+++ /dev/null
@@ -1,93 +0,0 @@
-error[E0502]: cannot borrow `*v` as immutable because `v` is also borrowed as mutable
-  --> $DIR/borrowck-lend-flow-loop.rs:35:17
-   |
-LL |     let mut x = &mut v;
-   |                      - mutable borrow occurs here
-...
-LL |         borrow(&*v); //~ ERROR cannot borrow
-   |                 ^^ immutable borrow occurs here
-LL |     }
-LL | }
-   | - mutable borrow ends here
-
-error[E0502]: cannot borrow `*v` as immutable because `v` is also borrowed as mutable
-  --> $DIR/borrowck-lend-flow-loop.rs:45:17
-   |
-LL |     let mut x = &mut v;
-   |                      - mutable borrow occurs here
-LL |     for _ in 0..3 {
-LL |         borrow(&*v); //~ ERROR cannot borrow
-   |                 ^^ immutable borrow occurs here
-...
-LL | }
-   | - mutable borrow ends here
-
-error[E0502]: cannot borrow `*v` as mutable because `v` is also borrowed as immutable
-  --> $DIR/borrowck-lend-flow-loop.rs:57:25
-   |
-LL |         borrow_mut(&mut *v); //~ ERROR cannot borrow
-   |                         ^^ mutable borrow occurs here
-LL |         _x = &v;
-   |               - immutable borrow occurs here
-LL |     }
-LL | }
-   | - immutable borrow ends here
-
-error[E0502]: cannot borrow `*v` as mutable because `v` is also borrowed as immutable
-  --> $DIR/borrowck-lend-flow-loop.rs:69:25
-   |
-LL |         borrow_mut(&mut *v); //~ ERROR cannot borrow
-   |                         ^^ mutable borrow occurs here
-LL |         _x = &v;
-   |               - immutable borrow occurs here
-LL |     }
-LL | }
-   | - immutable borrow ends here
-
-error[E0502]: cannot borrow `*v` as mutable because `v` is also borrowed as immutable
-  --> $DIR/borrowck-lend-flow-loop.rs:86:21
-   |
-LL |         _x = &v;
-   |               - immutable borrow occurs here
-...
-LL |     borrow_mut(&mut *v); //~ ERROR cannot borrow
-   |                     ^^ mutable borrow occurs here
-LL | }
-   | - immutable borrow ends here
-
-error[E0502]: cannot borrow `*v` as mutable because `v` is also borrowed as immutable
-  --> $DIR/borrowck-lend-flow-loop.rs:100:21
-   |
-LL |         _x = &v;
-   |               - immutable borrow occurs here
-...
-LL |     borrow_mut(&mut *v); //~ ERROR cannot borrow
-   |                     ^^ mutable borrow occurs here
-LL | }
-   | - immutable borrow ends here
-
-error[E0502]: cannot borrow `*v` as immutable because `v` is also borrowed as mutable
-  --> $DIR/borrowck-lend-flow-loop.rs:109:17
-   |
-LL |         borrow(&*v); //~ ERROR cannot borrow
-   |                 ^^ immutable borrow occurs here
-LL |         if cond2 {
-LL |             x = &mut v; //~ ERROR cannot borrow
-   |                      - mutable borrow occurs here
-...
-LL | }
-   | - mutable borrow ends here
-
-error[E0499]: cannot borrow `v` as mutable more than once at a time
-  --> $DIR/borrowck-lend-flow-loop.rs:111:22
-   |
-LL |             x = &mut v; //~ ERROR cannot borrow
-   |                      ^ mutable borrow starts here in previous iteration of loop
-...
-LL | }
-   | - mutable borrow ends here
-
-error: aborting due to 8 previous errors
-
-Some errors occurred: E0499, E0502.
-For more information about an error, try `rustc --explain E0499`.
index dc8d731dede74236aa65119e9528b939d291f697..0c4f2fa9d718bbca248899b6aada15b77244cf82 100644 (file)
@@ -1,3 +1,13 @@
+error[E0502]: cannot borrow `s` as immutable because it is also borrowed as mutable
+  --> $DIR/borrowck-overloaded-call.rs:69:5
+   |
+LL |     let sp = &mut s;
+   |              ------ mutable borrow occurs here
+LL |     s(3);   //~ ERROR cannot borrow `s` as immutable because it is also borrowed as mutable
+   |     ^ immutable borrow occurs here
+LL |     use_mut(sp);
+   |             -- mutable borrow later used here
+
 error[E0596]: cannot borrow `s` as mutable, as it is not declared as mutable
   --> $DIR/borrowck-overloaded-call.rs:77:5
    |
@@ -17,7 +27,7 @@ LL |     s(" world".to_string());    //~ ERROR use of moved value: `s`
    |
    = note: move occurs because `s` has type `SFnOnce`, which does not implement the `Copy` trait
 
-error: aborting due to 2 previous errors
+error: aborting due to 3 previous errors
 
-Some errors occurred: E0382, E0596.
+Some errors occurred: E0382, E0502, E0596.
 For more information about an error, try `rustc --explain E0382`.
index 41f3e472cd125596078db87236915af96f336bef..b2401fbbc042c559739427564959380a316df284 100644 (file)
@@ -67,8 +67,8 @@ fn f() {
     };
     let sp = &mut s;
     s(3);   //~ ERROR cannot borrow `s` as immutable because it is also borrowed as mutable
+    use_mut(sp);
 }
-
 fn g() {
     let s = SFnMut {
         x: 1,
@@ -86,3 +86,5 @@ fn h() {
 }
 
 fn main() {}
+
+fn use_mut<T>(_: &mut T) { }
index fa2473adc2ffd14a5a5617b9b8772953bbae8140..bb5bafbbc7b85a0dbb314d7e0868831829e4a5e7 100644 (file)
@@ -5,6 +5,7 @@ LL |     let sp = &mut s;
    |                   - mutable borrow occurs here
 LL |     s(3);   //~ ERROR cannot borrow `s` as immutable because it is also borrowed as mutable
    |     ^ immutable borrow occurs here
+LL |     use_mut(sp);
 LL | }
    | - mutable borrow ends here
 
index 824d8298ecbc671e28457576fc7591c60d0d4e99..198d086aa3be6d9926fef9eab1fa8d8087f3d4a4 100644 (file)
@@ -1,3 +1,27 @@
+error[E0505]: cannot move out of `s` because it is borrowed
+  --> $DIR/borrowck-overloaded-index-move-index.rs:60:22
+   |
+LL |     let rs = &mut s;
+   |              ------ borrow of `s` occurs here
+LL | 
+LL |     println!("{}", f[s]);
+   |                      ^ move out of `s` occurs here
+...
+LL |     use_mut(rs);
+   |             -- borrow later used here
+
+error[E0505]: cannot move out of `s` because it is borrowed
+  --> $DIR/borrowck-overloaded-index-move-index.rs:63:7
+   |
+LL |     let rs = &mut s;
+   |              ------ borrow of `s` occurs here
+...
+LL |     f[s] = 10;
+   |       ^ move out of `s` occurs here
+...
+LL |     use_mut(rs);
+   |             -- borrow later used here
+
 error[E0382]: use of moved value: `s`
   --> $DIR/borrowck-overloaded-index-move-index.rs:63:7
    |
@@ -9,6 +33,7 @@ LL |     f[s] = 10;
    |
    = note: move occurs because `s` has type `std::string::String`, which does not implement the `Copy` trait
 
-error: aborting due to previous error
+error: aborting due to 3 previous errors
 
-For more information about this error, try `rustc --explain E0382`.
+Some errors occurred: E0382, E0505.
+For more information about an error, try `rustc --explain E0382`.
index d8615d1905338444f1ff3ab171f15b19b18c3ebe..e95423a8e834d78e224b0c0568c95a2805900f01 100644 (file)
@@ -71,4 +71,8 @@ fn main() {
     let _j = &i;
     println!("{}", s[i]); // no error, i is copy
     println!("{}", s[i]);
+
+    use_mut(rs);
 }
+
+fn use_mut<T>(_: &mut T) { }
index 4c81bb8eb3086401951f30a7391fd4e2324c684e..1b4f9e77da80b77d17041e91862687bb23b3d34a 100644 (file)
@@ -1,3 +1,107 @@
+error[E0499]: cannot borrow `foo.bar1` as mutable more than once at a time
+  --> $DIR/borrowck-reborrow-from-mut.rs:23:17
+   |
+LL |     let _bar1 = &mut foo.bar1;
+   |                 ------------- first mutable borrow occurs here
+LL |     let _bar2 = &mut foo.bar1;  //~ ERROR cannot borrow
+   |                 ^^^^^^^^^^^^^ second mutable borrow occurs here
+LL |     use_mut(_bar1);
+   |             ----- first borrow later used here
+
+error[E0502]: cannot borrow `foo.bar1` as immutable because it is also borrowed as mutable
+  --> $DIR/borrowck-reborrow-from-mut.rs:28:17
+   |
+LL |     let _bar1 = &mut foo.bar1;
+   |                 ------------- mutable borrow occurs here
+LL |     let _bar2 = &foo.bar1;  //~ ERROR cannot borrow
+   |                 ^^^^^^^^^ immutable borrow occurs here
+LL |     use_mut(_bar1);
+   |             ----- mutable borrow later used here
+
+error[E0502]: cannot borrow `foo.bar1` as mutable because it is also borrowed as immutable
+  --> $DIR/borrowck-reborrow-from-mut.rs:33:17
+   |
+LL |     let _bar1 = &foo.bar1;
+   |                 --------- immutable borrow occurs here
+LL |     let _bar2 = &mut foo.bar1;  //~ ERROR cannot borrow
+   |                 ^^^^^^^^^^^^^ mutable borrow occurs here
+LL |     use_imm(_bar1);
+   |             ----- immutable borrow later used here
+
+error[E0499]: cannot borrow `foo.bar1` as mutable more than once at a time
+  --> $DIR/borrowck-reborrow-from-mut.rs:55:21
+   |
+LL |     let _bar1 = &mut foo.bar1;
+   |                 ------------- first mutable borrow occurs here
+LL |     match *foo {
+LL |         Foo { bar1: ref mut _bar1, bar2: _ } => {}
+   |                     ^^^^^^^^^^^^^ second mutable borrow occurs here
+...
+LL |     use_mut(_bar1);
+   |             ----- first borrow later used here
+
+error[E0502]: cannot borrow `foo.bar1` as immutable because it is also borrowed as mutable
+  --> $DIR/borrowck-reborrow-from-mut.rs:62:17
+   |
+LL |     let _bar1 = &mut foo.bar1.int1;
+   |                 ------------------ mutable borrow occurs here
+LL |     let _foo1 = &foo.bar1; //~ ERROR cannot borrow
+   |                 ^^^^^^^^^ immutable borrow occurs here
+LL |     let _foo2 = &*foo; //~ ERROR cannot borrow
+LL |     use_mut(_bar1);
+   |             ----- mutable borrow later used here
+
+error[E0502]: cannot borrow `*foo` as immutable because it is also borrowed as mutable
+  --> $DIR/borrowck-reborrow-from-mut.rs:63:17
+   |
+LL |     let _bar1 = &mut foo.bar1.int1;
+   |                 ------------------ mutable borrow occurs here
+LL |     let _foo1 = &foo.bar1; //~ ERROR cannot borrow
+LL |     let _foo2 = &*foo; //~ ERROR cannot borrow
+   |                 ^^^^^ immutable borrow occurs here
+LL |     use_mut(_bar1);
+   |             ----- mutable borrow later used here
+
+error[E0499]: cannot borrow `foo.bar1` as mutable more than once at a time
+  --> $DIR/borrowck-reborrow-from-mut.rs:68:17
+   |
+LL |     let _bar1 = &mut foo.bar1.int1;
+   |                 ------------------ first mutable borrow occurs here
+LL |     let _foo1 = &mut foo.bar1; //~ ERROR cannot borrow
+   |                 ^^^^^^^^^^^^^ second mutable borrow occurs here
+LL |     use_mut(_bar1);
+   |             ----- first borrow later used here
+
+error[E0499]: cannot borrow `*foo` as mutable more than once at a time
+  --> $DIR/borrowck-reborrow-from-mut.rs:73:17
+   |
+LL |     let _bar1 = &mut foo.bar1.int1;
+   |                 ------------------ first mutable borrow occurs here
+LL |     let _foo2 = &mut *foo; //~ ERROR cannot borrow
+   |                 ^^^^^^^^^ second mutable borrow occurs here
+LL |     use_mut(_bar1);
+   |             ----- first borrow later used here
+
+error[E0502]: cannot borrow `foo.bar1` as mutable because it is also borrowed as immutable
+  --> $DIR/borrowck-reborrow-from-mut.rs:78:17
+   |
+LL |     let _bar1 = &foo.bar1.int1;
+   |                 -------------- immutable borrow occurs here
+LL |     let _foo1 = &mut foo.bar1; //~ ERROR cannot borrow
+   |                 ^^^^^^^^^^^^^ mutable borrow occurs here
+LL |     use_imm(_bar1);
+   |             ----- immutable borrow later used here
+
+error[E0502]: cannot borrow `*foo` as mutable because it is also borrowed as immutable
+  --> $DIR/borrowck-reborrow-from-mut.rs:83:17
+   |
+LL |     let _bar1 = &foo.bar1.int1;
+   |                 -------------- immutable borrow occurs here
+LL |     let _foo2 = &mut *foo; //~ ERROR cannot borrow
+   |                 ^^^^^^^^^ mutable borrow occurs here
+LL |     use_imm(_bar1);
+   |             ----- immutable borrow later used here
+
 error[E0596]: cannot borrow `foo.bar1` as mutable, as it is behind a `&` reference
   --> $DIR/borrowck-reborrow-from-mut.rs:98:17
    |
@@ -6,6 +110,7 @@ LL | fn borrow_mut_from_imm(foo: &Foo) {
 LL |     let _bar1 = &mut foo.bar1; //~ ERROR cannot borrow
    |                 ^^^^^^^^^^^^^ `foo` is a `&` reference, so the data it refers to cannot be borrowed as mutable
 
-error: aborting due to previous error
+error: aborting due to 11 previous errors
 
-For more information about this error, try `rustc --explain E0596`.
+Some errors occurred: E0499, E0502, E0596.
+For more information about an error, try `rustc --explain E0499`.
index 6f5dfa67be50debc5837b965d18eff2e5281c4bb..9235d900a7e792db5a978ebcc8550601269eb84e 100644 (file)
@@ -21,79 +21,79 @@ struct Bar {
 fn borrow_same_field_twice_mut_mut(foo: &mut Foo) {
     let _bar1 = &mut foo.bar1;
     let _bar2 = &mut foo.bar1;  //~ ERROR cannot borrow
+    use_mut(_bar1);
 }
-
 fn borrow_same_field_twice_mut_imm(foo: &mut Foo) {
     let _bar1 = &mut foo.bar1;
     let _bar2 = &foo.bar1;  //~ ERROR cannot borrow
+    use_mut(_bar1);
 }
-
 fn borrow_same_field_twice_imm_mut(foo: &mut Foo) {
     let _bar1 = &foo.bar1;
     let _bar2 = &mut foo.bar1;  //~ ERROR cannot borrow
+    use_imm(_bar1);
 }
-
 fn borrow_same_field_twice_imm_imm(foo: &mut Foo) {
     let _bar1 = &foo.bar1;
     let _bar2 = &foo.bar1;
+    use_imm(_bar1);
 }
-
 fn borrow_both_mut(foo: &mut Foo) {
     let _bar1 = &mut foo.bar1;
     let _bar2 = &mut foo.bar2;
+    use_mut(_bar1);
 }
-
 fn borrow_both_mut_pattern(foo: &mut Foo) {
     match *foo {
-        Foo { bar1: ref mut _bar1, bar2: ref mut _bar2 } => {}
+        Foo { bar1: ref mut _bar1, bar2: ref mut _bar2 } =>
+        { use_mut(_bar1); use_mut(_bar2); }
     }
 }
-
 fn borrow_var_and_pattern(foo: &mut Foo) {
     let _bar1 = &mut foo.bar1;
     match *foo {
         Foo { bar1: ref mut _bar1, bar2: _ } => {}
         //~^ ERROR cannot borrow
     }
+    use_mut(_bar1);
 }
-
 fn borrow_mut_and_base_imm(foo: &mut Foo) {
     let _bar1 = &mut foo.bar1.int1;
     let _foo1 = &foo.bar1; //~ ERROR cannot borrow
     let _foo2 = &*foo; //~ ERROR cannot borrow
+    use_mut(_bar1);
 }
-
 fn borrow_mut_and_base_mut(foo: &mut Foo) {
     let _bar1 = &mut foo.bar1.int1;
     let _foo1 = &mut foo.bar1; //~ ERROR cannot borrow
+    use_mut(_bar1);
 }
-
 fn borrow_mut_and_base_mut2(foo: &mut Foo) {
     let _bar1 = &mut foo.bar1.int1;
     let _foo2 = &mut *foo; //~ ERROR cannot borrow
+    use_mut(_bar1);
 }
-
 fn borrow_imm_and_base_mut(foo: &mut Foo) {
     let _bar1 = &foo.bar1.int1;
     let _foo1 = &mut foo.bar1; //~ ERROR cannot borrow
+    use_imm(_bar1);
 }
-
 fn borrow_imm_and_base_mut2(foo: &mut Foo) {
     let _bar1 = &foo.bar1.int1;
     let _foo2 = &mut *foo; //~ ERROR cannot borrow
+    use_imm(_bar1);
 }
-
 fn borrow_imm_and_base_imm(foo: &mut Foo) {
     let _bar1 = &foo.bar1.int1;
     let _foo1 = &foo.bar1;
     let _foo2 = &*foo;
+    use_imm(_bar1);
 }
-
 fn borrow_mut_and_imm(foo: &mut Foo) {
     let _bar1 = &mut foo.bar1;
     let _foo1 = &foo.bar2;
+    use_mut(_bar1);
 }
-
 fn borrow_mut_from_imm(foo: &Foo) {
     let _bar1 = &mut foo.bar1; //~ ERROR cannot borrow
 }
@@ -101,6 +101,9 @@ fn borrow_mut_from_imm(foo: &Foo) {
 fn borrow_long_path_both_mut(foo: &mut Foo) {
     let _bar1 = &mut foo.bar1.int1;
     let _foo1 = &mut foo.bar2.int2;
+    use_mut(_bar1);
 }
-
 fn main() {}
+
+fn use_mut<T>(_: &mut T) { }
+fn use_imm<T>(_: &T) { }
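
For context, the `use_mut`/`use_imm` helpers added above keep each borrow live past the conflicting statement: under NLL a borrow ends at its last use, not at the closing brace. A minimal sketch of that rule (not part of the commit; the vector and names are illustrative):

    fn main() {
        let mut v = vec![1, 2, 3];
        let first = &v[0];        // shared borrow of `v` starts here
        // v.push(4);             // would conflict: `first` is still used below
        println!("{}", first);    // last use of `first`; the borrow ends here
        v.push(4);                // accepted under NLL, since no borrow is live
    }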
index 00660ff84841b002184430a75d46ff25b9b94596..1310e38cb3ee6f7567b57d9f51b80f9889374bad 100644 (file)
@@ -5,6 +5,7 @@ LL |     let _bar1 = &mut foo.bar1;
    |                      -------- first mutable borrow occurs here
 LL |     let _bar2 = &mut foo.bar1;  //~ ERROR cannot borrow
    |                      ^^^^^^^^ second mutable borrow occurs here
+LL |     use_mut(_bar1);
 LL | }
    | - first borrow ends here
 
@@ -15,6 +16,7 @@ LL |     let _bar1 = &mut foo.bar1;
    |                      -------- mutable borrow occurs here
 LL |     let _bar2 = &foo.bar1;  //~ ERROR cannot borrow
    |                  ^^^^^^^^ immutable borrow occurs here
+LL |     use_mut(_bar1);
 LL | }
    | - mutable borrow ends here
 
@@ -25,6 +27,7 @@ LL |     let _bar1 = &foo.bar1;
    |                  -------- immutable borrow occurs here
 LL |     let _bar2 = &mut foo.bar1;  //~ ERROR cannot borrow
    |                      ^^^^^^^^ mutable borrow occurs here
+LL |     use_imm(_bar1);
 LL | }
    | - immutable borrow ends here
 
@@ -47,7 +50,7 @@ LL |     let _bar1 = &mut foo.bar1.int1;
    |                      ------------- mutable borrow occurs here
 LL |     let _foo1 = &foo.bar1; //~ ERROR cannot borrow
    |                  ^^^^^^^^ immutable borrow occurs here
-LL |     let _foo2 = &*foo; //~ ERROR cannot borrow
+...
 LL | }
    | - mutable borrow ends here
 
@@ -59,6 +62,7 @@ LL |     let _bar1 = &mut foo.bar1.int1;
 LL |     let _foo1 = &foo.bar1; //~ ERROR cannot borrow
 LL |     let _foo2 = &*foo; //~ ERROR cannot borrow
    |                  ^^^^ immutable borrow occurs here
+LL |     use_mut(_bar1);
 LL | }
    | - mutable borrow ends here
 
@@ -69,6 +73,7 @@ LL |     let _bar1 = &mut foo.bar1.int1;
    |                      ------------- first mutable borrow occurs here
 LL |     let _foo1 = &mut foo.bar1; //~ ERROR cannot borrow
    |                      ^^^^^^^^ second mutable borrow occurs here
+LL |     use_mut(_bar1);
 LL | }
    | - first borrow ends here
 
@@ -79,6 +84,7 @@ LL |     let _bar1 = &mut foo.bar1.int1;
    |                      ------------- first mutable borrow occurs here
 LL |     let _foo2 = &mut *foo; //~ ERROR cannot borrow
    |                      ^^^^ second mutable borrow occurs here
+LL |     use_mut(_bar1);
 LL | }
    | - first borrow ends here
 
@@ -89,6 +95,7 @@ LL |     let _bar1 = &foo.bar1.int1;
    |                  ------------- immutable borrow occurs here
 LL |     let _foo1 = &mut foo.bar1; //~ ERROR cannot borrow
    |                      ^^^^^^^^ mutable borrow occurs here
+LL |     use_imm(_bar1);
 LL | }
    | - immutable borrow ends here
 
@@ -99,6 +106,7 @@ LL |     let _bar1 = &foo.bar1.int1;
    |                  ------------- immutable borrow occurs here
 LL |     let _foo2 = &mut *foo; //~ ERROR cannot borrow
    |                      ^^^^ mutable borrow occurs here
+LL |     use_imm(_bar1);
 LL | }
    | - immutable borrow ends here
 
index 3fbb747db24f14053f5cb7d4d40b0e1cb4fe63c9..ee5ad58290e9e11a6eb7238ca3840106c071b096 100644 (file)
@@ -1,3 +1,13 @@
+error[E0502]: cannot borrow `f` as immutable because it is also borrowed as mutable
+  --> $DIR/borrowck-unboxed-closures.rs:13:5
+   |
+LL |     let g = &mut f;
+   |             ------ mutable borrow occurs here
+LL |     f(1, 2);    //~ ERROR cannot borrow `f` as immutable
+   |     ^ immutable borrow occurs here
+LL |     use_mut(g);
+   |             - mutable borrow later used here
+
 error[E0596]: cannot borrow `f` as mutable, as it is not declared as mutable
   --> $DIR/borrowck-unboxed-closures.rs:17:5
    |
@@ -16,7 +26,7 @@ LL |     f(1, 2);    //~ ERROR use of moved value
    |
    = note: move occurs because `f` has type `F`, which does not implement the `Copy` trait
 
-error: aborting due to 2 previous errors
+error: aborting due to 3 previous errors
 
-Some errors occurred: E0382, E0596.
+Some errors occurred: E0382, E0502, E0596.
 For more information about an error, try `rustc --explain E0382`.
index 4813b4b6a72cd127539c5a83499786437360c209..43f143a492fd63b76108c44ec14a3ce0a73ca300 100644 (file)
@@ -11,8 +11,8 @@
 fn a<F:Fn(isize, isize) -> isize>(mut f: F) {
     let g = &mut f;
     f(1, 2);    //~ ERROR cannot borrow `f` as immutable
+    use_mut(g);
 }
-
 fn b<F:FnMut(isize, isize) -> isize>(f: F) {
     f(1, 2);    //~ ERROR cannot borrow immutable argument
 }
@@ -23,3 +23,5 @@ fn c<F:FnOnce(isize, isize) -> isize>(f: F) {
 }
 
 fn main() {}
+
+fn use_mut<T>(_: &mut T) { }
index 0c067c47004cfbe03b19236b070605bf20b7a75e..6ee1a6245a556b51774baf80effe93b1f923b68c 100644 (file)
@@ -5,6 +5,7 @@ LL |     let g = &mut f;
    |                  - mutable borrow occurs here
 LL |     f(1, 2);    //~ ERROR cannot borrow `f` as immutable
    |     ^ immutable borrow occurs here
+LL |     use_mut(g);
 LL | }
    | - mutable borrow ends here
 
diff --git a/src/test/ui/codemap_tests/overlapping_spans.nll.stderr b/src/test/ui/codemap_tests/overlapping_spans.nll.stderr
deleted file mode 100644 (file)
index e334472..0000000
+++ /dev/null
@@ -1,17 +0,0 @@
-error[E0509]: cannot move out of type `S`, which implements the `Drop` trait
-  --> $DIR/overlapping_spans.rs:20:11
-   |
-LL |     match (S {f:"foo".to_string()}) {
-   |           ^^^^^^^^^^^^^^^^^^^^^^^^^ cannot move out of here
-LL |         S {f:_s} => {} //~ ERROR cannot move out
-   |              -- data moved here
-   |
-note: move occurs because `_s` has type `std::string::String`, which does not implement the `Copy` trait
-  --> $DIR/overlapping_spans.rs:21:14
-   |
-LL |         S {f:_s} => {} //~ ERROR cannot move out
-   |              ^^
-
-error: aborting due to previous error
-
-For more information about this error, try `rustc --explain E0509`.
diff --git a/src/test/ui/codemap_tests/overlapping_spans.rs b/src/test/ui/codemap_tests/overlapping_spans.rs
deleted file mode 100644 (file)
index 467e90b..0000000
+++ /dev/null
@@ -1,23 +0,0 @@
-// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-#[derive(Debug)]
-struct Foo { }
-
-struct S {f:String}
-impl Drop for S {
-    fn drop(&mut self) { println!("{}", self.f); }
-}
-
-fn main() {
-    match (S {f:"foo".to_string()}) {
-        S {f:_s} => {} //~ ERROR cannot move out
-    }
-}
diff --git a/src/test/ui/codemap_tests/overlapping_spans.stderr b/src/test/ui/codemap_tests/overlapping_spans.stderr
deleted file mode 100644 (file)
index 62a4f08..0000000
+++ /dev/null
@@ -1,12 +0,0 @@
-error[E0509]: cannot move out of type `S`, which implements the `Drop` trait
-  --> $DIR/overlapping_spans.rs:21:9
-   |
-LL |         S {f:_s} => {} //~ ERROR cannot move out
-   |         ^^^^^--^
-   |         |    |
-   |         |    hint: to prevent move, use `ref _s` or `ref mut _s`
-   |         cannot move out of here
-
-error: aborting due to previous error
-
-For more information about this error, try `rustc --explain E0509`.
index e247e86fbcb2281ecf67e9aa5bad6b73617a321e..f60f1dfcf3772e0979c224d3a9e5e40fc2416472 100644 (file)
@@ -2,7 +2,9 @@ error[E0404]: expected trait, found type alias `Bar`
   --> $DIR/two_files.rs:15:6
    |
 LL | impl Bar for Baz { } //~ ERROR expected trait, found type alias
-   |      ^^^ type aliases cannot be used for traits
+   |      ^^^ type aliases cannot be used as traits
+   |
+   = note: did you mean to use a trait alias?
 
 error: aborting due to previous error
 
index 81f6e7ddd2de233bf5a861cc111c441444a7ca22..76f9276c05807cf2a0131dcfe84501916fb61430 100644 (file)
@@ -21,12 +21,12 @@ enum Bar {
 union Union {
     foo: &'static Foo,
     bar: &'static Bar,
-    usize: &'static usize,
+    u8: &'static u8,
 }
-static BAR: usize = 42;
+static BAR: u8 = 42;
 static FOO: (&Foo, &Bar) = unsafe {(
-    Union { usize: &BAR }.foo,
-    Union { usize: &BAR }.bar,
+    Union { u8: &BAR }.foo,
+    Union { u8: &BAR }.bar,
 )};
 
 fn main() {}
index b661ee92475e68e0561145f8f87b21a0b449eb4d..701632362ce7afa4b9ba5fc8e6a64be4d0fe6dfd 100644 (file)
@@ -19,12 +19,12 @@ enum Bar {
 union Union {
     foo: &'static Foo,
     bar: &'static Bar,
-    usize: &'static usize,
+    u8: &'static u8,
 }
-static BAR: usize = 5;
+static BAR: u8 = 5;
 static FOO: (&Foo, &Bar) = unsafe {( //~ undefined behavior
-    Union { usize: &BAR }.foo,
-    Union { usize: &BAR }.bar,
+    Union { u8: &BAR }.foo,
+    Union { u8: &BAR }.bar,
 )};
 
 fn main() {}
index 9dd7570232d37f13102901c953bebd71fcd0bed7..28825477c810276f25c5b30876d49a5aab3eb839 100644 (file)
@@ -2,8 +2,8 @@ error[E0080]: it is undefined behavior to use this value
   --> $DIR/double_check2.rs:25:1
    |
 LL | / static FOO: (&Foo, &Bar) = unsafe {( //~ undefined behavior
-LL | |     Union { usize: &BAR }.foo,
-LL | |     Union { usize: &BAR }.bar,
+LL | |     Union { u8: &BAR }.foo,
+LL | |     Union { u8: &BAR }.bar,
 LL | | )};
    | |___^ type validation failed: encountered invalid enum discriminant 5 at .1.<deref>
    |
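
The `double_check` tests above rely on union reads reinterpreting the same bytes as a different type, which is how they manufacture an invalid enum discriminant for const-eval to reject. A safe-to-run analogue of that type punning (not part of the commit; the bit pattern is illustrative):

    union Bits {
        int: u32,
        float: f32,
    }

    fn main() {
        let bits = Bits { int: 0x3F80_0000 };
        // Reading the other field reinterprets the stored bytes; 0x3F800000 is
        // the IEEE-754 encoding of 1.0_f32. The tests above do the same trick
        // with references, so the resulting &Foo/&Bar carry a bogus discriminant.
        let f = unsafe { bits.float };
        assert_eq!(f, 1.0);
    }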
diff --git a/src/test/ui/consts/dangling-alloc-id-ice.rs b/src/test/ui/consts/dangling-alloc-id-ice.rs
new file mode 100644 (file)
index 0000000..695d33b
--- /dev/null
@@ -0,0 +1,15 @@
+// https://github.com/rust-lang/rust/issues/55223
+
+#![feature(const_let)]
+
+union Foo<'a> {
+    y: &'a (),
+    long_live_the_unit: &'static (),
+}
+
+const FOO: &() = { //~ ERROR any use of this value will cause an error
+    let y = ();
+    unsafe { Foo { y: &y }.long_live_the_unit }
+};
+
+fn main() {}
diff --git a/src/test/ui/consts/dangling-alloc-id-ice.stderr b/src/test/ui/consts/dangling-alloc-id-ice.stderr
new file mode 100644 (file)
index 0000000..a5fa88e
--- /dev/null
@@ -0,0 +1,13 @@
+error: any use of this value will cause an error
+  --> $DIR/dangling-alloc-id-ice.rs:10:1
+   |
+LL | / const FOO: &() = { //~ ERROR any use of this value will cause an error
+LL | |     let y = ();
+LL | |     unsafe { Foo { y: &y }.long_live_the_unit }
+LL | | };
+   | |__^ type validation failed: encountered dangling pointer in final constant
+   |
+   = note: #[deny(const_err)] on by default
+
+error: aborting due to previous error
+
diff --git a/src/test/ui/consts/dangling_raw_ptr.rs b/src/test/ui/consts/dangling_raw_ptr.rs
new file mode 100644 (file)
index 0000000..7fc7734
--- /dev/null
@@ -0,0 +1,10 @@
+#![feature(const_let)]
+
+const FOO: *const u32 = { //~ ERROR any use of this value will cause an error
+    let x = 42;
+    &x
+};
+
+fn main() {
+    let x = FOO;
+}
diff --git a/src/test/ui/consts/dangling_raw_ptr.stderr b/src/test/ui/consts/dangling_raw_ptr.stderr
new file mode 100644 (file)
index 0000000..3b20936
--- /dev/null
@@ -0,0 +1,13 @@
+error: any use of this value will cause an error
+  --> $DIR/dangling_raw_ptr.rs:3:1
+   |
+LL | / const FOO: *const u32 = { //~ ERROR any use of this value will cause an error
+LL | |     let x = 42;
+LL | |     &x
+LL | | };
+   | |__^ type validation failed: encountered dangling pointer in final constant
+   |
+   = note: #[deny(const_err)] on by default
+
+error: aborting due to previous error
+
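
Both new tests above reject constants whose final value would point into a temporary that is gone once evaluation finishes. For contrast, a form that const-eval accepts, because the literal is promoted to a `'static` allocation (not part of the commit):

    const OK: &u32 = &42;   // `42` is promoted to 'static, so nothing dangles

    fn main() {
        assert_eq!(*OK, 42);
    }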
diff --git a/src/test/ui/derive-uninhabited-enum-38885.rs b/src/test/ui/derive-uninhabited-enum-38885.rs
new file mode 100644 (file)
index 0000000..dc7f5d6
--- /dev/null
@@ -0,0 +1,27 @@
+// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// compile-pass
+// compile-flags: -Wunused
+
+// ensure there are no special warnings about uninhabited types
+// when deriving Debug on an empty enum
+
+#[derive(Debug)]
+enum Void {} //~ WARN never used
+
+#[derive(Debug)]
+enum Foo { //~ WARN never used
+    Bar(u8),
+    Void(Void),
+}
+
+fn main() {}
+
diff --git a/src/test/ui/derive-uninhabited-enum-38885.stderr b/src/test/ui/derive-uninhabited-enum-38885.stderr
new file mode 100644 (file)
index 0000000..11032ab
--- /dev/null
@@ -0,0 +1,14 @@
+warning: enum is never used: `Void`
+  --> $DIR/derive-uninhabited-enum-38885.rs:18:1
+   |
+LL | enum Void {} //~ WARN never used
+   | ^^^^^^^^^
+   |
+   = note: `-W dead-code` implied by `-W unused`
+
+warning: enum is never used: `Foo`
+  --> $DIR/derive-uninhabited-enum-38885.rs:21:1
+   |
+LL | enum Foo { //~ WARN never used
+   | ^^^^^^^^
+
index db3478116cbfdd37270b312a5d01cba019cf4b5f..8b6e34c585f9d8daefffbcb265b206fae8b7a3fc 100644 (file)
@@ -28,7 +28,7 @@ error: expected `{`, found `;`
 LL |     if not  // lack of braces is [sic]
    |     -- this `if` statement has a condition, but no block
 LL |         println!("Then when?");
-   |                               ^
+   |                               ^ expected `{`
 
 error: unexpected `2` after identifier
   --> $DIR/issue-46836-identifier-not-instead-of-negation.rs:36:24
diff --git a/src/test/ui/dropck/dropck-eyepatch-extern-crate.ast.stderr b/src/test/ui/dropck/dropck-eyepatch-extern-crate.ast.stderr
new file mode 100644 (file)
index 0000000..31adb2f
--- /dev/null
@@ -0,0 +1,69 @@
+error[E0597]: `c` does not live long enough
+  --> $DIR/dropck-eyepatch-extern-crate.rs:41:20
+   |
+LL |     dt = Dt("dt", &c);
+   |                    ^ borrowed value does not live long enough
+...
+LL | }
+   | - `c` dropped here while still borrowed
+   |
+   = note: values in a scope are dropped in the opposite order they are created
+
+error[E0597]: `c` does not live long enough
+  --> $DIR/dropck-eyepatch-extern-crate.rs:43:20
+   |
+LL |     dr = Dr("dr", &c);
+   |                    ^ borrowed value does not live long enough
+...
+LL | }
+   | - `c` dropped here while still borrowed
+   |
+   = note: values in a scope are dropped in the opposite order they are created
+
+error[E0597]: `c_shortest` does not live long enough
+  --> $DIR/dropck-eyepatch-extern-crate.rs:47:20
+   |
+LL |     dt = Dt("dt", &c_shortest);
+   |                    ^^^^^^^^^^ borrowed value does not live long enough
+...
+LL | }
+   | - `c_shortest` dropped here while still borrowed
+   |
+   = note: values in a scope are dropped in the opposite order they are created
+
+error[E0597]: `c_shortest` does not live long enough
+  --> $DIR/dropck-eyepatch-extern-crate.rs:50:20
+   |
+LL |     dr = Dr("dr", &c_shortest);
+   |                    ^^^^^^^^^^ borrowed value does not live long enough
+...
+LL | }
+   | - `c_shortest` dropped here while still borrowed
+   |
+   = note: values in a scope are dropped in the opposite order they are created
+
+error[E0597]: `c_shortest` does not live long enough
+  --> $DIR/dropck-eyepatch-extern-crate.rs:57:29
+   |
+LL |     pt = Pt("pt", &c_long, &c_shortest);
+   |                             ^^^^^^^^^^ borrowed value does not live long enough
+...
+LL | }
+   | - `c_shortest` dropped here while still borrowed
+   |
+   = note: values in a scope are dropped in the opposite order they are created
+
+error[E0597]: `c_shortest` does not live long enough
+  --> $DIR/dropck-eyepatch-extern-crate.rs:59:29
+   |
+LL |     pr = Pr("pr", &c_long, &c_shortest);
+   |                             ^^^^^^^^^^ borrowed value does not live long enough
+...
+LL | }
+   | - `c_shortest` dropped here while still borrowed
+   |
+   = note: values in a scope are dropped in the opposite order they are created
+
+error: aborting due to 6 previous errors
+
+For more information about this error, try `rustc --explain E0597`.
index 3e531d9fd6011e8fee34972998a929cb9de8c4ed..68065639398a591409ab042d6251431d54d335a8 100644 (file)
@@ -1,12 +1,12 @@
-// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
+// The behavior of AST-borrowck and NLL explicitly differ here due to
+// NLL's increased precision; so we use revisions and do not worry
+// about the --compare-mode=nll on this test.
+
+// revisions: ast nll
+//[ast]compile-flags: -Z borrowck=ast
+//[nll]compile-flags: -Z borrowck=migrate -Z two-phase-borrows
+
+// ignore-compare-mode-nll
 
 // aux-build:dropck_eyepatch_extern_crate.rs
 
@@ -39,29 +39,32 @@ fn main() { #![rustc_error] // rust-lang/rust#49855
 
     // Error: destructor order imprecisely modelled
     dt = Dt("dt", &c);
-    //~^ ERROR `c` does not live long enough
+    //[ast]~^ ERROR `c` does not live long enough
     dr = Dr("dr", &c);
-    //~^ ERROR `c` does not live long enough
+    //[ast]~^ ERROR `c` does not live long enough
 
     // Error: `c_shortest` dies too soon for the references in dtors to be valid.
     dt = Dt("dt", &c_shortest);
-    //~^ ERROR `c_shortest` does not live long enough
+    //[ast]~^ ERROR `c_shortest` does not live long enough
+    //[nll]~^^ ERROR `c_shortest` does not live long enough
     dr = Dr("dr", &c_shortest);
-    //~^ ERROR `c_shortest` does not live long enough
-
+    //[ast]~^ ERROR `c_shortest` does not live long enough
     // No error: Drop impl asserts .1 (A and &'a _) are not accessed
     pt = Pt("pt", &c_shortest, &c_long);
     pr = Pr("pr", &c_shortest, &c_long);
 
     // Error: Drop impl's assertion does not apply to `B` nor `&'b _`
     pt = Pt("pt", &c_long, &c_shortest);
-    //~^ ERROR `c_shortest` does not live long enough
+    //[ast]~^ ERROR `c_shortest` does not live long enough
     pr = Pr("pr", &c_long, &c_shortest);
-    //~^ ERROR `c_shortest` does not live long enough
+    //[ast]~^ ERROR `c_shortest` does not live long enough
 
     // No error: St and Sr have no destructor.
     st = St("st", &c_shortest);
     sr = Sr("sr", &c_shortest);
 
     println!("{:?}", (dt.0, dr.0, pt.0, pr.0, st.0, sr.0));
+    use_imm(sr.1); use_imm(st.1); use_imm(pr.1); use_imm(pt.1); use_imm(dr.1); use_imm(dt.1);
 }
+
+fn use_imm<T>(_: &T) { }
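
These dropck-eyepatch tests revolve around `#[may_dangle]`, which tells dropck that a `Drop` impl will not touch a possibly-dangling parameter. A minimal nightly-only sketch of the attribute the tests exercise (not part of the commit; the types are illustrative):

    #![feature(dropck_eyepatch)]

    struct Inspector<'a>(&'a u8);

    unsafe impl<#[may_dangle] 'a> Drop for Inspector<'a> {
        // Promise: this destructor never reads the borrowed data.
        fn drop(&mut self) {}
    }

    fn main() {
        let (_inspector, value);
        value = 10u8;
        // `value` is dropped before `_inspector`; without the eyepatch,
        // dropck would reject this as "`value` does not live long enough".
        _inspector = Inspector(&value);
    }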
diff --git a/src/test/ui/dropck/dropck-eyepatch-extern-crate.stderr b/src/test/ui/dropck/dropck-eyepatch-extern-crate.stderr
deleted file mode 100644 (file)
index 35db46f..0000000
+++ /dev/null
@@ -1,69 +0,0 @@
-error[E0597]: `c` does not live long enough
-  --> $DIR/dropck-eyepatch-extern-crate.rs:41:20
-   |
-LL |     dt = Dt("dt", &c);
-   |                    ^ borrowed value does not live long enough
-...
-LL | }
-   | - `c` dropped here while still borrowed
-   |
-   = note: values in a scope are dropped in the opposite order they are created
-
-error[E0597]: `c` does not live long enough
-  --> $DIR/dropck-eyepatch-extern-crate.rs:43:20
-   |
-LL |     dr = Dr("dr", &c);
-   |                    ^ borrowed value does not live long enough
-...
-LL | }
-   | - `c` dropped here while still borrowed
-   |
-   = note: values in a scope are dropped in the opposite order they are created
-
-error[E0597]: `c_shortest` does not live long enough
-  --> $DIR/dropck-eyepatch-extern-crate.rs:47:20
-   |
-LL |     dt = Dt("dt", &c_shortest);
-   |                    ^^^^^^^^^^ borrowed value does not live long enough
-...
-LL | }
-   | - `c_shortest` dropped here while still borrowed
-   |
-   = note: values in a scope are dropped in the opposite order they are created
-
-error[E0597]: `c_shortest` does not live long enough
-  --> $DIR/dropck-eyepatch-extern-crate.rs:49:20
-   |
-LL |     dr = Dr("dr", &c_shortest);
-   |                    ^^^^^^^^^^ borrowed value does not live long enough
-...
-LL | }
-   | - `c_shortest` dropped here while still borrowed
-   |
-   = note: values in a scope are dropped in the opposite order they are created
-
-error[E0597]: `c_shortest` does not live long enough
-  --> $DIR/dropck-eyepatch-extern-crate.rs:57:29
-   |
-LL |     pt = Pt("pt", &c_long, &c_shortest);
-   |                             ^^^^^^^^^^ borrowed value does not live long enough
-...
-LL | }
-   | - `c_shortest` dropped here while still borrowed
-   |
-   = note: values in a scope are dropped in the opposite order they are created
-
-error[E0597]: `c_shortest` does not live long enough
-  --> $DIR/dropck-eyepatch-extern-crate.rs:59:29
-   |
-LL |     pr = Pr("pr", &c_long, &c_shortest);
-   |                             ^^^^^^^^^^ borrowed value does not live long enough
-...
-LL | }
-   | - `c_shortest` dropped here while still borrowed
-   |
-   = note: values in a scope are dropped in the opposite order they are created
-
-error: aborting due to 6 previous errors
-
-For more information about this error, try `rustc --explain E0597`.
diff --git a/src/test/ui/dropck/dropck-eyepatch-reorder.ast.stderr b/src/test/ui/dropck/dropck-eyepatch-reorder.ast.stderr
new file mode 100644 (file)
index 0000000..ddd47e9
--- /dev/null
@@ -0,0 +1,69 @@
+error[E0597]: `c` does not live long enough
+  --> $DIR/dropck-eyepatch-reorder.rs:58:20
+   |
+LL |     dt = Dt("dt", &c);
+   |                    ^ borrowed value does not live long enough
+...
+LL | }
+   | - `c` dropped here while still borrowed
+   |
+   = note: values in a scope are dropped in the opposite order they are created
+
+error[E0597]: `c` does not live long enough
+  --> $DIR/dropck-eyepatch-reorder.rs:60:20
+   |
+LL |     dr = Dr("dr", &c);
+   |                    ^ borrowed value does not live long enough
+...
+LL | }
+   | - `c` dropped here while still borrowed
+   |
+   = note: values in a scope are dropped in the opposite order they are created
+
+error[E0597]: `c_shortest` does not live long enough
+  --> $DIR/dropck-eyepatch-reorder.rs:64:20
+   |
+LL |     dt = Dt("dt", &c_shortest);
+   |                    ^^^^^^^^^^ borrowed value does not live long enough
+...
+LL | }
+   | - `c_shortest` dropped here while still borrowed
+   |
+   = note: values in a scope are dropped in the opposite order they are created
+
+error[E0597]: `c_shortest` does not live long enough
+  --> $DIR/dropck-eyepatch-reorder.rs:67:20
+   |
+LL |     dr = Dr("dr", &c_shortest);
+   |                    ^^^^^^^^^^ borrowed value does not live long enough
+...
+LL | }
+   | - `c_shortest` dropped here while still borrowed
+   |
+   = note: values in a scope are dropped in the opposite order they are created
+
+error[E0597]: `c_shortest` does not live long enough
+  --> $DIR/dropck-eyepatch-reorder.rs:74:29
+   |
+LL |     pt = Pt("pt", &c_long, &c_shortest);
+   |                             ^^^^^^^^^^ borrowed value does not live long enough
+...
+LL | }
+   | - `c_shortest` dropped here while still borrowed
+   |
+   = note: values in a scope are dropped in the opposite order they are created
+
+error[E0597]: `c_shortest` does not live long enough
+  --> $DIR/dropck-eyepatch-reorder.rs:76:29
+   |
+LL |     pr = Pr("pr", &c_long, &c_shortest);
+   |                             ^^^^^^^^^^ borrowed value does not live long enough
+...
+LL | }
+   | - `c_shortest` dropped here while still borrowed
+   |
+   = note: values in a scope are dropped in the opposite order they are created
+
+error: aborting due to 6 previous errors
+
+For more information about this error, try `rustc --explain E0597`.
index 1806dc71424527947eb8f3ffb9c3073913e68ae9..16aaa2612576821c954b62ca84e1247ac71ee034 100644 (file)
@@ -1,12 +1,12 @@
-// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
+// The behavior of AST-borrowck and NLL explicitly differ here due to
+// NLL's increased precision; so we use revisions and do not worry
+// about the --compare-mode=nll on this test.
+
+// revisions: ast nll
+//[ast]compile-flags: -Z borrowck=ast
+//[nll]compile-flags: -Z borrowck=migrate -Z two-phase-borrows
+
+// ignore-compare-mode-nll
 
 #![feature(dropck_eyepatch, rustc_attrs)]
 
@@ -56,29 +56,32 @@ fn main() { #![rustc_error] // rust-lang/rust#49855
 
     // Error: destructor order imprecisely modelled
     dt = Dt("dt", &c);
-    //~^ ERROR `c` does not live long enough
+    //[ast]~^ ERROR `c` does not live long enough
     dr = Dr("dr", &c);
-    //~^ ERROR `c` does not live long enough
+    //[ast]~^ ERROR `c` does not live long enough
 
     // Error: `c_shortest` dies too soon for the references in dtors to be valid.
     dt = Dt("dt", &c_shortest);
-    //~^ ERROR `c_shortest` does not live long enough
+    //[ast]~^ ERROR `c_shortest` does not live long enough
+    //[nll]~^^ ERROR `c_shortest` does not live long enough
     dr = Dr("dr", &c_shortest);
-    //~^ ERROR `c_shortest` does not live long enough
-
+    //[ast]~^ ERROR `c_shortest` does not live long enough
     // No error: Drop impl asserts .1 (A and &'a _) are not accessed
     pt = Pt("pt", &c_shortest, &c_long);
     pr = Pr("pr", &c_shortest, &c_long);
 
     // Error: Drop impl's assertion does not apply to `B` nor `&'b _`
     pt = Pt("pt", &c_long, &c_shortest);
-    //~^ ERROR `c_shortest` does not live long enough
+    //[ast]~^ ERROR `c_shortest` does not live long enough
     pr = Pr("pr", &c_long, &c_shortest);
-    //~^ ERROR `c_shortest` does not live long enough
+    //[ast]~^ ERROR `c_shortest` does not live long enough
 
     // No error: St and Sr have no destructor.
     st = St("st", &c_shortest);
     sr = Sr("sr", &c_shortest);
 
     println!("{:?}", (dt.0, dr.0, pt.0, pr.0, st.0, sr.0));
+    use_imm(sr.1); use_imm(st.1); use_imm(pr.1); use_imm(pt.1); use_imm(dr.1); use_imm(dt.1);
 }
+
+fn use_imm<T>(_: &T) { }
diff --git a/src/test/ui/dropck/dropck-eyepatch-reorder.stderr b/src/test/ui/dropck/dropck-eyepatch-reorder.stderr
deleted file mode 100644 (file)
index 9984a7b..0000000
+++ /dev/null
@@ -1,69 +0,0 @@
-error[E0597]: `c` does not live long enough
-  --> $DIR/dropck-eyepatch-reorder.rs:58:20
-   |
-LL |     dt = Dt("dt", &c);
-   |                    ^ borrowed value does not live long enough
-...
-LL | }
-   | - `c` dropped here while still borrowed
-   |
-   = note: values in a scope are dropped in the opposite order they are created
-
-error[E0597]: `c` does not live long enough
-  --> $DIR/dropck-eyepatch-reorder.rs:60:20
-   |
-LL |     dr = Dr("dr", &c);
-   |                    ^ borrowed value does not live long enough
-...
-LL | }
-   | - `c` dropped here while still borrowed
-   |
-   = note: values in a scope are dropped in the opposite order they are created
-
-error[E0597]: `c_shortest` does not live long enough
-  --> $DIR/dropck-eyepatch-reorder.rs:64:20
-   |
-LL |     dt = Dt("dt", &c_shortest);
-   |                    ^^^^^^^^^^ borrowed value does not live long enough
-...
-LL | }
-   | - `c_shortest` dropped here while still borrowed
-   |
-   = note: values in a scope are dropped in the opposite order they are created
-
-error[E0597]: `c_shortest` does not live long enough
-  --> $DIR/dropck-eyepatch-reorder.rs:66:20
-   |
-LL |     dr = Dr("dr", &c_shortest);
-   |                    ^^^^^^^^^^ borrowed value does not live long enough
-...
-LL | }
-   | - `c_shortest` dropped here while still borrowed
-   |
-   = note: values in a scope are dropped in the opposite order they are created
-
-error[E0597]: `c_shortest` does not live long enough
-  --> $DIR/dropck-eyepatch-reorder.rs:74:29
-   |
-LL |     pt = Pt("pt", &c_long, &c_shortest);
-   |                             ^^^^^^^^^^ borrowed value does not live long enough
-...
-LL | }
-   | - `c_shortest` dropped here while still borrowed
-   |
-   = note: values in a scope are dropped in the opposite order they are created
-
-error[E0597]: `c_shortest` does not live long enough
-  --> $DIR/dropck-eyepatch-reorder.rs:76:29
-   |
-LL |     pr = Pr("pr", &c_long, &c_shortest);
-   |                             ^^^^^^^^^^ borrowed value does not live long enough
-...
-LL | }
-   | - `c_shortest` dropped here while still borrowed
-   |
-   = note: values in a scope are dropped in the opposite order they are created
-
-error: aborting due to 6 previous errors
-
-For more information about this error, try `rustc --explain E0597`.
diff --git a/src/test/ui/dropck/dropck-eyepatch.ast.stderr b/src/test/ui/dropck/dropck-eyepatch.ast.stderr
new file mode 100644 (file)
index 0000000..0952ed0
--- /dev/null
@@ -0,0 +1,69 @@
+error[E0597]: `c` does not live long enough
+  --> $DIR/dropck-eyepatch.rs:81:20
+   |
+LL |     dt = Dt("dt", &c);
+   |                    ^ borrowed value does not live long enough
+...
+LL | }
+   | - `c` dropped here while still borrowed
+   |
+   = note: values in a scope are dropped in the opposite order they are created
+
+error[E0597]: `c` does not live long enough
+  --> $DIR/dropck-eyepatch.rs:83:20
+   |
+LL |     dr = Dr("dr", &c);
+   |                    ^ borrowed value does not live long enough
+...
+LL | }
+   | - `c` dropped here while still borrowed
+   |
+   = note: values in a scope are dropped in the opposite order they are created
+
+error[E0597]: `c_shortest` does not live long enough
+  --> $DIR/dropck-eyepatch.rs:87:20
+   |
+LL |     dt = Dt("dt", &c_shortest);
+   |                    ^^^^^^^^^^ borrowed value does not live long enough
+...
+LL | }
+   | - `c_shortest` dropped here while still borrowed
+   |
+   = note: values in a scope are dropped in the opposite order they are created
+
+error[E0597]: `c_shortest` does not live long enough
+  --> $DIR/dropck-eyepatch.rs:90:20
+   |
+LL |     dr = Dr("dr", &c_shortest);
+   |                    ^^^^^^^^^^ borrowed value does not live long enough
+...
+LL | }
+   | - `c_shortest` dropped here while still borrowed
+   |
+   = note: values in a scope are dropped in the opposite order they are created
+
+error[E0597]: `c_shortest` does not live long enough
+  --> $DIR/dropck-eyepatch.rs:98:29
+   |
+LL |     pt = Pt("pt", &c_long, &c_shortest);
+   |                             ^^^^^^^^^^ borrowed value does not live long enough
+...
+LL | }
+   | - `c_shortest` dropped here while still borrowed
+   |
+   = note: values in a scope are dropped in the opposite order they are created
+
+error[E0597]: `c_shortest` does not live long enough
+  --> $DIR/dropck-eyepatch.rs:100:29
+   |
+LL |     pr = Pr("pr", &c_long, &c_shortest);
+   |                             ^^^^^^^^^^ borrowed value does not live long enough
+...
+LL | }
+   | - `c_shortest` dropped here while still borrowed
+   |
+   = note: values in a scope are dropped in the opposite order they are created
+
+error: aborting due to 6 previous errors
+
+For more information about this error, try `rustc --explain E0597`.
index 40d3ff050e2aa3db6fa71367fbd80780a97831bc..d7a671fd33c2c62713325081c71edbe326b16556 100644 (file)
@@ -1,12 +1,12 @@
-// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
+// The behavior of AST-borrowck and NLL explicitly differ here due to
+// NLL's increased precision; so we use revisions and do not worry
+// about the --compare-mode=nll on this test.
+
+// revisions: ast nll
+//[ast]compile-flags: -Z borrowck=ast
+//[nll]compile-flags: -Z borrowck=migrate -Z two-phase-borrows
+
+// ignore-compare-mode-nll
 
 #![feature(dropck_eyepatch, rustc_attrs)]
 
@@ -79,16 +79,16 @@ fn main() { #![rustc_error] // rust-lang/rust#49855
 
     // Error: destructor order imprecisely modelled
     dt = Dt("dt", &c);
-    //~^ ERROR `c` does not live long enough
+    //[ast]~^ ERROR `c` does not live long enough
     dr = Dr("dr", &c);
-    //~^ ERROR `c` does not live long enough
+    //[ast]~^ ERROR `c` does not live long enough
 
     // Error: `c_shortest` dies too soon for the references in dtors to be valid.
     dt = Dt("dt", &c_shortest);
-    //~^ ERROR `c_shortest` does not live long enough
+    //[ast]~^ ERROR `c_shortest` does not live long enough
+    //[nll]~^^ ERROR `c_shortest` does not live long enough
     dr = Dr("dr", &c_shortest);
-    //~^ ERROR `c_shortest` does not live long enough
-
+    //[ast]~^ ERROR `c_shortest` does not live long enough
 
     // No error: Drop impl asserts .1 (A and &'a _) are not accessed
     pt = Pt("pt", &c_shortest, &c_long);
@@ -96,13 +96,16 @@ fn main() { #![rustc_error] // rust-lang/rust#49855
 
     // Error: Drop impl's assertion does not apply to `B` nor `&'b _`
     pt = Pt("pt", &c_long, &c_shortest);
-    //~^ ERROR `c_shortest` does not live long enough
+    //[ast]~^ ERROR `c_shortest` does not live long enough
     pr = Pr("pr", &c_long, &c_shortest);
-    //~^ ERROR `c_shortest` does not live long enough
+    //[ast]~^ ERROR `c_shortest` does not live long enough
 
     // No error: St and Sr have no destructor.
     st = St("st", &c_shortest);
     sr = Sr("sr", &c_shortest);
 
     println!("{:?}", (dt.0, dr.0, pt.0, pr.0, st.0, sr.0));
+    use_imm(sr.1); use_imm(st.1); use_imm(pr.1); use_imm(pt.1); use_imm(dr.1); use_imm(dt.1);
 }
+
+fn use_imm<T>(_: &T) { }
diff --git a/src/test/ui/dropck/dropck-eyepatch.stderr b/src/test/ui/dropck/dropck-eyepatch.stderr
deleted file mode 100644 (file)
index 7cdf645..0000000
+++ /dev/null
@@ -1,69 +0,0 @@
-error[E0597]: `c` does not live long enough
-  --> $DIR/dropck-eyepatch.rs:81:20
-   |
-LL |     dt = Dt("dt", &c);
-   |                    ^ borrowed value does not live long enough
-...
-LL | }
-   | - `c` dropped here while still borrowed
-   |
-   = note: values in a scope are dropped in the opposite order they are created
-
-error[E0597]: `c` does not live long enough
-  --> $DIR/dropck-eyepatch.rs:83:20
-   |
-LL |     dr = Dr("dr", &c);
-   |                    ^ borrowed value does not live long enough
-...
-LL | }
-   | - `c` dropped here while still borrowed
-   |
-   = note: values in a scope are dropped in the opposite order they are created
-
-error[E0597]: `c_shortest` does not live long enough
-  --> $DIR/dropck-eyepatch.rs:87:20
-   |
-LL |     dt = Dt("dt", &c_shortest);
-   |                    ^^^^^^^^^^ borrowed value does not live long enough
-...
-LL | }
-   | - `c_shortest` dropped here while still borrowed
-   |
-   = note: values in a scope are dropped in the opposite order they are created
-
-error[E0597]: `c_shortest` does not live long enough
-  --> $DIR/dropck-eyepatch.rs:89:20
-   |
-LL |     dr = Dr("dr", &c_shortest);
-   |                    ^^^^^^^^^^ borrowed value does not live long enough
-...
-LL | }
-   | - `c_shortest` dropped here while still borrowed
-   |
-   = note: values in a scope are dropped in the opposite order they are created
-
-error[E0597]: `c_shortest` does not live long enough
-  --> $DIR/dropck-eyepatch.rs:98:29
-   |
-LL |     pt = Pt("pt", &c_long, &c_shortest);
-   |                             ^^^^^^^^^^ borrowed value does not live long enough
-...
-LL | }
-   | - `c_shortest` dropped here while still borrowed
-   |
-   = note: values in a scope are dropped in the opposite order they are created
-
-error[E0597]: `c_shortest` does not live long enough
-  --> $DIR/dropck-eyepatch.rs:100:29
-   |
-LL |     pr = Pr("pr", &c_long, &c_shortest);
-   |                             ^^^^^^^^^^ borrowed value does not live long enough
-...
-LL | }
-   | - `c_shortest` dropped here while still borrowed
-   |
-   = note: values in a scope are dropped in the opposite order they are created
-
-error: aborting due to 6 previous errors
-
-For more information about this error, try `rustc --explain E0597`.
index 6a4392df35d8b31646a25266f8df11e380667e05..900812787bcf7debe389d41ab08db9cb9f33d8d0 100644 (file)
@@ -4,7 +4,7 @@ error[E0004]: non-exhaustive patterns: type std::option::Option<i32> is non-empt
 LL |     match x { } //~ ERROR E0004
    |           ^
    |
-help: Please ensure that all possible cases are being handled; possibly adding wildcards or more match arms.
+help: ensure that all possible cases are being handled, possibly by adding wildcards or more match arms
   --> $DIR/E0004-2.rs:14:11
    |
 LL |     match x { } //~ ERROR E0004
index 3ffd25084b8296dc9ae53541bff20ed81330d5af..f3db697790c5e26e246ea59b4be0bdb3fa26414d 100644 (file)
@@ -5,7 +5,7 @@ LL | impl<T, U> CoerceUnsized<Foo<U, T>> for Foo<T, U> {}
    |            ^^^^^^^^^^^^^^^^^^^^^^^^ requires multiple coercions
    |
    = note: `CoerceUnsized` may only be implemented for a coercion between structures with one field being coerced
-   = note: currently, 2 fields need coercions: b (T to U), c (U to T)
+   = note: currently, 2 fields need coercions: `b` (`T` to `U`), `c` (`U` to `T`)
 
 error: aborting due to previous error
 
index 3f8ad32437df99b18cf621f3f1d76629235c6dd3..736fad44b8b5840b4259c4ddeafe8eb1d185e4d2 100644 (file)
@@ -1,4 +1,4 @@
-error[E0658]: allow_fail attribute is currently unstable (see issue #42219)
+error[E0658]: allow_fail attribute is currently unstable (see issue #46488)
   --> $DIR/feature-gate-allow_fail.rs:13:1
    |
 LL | #[allow_fail] //~ ERROR allow_fail attribute is currently unstable
index aa27f8922c005d7a77af19294583b7c2e77e42ba..6b70c1ea294c55a0977ef32dd3157cf5b49cae73 100644 (file)
@@ -61,6 +61,16 @@ pub unsafe fn atomic_u64(x: *mut u64) {
 pub unsafe fn atomic_i64(x: *mut i64) {
     atomic_xadd(x, 1);
 }
+#[cfg(target_has_atomic = "128")]
+//~^ ERROR `cfg(target_has_atomic)` is experimental and subject to change (see issue #32976)
+pub unsafe fn atomic_u128(x: *mut u128) {
+    atomic_xadd(x, 1);
+}
+#[cfg(target_has_atomic = "128")]
+//~^ ERROR `cfg(target_has_atomic)` is experimental and subject to change (see issue #32976)
+pub unsafe fn atomic_i128(x: *mut i128) {
+    atomic_xadd(x, 1);
+}
 #[cfg(target_has_atomic = "ptr")]
 //~^ ERROR `cfg(target_has_atomic)` is experimental and subject to change (see issue #32976)
 pub unsafe fn atomic_usize(x: *mut usize) {
@@ -81,6 +91,8 @@ fn main() {
     //~^ ERROR `cfg(target_has_atomic)` is experimental and subject to change (see issue #32976)
     cfg!(target_has_atomic = "64");
     //~^ ERROR `cfg(target_has_atomic)` is experimental and subject to change (see issue #32976)
+    cfg!(target_has_atomic = "128");
+    //~^ ERROR `cfg(target_has_atomic)` is experimental and subject to change (see issue #32976)
     cfg!(target_has_atomic = "ptr");
     //~^ ERROR `cfg(target_has_atomic)` is experimental and subject to change (see issue #32976)
 }
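
The new `"128"` value follows the same pattern as the existing widths: behind `#![feature(cfg_target_has_atomic)]`, code can be gated on the target's atomic support. A nightly-only sketch using the portable `"ptr"` width (not part of the commit; names are illustrative):

    #![feature(cfg_target_has_atomic)]

    #[cfg(target_has_atomic = "ptr")]
    fn bump() -> usize {
        use std::sync::atomic::{AtomicUsize, Ordering};
        static COUNTER: AtomicUsize = AtomicUsize::new(0);
        COUNTER.fetch_add(1, Ordering::SeqCst) + 1
    }

    #[cfg(not(target_has_atomic = "ptr"))]
    fn bump() -> usize {
        0 // fallback for targets without pointer-sized atomics
    }

    fn main() {
        println!("bumped to {}", bump());
    }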
index f3975b7ce8b26fdbebe36f2a73080b385c267f83..81f20112a12ffc96356ccb0842144be86c4b3588 100644 (file)
@@ -65,7 +65,7 @@ LL | #[cfg(target_has_atomic = "64")]
 error[E0658]: `cfg(target_has_atomic)` is experimental and subject to change (see issue #32976)
   --> $DIR/feature-gate-cfg-target-has-atomic.rs:64:7
    |
-LL | #[cfg(target_has_atomic = "ptr")]
+LL | #[cfg(target_has_atomic = "128")]
    |       ^^^^^^^^^^^^^^^^^^^^^^^^^
    |
    = help: add #![feature(cfg_target_has_atomic)] to the crate attributes to enable
@@ -73,13 +73,29 @@ LL | #[cfg(target_has_atomic = "ptr")]
 error[E0658]: `cfg(target_has_atomic)` is experimental and subject to change (see issue #32976)
   --> $DIR/feature-gate-cfg-target-has-atomic.rs:69:7
    |
+LL | #[cfg(target_has_atomic = "128")]
+   |       ^^^^^^^^^^^^^^^^^^^^^^^^^
+   |
+   = help: add #![feature(cfg_target_has_atomic)] to the crate attributes to enable
+
+error[E0658]: `cfg(target_has_atomic)` is experimental and subject to change (see issue #32976)
+  --> $DIR/feature-gate-cfg-target-has-atomic.rs:74:7
+   |
+LL | #[cfg(target_has_atomic = "ptr")]
+   |       ^^^^^^^^^^^^^^^^^^^^^^^^^
+   |
+   = help: add #![feature(cfg_target_has_atomic)] to the crate attributes to enable
+
+error[E0658]: `cfg(target_has_atomic)` is experimental and subject to change (see issue #32976)
+  --> $DIR/feature-gate-cfg-target-has-atomic.rs:79:7
+   |
 LL | #[cfg(target_has_atomic = "ptr")]
    |       ^^^^^^^^^^^^^^^^^^^^^^^^^
    |
    = help: add #![feature(cfg_target_has_atomic)] to the crate attributes to enable
 
 error[E0658]: `cfg(target_has_atomic)` is experimental and subject to change (see issue #32976)
-  --> $DIR/feature-gate-cfg-target-has-atomic.rs:76:10
+  --> $DIR/feature-gate-cfg-target-has-atomic.rs:86:10
    |
 LL |     cfg!(target_has_atomic = "8");
    |          ^^^^^^^^^^^^^^^^^^^^^^^
@@ -87,7 +103,7 @@ LL |     cfg!(target_has_atomic = "8");
    = help: add #![feature(cfg_target_has_atomic)] to the crate attributes to enable
 
 error[E0658]: `cfg(target_has_atomic)` is experimental and subject to change (see issue #32976)
-  --> $DIR/feature-gate-cfg-target-has-atomic.rs:78:10
+  --> $DIR/feature-gate-cfg-target-has-atomic.rs:88:10
    |
 LL |     cfg!(target_has_atomic = "16");
    |          ^^^^^^^^^^^^^^^^^^^^^^^^
@@ -95,7 +111,7 @@ LL |     cfg!(target_has_atomic = "16");
    = help: add #![feature(cfg_target_has_atomic)] to the crate attributes to enable
 
 error[E0658]: `cfg(target_has_atomic)` is experimental and subject to change (see issue #32976)
-  --> $DIR/feature-gate-cfg-target-has-atomic.rs:80:10
+  --> $DIR/feature-gate-cfg-target-has-atomic.rs:90:10
    |
 LL |     cfg!(target_has_atomic = "32");
    |          ^^^^^^^^^^^^^^^^^^^^^^^^
@@ -103,7 +119,7 @@ LL |     cfg!(target_has_atomic = "32");
    = help: add #![feature(cfg_target_has_atomic)] to the crate attributes to enable
 
 error[E0658]: `cfg(target_has_atomic)` is experimental and subject to change (see issue #32976)
-  --> $DIR/feature-gate-cfg-target-has-atomic.rs:82:10
+  --> $DIR/feature-gate-cfg-target-has-atomic.rs:92:10
    |
 LL |     cfg!(target_has_atomic = "64");
    |          ^^^^^^^^^^^^^^^^^^^^^^^^
@@ -111,13 +127,21 @@ LL |     cfg!(target_has_atomic = "64");
    = help: add #![feature(cfg_target_has_atomic)] to the crate attributes to enable
 
 error[E0658]: `cfg(target_has_atomic)` is experimental and subject to change (see issue #32976)
-  --> $DIR/feature-gate-cfg-target-has-atomic.rs:84:10
+  --> $DIR/feature-gate-cfg-target-has-atomic.rs:94:10
+   |
+LL |     cfg!(target_has_atomic = "128");
+   |          ^^^^^^^^^^^^^^^^^^^^^^^^^
+   |
+   = help: add #![feature(cfg_target_has_atomic)] to the crate attributes to enable
+
+error[E0658]: `cfg(target_has_atomic)` is experimental and subject to change (see issue #32976)
+  --> $DIR/feature-gate-cfg-target-has-atomic.rs:96:10
    |
 LL |     cfg!(target_has_atomic = "ptr");
    |          ^^^^^^^^^^^^^^^^^^^^^^^^^
    |
    = help: add #![feature(cfg_target_has_atomic)] to the crate attributes to enable
 
-error: aborting due to 15 previous errors
+error: aborting due to 18 previous errors
 
 For more information about this error, try `rustc --explain E0658`.
index d0ee40504fb21b9497e6802ef916dbd68895115a..e1c1dcbcd790ce271faa113fd3c83542b6e53674 100644 (file)
@@ -1,4 +1,4 @@
-error[E0658]: `crate` visibility modifier is experimental (see issue #45388)
+error[E0658]: `crate` visibility modifier is experimental (see issue #53120)
   --> $DIR/feature-gate-crate_visibility_modifier.rs:11:1
    |
 LL | crate struct Bender { //~ ERROR `crate` visibility modifier is experimental
index cabfb56d7a840b5913110774390b241fceae17d0..bbd4b630263a44706e21fb21377a314db30c7319 100644 (file)
@@ -1,4 +1,4 @@
-error[E0658]: use of extern prelude names introduced with `extern crate` items is unstable (see issue #54658)
+error[E0658]: use of extern prelude names introduced with `extern crate` items is unstable (see issue #55599)
   --> $DIR/feature-gate-extern_crate_item_prelude.rs:26:9
    |
 LL |     use alloc;
@@ -6,7 +6,7 @@ LL |     use alloc;
    |
    = help: add #![feature(extern_crate_item_prelude)] to the crate attributes to enable
 
-error[E0658]: use of extern prelude names introduced with `extern crate` items is unstable (see issue #54658)
+error[E0658]: use of extern prelude names introduced with `extern crate` items is unstable (see issue #55599)
   --> $DIR/feature-gate-extern_crate_item_prelude.rs:28:9
    |
 LL |     use alloc::boxed;
@@ -14,7 +14,7 @@ LL |     use alloc::boxed;
    |
    = help: add #![feature(extern_crate_item_prelude)] to the crate attributes to enable
 
-error[E0658]: use of extern prelude names introduced with `extern crate` items is unstable (see issue #54658)
+error[E0658]: use of extern prelude names introduced with `extern crate` items is unstable (see issue #55599)
   --> $DIR/feature-gate-extern_crate_item_prelude.rs:33:11
    |
 LL |     use ::alloc;
@@ -22,7 +22,7 @@ LL |     use ::alloc;
    |
    = help: add #![feature(extern_crate_item_prelude)] to the crate attributes to enable
 
-error[E0658]: use of extern prelude names introduced with `extern crate` items is unstable (see issue #54658)
+error[E0658]: use of extern prelude names introduced with `extern crate` items is unstable (see issue #55599)
   --> $DIR/feature-gate-extern_crate_item_prelude.rs:35:11
    |
 LL |     use ::alloc::boxed;
@@ -30,7 +30,7 @@ LL |     use ::alloc::boxed;
    |
    = help: add #![feature(extern_crate_item_prelude)] to the crate attributes to enable
 
-error[E0658]: use of extern prelude names introduced with `extern crate` items is unstable (see issue #54658)
+error[E0658]: use of extern prelude names introduced with `extern crate` items is unstable (see issue #55599)
   --> $DIR/feature-gate-extern_crate_item_prelude.rs:9:17
    |
 LL |         let v = alloc::vec![0];
@@ -38,7 +38,7 @@ LL |         let v = alloc::vec![0];
    |
    = help: add #![feature(extern_crate_item_prelude)] to the crate attributes to enable
 
-error[E0658]: use of extern prelude names introduced with `extern crate` items is unstable (see issue #54658)
+error[E0658]: use of extern prelude names introduced with `extern crate` items is unstable (see issue #55599)
   --> $DIR/feature-gate-extern_crate_item_prelude.rs:11:18
    |
 LL |         type A = alloc::boxed::Box<u8>;
@@ -46,7 +46,7 @@ LL |         type A = alloc::boxed::Box<u8>;
    |
    = help: add #![feature(extern_crate_item_prelude)] to the crate attributes to enable
 
-error[E0658]: use of extern prelude names introduced with `extern crate` items is unstable (see issue #54658)
+error[E0658]: use of extern prelude names introduced with `extern crate` items is unstable (see issue #55599)
   --> $DIR/feature-gate-extern_crate_item_prelude.rs:18:19
    |
 LL |         let v = ::alloc::vec![0];
@@ -54,7 +54,7 @@ LL |         let v = ::alloc::vec![0];
    |
    = help: add #![feature(extern_crate_item_prelude)] to the crate attributes to enable
 
-error[E0658]: use of extern prelude names introduced with `extern crate` items is unstable (see issue #54658)
+error[E0658]: use of extern prelude names introduced with `extern crate` items is unstable (see issue #55599)
   --> $DIR/feature-gate-extern_crate_item_prelude.rs:20:20
    |
 LL |         type A = ::alloc::boxed::Box<u8>;
@@ -62,7 +62,7 @@ LL |         type A = ::alloc::boxed::Box<u8>;
    |
    = help: add #![feature(extern_crate_item_prelude)] to the crate attributes to enable
 
-error[E0658]: use of extern prelude names introduced with `extern crate` items is unstable (see issue #54658)
+error[E0658]: use of extern prelude names introduced with `extern crate` items is unstable (see issue #55599)
   --> $DIR/feature-gate-extern_crate_item_prelude.rs:42:14
    |
 LL |     type A = core::boxed::Box<u8>;
index 535ed94565c6db61472144dfae144f6802746a0e..a73533b617891c121ce4029a46d0d8cb414729bd 100644 (file)
@@ -1,4 +1,4 @@
-error[E0658]: `extern` in paths is experimental (see issue #44660)
+error[E0658]: `extern` in paths is experimental (see issue #55600)
   --> $DIR/feature-gate-extern_in_paths.rs:14:13
    |
 LL |     let _ = extern::std::vec::Vec::new(); //~ ERROR `extern` in paths is experimental
diff --git a/src/test/ui/feature-gates/feature-gate-nll.nll.stderr b/src/test/ui/feature-gates/feature-gate-nll.nll.stderr
deleted file mode 100644 (file)
index 81de0d1..0000000
+++ /dev/null
@@ -1,13 +0,0 @@
-error: compilation successful
-  --> $DIR/feature-gate-nll.rs:13:1
-   |
-LL | / fn main() { #![rustc_error] // rust-lang/rust#49855
-LL | |     let mut x = 33;
-LL | |
-LL | |     let p = &x;
-LL | |     x = 22; //~ ERROR cannot assign to `x` because it is borrowed [E0506]
-LL | | }
-   | |_^
-
-error: aborting due to previous error
-
index 752b1fa821f7f9cf59fcaa1610c3ef3d833028de..14c48fb48a09b6a83d85ca616cc5a0af6d42505b 100644 (file)
@@ -1,16 +1,16 @@
-// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-#![feature(rustc_attrs)]
+// This is a test checking that if you do not opt into NLL then you
+// should not get the effects of NLL applied to the test.
+
+// Don't use 2018 edition, since that turns on NLL (migration mode).
+// edition:2015
+
+// Don't use compare-mode=nll, since that turns on NLL.
+// ignore-compare-mode-nll
+
+
 #![allow(dead_code)]
 
-fn main() { #![rustc_error] // rust-lang/rust#49855
+fn main() {
     let mut x = 33;
 
     let p = &x;
diff --git a/src/test/ui/feature-gates/feature-gate-panic-implementation.rs b/src/test/ui/feature-gates/feature-gate-panic-implementation.rs
deleted file mode 100644 (file)
index ca51154..0000000
+++ /dev/null
@@ -1,21 +0,0 @@
-// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-// compile-flags:-C panic=abort
-
-#![no_std]
-#![no_main]
-
-use core::panic::PanicInfo;
-
-#[panic_implementation] //~ ERROR this attribute was renamed to `panic_handler` (see issue #44489)
-fn panic(info: &PanicInfo) -> ! {
-    loop {}
-}
diff --git a/src/test/ui/feature-gates/feature-gate-panic-implementation.stderr b/src/test/ui/feature-gates/feature-gate-panic-implementation.stderr
deleted file mode 100644 (file)
index a547804..0000000
+++ /dev/null
@@ -1,11 +0,0 @@
-error[E0658]: this attribute was renamed to `panic_handler` (see issue #44489)
-  --> $DIR/feature-gate-panic-implementation.rs:18:1
-   |
-LL | #[panic_implementation] //~ ERROR this attribute was renamed to `panic_handler` (see issue #44489)
-   | ^^^^^^^^^^^^^^^^^^^^^^^
-   |
-   = help: add #![feature(panic_implementation)] to the crate attributes to enable
-
-error: aborting due to previous error
-
-For more information about this error, try `rustc --explain E0658`.
diff --git a/src/test/ui/feature-gates/feature-gate-trait-alias.rs b/src/test/ui/feature-gates/feature-gate-trait-alias.rs
new file mode 100644 (file)
index 0000000..a2a183f
--- /dev/null
@@ -0,0 +1,13 @@
+// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+trait Foo = Default;
+
+fn main() {}
diff --git a/src/test/ui/feature-gates/feature-gate-trait-alias.stderr b/src/test/ui/feature-gates/feature-gate-trait-alias.stderr
new file mode 100644 (file)
index 0000000..e02dfe2
--- /dev/null
@@ -0,0 +1,11 @@
+error[E0658]: trait aliases are experimental (see issue #41517)
+  --> $DIR/feature-gate-trait-alias.rs:11:1
+   |
+LL | trait Foo = Default;
+   | ^^^^^^^^^^^^^^^^^^^^
+   |
+   = help: add #![feature(trait_alias)] to the crate attributes to enable
+
+error: aborting due to previous error
+
+For more information about this error, try `rustc --explain E0658`.
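
Once the gate from the new test above is enabled on nightly, the alias can also be used as a bound. A minimal sketch (not part of the commit; names are illustrative):

    #![feature(trait_alias)]

    trait HasDefault = Default;

    fn fresh<T: HasDefault>() -> T {
        T::default()
    }

    fn main() {
        let x: u32 = fresh();
        assert_eq!(x, 0);
    }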
index ce3de3b302dd3678e52d41dec61fddeb660cb66c..db81f00a9e364ab5ed11458dc10e5cefb3ff353f 100644 (file)
@@ -16,3 +16,4 @@ fn main() {
     }
 }
 //~^ ERROR expected `{`, found `}`
+//~| NOTE expected `{`
index bc8e7310ce37198eed4f852a03ef3ede5d01c3dc..c94dac8871dabbfb3d8a0eee37bd1e94270ae687 100644 (file)
@@ -5,7 +5,7 @@ LL |     if 5 == {
    |     -- this `if` statement has a condition, but no block
 ...
 LL | }
-   | ^
+   | ^ expected `{`
 
 error: aborting due to previous error
 
diff --git a/src/test/ui/imports/issue-55457.rs b/src/test/ui/imports/issue-55457.rs
new file mode 100644 (file)
index 0000000..9c6750f
--- /dev/null
@@ -0,0 +1,8 @@
+use NonExistent; //~ ERROR unresolved import `NonExistent`
+use non_existent::non_existent; //~ ERROR unresolved import `non_existent`
+
+#[non_existent] //~ ERROR cannot determine resolution for the attribute macro `non_existent`
+#[derive(NonExistent)] //~ ERROR cannot determine resolution for the derive macro `NonExistent`
+struct S;
+
+fn main() {}
diff --git a/src/test/ui/imports/issue-55457.stderr b/src/test/ui/imports/issue-55457.stderr
new file mode 100644 (file)
index 0000000..363dec0
--- /dev/null
@@ -0,0 +1,31 @@
+error[E0432]: unresolved import `NonExistent`
+  --> $DIR/issue-55457.rs:1:5
+   |
+LL | use NonExistent; //~ ERROR unresolved import `NonExistent`
+   |     ^^^^^^^^^^^ no `NonExistent` in the root. Did you mean to use `non_existent`?
+
+error[E0432]: unresolved import `non_existent`
+  --> $DIR/issue-55457.rs:2:5
+   |
+LL | use non_existent::non_existent; //~ ERROR unresolved import `non_existent`
+   |     ^^^^^^^^^^^^ Maybe a missing `extern crate non_existent;`?
+
+error: cannot determine resolution for the derive macro `NonExistent`
+  --> $DIR/issue-55457.rs:5:10
+   |
+LL | #[derive(NonExistent)] //~ ERROR cannot determine resolution for the derive macro `NonExistent`
+   |          ^^^^^^^^^^^
+   |
+   = note: import resolution is stuck, try simplifying macro imports
+
+error: cannot determine resolution for the attribute macro `non_existent`
+  --> $DIR/issue-55457.rs:4:3
+   |
+LL | #[non_existent] //~ ERROR cannot determine resolution for the attribute macro `non_existent`
+   |   ^^^^^^^^^^^^
+   |
+   = note: import resolution is stuck, try simplifying macro imports
+
+error: aborting due to 4 previous errors
+
+For more information about this error, try `rustc --explain E0432`.
index 37021f38cacdf826f97234a4f81801c6365d2b50..bbfb14b8d9d22d5e4ed00523bf0351610f394cba 100644 (file)
@@ -21,6 +21,7 @@ fn main() {
     issue_37437();
     issue_40187();
     issue_54067();
+    multiple_errors();
 }
 
 fn issue_37433() {
@@ -55,3 +56,11 @@ fn issue_54067() {
         asm!("mov sp, $0"::"r"(addr)); //~ ERROR E0669
     }
 }
+
+fn multiple_errors() {
+    let addr: (u32, u32) = (1, 2);
+    unsafe {
+        asm!("mov sp, $0"::"r"(addr), //~ ERROR E0669
+                           "r"("hello e0669")); //~ ERROR E0669
+    }
+}
index 6971215a95f9ede5be47dc10f0a63b49ea8b7f15..2f650bfcab7b52ac1ef27f4f8fe196da1909a2ca 100644 (file)
@@ -1,33 +1,45 @@
 error[E0669]: invalid value for constraint in inline assembly
-  --> $DIR/inline-asm-bad-operand.rs:28:9
+  --> $DIR/inline-asm-bad-operand.rs:29:24
    |
 LL |         asm!("" :: "r"("")); //~ ERROR E0669
-   |         ^^^^^^^^^^^^^^^^^^^^
+   |                        ^^
 
 error[E0669]: invalid value for constraint in inline assembly
-  --> $DIR/inline-asm-bad-operand.rs:33:9
+  --> $DIR/inline-asm-bad-operand.rs:34:32
    |
 LL |         asm!("ret" : : "{rdi}"(target)); //~ ERROR E0669
-   |         ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+   |                                ^^^^^^
 
 error[E0669]: invalid value for constraint in inline assembly
-  --> $DIR/inline-asm-bad-operand.rs:40:14
+  --> $DIR/inline-asm-bad-operand.rs:41:29
    |
 LL |     unsafe { asm!("" :: "i"(hello)) }; //~ ERROR E0669
-   |              ^^^^^^^^^^^^^^^^^^^^^^
+   |                             ^^^^^
 
 error[E0669]: invalid value for constraint in inline assembly
-  --> $DIR/inline-asm-bad-operand.rs:48:9
+  --> $DIR/inline-asm-bad-operand.rs:49:38
    |
 LL |         asm!("movups $1, %xmm0"::"m"(arr)); //~ ERROR E0669
-   |         ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+   |                                      ^^^
 
 error[E0669]: invalid value for constraint in inline assembly
-  --> $DIR/inline-asm-bad-operand.rs:55:9
+  --> $DIR/inline-asm-bad-operand.rs:56:32
    |
 LL |         asm!("mov sp, $0"::"r"(addr)); //~ ERROR E0669
-   |         ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+   |                                ^^^^
 
-error: aborting due to 5 previous errors
+error[E0669]: invalid value for constraint in inline assembly
+  --> $DIR/inline-asm-bad-operand.rs:63:32
+   |
+LL |         asm!("mov sp, $0"::"r"(addr), //~ ERROR E0669
+   |                                ^^^^
+
+error[E0669]: invalid value for constraint in inline assembly
+  --> $DIR/inline-asm-bad-operand.rs:64:32
+   |
+LL |                            "r"("hello e0669")); //~ ERROR E0669
+   |                                ^^^^^^^^^^^^^
+
+error: aborting due to 7 previous errors
 
 For more information about this error, try `rustc --explain E0669`.
diff --git a/src/test/ui/invalid_dispatch_from_dyn_impls.rs b/src/test/ui/invalid_dispatch_from_dyn_impls.rs
new file mode 100644 (file)
index 0000000..1cf5c73
--- /dev/null
@@ -0,0 +1,52 @@
+// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+#![feature(unsize, dispatch_from_dyn)]
+
+use std::{
+    ops::DispatchFromDyn,
+    marker::{Unsize, PhantomData},
+};
+
+struct WrapperWithExtraField<T>(T, i32);
+
+impl<T, U> DispatchFromDyn<WrapperWithExtraField<U>> for WrapperWithExtraField<T>
+where
+    T: DispatchFromDyn<U>,
+{} //~^^^ ERROR [E0378]
+
+
+struct MultiplePointers<T: ?Sized>{
+    ptr1: *const T,
+    ptr2: *const T,
+}
+
+impl<T: ?Sized, U: ?Sized> DispatchFromDyn<MultiplePointers<U>> for MultiplePointers<T>
+where
+    T: Unsize<U>,
+{} //~^^^ ERROR [E0378]
+
+
+struct NothingToCoerce<T: ?Sized> {
+    data: PhantomData<T>,
+}
+
+impl<T: ?Sized, U: ?Sized> DispatchFromDyn<NothingToCoerce<T>> for NothingToCoerce<U> {}
+//~^ ERROR [E0378]
+
+#[repr(C)]
+struct HasReprC<T: ?Sized>(Box<T>);
+
+impl<T: ?Sized, U: ?Sized> DispatchFromDyn<HasReprC<U>> for HasReprC<T>
+where
+    T: Unsize<U>,
+{} //~^^^ ERROR [E0378]
+
+fn main() {}
diff --git a/src/test/ui/invalid_dispatch_from_dyn_impls.stderr b/src/test/ui/invalid_dispatch_from_dyn_impls.stderr
new file mode 100644 (file)
index 0000000..82186b6
--- /dev/null
@@ -0,0 +1,41 @@
+error[E0378]: the trait `DispatchFromDyn` may only be implemented for structs containing the field being coerced, `PhantomData` fields, and nothing else
+  --> $DIR/invalid_dispatch_from_dyn_impls.rs:20:1
+   |
+LL | / impl<T, U> DispatchFromDyn<WrapperWithExtraField<U>> for WrapperWithExtraField<T>
+LL | | where
+LL | |     T: DispatchFromDyn<U>,
+LL | | {} //~^^^ ERROR [E0378]
+   | |__^
+   |
+   = note: extra field `1` of type `i32` is not allowed
+
+error[E0378]: implementing the `DispatchFromDyn` trait requires multiple coercions
+  --> $DIR/invalid_dispatch_from_dyn_impls.rs:31:1
+   |
+LL | / impl<T: ?Sized, U: ?Sized> DispatchFromDyn<MultiplePointers<U>> for MultiplePointers<T>
+LL | | where
+LL | |     T: Unsize<U>,
+LL | | {} //~^^^ ERROR [E0378]
+   | |__^
+   |
+   = note: the trait `DispatchFromDyn` may only be implemented for a coercion between structures with a single field being coerced
+   = note: currently, 2 fields need coercions: `ptr1` (`*const T` to `*const U`), `ptr2` (`*const T` to `*const U`)
+
+error[E0378]: the trait `DispatchFromDyn` may only be implemented for a coercion between structures with a single field being coerced, none found
+  --> $DIR/invalid_dispatch_from_dyn_impls.rs:41:1
+   |
+LL | impl<T: ?Sized, U: ?Sized> DispatchFromDyn<NothingToCoerce<T>> for NothingToCoerce<U> {}
+   | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error[E0378]: structs implementing `DispatchFromDyn` may not have `#[repr(packed)]` or `#[repr(C)]`
+  --> $DIR/invalid_dispatch_from_dyn_impls.rs:47:1
+   |
+LL | / impl<T: ?Sized, U: ?Sized> DispatchFromDyn<HasReprC<U>> for HasReprC<T>
+LL | | where
+LL | |     T: Unsize<U>,
+LL | | {} //~^^^ ERROR [E0378]
+   | |__^
+
+error: aborting due to 4 previous errors
+
+For more information about this error, try `rustc --explain E0378`.
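
Editor's note (hedged, nightly-only sketch, not part of this commit): for contrast with the rejected impls above, E0378 accepts a struct with exactly one coerced pointer field, optionally alongside `PhantomData`, and no `#[repr(C)]`/`#[repr(packed)]`.

#![feature(unsize, dispatch_from_dyn)]
#![allow(dead_code)]

use std::marker::{PhantomData, Unsize};
use std::ops::DispatchFromDyn;

// One coerced field (`ptr`) plus a PhantomData field: this shape passes the check.
struct MyPtr<T: ?Sized> {
    ptr: *const T,
    _marker: PhantomData<()>,
}

impl<T: ?Sized + Unsize<U>, U: ?Sized> DispatchFromDyn<MyPtr<U>> for MyPtr<T> {}

fn main() {}
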
index ac079b452c5eb4647b8be348617a7653dcf57e2a..d50ee64cf52904b77522e592021e59d73e758044 100644 (file)
@@ -1,4 +1,4 @@
-error: expected `{`, found `in`
+error: expected `{`, found keyword `in`
   --> $DIR/issue-51602.rs:12:10
    |
 LL |     if i in 1..10 {
diff --git a/src/test/ui/issues/issue-17263.ast.stderr b/src/test/ui/issues/issue-17263.ast.stderr
new file mode 100644 (file)
index 0000000..3d42dcb
--- /dev/null
@@ -0,0 +1,26 @@
+error[E0499]: cannot borrow `x` (via `x.b`) as mutable more than once at a time
+  --> $DIR/issue-17263.rs:17:34
+   |
+LL |     let (a, b) = (&mut x.a, &mut x.b);
+   |                        ---       ^^^ second mutable borrow occurs here (via `x.b`)
+   |                        |
+   |                        first mutable borrow occurs here (via `x.a`)
+...
+LL | }
+   | - first borrow ends here
+
+error[E0502]: cannot borrow `foo` (via `foo.b`) as immutable because `foo` is also borrowed as mutable (via `foo.a`)
+  --> $DIR/issue-17263.rs:21:32
+   |
+LL |     let (c, d) = (&mut foo.a, &foo.b);
+   |                        -----   ^^^^^ immutable borrow occurs here (via `foo.b`)
+   |                        |
+   |                        mutable borrow occurs here (via `foo.a`)
+...
+LL | }
+   | - mutable borrow ends here
+
+error: aborting due to 2 previous errors
+
+Some errors occurred: E0499, E0502.
+For more information about an error, try `rustc --explain E0499`.
index d6009e8078dceb204102b6f977f68de9cbb4fd21..cdb574b8b9f942f8e3d069b5a3051660d89b25af 100644 (file)
@@ -1,12 +1,12 @@
 error: compilation successful
   --> $DIR/issue-17263.rs:15:1
    |
-LL | / fn main() { #![rustc_error] // rust-lang/rust#49855
+LL | / fn main() { //[nll]~ ERROR compilation successful
 LL | |     let mut x: Box<_> = box Foo { a: 1, b: 2 };
 LL | |     let (a, b) = (&mut x.a, &mut x.b);
-LL | |     //~^ ERROR cannot borrow `x` (via `x.b`) as mutable more than once at a time
+LL | |     //[ast]~^ ERROR cannot borrow `x` (via `x.b`) as mutable more than once at a time
 ...  |
-LL | |     //~^ ERROR cannot borrow `foo` (via `foo.b`) as immutable
+LL | |     use_mut(a);
 LL | | }
    | |_^
 
index b251f9a415253eab1a1f71908a854bb4dc83e122..754f3b90aacf1f47de9ce15674126dc94370edf6 100644 (file)
@@ -1,23 +1,35 @@
-// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
+// This checks diagnostic quality for cases where AST-borrowck treated
+// `Box<T>` as other types (see rust-lang/rfcs#130). NLL again treats
+// `Box<T>` specially. We capture the differences via revisions.
 
+// revisions: ast nll
+//[ast]compile-flags: -Z borrowck=ast
+//[nll]compile-flags: -Z borrowck=migrate -Z two-phase-borrows
+
+// don't worry about the --compare-mode=nll on this test.
+// ignore-compare-mode-nll
 #![feature(box_syntax, rustc_attrs)]
 
 struct Foo { a: isize, b: isize }
-
-fn main() { #![rustc_error] // rust-lang/rust#49855
+#[rustc_error] // rust-lang/rust#49855
+fn main() { //[nll]~ ERROR compilation successful
     let mut x: Box<_> = box Foo { a: 1, b: 2 };
     let (a, b) = (&mut x.a, &mut x.b);
-    //~^ ERROR cannot borrow `x` (via `x.b`) as mutable more than once at a time
+    //[ast]~^ ERROR cannot borrow `x` (via `x.b`) as mutable more than once at a time
 
     let mut foo: Box<_> = box Foo { a: 1, b: 2 };
     let (c, d) = (&mut foo.a, &foo.b);
-    //~^ ERROR cannot borrow `foo` (via `foo.b`) as immutable
+    //[ast]~^ ERROR cannot borrow `foo` (via `foo.b`) as immutable
+
+    // We explicitly use the references created above to illustrate
+    // that NLL is accepting this code *not* because of artificially
+    // short lifetimes, but rather because it understands that all the
+    // references are of disjoint parts of memory.
+    use_imm(d);
+    use_mut(c);
+    use_mut(b);
+    use_mut(a);
 }
+
+fn use_mut<T>(_: &mut T) { }
+fn use_imm<T>(_: &T) { }
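
Editor's note (a sketch of what the `nll` revision accepts, stated here as an aside): NLL tracks the individual fields of the boxed value, so the borrow pattern from the test compiles on a current compiler without any `box` syntax.

struct Foo { a: isize, b: isize }

fn main() {
    let mut x: Box<Foo> = Box::new(Foo { a: 1, b: 2 });
    // `x.a` and `x.b` are disjoint parts of the same box, so both mutable
    // borrows may be live at once.
    let (a, b) = (&mut x.a, &mut x.b);
    *a += 1;
    *b += 1;
    assert_eq!((x.a, x.b), (2, 3));
}
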
diff --git a/src/test/ui/issues/issue-17263.stderr b/src/test/ui/issues/issue-17263.stderr
deleted file mode 100644 (file)
index 4767fbb..0000000
+++ /dev/null
@@ -1,26 +0,0 @@
-error[E0499]: cannot borrow `x` (via `x.b`) as mutable more than once at a time
-  --> $DIR/issue-17263.rs:17:34
-   |
-LL |     let (a, b) = (&mut x.a, &mut x.b);
-   |                        ---       ^^^ second mutable borrow occurs here (via `x.b`)
-   |                        |
-   |                        first mutable borrow occurs here (via `x.a`)
-...
-LL | }
-   | - first borrow ends here
-
-error[E0502]: cannot borrow `foo` (via `foo.b`) as immutable because `foo` is also borrowed as mutable (via `foo.a`)
-  --> $DIR/issue-17263.rs:21:32
-   |
-LL |     let (c, d) = (&mut foo.a, &foo.b);
-   |                        -----   ^^^^^ immutable borrow occurs here (via `foo.b`)
-   |                        |
-   |                        mutable borrow occurs here (via `foo.a`)
-LL |     //~^ ERROR cannot borrow `foo` (via `foo.b`) as immutable
-LL | }
-   | - mutable borrow ends here
-
-error: aborting due to 2 previous errors
-
-Some errors occurred: E0499, E0502.
-For more information about an error, try `rustc --explain E0499`.
index f18b58a8330e032e8a2db500a82314f61ae14d33..7feabef56609a054c9e3334fe9072d450cc8ec2a 100644 (file)
@@ -5,7 +5,7 @@ LL | impl<T: ?Sized + Unsize<U>, U: ?Sized> CoerceUnsized<MyRc<U>> for MyRc<T>{
    |                                        ^^^^^^^^^^^^^^^^^^^^^^ requires multiple coercions
    |
    = note: `CoerceUnsized` may only be implemented for a coercion between structures with one field being coerced
-   = note: currently, 2 fields need coercions: _ptr (*const T to *const U), _boo (NotPhantomData<T> to NotPhantomData<U>)
+   = note: currently, 2 fields need coercions: `_ptr` (`*const T` to `*const U`), `_boo` (`NotPhantomData<T>` to `NotPhantomData<U>`)
 
 error: aborting due to previous error
 
index 783e831a2a523cb51b5b6121702177f6de2afcef..b2bfe6b5e8c0778b854feb39d74e5b40cdd0b190 100644 (file)
@@ -4,7 +4,7 @@ error[E0004]: non-exhaustive patterns: type () is non-empty
 LL |     match () { } //~ ERROR non-exhaustive
    |           ^^
    |
-help: Please ensure that all possible cases are being handled; possibly adding wildcards or more match arms.
+help: ensure that all possible cases are being handled, possibly by adding wildcards or more match arms
   --> $DIR/issue-3096-1.rs:12:11
    |
 LL |     match () { } //~ ERROR non-exhaustive
index 6031f25c03dff6bcf10abedc6ea8ce4fcdbd3504..bb9dfabe7be0338189591ce1461364200135c1c2 100644 (file)
@@ -4,7 +4,7 @@ error[E0004]: non-exhaustive patterns: type *const bottom is non-empty
 LL |     match x { } //~ ERROR non-exhaustive patterns
    |           ^
    |
-help: Please ensure that all possible cases are being handled; possibly adding wildcards or more match arms.
+help: ensure that all possible cases are being handled, possibly by adding wildcards or more match arms
   --> $DIR/issue-3096-2.rs:15:11
    |
 LL |     match x { } //~ ERROR non-exhaustive patterns
diff --git a/src/test/ui/issues/issue-31076.rs b/src/test/ui/issues/issue-31076.rs
new file mode 100644 (file)
index 0000000..e453107
--- /dev/null
@@ -0,0 +1,17 @@
+#![feature(no_core, lang_items)]
+#![no_core]
+
+#[lang="sized"]
+trait Sized {}
+
+#[lang="add"]
+trait Add<T> {}
+
+impl Add<i32> for i32 {}
+
+fn main() {
+    let x = 5 + 6;
+    //~^ ERROR binary operation `+` cannot be applied to type `{integer}`
+    let y = 5i32 + 6i32;
+    //~^ ERROR binary operation `+` cannot be applied to type `i32`
+}
diff --git a/src/test/ui/issues/issue-31076.stderr b/src/test/ui/issues/issue-31076.stderr
new file mode 100644 (file)
index 0000000..3a13f02
--- /dev/null
@@ -0,0 +1,19 @@
+error[E0369]: binary operation `+` cannot be applied to type `{integer}`
+  --> $DIR/issue-31076.rs:13:13
+   |
+LL |     let x = 5 + 6;
+   |             ^^^^^
+   |
+   = note: an implementation of `std::ops::Add` might be missing for `{integer}`
+
+error[E0369]: binary operation `+` cannot be applied to type `i32`
+  --> $DIR/issue-31076.rs:15:13
+   |
+LL |     let y = 5i32 + 6i32;
+   |             ^^^^^^^^^^^
+   |
+   = note: an implementation of `std::ops::Add` might be missing for `i32`
+
+error: aborting due to 2 previous errors
+
+For more information about this error, try `rustc --explain E0369`.
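
Editor's note (a small sketch, outside the `#![no_core]` lang-item setup used by the test): E0369 is ordinarily resolved by providing a complete `Add` impl with an `Output` type.

use std::ops::Add;

#[derive(Clone, Copy, Debug, PartialEq)]
struct Meters(i32);

impl Add for Meters {
    type Output = Meters;
    fn add(self, rhs: Meters) -> Meters {
        Meters(self.0 + rhs.0)
    }
}

fn main() {
    assert_eq!(Meters(5) + Meters(6), Meters(11));
}
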
index 9ca39dbaa3436729ca55d64ae0721e1f4d016b28..a5d30b8561ab15daecbd48ad804d85f577aeaa54 100644 (file)
@@ -7,7 +7,7 @@ LL |         if $tgt.has_$field() {}
    |         this `if` statement has a condition, but no block
 ...
 LL |     get_opt!(bar, foo);
-   |                   ^^^
+   |                   ^^^ expected `{`
 
 error: aborting due to previous error
 
index b25b063f3b6fbbf2379676795aa42a94f61416d0..2165d951102c2d565f16a9842380c9c126e6c374 100644 (file)
@@ -3,9 +3,9 @@ error[E0597]: `line` does not live long enough
    |
 LL |         let v: Vec<&str> = line.split_whitespace().collect();
    |                            ^^^^ borrowed value does not live long enough
-LL |         //~^ ERROR `line` does not live long enough
-LL |         println!("accumulator before add_assign {:?}", acc.map);
-   |                                                        ------- borrow used here, in later iteration of loop
+...
+LL |         acc += cnt2;
+   |         --- borrow used here, in later iteration of loop
 ...
 LL |     }
    |     - `line` dropped here while still borrowed
index b26ad9bc37dd15dea784869e38dd967175c24259..1a353f9ea7cd37f56f558122116009f3818fe7e4 100644 (file)
@@ -43,7 +43,7 @@ pub fn panics() {
     for line in vec!["123456789".to_string(), "12345678".to_string()] {
         let v: Vec<&str> = line.split_whitespace().collect();
         //~^ ERROR `line` does not live long enough
-        println!("accumulator before add_assign {:?}", acc.map);
+        // println!("accumulator before add_assign {:?}", acc.map);
         let mut map = HashMap::new();
         for str_ref in v {
             let e = map.entry(str_ref);
@@ -53,7 +53,7 @@ pub fn panics() {
         }
         let cnt2 = Counter{map};
         acc += cnt2;
-        println!("accumulator after add_assign {:?}", acc.map);
+        // println!("accumulator after add_assign {:?}", acc.map);
         // line gets dropped here but references are kept in acc.map
     }
 }
index 0ab1ad2c24200360595147b43f83da6a71ffeb79..02fa541d6a4d6ffb487778be59e4cac63357b0b5 100644 (file)
@@ -10,6 +10,7 @@ error: expected `{`, found `'b`
 LL |     if true 'b: {} //~ ERROR expected `{`, found `'b`
    |     --      ^^----
    |     |       |
+   |     |       expected `{`
    |     |       help: try placing this code inside a block: `{ 'b: { } }`
    |     this `if` statement has a condition, but no block
 
@@ -19,13 +20,16 @@ error: expected `{`, found `'b`
 LL |     if true {} else 'b: {} //~ ERROR expected `{`, found `'b`
    |                     ^^----
    |                     |
+   |                     expected `{`
    |                     help: try placing this code inside a block: `{ 'b: { } }`
 
 error: expected one of `.`, `?`, `{`, or an operator, found `'b`
   --> $DIR/label_break_value_illegal_uses.rs:28:17
    |
 LL |     match false 'b: {} //~ ERROR expected one of `.`, `?`, `{`, or an operator
-   |                 ^^ expected one of `.`, `?`, `{`, or an operator here
+   |     -----       ^^ expected one of `.`, `?`, `{`, or an operator here
+   |     |
+   |     while parsing this match expression
 
 error: aborting due to 4 previous errors
 
index 9b40062bd57b912d1860b5c7b5d0155b3cd32660..4182b5258884f55b8ef205d0640cd4d88f007a32 100644 (file)
@@ -11,8 +11,6 @@ LL | |     };
    |
    = note: expected type `for<'r, 's> fn(&'r u8, &'s u8)`
               found type `for<'a> fn(&'a u8, &'a u8)`
-   = note: this was previously accepted by the compiler but has been phased out
-   = note: for more information, see https://github.com/rust-lang/rust/issues/45852
 
 error: aborting due to previous error
 
index 79442bd108a6759fe1ef27761d6e5309935bbf50..bb8cb45eec39711c88b4e2ba14365c712a537248 100644 (file)
@@ -11,8 +11,6 @@ LL | |     };
    |
    = note: expected type `&dyn for<'a, 'b> Foo<&'a u8, &'b u8>`
               found type `&dyn for<'a> Foo<&'a u8, &'a u8>`
-   = note: this was previously accepted by the compiler but has been phased out
-   = note: for more information, see https://github.com/rust-lang/rust/issues/45852
 
 error: aborting due to previous error
 
diff --git a/src/test/ui/macros/must-use-in-macro-55516.rs b/src/test/ui/macros/must-use-in-macro-55516.rs
new file mode 100644 (file)
index 0000000..ad7cc37
--- /dev/null
@@ -0,0 +1,21 @@
+// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// compile-pass
+// compile-flags: -Wunused
+
+// make sure write!() can't hide its unused Result
+
+fn main() {
+    use std::fmt::Write;
+    let mut example = String::new();
+    write!(&mut example, "{}", 42); //~WARN must be used
+}
+
diff --git a/src/test/ui/macros/must-use-in-macro-55516.stderr b/src/test/ui/macros/must-use-in-macro-55516.stderr
new file mode 100644 (file)
index 0000000..b03a580
--- /dev/null
@@ -0,0 +1,10 @@
+warning: unused `std::result::Result` that must be used
+  --> $DIR/must-use-in-macro-55516.rs:19:5
+   |
+LL |     write!(&mut example, "{}", 42); //~WARN must be used
+   |     ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+   |
+   = note: `-W unused-must-use` implied by `-W unused`
+   = note: this `Result` may be an `Err` variant, which should be handled
+   = note: this error originates in a macro outside of the current crate (in Nightly builds, run with -Z external-macro-backtrace for more info)
+
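Editor's note (hedged sketch): the warned-about `Result` is normally consumed explicitly, which silences `unused_must_use`.

use std::fmt::Write;

fn main() {
    let mut example = String::new();
    // Acknowledging the Result is the usual fix for the warning above.
    write!(&mut example, "{}", 42).expect("writing to a String cannot fail");
    assert_eq!(example, "42");
}
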
index a48eff890b331448750f525310b98f1341d2e44e..dbbfd10b79526994ce21b78d25948a33a324040d 100644 (file)
@@ -2,7 +2,7 @@ error: expected `{`, found `=>`
   --> $DIR/missing-block-hint.rs:13:18
    |
 LL |         if (foo) => {} //~ ERROR expected `{`, found `=>`
-   |         --       ^^
+   |         --       ^^ expected `{`
    |         |
    |         this `if` statement has a condition, but no block
 
@@ -14,6 +14,7 @@ LL |         if (foo)
 LL |             bar; //~ ERROR expected `{`, found `bar`
    |             ^^^-
    |             |
+   |             expected `{`
    |             help: try placing this code inside a block: `{ bar; }`
 
 error: aborting due to 2 previous errors
index 72319a257d85d9501d2a2eb1e300cc9fed4f116f..b4427565a8649162336462c800f6f2994bfff2bb 100644 (file)
@@ -1,4 +1,4 @@
-warning: expected `;`, found `let`
+warning: expected `;`, found keyword `let`
   --> $DIR/missing-semicolon-warning.rs:16:12
    |
 LL |         $( let x = $e1 )*; //~ WARN expected `;`
index 901ace59d33a04c75320af5150fe6a485e31f67c..88383190cbce6604e2d296397ce8748075f3ae1d 100644 (file)
@@ -1,4 +1,4 @@
-error: user substs: Canonical { variables: [], value: UserSubsts { substs: [u32], user_self_ty: None } }
+error: user substs: Canonical { max_universe: U0, variables: [], value: UserSubsts { substs: [u32], user_self_ty: None } }
   --> $DIR/dump-adt-brace-struct.rs:28:5
    |
 LL |     SomeStruct::<u32> { t: 22 }; //~ ERROR [u32]
index a26be359fc4aa4c28ed9b608c31d6604a727483d..3beb994a4e8a5731636422713a23c31d5959541a 100644 (file)
@@ -1,22 +1,22 @@
-error: user substs: Canonical { variables: [], value: UserSubsts { substs: [u32], user_self_ty: None } }
+error: user substs: Canonical { max_universe: U0, variables: [], value: UserSubsts { substs: [u32], user_self_ty: None } }
   --> $DIR/dump-fn-method.rs:36:13
    |
 LL |     let x = foo::<u32>; //~ ERROR [u32]
    |             ^^^^^^^^^^
 
-error: user substs: Canonical { variables: [CanonicalVarInfo { kind: Ty(General) }, CanonicalVarInfo { kind: Ty(General) }], value: UserSubsts { substs: [?0, u32, ?1], user_self_ty: None } }
+error: user substs: Canonical { max_universe: U0, variables: [CanonicalVarInfo { kind: Ty(General) }, CanonicalVarInfo { kind: Ty(General) }], value: UserSubsts { substs: [?0, u32, ?1], user_self_ty: None } }
   --> $DIR/dump-fn-method.rs:42:13
    |
 LL |     let x = <_ as Bazoom<u32>>::method::<_>; //~ ERROR [?0, u32, ?1]
    |             ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
 
-error: user substs: Canonical { variables: [], value: UserSubsts { substs: [u8, u16, u32], user_self_ty: None } }
+error: user substs: Canonical { max_universe: U0, variables: [], value: UserSubsts { substs: [u8, u16, u32], user_self_ty: None } }
   --> $DIR/dump-fn-method.rs:46:13
    |
 LL |     let x = <u8 as Bazoom<u16>>::method::<u32>; //~ ERROR [u8, u16, u32]
    |             ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
 
-error: user substs: Canonical { variables: [CanonicalVarInfo { kind: Ty(General) }, CanonicalVarInfo { kind: Ty(General) }], value: UserSubsts { substs: [?0, ?1, u32], user_self_ty: None } }
+error: user substs: Canonical { max_universe: U0, variables: [CanonicalVarInfo { kind: Ty(General) }, CanonicalVarInfo { kind: Ty(General) }], value: UserSubsts { substs: [?0, ?1, u32], user_self_ty: None } }
   --> $DIR/dump-fn-method.rs:54:5
    |
 LL |     y.method::<u32>(44, 66); //~ ERROR [?0, ?1, u32]
diff --git a/src/test/ui/panic-implementation/panic-implementation-deprecated.rs b/src/test/ui/panic-implementation/panic-implementation-deprecated.rs
deleted file mode 100644 (file)
index c4bec01..0000000
+++ /dev/null
@@ -1,24 +0,0 @@
-// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-// compile-flags:-C panic=abort
-
-#![deny(deprecated)]
-#![feature(panic_implementation)]
-#![no_std]
-
-use core::panic::PanicInfo;
-
-#[panic_implementation]
-fn panic(info: &PanicInfo) -> ! {
-    loop {}
-}
-
-fn main() {}
diff --git a/src/test/ui/panic-implementation/panic-implementation-deprecated.stderr b/src/test/ui/panic-implementation/panic-implementation-deprecated.stderr
deleted file mode 100644 (file)
index fabfba9..0000000
+++ /dev/null
@@ -1,14 +0,0 @@
-error: use of deprecated attribute `panic_implementation`: this attribute was renamed to `panic_handler`. See https://github.com/rust-lang/rust/issues/44489#issuecomment-415140224
-  --> $DIR/panic-implementation-deprecated.rs:19:1
-   |
-LL | #[panic_implementation]
-   | ^^^^^^^^^^^^^^^^^^^^^^^ help: replace this attribute with `#[panic_handler]`
-   |
-note: lint level defined here
-  --> $DIR/panic-implementation-deprecated.rs:13:9
-   |
-LL | #![deny(deprecated)]
-   |         ^^^^^^^^^^
-
-error: aborting due to previous error
-
index 2882364081a1301cb4c1edfd0fc93cbeee325aa2..a321d0e29b276431771a36f138456828045cf3c3 100644 (file)
@@ -12,7 +12,7 @@
 
 fn /// document
 foo() {}
-//~^^ ERROR expected identifier, found `/// document`
+//~^^ ERROR expected identifier, found doc comment `/// document`
 
 fn main() {
     foo();
index f93ab634c28c91cb6aa26c109fab76e76b15349a..73b169ce8e58518f960a3189f266ed4d3af7999b 100644 (file)
@@ -1,8 +1,8 @@
-error: expected identifier, found `/// document`
+error: expected identifier, found doc comment `/// document`
   --> $DIR/doc-before-identifier.rs:13:4
    |
 LL | fn /// document
-   |    ^^^^^^^^^^^^ expected identifier
+   |    ^^^^^^^^^^^^ expected identifier, found doc comment
 
 error: aborting due to previous error
 
diff --git a/src/test/ui/parser/doc-comment-in-if-statement.rs b/src/test/ui/parser/doc-comment-in-if-statement.rs
new file mode 100644 (file)
index 0000000..c85fe25
--- /dev/null
@@ -0,0 +1,4 @@
+fn main() {
+    if true /*!*/ {}
+    //~^ ERROR expected `{`, found doc comment `/*!*/`
+}
diff --git a/src/test/ui/parser/doc-comment-in-if-statement.stderr b/src/test/ui/parser/doc-comment-in-if-statement.stderr
new file mode 100644 (file)
index 0000000..6bcb773
--- /dev/null
@@ -0,0 +1,10 @@
+error: expected `{`, found doc comment `/*!*/`
+  --> $DIR/doc-comment-in-if-statement.rs:2:13
+   |
+LL |     if true /*!*/ {}
+   |     --      ^^^^^ expected `{`
+   |     |
+   |     this `if` statement has a condition, but no block
+
+error: aborting due to previous error
+
diff --git a/src/test/ui/parser/fn-arg-doc-comment.rs b/src/test/ui/parser/fn-arg-doc-comment.rs
new file mode 100644 (file)
index 0000000..22af94b
--- /dev/null
@@ -0,0 +1,37 @@
+pub fn f(
+    /// Comment
+    //~^ ERROR documentation comments cannot be applied to method arguments
+    //~| NOTE doc comments are not allowed here
+    id: u8,
+    /// Other
+    //~^ ERROR documentation comments cannot be applied to method arguments
+    //~| NOTE doc comments are not allowed here
+    a: u8,
+) {}
+
+fn foo(#[allow(dead_code)] id: i32) {}
+//~^ ERROR attributes cannot be applied to method arguments
+//~| NOTE attributes are not allowed here
+
+fn bar(id: #[allow(dead_code)] i32) {}
+//~^ ERROR attributes cannot be applied to a method argument's type
+//~| NOTE attributes are not allowed here
+
+fn main() {
+    // verify that the parser recovered and properly typechecked the args
+    f("", "");
+    //~^ ERROR mismatched types
+    //~| NOTE expected u8, found reference
+    //~| NOTE expected
+    //~| ERROR mismatched types
+    //~| NOTE expected u8, found reference
+    //~| NOTE expected
+    foo("");
+    //~^ ERROR mismatched types
+    //~| NOTE expected i32, found reference
+    //~| NOTE expected
+    bar("");
+    //~^ ERROR mismatched types
+    //~| NOTE expected i32, found reference
+    //~| NOTE expected
+}
diff --git a/src/test/ui/parser/fn-arg-doc-comment.stderr b/src/test/ui/parser/fn-arg-doc-comment.stderr
new file mode 100644 (file)
index 0000000..73a24ee
--- /dev/null
@@ -0,0 +1,63 @@
+error: documentation comments cannot be applied to method arguments
+  --> $DIR/fn-arg-doc-comment.rs:2:5
+   |
+LL |     /// Comment
+   |     ^^^^^^^^^^^ doc comments are not allowed here
+
+error: documentation comments cannot be applied to method arguments
+  --> $DIR/fn-arg-doc-comment.rs:6:5
+   |
+LL |     /// Other
+   |     ^^^^^^^^^ doc comments are not allowed here
+
+error: attributes cannot be applied to method arguments
+  --> $DIR/fn-arg-doc-comment.rs:12:8
+   |
+LL | fn foo(#[allow(dead_code)] id: i32) {}
+   |        ^^^^^^^^^^^^^^^^^^^ attributes are not allowed here
+
+error: attributes cannot be applied to a method argument's type
+  --> $DIR/fn-arg-doc-comment.rs:16:12
+   |
+LL | fn bar(id: #[allow(dead_code)] i32) {}
+   |            ^^^^^^^^^^^^^^^^^^^ attributes are not allowed here
+
+error[E0308]: mismatched types
+  --> $DIR/fn-arg-doc-comment.rs:22:7
+   |
+LL |     f("", "");
+   |       ^^ expected u8, found reference
+   |
+   = note: expected type `u8`
+              found type `&'static str`
+
+error[E0308]: mismatched types
+  --> $DIR/fn-arg-doc-comment.rs:22:11
+   |
+LL |     f("", "");
+   |           ^^ expected u8, found reference
+   |
+   = note: expected type `u8`
+              found type `&'static str`
+
+error[E0308]: mismatched types
+  --> $DIR/fn-arg-doc-comment.rs:29:9
+   |
+LL |     foo("");
+   |         ^^ expected i32, found reference
+   |
+   = note: expected type `i32`
+              found type `&'static str`
+
+error[E0308]: mismatched types
+  --> $DIR/fn-arg-doc-comment.rs:33:9
+   |
+LL |     bar("");
+   |         ^^ expected i32, found reference
+   |
+   = note: expected type `i32`
+              found type `&'static str`
+
+error: aborting due to 8 previous errors
+
+For more information about this error, try `rustc --explain E0308`.
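
Editor's note (hedged aside): at the time of this commit, doc comments and attributes on parameters were rejected as shown above; the accepted placement is on the function itself, as in this sketch.

/// Adds the two bytes. Parameter-level doc comments are rejected, so the
/// documentation lives on the function instead.
pub fn f(id: u8, a: u8) -> u8 {
    id.wrapping_add(a)
}

fn main() {
    assert_eq!(f(1, 2), 3);
}
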
index 6fdc7972a8856647bad1c7c2baeeb0ec8bd01252..f00bfcb45b17f39b7aa87a8adcb64c0724c143b6 100644 (file)
@@ -1,4 +1,4 @@
-error: expected `;`, found `as`
+error: expected `;`, found keyword `as`
   --> $DIR/import-from-rename.rs:15:16
    |
 LL | use foo::{bar} as baz;
index c36a946fd63067a98b5978b99694c6683c628f50..0b124a32df32c1116f8060809ef41d1d0196d3bd 100644 (file)
@@ -1,4 +1,4 @@
-error: expected `;`, found `as`
+error: expected `;`, found keyword `as`
   --> $DIR/import-glob-rename.rs:15:12
    |
 LL | use foo::* as baz;
index 3f41c0edd2e6feed384e4d6ad8bec116f7c628d9..749cb7fcc0b1970d7ea25be23b74b9cb57eeaca2 100644 (file)
@@ -10,6 +10,6 @@
 
 // compile-flags: -Z parse-only -Z continue-parse-after-error
 
-struct Bar<T> { x: T } where T: Copy //~ ERROR expected item, found `where`
+struct Bar<T> { x: T } where T: Copy //~ ERROR expected item, found keyword `where`
 
 fn main() {}
index 65ddadb011d0f0c769276b546d3a9124ab455115..ff9e1215f103ed55ec7ea12dc55313d4b01f400e 100644 (file)
@@ -1,7 +1,7 @@
-error: expected item, found `where`
+error: expected item, found keyword `where`
   --> $DIR/issue-17904-2.rs:13:24
    |
-LL | struct Bar<T> { x: T } where T: Copy //~ ERROR expected item, found `where`
+LL | struct Bar<T> { x: T } where T: Copy //~ ERROR expected item, found keyword `where`
    |                        ^^^^^ expected item
 
 error: aborting due to previous error
index 25ae7b4c55a2b306e055810179f80643967eeeb2..7c3b84a5185d3323912cf1a7073196eaef079e2b 100644 (file)
@@ -11,5 +11,6 @@
 // compile-flags: -Z parse-only
 
 impl S {
-    fn f(*, a: u8) -> u8 {} //~ ERROR expected pattern, found `*`
+    fn f(*, a: u8) -> u8 {}
+    //~^ ERROR expected argument name, found `*`
 }
index 189ace74b9c6aca1979e4fc39f892ce4bfe998ce..e0d69e596f24e62b3bb0d7774673ca0f81974874 100644 (file)
@@ -1,8 +1,8 @@
-error: expected pattern, found `*`
+error: expected argument name, found `*`
   --> $DIR/issue-33413.rs:14:10
    |
-LL |     fn f(*, a: u8) -> u8 {} //~ ERROR expected pattern, found `*`
-   |          ^ expected pattern
+LL |     fn f(*, a: u8) -> u8 {}
+   |          ^ expected argument name
 
 error: aborting due to previous error
 
index 3c88608697aadb7b1302ba7f10ef6a73154f63e6..014dba3d4d0e743cc73385ddd5972fc035af863d 100644 (file)
@@ -12,7 +12,7 @@
 
 fn main() {
     let foo =
-        match
+        match //~ NOTE while parsing this match expression
         Some(4).unwrap_or_else(5)
         //~^ NOTE expected one of `.`, `?`, `{`, or an operator here
         ; //~ NOTE unexpected token
index ecca781684cec16190056242678e66a03f3d9a97..2ffbddd570ff6e95b9752d319a066393968f319b 100644 (file)
@@ -1,8 +1,11 @@
 error: expected one of `.`, `?`, `{`, or an operator, found `;`
   --> $DIR/match-refactor-to-expr.rs:18:9
    |
-LL |         match
-   |         ----- help: try removing this `match`
+LL |         match //~ NOTE while parsing this match expression
+   |         -----
+   |         |
+   |         while parsing this match expression
+   |         help: try removing this `match`
 LL |         Some(4).unwrap_or_else(5)
    |                                  - expected one of `.`, `?`, `{`, or an operator here
 LL |         //~^ NOTE expected one of `.`, `?`, `{`, or an operator here
index 6e99f8b3eeadc74f693c108b270c61fa06fdb36c..c2f87d8afce0e948a15e6e37bf0f24e34c0c1a85 100644 (file)
@@ -10,4 +10,5 @@
 
 // compile-flags: -Z parse-only
 
-fn f(+x: isize) {} //~ ERROR expected pattern, found `+`
+fn f(+x: isize) {}
+//~^ ERROR expected argument name, found `+`
index 7a274553d57660fbcbff9207146fddc81b4fcf3e..7ad88471d5a9a13b8ad167302fb899135601a38e 100644 (file)
@@ -1,8 +1,8 @@
-error: expected pattern, found `+`
+error: expected argument name, found `+`
   --> $DIR/removed-syntax-mode.rs:13:6
    |
-LL | fn f(+x: isize) {} //~ ERROR expected pattern, found `+`
-   |      ^ expected pattern
+LL | fn f(+x: isize) {}
+   |      ^ expected argument name
 
 error: aborting due to previous error
 
index 1bcb5208d7610f5dbe0af7d665e8494809fa3b34..533b947b79ae9914d2b1d2e334eb97ff57b6668d 100644 (file)
@@ -12,7 +12,8 @@
 
 // Test syntax checks for `type` keyword.
 
-struct S1 for type; //~ ERROR expected `where`, `{`, `(`, or `;` after struct name, found `for`
+struct S1 for type;
+//~^ ERROR expected `where`, `{`, `(`, or `;` after struct name, found keyword `for`
 
 pub fn main() {
 }
index 89de7ee875856dbf1cde19d567565649f8eb3e5a..cc35c2035eae88aa8a700baf24b6cad928b2e9c7 100644 (file)
@@ -1,7 +1,7 @@
-error: expected `where`, `{`, `(`, or `;` after struct name, found `for`
+error: expected `where`, `{`, `(`, or `;` after struct name, found keyword `for`
   --> $DIR/unsized.rs:15:11
    |
-LL | struct S1 for type; //~ ERROR expected `where`, `{`, `(`, or `;` after struct name, found `for`
+LL | struct S1 for type;
    |           ^^^ expected `where`, `{`, `(`, or `;` after struct name
 
 error: aborting due to previous error
index f34eddc93c570595d5bf188de6586b59655bf89e..b7426140d8ac5c142fdc99b4b0b5255029891a57 100644 (file)
@@ -12,7 +12,8 @@
 
 // Test diagnostics for the removed struct inheritance feature.
 
-virtual struct SuperStruct { //~ ERROR expected item, found `virtual`
+virtual struct SuperStruct {
+//~^ ERROR expected item, found reserved keyword `virtual`
     f1: isize,
 }
 
index 6af9922a698c433bffb4144ae6fc95ba84ce7821..659c7701c00d688e218b711244168613642cec60 100644 (file)
@@ -1,7 +1,7 @@
-error: expected item, found `virtual`
+error: expected item, found reserved keyword `virtual`
   --> $DIR/virtual-structs.rs:15:1
    |
-LL | virtual struct SuperStruct { //~ ERROR expected item, found `virtual`
+LL | virtual struct SuperStruct {
    | ^^^^^^^ expected item
 
 error: aborting due to previous error
index 96b025128ee9ac1be8086c3a50f1245a1881a6ab..31123410481c33d2f65809c92b57dc42c146dca2 100644 (file)
@@ -11,7 +11,8 @@
 #![crate_name="foo"]
 #![allow(dead_code)]
 
-// compile-flags: -Z print-fuel=foo
+// (#55495: The --error-format is to sidestep an issue in our test harness)
+// compile-flags: --error-format human -Z print-fuel=foo
 // compile-pass
 
 struct S1(u8, u16, u8);
diff --git a/src/test/ui/print-fuel/print-fuel.stderr b/src/test/ui/print-fuel/print-fuel.stderr
new file mode 100644 (file)
index 0000000..cc88cc0
--- /dev/null
@@ -0,0 +1 @@
+Fuel used by foo: 3
diff --git a/src/test/ui/print-fuel/print-fuel.stdout b/src/test/ui/print-fuel/print-fuel.stdout
deleted file mode 100644 (file)
index cc88cc0..0000000
+++ /dev/null
@@ -1 +0,0 @@
-Fuel used by foo: 3
index 3627c09b28fd9dac873e23b298065ae20d3cc91c..3e330f9de6547a668b193379b029b9e8b0c47f4e 100644 (file)
@@ -2,7 +2,9 @@ error[E0404]: expected trait, found type alias `Foo`
   --> $DIR/issue-3907.rs:20:6
    |
 LL | impl Foo for S { //~ ERROR expected trait, found type alias `Foo`
-   |      ^^^ type aliases cannot be used for traits
+   |      ^^^ type aliases cannot be used as traits
+   |
+   = note: did you mean to use a trait alias?
 help: possible better candidate is found in another module, you can import it into scope
    |
 LL | use issue_3907::Foo;
index 353a0b1c3d9d09f322af8900053f4d643b2f95be..0acc5c8a93ec689c1e2fc5ef56d9d06207ddb825 100644 (file)
@@ -11,7 +11,9 @@ LL | impl K for isize {} //~ ERROR expected trait, found type alias `K`
    |      ^
    |      |
    |      did you mean `I`?
-   |      type aliases cannot be used for traits
+   |      type aliases cannot be used as traits
+   |
+   = note: did you mean to use a trait alias?
 
 error: aborting due to 2 previous errors
 
index f32c5e9b2c6bd2e2f5214315d6dd3271340de899..d293a77392e8997b80458653ce86b26259d5511f 100644 (file)
@@ -8,7 +8,9 @@ error[E0404]: expected trait, found type alias `Typedef`
   --> $DIR/unboxed-closure-sugar-nonexistent-trait.rs:16:8
    |
 LL | fn g<F:Typedef(isize) -> isize>(x: F) {}
-   |        ^^^^^^^^^^^^^^^^^^^^^^^ type aliases cannot be used for traits
+   |        ^^^^^^^^^^^^^^^^^^^^^^^ type aliases cannot be used as traits
+   |
+   = note: did you mean to use a trait alias?
 
 error: aborting due to 2 previous errors
 
diff --git a/src/test/ui/single-use-lifetime/one-use-in-fn-argument-in-band.fixed b/src/test/ui/single-use-lifetime/one-use-in-fn-argument-in-band.fixed
new file mode 100644 (file)
index 0000000..fd9b10e
--- /dev/null
@@ -0,0 +1,28 @@
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// run-rustfix
+
+#![feature(in_band_lifetimes)]
+#![deny(single_use_lifetimes)]
+#![allow(dead_code)]
+#![allow(unused_variables)]
+
+// Test that we DO warn when lifetime name is used only
+// once in a fn argument, even with in band lifetimes.
+
+fn a(x: &u32, y: &u32) {
+    //~^ ERROR `'a` only used once
+    //~| ERROR `'b` only used once
+    //~| HELP elide the single-use lifetime
+    //~| HELP elide the single-use lifetime
+}
+
+fn main() { }
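
Editor's note (hedged sketch of what the lint and its new suggestion boil down to, outside the in-band-lifetimes setup of this test):

#![deny(single_use_lifetimes)]

// Would be denied: `'a` appears exactly once.
// fn once<'a>(x: &'a u32) -> u32 { *x }

// The suggested fix is simply to elide the single-use lifetime.
fn once(x: &u32) -> u32 {
    *x
}

fn main() {
    assert_eq!(once(&7), 7);
}
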
index 1aad3265cbe0af54f541d7d5f7a5607b199ef258..0d5f65e6d1b53e8bacef13e9f8907f9b67ea0a44 100644 (file)
@@ -8,6 +8,8 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
+// run-rustfix
+
 #![feature(in_band_lifetimes)]
 #![deny(single_use_lifetimes)]
 #![allow(dead_code)]
@@ -19,6 +21,8 @@
 fn a(x: &'a u32, y: &'b u32) {
     //~^ ERROR `'a` only used once
     //~| ERROR `'b` only used once
+    //~| HELP elide the single-use lifetime
+    //~| HELP elide the single-use lifetime
 }
 
 fn main() { }
index 6d18075ba37cf1174e56e34afe268672d0a38d7d..a9f8411d4110315ccde004cd23af9d45664ac5b4 100644 (file)
@@ -1,26 +1,26 @@
 error: lifetime parameter `'a` only used once
-  --> $DIR/one-use-in-fn-argument-in-band.rs:19:10
+  --> $DIR/one-use-in-fn-argument-in-band.rs:21:10
    |
 LL | fn a(x: &'a u32, y: &'b u32) {
-   |          ^^
+   |          ^^-
    |          |
-   |          this lifetime...
-   |          ...is used only here
+   |          this lifetime is only used here
+   |          help: elide the single-use lifetime
    |
 note: lint level defined here
-  --> $DIR/one-use-in-fn-argument-in-band.rs:12:9
+  --> $DIR/one-use-in-fn-argument-in-band.rs:14:9
    |
 LL | #![deny(single_use_lifetimes)]
    |         ^^^^^^^^^^^^^^^^^^^^
 
 error: lifetime parameter `'b` only used once
-  --> $DIR/one-use-in-fn-argument-in-band.rs:19:22
+  --> $DIR/one-use-in-fn-argument-in-band.rs:21:22
    |
 LL | fn a(x: &'a u32, y: &'b u32) {
-   |                      ^^
+   |                      ^^-
    |                      |
-   |                      this lifetime...
-   |                      ...is used only here
+   |                      this lifetime is only used here
+   |                      help: elide the single-use lifetime
 
 error: aborting due to 2 previous errors
 
index 9ac813a24ded3cb07597e11a388ee41a071eabab..a1b8866c8c455741c2eae27d5914f7d9ee9e62df 100644 (file)
@@ -16,6 +16,7 @@
 // once in a fn argument.
 
 fn a<'a>(x: &'a u32) { //~ ERROR `'a` only used once
+    //~^ HELP elide the single-use lifetime
 }
 
 fn main() { }
index 4c13133581bb4d156433031f8ce36404b257a6d1..0a23ecaf458ad965db0107da1db87769275eea25 100644 (file)
@@ -11,6 +11,10 @@ note: lint level defined here
    |
 LL | #![deny(single_use_lifetimes)]
    |         ^^^^^^^^^^^^^^^^^^^^
+help: elide the single-use lifetime
+   |
+LL | fn a(x: &u32) { //~ ERROR `'a` only used once
+   |    --   --
 
 error: aborting due to previous error
 
index a862bbbe30c09e4ebb32e45e27fd6c830e4ab0ed..b392f7a51746f07457964bd2a75bf3ea1b787449 100644 (file)
 #![allow(dead_code)]
 #![allow(unused_variables)]
 
-// Test that we DO warn for a lifetime used only once in an impl.
-//
-// (Actually, until #15872 is fixed, you can't use `'_` here, but
-// hopefully that will come soon.)
+// Test that we DO warn for a lifetime used only once in an impl, and that we
+// don't warn for the anonymous lifetime.
 
 struct Foo<'f> {
     data: &'f u32
@@ -26,4 +24,9 @@ fn inherent_a(&self) {
     }
 }
 
+impl Foo<'_> {
+    fn inherent_b(&self) {}
+}
+
+
 fn main() { }
index 2509366f9696fb080dbe1d2b21b5740c89d51e9b..40dfa677d0ab2cc27c1372bb41aef44d4721ea4d 100644 (file)
@@ -1,5 +1,5 @@
 error: lifetime parameter `'f` only used once
-  --> $DIR/one-use-in-inherent-impl-header.rs:24:6
+  --> $DIR/one-use-in-inherent-impl-header.rs:22:6
    |
 LL | impl<'f> Foo<'f> { //~ ERROR `'f` only used once
    |      ^^      -- ...is used only here
index 863d8faef6ea17ea964a091408dd4014840a4519..63c3bbdfe4a113e9cb497db18d55d03796140f04 100644 (file)
@@ -20,6 +20,7 @@ struct Foo<'f> {
 
 impl<'f> Foo<'f> { //~ ERROR `'f` only used once
     fn inherent_a<'a>(&self, data: &'a u32) { //~ ERROR `'a` only used once
+        //~^ HELP elide the single-use lifetime
     }
 }
 
index cfc8dbf18dc01d02c07045acf6eeacad050a748e..d0651a437b0919836f1b4d5547a0b3596329e241 100644 (file)
@@ -11,6 +11,10 @@ note: lint level defined here
    |
 LL | #![deny(single_use_lifetimes)]
    |         ^^^^^^^^^^^^^^^^^^^^
+help: elide the single-use lifetime
+   |
+LL |     fn inherent_a(&self, data: &u32) { //~ ERROR `'a` only used once
+   |                 --             --
 
 error: lifetime parameter `'f` only used once
   --> $DIR/one-use-in-inherent-method-argument.rs:21:6
index 389ba5c873ad8e3681a794f5c8d92dd2392de90d..c36e9ef2b223740d3a42255d3cec8cbd42ca980c 100644 (file)
@@ -23,6 +23,7 @@ impl<'f> Iterator for Foo<'f> {
     type Item = &'f u32;
 
     fn next<'g>(&'g mut self) -> Option<Self::Item> { //~ ERROR `'g` only used once
+        //~^ HELP elide the single-use lifetime
         None
     }
 }
index 4a796d83242fd91570718d2a72c8e77b8ecf5bae..e054d0ad9349bc148ef4aaec561ad00ddaae01d7 100644 (file)
@@ -11,6 +11,10 @@ note: lint level defined here
    |
 LL | #![deny(single_use_lifetimes)]
    |         ^^^^^^^^^^^^^^^^^^^^
+help: elide the single-use lifetime
+   |
+LL |     fn next(&mut self) -> Option<Self::Item> { //~ ERROR `'g` only used once
+   |           ----
 
 error: aborting due to previous error
 
index 5ba7df8a1e61fa3430cacf317da8e04165826ff0..89607af260a5258e795576acac33b085c34204fd 100644 (file)
@@ -7,17 +7,17 @@
 
 fn september() {}
 //~^ ERROR lifetime parameter `'a` never used
-//~| HELP remove it
+//~| HELP elide the unused lifetime
 
 fn october<'b, T>(s: &'b T) -> &'b T {
     //~^ ERROR lifetime parameter `'a` never used
-    //~| HELP remove it
+    //~| HELP elide the unused lifetime
     s
 }
 
 fn november<'a>(s: &'a str) -> (&'a str) {
     //~^ ERROR lifetime parameter `'b` never used
-    //~| HELP remove it
+    //~| HELP elide the unused lifetime
     s
 }
 
index a56d7fa8abc0275f40b323c8afc47bf2c948f6ae..be0bdb9b6285102ea40b2d906e6eec041595ef95 100644 (file)
@@ -7,17 +7,17 @@
 
 fn september<'a>() {}
 //~^ ERROR lifetime parameter `'a` never used
-//~| HELP remove it
+//~| HELP elide the unused lifetime
 
 fn october<'a, 'b, T>(s: &'b T) -> &'b T {
     //~^ ERROR lifetime parameter `'a` never used
-    //~| HELP remove it
+    //~| HELP elide the unused lifetime
     s
 }
 
 fn november<'a, 'b>(s: &'a str) -> (&'a str) {
     //~^ ERROR lifetime parameter `'b` never used
-    //~| HELP remove it
+    //~| HELP elide the unused lifetime
     s
 }
 
index 566c841cfa96984e853341f0fcc7c866b10e34d5..2ccba796d4229ebe319526d3280ee26057a72bea 100644 (file)
@@ -2,7 +2,7 @@ error: lifetime parameter `'a` never used
   --> $DIR/zero-uses-in-fn.rs:8:14
    |
 LL | fn september<'a>() {}
-   |             -^^- help: remove it
+   |             -^^- help: elide the unused lifetime
    |
 note: lint level defined here
   --> $DIR/zero-uses-in-fn.rs:5:9
@@ -16,7 +16,7 @@ error: lifetime parameter `'a` never used
 LL | fn october<'a, 'b, T>(s: &'b T) -> &'b T {
    |            ^^--
    |            |
-   |            help: remove it
+   |            help: elide the unused lifetime
 
 error: lifetime parameter `'b` never used
   --> $DIR/zero-uses-in-fn.rs:18:17
@@ -24,7 +24,7 @@ error: lifetime parameter `'b` never used
 LL | fn november<'a, 'b>(s: &'a str) -> (&'a str) {
    |               --^^
    |               |
-   |               help: remove it
+   |               help: elide the unused lifetime
 
 error: aborting due to 3 previous errors
 
index 1b77ebdec99c6af0986e62cfea945893261dc773..3f9d907ade6799dc324e1fd433a4395ef27b34ea 100644 (file)
@@ -2,7 +2,7 @@ error: lifetime parameter `'a` never used
   --> $DIR/zero-uses-in-impl.rs:8:6
    |
 LL | impl<'a> Foo {} //~ ERROR `'a` never used
-   |     -^^- help: remove it
+   |     -^^- help: elide the unused lifetime
    |
 note: lint level defined here
   --> $DIR/zero-uses-in-impl.rs:3:9
index 4a693a3b05d4ea1f38770055e1a9857d8b71b25f..389a1116c163ace63616dedc196c9467c8334fd9 100644 (file)
@@ -14,6 +14,16 @@ LL | fn deref_extend_mut_field1(x: &Own<Point>) -> &mut isize {
 LL |     &mut x.y //~ ERROR cannot borrow
    |          ^ `x` is a `&` reference, so the data it refers to cannot be borrowed as mutable
 
+error[E0499]: cannot borrow `*x` as mutable more than once at a time
+  --> $DIR/borrowck-borrow-overloaded-auto-deref-mut.rs:88:19
+   |
+LL |     let _x = &mut x.x;
+   |                   - first mutable borrow occurs here
+LL |     let _y = &mut x.y; //~ ERROR cannot borrow
+   |                   ^ second mutable borrow occurs here
+LL |     use_mut(_x);
+   |             -- first borrow later used here
+
 error[E0596]: cannot borrow `x` as mutable, as it is not declared as mutable
   --> $DIR/borrowck-borrow-overloaded-auto-deref-mut.rs:98:5
    |
@@ -30,6 +40,16 @@ LL | fn assign_field2<'a>(x: &'a Own<Point>) {
 LL |     x.y = 3; //~ ERROR cannot borrow
    |     ^ `x` is a `&` reference, so the data it refers to cannot be borrowed as mutable
 
+error[E0499]: cannot borrow `*x` as mutable more than once at a time
+  --> $DIR/borrowck-borrow-overloaded-auto-deref-mut.rs:111:5
+   |
+LL |     let _p: &mut Point = &mut **x;
+   |                                -- first mutable borrow occurs here
+LL |     x.y = 3; //~ ERROR cannot borrow
+   |     ^ second mutable borrow occurs here
+LL |     use_mut(_p);
+   |             -- first borrow later used here
+
 error[E0596]: cannot borrow `x` as mutable, as it is not declared as mutable
   --> $DIR/borrowck-borrow-overloaded-auto-deref-mut.rs:119:5
    |
@@ -62,6 +82,7 @@ LL | fn assign_method2<'a>(x: &'a Own<Point>) {
 LL |     *x.y_mut() = 3; //~ ERROR cannot borrow
    |      ^ `x` is a `&` reference, so the data it refers to cannot be borrowed as mutable
 
-error: aborting due to 8 previous errors
+error: aborting due to 10 previous errors
 
-For more information about this error, try `rustc --explain E0596`.
+Some errors occurred: E0499, E0596.
+For more information about an error, try `rustc --explain E0499`.
index 764d05be879b8be674f3e9c3adba83e924c051a8..48eb2e239f7cdc6a46ff85170949b339c8ed6128 100644 (file)
@@ -86,8 +86,8 @@ fn deref_extend_mut_field3(x: &mut Own<Point>) {
 
     let _x = &mut x.x;
     let _y = &mut x.y; //~ ERROR cannot borrow
+    use_mut(_x);
 }
-
 fn deref_extend_mut_field4<'a>(x: &'a mut Own<Point>) {
     let p = &mut **x;
     let _x = &mut p.x;
@@ -109,8 +109,8 @@ fn assign_field3<'a>(x: &'a mut Own<Point>) {
 fn assign_field4<'a>(x: &'a mut Own<Point>) {
     let _p: &mut Point = &mut **x;
     x.y = 3; //~ ERROR cannot borrow
+    use_mut(_p);
 }
-
 fn deref_imm_method(x: Own<Point>) {
     let __isize = x.get();
 }
@@ -148,3 +148,5 @@ fn assign_method3<'a>(x: &'a mut Own<Point>) {
 }
 
 pub fn main() {}
+
+fn use_mut<T>(_: &mut T) {}
index d91ff6964237b2c65e5c116ec9b02b5463707b12..864357fee9f0cbbb59ac9bb328e3ee10c920dda4 100644 (file)
@@ -21,6 +21,7 @@ LL |     let _x = &mut x.x;
    |                   - first mutable borrow occurs here
 LL |     let _y = &mut x.y; //~ ERROR cannot borrow
    |                   ^ second mutable borrow occurs here
+LL |     use_mut(_x);
 LL | }
    | - first borrow ends here
 
@@ -47,6 +48,7 @@ LL |     let _p: &mut Point = &mut **x;
    |                                -- first mutable borrow occurs here
 LL |     x.y = 3; //~ ERROR cannot borrow
    |     ^ second mutable borrow occurs here
+LL |     use_mut(_p);
 LL | }
    | - first borrow ends here
 
diff --git a/src/test/ui/trait-alias-fail.rs b/src/test/ui/trait-alias-fail.rs
deleted file mode 100644 (file)
index 7aca227..0000000
+++ /dev/null
@@ -1,27 +0,0 @@
-// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-// gate-test-trait_alias
-
-trait Alias1<T> = Default where T: Clone; // ok
-    //~^ERROR trait aliases are not yet fully implemented
-trait Alias2<T: Clone = ()> = Default;
-    //~^ERROR type parameters on the left side of a trait alias cannot be bounded
-    //~^^ERROR type parameters on the left side of a trait alias cannot have defaults
-    //~^^^ERROR trait aliases are not yet fully implemented
-
-impl Alias1 { //~ERROR expected type, found trait alias
-}
-
-impl Alias1 for () { //~ERROR expected trait, found trait alias
-}
-
-fn main() {}
-
diff --git a/src/test/ui/trait-alias-fail.stderr b/src/test/ui/trait-alias-fail.stderr
deleted file mode 100644 (file)
index f7b144c..0000000
+++ /dev/null
@@ -1,44 +0,0 @@
-error: type parameters on the left side of a trait alias cannot be bounded
-  --> $DIR/trait-alias-fail.rs:15:14
-   |
-LL | trait Alias2<T: Clone = ()> = Default;
-   |              ^
-
-error: type parameters on the left side of a trait alias cannot have defaults
-  --> $DIR/trait-alias-fail.rs:15:14
-   |
-LL | trait Alias2<T: Clone = ()> = Default;
-   |              ^
-
-error[E0573]: expected type, found trait alias `Alias1`
-  --> $DIR/trait-alias-fail.rs:20:6
-   |
-LL | impl Alias1 { //~ERROR expected type, found trait alias
-   |      ^^^^^^ not a type
-
-error[E0404]: expected trait, found trait alias `Alias1`
-  --> $DIR/trait-alias-fail.rs:23:6
-   |
-LL | impl Alias1 for () { //~ERROR expected trait, found trait alias
-   |      ^^^^^^ not a trait
-
-error[E0658]: trait aliases are not yet fully implemented (see issue #41517)
-  --> $DIR/trait-alias-fail.rs:13:1
-   |
-LL | trait Alias1<T> = Default where T: Clone; // ok
-   | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-   |
-   = help: add #![feature(trait_alias)] to the crate attributes to enable
-
-error[E0658]: trait aliases are not yet fully implemented (see issue #41517)
-  --> $DIR/trait-alias-fail.rs:15:1
-   |
-LL | trait Alias2<T: Clone = ()> = Default;
-   | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-   |
-   = help: add #![feature(trait_alias)] to the crate attributes to enable
-
-error: aborting due to 6 previous errors
-
-Some errors occurred: E0404, E0573, E0658.
-For more information about an error, try `rustc --explain E0404`.
diff --git a/src/test/ui/traits/trait-alias-impl.rs b/src/test/ui/traits/trait-alias-impl.rs
new file mode 100644 (file)
index 0000000..bf34830
--- /dev/null
@@ -0,0 +1,17 @@
+// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+#![feature(trait_alias)]
+
+trait DefaultAlias = Default;
+
+impl DefaultAlias for () {}
+
+fn main() {}
diff --git a/src/test/ui/traits/trait-alias-impl.stderr b/src/test/ui/traits/trait-alias-impl.stderr
new file mode 100644 (file)
index 0000000..9ad6251
--- /dev/null
@@ -0,0 +1,9 @@
+error[E0404]: expected trait, found trait alias `DefaultAlias`
+  --> $DIR/trait-alias-impl.rs:15:6
+   |
+LL | impl DefaultAlias for () {}
+   |      ^^^^^^^^^^^^ not a trait
+
+error: aborting due to previous error
+
+For more information about this error, try `rustc --explain E0404`.
diff --git a/src/test/ui/traits/trait-alias-objects.rs b/src/test/ui/traits/trait-alias-objects.rs
new file mode 100644 (file)
index 0000000..3adcd84
--- /dev/null
@@ -0,0 +1,19 @@
+// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+#![feature(trait_alias)]
+
+trait EqAlias = Eq;
+trait IteratorAlias = Iterator;
+
+fn main() {
+    let _: &dyn EqAlias = &123;
+    let _: &dyn IteratorAlias = &vec![123].into_iter();
+}
diff --git a/src/test/ui/traits/trait-alias-objects.stderr b/src/test/ui/traits/trait-alias-objects.stderr
new file mode 100644 (file)
index 0000000..8f9681e
--- /dev/null
@@ -0,0 +1,18 @@
+error[E0038]: the trait `EqAlias` cannot be made into an object
+  --> $DIR/trait-alias-objects.rs:17:13
+   |
+LL |     let _: &dyn EqAlias = &123;
+   |             ^^^^^^^^^^^ the trait `EqAlias` cannot be made into an object
+   |
+   = note: the trait cannot use `Self` as a type parameter in the supertraits or where-clauses
+
+error[E0191]: the value of the associated type `Item` (from the trait `std::iter::Iterator`) must be specified
+  --> $DIR/trait-alias-objects.rs:18:13
+   |
+LL |     let _: &dyn IteratorAlias = &vec![123].into_iter();
+   |             ^^^^^^^^^^^^^^^^^ missing associated type `Item` value
+
+error: aborting due to 2 previous errors
+
+Some errors occurred: E0038, E0191.
+For more information about an error, try `rustc --explain E0038`.
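Both diagnostics in trait-alias-objects come from the aliased traits themselves rather than from the alias machinery: `Eq` mentions `Self` through its supertrait `PartialEq<Self>`, so it is not object safe (E0038), and a bare `Iterator` object leaves the `Item` associated type unspecified (E0191). A short sketch of those underlying rules using plain traits, for illustration only and with no feature gate involved:

fn main() {
    // Fine: the associated type is fixed, so the object type is fully determined.
    let it: &dyn Iterator<Item = i32> = &(0..3_i32);
    let _ = it;

    // Rejected for the same reason as `dyn EqAlias` above: `Eq` is not object
    // safe because its supertrait `PartialEq<Self>` uses `Self` as a type parameter.
    // let _: &dyn Eq = &123;
}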
diff --git a/src/test/ui/traits/trait-alias-wf.rs b/src/test/ui/traits/trait-alias-wf.rs
new file mode 100644 (file)
index 0000000..8c8ce12
--- /dev/null
@@ -0,0 +1,17 @@
+// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+#![feature(trait_alias)]
+
+trait Foo {}
+trait A<T: Foo> {}
+trait B<T> = A<T>; // T cannot be unbounded
+
+fn main() {}
diff --git a/src/test/ui/traits/trait-alias-wf.stderr b/src/test/ui/traits/trait-alias-wf.stderr
new file mode 100644 (file)
index 0000000..e8c81c8
--- /dev/null
@@ -0,0 +1,16 @@
+error[E0277]: the trait bound `T: Foo` is not satisfied
+  --> $DIR/trait-alias-wf.rs:15:1
+   |
+LL | trait B<T> = A<T>; // T cannot be unbounded
+   | ^^^^^^^^^^^^^^^^^^ the trait `Foo` is not implemented for `T`
+   |
+   = help: consider adding a `where T: Foo` bound
+note: required by `A`
+  --> $DIR/trait-alias-wf.rs:14:1
+   |
+LL | trait A<T: Foo> {}
+   | ^^^^^^^^^^^^^^^
+
+error: aborting due to previous error
+
+For more information about this error, try `rustc --explain E0277`.
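As the help message in this .stderr suggests, the well-formedness error is resolved with a `where` clause on the right-hand side of the alias; bounds on the alias's own type parameters are rejected (see the deleted trait-alias-fail test earlier in this diff). A minimal sketch of the compiling form, assuming #![feature(trait_alias)]:

#![feature(trait_alias)]

trait Foo {}
trait A<T: Foo> {}

// `trait B<T: Foo> = A<T>;` is not allowed, but a where clause
// on the alias satisfies `A`'s requirement on `T`.
trait B<T> = A<T> where T: Foo;

fn main() {}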
diff --git a/src/test/ui/traits/trait-alias.rs b/src/test/ui/traits/trait-alias.rs
deleted file mode 100644 (file)
index 9ea211b..0000000
+++ /dev/null
@@ -1,43 +0,0 @@
-// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-#![feature(trait_alias)]
-
-trait SimpleAlias = Default; //~ERROR E0645
-trait GenericAlias<T> = Iterator<Item=T>; //~ERROR E0645
-trait Partial<T> = IntoIterator<Item=T>; //~ERROR E0645
-
-trait Things<T> {}
-trait Romeo {}
-struct The<T>(T);
-struct Fore<T>(T);
-impl<T, U> Things<T> for The<U> {}
-impl<T> Romeo for Fore<T> {}
-
-trait WithWhere<Art, Thou> = Romeo + Romeo where Fore<(Art, Thou)>: Romeo; //~ERROR E0645
-trait BareWhere<Wild, Are> = where The<Wild>: Things<Are>; //~ERROR E0645
-
-trait CD = Clone + Default; //~ERROR E0645
-
-fn foo<T: CD>() -> (T, T) {
-    let one = T::default();
-    let two = one.clone();
-    (one, two)
-}
-
-fn main() {
-    let both = foo();
-    assert_eq!(both.0, 0);
-    assert_eq!(both.1, 0);
-    let both: (i32, i32) = foo();
-    assert_eq!(both.0, 0);
-    assert_eq!(both.1, 0);
-}
-
diff --git a/src/test/ui/traits/trait-alias.stderr b/src/test/ui/traits/trait-alias.stderr
deleted file mode 100644 (file)
index 5d290e5..0000000
+++ /dev/null
@@ -1,39 +0,0 @@
-error[E0645]: trait aliases are not yet implemented (see issue #41517)
-  --> $DIR/trait-alias.rs:13:1
-   |
-LL | trait SimpleAlias = Default; //~ERROR E0645
-   | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-
-error[E0645]: trait aliases are not yet implemented (see issue #41517)
-  --> $DIR/trait-alias.rs:14:1
-   |
-LL | trait GenericAlias<T> = Iterator<Item=T>; //~ERROR E0645
-   | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-
-error[E0645]: trait aliases are not yet implemented (see issue #41517)
-  --> $DIR/trait-alias.rs:15:1
-   |
-LL | trait Partial<T> = IntoIterator<Item=T>; //~ERROR E0645
-   | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-
-error[E0645]: trait aliases are not yet implemented (see issue #41517)
-  --> $DIR/trait-alias.rs:24:1
-   |
-LL | trait WithWhere<Art, Thou> = Romeo + Romeo where Fore<(Art, Thou)>: Romeo; //~ERROR E0645
-   | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-
-error[E0645]: trait aliases are not yet implemented (see issue #41517)
-  --> $DIR/trait-alias.rs:25:1
-   |
-LL | trait BareWhere<Wild, Are> = where The<Wild>: Things<Are>; //~ERROR E0645
-   | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-
-error[E0645]: trait aliases are not yet implemented (see issue #41517)
-  --> $DIR/trait-alias.rs:27:1
-   |
-LL | trait CD = Clone + Default; //~ERROR E0645
-   | ^^^^^^^^^^^^^^^^^^^^^^^^^^^
-
-error: aborting due to 6 previous errors
-
-For more information about this error, try `rustc --explain E0645`.
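The deleted trait-alias test expected the old hard error E0645 ("trait aliases are not yet implemented"); with the implementation landed, trait aliases are instead gated behind #![feature(trait_alias)]. The bound-style usage that test exercised now works on a gated nightly, roughly as in this sketch:

#![feature(trait_alias)]

trait CD = Clone + Default;

fn foo<T: CD>() -> (T, T) {
    let one = T::default();
    let two = one.clone();
    (one, two)
}

fn main() {
    let both: (i32, i32) = foo();
    assert_eq!(both, (0, 0));
}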
index f8e4f0d596e20d3a005514552b5ea0eecaa42e64..1db5ec097376cd95f6b1ebb138b97009e26601ea 100644 (file)
@@ -110,9 +110,7 @@ fn check_assoc_const() {
     // A, B, C are resolved as inherent items, their traits don't need to be in scope
     C::A; //~ ERROR associated constant `A` is private
           //~^ ERROR the trait `assoc_const::C` cannot be made into an object
-          //~| ERROR the trait bound `dyn assoc_const::C: assoc_const::A` is not satisfied
     C::B; // ERROR the trait `assoc_const::C` cannot be made into an object
-          //~^ ERROR the trait bound `dyn assoc_const::C: assoc_const::B` is not satisfied
     C::C; // OK
 }
 
index fc14ae91d7b36c3464d4ed1cbb86256c16a54487..4ede83d5d73627e3a68e20777e9ce401abc7c153 100644 (file)
@@ -100,30 +100,6 @@ error[E0624]: associated constant `A` is private
 LL |     C::A; //~ ERROR associated constant `A` is private
    |     ^^^^
 
-error[E0277]: the trait bound `dyn assoc_const::C: assoc_const::A` is not satisfied
-  --> $DIR/trait-item-privacy.rs:111:5
-   |
-LL |     C::A; //~ ERROR associated constant `A` is private
-   |     ^^^^ the trait `assoc_const::A` is not implemented for `dyn assoc_const::C`
-   |
-note: required by `assoc_const::A::A`
-  --> $DIR/trait-item-privacy.rs:35:9
-   |
-LL |         const A: u8 = 0;
-   |         ^^^^^^^^^^^^^^^^
-
-error[E0277]: the trait bound `dyn assoc_const::C: assoc_const::B` is not satisfied
-  --> $DIR/trait-item-privacy.rs:114:5
-   |
-LL |     C::B; // ERROR the trait `assoc_const::C` cannot be made into an object
-   |     ^^^^ the trait `assoc_const::B` is not implemented for `dyn assoc_const::C`
-   |
-note: required by `assoc_const::B::B`
-  --> $DIR/trait-item-privacy.rs:39:9
-   |
-LL |         const B: u8 = 0;
-   |         ^^^^^^^^^^^^^^^^
-
 error[E0038]: the trait `assoc_const::C` cannot be made into an object
   --> $DIR/trait-item-privacy.rs:111:5
    |
@@ -135,36 +111,36 @@ LL |     C::A; //~ ERROR associated constant `A` is private
    = note: the trait cannot contain associated consts like `A`
 
 error[E0223]: ambiguous associated type
-  --> $DIR/trait-item-privacy.rs:127:12
+  --> $DIR/trait-item-privacy.rs:125:12
    |
 LL |     let _: S::A; //~ ERROR ambiguous associated type
    |            ^^^^ help: use fully-qualified syntax: `<S as Trait>::A`
 
 error[E0223]: ambiguous associated type
-  --> $DIR/trait-item-privacy.rs:128:12
+  --> $DIR/trait-item-privacy.rs:126:12
    |
 LL |     let _: S::B; //~ ERROR ambiguous associated type
    |            ^^^^ help: use fully-qualified syntax: `<S as Trait>::B`
 
 error[E0223]: ambiguous associated type
-  --> $DIR/trait-item-privacy.rs:129:12
+  --> $DIR/trait-item-privacy.rs:127:12
    |
 LL |     let _: S::C; //~ ERROR ambiguous associated type
    |            ^^^^ help: use fully-qualified syntax: `<S as Trait>::C`
 
 error: associated type `A` is private
-  --> $DIR/trait-item-privacy.rs:131:12
+  --> $DIR/trait-item-privacy.rs:129:12
    |
 LL |     let _: T::A; //~ ERROR associated type `A` is private
    |            ^^^^
 
 error: associated type `A` is private
-  --> $DIR/trait-item-privacy.rs:140:9
+  --> $DIR/trait-item-privacy.rs:138:9
    |
 LL |         A = u8, //~ ERROR associated type `A` is private
    |         ^^^^^^
 
-error: aborting due to 17 previous errors
+error: aborting due to 15 previous errors
 
-Some errors occurred: E0038, E0223, E0277, E0599, E0624.
+Some errors occurred: E0038, E0223, E0599, E0624.
 For more information about an error, try `rustc --explain E0038`.
index dac76fb57fd7c3a3b4592b596222fd5009d261d9..01d7e89847a89fa454cd5d4bfbfc07174086e2c4 100644 (file)
@@ -20,5 +20,4 @@ fn main() {
     (box 10 as Box<bar>).dup();
     //~^ ERROR E0038
     //~| ERROR E0038
-    //~| ERROR E0277
 }
index 1e1fcbe340e58259430f1901be149585f8f2a507..db0cd38cb6a682fe256b5370c0fc64900d70badc 100644 (file)
@@ -10,12 +10,6 @@ error[E0107]: wrong number of type arguments: expected 1, found 2
 LL |     10.blah::<i32, i32>(); //~ ERROR wrong number of type arguments: expected 1, found 2
    |                    ^^^ unexpected type argument
 
-error[E0277]: the trait bound `dyn bar: bar` is not satisfied
-  --> $DIR/trait-test-2.rs:20:26
-   |
-LL |     (box 10 as Box<bar>).dup();
-   |                          ^^^ the trait `bar` is not implemented for `dyn bar`
-
 error[E0038]: the trait `bar` cannot be made into an object
   --> $DIR/trait-test-2.rs:20:16
    |
@@ -35,7 +29,7 @@ LL |     (box 10 as Box<bar>).dup();
    = note: method `blah` has generic type parameters
    = note: required because of the requirements on the impl of `std::ops::CoerceUnsized<std::boxed::Box<dyn bar>>` for `std::boxed::Box<{integer}>`
 
-error: aborting due to 5 previous errors
+error: aborting due to 4 previous errors
 
-Some errors occurred: E0038, E0107, E0277.
+Some errors occurred: E0038, E0107.
 For more information about an error, try `rustc --explain E0038`.
index f07d23885aca19cef708a47bba7b2015ddf3cd60..c9b43000877cfbd63580b23bafc5d654fe69922f 100644 (file)
@@ -2,7 +2,9 @@ error: expected expression, found reserved keyword `try`
   --> $DIR/try-block-in-match.rs:16:11
    |
 LL |     match try { false } { _ => {} } //~ ERROR expected expression, found reserved keyword `try`
-   |           ^^^ expected expression
+   |     ----- ^^^ expected expression
+   |     |
+   |     while parsing this match expression
 
 error: aborting due to previous error
 
index d86ebda027efb21271895eda30b99f3ee485940b..83fd736a997a98ff686b277e885c5afd6f7b9974 100644 (file)
@@ -10,7 +10,7 @@ error[E0004]: non-exhaustive patterns: type &Void is non-empty
 LL |     let _ = match x {}; //~ ERROR non-exhaustive
    |                   ^
    |
-help: Please ensure that all possible cases are being handled; possibly adding wildcards or more match arms.
+help: ensure that all possible cases are being handled, possibly by adding wildcards or more match arms
   --> $DIR/uninhabited-matches-feature-gated.rs:20:19
    |
 LL |     let _ = match x {}; //~ ERROR non-exhaustive
@@ -22,7 +22,7 @@ error[E0004]: non-exhaustive patterns: type (Void,) is non-empty
 LL |     let _ = match x {}; //~ ERROR non-exhaustive
    |                   ^
    |
-help: Please ensure that all possible cases are being handled; possibly adding wildcards or more match arms.
+help: ensure that all possible cases are being handled, possibly by adding wildcards or more match arms
   --> $DIR/uninhabited-matches-feature-gated.rs:23:19
    |
 LL |     let _ = match x {}; //~ ERROR non-exhaustive
@@ -34,7 +34,7 @@ error[E0004]: non-exhaustive patterns: type [Void; 1] is non-empty
 LL |     let _ = match x {}; //~ ERROR non-exhaustive
    |                   ^
    |
-help: Please ensure that all possible cases are being handled; possibly adding wildcards or more match arms.
+help: ensure that all possible cases are being handled, possibly by adding wildcards or more match arms
   --> $DIR/uninhabited-matches-feature-gated.rs:26:19
    |
 LL |     let _ = match x {}; //~ ERROR non-exhaustive
index a4f5e41b5291c7e57db7eae26469bdf0712eddab..6b18aff9f6b830caa18e310f006a2a0832d78c69 100644 (file)
@@ -1,33 +1,64 @@
+error[E0502]: cannot borrow `u.y` as immutable because it is also borrowed as mutable
+  --> $DIR/union-borrow-move-parent-sibling.rs:25:13
+   |
+LL |     let a = &mut u.x.0;
+   |             ---------- mutable borrow occurs here
+LL |     let b = &u.y; //~ ERROR cannot borrow `u.y`
+   |             ^^^^ immutable borrow occurs here
+LL |     use_borrow(a);
+   |                - mutable borrow later used here
+
 error[E0382]: use of moved value: `u`
-  --> $DIR/union-borrow-move-parent-sibling.rs:29:13
+  --> $DIR/union-borrow-move-parent-sibling.rs:32:13
    |
 LL |     let a = u.x.0;
    |             ----- value moved here
-LL |     let a = u.y; //~ ERROR use of moved value: `u.y`
+LL |     let b = u.y; //~ ERROR use of moved value: `u.y`
    |             ^^^ value used here after move
    |
    = note: move occurs because `u` has type `U`, which does not implement the `Copy` trait
 
+error[E0502]: cannot borrow `u.y` as immutable because it is also borrowed as mutable
+  --> $DIR/union-borrow-move-parent-sibling.rs:38:13
+   |
+LL |     let a = &mut (u.x.0).0;
+   |             -------------- mutable borrow occurs here
+LL |     let b = &u.y; //~ ERROR cannot borrow `u.y`
+   |             ^^^^ immutable borrow occurs here
+LL |     use_borrow(a);
+   |                - mutable borrow later used here
+
 error[E0382]: use of moved value: `u`
-  --> $DIR/union-borrow-move-parent-sibling.rs:41:13
+  --> $DIR/union-borrow-move-parent-sibling.rs:45:13
    |
 LL |     let a = (u.x.0).0;
    |             --------- value moved here
-LL |     let a = u.y; //~ ERROR use of moved value: `u.y`
+LL |     let b = u.y; //~ ERROR use of moved value: `u.y`
    |             ^^^ value used here after move
    |
    = note: move occurs because `u` has type `U`, which does not implement the `Copy` trait
 
+error[E0502]: cannot borrow `u.x` as immutable because it is also borrowed as mutable
+  --> $DIR/union-borrow-move-parent-sibling.rs:51:13
+   |
+LL |     let a = &mut *u.y;
+   |             --------- mutable borrow occurs here
+LL |     let b = &u.x; //~ ERROR cannot borrow `u` (via `u.x`)
+   |             ^^^^ immutable borrow occurs here
+LL |     use_borrow(a);
+   |                - mutable borrow later used here
+
 error[E0382]: use of moved value: `u`
-  --> $DIR/union-borrow-move-parent-sibling.rs:53:13
+  --> $DIR/union-borrow-move-parent-sibling.rs:58:13
    |
 LL |     let a = *u.y;
    |             ---- value moved here
-LL |     let a = u.x; //~ ERROR use of moved value: `u.x`
+LL |     let b = u.x; //~ ERROR use of moved value: `u.x`
    |             ^^^ value used here after move
    |
    = note: move occurs because `u` has type `U`, which does not implement the `Copy` trait
 
-error: aborting due to 3 previous errors
+error: aborting due to 6 previous errors
 
-For more information about this error, try `rustc --explain E0382`.
+Some errors occurred: E0382, E0502.
+For more information about an error, try `rustc --explain E0382`.
index 5f504feabb266bab2a7eadf0cfb3746dc9d06d78..99a073b838ca91482e91eff5f9ad4174e8acd929 100644 (file)
@@ -17,40 +17,45 @@ union U {
     y: Box<Vec<u8>>,
 }
 
+fn use_borrow<T>(_: &T) {}
+
 unsafe fn parent_sibling_borrow() {
     let mut u = U { x: ((Vec::new(), Vec::new()), Vec::new()) };
     let a = &mut u.x.0;
-    let a = &u.y; //~ ERROR cannot borrow `u.y`
+    let b = &u.y; //~ ERROR cannot borrow `u.y`
+    use_borrow(a);
 }
 
 unsafe fn parent_sibling_move() {
     let u = U { x: ((Vec::new(), Vec::new()), Vec::new()) };
     let a = u.x.0;
-    let a = u.y; //~ ERROR use of moved value: `u.y`
+    let b = u.y; //~ ERROR use of moved value: `u.y`
 }
 
 unsafe fn grandparent_sibling_borrow() {
     let mut u = U { x: ((Vec::new(), Vec::new()), Vec::new()) };
     let a = &mut (u.x.0).0;
-    let a = &u.y; //~ ERROR cannot borrow `u.y`
+    let b = &u.y; //~ ERROR cannot borrow `u.y`
+    use_borrow(a);
 }
 
 unsafe fn grandparent_sibling_move() {
     let u = U { x: ((Vec::new(), Vec::new()), Vec::new()) };
     let a = (u.x.0).0;
-    let a = u.y; //~ ERROR use of moved value: `u.y`
+    let b = u.y; //~ ERROR use of moved value: `u.y`
 }
 
 unsafe fn deref_sibling_borrow() {
     let mut u = U { y: Box::default() };
     let a = &mut *u.y;
-    let a = &u.x; //~ ERROR cannot borrow `u` (via `u.x`)
+    let b = &u.x; //~ ERROR cannot borrow `u` (via `u.x`)
+    use_borrow(a);
 }
 
 unsafe fn deref_sibling_move() {
     let u = U { x: ((Vec::new(), Vec::new()), Vec::new()) };
     let a = *u.y;
-    let a = u.x; //~ ERROR use of moved value: `u.x`
+    let b = u.x; //~ ERROR use of moved value: `u.x`
 }
 
 
index d855435416e5dc1ae6b36b196df4217b98d78a01..daf5a4f4fccaa4262b9dcf91fb22352e6118cf4b 100644 (file)
@@ -1,59 +1,62 @@
 error[E0502]: cannot borrow `u.y` as immutable because `u.x.0` is also borrowed as mutable
-  --> $DIR/union-borrow-move-parent-sibling.rs:23:14
+  --> $DIR/union-borrow-move-parent-sibling.rs:25:14
    |
 LL |     let a = &mut u.x.0;
    |                  ----- mutable borrow occurs here
-LL |     let a = &u.y; //~ ERROR cannot borrow `u.y`
+LL |     let b = &u.y; //~ ERROR cannot borrow `u.y`
    |              ^^^ immutable borrow occurs here
+LL |     use_borrow(a);
 LL | }
    | - mutable borrow ends here
 
 error[E0382]: use of moved value: `u.y`
-  --> $DIR/union-borrow-move-parent-sibling.rs:29:9
+  --> $DIR/union-borrow-move-parent-sibling.rs:32:9
    |
 LL |     let a = u.x.0;
    |         - value moved here
-LL |     let a = u.y; //~ ERROR use of moved value: `u.y`
+LL |     let b = u.y; //~ ERROR use of moved value: `u.y`
    |         ^ value used here after move
    |
    = note: move occurs because `u.y` has type `[type error]`, which does not implement the `Copy` trait
 
 error[E0502]: cannot borrow `u.y` as immutable because `u.x.0.0` is also borrowed as mutable
-  --> $DIR/union-borrow-move-parent-sibling.rs:35:14
+  --> $DIR/union-borrow-move-parent-sibling.rs:38:14
    |
 LL |     let a = &mut (u.x.0).0;
    |                  --------- mutable borrow occurs here
-LL |     let a = &u.y; //~ ERROR cannot borrow `u.y`
+LL |     let b = &u.y; //~ ERROR cannot borrow `u.y`
    |              ^^^ immutable borrow occurs here
+LL |     use_borrow(a);
 LL | }
    | - mutable borrow ends here
 
 error[E0382]: use of moved value: `u.y`
-  --> $DIR/union-borrow-move-parent-sibling.rs:41:9
+  --> $DIR/union-borrow-move-parent-sibling.rs:45:9
    |
 LL |     let a = (u.x.0).0;
    |         - value moved here
-LL |     let a = u.y; //~ ERROR use of moved value: `u.y`
+LL |     let b = u.y; //~ ERROR use of moved value: `u.y`
    |         ^ value used here after move
    |
    = note: move occurs because `u.y` has type `[type error]`, which does not implement the `Copy` trait
 
 error[E0502]: cannot borrow `u` (via `u.x`) as immutable because `u` is also borrowed as mutable (via `*u.y`)
-  --> $DIR/union-borrow-move-parent-sibling.rs:47:14
+  --> $DIR/union-borrow-move-parent-sibling.rs:51:14
    |
 LL |     let a = &mut *u.y;
    |                  ---- mutable borrow occurs here (via `*u.y`)
-LL |     let a = &u.x; //~ ERROR cannot borrow `u` (via `u.x`)
+LL |     let b = &u.x; //~ ERROR cannot borrow `u` (via `u.x`)
    |              ^^^ immutable borrow occurs here (via `u.x`)
+LL |     use_borrow(a);
 LL | }
    | - mutable borrow ends here
 
 error[E0382]: use of moved value: `u.x`
-  --> $DIR/union-borrow-move-parent-sibling.rs:53:9
+  --> $DIR/union-borrow-move-parent-sibling.rs:58:9
    |
 LL |     let a = *u.y;
    |         - value moved here
-LL |     let a = u.x; //~ ERROR use of moved value: `u.x`
+LL |     let b = u.x; //~ ERROR use of moved value: `u.x`
    |         ^ value used here after move
    |
    = note: move occurs because `u.x` has type `[type error]`, which does not implement the `Copy` trait
index 111940aab2c322764150b4a5be8539b966a6a6ad..bfc7736b2f32cd7f66b59646931677c257b303ac 100644 (file)
@@ -9,6 +9,29 @@ LL |     x.clone();  //~ ERROR: use of moved value
    |
    = note: move occurs because `x` has type `T`, which does not implement the `Copy` trait
 
+error[E0505]: cannot move out of `x` because it is borrowed
+  --> $DIR/unop-move-semantics.rs:25:6
+   |
+LL |     let m = &x;
+   |             -- borrow of `x` occurs here
+...
+LL |     !x;  //~ ERROR: cannot move out of `x` because it is borrowed
+   |      ^ move out of `x` occurs here
+...
+LL |     use_mut(n); use_imm(m);
+   |                         - borrow later used here
+
+error[E0505]: cannot move out of `y` because it is borrowed
+  --> $DIR/unop-move-semantics.rs:27:6
+   |
+LL |     let n = &mut y;
+   |             ------ borrow of `y` occurs here
+...
+LL |     !y;  //~ ERROR: cannot move out of `y` because it is borrowed
+   |      ^ move out of `y` occurs here
+LL |     use_mut(n); use_imm(m);
+   |             - borrow later used here
+
 error[E0507]: cannot move out of borrowed content
   --> $DIR/unop-move-semantics.rs:34:6
    |
@@ -21,7 +44,7 @@ error[E0507]: cannot move out of borrowed content
 LL |     !*n;  //~ ERROR: cannot move out of borrowed content
    |      ^^ cannot move out of borrowed content
 
-error: aborting due to 3 previous errors
+error: aborting due to 5 previous errors
 
-Some errors occurred: E0382, E0507.
+Some errors occurred: E0382, E0505, E0507.
 For more information about an error, try `rustc --explain E0382`.
index 946566675981e7020327aa8aca735f9e14490e75..fcbbe546a316446deedae6db6d929515cbcab52b 100644 (file)
@@ -25,8 +25,8 @@ fn move_borrowed<T: Not<Output=T>>(x: T, mut y: T) {
     !x;  //~ ERROR: cannot move out of `x` because it is borrowed
 
     !y;  //~ ERROR: cannot move out of `y` because it is borrowed
+    use_mut(n); use_imm(m);
 }
-
 fn illegal_dereference<T: Not<Output=T>>(mut x: T, y: T) {
     let m = &mut x;
     let n = &y;
@@ -34,6 +34,9 @@ fn illegal_dereference<T: Not<Output=T>>(mut x: T, y: T) {
     !*m;  //~ ERROR: cannot move out of borrowed content
 
     !*n;  //~ ERROR: cannot move out of borrowed content
+    use_imm(n); use_mut(m);
 }
-
 fn main() {}
+
+fn use_mut<T>(_: &mut T) { }
+fn use_imm<T>(_: &T) { }
index 2d0863f657e6f45159fc7412267eee3e659185e5..1fa30882067703202d13ad0bd53d630bc2c1de66 160000 (submodule)
@@ -1 +1 @@
-Subproject commit 2d0863f657e6f45159fc7412267eee3e659185e5
+Subproject commit 1fa30882067703202d13ad0bd53d630bc2c1de66
index 481f7880df428020480eb343c209072c6db10503..d8b426901a75b1eb975f52b4537f2736f2b94436 160000 (submodule)
@@ -1 +1 @@
-Subproject commit 481f7880df428020480eb343c209072c6db10503
+Subproject commit d8b426901a75b1eb975f52b4537f2736f2b94436
index 06eeef61a194ddf3e535e77c979df2add9bce39c..f12dd31c402d9e85d11b1cc542ac477136290b5d 100644 (file)
@@ -243,6 +243,29 @@ fn ignore_llvm(config: &Config, line: &str) -> bool {
                     // Ignore if using system LLVM and actual version
                     // is smaller the minimum required version
                     config.system_llvm && &actual_version[..] < min_version
+                } else if line.starts_with("ignore-llvm-version") {
+                    // Syntax is: "ignore-llvm-version <version1> [- <version2>]"
+                    let range_components = line.split(' ')
+                        .skip(1) // Skip the directive.
+                        .map(|s| s.trim())
+                        .filter(|word| !word.is_empty() && word != &"-")
+                        .take(3) // 3 or more = invalid, so take at most 3.
+                        .collect::<Vec<&str>>();
+                    match range_components.len() {
+                        1 => {
+                            &actual_version[..] == range_components[0]
+                        }
+                        2 => {
+                            let v_min = range_components[0];
+                            let v_max = range_components[1];
+                            if v_max < v_min {
+                                panic!("Malformed LLVM version range: max < min")
+                            }
+                            // Ignore if version lies inside of range.
+                            &actual_version[..] >= v_min && &actual_version[..] <= v_max
+                        }
+                        _ => panic!("Malformed LLVM version directive"),
+                    }
                 } else {
                     false
                 }
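The new compiletest directive accepts either a single version or an inclusive range, e.g. `// ignore-llvm-version 6.0` or `// ignore-llvm-version 6.0 - 7.0`, and like the existing system-LLVM check above it compares version strings lexicographically rather than numerically. A standalone sketch of the same range check, using a hypothetical helper name that is not part of compiletest:

// Hypothetical helper mirroring the hunk above; `args` is everything
// after the `ignore-llvm-version` keyword.
fn llvm_version_ignored(actual: &str, args: &str) -> bool {
    let parts: Vec<&str> = args
        .split(' ')
        .map(|s| s.trim())
        .filter(|w| !w.is_empty() && *w != "-")
        .collect();
    match parts.len() {
        1 => actual == parts[0],
        2 => {
            assert!(parts[0] <= parts[1], "Malformed LLVM version range: max < min");
            // String comparison, as in the original code.
            actual >= parts[0] && actual <= parts[1]
        }
        _ => panic!("Malformed LLVM version directive"),
    }
}

fn main() {
    assert!(llvm_version_ignored("6.0", "6.0"));
    assert!(llvm_version_ignored("6.0", "5.0 - 7.0"));
    assert!(!llvm_version_ignored("4.0", "5.0 - 7.0"));
}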
index ca30a4dd95d0e1137b88dd7cdbdb9cd9824c446b..f63950b448a143565fc33a3c366da5dd6e4edfc7 100644 (file)
@@ -766,7 +766,7 @@ fn make_test_name(
         .join(&testpaths.file.file_name().unwrap());
     let mode_suffix = match config.compare_mode {
         Some(ref mode) => format!(" ({})", mode.to_str()),
-        None => format!(""),
+        None => String::new(),
     };
     test::DynTestName(format!(
         "[{}{}] {}{}",
index 40917cc5db0d5f6eba8086615706c1774d4d9793..a5556e1d570eb89e6f04ffb258bd8f1f7ff9bfc8 100644 (file)
@@ -10,6 +10,7 @@
 
 #![feature(rustc_private)]
 
+extern crate env_logger;
 extern crate syntax;
 extern crate rustdoc;
 extern crate serialize as rustc_serialize;
@@ -264,6 +265,7 @@ fn parse_args() -> (OutputFormat, PathBuf) {
 }
 
 fn main() {
+    env_logger::init();
     PLAYGROUND.with(|slot| {
         *slot.borrow_mut() = Some((None, String::from("https://play.rust-lang.org/")));
     });
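Calling `env_logger::init()` at the top of `main` makes the tool's `log` output (and that of the rustc/rustdoc crates it links) controllable through the RUST_LOG environment variable. A minimal sketch of the same pattern in an ordinary binary, assuming the 2018-era env_logger and log crates:

extern crate env_logger;
#[macro_use]
extern crate log;

fn main() {
    // Reads RUST_LOG (e.g. RUST_LOG=info) to decide which records get printed.
    env_logger::init();
    info!("starting up");
    debug!("only printed when RUST_LOG is set to debug or trace");
}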
index 41ad4e7180e60607325e8defde310eeb08b00300..ca7e0224dc328bcbfd12e78843968fc42e214ce2 100644 (file)
@@ -133,7 +133,7 @@ fn check(cache: &mut Cache,
     // whitelists to get this past `make check` today.
     // FIXME(#32129)
     if file.ends_with("std/string/struct.String.html") ||
-       file.ends_with("interpret/struct.ValTy.html") ||
+       file.ends_with("interpret/struct.ImmTy.html") ||
        file.ends_with("symbol/struct.InternedString.html") ||
        file.ends_with("ast/struct.ThinVec.html") ||
        file.ends_with("util/struct.ThinVec.html") ||
index 7728fa22bebea288abfea3b70cf795c60b93df3a..29bf48582812212450f4caf7da1af3f18c52bfef 160000 (submodule)
@@ -1 +1 @@
-Subproject commit 7728fa22bebea288abfea3b70cf795c60b93df3a
+Subproject commit 29bf48582812212450f4caf7da1af3f18c52bfef
index 1539c2e25781b09d2e5ddabcbe19e63be1a2087a..4ade87f5d65bd2c167175bcf27ca178a28df1b50 100755 (executable)
@@ -14,7 +14,6 @@
 import sys
 import re
 import json
-import copy
 import datetime
 import collections
 import textwrap
@@ -27,7 +26,7 @@ except ImportError:
 MAINTAINERS = {
     'miri': '@oli-obk @RalfJung @eddyb',
     'clippy-driver': '@Manishearth @llogiq @mcarton @oli-obk',
-    'rls': '@nrc',
+    'rls': '@nrc @Xanewok',
     'rustfmt': '@nrc',
     'book': '@carols10cents @steveklabnik',
     'nomicon': '@frewsxcv @Gankro',
@@ -82,8 +81,8 @@ def update_latest(
                 status[os] = new
                 if new > old:
                     changed = True
-                    message += '🎉 {} on {}: {} → {}.\n' \
-                        .format(tool, os, old, new)
+                    message += '🎉 {} on {}: {} → {} (cc {}, @rust-lang/infra).\n' \
+                        .format(tool, os, old, new, MAINTAINERS.get(tool))
                 elif new < old:
                     changed = True
                     message += '💔 {} on {}: {} → {} (cc {}, @rust-lang/infra).\n' \
index 69f61bc248dbb5e6a0633799e5c5e8de4b309d41..466d2fa0d2bd627b4461c52158dc7e8cdc30a8ed 100644 (file)
@@ -84,8 +84,7 @@ fn verify(tomlfile: &Path, libfile: &Path, bad: &mut bool) {
 
         // This is intentional, this dependency just makes the crate available
         // for others later on. Cover cases
-        let whitelisted = krate == "alloc_jemalloc";
-        let whitelisted = whitelisted || krate.starts_with("panic");
+        let whitelisted = krate.starts_with("panic");
         if toml.contains("name = \"std\"") && whitelisted {
             continue
         }
index c5f5896d286c32480689b704ec9e27274b1121e5..e235de9c5e138d619b2ef38f0915025086d1fd31 100644 (file)
@@ -57,7 +57,6 @@ macro_rules! tidy_error {
 fn filter_dirs(path: &Path) -> bool {
     let skip = [
         "src/dlmalloc",
-        "src/jemalloc",
         "src/llvm",
         "src/llvm-emscripten",
         "src/libbacktrace",
index 3d5e18e37b070c1aeaa93a693639bbf4bce68cf9..e8f197ba78afe1f83d4f7a158d1443ed2efa0a6c 100644 (file)
@@ -28,7 +28,6 @@
 //! - core may not have platform-specific code
 //! - libcompiler_builtins may have platform-specific code
 //! - liballoc_system may have platform-specific code
-//! - liballoc_jemalloc may have platform-specific code
 //! - libpanic_abort may have platform-specific code
 //! - libpanic_unwind may have platform-specific code
 //! - libunwind may have platform-specific code
@@ -52,7 +51,6 @@
 // Paths that may contain platform-specific code
 const EXCEPTION_PATHS: &[&str] = &[
     // std crates
-    "src/liballoc_jemalloc",
     "src/liballoc_system",
     "src/libcompiler_builtins",
     "src/liblibc",