git.lizzy.rs Git - rust.git/commitdiff
Merge branch 'master' into rusty-hermit
author    Stefan Lankes <stlankes@users.noreply.github.com>
          Sun, 20 Oct 2019 08:48:58 +0000 (10:48 +0200)
committer GitHub <noreply@github.com>
          Sun, 20 Oct 2019 08:48:58 +0000 (10:48 +0200)
1163 files changed:
CODE_OF_CONDUCT.md
Cargo.lock
src/bootstrap/builder.rs
src/bootstrap/lib.rs
src/bootstrap/native.rs
src/build_helper/lib.rs
src/ci/azure-pipelines/auto.yml
src/ci/docker/README.md
src/ci/docker/asmjs/Dockerfile [deleted file]
src/ci/docker/disabled/asmjs/Dockerfile [new file with mode: 0644]
src/ci/docker/disabled/wasm32/Dockerfile [deleted file]
src/ci/docker/dist-armv7-linux/Dockerfile
src/ci/docker/dist-armv7-linux/armv7-linux-gnueabihf.config
src/ci/docker/dist-armv7-linux/crosstool-ng.sh [new file with mode: 0644]
src/ci/docker/dist-armv7-linux/patches/glibc/ports-2.16.0/001-arm-libgcc_s_resume-used.patch [deleted file]
src/ci/docker/scripts/cross-apt-packages.sh
src/ci/docker/scripts/emscripten.sh
src/ci/docker/wasm32/Dockerfile [new file with mode: 0644]
src/doc/book
src/doc/nomicon
src/doc/reference
src/doc/rust-by-example
src/doc/rustc/src/SUMMARY.md
src/doc/rustc/src/command-line-arguments.md
src/doc/rustc/src/json.md [new file with mode: 0644]
src/doc/rustc/src/lints/listing/deny-by-default.md
src/doc/rustc/src/profile-guided-optimization.md
src/doc/unstable-book/src/compiler-flags/report-time.md [new file with mode: 0644]
src/doc/unstable-book/src/language-features/track-caller.md [new file with mode: 0644]
src/liballoc/boxed.rs
src/liballoc/collections/btree/set.rs
src/liballoc/collections/vec_deque.rs
src/liballoc/collections/vec_deque/tests.rs
src/liballoc/fmt.rs
src/liballoc/lib.rs
src/liballoc/rc.rs
src/liballoc/slice.rs
src/liballoc/sync.rs
src/liballoc/tests/boxed.rs [new file with mode: 0644]
src/liballoc/tests/btree/set.rs
src/liballoc/tests/lib.rs
src/liballoc/tests/str.rs.rej [deleted file]
src/libcore/any.rs
src/libcore/cell.rs
src/libcore/char/methods.rs
src/libcore/fmt/builders.rs
src/libcore/fmt/mod.rs
src/libcore/lib.rs
src/libcore/macros.rs
src/libcore/mem/mod.rs
src/libcore/num/mod.rs
src/libcore/num/wrapping.rs
src/libcore/option.rs
src/libcore/slice/mod.rs
src/libcore/str/mod.rs
src/libcore/sync/atomic.rs
src/libcore/task/wake.rs
src/libcore/tests/fmt/builders.rs
src/libproc_macro/lib.rs
src/librustc/Cargo.toml
src/librustc/arena.rs
src/librustc/dep_graph/graph.rs
src/librustc/error_codes.rs
src/librustc/hir/check_attr.rs
src/librustc/hir/def.rs
src/librustc/hir/lowering.rs
src/librustc/hir/lowering/item.rs
src/librustc/hir/map/def_collector.rs
src/librustc/hir/map/definitions.rs
src/librustc/hir/map/mod.rs
src/librustc/hir/mod.rs
src/librustc/hir/print.rs
src/librustc/ich/hcx.rs
src/librustc/ich/impls_syntax.rs
src/librustc/infer/error_reporting/mod.rs
src/librustc/infer/lexical_region_resolve/mod.rs
src/librustc/infer/mod.rs
src/librustc/infer/opaque_types/mod.rs
src/librustc/infer/region_constraints/leak_check.rs
src/librustc/lib.rs
src/librustc/lint/builtin.rs
src/librustc/lint/levels.rs
src/librustc/lint/mod.rs
src/librustc/middle/cstore.rs
src/librustc/middle/dead.rs [deleted file]
src/librustc/middle/entry.rs [deleted file]
src/librustc/middle/intrinsicck.rs [deleted file]
src/librustc/middle/liveness.rs [deleted file]
src/librustc/middle/resolve_lifetime.rs
src/librustc/middle/stability.rs
src/librustc/mir/interpret/error.rs
src/librustc/mir/interpret/mod.rs
src/librustc/mir/interpret/value.rs
src/librustc/mir/mod.rs
src/librustc/mir/mono.rs
src/librustc/mir/tcx.rs
src/librustc/mir/visit.rs
src/librustc/query/mod.rs
src/librustc/session/config.rs
src/librustc/session/config/tests.rs
src/librustc/session/mod.rs
src/librustc/traits/error_reporting.rs
src/librustc/traits/mod.rs
src/librustc/traits/project.rs
src/librustc/traits/query/dropck_outlives.rs
src/librustc/traits/query/evaluate_obligation.rs
src/librustc/traits/query/type_op/outlives.rs
src/librustc/traits/select.rs
src/librustc/traits/specialize/mod.rs
src/librustc/traits/specialize/specialization_graph.rs
src/librustc/traits/util.rs
src/librustc/ty/codec.rs
src/librustc/ty/context.rs
src/librustc/ty/flags.rs
src/librustc/ty/fold.rs
src/librustc/ty/instance.rs
src/librustc/ty/layout.rs
src/librustc/ty/mod.rs
src/librustc/ty/outlives.rs
src/librustc/ty/print/obsolete.rs
src/librustc/ty/print/pretty.rs
src/librustc/ty/query/config.rs
src/librustc/ty/query/on_disk_cache.rs
src/librustc/ty/query/plumbing.rs
src/librustc/ty/relate.rs
src/librustc/ty/structural_impls.rs
src/librustc/ty/sty.rs
src/librustc/ty/subst.rs
src/librustc/ty/util.rs
src/librustc/ty/walk.rs
src/librustc/util/common.rs
src/librustc_codegen_llvm/allocator.rs
src/librustc_codegen_llvm/attributes.rs
src/librustc_codegen_llvm/back/lto.rs
src/librustc_codegen_llvm/back/write.rs
src/librustc_codegen_llvm/builder.rs
src/librustc_codegen_llvm/callee.rs
src/librustc_codegen_llvm/common.rs
src/librustc_codegen_llvm/consts.rs
src/librustc_codegen_llvm/context.rs
src/librustc_codegen_llvm/debuginfo/gdb.rs
src/librustc_codegen_llvm/debuginfo/metadata.rs
src/librustc_codegen_llvm/debuginfo/mod.rs
src/librustc_codegen_llvm/error_codes.rs [deleted file]
src/librustc_codegen_llvm/intrinsic.rs
src/librustc_codegen_llvm/lib.rs
src/librustc_codegen_llvm/llvm/ffi.rs
src/librustc_codegen_llvm/type_of.rs
src/librustc_codegen_ssa/Cargo.toml
src/librustc_codegen_ssa/README.md
src/librustc_codegen_ssa/back/link.rs
src/librustc_codegen_ssa/back/symbol_export.rs
src/librustc_codegen_ssa/back/write.rs
src/librustc_codegen_ssa/base.rs
src/librustc_codegen_ssa/callee.rs [deleted file]
src/librustc_codegen_ssa/common.rs
src/librustc_codegen_ssa/error_codes.rs
src/librustc_codegen_ssa/lib.rs
src/librustc_codegen_ssa/meth.rs
src/librustc_codegen_ssa/mir/block.rs
src/librustc_codegen_ssa/mir/mod.rs
src/librustc_codegen_ssa/mir/place.rs
src/librustc_codegen_ssa/mir/rvalue.rs
src/librustc_codegen_ssa/traits/backend.rs
src/librustc_codegen_ssa/traits/builder.rs
src/librustc_codegen_ssa/traits/consts.rs
src/librustc_codegen_ssa/traits/debuginfo.rs
src/librustc_codegen_ssa/traits/declare.rs
src/librustc_codegen_ssa/traits/intrinsic.rs
src/librustc_codegen_ssa/traits/misc.rs
src/librustc_codegen_ssa/traits/mod.rs
src/librustc_codegen_utils/codegen_backend.rs
src/librustc_codegen_utils/lib.rs
src/librustc_codegen_utils/symbol_names/legacy.rs
src/librustc_codegen_utils/symbol_names/v0.rs
src/librustc_data_structures/Cargo.toml
src/librustc_data_structures/sharded.rs
src/librustc_data_structures/sync.rs
src/librustc_driver/lib.rs
src/librustc_errors/annotate_snippet_emitter_writer.rs
src/librustc_errors/diagnostic.rs
src/librustc_errors/emitter.rs
src/librustc_errors/lib.rs
src/librustc_errors/styled_buffer.rs
src/librustc_incremental/persist/load.rs
src/librustc_incremental/persist/save.rs
src/librustc_index/bit_set.rs
src/librustc_interface/Cargo.toml
src/librustc_interface/interface.rs
src/librustc_interface/lib.rs
src/librustc_interface/passes.rs
src/librustc_interface/profile/mod.rs [deleted file]
src/librustc_interface/profile/trace.rs [deleted file]
src/librustc_interface/util.rs
src/librustc_lint/builtin.rs
src/librustc_lint/error_codes.rs
src/librustc_lint/lib.rs
src/librustc_lint/types.rs
src/librustc_lint/unused.rs
src/librustc_macros/Cargo.toml
src/librustc_macros/src/hash_stable.rs
src/librustc_macros/src/query.rs
src/librustc_metadata/Cargo.toml
src/librustc_metadata/creader.rs
src/librustc_metadata/cstore.rs
src/librustc_metadata/cstore_impl.rs
src/librustc_metadata/decoder.rs
src/librustc_metadata/dependency_format.rs
src/librustc_metadata/dynamic_lib.rs
src/librustc_metadata/encoder.rs
src/librustc_metadata/foreign_modules.rs
src/librustc_metadata/index.rs [deleted file]
src/librustc_metadata/lib.rs
src/librustc_metadata/link_args.rs
src/librustc_metadata/locator.rs
src/librustc_metadata/native_libs.rs
src/librustc_metadata/schema.rs
src/librustc_metadata/table.rs [new file with mode: 0644]
src/librustc_mir/borrow_check/conflict_errors.rs
src/librustc_mir/borrow_check/error_reporting.rs
src/librustc_mir/borrow_check/mod.rs
src/librustc_mir/borrow_check/nll/constraint_generation.rs
src/librustc_mir/borrow_check/nll/explain_borrow/mod.rs
src/librustc_mir/borrow_check/nll/renumber.rs
src/librustc_mir/borrow_check/nll/type_check/mod.rs
src/librustc_mir/borrow_check/nll/universal_regions.rs
src/librustc_mir/build/expr/as_rvalue.rs
src/librustc_mir/build/expr/as_temp.rs
src/librustc_mir/build/matches/mod.rs
src/librustc_mir/build/mod.rs
src/librustc_mir/build/scope.rs
src/librustc_mir/dataflow/impls/indirect_mutation.rs
src/librustc_mir/dataflow/impls/storage_liveness.rs
src/librustc_mir/dataflow/mod.rs
src/librustc_mir/error_codes.rs
src/librustc_mir/hair/pattern/_match.rs
src/librustc_mir/hair/pattern/check_match.rs
src/librustc_mir/hair/pattern/mod.rs
src/librustc_mir/interpret/cast.rs
src/librustc_mir/interpret/eval_context.rs
src/librustc_mir/interpret/intern.rs
src/librustc_mir/interpret/intrinsics/type_name.rs
src/librustc_mir/interpret/operand.rs
src/librustc_mir/interpret/place.rs
src/librustc_mir/interpret/terminator.rs
src/librustc_mir/interpret/validity.rs
src/librustc_mir/lib.rs
src/librustc_mir/monomorphize/collector.rs
src/librustc_mir/monomorphize/item.rs [deleted file]
src/librustc_mir/monomorphize/partitioning.rs
src/librustc_mir/shim.rs
src/librustc_mir/transform/const_prop.rs
src/librustc_mir/transform/erase_regions.rs
src/librustc_mir/transform/generator.rs
src/librustc_mir/transform/inline.rs
src/librustc_mir/transform/promote_consts.rs
src/librustc_mir/transform/qualify_consts.rs
src/librustc_mir/transform/qualify_min_const_fn.rs
src/librustc_mir/transform/simplify.rs
src/librustc_mir/util/def_use.rs
src/librustc_mir/util/elaborate_drops.rs
src/librustc_passes/Cargo.toml
src/librustc_passes/ast_validation.rs
src/librustc_passes/dead.rs [new file with mode: 0644]
src/librustc_passes/entry.rs [new file with mode: 0644]
src/librustc_passes/error_codes.rs
src/librustc_passes/intrinsicck.rs [new file with mode: 0644]
src/librustc_passes/lib.rs
src/librustc_passes/liveness.rs [new file with mode: 0644]
src/librustc_plugin/Cargo.toml
src/librustc_plugin/lib.rs
src/librustc_plugin/registry.rs
src/librustc_privacy/error_codes.rs
src/librustc_privacy/lib.rs
src/librustc_resolve/Cargo.toml
src/librustc_resolve/build_reduced_graph.rs
src/librustc_resolve/diagnostics.rs
src/librustc_resolve/error_codes.rs
src/librustc_resolve/late.rs
src/librustc_resolve/late/diagnostics.rs
src/librustc_resolve/lib.rs
src/librustc_resolve/macros.rs
src/librustc_resolve/resolve_imports.rs
src/librustc_save_analysis/dump_visitor.rs
src/librustc_save_analysis/lib.rs
src/librustc_target/abi/mod.rs
src/librustc_target/spec/mod.rs
src/librustc_target/spec/wasm32_wasi.rs
src/librustc_traits/dropck_outlives.rs
src/librustc_traits/evaluate_obligation.rs
src/librustc_traits/generic_types.rs
src/librustc_traits/lowering/mod.rs
src/librustc_typeck/astconv.rs
src/librustc_typeck/check/closure.rs
src/librustc_typeck/check/demand.rs
src/librustc_typeck/check/dropck.rs
src/librustc_typeck/check/expr.rs
src/librustc_typeck/check/method/mod.rs
src/librustc_typeck/check/method/suggest.rs
src/librustc_typeck/check/mod.rs
src/librustc_typeck/check/wfcheck.rs
src/librustc_typeck/collect.rs
src/librustc_typeck/constrained_generic_params.rs
src/librustc_typeck/error_codes.rs
src/librustc_typeck/impl_wf_check.rs
src/librustc_typeck/lib.rs
src/librustc_typeck/outlives/explicit.rs
src/librustdoc/Cargo.toml
src/librustdoc/clean/auto_trait.rs
src/librustdoc/clean/blanket_impl.rs
src/librustdoc/clean/cfg.rs
src/librustdoc/clean/inline.rs
src/librustdoc/clean/mod.rs
src/librustdoc/config.rs
src/librustdoc/doctree.rs
src/librustdoc/html/highlight.rs
src/librustdoc/html/item_type.rs
src/librustdoc/html/render.rs
src/librustdoc/lib.rs
src/librustdoc/passes/check_code_block_syntax.rs
src/librustdoc/passes/collect_intra_doc_links.rs
src/librustdoc/test.rs
src/librustdoc/visit_ast.rs
src/libstd/Cargo.toml
src/libstd/collections/hash/map.rs
src/libstd/error.rs
src/libstd/ffi/c_str.rs
src/libstd/fs.rs
src/libstd/lib.rs
src/libstd/net/udp.rs
src/libstd/panic.rs
src/libstd/panicking.rs
src/libstd/path.rs
src/libstd/rt.rs
src/libstd/sync/mpsc/mod.rs
src/libstd/sync/once.rs
src/libstd/sys/unix/fs.rs
src/libstd/sys/vxworks/fs.rs
src/libstd/sys/vxworks/rwlock.rs
src/libstd/sys/windows/fs.rs
src/libstd/sys_common/backtrace.rs
src/libstd/sys_common/os_str_bytes.rs
src/libstd/thread/mod.rs
src/libstd/time.rs
src/libsyntax/ast.rs
src/libsyntax/attr/builtin.rs
src/libsyntax/attr/mod.rs
src/libsyntax/config.rs
src/libsyntax/early_buffered_lints.rs
src/libsyntax/error_codes.rs
src/libsyntax/ext/allocator.rs [deleted file]
src/libsyntax/ext/base.rs [deleted file]
src/libsyntax/ext/build.rs [deleted file]
src/libsyntax/ext/expand.rs [deleted file]
src/libsyntax/ext/mbe.rs [deleted file]
src/libsyntax/ext/mbe/macro_check.rs [deleted file]
src/libsyntax/ext/mbe/macro_parser.rs [deleted file]
src/libsyntax/ext/mbe/macro_rules.rs [deleted file]
src/libsyntax/ext/mbe/quoted.rs [deleted file]
src/libsyntax/ext/mbe/transcribe.rs [deleted file]
src/libsyntax/ext/placeholders.rs [deleted file]
src/libsyntax/ext/proc_macro.rs [deleted file]
src/libsyntax/ext/proc_macro_server.rs [deleted file]
src/libsyntax/feature_gate/active.rs
src/libsyntax/feature_gate/builtin_attrs.rs
src/libsyntax/feature_gate/check.rs
src/libsyntax/feature_gate/mod.rs
src/libsyntax/json.rs
src/libsyntax/lib.rs
src/libsyntax/mut_visit.rs
src/libsyntax/parse/attr.rs [deleted file]
src/libsyntax/parse/diagnostics.rs [deleted file]
src/libsyntax/parse/lexer/mod.rs
src/libsyntax/parse/lexer/tokentrees.rs
src/libsyntax/parse/literal.rs
src/libsyntax/parse/mod.rs
src/libsyntax/parse/parser.rs
src/libsyntax/parse/parser/attr.rs [new file with mode: 0644]
src/libsyntax/parse/parser/diagnostics.rs [new file with mode: 0644]
src/libsyntax/parse/parser/expr.rs
src/libsyntax/parse/parser/generics.rs
src/libsyntax/parse/parser/item.rs
src/libsyntax/parse/parser/module.rs
src/libsyntax/parse/parser/pat.rs
src/libsyntax/parse/parser/path.rs
src/libsyntax/parse/parser/stmt.rs
src/libsyntax/parse/parser/ty.rs
src/libsyntax/parse/token.rs
src/libsyntax/print/pprust.rs
src/libsyntax/sess.rs [new file with mode: 0644]
src/libsyntax/source_map.rs
src/libsyntax/tests.rs
src/libsyntax/tokenstream.rs
src/libsyntax_expand/Cargo.toml [new file with mode: 0644]
src/libsyntax_expand/allocator.rs [new file with mode: 0644]
src/libsyntax_expand/base.rs [new file with mode: 0644]
src/libsyntax_expand/build.rs [new file with mode: 0644]
src/libsyntax_expand/expand.rs [new file with mode: 0644]
src/libsyntax_expand/lib.rs [new file with mode: 0644]
src/libsyntax_expand/mbe.rs [new file with mode: 0644]
src/libsyntax_expand/mbe/macro_check.rs [new file with mode: 0644]
src/libsyntax_expand/mbe/macro_parser.rs [new file with mode: 0644]
src/libsyntax_expand/mbe/macro_rules.rs [new file with mode: 0644]
src/libsyntax_expand/mbe/quoted.rs [new file with mode: 0644]
src/libsyntax_expand/mbe/transcribe.rs [new file with mode: 0644]
src/libsyntax_expand/placeholders.rs [new file with mode: 0644]
src/libsyntax_expand/proc_macro.rs [new file with mode: 0644]
src/libsyntax_expand/proc_macro_server.rs [new file with mode: 0644]
src/libsyntax_ext/Cargo.toml
src/libsyntax_ext/asm.rs
src/libsyntax_ext/assert.rs
src/libsyntax_ext/cfg.rs
src/libsyntax_ext/cmdline_attrs.rs
src/libsyntax_ext/compile_error.rs
src/libsyntax_ext/concat.rs
src/libsyntax_ext/concat_idents.rs
src/libsyntax_ext/deriving/bounds.rs
src/libsyntax_ext/deriving/clone.rs
src/libsyntax_ext/deriving/cmp/eq.rs
src/libsyntax_ext/deriving/cmp/ord.rs
src/libsyntax_ext/deriving/cmp/partial_eq.rs
src/libsyntax_ext/deriving/cmp/partial_ord.rs
src/libsyntax_ext/deriving/debug.rs
src/libsyntax_ext/deriving/decodable.rs
src/libsyntax_ext/deriving/default.rs
src/libsyntax_ext/deriving/encodable.rs
src/libsyntax_ext/deriving/generic/mod.rs
src/libsyntax_ext/deriving/generic/ty.rs
src/libsyntax_ext/deriving/hash.rs
src/libsyntax_ext/deriving/mod.rs
src/libsyntax_ext/env.rs
src/libsyntax_ext/format.rs
src/libsyntax_ext/global_allocator.rs
src/libsyntax_ext/global_asm.rs
src/libsyntax_ext/lib.rs
src/libsyntax_ext/log_syntax.rs
src/libsyntax_ext/plugin_macro_defs.rs
src/libsyntax_ext/proc_macro_harness.rs
src/libsyntax_ext/source_util.rs
src/libsyntax_ext/standard_library_imports.rs
src/libsyntax_ext/test.rs
src/libsyntax_ext/test_harness.rs
src/libsyntax_ext/trace_macros.rs
src/libsyntax_ext/util.rs [new file with mode: 0644]
src/libsyntax_pos/lib.rs
src/libsyntax_pos/symbol.rs
src/libsyntax_pos/symbol/tests.rs
src/libtest/formatters/json.rs
src/libtest/formatters/pretty.rs
src/libtest/formatters/terse.rs
src/libtest/lib.rs
src/libtest/stats.rs
src/libtest/tests.rs
src/llvm-project
src/test/codegen/extern-functions.rs [deleted file]
src/test/codegen/non-terminate/infinite-loop-1.rs [new file with mode: 0644]
src/test/codegen/non-terminate/infinite-loop-2.rs [new file with mode: 0644]
src/test/codegen/non-terminate/infinite-recursion.rs [new file with mode: 0644]
src/test/codegen/nounwind-extern.rs [deleted file]
src/test/codegen/unwind-extern-exports.rs [new file with mode: 0644]
src/test/codegen/unwind-extern-imports.rs [new file with mode: 0644]
src/test/compile-fail/consts/const-err3.rs
src/test/mir-opt/const_prop/aggregate.rs [new file with mode: 0644]
src/test/mir-opt/const_prop/boxes.rs [new file with mode: 0644]
src/test/mir-opt/const_prop/discriminant.rs [new file with mode: 0644]
src/test/mir-opt/const_prop/reify_fn_ptr.rs
src/test/mir-opt/const_prop/repeat.rs [new file with mode: 0644]
src/test/run-fail/adjust_never.rs [deleted file]
src/test/run-fail/call-fn-never-arg.rs [deleted file]
src/test/run-fail/cast-never.rs [deleted file]
src/test/run-fail/never-associated-type.rs [deleted file]
src/test/run-fail/never-type-arg.rs [deleted file]
src/test/run-fail/overflowing-rsh-5.rs
src/test/run-fail/overflowing-rsh-6.rs
src/test/run-make-fulldeps/linker-output-non-utf8/Makefile [deleted file]
src/test/run-make-fulldeps/linker-output-non-utf8/exec.rs [deleted file]
src/test/run-make-fulldeps/linker-output-non-utf8/library.rs [deleted file]
src/test/run-make-fulldeps/sanitizer-address/Makefile
src/test/run-make-fulldeps/target-without-atomic-cas/Makefile
src/test/rustdoc-ui/doc-test-doctest-feature.rs [new file with mode: 0644]
src/test/rustdoc-ui/doc-test-doctest-feature.stdout [new file with mode: 0644]
src/test/rustdoc/macro-in-closure.rs [new file with mode: 0644]
src/test/ui-fulldeps/ast_stmt_expr_attr.rs
src/test/ui-fulldeps/auxiliary/attr-plugin-test.rs
src/test/ui-fulldeps/auxiliary/issue-40001-plugin.rs
src/test/ui-fulldeps/auxiliary/plugin-args.rs
src/test/ui-fulldeps/auxiliary/roman-numerals.rs
src/test/ui-fulldeps/gated-plugin.stderr
src/test/ui-fulldeps/issue-15778-fail.stderr
src/test/ui-fulldeps/issue-15778-pass.stderr
src/test/ui-fulldeps/issue-40001.stderr
src/test/ui-fulldeps/lint-group-plugin-deny-cmdline.stderr
src/test/ui-fulldeps/lint-group-plugin.stderr
src/test/ui-fulldeps/lint-plugin-cmdline-allow.stderr
src/test/ui-fulldeps/lint-plugin-deny-attr.stderr
src/test/ui-fulldeps/lint-plugin-deny-cmdline.stderr
src/test/ui-fulldeps/lint-plugin-forbid-attrs.stderr
src/test/ui-fulldeps/lint-plugin-forbid-cmdline.stderr
src/test/ui-fulldeps/lint-plugin.stderr
src/test/ui-fulldeps/lint-tool-cmdline-allow.stderr
src/test/ui-fulldeps/lint-tool-test.stderr
src/test/ui-fulldeps/llvm-pass-plugin.stderr
src/test/ui-fulldeps/lto-syntax-extension.stderr
src/test/ui-fulldeps/macro-crate-rlib.stderr
src/test/ui-fulldeps/mod_dir_path_canonicalized.rs
src/test/ui-fulldeps/outlive-expansion-phase.stderr
src/test/ui-fulldeps/plugin-args-1.stderr
src/test/ui-fulldeps/plugin-args-2.stderr
src/test/ui-fulldeps/plugin-args-3.stderr
src/test/ui-fulldeps/plugin-attr-register-deny.stderr
src/test/ui-fulldeps/plugin-reexport.stderr
src/test/ui-fulldeps/pprust-expr-roundtrip.rs
src/test/ui-fulldeps/roman-numerals-macro.stderr
src/test/ui/abi/abort-on-c-abi.rs [deleted file]
src/test/ui/always-inhabited-union-ref.rs [deleted file]
src/test/ui/always-inhabited-union-ref.stderr [deleted file]
src/test/ui/associated-const/associated-const-impl-wrong-lifetime.stderr
src/test/ui/associated-const/associated-const-type-parameter-arrays-2.stderr
src/test/ui/associated-const/associated-const-type-parameter-arrays.stderr
src/test/ui/associated-item/issue-48027.rs [new file with mode: 0644]
src/test/ui/associated-item/issue-48027.stderr [new file with mode: 0644]
src/test/ui/associated-type-bounds/bad-bounds-on-assoc-in-trait.stderr
src/test/ui/associated-types/associated-types-bound-failure.fixed [new file with mode: 0644]
src/test/ui/associated-types/associated-types-bound-failure.rs
src/test/ui/associated-types/associated-types-bound-failure.stderr
src/test/ui/associated-types/associated-types-for-unimpl-trait.fixed [new file with mode: 0644]
src/test/ui/associated-types/associated-types-for-unimpl-trait.rs
src/test/ui/associated-types/associated-types-for-unimpl-trait.stderr
src/test/ui/associated-types/associated-types-invalid-trait-ref-issue-18865.stderr
src/test/ui/associated-types/associated-types-no-suitable-bound.stderr
src/test/ui/associated-types/associated-types-no-suitable-supertrait-2.stderr
src/test/ui/associated-types/associated-types-no-suitable-supertrait.stderr
src/test/ui/associated-types/associated-types-projection-to-unrelated-trait-in-method-without-default.fixed [new file with mode: 0644]
src/test/ui/associated-types/associated-types-projection-to-unrelated-trait-in-method-without-default.rs
src/test/ui/associated-types/associated-types-projection-to-unrelated-trait-in-method-without-default.stderr
src/test/ui/associated-types/associated-types-unsized.fixed [new file with mode: 0644]
src/test/ui/associated-types/associated-types-unsized.rs
src/test/ui/associated-types/associated-types-unsized.stderr
src/test/ui/associated-types/cache/project-fn-ret-contravariant.transmute.stderr
src/test/ui/associated-types/cache/project-fn-ret-invariant.transmute.stderr
src/test/ui/associated-types/issue-44153.rs [new file with mode: 0644]
src/test/ui/associated-types/issue-44153.stderr [new file with mode: 0644]
src/test/ui/associated-types/issue-48010.rs [new file with mode: 0644]
src/test/ui/async-await/async-assoc-fn-anon-lifetimes.rs [new file with mode: 0644]
src/test/ui/async-await/async-borrowck-escaping-block-error.fixed [new file with mode: 0644]
src/test/ui/async-await/async-borrowck-escaping-block-error.rs [new file with mode: 0644]
src/test/ui/async-await/async-borrowck-escaping-block-error.stderr [new file with mode: 0644]
src/test/ui/async-await/async-fn-size-moved-locals.rs
src/test/ui/async-await/async-fn-size-uninit-locals.rs [new file with mode: 0644]
src/test/ui/async-await/async-fn-size.rs
src/test/ui/async-await/issues/issue-63388-2.stderr
src/test/ui/async-await/issues/issue-65159.rs [new file with mode: 0644]
src/test/ui/async-await/issues/issue-65159.stderr [new file with mode: 0644]
src/test/ui/async-await/unused-lifetime.rs [new file with mode: 0644]
src/test/ui/async-await/unused-lifetime.stderr [new file with mode: 0644]
src/test/ui/auto-trait-validation.stderr
src/test/ui/bad/bad-method-typaram-kind.stderr
src/test/ui/borrowck/regions-bound-missing-bound-in-impl.stderr
src/test/ui/builtin-superkinds/builtin-superkinds-double-superkind.stderr
src/test/ui/builtin-superkinds/builtin-superkinds-in-metadata.stderr
src/test/ui/builtin-superkinds/builtin-superkinds-typaram-not-send.stderr
src/test/ui/c-variadic/variadic-ffi-4.stderr
src/test/ui/call-fn-never-arg-wrong-type.rs [deleted file]
src/test/ui/call-fn-never-arg-wrong-type.stderr [deleted file]
src/test/ui/check_match/issue-43253.rs
src/test/ui/check_match/issue-43253.stderr
src/test/ui/closure-expected-type/expect-fn-supply-fn.stderr
src/test/ui/closures/closure-bounds-cant-promote-superkind-in-struct.stderr
src/test/ui/closures/closure-bounds-subtype.stderr
src/test/ui/closures/closure-expected-type/expect-region-supply-region.stderr
src/test/ui/coercion/coerce-issue-49593-box-never.rs
src/test/ui/conflicting-repr-hints.stderr
src/test/ui/const-generics/auxiliary/const_generic_lib.rs [new file with mode: 0644]
src/test/ui/const-generics/const-argument-cross-crate-mismatch.rs [new file with mode: 0644]
src/test/ui/const-generics/const-argument-cross-crate-mismatch.stderr [new file with mode: 0644]
src/test/ui/const-generics/const-argument-cross-crate.rs [new file with mode: 0644]
src/test/ui/const-generics/const-parameter-uppercase-lint.stderr
src/test/ui/const-generics/fn-const-param-call.rs [new file with mode: 0644]
src/test/ui/const-generics/fn-const-param-call.stderr [new file with mode: 0644]
src/test/ui/const-generics/fn-const-param-infer.rs [new file with mode: 0644]
src/test/ui/const-generics/fn-const-param-infer.stderr [new file with mode: 0644]
src/test/ui/const-generics/issue-60263.rs [deleted file]
src/test/ui/const-generics/issue-60263.stderr [deleted file]
src/test/ui/const-generics/issue-60818-struct-constructors.rs [deleted file]
src/test/ui/const-generics/issue-60818-struct-constructors.stderr [deleted file]
src/test/ui/const-generics/issue-61336-1.rs [deleted file]
src/test/ui/const-generics/issue-61336-1.stderr [deleted file]
src/test/ui/const-generics/issue-61336-2.rs [deleted file]
src/test/ui/const-generics/issue-61336-2.stderr [deleted file]
src/test/ui/const-generics/issue-61336.rs [deleted file]
src/test/ui/const-generics/issue-61336.stderr [deleted file]
src/test/ui/const-generics/issue-61422.rs [deleted file]
src/test/ui/const-generics/issue-61422.stderr [deleted file]
src/test/ui/const-generics/issue-61432.rs [deleted file]
src/test/ui/const-generics/issue-61432.stderr [deleted file]
src/test/ui/const-generics/issue-64519.rs [deleted file]
src/test/ui/const-generics/issue-64519.stderr [deleted file]
src/test/ui/const-generics/issues/issue-60263.rs [new file with mode: 0644]
src/test/ui/const-generics/issues/issue-60263.stderr [new file with mode: 0644]
src/test/ui/const-generics/issues/issue-60818-struct-constructors.rs [new file with mode: 0644]
src/test/ui/const-generics/issues/issue-60818-struct-constructors.stderr [new file with mode: 0644]
src/test/ui/const-generics/issues/issue-61336-1.rs [new file with mode: 0644]
src/test/ui/const-generics/issues/issue-61336-1.stderr [new file with mode: 0644]
src/test/ui/const-generics/issues/issue-61336-2.rs [new file with mode: 0644]
src/test/ui/const-generics/issues/issue-61336-2.stderr [new file with mode: 0644]
src/test/ui/const-generics/issues/issue-61336.rs [new file with mode: 0644]
src/test/ui/const-generics/issues/issue-61336.stderr [new file with mode: 0644]
src/test/ui/const-generics/issues/issue-61422.rs [new file with mode: 0644]
src/test/ui/const-generics/issues/issue-61422.stderr [new file with mode: 0644]
src/test/ui/const-generics/issues/issue-61432.rs [new file with mode: 0644]
src/test/ui/const-generics/issues/issue-61432.stderr [new file with mode: 0644]
src/test/ui/const-generics/issues/issue-62187-encountered-polymorphic-const.rs [new file with mode: 0644]
src/test/ui/const-generics/issues/issue-62187-encountered-polymorphic-const.stderr [new file with mode: 0644]
src/test/ui/const-generics/issues/issue-64519.rs [new file with mode: 0644]
src/test/ui/const-generics/issues/issue-64519.stderr [new file with mode: 0644]
src/test/ui/const-generics/raw-ptr-const-param-deref.rs [new file with mode: 0644]
src/test/ui/const-generics/raw-ptr-const-param-deref.stderr [new file with mode: 0644]
src/test/ui/const-generics/raw-ptr-const-param.rs [new file with mode: 0644]
src/test/ui/const-generics/raw-ptr-const-param.stderr [new file with mode: 0644]
src/test/ui/const-generics/slice-const-param-mismatch.stderr
src/test/ui/const-generics/struct-with-invalid-const-param.stderr
src/test/ui/const-generics/types-mismatch-const-args.rs [new file with mode: 0644]
src/test/ui/const-generics/types-mismatch-const-args.stderr [new file with mode: 0644]
src/test/ui/consts/auxiliary/external_macro.rs [new file with mode: 0644]
src/test/ui/consts/const-err2.rs
src/test/ui/consts/const-err2.stderr
src/test/ui/consts/const-err3.rs
src/test/ui/consts/const-err3.stderr
src/test/ui/consts/const-eval/issue-50814.rs
src/test/ui/consts/const-eval/issue-50814.stderr
src/test/ui/consts/const-eval/issue-64908.rs [new file with mode: 0644]
src/test/ui/consts/const-eval/issue-65394.rs [new file with mode: 0644]
src/test/ui/consts/const-eval/issue-65394.stderr [new file with mode: 0644]
src/test/ui/consts/const-eval/write-to-uninhabited-enum-variant.rs [new file with mode: 0644]
src/test/ui/consts/const-extern-fn/const-extern-fn-call-extern-fn.rs [new file with mode: 0644]
src/test/ui/consts/const-extern-fn/const-extern-fn-call-extern-fn.stderr [new file with mode: 0644]
src/test/ui/consts/const-extern-fn/const-extern-fn-min-const-fn.rs [new file with mode: 0644]
src/test/ui/consts/const-extern-fn/const-extern-fn-min-const-fn.stderr [new file with mode: 0644]
src/test/ui/consts/const-extern-fn/const-extern-fn-requires-unsafe.rs [new file with mode: 0644]
src/test/ui/consts/const-extern-fn/const-extern-fn-requires-unsafe.stderr [new file with mode: 0644]
src/test/ui/consts/const-extern-fn/const-extern-fn.rs [new file with mode: 0644]
src/test/ui/consts/const-extern-fn/feature-gate-const_extern_fn.rs [new file with mode: 0644]
src/test/ui/consts/const-extern-fn/feature-gate-const_extern_fn.stderr [new file with mode: 0644]
src/test/ui/consts/const-external-macro-const-err.rs [new file with mode: 0644]
src/test/ui/consts/const-external-macro-const-err.stderr [new file with mode: 0644]
src/test/ui/consts/const-match-check.eval1.stderr
src/test/ui/consts/const-match-check.eval2.stderr
src/test/ui/consts/const-match-check.matchck.stderr
src/test/ui/consts/const-prop-ice.rs
src/test/ui/consts/const-prop-ice.stderr
src/test/ui/consts/issue-64506.rs [new file with mode: 0644]
src/test/ui/consts/issue-65348.rs [new file with mode: 0644]
src/test/ui/consts/too_generic_eval_ice.stderr
src/test/ui/defaulted-never-note.rs [deleted file]
src/test/ui/defaulted-never-note.stderr [deleted file]
src/test/ui/deprecation/deprecation-sanity.stderr
src/test/ui/deprecation/derive_on_deprecated.rs
src/test/ui/did_you_mean/issue-40396.rs
src/test/ui/did_you_mean/issue-40396.stderr
src/test/ui/did_you_mean/issue-41679-tilde-bitwise-negation-attempt.stderr
src/test/ui/did_you_mean/issue-43871-enum-instead-of-variant.stderr
src/test/ui/did_you_mean/issue-56028-there-is-an-enum-variant.stderr
src/test/ui/dispatch_from_dyn_zst.rs [deleted file]
src/test/ui/diverging-fallback-control-flow.rs [deleted file]
src/test/ui/dst/dst-object-from-unsized-type.stderr
src/test/ui/empty/empty-never-array.stderr
src/test/ui/enum/enum-variant-type-2.stderr
src/test/ui/error-codes/E0005.stderr
src/test/ui/error-codes/E0423.stderr
src/test/ui/error-codes/E0424.stderr
src/test/ui/error-codes/E0478.stderr
src/test/ui/error-codes/E0621-does-not-trigger-for-closures.stderr
src/test/ui/exhaustive_integer_patterns.rs
src/test/ui/exhaustive_integer_patterns.stderr
src/test/ui/explicit/explicit-self-lifetime-mismatch.stderr
src/test/ui/expr_attr_paren_order.stderr
src/test/ui/extern/issue-64655-allow-unwind-when-calling-panic-directly.rs [new file with mode: 0644]
src/test/ui/extern/issue-64655-extern-rust-must-allow-unwind.rs [new file with mode: 0644]
src/test/ui/feature-gate/issue-43106-gating-of-builtin-attrs.stderr
src/test/ui/feature-gates/bench.rs
src/test/ui/feature-gates/bench.stderr
src/test/ui/feature-gates/feature-gate-const_generics-ptr.rs [new file with mode: 0644]
src/test/ui/feature-gates/feature-gate-const_generics-ptr.stderr [new file with mode: 0644]
src/test/ui/feature-gates/feature-gate-exhaustive-patterns.rs
src/test/ui/feature-gates/feature-gate-exhaustive-patterns.stderr
src/test/ui/feature-gates/feature-gate-plugin.stderr
src/test/ui/feature-gates/feature-gate-plugin_registrar.stderr
src/test/ui/feature-gates/feature-gate-repr-simd.stderr
src/test/ui/feature-gates/feature-gate-track_caller.rs [new file with mode: 0644]
src/test/ui/feature-gates/feature-gate-track_caller.stderr [new file with mode: 0644]
src/test/ui/feature-gates/feature-gate-unwind-attributes.rs
src/test/ui/feature-gates/feature-gate-unwind-attributes.stderr
src/test/ui/for-loop-while/loop-break-value.rs
src/test/ui/generics/issue-65285-incorrect-explicit-lifetime-name-needed.rs [new file with mode: 0644]
src/test/ui/generics/issue-65285-incorrect-explicit-lifetime-name-needed.stderr [new file with mode: 0644]
src/test/ui/hr-subtype/hr-subtype.free_inv_x_vs_free_inv_y.stderr
src/test/ui/hr-subtype/hr-subtype.free_x_vs_free_y.stderr
src/test/ui/hrtb/hrtb-higher-ranker-supertraits-transitive.stderr
src/test/ui/hrtb/hrtb-higher-ranker-supertraits.stderr
src/test/ui/hygiene/globs.stderr
src/test/ui/hygiene/rustc-macro-transparency.stderr
src/test/ui/hygiene/unpretty-debug.stdout
src/test/ui/if/if-no-match-bindings.stderr
src/test/ui/impl-for-never.rs [deleted file]
src/test/ui/impl-header-lifetime-elision/dyn-trait.stderr
src/test/ui/impl-trait/hidden-lifetimes.stderr
src/test/ui/impl-trait/issue-55872-1.stderr
src/test/ui/impl-trait/must_outlive_least_region_or_bound.stderr
src/test/ui/impl-trait/region-escape-via-bound.stderr
src/test/ui/impl-trait/static-return-lifetime-infered.stderr
src/test/ui/in-band-lifetimes/mismatched_trait_impl-2.stderr
src/test/ui/in-band-lifetimes/mismatched_trait_impl.nll.stderr
src/test/ui/in-band-lifetimes/mismatched_trait_impl.stderr
src/test/ui/include-single-expr-helper-1.rs [new file with mode: 0644]
src/test/ui/include-single-expr-helper.rs [new file with mode: 0644]
src/test/ui/include-single-expr.rs [new file with mode: 0644]
src/test/ui/include-single-expr.stderr [new file with mode: 0644]
src/test/ui/inner-static-type-parameter.stderr
src/test/ui/invalid/invalid-plugin-attr.stderr
src/test/ui/issue-53912.rs [deleted file]
src/test/ui/issue-59020.rs [deleted file]
src/test/ui/issues/auxiliary/issue-57271-lib.rs [new file with mode: 0644]
src/test/ui/issues/issue-10200.stderr
src/test/ui/issues/issue-10291.stderr
src/test/ui/issues/issue-13352.rs [deleted file]
src/test/ui/issues/issue-13352.stderr [deleted file]
src/test/ui/issues/issue-13867.rs
src/test/ui/issues/issue-16683.stderr
src/test/ui/issues/issue-17001.stderr
src/test/ui/issues/issue-17405.stderr
src/test/ui/issues/issue-17546.stderr
src/test/ui/issues/issue-17718-const-naming.stderr
src/test/ui/issues/issue-17740.stderr
src/test/ui/issues/issue-17758.stderr
src/test/ui/issues/issue-17905-2.stderr
src/test/ui/issues/issue-18119.stderr
src/test/ui/issues/issue-20005.stderr
src/test/ui/issues/issue-20831-debruijn.stderr
src/test/ui/issues/issue-21449.stderr
src/test/ui/issues/issue-21475.rs
src/test/ui/issues/issue-2149.rs [deleted file]
src/test/ui/issues/issue-2149.stderr [deleted file]
src/test/ui/issues/issue-21837.stderr
src/test/ui/issues/issue-22872.stderr
src/test/ui/issues/issue-23189.stderr
src/test/ui/issues/issue-26251.rs
src/test/ui/issues/issue-26459.stderr
src/test/ui/issues/issue-27060-2.stderr
src/test/ui/issues/issue-27078.stderr
src/test/ui/issues/issue-27815.stderr
src/test/ui/issues/issue-27942.stderr
src/test/ui/issues/issue-28848.stderr
src/test/ui/issues/issue-30535.stderr
src/test/ui/issues/issue-31561.stderr
src/test/ui/issues/issue-35675.stderr
src/test/ui/issues/issue-3707.stderr
src/test/ui/issues/issue-37884.stderr
src/test/ui/issues/issue-38821.stderr
src/test/ui/issues/issue-42312.stderr
src/test/ui/issues/issue-43784-associated-type.stderr
src/test/ui/issues/issue-43784-supertrait.stderr
src/test/ui/issues/issue-44402.rs [deleted file]
src/test/ui/issues/issue-46332.stderr
src/test/ui/issues/issue-47486.rs [new file with mode: 0644]
src/test/ui/issues/issue-47486.stderr [new file with mode: 0644]
src/test/ui/issues/issue-50264-inner-deref-trait/option-as_deref.rs
src/test/ui/issues/issue-50264-inner-deref-trait/option-as_deref.stderr
src/test/ui/issues/issue-50264-inner-deref-trait/option-as_deref_mut.rs
src/test/ui/issues/issue-50264-inner-deref-trait/option-as_deref_mut.stderr
src/test/ui/issues/issue-52213.stderr
src/test/ui/issues/issue-52262.rs [new file with mode: 0644]
src/test/ui/issues/issue-52262.stderr [new file with mode: 0644]
src/test/ui/issues/issue-53912.rs [new file with mode: 0644]
src/test/ui/issues/issue-54348.rs
src/test/ui/issues/issue-54348.stderr
src/test/ui/issues/issue-55796.stderr
src/test/ui/issues/issue-57271.rs [new file with mode: 0644]
src/test/ui/issues/issue-57271.stderr [new file with mode: 0644]
src/test/ui/issues/issue-57399-self-return-impl-trait.rs [new file with mode: 0644]
src/test/ui/issues/issue-57399-self-return-impl-trait.stderr [new file with mode: 0644]
src/test/ui/issues/issue-59020.rs [new file with mode: 0644]
src/test/ui/issues/issue-64792-bad-unicode-ctor.rs [new file with mode: 0644]
src/test/ui/issues/issue-64792-bad-unicode-ctor.stderr [new file with mode: 0644]
src/test/ui/issues/issue-65284-suggest-generic-trait-bound.rs [new file with mode: 0644]
src/test/ui/issues/issue-65284-suggest-generic-trait-bound.stderr [new file with mode: 0644]
src/test/ui/kindck/kindck-impl-type-params.nll.stderr
src/test/ui/kindck/kindck-impl-type-params.stderr
src/test/ui/lexical-scopes.stderr
src/test/ui/lifetimes/lifetime-bound-will-change-warning.stderr
src/test/ui/lint/lint-exceeding-bitshifts2.rs
src/test/ui/lint/lint-exceeding-bitshifts2.stderr
src/test/ui/lint/lint-non-camel-case-types.stderr
src/test/ui/lint/lint-non-snake-case-functions.stderr
src/test/ui/lint/lint-non-uppercase-statics.stderr
src/test/ui/lint/lint-uppercase-variables.stderr
src/test/ui/lint/must_use-unit.rs
src/test/ui/lint/must_use-unit.stderr
src/test/ui/lint/not_found.stderr
src/test/ui/lint/reasons.stderr
src/test/ui/lint/redundant-semicolon/redundant-semi-proc-macro.stderr
src/test/ui/lint/use_suggestion_json.stderr
src/test/ui/lub-if.stderr
src/test/ui/lub-match.stderr
src/test/ui/macros/same-sequence-span.rs
src/test/ui/macros/same-sequence-span.stderr
src/test/ui/malformed/malformed-plugin-1.stderr
src/test/ui/malformed/malformed-plugin-2.stderr
src/test/ui/malformed/malformed-plugin-3.stderr
src/test/ui/match/match-range-fail-dominate.rs
src/test/ui/match/match-range-fail-dominate.stderr
src/test/ui/match/match-ref-mut-invariance.stderr
src/test/ui/match/match-ref-mut-let-invariance.stderr
src/test/ui/match/non-exhaustive-defined-here.stderr
src/test/ui/mod/mod_file_disambig.stderr
src/test/ui/multiple-plugin-registrars.stderr
src/test/ui/never-assign-dead-code.rs [deleted file]
src/test/ui/never-assign-dead-code.stderr [deleted file]
src/test/ui/never-assign-wrong-type.rs [deleted file]
src/test/ui/never-assign-wrong-type.stderr [deleted file]
src/test/ui/never-from-impl-is-reserved.rs [deleted file]
src/test/ui/never-from-impl-is-reserved.stderr [deleted file]
src/test/ui/never-result.rs [deleted file]
src/test/ui/never-type-rvalues.rs [deleted file]
src/test/ui/never_coercions.rs [deleted file]
src/test/ui/never_transmute_never.rs [deleted file]
src/test/ui/never_type/adjust_never.rs [new file with mode: 0644]
src/test/ui/never_type/call-fn-never-arg-wrong-type.rs [new file with mode: 0644]
src/test/ui/never_type/call-fn-never-arg-wrong-type.stderr [new file with mode: 0644]
src/test/ui/never_type/call-fn-never-arg.rs [new file with mode: 0644]
src/test/ui/never_type/cast-never.rs [new file with mode: 0644]
src/test/ui/never_type/defaulted-never-note.rs [new file with mode: 0644]
src/test/ui/never_type/defaulted-never-note.stderr [new file with mode: 0644]
src/test/ui/never_type/dispatch_from_dyn_zst.rs [new file with mode: 0644]
src/test/ui/never_type/diverging-fallback-control-flow.rs [new file with mode: 0644]
src/test/ui/never_type/impl-for-never.rs [new file with mode: 0644]
src/test/ui/never_type/issue-13352.rs [new file with mode: 0644]
src/test/ui/never_type/issue-13352.stderr [new file with mode: 0644]
src/test/ui/never_type/issue-2149.rs [new file with mode: 0644]
src/test/ui/never_type/issue-2149.stderr [new file with mode: 0644]
src/test/ui/never_type/issue-44402.rs [new file with mode: 0644]
src/test/ui/never_type/never-assign-dead-code.rs [new file with mode: 0644]
src/test/ui/never_type/never-assign-dead-code.stderr [new file with mode: 0644]
src/test/ui/never_type/never-assign-wrong-type.rs [new file with mode: 0644]
src/test/ui/never_type/never-assign-wrong-type.stderr [new file with mode: 0644]
src/test/ui/never_type/never-associated-type.rs [new file with mode: 0644]
src/test/ui/never_type/never-from-impl-is-reserved.rs [new file with mode: 0644]
src/test/ui/never_type/never-from-impl-is-reserved.stderr [new file with mode: 0644]
src/test/ui/never_type/never-result.rs [new file with mode: 0644]
src/test/ui/never_type/never-type-arg.rs [new file with mode: 0644]
src/test/ui/never_type/never-type-rvalues.rs [new file with mode: 0644]
src/test/ui/never_type/never_coercions.rs [new file with mode: 0644]
src/test/ui/never_type/never_transmute_never.rs [new file with mode: 0644]
src/test/ui/never_type/panic-uninitialized-zeroed.rs [new file with mode: 0644]
src/test/ui/never_type/try_from.rs [new file with mode: 0644]
src/test/ui/nll/issue-50716.stderr
src/test/ui/nll/issue-52742.stderr
src/test/ui/nll/issue-55394.stderr
src/test/ui/nll/issue-55401.stderr
src/test/ui/nll/issue-63154-normalize.rs [new file with mode: 0644]
src/test/ui/nll/normalization-bounds-error.stderr
src/test/ui/nll/trait-associated-constant.stderr
src/test/ui/nll/type-alias-free-regions.stderr
src/test/ui/nll/user-annotations/constant-in-expr-inherent-1.stderr
src/test/ui/nll/user-annotations/constant-in-expr-normalize.stderr
src/test/ui/nll/user-annotations/constant-in-expr-trait-item-1.stderr
src/test/ui/nll/user-annotations/constant-in-expr-trait-item-2.stderr
src/test/ui/nll/user-annotations/constant-in-expr-trait-item-3.stderr
src/test/ui/no-patterns-in-args-macro.stderr
src/test/ui/no-patterns-in-args.stderr
src/test/ui/not-panic/not-panic-safe.stderr
src/test/ui/object-lifetime/object-lifetime-default-elision.stderr
src/test/ui/object-lifetime/object-lifetime-default-from-rptr-box-error.stderr
src/test/ui/object-lifetime/object-lifetime-default-from-rptr-struct-error.stderr
src/test/ui/object-lifetime/object-lifetime-default-mybox.stderr
src/test/ui/panic-uninitialized-zeroed.rs [deleted file]
src/test/ui/panics/abort-on-panic.rs [new file with mode: 0644]
src/test/ui/parser/doc-inside-trait-item.stderr
src/test/ui/parser/intersection-patterns.rs [new file with mode: 0644]
src/test/ui/parser/intersection-patterns.stderr [new file with mode: 0644]
src/test/ui/parser/issue-33413.rs
src/test/ui/parser/issue-33413.stderr
src/test/ui/parser/issue-65122-mac-invoc-in-mut-patterns.rs [new file with mode: 0644]
src/test/ui/parser/issue-65122-mac-invoc-in-mut-patterns.stderr [new file with mode: 0644]
src/test/ui/parser/mismatched-delim-brace-empty-block.rs [new file with mode: 0644]
src/test/ui/parser/mismatched-delim-brace-empty-block.stderr [new file with mode: 0644]
src/test/ui/parser/no-const-fn-in-extern-block.rs [new file with mode: 0644]
src/test/ui/parser/no-const-fn-in-extern-block.stderr [new file with mode: 0644]
src/test/ui/parser/require-parens-for-chained-comparison.rs
src/test/ui/parser/require-parens-for-chained-comparison.stderr
src/test/ui/partialeq_help.stderr
src/test/ui/phantom-oibit.stderr
src/test/ui/precise_pointer_size_matching.rs
src/test/ui/privacy/privacy-ns1.stderr
src/test/ui/privacy/privacy-ns2.stderr
src/test/ui/privacy/privacy5.rs
src/test/ui/privacy/privacy5.stderr
src/test/ui/proc-macro/auxiliary/gen-macro-rules-hygiene.rs [new file with mode: 0644]
src/test/ui/proc-macro/auxiliary/more-gates.rs [deleted file]
src/test/ui/proc-macro/disappearing-resolution.rs [new file with mode: 0644]
src/test/ui/proc-macro/disappearing-resolution.stderr [new file with mode: 0644]
src/test/ui/proc-macro/gen-macro-rules-hygiene.rs [new file with mode: 0644]
src/test/ui/proc-macro/gen-macro-rules-hygiene.stderr [new file with mode: 0644]
src/test/ui/proc-macro/more-gates.rs [deleted file]
src/test/ui/proc-macro/more-gates.stderr [deleted file]
src/test/ui/reachable/auxiliary/unreachable_variant.rs [new file with mode: 0644]
src/test/ui/reachable/unreachable-arm.rs [new file with mode: 0644]
src/test/ui/reachable/unreachable-arm.stderr [new file with mode: 0644]
src/test/ui/reachable/unreachable-code.rs [new file with mode: 0644]
src/test/ui/reachable/unreachable-code.stderr [new file with mode: 0644]
src/test/ui/reachable/unreachable-in-call.rs [new file with mode: 0644]
src/test/ui/reachable/unreachable-in-call.stderr [new file with mode: 0644]
src/test/ui/reachable/unreachable-loop-patterns.rs [new file with mode: 0644]
src/test/ui/reachable/unreachable-loop-patterns.stderr [new file with mode: 0644]
src/test/ui/reachable/unreachable-try-pattern.rs [new file with mode: 0644]
src/test/ui/reachable/unreachable-try-pattern.stderr [new file with mode: 0644]
src/test/ui/reachable/unreachable-variant.rs [new file with mode: 0644]
src/test/ui/reachable/unreachable-variant.stderr [new file with mode: 0644]
src/test/ui/reachable/unwarned-match-on-never.rs [new file with mode: 0644]
src/test/ui/reachable/unwarned-match-on-never.stderr [new file with mode: 0644]
src/test/ui/recursion/recursive-types-are-not-uninhabited.stderr
src/test/ui/refutable-pattern-errors.stderr
src/test/ui/regions/region-bounds-on-objects-and-type-parameters.stderr
src/test/ui/regions/region-invariant-static-error-reporting.rs
src/test/ui/regions/region-invariant-static-error-reporting.stderr
src/test/ui/regions/region-object-lifetime-2.stderr
src/test/ui/regions/region-object-lifetime-4.stderr
src/test/ui/regions/region-object-lifetime-in-coercion.stderr
src/test/ui/regions/regions-addr-of-self.stderr
src/test/ui/regions/regions-addr-of-upvar-self.stderr
src/test/ui/regions/regions-assoc-type-in-supertrait-outlives-container.migrate.stderr
src/test/ui/regions/regions-assoc-type-region-bound-in-trait-not-met.stderr
src/test/ui/regions/regions-assoc-type-static-bound-in-trait-not-met.stderr
src/test/ui/regions/regions-bounds.stderr
src/test/ui/regions/regions-close-object-into-object-2.stderr
src/test/ui/regions/regions-close-object-into-object-4.stderr
src/test/ui/regions/regions-close-over-type-parameter-multiple.stderr
src/test/ui/regions/regions-creating-enums4.stderr
src/test/ui/regions/regions-early-bound-error-method.stderr
src/test/ui/regions/regions-early-bound-error.stderr
src/test/ui/regions/regions-escape-method.stderr
src/test/ui/regions/regions-escape-via-trait-or-not.stderr
src/test/ui/regions/regions-free-region-ordering-callee-4.stderr
src/test/ui/regions/regions-free-region-ordering-incorrect.stderr
src/test/ui/regions/regions-implied-bounds-projection-gap-hr-1.stderr
src/test/ui/regions/regions-infer-call-3.stderr
src/test/ui/regions/regions-infer-invariance-due-to-decl.stderr
src/test/ui/regions/regions-infer-invariance-due-to-mutability-3.stderr
src/test/ui/regions/regions-infer-invariance-due-to-mutability-4.stderr
src/test/ui/regions/regions-infer-not-param.stderr
src/test/ui/regions/regions-infer-paramd-indirect.stderr
src/test/ui/regions/regions-nested-fns.stderr
src/test/ui/regions/regions-normalize-in-where-clause-list.stderr
src/test/ui/regions/regions-outlives-projection-container-hrtb.migrate.stderr
src/test/ui/regions/regions-outlives-projection-container-wc.migrate.stderr
src/test/ui/regions/regions-outlives-projection-container.stderr
src/test/ui/regions/regions-ret-borrowed-1.stderr
src/test/ui/regions/regions-ret-borrowed.stderr
src/test/ui/regions/regions-return-ref-to-upvar-issue-17403.stderr
src/test/ui/regions/regions-static-bound.migrate.stderr
src/test/ui/regions/regions-trait-1.stderr
src/test/ui/regions/regions-trait-object-subtyping.stderr
src/test/ui/regions/regions-variance-invariant-use-covariant.stderr
src/test/ui/regions/regions-wf-trait-object.stderr
src/test/ui/reject-specialized-drops-8142.stderr
src/test/ui/resolve/issue-16058.stderr
src/test/ui/resolve/issue-21221-1.stderr
src/test/ui/resolve/issue-2356.stderr
src/test/ui/resolve/issue-65025-extern-static-parent-generics.rs [new file with mode: 0644]
src/test/ui/resolve/issue-65025-extern-static-parent-generics.stderr [new file with mode: 0644]
src/test/ui/resolve/issue-65035-static-with-parent-generics.rs [new file with mode: 0644]
src/test/ui/resolve/issue-65035-static-with-parent-generics.stderr [new file with mode: 0644]
src/test/ui/resolve/levenshtein.stderr
src/test/ui/resolve/privacy-struct-ctor.rs
src/test/ui/resolve/privacy-struct-ctor.stderr
src/test/ui/rfc-2008-non-exhaustive/improper_ctypes/auxiliary/types.rs [new file with mode: 0644]
src/test/ui/rfc-2008-non-exhaustive/improper_ctypes/extern_crate_improper.rs [new file with mode: 0644]
src/test/ui/rfc-2008-non-exhaustive/improper_ctypes/extern_crate_improper.stderr [new file with mode: 0644]
src/test/ui/rfc-2008-non-exhaustive/improper_ctypes/same_crate_proper.rs [new file with mode: 0644]
src/test/ui/rfc-2008-non-exhaustive/struct.rs
src/test/ui/rfc-2008-non-exhaustive/struct.stderr
src/test/ui/rfc-2008-non-exhaustive/variant.stderr
src/test/ui/rfc-2091-track-caller/error-odd-syntax.rs [new file with mode: 0644]
src/test/ui/rfc-2091-track-caller/error-odd-syntax.stderr [new file with mode: 0644]
src/test/ui/rfc-2091-track-caller/error-with-invalid-abi.rs [new file with mode: 0644]
src/test/ui/rfc-2091-track-caller/error-with-invalid-abi.stderr [new file with mode: 0644]
src/test/ui/rfc-2091-track-caller/error-with-naked.rs [new file with mode: 0644]
src/test/ui/rfc-2091-track-caller/error-with-naked.stderr [new file with mode: 0644]
src/test/ui/rfc-2091-track-caller/error-with-trait-decl.rs [new file with mode: 0644]
src/test/ui/rfc-2091-track-caller/error-with-trait-decl.stderr [new file with mode: 0644]
src/test/ui/rfc-2091-track-caller/error-with-trait-default-impl.rs [new file with mode: 0644]
src/test/ui/rfc-2091-track-caller/error-with-trait-default-impl.stderr [new file with mode: 0644]
src/test/ui/rfc-2091-track-caller/error-with-trait-fn-impl.rs [new file with mode: 0644]
src/test/ui/rfc-2091-track-caller/error-with-trait-fn-impl.stderr [new file with mode: 0644]
src/test/ui/rfc-2091-track-caller/only-for-fns.rs [new file with mode: 0644]
src/test/ui/rfc-2091-track-caller/only-for-fns.stderr [new file with mode: 0644]
src/test/ui/rfc-2091-track-caller/pass.rs [new file with mode: 0644]
src/test/ui/rfc-2091-track-caller/pass.stderr [new file with mode: 0644]
src/test/ui/rfc-2093-infer-outlives/regions-outlives-nominal-type-region-rev.stderr
src/test/ui/rfc-2093-infer-outlives/regions-outlives-nominal-type-region.stderr
src/test/ui/rfc-2093-infer-outlives/regions-outlives-nominal-type-type-rev.stderr
src/test/ui/rfc-2093-infer-outlives/regions-outlives-nominal-type-type.stderr
src/test/ui/rfc-2093-infer-outlives/regions-struct-not-wf.stderr
src/test/ui/rfc-2497-if-let-chains/disallowed-positions.stderr
src/test/ui/rfc-2627-raw-dylib/feature-gate-raw-dylib-2.rs [new file with mode: 0644]
src/test/ui/rfc-2627-raw-dylib/feature-gate-raw-dylib-2.stderr [new file with mode: 0644]
src/test/ui/rfc-2627-raw-dylib/feature-gate-raw-dylib.rs [new file with mode: 0644]
src/test/ui/rfc-2627-raw-dylib/feature-gate-raw-dylib.stderr [new file with mode: 0644]
src/test/ui/rfc-2627-raw-dylib/link-ordinal-and-name.rs [new file with mode: 0644]
src/test/ui/rfc-2627-raw-dylib/link-ordinal-and-name.stderr [new file with mode: 0644]
src/test/ui/rfc-2627-raw-dylib/link-ordinal-invalid-format.rs [new file with mode: 0644]
src/test/ui/rfc-2627-raw-dylib/link-ordinal-invalid-format.stderr [new file with mode: 0644]
src/test/ui/rfc-2627-raw-dylib/link-ordinal-too-large.rs [new file with mode: 0644]
src/test/ui/rfc-2627-raw-dylib/link-ordinal-too-large.stderr [new file with mode: 0644]
src/test/ui/rfcs/rfc-2005-default-binding-mode/range.rs
src/test/ui/rust-2018/issue-52202-use-suggestions.stderr
src/test/ui/save-analysis/issue-64659.rs [new file with mode: 0644]
src/test/ui/save-analysis/issue-65411.rs [new file with mode: 0644]
src/test/ui/self/arbitrary_self_types_pin_lifetime_impl_trait-async.stderr
src/test/ui/specialization/auxiliary/cross_crates_defaults.rs
src/test/ui/specialization/defaultimpl/specialization-wfcheck.stderr
src/test/ui/specialization/issue-36804.rs
src/test/ui/specialization/non-defaulted-item-fail.rs [new file with mode: 0644]
src/test/ui/specialization/non-defaulted-item-fail.stderr [new file with mode: 0644]
src/test/ui/specialization/specialization-default-methods.rs
src/test/ui/static/static-closures.stderr
src/test/ui/static/static-lifetime.stderr
src/test/ui/suggestions/constrain-trait.fixed [new file with mode: 0644]
src/test/ui/suggestions/constrain-trait.rs [new file with mode: 0644]
src/test/ui/suggestions/constrain-trait.stderr [new file with mode: 0644]
src/test/ui/suggestions/imm-ref-trait-object-literal.rs [new file with mode: 0644]
src/test/ui/suggestions/imm-ref-trait-object-literal.stderr [new file with mode: 0644]
src/test/ui/suggestions/imm-ref-trait-object.rs [new file with mode: 0644]
src/test/ui/suggestions/imm-ref-trait-object.stderr [new file with mode: 0644]
src/test/ui/suggestions/into-str.stderr
src/test/ui/suggestions/missing-assoc-type-bound-restriction.rs [new file with mode: 0644]
src/test/ui/suggestions/missing-assoc-type-bound-restriction.stderr [new file with mode: 0644]
src/test/ui/suggestions/mut-borrow-needed-by-trait.rs [new file with mode: 0644]
src/test/ui/suggestions/mut-borrow-needed-by-trait.stderr [new file with mode: 0644]
src/test/ui/suggestions/remove-as_str.rs [new file with mode: 0644]
src/test/ui/suggestions/remove-as_str.stderr [new file with mode: 0644]
src/test/ui/suggestions/restrict-type-argument.rs [new file with mode: 0644]
src/test/ui/suggestions/restrict-type-argument.stderr [new file with mode: 0644]
src/test/ui/suggestions/suggest-assoc-fn-call-with-turbofish-through-deref.rs [new file with mode: 0644]
src/test/ui/suggestions/suggest-assoc-fn-call-with-turbofish-through-deref.stderr [new file with mode: 0644]
src/test/ui/suggestions/suggest-assoc-fn-call-with-turbofish.rs [new file with mode: 0644]
src/test/ui/suggestions/suggest-assoc-fn-call-with-turbofish.stderr [new file with mode: 0644]
src/test/ui/tool-attributes/tool-attributes-misplaced-1.stderr
src/test/ui/traits/trait-alias/trait-alias-wf.stderr
src/test/ui/traits/trait-as-struct-constructor.stderr
src/test/ui/traits/trait-bounds-on-structs-and-enums.stderr
src/test/ui/traits/trait-impl-for-module.stderr
src/test/ui/traits/trait-impl-of-supertrait-has-wrong-lifetime-parameters.stderr
src/test/ui/traits/trait-matching-lifetimes.stderr
src/test/ui/traits/trait-suggest-where-clause.stderr
src/test/ui/traits/traits-inductive-overflow-supertrait-oibit.stderr
src/test/ui/traits/traits-repeated-supertrait-ambig.stderr
src/test/ui/try-block/try-block-in-edition2015.stderr
src/test/ui/try_from.rs [deleted file]
src/test/ui/type-alias-impl-trait/generic_underconstrained.stderr
src/test/ui/type-alias-impl-trait/generic_underconstrained2.stderr
src/test/ui/type/type-ascription-with-fn-call.stderr
src/test/ui/type/type-check-defaults.stderr
src/test/ui/type/type-params-in-different-spaces-2.stderr
src/test/ui/typeck/typeck-auto-trait-no-supertraits-2.stderr
src/test/ui/typeck/typeck-auto-trait-no-supertraits.stderr
src/test/ui/typeck/typeck-default-trait-impl-assoc-type.fixed [new file with mode: 0644]
src/test/ui/typeck/typeck-default-trait-impl-assoc-type.rs
src/test/ui/typeck/typeck-default-trait-impl-assoc-type.stderr
src/test/ui/typeck/typeck-default-trait-impl-send-param.stderr
src/test/ui/ufcs/ufcs-explicit-self-bad.stderr
src/test/ui/ufcs/ufcs-partially-resolved.stderr
src/test/ui/underscore-imports/hygiene-2.rs [new file with mode: 0644]
src/test/ui/underscore-imports/hygiene.rs [new file with mode: 0644]
src/test/ui/underscore-imports/hygiene.stderr [new file with mode: 0644]
src/test/ui/underscore-imports/macro-expanded.rs [new file with mode: 0644]
src/test/ui/underscore-lifetime/dyn-trait-underscore.stderr
src/test/ui/uninhabited/always-inhabited-union-ref.rs [new file with mode: 0644]
src/test/ui/uninhabited/always-inhabited-union-ref.stderr [new file with mode: 0644]
src/test/ui/uninhabited/uninhabited-irrefutable.stderr
src/test/ui/uninhabited/uninhabited-matches-feature-gated.stderr
src/test/ui/union/union-sized-field.stderr
src/test/ui/unreachable/auxiliary/unreachable_variant.rs [deleted file]
src/test/ui/unreachable/unreachable-arm.rs [deleted file]
src/test/ui/unreachable/unreachable-arm.stderr [deleted file]
src/test/ui/unreachable/unreachable-code.rs [deleted file]
src/test/ui/unreachable/unreachable-code.stderr [deleted file]
src/test/ui/unreachable/unreachable-in-call.rs [deleted file]
src/test/ui/unreachable/unreachable-in-call.stderr [deleted file]
src/test/ui/unreachable/unreachable-loop-patterns.rs [deleted file]
src/test/ui/unreachable/unreachable-loop-patterns.stderr [deleted file]
src/test/ui/unreachable/unreachable-try-pattern.rs [deleted file]
src/test/ui/unreachable/unreachable-try-pattern.stderr [deleted file]
src/test/ui/unreachable/unreachable-variant.rs [deleted file]
src/test/ui/unreachable/unreachable-variant.stderr [deleted file]
src/test/ui/unreachable/unwarned-match-on-never.rs [deleted file]
src/test/ui/unreachable/unwarned-match-on-never.stderr [deleted file]
src/test/ui/unsized/unsized-bare-typaram.stderr
src/test/ui/unsized/unsized-enum.stderr
src/test/ui/unsized/unsized-enum2.stderr
src/test/ui/unsized/unsized-inherent-impl-self-type.stderr
src/test/ui/unsized/unsized-struct.stderr
src/test/ui/unsized/unsized-trait-impl-self-type.stderr
src/test/ui/unsized/unsized-trait-impl-trait-arg.stderr
src/test/ui/unsized3.stderr
src/test/ui/unsized5.stderr
src/test/ui/unsized6.stderr
src/test/ui/unsized7.stderr
src/test/ui/use/issue-18986.stderr
src/test/ui/variance/variance-btree-invariant-types.stderr
src/test/ui/variance/variance-contravariant-arg-object.stderr
src/test/ui/variance/variance-contravariant-arg-trait-match.stderr
src/test/ui/variance/variance-contravariant-self-trait-match.stderr
src/test/ui/variance/variance-covariant-arg-object.stderr
src/test/ui/variance/variance-covariant-arg-trait-match.stderr
src/test/ui/variance/variance-covariant-self-trait-match.stderr
src/test/ui/variance/variance-invariant-arg-object.stderr
src/test/ui/variance/variance-invariant-arg-trait-match.stderr
src/test/ui/variance/variance-invariant-self-trait-match.stderr
src/test/ui/variance/variance-use-contravariant-struct-1.stderr
src/test/ui/variance/variance-use-covariant-struct-1.stderr
src/test/ui/variance/variance-use-invariant-struct-1.stderr
src/test/ui/variants/variant-used-as-type.stderr
src/test/ui/wf/issue-48638.rs [new file with mode: 0644]
src/test/ui/wf/wf-enum-bound.stderr
src/test/ui/wf/wf-enum-fields-struct-variant.stderr
src/test/ui/wf/wf-enum-fields.stderr
src/test/ui/wf/wf-fn-where-clause.stderr
src/test/ui/wf/wf-impl-associated-type-trait.stderr
src/test/ui/wf/wf-in-fn-arg.stderr
src/test/ui/wf/wf-in-fn-ret.stderr
src/test/ui/wf/wf-in-fn-type-arg.stderr
src/test/ui/wf/wf-in-fn-type-ret.stderr
src/test/ui/wf/wf-in-fn-where-clause.stderr
src/test/ui/wf/wf-in-obj-type-trait.stderr
src/test/ui/wf/wf-inherent-impl-method-where-clause.stderr
src/test/ui/wf/wf-inherent-impl-where-clause.stderr
src/test/ui/wf/wf-static-method.stderr
src/test/ui/wf/wf-struct-bound.stderr
src/test/ui/wf/wf-struct-field.stderr
src/test/ui/wf/wf-trait-associated-type-bound.stderr
src/test/ui/wf/wf-trait-associated-type-trait.stderr
src/test/ui/wf/wf-trait-bound.stderr
src/test/ui/wf/wf-trait-default-fn-arg.stderr
src/test/ui/wf/wf-trait-default-fn-ret.stderr
src/test/ui/wf/wf-trait-default-fn-where-clause.stderr
src/test/ui/wf/wf-trait-fn-arg.stderr
src/test/ui/wf/wf-trait-fn-ret.stderr
src/test/ui/wf/wf-trait-fn-where-clause.stderr
src/test/ui/wf/wf-trait-superbound.stderr
src/test/ui/where-clauses/where-clause-constraints-are-local-for-inherent-impl.stderr
src/test/ui/where-clauses/where-clause-constraints-are-local-for-trait-impl.stderr
src/tools/cargo
src/tools/clippy
src/tools/error_index_generator/build.rs
src/tools/miri
src/tools/publish_toolstate.py
src/tools/rls
src/tools/rustfmt
src/tools/tidy/src/deps.rs
src/tools/tidy/src/error_codes_check.rs [new file with mode: 0644]
src/tools/tidy/src/lib.rs
src/tools/tidy/src/main.rs
triagebot.toml

diff --git a/CODE_OF_CONDUCT.md b/CODE_OF_CONDUCT.md
index 58b829e31d6777845ec46ce0cb49d8dfb5edaa26..e3708bc485399fd42b32c6a1c24491771afa1a04 100644 (file)
@@ -1,40 +1,3 @@
 # The Rust Code of Conduct
 
-A version of this document [can be found online](https://www.rust-lang.org/conduct.html).
-
-## Conduct
-
-**Contact**: [rust-mods@rust-lang.org](mailto:rust-mods@rust-lang.org)
-
-* We are committed to providing a friendly, safe and welcoming environment for all, regardless of level of experience, gender identity and expression, sexual orientation, disability, personal appearance, body size, race, ethnicity, age, religion, nationality, or other similar characteristic.
-* On IRC, please avoid using overtly sexual nicknames or other nicknames that might detract from a friendly, safe and welcoming environment for all.
-* Please be kind and courteous. There's no need to be mean or rude.
-* Respect that people have differences of opinion and that every design or implementation choice carries a trade-off and numerous costs. There is seldom a right answer.
-* Please keep unstructured critique to a minimum. If you have solid ideas you want to experiment with, make a fork and see how it works.
-* We will exclude you from interaction if you insult, demean or harass anyone. That is not welcome behavior. We interpret the term "harassment" as including the definition in the <a href="http://citizencodeofconduct.org/">Citizen Code of Conduct</a>; if you have any lack of clarity about what might be included in that concept, please read their definition. In particular, we don't tolerate behavior that excludes people in socially marginalized groups.
-* Private harassment is also unacceptable. No matter who you are, if you feel you have been or are being harassed or made uncomfortable by a community member, please contact one of the channel ops or any of the [Rust moderation team][mod_team] immediately. Whether you're a regular contributor or a newcomer, we care about making this community a safe place for you and we've got your back.
-* Likewise any spamming, trolling, flaming, baiting or other attention-stealing behavior is not welcome.
-
-## Moderation
-
-
-These are the policies for upholding our community's standards of conduct. If you feel that a thread needs moderation, please contact the [Rust moderation team][mod_team].
-
-1. Remarks that violate the Rust standards of conduct, including hateful, hurtful, oppressive, or exclusionary remarks, are not allowed. (Cursing is allowed, but never targeting another user, and never in a hateful manner.)
-2. Remarks that moderators find inappropriate, whether listed in the code of conduct or not, are also not allowed.
-3. Moderators will first respond to such remarks with a warning.
-4. If the warning is unheeded, the user will be "kicked," i.e., kicked out of the communication channel to cool off.
-5. If the user comes back and continues to make trouble, they will be banned, i.e., indefinitely excluded.
-6. Moderators may choose at their discretion to un-ban the user if it was a first offense and they offer the offended party a genuine apology.
-7. If a moderator bans someone and you think it was unjustified, please take it up with that moderator, or with a different moderator, **in private**. Complaints about bans in-channel are not allowed.
-8. Moderators are held to a higher standard than other community members. If a moderator creates an inappropriate situation, they should expect less leeway than others.
-
-In the Rust community we strive to go the extra step to look out for each other. Don't just aim to be technically unimpeachable, try to be your best self. In particular, avoid flirting with offensive or sensitive issues, particularly if they're off-topic; this all too often leads to unnecessary fights, hurt feelings, and damaged trust; worse, it can drive people away from the community entirely.
-
-And if someone takes issue with something you said or did, resist the urge to be defensive. Just stop doing what it was they complained about and apologize. Even if you feel you were misinterpreted or unfairly accused, chances are good there was something you could've communicated better — remember that it's your responsibility to make your fellow Rustaceans comfortable. Everyone wants to get along and we are all here first and foremost because we want to talk about cool technology. You will find that people will be eager to assume good intent and forgive as long as you earn their trust.
-
-The enforcement policies listed above apply to all official Rust venues; including all communication channels (Rust Discord server, Rust Zulip server); GitHub repositories under rust-lang, rust-lang-nursery, and rust-lang-deprecated; and all forums under rust-lang.org (users.rust-lang.org, internals.rust-lang.org). For other projects adopting the Rust Code of Conduct, please contact the maintainers of those projects for enforcement. If you wish to use this code of conduct for your own project, consider explicitly mentioning your moderation policy or making a copy with your own moderation policy so as to avoid confusion.
-
-*Adapted from the [Node.js Policy on Trolling](https://blog.izs.me/2012/08/policy-on-trolling) as well as the [Contributor Covenant v1.3.0](https://www.contributor-covenant.org/version/1/3/0/).*
-
-[mod_team]: https://www.rust-lang.org/team.html#Moderation-team
+The Code of Conduct for this repository [can be found online](https://www.rust-lang.org/conduct.html).
diff --git a/Cargo.lock b/Cargo.lock
index a1506619dff2f87f47dba2ece3d5484992790dd5..f05550abdc344321084554c4f12fcce97d11709e 100644 (file)
@@ -107,6 +107,12 @@ dependencies = [
  "winapi 0.3.6",
 ]
 
+[[package]]
+name = "autocfg"
+version = "0.1.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b671c8fb71b457dd4ae18c4ba1e59aa81793daacc361d82fcd410cef0d491875"
+
 [[package]]
 name = "backtrace"
 version = "0.3.37"
@@ -550,9 +556,9 @@ dependencies = [
 
 [[package]]
 name = "compiletest_rs"
-version = "0.3.23"
+version = "0.3.24"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "eb783fe7afb90ec3d3e49ccaf9196d29ab63c6ed61d4b0695839daa580ae3a3d"
+checksum = "676a74b493d50ac33cacd83fd536597e6b52c0b46b9856f7b9c809d82fef4ac0"
 dependencies = [
  "diff",
  "filetime",
@@ -659,6 +665,16 @@ dependencies = [
  "crossbeam-utils 0.6.5",
 ]
 
+[[package]]
+name = "crossbeam-deque"
+version = "0.7.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b18cd2e169ad86297e6bc0ad9aa679aee9daa4f19e8163860faf7c164e4f5a71"
+dependencies = [
+ "crossbeam-epoch 0.7.2",
+ "crossbeam-utils 0.6.5",
+]
+
 [[package]]
 name = "crossbeam-epoch"
 version = "0.3.1"
@@ -993,7 +1009,7 @@ dependencies = [
  "proc-macro2 0.4.30",
  "quote 0.6.12",
  "syn 0.15.35",
- "synstructure",
+ "synstructure 0.10.2",
 ]
 
 [[package]]
@@ -1259,7 +1275,7 @@ version = "2.0.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "df044dd42cdb7e32f28557b661406fc0f2494be75199779998810dbc35030e0d"
 dependencies = [
- "hashbrown",
+ "hashbrown 0.5.0",
  "lazy_static 1.3.0",
  "log",
  "pest",
@@ -1276,10 +1292,19 @@ version = "0.5.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "e1de41fb8dba9714efd92241565cdff73f78508c95697dd56787d3cba27e2353"
 dependencies = [
+ "serde",
+]
+
+[[package]]
+name = "hashbrown"
+version = "0.6.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6587d09be37fb98a11cb08b9000a3f592451c1b1b613ca69d949160e313a430a"
+dependencies = [
+ "autocfg",
  "compiler_builtins",
  "rustc-std-workspace-alloc",
  "rustc-std-workspace-core",
- "serde",
 ]
 
 [[package]]
@@ -1316,9 +1341,9 @@ checksum = "023b39be39e3a2da62a94feb433e91e8bcd37676fbc8bea371daf52b7a769a3e"
 
 [[package]]
 name = "home"
-version = "0.5.0"
+version = "0.5.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c07c315e106bd6f83f026a20ddaeef2706782e490db1dcdd37caad38a0e895b3"
+checksum = "a3753954f7bd71f0e671afb8b5a992d1724cf43b7f95a563cd4a0bde94659ca8"
 dependencies = [
  "scopeguard 1.0.0",
  "winapi 0.3.6",
@@ -2643,9 +2668,9 @@ dependencies = [
 
 [[package]]
 name = "racer"
-version = "2.1.27"
+version = "2.1.28"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "dde22b84ab75220015cbd91240222402bf885cbe3a5dc856475771abb82533ae"
+checksum = "acc70369054bad4ad0c16a3f45cd73e0695361a3af35c7b465e619ac2674f064"
 dependencies = [
  "bitflags",
  "clap",
@@ -2802,22 +2827,22 @@ dependencies = [
 
 [[package]]
 name = "rayon"
-version = "1.1.0"
+version = "1.2.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a4b0186e22767d5b9738a05eab7c6ac90b15db17e5b5f9bd87976dd7d89a10a4"
+checksum = "83a27732a533a1be0a0035a111fe76db89ad312f6f0347004c220c57f209a123"
 dependencies = [
- "crossbeam-deque 0.6.3",
+ "crossbeam-deque 0.7.1",
  "either",
  "rayon-core",
 ]
 
 [[package]]
 name = "rayon-core"
-version = "1.5.0"
+version = "1.6.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ebbe0df8435ac0c397d467b6cad6d25543d06e8a019ef3f6af3c384597515bd2"
+checksum = "98dcf634205083b17d0861252431eb2acbfb698ab7478a2d20de07954f47ec7b"
 dependencies = [
- "crossbeam-deque 0.6.3",
+ "crossbeam-deque 0.7.1",
  "crossbeam-queue",
  "crossbeam-utils 0.6.5",
  "lazy_static 1.3.0",
@@ -3085,8 +3110,8 @@ dependencies = [
  "num_cpus",
  "parking_lot 0.9.0",
  "polonius-engine",
- "rustc-rayon",
- "rustc-rayon-core",
+ "rustc-rayon 0.3.0",
+ "rustc-rayon-core 0.3.0",
  "rustc_apfloat",
  "rustc_data_structures",
  "rustc_errors",
@@ -3098,14 +3123,15 @@ dependencies = [
  "serialize",
  "smallvec",
  "syntax",
+ "syntax_expand",
  "syntax_pos",
 ]
 
 [[package]]
 name = "rustc-ap-arena"
-version = "583.0.0"
+version = "606.0.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "f59b76d334bd533f3fdc5c651c27678c5e80fac67c6f7da22ba21a58878c55f5"
+checksum = "a623fd4805842e9bd0bb6e6dace63efede0ee22de4522a0b03b7c3d15a22f009"
 dependencies = [
  "rustc-ap-rustc_data_structures",
  "smallvec",
@@ -3113,15 +3139,15 @@ dependencies = [
 
 [[package]]
 name = "rustc-ap-graphviz"
-version = "583.0.0"
+version = "606.0.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "3e632ef08ca17458acfd46d2ead3d541a1c249586cd5329f5fe333dacfab6142"
+checksum = "ee549ade784b444ef10c0240c3487ed785aa65d711071f7984246b15329a17b6"
 
 [[package]]
 name = "rustc-ap-rustc_data_structures"
-version = "583.0.0"
+version = "606.0.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e89e2c7be68185418f3cd56af3df8b29007a59a1cebefa63612d055f9bcb1a36"
+checksum = "ca545744a5a9b42e3d0410d6290d40de96dd567253fe77f310c1de4afd213dd4"
 dependencies = [
  "cfg-if",
  "crossbeam-utils 0.6.5",
@@ -3130,21 +3156,22 @@ dependencies = [
  "jobserver",
  "lazy_static 1.3.0",
  "log",
- "parking_lot 0.7.1",
+ "parking_lot 0.9.0",
  "rustc-ap-graphviz",
+ "rustc-ap-rustc_index",
  "rustc-ap-serialize",
  "rustc-hash",
- "rustc-rayon",
- "rustc-rayon-core",
+ "rustc-rayon 0.2.0",
+ "rustc-rayon-core 0.2.0",
  "smallvec",
  "stable_deref_trait",
 ]
 
 [[package]]
 name = "rustc-ap-rustc_errors"
-version = "583.0.0"
+version = "606.0.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "1e47cb380abeb72b01e42b2342d592f7eeea7d536c2f1f0d0e550dc509e46333"
+checksum = "a6967a41ed38ef4bce0f559fe9a4801d8ba12ac032f40a12a55e72f79d52c9bb"
 dependencies = [
  "annotate-snippets",
  "atty",
@@ -3157,46 +3184,57 @@ dependencies = [
  "unicode-width",
 ]
 
+[[package]]
+name = "rustc-ap-rustc_index"
+version = "606.0.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "457a5c204ae2fdaa5bdb5b196e58ca59896870d80445fe423063c9453496e3ea"
+dependencies = [
+ "rustc-ap-serialize",
+ "smallvec",
+]
+
 [[package]]
 name = "rustc-ap-rustc_lexer"
-version = "583.0.0"
+version = "606.0.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "494cfaf67f49217d67d0774eeecbba61ac89acf478db97ef11f113ed8a959305"
+checksum = "ed0c064676f8a08e42a36b0d4e4a102465fb0f4b75e11436cb7f66d2c3fa7139"
 dependencies = [
  "unicode-xid 0.2.0",
 ]
 
 [[package]]
 name = "rustc-ap-rustc_macros"
-version = "583.0.0"
+version = "606.0.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e2e5d36becc59b4497f9cbd3ae0610081de0207a1d0e95c066369167b14f486f"
+checksum = "b2d77e46159c5288c585decbcdc9d742889c65e307c31e104c7a36d63fe1f5d0"
 dependencies = [
  "itertools 0.8.0",
  "proc-macro2 0.4.30",
  "quote 0.6.12",
  "syn 0.15.35",
- "synstructure",
+ "synstructure 0.10.2",
 ]
 
 [[package]]
 name = "rustc-ap-rustc_target"
-version = "583.0.0"
+version = "606.0.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a7bfc5f96dfc3b9f8d5b57884f7f37467ecff6776cd4b8b491a7daece6fdd7c2"
+checksum = "86ca895350b0de14d064b499168c93fa183958d5462eb042c927d93623e41ec1"
 dependencies = [
  "bitflags",
  "log",
  "rustc-ap-rustc_data_structures",
+ "rustc-ap-rustc_index",
  "rustc-ap-serialize",
  "rustc-ap-syntax_pos",
 ]
 
 [[package]]
 name = "rustc-ap-serialize"
-version = "583.0.0"
+version = "606.0.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "2bb9ee231cf79eded39c56647499f83d6136ff5c8c0baaa9e21b6febee00f4f6"
+checksum = "92679240e86f4583cc05f8dcf6439bdab87bac9e6555718469176de9bd52ba20"
 dependencies = [
  "indexmap",
  "smallvec",
@@ -3204,17 +3242,17 @@ dependencies = [
 
 [[package]]
 name = "rustc-ap-syntax"
-version = "583.0.0"
+version = "606.0.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b3827fc208814efbde82d613e31d11b4250ce9e8cf8afe4a4d47bbbd099632c9"
+checksum = "0a0c30f8e38c847dbfd9e2f1e472ab06d0bd0a23ab53ae4c5a44912842ce834e"
 dependencies = [
  "bitflags",
  "lazy_static 1.3.0",
  "log",
  "rustc-ap-rustc_data_structures",
  "rustc-ap-rustc_errors",
+ "rustc-ap-rustc_index",
  "rustc-ap-rustc_lexer",
- "rustc-ap-rustc_macros",
  "rustc-ap-rustc_target",
  "rustc-ap-serialize",
  "rustc-ap-syntax_pos",
@@ -3224,13 +3262,14 @@ dependencies = [
 
 [[package]]
 name = "rustc-ap-syntax_pos"
-version = "583.0.0"
+version = "606.0.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "930ed81c34f325e512cc315c04d676fa84a373879d5c43bb54054a0522b05213"
+checksum = "2bdaa0fb40143b4b878256ac4e2b498885daafc269502504d91929eab4744bf4"
 dependencies = [
  "cfg-if",
  "rustc-ap-arena",
  "rustc-ap-rustc_data_structures",
+ "rustc-ap-rustc_index",
  "rustc-ap-rustc_macros",
  "rustc-ap-serialize",
  "scoped-tls",
@@ -3274,7 +3313,18 @@ checksum = "0d2e07e19601f21c59aad953c2632172ba70cb27e685771514ea66e4062b3363"
 dependencies = [
  "crossbeam-deque 0.2.0",
  "either",
- "rustc-rayon-core",
+ "rustc-rayon-core 0.2.0",
+]
+
+[[package]]
+name = "rustc-rayon"
+version = "0.3.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f32767f90d938f1b7199a174ef249ae1924f6e5bbdb9d112fea141e016f25b3a"
+dependencies = [
+ "crossbeam-deque 0.7.1",
+ "either",
+ "rustc-rayon-core 0.3.0",
 ]
 
 [[package]]
@@ -3289,6 +3339,19 @@ dependencies = [
  "num_cpus",
 ]
 
+[[package]]
+name = "rustc-rayon-core"
+version = "0.3.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ea2427831f0053ea3ea73559c8eabd893133a51b251d142bacee53c62a288cb3"
+dependencies = [
+ "crossbeam-deque 0.7.1",
+ "crossbeam-queue",
+ "crossbeam-utils 0.6.5",
+ "lazy_static 1.3.0",
+ "num_cpus",
+]
+
 [[package]]
 name = "rustc-serialize"
 version = "0.3.24"
@@ -3376,6 +3439,7 @@ dependencies = [
  "rustc_target",
  "serialize",
  "syntax",
+ "syntax_expand",
  "syntax_pos",
  "tempfile",
 ]
@@ -3409,8 +3473,8 @@ dependencies = [
  "log",
  "parking_lot 0.9.0",
  "rustc-hash",
- "rustc-rayon",
- "rustc-rayon-core",
+ "rustc-rayon 0.3.0",
+ "rustc-rayon-core 0.3.0",
  "rustc_index",
  "serialize",
  "smallvec",
@@ -3490,7 +3554,7 @@ dependencies = [
  "log",
  "once_cell",
  "rustc",
- "rustc-rayon",
+ "rustc-rayon 0.3.0",
  "rustc_codegen_ssa",
  "rustc_codegen_utils",
  "rustc_data_structures",
@@ -3508,6 +3572,7 @@ dependencies = [
  "serialize",
  "smallvec",
  "syntax",
+ "syntax_expand",
  "syntax_ext",
  "syntax_pos",
  "tempfile",
@@ -3557,10 +3622,10 @@ name = "rustc_macros"
 version = "0.1.0"
 dependencies = [
  "itertools 0.8.0",
- "proc-macro2 0.4.30",
- "quote 0.6.12",
- "syn 0.15.35",
- "synstructure",
+ "proc-macro2 1.0.3",
+ "quote 1.0.2",
+ "syn 1.0.5",
+ "synstructure 0.12.1",
 ]
 
 [[package]]
@@ -3579,6 +3644,7 @@ dependencies = [
  "smallvec",
  "stable_deref_trait",
  "syntax",
+ "syntax_expand",
  "syntax_pos",
 ]
 
@@ -3624,7 +3690,10 @@ dependencies = [
  "rustc",
  "rustc_data_structures",
  "rustc_errors",
+ "rustc_index",
+ "rustc_target",
  "syntax",
+ "syntax_expand",
  "syntax_pos",
 ]
 
@@ -3642,6 +3711,7 @@ dependencies = [
  "rustc",
  "rustc_metadata",
  "syntax",
+ "syntax_expand",
  "syntax_pos",
 ]
 
@@ -3670,6 +3740,7 @@ dependencies = [
  "rustc_metadata",
  "smallvec",
  "syntax",
+ "syntax_expand",
  "syntax_pos",
 ]
 
@@ -3767,7 +3838,7 @@ version = "0.0.0"
 dependencies = [
  "minifier",
  "pulldown-cmark 0.5.3",
- "rustc-rayon",
+ "rustc-rayon 0.3.0",
  "tempfile",
 ]
 
@@ -3806,7 +3877,7 @@ dependencies = [
 
 [[package]]
 name = "rustfmt-nightly"
-version = "1.4.8"
+version = "1.4.9"
 dependencies = [
  "annotate-snippets",
  "bytecount",
@@ -4254,6 +4325,18 @@ dependencies = [
  "unicode-xid 0.1.0",
 ]
 
+[[package]]
+name = "synstructure"
+version = "0.12.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "3f085a5855930c0441ca1288cf044ea4aecf4f43a91668abdb870b4ba546a203"
+dependencies = [
+ "proc-macro2 1.0.3",
+ "quote 1.0.2",
+ "syn 1.0.5",
+ "unicode-xid 0.2.0",
+]
+
 [[package]]
 name = "syntax"
 version = "0.0.0"
@@ -4272,6 +4355,25 @@ dependencies = [
  "syntax_pos",
 ]
 
+[[package]]
+name = "syntax_expand"
+version = "0.0.0"
+dependencies = [
+ "bitflags",
+ "lazy_static 1.3.0",
+ "log",
+ "rustc_data_structures",
+ "rustc_errors",
+ "rustc_index",
+ "rustc_lexer",
+ "rustc_target",
+ "scoped-tls",
+ "serialize",
+ "smallvec",
+ "syntax",
+ "syntax_pos",
+]
+
 [[package]]
 name = "syntax_ext"
 version = "0.0.0"
@@ -4283,6 +4385,7 @@ dependencies = [
  "rustc_target",
  "smallvec",
  "syntax",
+ "syntax_expand",
  "syntax_pos",
 ]
 
diff --git a/src/bootstrap/builder.rs b/src/bootstrap/builder.rs
index 9b43bb0eff0e845c94b0d8a7da8ad4cdd8c60c97..b8071b98f707f43e7da2b4bb9410b876af1575e4 100644 (file)
@@ -817,12 +817,22 @@ pub fn cargo(
 
         let mut rustflags = Rustflags::new(&target);
         if stage != 0 {
+            if let Ok(s) = env::var("CARGOFLAGS_NOT_BOOTSTRAP") {
+                cargo.args(s.split_whitespace());
+            }
             rustflags.env("RUSTFLAGS_NOT_BOOTSTRAP");
         } else {
+            if let Ok(s) = env::var("CARGOFLAGS_BOOTSTRAP") {
+                cargo.args(s.split_whitespace());
+            }
             rustflags.env("RUSTFLAGS_BOOTSTRAP");
             rustflags.arg("--cfg=bootstrap");
         }
 
+        if let Ok(s) = env::var("CARGOFLAGS") {
+            cargo.args(s.split_whitespace());
+        }
+
         match mode {
             Mode::Std | Mode::ToolBootstrap | Mode::ToolStd => {},
             Mode::Rustc | Mode::Codegen | Mode::ToolRustc => {
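Illustration (not part of the commit): the hunk above makes bootstrap forward extra cargo arguments from three environment variables — CARGOFLAGS_BOOTSTRAP for stage 0, CARGOFLAGS_NOT_BOOTSTRAP for later stages, and CARGOFLAGS for every stage — each split on whitespace. A minimal, self-contained Rust sketch of that splitting, with a hypothetical value:

    use std::env;

    // Collect extra cargo arguments from an environment variable, the way the
    // hunk above handles CARGOFLAGS / CARGOFLAGS_BOOTSTRAP / CARGOFLAGS_NOT_BOOTSTRAP.
    fn extra_args(var: &str) -> Vec<String> {
        env::var(var)
            .map(|s| s.split_whitespace().map(String::from).collect())
            .unwrap_or_default()
    }

    fn main() {
        env::set_var("CARGOFLAGS", "-v --frozen"); // hypothetical value
        assert_eq!(extra_args("CARGOFLAGS"), ["-v", "--frozen"]);
    }
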
diff --git a/src/bootstrap/lib.rs b/src/bootstrap/lib.rs
index c2aa75fd88fcc80376e2f09a93eec3afd593d564..592477df02f4dafe98982b580dbea0acbb6c050c 100644 (file)
@@ -1126,7 +1126,7 @@ fn read_stamp_file(&self, stamp: &Path) -> Vec<(PathBuf, bool)> {
         }
 
         let mut paths = Vec::new();
-        let contents = t!(fs::read(stamp));
+        let contents = t!(fs::read(stamp), &stamp);
         // This is the method we use for extracting paths from the stamp file passed to us. See
         // run_cargo for more information (in compile.rs).
         for part in contents.split(|b| *b == 0) {
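Side note (not part of the diff): the stamp files read above are NUL-separated lists of entries, and the t!(fs::read(stamp), &stamp) change uses the new two-argument form of the build_helper t! macro (shown further down) so a failure reports which stamp file was involved. A rough, self-contained sketch of the NUL-splitting, with made-up entry names:

    // Split a stamp buffer on NUL bytes, dropping the trailing empty piece.
    fn split_stamp(contents: &[u8]) -> Vec<String> {
        contents
            .split(|b| *b == 0)
            .filter(|part| !part.is_empty())
            .map(|part| String::from_utf8_lossy(part).into_owned())
            .collect()
    }

    fn main() {
        let raw = b"lib/libstd.rlib\0lib/libcore.rlib\0"; // hypothetical entries
        assert_eq!(split_stamp(raw), ["lib/libstd.rlib", "lib/libcore.rlib"]);
    }
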
diff --git a/src/bootstrap/native.rs b/src/bootstrap/native.rs
index 7bf9ea2688f4c5a872d3f6f0b3f1ac645a9a7811..fb308bc35ebc5e8519bb1a2b487b213f071f03e0 100644 (file)
@@ -157,6 +157,7 @@ fn run(self, builder: &Builder<'_>) -> PathBuf {
            .define("WITH_POLLY", "OFF")
            .define("LLVM_ENABLE_TERMINFO", "OFF")
            .define("LLVM_ENABLE_LIBEDIT", "OFF")
+           .define("LLVM_ENABLE_BINDINGS", "OFF")
            .define("LLVM_ENABLE_Z3_SOLVER", "OFF")
            .define("LLVM_PARALLEL_COMPILE_JOBS", builder.jobs().to_string())
            .define("LLVM_TARGET_ARCH", target.split('-').next().unwrap())
@@ -169,15 +170,6 @@ fn run(self, builder: &Builder<'_>) -> PathBuf {
             }
         }
 
-        // By default, LLVM will automatically find OCaml and, if it finds it,
-        // install the LLVM bindings in LLVM_OCAML_INSTALL_PATH, which defaults
-        // to /usr/bin/ocaml.
-        // This causes problem for non-root builds of Rust. Side-step the issue
-        // by setting LLVM_OCAML_INSTALL_PATH to a relative path, so it installs
-        // in the prefix.
-        cfg.define("LLVM_OCAML_INSTALL_PATH",
-            env::var_os("LLVM_OCAML_INSTALL_PATH").unwrap_or_else(|| "usr/lib/ocaml".into()));
-
         let want_lldb = builder.config.lldb_enabled && !self.emscripten;
 
         // This setting makes the LLVM tools link to the dynamic LLVM library,
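Aside (not part of the diff): with LLVM_ENABLE_BINDINGS set to OFF, LLVM does not build its OCaml/Go bindings at all, which is presumably why the LLVM_OCAML_INSTALL_PATH workaround above can be dropped. A sketch of the same defines, assuming the cmake crate API that these .define calls belong to (path hypothetical, .build() deliberately not called):

    // Sketch only: requires the `cmake` crate as a dependency; this configures
    // the build description but never invokes CMake.
    fn main() {
        let mut cfg = cmake::Config::new("src/llvm-project/llvm");
        cfg.define("LLVM_ENABLE_BINDINGS", "OFF")
            .define("LLVM_ENABLE_TERMINFO", "OFF");
        // cfg.build() would actually run CMake; omitted here.
    }
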
diff --git a/src/build_helper/lib.rs b/src/build_helper/lib.rs
index f035a7119188a1aa3c415e09096e20b8e896104f..bb94fb2b755f53342b76fd3dbd28e0e223510196 100644 (file)
@@ -21,6 +21,13 @@ macro_rules! t {
             Err(e) => panic!("{} failed with {}", stringify!($e), e),
         }
     };
+    // it can show extra info in the second parameter
+    ($e:expr, $extra:expr) => {
+        match $e {
+            Ok(e) => e,
+            Err(e) => panic!("{} failed with {} ({:?})", stringify!($e), e, $extra),
+        }
+    };
 }
 
 // Because Cargo adds the compiler's dylib path to our library search path, llvm-config may
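Illustration (not part of the diff): the new second arm above lets callers attach context to the panic message. A self-contained sketch, with the macro body copied from the hunk and a made-up path, showing what the two-argument call from bootstrap (t!(fs::read(stamp), &stamp)) reports on failure:

    use std::path::Path;

    macro_rules! t {
        ($e:expr) => {
            match $e {
                Ok(e) => e,
                Err(e) => panic!("{} failed with {}", stringify!($e), e),
            }
        };
        // second arm: include extra context in the panic message
        ($e:expr, $extra:expr) => {
            match $e {
                Ok(e) => e,
                Err(e) => panic!("{} failed with {} ({:?})", stringify!($e), e, $extra),
            }
        };
    }

    fn main() {
        let stamp = Path::new("/nonexistent/.stamp"); // hypothetical path
        // Panics with something like:
        //   std::fs::read(stamp) failed with No such file or directory (os error 2) ("/nonexistent/.stamp")
        let _contents = t!(std::fs::read(stamp), &stamp);
    }
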
diff --git a/src/ci/azure-pipelines/auto.yml b/src/ci/azure-pipelines/auto.yml
index 5f7761297095c8eef7e614f1eec90783b90775d5..4442afc98e4064f22ccdb68a9eb1599ee9047491 100644 (file)
@@ -124,14 +124,14 @@ jobs:
         IMAGE: dist-x86_64-netbsd
         DEPLOY: 1
 
-      asmjs:
-        IMAGE: asmjs
       i686-gnu:
         IMAGE: i686-gnu
       i686-gnu-nopt:
         IMAGE: i686-gnu-nopt
       test-various:
         IMAGE: test-various
+      wasm32:
+        IMAGE: wasm32
       x86_64-gnu:
         IMAGE: x86_64-gnu
       x86_64-gnu-full-bootstrap:
diff --git a/src/ci/docker/README.md b/src/ci/docker/README.md
index 367e43849923fca154a6867d158431a214d2f8c7..a2d83eca24b0a6eb3634f17771d51072612e67be 100644 (file)
@@ -165,8 +165,7 @@ For targets: `arm-unknown-linux-gnueabihf`
 For targets: `armv7-unknown-linux-gnueabihf`
 
 - Path and misc options > Prefix directory = /x-tools/${CT\_TARGET}
-- Path and misc options > Patches origin = Bundled, then local
-- Path and misc options > Local patch directory = /tmp/patches
+- Path and misc options > Patches origin = Bundled only
 - Target options > Target Architecture = arm
 - Target options > Suffix to the arch-part = v7
 - Target options > Architecture level = armv7-a -- (+)
@@ -174,9 +173,9 @@ For targets: `armv7-unknown-linux-gnueabihf`
 - Target options > Floating point = hardware (FPU) -- (\*)
 - Target options > Default instruction set mode = thumb -- (\*)
 - Operating System > Target OS = linux
-- Operating System > Linux kernel version = 3.2.72 -- Precise kernel
-- C-library > glibc version = 2.16.0
-- C compiler > gcc version = 5.2.0
+- Operating System > Linux kernel version = 3.2.101
+- C-library > glibc version = 2.17.0
+- C compiler > gcc version = 8.3.0
 - C compiler > C++ = ENABLE -- to cross compile LLVM
 
 (\*) These options have been selected to match the configuration of the arm
diff --git a/src/ci/docker/asmjs/Dockerfile b/src/ci/docker/asmjs/Dockerfile
deleted file mode 100644 (file)
index d136fae..0000000
+++ /dev/null
@@ -1,36 +0,0 @@
-FROM ubuntu:16.04
-
-RUN apt-get update && apt-get install -y --no-install-recommends \
-  g++ \
-  make \
-  file \
-  curl \
-  ca-certificates \
-  python \
-  git \
-  cmake \
-  sudo \
-  gdb \
-  xz-utils \
-  bzip2
-
-COPY scripts/emscripten.sh /scripts/
-RUN bash /scripts/emscripten.sh
-
-COPY scripts/sccache.sh /scripts/
-RUN sh /scripts/sccache.sh
-
-ENV PATH=$PATH:/emsdk-portable
-ENV PATH=$PATH:/emsdk-portable/upstream/emscripten/
-ENV PATH=$PATH:/emsdk-portable/node/12.9.1_64bit/bin/
-ENV BINARYEN_ROOT=/emsdk-portable/upstream/
-ENV EM_CONFIG=/emsdk-portable/.emscripten
-
-ENV TARGETS=asmjs-unknown-emscripten
-
-ENV SCRIPT python2.7 ../x.py test --target $TARGETS
-
-# This is almost identical to the wasm32-unknown-emscripten target, so
-# running with assertions again is not useful
-ENV NO_DEBUG_ASSERTIONS=1
-ENV NO_LLVM_ASSERTIONS=1
diff --git a/src/ci/docker/disabled/asmjs/Dockerfile b/src/ci/docker/disabled/asmjs/Dockerfile
new file mode 100644 (file)
index 0000000..e27a2a5
--- /dev/null
@@ -0,0 +1,41 @@
+FROM ubuntu:16.04
+
+RUN apt-get update && apt-get install -y --no-install-recommends \
+  g++ \
+  make \
+  file \
+  curl \
+  ca-certificates \
+  python \
+  git \
+  cmake \
+  sudo \
+  gdb \
+  xz-utils \
+  bzip2
+
+COPY scripts/emscripten.sh /scripts/
+RUN bash /scripts/emscripten.sh
+
+COPY scripts/sccache.sh /scripts/
+RUN sh /scripts/sccache.sh
+
+ENV PATH=$PATH:/emsdk-portable
+ENV PATH=$PATH:/emsdk-portable/upstream/emscripten/
+ENV PATH=$PATH:/emsdk-portable/node/12.9.1_64bit/bin/
+ENV BINARYEN_ROOT=/emsdk-portable/upstream/
+
+ENV TARGETS=asmjs-unknown-emscripten
+
+# Use -O1 optimizations in the link step to reduce time spent optimizing JS.
+ENV EMCC_CFLAGS=-O1
+
+# Emscripten installation is user-specific
+ENV NO_CHANGE_USER=1
+
+ENV SCRIPT python2.7 ../x.py test --target $TARGETS
+
+# This is almost identical to the wasm32-unknown-emscripten target, so
+# running with assertions again is not useful
+ENV NO_DEBUG_ASSERTIONS=1
+ENV NO_LLVM_ASSERTIONS=1
diff --git a/src/ci/docker/disabled/wasm32/Dockerfile b/src/ci/docker/disabled/wasm32/Dockerfile
deleted file mode 100644 (file)
index b2cf862..0000000
+++ /dev/null
@@ -1,38 +0,0 @@
-FROM ubuntu:16.04
-
-RUN apt-get update && apt-get install -y --no-install-recommends \
-  g++ \
-  make \
-  file \
-  curl \
-  ca-certificates \
-  python \
-  git \
-  cmake \
-  sudo \
-  gdb \
-  xz-utils \
-  bzip2
-
-COPY scripts/emscripten.sh /scripts/
-RUN bash /scripts/emscripten.sh
-
-COPY scripts/sccache.sh /scripts/
-RUN sh /scripts/sccache.sh
-
-ENV PATH=$PATH:/emsdk-portable
-ENV PATH=$PATH:/emsdk-portable/upstream/emscripten/
-ENV PATH=$PATH:/emsdk-portable/node/12.9.1_64bit/bin/
-ENV BINARYEN_ROOT=/emsdk-portable/upstream/
-ENV EM_CONFIG=/emsdk-portable/.emscripten
-
-ENV TARGETS=wasm32-unknown-emscripten
-
-# FIXME: Re-enable these tests once Cargo stops trying to execute wasms
-ENV SCRIPT python2.7 ../x.py test --target $TARGETS \
-    --exclude src/libcore \
-    --exclude src/liballoc \
-    --exclude src/libproc_macro \
-    --exclude src/libstd \
-    --exclude src/libterm \
-    --exclude src/libtest
diff --git a/src/ci/docker/dist-armv7-linux/Dockerfile b/src/ci/docker/dist-armv7-linux/Dockerfile
index 170b8134d3edcd8ac32931c06416e472f5f0253e..417171a861d4a01be92f41a29a6f7ceaa6499777 100644 (file)
@@ -3,12 +3,7 @@ FROM ubuntu:16.04
 COPY scripts/cross-apt-packages.sh /scripts/
 RUN sh /scripts/cross-apt-packages.sh
 
-# Ubuntu 16.04 (this container) ships with make 4, but something in the
-# toolchains we build below chokes on that, so go back to make 3
-COPY scripts/make3.sh /scripts/
-RUN sh /scripts/make3.sh
-
-COPY scripts/crosstool-ng.sh /scripts/
+COPY dist-armv7-linux/crosstool-ng.sh /scripts/
 RUN sh /scripts/crosstool-ng.sh
 
 COPY scripts/rustbuild-setup.sh /scripts/
@@ -16,7 +11,6 @@ RUN sh /scripts/rustbuild-setup.sh
 USER rustbuild
 WORKDIR /tmp
 
-COPY dist-armv7-linux/patches/ /tmp/patches/
 COPY dist-armv7-linux/build-toolchains.sh dist-armv7-linux/armv7-linux-gnueabihf.config /tmp/
 RUN ./build-toolchains.sh
 
diff --git a/src/ci/docker/dist-armv7-linux/armv7-linux-gnueabihf.config b/src/ci/docker/dist-armv7-linux/armv7-linux-gnueabihf.config
index 5cccfd8444d3536a8c501e48f8782a958e795fd9..81b3d7477ec8d1537a5a9d787ca2bc6dc947a949 100644 (file)
@@ -1,9 +1,32 @@
 #
 # Automatically generated file; DO NOT EDIT.
-# Crosstool-NG Configuration
-#
-CT_CONFIGURE_has_make381=y
-CT_CONFIGURE_has_xz=y
+# crosstool-NG  Configuration
+#
+CT_CONFIGURE_has_static_link=y
+CT_CONFIGURE_has_cxx11=y
+CT_CONFIGURE_has_wget=y
+CT_CONFIGURE_has_curl=y
+CT_CONFIGURE_has_make_3_81_or_newer=y
+CT_CONFIGURE_has_make_4_0_or_newer=y
+CT_CONFIGURE_has_libtool_2_4_or_newer=y
+CT_CONFIGURE_has_libtoolize_2_4_or_newer=y
+CT_CONFIGURE_has_autoconf_2_65_or_newer=y
+CT_CONFIGURE_has_autoreconf_2_65_or_newer=y
+CT_CONFIGURE_has_automake_1_15_or_newer=y
+CT_CONFIGURE_has_gnu_m4_1_4_12_or_newer=y
+CT_CONFIGURE_has_python_3_4_or_newer=y
+CT_CONFIGURE_has_bison_2_7_or_newer=y
+CT_CONFIGURE_has_python=y
+CT_CONFIGURE_has_dtc=y
+CT_CONFIGURE_has_svn=y
+CT_CONFIGURE_has_git=y
+CT_CONFIGURE_has_md5sum=y
+CT_CONFIGURE_has_sha1sum=y
+CT_CONFIGURE_has_sha256sum=y
+CT_CONFIGURE_has_sha512sum=y
+CT_CONFIGURE_has_install_with_strip_program=y
+CT_CONFIG_VERSION_CURRENT="3"
+CT_CONFIG_VERSION="3"
 CT_MODULES=y
 
 #
@@ -21,40 +44,46 @@ CT_MODULES=y
 # Paths
 #
 CT_LOCAL_TARBALLS_DIR=""
+# CT_TARBALLS_BUILDROOT_LAYOUT is not set
 CT_WORK_DIR="${CT_TOP_DIR}/.build"
+CT_BUILD_TOP_DIR="${CT_WORK_DIR:-${CT_TOP_DIR}/.build}/${CT_HOST:+HOST-${CT_HOST}/}${CT_TARGET}"
 CT_PREFIX_DIR="/x-tools/${CT_TARGET}"
-CT_INSTALL_DIR="${CT_PREFIX_DIR}"
 CT_RM_RF_PREFIX_DIR=y
 CT_REMOVE_DOCS=y
-CT_INSTALL_DIR_RO=y
+CT_INSTALL_LICENSES=y
+CT_PREFIX_DIR_RO=y
 CT_STRIP_HOST_TOOLCHAIN_EXECUTABLES=y
 # CT_STRIP_TARGET_TOOLCHAIN_EXECUTABLES is not set
 
 #
 # Downloading
 #
+CT_DOWNLOAD_AGENT_WGET=y
+# CT_DOWNLOAD_AGENT_CURL is not set
+# CT_DOWNLOAD_AGENT_NONE is not set
 # CT_FORBID_DOWNLOAD is not set
 # CT_FORCE_DOWNLOAD is not set
 CT_CONNECT_TIMEOUT=10
+CT_DOWNLOAD_WGET_OPTIONS="--passive-ftp --tries=3 -nc --progress=dot:binary"
 # CT_ONLY_DOWNLOAD is not set
 # CT_USE_MIRROR is not set
+CT_VERIFY_DOWNLOAD_DIGEST=y
+CT_VERIFY_DOWNLOAD_DIGEST_SHA512=y
+# CT_VERIFY_DOWNLOAD_DIGEST_SHA256 is not set
+# CT_VERIFY_DOWNLOAD_DIGEST_SHA1 is not set
+# CT_VERIFY_DOWNLOAD_DIGEST_MD5 is not set
+CT_VERIFY_DOWNLOAD_DIGEST_ALG="sha512"
+# CT_VERIFY_DOWNLOAD_SIGNATURE is not set
 
 #
 # Extracting
 #
 # CT_FORCE_EXTRACT is not set
-CT_OVERIDE_CONFIG_GUESS_SUB=y
+CT_OVERRIDE_CONFIG_GUESS_SUB=y
 # CT_ONLY_EXTRACT is not set
-# CT_PATCH_BUNDLED is not set
-# CT_PATCH_LOCAL is not set
-CT_PATCH_BUNDLED_LOCAL=y
-# CT_PATCH_LOCAL_BUNDLED is not set
-# CT_PATCH_BUNDLED_FALLBACK_LOCAL is not set
-# CT_PATCH_LOCAL_FALLBACK_BUNDLED is not set
-# CT_PATCH_NONE is not set
-CT_PATCH_ORDER="bundled,local"
-CT_PATCH_USE_LOCAL=y
-CT_LOCAL_PATCH_DIR="/tmp/patches"
+CT_PATCH_BUNDLED=y
+# CT_PATCH_BUNDLED_LOCAL is not set
+CT_PATCH_ORDER="bundled"
 
 #
 # Build behavior
@@ -90,86 +119,81 @@ CT_LOG_FILE_COMPRESS=y
 #
 # Target options
 #
+# CT_ARCH_ALPHA is not set
+# CT_ARCH_ARC is not set
+CT_ARCH_ARM=y
+# CT_ARCH_AVR is not set
+# CT_ARCH_M68K is not set
+# CT_ARCH_MIPS is not set
+# CT_ARCH_NIOS2 is not set
+# CT_ARCH_POWERPC is not set
+# CT_ARCH_S390 is not set
+# CT_ARCH_SH is not set
+# CT_ARCH_SPARC is not set
+# CT_ARCH_X86 is not set
+# CT_ARCH_XTENSA is not set
 CT_ARCH="arm"
-CT_ARCH_SUPPORTS_BOTH_MMU=y
-CT_ARCH_SUPPORTS_BOTH_ENDIAN=y
-CT_ARCH_SUPPORTS_32=y
-CT_ARCH_SUPPORTS_64=y
-CT_ARCH_SUPPORTS_WITH_ARCH=y
-CT_ARCH_SUPPORTS_WITH_CPU=y
-CT_ARCH_SUPPORTS_WITH_TUNE=y
-CT_ARCH_SUPPORTS_WITH_FLOAT=y
-CT_ARCH_SUPPORTS_WITH_FPU=y
-CT_ARCH_SUPPORTS_SOFTFP=y
-CT_ARCH_DEFAULT_HAS_MMU=y
-CT_ARCH_DEFAULT_LE=y
-CT_ARCH_DEFAULT_32=y
-CT_ARCH_ARCH="armv7-a"
+CT_ARCH_CHOICE_KSYM="ARM"
 CT_ARCH_CPU=""
 CT_ARCH_TUNE=""
-CT_ARCH_FPU="vfpv3-d16"
-# CT_ARCH_BE is not set
-CT_ARCH_LE=y
-CT_ARCH_32=y
-# CT_ARCH_64 is not set
-CT_ARCH_BITNESS=32
-CT_ARCH_FLOAT_HW=y
-# CT_ARCH_FLOAT_SW is not set
-CT_TARGET_CFLAGS=""
-CT_TARGET_LDFLAGS=""
-# CT_ARCH_alpha is not set
-CT_ARCH_arm=y
-# CT_ARCH_avr is not set
-# CT_ARCH_m68k is not set
-# CT_ARCH_mips is not set
-# CT_ARCH_nios2 is not set
-# CT_ARCH_powerpc is not set
-# CT_ARCH_s390 is not set
-# CT_ARCH_sh is not set
-# CT_ARCH_sparc is not set
-# CT_ARCH_x86 is not set
-# CT_ARCH_xtensa is not set
-CT_ARCH_alpha_AVAILABLE=y
-CT_ARCH_arm_AVAILABLE=y
-CT_ARCH_avr_AVAILABLE=y
-CT_ARCH_m68k_AVAILABLE=y
-CT_ARCH_microblaze_AVAILABLE=y
-CT_ARCH_mips_AVAILABLE=y
-CT_ARCH_nios2_AVAILABLE=y
-CT_ARCH_powerpc_AVAILABLE=y
-CT_ARCH_s390_AVAILABLE=y
-CT_ARCH_sh_AVAILABLE=y
-CT_ARCH_sparc_AVAILABLE=y
-CT_ARCH_x86_AVAILABLE=y
-CT_ARCH_xtensa_AVAILABLE=y
+CT_ARCH_ARM_SHOW=y
+
+#
+# Options for arm
+#
+CT_ARCH_ARM_PKG_KSYM=""
+CT_ARCH_ARM_MODE="thumb"
+# CT_ARCH_ARM_MODE_ARM is not set
+CT_ARCH_ARM_MODE_THUMB=y
+# CT_ARCH_ARM_INTERWORKING is not set
+CT_ARCH_ARM_EABI_FORCE=y
+CT_ARCH_ARM_EABI=y
+CT_ARCH_ARM_TUPLE_USE_EABIHF=y
+CT_ALL_ARCH_CHOICES="ALPHA ARC ARM AVR M68K MICROBLAZE MIPS MOXIE MSP430 NIOS2 POWERPC RISCV S390 SH SPARC X86 XTENSA"
 CT_ARCH_SUFFIX="v7"
+# CT_OMIT_TARGET_VENDOR is not set
 
 #
 # Generic target options
 #
 # CT_MULTILIB is not set
+CT_DEMULTILIB=y
+CT_ARCH_SUPPORTS_BOTH_MMU=y
+CT_ARCH_DEFAULT_HAS_MMU=y
 CT_ARCH_USE_MMU=y
+CT_ARCH_SUPPORTS_FLAT_FORMAT=y
+CT_ARCH_SUPPORTS_EITHER_ENDIAN=y
+CT_ARCH_DEFAULT_LE=y
+# CT_ARCH_BE is not set
+CT_ARCH_LE=y
 CT_ARCH_ENDIAN="little"
+CT_ARCH_SUPPORTS_32=y
+CT_ARCH_SUPPORTS_64=y
+CT_ARCH_DEFAULT_32=y
+CT_ARCH_BITNESS=32
+CT_ARCH_32=y
+# CT_ARCH_64 is not set
 
 #
 # Target optimisations
 #
+CT_ARCH_SUPPORTS_WITH_ARCH=y
+CT_ARCH_SUPPORTS_WITH_CPU=y
+CT_ARCH_SUPPORTS_WITH_TUNE=y
+CT_ARCH_SUPPORTS_WITH_FLOAT=y
+CT_ARCH_SUPPORTS_WITH_FPU=y
+CT_ARCH_SUPPORTS_SOFTFP=y
 CT_ARCH_EXCLUSIVE_WITH_CPU=y
+CT_ARCH_ARCH="armv7-a"
+CT_ARCH_FPU="vfpv3-d16"
 # CT_ARCH_FLOAT_AUTO is not set
+CT_ARCH_FLOAT_HW=y
 # CT_ARCH_FLOAT_SOFTFP is not set
+# CT_ARCH_FLOAT_SW is not set
+CT_TARGET_CFLAGS=""
+CT_TARGET_LDFLAGS=""
 CT_ARCH_FLOAT="hard"
 
-#
-# arm other options
-#
-CT_ARCH_ARM_MODE="thumb"
-# CT_ARCH_ARM_MODE_ARM is not set
-CT_ARCH_ARM_MODE_THUMB=y
-# CT_ARCH_ARM_INTERWORKING is not set
-CT_ARCH_ARM_EABI_FORCE=y
-CT_ARCH_ARM_EABI=y
-CT_ARCH_ARM_TUPLE_USE_EABIHF=y
-
 #
 # Toolchain options
 #
@@ -182,7 +206,9 @@ CT_USE_SYSROOT=y
 CT_SYSROOT_NAME="sysroot"
 CT_SYSROOT_DIR_PREFIX=""
 CT_WANTS_STATIC_LINK=y
+CT_WANTS_STATIC_LINK_CXX=y
 # CT_STATIC_TOOLCHAIN is not set
+CT_SHOW_CT_VERSION=y
 CT_TOOLCHAIN_PKGVERSION=""
 CT_TOOLCHAIN_BUGURL=""
 
@@ -216,126 +242,207 @@ CT_BUILD_SUFFIX=""
 # Operating System
 #
 CT_KERNEL_SUPPORTS_SHARED_LIBS=y
+# CT_KERNEL_BARE_METAL is not set
+CT_KERNEL_LINUX=y
 CT_KERNEL="linux"
-CT_KERNEL_VERSION="3.2.72"
-# CT_KERNEL_bare_metal is not set
-CT_KERNEL_linux=y
-CT_KERNEL_bare_metal_AVAILABLE=y
-CT_KERNEL_linux_AVAILABLE=y
-# CT_KERNEL_V_4_3 is not set
-# CT_KERNEL_V_4_2 is not set
-# CT_KERNEL_V_4_1 is not set
-# CT_KERNEL_V_3_18 is not set
-# CT_KERNEL_V_3_14 is not set
-# CT_KERNEL_V_3_12 is not set
-# CT_KERNEL_V_3_10 is not set
-# CT_KERNEL_V_3_4 is not set
-CT_KERNEL_V_3_2=y
-# CT_KERNEL_V_2_6_32 is not set
-# CT_KERNEL_LINUX_CUSTOM is not set
-CT_KERNEL_windows_AVAILABLE=y
-
-#
-# Common kernel options
-#
-CT_SHARED_LIBS=y
-
-#
-# linux other options
-#
+CT_KERNEL_CHOICE_KSYM="LINUX"
+CT_KERNEL_LINUX_SHOW=y
+
+#
+# Options for linux
+#
+CT_KERNEL_LINUX_PKG_KSYM="LINUX"
+CT_LINUX_DIR_NAME="linux"
+CT_LINUX_PKG_NAME="linux"
+CT_LINUX_SRC_RELEASE=y
+CT_LINUX_PATCH_ORDER="global"
+# CT_LINUX_V_4_20 is not set
+# CT_LINUX_V_4_19 is not set
+# CT_LINUX_V_4_18 is not set
+# CT_LINUX_V_4_17 is not set
+# CT_LINUX_V_4_16 is not set
+# CT_LINUX_V_4_15 is not set
+# CT_LINUX_V_4_14 is not set
+# CT_LINUX_V_4_13 is not set
+# CT_LINUX_V_4_12 is not set
+# CT_LINUX_V_4_11 is not set
+# CT_LINUX_V_4_10 is not set
+# CT_LINUX_V_4_9 is not set
+# CT_LINUX_V_4_4 is not set
+# CT_LINUX_V_4_1 is not set
+# CT_LINUX_V_3_16 is not set
+# CT_LINUX_V_3_13 is not set
+# CT_LINUX_V_3_12 is not set
+# CT_LINUX_V_3_10 is not set
+# CT_LINUX_V_3_4 is not set
+CT_LINUX_V_3_2=y
+# CT_LINUX_V_2_6_32 is not set
+# CT_LINUX_NO_VERSIONS is not set
+CT_LINUX_VERSION="3.2.101"
+CT_LINUX_MIRRORS="$(CT_Mirrors kernel.org linux ${CT_LINUX_VERSION})"
+CT_LINUX_ARCHIVE_FILENAME="@{pkg_name}-@{version}"
+CT_LINUX_ARCHIVE_DIRNAME="@{pkg_name}-@{version}"
+CT_LINUX_ARCHIVE_FORMATS=".tar.xz .tar.gz"
+CT_LINUX_SIGNATURE_FORMAT="unpacked/.sign"
+CT_LINUX_4_8_or_older=y
+CT_LINUX_older_than_4_8=y
+CT_LINUX_3_7_or_older=y
+CT_LINUX_older_than_3_7=y
+CT_LINUX_later_than_3_2=y
+CT_LINUX_3_2_or_later=y
 CT_KERNEL_LINUX_VERBOSITY_0=y
 # CT_KERNEL_LINUX_VERBOSITY_1 is not set
 # CT_KERNEL_LINUX_VERBOSITY_2 is not set
 CT_KERNEL_LINUX_VERBOSE_LEVEL=0
 CT_KERNEL_LINUX_INSTALL_CHECK=y
+CT_ALL_KERNEL_CHOICES="BARE_METAL LINUX WINDOWS"
+
+#
+# Common kernel options
+#
+CT_SHARED_LIBS=y
 
 #
 # Binary utilities
 #
 CT_ARCH_BINFMT_ELF=y
+CT_BINUTILS_BINUTILS=y
 CT_BINUTILS="binutils"
-CT_BINUTILS_binutils=y
+CT_BINUTILS_CHOICE_KSYM="BINUTILS"
+CT_BINUTILS_BINUTILS_SHOW=y
+
+#
+# Options for binutils
+#
+CT_BINUTILS_BINUTILS_PKG_KSYM="BINUTILS"
+CT_BINUTILS_DIR_NAME="binutils"
+CT_BINUTILS_USE_GNU=y
+CT_BINUTILS_USE="BINUTILS"
+CT_BINUTILS_PKG_NAME="binutils"
+CT_BINUTILS_SRC_RELEASE=y
+CT_BINUTILS_PATCH_ORDER="global"
+CT_BINUTILS_V_2_32=y
+# CT_BINUTILS_V_2_31 is not set
+# CT_BINUTILS_V_2_30 is not set
+# CT_BINUTILS_V_2_29 is not set
+# CT_BINUTILS_V_2_28 is not set
+# CT_BINUTILS_V_2_27 is not set
+# CT_BINUTILS_V_2_26 is not set
+# CT_BINUTILS_NO_VERSIONS is not set
+CT_BINUTILS_VERSION="2.32"
+CT_BINUTILS_MIRRORS="$(CT_Mirrors GNU binutils) $(CT_Mirrors sourceware binutils/releases)"
+CT_BINUTILS_ARCHIVE_FILENAME="@{pkg_name}-@{version}"
+CT_BINUTILS_ARCHIVE_DIRNAME="@{pkg_name}-@{version}"
+CT_BINUTILS_ARCHIVE_FORMATS=".tar.xz .tar.bz2 .tar.gz"
+CT_BINUTILS_SIGNATURE_FORMAT="packed/.sig"
+CT_BINUTILS_later_than_2_30=y
+CT_BINUTILS_2_30_or_later=y
+CT_BINUTILS_later_than_2_27=y
+CT_BINUTILS_2_27_or_later=y
+CT_BINUTILS_later_than_2_25=y
+CT_BINUTILS_2_25_or_later=y
+CT_BINUTILS_later_than_2_23=y
+CT_BINUTILS_2_23_or_later=y
 
 #
 # GNU binutils
 #
-# CT_CC_BINUTILS_SHOW_LINARO is not set
-CT_BINUTILS_V_2_25_1=y
-# CT_BINUTILS_V_2_25 is not set
-# CT_BINUTILS_V_2_24 is not set
-# CT_BINUTILS_V_2_23_2 is not set
-# CT_BINUTILS_V_2_23_1 is not set
-# CT_BINUTILS_V_2_22 is not set
-# CT_BINUTILS_V_2_21_53 is not set
-# CT_BINUTILS_V_2_21_1a is not set
-# CT_BINUTILS_V_2_20_1a is not set
-# CT_BINUTILS_V_2_19_1a is not set
-# CT_BINUTILS_V_2_18a is not set
-CT_BINUTILS_VERSION="2.25.1"
-CT_BINUTILS_2_25_1_or_later=y
-CT_BINUTILS_2_25_or_later=y
-CT_BINUTILS_2_24_or_later=y
-CT_BINUTILS_2_23_or_later=y
-CT_BINUTILS_2_22_or_later=y
-CT_BINUTILS_2_21_or_later=y
-CT_BINUTILS_2_20_or_later=y
-CT_BINUTILS_2_19_or_later=y
-CT_BINUTILS_2_18_or_later=y
 CT_BINUTILS_HAS_HASH_STYLE=y
 CT_BINUTILS_HAS_GOLD=y
-CT_BINUTILS_GOLD_SUPPORTS_ARCH=y
-CT_BINUTILS_GOLD_SUPPORT=y
 CT_BINUTILS_HAS_PLUGINS=y
 CT_BINUTILS_HAS_PKGVERSION_BUGURL=y
-CT_BINUTILS_FORCE_LD_BFD=y
+CT_BINUTILS_GOLD_SUPPORTS_ARCH=y
+CT_BINUTILS_GOLD_SUPPORT=y
+CT_BINUTILS_FORCE_LD_BFD_DEFAULT=y
 CT_BINUTILS_LINKER_LD=y
 # CT_BINUTILS_LINKER_LD_GOLD is not set
-# CT_BINUTILS_LINKER_GOLD_LD is not set
 CT_BINUTILS_LINKERS_LIST="ld"
 CT_BINUTILS_LINKER_DEFAULT="bfd"
 # CT_BINUTILS_PLUGINS is not set
+CT_BINUTILS_RELRO=m
 CT_BINUTILS_EXTRA_CONFIG_ARRAY=""
 # CT_BINUTILS_FOR_TARGET is not set
-
-#
-# binutils other options
-#
+CT_ALL_BINUTILS_CHOICES="BINUTILS"
 
 #
 # C-library
 #
+CT_LIBC_GLIBC=y
+# CT_LIBC_UCLIBC is not set
 CT_LIBC="glibc"
-CT_LIBC_VERSION="2.16.0"
-CT_LIBC_glibc=y
-# CT_LIBC_musl is not set
-# CT_LIBC_uClibc is not set
-CT_LIBC_avr_libc_AVAILABLE=y
-CT_LIBC_glibc_AVAILABLE=y
+CT_LIBC_CHOICE_KSYM="GLIBC"
 CT_THREADS="nptl"
-# CT_CC_GLIBC_SHOW_LINARO is not set
-# CT_LIBC_GLIBC_V_2_22 is not set
-# CT_LIBC_GLIBC_V_2_21 is not set
-# CT_LIBC_GLIBC_V_2_20 is not set
-# CT_LIBC_GLIBC_V_2_19 is not set
-# CT_LIBC_GLIBC_V_2_18 is not set
-# CT_LIBC_GLIBC_V_2_17 is not set
-CT_LIBC_GLIBC_V_2_16_0=y
-# CT_LIBC_GLIBC_V_2_15 is not set
-# CT_LIBC_GLIBC_V_2_14_1 is not set
-# CT_LIBC_GLIBC_V_2_14 is not set
-# CT_LIBC_GLIBC_V_2_13 is not set
-# CT_LIBC_GLIBC_V_2_12_2 is not set
-# CT_LIBC_GLIBC_V_2_12_1 is not set
-# CT_LIBC_GLIBC_V_2_11_1 is not set
-# CT_LIBC_GLIBC_V_2_11 is not set
-# CT_LIBC_GLIBC_V_2_10_1 is not set
-# CT_LIBC_GLIBC_V_2_9 is not set
-# CT_LIBC_GLIBC_V_2_8 is not set
-CT_LIBC_mingw_AVAILABLE=y
-CT_LIBC_musl_AVAILABLE=y
-CT_LIBC_newlib_AVAILABLE=y
-CT_LIBC_none_AVAILABLE=y
-CT_LIBC_uClibc_AVAILABLE=y
+CT_LIBC_GLIBC_SHOW=y
+
+#
+# Options for glibc
+#
+CT_LIBC_GLIBC_PKG_KSYM="GLIBC"
+CT_GLIBC_DIR_NAME="glibc"
+CT_GLIBC_USE_GNU=y
+CT_GLIBC_USE="GLIBC"
+CT_GLIBC_PKG_NAME="glibc"
+CT_GLIBC_SRC_RELEASE=y
+CT_GLIBC_PATCH_ORDER="global"
+# CT_GLIBC_V_2_29 is not set
+# CT_GLIBC_V_2_28 is not set
+# CT_GLIBC_V_2_27 is not set
+# CT_GLIBC_V_2_26 is not set
+# CT_GLIBC_V_2_25 is not set
+# CT_GLIBC_V_2_24 is not set
+# CT_GLIBC_V_2_23 is not set
+# CT_GLIBC_V_2_19 is not set
+CT_GLIBC_V_2_17=y
+# CT_GLIBC_V_2_12_1 is not set
+# CT_GLIBC_NO_VERSIONS is not set
+CT_GLIBC_VERSION="2.17"
+CT_GLIBC_MIRRORS="$(CT_Mirrors GNU glibc)"
+CT_GLIBC_ARCHIVE_FILENAME="@{pkg_name}-@{version}"
+CT_GLIBC_ARCHIVE_DIRNAME="@{pkg_name}-@{version}"
+CT_GLIBC_ARCHIVE_FORMATS=".tar.xz .tar.bz2 .tar.gz"
+CT_GLIBC_SIGNATURE_FORMAT="packed/.sig"
+CT_GLIBC_2_29_or_older=y
+CT_GLIBC_older_than_2_29=y
+CT_GLIBC_2_27_or_older=y
+CT_GLIBC_older_than_2_27=y
+CT_GLIBC_2_26_or_older=y
+CT_GLIBC_older_than_2_26=y
+CT_GLIBC_2_25_or_older=y
+CT_GLIBC_older_than_2_25=y
+CT_GLIBC_2_24_or_older=y
+CT_GLIBC_older_than_2_24=y
+CT_GLIBC_2_23_or_older=y
+CT_GLIBC_older_than_2_23=y
+CT_GLIBC_2_20_or_older=y
+CT_GLIBC_older_than_2_20=y
+CT_GLIBC_2_17_or_later=y
+CT_GLIBC_2_17_or_older=y
+CT_GLIBC_later_than_2_14=y
+CT_GLIBC_2_14_or_later=y
+CT_GLIBC_DEP_KERNEL_HEADERS_VERSION=y
+CT_GLIBC_DEP_BINUTILS=y
+CT_GLIBC_DEP_GCC=y
+CT_GLIBC_DEP_PYTHON=y
+CT_GLIBC_HAS_NPTL_ADDON=y
+CT_GLIBC_HAS_PORTS_ADDON=y
+CT_GLIBC_HAS_LIBIDN_ADDON=y
+CT_GLIBC_USE_PORTS_ADDON=y
+CT_GLIBC_USE_NPTL_ADDON=y
+# CT_GLIBC_USE_LIBIDN_ADDON is not set
+CT_GLIBC_HAS_OBSOLETE_RPC=y
+CT_GLIBC_EXTRA_CONFIG_ARRAY=""
+CT_GLIBC_CONFIGPARMS=""
+CT_GLIBC_EXTRA_CFLAGS=""
+CT_GLIBC_ENABLE_OBSOLETE_RPC=y
+# CT_GLIBC_DISABLE_VERSIONING is not set
+CT_GLIBC_OLDEST_ABI=""
+CT_GLIBC_FORCE_UNWIND=y
+# CT_GLIBC_LOCALES is not set
+# CT_GLIBC_KERNEL_VERSION_NONE is not set
+CT_GLIBC_KERNEL_VERSION_AS_HEADERS=y
+# CT_GLIBC_KERNEL_VERSION_CHOSEN is not set
+CT_GLIBC_MIN_KERNEL="3.2.101"
+CT_ALL_LIBC_CHOICES="AVR_LIBC BIONIC GLIBC MINGW_W64 MOXIEBOX MUSL NEWLIB NONE UCLIBC"
 CT_LIBC_SUPPORT_THREADS_ANY=y
 CT_LIBC_SUPPORT_THREADS_NATIVE=y
 
@@ -343,100 +450,71 @@ CT_LIBC_SUPPORT_THREADS_NATIVE=y
 # Common C library options
 #
 CT_THREADS_NATIVE=y
+# CT_CREATE_LDSO_CONF is not set
 CT_LIBC_XLDD=y
 
-#
-# glibc other options
-#
-CT_LIBC_GLIBC_PORTS_EXTERNAL=y
-CT_LIBC_GLIBC_MAY_FORCE_PORTS=y
-CT_LIBC_glibc_familly=y
-CT_LIBC_GLIBC_EXTRA_CONFIG_ARRAY=""
-CT_LIBC_GLIBC_CONFIGPARMS=""
-CT_LIBC_GLIBC_EXTRA_CFLAGS=""
-CT_LIBC_EXTRA_CC_ARGS=""
-# CT_LIBC_DISABLE_VERSIONING is not set
-CT_LIBC_OLDEST_ABI=""
-CT_LIBC_GLIBC_FORCE_UNWIND=y
-CT_LIBC_GLIBC_USE_PORTS=y
-CT_LIBC_ADDONS_LIST=""
-
-#
-# WARNING !!!                                            
-#
-
-#
-#   For glibc >= 2.8, it can happen that the tarballs    
-#
-
-#
-#   for the addons are not available for download.       
-#
-
-#
-#   If that happens, bad luck... Try a previous version  
-#
-
-#
-#   or try again later... :-(                            
-#
-# CT_LIBC_LOCALES is not set
-# CT_LIBC_GLIBC_KERNEL_VERSION_NONE is not set
-CT_LIBC_GLIBC_KERNEL_VERSION_AS_HEADERS=y
-# CT_LIBC_GLIBC_KERNEL_VERSION_CHOSEN is not set
-CT_LIBC_GLIBC_MIN_KERNEL="3.2.72"
-
 #
 # C compiler
 #
-CT_CC="gcc"
 CT_CC_CORE_PASSES_NEEDED=y
 CT_CC_CORE_PASS_1_NEEDED=y
 CT_CC_CORE_PASS_2_NEEDED=y
-CT_CC_gcc=y
-# CT_CC_GCC_SHOW_LINARO is not set
-CT_CC_GCC_V_5_2_0=y
-# CT_CC_GCC_V_4_9_3 is not set
-# CT_CC_GCC_V_4_8_5 is not set
-# CT_CC_GCC_V_4_7_4 is not set
-# CT_CC_GCC_V_4_6_4 is not set
-# CT_CC_GCC_V_4_5_4 is not set
-# CT_CC_GCC_V_4_4_7 is not set
-# CT_CC_GCC_V_4_3_6 is not set
-# CT_CC_GCC_V_4_2_4 is not set
-CT_CC_GCC_4_2_or_later=y
-CT_CC_GCC_4_3_or_later=y
-CT_CC_GCC_4_4_or_later=y
-CT_CC_GCC_4_5_or_later=y
-CT_CC_GCC_4_6_or_later=y
-CT_CC_GCC_4_7_or_later=y
-CT_CC_GCC_4_8_or_later=y
-CT_CC_GCC_4_9_or_later=y
-CT_CC_GCC_5=y
-CT_CC_GCC_5_or_later=y
-CT_CC_GCC_HAS_GRAPHITE=y
-CT_CC_GCC_USE_GRAPHITE=y
-CT_CC_GCC_HAS_LTO=y
-CT_CC_GCC_USE_LTO=y
-CT_CC_GCC_HAS_PKGVERSION_BUGURL=y
-CT_CC_GCC_HAS_BUILD_ID=y
-CT_CC_GCC_HAS_LNK_HASH_STYLE=y
-CT_CC_GCC_USE_GMP_MPFR=y
-CT_CC_GCC_USE_MPC=y
-CT_CC_GCC_HAS_LIBQUADMATH=y
-CT_CC_GCC_HAS_LIBSANITIZER=y
-CT_CC_GCC_VERSION="5.2.0"
-# CT_CC_LANG_FORTRAN is not set
+CT_CC_SUPPORT_CXX=y
+CT_CC_SUPPORT_FORTRAN=y
+CT_CC_SUPPORT_ADA=y
+CT_CC_SUPPORT_OBJC=y
+CT_CC_SUPPORT_OBJCXX=y
+CT_CC_SUPPORT_GOLANG=y
+CT_CC_GCC=y
+CT_CC="gcc"
+CT_CC_CHOICE_KSYM="GCC"
+CT_CC_GCC_SHOW=y
+
+#
+# Options for gcc
+#
+CT_CC_GCC_PKG_KSYM="GCC"
+CT_GCC_DIR_NAME="gcc"
+CT_GCC_USE_GNU=y
+CT_GCC_USE="GCC"
+CT_GCC_PKG_NAME="gcc"
+CT_GCC_SRC_RELEASE=y
+CT_GCC_PATCH_ORDER="global"
+CT_GCC_V_8=y
+# CT_GCC_V_7 is not set
+# CT_GCC_V_6 is not set
+# CT_GCC_V_5 is not set
+# CT_GCC_V_4_9 is not set
+# CT_GCC_NO_VERSIONS is not set
+CT_GCC_VERSION="8.3.0"
+CT_GCC_MIRRORS="$(CT_Mirrors GNU gcc/gcc-${CT_GCC_VERSION}) $(CT_Mirrors sourceware gcc/releases/gcc-${CT_GCC_VERSION})"
+CT_GCC_ARCHIVE_FILENAME="@{pkg_name}-@{version}"
+CT_GCC_ARCHIVE_DIRNAME="@{pkg_name}-@{version}"
+CT_GCC_ARCHIVE_FORMATS=".tar.xz .tar.gz"
+CT_GCC_SIGNATURE_FORMAT=""
+CT_GCC_later_than_7=y
+CT_GCC_7_or_later=y
+CT_GCC_later_than_6=y
+CT_GCC_6_or_later=y
+CT_GCC_later_than_5=y
+CT_GCC_5_or_later=y
+CT_GCC_later_than_4_9=y
+CT_GCC_4_9_or_later=y
+CT_GCC_later_than_4_8=y
+CT_GCC_4_8_or_later=y
+CT_CC_GCC_HAS_LIBMPX=y
 CT_CC_GCC_ENABLE_CXX_FLAGS=""
 CT_CC_GCC_CORE_EXTRA_CONFIG_ARRAY=""
 CT_CC_GCC_EXTRA_CONFIG_ARRAY=""
-CT_CC_GCC_EXTRA_ENV_ARRAY=""
 CT_CC_GCC_STATIC_LIBSTDCXX=y
 # CT_CC_GCC_SYSTEM_ZLIB is not set
+CT_CC_GCC_CONFIG_TLS=m
 
 #
 # Optimisation features
 #
+CT_CC_GCC_USE_GRAPHITE=y
+CT_CC_GCC_USE_LTO=y
 
 #
 # Settings for libraries running on target
@@ -465,97 +543,206 @@ CT_CC_GCC_DEC_FLOAT_AUTO=y
 # CT_CC_GCC_DEC_FLOAT_BID is not set
 # CT_CC_GCC_DEC_FLOAT_DPD is not set
 # CT_CC_GCC_DEC_FLOATS_NO is not set
-CT_CC_SUPPORT_CXX=y
-CT_CC_SUPPORT_FORTRAN=y
-CT_CC_SUPPORT_JAVA=y
-CT_CC_SUPPORT_ADA=y
-CT_CC_SUPPORT_OBJC=y
-CT_CC_SUPPORT_OBJCXX=y
-CT_CC_SUPPORT_GOLANG=y
+CT_ALL_CC_CHOICES="GCC"
 
 #
 # Additional supported languages:
 #
 CT_CC_LANG_CXX=y
-# CT_CC_LANG_JAVA is not set
+# CT_CC_LANG_FORTRAN is not set
 
 #
 # Debug facilities
 #
-# CT_DEBUG_dmalloc is not set
-# CT_DEBUG_duma is not set
-# CT_DEBUG_gdb is not set
-# CT_DEBUG_ltrace is not set
-# CT_DEBUG_strace is not set
+# CT_DEBUG_DUMA is not set
+# CT_DEBUG_GDB is not set
+# CT_DEBUG_LTRACE is not set
+# CT_DEBUG_STRACE is not set
+CT_ALL_DEBUG_CHOICES="DUMA GDB LTRACE STRACE"
 
 #
 # Companion libraries
 #
-CT_COMPLIBS_NEEDED=y
+# CT_COMPLIBS_CHECK is not set
+# CT_COMP_LIBS_CLOOG is not set
+# CT_COMP_LIBS_EXPAT is not set
+CT_COMP_LIBS_GETTEXT=y
+CT_COMP_LIBS_GETTEXT_PKG_KSYM="GETTEXT"
+CT_GETTEXT_DIR_NAME="gettext"
+CT_GETTEXT_PKG_NAME="gettext"
+CT_GETTEXT_SRC_RELEASE=y
+CT_GETTEXT_PATCH_ORDER="global"
+CT_GETTEXT_V_0_19_8_1=y
+# CT_GETTEXT_NO_VERSIONS is not set
+CT_GETTEXT_VERSION="0.19.8.1"
+CT_GETTEXT_MIRRORS="$(CT_Mirrors GNU gettext)"
+CT_GETTEXT_ARCHIVE_FILENAME="@{pkg_name}-@{version}"
+CT_GETTEXT_ARCHIVE_DIRNAME="@{pkg_name}-@{version}"
+CT_GETTEXT_ARCHIVE_FORMATS=".tar.xz .tar.lz .tar.gz"
+CT_GETTEXT_SIGNATURE_FORMAT="packed/.sig"
+CT_COMP_LIBS_GMP=y
+CT_COMP_LIBS_GMP_PKG_KSYM="GMP"
+CT_GMP_DIR_NAME="gmp"
+CT_GMP_PKG_NAME="gmp"
+CT_GMP_SRC_RELEASE=y
+CT_GMP_PATCH_ORDER="global"
+CT_GMP_V_6_1=y
+# CT_GMP_NO_VERSIONS is not set
+CT_GMP_VERSION="6.1.2"
+CT_GMP_MIRRORS="https://gmplib.org/download/gmp https://gmplib.org/download/gmp/archive $(CT_Mirrors GNU gmp)"
+CT_GMP_ARCHIVE_FILENAME="@{pkg_name}-@{version}"
+CT_GMP_ARCHIVE_DIRNAME="@{pkg_name}-@{version}"
+CT_GMP_ARCHIVE_FORMATS=".tar.xz .tar.lz .tar.bz2"
+CT_GMP_SIGNATURE_FORMAT="packed/.sig"
+CT_GMP_later_than_5_1_0=y
+CT_GMP_5_1_0_or_later=y
+CT_GMP_later_than_5_0_0=y
+CT_GMP_5_0_0_or_later=y
+CT_COMP_LIBS_ISL=y
+CT_COMP_LIBS_ISL_PKG_KSYM="ISL"
+CT_ISL_DIR_NAME="isl"
+CT_ISL_PKG_NAME="isl"
+CT_ISL_SRC_RELEASE=y
+CT_ISL_PATCH_ORDER="global"
+CT_ISL_V_0_20=y
+# CT_ISL_V_0_19 is not set
+# CT_ISL_V_0_18 is not set
+# CT_ISL_V_0_17 is not set
+# CT_ISL_V_0_16 is not set
+# CT_ISL_V_0_15 is not set
+# CT_ISL_NO_VERSIONS is not set
+CT_ISL_VERSION="0.20"
+CT_ISL_MIRRORS="http://isl.gforge.inria.fr"
+CT_ISL_ARCHIVE_FILENAME="@{pkg_name}-@{version}"
+CT_ISL_ARCHIVE_DIRNAME="@{pkg_name}-@{version}"
+CT_ISL_ARCHIVE_FORMATS=".tar.xz .tar.bz2 .tar.gz"
+CT_ISL_SIGNATURE_FORMAT=""
+CT_ISL_later_than_0_18=y
+CT_ISL_0_18_or_later=y
+CT_ISL_later_than_0_15=y
+CT_ISL_0_15_or_later=y
+CT_ISL_REQUIRE_0_15_or_later=y
+CT_ISL_later_than_0_14=y
+CT_ISL_0_14_or_later=y
+CT_ISL_REQUIRE_0_14_or_later=y
+CT_ISL_later_than_0_13=y
+CT_ISL_0_13_or_later=y
+CT_ISL_later_than_0_12=y
+CT_ISL_0_12_or_later=y
+CT_ISL_REQUIRE_0_12_or_later=y
+# CT_COMP_LIBS_LIBELF is not set
+CT_COMP_LIBS_LIBICONV=y
+CT_COMP_LIBS_LIBICONV_PKG_KSYM="LIBICONV"
+CT_LIBICONV_DIR_NAME="libiconv"
+CT_LIBICONV_PKG_NAME="libiconv"
+CT_LIBICONV_SRC_RELEASE=y
+CT_LIBICONV_PATCH_ORDER="global"
+CT_LIBICONV_V_1_15=y
+# CT_LIBICONV_NO_VERSIONS is not set
+CT_LIBICONV_VERSION="1.15"
+CT_LIBICONV_MIRRORS="$(CT_Mirrors GNU libiconv)"
+CT_LIBICONV_ARCHIVE_FILENAME="@{pkg_name}-@{version}"
+CT_LIBICONV_ARCHIVE_DIRNAME="@{pkg_name}-@{version}"
+CT_LIBICONV_ARCHIVE_FORMATS=".tar.gz"
+CT_LIBICONV_SIGNATURE_FORMAT="packed/.sig"
+CT_COMP_LIBS_MPC=y
+CT_COMP_LIBS_MPC_PKG_KSYM="MPC"
+CT_MPC_DIR_NAME="mpc"
+CT_MPC_PKG_NAME="mpc"
+CT_MPC_SRC_RELEASE=y
+CT_MPC_PATCH_ORDER="global"
+# CT_MPC_V_1_1 is not set
+CT_MPC_V_1_0=y
+# CT_MPC_NO_VERSIONS is not set
+CT_MPC_VERSION="1.0.3"
+CT_MPC_MIRRORS="http://www.multiprecision.org/downloads $(CT_Mirrors GNU mpc)"
+CT_MPC_ARCHIVE_FILENAME="@{pkg_name}-@{version}"
+CT_MPC_ARCHIVE_DIRNAME="@{pkg_name}-@{version}"
+CT_MPC_ARCHIVE_FORMATS=".tar.gz"
+CT_MPC_SIGNATURE_FORMAT="packed/.sig"
+CT_MPC_1_1_0_or_older=y
+CT_MPC_older_than_1_1_0=y
+CT_COMP_LIBS_MPFR=y
+CT_COMP_LIBS_MPFR_PKG_KSYM="MPFR"
+CT_MPFR_DIR_NAME="mpfr"
+CT_MPFR_PKG_NAME="mpfr"
+CT_MPFR_SRC_RELEASE=y
+CT_MPFR_PATCH_ORDER="global"
+CT_MPFR_V_3_1=y
+# CT_MPFR_NO_VERSIONS is not set
+CT_MPFR_VERSION="3.1.6"
+CT_MPFR_MIRRORS="http://www.mpfr.org/mpfr-${CT_MPFR_VERSION} $(CT_Mirrors GNU mpfr)"
+CT_MPFR_ARCHIVE_FILENAME="@{pkg_name}-@{version}"
+CT_MPFR_ARCHIVE_DIRNAME="@{pkg_name}-@{version}"
+CT_MPFR_ARCHIVE_FORMATS=".tar.xz .tar.bz2 .tar.gz .zip"
+CT_MPFR_SIGNATURE_FORMAT="packed/.asc"
+CT_MPFR_4_0_0_or_older=y
+CT_MPFR_older_than_4_0_0=y
+CT_MPFR_REQUIRE_older_than_4_0_0=y
+CT_MPFR_later_than_3_0_0=y
+CT_MPFR_3_0_0_or_later=y
+CT_COMP_LIBS_NCURSES=y
+CT_COMP_LIBS_NCURSES_PKG_KSYM="NCURSES"
+CT_NCURSES_DIR_NAME="ncurses"
+CT_NCURSES_PKG_NAME="ncurses"
+CT_NCURSES_SRC_RELEASE=y
+CT_NCURSES_PATCH_ORDER="global"
+CT_NCURSES_V_6_1=y
+# CT_NCURSES_V_6_0 is not set
+# CT_NCURSES_NO_VERSIONS is not set
+CT_NCURSES_VERSION="6.1"
+CT_NCURSES_MIRRORS="ftp://invisible-island.net/ncurses $(CT_Mirrors GNU ncurses)"
+CT_NCURSES_ARCHIVE_FILENAME="@{pkg_name}-@{version}"
+CT_NCURSES_ARCHIVE_DIRNAME="@{pkg_name}-@{version}"
+CT_NCURSES_ARCHIVE_FORMATS=".tar.gz"
+CT_NCURSES_SIGNATURE_FORMAT="packed/.sig"
+CT_NCURSES_HOST_CONFIG_ARGS=""
+CT_NCURSES_HOST_DISABLE_DB=y
+CT_NCURSES_HOST_FALLBACKS="linux,xterm,xterm-color,xterm-256color,vt100"
+CT_NCURSES_TARGET_CONFIG_ARGS=""
+# CT_NCURSES_TARGET_DISABLE_DB is not set
+CT_NCURSES_TARGET_FALLBACKS=""
+CT_COMP_LIBS_ZLIB=y
+CT_COMP_LIBS_ZLIB_PKG_KSYM="ZLIB"
+CT_ZLIB_DIR_NAME="zlib"
+CT_ZLIB_PKG_NAME="zlib"
+CT_ZLIB_SRC_RELEASE=y
+CT_ZLIB_PATCH_ORDER="global"
+CT_ZLIB_V_1_2_11=y
+# CT_ZLIB_NO_VERSIONS is not set
+CT_ZLIB_VERSION="1.2.11"
+CT_ZLIB_MIRRORS="http://downloads.sourceforge.net/project/libpng/zlib/${CT_ZLIB_VERSION}"
+CT_ZLIB_ARCHIVE_FILENAME="@{pkg_name}-@{version}"
+CT_ZLIB_ARCHIVE_DIRNAME="@{pkg_name}-@{version}"
+CT_ZLIB_ARCHIVE_FORMATS=".tar.xz .tar.gz"
+CT_ZLIB_SIGNATURE_FORMAT="packed/.asc"
+CT_ALL_COMP_LIBS_CHOICES="CLOOG EXPAT GETTEXT GMP ISL LIBELF LIBICONV MPC MPFR NCURSES ZLIB"
 CT_LIBICONV_NEEDED=y
 CT_GETTEXT_NEEDED=y
 CT_GMP_NEEDED=y
 CT_MPFR_NEEDED=y
 CT_ISL_NEEDED=y
 CT_MPC_NEEDED=y
-CT_COMPLIBS=y
+CT_NCURSES_NEEDED=y
+CT_ZLIB_NEEDED=y
 CT_LIBICONV=y
 CT_GETTEXT=y
 CT_GMP=y
 CT_MPFR=y
 CT_ISL=y
 CT_MPC=y
-CT_LIBICONV_V_1_14=y
-CT_LIBICONV_VERSION="1.14"
-CT_GETTEXT_V_0_19_6=y
-CT_GETTEXT_VERSION="0.19.6"
-CT_GMP_V_6_0_0=y
-# CT_GMP_V_5_1_3 is not set
-# CT_GMP_V_5_1_1 is not set
-# CT_GMP_V_5_0_2 is not set
-# CT_GMP_V_5_0_1 is not set
-# CT_GMP_V_4_3_2 is not set
-# CT_GMP_V_4_3_1 is not set
-# CT_GMP_V_4_3_0 is not set
-CT_GMP_5_0_2_or_later=y
-CT_GMP_VERSION="6.0.0a"
-CT_MPFR_V_3_1_3=y
-# CT_MPFR_V_3_1_2 is not set
-# CT_MPFR_V_3_1_0 is not set
-# CT_MPFR_V_3_0_1 is not set
-# CT_MPFR_V_3_0_0 is not set
-# CT_MPFR_V_2_4_2 is not set
-# CT_MPFR_V_2_4_1 is not set
-# CT_MPFR_V_2_4_0 is not set
-CT_MPFR_VERSION="3.1.3"
-CT_ISL_V_0_14=y
-# CT_ISL_V_0_12_2 is not set
-CT_ISL_V_0_14_or_later=y
-CT_ISL_V_0_12_or_later=y
-CT_ISL_VERSION="0.14"
-# CT_CLOOG_V_0_18_4 is not set
-# CT_CLOOG_V_0_18_1 is not set
-# CT_CLOOG_V_0_18_0 is not set
-CT_MPC_V_1_0_3=y
-# CT_MPC_V_1_0_2 is not set
-# CT_MPC_V_1_0_1 is not set
-# CT_MPC_V_1_0 is not set
-# CT_MPC_V_0_9 is not set
-# CT_MPC_V_0_8_2 is not set
-# CT_MPC_V_0_8_1 is not set
-# CT_MPC_V_0_7 is not set
-CT_MPC_VERSION="1.0.3"
-
-#
-# Companion libraries common options
-#
-# CT_COMPLIBS_CHECK is not set
+CT_NCURSES=y
+CT_ZLIB=y
 
 #
 # Companion tools
 #
-
-#
-# READ HELP before you say 'Y' below !!!
-#
-# CT_COMP_TOOLS is not set
+# CT_COMP_TOOLS_FOR_HOST is not set
+# CT_COMP_TOOLS_AUTOCONF is not set
+# CT_COMP_TOOLS_AUTOMAKE is not set
+# CT_COMP_TOOLS_BISON is not set
+# CT_COMP_TOOLS_DTC is not set
+# CT_COMP_TOOLS_LIBTOOL is not set
+# CT_COMP_TOOLS_M4 is not set
+# CT_COMP_TOOLS_MAKE is not set
+CT_ALL_COMP_TOOLS_CHOICES="AUTOCONF AUTOMAKE BISON DTC LIBTOOL M4 MAKE"
diff --git a/src/ci/docker/dist-armv7-linux/crosstool-ng.sh b/src/ci/docker/dist-armv7-linux/crosstool-ng.sh
new file mode 100644 (file)
index 0000000..ae737d9
--- /dev/null
@@ -0,0 +1,12 @@
+set -ex
+
+# Mirrored from https://github.com/crosstool-ng/crosstool-ng/archive/crosstool-ng-1.24.0.tar.gz
+url="https://rust-lang-ci-mirrors.s3-us-west-1.amazonaws.com/rustc/crosstool-ng-1.24.0.tar.gz"
+curl -Lf $url | tar xzf -
+cd crosstool-ng-crosstool-ng-1.24.0
+./bootstrap
+./configure --prefix=/usr/local
+make -j$(nproc)
+make install
+cd ..
+rm -rf crosstool-ng-crosstool-ng-1.24.0
diff --git a/src/ci/docker/dist-armv7-linux/patches/glibc/ports-2.16.0/001-arm-libgcc_s_resume-used.patch b/src/ci/docker/dist-armv7-linux/patches/glibc/ports-2.16.0/001-arm-libgcc_s_resume-used.patch
deleted file mode 100644 (file)
index 871d522..0000000
+++ /dev/null
@@ -1,48 +0,0 @@
-commit bdb24c2851fd5f0ad9b82d7ea1db911d334b02d2
-Author: Joseph Myers <joseph@codesourcery.com>
-Date:   Tue May 20 21:27:13 2014 +0000
-
-    Fix ARM build with GCC trunk.
-    
-    sysdeps/unix/sysv/linux/arm/unwind-resume.c and
-    sysdeps/unix/sysv/linux/arm/unwind-forcedunwind.c have static
-    variables that are written in C code but only read from toplevel asms.
-    Current GCC trunk now optimizes away such apparently write-only static
-    variables, so causing a build failure.  This patch marks those
-    variables with __attribute_used__ to avoid that optimization.
-    
-    Tested that this fixes the build for ARM.
-    
-            * sysdeps/unix/sysv/linux/arm/unwind-forcedunwind.c
-            (libgcc_s_resume): Use __attribute_used__.
-            * sysdeps/unix/sysv/linux/arm/unwind-resume.c (libgcc_s_resume):
-            Likewise.
-
-diff --git a/sysdeps/unix/sysv/linux/arm/nptl/unwind-forcedunwind.c b/sysdeps/unix/sysv/linux/arm/nptl/unwind-forcedunwind.c
-index 29e2c2b00b04..e848bfeffdcb 100644
---- a/ports/sysdeps/unix/sysv/linux/arm/nptl/unwind-forcedunwind.c
-+++ b/ports/sysdeps/unix/sysv/linux/arm/nptl/unwind-forcedunwind.c
-@@ -22,7 +22,8 @@
- #include <pthreadP.h>
- static void *libgcc_s_handle;
--static void (*libgcc_s_resume) (struct _Unwind_Exception *exc);
-+static void (*libgcc_s_resume) (struct _Unwind_Exception *exc)
-+  __attribute_used__;
- static _Unwind_Reason_Code (*libgcc_s_personality)
-   (_Unwind_State, struct _Unwind_Exception *, struct _Unwind_Context *);
- static _Unwind_Reason_Code (*libgcc_s_forcedunwind)
-diff --git a/sysdeps/unix/sysv/linux/arm/nptl/unwind-resume.c b/sysdeps/unix/sysv/linux/arm/nptl/unwind-resume.c
-index 285b99b5ed0d..48d00fc83641 100644
---- a/ports/sysdeps/unix/sysv/linux/arm/nptl/unwind-resume.c
-+++ b/ports/sysdeps/unix/sysv/linux/arm/nptl/unwind-resume.c
-@@ -20,7 +20,8 @@
- #include <stdio.h>
- #include <unwind.h>
--static void (*libgcc_s_resume) (struct _Unwind_Exception *exc);
-+static void (*libgcc_s_resume) (struct _Unwind_Exception *exc)
-+  __attribute_used__;
- static _Unwind_Reason_Code (*libgcc_s_personality)
-   (_Unwind_State, struct _Unwind_Exception *, struct _Unwind_Context *);
index 51945fd72adc768fcdfc054b1a6a97b6d1629d65..bb72e33def21c107c7b4deb432629b72f2c4b424 100644 (file)
@@ -22,5 +22,6 @@ apt-get update && apt-get install -y --no-install-recommends \
   python2.7 \
   sudo \
   texinfo \
+  unzip \
   wget \
   xz-utils
index 9c7a09e227a474f9302af39e08486f16f98b2311..1be80741594cc930619dcef37fd5768513e4c21f 100644 (file)
@@ -21,13 +21,3 @@ git clone https://github.com/emscripten-core/emsdk.git /emsdk-portable
 cd /emsdk-portable
 hide_output ./emsdk install 1.38.46-upstream
 ./emsdk activate 1.38.46-upstream
-
-# Compile and cache libc
-source ./emsdk_env.sh
-echo "main(){}" > a.c
-HOME=/emsdk-portable/ emcc a.c
-rm -f a.*
-
-# Make emsdk usable by any user
-cp /root/.emscripten /emsdk-portable
-chmod a+rxw -R /emsdk-portable
diff --git a/src/ci/docker/wasm32/Dockerfile b/src/ci/docker/wasm32/Dockerfile
new file mode 100644 (file)
index 0000000..a0f35af
--- /dev/null
@@ -0,0 +1,44 @@
+FROM ubuntu:16.04
+
+RUN apt-get update && apt-get install -y --no-install-recommends \
+  g++ \
+  make \
+  file \
+  curl \
+  ca-certificates \
+  python \
+  git \
+  cmake \
+  sudo \
+  gdb \
+  xz-utils \
+  bzip2
+
+COPY scripts/emscripten.sh /scripts/
+RUN bash /scripts/emscripten.sh
+
+COPY scripts/sccache.sh /scripts/
+RUN sh /scripts/sccache.sh
+
+ENV PATH=$PATH:/emsdk-portable
+ENV PATH=$PATH:/emsdk-portable/upstream/emscripten/
+ENV PATH=$PATH:/emsdk-portable/node/12.9.1_64bit/bin/
+ENV BINARYEN_ROOT=/emsdk-portable/upstream/
+
+ENV TARGETS=wasm32-unknown-emscripten
+
+# Use -O1 optimizations in the link step to reduce time spent optimizing.
+ENV EMCC_CFLAGS=-O1
+
+# Emscripten installation is user-specific
+ENV NO_CHANGE_USER=1
+
+# FIXME: Re-enable these tests once https://github.com/rust-lang/cargo/pull/7476
+# is picked up by CI
+ENV SCRIPT python2.7 ../x.py test --target $TARGETS \
+    --exclude src/libcore \
+    --exclude src/liballoc \
+    --exclude src/libproc_macro \
+    --exclude src/libstd \
+    --exclude src/libterm \
+    --exclude src/libtest
index 04806c80be0f54b1290287e3f85e84bdfc0b6ec7..9bb8b161963fcebc9d9ccd732ba26f42108016d5 160000 (submodule)
@@ -1 +1 @@
-Subproject commit 04806c80be0f54b1290287e3f85e84bdfc0b6ec7
+Subproject commit 9bb8b161963fcebc9d9ccd732ba26f42108016d5
index 4374786f0b4bf0606b35d5c30a9681f342e5707b..5004ad30d69f93553ceef74439fea2159d1f769e 160000 (submodule)
@@ -1 +1 @@
-Subproject commit 4374786f0b4bf0606b35d5c30a9681f342e5707b
+Subproject commit 5004ad30d69f93553ceef74439fea2159d1f769e
index 320d232b206edecb67489316f71a14e31dbc6c08..5b9d2fcefadfc32fceafacfc0dd9441d9b57dd94 160000 (submodule)
@@ -1 +1 @@
-Subproject commit 320d232b206edecb67489316f71a14e31dbc6c08
+Subproject commit 5b9d2fcefadfc32fceafacfc0dd9441d9b57dd94
index a6288e7407a6c4c19ea29de6d43f40c803883f21..0b111eaae36cc4b4997684be853882a59e2c7ca7 160000 (submodule)
@@ -1 +1 @@
-Subproject commit a6288e7407a6c4c19ea29de6d43f40c803883f21
+Subproject commit 0b111eaae36cc4b4997684be853882a59e2c7ca7
index 3cda8d927973ca64c7210d37ecd750093f838ca7..d5564fd798f39f373724269a1b029ceaf444ff98 100644 (file)
@@ -10,6 +10,7 @@
         - [Warn-by-default lints](lints/listing/warn-by-default.md)
         - [Deny-by-default lints](lints/listing/deny-by-default.md)
 - [Codegen options](codegen-options/index.md)
+- [JSON Output](json.md)
 - [Targets](targets/index.md)
     - [Built-in Targets](targets/built-in.md)
     - [Custom Targets](targets/custom.md)
index 5eea9c86879001ef9a13ee6f3c8ecbc487697ccc..b2cc65c11fd2ce5afe1444d19dcc256fc00a31ec 100644 (file)
@@ -92,6 +92,7 @@ information about editions may be found in the [edition guide].
 [edition guide]: ../edition-guide/introduction.html
 
 ## `--emit`: specifies the types of output files to generate
+ <a id="option-emit"></a>
 
 This flag controls the types of output files generated by the compiler. It
 accepts a comma-separated list of values, and may be specified multiple times.
@@ -241,12 +242,13 @@ The "sysroot" is where `rustc` looks for the crates that come with the Rust
 distribution; this flag allows that to be overridden.
 
 ## `--error-format`: control how errors are produced
+ <a id="option-error-format"></a>
 
 This flag lets you control the format of messages. Messages are printed to
 stderr. The valid options are:
 
 - `human` — Human-readable output. This is the default.
-- `json` — Structured JSON output.
+- `json` — Structured JSON output. See [the JSON chapter] for more detail.
 - `short` — Short, one-line messages.
 
 ## `--color`: configure coloring of output
@@ -273,6 +275,7 @@ pathname syntax. For example `--remap-path-prefix foo=bar` will match
 `foo/lib.rs` but not `./foo/lib.rs`.
 
 ## `--json`: configure json messages printed by the compiler
+ <a id="option-json"></a>
 
 When the `--error-format=json` option is passed to rustc then all of the
 compiler's diagnostic output will be emitted in the form of JSON blobs. The
@@ -305,9 +308,13 @@ to customize the output:
 Note that it is invalid to combine the `--json` argument with the `--color`
 argument, and it is required to combine `--json` with `--error-format=json`.
 
+See [the JSON chapter] for more detail.
+
 ## `@path`: load command-line flags from a path
 
 If you specify `@path` on the command-line, then it will open `path` and read
 command line options from it. These options are one per line; a blank line indicates
 an empty option. The file can use Unix or Windows style line endings, and must be
 encoded as UTF-8.
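+
+For example, a hypothetical `args.txt` (the file name is illustrative) with one
+option per line:
+
+```text
+--edition=2018
+-O
+```
+
+could then be passed as `rustc @args.txt lib.rs`.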
+
+[the JSON chapter]: json.md
diff --git a/src/doc/rustc/src/json.md b/src/doc/rustc/src/json.md
new file mode 100644 (file)
index 0000000..b737849
--- /dev/null
@@ -0,0 +1,231 @@
+# JSON Output
+
+This chapter documents the JSON structures emitted by `rustc`. JSON may be
+enabled with the [`--error-format=json` flag][option-error-format]. Additional
+options may be specified with the [`--json` flag][option-json] which can
+change which messages are generated, and the format of the messages.
+
+JSON messages are emitted one per line to stderr.
+
+If parsing the output with Rust, the
+[`cargo_metadata`](https://crates.io/crates/cargo_metadata) crate provides
+some support for parsing the messages.
+
+When parsing, care should be taken to be forwards-compatible with future changes
+to the format. Optional values may be `null`. New fields may be added. Enumerated
+fields like "level" or "suggestion_applicability" may add new values.
+
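+As a rough sketch (not part of `rustc` or `cargo_metadata`), the stream can be
+consumed line by line with a generic JSON parser. The example below assumes the
+`serde_json` crate and an illustrative input file `lib.rs`:
+
+```rust,ignore
+use std::process::Command;
+
+fn main() {
+    // Run rustc with JSON diagnostics enabled; the file name is illustrative.
+    let output = Command::new("rustc")
+        .args(&["--error-format=json", "lib.rs"])
+        .output()
+        .expect("failed to run rustc");
+
+    // Each line on stderr is one self-contained JSON blob.
+    for line in String::from_utf8_lossy(&output.stderr).lines() {
+        let diag: serde_json::Value =
+            serde_json::from_str(line).expect("malformed JSON diagnostic");
+        // Read only the fields you need; new fields may be added over time.
+        println!("{}: {}", diag["level"], diag["message"]);
+    }
+}
+```
+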
+## Diagnostics
+
+Diagnostic messages provide errors or possible concerns generated during
+compilation. `rustc` provides detailed information about where the diagnostic
+originates, along with hints and suggestions.
+
+Diagnostics are arranged in a parent/child relationship where the parent
+diagnostic value is the core of the diagnostic, and the attached children
+provide additional context, help, and information.
+
+Diagnostics have the following format:
+
+```javascript
+{
+    /* The primary message. */
+    "message": "unused variable: `x`",
+    /* The diagnostic code.
+       Some messages may set this value to null.
+    */
+    "code": {
+        /* A unique string identifying which diagnostic triggered. */
+        "code": "unused_variables",
+        /* An optional string explaining more detail about the diagnostic code. */
+        "explanation": null
+    },
+    /* The severity of the diagnostic.
+       Values may be:
+       - "error": A fatal error that prevents compilation.
+       - "warning": A possible error or concern.
+       - "note": Additional information or context about the diagnostic.
+       - "help": A suggestion on how to resolve the diagnostic.
+       - "failure-note": A note attached to the message for further information.
+       - "error: internal compiler error": Indicates a bug within the compiler.
+    */
+    "level": "warning",
+    /* An array of source code locations to point out specific details about
+       where the diagnostic originates from. This may be empty, for example
+       for some global messages, or child messages attached to a parent.
+
+       Character offsets are offsets of Unicode Scalar Values.
+    */
+    "spans": [
+        {
+            /* The file where the span is located.
+               For spans located within a macro expansion, this will be the
+               name of the expanded macro in the format "<MACRONAME macros>".
+            */
+            "file_name": "lib.rs",
+            /* The byte offset where the span starts (0-based, inclusive). */
+            "byte_start": 21,
+            /* The byte offset where the span ends (0-based, exclusive). */
+            "byte_end": 22,
+            /* The first line number of the span (1-based, inclusive). */
+            "line_start": 2,
+            /* The last line number of the span (1-based, inclusive). */
+            "line_end": 2,
+            /* The first character offset of the line_start (1-based, inclusive). */
+            "column_start": 9,
+            /* The last character offset of the line_end (1-based, exclusive). */
+            "column_end": 10,
+            /* Whether or not this is the "primary" span.
+
+               This indicates that this span is the focal point of the
+               diagnostic.
+
+               There are rare cases where multiple spans may be marked as
+               primary. For example, "immutable borrow occurs here" and
+               "mutable borrow ends here" can be two separate primary spans.
+
+               The top (parent) message should always have at least one
+               primary span, unless it has zero spans. Child messages may have
+               zero or more primary spans.
+            */
+            "is_primary": true,
+            /* An array of objects showing the original source code for this
+               span. This shows the entire lines of text where the span is
+               located. A span across multiple lines will have a separate
+               value for each line.
+            */
+            "text": [
+                {
+                    /* The entire line of the original source code. */
+                    "text": "    let x = 123;",
+                    /* The first character offset of the line of
+                       where the span covers this line (1-based, inclusive). */
+                    "highlight_start": 9,
+                    /* The last character offset of the line of
+                       where the span covers this line (1-based, exclusive). */
+                    "highlight_end": 10
+                }
+            ],
+            /* An optional message to display at this span location.
+               This is typically null for primary spans.
+            */
+            "label": null,
+            /* An optional string of a suggested replacement for this span to
+               solve the issue. Tools may try to replace the contents of the
+               span with this text.
+            */
+            "suggested_replacement": null,
+            /* An optional string that indicates the confidence of the
+               "suggested_replacement". Tools may use this value to determine
+               whether or not suggestions should be automatically applied.
+
+               Possible values may be:
+               - "MachineApplicable": The suggestion is definitely what the
+                 user intended. This suggestion should be automatically
+                 applied.
+               - "MaybeIncorrect": The suggestion may be what the user
+                 intended, but it is uncertain. The suggestion should result
+                 in valid Rust code if it is applied.
+               - "HasPlaceholders": The suggestion contains placeholders like
+                 `(...)`. The suggestion cannot be applied automatically
+                 because it will not result in valid Rust code. The user will
+                 need to fill in the placeholders.
+               - "Unspecified": The applicability of the suggestion is unknown.
+            */
+            "suggestion_applicability": null,
+            /* An optional object indicating the expansion of a macro within
+               this span.
+
+               If a message occurs within a macro invocation, this object will
+               provide details of where within the macro expansion the message
+               is located.
+            */
+            "expansion": {
+                /* The span of the macro invocation.
+                   Uses the same span definition as the "spans" array.
+                */
+                "span": {/*...*/},
+                /* Name of the macro, such as "foo!" or "#[derive(Eq)]". */
+                "macro_decl_name": "some_macro!",
+                /* Optional span where the relevant part of the macro is
+                  defined. */
+                "def_site_span": {/*...*/},
+            }
+        }
+    ],
+    /* Array of attached diagnostic messages.
+       This is an array of objects using the same format as the parent
+       message. Children are not nested (children do not themselves
+       contain "children" definitions).
+    */
+    "children": [
+        {
+            "message": "`#[warn(unused_variables)]` on by default",
+            "code": null,
+            "level": "note",
+            "spans": [],
+            "children": [],
+            "rendered": null
+        },
+        {
+            "message": "consider prefixing with an underscore",
+            "code": null,
+            "level": "help",
+            "spans": [
+                {
+                    "file_name": "lib.rs",
+                    "byte_start": 21,
+                    "byte_end": 22,
+                    "line_start": 2,
+                    "line_end": 2,
+                    "column_start": 9,
+                    "column_end": 10,
+                    "is_primary": true,
+                    "text": [
+                        {
+                            "text": "    let x = 123;",
+                            "highlight_start": 9,
+                            "highlight_end": 10
+                        }
+                    ],
+                    "label": null,
+                    "suggested_replacement": "_x",
+                    "suggestion_applicability": "MachineApplicable",
+                    "expansion": null
+                }
+            ],
+            "children": [],
+            "rendered": null
+        }
+    ],
+    /* Optional string of the rendered version of the diagnostic as displayed
+       by rustc. Note that this may be influenced by the `--json` flag.
+    */
+    "rendered": "warning: unused variable: `x`\n --> lib.rs:2:9\n  |\n2 |     let x = 123;\n  |         ^ help: consider prefixing with an underscore: `_x`\n  |\n  = note: `#[warn(unused_variables)]` on by default\n\n"
+}
+```
+
+## Artifact notifications
+
+Artifact notifications are emitted when the [`--json=artifacts`
+flag][option-json] is used. They indicate that a file artifact has been saved
+to disk. More information about emit kinds may be found in the [`--emit`
+flag][option-emit] documentation.
+
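+For instance, an invocation that requests artifact notifications might look
+like this (the input file name is illustrative):
+
+```sh
+rustc --error-format=json --json=artifacts lib.rs
+```
+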
+```javascript
+{
+    /* The filename that was generated. */
+    "artifact": "libfoo.rlib",
+    /* The kind of artifact that was generated. Possible values:
+       - "link": The generated crate as specified by the crate-type.
+       - "dep-info": The `.d` file with dependency information in a Makefile-like syntax.
+       - "metadata": The Rust `.rmeta` file containing metadata about the crate.
+       - "save-analysis": A JSON file emitted by the `-Zsave-analysis` feature.
+    */
+    "emit": "link"
+}
+```
+
+[option-emit]: command-line-arguments.md#option-emit
+[option-error-format]: command-line-arguments.md#option-error-format
+[option-json]: command-line-arguments.md#option-json
index 6574267f185113f13da7fa7b93b0f05898255694..5688e90ada129a8fa0fdc9942be4acff461767ae 100644 (file)
@@ -222,3 +222,28 @@ error: invalid `crate_type` value
   | ^^^^^^^^^^^^^^^^^^^^
   |
 ```
+
+## const-err
+
+This lint detects expressions that will always panic at runtime and would be an
+error in a `const` context.
+
+```rust,ignore
+let _ = [0; 4][4];
+```
+
+This will produce:
+
+```text
+error: index out of bounds: the len is 4 but the index is 4
+ --> src/lib.rs:1:9
+  |
+1 | let _ = [0; 4][4];
+  |         ^^^^^^^^^
+  |
+```
+
+## order-dependent-trait-objects
+
+This lint detects a trait coherence violation that would allow creating two
+trait impls for the same dynamic trait object involving marker traits.
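+
+For example, a minimal illustrative sketch (the trait name is hypothetical)
+where two impls differ only in the order of the marker traits in the trait
+object type:
+
+```rust,ignore
+pub trait Trait {}
+
+impl Trait for dyn Send + Sync {}
+impl Trait for dyn Sync + Send {}  // lint: overlaps with the impl above
+```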
index 38be07a6440dab772e199f81687e2f4c3304812b..d066f4a9cf59c1040c8691b803b3d650f399944c 100644 (file)
@@ -125,6 +125,17 @@ RUSTFLAGS="-Cprofile-use=/tmp/pgo-data/merged.profdata" \
     cargo build --release --target=x86_64-unknown-linux-gnu
 ```
 
+### Troubleshooting
+
+- It is recommended to pass `-Cllvm-args=-pgo-warn-missing-function` during the
+  `-Cprofile-use` phase. LLVM by default does not warn if it cannot find
+  profiling data for a given function. Enabling this warning will make it
+  easier to spot errors in your setup, as shown in the example below.
+
+- There is a [known issue](https://github.com/rust-lang/cargo/issues/7416) in
+  Cargo prior to version 1.39 that will prevent PGO from working correctly. Be
+  sure to use Cargo 1.39 or newer when doing PGO.
+
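+A sketch of the `-Cprofile-use` step with this extra warning enabled, using the
+same illustrative paths as above:
+
+```bash
+RUSTFLAGS="-Cprofile-use=/tmp/pgo-data/merged.profdata -Cllvm-args=-pgo-warn-missing-function" \
+    cargo build --release --target=x86_64-unknown-linux-gnu
+```
+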
 ## Further Reading
 
 `rustc`'s PGO support relies entirely on LLVM's implementation of the feature
diff --git a/src/doc/unstable-book/src/compiler-flags/report-time.md b/src/doc/unstable-book/src/compiler-flags/report-time.md
new file mode 100644 (file)
index 0000000..ed4e9c6
--- /dev/null
@@ -0,0 +1,80 @@
+# `report-time`
+
+The tracking issue for this feature is: [#64888]
+
+[#64888]: https://github.com/rust-lang/rust/issues/64888
+
+------------------------
+
+The `report-time` feature adds the ability to report the execution time of
+tests generated via `libtest`.
+
+This is an unstable feature, so you have to pass `-Zunstable-options` to make
+it work.
+
+Sample usage command:
+
+```sh
+./test_executable -Zunstable-options --report-time
+```
+
+Available options:
+
+```sh
+--report-time [plain|colored]
+                Show execution time of each test. Available values:
+                plain = do not colorize the execution time (default);
+                colored = colorize output according to the `color`
+                parameter value;
+                Threshold values for colorized output can be
+                configured via
+                `RUST_TEST_TIME_UNIT`, `RUST_TEST_TIME_INTEGRATION`
+                and
+                `RUST_TEST_TIME_DOCTEST` environment variables.
+                Expected format of environment variable is
+                `VARIABLE=WARN_TIME,CRITICAL_TIME`.
+                Not available for --format=terse
+--ensure-time 
+                Treat excess of the test execution time limit as
+                error.
+                Threshold values for this option can be configured via
+                `RUST_TEST_TIME_UNIT`, `RUST_TEST_TIME_INTEGRATION`
+                and
+                `RUST_TEST_TIME_DOCTEST` environment variables.
+                Expected format of environment variable is
+                `VARIABLE=WARN_TIME,CRITICAL_TIME`.
+                `CRITICAL_TIME` here means the limit that should not be
+                exceeded by test.
+```
+
+Example of the environment variable format:
+
+```sh
+RUST_TEST_TIME_UNIT=100,200
+```
+
+where 100 stands for warn time, and 200 stands for critical time.
+
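+The same threshold variables also drive `--ensure-time`; a hypothetical
+invocation combining the two (values are illustrative):
+
+```sh
+RUST_TEST_TIME_UNIT=100,200 cargo test --tests -- -Zunstable-options --report-time --ensure-time
+```
+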
+## Examples
+
+```sh
+cargo test --tests -- -Zunstable-options --report-time
+    Finished dev [unoptimized + debuginfo] target(s) in 0.02s
+     Running target/debug/deps/example-27fb188025bec02c
+
+running 3 tests
+test tests::unit_test_quick ... ok <0.000s>
+test tests::unit_test_warn ... ok <0.055s>
+test tests::unit_test_critical ... ok <0.110s>
+
+test result: ok. 3 passed; 0 failed; 0 ignored; 0 measured; 0 filtered out
+
+     Running target/debug/deps/tests-cedb06f6526d15d9
+
+running 3 tests
+test unit_test_quick ... ok <0.000s>
+test unit_test_warn ... ok <0.550s>
+test unit_test_critical ... ok <1.100s>
+
+test result: ok. 3 passed; 0 failed; 0 ignored; 0 measured; 0 filtered out
+```
diff --git a/src/doc/unstable-book/src/language-features/track-caller.md b/src/doc/unstable-book/src/language-features/track-caller.md
new file mode 100644 (file)
index 0000000..afc11a2
--- /dev/null
@@ -0,0 +1,5 @@
+# `track_caller`
+
+The tracking issue for this feature is: [#47809](https://github.com/rust-lang/rust/issues/47809).
+
+------------------------
index b2789a535fe498827bd78573e1fdf20d3b34efc3..567b8ea722491edbe2c9bfe9218b1632038dbc92 100644 (file)
@@ -142,6 +142,9 @@ pub fn new(x: T) -> Box<T> {
     #[unstable(feature = "new_uninit", issue = "63291")]
     pub fn new_uninit() -> Box<mem::MaybeUninit<T>> {
         let layout = alloc::Layout::new::<mem::MaybeUninit<T>>();
+        if layout.size() == 0 {
+            return Box(NonNull::dangling().into())
+        }
         let ptr = unsafe {
             Global.alloc(layout)
                 .unwrap_or_else(|_| alloc::handle_alloc_error(layout))
@@ -182,9 +185,16 @@ impl<T> Box<[T]> {
     #[unstable(feature = "new_uninit", issue = "63291")]
     pub fn new_uninit_slice(len: usize) -> Box<[mem::MaybeUninit<T>]> {
         let layout = alloc::Layout::array::<mem::MaybeUninit<T>>(len).unwrap();
-        let ptr = unsafe { alloc::alloc(layout) };
-        let unique = Unique::new(ptr).unwrap_or_else(|| alloc::handle_alloc_error(layout));
-        let slice = unsafe { slice::from_raw_parts_mut(unique.cast().as_ptr(), len) };
+        let ptr = if layout.size() == 0 {
+            NonNull::dangling()
+        } else {
+            unsafe {
+                Global.alloc(layout)
+                    .unwrap_or_else(|_| alloc::handle_alloc_error(layout))
+                    .cast()
+            }
+        };
+        let slice = unsafe { slice::from_raw_parts_mut(ptr.as_ptr(), len) };
         Box(Unique::from(slice))
     }
 }
@@ -871,11 +881,33 @@ fn size_hint(&self) -> (usize, Option<usize>) {
     fn nth(&mut self, n: usize) -> Option<I::Item> {
         (**self).nth(n)
     }
+    fn last(self) -> Option<I::Item> {
+        BoxIter::last(self)
+    }
+}
+
+trait BoxIter {
+    type Item;
+    fn last(self) -> Option<Self::Item>;
+}
+
+impl<I: Iterator + ?Sized> BoxIter for Box<I> {
+    type Item = I::Item;
+    default fn last(self) -> Option<I::Item> {
+        #[inline]
+        fn some<T>(_: Option<T>, x: T) -> Option<T> {
+            Some(x)
+        }
+
+        self.fold(None, some)
+    }
 }
 
+/// Specialization for sized `I`s that uses `I`s implementation of `last()`
+/// instead of the default.
 #[stable(feature = "rust1", since = "1.0.0")]
-impl<I: Iterator + Sized> Iterator for Box<I> {
-    fn last(self) -> Option<I::Item> where I: Sized {
+impl<I: Iterator> BoxIter for Box<I> {
+    fn last(self) -> Option<I::Item> {
         (*self).last()
     }
 }
index 8250fc38ccd1c7a0b4d556294b128d5d8d699714..f0796354e00c384076f8a47a59f62b849556cd1e 100644 (file)
@@ -2,7 +2,7 @@
 // to TreeMap
 
 use core::borrow::Borrow;
-use core::cmp::Ordering::{self, Less, Greater, Equal};
+use core::cmp::Ordering::{Less, Greater, Equal};
 use core::cmp::{max, min};
 use core::fmt::{self, Debug};
 use core::iter::{Peekable, FromIterator, FusedIterator};
@@ -109,6 +109,77 @@ pub struct Range<'a, T: 'a> {
     iter: btree_map::Range<'a, T, ()>,
 }
 
+/// Core of SymmetricDifference and Union.
+/// More efficient than btree.map.MergeIter,
+/// and crucially for SymmetricDifference, nexts() reports on both sides.
+#[derive(Clone)]
+struct MergeIterInner<I>
+    where I: Iterator,
+          I::Item: Copy,
+{
+    a: I,
+    b: I,
+    peeked: Option<MergeIterPeeked<I>>,
+}
+
+#[derive(Copy, Clone, Debug)]
+enum MergeIterPeeked<I: Iterator> {
+    A(I::Item),
+    B(I::Item),
+}
+
+impl<I> MergeIterInner<I>
+    where I: ExactSizeIterator + FusedIterator,
+          I::Item: Copy + Ord,
+{
+    fn new(a: I, b: I) -> Self {
+        MergeIterInner { a, b, peeked: None }
+    }
+
+    fn nexts(&mut self) -> (Option<I::Item>, Option<I::Item>) {
+        let mut a_next = match self.peeked {
+            Some(MergeIterPeeked::A(next)) => Some(next),
+            _ => self.a.next(),
+        };
+        let mut b_next = match self.peeked {
+            Some(MergeIterPeeked::B(next)) => Some(next),
+            _ => self.b.next(),
+        };
+        let ord = match (a_next, b_next) {
+            (None, None) => Equal,
+            (_, None) => Less,
+            (None, _) => Greater,
+            (Some(a1), Some(b1)) => a1.cmp(&b1),
+        };
+        self.peeked = match ord {
+            Less => b_next.take().map(MergeIterPeeked::B),
+            Equal => None,
+            Greater => a_next.take().map(MergeIterPeeked::A),
+        };
+        (a_next, b_next)
+    }
+
+    fn lens(&self) -> (usize, usize) {
+        match self.peeked {
+            Some(MergeIterPeeked::A(_)) => (1 + self.a.len(), self.b.len()),
+            Some(MergeIterPeeked::B(_)) => (self.a.len(), 1 + self.b.len()),
+            _ => (self.a.len(), self.b.len()),
+        }
+    }
+}
+
+impl<I> Debug for MergeIterInner<I>
+    where I: Iterator + Debug,
+          I::Item: Copy + Debug,
+{
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_tuple("MergeIterInner")
+            .field(&self.a)
+            .field(&self.b)
+            .finish()
+    }
+}
+
 /// A lazy iterator producing elements in the difference of `BTreeSet`s.
 ///
 /// This `struct` is created by the [`difference`] method on [`BTreeSet`].
@@ -120,6 +191,7 @@ pub struct Range<'a, T: 'a> {
 pub struct Difference<'a, T: 'a> {
     inner: DifferenceInner<'a, T>,
 }
+#[derive(Debug)]
 enum DifferenceInner<'a, T: 'a> {
     Stitch {
         // iterate all of self and some of other, spotting matches along the way
@@ -137,21 +209,7 @@ enum DifferenceInner<'a, T: 'a> {
 #[stable(feature = "collection_debug", since = "1.17.0")]
 impl<T: fmt::Debug> fmt::Debug for Difference<'_, T> {
     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
-        match &self.inner {
-            DifferenceInner::Stitch {
-                self_iter,
-                other_iter,
-            } => f
-                .debug_tuple("Difference")
-                .field(&self_iter)
-                .field(&other_iter)
-                .finish(),
-            DifferenceInner::Search {
-                self_iter,
-                other_set: _,
-            } => f.debug_tuple("Difference").field(&self_iter).finish(),
-            DifferenceInner::Iterate(iter) => f.debug_tuple("Difference").field(&iter).finish(),
-        }
+        f.debug_tuple("Difference").field(&self.inner).finish()
     }
 }
 
@@ -163,18 +221,12 @@ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
 /// [`BTreeSet`]: struct.BTreeSet.html
 /// [`symmetric_difference`]: struct.BTreeSet.html#method.symmetric_difference
 #[stable(feature = "rust1", since = "1.0.0")]
-pub struct SymmetricDifference<'a, T: 'a> {
-    a: Peekable<Iter<'a, T>>,
-    b: Peekable<Iter<'a, T>>,
-}
+pub struct SymmetricDifference<'a, T: 'a>(MergeIterInner<Iter<'a, T>>);
 
 #[stable(feature = "collection_debug", since = "1.17.0")]
 impl<T: fmt::Debug> fmt::Debug for SymmetricDifference<'_, T> {
     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
-        f.debug_tuple("SymmetricDifference")
-         .field(&self.a)
-         .field(&self.b)
-         .finish()
+        f.debug_tuple("SymmetricDifference").field(&self.0).finish()
     }
 }
 
@@ -189,6 +241,7 @@ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
 pub struct Intersection<'a, T: 'a> {
     inner: IntersectionInner<'a, T>,
 }
+#[derive(Debug)]
 enum IntersectionInner<'a, T: 'a> {
     Stitch {
         // iterate similarly sized sets jointly, spotting matches along the way
@@ -206,23 +259,7 @@ enum IntersectionInner<'a, T: 'a> {
 #[stable(feature = "collection_debug", since = "1.17.0")]
 impl<T: fmt::Debug> fmt::Debug for Intersection<'_, T> {
     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
-        match &self.inner {
-            IntersectionInner::Stitch {
-                a,
-                b,
-            } => f
-                .debug_tuple("Intersection")
-                .field(&a)
-                .field(&b)
-                .finish(),
-            IntersectionInner::Search {
-                small_iter,
-                large_set: _,
-            } => f.debug_tuple("Intersection").field(&small_iter).finish(),
-            IntersectionInner::Answer(answer) => {
-                f.debug_tuple("Intersection").field(&answer).finish()
-            }
-        }
+        f.debug_tuple("Intersection").field(&self.inner).finish()
     }
 }
 
@@ -234,18 +271,12 @@ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
 /// [`BTreeSet`]: struct.BTreeSet.html
 /// [`union`]: struct.BTreeSet.html#method.union
 #[stable(feature = "rust1", since = "1.0.0")]
-pub struct Union<'a, T: 'a> {
-    a: Peekable<Iter<'a, T>>,
-    b: Peekable<Iter<'a, T>>,
-}
+pub struct Union<'a, T: 'a>(MergeIterInner<Iter<'a, T>>);
 
 #[stable(feature = "collection_debug", since = "1.17.0")]
 impl<T: fmt::Debug> fmt::Debug for Union<'_, T> {
     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
-        f.debug_tuple("Union")
-         .field(&self.a)
-         .field(&self.b)
-         .finish()
+        f.debug_tuple("Union").field(&self.0).finish()
     }
 }
 
@@ -355,19 +386,16 @@ pub fn difference<'a>(&'a self, other: &'a BTreeSet<T>) -> Difference<'a, T> {
                     self_iter.next_back();
                     DifferenceInner::Iterate(self_iter)
                 }
-                _ => {
-                    if self.len() <= other.len() / ITER_PERFORMANCE_TIPPING_SIZE_DIFF {
-                        DifferenceInner::Search {
-                            self_iter: self.iter(),
-                            other_set: other,
-                        }
-                    } else {
-                        DifferenceInner::Stitch {
-                            self_iter: self.iter(),
-                            other_iter: other.iter().peekable(),
-                        }
+                _ if self.len() <= other.len() / ITER_PERFORMANCE_TIPPING_SIZE_DIFF => {
+                    DifferenceInner::Search {
+                        self_iter: self.iter(),
+                        other_set: other,
                     }
                 }
+                _ => DifferenceInner::Stitch {
+                    self_iter: self.iter(),
+                    other_iter: other.iter().peekable(),
+                },
             },
         }
     }
@@ -396,10 +424,7 @@ pub fn difference<'a>(&'a self, other: &'a BTreeSet<T>) -> Difference<'a, T> {
     pub fn symmetric_difference<'a>(&'a self,
                                     other: &'a BTreeSet<T>)
                                     -> SymmetricDifference<'a, T> {
-        SymmetricDifference {
-            a: self.iter().peekable(),
-            b: other.iter().peekable(),
-        }
+        SymmetricDifference(MergeIterInner::new(self.iter(), other.iter()))
     }
 
     /// Visits the values representing the intersection,
@@ -447,24 +472,22 @@ pub fn intersection<'a>(&'a self, other: &'a BTreeSet<T>) -> Intersection<'a, T>
                 (Greater, _) | (_, Less) => IntersectionInner::Answer(None),
                 (Equal, _) => IntersectionInner::Answer(Some(self_min)),
                 (_, Equal) => IntersectionInner::Answer(Some(self_max)),
-                _ => {
-                    if self.len() <= other.len() / ITER_PERFORMANCE_TIPPING_SIZE_DIFF {
-                        IntersectionInner::Search {
-                            small_iter: self.iter(),
-                            large_set: other,
-                        }
-                    } else if other.len() <= self.len() / ITER_PERFORMANCE_TIPPING_SIZE_DIFF {
-                        IntersectionInner::Search {
-                            small_iter: other.iter(),
-                            large_set: self,
-                        }
-                    } else {
-                        IntersectionInner::Stitch {
-                            a: self.iter(),
-                            b: other.iter(),
-                        }
+                _ if self.len() <= other.len() / ITER_PERFORMANCE_TIPPING_SIZE_DIFF => {
+                    IntersectionInner::Search {
+                        small_iter: self.iter(),
+                        large_set: other,
+                    }
+                }
+                _ if other.len() <= self.len() / ITER_PERFORMANCE_TIPPING_SIZE_DIFF => {
+                    IntersectionInner::Search {
+                        small_iter: other.iter(),
+                        large_set: self,
                     }
                 }
+                _ => IntersectionInner::Stitch {
+                    a: self.iter(),
+                    b: other.iter(),
+                },
             },
         }
     }
@@ -489,10 +512,7 @@ pub fn intersection<'a>(&'a self, other: &'a BTreeSet<T>) -> Intersection<'a, T>
     /// ```
     #[stable(feature = "rust1", since = "1.0.0")]
     pub fn union<'a>(&'a self, other: &'a BTreeSet<T>) -> Union<'a, T> {
-        Union {
-            a: self.iter().peekable(),
-            b: other.iter().peekable(),
-        }
+        Union(MergeIterInner::new(self.iter(), other.iter()))
     }
 
     /// Clears the set, removing all values.
@@ -1166,15 +1186,6 @@ fn next_back(&mut self) -> Option<&'a T> {
 #[stable(feature = "fused", since = "1.26.0")]
 impl<T> FusedIterator for Range<'_, T> {}
 
-/// Compares `x` and `y`, but return `short` if x is None and `long` if y is None
-fn cmp_opt<T: Ord>(x: Option<&T>, y: Option<&T>, short: Ordering, long: Ordering) -> Ordering {
-    match (x, y) {
-        (None, _) => short,
-        (_, None) => long,
-        (Some(x1), Some(y1)) => x1.cmp(y1),
-    }
-}
-
 #[stable(feature = "rust1", since = "1.0.0")]
 impl<T> Clone for Difference<'_, T> {
     fn clone(&self) -> Self {
@@ -1261,10 +1272,7 @@ impl<T: Ord> FusedIterator for Difference<'_, T> {}
 #[stable(feature = "rust1", since = "1.0.0")]
 impl<T> Clone for SymmetricDifference<'_, T> {
     fn clone(&self) -> Self {
-        SymmetricDifference {
-            a: self.a.clone(),
-            b: self.b.clone(),
-        }
+        SymmetricDifference(self.0.clone())
     }
 }
 #[stable(feature = "rust1", since = "1.0.0")]
@@ -1273,19 +1281,19 @@ impl<'a, T: Ord> Iterator for SymmetricDifference<'a, T> {
 
     fn next(&mut self) -> Option<&'a T> {
         loop {
-            match cmp_opt(self.a.peek(), self.b.peek(), Greater, Less) {
-                Less => return self.a.next(),
-                Equal => {
-                    self.a.next();
-                    self.b.next();
-                }
-                Greater => return self.b.next(),
+            let (a_next, b_next) = self.0.nexts();
+            if a_next.and(b_next).is_none() {
+                return a_next.or(b_next);
             }
         }
     }
 
     fn size_hint(&self) -> (usize, Option<usize>) {
-        (0, Some(self.a.len() + self.b.len()))
+        let (a_len, b_len) = self.0.lens();
+        // No checked_add, because even if a and b refer to the same set,
+        // and T is an empty type, the storage overhead of sets limits
+        // the number of elements to less than half the range of usize.
+        (0, Some(a_len + b_len))
     }
 }
 
@@ -1311,7 +1319,7 @@ fn clone(&self) -> Self {
                     small_iter: small_iter.clone(),
                     large_set,
                 },
-                IntersectionInner::Answer(answer) => IntersectionInner::Answer(answer.clone()),
+                IntersectionInner::Answer(answer) => IntersectionInner::Answer(*answer),
             },
         }
     }
@@ -1365,10 +1373,7 @@ impl<T: Ord> FusedIterator for Intersection<'_, T> {}
 #[stable(feature = "rust1", since = "1.0.0")]
 impl<T> Clone for Union<'_, T> {
     fn clone(&self) -> Self {
-        Union {
-            a: self.a.clone(),
-            b: self.b.clone(),
-        }
+        Union(self.0.clone())
     }
 }
 #[stable(feature = "rust1", since = "1.0.0")]
@@ -1376,19 +1381,13 @@ impl<'a, T: Ord> Iterator for Union<'a, T> {
     type Item = &'a T;
 
     fn next(&mut self) -> Option<&'a T> {
-        match cmp_opt(self.a.peek(), self.b.peek(), Greater, Less) {
-            Less => self.a.next(),
-            Equal => {
-                self.b.next();
-                self.a.next()
-            }
-            Greater => self.b.next(),
-        }
+        let (a_next, b_next) = self.0.nexts();
+        a_next.or(b_next)
     }
 
     fn size_hint(&self) -> (usize, Option<usize>) {
-        let a_len = self.a.len();
-        let b_len = self.b.len();
+        let (a_len, b_len) = self.0.lens();
+        // No checked_add - see SymmetricDifference::size_hint.
         (max(a_len, b_len), Some(a_len + b_len))
     }
 }
index a4a0fbb194dd448083c320643d2c8a2ef505a60b..0bf573f5e2539fc8d84ddbc2a1c295c1fdb36626 100644 (file)
@@ -10,8 +10,8 @@
 use core::array::LengthAtMost32;
 use core::cmp::{self, Ordering};
 use core::fmt;
-use core::iter::{repeat_with, FromIterator, FusedIterator};
-use core::mem;
+use core::iter::{once, repeat_with, FromIterator, FusedIterator};
+use core::mem::{self, replace};
 use core::ops::Bound::{Excluded, Included, Unbounded};
 use core::ops::{Index, IndexMut, RangeBounds, Try};
 use core::ptr::{self, NonNull};
@@ -57,11 +57,88 @@ pub struct VecDeque<T> {
     buf: RawVec<T>,
 }
 
+/// PairSlices pairs up equal length slice parts of two deques
+///
+/// For example, given deques "A" and "B" with the following division into slices:
+///
+/// A: [0 1 2] [3 4 5]
+/// B: [a b] [c d e]
+///
+/// It produces the following sequence of matching slices:
+///
+/// ([0 1], [a b])
+/// ([2], [c])
+/// ([3 4], [d e])
+///
+/// and the uneven remainder of either A or B is skipped.
+struct PairSlices<'a, 'b, T> {
+    a0: &'a mut [T],
+    a1: &'a mut [T],
+    b0: &'b [T],
+    b1: &'b [T],
+}
+
+impl<'a, 'b, T> PairSlices<'a, 'b, T> {
+    fn from(to: &'a mut VecDeque<T>, from: &'b VecDeque<T>) -> Self {
+        let (a0, a1) = to.as_mut_slices();
+        let (b0, b1) = from.as_slices();
+        PairSlices { a0, a1, b0, b1 }
+    }
+
+    fn has_remainder(&self) -> bool {
+        !self.b0.is_empty()
+    }
+
+    fn remainder(self) -> impl Iterator<Item=&'b [T]> {
+        once(self.b0).chain(once(self.b1))
+    }
+}
+
+impl<'a, 'b, T> Iterator for PairSlices<'a, 'b, T>
+{
+    type Item = (&'a mut [T], &'b [T]);
+    fn next(&mut self) -> Option<Self::Item> {
+        // Get next part length
+        let part = cmp::min(self.a0.len(), self.b0.len());
+        if part == 0 {
+            return None;
+        }
+        let (p0, p1) = replace(&mut self.a0, &mut []).split_at_mut(part);
+        let (q0, q1) = self.b0.split_at(part);
+
+        // Move a1 into a0, if it's empty (and b1, b0 the same way).
+        self.a0 = p1;
+        self.b0 = q1;
+        if self.a0.is_empty() {
+            self.a0 = replace(&mut self.a1, &mut []);
+        }
+        if self.b0.is_empty() {
+            self.b0 = replace(&mut self.b1, &[]);
+        }
+        Some((p0, q0))
+    }
+}
+
 #[stable(feature = "rust1", since = "1.0.0")]
 impl<T: Clone> Clone for VecDeque<T> {
     fn clone(&self) -> VecDeque<T> {
         self.iter().cloned().collect()
     }
+
+    fn clone_from(&mut self, other: &Self) {
+        self.truncate(other.len());
+
+        let mut iter = PairSlices::from(self, other);
+        while let Some((dst, src)) = iter.next() {
+            dst.clone_from_slice(&src);
+        }
+
+        if iter.has_remainder() {
+            for remainder in iter.remainder() {
+                self.extend(remainder.iter().cloned());
+            }
+        }
+    }
 }
 
 #[stable(feature = "rust1", since = "1.0.0")]
@@ -2209,6 +2286,16 @@ fn try_fold<B, F, R>(&mut self, init: B, mut f: F) -> R
         final_res
     }
 
+    fn nth(&mut self, n: usize) -> Option<Self::Item> {
+        if n >= count(self.tail, self.head, self.ring.len()) {
+            self.tail = self.head;
+            None
+        } else {
+            self.tail = wrap_index(self.tail.wrapping_add(n), self.ring.len());
+            self.next()
+        }
+    }
+
     #[inline]
     fn last(mut self) -> Option<&'a T> {
         self.next_back()
@@ -2327,6 +2414,16 @@ fn fold<Acc, F>(self, mut accum: Acc, mut f: F) -> Acc
         back.iter_mut().fold(accum, &mut f)
     }
 
+    fn nth(&mut self, n: usize) -> Option<Self::Item> {
+        if n >= count(self.tail, self.head, self.ring.len()) {
+            self.tail = self.head;
+            None
+        } else {
+            self.tail = wrap_index(self.tail.wrapping_add(n), self.ring.len());
+            self.next()
+        }
+    }
+
     #[inline]
     fn last(mut self) -> Option<&'a mut T> {
         self.next_back()
index d25352399796e684ec503bc05ed8b1597ed48806..d578ee0dac493fa922bd7d6d72f7c36f8d3184f8 100644 (file)
@@ -361,6 +361,29 @@ fn create_vec_and_test_convert(capacity: usize, offset: usize, len: usize) {
     }
 }
 
+#[test]
+fn test_clone_from() {
+    let m = vec![1; 8];
+    let n = vec![2; 12];
+    for pfv in 0..8 {
+        for pfu in 0..8 {
+            for longer in 0..2 {
+                let (vr, ur) = if longer == 0 { (&m, &n) } else { (&n, &m) };
+                let mut v = VecDeque::from(vr.clone());
+                for _ in 0..pfv {
+                    v.push_front(1);
+                }
+                let mut u = VecDeque::from(ur.clone());
+                for _ in 0..pfu {
+                    u.push_front(2);
+                }
+                v.clone_from(&u);
+                assert_eq!(&v, &u);
+            }
+        }
+    }
+}
+
 #[test]
 fn issue_53529() {
     use crate::boxed::Box;
index 68cbc366d7bc2a25bb77e9eaff2761c5a34dd559..cbfc55233a1e0e3902444dd66e3bd182cec11c17 100644 (file)
 //! arguments which have names. Like with positional parameters, it is not
 //! valid to provide named parameters that are unused by the format string.
 //!
-//! ## Argument types
+//! # Formatting Parameters
+//!
+//! Each argument being formatted can be transformed by a number of formatting
+//! parameters (corresponding to `format_spec` in the syntax above). These
+//! parameters affect the string representation of what's being formatted.
+//!
+//! ## Width
+//!
+//! ```
+//! // All of these print "Hello x    !"
+//! println!("Hello {:5}!", "x");
+//! println!("Hello {:1$}!", "x", 5);
+//! println!("Hello {1:0$}!", 5, "x");
+//! println!("Hello {:width$}!", "x", width = 5);
+//! ```
+//!
+//! This is a parameter for the "minimum width" that the format should take up.
+//! If the value's string does not fill up this many characters, then the
+//! padding specified by fill/alignment will be used to take up the required
+//! space (see below).
+//!
+//! The value for the width can also be provided as a [`usize`] in the list of
+//! parameters by adding a postfix `$`, indicating that the second argument is
+//! a [`usize`] specifying the width.
+//!
+//! Referring to an argument with the dollar syntax does not affect the "next
+//! argument" counter, so it's usually a good idea to refer to arguments by
+//! position, or use named arguments.
+//!
+//! ## Fill/Alignment
+//!
+//! ```
+//! assert_eq!(format!("Hello {:<5}!", "x"),  "Hello x    !");
+//! assert_eq!(format!("Hello {:-<5}!", "x"), "Hello x----!");
+//! assert_eq!(format!("Hello {:^5}!", "x"),  "Hello   x  !");
+//! assert_eq!(format!("Hello {:>5}!", "x"),  "Hello     x!");
+//! ```
+//!
+//! The optional fill character and alignment are normally provided in conjunction with the
+//! [`width`](#width) parameter. They must be defined before `width`, right after the `:`.
+//! This indicates that if the value being formatted is smaller than
+//! `width` some extra characters will be printed around it.
+//! Filling comes in the following variants for different alignments:
+//!
+//! * `[fill]<` - the argument is left-aligned in `width` columns
+//! * `[fill]^` - the argument is center-aligned in `width` columns
+//! * `[fill]>` - the argument is right-aligned in `width` columns
+//!
+//! The default [fill/alignment](#fillalignment) for non-numerics is a space and
+//! left-aligned. The
+//! default for numeric formatters is also a space but with right-alignment. If
+//! the `0` flag (see below) is specified for numerics, then the implicit fill character is
+//! `0`.
+//!
+//! Note that alignment may not be implemented by some types. In particular, it
+//! is not generally implemented for the `Debug` trait.  A good way to ensure
+//! padding is applied is to format your input, then pad this resulting string
+//! to obtain your output:
+//!
+//! ```
+//! println!("Hello {:^15}!", format!("{:?}", Some("hi"))); // => "Hello   Some("hi")   !"
+//! ```
+//!
+//! ## Sign/`#`/`0`
+//!
+//! ```
+//! assert_eq!(format!("Hello {:+}!", 5), "Hello +5!");
+//! assert_eq!(format!("{:#x}!", 27), "0x1b!");
+//! assert_eq!(format!("Hello {:05}!", 5),  "Hello 00005!");
+//! assert_eq!(format!("Hello {:05}!", -5), "Hello -0005!");
+//! assert_eq!(format!("{:#010x}!", 27), "0x0000001b!");
+//! ```
+//!
+//! These are all flags altering the behavior of the formatter.
+//!
+//! * `+` - This is intended for numeric types and indicates that the sign
+//!         should always be printed. Positive signs are never printed by
+//!         default, and the negative sign is only printed by default for the
+//!         `Signed` trait. This flag indicates that the correct sign (`+` or `-`)
+//!         should always be printed.
+//! * `-` - Currently not used
+//! * `#` - This flag indicates that the "alternate" form of printing should
+//!         be used. The alternate forms are:
+//!     * `#?` - pretty-print the [`Debug`] formatting
+//!     * `#x` - precedes the argument with a `0x`
+//!     * `#X` - precedes the argument with a `0x`
+//!     * `#b` - precedes the argument with a `0b`
+//!     * `#o` - precedes the argument with a `0o`
+//! * `0` - This is used to indicate for integer formats that the padding to `width` should
+//!         both be done with a `0` character as well as be sign-aware. A format
+//!         like `{:08}` would yield `00000001` for the integer `1`, while the
+//!         same format would yield `-0000001` for the integer `-1`. Notice that
+//!         the negative version has one fewer zero than the positive version.
+//!         Note that padding zeroes are always placed after the sign (if any)
+//!         and before the digits. When used together with the `#` flag, a similar
+//!         rule applies: padding zeroes are inserted after the prefix but before
+//!         the digits. The prefix is included in the total width.
+//!
+//! ## Precision
+//!
+//! For non-numeric types, this can be considered a "maximum width". If the resulting string is
+//! longer than this width, then it is truncated down to this many characters and that truncated
+//! value is emitted with proper `fill`, `alignment` and `width` if those parameters are set.
+//!
+//! For integral types, this is ignored.
+//!
+//! For floating-point types, this indicates how many digits after the decimal point should be
+//! printed.
+//!
+//! There are three possible ways to specify the desired `precision`:
+//!
+//! 1. An integer `.N`:
+//!
+//!    the integer `N` itself is the precision.
+//!
+//! 2. An integer or name followed by dollar sign `.N$`:
+//!
+//!    use format *argument* `N` (which must be a `usize`) as the precision.
+//!
+//! 3. An asterisk `.*`:
+//!
+//!    `.*` means that this `{...}` is associated with *two* format inputs rather than one: the
+//!    first input holds the `usize` precision, and the second holds the value to print. Note that
+//!    in this case, if one uses the format string `{<arg>:<spec>.*}`, then the `<arg>` part refers
+//!    to the *value* to print, and the `precision` must come in the input preceding `<arg>`.
+//!
+//! For example, the following calls all print the same thing `Hello x is 0.01000`:
+//!
+//! ```
+//! // Hello {arg 0 ("x")} is {arg 1 (0.01) with precision specified inline (5)}
+//! println!("Hello {0} is {1:.5}", "x", 0.01);
+//!
+//! // Hello {arg 1 ("x")} is {arg 2 (0.01) with precision specified in arg 0 (5)}
+//! println!("Hello {1} is {2:.0$}", 5, "x", 0.01);
+//!
+//! // Hello {arg 0 ("x")} is {arg 2 (0.01) with precision specified in arg 1 (5)}
+//! println!("Hello {0} is {2:.1$}", "x", 5, 0.01);
+//!
+//! // Hello {next arg ("x")} is {second of next two args (0.01) with precision
+//! //                          specified in first of next two args (5)}
+//! println!("Hello {} is {:.*}",    "x", 5, 0.01);
+//!
+//! // Hello {next arg ("x")} is {arg 2 (0.01) with precision
+//! //                          specified in its predecessor (5)}
+//! println!("Hello {} is {2:.*}",   "x", 5, 0.01);
+//!
+//! // Hello {next arg ("x")} is {arg "number" (0.01) with precision specified
+//! //                          in arg "prec" (5)}
+//! println!("Hello {} is {number:.prec$}", "x", prec = 5, number = 0.01);
+//! ```
 //!
-//! Each argument's type is dictated by the format string.
-//! There are various parameters which require a particular type, however.
-//! An example is the `{:.*}` syntax, which sets the number of decimal places
-//! in floating-point types:
+//! While these:
 //!
 //! ```
-//! let formatted_number = format!("{:.*}", 2, 1.234567);
+//! println!("{}, `{name:.*}` has 3 fractional digits", "Hello", 3, name=1234.56);
+//! println!("{}, `{name:.*}` has 3 characters", "Hello", 3, name="1234.56");
+//! println!("{}, `{name:>8.*}` has 3 right-aligned characters", "Hello", 3, name="1234.56");
+//! ```
 //!
-//! assert_eq!("1.23", formatted_number)
+//! print two significantly different things:
+//!
+//! ```text
+//! Hello, `1234.560` has 3 fractional digits
+//! Hello, `123` has 3 characters
+//! Hello, `     123` has 3 right-aligned characters
 //! ```
 //!
-//! If this syntax is used, then the number of characters to print precedes the
-//! actual object being formatted, and the number of characters must have the
-//! type [`usize`].
+//! # Escaping
+//!
+//! The literal characters `{` and `}` may be included in a string by preceding
+//! them with the same character. For example, the `{` character is escaped with
+//! `{{` and the `}` character is escaped with `}}`.
 //!
-//! ## Formatting traits
+//! ```
+//! assert_eq!(format!("Hello {{}}"), "Hello {}");
+//! assert_eq!(format!("{{ Hello"), "{ Hello");
+//! ```
+//!
+//! # Syntax
+//!
+//! To summarize, here you can find the full grammar of format strings.
+//! The syntax for the formatting language used is drawn from other languages,
+//! so it should not be too alien. Arguments are formatted with Python-like
+//! syntax, meaning that arguments are surrounded by `{}` instead of the C-like
+//! `%`. The actual grammar for the formatting syntax is:
+//!
+//! ```text
+//! format_string := <text> [ maybe-format <text> ] *
+//! maybe-format := '{' '{' | '}' '}' | <format>
+//! format := '{' [ argument ] [ ':' format_spec ] '}'
+//! argument := integer | identifier
+//!
+//! format_spec := [[fill]align][sign]['#']['0'][width]['.' precision][type]
+//! fill := character
+//! align := '<' | '^' | '>'
+//! sign := '+' | '-'
+//! width := count
+//! precision := count | '*'
+//! type := identifier | '?' | ''
+//! count := parameter | integer
+//! parameter := argument '$'
+//! ```
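+//!
+//! For example, one format spec decomposed (an illustrative aside):
+//!
+//! ```
+//! // "{0:>8.3}": argument 0, fill/align `>`, minimum width 8, precision 3
+//! assert_eq!(format!("{0:>8.3}", 3.14159), "   3.142");
+//! ```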
+//!
+//! # Formatting traits
 //!
 //! When requesting that an argument be formatted with a particular type, you
 //! are actually requesting that an argument ascribes to a particular trait.
 //! assert_eq!(format!("{} {:?}", "foo\n", "bar\n"), "foo\n \"bar\\n\"");
 //! ```
 //!
-//! ## Related macros
+//! # Related macros
 //!
 //! There are a number of related macros in the [`format!`] family. The ones that
 //! are currently implemented are:
 //! it would internally pass around this structure until it has been determined
 //! where output should go to.
 //!
-//! # Syntax
-//!
-//! The syntax for the formatting language used is drawn from other languages,
-//! so it should not be too alien. Arguments are formatted with Python-like
-//! syntax, meaning that arguments are surrounded by `{}` instead of the C-like
-//! `%`. The actual grammar for the formatting syntax is:
-//!
-//! ```text
-//! format_string := <text> [ maybe-format <text> ] *
-//! maybe-format := '{' '{' | '}' '}' | <format>
-//! format := '{' [ argument ] [ ':' format_spec ] '}'
-//! argument := integer | identifier
-//!
-//! format_spec := [[fill]align][sign]['#']['0'][width]['.' precision][type]
-//! fill := character
-//! align := '<' | '^' | '>'
-//! sign := '+' | '-'
-//! width := count
-//! precision := count | '*'
-//! type := identifier | '?' | ''
-//! count := parameter | integer
-//! parameter := argument '$'
-//! ```
-//!
-//! # Formatting Parameters
-//!
-//! Each argument being formatted can be transformed by a number of formatting
-//! parameters (corresponding to `format_spec` in the syntax above). These
-//! parameters affect the string representation of what's being formatted.
-//!
-//! ## Fill/Alignment
-//!
-//! The fill character is provided normally in conjunction with the
-//! [`width`](#width)
-//! parameter. This indicates that if the value being formatted is smaller than
-//! `width` some extra characters will be printed around it. The extra
-//! characters are specified by `fill`, and the alignment can be one of the
-//! following options:
-//!
-//! * `<` - the argument is left-aligned in `width` columns
-//! * `^` - the argument is center-aligned in `width` columns
-//! * `>` - the argument is right-aligned in `width` columns
-//!
-//! Note that alignment may not be implemented by some types. In particular, it
-//! is not generally implemented for the `Debug` trait.  A good way to ensure
-//! padding is applied is to format your input, then use this resulting string
-//! to pad your output.
-//!
-//! ## Sign/`#`/`0`
-//!
-//! These can all be interpreted as flags for a particular formatter.
-//!
-//! * `+` - This is intended for numeric types and indicates that the sign
-//!         should always be printed. Positive signs are never printed by
-//!         default, and the negative sign is only printed by default for the
-//!         `Signed` trait. This flag indicates that the correct sign (`+` or `-`)
-//!         should always be printed.
-//! * `-` - Currently not used
-//! * `#` - This flag is indicates that the "alternate" form of printing should
-//!         be used. The alternate forms are:
-//!     * `#?` - pretty-print the [`Debug`] formatting
-//!     * `#x` - precedes the argument with a `0x`
-//!     * `#X` - precedes the argument with a `0x`
-//!     * `#b` - precedes the argument with a `0b`
-//!     * `#o` - precedes the argument with a `0o`
-//! * `0` - This is used to indicate for integer formats that the padding should
-//!         both be done with a `0` character as well as be sign-aware. A format
-//!         like `{:08}` would yield `00000001` for the integer `1`, while the
-//!         same format would yield `-0000001` for the integer `-1`. Notice that
-//!         the negative version has one fewer zero than the positive version.
-//!         Note that padding zeroes are always placed after the sign (if any)
-//!         and before the digits. When used together with the `#` flag, a similar
-//!         rule applies: padding zeroes are inserted after the prefix but before
-//!         the digits.
-//!
-//! ## Width
-//!
-//! This is a parameter for the "minimum width" that the format should take up.
-//! If the value's string does not fill up this many characters, then the
-//! padding specified by fill/alignment will be used to take up the required
-//! space.
-//!
-//! The default [fill/alignment](#fillalignment) for non-numerics is a space and
-//! left-aligned. The
-//! defaults for numeric formatters is also a space but with right-alignment. If
-//! the `0` flag is specified for numerics, then the implicit fill character is
-//! `0`.
-//!
-//! The value for the width can also be provided as a [`usize`] in the list of
-//! parameters by using the dollar syntax indicating that the second argument is
-//! a [`usize`] specifying the width, for example:
-//!
-//! ```
-//! // All of these print "Hello x    !"
-//! println!("Hello {:5}!", "x");
-//! println!("Hello {:1$}!", "x", 5);
-//! println!("Hello {1:0$}!", 5, "x");
-//! println!("Hello {:width$}!", "x", width = 5);
-//! ```
-//!
-//! Referring to an argument with the dollar syntax does not affect the "next
-//! argument" counter, so it's usually a good idea to refer to arguments by
-//! position, or use named arguments.
-//!
-//! ## Precision
-//!
-//! For non-numeric types, this can be considered a "maximum width". If the resulting string is
-//! longer than this width, then it is truncated down to this many characters and that truncated
-//! value is emitted with proper `fill`, `alignment` and `width` if those parameters are set.
-//!
-//! For integral types, this is ignored.
-//!
-//! For floating-point types, this indicates how many digits after the decimal point should be
-//! printed.
-//!
-//! There are three possible ways to specify the desired `precision`:
-//!
-//! 1. An integer `.N`:
-//!
-//!    the integer `N` itself is the precision.
-//!
-//! 2. An integer or name followed by dollar sign `.N$`:
-//!
-//!    use format *argument* `N` (which must be a `usize`) as the precision.
-//!
-//! 3. An asterisk `.*`:
-//!
-//!    `.*` means that this `{...}` is associated with *two* format inputs rather than one: the
-//!    first input holds the `usize` precision, and the second holds the value to print. Note that
-//!    in this case, if one uses the format string `{<arg>:<spec>.*}`, then the `<arg>` part refers
-//!    to the *value* to print, and the `precision` must come in the input preceding `<arg>`.
-//!
-//! For example, the following calls all print the same thing `Hello x is 0.01000`:
-//!
-//! ```
-//! // Hello {arg 0 ("x")} is {arg 1 (0.01) with precision specified inline (5)}
-//! println!("Hello {0} is {1:.5}", "x", 0.01);
-//!
-//! // Hello {arg 1 ("x")} is {arg 2 (0.01) with precision specified in arg 0 (5)}
-//! println!("Hello {1} is {2:.0$}", 5, "x", 0.01);
-//!
-//! // Hello {arg 0 ("x")} is {arg 2 (0.01) with precision specified in arg 1 (5)}
-//! println!("Hello {0} is {2:.1$}", "x", 5, 0.01);
-//!
-//! // Hello {next arg ("x")} is {second of next two args (0.01) with precision
-//! //                          specified in first of next two args (5)}
-//! println!("Hello {} is {:.*}",    "x", 5, 0.01);
-//!
-//! // Hello {next arg ("x")} is {arg 2 (0.01) with precision
-//! //                          specified in its predecessor (5)}
-//! println!("Hello {} is {2:.*}",   "x", 5, 0.01);
-//!
-//! // Hello {next arg ("x")} is {arg "number" (0.01) with precision specified
-//! //                          in arg "prec" (5)}
-//! println!("Hello {} is {number:.prec$}", "x", prec = 5, number = 0.01);
-//! ```
-//!
-//! While these:
-//!
-//! ```
-//! println!("{}, `{name:.*}` has 3 fractional digits", "Hello", 3, name=1234.56);
-//! println!("{}, `{name:.*}` has 3 characters", "Hello", 3, name="1234.56");
-//! println!("{}, `{name:>8.*}` has 3 right-aligned characters", "Hello", 3, name="1234.56");
-//! ```
-//!
-//! print two significantly different things:
-//!
-//! ```text
-//! Hello, `1234.560` has 3 fractional digits
-//! Hello, `123` has 3 characters
-//! Hello, `     123` has 3 right-aligned characters
-//! ```
-//!
-//! # Escaping
-//!
-//! The literal characters `{` and `}` may be included in a string by preceding
-//! them with the same character. For example, the `{` character is escaped with
-//! `{{` and the `}` character is escaped with `}}`.
-//!
 //! [`usize`]: ../../std/primitive.usize.html
 //! [`isize`]: ../../std/primitive.isize.html
 //! [`i8`]: ../../std/primitive.i8.html
index 247cd9a020161333fe7c2711ecab1762b2a81c74..3684162d8b18741782e38e7ede8f13fa6088c417 100644 (file)
 #![feature(maybe_uninit_extra, maybe_uninit_slice)]
 #![feature(alloc_layout_extra)]
 #![feature(try_trait)]
-#![feature(mem_take)]
 #![feature(associated_type_bounds)]
 
 // Allow testing this library
@@ -154,7 +153,7 @@ mod boxed {
 #[cfg(test)]
 mod tests;
 pub mod collections;
-#[cfg(all(target_has_atomic = "ptr", target_has_atomic = "cas"))]
+#[cfg(target_has_atomic = "ptr")]
 pub mod sync;
 pub mod rc;
 pub mod raw_vec;
index a28c6d22abb9537ddf6f97357748978dc67bca35..f1c4c32e116ea74dcc59bec6eb349841517662b4 100644 (file)
@@ -3,8 +3,9 @@
 //!
 //! The type [`Rc<T>`][`Rc`] provides shared ownership of a value of type `T`,
 //! allocated in the heap. Invoking [`clone`][clone] on [`Rc`] produces a new
-//! pointer to the same value in the heap. When the last [`Rc`] pointer to a
-//! given value is destroyed, the pointed-to value is also destroyed.
+//! pointer to the same allocation in the heap. When the last [`Rc`] pointer to a
+//! given allocation is destroyed, the value stored in that allocation (often
+//! referred to as the "inner value") is also dropped.
 //!
 //! Shared references in Rust disallow mutation by default, and [`Rc`]
 //! is no exception: you cannot generally obtain a mutable reference to
 //!
 //! The [`downgrade`][downgrade] method can be used to create a non-owning
 //! [`Weak`] pointer. A [`Weak`] pointer can be [`upgrade`][upgrade]d
-//! to an [`Rc`], but this will return [`None`] if the value has
-//! already been dropped.
+//! to an [`Rc`], but this will return [`None`] if the value stored in the allocation has
+//! already been dropped. In other words, `Weak` pointers do not keep the value
+//! inside the allocation alive; however, they *do* keep the allocation
+//! (the backing store for the inner value) alive.
 //!
 //! A cycle between [`Rc`] pointers will never be deallocated. For this reason,
 //! [`Weak`] is used to break cycles. For example, a tree could have strong
 //! Rc::downgrade(&my_rc);
 //! ```
 //!
-//! [`Weak<T>`][`Weak`] does not auto-dereference to `T`, because the value may have
-//! already been destroyed.
+//! [`Weak<T>`][`Weak`] does not auto-dereference to `T`, because the inner value may have
+//! already been dropped.
 //!
 //! # Cloning references
 //!
-//! Creating a new reference from an existing reference counted pointer is done using the
-//! `Clone` trait implemented for [`Rc<T>`][`Rc`] and [`Weak<T>`][`Weak`].
+//! Creating a new reference to the same allocation as an existing reference counted pointer
+//! is done using the `Clone` trait implemented for [`Rc<T>`][`Rc`] and [`Weak<T>`][`Weak`].
 //!
 //! ```
 //! use std::rc::Rc;
@@ -93,7 +96,7 @@
 //!     );
 //!
 //!     // Create `Gadget`s belonging to `gadget_owner`. Cloning the `Rc<Owner>`
-//!     // value gives us a new pointer to the same `Owner` value, incrementing
+//!     // gives us a new pointer to the same `Owner` allocation, incrementing
 //!     // the reference count in the process.
 //!     let gadget1 = Gadget {
 //!         id: 1,
 //!     // Despite dropping `gadget_owner`, we're still able to print out the name
 //!     // of the `Owner` of the `Gadget`s. This is because we've only dropped a
 //!     // single `Rc<Owner>`, not the `Owner` it points to. As long as there are
-//!     // other `Rc<Owner>` values pointing at the same `Owner`, it will remain
-//!     // allocated. The field projection `gadget1.owner.name` works because
+//!     // other `Rc<Owner>` pointing at the same `Owner` allocation, it will remain
+//!     // live. The field projection `gadget1.owner.name` works because
 //!     // `Rc<Owner>` automatically dereferences to `Owner`.
 //!     println!("Gadget {} owned by {}", gadget1.id, gadget1.owner.name);
 //!     println!("Gadget {} owned by {}", gadget2.id, gadget2.owner.name);
 //!
 //! If our requirements change, and we also need to be able to traverse from
 //! `Owner` to `Gadget`, we will run into problems. An [`Rc`] pointer from `Owner`
-//! to `Gadget` introduces a cycle between the values. This means that their
-//! reference counts can never reach 0, and the values will remain allocated
-//! forever: a memory leak. In order to get around this, we can use [`Weak`]
+//! to `Gadget` introduces a cycle. This means that their
+//! reference counts can never reach 0, and the allocation will never be destroyed:
+//! a memory leak. In order to get around this, we can use [`Weak`]
 //! pointers.
 //!
 //! Rust actually makes it somewhat difficult to produce this loop in the first
 //!     for gadget_weak in gadget_owner.gadgets.borrow().iter() {
 //!
 //!         // `gadget_weak` is a `Weak<Gadget>`. Since `Weak` pointers can't
-//!         // guarantee the value is still allocated, we need to call
+//!         // guarantee the allocation still exists, we need to call
 //!         // `upgrade`, which returns an `Option<Rc<Gadget>>`.
 //!         //
-//!         // In this case we know the value still exists, so we simply
+//!         // In this case we know the allocation still exists, so we simply
 //!         // `unwrap` the `Option`. In a more complicated program, you might
 //!         // need graceful error handling for a `None` result.
 //!
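A minimal sketch of the `Weak` behaviour described in this module documentation: upgrading succeeds only while the inner value is alive, and the last `Weak` is what finally releases the backing allocation.

```rust
use std::rc::Rc;

fn main() {
    let strong = Rc::new(42);
    let weak = Rc::downgrade(&strong);

    // The inner value is still alive, so `upgrade` hands back a new `Rc`.
    assert_eq!(weak.upgrade().map(|rc| *rc), Some(42));

    drop(strong);

    // The inner value has been dropped; the `Weak` itself is still usable,
    // it simply reports that the value is gone. The backing allocation is
    // freed only once this last `Weak` is dropped as well.
    assert!(weak.upgrade().is_none());
}
```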
@@ -365,7 +368,7 @@ pub fn pin(value: T) -> Pin<Rc<T>> {
         unsafe { Pin::new_unchecked(Rc::new(value)) }
     }
 
-    /// Returns the contained value, if the `Rc` has exactly one strong reference.
+    /// Returns the inner value, if the `Rc` has exactly one strong reference.
     ///
     /// Otherwise, an [`Err`][result] is returned with the same `Rc` that was
     /// passed in.
@@ -446,7 +449,7 @@ impl<T> Rc<mem::MaybeUninit<T>> {
     /// # Safety
     ///
     /// As with [`MaybeUninit::assume_init`],
-    /// it is up to the caller to guarantee that the value
+    /// it is up to the caller to guarantee that the inner value
     /// really is in an initialized state.
     /// Calling this when the content is not yet fully initialized
     /// causes immediate undefined behavior.
@@ -485,7 +488,7 @@ impl<T> Rc<[mem::MaybeUninit<T>]> {
     /// # Safety
     ///
     /// As with [`MaybeUninit::assume_init`],
-    /// it is up to the caller to guarantee that the value
+    /// it is up to the caller to guarantee that the inner value
     /// really is in an initialized state.
     /// Calling this when the content is not yet fully initialized
     /// causes immediate undefined behavior.
@@ -604,7 +607,7 @@ pub fn into_raw_non_null(this: Self) -> NonNull<T> {
         unsafe { NonNull::new_unchecked(Rc::into_raw(this) as *mut _) }
     }
 
-    /// Creates a new [`Weak`][weak] pointer to this value.
+    /// Creates a new [`Weak`][weak] pointer to this allocation.
     ///
     /// [weak]: struct.Weak.html
     ///
@@ -625,7 +628,7 @@ pub fn downgrade(this: &Self) -> Weak<T> {
         Weak { ptr: this.ptr }
     }
 
-    /// Gets the number of [`Weak`][weak] pointers to this value.
+    /// Gets the number of [`Weak`][weak] pointers to this allocation.
     ///
     /// [weak]: struct.Weak.html
     ///
@@ -645,7 +648,7 @@ pub fn weak_count(this: &Self) -> usize {
         this.weak() - 1
     }
 
-    /// Gets the number of strong (`Rc`) pointers to this value.
+    /// Gets the number of strong (`Rc`) pointers to this allocation.
     ///
     /// # Examples
     ///
@@ -664,7 +667,7 @@ pub fn strong_count(this: &Self) -> usize {
     }
 
     /// Returns `true` if there are no other `Rc` or [`Weak`][weak] pointers to
-    /// this inner value.
+    /// this allocation.
     ///
     /// [weak]: struct.Weak.html
     #[inline]
@@ -672,14 +675,14 @@ fn is_unique(this: &Self) -> bool {
         Rc::weak_count(this) == 0 && Rc::strong_count(this) == 1
     }
 
-    /// Returns a mutable reference to the inner value, if there are
-    /// no other `Rc` or [`Weak`][weak] pointers to the same value.
+    /// Returns a mutable reference into the given `Rc`, if there are
+    /// no other `Rc` or [`Weak`][weak] pointers to the same allocation.
     ///
     /// Returns [`None`] otherwise, because it is not safe to
     /// mutate a shared value.
     ///
     /// See also [`make_mut`][make_mut], which will [`clone`][clone]
-    /// the inner value when it's shared.
+    /// the inner value when there are other pointers.
     ///
     /// [weak]: struct.Weak.html
     /// [`None`]: ../../std/option/enum.Option.html#variant.None
@@ -710,7 +713,7 @@ pub fn get_mut(this: &mut Self) -> Option<&mut T> {
         }
     }
 
-    /// Returns a mutable reference to the inner value,
+    /// Returns a mutable reference into the given `Rc`,
     /// without any check.
     ///
     /// See also [`get_mut`], which is safe and does appropriate checks.
@@ -719,7 +722,7 @@ pub fn get_mut(this: &mut Self) -> Option<&mut T> {
     ///
     /// # Safety
     ///
-    /// Any other `Rc` or [`Weak`] pointers to the same value must not be dereferenced
+    /// Any other `Rc` or [`Weak`] pointers to the same allocation must not be dereferenced
     /// for the duration of the returned borrow.
     /// This is trivially the case if no such pointers exist,
     /// for example immediately after `Rc::new`.
@@ -745,8 +748,8 @@ pub unsafe fn get_mut_unchecked(this: &mut Self) -> &mut T {
 
     #[inline]
     #[stable(feature = "ptr_eq", since = "1.17.0")]
-    /// Returns `true` if the two `Rc`s point to the same value (not
-    /// just values that compare as equal).
+    /// Returns `true` if the two `Rc`s point to the same allocation
+    /// (in a vein similar to [`ptr::eq`]).
     ///
     /// # Examples
     ///
@@ -760,6 +763,8 @@ pub unsafe fn get_mut_unchecked(this: &mut Self) -> &mut T {
     /// assert!(Rc::ptr_eq(&five, &same_five));
     /// assert!(!Rc::ptr_eq(&five, &other_five));
     /// ```
+    ///
+    /// [`ptr::eq`]: ../../std/ptr/fn.eq.html
     pub fn ptr_eq(this: &Self, other: &Self) -> bool {
         this.ptr.as_ptr() == other.ptr.as_ptr()
     }
@@ -768,12 +773,12 @@ pub fn ptr_eq(this: &Self, other: &Self) -> bool {
 impl<T: Clone> Rc<T> {
     /// Makes a mutable reference into the given `Rc`.
     ///
-    /// If there are other `Rc` pointers to the same value, then `make_mut` will
-    /// [`clone`] the inner value to ensure unique ownership.  This is also
+    /// If there are other `Rc` pointers to the same allocation, then `make_mut` will
+    /// [`clone`] the inner value to a new allocation to ensure unique ownership.  This is also
     /// referred to as clone-on-write.
     ///
-    /// If there are no other `Rc` pointers to this value, then [`Weak`]
-    /// pointers to this value will be dissassociated.
+    /// If there are no other `Rc` pointers to this allocation, then [`Weak`]
+    /// pointers to this allocation will be disassociated.
     ///
     /// See also [`get_mut`], which will fail rather than cloning.
     ///
@@ -794,12 +799,12 @@ impl<T: Clone> Rc<T> {
     /// *Rc::make_mut(&mut data) += 1;        // Won't clone anything
     /// *Rc::make_mut(&mut other_data) *= 2;  // Won't clone anything
     ///
-    /// // Now `data` and `other_data` point to different values.
+    /// // Now `data` and `other_data` point to different allocations.
     /// assert_eq!(*data, 8);
     /// assert_eq!(*other_data, 12);
     /// ```
     ///
-    /// [`Weak`] pointers will be dissassociated:
+    /// [`Weak`] pointers will be disassociated:
     ///
     /// ```
     /// use std::rc::Rc;
@@ -837,7 +842,7 @@ pub fn make_mut(this: &mut Self) -> &mut T {
         // returned is the *only* pointer that will ever be returned to T. Our
         // reference count is guaranteed to be 1 at this point, and we required
         // the `Rc<T>` itself to be `mut`, so we're returning the only possible
-        // reference to the inner value.
+        // reference to the allocation.
         unsafe {
             &mut this.ptr.as_mut().value
         }
@@ -878,7 +883,7 @@ pub fn downcast<T: Any>(self) -> Result<Rc<T>, Rc<dyn Any>> {
 
 impl<T: ?Sized> Rc<T> {
     /// Allocates an `RcBox<T>` with sufficient space for
-    /// a possibly-unsized value where the value has the layout provided.
+    /// a possibly-unsized inner value where the value has the layout provided.
     ///
     /// The function `mem_to_rcbox` is called with the data pointer
     /// and must return back a (potentially fat)-pointer for the `RcBox<T>`.
@@ -908,7 +913,7 @@ unsafe fn allocate_for_layout(
         inner
     }
 
-    /// Allocates an `RcBox<T>` with sufficient space for an unsized value
+    /// Allocates an `RcBox<T>` with sufficient space for an unsized inner value
     unsafe fn allocate_for_ptr(ptr: *const T) -> *mut RcBox<T> {
         // Allocate for the `RcBox<T>` using the given value.
         Self::allocate_for_layout(
@@ -1111,7 +1116,7 @@ fn drop(&mut self) {
 impl<T: ?Sized> Clone for Rc<T> {
     /// Makes a clone of the `Rc` pointer.
     ///
-    /// This creates another pointer to the same inner value, increasing the
+    /// This creates another pointer to the same allocation, increasing the
     /// strong reference count.
     ///
     /// # Examples
@@ -1172,6 +1177,8 @@ impl<T: ?Sized + PartialEq> RcEqIdent<T> for Rc<T> {
 /// store large values, that are slow to clone, but also heavy to check for equality, causing this
 /// cost to pay off more easily. It's also more likely to have two `Rc` clones, that point to
 /// the same value, than two `&T`s.
+///
+/// We can only do this when `T: Eq`, as a `PartialEq` implementation might be deliberately irreflexive.
 #[stable(feature = "rust1", since = "1.0.0")]
 impl<T: ?Sized + Eq> RcEqIdent<T> for Rc<T> {
     #[inline]
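A small sketch of why the pointer-equality shortcut is restricted to `Eq` types: `f64` is only `PartialEq`, and NaN is deliberately irreflexive, so two `Rc<f64>` clones that share an allocation can still compare unequal.

```rust
use std::rc::Rc;

fn main() {
    let nan = Rc::new(f64::NAN);
    let alias = Rc::clone(&nan);

    // Same allocation...
    assert!(Rc::ptr_eq(&nan, &alias));
    // ...but the inner values still compare unequal, because NaN != NaN.
    assert!(nan != alias);
}
```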
@@ -1189,9 +1196,11 @@ fn ne(&self, other: &Rc<T>) -> bool {
 impl<T: ?Sized + PartialEq> PartialEq for Rc<T> {
     /// Equality for two `Rc`s.
     ///
-    /// Two `Rc`s are equal if their inner values are equal.
+    /// Two `Rc`s are equal if their inner values are equal, even if they are
+    /// stored in different allocations.
     ///
-    /// If `T` also implements `Eq`, two `Rc`s that point to the same value are
+    /// If `T` also implements `Eq` (implying reflexivity of equality),
+    /// two `Rc`s that point to the same allocation are
     /// always equal.
     ///
     /// # Examples
@@ -1212,7 +1221,8 @@ fn eq(&self, other: &Rc<T>) -> bool {
     ///
     /// Two `Rc`s are unequal if their inner values are unequal.
     ///
-    /// If `T` also implements `Eq`, two `Rc`s that point to the same value are
+    /// If `T` also implements `Eq` (implying reflexivity of equality),
+    /// two `Rc`s that point to the same allocation are
     /// never unequal.
     ///
     /// # Examples
@@ -1541,17 +1551,18 @@ fn from_iter(iter: slice::Iter<'a, T>) -> Self {
 }
 
 /// `Weak` is a version of [`Rc`] that holds a non-owning reference to the
-/// managed value. The value is accessed by calling [`upgrade`] on the `Weak`
+/// managed allocation. The allocation is accessed by calling [`upgrade`] on the `Weak`
 /// pointer, which returns an [`Option`]`<`[`Rc`]`<T>>`.
 ///
 /// Since a `Weak` reference does not count towards ownership, it will not
-/// prevent the inner value from being dropped, and `Weak` itself makes no
-/// guarantees about the value still being present and may return [`None`]
-/// when [`upgrade`]d.
+/// prevent the value stored in the allocation from being dropped, and `Weak` itself makes no
+/// guarantees about the value still being present. Thus it may return [`None`]
+/// when [`upgrade`]d. Note however that a `Weak` reference *does* prevent the allocation
+/// itself (the backing store) from being deallocated.
 ///
-/// A `Weak` pointer is useful for keeping a temporary reference to the value
-/// within [`Rc`] without extending its lifetime. It is also used to prevent
-/// circular references between [`Rc`] pointers, since mutual owning references
+/// A `Weak` pointer is useful for keeping a temporary reference to the allocation
+/// managed by [`Rc`] without preventing its inner value from being dropped. It is also used to
+/// prevent circular references between [`Rc`] pointers, since mutual owning references
 /// would never allow either [`Rc`] to be dropped. For example, a tree could
 /// have strong [`Rc`] pointers from parent nodes to children, and `Weak`
 /// pointers from children back to their parents.
@@ -1750,10 +1761,10 @@ pub(crate) fn is_dangling<T: ?Sized>(ptr: NonNull<T>) -> bool {
 }
 
 impl<T: ?Sized> Weak<T> {
-    /// Attempts to upgrade the `Weak` pointer to an [`Rc`], extending
-    /// the lifetime of the value if successful.
+    /// Attempts to upgrade the `Weak` pointer to an [`Rc`], delaying
+    /// dropping of the inner value if successful.
     ///
-    /// Returns [`None`] if the value has since been dropped.
+    /// Returns [`None`] if the inner value has since been dropped.
     ///
     /// [`Rc`]: struct.Rc.html
     /// [`None`]: ../../std/option/enum.Option.html
@@ -1787,7 +1798,7 @@ pub fn upgrade(&self) -> Option<Rc<T>> {
         }
     }
 
-    /// Gets the number of strong (`Rc`) pointers pointing to this value.
+    /// Gets the number of strong (`Rc`) pointers pointing to this allocation.
     ///
     /// If `self` was created using [`Weak::new`], this will return 0.
     ///
@@ -1801,11 +1812,11 @@ pub fn strong_count(&self) -> usize {
         }
     }
 
-    /// Gets the number of `Weak` pointers pointing to this value.
+    /// Gets the number of `Weak` pointers pointing to this allocation.
     ///
     /// If `self` was created using [`Weak::new`], this will return `None`. If
     /// not, the returned value is at least 1, since `self` still points to the
-    /// value.
+    /// allocation.
     ///
     /// [`Weak::new`]: #method.new
     #[unstable(feature = "weak_counts", issue = "57977")]
@@ -1830,14 +1841,14 @@ fn inner(&self) -> Option<&RcBox<T>> {
         }
     }
 
-    /// Returns `true` if the two `Weak`s point to the same value (not just
-    /// values that compare as equal), or if both don't point to any value
+    /// Returns `true` if the two `Weak`s point to the same allocation (similar to
+    /// [`ptr::eq`]), or if both don't point to any allocation
     /// (because they were created with `Weak::new()`).
     ///
     /// # Notes
     ///
     /// Since this compares pointers it means that `Weak::new()` will equal each
-    /// other, even though they don't point to any value.
+    /// other, even though they don't point to any allocation.
     ///
     /// # Examples
     ///
@@ -1869,6 +1880,8 @@ fn inner(&self) -> Option<&RcBox<T>> {
     /// let third = Rc::downgrade(&third_rc);
     /// assert!(!first.ptr_eq(&third));
     /// ```
+    ///
+    /// [`ptr::eq`]: ../../std/ptr/fn.eq.html
     #[inline]
     #[stable(feature = "weak_ptr_eq", since = "1.39.0")]
     pub fn ptr_eq(&self, other: &Self) -> bool {
@@ -1918,7 +1931,7 @@ fn drop(&mut self) {
 
 #[stable(feature = "rc_weak", since = "1.4.0")]
 impl<T: ?Sized> Clone for Weak<T> {
-    /// Makes a clone of the `Weak` pointer that points to the same value.
+    /// Makes a clone of the `Weak` pointer that points to the same allocation.
     ///
     /// # Examples
     ///
index 4e4a285c21dab65eadc6914768f8fb2518f82bae..08243ef7c519f1fdb500347d8249169fe183c120 100644 (file)
@@ -411,20 +411,16 @@ pub fn into_vec(self: Box<Self>) -> Vec<T> {
     /// Basic usage:
     ///
     /// ```
-    /// #![feature(repeat_generic_slice)]
     /// assert_eq!([1, 2].repeat(3), vec![1, 2, 1, 2, 1, 2]);
     /// ```
     ///
     /// A panic upon overflow:
     ///
     /// ```should_panic
-    /// #![feature(repeat_generic_slice)]
     /// // this will panic at runtime
     /// b"0123456789abcdef".repeat(usize::max_value());
     /// ```
-    #[unstable(feature = "repeat_generic_slice",
-               reason = "it's on str, why not on slice?",
-               issue = "48784")]
+    #[stable(feature = "repeat_generic_slice", since = "1.40.0")]
     pub fn repeat(&self, n: usize) -> Vec<T> where T: Copy {
         if n == 0 {
             return Vec::new();
index 5977e69b7fa0f297125b1ecab3e81374505dd4ce..69f8f71197c1f24e826d62c71a2c1f179350c92a 100644 (file)
 ///
 /// The type `Arc<T>` provides shared ownership of a value of type `T`,
 /// allocated in the heap. Invoking [`clone`][clone] on `Arc` produces
-/// a new `Arc` instance, which points to the same value on the heap as the
+/// a new `Arc` instance, which points to the same allocation on the heap as the
 /// source `Arc`, while increasing a reference count. When the last `Arc`
-/// pointer to a given value is destroyed, the pointed-to value is also
-/// destroyed.
+/// pointer to a given allocation is destroyed, the value stored in that allocation (often
+/// referred to as the "inner value") is also dropped.
 ///
 /// Shared references in Rust disallow mutation by default, and `Arc` is no
 /// exception: you cannot generally obtain a mutable reference to something
@@ -61,7 +61,7 @@
 /// Unlike [`Rc<T>`], `Arc<T>` uses atomic operations for its reference
 /// counting. This means that it is thread-safe. The disadvantage is that
 /// atomic operations are more expensive than ordinary memory accesses. If you
-/// are not sharing reference-counted values between threads, consider using
+/// are not sharing reference-counted allocations between threads, consider using
 /// [`Rc<T>`] for lower overhead. [`Rc<T>`] is a safe default, because the
 /// compiler will catch any attempt to send an [`Rc<T>`] between threads.
 /// However, a library might choose `Arc<T>` in order to give library consumers
 ///
 /// The [`downgrade`][downgrade] method can be used to create a non-owning
 /// [`Weak`][weak] pointer. A [`Weak`][weak] pointer can be [`upgrade`][upgrade]d
-/// to an `Arc`, but this will return [`None`] if the value has already been
-/// dropped.
+/// to an `Arc`, but this will return [`None`] if the value stored in the allocation has
+/// already been dropped. In other words, `Weak` pointers do not keep the value
+/// inside the allocation alive; however, they *do* keep the allocation
+/// (the backing store for the value) alive.
 ///
 /// A cycle between `Arc` pointers will never be deallocated. For this reason,
 /// [`Weak`][weak] is used to break cycles. For example, a tree could have
 /// Arc::downgrade(&my_arc);
 /// ```
 ///
-/// [`Weak<T>`][weak] does not auto-dereference to `T`, because the value may have
-/// already been destroyed.
+/// [`Weak<T>`][weak] does not auto-dereference to `T`, because the inner value may have
+/// already been dropped.
 ///
 /// [arc]: struct.Arc.html
 /// [weak]: struct.Weak.html
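For the thread-safety trade-off noted earlier in this hunk, a minimal sketch of sharing an `Arc` across threads: each thread bumps the reference count atomically by cloning the pointer, not the data.

```rust
use std::sync::Arc;
use std::thread;

fn main() {
    let data = Arc::new(vec![1, 2, 3]);

    let handles: Vec<_> = (0..4)
        .map(|_| {
            let data = Arc::clone(&data);
            // Each thread gets its own strong pointer to the same allocation.
            thread::spawn(move || data.iter().sum::<i32>())
        })
        .collect();

    for handle in handles {
        assert_eq!(handle.join().unwrap(), 6);
    }
}
```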
@@ -221,17 +223,18 @@ unsafe fn from_ptr(ptr: *mut ArcInner<T>) -> Self {
 }
 
 /// `Weak` is a version of [`Arc`] that holds a non-owning reference to the
-/// managed value. The value is accessed by calling [`upgrade`] on the `Weak`
+/// managed allocation. The allocation is accessed by calling [`upgrade`] on the `Weak`
 /// pointer, which returns an [`Option`]`<`[`Arc`]`<T>>`.
 ///
 /// Since a `Weak` reference does not count towards ownership, it will not
-/// prevent the inner value from being dropped, and `Weak` itself makes no
-/// guarantees about the value still being present and may return [`None`]
-/// when [`upgrade`]d.
+/// prevent the value stored in the allocation from being dropped, and `Weak` itself makes no
+/// guarantees about the value still being present. Thus it may return [`None`]
+/// when [`upgrade`]d. Note however that a `Weak` reference *does* prevent the allocation
+/// itself (the backing store) from being deallocated.
 ///
-/// A `Weak` pointer is useful for keeping a temporary reference to the value
-/// within [`Arc`] without extending its lifetime. It is also used to prevent
-/// circular references between [`Arc`] pointers, since mutual owning references
+/// A `Weak` pointer is useful for keeping a temporary reference to the allocation
+/// managed by [`Arc`] without preventing its inner value from being dropped. It is also used to
+/// prevent circular references between [`Arc`] pointers, since mutual owning references
 /// would never allow either [`Arc`] to be dropped. For example, a tree could
 /// have strong [`Arc`] pointers from parent nodes to children, and `Weak`
 /// pointers from children back to their parents.
@@ -345,7 +348,7 @@ pub fn pin(data: T) -> Pin<Arc<T>> {
         unsafe { Pin::new_unchecked(Arc::new(data)) }
     }
 
-    /// Returns the contained value, if the `Arc` has exactly one strong reference.
+    /// Returns the inner value, if the `Arc` has exactly one strong reference.
     ///
     /// Otherwise, an [`Err`][result] is returned with the same `Arc` that was
     /// passed in.
@@ -426,7 +429,7 @@ impl<T> Arc<mem::MaybeUninit<T>> {
     /// # Safety
     ///
     /// As with [`MaybeUninit::assume_init`],
-    /// it is up to the caller to guarantee that the value
+    /// it is up to the caller to guarantee that the inner value
     /// really is in an initialized state.
     /// Calling this when the content is not yet fully initialized
     /// causes immediate undefined behavior.
@@ -465,7 +468,7 @@ impl<T> Arc<[mem::MaybeUninit<T>]> {
     /// # Safety
     ///
     /// As with [`MaybeUninit::assume_init`],
-    /// it is up to the caller to guarantee that the value
+    /// it is up to the caller to guarantee that the inner value
     /// really is in an initialized state.
     /// Calling this when the content is not yet fully initialized
     /// causes immediate undefined behavior.
@@ -584,7 +587,7 @@ pub fn into_raw_non_null(this: Self) -> NonNull<T> {
         unsafe { NonNull::new_unchecked(Arc::into_raw(this) as *mut _) }
     }
 
-    /// Creates a new [`Weak`][weak] pointer to this value.
+    /// Creates a new [`Weak`][weak] pointer to this allocation.
     ///
     /// [weak]: struct.Weak.html
     ///
@@ -628,7 +631,7 @@ pub fn downgrade(this: &Self) -> Weak<T> {
         }
     }
 
-    /// Gets the number of [`Weak`][weak] pointers to this value.
+    /// Gets the number of [`Weak`][weak] pointers to this allocation.
     ///
     /// [weak]: struct.Weak.html
     ///
@@ -659,7 +662,7 @@ pub fn weak_count(this: &Self) -> usize {
         if cnt == usize::MAX { 0 } else { cnt - 1 }
     }
 
-    /// Gets the number of strong (`Arc`) pointers to this value.
+    /// Gets the number of strong (`Arc`) pointers to this allocation.
     ///
     /// # Safety
     ///
@@ -710,8 +713,8 @@ unsafe fn drop_slow(&mut self) {
 
     #[inline]
     #[stable(feature = "ptr_eq", since = "1.17.0")]
-    /// Returns `true` if the two `Arc`s point to the same value (not
-    /// just values that compare as equal).
+    /// Returns `true` if the two `Arc`s point to the same allocation
+    /// (in a vein similar to [`ptr::eq`]).
     ///
     /// # Examples
     ///
@@ -725,6 +728,8 @@ unsafe fn drop_slow(&mut self) {
     /// assert!(Arc::ptr_eq(&five, &same_five));
     /// assert!(!Arc::ptr_eq(&five, &other_five));
     /// ```
+    ///
+    /// [`ptr::eq`]: ../../std/ptr/fn.eq.html
     pub fn ptr_eq(this: &Self, other: &Self) -> bool {
         this.ptr.as_ptr() == other.ptr.as_ptr()
     }
@@ -732,7 +737,7 @@ pub fn ptr_eq(this: &Self, other: &Self) -> bool {
 
 impl<T: ?Sized> Arc<T> {
     /// Allocates an `ArcInner<T>` with sufficient space for
-    /// a possibly-unsized value where the value has the layout provided.
+    /// a possibly-unsized inner value where the value has the layout provided.
     ///
     /// The function `mem_to_arcinner` is called with the data pointer
     /// and must return back a (potentially fat)-pointer for the `ArcInner<T>`.
@@ -761,7 +766,7 @@ unsafe fn allocate_for_layout(
         inner
     }
 
-    /// Allocates an `ArcInner<T>` with sufficient space for an unsized value.
+    /// Allocates an `ArcInner<T>` with sufficient space for an unsized inner value.
     unsafe fn allocate_for_ptr(ptr: *const T) -> *mut ArcInner<T> {
         // Allocate for the `ArcInner<T>` using the given value.
         Self::allocate_for_layout(
@@ -903,7 +908,7 @@ fn from_slice(v: &[T]) -> Self {
 impl<T: ?Sized> Clone for Arc<T> {
     /// Makes a clone of the `Arc` pointer.
     ///
-    /// This creates another pointer to the same inner value, increasing the
+    /// This creates another pointer to the same allocation, increasing the
     /// strong reference count.
     ///
     /// # Examples
@@ -965,15 +970,19 @@ impl<T: ?Sized> Receiver for Arc<T> {}
 impl<T: Clone> Arc<T> {
     /// Makes a mutable reference into the given `Arc`.
     ///
-    /// If there are other `Arc` or [`Weak`][weak] pointers to the same value,
-    /// then `make_mut` will invoke [`clone`][clone] on the inner value to
-    /// ensure unique ownership. This is also referred to as clone-on-write.
+    /// If there are other `Arc` or [`Weak`][weak] pointers to the same allocation,
+    /// then `make_mut` will create a new allocation and invoke [`clone`][clone] on the inner value
+    /// to ensure unique ownership. This is also referred to as clone-on-write.
+    ///
+    /// Note that this differs from the behavior of [`Rc::make_mut`], which disassociates
+    /// any remaining `Weak` pointers.
     ///
     /// See also [`get_mut`][get_mut], which will fail rather than cloning.
     ///
     /// [weak]: struct.Weak.html
     /// [clone]: ../../std/clone/trait.Clone.html#tymethod.clone
     /// [get_mut]: struct.Arc.html#method.get_mut
+    /// [`Rc::make_mut`]: ../rc/struct.Rc.html#method.make_mut
     ///
     /// # Examples
     ///
@@ -988,7 +997,7 @@ impl<T: Clone> Arc<T> {
     /// *Arc::make_mut(&mut data) += 1;         // Won't clone anything
     /// *Arc::make_mut(&mut other_data) *= 2;   // Won't clone anything
     ///
-    /// // Now `data` and `other_data` point to different values.
+    /// // Now `data` and `other_data` point to different allocations.
     /// assert_eq!(*data, 8);
     /// assert_eq!(*other_data, 12);
     /// ```
@@ -1048,14 +1057,14 @@ pub fn make_mut(this: &mut Self) -> &mut T {
 }
 
 impl<T: ?Sized> Arc<T> {
-    /// Returns a mutable reference to the inner value, if there are
-    /// no other `Arc` or [`Weak`][weak] pointers to the same value.
+    /// Returns a mutable reference into the given `Arc`, if there are
+    /// no other `Arc` or [`Weak`][weak] pointers to the same allocation.
     ///
     /// Returns [`None`][option] otherwise, because it is not safe to
     /// mutate a shared value.
     ///
     /// See also [`make_mut`][make_mut], which will [`clone`][clone]
-    /// the inner value when it's shared.
+    /// the inner value when there are other pointers.
     ///
     /// [weak]: struct.Weak.html
     /// [option]: ../../std/option/enum.Option.html
@@ -1091,7 +1100,7 @@ pub fn get_mut(this: &mut Self) -> Option<&mut T> {
         }
     }
 
-    /// Returns a mutable reference to the inner value,
+    /// Returns a mutable reference into the given `Arc`,
     /// without any check.
     ///
     /// See also [`get_mut`], which is safe and does appropriate checks.
@@ -1100,7 +1109,7 @@ pub fn get_mut(this: &mut Self) -> Option<&mut T> {
     ///
     /// # Safety
     ///
-    /// Any other `Arc` or [`Weak`] pointers to the same value must not be dereferenced
+    /// Any other `Arc` or [`Weak`] pointers to the same allocation must not be dereferenced
     /// for the duration of the returned borrow.
     /// This is trivially the case if no such pointers exist,
     /// for example immediately after `Arc::new`.
@@ -1424,10 +1433,10 @@ pub unsafe fn from_raw(ptr: *const T) -> Self {
 }
 
 impl<T: ?Sized> Weak<T> {
-    /// Attempts to upgrade the `Weak` pointer to an [`Arc`], extending
-    /// the lifetime of the value if successful.
+    /// Attempts to upgrade the `Weak` pointer to an [`Arc`], delaying
+    /// dropping of the inner value if successful.
     ///
-    /// Returns [`None`] if the value has since been dropped.
+    /// Returns [`None`] if the inner value has since been dropped.
     ///
     /// [`Arc`]: struct.Arc.html
     /// [`None`]: ../../std/option/enum.Option.html#variant.None
@@ -1482,7 +1491,7 @@ pub fn upgrade(&self) -> Option<Arc<T>> {
         }
     }
 
-    /// Gets the number of strong (`Arc`) pointers pointing to this value.
+    /// Gets the number of strong (`Arc`) pointers pointing to this allocation.
     ///
     /// If `self` was created using [`Weak::new`], this will return 0.
     ///
@@ -1497,17 +1506,17 @@ pub fn strong_count(&self) -> usize {
     }
 
     /// Gets an approximation of the number of `Weak` pointers pointing to this
-    /// value.
+    /// allocation.
     ///
     /// If `self` was created using [`Weak::new`], this will return 0. If not,
     /// the returned value is at least 1, since `self` still points to the
-    /// value.
+    /// allocation.
     ///
     /// # Accuracy
     ///
     /// Due to implementation details, the returned value can be off by 1 in
     /// either direction when other threads are manipulating any `Arc`s or
-    /// `Weak`s pointing to the same value.
+    /// `Weak`s pointing to the same allocation.
     ///
     /// [`Weak::new`]: #method.new
     #[unstable(feature = "weak_counts", issue = "57977")]
@@ -1548,14 +1557,14 @@ fn inner(&self) -> Option<&ArcInner<T>> {
         }
     }
 
-    /// Returns `true` if the two `Weak`s point to the same value (not just
-    /// values that compare as equal), or if both don't point to any value
+    /// Returns `true` if the two `Weak`s point to the same allocation (similar to
+    /// [`ptr::eq`]), or if both don't point to any allocation
     /// (because they were created with `Weak::new()`).
     ///
     /// # Notes
     ///
     /// Since this compares pointers it means that `Weak::new()` will equal each
-    /// other, even though they don't point to any value.
+    /// other, even though they don't point to any allocation.
     ///
     /// # Examples
     ///
@@ -1587,6 +1596,8 @@ fn inner(&self) -> Option<&ArcInner<T>> {
     /// let third = Arc::downgrade(&third_rc);
     /// assert!(!first.ptr_eq(&third));
     /// ```
+    ///
+    /// [`ptr::eq`]: ../../std/ptr/fn.eq.html
     #[inline]
     #[stable(feature = "weak_ptr_eq", since = "1.39.0")]
     pub fn ptr_eq(&self, other: &Self) -> bool {
@@ -1596,7 +1607,7 @@ pub fn ptr_eq(&self, other: &Self) -> bool {
 
 #[stable(feature = "arc_weak", since = "1.4.0")]
 impl<T: ?Sized> Clone for Weak<T> {
-    /// Makes a clone of the `Weak` pointer that points to the same value.
+    /// Makes a clone of the `Weak` pointer that points to the same allocation.
     ///
     /// # Examples
     ///
@@ -1726,6 +1737,8 @@ impl<T: ?Sized + PartialEq> ArcEqIdent<T> for Arc<T> {
 /// store large values, that are slow to clone, but also heavy to check for equality, causing this
 /// cost to pay off more easily. It's also more likely to have two `Arc` clones, that point to
 /// the same value, than two `&T`s.
+///
+/// We can only do this when `T: Eq`, as a `PartialEq` implementation might be deliberately irreflexive.
 #[stable(feature = "rust1", since = "1.0.0")]
 impl<T: ?Sized + Eq> ArcEqIdent<T> for Arc<T> {
     #[inline]
@@ -1743,10 +1756,11 @@ fn ne(&self, other: &Arc<T>) -> bool {
 impl<T: ?Sized + PartialEq> PartialEq for Arc<T> {
     /// Equality for two `Arc`s.
     ///
-    /// Two `Arc`s are equal if their inner values are equal.
+    /// Two `Arc`s are equal if their inner values are equal, even if they are
+    /// stored in different allocations.
     ///
-    /// If `T` also implements `Eq`, two `Arc`s that point to the same value are
-    /// always equal.
+    /// If `T` also implements `Eq` (implying reflexivity of equality),
+    /// two `Arc`s that point to the same allocation are always equal.
     ///
     /// # Examples
     ///
@@ -1766,8 +1780,8 @@ fn eq(&self, other: &Arc<T>) -> bool {
     ///
     /// Two `Arc`s are unequal if their inner values are unequal.
     ///
-    /// If `T` also implements `Eq`, two `Arc`s that point to the same value are
-    /// never unequal.
+    /// If `T` also implements `Eq` (implying reflexivity of equality),
+    /// two `Arc`s that point to the same allocation are never unequal.
     ///
     /// # Examples
     ///
diff --git a/src/liballoc/tests/boxed.rs b/src/liballoc/tests/boxed.rs
new file mode 100644 (file)
index 0000000..bc3d53b
--- /dev/null
@@ -0,0 +1,18 @@
+use std::ptr::NonNull;
+use std::mem::MaybeUninit;
+
+#[test]
+fn uninitialized_zero_size_box() {
+    assert_eq!(
+        &*Box::<()>::new_uninit() as *const _,
+        NonNull::<MaybeUninit<()>>::dangling().as_ptr(),
+    );
+    assert_eq!(
+        Box::<[()]>::new_uninit_slice(4).as_ptr(),
+        NonNull::<MaybeUninit<()>>::dangling().as_ptr(),
+    );
+    assert_eq!(
+        Box::<[String]>::new_uninit_slice(0).as_ptr(),
+        NonNull::<MaybeUninit<String>>::dangling().as_ptr(),
+    );
+}
index 5c611fd21d21bedf9584bca4119adf662ecd17f6..e4883abc8b56c3362d1fd4726f0d73746f430971 100644 (file)
@@ -221,6 +221,18 @@ fn check_symmetric_difference(a: &[i32], b: &[i32], expected: &[i32]) {
                                &[-2, 1, 5, 11, 14, 22]);
 }
 
+#[test]
+fn test_symmetric_difference_size_hint() {
+    let x: BTreeSet<i32> = [2, 4].iter().copied().collect();
+    let y: BTreeSet<i32> = [1, 2, 3].iter().copied().collect();
+    let mut iter = x.symmetric_difference(&y);
+    assert_eq!(iter.size_hint(), (0, Some(5)));
+    assert_eq!(iter.next(), Some(&1));
+    assert_eq!(iter.size_hint(), (0, Some(4)));
+    assert_eq!(iter.next(), Some(&3));
+    assert_eq!(iter.size_hint(), (0, Some(1)));
+}
+
 #[test]
 fn test_union() {
     fn check_union(a: &[i32], b: &[i32], expected: &[i32]) {
@@ -235,6 +247,18 @@ fn check_union(a: &[i32], b: &[i32], expected: &[i32]) {
                 &[-2, 1, 3, 5, 9, 11, 13, 16, 19, 24]);
 }
 
+#[test]
+fn test_union_size_hint() {
+    let x: BTreeSet<i32> = [2, 4].iter().copied().collect();
+    let y: BTreeSet<i32> = [1, 2, 3].iter().copied().collect();
+    let mut iter = x.union(&y);
+    assert_eq!(iter.size_hint(), (3, Some(5)));
+    assert_eq!(iter.next(), Some(&1));
+    assert_eq!(iter.size_hint(), (2, Some(4)));
+    assert_eq!(iter.next(), Some(&2));
+    assert_eq!(iter.size_hint(), (1, Some(2)));
+}
+
 #[test]
 // Only tests the simple function definition with respect to intersection
 fn test_is_disjoint() {
@@ -244,7 +268,7 @@ fn test_is_disjoint() {
 }
 
 #[test]
-// Also tests the trivial function definition of is_superset
+// Also implicitly tests the trivial function definition of is_superset
 fn test_is_subset() {
     fn is_subset(a: &[i32], b: &[i32]) -> bool {
         let set_a = a.iter().collect::<BTreeSet<_>>();
index 5723a30c0f34f62ec8829fde287e5877694ede5a..676874c8b27df07666641a6fada446d60cc8bf1a 100644 (file)
@@ -2,9 +2,9 @@
 #![feature(box_syntax)]
 #![feature(drain_filter)]
 #![feature(exact_size_is_empty)]
+#![feature(new_uninit)]
 #![feature(option_flattening)]
 #![feature(pattern)]
-#![feature(repeat_generic_slice)]
 #![feature(trusted_len)]
 #![feature(try_reserve)]
 #![feature(unboxed_closures)]
@@ -15,6 +15,7 @@
 
 mod arc;
 mod binary_heap;
+mod boxed;
 mod btree;
 mod cow_str;
 mod fmt;
diff --git a/src/liballoc/tests/str.rs.rej b/src/liballoc/tests/str.rs.rej
deleted file mode 100644 (file)
index abcba0c..0000000
+++ /dev/null
@@ -1,10 +0,0 @@
-diff a/src/liballoc/tests/str.rs b/src/liballoc/tests/str.rs   (rejected hunks)
-@@ -483,7 +483,7 @@ mod slice_index {
-     }
-     #[test]
--    #[cfg(not(target_arch = "asmjs"))] // hits an OOM
-+    #[cfg(not(target_arch = "js"))] // hits an OOM
-     #[cfg(not(miri))] // Miri is too slow
-     fn simple_big() {
-         fn a_million_letter_x() -> String {
index 85b59162620fab9791541adcc7ef63c8f82c12bb..e2704e807d104bd42378db2b496e5d5abf1fdd7a 100644 (file)
@@ -2,14 +2,14 @@
 //! of any `'static` type through runtime reflection.
 //!
 //! `Any` itself can be used to get a `TypeId`, and has more features when used
-//! as a trait object. As `&Any` (a borrowed trait object), it has the `is` and
-//! `downcast_ref` methods, to test if the contained value is of a given type,
-//! and to get a reference to the inner value as a type. As `&mut Any`, there
+//! as a trait object. As `&dyn Any` (a borrowed trait object), it has the `is`
+//! and `downcast_ref` methods, to test if the contained value is of a given type,
+//! and to get a reference to the inner value as a type. As `&mut dyn Any`, there
 //! is also the `downcast_mut` method, for getting a mutable reference to the
-//! inner value. `Box<Any>` adds the `downcast` method, which attempts to
+//! inner value. `Box<dyn Any>` adds the `downcast` method, which attempts to
 //! convert to a `Box<T>`. See the [`Box`] documentation for the full details.
 //!
-//! Note that &Any is limited to testing whether a value is of a specified
+//! Note that `&dyn Any` is limited to testing whether a value is of a specified
 //! concrete type, and cannot be used to test whether a type implements a trait.
 //!
 //! [`Box`]: ../../std/boxed/struct.Box.html
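A small sketch of the `&dyn Any` methods mentioned above (`is` and `downcast_ref`), checking and recovering the concrete type at runtime:

```rust
use std::any::Any;

fn describe(value: &dyn Any) -> String {
    if let Some(s) = value.downcast_ref::<String>() {
        format!("a String of length {}", s.len())
    } else if value.is::<i32>() {
        String::from("an i32")
    } else {
        String::from("something else")
    }
}

fn main() {
    assert_eq!(describe(&String::from("hi")), "a String of length 2");
    assert_eq!(describe(&7i32), "an i32");
    assert_eq!(describe(&0.5f64), "something else");
}
```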
@@ -445,6 +445,15 @@ pub const fn of<T: ?Sized + 'static>() -> TypeId {
 ///
 /// The current implementation uses the same infrastructure as compiler
 /// diagnostics and debuginfo, but this is not guaranteed.
+///
+/// # Example
+///
+/// ```rust
+/// assert_eq!(
+///     std::any::type_name::<Option<String>>(),
+///     "core::option::Option<alloc::string::String>",
+/// );
+/// ```
 #[stable(feature = "type_name", since = "1.38.0")]
 #[rustc_const_unstable(feature = "const_type_name")]
 pub const fn type_name<T: ?Sized>() -> &'static str {
index 8579dbf353e800e8c287bcfd3eecacc9eb7f9451..fda103a52d8bc851b8fa4d07f277eb27302d425f 100644 (file)
@@ -229,52 +229,6 @@ pub struct Cell<T: ?Sized> {
     value: UnsafeCell<T>,
 }
 
-impl<T:Copy> Cell<T> {
-    /// Returns a copy of the contained value.
-    ///
-    /// # Examples
-    ///
-    /// ```
-    /// use std::cell::Cell;
-    ///
-    /// let c = Cell::new(5);
-    ///
-    /// let five = c.get();
-    /// ```
-    #[inline]
-    #[stable(feature = "rust1", since = "1.0.0")]
-    pub fn get(&self) -> T {
-        unsafe{ *self.value.get() }
-    }
-
-    /// Updates the contained value using a function and returns the new value.
-    ///
-    /// # Examples
-    ///
-    /// ```
-    /// #![feature(cell_update)]
-    ///
-    /// use std::cell::Cell;
-    ///
-    /// let c = Cell::new(5);
-    /// let new = c.update(|x| x + 1);
-    ///
-    /// assert_eq!(new, 6);
-    /// assert_eq!(c.get(), 6);
-    /// ```
-    #[inline]
-    #[unstable(feature = "cell_update", issue = "50186")]
-    pub fn update<F>(&self, f: F) -> T
-    where
-        F: FnOnce(T) -> T,
-    {
-        let old = self.get();
-        let new = f(old);
-        self.set(new);
-        new
-    }
-}
-
 #[stable(feature = "rust1", since = "1.0.0")]
 unsafe impl<T: ?Sized> Send for Cell<T> where T: Send {}
 
@@ -448,6 +402,52 @@ pub fn into_inner(self) -> T {
     }
 }
 
+impl<T:Copy> Cell<T> {
+    /// Returns a copy of the contained value.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use std::cell::Cell;
+    ///
+    /// let c = Cell::new(5);
+    ///
+    /// let five = c.get();
+    /// ```
+    #[inline]
+    #[stable(feature = "rust1", since = "1.0.0")]
+    pub fn get(&self) -> T {
+        unsafe{ *self.value.get() }
+    }
+
+    /// Updates the contained value using a function and returns the new value.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// #![feature(cell_update)]
+    ///
+    /// use std::cell::Cell;
+    ///
+    /// let c = Cell::new(5);
+    /// let new = c.update(|x| x + 1);
+    ///
+    /// assert_eq!(new, 6);
+    /// assert_eq!(c.get(), 6);
+    /// ```
+    #[inline]
+    #[unstable(feature = "cell_update", issue = "50186")]
+    pub fn update<F>(&self, f: F) -> T
+    where
+        F: FnOnce(T) -> T,
+    {
+        let old = self.get();
+        let new = f(old);
+        self.set(new);
+        new
+    }
+}
+
 impl<T: ?Sized> Cell<T> {
     /// Returns a raw pointer to the underlying data in this cell.
     ///
index a69eb0f6d4b20fb1b05e621f4b962f1eb7c465d0..971d89e004446e306a1ad424c43375e36f6d07b4 100644 (file)
@@ -116,9 +116,9 @@ pub fn to_digit(self, radix: u32) -> Option<u32> {
 
         // the code is split up here to improve execution speed for cases where
         // the `radix` is constant and 10 or smaller
-        let val = if radix <= 10  {
+        let val = if radix <= 10 {
             match self {
-                '0' ..= '9' => self as u32 - '0' as u32,
+                '0'..='9' => self as u32 - '0' as u32,
                 _ => return None,
             }
         } else {
@@ -130,8 +130,11 @@ pub fn to_digit(self, radix: u32) -> Option<u32> {
             }
         };
 
-        if val < radix { Some(val) }
-        else { None }
+        if val < radix {
+            Some(val)
+        } else {
+            None
+        }
     }
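To make the final `val < radix` check above concrete, a short sketch: a character only counts as a digit if its value actually fits in the requested base.

```rust
fn main() {
    assert_eq!('7'.to_digit(10), Some(7));
    assert_eq!('b'.to_digit(16), Some(11));
    // 'b' maps to 11, which is not a valid digit in base 10, so the
    // `val < radix` check rejects it.
    assert_eq!('b'.to_digit(10), None);
}
```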
 
     /// Returns an iterator that yields the hexadecimal Unicode escape of a
@@ -303,8 +306,8 @@ pub fn escape_default(self) -> EscapeDefault {
             '\r' => EscapeDefaultState::Backslash('r'),
             '\n' => EscapeDefaultState::Backslash('n'),
             '\\' | '\'' | '"' => EscapeDefaultState::Backslash(self),
-            '\x20' ..= '\x7e' => EscapeDefaultState::Char(self),
-            _ => EscapeDefaultState::Unicode(self.escape_unicode())
+            '\x20'..='\x7e' => EscapeDefaultState::Char(self),
+            _ => EscapeDefaultState::Unicode(self.escape_unicode()),
         };
         EscapeDefault { state: init_state }
     }
@@ -436,30 +439,31 @@ pub fn len_utf16(self) -> usize {
     pub fn encode_utf8(self, dst: &mut [u8]) -> &mut str {
         let code = self as u32;
         unsafe {
-            let len =
-            if code < MAX_ONE_B && !dst.is_empty() {
+            let len = if code < MAX_ONE_B && !dst.is_empty() {
                 *dst.get_unchecked_mut(0) = code as u8;
                 1
             } else if code < MAX_TWO_B && dst.len() >= 2 {
                 *dst.get_unchecked_mut(0) = (code >> 6 & 0x1F) as u8 | TAG_TWO_B;
                 *dst.get_unchecked_mut(1) = (code & 0x3F) as u8 | TAG_CONT;
                 2
-            } else if code < MAX_THREE_B && dst.len() >= 3  {
+            } else if code < MAX_THREE_B && dst.len() >= 3 {
                 *dst.get_unchecked_mut(0) = (code >> 12 & 0x0F) as u8 | TAG_THREE_B;
-                *dst.get_unchecked_mut(1) = (code >>  6 & 0x3F) as u8 | TAG_CONT;
+                *dst.get_unchecked_mut(1) = (code >> 6 & 0x3F) as u8 | TAG_CONT;
                 *dst.get_unchecked_mut(2) = (code & 0x3F) as u8 | TAG_CONT;
                 3
             } else if dst.len() >= 4 {
                 *dst.get_unchecked_mut(0) = (code >> 18 & 0x07) as u8 | TAG_FOUR_B;
                 *dst.get_unchecked_mut(1) = (code >> 12 & 0x3F) as u8 | TAG_CONT;
-                *dst.get_unchecked_mut(2) = (code >>  6 & 0x3F) as u8 | TAG_CONT;
+                *dst.get_unchecked_mut(2) = (code >> 6 & 0x3F) as u8 | TAG_CONT;
                 *dst.get_unchecked_mut(3) = (code & 0x3F) as u8 | TAG_CONT;
                 4
             } else {
-                panic!("encode_utf8: need {} bytes to encode U+{:X}, but the buffer has {}",
+                panic!(
+                    "encode_utf8: need {} bytes to encode U+{:X}, but the buffer has {}",
                     from_u32_unchecked(code).len_utf8(),
                     code,
-                    dst.len())
+                    dst.len(),
+                )
             };
             from_utf8_unchecked_mut(dst.get_unchecked_mut(..len))
         }
@@ -515,15 +519,24 @@ pub fn encode_utf16(self, dst: &mut [u16]) -> &mut [u16] {
                 *dst.get_unchecked_mut(1) = 0xDC00 | ((code as u16) & 0x3FF);
                 slice::from_raw_parts_mut(dst.as_mut_ptr(), 2)
             } else {
-                panic!("encode_utf16: need {} units to encode U+{:X}, but the buffer has {}",
+                panic!(
+                    "encode_utf16: need {} units to encode U+{:X}, but the buffer has {}",
                     from_u32_unchecked(code).len_utf16(),
                     code,
-                    dst.len())
+                    dst.len(),
+                )
             }
         }
     }
 
-    /// Returns `true` if this `char` is an alphabetic code point, and false if not.
+    /// Returns `true` if this `char` has the `Alphabetic` property.
+    ///
+    /// `Alphabetic` is described in Chapter 4 (Character Properties) of the [Unicode Standard] and
+    /// specified in the [Unicode Character Database][ucd] [`DerivedCoreProperties.txt`].
+    ///
+    /// [Unicode Standard]: https://www.unicode.org/versions/latest/
+    /// [ucd]: https://www.unicode.org/reports/tr44/
+    /// [`DerivedCoreProperties.txt`]: https://www.unicode.org/Public/UCD/latest/ucd/DerivedCoreProperties.txt
     ///
     /// # Examples
     ///
@@ -547,10 +560,14 @@ pub fn is_alphabetic(self) -> bool {
         }
     }
 
-    /// Returns `true` if this `char` is lowercase.
+    /// Returns `true` if this `char` has the `Lowercase` property.
     ///
-    /// 'Lowercase' is defined according to the terms of the Unicode Derived Core
-    /// Property `Lowercase`.
+    /// `Lowercase` is described in Chapter 4 (Character Properties) of the [Unicode Standard] and
+    /// specified in the [Unicode Character Database][ucd] [`DerivedCoreProperties.txt`].
+    ///
+    /// [Unicode Standard]: https://www.unicode.org/versions/latest/
+    /// [ucd]: https://www.unicode.org/reports/tr44/
+    /// [`DerivedCoreProperties.txt`]: https://www.unicode.org/Public/UCD/latest/ucd/DerivedCoreProperties.txt
     ///
     /// # Examples
     ///
@@ -575,10 +592,14 @@ pub fn is_lowercase(self) -> bool {
         }
     }
 
-    /// Returns `true` if this `char` is uppercase.
+    /// Returns `true` if this `char` has the `Uppercase` property.
+    ///
+    /// `Uppercase` is described in Chapter 4 (Character Properties) of the [Unicode Standard] and
+    /// specified in the [Unicode Character Database][ucd] [`DerivedCoreProperties.txt`].
     ///
-    /// 'Uppercase' is defined according to the terms of the Unicode Derived Core
-    /// Property `Uppercase`.
+    /// [Unicode Standard]: https://www.unicode.org/versions/latest/
+    /// [ucd]: https://www.unicode.org/reports/tr44/
+    /// [`DerivedCoreProperties.txt`]: https://www.unicode.org/Public/UCD/latest/ucd/DerivedCoreProperties.txt
     ///
     /// # Examples
     ///
@@ -603,10 +624,12 @@ pub fn is_uppercase(self) -> bool {
         }
     }
 
-    /// Returns `true` if this `char` is whitespace.
+    /// Returns `true` if this `char` has the `White_Space` property.
     ///
-    /// 'Whitespace' is defined according to the terms of the Unicode Derived Core
-    /// Property `White_Space`.
+    /// `White_Space` is specified in the [Unicode Character Database][ucd] [`PropList.txt`].
+    ///
+    /// [ucd]: https://www.unicode.org/reports/tr44/
+    /// [`PropList.txt`]: https://www.unicode.org/Public/UCD/latest/ucd/PropList.txt
     ///
     /// # Examples
     ///
@@ -630,10 +653,10 @@ pub fn is_whitespace(self) -> bool {
         }
     }
 
-    /// Returns `true` if this `char` is alphanumeric.
+    /// Returns `true` if this `char` satisfies either [`is_alphabetic()`] or [`is_numeric()`].
     ///
-    /// 'Alphanumeric'-ness is defined in terms of the Unicode General Categories
-    /// `Nd`, `Nl`, `No` and the Derived Core Property `Alphabetic`.
+    /// [`is_alphabetic()`]: #method.is_alphabetic
+    /// [`is_numeric()`]: #method.is_numeric
     ///
     /// # Examples
     ///
@@ -655,10 +678,15 @@ pub fn is_alphanumeric(self) -> bool {
         self.is_alphabetic() || self.is_numeric()
     }
 
-    /// Returns `true` if this `char` is a control code point.
+    /// Returns `true` if this `char` has the general category for control codes.
+    ///
+    /// Control codes (code points with the general category of `Cc`) are described in Chapter 4
+    /// (Character Properties) of the [Unicode Standard] and specified in the [Unicode Character
+    /// Database][ucd] [`UnicodeData.txt`].
     ///
-    /// 'Control code point' is defined in terms of the Unicode General
-    /// Category `Cc`.
+    /// [Unicode Standard]: https://www.unicode.org/versions/latest/
+    /// [ucd]: https://www.unicode.org/reports/tr44/
+    /// [`UnicodeData.txt`]: https://www.unicode.org/Public/UCD/latest/ucd/UnicodeData.txt
     ///
     /// # Examples
     ///
@@ -675,19 +703,29 @@ pub fn is_control(self) -> bool {
         general_category::Cc(self)
     }
 
-    /// Returns `true` if this `char` is an extended grapheme character.
+    /// Returns `true` if this `char` has the `Grapheme_Extend` property.
     ///
-    /// 'Extended grapheme character' is defined in terms of the Unicode Shaping and Rendering
-    /// Category `Grapheme_Extend`.
+    /// `Grapheme_Extend` is described in [Unicode Standard Annex #29 (Unicode Text
+    /// Segmentation)][uax29] and specified in the [Unicode Character Database][ucd]
+    /// [`DerivedCoreProperties.txt`].
+    ///
+    /// [uax29]: https://www.unicode.org/reports/tr29/
+    /// [ucd]: https://www.unicode.org/reports/tr44/
+    /// [`DerivedCoreProperties.txt`]: https://www.unicode.org/Public/UCD/latest/ucd/DerivedCoreProperties.txt
     #[inline]
     pub(crate) fn is_grapheme_extended(self) -> bool {
         derived_property::Grapheme_Extend(self)
     }
 
-    /// Returns `true` if this `char` is numeric.
+    /// Returns `true` if this `char` has one of the general categories for numbers.
+    ///
+    /// The general categories for numbers (`Nd` for decimal digits, `Nl` for letter-like numeric
+    /// characters, and `No` for other numeric characters) are specified in the [Unicode Character
+    /// Database][ucd] [`UnicodeData.txt`].
     ///
-    /// 'Numeric'-ness is defined in terms of the Unicode General Categories
-    /// `Nd`, `Nl`, `No`.
+    /// [Unicode Standard]: https://www.unicode.org/versions/latest/
+    /// [ucd]: https://www.unicode.org/reports/tr44/
+    /// [`UnicodeData.txt`]: https://www.unicode.org/Public/UCD/latest/ucd/UnicodeData.txt
     ///
     /// # Examples
     ///
@@ -713,25 +751,29 @@ pub fn is_numeric(self) -> bool {
         }
     }
 
-    /// Returns an iterator that yields the lowercase equivalent of a `char`
-    /// as one or more `char`s.
+    /// Returns an iterator that yields the lowercase mapping of this `char` as one or more
+    /// `char`s.
     ///
-    /// If a character does not have a lowercase equivalent, the same character
-    /// will be returned back by the iterator.
+    /// If this `char` does not have a lowercase mapping, the iterator yields the same `char`.
     ///
-    /// This performs complex unconditional mappings with no tailoring: it maps
-    /// one Unicode character to its lowercase equivalent according to the
-    /// [Unicode database] and the additional complex mappings
-    /// [`SpecialCasing.txt`]. Conditional mappings (based on context or
-    /// language) are not considered here.
+    /// If this `char` has a one-to-one lowercase mapping given by the [Unicode Character
+    /// Database][ucd] [`UnicodeData.txt`], the iterator yields that `char`.
     ///
-    /// For a full reference, see [here][reference].
+    /// [ucd]: https://www.unicode.org/reports/tr44/
+    /// [`UnicodeData.txt`]: https://www.unicode.org/Public/UCD/latest/ucd/UnicodeData.txt
     ///
-    /// [Unicode database]: ftp://ftp.unicode.org/Public/UNIDATA/UnicodeData.txt
+    /// If this `char` requires special considerations (e.g. multiple `char`s), the iterator yields
+    /// the `char`(s) given by [`SpecialCasing.txt`].
     ///
-    /// [`SpecialCasing.txt`]: ftp://ftp.unicode.org/Public/UNIDATA/SpecialCasing.txt
+    /// [`SpecialCasing.txt`]: https://www.unicode.org/Public/UCD/latest/ucd/SpecialCasing.txt
     ///
-    /// [reference]: http://www.unicode.org/versions/Unicode7.0.0/ch03.pdf#G33992
+    /// This operation performs an unconditional mapping without tailoring. That is, the conversion
+    /// is independent of context and language.
+    ///
+    /// In the [Unicode Standard], Chapter 4 (Character Properties) discusses case mapping in
+    /// general and Chapter 3 (Conformance) discusses the default algorithm for case conversion.
+    ///
+    /// [Unicode Standard]: https://www.unicode.org/versions/latest/
     ///
     /// # Examples
     ///
@@ -774,25 +816,29 @@ pub fn to_lowercase(self) -> ToLowercase {
         ToLowercase(CaseMappingIter::new(conversions::to_lower(self)))
     }
 
-    /// Returns an iterator that yields the uppercase equivalent of a `char`
-    /// as one or more `char`s.
+    /// Returns an iterator that yields the uppercase mapping of this `char` as one or more
+    /// `char`s.
+    ///
+    /// If this `char` does not have an uppercase mapping, the iterator yields the same `char`.
+    ///
+    /// If this `char` has a one-to-one uppercase mapping given by the [Unicode Character
+    /// Database][ucd] [`UnicodeData.txt`], the iterator yields that `char`.
     ///
-    /// If a character does not have an uppercase equivalent, the same character
-    /// will be returned back by the iterator.
+    /// [ucd]: https://www.unicode.org/reports/tr44/
+    /// [`UnicodeData.txt`]: https://www.unicode.org/Public/UCD/latest/ucd/UnicodeData.txt
     ///
-    /// This performs complex unconditional mappings with no tailoring: it maps
-    /// one Unicode character to its uppercase equivalent according to the
-    /// [Unicode database] and the additional complex mappings
-    /// [`SpecialCasing.txt`]. Conditional mappings (based on context or
-    /// language) are not considered here.
+    /// If this `char` requires special considerations (e.g. multiple `char`s), the iterator yields
+    /// the `char`(s) given by [`SpecialCasing.txt`].
     ///
-    /// For a full reference, see [here][reference].
+    /// [`SpecialCasing.txt`]: https://www.unicode.org/Public/UCD/latest/ucd/SpecialCasing.txt
     ///
-    /// [Unicode database]: ftp://ftp.unicode.org/Public/UNIDATA/UnicodeData.txt
+    /// This operation performs an unconditional mapping without tailoring. That is, the conversion
+    /// is independent of context and language.
     ///
-    /// [`SpecialCasing.txt`]: ftp://ftp.unicode.org/Public/UNIDATA/SpecialCasing.txt
+    /// In the [Unicode Standard], Chapter 4 (Character Properties) discusses case mapping in
+    /// general and Chapter 3 (Conformance) discusses the default algorithm for case conversion.
     ///
-    /// [reference]: http://www.unicode.org/versions/Unicode7.0.0/ch03.pdf#G33992
+    /// [Unicode Standard]: https://www.unicode.org/versions/latest/
     ///
     /// # Examples
     ///
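A small demonstration of the one-to-one versus `SpecialCasing.txt` distinction described in the rewritten docs above; the specific characters are illustrative.

```
fn main() {
    // One-to-one mapping from UnicodeData.txt: 'A' lowercases to a single 'a'.
    assert_eq!('A'.to_lowercase().collect::<String>(), "a");

    // SpecialCasing.txt mapping: 'ß' uppercases to the two chars "SS".
    assert_eq!('ß'.to_uppercase().collect::<String>(), "SS");

    // A char with no uppercase mapping is yielded unchanged.
    assert_eq!('1'.to_uppercase().collect::<String>(), "1");
}
```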
index 15ce2277fa00d9188aad74dc7043f72faa149a3e..e6e3454b36f810bb96cf2c525059182aa3eb69c9 100644 (file)
@@ -775,10 +775,10 @@ pub fn entry(&mut self, key: &dyn fmt::Debug, value: &dyn fmt::Debug) -> &mut De
                reason = "recently added",
                issue = "62482")]
     pub fn key(&mut self, key: &dyn fmt::Debug) -> &mut DebugMap<'a, 'b> {
-        assert!(!self.has_key, "attempted to begin a new map entry \
-                                without completing the previous one");
-
         self.result = self.result.and_then(|_| {
+            assert!(!self.has_key, "attempted to begin a new map entry \
+                                    without completing the previous one");
+
             if self.is_pretty() {
                 if !self.has_fields {
                     self.fmt.write_str("\n")?;
@@ -839,9 +839,9 @@ pub fn key(&mut self, key: &dyn fmt::Debug) -> &mut DebugMap<'a, 'b> {
                reason = "recently added",
                issue = "62482")]
     pub fn value(&mut self, value: &dyn fmt::Debug) -> &mut DebugMap<'a, 'b> {
-        assert!(self.has_key, "attempted to format a map value before its key");
-
         self.result = self.result.and_then(|_| {
+            assert!(self.has_key, "attempted to format a map value before its key");
+
             if self.is_pretty() {
                 let mut slot = None;
                 let mut writer = PadAdapter::wrap(&mut self.fmt, &mut slot, &mut self.state);
@@ -924,9 +924,11 @@ pub fn entries<K, V, I>(&mut self, entries: I) -> &mut DebugMap<'a, 'b>
     /// ```
     #[stable(feature = "debug_builders", since = "1.2.0")]
     pub fn finish(&mut self) -> fmt::Result {
-        assert!(!self.has_key, "attempted to finish a map with a partial entry");
+        self.result.and_then(|_| {
+            assert!(!self.has_key, "attempted to finish a map with a partial entry");
 
-        self.result.and_then(|_| self.fmt.write_str("}"))
+            self.fmt.write_str("}")
+        })
     }
 
     fn is_pretty(&self) -> bool {
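The hunks above move each `assert!` inside the `and_then` closure so the invariant is only checked while the accumulated `fmt::Result` is still `Ok`; once an entry has failed, the builder short-circuits instead of panicking over the stale `has_key` state. A minimal sketch of that pattern with a hypothetical `Builder` type (not part of this diff):

```
// Hypothetical builder mirroring the DebugMap fix: invariants run only on
// the success path, so an earlier error cannot turn into a panic.
struct Builder {
    result: Result<(), ()>,
    has_key: bool,
}

impl Builder {
    fn key(&mut self) -> &mut Self {
        self.result = self.result.and_then(|_| {
            assert!(!self.has_key, "attempted to begin a new map entry");
            self.has_key = true;
            Ok(())
        });
        self
    }
}

fn main() {
    // A builder that already failed and was left with a dangling key.
    let mut b = Builder { result: Err(()), has_key: true };
    b.key(); // no panic: the Err short-circuits the closure
    assert!(b.result.is_err());
}
```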
index 5dfdd1623061e19e7fb8fb0d307bfdd0ed6f9068..8413b2e0ac49e0f2f2aad0841f7944affe370ccb 100644 (file)
@@ -108,10 +108,10 @@ pub mod rt {
 /// [`io::Write`]: ../../std/io/trait.Write.html
 #[stable(feature = "rust1", since = "1.0.0")]
 pub trait Write {
-    /// Writes a slice of bytes into this writer, returning whether the write
+    /// Writes a string slice into this writer, returning whether the write
     /// succeeded.
     ///
-    /// This method can only succeed if the entire byte slice was successfully
+    /// This method can only succeed if the entire string slice was successfully
     /// written, and this method will not return until all data has been
     /// written or an error occurs.
     ///
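A short sketch of implementing the trait as documented above: the writer receives string slices and must either consume each one entirely or report an error. The `Counter` type below is illustrative, not part of the diff.

```
use std::fmt::{self, Write};

// Illustrative writer: records how many bytes of string data it received.
struct Counter {
    bytes: usize,
}

impl Write for Counter {
    fn write_str(&mut self, s: &str) -> fmt::Result {
        // The entire string slice is accepted, so the write succeeds.
        self.bytes += s.len();
        Ok(())
    }
}

fn main() {
    let mut c = Counter { bytes: 0 };
    write!(c, "{}-{}", 12, 34).unwrap();
    assert_eq!(c.bytes, 5); // "12-34"
}
```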
index e8c76b49a8e567a92ead873465484b63d10d8a55..30e8dddff85ad47617060586fe10fd6b1fa9e047 100644 (file)
 #![feature(adx_target_feature)]
 #![feature(maybe_uninit_slice)]
 #![feature(external_doc)]
-#![feature(mem_take)]
 #![feature(associated_type_bounds)]
 
 #[prelude_import]
index ca1b06fb81a78f534a8774cabec180a3af0873a0..1320e63df0635289d00b1ba756bb35182605de92 100644 (file)
@@ -520,18 +520,20 @@ macro_rules! unreachable {
     });
 }
 
-/// Indicates unfinished code.
+/// Indicates unfinished code by panicking with a message of "not yet implemented".
 ///
-/// This can be useful if you are prototyping and are just looking to have your
-/// code type-check, or if you're implementing a trait that requires multiple
-/// methods, and you're only planning on using one of them.
+/// This allows your code to type-check, which is useful if you are prototyping or
+/// implementing a trait that requires multiple methods which you don't plan to use all of.
 ///
 /// There is no difference between `unimplemented!` and `todo!` apart from the
 /// name.
 ///
 /// # Panics
 ///
-/// This will always [panic!](macro.panic.html)
+/// This will always [panic!](macro.panic.html) because `unimplemented!` is just a
+/// shorthand for `panic!` with a fixed, specific message.
+///
+/// Like `panic!`, this macro has a second form for displaying custom values.
 ///
 /// # Examples
 ///
@@ -539,38 +541,53 @@ macro_rules! unreachable {
 ///
 /// ```
 /// trait Foo {
-///     fn bar(&self);
+///     fn bar(&self) -> u8;
 ///     fn baz(&self);
+///     fn qux(&self) -> Result<u64, ()>;
 /// }
 /// ```
 ///
-/// We want to implement `Foo` on one of our types, but we also want to work on
-/// just `bar()` first. In order for our code to compile, we need to implement
-/// `baz()`, so we can use `unimplemented!`:
+/// We want to implement `Foo` for `MyStruct`, but so far we only know how to
+/// implement the `bar()` function. `baz()` and `qux()` will still need to be defined
+/// in our implementation of `Foo`, but we can use `unimplemented!` in their definitions
+/// to allow our code to compile.
+///
+/// In the meantime, we want our program to stop running as soon as it
+/// reaches one of these unimplemented functions.
 ///
 /// ```
 /// # trait Foo {
-/// #     fn bar(&self);
+/// #     fn bar(&self) -> u8;
 /// #     fn baz(&self);
+/// #     fn qux(&self) -> Result<u64, ()>;
 /// # }
 /// struct MyStruct;
 ///
 /// impl Foo for MyStruct {
-///     fn bar(&self) {
-///         // implementation goes here
+///     fn bar(&self) -> u8 {
+///         1 + 1
 ///     }
 ///
 ///     fn baz(&self) {
-///         // let's not worry about implementing baz() for now
+///         // We aren't sure how to even start writing baz yet,
+///         // so we have no logic here at all.
+///         // This will display "thread 'main' panicked at 'not yet implemented'".
 ///         unimplemented!();
 ///     }
+///
+///     fn qux(&self) -> Result<u64, ()> {
+///         let n = self.bar();
+///         // We have some logic here,
+///         // so we can use unimplemented! to display what we have so far.
+///         // This will display:
+///         // "thread 'main' panicked at 'not yet implemented: we need to divide by 2'".
+///         unimplemented!("we need to divide by {}", n);
+///     }
 /// }
 ///
 /// fn main() {
 ///     let s = MyStruct;
 ///     s.bar();
-///
-///     // we aren't even using baz() yet, so this is fine.
 /// }
 /// ```
 #[macro_export]
index 95ad4272cedd0568a953edb9d0434cc6490a47dd..c7da56aad309a976f9291effb656acda9364a1dc 100644 (file)
@@ -236,7 +236,7 @@ pub fn forget_unsized<T: ?Sized>(t: T) {
 /// ```
 ///
 /// [alignment]: ./fn.align_of.html
-#[inline]
+#[inline(always)]
 #[stable(feature = "rust1", since = "1.0.0")]
 #[rustc_promotable]
 pub const fn size_of<T>() -> usize {
@@ -328,7 +328,7 @@ pub fn min_align_of_val<T: ?Sized>(val: &T) -> usize {
 ///
 /// assert_eq!(4, mem::align_of::<i32>());
 /// ```
-#[inline]
+#[inline(always)]
 #[stable(feature = "rust1", since = "1.0.0")]
 #[rustc_promotable]
 pub const fn align_of<T>() -> usize {
@@ -520,8 +520,6 @@ pub fn swap<T>(x: &mut T, y: &mut T) {
 /// A simple example:
 ///
 /// ```
-/// #![feature(mem_take)]
-///
 /// use std::mem;
 ///
 /// let mut v: Vec<i32> = vec![1, 2];
@@ -552,8 +550,6 @@ pub fn swap<T>(x: &mut T, y: &mut T) {
 /// `self`, allowing it to be returned:
 ///
 /// ```
-/// #![feature(mem_take)]
-///
 /// use std::mem;
 ///
 /// # struct Buffer<T> { buf: Vec<T> }
@@ -572,7 +568,7 @@ pub fn swap<T>(x: &mut T, y: &mut T) {
 ///
 /// [`Clone`]: ../../std/clone/trait.Clone.html
 #[inline]
-#[unstable(feature = "mem_take", issue = "61129")]
+#[stable(feature = "mem_take", since = "1.40.0")]
 pub fn take<T: Default>(dest: &mut T) -> T {
     replace(dest, T::default())
 }
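With `mem::take` stabilized above (the `#![feature(mem_take)]` lines drop out of the doc examples and the attribute becomes `#[stable(..., since = "1.40.0")]`), usage is unchanged; a compact recap:

```
use std::mem;

fn main() {
    let mut v: Vec<i32> = vec![1, 2];
    // Replace `v` with its Default value (an empty Vec) and return the old one.
    let old = mem::take(&mut v);
    assert_eq!(old, vec![1, 2]);
    assert!(v.is_empty());
}
```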
index ebde82de83457709337f2bbd61317556ac142fcd..8f4ade377e312c013b384f2134c3876e50560501 100644 (file)
@@ -1058,7 +1058,7 @@ pub fn saturating_abs(self) -> Self {
             #[inline]
             pub fn saturating_mul(self, rhs: Self) -> Self {
                 self.checked_mul(rhs).unwrap_or_else(|| {
-                    if (self < 0 && rhs < 0) || (self > 0 && rhs > 0) {
+                    if (self < 0) == (rhs < 0) {
                         Self::max_value()
                     } else {
                         Self::min_value()
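The simplified condition works because the closure only runs after `checked_mul` has reported overflow, so neither operand is zero: equal signs mean the true product is positive and saturates at the maximum, while differing signs saturate at the minimum. A check on concrete values (chosen for illustration):

```
fn main() {
    // Same signs overflow towards the maximum...
    assert_eq!(100i8.saturating_mul(100), i8::max_value());
    assert_eq!((-100i8).saturating_mul(-100), i8::max_value());
    // ...different signs overflow towards the minimum.
    assert_eq!(100i8.saturating_mul(-100), i8::min_value());
}
```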
@@ -1864,7 +1864,7 @@ pub fn pow(self, mut exp: u32) -> Self {
 
 # Panics
 
-This function will panic if `rhs` is 0.
+This function will panic if `rhs` is 0 or the division results in overflow.
 
 # Examples
 
@@ -1903,7 +1903,7 @@ pub fn div_euclid(self, rhs: Self) -> Self {
 
 # Panics
 
-This function will panic if `rhs` is 0.
+This function will panic if `rhs` is 0 or the division results in overflow.
 
 # Examples
 
@@ -3694,6 +3694,10 @@ pub fn pow(self, mut exp: u32) -> Self {
 definitions of division are equal, this
 is exactly equal to `self / rhs`.
 
+# Panics
+
+This function will panic if `rhs` is 0.
+
 # Examples
 
 Basic usage:
@@ -3719,6 +3723,10 @@ pub fn div_euclid(self, rhs: Self) -> Self {
 definitions of division are equal, this
 is exactly equal to `self % rhs`.
 
+# Panics
+
+This function will panic if `rhs` is 0.
+
 # Examples
 
 Basic usage:
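The first two hunks here add overflow to the documented panic conditions for `div_euclid`/`rem_euclid`, while the last two add a missing `# Panics` section for the plain `rhs == 0` case. A small illustration of the non-panicking behavior, with the panic cases left commented out so the example runs:

```
fn main() {
    // Euclidean division keeps the remainder non-negative.
    assert_eq!((-7i32).div_euclid(4), -2);
    assert_eq!((-7i32).rem_euclid(4), 1);

    // Documented panic cases:
    // let _ = 7i32.div_euclid(0);               // rhs == 0
    // let _ = i32::min_value().div_euclid(-1);  // overflow
}
```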
index 59a10ae99bb6aa5bff9c66a5ebe3b2520a22a807..5fe9895d8d24f94543955286541f66ae464ec06f 100644 (file)
@@ -437,7 +437,7 @@ pub const fn trailing_zeros(self) -> u32 {
             /// wrapping the truncated bits to the end of the resulting
             /// integer.
             ///
-            /// Please note this isn't the same operation as the `>>` shifting
+            /// Please note this isn't the same operation as the `<<` shifting
             /// operator!
             ///
             /// # Examples
@@ -463,7 +463,7 @@ pub const fn rotate_left(self, n: u32) -> Self {
             /// wrapping the truncated bits to the beginning of the resulting
             /// integer.
             ///
-            /// Please note this isn't the same operation as the `<<` shifting
+            /// Please note this isn't the same operation as the `>>` shifting
             /// operator!
             ///
             /// # Examples
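The corrected sentences above make the point that rotation is not a shift: `rotate_left` wraps the truncated high bits around to the low end, where `<<` simply discards them (and vice versa for `rotate_right` and `>>`). The same relationship can be checked on the plain integer methods; the values below are illustrative.

```
fn main() {
    let x: u8 = 0b1001_0110;
    // rotate_left carries the two truncated high bits around to the low end...
    assert_eq!(x.rotate_left(2), 0b0101_1010);
    // ...while `<<` discards them.
    assert_eq!(x << 2, 0b0101_1000);
}
```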
index 301e432c98dfc4a3af707cf9e2e87475090803c2..47e3a0d21676f6caed5ed4c23832283215947ab5 100644 (file)
@@ -1102,7 +1102,6 @@ pub fn unwrap_or_default(self) -> T {
     }
 }
 
-#[unstable(feature = "inner_deref", reason = "newly added", issue = "50264")]
 impl<T: Deref> Option<T> {
     /// Converts from `Option<T>` (or `&Option<T>`) to `Option<&T::Target>`.
     ///
@@ -1114,20 +1113,18 @@ impl<T: Deref> Option<T> {
     /// # Examples
     ///
     /// ```
-    /// #![feature(inner_deref)]
-    ///
     /// let x: Option<String> = Some("hey".to_owned());
     /// assert_eq!(x.as_deref(), Some("hey"));
     ///
     /// let x: Option<String> = None;
     /// assert_eq!(x.as_deref(), None);
     /// ```
+    #[stable(feature = "option_deref", since = "1.40.0")]
     pub fn as_deref(&self) -> Option<&T::Target> {
         self.as_ref().map(|t| t.deref())
     }
 }
 
-#[unstable(feature = "inner_deref", reason = "newly added", issue = "50264")]
 impl<T: DerefMut> Option<T> {
     /// Converts from `Option<T>` (or `&mut Option<T>`) to `Option<&mut T::Target>`.
     ///
@@ -1137,14 +1134,13 @@ impl<T: DerefMut> Option<T> {
     /// # Examples
     ///
     /// ```
-    /// #![feature(inner_deref)]
-    ///
     /// let mut x: Option<String> = Some("hey".to_owned());
     /// assert_eq!(x.as_deref_mut().map(|x| {
     ///     x.make_ascii_uppercase();
     ///     x
     /// }), Some("HEY".to_owned().as_mut_str()));
     /// ```
+    #[stable(feature = "option_deref", since = "1.40.0")]
     pub fn as_deref_mut(&mut self) -> Option<&mut T::Target> {
         self.as_mut().map(|t| t.deref_mut())
     }
index b5462d988378be5c6f099456ffae8ccfa44d7036..4e79ea812044b41deb1c370c296965c7dde34f13 100644 (file)
@@ -63,6 +63,7 @@ impl<T> [T] {
     #[stable(feature = "rust1", since = "1.0.0")]
     #[inline]
     // SAFETY: const sound because we transmute out the length field as a usize (which it must be)
+    #[allow(unused_attributes)]
     #[allow_internal_unstable(const_fn_union)]
     pub const fn len(&self) -> usize {
         unsafe {
index ece61dde490fd5cb8a3d843448544bcd1106863a..885696e5acf49f8550c6fa85acc4eda8ec2b891a 100644 (file)
@@ -2167,6 +2167,7 @@ pub fn is_char_boundary(&self, index: usize) -> bool {
     #[stable(feature = "rust1", since = "1.0.0")]
     #[inline(always)]
     // SAFETY: const sound because we transmute two types with the same layout
+    #[allow(unused_attributes)]
     #[allow_internal_unstable(const_fn_union)]
     pub const fn as_bytes(&self) -> &[u8] {
         #[repr(C)]
index c9ccef972c2b5faabdabf7f55a79c4378d180a9e..73d5abf1aed2378a27c50a8c56fbc271a9cfb889 100644 (file)
 //!
 //! Each method takes an [`Ordering`] which represents the strength of
 //! the memory barrier for that operation. These orderings are the
-//! same as [LLVM atomic orderings][1]. For more information see the [nomicon][2].
+//! same as the [C++20 atomic orderings][1]. For more information see the [nomicon][2].
 //!
 //! [`Ordering`]: enum.Ordering.html
 //!
-//! [1]: https://llvm.org/docs/LangRef.html#memory-model-for-concurrent-operations
+//! [1]: https://en.cppreference.com/w/cpp/atomic/memory_order
 //! [2]: ../../../nomicon/atomics.html
 //!
 //! Atomic variables are safe to share between threads (they implement [`Sync`])
 //! ```
 
 #![stable(feature = "rust1", since = "1.0.0")]
-#![cfg_attr(not(target_has_atomic = "8"), allow(dead_code))]
-#![cfg_attr(not(target_has_atomic = "8"), allow(unused_imports))]
+#![cfg_attr(not(target_has_atomic_load_store = "8"), allow(dead_code))]
+#![cfg_attr(not(target_has_atomic_load_store = "8"), allow(unused_imports))]
 
 use self::Ordering::*;
 
@@ -160,14 +160,14 @@ pub fn spin_loop_hint() {
 /// This type has the same in-memory representation as a [`bool`].
 ///
 /// [`bool`]: ../../../std/primitive.bool.html
-#[cfg(target_has_atomic = "8")]
+#[cfg(any(bootstrap, target_has_atomic_load_store = "8"))]
 #[stable(feature = "rust1", since = "1.0.0")]
 #[repr(C, align(1))]
 pub struct AtomicBool {
     v: UnsafeCell<u8>,
 }
 
-#[cfg(target_has_atomic = "8")]
+#[cfg(any(bootstrap, target_has_atomic_load_store = "8"))]
 #[stable(feature = "rust1", since = "1.0.0")]
 impl Default for AtomicBool {
     /// Creates an `AtomicBool` initialized to `false`.
@@ -177,14 +177,14 @@ fn default() -> Self {
 }
 
 // Send is implicitly implemented for AtomicBool.
-#[cfg(target_has_atomic = "8")]
+#[cfg(any(bootstrap, target_has_atomic_load_store = "8"))]
 #[stable(feature = "rust1", since = "1.0.0")]
 unsafe impl Sync for AtomicBool {}
 
 /// A raw pointer type which can be safely shared between threads.
 ///
 /// This type has the same in-memory representation as a `*mut T`.
-#[cfg(target_has_atomic = "ptr")]
+#[cfg(any(bootstrap, target_has_atomic_load_store = "ptr"))]
 #[stable(feature = "rust1", since = "1.0.0")]
 #[cfg_attr(target_pointer_width = "16", repr(C, align(2)))]
 #[cfg_attr(target_pointer_width = "32", repr(C, align(4)))]
@@ -193,7 +193,7 @@ pub struct AtomicPtr<T> {
     p: UnsafeCell<*mut T>,
 }
 
-#[cfg(target_has_atomic = "ptr")]
+#[cfg(any(bootstrap, target_has_atomic_load_store = "ptr"))]
 #[stable(feature = "rust1", since = "1.0.0")]
 impl<T> Default for AtomicPtr<T> {
     /// Creates a null `AtomicPtr<T>`.
@@ -202,10 +202,10 @@ fn default() -> AtomicPtr<T> {
     }
 }
 
-#[cfg(target_has_atomic = "ptr")]
+#[cfg(any(bootstrap, target_has_atomic_load_store = "ptr"))]
 #[stable(feature = "rust1", since = "1.0.0")]
 unsafe impl<T> Send for AtomicPtr<T> {}
-#[cfg(target_has_atomic = "ptr")]
+#[cfg(any(bootstrap, target_has_atomic_load_store = "ptr"))]
 #[stable(feature = "rust1", since = "1.0.0")]
 unsafe impl<T> Sync for AtomicPtr<T> {}
 
@@ -217,8 +217,8 @@ unsafe impl<T> Sync for AtomicPtr<T> {}
 /// operations synchronize other memory while additionally preserving a total order of such
 /// operations across all threads.
 ///
-/// Rust's memory orderings are [the same as
-/// LLVM's](https://llvm.org/docs/LangRef.html#memory-model-for-concurrent-operations).
+/// Rust's memory orderings are [the same as those of
+/// C++20](https://en.cppreference.com/w/cpp/atomic/memory_order).
 ///
 /// For more information see the [nomicon].
 ///
@@ -231,9 +231,9 @@ unsafe impl<T> Sync for AtomicPtr<T> {}
 pub enum Ordering {
     /// No ordering constraints, only atomic operations.
     ///
-    /// Corresponds to LLVM's [`Monotonic`] ordering.
+    /// Corresponds to [`memory_order_relaxed`] in C++20.
     ///
-    /// [`Monotonic`]: https://llvm.org/docs/Atomics.html#monotonic
+    /// [`memory_order_relaxed`]: https://en.cppreference.com/w/cpp/atomic/memory_order#Relaxed_ordering
     #[stable(feature = "rust1", since = "1.0.0")]
     Relaxed,
     /// When coupled with a store, all previous operations become ordered
@@ -246,11 +246,12 @@ pub enum Ordering {
     ///
     /// This ordering is only applicable for operations that can perform a store.
     ///
-    /// Corresponds to LLVM's [`Release`] ordering.
+    /// Corresponds to [`memory_order_release`] in C++20.
     ///
-    /// [`Release`]: https://llvm.org/docs/Atomics.html#release
-    /// [`Acquire`]: https://llvm.org/docs/Atomics.html#acquire
-    /// [`Relaxed`]: https://llvm.org/docs/Atomics.html#monotonic
+    /// [`Release`]: #variant.Release
+    /// [`Acquire`]: #variant.Acquire
+    /// [`Relaxed`]: #variant.Relaxed
+    /// [`memory_order_release`]: https://en.cppreference.com/w/cpp/atomic/memory_order#Release-Acquire_ordering
     #[stable(feature = "rust1", since = "1.0.0")]
     Release,
     /// When coupled with a load, if the loaded value was written by a store operation with
@@ -263,11 +264,12 @@ pub enum Ordering {
     ///
     /// This ordering is only applicable for operations that can perform a load.
     ///
-    /// Corresponds to LLVM's [`Acquire`] ordering.
+    /// Corresponds to [`memory_order_acquire`] in C++20.
     ///
-    /// [`Acquire`]: https://llvm.org/docs/Atomics.html#acquire
-    /// [`Release`]: https://llvm.org/docs/Atomics.html#release
-    /// [`Relaxed`]: https://llvm.org/docs/Atomics.html#monotonic
+    /// [`Acquire`]: #variant.Acquire
+    /// [`Release`]: #variant.Release
+    /// [`Relaxed`]: #variant.Relaxed
+    /// [`memory_order_acquire`]: https://en.cppreference.com/w/cpp/atomic/memory_order#Release-Acquire_ordering
     #[stable(feature = "rust1", since = "1.0.0")]
     Acquire,
     /// Has the effects of both [`Acquire`] and [`Release`] together:
@@ -275,28 +277,28 @@ pub enum Ordering {
     ///
     /// Notice that in the case of `compare_and_swap`, it is possible that the operation ends up
     /// not performing any store and hence it has just [`Acquire`] ordering. However,
-    /// [`AcqRel`][`AcquireRelease`] will never perform [`Relaxed`] accesses.
+    /// `AcqRel` will never perform [`Relaxed`] accesses.
     ///
     /// This ordering is only applicable for operations that combine both loads and stores.
     ///
-    /// Corresponds to LLVM's [`AcquireRelease`] ordering.
+    /// Corresponds to [`memory_order_acq_rel`] in C++20.
     ///
-    /// [`AcquireRelease`]: https://llvm.org/docs/Atomics.html#acquirerelease
-    /// [`Acquire`]: https://llvm.org/docs/Atomics.html#acquire
-    /// [`Release`]: https://llvm.org/docs/Atomics.html#release
-    /// [`Relaxed`]: https://llvm.org/docs/Atomics.html#monotonic
+    /// [`memory_order_acq_rel`]: https://en.cppreference.com/w/cpp/atomic/memory_order#Release-Acquire_ordering
+    /// [`Acquire`]: #variant.Acquire
+    /// [`Release`]: #variant.Release
+    /// [`Relaxed`]: #variant.Relaxed
     #[stable(feature = "rust1", since = "1.0.0")]
     AcqRel,
     /// Like [`Acquire`]/[`Release`]/[`AcqRel`] (for load, store, and load-with-store
     /// operations, respectively) with the additional guarantee that all threads see all
     /// sequentially consistent operations in the same order.
     ///
-    /// Corresponds to LLVM's [`SequentiallyConsistent`] ordering.
+    /// Corresponds to [`memory_order_seq_cst`] in C++20.
     ///
-    /// [`SequentiallyConsistent`]: https://llvm.org/docs/Atomics.html#sequentiallyconsistent
-    /// [`Acquire`]: https://llvm.org/docs/Atomics.html#acquire
-    /// [`Release`]: https://llvm.org/docs/Atomics.html#release
-    /// [`AcqRel`]: https://llvm.org/docs/Atomics.html#acquirerelease
+    /// [`memory_order_seq_cst`]: https://en.cppreference.com/w/cpp/atomic/memory_order#Sequentially-consistent_ordering
+    /// [`Acquire`]: #variant.Acquire
+    /// [`Release`]: #variant.Release
+    /// [`AcqRel`]: #variant.AcqRel
     #[stable(feature = "rust1", since = "1.0.0")]
     SeqCst,
 }
@@ -304,7 +306,7 @@ pub enum Ordering {
 /// An [`AtomicBool`] initialized to `false`.
 ///
 /// [`AtomicBool`]: struct.AtomicBool.html
-#[cfg(target_has_atomic = "8")]
+#[cfg(any(bootstrap, target_has_atomic_load_store = "8"))]
 #[stable(feature = "rust1", since = "1.0.0")]
 #[rustc_deprecated(
     since = "1.34.0",
@@ -313,7 +315,7 @@ pub enum Ordering {
 )]
 pub const ATOMIC_BOOL_INIT: AtomicBool = AtomicBool::new(false);
 
-#[cfg(target_has_atomic = "8")]
+#[cfg(any(bootstrap, target_has_atomic_load_store = "8"))]
 impl AtomicBool {
     /// Creates a new `AtomicBool`.
     ///
@@ -462,7 +464,7 @@ pub fn store(&self, val: bool, order: Ordering) {
     /// ```
     #[inline]
     #[stable(feature = "rust1", since = "1.0.0")]
-    #[cfg(target_has_atomic = "cas")]
+    #[cfg(target_has_atomic = "8")]
     pub fn swap(&self, val: bool, order: Ordering) -> bool {
         unsafe { atomic_swap(self.v.get(), val as u8, order) != 0 }
     }
@@ -500,7 +502,7 @@ pub fn swap(&self, val: bool, order: Ordering) -> bool {
     /// ```
     #[inline]
     #[stable(feature = "rust1", since = "1.0.0")]
-    #[cfg(target_has_atomic = "cas")]
+    #[cfg(target_has_atomic = "8")]
     pub fn compare_and_swap(&self, current: bool, new: bool, order: Ordering) -> bool {
         match self.compare_exchange(current, new, order, strongest_failure_ordering(order)) {
             Ok(x) => x,
@@ -551,7 +553,7 @@ pub fn compare_and_swap(&self, current: bool, new: bool, order: Ordering) -> boo
     /// ```
     #[inline]
     #[stable(feature = "extended_compare_and_swap", since = "1.10.0")]
-    #[cfg(target_has_atomic = "cas")]
+    #[cfg(target_has_atomic = "8")]
     pub fn compare_exchange(&self,
                             current: bool,
                             new: bool,
@@ -607,7 +609,7 @@ pub fn compare_exchange(&self,
     /// ```
     #[inline]
     #[stable(feature = "extended_compare_and_swap", since = "1.10.0")]
-    #[cfg(target_has_atomic = "cas")]
+    #[cfg(target_has_atomic = "8")]
     pub fn compare_exchange_weak(&self,
                                  current: bool,
                                  new: bool,
@@ -658,7 +660,7 @@ pub fn compare_exchange_weak(&self,
     /// ```
     #[inline]
     #[stable(feature = "rust1", since = "1.0.0")]
-    #[cfg(target_has_atomic = "cas")]
+    #[cfg(target_has_atomic = "8")]
     pub fn fetch_and(&self, val: bool, order: Ordering) -> bool {
         unsafe { atomic_and(self.v.get(), val as u8, order) != 0 }
     }
@@ -700,7 +702,7 @@ pub fn fetch_and(&self, val: bool, order: Ordering) -> bool {
     /// ```
     #[inline]
     #[stable(feature = "rust1", since = "1.0.0")]
-    #[cfg(target_has_atomic = "cas")]
+    #[cfg(target_has_atomic = "8")]
     pub fn fetch_nand(&self, val: bool, order: Ordering) -> bool {
         // We can't use atomic_nand here because it can result in a bool with
         // an invalid value. This happens because the atomic operation is done
@@ -753,7 +755,7 @@ pub fn fetch_nand(&self, val: bool, order: Ordering) -> bool {
     /// ```
     #[inline]
     #[stable(feature = "rust1", since = "1.0.0")]
-    #[cfg(target_has_atomic = "cas")]
+    #[cfg(target_has_atomic = "8")]
     pub fn fetch_or(&self, val: bool, order: Ordering) -> bool {
         unsafe { atomic_or(self.v.get(), val as u8, order) != 0 }
     }
@@ -794,13 +796,13 @@ pub fn fetch_or(&self, val: bool, order: Ordering) -> bool {
     /// ```
     #[inline]
     #[stable(feature = "rust1", since = "1.0.0")]
-    #[cfg(target_has_atomic = "cas")]
+    #[cfg(target_has_atomic = "8")]
     pub fn fetch_xor(&self, val: bool, order: Ordering) -> bool {
         unsafe { atomic_xor(self.v.get(), val as u8, order) != 0 }
     }
 }
 
-#[cfg(target_has_atomic = "ptr")]
+#[cfg(any(bootstrap, target_has_atomic_load_store = "ptr"))]
 impl<T> AtomicPtr<T> {
     /// Creates a new `AtomicPtr`.
     ///
@@ -951,7 +953,7 @@ pub fn store(&self, ptr: *mut T, order: Ordering) {
     /// ```
     #[inline]
     #[stable(feature = "rust1", since = "1.0.0")]
-    #[cfg(target_has_atomic = "cas")]
+    #[cfg(target_has_atomic = "ptr")]
     pub fn swap(&self, ptr: *mut T, order: Ordering) -> *mut T {
         unsafe { atomic_swap(self.p.get() as *mut usize, ptr as usize, order) as *mut T }
     }
@@ -987,7 +989,7 @@ pub fn swap(&self, ptr: *mut T, order: Ordering) -> *mut T {
     /// ```
     #[inline]
     #[stable(feature = "rust1", since = "1.0.0")]
-    #[cfg(target_has_atomic = "cas")]
+    #[cfg(target_has_atomic = "ptr")]
     pub fn compare_and_swap(&self, current: *mut T, new: *mut T, order: Ordering) -> *mut T {
         match self.compare_exchange(current, new, order, strongest_failure_ordering(order)) {
             Ok(x) => x,
@@ -1029,7 +1031,7 @@ pub fn compare_and_swap(&self, current: *mut T, new: *mut T, order: Ordering) ->
     /// ```
     #[inline]
     #[stable(feature = "extended_compare_and_swap", since = "1.10.0")]
-    #[cfg(target_has_atomic = "cas")]
+    #[cfg(target_has_atomic = "ptr")]
     pub fn compare_exchange(&self,
                             current: *mut T,
                             new: *mut T,
@@ -1089,7 +1091,7 @@ pub fn compare_exchange(&self,
     /// ```
     #[inline]
     #[stable(feature = "extended_compare_and_swap", since = "1.10.0")]
-    #[cfg(target_has_atomic = "cas")]
+    #[cfg(target_has_atomic = "ptr")]
     pub fn compare_exchange_weak(&self,
                                  current: *mut T,
                                  new: *mut T,
@@ -1110,7 +1112,7 @@ pub fn compare_exchange_weak(&self,
     }
 }
 
-#[cfg(target_has_atomic = "8")]
+#[cfg(any(bootstrap, target_has_atomic_load_store = "8"))]
 #[stable(feature = "atomic_bool_from", since = "1.24.0")]
 impl From<bool> for AtomicBool {
     /// Converts a `bool` into an `AtomicBool`.
@@ -1126,16 +1128,17 @@ impl From<bool> for AtomicBool {
     fn from(b: bool) -> Self { Self::new(b) }
 }
 
-#[cfg(target_has_atomic = "ptr")]
+#[cfg(any(bootstrap, target_has_atomic_load_store = "ptr"))]
 #[stable(feature = "atomic_from", since = "1.23.0")]
 impl<T> From<*mut T> for AtomicPtr<T> {
     #[inline]
     fn from(p: *mut T) -> Self { Self::new(p) }
 }
 
-#[cfg(target_has_atomic = "ptr")]
+#[cfg(any(bootstrap, target_has_atomic_load_store = "8"))]
 macro_rules! atomic_int {
-    ($stable:meta,
+    ($cfg_cas:meta,
+     $stable:meta,
      $stable_cxchg:meta,
      $stable_debug:meta,
      $stable_access:meta,
@@ -1356,7 +1359,7 @@ pub fn store(&self, val: $int_type, order: Ordering) {
 ```"),
                 #[inline]
                 #[$stable]
-                #[cfg(target_has_atomic = "cas")]
+                #[$cfg_cas]
                 pub fn swap(&self, val: $int_type, order: Ordering) -> $int_type {
                     unsafe { atomic_swap(self.v.get(), val, order) }
                 }
@@ -1396,7 +1399,7 @@ pub fn swap(&self, val: $int_type, order: Ordering) -> $int_type {
 ```"),
                 #[inline]
                 #[$stable]
-                #[cfg(target_has_atomic = "cas")]
+                #[$cfg_cas]
                 pub fn compare_and_swap(&self,
                                         current: $int_type,
                                         new: $int_type,
@@ -1454,7 +1457,7 @@ pub fn compare_and_swap(&self,
 ```"),
                 #[inline]
                 #[$stable_cxchg]
-                #[cfg(target_has_atomic = "cas")]
+                #[$cfg_cas]
                 pub fn compare_exchange(&self,
                                         current: $int_type,
                                         new: $int_type,
@@ -1506,7 +1509,7 @@ pub fn compare_exchange(&self,
 ```"),
                 #[inline]
                 #[$stable_cxchg]
-                #[cfg(target_has_atomic = "cas")]
+                #[$cfg_cas]
                 pub fn compare_exchange_weak(&self,
                                              current: $int_type,
                                              new: $int_type,
@@ -1544,7 +1547,7 @@ pub fn compare_exchange_weak(&self,
 ```"),
                 #[inline]
                 #[$stable]
-                #[cfg(target_has_atomic = "cas")]
+                #[$cfg_cas]
                 pub fn fetch_add(&self, val: $int_type, order: Ordering) -> $int_type {
                     unsafe { atomic_add(self.v.get(), val, order) }
                 }
@@ -1576,7 +1579,7 @@ pub fn fetch_add(&self, val: $int_type, order: Ordering) -> $int_type {
 ```"),
                 #[inline]
                 #[$stable]
-                #[cfg(target_has_atomic = "cas")]
+                #[$cfg_cas]
                 pub fn fetch_sub(&self, val: $int_type, order: Ordering) -> $int_type {
                     unsafe { atomic_sub(self.v.get(), val, order) }
                 }
@@ -1611,7 +1614,7 @@ pub fn fetch_sub(&self, val: $int_type, order: Ordering) -> $int_type {
 ```"),
                 #[inline]
                 #[$stable]
-                #[cfg(target_has_atomic = "cas")]
+                #[$cfg_cas]
                 pub fn fetch_and(&self, val: $int_type, order: Ordering) -> $int_type {
                     unsafe { atomic_and(self.v.get(), val, order) }
                 }
@@ -1647,7 +1650,7 @@ pub fn fetch_and(&self, val: $int_type, order: Ordering) -> $int_type {
 ```"),
                 #[inline]
                 #[$stable_nand]
-                #[cfg(target_has_atomic = "cas")]
+                #[$cfg_cas]
                 pub fn fetch_nand(&self, val: $int_type, order: Ordering) -> $int_type {
                     unsafe { atomic_nand(self.v.get(), val, order) }
                 }
@@ -1682,7 +1685,7 @@ pub fn fetch_nand(&self, val: $int_type, order: Ordering) -> $int_type {
 ```"),
                 #[inline]
                 #[$stable]
-                #[cfg(target_has_atomic = "cas")]
+                #[$cfg_cas]
                 pub fn fetch_or(&self, val: $int_type, order: Ordering) -> $int_type {
                     unsafe { atomic_or(self.v.get(), val, order) }
                 }
@@ -1717,7 +1720,7 @@ pub fn fetch_or(&self, val: $int_type, order: Ordering) -> $int_type {
 ```"),
                 #[inline]
                 #[$stable]
-                #[cfg(target_has_atomic = "cas")]
+                #[$cfg_cas]
                 pub fn fetch_xor(&self, val: $int_type, order: Ordering) -> $int_type {
                     unsafe { atomic_xor(self.v.get(), val, order) }
                 }
@@ -1767,7 +1770,7 @@ pub fn fetch_xor(&self, val: $int_type, order: Ordering) -> $int_type {
                 #[unstable(feature = "no_more_cas",
                        reason = "no more CAS loops in user code",
                        issue = "48655")]
-                #[cfg(target_has_atomic = "cas")]
+                #[$cfg_cas]
                 pub fn fetch_update<F>(&self,
                                        mut f: F,
                                        fetch_order: Ordering,
@@ -1828,7 +1831,7 @@ pub fn fetch_update<F>(&self,
                 #[unstable(feature = "atomic_min_max",
                        reason = "easier and faster min/max than writing manual CAS loop",
                        issue = "48655")]
-                #[cfg(target_has_atomic = "cas")]
+                #[$cfg_cas]
                 pub fn fetch_max(&self, val: $int_type, order: Ordering) -> $int_type {
                     unsafe { $max_fn(self.v.get(), val, order) }
                 }
@@ -1880,7 +1883,7 @@ pub fn fetch_max(&self, val: $int_type, order: Ordering) -> $int_type {
                 #[unstable(feature = "atomic_min_max",
                        reason = "easier and faster min/max than writing manual CAS loop",
                        issue = "48655")]
-                #[cfg(target_has_atomic = "cas")]
+                #[$cfg_cas]
                 pub fn fetch_min(&self, val: $int_type, order: Ordering) -> $int_type {
                     unsafe { $min_fn(self.v.get(), val, order) }
                 }
@@ -1890,8 +1893,9 @@ pub fn fetch_min(&self, val: $int_type, order: Ordering) -> $int_type {
     }
 }
 
-#[cfg(target_has_atomic = "8")]
+#[cfg(any(bootstrap, target_has_atomic_load_store = "8"))]
 atomic_int! {
+    cfg(target_has_atomic = "8"),
     stable(feature = "integer_atomics_stable", since = "1.34.0"),
     stable(feature = "integer_atomics_stable", since = "1.34.0"),
     stable(feature = "integer_atomics_stable", since = "1.34.0"),
@@ -1906,8 +1910,9 @@ pub fn fetch_min(&self, val: $int_type, order: Ordering) -> $int_type {
     "AtomicI8::new(0)",
     i8 AtomicI8 ATOMIC_I8_INIT
 }
-#[cfg(target_has_atomic = "8")]
+#[cfg(any(bootstrap, target_has_atomic_load_store = "8"))]
 atomic_int! {
+    cfg(target_has_atomic = "8"),
     stable(feature = "integer_atomics_stable", since = "1.34.0"),
     stable(feature = "integer_atomics_stable", since = "1.34.0"),
     stable(feature = "integer_atomics_stable", since = "1.34.0"),
@@ -1922,8 +1927,9 @@ pub fn fetch_min(&self, val: $int_type, order: Ordering) -> $int_type {
     "AtomicU8::new(0)",
     u8 AtomicU8 ATOMIC_U8_INIT
 }
-#[cfg(target_has_atomic = "16")]
+#[cfg(any(bootstrap, target_has_atomic_load_store = "16"))]
 atomic_int! {
+    cfg(target_has_atomic = "16"),
     stable(feature = "integer_atomics_stable", since = "1.34.0"),
     stable(feature = "integer_atomics_stable", since = "1.34.0"),
     stable(feature = "integer_atomics_stable", since = "1.34.0"),
@@ -1938,8 +1944,9 @@ pub fn fetch_min(&self, val: $int_type, order: Ordering) -> $int_type {
     "AtomicI16::new(0)",
     i16 AtomicI16 ATOMIC_I16_INIT
 }
-#[cfg(target_has_atomic = "16")]
+#[cfg(any(bootstrap, target_has_atomic_load_store = "16"))]
 atomic_int! {
+    cfg(target_has_atomic = "16"),
     stable(feature = "integer_atomics_stable", since = "1.34.0"),
     stable(feature = "integer_atomics_stable", since = "1.34.0"),
     stable(feature = "integer_atomics_stable", since = "1.34.0"),
@@ -1954,8 +1961,9 @@ pub fn fetch_min(&self, val: $int_type, order: Ordering) -> $int_type {
     "AtomicU16::new(0)",
     u16 AtomicU16 ATOMIC_U16_INIT
 }
-#[cfg(target_has_atomic = "32")]
+#[cfg(any(bootstrap, target_has_atomic_load_store = "32"))]
 atomic_int! {
+    cfg(target_has_atomic = "32"),
     stable(feature = "integer_atomics_stable", since = "1.34.0"),
     stable(feature = "integer_atomics_stable", since = "1.34.0"),
     stable(feature = "integer_atomics_stable", since = "1.34.0"),
@@ -1970,8 +1978,9 @@ pub fn fetch_min(&self, val: $int_type, order: Ordering) -> $int_type {
     "AtomicI32::new(0)",
     i32 AtomicI32 ATOMIC_I32_INIT
 }
-#[cfg(target_has_atomic = "32")]
+#[cfg(any(bootstrap, target_has_atomic_load_store = "32"))]
 atomic_int! {
+    cfg(target_has_atomic = "32"),
     stable(feature = "integer_atomics_stable", since = "1.34.0"),
     stable(feature = "integer_atomics_stable", since = "1.34.0"),
     stable(feature = "integer_atomics_stable", since = "1.34.0"),
@@ -1986,8 +1995,12 @@ pub fn fetch_min(&self, val: $int_type, order: Ordering) -> $int_type {
     "AtomicU32::new(0)",
     u32 AtomicU32 ATOMIC_U32_INIT
 }
-#[cfg(target_has_atomic = "64")]
+#[cfg(any(
+    all(bootstrap, target_has_atomic = "64"),
+    target_has_atomic_load_store = "64"
+))]
 atomic_int! {
+    cfg(target_has_atomic = "64"),
     stable(feature = "integer_atomics_stable", since = "1.34.0"),
     stable(feature = "integer_atomics_stable", since = "1.34.0"),
     stable(feature = "integer_atomics_stable", since = "1.34.0"),
@@ -2002,8 +2015,12 @@ pub fn fetch_min(&self, val: $int_type, order: Ordering) -> $int_type {
     "AtomicI64::new(0)",
     i64 AtomicI64 ATOMIC_I64_INIT
 }
-#[cfg(target_has_atomic = "64")]
+#[cfg(any(
+    all(bootstrap, target_has_atomic = "64"),
+    target_has_atomic_load_store = "64"
+))]
 atomic_int! {
+    cfg(target_has_atomic = "64"),
     stable(feature = "integer_atomics_stable", since = "1.34.0"),
     stable(feature = "integer_atomics_stable", since = "1.34.0"),
     stable(feature = "integer_atomics_stable", since = "1.34.0"),
@@ -2018,8 +2035,9 @@ pub fn fetch_min(&self, val: $int_type, order: Ordering) -> $int_type {
     "AtomicU64::new(0)",
     u64 AtomicU64 ATOMIC_U64_INIT
 }
-#[cfg(target_has_atomic = "128")]
+#[cfg(target_has_atomic_load_store = "128")]
 atomic_int! {
+    cfg(target_has_atomic = "128"),
     unstable(feature = "integer_atomics", issue = "32976"),
     unstable(feature = "integer_atomics", issue = "32976"),
     unstable(feature = "integer_atomics", issue = "32976"),
@@ -2034,8 +2052,9 @@ pub fn fetch_min(&self, val: $int_type, order: Ordering) -> $int_type {
     "AtomicI128::new(0)",
     i128 AtomicI128 ATOMIC_I128_INIT
 }
-#[cfg(target_has_atomic = "128")]
+#[cfg(target_has_atomic_load_store = "128")]
 atomic_int! {
+    cfg(target_has_atomic = "128"),
     unstable(feature = "integer_atomics", issue = "32976"),
     unstable(feature = "integer_atomics", issue = "32976"),
     unstable(feature = "integer_atomics", issue = "32976"),
@@ -2050,20 +2069,24 @@ pub fn fetch_min(&self, val: $int_type, order: Ordering) -> $int_type {
     "AtomicU128::new(0)",
     u128 AtomicU128 ATOMIC_U128_INIT
 }
+#[cfg(any(bootstrap, target_has_atomic_load_store = "ptr"))]
 #[cfg(target_pointer_width = "16")]
 macro_rules! ptr_width {
     () => { 2 }
 }
+#[cfg(any(bootstrap, target_has_atomic_load_store = "ptr"))]
 #[cfg(target_pointer_width = "32")]
 macro_rules! ptr_width {
     () => { 4 }
 }
+#[cfg(any(bootstrap, target_has_atomic_load_store = "ptr"))]
 #[cfg(target_pointer_width = "64")]
 macro_rules! ptr_width {
     () => { 8 }
 }
-#[cfg(target_has_atomic = "ptr")]
+#[cfg(any(bootstrap, target_has_atomic_load_store = "ptr"))]
 atomic_int!{
+    cfg(target_has_atomic = "ptr"),
     stable(feature = "rust1", since = "1.0.0"),
     stable(feature = "extended_compare_and_swap", since = "1.10.0"),
     stable(feature = "atomic_debug", since = "1.3.0"),
@@ -2078,8 +2101,9 @@ macro_rules! ptr_width {
     "AtomicIsize::new(0)",
     isize AtomicIsize ATOMIC_ISIZE_INIT
 }
-#[cfg(target_has_atomic = "ptr")]
+#[cfg(any(bootstrap, target_has_atomic_load_store = "ptr"))]
 atomic_int!{
+    cfg(target_has_atomic = "ptr"),
     stable(feature = "rust1", since = "1.0.0"),
     stable(feature = "extended_compare_and_swap", since = "1.10.0"),
     stable(feature = "atomic_debug", since = "1.3.0"),
@@ -2096,7 +2120,7 @@ macro_rules! ptr_width {
 }
 
 #[inline]
-#[cfg(target_has_atomic = "cas")]
+#[cfg(target_has_atomic = "8")]
 fn strongest_failure_ordering(order: Ordering) -> Ordering {
     match order {
         Release => Relaxed,
@@ -2130,7 +2154,7 @@ unsafe fn atomic_load<T>(dst: *const T, order: Ordering) -> T {
 }
 
 #[inline]
-#[cfg(target_has_atomic = "cas")]
+#[cfg(target_has_atomic = "8")]
 unsafe fn atomic_swap<T>(dst: *mut T, val: T, order: Ordering) -> T {
     match order {
         Acquire => intrinsics::atomic_xchg_acq(dst, val),
@@ -2143,7 +2167,7 @@ unsafe fn atomic_swap<T>(dst: *mut T, val: T, order: Ordering) -> T {
 
 /// Returns the previous value (like __sync_fetch_and_add).
 #[inline]
-#[cfg(target_has_atomic = "cas")]
+#[cfg(target_has_atomic = "8")]
 unsafe fn atomic_add<T>(dst: *mut T, val: T, order: Ordering) -> T {
     match order {
         Acquire => intrinsics::atomic_xadd_acq(dst, val),
@@ -2156,7 +2180,7 @@ unsafe fn atomic_add<T>(dst: *mut T, val: T, order: Ordering) -> T {
 
 /// Returns the previous value (like __sync_fetch_and_sub).
 #[inline]
-#[cfg(target_has_atomic = "cas")]
+#[cfg(target_has_atomic = "8")]
 unsafe fn atomic_sub<T>(dst: *mut T, val: T, order: Ordering) -> T {
     match order {
         Acquire => intrinsics::atomic_xsub_acq(dst, val),
@@ -2168,7 +2192,7 @@ unsafe fn atomic_sub<T>(dst: *mut T, val: T, order: Ordering) -> T {
 }
 
 #[inline]
-#[cfg(target_has_atomic = "cas")]
+#[cfg(target_has_atomic = "8")]
 unsafe fn atomic_compare_exchange<T>(dst: *mut T,
                                      old: T,
                                      new: T,
@@ -2193,7 +2217,7 @@ unsafe fn atomic_compare_exchange<T>(dst: *mut T,
 }
 
 #[inline]
-#[cfg(target_has_atomic = "cas")]
+#[cfg(target_has_atomic = "8")]
 unsafe fn atomic_compare_exchange_weak<T>(dst: *mut T,
                                           old: T,
                                           new: T,
@@ -2218,7 +2242,7 @@ unsafe fn atomic_compare_exchange_weak<T>(dst: *mut T,
 }
 
 #[inline]
-#[cfg(target_has_atomic = "cas")]
+#[cfg(target_has_atomic = "8")]
 unsafe fn atomic_and<T>(dst: *mut T, val: T, order: Ordering) -> T {
     match order {
         Acquire => intrinsics::atomic_and_acq(dst, val),
@@ -2230,7 +2254,7 @@ unsafe fn atomic_and<T>(dst: *mut T, val: T, order: Ordering) -> T {
 }
 
 #[inline]
-#[cfg(target_has_atomic = "cas")]
+#[cfg(target_has_atomic = "8")]
 unsafe fn atomic_nand<T>(dst: *mut T, val: T, order: Ordering) -> T {
     match order {
         Acquire => intrinsics::atomic_nand_acq(dst, val),
@@ -2242,7 +2266,7 @@ unsafe fn atomic_nand<T>(dst: *mut T, val: T, order: Ordering) -> T {
 }
 
 #[inline]
-#[cfg(target_has_atomic = "cas")]
+#[cfg(target_has_atomic = "8")]
 unsafe fn atomic_or<T>(dst: *mut T, val: T, order: Ordering) -> T {
     match order {
         Acquire => intrinsics::atomic_or_acq(dst, val),
@@ -2254,7 +2278,7 @@ unsafe fn atomic_or<T>(dst: *mut T, val: T, order: Ordering) -> T {
 }
 
 #[inline]
-#[cfg(target_has_atomic = "cas")]
+#[cfg(target_has_atomic = "8")]
 unsafe fn atomic_xor<T>(dst: *mut T, val: T, order: Ordering) -> T {
     match order {
         Acquire => intrinsics::atomic_xor_acq(dst, val),
@@ -2267,7 +2291,7 @@ unsafe fn atomic_xor<T>(dst: *mut T, val: T, order: Ordering) -> T {
 
 /// returns the max value (signed comparison)
 #[inline]
-#[cfg(target_has_atomic = "cas")]
+#[cfg(target_has_atomic = "8")]
 unsafe fn atomic_max<T>(dst: *mut T, val: T, order: Ordering) -> T {
     match order {
         Acquire => intrinsics::atomic_max_acq(dst, val),
@@ -2280,7 +2304,7 @@ unsafe fn atomic_max<T>(dst: *mut T, val: T, order: Ordering) -> T {
 
 /// returns the min value (signed comparison)
 #[inline]
-#[cfg(target_has_atomic = "cas")]
+#[cfg(target_has_atomic = "8")]
 unsafe fn atomic_min<T>(dst: *mut T, val: T, order: Ordering) -> T {
     match order {
         Acquire => intrinsics::atomic_min_acq(dst, val),
@@ -2293,7 +2317,7 @@ unsafe fn atomic_min<T>(dst: *mut T, val: T, order: Ordering) -> T {
 
 /// returns the max value (signed comparison)
 #[inline]
-#[cfg(target_has_atomic = "cas")]
+#[cfg(target_has_atomic = "8")]
 unsafe fn atomic_umax<T>(dst: *mut T, val: T, order: Ordering) -> T {
     match order {
         Acquire => intrinsics::atomic_umax_acq(dst, val),
@@ -2306,7 +2330,7 @@ unsafe fn atomic_umax<T>(dst: *mut T, val: T, order: Ordering) -> T {
 
 /// returns the min value (signed comparison)
 #[inline]
-#[cfg(target_has_atomic = "cas")]
+#[cfg(target_has_atomic = "8")]
 unsafe fn atomic_umin<T>(dst: *mut T, val: T, order: Ordering) -> T {
     match order {
         Acquire => intrinsics::atomic_umin_acq(dst, val),
@@ -2504,7 +2528,7 @@ pub fn compiler_fence(order: Ordering) {
 }
 
 
-#[cfg(target_has_atomic = "8")]
+#[cfg(any(bootstrap, target_has_atomic_load_store = "8"))]
 #[stable(feature = "atomic_debug", since = "1.3.0")]
 impl fmt::Debug for AtomicBool {
     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
@@ -2512,7 +2536,7 @@ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
     }
 }
 
-#[cfg(target_has_atomic = "ptr")]
+#[cfg(any(bootstrap, target_has_atomic_load_store = "ptr"))]
 #[stable(feature = "atomic_debug", since = "1.3.0")]
 impl<T> fmt::Debug for AtomicPtr<T> {
     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
@@ -2520,7 +2544,7 @@ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
     }
 }
 
-#[cfg(target_has_atomic = "ptr")]
+#[cfg(any(bootstrap, target_has_atomic_load_store = "ptr"))]
 #[stable(feature = "atomic_pointer", since = "1.24.0")]
 impl<T> fmt::Pointer for AtomicPtr<T> {
     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
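The ordering docs above are relabelled from LLVM's names to their C++20 equivalents, but usage is unchanged; a minimal Release/Acquire handoff between two threads, written against the stable atomics API with illustrative values:

```
use std::sync::atomic::{AtomicBool, AtomicUsize, Ordering};
use std::sync::Arc;
use std::thread;

fn main() {
    let data = Arc::new(AtomicUsize::new(0));
    let ready = Arc::new(AtomicBool::new(false));

    let (d, r) = (Arc::clone(&data), Arc::clone(&ready));
    let producer = thread::spawn(move || {
        d.store(42, Ordering::Relaxed);
        // Release: the store to `data` above becomes visible to any thread
        // that observes `ready == true` with an Acquire load.
        r.store(true, Ordering::Release);
    });

    // Acquire: once this sees `true`, the Relaxed store to `data` is visible.
    while !ready.load(Ordering::Acquire) {
        thread::yield_now();
    }
    assert_eq!(data.load(Ordering::Relaxed), 42);

    producer.join().unwrap();
}
```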
index 65e293642874c3ba730e67484a0c1fd474618f79..6f841bd2adf41a9be45c16512042475952e85512 100644 (file)
@@ -217,7 +217,7 @@ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
 /// This handle encapsulates a [`RawWaker`] instance, which defines the
 /// executor-specific wakeup behavior.
 ///
-/// Implements [`Clone`], [`trait@Send`], and [`trait@Sync`].
+/// Implements [`Clone`], [`Send`], and [`Sync`].
 ///
 /// [`RawWaker`]: struct.RawWaker.html
 #[repr(transparent)]
index 200659b91bb4ec696697e0423347c4fbb720cb3c..255724432816d99ea2d96ebe5dd036ad9bc6c1b3 100644 (file)
@@ -319,6 +319,46 @@ fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
                    format!("{:#?}", Bar));
     }
 
+    #[test]
+    fn test_entry_err() {
+        // Ensure errors in a map entry don't trigger panics (#65231)
+        use std::fmt::Write;
+
+        struct ErrorFmt;
+
+        impl fmt::Debug for ErrorFmt {
+            fn fmt(&self, _: &mut fmt::Formatter<'_>) -> fmt::Result {
+                Err(fmt::Error)
+            }
+        }
+
+        struct KeyValue<K, V>(usize, K, V);
+
+        impl<K, V> fmt::Debug for KeyValue<K, V>
+        where
+            K: fmt::Debug,
+            V: fmt::Debug,
+        {
+            fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
+                let mut map = fmt.debug_map();
+
+                for _ in 0..self.0 {
+                    map.entry(&self.1, &self.2);
+                }
+
+                map.finish()
+            }
+        }
+
+        let mut buf = String::new();
+
+        assert!(write!(&mut buf, "{:?}", KeyValue(1, ErrorFmt, "bar")).is_err());
+        assert!(write!(&mut buf, "{:?}", KeyValue(1, "foo", ErrorFmt)).is_err());
+
+        assert!(write!(&mut buf, "{:?}", KeyValue(2, ErrorFmt, "bar")).is_err());
+        assert!(write!(&mut buf, "{:?}", KeyValue(2, "foo", ErrorFmt)).is_err());
+    }
+
     #[test]
     #[should_panic]
     fn test_invalid_key_when_entry_is_incomplete() {
index e199670b561c2b9bad94061a7aca42f1e944bea4..f612c52d398357c5ec3591f466a3e6523842ac5f 100644 (file)
@@ -25,7 +25,6 @@
 #![feature(extern_types)]
 #![feature(in_band_lifetimes)]
 #![feature(optin_builtin_traits)]
-#![feature(mem_take)]
 #![feature(non_exhaustive)]
 #![feature(rustc_attrs)]
 #![feature(specialization)]
index a7c94d057dc49624ca3119a9daec9056239b7c06..cf9f36ca37c90c46da392ff206de066dd34b0b22 100644 (file)
@@ -18,8 +18,8 @@ jobserver = "0.1"
 num_cpus = "1.0"
 scoped-tls = "1.0"
 log = { version = "0.4", features = ["release_max_level_info", "std"] }
-rustc-rayon = "0.2.0"
-rustc-rayon-core = "0.2.0"
+rustc-rayon = "0.3.0"
+rustc-rayon-core = "0.3.0"
 polonius-engine  = "0.10.0"
 rustc_apfloat = { path = "../librustc_apfloat" }
 rustc_target = { path = "../librustc_target" }
@@ -29,6 +29,7 @@ rustc_index = { path = "../librustc_index" }
 errors = { path = "../librustc_errors", package = "rustc_errors" }
 rustc_serialize = { path = "../libserialize", package = "serialize" }
 syntax = { path = "../libsyntax" }
+syntax_expand = { path = "../libsyntax_expand" }
 syntax_pos = { path = "../libsyntax_pos" }
 backtrace = "0.3.3"
 parking_lot = "0.9"
index 5a5919d786638d3dcbf87444d95fa76de6afe473..3daf0fc9df7a06606cd090f84ca7d1d29cdcbaf9 100644 (file)
@@ -98,7 +98,6 @@ macro_rules! arena_types {
                 rustc::hir::def_id::DefId,
             >,
             [few] resolve_lifetimes: rustc::middle::resolve_lifetime::ResolveLifetimes,
-            [decode] generic_predicates: rustc::ty::GenericPredicates<'tcx>,
             [few] lint_levels: rustc::lint::LintLevelMap,
             [few] stability_index: rustc::middle::stability::Index<'tcx>,
             [few] features: syntax::feature_gate::Features,
index 0c56fc7914b4cfe505a153a9c8782071c378474b..337cdddc432c4640ed1c8321f5e9d554a373bef2 100644 (file)
@@ -3,13 +3,14 @@
 use rustc_data_structures::fx::{FxHashMap, FxHashSet};
 use rustc_index::vec::{Idx, IndexVec};
 use smallvec::SmallVec;
-use rustc_data_structures::sync::{Lrc, Lock, AtomicU32, Ordering};
+use rustc_data_structures::sync::{Lrc, Lock, AtomicU32, AtomicU64, Ordering};
+use rustc_data_structures::sharded::{self, Sharded};
+use std::sync::atomic::Ordering::SeqCst;
 use std::env;
 use std::hash::Hash;
 use std::collections::hash_map::Entry;
 use std::mem;
 use crate::ty::{self, TyCtxt};
-use crate::util::common::{ProfileQueriesMsg, profq_msg};
 use parking_lot::{Mutex, Condvar};
 
 use crate::ich::{StableHashingContext, StableHashingContextProvider, Fingerprint};
@@ -31,7 +32,7 @@ pub struct DepNodeIndex { .. }
 }
 
 impl DepNodeIndex {
-    const INVALID: DepNodeIndex = DepNodeIndex::MAX;
+    pub const INVALID: DepNodeIndex = DepNodeIndex::MAX;
 }
 
 #[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
@@ -54,7 +55,7 @@ struct DepGraphData {
     /// tracking. The `current` field is the dependency graph of only the
     /// current compilation session: We don't merge the previous dep-graph into
     /// the current one anymore.
-    current: Lock<CurrentDepGraph>,
+    current: CurrentDepGraph,
 
     /// The dep-graph from the previous compilation session. It contains all
     /// nodes and edges as well as all fingerprints of nodes that have them.
@@ -75,9 +76,6 @@ struct DepGraphData {
     previous_work_products: FxHashMap<WorkProductId, WorkProduct>,
 
     dep_node_debug: Lock<FxHashMap<DepNode, String>>,
-
-    // Used for testing, only populated when -Zquery-dep-graph is specified.
-    loaded_from_cache: Lock<FxHashMap<DepNodeIndex, bool>>,
 }
 
 pub fn hash_result<R>(hcx: &mut StableHashingContext<'_>, result: &R) -> Option<Fingerprint>
@@ -99,12 +97,11 @@ pub fn new(prev_graph: PreviousDepGraph,
             data: Some(Lrc::new(DepGraphData {
                 previous_work_products: prev_work_products,
                 dep_node_debug: Default::default(),
-                current: Lock::new(CurrentDepGraph::new(prev_graph_node_count)),
+                current: CurrentDepGraph::new(prev_graph_node_count),
                 emitting_diagnostics: Default::default(),
                 emitting_diagnostics_cond_var: Condvar::new(),
                 previous: prev_graph,
                 colors: DepNodeColorMap::new(prev_graph_node_count),
-                loaded_from_cache: Default::default(),
             })),
         }
     }
@@ -122,13 +119,12 @@ pub fn is_fully_enabled(&self) -> bool {
     }
 
     pub fn query(&self) -> DepGraphQuery {
-        let current_dep_graph = self.data.as_ref().unwrap().current.borrow();
-        let nodes: Vec<_> = current_dep_graph.data.iter().map(|n| n.node).collect();
+        let data = self.data.as_ref().unwrap().current.data.lock();
+        let nodes: Vec<_> = data.iter().map(|n| n.node).collect();
         let mut edges = Vec::new();
-        for (from, edge_targets) in current_dep_graph.data.iter()
-                                                           .map(|d| (d.node, &d.edges)) {
+        for (from, edge_targets) in data.iter().map(|d| (d.node, &d.edges)) {
             for &edge_target in edge_targets.iter() {
-                let to = current_dep_graph.data[edge_target].node;
+                let to = data[edge_target].node;
                 edges.push((from, to));
             }
         }
@@ -207,7 +203,7 @@ pub fn with_task<'a, C, A, R>(
                 read_set: Default::default(),
             }),
             |data, key, fingerprint, task| {
-                data.borrow_mut().complete_task(key, task.unwrap(), fingerprint)
+                data.complete_task(key, task.unwrap(), fingerprint)
             },
             hash_result)
     }
@@ -228,7 +224,7 @@ fn identity_fn<C, A>(_: C, arg: A) -> A {
         self.with_task_impl(key, cx, input, true, identity_fn,
             |_| None,
             |data, key, fingerprint, _| {
-                data.borrow_mut().alloc_node(key, SmallVec::new(), fingerprint)
+                data.alloc_node(key, SmallVec::new(), fingerprint)
             },
             hash_result::<R>)
     }
@@ -241,7 +237,7 @@ fn with_task_impl<'a, C, A, R>(
         no_tcx: bool,
         task: fn(C, A) -> R,
         create_task: fn(DepNode) -> Option<TaskDeps>,
-        finish_task_and_alloc_depnode: fn(&Lock<CurrentDepGraph>,
+        finish_task_and_alloc_depnode: fn(&CurrentDepGraph,
                                           DepNode,
                                           Fingerprint,
                                           Option<TaskDeps>) -> DepNodeIndex,
@@ -260,10 +256,6 @@ fn with_task_impl<'a, C, A, R>(
             //  - we can get an idea of the runtime cost.
             let mut hcx = cx.get_stable_hashing_context();
 
-            if cfg!(debug_assertions) {
-                profq_msg(hcx.sess(), ProfileQueriesMsg::TaskBegin(key.clone()))
-            };
-
             let result = if no_tcx {
                 task(cx, arg)
             } else {
@@ -279,10 +271,6 @@ fn with_task_impl<'a, C, A, R>(
                 })
             };
 
-            if cfg!(debug_assertions) {
-                profq_msg(hcx.sess(), ProfileQueriesMsg::TaskEnd)
-            };
-
             let current_fingerprint = hash_result(&mut hcx, &result);
 
             let dep_node_index = finish_task_and_alloc_depnode(
@@ -363,7 +351,6 @@ pub fn with_anon_task<OP,R>(&self, dep_kind: DepKind, op: OP) -> (R, DepNodeInde
                 (r, task_deps.into_inner())
             });
             let dep_node_index = data.current
-                                     .borrow_mut()
                                      .complete_anon_task(dep_kind, task_deps);
             (result, dep_node_index)
         } else {
@@ -387,8 +374,7 @@ pub fn with_eval_always_task<'a, C, A, R>(
         self.with_task_impl(key, cx, arg, false, task,
             |_| None,
             |data, key, fingerprint, _| {
-                let mut current = data.borrow_mut();
-                current.alloc_node(key, smallvec![], fingerprint)
+                data.alloc_node(key, smallvec![], fingerprint)
             },
             hash_result)
     }
@@ -396,9 +382,9 @@ pub fn with_eval_always_task<'a, C, A, R>(
     #[inline]
     pub fn read(&self, v: DepNode) {
         if let Some(ref data) = self.data {
-            let current = data.current.borrow_mut();
-            if let Some(&dep_node_index) = current.node_to_node_index.get(&v) {
-                std::mem::drop(current);
+            let map = data.current.node_to_node_index.get_shard_by_value(&v).lock();
+            if let Some(dep_node_index) = map.get(&v).copied() {
+                std::mem::drop(map);
                 data.read_index(dep_node_index);
             } else {
                 bug!("DepKind {:?} should be pre-allocated but isn't.", v.kind)
@@ -419,8 +405,9 @@ pub fn dep_node_index_of(&self, dep_node: &DepNode) -> DepNodeIndex {
             .as_ref()
             .unwrap()
             .current
-            .borrow_mut()
             .node_to_node_index
+            .get_shard_by_value(dep_node)
+            .lock()
             .get(dep_node)
             .cloned()
             .unwrap()
@@ -429,7 +416,11 @@ pub fn dep_node_index_of(&self, dep_node: &DepNode) -> DepNodeIndex {
     #[inline]
     pub fn dep_node_exists(&self, dep_node: &DepNode) -> bool {
         if let Some(ref data) = self.data {
-            data.current.borrow_mut().node_to_node_index.contains_key(dep_node)
+            data.current
+                .node_to_node_index
+                .get_shard_by_value(&dep_node)
+                .lock()
+                .contains_key(dep_node)
         } else {
             false
         }
@@ -437,8 +428,8 @@ pub fn dep_node_exists(&self, dep_node: &DepNode) -> bool {
 
     #[inline]
     pub fn fingerprint_of(&self, dep_node_index: DepNodeIndex) -> Fingerprint {
-        let current = self.data.as_ref().expect("dep graph enabled").current.borrow_mut();
-        current.data[dep_node_index].fingerprint
+        let data = self.data.as_ref().expect("dep graph enabled").current.data.lock();
+        data[dep_node_index].fingerprint
     }
 
     pub fn prev_fingerprint_of(&self, dep_node: &DepNode) -> Option<Fingerprint> {
@@ -492,32 +483,29 @@ pub(super) fn dep_node_debug_str(&self, dep_node: DepNode) -> Option<String> {
 
     pub fn edge_deduplication_data(&self) -> Option<(u64, u64)> {
         if cfg!(debug_assertions) {
-            let current_dep_graph = self.data.as_ref().unwrap().current.borrow();
+            let current_dep_graph = &self.data.as_ref().unwrap().current;
 
-            Some((current_dep_graph.total_read_count,
-                  current_dep_graph.total_duplicate_read_count))
+            Some((current_dep_graph.total_read_count.load(SeqCst),
+                  current_dep_graph.total_duplicate_read_count.load(SeqCst)))
         } else {
             None
         }
     }
 
     pub fn serialize(&self) -> SerializedDepGraph {
-        let current_dep_graph = self.data.as_ref().unwrap().current.borrow();
+        let data = self.data.as_ref().unwrap().current.data.lock();
 
         let fingerprints: IndexVec<SerializedDepNodeIndex, _> =
-            current_dep_graph.data.iter().map(|d| d.fingerprint).collect();
+            data.iter().map(|d| d.fingerprint).collect();
         let nodes: IndexVec<SerializedDepNodeIndex, _> =
-            current_dep_graph.data.iter().map(|d| d.node).collect();
+            data.iter().map(|d| d.node).collect();
 
-        let total_edge_count: usize = current_dep_graph.data.iter()
-                                                            .map(|d| d.edges.len())
-                                                            .sum();
+        let total_edge_count: usize = data.iter().map(|d| d.edges.len()).sum();
 
         let mut edge_list_indices = IndexVec::with_capacity(nodes.len());
         let mut edge_list_data = Vec::with_capacity(total_edge_count);
 
-        for (current_dep_node_index, edges) in current_dep_graph.data.iter_enumerated()
-                                                                .map(|(i, d)| (i, &d.edges)) {
+        for (current_dep_node_index, edges) in data.iter_enumerated().map(|(i, d)| (i, &d.edges)) {
             let start = edge_list_data.len() as u32;
             // This should really just be a memcpy :/
             edge_list_data.extend(edges.iter().map(|i| SerializedDepNodeIndex::new(i.index())));
@@ -613,7 +601,11 @@ fn try_mark_previous_green<'tcx>(
 
         #[cfg(not(parallel_compiler))]
         {
-            debug_assert!(!data.current.borrow().node_to_node_index.contains_key(dep_node));
+            debug_assert!(!data.current
+                               .node_to_node_index
+                               .get_shard_by_value(dep_node)
+                               .lock()
+                               .contains_key(dep_node));
             debug_assert!(data.colors.get(prev_dep_node_index).is_none());
         }
 
@@ -746,15 +738,13 @@ fn try_mark_previous_green<'tcx>(
         // There may be multiple threads trying to mark the same dep node green concurrently
 
         let dep_node_index = {
-            let mut current = data.current.borrow_mut();
-
             // Copy the fingerprint from the previous graph,
             // so we don't have to recompute it
             let fingerprint = data.previous.fingerprint_by_index(prev_dep_node_index);
 
             // We're allocating an entry for the node in the current dependency graph and
             // adding all the appropriate edges imported from the previous graph
-            current.intern_node(*dep_node, current_deps, fingerprint)
+            data.current.intern_node(*dep_node, current_deps, fingerprint)
         };
 
         // ... emitting any stored diagnostic ...
@@ -858,6 +848,8 @@ pub fn is_green(&self, dep_node: &DepNode) -> bool {
     // This method will only load queries that will end up in the disk cache.
     // Other queries will not be executed.
     pub fn exec_cache_promotions(&self, tcx: TyCtxt<'_>) {
+        let _prof_timer = tcx.prof.generic_activity("incr_comp_query_cache_promotion");
+
         let data = self.data.as_ref().unwrap();
         for prev_index in data.colors.values.indices() {
             match data.colors.get(prev_index) {
@@ -874,25 +866,6 @@ pub fn exec_cache_promotions(&self, tcx: TyCtxt<'_>) {
             }
         }
     }
-
-    pub fn mark_loaded_from_cache(&self, dep_node_index: DepNodeIndex, state: bool) {
-        debug!("mark_loaded_from_cache({:?}, {})",
-               self.data.as_ref().unwrap().current.borrow().data[dep_node_index].node,
-               state);
-
-        self.data
-            .as_ref()
-            .unwrap()
-            .loaded_from_cache
-            .borrow_mut()
-            .insert(dep_node_index, state);
-    }
-
-    pub fn was_loaded_from_cache(&self, dep_node: &DepNode) -> Option<bool> {
-        let data = self.data.as_ref().unwrap();
-        let dep_node_index = data.current.borrow().node_to_node_index[dep_node];
-        data.loaded_from_cache.borrow().get(&dep_node_index).cloned()
-    }
 }
 
 /// A "work product" is an intermediate result that we save into the
@@ -947,9 +920,27 @@ struct DepNodeData {
     fingerprint: Fingerprint,
 }
 
+/// `CurrentDepGraph` stores the dependency graph for the current session.
+/// It will be populated as we run queries or tasks.
+///
+/// The nodes in it are identified by an index (`DepNodeIndex`).
+/// The data for each node is stored in its `DepNodeData`, found in the `data` field.
+///
+/// We never remove nodes from the graph: they are only added.
+///
+/// This struct uses two locks internally. The `data` and `node_to_node_index` fields are
+/// locked separately. Operations that take a `DepNodeIndex` typically just access
+/// the `data` field.
+///
+/// The only operation that must manipulate both locks is adding new nodes, in which case
+/// we first acquire the `node_to_node_index` lock and then, once a new node is to be inserted,
+/// acquire the lock on `data`.
 pub(super) struct CurrentDepGraph {
-    data: IndexVec<DepNodeIndex, DepNodeData>,
-    node_to_node_index: FxHashMap<DepNode, DepNodeIndex>,
+    data: Lock<IndexVec<DepNodeIndex, DepNodeData>>,
+    node_to_node_index: Sharded<FxHashMap<DepNode, DepNodeIndex>>,
+
+    /// Used to trap when a specific edge is added to the graph.
+    /// This is used for debug purposes and is only active with `debug_assertions`.
     #[allow(dead_code)]
     forbidden_edge: Option<EdgeFilter>,
 
@@ -966,8 +957,10 @@ pub(super) struct CurrentDepGraph {
     /// the `DepGraph` is created.
     anon_id_seed: Fingerprint,
 
-    total_read_count: u64,
-    total_duplicate_read_count: u64,
+    /// These are simple counters used for profiling and debugging;
+    /// they are only active with `debug_assertions`.
+    total_read_count: AtomicU64,
+    total_duplicate_read_count: AtomicU64,
 }
 
 impl CurrentDepGraph {
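The locking discipline described in the new doc comment can be sketched in miniature with plain standard-library types (a single map standing in for rustc's `Sharded` shards and `Mutex` standing in for `Lock`; none of this is the compiler's actual API):

```
use std::collections::HashMap;
use std::sync::Mutex;

type Node = &'static str;
type NodeIndex = usize;

struct Graph {
    // Append-only vector of node data; locked only when a new node is pushed.
    data: Mutex<Vec<Node>>,
    // Lookup table; locked first on every interning attempt.
    node_to_index: Mutex<HashMap<Node, NodeIndex>>,
}

impl Graph {
    fn intern(&self, node: Node) -> NodeIndex {
        let mut map = self.node_to_index.lock().unwrap();
        if let Some(&index) = map.get(node) {
            return index; // already interned, the `data` lock is never taken
        }
        let mut data = self.data.lock().unwrap();
        let index = data.len();
        data.push(node);
        map.insert(node, index);
        index
    }
}

fn main() {
    let g = Graph { data: Mutex::new(Vec::new()), node_to_index: Mutex::new(HashMap::new()) };
    assert_eq!(g.intern("a"), 0);
    assert_eq!(g.intern("a"), 0); // second lookup only touches the map lock
    assert_eq!(g.intern("b"), 1);
}
```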
@@ -1001,20 +994,20 @@ fn new(prev_graph_node_count: usize) -> CurrentDepGraph {
         let new_node_count_estimate = (prev_graph_node_count * 102) / 100 + 200;
 
         CurrentDepGraph {
-            data: IndexVec::with_capacity(new_node_count_estimate),
-            node_to_node_index: FxHashMap::with_capacity_and_hasher(
-                new_node_count_estimate,
+            data: Lock::new(IndexVec::with_capacity(new_node_count_estimate)),
+            node_to_node_index: Sharded::new(|| FxHashMap::with_capacity_and_hasher(
+                new_node_count_estimate / sharded::SHARDS,
                 Default::default(),
-            ),
+            )),
             anon_id_seed: stable_hasher.finish(),
             forbidden_edge,
-            total_read_count: 0,
-            total_duplicate_read_count: 0,
+            total_read_count: AtomicU64::new(0),
+            total_duplicate_read_count: AtomicU64::new(0),
         }
     }
 
     fn complete_task(
-        &mut self,
+        &self,
         node: DepNode,
         task_deps: TaskDeps,
         fingerprint: Fingerprint
@@ -1022,7 +1015,7 @@ fn complete_task(
         self.alloc_node(node, task_deps.reads, fingerprint)
     }
 
-    fn complete_anon_task(&mut self, kind: DepKind, task_deps: TaskDeps) -> DepNodeIndex {
+    fn complete_anon_task(&self, kind: DepKind, task_deps: TaskDeps) -> DepNodeIndex {
         debug_assert!(!kind.is_eval_always());
 
         let mut hasher = StableHasher::new();
@@ -1047,28 +1040,30 @@ fn complete_anon_task(&mut self, kind: DepKind, task_deps: TaskDeps) -> DepNodeI
     }
 
     fn alloc_node(
-        &mut self,
+        &self,
         dep_node: DepNode,
         edges: SmallVec<[DepNodeIndex; 8]>,
         fingerprint: Fingerprint
     ) -> DepNodeIndex {
-        debug_assert!(!self.node_to_node_index.contains_key(&dep_node));
+        debug_assert!(!self.node_to_node_index
+                           .get_shard_by_value(&dep_node)
+                           .lock()
+                           .contains_key(&dep_node));
         self.intern_node(dep_node, edges, fingerprint)
     }
 
     fn intern_node(
-        &mut self,
+        &self,
         dep_node: DepNode,
         edges: SmallVec<[DepNodeIndex; 8]>,
         fingerprint: Fingerprint
     ) -> DepNodeIndex {
-        debug_assert_eq!(self.node_to_node_index.len(), self.data.len());
-
-        match self.node_to_node_index.entry(dep_node) {
+        match self.node_to_node_index.get_shard_by_value(&dep_node).lock().entry(dep_node) {
             Entry::Occupied(entry) => *entry.get(),
             Entry::Vacant(entry) => {
-                let dep_node_index = DepNodeIndex::new(self.data.len());
-                self.data.push(DepNodeData {
+                let mut data = self.data.lock();
+                let dep_node_index = DepNodeIndex::new(data.len());
+                data.push(DepNodeData {
                     node: dep_node,
                     edges,
                     fingerprint
@@ -1087,7 +1082,7 @@ fn read_index(&self, source: DepNodeIndex) {
             if let Some(task_deps) = icx.task_deps {
                 let mut task_deps = task_deps.lock();
                 if cfg!(debug_assertions) {
-                    self.current.lock().total_read_count += 1;
+                    self.current.total_read_count.fetch_add(1, SeqCst);
                 }
                 if task_deps.read_set.insert(source) {
                     task_deps.reads.push(source);
@@ -1095,9 +1090,9 @@ fn read_index(&self, source: DepNodeIndex) {
                     #[cfg(debug_assertions)]
                     {
                         if let Some(target) = task_deps.node {
-                            let graph = self.current.lock();
-                            if let Some(ref forbidden_edge) = graph.forbidden_edge {
-                                let source = graph.data[source].node;
+                            let data = self.current.data.lock();
+                            if let Some(ref forbidden_edge) = self.current.forbidden_edge {
+                                let source = data[source].node;
                                 if forbidden_edge.test(&source, &target) {
                                     bug!("forbidden edge {:?} -> {:?} created",
                                         source,
@@ -1107,7 +1102,7 @@ fn read_index(&self, source: DepNodeIndex) {
                         }
                     }
                 } else if cfg!(debug_assertions) {
-                    self.current.lock().total_duplicate_read_count += 1;
+                    self.current.total_duplicate_read_count.fetch_add(1, SeqCst);
                 }
             }
         })
index 968b0b9f2f2b73eab1e5b67b62fbf7ea0d4d93af..3d501cacf6f4ca57d239ff26c68091b24160e186 100644 (file)
@@ -259,8 +259,8 @@ trait Foo {
 This is similar to the second sub-error, but subtler. It happens in situations
 like the following:
 
-```compile_fail
-trait Super<A> {}
+```compile_fail,E0038
+trait Super<A: ?Sized> {}
 
 trait Trait: Super<Self> {
 }
@@ -270,17 +270,21 @@ trait Trait: Super<Self> {
 impl Super<Foo> for Foo{}
 
 impl Trait for Foo {}
+
+fn main() {
+    let x: Box<dyn Trait>;
+}
 ```
 
 Here, the supertrait might have methods as follows:
 
 ```
-trait Super<A> {
-    fn get_a(&self) -> A; // note that this is object safe!
+trait Super<A: ?Sized> {
+    fn get_a(&self) -> &A; // note that this is object safe!
 }
 ```
 
-If the trait `Foo` was deriving from something like `Super<String>` or
+If the trait `Trait` were deriving from something like `Super<String>` or
 `Super<T>` (where `Foo` itself is `Foo<T>`), this is okay, because given a type
 `get_a()` will definitely return an object of that type.
 
@@ -466,67 +470,6 @@ fn main() {
 ```
 "##,
 
-// This shouldn't really ever trigger since the repeated value error comes first
-E0136: r##"
-A binary can only have one entry point, and by default that entry point is the
-function `main()`. If there are multiple such functions, please rename one.
-"##,
-
-E0137: r##"
-More than one function was declared with the `#[main]` attribute.
-
-Erroneous code example:
-
-```compile_fail,E0137
-#![feature(main)]
-
-#[main]
-fn foo() {}
-
-#[main]
-fn f() {} // error: multiple functions with a `#[main]` attribute
-```
-
-This error indicates that the compiler found multiple functions with the
-`#[main]` attribute. This is an error because there must be a unique entry
-point into a Rust program. Example:
-
-```
-#![feature(main)]
-
-#[main]
-fn f() {} // ok!
-```
-"##,
-
-E0138: r##"
-More than one function was declared with the `#[start]` attribute.
-
-Erroneous code example:
-
-```compile_fail,E0138
-#![feature(start)]
-
-#[start]
-fn foo(argc: isize, argv: *const *const u8) -> isize {}
-
-#[start]
-fn f(argc: isize, argv: *const *const u8) -> isize {}
-// error: multiple 'start' functions
-```
-
-This error indicates that the compiler found multiple functions with the
-`#[start]` attribute. This is an error because there must be a unique entry
-point into a Rust program. Example:
-
-```
-#![feature(start)]
-
-#[start]
-fn foo(argc: isize, argv: *const *const u8) -> isize { 0 } // ok!
-```
-"##,
-
 E0139: r##"
 #### Note: this error code is no longer emitted by the compiler.
 
@@ -1580,8 +1523,51 @@ impl<'a, T> SomeTrait<'a> for T
 ```
 "##,
 
+E0495: r##"
+A lifetime cannot be determined in the given situation.
+
+Erroneous code example:
+
+```compile_fail,E0495
+fn transmute_lifetime<'a, 'b, T>(t: &'a (T,)) -> &'b T {
+    match (&t,) { // error!
+        ((u,),) => u,
+    }
+}
+
+let y = Box::new((42,));
+let x = transmute_lifetime(&y);
+```
+
+In this code, you have two ways to solve this issue:
+ 1. Enforce that `'a` lives at least as long as `'b`.
+ 2. Use the same lifetime requirement for both input and output values.
+
+For the first solution, replace `'a` with `'a: 'b`:
+
+```
+fn transmute_lifetime<'a: 'b, 'b, T>(t: &'a (T,)) -> &'b T {
+    match (&t,) { // ok!
+        ((u,),) => u,
+    }
+}
+```
+
+For the second, simply remove `'b` so that both use `'a`:
+
+```
+fn transmute_lifetime<'a, T>(t: &'a (T,)) -> &'a T {
+    match (&t,) { // ok!
+        ((u,),) => u,
+    }
+}
+```
+"##,
+
 E0496: r##"
-A lifetime name is shadowing another lifetime name. Erroneous code example:
+A lifetime name is shadowing another lifetime name.
+
+Erroneous code example:
 
 ```compile_fail,E0496
 struct Foo<'a> {
@@ -1613,8 +1599,11 @@ fn main() {
 "##,
 
 E0497: r##"
-A stability attribute was used outside of the standard library. Erroneous code
-example:
+#### Note: this error code is no longer emitted by the compiler.
+
+A stability attribute was used outside of the standard library.
+
+Erroneous code example:
 
 ```compile_fail
 #[stable] // error: stability attributes may not be used outside of the
@@ -1626,33 +1615,6 @@ fn foo() {}
 Also, for now, it is not possible to write deprecation messages either.
 "##,
 
-E0512: r##"
-Transmute with two differently sized types was attempted. Erroneous code
-example:
-
-```compile_fail,E0512
-fn takes_u8(_: u8) {}
-
-fn main() {
-    unsafe { takes_u8(::std::mem::transmute(0u16)); }
-    // error: cannot transmute between types of different sizes,
-    //        or dependently-sized types
-}
-```
-
-Please use types with same size or use the expected type directly. Example:
-
-```
-fn takes_u8(_: u8) {}
-
-fn main() {
-    unsafe { takes_u8(::std::mem::transmute(0i8)); } // ok!
-    // or:
-    unsafe { takes_u8(0u8); } // ok!
-}
-```
-"##,
-
 E0517: r##"
 This error indicates that a `#[repr(..)]` attribute was placed on an
 unsupported item.
@@ -1787,6 +1749,27 @@ fn main() {
 https://doc.rust-lang.org/book/ch13-01-closures.html
 "##,
 
+E0566: r##"
+Conflicting representation hints have been used on the same item.
+
+Erroneous code example:
+
+```
+#[repr(u32, u64)] // warning!
+enum Repr { A }
+```
+
+In most cases (if not all), using just one representation hint is more than
+enough. If you want to have a representation hint depending on the current
+architecture, use `cfg_attr`. Example:
+
+```
+#[cfg_attr(linux, repr(u32))]
+#[cfg_attr(not(linux), repr(u64))]
+enum Repr { A }
+```
+"##,
+
 E0580: r##"
 The `main` function was incorrectly declared.
 
@@ -1847,84 +1830,6 @@ fn main() {
 [RFC 1522]: https://github.com/rust-lang/rfcs/blob/master/text/1522-conservative-impl-trait.md
 "##,
 
-E0591: r##"
-Per [RFC 401][rfc401], if you have a function declaration `foo`:
-
-```
-// For the purposes of this explanation, all of these
-// different kinds of `fn` declarations are equivalent:
-struct S;
-fn foo(x: S) { /* ... */ }
-# #[cfg(for_demonstration_only)]
-extern "C" { fn foo(x: S); }
-# #[cfg(for_demonstration_only)]
-impl S { fn foo(self) { /* ... */ } }
-```
-
-the type of `foo` is **not** `fn(S)`, as one might expect.
-Rather, it is a unique, zero-sized marker type written here as `typeof(foo)`.
-However, `typeof(foo)` can be _coerced_ to a function pointer `fn(S)`,
-so you rarely notice this:
-
-```
-# struct S;
-# fn foo(_: S) {}
-let x: fn(S) = foo; // OK, coerces
-```
-
-The reason that this matter is that the type `fn(S)` is not specific to
-any particular function: it's a function _pointer_. So calling `x()` results
-in a virtual call, whereas `foo()` is statically dispatched, because the type
-of `foo` tells us precisely what function is being called.
-
-As noted above, coercions mean that most code doesn't have to be
-concerned with this distinction. However, you can tell the difference
-when using **transmute** to convert a fn item into a fn pointer.
-
-This is sometimes done as part of an FFI:
-
-```compile_fail,E0591
-extern "C" fn foo(userdata: Box<i32>) {
-    /* ... */
-}
-
-# fn callback(_: extern "C" fn(*mut i32)) {}
-# use std::mem::transmute;
-# unsafe {
-let f: extern "C" fn(*mut i32) = transmute(foo);
-callback(f);
-# }
-```
-
-Here, transmute is being used to convert the types of the fn arguments.
-This pattern is incorrect because, because the type of `foo` is a function
-**item** (`typeof(foo)`), which is zero-sized, and the target type (`fn()`)
-is a function pointer, which is not zero-sized.
-This pattern should be rewritten. There are a few possible ways to do this:
-
-- change the original fn declaration to match the expected signature,
-  and do the cast in the fn body (the preferred option)
-- cast the fn item fo a fn pointer before calling transmute, as shown here:
-
-    ```
-    # extern "C" fn foo(_: Box<i32>) {}
-    # use std::mem::transmute;
-    # unsafe {
-    let f: extern "C" fn(*mut i32) = transmute(foo as extern "C" fn(_));
-    let f: extern "C" fn(*mut i32) = transmute(foo as usize); // works too
-    # }
-    ```
-
-The same applies to transmutes to `*mut fn()`, which were observed in practice.
-Note though that use of this type is generally incorrect.
-The intention is typically to describe a function pointer, but just `fn()`
-alone suffices for that. `*mut fn()` is a pointer to a fn pointer.
-(Since these values are typically just passed to C code, however, this rarely
-makes a difference in practice.)
-
-[rfc401]: https://github.com/rust-lang/rfcs/blob/master/text/0401-coercions.md
-"##,
-
 E0593: r##"
 You tried to supply an `Fn`-based type with a different number of arguments
 than what was expected.
@@ -1941,21 +1846,6 @@ fn main() {
 ```
 "##,
 
-E0601: r##"
-No `main` function was found in a binary crate. To fix this error, add a
-`main` function. For example:
-
-```
-fn main() {
-    // Your program will start here.
-    println!("Hello world!");
-}
-```
-
-If you don't know the basics of Rust, you can go look to the Rust Book to get
-started: https://doc.rust-lang.org/book/
-"##,
-
 E0602: r##"
 An unknown lint was used on the command line.
 
@@ -2115,6 +2005,24 @@ struct Foo {
 transparent wrapper around a float. This can make a difference for the ABI.
 "##,
 
+E0697: r##"
+A closure has been used as `static`.
+
+Erroneous code example:
+
+```compile_fail,E0697
+fn main() {
+    static || {}; // used as `static`
+}
+```
+
+Closures cannot be used as `static`. They "save" the environment,
+and as such a static closure would save only a static environment,
+which would consist only of variables with a static lifetime. Given
+this, it would be better to use a proper function. The easiest fix
+is to remove the `static` keyword.
+"##,
+
 E0698: r##"
 When using generators (or async) all type variables must be bound so a
 generator can be constructed.
@@ -2234,6 +2142,25 @@ fn foo(){}
 rejected in your own crates.
 "##,
 
+E0736: r##"
+`#[track_caller]` and `#[naked]` cannot be applied to the same function.
+
+Erroneous code example:
+
+```compile_fail,E0736
+#![feature(track_caller)]
+
+#[naked]
+#[track_caller]
+fn foo() {}
+```
+
+This is primarily due to ABI incompatibilities between the two attributes.
+See [RFC 2091] for details on this and other limitations.
+
+[RFC 2091]: https://github.com/rust-lang/rfcs/blob/master/text/2091-inline-semantic.md
+"##,
+
 ;
 //  E0006, // merged with E0005
 //  E0101, // replaced with E0282
@@ -2243,7 +2170,7 @@ fn foo(){}
 //  E0272, // on_unimplemented #0
 //  E0273, // on_unimplemented #1
 //  E0274, // on_unimplemented #2
-    E0278, // requirement is not satisfied
+//  E0278, // requirement is not satisfied
     E0279, // requirement is not satisfied
     E0280, // requirement is not satisfied
 //  E0285, // overflow evaluation builtin bounds
@@ -2275,9 +2202,6 @@ fn foo(){}
     E0488, // lifetime of variable does not enclose its declaration
     E0489, // type/lifetime parameter not in scope here
     E0490, // a value of type `..` is borrowed for too long
-    E0495, // cannot infer an appropriate lifetime due to conflicting
-           // requirements
-    E0566, // conflicting representation hints
     E0623, // lifetime mismatch where both parameters are anonymous regions
     E0628, // generators cannot have explicit parameters
     E0631, // type mismatch in closure arguments
@@ -2285,15 +2209,15 @@ fn foo(){}
     E0657, // `impl Trait` can only capture lifetimes bound at the fn level
     E0687, // in-band lifetimes cannot be used in `fn`/`Fn` syntax
     E0688, // in-band lifetimes cannot be mixed with explicit lifetime binders
-    E0697, // closures cannot be static
-    E0707, // multiple elided lifetimes used in arguments of `async fn`
+//  E0707, // multiple elided lifetimes used in arguments of `async fn`
     E0708, // `async` non-`move` closures with parameters are not currently
            // supported
-    E0709, // multiple different lifetimes used in arguments of `async fn`
+//  E0709, // multiple different lifetimes used in arguments of `async fn`
     E0710, // an unknown tool name found in scoped lint
     E0711, // a feature has been declared with conflicting stability attributes
 //  E0702, // replaced with a generic attribute input check
     E0726, // non-explicit (not `'_`) elided lifetime in unsupported position
     E0727, // `async` generators are not yet supported
     E0728, // `await` must be in an `async` function or block
+    E0739, // invalid track_caller application/syntax
 }
index d5e956555bdfb7e4a526027c246af743a10261cf..c37fec982b1160bd80d505d3f6ad3432ac721545 100644 (file)
@@ -11,7 +11,7 @@
 use crate::ty::query::Providers;
 
 use std::fmt::{self, Display};
-use syntax::symbol::sym;
+use syntax::{attr, symbol::sym};
 use syntax_pos::Span;
 
 #[derive(Copy, Clone, PartialEq)]
@@ -103,6 +103,8 @@ fn check_attributes(&self, item: &hir::Item, target: Target) {
                 self.check_marker(attr, item, target)
             } else if attr.check_name(sym::target_feature) {
                 self.check_target_feature(attr, item, target)
+            } else if attr.check_name(sym::track_caller) {
+                self.check_track_caller(attr, &item, target)
             } else {
                 true
             };
@@ -135,6 +137,32 @@ fn check_inline(&self, attr: &hir::Attribute, span: &Span, target: Target) -> bo
         }
     }
 
+    /// Checks if a `#[track_caller]` attribute is applied to a non-naked function. Returns `true` if valid.
+    fn check_track_caller(&self, attr: &hir::Attribute, item: &hir::Item, target: Target) -> bool {
+        if target != Target::Fn {
+            struct_span_err!(
+                self.tcx.sess,
+                attr.span,
+                E0739,
+                "attribute should be applied to function"
+            )
+            .span_label(item.span, "not a function")
+            .emit();
+            false
+        } else if attr::contains_name(&item.attrs, sym::naked) {
+            struct_span_err!(
+                self.tcx.sess,
+                attr.span,
+                E0736,
+                "cannot use `#[track_caller]` with `#[naked]`",
+            )
+            .emit();
+            false
+        } else {
+            true
+        }
+    }
+
     /// Checks if the `#[non_exhaustive]` attribute on an `item` is valid. Returns `true` if valid.
     fn check_non_exhaustive(
         &self,
index f7d31ca06ee56e173dc8d900632d980de17d8206..a071a539e01df16e1ac32fd61a864dfbcabccb2a 100644 (file)
@@ -6,7 +6,7 @@
 use crate::util::nodemap::DefIdMap;
 
 use syntax::ast;
-use syntax::ext::base::MacroKind;
+use syntax_expand::base::MacroKind;
 use syntax::ast::NodeId;
 use syntax_pos::Span;
 use rustc_macros::HashStable;
index 2238a56b29d04b150139935aed91f6deaa676067..d5287fd415b7f07023244376a3211df770b41f4a 100644 (file)
 use syntax::ptr::P as AstP;
 use syntax::ast::*;
 use syntax::errors;
-use syntax::ext::base::SpecialDerives;
-use syntax::ext::hygiene::ExpnId;
+use syntax_expand::base::SpecialDerives;
 use syntax::print::pprust;
+use syntax::parse::token::{self, Nonterminal, Token};
+use syntax::tokenstream::{TokenStream, TokenTree};
+use syntax::sess::ParseSess;
 use syntax::source_map::{respan, ExpnData, ExpnKind, DesugaringKind, Spanned};
 use syntax::symbol::{kw, sym, Symbol};
-use syntax::tokenstream::{TokenStream, TokenTree};
-use syntax::parse::token::{self, Token};
 use syntax::visit::{self, Visitor};
+use syntax_pos::hygiene::ExpnId;
 use syntax_pos::Span;
 
 const HIR_ID_COUNTER_LOCKED: u32 = 0xFFFFFFFF;
@@ -86,6 +87,11 @@ pub struct LoweringContext<'a> {
 
     resolver: &'a mut dyn Resolver,
 
+    /// HACK(Centril): there is a cyclic dependency between the parser and lowering
+    /// if we don't have this function pointer. To avoid that dependency so that
+    /// librustc is independent of the parser, we use dynamic dispatch here.
+    nt_to_tokenstream: NtToTokenstream,
+
     /// The items being lowered are collected here.
     items: BTreeMap<hir::HirId, hir::Item>,
 
@@ -180,6 +186,8 @@ fn resolve_str_path(
     fn has_derives(&self, node_id: NodeId, derives: SpecialDerives) -> bool;
 }
 
+type NtToTokenstream = fn(&Nonterminal, &ParseSess, Span) -> TokenStream;
+
 /// Context of `impl Trait` in code, which determines whether it is allowed in an HIR subtree,
 /// and if so, what meaning it has.
 #[derive(Debug)]
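The `nt_to_tokenstream` function pointer introduced above is a small dependency inversion: lowering only names a pointer type, and the crate that actually owns the conversion passes the function in at construction time. A self-contained sketch of the same shape (all names below are placeholders, not rustc's API):

```
// The consumer side only knows this signature.
struct Nonterminal(String);
type NtToTokenstream = fn(&Nonterminal) -> String;

struct LoweringContext {
    nt_to_tokenstream: NtToTokenstream,
}

impl LoweringContext {
    fn lower(&self, nt: &Nonterminal) -> String {
        // Call through the injected pointer instead of depending on the other crate.
        (self.nt_to_tokenstream)(nt)
    }
}

// Lives in "the other crate"; handed over when the context is built.
fn render(nt: &Nonterminal) -> String {
    nt.0.clone()
}

fn main() {
    let ctx = LoweringContext { nt_to_tokenstream: render };
    assert_eq!(ctx.lower(&Nonterminal("tokens".into())), "tokens");
}
```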
@@ -236,17 +244,21 @@ pub fn lower_crate(
     dep_graph: &DepGraph,
     krate: &Crate,
     resolver: &mut dyn Resolver,
+    nt_to_tokenstream: NtToTokenstream,
 ) -> hir::Crate {
     // We're constructing the HIR here; we don't care what we will
     // read, since we haven't even constructed the *input* to
     // incr. comp. yet.
     dep_graph.assert_ignored();
 
+    let _prof_timer = sess.prof.generic_activity("hir_lowering");
+
     LoweringContext {
         crate_root: sess.parse_sess.injected_crate_name.try_get().copied(),
         sess,
         cstore,
         resolver,
+        nt_to_tokenstream,
         items: BTreeMap::new(),
         trait_items: BTreeMap::new(),
         impl_items: BTreeMap::new(),
@@ -844,7 +856,7 @@ fn maybe_collect_in_band_lifetime(&mut self, ident: Ident) {
     /// header, we convert it to an in-band lifetime.
     fn collect_fresh_in_band_lifetime(&mut self, span: Span) -> ParamName {
         assert!(self.is_collecting_in_band_lifetimes);
-        let index = self.lifetimes_to_define.len();
+        let index = self.lifetimes_to_define.len() + self.in_scope_lifetimes.len();
         let hir_name = ParamName::Fresh(index);
         self.lifetimes_to_define.push((span, hir_name));
         hir_name
@@ -1020,7 +1032,7 @@ fn lower_token_tree(&mut self, tree: TokenTree) -> TokenStream {
     fn lower_token(&mut self, token: Token) -> TokenStream {
         match token.kind {
             token::Interpolated(nt) => {
-                let tts = nt.to_tokenstream(&self.sess.parse_sess, token.span);
+                let tts = (self.nt_to_tokenstream)(&nt, &self.sess.parse_sess, token.span);
                 self.lower_token_stream(tts)
             }
             _ => TokenTree::Token(token).into(),
@@ -3279,10 +3291,14 @@ fn elided_path_lifetime(&mut self, span: Span) -> hir::Lifetime {
                 let id = self.sess.next_node_id();
                 self.new_named_lifetime(id, span, hir::LifetimeName::Error)
             }
-            // This is the normal case.
-            AnonymousLifetimeMode::PassThrough => self.new_implicit_lifetime(span),
-
-            AnonymousLifetimeMode::ReportError => self.new_error_lifetime(None, span),
+            // `PassThrough` is the normal case.
+            // `new_error_lifetime`, which would usually be used in the case of `ReportError`,
+            // is unsuitable here, as these can occur from missing lifetime parameters in a
+            // `PathSegment`, for which there is no associated `'_` or `&T` with no explicit
+            // lifetime. Instead, we simply create an implicit lifetime, which will be checked
+            // later, at which point a suitable error will be emitted.
+          | AnonymousLifetimeMode::PassThrough
+          | AnonymousLifetimeMode::ReportError => self.new_implicit_lifetime(span),
         }
     }
 
index 548a2fedfff6dfc0da7e344d8520e56067afa39b..73d2ac5c134d3f1fa3465c2f391aa4cb5a12af2b 100644 (file)
@@ -18,7 +18,7 @@
 use syntax::attr;
 use syntax::ast::*;
 use syntax::visit::{self, Visitor};
-use syntax::ext::base::SpecialDerives;
+use syntax_expand::base::SpecialDerives;
 use syntax::source_map::{respan, DesugaringKind, Spanned};
 use syntax::symbol::{kw, sym};
 use syntax_pos::Span;
index 1997e2aab35e8173b0d662ef0bfe7667bd7f8241..9be339be7034f2b00d1d83823d3bc79f48f10ce9 100644 (file)
@@ -2,7 +2,7 @@
 use crate::hir::def_id::DefIndex;
 
 use syntax::ast::*;
-use syntax::ext::hygiene::ExpnId;
+use syntax_expand::hygiene::ExpnId;
 use syntax::visit;
 use syntax::symbol::{kw, sym};
 use syntax::parse::token::{self, Token};
@@ -90,7 +90,7 @@ fn collect_field(&mut self, field: &'a StructField, index: Option<usize>) {
         }
     }
 
-    pub fn visit_macro_invoc(&mut self, id: NodeId) {
+    fn visit_macro_invoc(&mut self, id: NodeId) {
         self.definitions.set_invocation_parent(id.placeholder_to_expn_id(), self.parent_def);
     }
 }
index 71bf230e37dedd8539e1d6dd2b665c1dec28d7fd..d2732c92d26887688ff7e83d79131d2d418e8106 100644 (file)
@@ -17,7 +17,7 @@
 use std::fmt::Write;
 use std::hash::Hash;
 use syntax::ast;
-use syntax::ext::hygiene::ExpnId;
+use syntax_expand::hygiene::ExpnId;
 use syntax::symbol::{Symbol, sym, InternedString};
 use syntax_pos::{Span, DUMMY_SP};
 
@@ -111,7 +111,7 @@ pub struct Definitions {
 /// A unique identifier that we can use to lookup a definition
 /// precisely. It combines the index of the definition's parent (if
 /// any) with a `DisambiguatedDefPathData`.
-#[derive(Clone, PartialEq, Debug, Hash, RustcEncodable, RustcDecodable)]
+#[derive(Clone, PartialEq, Debug, RustcEncodable, RustcDecodable)]
 pub struct DefKey {
     /// The parent path.
     pub parent: Option<DefIndex>,
@@ -162,13 +162,13 @@ fn root_parent_stable_hash(crate_name: &str,
 /// between them. This introduces some artificial ordering dependency
 /// but means that if you have, e.g., two impls for the same type in
 /// the same module, they do get distinct `DefId`s.
-#[derive(Clone, PartialEq, Debug, Hash, RustcEncodable, RustcDecodable)]
+#[derive(Clone, PartialEq, Debug, RustcEncodable, RustcDecodable)]
 pub struct DisambiguatedDefPathData {
     pub data: DefPathData,
     pub disambiguator: u32
 }
 
-#[derive(Clone, Debug, Hash, RustcEncodable, RustcDecodable)]
+#[derive(Clone, Debug, RustcEncodable, RustcDecodable)]
 pub struct DefPath {
     /// The path leading from the crate root to the item.
     pub data: Vec<DisambiguatedDefPathData>,
index 1705f5692d4f9698c176921f45813021c0deaf67..f839087ec027108677776a5b816cf1fdb1afe160 100644 (file)
@@ -20,7 +20,7 @@
 use rustc_index::vec::IndexVec;
 use syntax::ast::{self, Name, NodeId};
 use syntax::source_map::Spanned;
-use syntax::ext::base::MacroKind;
+use syntax_expand::base::MacroKind;
 use syntax_pos::{Span, DUMMY_SP};
 
 pub mod blocks;
@@ -1222,6 +1222,8 @@ pub fn map_crate<'hir>(sess: &crate::session::Session,
                        forest: &'hir Forest,
                        definitions: &'hir Definitions)
                        -> Map<'hir> {
+    let _prof_timer = sess.prof.generic_activity("build_hir_map");
+
     // Build the reverse mapping of `node_to_hir_id`.
     let hir_to_node_id = definitions.node_to_hir_id.iter_enumerated()
         .map(|(node_id, &hir_id)| (hir_id, node_id)).collect();
index d5892794d64968e09d92daa92e7d84b1886f7dbe..568e051aaf08fc750e618bad84e220338dde626b 100644 (file)
@@ -669,6 +669,12 @@ pub fn span(&self) -> Option<Span> {
             Some(self.span)
         }
     }
+
+    /// The `WhereClause` under normal circumstances points at either the predicates or the empty
+    /// space where the `where` clause should be. Only of use for diagnostic suggestions.
+    pub fn span_for_predicates_or_empty_place(&self) -> Span {
+        self.span
+    }
 }
 
 /// A single predicate in a where-clause.
@@ -989,6 +995,15 @@ pub enum RangeEnd {
     Excluded,
 }
 
+impl fmt::Display for RangeEnd {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.write_str(match self {
+            RangeEnd::Included => "..=",
+            RangeEnd::Excluded => "..",
+        })
+    }
+}
+
 #[derive(RustcEncodable, RustcDecodable, Debug, HashStable)]
 pub enum PatKind {
     /// Represents a wildcard pattern (i.e., `_`).
@@ -1053,6 +1068,13 @@ pub fn and(self, other: Self) -> Self {
             MutImmutable => MutImmutable,
         }
     }
+
+    pub fn invert(self) -> Self {
+        match self {
+            MutMutable => MutImmutable,
+            MutImmutable => MutMutable,
+        }
+    }
 }
 
 #[derive(Copy, Clone, PartialEq, RustcEncodable, RustcDecodable, Debug, Hash, HashStable)]
@@ -1359,6 +1381,10 @@ pub fn id(&self) -> BodyId {
             hir_id: self.value.hir_id,
         }
     }
+
+    pub fn generator_kind(&self) -> Option<GeneratorKind> {
+        self.generator_kind
+    }
 }
 
 /// The type of source expression that caused this generator to be created.
@@ -1548,6 +1574,19 @@ pub fn is_place_expr(&self) -> bool {
             }
         }
     }
+
+    /// If `self.kind` is `ExprKind::DropTemps(expr)`, drill down until we get a non-`DropTemps`
+    /// `Expr`. This is used in suggestions to ignore this `ExprKind` as it is semantically
+    /// silent, only signaling the ownership system. By doing this, suggestions that check the
+    /// `ExprKind` of any given `Expr` for presentation don't have to care about `DropTemps`
+    /// beyond remembering to call this function before doing analysis on it.
+    pub fn peel_drop_temps(&self) -> &Self {
+        let mut expr = self;
+        while let ExprKind::DropTemps(inner) = &expr.kind {
+            expr = inner;
+        }
+        expr
+    }
 }
 
 impl fmt::Debug for Expr {
@@ -2669,6 +2708,11 @@ pub struct CodegenFnAttrs {
     /// probably isn't set when this is set, this is for foreign items while
     /// `#[export_name]` is for Rust-defined functions.
     pub link_name: Option<Symbol>,
+    /// The `#[link_ordinal = "..."]` attribute, indicating an ordinal an
+    /// imported function has in the dynamic library. Note that this must not
+    /// be set when `link_name` is set. This is for foreign items with the
+    /// "raw-dylib" kind.
+    pub link_ordinal: Option<usize>,
     /// The `#[target_feature(enable = "...")]` attribute and the enabled
     /// features (only enabled features are supported right now).
     pub target_features: Vec<Symbol>,
@@ -2716,7 +2760,9 @@ pub struct CodegenFnAttrFlags: u32 {
         const USED                      = 1 << 9;
         /// #[ffi_returns_twice], indicates that an extern function can return
         /// multiple times
-        const FFI_RETURNS_TWICE = 1 << 10;
+        const FFI_RETURNS_TWICE         = 1 << 10;
+        /// #[track_caller]: allow access to the caller location
+        const TRACK_CALLER              = 1 << 11;
     }
 }
 
@@ -2728,6 +2774,7 @@ pub fn new() -> CodegenFnAttrs {
             optimize: OptimizeAttr::None,
             export_name: None,
             link_name: None,
+            link_ordinal: None,
             target_features: vec![],
             linkage: None,
             link_section: None,
index 6cffaa8a494c4a9bf798b77de0a0538054747a36..b852098d4cef7fc2a9e148c0df1def1da5659227 100644 (file)
@@ -1,10 +1,10 @@
 use rustc_target::spec::abi::Abi;
 use syntax::ast;
 use syntax::source_map::{SourceMap, Spanned};
-use syntax::parse::ParseSess;
 use syntax::print::pp::{self, Breaks};
 use syntax::print::pp::Breaks::{Consistent, Inconsistent};
 use syntax::print::pprust::{self, Comments, PrintState};
+use syntax::sess::ParseSess;
 use syntax::symbol::kw;
 use syntax::util::parser::{self, AssocOp, Fixity};
 use syntax_pos::{self, BytePos, FileName};
index 3e6b271b8349754575b72578c7bde5123fd55f59..14d0673ecc03fe51f3677cc1ecaa98973bb85332 100644 (file)
@@ -13,7 +13,7 @@
 
 use syntax::ast;
 use syntax::source_map::SourceMap;
-use syntax::ext::hygiene::SyntaxContext;
+use syntax_expand::hygiene::SyntaxContext;
 use syntax::symbol::Symbol;
 use syntax::tokenstream::DelimSpan;
 use syntax_pos::{Span, DUMMY_SP};
index 23a2f115e05e2516fbb0acc8a38106c89812c0ac..defc3fb25bc52bb890276adcdac6d755e963b7fc 100644 (file)
@@ -59,7 +59,7 @@ fn to_stable_hash_key(&self,
     Intel
 });
 
-impl_stable_hash_for!(enum ::syntax::ext::base::MacroKind {
+impl_stable_hash_for!(enum ::syntax_expand::base::MacroKind {
     Bang,
     Attr,
     Derive,
index d31b527a55b6951e6e4663744a6e52b9ba98cba5..f6068855e630cc3d514c59d13d41cc553e0a6ca0 100644 (file)
@@ -200,7 +200,7 @@ fn msg_span_from_early_bound_and_free_regions(
                 {
                     sp = param.span;
                 }
-                (format!("the lifetime {} as defined on", br.name), sp)
+                (format!("the lifetime `{}` as defined on", br.name), sp)
             }
             ty::ReFree(ty::FreeRegion {
                 bound_region: ty::BoundRegion::BrNamed(_, name),
@@ -213,7 +213,7 @@ fn msg_span_from_early_bound_and_free_regions(
                 {
                     sp = param.span;
                 }
-                (format!("the lifetime {} as defined on", name), sp)
+                (format!("the lifetime `{}` as defined on", name), sp)
             }
             ty::ReFree(ref fr) => match fr.bound_region {
                 ty::BrAnon(idx) => (
@@ -221,7 +221,7 @@ fn msg_span_from_early_bound_and_free_regions(
                     self.hir().span(node),
                 ),
                 _ => (
-                    format!("the lifetime {} as defined on", region),
+                    format!("the lifetime `{}` as defined on", region),
                     cm.def_span(self.hir().span(node)),
                 ),
             },
@@ -935,6 +935,7 @@ fn push_ty_ref<'tcx>(
                         .filter(|(a, b)| a == b)
                         .count();
                     let len = sub1.len() - common_default_params;
+                    let consts_offset = len - sub1.consts().count();
 
                     // Only draw `<...>` if there're lifetime/type arguments.
                     if len > 0 {
@@ -981,7 +982,8 @@ fn lifetime_display(lifetime: Region<'_>) -> String {
                     //         ^ elided type as this type argument was the same in both sides
                     let type_arguments = sub1.types().zip(sub2.types());
                     let regions_len = sub1.regions().count();
-                    for (i, (ta1, ta2)) in type_arguments.take(len).enumerate() {
+                    let num_display_types = consts_offset - regions_len;
+                    for (i, (ta1, ta2)) in type_arguments.take(num_display_types).enumerate() {
                         let i = i + regions_len;
                         if ta1 == ta2 {
                             values.0.push_normal("_");
@@ -994,6 +996,21 @@ fn lifetime_display(lifetime: Region<'_>) -> String {
                         self.push_comma(&mut values.0, &mut values.1, len, i);
                     }
 
+                    // Do the same for const arguments: if they are equal, do not highlight and
+                    // elide them from the output.
+                    let const_arguments = sub1.consts().zip(sub2.consts());
+                    for (i, (ca1, ca2)) in const_arguments.enumerate() {
+                        let i = i + consts_offset;
+                        if ca1 == ca2 {
+                            values.0.push_normal("_");
+                            values.1.push_normal("_");
+                        } else {
+                            values.0.push_highlighted(ca1.to_string());
+                            values.1.push_highlighted(ca2.to_string());
+                        }
+                        self.push_comma(&mut values.0, &mut values.1, len, i);
+                    }
+
                     // Close the type argument bracket.
                     // Only draw `<...>` if there're lifetime/type arguments.
                     if len > 0 {
index f11f94c428e86a54ef2ffe667a56166648ebffbd..f30f19d41509dae6fc66849778fd9e27185437b5 100644 (file)
@@ -19,8 +19,8 @@
 use rustc_data_structures::graph::implementation::{
     Direction, Graph, NodeIndex, INCOMING, OUTGOING,
 };
+use rustc_index::bit_set::BitSet;
 use rustc_index::vec::{Idx, IndexVec};
-use smallvec::SmallVec;
 use std::fmt;
 use syntax_pos::Span;
 
@@ -304,8 +304,7 @@ fn enforce_member_constraint(
     }
 
     fn expansion(&self, var_values: &mut LexicalRegionResolutions<'tcx>) {
-        self.iterate_until_fixed_point("Expansion", |constraint| {
-            debug!("expansion: constraint={:?}", constraint);
+        let mut process_constraint = |constraint: &Constraint<'tcx>| {
             let (a_region, b_vid, b_data, retain) = match *constraint {
                 Constraint::RegSubVar(a_region, b_vid) => {
                     let b_data = var_values.value_mut(b_vid);
@@ -331,7 +330,33 @@ fn expansion(&self, var_values: &mut LexicalRegionResolutions<'tcx>) {
 
             let changed = self.expand_node(a_region, b_vid, b_data);
             (changed, retain)
-        })
+        };
+
+        // Using bitsets to track the remaining elements is faster than using a
+        // `Vec` by itself (which requires removing elements, which requires
+        // element shuffling, which is slow).
+        let constraints: Vec<_> = self.data.constraints.keys().collect();
+        let mut live_indices: BitSet<usize> = BitSet::new_filled(constraints.len());
+        let mut killed_indices: BitSet<usize> = BitSet::new_empty(constraints.len());
+        let mut changed = true;
+        while changed {
+            changed = false;
+            for index in live_indices.iter() {
+                let constraint = constraints[index];
+                let (edge_changed, retain) = process_constraint(constraint);
+                if edge_changed {
+                    changed = true;
+                }
+                if !retain {
+                    let changed = killed_indices.insert(index);
+                    debug_assert!(changed);
+                }
+            }
+            live_indices.subtract(&killed_indices);
+
+            // We could clear `killed_indices` here, but we don't need to and
+            // it's cheaper not to.
+        }
     }
 
     // This function is very hot in some workloads. There's a single callsite
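The comment at the top of the new loop explains why live/killed index sets replace `Vec::retain`. A toy version of the same loop shape, with `Vec<bool>` standing in for rustc's `BitSet` and a made-up "constraint" that just raises a running maximum, might look like this (illustrative only):

```
fn fixed_point(constraints: &[i32]) -> i32 {
    let mut live = vec![true; constraints.len()];
    let mut killed = vec![false; constraints.len()];
    let mut total = 0;
    let mut changed = true;
    while changed {
        changed = false;
        for (i, &c) in constraints.iter().enumerate() {
            if !live[i] {
                continue;
            }
            // Stand-in for `process_constraint`: returns (edge_changed, retain).
            let (edge_changed, retain) = if c > total {
                total = c;
                (true, true)
            } else {
                (false, false)
            };
            if edge_changed {
                changed = true;
            }
            if !retain {
                killed[i] = true; // never removed from the vector, only marked dead
            }
        }
        // Equivalent of `live_indices.subtract(&killed_indices)` above.
        for (l, &k) in live.iter_mut().zip(&killed) {
            if k {
                *l = false;
            }
        }
    }
    total
}

fn main() {
    assert_eq!(fixed_point(&[1, 3, 2]), 3);
}
```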
@@ -360,13 +385,21 @@ fn expand_node(
         match *b_data {
             VarValue::Value(cur_region) => {
                 // Identical scopes can show up quite often, if the fixed point
-                // iteration converges slowly, skip them
+                // iteration converges slowly. Skip them. This is purely an
+                // optimization.
                 if let (ReScope(a_scope), ReScope(cur_scope)) = (a_region, cur_region) {
                     if a_scope == cur_scope {
                         return false;
                     }
                 }
 
+                // This is a specialized version of the `lub_concrete_regions`
+                // check below for a common case, here purely as an
+                // optimization.
+                if let ReEmpty = a_region {
+                    return false;
+                }
+
                 let mut lub = self.lub_concrete_regions(a_region, cur_region);
                 if lub == cur_region {
                     return false;
@@ -407,8 +440,6 @@ fn sub_concrete_regions(&self, a: Region<'tcx>, b: Region<'tcx>) -> bool {
 
     /// Returns the smallest region `c` such that `a <= c` and `b <= c`.
     fn lub_concrete_regions(&self, a: Region<'tcx>, b: Region<'tcx>) -> Region<'tcx> {
-        let tcx = self.tcx();
-
         match (a, b) {
             (&ty::ReClosureBound(..), _)
             | (_, &ty::ReClosureBound(..))
@@ -468,7 +499,7 @@ fn lub_concrete_regions(&self, a: Region<'tcx>, b: Region<'tcx>) -> Region<'tcx>
 
                 // otherwise, we don't know what the free region is,
                 // so we must conservatively say the LUB is static:
-                tcx.lifetimes.re_static
+                self.tcx().lifetimes.re_static
             }
 
             (&ReScope(a_id), &ReScope(b_id)) => {
@@ -476,7 +507,7 @@ fn lub_concrete_regions(&self, a: Region<'tcx>, b: Region<'tcx>) -> Region<'tcx>
                 // subtype of the region corresponding to an inner
                 // block.
                 let lub = self.region_rels.region_scope_tree.nearest_common_ancestor(a_id, b_id);
-                tcx.mk_region(ReScope(lub))
+                self.tcx().mk_region(ReScope(lub))
             }
 
             (&ReEarlyBound(_), &ReEarlyBound(_))
@@ -490,7 +521,7 @@ fn lub_concrete_regions(&self, a: Region<'tcx>, b: Region<'tcx>) -> Region<'tcx>
                 if a == b {
                     a
                 } else {
-                    tcx.lifetimes.re_static
+                    self.tcx().lifetimes.re_static
                 }
             }
         }
@@ -860,29 +891,6 @@ fn process_edges<'tcx>(
         }
     }
 
-    fn iterate_until_fixed_point<F>(&self, tag: &str, mut body: F)
-    where
-        F: FnMut(&Constraint<'tcx>) -> (bool, bool),
-    {
-        let mut constraints: SmallVec<[_; 16]> = self.data.constraints.keys().collect();
-        let mut iteration = 0;
-        let mut changed = true;
-        while changed {
-            changed = false;
-            iteration += 1;
-            debug!("---- {} Iteration {}{}", "#", tag, iteration);
-            constraints.retain(|constraint| {
-                let (edge_changed, retain) = body(constraint);
-                if edge_changed {
-                    debug!("updated due to constraint {:?}", constraint);
-                    changed = true;
-                }
-                retain
-            });
-        }
-        debug!("---- {} Complete after {} iteration(s)", tag, iteration);
-    }
-
     fn bound_is_met(
         &self,
         bound: &VerifyBound<'tcx>,
index ea9e931ad838e83c71ec470ace67064e717acb08..af74d13572431a147c3235666ce7ab470b9337e7 100644 (file)
@@ -814,16 +814,16 @@ fn commit_from(&self, snapshot: CombinedSnapshot<'a, 'tcx>) {
     /// Executes `f` and commit the bindings.
     pub fn commit_unconditionally<R, F>(&self, f: F) -> R
     where
-        F: FnOnce() -> R,
+        F: FnOnce(&CombinedSnapshot<'a, 'tcx>) -> R,
     {
-        debug!("commit()");
+        debug!("commit_unconditionally()");
         let snapshot = self.start_snapshot();
-        let r = f();
+        let r = f(&snapshot);
         self.commit_from(snapshot);
         r
     }
 
-    /// Executes `f` and commit the bindings if closure `f` returns `Ok(_)`.
+    /// Execute `f` and commit the bindings if closure `f` returns `Ok(_)`.
     pub fn commit_if_ok<T, E, F>(&self, f: F) -> Result<T, E>
     where
         F: FnOnce(&CombinedSnapshot<'a, 'tcx>) -> Result<T, E>,
@@ -843,19 +843,7 @@ pub fn commit_if_ok<T, E, F>(&self, f: F) -> Result<T, E>
         r
     }
 
-    /// Execute `f` in a snapshot, and commit the bindings it creates.
-    pub fn in_snapshot<T, F>(&self, f: F) -> T
-    where
-        F: FnOnce(&CombinedSnapshot<'a, 'tcx>) -> T,
-    {
-        debug!("in_snapshot()");
-        let snapshot = self.start_snapshot();
-        let r = f(&snapshot);
-        self.commit_from(snapshot);
-        r
-    }
-
-    /// Executes `f` then unroll any bindings it creates.
+    /// Execute `f` then unroll any bindings it creates.
     pub fn probe<R, F>(&self, f: F) -> R
     where
         F: FnOnce(&CombinedSnapshot<'a, 'tcx>) -> R,
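
For readers skimming this API shuffle: `probe` rolls every binding back, `commit_if_ok` keeps bindings only when the closure returns `Ok`, and `commit_unconditionally` always keeps them (and now also hands the closure the snapshot, which is what lets it replace `in_snapshot`). A hedged, self-contained sketch of those shapes over an invented undo-log table (`Table` and `Snapshot` below are stand-ins, not the real `InferCtxt` machinery):

    struct Snapshot { undo_len: usize }

    struct Table { values: Vec<i32>, undo_log: Vec<usize> }

    impl Table {
        fn start_snapshot(&self) -> Snapshot {
            Snapshot { undo_len: self.undo_log.len() }
        }
        fn push(&mut self, v: i32) {
            self.values.push(v);
            self.undo_log.push(self.values.len() - 1); // remember how to undo
        }
        fn rollback_to(&mut self, snapshot: Snapshot) {
            while self.undo_log.len() > snapshot.undo_len {
                let idx = self.undo_log.pop().unwrap();
                self.values.truncate(idx);
            }
        }
        fn commit_from(&mut self, snapshot: Snapshot) {
            // Keep the changes, forget how to undo them.
            self.undo_log.truncate(snapshot.undo_len);
        }
        fn commit_if_ok<T, E, F>(&mut self, f: F) -> Result<T, E>
        where
            F: FnOnce(&mut Self) -> Result<T, E>,
        {
            let snapshot = self.start_snapshot();
            match f(self) {
                Ok(v) => { self.commit_from(snapshot); Ok(v) }
                Err(e) => { self.rollback_to(snapshot); Err(e) }
            }
        }
    }

    fn main() {
        let mut t = Table { values: vec![], undo_log: vec![] };
        let failed: Result<(), ()> = t.commit_if_ok(|t| { t.push(1); Err(()) });
        assert!(failed.is_err() && t.values.is_empty()); // rolled back
        let ok: Result<(), ()> = t.commit_if_ok(|t| { t.push(2); Ok(()) });
        assert!(ok.is_ok() && t.values == vec![2]);      // kept
    }
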
index e7205dd47a6172c657913b72673d90e1c34ca56c..bd19a002fe8b7bf676e185b527bd5a034eaa574c 100644 (file)
@@ -733,12 +733,12 @@ fn visit_ty(&mut self, ty: Ty<'tcx>) -> bool {
                 // Skip lifetime parameters of the enclosing item(s)
                 // Also skip the witness type, because that has no free regions.
 
-                for upvar_ty in substs.upvar_tys(def_id, self.tcx) {
+                for upvar_ty in substs.as_generator().upvar_tys(def_id, self.tcx) {
                     upvar_ty.visit_with(self);
                 }
 
-                substs.return_ty(def_id, self.tcx).visit_with(self);
-                substs.yield_ty(def_id, self.tcx).visit_with(self);
+                substs.as_generator().return_ty(def_id, self.tcx).visit_with(self);
+                substs.as_generator().yield_ty(def_id, self.tcx).visit_with(self);
             }
             _ => {
                 ty.super_visit_with(self);
@@ -902,7 +902,7 @@ fn fold_ty(&mut self, ty: Ty<'tcx>) -> Ty<'tcx> {
             ty::Generator(def_id, substs, movability) => {
                 let generics = self.tcx.generics_of(def_id);
                 let substs =
-                    self.tcx.mk_substs(substs.substs.iter().enumerate().map(|(index, &kind)| {
+                    self.tcx.mk_substs(substs.iter().enumerate().map(|(index, &kind)| {
                         if index < generics.parent_count {
                             // Accommodate missing regions in the parent kinds...
                             self.fold_kind_mapping_missing_regions_to_empty(kind)
@@ -912,7 +912,7 @@ fn fold_ty(&mut self, ty: Ty<'tcx>) -> Ty<'tcx> {
                         }
                     }));
 
-                self.tcx.mk_generator(def_id, ty::GeneratorSubsts { substs }, movability)
+                self.tcx.mk_generator(def_id, substs, movability)
             }
 
             ty::Param(..) => {
index 0c83bbc1e5394aafb079097016d09afaedb7702a..3d069425685c7761dd67a7f10e127297e238e54b 100644 (file)
@@ -14,9 +14,11 @@ impl<'tcx> RegionConstraintCollector<'tcx> {
     /// retain the older (arguably incorrect) behavior of the
     /// compiler.
     ///
-    /// NB. The use of snapshot here is mostly an efficiency thing --
-    /// we could search *all* region constraints, but that'd be a
-    /// bigger set and the data structures are not setup for that. If
+    /// NB. Although `_snapshot` isn't used, it's passed in to prove
+    /// that we are in a snapshot, which guarantees that we can just
+    /// search the "undo log" for edges. This is mostly an efficiency
+    /// thing -- we could search *all* region constraints, but that'd be
+    /// a bigger set and the data structures are not set up for that. If
     /// we wind up keeping some form of this check long term, it would
     /// probably be better to remove the snapshot parameter and to
     /// refactor the constraint set.
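
The `_snapshot` parameter described in this comment is a small but reusable API trick: an argument that is never read, passed only so the signature proves the caller is inside a snapshot (and therefore that the undo log covers the edges being searched). A hedged, standalone sketch of the pattern with invented names:

    struct Snapshot { _private: () }

    struct Engine { open_snapshots: u32 }

    impl Engine {
        fn start_snapshot(&mut self) -> Snapshot {
            self.open_snapshots += 1;
            Snapshot { _private: () }
        }
        fn rollback_to(&mut self, _snapshot: Snapshot) {
            self.open_snapshots -= 1;
        }
        // `_snapshot` is never consulted; requiring it means only code that
        // already opened a snapshot can call this, so searching just the
        // recent undo entries (rather than everything) is valid.
        fn cheap_check(&self, _snapshot: &Snapshot) -> bool {
            debug_assert!(self.open_snapshots > 0);
            true
        }
    }
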
index bd9899b644b5e2be6eae2d96af6528a9b34c7bc5..8943fc342c023efa22c6b9f52919df12807e799d 100644 (file)
@@ -35,7 +35,6 @@
 #![feature(const_transmute)]
 #![feature(core_intrinsics)]
 #![feature(drain_filter)]
-#![feature(inner_deref)]
 #![cfg_attr(windows, feature(libc))]
 #![feature(never_type)]
 #![feature(exhaustive_patterns)]
@@ -57,9 +56,8 @@
 #![feature(test)]
 #![feature(in_band_lifetimes)]
 #![feature(crate_visibility_modifier)]
-#![feature(proc_macro_hygiene)]
+#![cfg_attr(bootstrap, feature(proc_macro_hygiene))]
 #![feature(log_syntax)]
-#![feature(mem_take)]
 #![feature(associated_type_bounds)]
 #![feature(rustc_attrs)]
 
 pub mod middle {
     pub mod expr_use_visitor;
     pub mod cstore;
-    pub mod dead;
     pub mod dependency_format;
     pub mod diagnostic_items;
-    pub mod entry;
     pub mod exported_symbols;
     pub mod free_region;
-    pub mod intrinsicck;
     pub mod lib_features;
     pub mod lang_items;
-    pub mod liveness;
     pub mod mem_categorization;
     pub mod privacy;
     pub mod reachable;
index 5906a6388a8bd5209ed84ebb67323ce065d37129..4c28f6372fe2c6db543819ea21a7d9324e4b39ba 100644 (file)
@@ -21,7 +21,8 @@
 declare_lint! {
     pub CONST_ERR,
     Deny,
-    "constant evaluation detected erroneous expression"
+    "constant evaluation detected erroneous expression",
+    report_in_external_macro: true
 }
 
 declare_lint! {
     "detects unreachable patterns"
 }
 
+declare_lint! {
+    pub OVERLAPPING_PATTERNS,
+    Warn,
+    "detects overlapping patterns"
+}
+
 declare_lint! {
     pub UNUSED_MACROS,
     Warn,
@@ -368,6 +375,12 @@ pub mod parser {
         Allow,
         "possible meta-variable misuse at macro definition"
     }
+
+    declare_lint! {
+        pub INCOMPLETE_INCLUDE,
+        Deny,
+        "trailing content in included file"
+    }
 }
 
 declare_lint! {
@@ -416,6 +429,7 @@ pub mod parser {
         DEAD_CODE,
         UNREACHABLE_CODE,
         UNREACHABLE_PATTERNS,
+        OVERLAPPING_PATTERNS,
         UNUSED_MACROS,
         WARNINGS,
         UNUSED_FEATURES,
index 28afe9730a0344659660e1f28d35e3e078af549d..60b1b192d10db13aac2c2479fd2fff0111b5fd7f 100644 (file)
@@ -12,6 +12,7 @@
 use syntax::ast;
 use syntax::attr;
 use syntax::feature_gate;
+use syntax::print::pprust;
 use syntax::source_map::MultiSpan;
 use syntax::symbol::{Symbol, sym};
 
@@ -285,7 +286,7 @@ pub fn push(&mut self, attrs: &[ast::Attribute]) -> BuilderPush {
                             tool_ident.span,
                             E0710,
                             "an unknown tool name found in scoped lint: `{}`",
-                            meta_item.path
+                            pprust::path_to_string(&meta_item.path),
                         );
                         continue;
                     }
index 5b490b701267deae304e620ddce0e8178eb2b305..7443cca822a99b40a4037cf9631805b0c71e4d91 100644 (file)
@@ -28,6 +28,7 @@
 use crate::hir;
 use crate::lint::builtin::BuiltinLintDiagnostics;
 use crate::lint::builtin::parser::{ILL_FORMED_ATTRIBUTE_INPUT, META_VARIABLE_MISUSE};
+use crate::lint::builtin::parser::INCOMPLETE_INCLUDE;
 use crate::session::{Session, DiagnosticMessageId};
 use crate::ty::TyCtxt;
 use crate::ty::query::Providers;
@@ -38,7 +39,7 @@
 use syntax::source_map::{MultiSpan, ExpnKind, DesugaringKind};
 use syntax::early_buffered_lints::BufferedEarlyLintId;
 use syntax::edition::Edition;
-use syntax::ext::base::MacroKind;
+use syntax_expand::base::MacroKind;
 use syntax::symbol::{Symbol, sym};
 use syntax_pos::Span;
 
@@ -83,6 +84,7 @@ pub fn from_parser_lint_id(lint_id: BufferedEarlyLintId) -> &'static Self {
         match lint_id {
             BufferedEarlyLintId::IllFormedAttributeInput => ILL_FORMED_ATTRIBUTE_INPUT,
             BufferedEarlyLintId::MetaVariableMisuse => META_VARIABLE_MISUSE,
+            BufferedEarlyLintId::IncompleteInclude => INCOMPLETE_INCLUDE,
         }
     }
 
index 510787998ad071c969f8cab544042737ecbb4418..2170a288c922bac2d84a27e2c041fdaf9694116c 100644 (file)
@@ -32,6 +32,12 @@ pub struct CrateSource {
     pub rmeta: Option<(PathBuf, PathKind)>,
 }
 
+impl CrateSource {
+    pub fn paths(&self) -> impl Iterator<Item = &PathBuf> {
+        self.dylib.iter().chain(self.rlib.iter()).chain(self.rmeta.iter()).map(|p| &p.0)
+    }
+}
+
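
A hedged usage note on the new `paths()` helper: `Option::iter` yields zero or one items, so chaining the three optional sources produces exactly the library paths that are present. A self-contained sketch of the same shape (`Source` below is an invented stand-in for `CrateSource`, with a `u32` in place of `PathKind`):

    use std::path::PathBuf;

    struct Source {
        dylib: Option<(PathBuf, u32)>,
        rlib: Option<(PathBuf, u32)>,
        rmeta: Option<(PathBuf, u32)>,
    }

    impl Source {
        // Same shape as `CrateSource::paths`: chain the 0-or-1 iterators and
        // project out the path half of each tuple.
        fn paths(&self) -> impl Iterator<Item = &PathBuf> {
            self.dylib.iter().chain(self.rlib.iter()).chain(self.rmeta.iter()).map(|p| &p.0)
        }
    }

    fn main() {
        let s = Source {
            dylib: None,
            rlib: Some((PathBuf::from("libfoo.rlib"), 0)),
            rmeta: Some((PathBuf::from("libfoo.rmeta"), 0)),
        };
        // Prints only the paths that are present.
        for p in s.paths() {
            println!("{}", p.display());
        }
    }
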
 #[derive(RustcEncodable, RustcDecodable, Copy, Clone,
          Ord, PartialOrd, Eq, PartialEq, Debug, HashStable)]
 pub enum DepKind {
@@ -96,6 +102,8 @@ pub enum NativeLibraryKind {
     NativeStaticNobundle,
     /// macOS-specific
     NativeFramework,
+    /// Windows dynamic library without import library.
+    NativeRawDylib,
     /// default way to specify a dynamic library
     NativeUnknown,
 }
@@ -206,7 +214,6 @@ pub trait CrateStore {
     fn crate_is_private_dep_untracked(&self, cnum: CrateNum) -> bool;
     fn crate_disambiguator_untracked(&self, cnum: CrateNum) -> CrateDisambiguator;
     fn crate_hash_untracked(&self, cnum: CrateNum) -> Svh;
-    fn extern_mod_stmt_cnum_untracked(&self, emod_id: ast::NodeId) -> Option<CrateNum>;
     fn item_generics_cloned_untracked(&self, def: DefId, sess: &Session) -> ty::Generics;
     fn postorder_cnums_untracked(&self) -> Vec<CrateNum>;
 
diff --git a/src/librustc/middle/dead.rs b/src/librustc/middle/dead.rs
deleted file mode 100644 (file)
index 7c75a14..0000000
+++ /dev/null
@@ -1,676 +0,0 @@
-// This implements the dead-code warning pass. It follows middle::reachable
-// closely. The idea is that all reachable symbols are live, codes called
-// from live codes are live, and everything else is dead.
-
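
The pass deleted here is a classic mark phase: seed a worklist with roots that are known live, flood along "uses" edges, and report everything never marked as dead. A minimal, hedged sketch of that marking with invented node IDs and an invented `uses` map (not the HIR visitors the real pass drives):

    use std::collections::{HashMap, HashSet};

    // Seed with roots (reachable items, #[allow(dead_code)] items, lang items),
    // then flood along the "uses" edges, as described above.
    fn mark_live(roots: &[u32], uses: &HashMap<u32, Vec<u32>>) -> HashSet<u32> {
        let mut live = HashSet::new();
        let mut worklist: Vec<u32> = roots.to_vec();
        while let Some(id) = worklist.pop() {
            if !live.insert(id) {
                continue; // already visited
            }
            if let Some(callees) = uses.get(&id) {
                worklist.extend(callees.iter().copied());
            }
        }
        live // anything not in this set would get a dead-code warning
    }

    fn main() {
        let mut uses = HashMap::new();
        uses.insert(1, vec![2]); // item 1 uses item 2
        let live = mark_live(&[1], &uses);
        assert!(live.contains(&1) && live.contains(&2) && !live.contains(&3));
    }
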
-use crate::hir::Node;
-use crate::hir::{self, PatKind, TyKind};
-use crate::hir::intravisit::{self, Visitor, NestedVisitorMap};
-use crate::hir::itemlikevisit::ItemLikeVisitor;
-
-use crate::hir::def::{CtorOf, Res, DefKind};
-use crate::hir::CodegenFnAttrFlags;
-use crate::hir::def_id::{DefId, LOCAL_CRATE};
-use crate::lint;
-use crate::middle::privacy;
-use crate::ty::{self, DefIdTree, TyCtxt};
-use crate::util::nodemap::FxHashSet;
-
-use rustc_data_structures::fx::FxHashMap;
-
-use syntax::{ast, attr};
-use syntax::symbol::sym;
-use syntax_pos;
-
-// Any local node that may call something in its body block should be
-// explored. For example, if it's a live Node::Item that is a
-// function, then we should explore its block to check for codes that
-// may need to be marked as live.
-fn should_explore(tcx: TyCtxt<'_>, hir_id: hir::HirId) -> bool {
-    match tcx.hir().find(hir_id) {
-        Some(Node::Item(..)) |
-        Some(Node::ImplItem(..)) |
-        Some(Node::ForeignItem(..)) |
-        Some(Node::TraitItem(..)) |
-        Some(Node::Variant(..)) |
-        Some(Node::AnonConst(..)) |
-        Some(Node::Pat(..)) => true,
-        _ => false
-    }
-}
-
-struct MarkSymbolVisitor<'a, 'tcx> {
-    worklist: Vec<hir::HirId>,
-    tcx: TyCtxt<'tcx>,
-    tables: &'a ty::TypeckTables<'tcx>,
-    live_symbols: FxHashSet<hir::HirId>,
-    repr_has_repr_c: bool,
-    in_pat: bool,
-    inherited_pub_visibility: bool,
-    ignore_variant_stack: Vec<DefId>,
-    // maps from tuple struct constructors to tuple struct items
-    struct_constructors: FxHashMap<hir::HirId, hir::HirId>,
-}
-
-impl<'a, 'tcx> MarkSymbolVisitor<'a, 'tcx> {
-    fn check_def_id(&mut self, def_id: DefId) {
-        if let Some(hir_id) = self.tcx.hir().as_local_hir_id(def_id) {
-            if should_explore(self.tcx, hir_id) || self.struct_constructors.contains_key(&hir_id) {
-                self.worklist.push(hir_id);
-            }
-            self.live_symbols.insert(hir_id);
-        }
-    }
-
-    fn insert_def_id(&mut self, def_id: DefId) {
-        if let Some(hir_id) = self.tcx.hir().as_local_hir_id(def_id) {
-            debug_assert!(!should_explore(self.tcx, hir_id));
-            self.live_symbols.insert(hir_id);
-        }
-    }
-
-    fn handle_res(&mut self, res: Res) {
-        match res {
-            Res::Def(DefKind::Const, _)
-            | Res::Def(DefKind::AssocConst, _)
-            | Res::Def(DefKind::TyAlias, _) => {
-                self.check_def_id(res.def_id());
-            }
-            _ if self.in_pat => {},
-            Res::PrimTy(..) | Res::SelfCtor(..) |
-            Res::Local(..) => {}
-            Res::Def(DefKind::Ctor(CtorOf::Variant, ..), ctor_def_id) => {
-                let variant_id = self.tcx.parent(ctor_def_id).unwrap();
-                let enum_id = self.tcx.parent(variant_id).unwrap();
-                self.check_def_id(enum_id);
-                if !self.ignore_variant_stack.contains(&ctor_def_id) {
-                    self.check_def_id(variant_id);
-                }
-            }
-            Res::Def(DefKind::Variant, variant_id) => {
-                let enum_id = self.tcx.parent(variant_id).unwrap();
-                self.check_def_id(enum_id);
-                if !self.ignore_variant_stack.contains(&variant_id) {
-                    self.check_def_id(variant_id);
-                }
-            }
-            Res::SelfTy(t, i) => {
-                if let Some(t) = t {
-                    self.check_def_id(t);
-                }
-                if let Some(i) = i {
-                    self.check_def_id(i);
-                }
-            }
-            Res::ToolMod | Res::NonMacroAttr(..) | Res::Err => {}
-            _ => {
-                self.check_def_id(res.def_id());
-            }
-        }
-    }
-
-    fn lookup_and_handle_method(&mut self, id: hir::HirId) {
-        if let Some(def_id) = self.tables.type_dependent_def_id(id) {
-            self.check_def_id(def_id);
-        } else {
-            bug!("no type-dependent def for method");
-        }
-    }
-
-    fn handle_field_access(&mut self, lhs: &hir::Expr, hir_id: hir::HirId) {
-        match self.tables.expr_ty_adjusted(lhs).kind {
-            ty::Adt(def, _) => {
-                let index = self.tcx.field_index(hir_id, self.tables);
-                self.insert_def_id(def.non_enum_variant().fields[index].did);
-            }
-            ty::Tuple(..) => {}
-            _ => span_bug!(lhs.span, "named field access on non-ADT"),
-        }
-    }
-
-    fn handle_field_pattern_match(&mut self, lhs: &hir::Pat, res: Res, pats: &[hir::FieldPat]) {
-        let variant = match self.tables.node_type(lhs.hir_id).kind {
-            ty::Adt(adt, _) => adt.variant_of_res(res),
-            _ => span_bug!(lhs.span, "non-ADT in struct pattern")
-        };
-        for pat in pats {
-            if let PatKind::Wild = pat.pat.kind {
-                continue;
-            }
-            let index = self.tcx.field_index(pat.hir_id, self.tables);
-            self.insert_def_id(variant.fields[index].did);
-        }
-    }
-
-    fn mark_live_symbols(&mut self) {
-        let mut scanned = FxHashSet::default();
-        while let Some(id) = self.worklist.pop() {
-            if !scanned.insert(id) {
-                continue
-            }
-
-            // in the case of tuple struct constructors we want to check the item, not the generated
-            // tuple struct constructor function
-            let id = self.struct_constructors.get(&id).cloned().unwrap_or(id);
-
-            if let Some(node) = self.tcx.hir().find(id) {
-                self.live_symbols.insert(id);
-                self.visit_node(node);
-            }
-        }
-    }
-
-    fn visit_node(&mut self, node: Node<'tcx>) {
-        let had_repr_c = self.repr_has_repr_c;
-        self.repr_has_repr_c = false;
-        let had_inherited_pub_visibility = self.inherited_pub_visibility;
-        self.inherited_pub_visibility = false;
-        match node {
-            Node::Item(item) => {
-                match item.kind {
-                    hir::ItemKind::Struct(..) | hir::ItemKind::Union(..) => {
-                        let def_id = self.tcx.hir().local_def_id(item.hir_id);
-                        let def = self.tcx.adt_def(def_id);
-                        self.repr_has_repr_c = def.repr.c();
-
-                        intravisit::walk_item(self, &item);
-                    }
-                    hir::ItemKind::Enum(..) => {
-                        self.inherited_pub_visibility = item.vis.node.is_pub();
-
-                        intravisit::walk_item(self, &item);
-                    }
-                    hir::ItemKind::ForeignMod(..) => {}
-                    _ => {
-                        intravisit::walk_item(self, &item);
-                    }
-                }
-            }
-            Node::TraitItem(trait_item) => {
-                intravisit::walk_trait_item(self, trait_item);
-            }
-            Node::ImplItem(impl_item) => {
-                intravisit::walk_impl_item(self, impl_item);
-            }
-            Node::ForeignItem(foreign_item) => {
-                intravisit::walk_foreign_item(self, &foreign_item);
-            }
-            _ => {}
-        }
-        self.repr_has_repr_c = had_repr_c;
-        self.inherited_pub_visibility = had_inherited_pub_visibility;
-    }
-
-    fn mark_as_used_if_union(&mut self, adt: &ty::AdtDef, fields: &hir::HirVec<hir::Field>) {
-        if adt.is_union() && adt.non_enum_variant().fields.len() > 1 && adt.did.is_local() {
-            for field in fields {
-                let index = self.tcx.field_index(field.hir_id, self.tables);
-                self.insert_def_id(adt.non_enum_variant().fields[index].did);
-            }
-        }
-    }
-}
-
-impl<'a, 'tcx> Visitor<'tcx> for MarkSymbolVisitor<'a, 'tcx> {
-    fn nested_visit_map<'this>(&'this mut self) -> NestedVisitorMap<'this, 'tcx> {
-        NestedVisitorMap::None
-    }
-
-    fn visit_nested_body(&mut self, body: hir::BodyId) {
-        let old_tables = self.tables;
-        self.tables = self.tcx.body_tables(body);
-        let body = self.tcx.hir().body(body);
-        self.visit_body(body);
-        self.tables = old_tables;
-    }
-
-    fn visit_variant_data(&mut self, def: &'tcx hir::VariantData, _: ast::Name,
-                          _: &hir::Generics, _: hir::HirId, _: syntax_pos::Span) {
-        let has_repr_c = self.repr_has_repr_c;
-        let inherited_pub_visibility = self.inherited_pub_visibility;
-        let live_fields = def.fields().iter().filter(|f| {
-            has_repr_c || inherited_pub_visibility || f.vis.node.is_pub()
-        });
-        self.live_symbols.extend(live_fields.map(|f| f.hir_id));
-
-        intravisit::walk_struct_def(self, def);
-    }
-
-    fn visit_expr(&mut self, expr: &'tcx hir::Expr) {
-        match expr.kind {
-            hir::ExprKind::Path(ref qpath @ hir::QPath::TypeRelative(..)) => {
-                let res = self.tables.qpath_res(qpath, expr.hir_id);
-                self.handle_res(res);
-            }
-            hir::ExprKind::MethodCall(..) => {
-                self.lookup_and_handle_method(expr.hir_id);
-            }
-            hir::ExprKind::Field(ref lhs, ..) => {
-                self.handle_field_access(&lhs, expr.hir_id);
-            }
-            hir::ExprKind::Struct(_, ref fields, _) => {
-                if let ty::Adt(ref adt, _) = self.tables.expr_ty(expr).kind {
-                    self.mark_as_used_if_union(adt, fields);
-                }
-            }
-            _ => ()
-        }
-
-        intravisit::walk_expr(self, expr);
-    }
-
-    fn visit_arm(&mut self, arm: &'tcx hir::Arm) {
-        // Inside the body, ignore constructions of variants
-        // necessary for the pattern to match. Those construction sites
-        // can't be reached unless the variant is constructed elsewhere.
-        let len = self.ignore_variant_stack.len();
-        self.ignore_variant_stack.extend(arm.pat.necessary_variants());
-        intravisit::walk_arm(self, arm);
-        self.ignore_variant_stack.truncate(len);
-    }
-
-    fn visit_pat(&mut self, pat: &'tcx hir::Pat) {
-        match pat.kind {
-            PatKind::Struct(ref path, ref fields, _) => {
-                let res = self.tables.qpath_res(path, pat.hir_id);
-                self.handle_field_pattern_match(pat, res, fields);
-            }
-            PatKind::Path(ref qpath) => {
-                let res = self.tables.qpath_res(qpath, pat.hir_id);
-                self.handle_res(res);
-            }
-            _ => ()
-        }
-
-        self.in_pat = true;
-        intravisit::walk_pat(self, pat);
-        self.in_pat = false;
-    }
-
-    fn visit_path(&mut self, path: &'tcx hir::Path, _: hir::HirId) {
-        self.handle_res(path.res);
-        intravisit::walk_path(self, path);
-    }
-
-    fn visit_ty(&mut self, ty: &'tcx hir::Ty) {
-        match ty.kind {
-            TyKind::Def(item_id, _) => {
-                let item = self.tcx.hir().expect_item(item_id.id);
-                intravisit::walk_item(self, item);
-            }
-            _ => ()
-        }
-        intravisit::walk_ty(self, ty);
-    }
-
-    fn visit_anon_const(&mut self, c: &'tcx hir::AnonConst) {
-        self.live_symbols.insert(c.hir_id);
-        intravisit::walk_anon_const(self, c);
-    }
-}
-
-fn has_allow_dead_code_or_lang_attr(
-    tcx: TyCtxt<'_>,
-    id: hir::HirId,
-    attrs: &[ast::Attribute],
-) -> bool {
-    if attr::contains_name(attrs, sym::lang) {
-        return true;
-    }
-
-    // Stable attribute for #[lang = "panic_impl"]
-    if attr::contains_name(attrs, sym::panic_handler) {
-        return true;
-    }
-
-    // (To be) stable attribute for #[lang = "oom"]
-    if attr::contains_name(attrs, sym::alloc_error_handler) {
-        return true;
-    }
-
-    let def_id = tcx.hir().local_def_id(id);
-    let cg_attrs = tcx.codegen_fn_attrs(def_id);
-
-    // #[used], #[no_mangle], #[export_name], etc also keeps the item alive
-    // forcefully, e.g., for placing it in a specific section.
-    if cg_attrs.contains_extern_indicator() ||
-        cg_attrs.flags.contains(CodegenFnAttrFlags::USED) {
-        return true;
-    }
-
-    tcx.lint_level_at_node(lint::builtin::DEAD_CODE, id).0 == lint::Allow
-}
-
-// This visitor seeds items that
-//   1) We want to explicitly consider as live:
-//     * Item annotated with #[allow(dead_code)]
-//         - This is done so that if we want to suppress warnings for a
-//           group of dead functions, we only have to annotate the "root".
-//           For example, if both `f` and `g` are dead and `f` calls `g`,
-//           then annotating `f` with `#[allow(dead_code)]` will suppress
-//           warning for both `f` and `g`.
-//     * Item annotated with #[lang=".."]
-//         - This is because lang items are always callable from elsewhere.
-//   or
-//   2) We are not sure to be live or not
-//     * Implementation of a trait method
-struct LifeSeeder<'k, 'tcx> {
-    worklist: Vec<hir::HirId>,
-    krate: &'k hir::Crate,
-    tcx: TyCtxt<'tcx>,
-    // see `MarkSymbolVisitor::struct_constructors`
-    struct_constructors: FxHashMap<hir::HirId, hir::HirId>,
-}
-
-impl<'v, 'k, 'tcx> ItemLikeVisitor<'v> for LifeSeeder<'k, 'tcx> {
-    fn visit_item(&mut self, item: &hir::Item) {
-        let allow_dead_code = has_allow_dead_code_or_lang_attr(self.tcx,
-                                                               item.hir_id,
-                                                               &item.attrs);
-        if allow_dead_code {
-            self.worklist.push(item.hir_id);
-        }
-        match item.kind {
-            hir::ItemKind::Enum(ref enum_def, _) => {
-                if allow_dead_code {
-                    self.worklist.extend(enum_def.variants.iter().map(|variant| variant.id));
-                }
-
-                for variant in &enum_def.variants {
-                    if let Some(ctor_hir_id) = variant.data.ctor_hir_id() {
-                        self.struct_constructors.insert(ctor_hir_id, variant.id);
-                    }
-                }
-            }
-            hir::ItemKind::Trait(.., ref trait_item_refs) => {
-                for trait_item_ref in trait_item_refs {
-                    let trait_item = self.krate.trait_item(trait_item_ref.id);
-                    match trait_item.kind {
-                        hir::TraitItemKind::Const(_, Some(_)) |
-                        hir::TraitItemKind::Method(_, hir::TraitMethod::Provided(_)) => {
-                            if has_allow_dead_code_or_lang_attr(self.tcx,
-                                                                trait_item.hir_id,
-                                                                &trait_item.attrs) {
-                                self.worklist.push(trait_item.hir_id);
-                            }
-                        }
-                        _ => {}
-                    }
-                }
-            }
-            hir::ItemKind::Impl(.., ref opt_trait, _, ref impl_item_refs) => {
-                for impl_item_ref in impl_item_refs {
-                    let impl_item = self.krate.impl_item(impl_item_ref.id);
-                    if opt_trait.is_some() ||
-                            has_allow_dead_code_or_lang_attr(self.tcx,
-                                                             impl_item.hir_id,
-                                                             &impl_item.attrs) {
-                        self.worklist.push(impl_item_ref.id.hir_id);
-                    }
-                }
-            }
-            hir::ItemKind::Struct(ref variant_data, _) => {
-                if let Some(ctor_hir_id) = variant_data.ctor_hir_id() {
-                    self.struct_constructors.insert(ctor_hir_id, item.hir_id);
-                }
-            }
-            _ => ()
-        }
-    }
-
-    fn visit_trait_item(&mut self, _item: &hir::TraitItem) {
-        // ignore: we are handling this in `visit_item` above
-    }
-
-    fn visit_impl_item(&mut self, _item: &hir::ImplItem) {
-        // ignore: we are handling this in `visit_item` above
-    }
-}
-
-fn create_and_seed_worklist<'tcx>(
-    tcx: TyCtxt<'tcx>,
-    access_levels: &privacy::AccessLevels,
-    krate: &hir::Crate,
-) -> (Vec<hir::HirId>, FxHashMap<hir::HirId, hir::HirId>) {
-    let worklist = access_levels.map.iter().filter_map(|(&id, level)| {
-        if level >= &privacy::AccessLevel::Reachable {
-            Some(id)
-        } else {
-            None
-        }
-    }).chain(
-        // Seed entry point
-        tcx.entry_fn(LOCAL_CRATE).map(|(def_id, _)| tcx.hir().as_local_hir_id(def_id).unwrap())
-    ).collect::<Vec<_>>();
-
-    // Seed implemented trait items
-    let mut life_seeder = LifeSeeder {
-        worklist,
-        krate,
-        tcx,
-        struct_constructors: Default::default(),
-    };
-    krate.visit_all_item_likes(&mut life_seeder);
-
-    (life_seeder.worklist, life_seeder.struct_constructors)
-}
-
-fn find_live<'tcx>(
-    tcx: TyCtxt<'tcx>,
-    access_levels: &privacy::AccessLevels,
-    krate: &hir::Crate,
-) -> FxHashSet<hir::HirId> {
-    let (worklist, struct_constructors) = create_and_seed_worklist(tcx, access_levels, krate);
-    let mut symbol_visitor = MarkSymbolVisitor {
-        worklist,
-        tcx,
-        tables: &ty::TypeckTables::empty(None),
-        live_symbols: Default::default(),
-        repr_has_repr_c: false,
-        in_pat: false,
-        inherited_pub_visibility: false,
-        ignore_variant_stack: vec![],
-        struct_constructors,
-    };
-    symbol_visitor.mark_live_symbols();
-    symbol_visitor.live_symbols
-}
-
-struct DeadVisitor<'tcx> {
-    tcx: TyCtxt<'tcx>,
-    live_symbols: FxHashSet<hir::HirId>,
-}
-
-impl DeadVisitor<'tcx> {
-    fn should_warn_about_item(&mut self, item: &hir::Item) -> bool {
-        let should_warn = match item.kind {
-            hir::ItemKind::Static(..)
-            | hir::ItemKind::Const(..)
-            | hir::ItemKind::Fn(..)
-            | hir::ItemKind::TyAlias(..)
-            | hir::ItemKind::Enum(..)
-            | hir::ItemKind::Struct(..)
-            | hir::ItemKind::Union(..) => true,
-            _ => false
-        };
-        should_warn && !self.symbol_is_live(item.hir_id)
-    }
-
-    fn should_warn_about_field(&mut self, field: &hir::StructField) -> bool {
-        let field_type = self.tcx.type_of(self.tcx.hir().local_def_id(field.hir_id));
-        !field.is_positional()
-            && !self.symbol_is_live(field.hir_id)
-            && !field_type.is_phantom_data()
-            && !has_allow_dead_code_or_lang_attr(self.tcx, field.hir_id, &field.attrs)
-    }
-
-    fn should_warn_about_variant(&mut self, variant: &hir::Variant) -> bool {
-        !self.symbol_is_live(variant.id)
-            && !has_allow_dead_code_or_lang_attr(self.tcx,
-                                                 variant.id,
-                                                 &variant.attrs)
-    }
-
-    fn should_warn_about_foreign_item(&mut self, fi: &hir::ForeignItem) -> bool {
-        !self.symbol_is_live(fi.hir_id)
-            && !has_allow_dead_code_or_lang_attr(self.tcx, fi.hir_id, &fi.attrs)
-    }
-
-    // id := HIR id of an item's definition.
-    fn symbol_is_live(
-        &mut self,
-        id: hir::HirId,
-    ) -> bool {
-        if self.live_symbols.contains(&id) {
-            return true;
-        }
-        // If it's a type whose items are live, then it's live, too.
-        // This is done to handle the case where, for example, the static
-        // method of a private type is used, but the type itself is never
-        // called directly.
-        let def_id = self.tcx.hir().local_def_id(id);
-        let inherent_impls = self.tcx.inherent_impls(def_id);
-        for &impl_did in inherent_impls.iter() {
-            for &item_did in &self.tcx.associated_item_def_ids(impl_did)[..] {
-                if let Some(item_hir_id) = self.tcx.hir().as_local_hir_id(item_did) {
-                    if self.live_symbols.contains(&item_hir_id) {
-                        return true;
-                    }
-                }
-            }
-        }
-        false
-    }
-
-    fn warn_dead_code(&mut self,
-                      id: hir::HirId,
-                      span: syntax_pos::Span,
-                      name: ast::Name,
-                      node_type: &str,
-                      participle: &str) {
-        if !name.as_str().starts_with("_") {
-            self.tcx
-                .lint_hir(lint::builtin::DEAD_CODE,
-                          id,
-                          span,
-                          &format!("{} is never {}: `{}`",
-                                   node_type, participle, name));
-        }
-    }
-}
-
-impl Visitor<'tcx> for DeadVisitor<'tcx> {
-    /// Walk nested items in place so that we don't report dead-code
-    /// on inner functions when the outer function is already getting
-    /// an error. We could do this also by checking the parents, but
-    /// this is how the code is setup and it seems harmless enough.
-    fn nested_visit_map<'this>(&'this mut self) -> NestedVisitorMap<'this, 'tcx> {
-        NestedVisitorMap::All(&self.tcx.hir())
-    }
-
-    fn visit_item(&mut self, item: &'tcx hir::Item) {
-        if self.should_warn_about_item(item) {
-            // For items that have a definition with a signature followed by a
-            // block, point only at the signature.
-            let span = match item.kind {
-                hir::ItemKind::Fn(..) |
-                hir::ItemKind::Mod(..) |
-                hir::ItemKind::Enum(..) |
-                hir::ItemKind::Struct(..) |
-                hir::ItemKind::Union(..) |
-                hir::ItemKind::Trait(..) |
-                hir::ItemKind::Impl(..) => self.tcx.sess.source_map().def_span(item.span),
-                _ => item.span,
-            };
-            let participle = match item.kind {
-                hir::ItemKind::Struct(..) => "constructed", // Issue #52325
-                _ => "used"
-            };
-            self.warn_dead_code(
-                item.hir_id,
-                span,
-                item.ident.name,
-                item.kind.descriptive_variant(),
-                participle,
-            );
-        } else {
-            // Only continue if we didn't warn
-            intravisit::walk_item(self, item);
-        }
-    }
-
-    fn visit_variant(&mut self,
-                     variant: &'tcx hir::Variant,
-                     g: &'tcx hir::Generics,
-                     id: hir::HirId) {
-        if self.should_warn_about_variant(&variant) {
-            self.warn_dead_code(variant.id, variant.span, variant.ident.name,
-                                "variant", "constructed");
-        } else {
-            intravisit::walk_variant(self, variant, g, id);
-        }
-    }
-
-    fn visit_foreign_item(&mut self, fi: &'tcx hir::ForeignItem) {
-        if self.should_warn_about_foreign_item(fi) {
-            self.warn_dead_code(fi.hir_id, fi.span, fi.ident.name,
-                                fi.kind.descriptive_variant(), "used");
-        }
-        intravisit::walk_foreign_item(self, fi);
-    }
-
-    fn visit_struct_field(&mut self, field: &'tcx hir::StructField) {
-        if self.should_warn_about_field(&field) {
-            self.warn_dead_code(field.hir_id, field.span, field.ident.name, "field", "used");
-        }
-        intravisit::walk_struct_field(self, field);
-    }
-
-    fn visit_impl_item(&mut self, impl_item: &'tcx hir::ImplItem) {
-        match impl_item.kind {
-            hir::ImplItemKind::Const(_, body_id) => {
-                if !self.symbol_is_live(impl_item.hir_id) {
-                    self.warn_dead_code(impl_item.hir_id,
-                                        impl_item.span,
-                                        impl_item.ident.name,
-                                        "associated const",
-                                        "used");
-                }
-                self.visit_nested_body(body_id)
-            }
-            hir::ImplItemKind::Method(_, body_id) => {
-                if !self.symbol_is_live(impl_item.hir_id) {
-                    let span = self.tcx.sess.source_map().def_span(impl_item.span);
-                    self.warn_dead_code(impl_item.hir_id, span, impl_item.ident.name, "method",
-                        "used");
-                }
-                self.visit_nested_body(body_id)
-            }
-            hir::ImplItemKind::OpaqueTy(..) |
-            hir::ImplItemKind::TyAlias(..) => {}
-        }
-    }
-
-    // Overwrite so that we don't warn the trait item itself.
-    fn visit_trait_item(&mut self, trait_item: &'tcx hir::TraitItem) {
-        match trait_item.kind {
-            hir::TraitItemKind::Const(_, Some(body_id)) |
-            hir::TraitItemKind::Method(_, hir::TraitMethod::Provided(body_id)) => {
-                self.visit_nested_body(body_id)
-            }
-            hir::TraitItemKind::Const(_, None) |
-            hir::TraitItemKind::Method(_, hir::TraitMethod::Required(_)) |
-            hir::TraitItemKind::Type(..) => {}
-        }
-    }
-}
-
-pub fn check_crate(tcx: TyCtxt<'_>) {
-    let access_levels = &tcx.privacy_access_levels(LOCAL_CRATE);
-    let krate = tcx.hir().krate();
-    let live_symbols = find_live(tcx, access_levels, krate);
-    let mut visitor = DeadVisitor {
-        tcx,
-        live_symbols,
-    };
-    intravisit::walk_crate(&mut visitor, krate);
-}
diff --git a/src/librustc/middle/entry.rs b/src/librustc/middle/entry.rs
deleted file mode 100644 (file)
index 660fe14..0000000
+++ /dev/null
@@ -1,202 +0,0 @@
-use crate::hir::map as hir_map;
-use crate::hir::def_id::{CrateNum, CRATE_DEF_INDEX, DefId, LOCAL_CRATE};
-use crate::session::{config, Session};
-use crate::session::config::EntryFnType;
-use syntax::attr;
-use syntax::entry::EntryPointType;
-use syntax::symbol::sym;
-use syntax_pos::Span;
-use crate::hir::{HirId, Item, ItemKind, ImplItem, TraitItem};
-use crate::hir::itemlikevisit::ItemLikeVisitor;
-use crate::ty::TyCtxt;
-use crate::ty::query::Providers;
-
-struct EntryContext<'a, 'tcx> {
-    session: &'a Session,
-
-    map: &'a hir_map::Map<'tcx>,
-
-    /// The top-level function called `main`.
-    main_fn: Option<(HirId, Span)>,
-
-    /// The function that has attribute named `main`.
-    attr_main_fn: Option<(HirId, Span)>,
-
-    /// The function that has the attribute 'start' on it.
-    start_fn: Option<(HirId, Span)>,
-
-    /// The functions that one might think are `main` but aren't, e.g.
-    /// main functions not defined at the top level. For diagnostics.
-    non_main_fns: Vec<(HirId, Span)> ,
-}
-
-impl<'a, 'tcx> ItemLikeVisitor<'tcx> for EntryContext<'a, 'tcx> {
-    fn visit_item(&mut self, item: &'tcx Item) {
-        let def_id = self.map.local_def_id(item.hir_id);
-        let def_key = self.map.def_key(def_id);
-        let at_root = def_key.parent == Some(CRATE_DEF_INDEX);
-        find_item(item, self, at_root);
-    }
-
-    fn visit_trait_item(&mut self, _trait_item: &'tcx TraitItem) {
-        // Entry fn is never a trait item.
-    }
-
-    fn visit_impl_item(&mut self, _impl_item: &'tcx ImplItem) {
-        // Entry fn is never a trait item.
-    }
-}
-
-fn entry_fn(tcx: TyCtxt<'_>, cnum: CrateNum) -> Option<(DefId, EntryFnType)> {
-    assert_eq!(cnum, LOCAL_CRATE);
-
-    let any_exe = tcx.sess.crate_types.borrow().iter().any(|ty| {
-        *ty == config::CrateType::Executable
-    });
-    if !any_exe {
-        // No need to find a main function.
-        return None;
-    }
-
-    // If the user wants no main function at all, then stop here.
-    if attr::contains_name(&tcx.hir().krate().attrs, sym::no_main) {
-        return None;
-    }
-
-    let mut ctxt = EntryContext {
-        session: tcx.sess,
-        map: tcx.hir(),
-        main_fn: None,
-        attr_main_fn: None,
-        start_fn: None,
-        non_main_fns: Vec::new(),
-    };
-
-    tcx.hir().krate().visit_all_item_likes(&mut ctxt);
-
-    configure_main(tcx, &ctxt)
-}
-
-// Beware, this is duplicated in `libsyntax/entry.rs`, so make sure to keep
-// them in sync.
-fn entry_point_type(item: &Item, at_root: bool) -> EntryPointType {
-    match item.kind {
-        ItemKind::Fn(..) => {
-            if attr::contains_name(&item.attrs, sym::start) {
-                EntryPointType::Start
-            } else if attr::contains_name(&item.attrs, sym::main) {
-                EntryPointType::MainAttr
-            } else if item.ident.name == sym::main {
-                if at_root {
-                    // This is a top-level function so can be `main`.
-                    EntryPointType::MainNamed
-                } else {
-                    EntryPointType::OtherMain
-                }
-            } else {
-                EntryPointType::None
-            }
-        }
-        _ => EntryPointType::None,
-    }
-}
-
-
-fn find_item(item: &Item, ctxt: &mut EntryContext<'_, '_>, at_root: bool) {
-    match entry_point_type(item, at_root) {
-        EntryPointType::MainNamed => {
-            if ctxt.main_fn.is_none() {
-                ctxt.main_fn = Some((item.hir_id, item.span));
-            } else {
-                span_err!(ctxt.session, item.span, E0136,
-                          "multiple `main` functions");
-            }
-        },
-        EntryPointType::OtherMain => {
-            ctxt.non_main_fns.push((item.hir_id, item.span));
-        },
-        EntryPointType::MainAttr => {
-            if ctxt.attr_main_fn.is_none() {
-                ctxt.attr_main_fn = Some((item.hir_id, item.span));
-            } else {
-                struct_span_err!(ctxt.session, item.span, E0137,
-                                 "multiple functions with a `#[main]` attribute")
-                .span_label(item.span, "additional `#[main]` function")
-                .span_label(ctxt.attr_main_fn.unwrap().1, "first `#[main]` function")
-                .emit();
-            }
-        },
-        EntryPointType::Start => {
-            if ctxt.start_fn.is_none() {
-                ctxt.start_fn = Some((item.hir_id, item.span));
-            } else {
-                struct_span_err!(ctxt.session, item.span, E0138, "multiple `start` functions")
-                    .span_label(ctxt.start_fn.unwrap().1, "previous `start` function here")
-                    .span_label(item.span, "multiple `start` functions")
-                    .emit();
-            }
-        }
-        EntryPointType::None => (),
-    }
-}
-
-fn configure_main(tcx: TyCtxt<'_>, visitor: &EntryContext<'_, '_>) -> Option<(DefId, EntryFnType)> {
-    if let Some((hir_id, _)) = visitor.start_fn {
-        Some((tcx.hir().local_def_id(hir_id), EntryFnType::Start))
-    } else if let Some((hir_id, _)) = visitor.attr_main_fn {
-        Some((tcx.hir().local_def_id(hir_id), EntryFnType::Main))
-    } else if let Some((hir_id, _)) = visitor.main_fn {
-        Some((tcx.hir().local_def_id(hir_id), EntryFnType::Main))
-    } else {
-        no_main_err(tcx, visitor);
-        None
-    }
-}
-
-fn no_main_err(tcx: TyCtxt<'_>, visitor: &EntryContext<'_, '_>) {
-    // There is no main function.
-    let mut err = struct_err!(tcx.sess, E0601,
-        "`main` function not found in crate `{}`", tcx.crate_name(LOCAL_CRATE));
-    let filename = &tcx.sess.local_crate_source_file;
-    let note = if !visitor.non_main_fns.is_empty() {
-        for &(_, span) in &visitor.non_main_fns {
-            err.span_note(span, "here is a function named `main`");
-        }
-        err.note("you have one or more functions named `main` not defined at the crate level");
-        err.help("either move the `main` function definitions or attach the `#[main]` attribute \
-                  to one of them");
-        // There were some functions named `main` though. Try to give the user a hint.
-        format!("the main function must be defined at the crate level{}",
-                 filename.as_ref().map(|f| format!(" (in `{}`)", f.display())).unwrap_or_default())
-    } else if let Some(filename) = filename {
-        format!("consider adding a `main` function to `{}`", filename.display())
-    } else {
-        String::from("consider adding a `main` function at the crate level")
-    };
-    let sp = tcx.hir().krate().span;
-    // The file may be empty, which leads to the diagnostic machinery not emitting this
-    // note. This is a relatively simple way to detect that case and emit a span-less
-    // note instead.
-    if let Ok(_) = tcx.sess.source_map().lookup_line(sp.lo()) {
-        err.set_span(sp);
-        err.span_label(sp, &note);
-    } else {
-        err.note(&note);
-    }
-    if tcx.sess.teach(&err.get_code().unwrap()) {
-        err.note("If you don't know the basics of Rust, you can go look to the Rust Book \
-                  to get started: https://doc.rust-lang.org/book/");
-    }
-    err.emit();
-}
-
-pub fn find_entry_point(tcx: TyCtxt<'_>) -> Option<(DefId, EntryFnType)> {
-    tcx.entry_fn(LOCAL_CRATE)
-}
-
-pub fn provide(providers: &mut Providers<'_>) {
-    *providers = Providers {
-        entry_fn,
-        ..*providers
-    };
-}
diff --git a/src/librustc/middle/intrinsicck.rs b/src/librustc/middle/intrinsicck.rs
deleted file mode 100644 (file)
index 7b5aea8..0000000
+++ /dev/null
@@ -1,170 +0,0 @@
-use crate::hir::def::{Res, DefKind};
-use crate::hir::def_id::DefId;
-use crate::ty::{self, Ty, TyCtxt};
-use crate::ty::layout::{LayoutError, Pointer, SizeSkeleton, VariantIdx};
-use crate::ty::query::Providers;
-
-use rustc_target::spec::abi::Abi::RustIntrinsic;
-use rustc_index::vec::Idx;
-use syntax_pos::{Span, sym};
-use crate::hir::intravisit::{self, Visitor, NestedVisitorMap};
-use crate::hir;
-
-fn check_mod_intrinsics(tcx: TyCtxt<'_>, module_def_id: DefId) {
-    tcx.hir().visit_item_likes_in_module(
-        module_def_id,
-        &mut ItemVisitor { tcx }.as_deep_visitor()
-    );
-}
-
-pub fn provide(providers: &mut Providers<'_>) {
-    *providers = Providers {
-        check_mod_intrinsics,
-        ..*providers
-    };
-}
-
-struct ItemVisitor<'tcx> {
-    tcx: TyCtxt<'tcx>,
-}
-
-struct ExprVisitor<'tcx> {
-    tcx: TyCtxt<'tcx>,
-    tables: &'tcx ty::TypeckTables<'tcx>,
-    param_env: ty::ParamEnv<'tcx>,
-}
-
-/// If the type is `Option<T>`, it will return `T`, otherwise
-/// the type itself. Works on most `Option`-like types.
-fn unpack_option_like<'tcx>(tcx: TyCtxt<'tcx>, ty: Ty<'tcx>) -> Ty<'tcx> {
-    let (def, substs) = match ty.kind {
-        ty::Adt(def, substs) => (def, substs),
-        _ => return ty
-    };
-
-    if def.variants.len() == 2 && !def.repr.c() && def.repr.int.is_none() {
-        let data_idx;
-
-        let one = VariantIdx::new(1);
-        let zero = VariantIdx::new(0);
-
-        if def.variants[zero].fields.is_empty() {
-            data_idx = one;
-        } else if def.variants[one].fields.is_empty() {
-            data_idx = zero;
-        } else {
-            return ty;
-        }
-
-        if def.variants[data_idx].fields.len() == 1 {
-            return def.variants[data_idx].fields[0].ty(tcx, substs);
-        }
-    }
-
-    ty
-}
-
-impl ExprVisitor<'tcx> {
-    fn def_id_is_transmute(&self, def_id: DefId) -> bool {
-        self.tcx.fn_sig(def_id).abi() == RustIntrinsic &&
-        self.tcx.item_name(def_id) == sym::transmute
-    }
-
-    fn check_transmute(&self, span: Span, from: Ty<'tcx>, to: Ty<'tcx>) {
-        let sk_from = SizeSkeleton::compute(from, self.tcx, self.param_env);
-        let sk_to = SizeSkeleton::compute(to, self.tcx, self.param_env);
-
-        // Check for same size using the skeletons.
-        if let (Ok(sk_from), Ok(sk_to)) = (sk_from, sk_to) {
-            if sk_from.same_size(sk_to) {
-                return;
-            }
-
-            // Special-case transmutting from `typeof(function)` and
-            // `Option<typeof(function)>` to present a clearer error.
-            let from = unpack_option_like(self.tcx, from);
-            if let (&ty::FnDef(..), SizeSkeleton::Known(size_to)) = (&from.kind, sk_to) {
-                if size_to == Pointer.size(&self.tcx) {
-                    struct_span_err!(self.tcx.sess, span, E0591,
-                                     "can't transmute zero-sized type")
-                        .note(&format!("source type: {}", from))
-                        .note(&format!("target type: {}", to))
-                        .help("cast with `as` to a pointer instead")
-                        .emit();
-                    return;
-                }
-            }
-        }
-
-        // Try to display a sensible error with as much information as possible.
-        let skeleton_string = |ty: Ty<'tcx>, sk| {
-            match sk {
-                Ok(SizeSkeleton::Known(size)) => {
-                    format!("{} bits", size.bits())
-                }
-                Ok(SizeSkeleton::Pointer { tail, .. }) => {
-                    format!("pointer to `{}`", tail)
-                }
-                Err(LayoutError::Unknown(bad)) => {
-                    if bad == ty {
-                        "this type does not have a fixed size".to_owned()
-                    } else {
-                        format!("size can vary because of {}", bad)
-                    }
-                }
-                Err(err) => err.to_string()
-            }
-        };
-
-        let mut err = struct_span_err!(self.tcx.sess, span, E0512,
-                                       "cannot transmute between types of different sizes, \
-                                        or dependently-sized types");
-        if from == to {
-            err.note(&format!("`{}` does not have a fixed size", from));
-        } else {
-            err.note(&format!("source type: `{}` ({})", from, skeleton_string(from, sk_from)))
-                .note(&format!("target type: `{}` ({})", to, skeleton_string(to, sk_to)));
-        }
-        err.emit()
-    }
-}
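
The check implemented above enforces the rule that `transmute` requires source and target types of the same size, reported as E0512 otherwise. A tiny standalone illustration using `std::mem::transmute`, independent of the compiler internals:

    use std::mem;

    fn main() {
        // Same size (4 bytes), so the size check above is satisfied.
        let bytes: [u8; 4] = unsafe { mem::transmute(0xdead_beef_u32) };
        println!("{:?}", bytes);

        // Different sizes; this is the case the code above rejects with E0512:
        // let _bad: u64 = unsafe { mem::transmute(0u32) };
    }
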
-
-impl Visitor<'tcx> for ItemVisitor<'tcx> {
-    fn nested_visit_map<'this>(&'this mut self) -> NestedVisitorMap<'this, 'tcx> {
-        NestedVisitorMap::None
-    }
-
-    fn visit_nested_body(&mut self, body_id: hir::BodyId) {
-        let owner_def_id = self.tcx.hir().body_owner_def_id(body_id);
-        let body = self.tcx.hir().body(body_id);
-        let param_env = self.tcx.param_env(owner_def_id);
-        let tables = self.tcx.typeck_tables_of(owner_def_id);
-        ExprVisitor { tcx: self.tcx, param_env, tables }.visit_body(body);
-        self.visit_body(body);
-    }
-}
-
-impl Visitor<'tcx> for ExprVisitor<'tcx> {
-    fn nested_visit_map<'this>(&'this mut self) -> NestedVisitorMap<'this, 'tcx> {
-        NestedVisitorMap::None
-    }
-
-    fn visit_expr(&mut self, expr: &'tcx hir::Expr) {
-        let res = if let hir::ExprKind::Path(ref qpath) = expr.kind {
-            self.tables.qpath_res(qpath, expr.hir_id)
-        } else {
-            Res::Err
-        };
-        if let Res::Def(DefKind::Fn, did) = res {
-            if self.def_id_is_transmute(did) {
-                let typ = self.tables.node_type(expr.hir_id);
-                let sig = typ.fn_sig(self.tcx);
-                let from = sig.inputs().skip_binder()[0];
-                let to = *sig.output().skip_binder();
-                self.check_transmute(expr.span, from, to);
-            }
-        }
-
-        intravisit::walk_expr(self, expr);
-    }
-}
diff --git a/src/librustc/middle/liveness.rs b/src/librustc/middle/liveness.rs
deleted file mode 100644 (file)
index a654a26..0000000
+++ /dev/null
@@ -1,1568 +0,0 @@
-//! A classic liveness analysis based on dataflow over the AST. Computes,
-//! for each local variable in a function, whether that variable is live
-//! at a given point. Program execution points are identified by their
-//! IDs.
-//!
-//! # Basic idea
-//!
-//! The basic model is that each local variable is assigned an index. We
-//! represent sets of local variables using a vector indexed by this
-//! index. The value in the vector is either 0, indicating the variable
-//! is dead, or the ID of an expression that uses the variable.
-//!
-//! We conceptually walk over the AST in reverse execution order. If we
-//! find a use of a variable, we add it to the set of live variables. If
-//! we find an assignment to a variable, we remove it from the set of live
-//! variables. When we have to merge two flows, we take the union of
-//! those two flows -- if the variable is live on both paths, we simply
-//! pick one ID. In the event of loops, we continue doing this until a
-//! fixed point is reached.
-//!
-//! ## Checking initialization
-//!
-//! At the function entry point, all variables must be dead. If this is
-//! not the case, we can report an error using the ID found in the set of
-//! live variables, which identifies a use of the variable which is not
-//! dominated by an assignment.
-//!
-//! ## Checking moves
-//!
-//! After each explicit move, the variable must be dead.
-//!
-//! ## Computing last uses
-//!
-//! Any use of the variable where the variable is dead afterwards is a
-//! last use.
-//!
-//! # Implementation details
-//!
-//! The actual implementation contains two (nested) walks over the AST.
-//! The outer walk has the job of building up the ir_maps instance for the
-//! enclosing function. On the way down the tree, it identifies those AST
-//! nodes and variable IDs that will be needed for the liveness analysis
-//! and assigns them contiguous IDs. The liveness ID for an AST node is
-//! called a `live_node` (it's a newtype'd `u32`) and the ID for a variable
-//! is called a `variable` (another newtype'd `u32`).
-//!
-//! On the way back up the tree, as we are about to exit from a function
-//! declaration we allocate a `liveness` instance. Now that we know
-//! precisely how many nodes and variables we need, we can allocate all
-//! the various arrays that we will need to precisely the right size. We then
-//! perform the actual propagation on the `liveness` instance.
-//!
-//! This propagation is encoded in the various `propagate_through_*()`
-//! methods. It effectively does a reverse walk of the AST; whenever we
-//! reach a loop node, we iterate until a fixed point is reached.
-//!
-//! ## The `RWU` struct
-//!
-//! At each live node `N`, we track three pieces of information for each
-//! variable `V` (these are encapsulated in the `RWU` struct):
-//!
-//! - `reader`: the `LiveNode` ID of some node which will read the value
-//!    that `V` holds on entry to `N`. Formally: a node `M` such
-//!    that there exists a path `P` from `N` to `M` where `P` does not
-//!    write `V`. If the `reader` is `invalid_node()`, then the current
-//!    value will never be read (the variable is dead, essentially).
-//!
-//! - `writer`: the `LiveNode` ID of some node which will write the
-//!    variable `V` and which is reachable from `N`. Formally: a node `M`
-//!    such that there exists a path `P` from `N` to `M` and `M` writes
-//!    `V`. If the `writer` is `invalid_node()`, then there is no writer
-//!    of `V` that follows `N`.
-//!
-//! - `used`: a boolean value indicating whether `V` is *used*. We
-//!   distinguish a *read* from a *use* in that a *use* is some read that
-//!   is not just used to generate a new value. For example, `x += 1` is
-//!   a read but not a use. This is used to generate better warnings.
-//!
-//! ## Special Variables
-//!
-//! We generate various special variables for various, well, special purposes.
-//! These are described in the `specials` struct:
-//!
-//! - `exit_ln`: a live node that is generated to represent every 'exit' from
-//!   the function, whether it be by explicit return, panic, or other means.
-//!
-//! - `fallthrough_ln`: a live node that represents a fallthrough
-//!
-//! - `clean_exit_var`: a synthetic variable that is only 'read' from the
-//!   fallthrough node. It is only live if the function could converge
-//!   via means other than an explicit `return` expression. That is, it is
-//!   only dead if the end of the function's block can never be reached.
-//!   It is the responsibility of typeck to ensure that there are no
-//!   `return` expressions in a function declared as diverging.
-
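
The doc comment above describes a standard backward liveness walk; a hedged, self-contained sketch of that walk over a single straight-line block (loops and the RWU bookkeeping are omitted, and `Instr` is an invented stand-in for the AST nodes):

    use std::collections::HashSet;

    // One toy instruction: `def` is the variable it writes, `uses` the variables it reads.
    struct Instr { def: Option<u32>, uses: Vec<u32> }

    // Walk in reverse execution order: an assignment kills a variable,
    // a read makes it live, exactly as sketched in the comment above.
    fn live_on_entry(block: &[Instr]) -> HashSet<u32> {
        let mut live = HashSet::new();
        for instr in block.iter().rev() {
            if let Some(d) = instr.def {
                live.remove(&d);
            }
            for &u in &instr.uses {
                live.insert(u);
            }
        }
        live
    }

    fn main() {
        // x = ...; y = x + 1; return y  =>  nothing is live on entry.
        let block = vec![
            Instr { def: Some(0), uses: vec![] },  // x = ...
            Instr { def: Some(1), uses: vec![0] }, // y = x + 1
            Instr { def: None, uses: vec![1] },    // return y
        ];
        assert!(live_on_entry(&block).is_empty());
    }
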
-use self::LiveNodeKind::*;
-use self::VarKind::*;
-
-use crate::hir;
-use crate::hir::{Expr, HirId};
-use crate::hir::def::*;
-use crate::hir::def_id::DefId;
-use crate::hir::intravisit::{self, Visitor, FnKind, NestedVisitorMap};
-use crate::hir::Node;
-use crate::hir::ptr::P;
-use crate::ty::{self, TyCtxt};
-use crate::ty::query::Providers;
-use crate::lint;
-use crate::util::nodemap::{HirIdMap, HirIdSet};
-
-use errors::Applicability;
-use rustc_data_structures::fx::FxIndexMap;
-use std::collections::VecDeque;
-use std::{fmt, u32};
-use std::io::prelude::*;
-use std::io;
-use std::rc::Rc;
-use syntax::ast;
-use syntax::symbol::sym;
-use syntax_pos::Span;
-
-#[derive(Copy, Clone, PartialEq)]
-struct Variable(u32);
-
-#[derive(Copy, Clone, PartialEq)]
-struct LiveNode(u32);
-
-impl Variable {
-    fn get(&self) -> usize { self.0 as usize }
-}
-
-impl LiveNode {
-    fn get(&self) -> usize { self.0 as usize }
-}
-
-#[derive(Copy, Clone, PartialEq, Debug)]
-enum LiveNodeKind {
-    UpvarNode(Span),
-    ExprNode(Span),
-    VarDefNode(Span),
-    ExitNode
-}
-
-fn live_node_kind_to_string(lnk: LiveNodeKind, tcx: TyCtxt<'_>) -> String {
-    let cm = tcx.sess.source_map();
-    match lnk {
-        UpvarNode(s) => {
-            format!("Upvar node [{}]", cm.span_to_string(s))
-        }
-        ExprNode(s) => {
-            format!("Expr node [{}]", cm.span_to_string(s))
-        }
-        VarDefNode(s) => {
-            format!("Var def node [{}]", cm.span_to_string(s))
-        }
-        ExitNode => "Exit node".to_owned(),
-    }
-}
-
-impl<'tcx> Visitor<'tcx> for IrMaps<'tcx> {
-    fn nested_visit_map<'this>(&'this mut self) -> NestedVisitorMap<'this, 'tcx> {
-        NestedVisitorMap::OnlyBodies(&self.tcx.hir())
-    }
-
-    fn visit_fn(&mut self, fk: FnKind<'tcx>, fd: &'tcx hir::FnDecl,
-                b: hir::BodyId, s: Span, id: HirId) {
-        visit_fn(self, fk, fd, b, s, id);
-    }
-
-    fn visit_local(&mut self, l: &'tcx hir::Local) { visit_local(self, l); }
-    fn visit_expr(&mut self, ex: &'tcx Expr) { visit_expr(self, ex); }
-    fn visit_arm(&mut self, a: &'tcx hir::Arm) { visit_arm(self, a); }
-}
-
-fn check_mod_liveness(tcx: TyCtxt<'_>, module_def_id: DefId) {
-    tcx.hir().visit_item_likes_in_module(
-        module_def_id,
-        &mut IrMaps::new(tcx, module_def_id).as_deep_visitor(),
-    );
-}
-
-pub fn provide(providers: &mut Providers<'_>) {
-    *providers = Providers {
-        check_mod_liveness,
-        ..*providers
-    };
-}
-
-impl fmt::Debug for LiveNode {
-    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
-        write!(f, "ln({})", self.get())
-    }
-}
-
-impl fmt::Debug for Variable {
-    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
-        write!(f, "v({})", self.get())
-    }
-}
-
-// ______________________________________________________________________
-// Creating ir_maps
-//
-// This is the first pass and the one that drives the main
-// computation.  It walks up and down the IR once.  On the way down,
-// we count for each function the number of variables as well as
-// liveness nodes.  A liveness node is basically an expression or
-// capture clause that does something of interest: either it has
-// interesting control flow or it uses/defines a local variable.
-//
-// On the way back up, at each function node we create liveness sets
-// (we now know precisely how big to make our various vectors and so
-// forth) and then do the data-flow propagation to compute the set
-// of live variables at each program point.
-//
-// Finally, we run back over the IR one last time and, using the
-// computed liveness, check various safety conditions.  For example,
-// there must be no live nodes at the definition site for a variable
-// unless it has an initializer.  Similarly, each non-mutable local
-// variable must not be assigned if there is some successor
-// assignment.  And so forth.
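(Editorial sketch, not from the original source: a hypothetical body annotated with
what the first pass records, per `visit_fn`, `add_from_pat`, and `visit_expr` below.)

    fn f(c: bool) -> i32 {
        let x = 1;       // `c` (a parameter) and `x` (a binding) become variables;
                         // the binding additionally gets a VarDefNode
        match c {        // interesting control flow: the match gets an ExprNode
            true => x,   // each read of a local (`c`, `x`) gets an ExprNode too
            false => 0,  // literals and other "boring" expressions get no node
        }
    }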
-
-impl LiveNode {
-    fn is_valid(&self) -> bool {
-        self.0 != u32::MAX
-    }
-}
-
-fn invalid_node() -> LiveNode { LiveNode(u32::MAX) }
-
-struct CaptureInfo {
-    ln: LiveNode,
-    var_hid: HirId
-}
-
-#[derive(Copy, Clone, Debug)]
-struct LocalInfo {
-    id: HirId,
-    name: ast::Name,
-    is_shorthand: bool,
-}
-
-#[derive(Copy, Clone, Debug)]
-enum VarKind {
-    Param(HirId, ast::Name),
-    Local(LocalInfo),
-    CleanExit
-}
-
-struct IrMaps<'tcx> {
-    tcx: TyCtxt<'tcx>,
-    body_owner: DefId,
-    num_live_nodes: usize,
-    num_vars: usize,
-    live_node_map: HirIdMap<LiveNode>,
-    variable_map: HirIdMap<Variable>,
-    capture_info_map: HirIdMap<Rc<Vec<CaptureInfo>>>,
-    var_kinds: Vec<VarKind>,
-    lnks: Vec<LiveNodeKind>,
-}
-
-impl IrMaps<'tcx> {
-    fn new(tcx: TyCtxt<'tcx>, body_owner: DefId) -> IrMaps<'tcx> {
-        IrMaps {
-            tcx,
-            body_owner,
-            num_live_nodes: 0,
-            num_vars: 0,
-            live_node_map: HirIdMap::default(),
-            variable_map: HirIdMap::default(),
-            capture_info_map: Default::default(),
-            var_kinds: Vec::new(),
-            lnks: Vec::new(),
-        }
-    }
-
-    fn add_live_node(&mut self, lnk: LiveNodeKind) -> LiveNode {
-        let ln = LiveNode(self.num_live_nodes as u32);
-        self.lnks.push(lnk);
-        self.num_live_nodes += 1;
-
-        debug!("{:?} is of kind {}", ln,
-               live_node_kind_to_string(lnk, self.tcx));
-
-        ln
-    }
-
-    fn add_live_node_for_node(&mut self, hir_id: HirId, lnk: LiveNodeKind) {
-        let ln = self.add_live_node(lnk);
-        self.live_node_map.insert(hir_id, ln);
-
-        debug!("{:?} is node {:?}", ln, hir_id);
-    }
-
-    fn add_variable(&mut self, vk: VarKind) -> Variable {
-        let v = Variable(self.num_vars as u32);
-        self.var_kinds.push(vk);
-        self.num_vars += 1;
-
-        match vk {
-            Local(LocalInfo { id: node_id, .. }) | Param(node_id, _) => {
-                self.variable_map.insert(node_id, v);
-            },
-            CleanExit => {}
-        }
-
-        debug!("{:?} is {:?}", v, vk);
-
-        v
-    }
-
-    fn variable(&self, hir_id: HirId, span: Span) -> Variable {
-        match self.variable_map.get(&hir_id) {
-            Some(&var) => var,
-            None => {
-                span_bug!(span, "no variable registered for id {:?}", hir_id);
-            }
-        }
-    }
-
-    fn variable_name(&self, var: Variable) -> String {
-        match self.var_kinds[var.get()] {
-            Local(LocalInfo { name, .. }) | Param(_, name) => {
-                name.to_string()
-            },
-            CleanExit => "<clean-exit>".to_owned()
-        }
-    }
-
-    fn variable_is_shorthand(&self, var: Variable) -> bool {
-        match self.var_kinds[var.get()] {
-            Local(LocalInfo { is_shorthand, .. }) => is_shorthand,
-            Param(..) | CleanExit => false
-        }
-    }
-
-    fn set_captures(&mut self, hir_id: HirId, cs: Vec<CaptureInfo>) {
-        self.capture_info_map.insert(hir_id, Rc::new(cs));
-    }
-
-    fn lnk(&self, ln: LiveNode) -> LiveNodeKind {
-        self.lnks[ln.get()]
-    }
-}
-
-fn visit_fn<'tcx>(
-    ir: &mut IrMaps<'tcx>,
-    fk: FnKind<'tcx>,
-    decl: &'tcx hir::FnDecl,
-    body_id: hir::BodyId,
-    sp: Span,
-    id: hir::HirId,
-) {
-    debug!("visit_fn");
-
-    // swap in a new set of IR maps for this function body:
-    let def_id = ir.tcx.hir().local_def_id(id);
-    let mut fn_maps = IrMaps::new(ir.tcx, def_id);
-
-    // Don't run unused pass for #[derive()]
-    if let FnKind::Method(..) = fk {
-        let parent = ir.tcx.hir().get_parent_item(id);
-        if let Some(Node::Item(i)) = ir.tcx.hir().find(parent) {
-            if i.attrs.iter().any(|a| a.check_name(sym::automatically_derived)) {
-                return;
-            }
-        }
-    }
-
-    debug!("creating fn_maps: {:p}", &fn_maps);
-
-    let body = ir.tcx.hir().body(body_id);
-
-    for param in &body.params {
-        let is_shorthand = match param.pat.kind {
-            crate::hir::PatKind::Struct(..) => true,
-            _ => false,
-        };
-        param.pat.each_binding(|_bm, hir_id, _x, ident| {
-            debug!("adding parameters {:?}", hir_id);
-            let var = if is_shorthand {
-                Local(LocalInfo {
-                    id: hir_id,
-                    name: ident.name,
-                    is_shorthand: true,
-                })
-            } else {
-                Param(hir_id, ident.name)
-            };
-            fn_maps.add_variable(var);
-        })
-    };
-
-    // gather up the various local variables, significant expressions,
-    // and so forth:
-    intravisit::walk_fn(&mut fn_maps, fk, decl, body_id, sp, id);
-
-    // compute liveness
-    let mut lsets = Liveness::new(&mut fn_maps, body_id);
-    let entry_ln = lsets.compute(&body.value);
-
-    // check for various error conditions
-    lsets.visit_body(body);
-    lsets.warn_about_unused_args(body, entry_ln);
-}
-
-fn add_from_pat(ir: &mut IrMaps<'_>, pat: &P<hir::Pat>) {
-    // For struct patterns, take note of which fields used shorthand
-    // (`x` rather than `x: x`).
-    let mut shorthand_field_ids = HirIdSet::default();
-    let mut pats = VecDeque::new();
-    pats.push_back(pat);
-    while let Some(pat) = pats.pop_front() {
-        use crate::hir::PatKind::*;
-        match &pat.kind {
-            Binding(.., inner_pat) => {
-                pats.extend(inner_pat.iter());
-            }
-            Struct(_, fields, _) => {
-                let ids = fields.iter().filter(|f| f.is_shorthand).map(|f| f.pat.hir_id);
-                shorthand_field_ids.extend(ids);
-            }
-            Ref(inner_pat, _) | Box(inner_pat) => {
-                pats.push_back(inner_pat);
-            }
-            TupleStruct(_, inner_pats, _) | Tuple(inner_pats, _) | Or(inner_pats) => {
-                pats.extend(inner_pats.iter());
-            }
-            Slice(pre_pats, inner_pat, post_pats) => {
-                pats.extend(pre_pats.iter());
-                pats.extend(inner_pat.iter());
-                pats.extend(post_pats.iter());
-            }
-            _ => {}
-        }
-    }
-
-    pat.each_binding(|_, hir_id, _, ident| {
-        ir.add_live_node_for_node(hir_id, VarDefNode(ident.span));
-        ir.add_variable(Local(LocalInfo {
-            id: hir_id,
-            name: ident.name,
-            is_shorthand: shorthand_field_ids.contains(&hir_id)
-        }));
-    });
-}
-
-fn visit_local<'tcx>(ir: &mut IrMaps<'tcx>, local: &'tcx hir::Local) {
-    add_from_pat(ir, &local.pat);
-    intravisit::walk_local(ir, local);
-}
-
-fn visit_arm<'tcx>(ir: &mut IrMaps<'tcx>, arm: &'tcx hir::Arm) {
-    add_from_pat(ir, &arm.pat);
-    intravisit::walk_arm(ir, arm);
-}
-
-fn visit_expr<'tcx>(ir: &mut IrMaps<'tcx>, expr: &'tcx Expr) {
-    match expr.kind {
-      // live nodes required for uses or definitions of variables:
-      hir::ExprKind::Path(hir::QPath::Resolved(_, ref path)) => {
-        debug!("expr {}: path that leads to {:?}", expr.hir_id, path.res);
-        if let Res::Local(var_hir_id) = path.res {
-            let upvars = ir.tcx.upvars(ir.body_owner);
-            if !upvars.map_or(false, |upvars| upvars.contains_key(&var_hir_id)) {
-                ir.add_live_node_for_node(expr.hir_id, ExprNode(expr.span));
-            }
-        }
-        intravisit::walk_expr(ir, expr);
-      }
-      hir::ExprKind::Closure(..) => {
-        // Interesting control flow (for loops can contain labeled
-        // breaks or continues)
-        ir.add_live_node_for_node(expr.hir_id, ExprNode(expr.span));
-
-        // Make a live_node for each captured variable, with the span
-        // being the location that the variable is used.  This results
-        // in better error messages than just pointing at the closure
-        // construction site.
-        let mut call_caps = Vec::new();
-        let closure_def_id = ir.tcx.hir().local_def_id(expr.hir_id);
-        if let Some(upvars) = ir.tcx.upvars(closure_def_id) {
-            let parent_upvars = ir.tcx.upvars(ir.body_owner);
-            call_caps.extend(upvars.iter().filter_map(|(&var_id, upvar)| {
-                let has_parent = parent_upvars
-                    .map_or(false, |upvars| upvars.contains_key(&var_id));
-                if !has_parent {
-                    let upvar_ln = ir.add_live_node(UpvarNode(upvar.span));
-                    Some(CaptureInfo { ln: upvar_ln, var_hid: var_id })
-                } else {
-                    None
-                }
-            }));
-        }
-        ir.set_captures(expr.hir_id, call_caps);
-        let old_body_owner = ir.body_owner;
-        ir.body_owner = closure_def_id;
-        intravisit::walk_expr(ir, expr);
-        ir.body_owner = old_body_owner;
-      }
-
-      // live nodes required for interesting control flow:
-      hir::ExprKind::Match(..) |
-      hir::ExprKind::Loop(..) => {
-        ir.add_live_node_for_node(expr.hir_id, ExprNode(expr.span));
-        intravisit::walk_expr(ir, expr);
-      }
-      hir::ExprKind::Binary(op, ..) if op.node.is_lazy() => {
-        ir.add_live_node_for_node(expr.hir_id, ExprNode(expr.span));
-        intravisit::walk_expr(ir, expr);
-      }
-
-      // otherwise, live nodes are not required:
-      hir::ExprKind::Index(..) |
-      hir::ExprKind::Field(..) |
-      hir::ExprKind::Array(..) |
-      hir::ExprKind::Call(..) |
-      hir::ExprKind::MethodCall(..) |
-      hir::ExprKind::Tup(..) |
-      hir::ExprKind::Binary(..) |
-      hir::ExprKind::AddrOf(..) |
-      hir::ExprKind::Cast(..) |
-      hir::ExprKind::DropTemps(..) |
-      hir::ExprKind::Unary(..) |
-      hir::ExprKind::Break(..) |
-      hir::ExprKind::Continue(_) |
-      hir::ExprKind::Lit(_) |
-      hir::ExprKind::Ret(..) |
-      hir::ExprKind::Block(..) |
-      hir::ExprKind::Assign(..) |
-      hir::ExprKind::AssignOp(..) |
-      hir::ExprKind::Struct(..) |
-      hir::ExprKind::Repeat(..) |
-      hir::ExprKind::InlineAsm(..) |
-      hir::ExprKind::Box(..) |
-      hir::ExprKind::Yield(..) |
-      hir::ExprKind::Type(..) |
-      hir::ExprKind::Err |
-      hir::ExprKind::Path(hir::QPath::TypeRelative(..)) => {
-          intravisit::walk_expr(ir, expr);
-      }
-    }
-}
-
-// ______________________________________________________________________
-// Computing liveness sets
-//
-// Actually we compute just a bit more than just liveness, but we use
-// the same basic propagation framework in all cases.
-
-#[derive(Clone, Copy)]
-struct RWU {
-    reader: LiveNode,
-    writer: LiveNode,
-    used: bool
-}
-
-/// Conceptually, this is like a `Vec<RWU>`. But the number of `RWU`s can get
-/// very large, so it uses a more compact representation that takes advantage
-/// of the fact that when the number of `RWU`s is large, most of them have an
-/// invalid reader and an invalid writer.
-struct RWUTable {
-    /// Each entry in `packed_rwus` is either INV_INV_FALSE, INV_INV_TRUE, or
-    /// an index into `unpacked_rwus`. In the common cases, this compacts the
-    /// 65 bits of data (two 32-bit `LiveNode`s plus a `bool`) into 32; in the
-    /// uncommon cases, it expands the 65 bits into 96.
-    ///
-    /// More compact representations are possible -- e.g., use only 2 bits per
-    /// packed `RWU` and make the secondary table a HashMap that maps from
-    /// indices to `RWU`s -- but this one strikes a good balance between size
-    /// and speed.
-    packed_rwus: Vec<u32>,
-    unpacked_rwus: Vec<RWU>,
-}
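For a rough sense of scale (hypothetical numbers, not from the source): the table is
sized as `num_live_nodes * num_vars` (see `Liveness::new` below), so a body with
2,000 live nodes and 200 variables has 400,000 entries. At 4 bytes per packed `u32`
that is about 1.6 MB, versus roughly 4.8 MB if every entry stored a padded 12-byte
`RWU` directly.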
-
-// A constant representing `RWU { reader: invalid_node(), writer: invalid_node(), used: false }`.
-const INV_INV_FALSE: u32 = u32::MAX;
-
-// A constant representing `RWU { reader: invalid_node(), writer: invalid_node(), used: true }`.
-const INV_INV_TRUE: u32 = u32::MAX - 1;
-
-impl RWUTable {
-    fn new(num_rwus: usize) -> RWUTable {
-        Self {
-            packed_rwus: vec![INV_INV_FALSE; num_rwus],
-            unpacked_rwus: vec![],
-        }
-    }
-
-    fn get(&self, idx: usize) -> RWU {
-        let packed_rwu = self.packed_rwus[idx];
-        match packed_rwu {
-            INV_INV_FALSE => RWU { reader: invalid_node(), writer: invalid_node(), used: false },
-            INV_INV_TRUE => RWU { reader: invalid_node(), writer: invalid_node(), used: true },
-            _ => self.unpacked_rwus[packed_rwu as usize],
-        }
-    }
-
-    fn get_reader(&self, idx: usize) -> LiveNode {
-        let packed_rwu = self.packed_rwus[idx];
-        match packed_rwu {
-            INV_INV_FALSE | INV_INV_TRUE => invalid_node(),
-            _ => self.unpacked_rwus[packed_rwu as usize].reader,
-        }
-    }
-
-    fn get_writer(&self, idx: usize) -> LiveNode {
-        let packed_rwu = self.packed_rwus[idx];
-        match packed_rwu {
-            INV_INV_FALSE | INV_INV_TRUE => invalid_node(),
-            _ => self.unpacked_rwus[packed_rwu as usize].writer,
-        }
-    }
-
-    fn get_used(&self, idx: usize) -> bool {
-        let packed_rwu = self.packed_rwus[idx];
-        match packed_rwu {
-            INV_INV_FALSE => false,
-            INV_INV_TRUE => true,
-            _ => self.unpacked_rwus[packed_rwu as usize].used,
-        }
-    }
-
-    #[inline]
-    fn copy_packed(&mut self, dst_idx: usize, src_idx: usize) {
-        self.packed_rwus[dst_idx] = self.packed_rwus[src_idx];
-    }
-
-    fn assign_unpacked(&mut self, idx: usize, rwu: RWU) {
-        if rwu.reader == invalid_node() && rwu.writer == invalid_node() {
-            // When we overwrite an indexing entry in `self.packed_rwus` with
-            // `INV_INV_{TRUE,FALSE}` we don't remove the corresponding entry
-            // from `self.unpacked_rwus`; it's not worth the effort, and we
-            // can't have entries shifting around anyway.
-            self.packed_rwus[idx] = if rwu.used {
-                INV_INV_TRUE
-            } else {
-                INV_INV_FALSE
-            }
-        } else {
-            // Add a new RWU to `unpacked_rwus` and make `packed_rwus[idx]`
-            // point to it.
-            self.packed_rwus[idx] = self.unpacked_rwus.len() as u32;
-            self.unpacked_rwus.push(rwu);
-        }
-    }
-
-    fn assign_inv_inv(&mut self, idx: usize) {
-        self.packed_rwus[idx] = if self.get_used(idx) {
-            INV_INV_TRUE
-        } else {
-            INV_INV_FALSE
-        };
-    }
-}
-
-#[derive(Copy, Clone)]
-struct Specials {
-    exit_ln: LiveNode,
-    fallthrough_ln: LiveNode,
-    clean_exit_var: Variable
-}
-
-const ACC_READ: u32 = 1;
-const ACC_WRITE: u32 = 2;
-const ACC_USE: u32 = 4;
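(Editorial sketch, not in the original source: how the propagation code below maps a
few invented statements onto these bits, per `propagate_through_expr`, `write_place`,
and `access_path`.)

    fn acc_examples(x: i32, mut y: i32) {
        y = x;      // lhs `y`: ACC_WRITE; the read of `x`: ACC_READ | ACC_USE
        y += 1;     // `y`: ACC_READ | ACC_WRITE -- read only to build the new
                    // value, so not ACC_USE ("a read but not a use")
        drop(y);    // an ordinary read of a path: ACC_READ | ACC_USE
    }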
-
-struct Liveness<'a, 'tcx> {
-    ir: &'a mut IrMaps<'tcx>,
-    tables: &'a ty::TypeckTables<'tcx>,
-    s: Specials,
-    successors: Vec<LiveNode>,
-    rwu_table: RWUTable,
-
-    // mappings from loop node ID to LiveNode
-    // ("break" label should map to loop node ID,
-    // it probably doesn't now)
-    break_ln: HirIdMap<LiveNode>,
-    cont_ln: HirIdMap<LiveNode>,
-}
-
-impl<'a, 'tcx> Liveness<'a, 'tcx> {
-    fn new(ir: &'a mut IrMaps<'tcx>, body: hir::BodyId) -> Liveness<'a, 'tcx> {
-        // Special nodes and variables:
-        // - exit_ln represents the end of the fn, either by return or panic
-        // - implicit_ret_var is a pseudo-variable that represents
-        //   an implicit return
-        let specials = Specials {
-            exit_ln: ir.add_live_node(ExitNode),
-            fallthrough_ln: ir.add_live_node(ExitNode),
-            clean_exit_var: ir.add_variable(CleanExit)
-        };
-
-        let tables = ir.tcx.body_tables(body);
-
-        let num_live_nodes = ir.num_live_nodes;
-        let num_vars = ir.num_vars;
-
-        Liveness {
-            ir,
-            tables,
-            s: specials,
-            successors: vec![invalid_node(); num_live_nodes],
-            rwu_table: RWUTable::new(num_live_nodes * num_vars),
-            break_ln: Default::default(),
-            cont_ln: Default::default(),
-        }
-    }
-
-    fn live_node(&self, hir_id: HirId, span: Span) -> LiveNode {
-        match self.ir.live_node_map.get(&hir_id) {
-          Some(&ln) => ln,
-          None => {
-            // This must be a mismatch between the ir_map construction
-            // above and the propagation code below; the two sets of
-            // code have to agree about which AST nodes are worth
-            // creating liveness nodes for.
-            span_bug!(
-                span,
-                "no live node registered for node {:?}",
-                hir_id);
-          }
-        }
-    }
-
-    fn variable(&self, hir_id: HirId, span: Span) -> Variable {
-        self.ir.variable(hir_id, span)
-    }
-
-    fn define_bindings_in_pat(&mut self, pat: &hir::Pat, mut succ: LiveNode) -> LiveNode {
-        // In an or-pattern, only consider the first pattern; any later patterns
-        // must have the same bindings, and we also consider the first pattern
-        // to be the "authoritative" set of ids.
-        pat.each_binding_or_first(&mut |_, hir_id, pat_sp, ident| {
-            let ln = self.live_node(hir_id, pat_sp);
-            let var = self.variable(hir_id, ident.span);
-            self.init_from_succ(ln, succ);
-            self.define(ln, var);
-            succ = ln;
-        });
-        succ
-    }
-
-    fn idx(&self, ln: LiveNode, var: Variable) -> usize {
-        ln.get() * self.ir.num_vars + var.get()
-    }
-
-    fn live_on_entry(&self, ln: LiveNode, var: Variable) -> Option<LiveNodeKind> {
-        assert!(ln.is_valid());
-        let reader = self.rwu_table.get_reader(self.idx(ln, var));
-        if reader.is_valid() { Some(self.ir.lnk(reader)) } else { None }
-    }
-
-    // Is this variable live on entry to any of its successor nodes?
-    fn live_on_exit(&self, ln: LiveNode, var: Variable)
-                    -> Option<LiveNodeKind> {
-        let successor = self.successors[ln.get()];
-        self.live_on_entry(successor, var)
-    }
-
-    fn used_on_entry(&self, ln: LiveNode, var: Variable) -> bool {
-        assert!(ln.is_valid());
-        self.rwu_table.get_used(self.idx(ln, var))
-    }
-
-    fn assigned_on_entry(&self, ln: LiveNode, var: Variable)
-                         -> Option<LiveNodeKind> {
-        assert!(ln.is_valid());
-        let writer = self.rwu_table.get_writer(self.idx(ln, var));
-        if writer.is_valid() { Some(self.ir.lnk(writer)) } else { None }
-    }
-
-    fn assigned_on_exit(&self, ln: LiveNode, var: Variable)
-                        -> Option<LiveNodeKind> {
-        let successor = self.successors[ln.get()];
-        self.assigned_on_entry(successor, var)
-    }
-
-    fn indices2<F>(&mut self, ln: LiveNode, succ_ln: LiveNode, mut op: F) where
-        F: FnMut(&mut Liveness<'a, 'tcx>, usize, usize),
-    {
-        let node_base_idx = self.idx(ln, Variable(0));
-        let succ_base_idx = self.idx(succ_ln, Variable(0));
-        for var_idx in 0..self.ir.num_vars {
-            op(self, node_base_idx + var_idx, succ_base_idx + var_idx);
-        }
-    }
-
-    fn write_vars<F>(&self,
-                     wr: &mut dyn Write,
-                     ln: LiveNode,
-                     mut test: F)
-                     -> io::Result<()> where
-        F: FnMut(usize) -> LiveNode,
-    {
-        let node_base_idx = self.idx(ln, Variable(0));
-        for var_idx in 0..self.ir.num_vars {
-            let idx = node_base_idx + var_idx;
-            if test(idx).is_valid() {
-                write!(wr, " {:?}", Variable(var_idx as u32))?;
-            }
-        }
-        Ok(())
-    }
-
-
-    #[allow(unused_must_use)]
-    fn ln_str(&self, ln: LiveNode) -> String {
-        let mut wr = Vec::new();
-        {
-            let wr = &mut wr as &mut dyn Write;
-            write!(wr, "[ln({:?}) of kind {:?} reads", ln.get(), self.ir.lnk(ln));
-            self.write_vars(wr, ln, |idx| self.rwu_table.get_reader(idx));
-            write!(wr, "  writes");
-            self.write_vars(wr, ln, |idx| self.rwu_table.get_writer(idx));
-            write!(wr, "  precedes {:?}]", self.successors[ln.get()]);
-        }
-        String::from_utf8(wr).unwrap()
-    }
-
-    fn init_empty(&mut self, ln: LiveNode, succ_ln: LiveNode) {
-        self.successors[ln.get()] = succ_ln;
-
-        // It is not necessary to initialize the RWUs here because they are all
-        // set to INV_INV_FALSE when they are created, and the sets only grow
-        // during iterations.
-    }
-
-    fn init_from_succ(&mut self, ln: LiveNode, succ_ln: LiveNode) {
-        // more efficient version of init_empty() / merge_from_succ()
-        self.successors[ln.get()] = succ_ln;
-
-        self.indices2(ln, succ_ln, |this, idx, succ_idx| {
-            this.rwu_table.copy_packed(idx, succ_idx);
-        });
-        debug!("init_from_succ(ln={}, succ={})",
-               self.ln_str(ln), self.ln_str(succ_ln));
-    }
-
-    fn merge_from_succ(&mut self,
-                       ln: LiveNode,
-                       succ_ln: LiveNode,
-                       first_merge: bool)
-                       -> bool {
-        if ln == succ_ln { return false; }
-
-        let mut changed = false;
-        self.indices2(ln, succ_ln, |this, idx, succ_idx| {
-            let mut rwu = this.rwu_table.get(idx);
-            let succ_rwu = this.rwu_table.get(succ_idx);
-            if succ_rwu.reader.is_valid() && !rwu.reader.is_valid() {
-                rwu.reader = succ_rwu.reader;
-                changed = true
-            }
-
-            if succ_rwu.writer.is_valid() && !rwu.writer.is_valid() {
-                rwu.writer = succ_rwu.writer;
-                changed = true
-            }
-
-            if succ_rwu.used && !rwu.used {
-                rwu.used = true;
-                changed = true;
-            }
-
-            if changed {
-                this.rwu_table.assign_unpacked(idx, rwu);
-            }
-        });
-
-        debug!("merge_from_succ(ln={:?}, succ={}, first_merge={}, changed={})",
-               ln, self.ln_str(succ_ln), first_merge, changed);
-        return changed;
-    }
-
-    // Indicates that a local variable was *defined*; we know that no
-    // uses of the variable can precede the definition (resolve checks
-    // this) so we just clear out all the data.
-    fn define(&mut self, writer: LiveNode, var: Variable) {
-        let idx = self.idx(writer, var);
-        self.rwu_table.assign_inv_inv(idx);
-
-        debug!("{:?} defines {:?} (idx={}): {}", writer, var,
-               idx, self.ln_str(writer));
-    }
-
-    // Either read, write, or both depending on the acc bitset
-    fn acc(&mut self, ln: LiveNode, var: Variable, acc: u32) {
-        debug!("{:?} accesses[{:x}] {:?}: {}",
-               ln, acc, var, self.ln_str(ln));
-
-        let idx = self.idx(ln, var);
-        let mut rwu = self.rwu_table.get(idx);
-
-        if (acc & ACC_WRITE) != 0 {
-            rwu.reader = invalid_node();
-            rwu.writer = ln;
-        }
-
-        // Important: if we both read/write, must do read second
-        // or else the write will override.
-        if (acc & ACC_READ) != 0 {
-            rwu.reader = ln;
-        }
-
-        if (acc & ACC_USE) != 0 {
-            rwu.used = true;
-        }
-
-        self.rwu_table.assign_unpacked(idx, rwu);
-    }
-
-    fn compute(&mut self, body: &hir::Expr) -> LiveNode {
-        debug!("compute: using id for body, {}",
-               self.ir.tcx.hir().hir_to_pretty_string(body.hir_id));
-
-        // the fallthrough exit is only for those cases where we do not
-        // explicitly return:
-        let s = self.s;
-        self.init_from_succ(s.fallthrough_ln, s.exit_ln);
-        self.acc(s.fallthrough_ln, s.clean_exit_var, ACC_READ);
-
-        let entry_ln = self.propagate_through_expr(body, s.fallthrough_ln);
-
-        // hack to skip the loop unless debug! is enabled:
-        debug!("^^ liveness computation results for body {} (entry={:?})", {
-                   for ln_idx in 0..self.ir.num_live_nodes {
-                        debug!("{:?}", self.ln_str(LiveNode(ln_idx as u32)));
-                   }
-                   body.hir_id
-               },
-               entry_ln);
-
-        entry_ln
-    }
-
-    fn propagate_through_block(&mut self, blk: &hir::Block, succ: LiveNode)
-                               -> LiveNode {
-        if blk.targeted_by_break {
-            self.break_ln.insert(blk.hir_id, succ);
-        }
-        let succ = self.propagate_through_opt_expr(blk.expr.as_ref().map(|e| &**e), succ);
-        blk.stmts.iter().rev().fold(succ, |succ, stmt| {
-            self.propagate_through_stmt(stmt, succ)
-        })
-    }
-
-    fn propagate_through_stmt(&mut self, stmt: &hir::Stmt, succ: LiveNode)
-                              -> LiveNode {
-        match stmt.kind {
-            hir::StmtKind::Local(ref local) => {
-                // Note: we mark the variable as defined regardless of whether
-                // there is an initializer.  Initially I had thought to only mark
-                // the live variable as defined if it was initialized, and then we
-                // could check for uninit variables just by scanning what is live
-                // at the start of the function. But that doesn't work so well for
-                // immutable variables defined in a loop:
-                //     loop { let x; x = 5; }
-                // because the "assignment" loops back around and generates an error.
-                //
-                // So now we just check that variables defined w/o an
-                // initializer are not live at the point of their
-                // initialization, which is mildly more complex than checking
-                // once at the func header but otherwise equivalent.
-
-                let succ = self.propagate_through_opt_expr(local.init.as_ref().map(|e| &**e), succ);
-                self.define_bindings_in_pat(&local.pat, succ)
-            }
-            hir::StmtKind::Item(..) => succ,
-            hir::StmtKind::Expr(ref expr) | hir::StmtKind::Semi(ref expr) => {
-                self.propagate_through_expr(&expr, succ)
-            }
-        }
-    }
-
-    fn propagate_through_exprs(&mut self, exprs: &[Expr], succ: LiveNode)
-                               -> LiveNode {
-        exprs.iter().rev().fold(succ, |succ, expr| {
-            self.propagate_through_expr(&expr, succ)
-        })
-    }
-
-    fn propagate_through_opt_expr(&mut self,
-                                  opt_expr: Option<&Expr>,
-                                  succ: LiveNode)
-                                  -> LiveNode {
-        opt_expr.map_or(succ, |expr| self.propagate_through_expr(expr, succ))
-    }
-
-    fn propagate_through_expr(&mut self, expr: &Expr, succ: LiveNode)
-                              -> LiveNode {
-        debug!("propagate_through_expr: {}", self.ir.tcx.hir().hir_to_pretty_string(expr.hir_id));
-
-        match expr.kind {
-            // Interesting cases with control flow or which gen/kill
-            hir::ExprKind::Path(hir::QPath::Resolved(_, ref path)) => {
-                self.access_path(expr.hir_id, path, succ, ACC_READ | ACC_USE)
-            }
-
-            hir::ExprKind::Field(ref e, _) => {
-                self.propagate_through_expr(&e, succ)
-            }
-
-            hir::ExprKind::Closure(..) => {
-                debug!("{} is an ExprKind::Closure",
-                       self.ir.tcx.hir().hir_to_pretty_string(expr.hir_id));
-
-                // the construction of a closure itself is not important,
-                // but we have to consider the closed over variables.
-                let caps = self.ir.capture_info_map.get(&expr.hir_id).cloned().unwrap_or_else(||
-                    span_bug!(expr.span, "no registered caps"));
-
-                caps.iter().rev().fold(succ, |succ, cap| {
-                    self.init_from_succ(cap.ln, succ);
-                    let var = self.variable(cap.var_hid, expr.span);
-                    self.acc(cap.ln, var, ACC_READ | ACC_USE);
-                    cap.ln
-                })
-            }
-
-            // Note that labels have been resolved, so we don't need to look
-            // at the label ident
-            hir::ExprKind::Loop(ref blk, _, _) => {
-                self.propagate_through_loop(expr, &blk, succ)
-            }
-
-            hir::ExprKind::Match(ref e, ref arms, _) => {
-                //
-                //      (e)
-                //       |
-                //       v
-                //     (expr)
-                //     / | \
-                //    |  |  |
-                //    v  v  v
-                //   (..arms..)
-                //    |  |  |
-                //    v  v  v
-                //   (  succ  )
-                //
-                //
-                let ln = self.live_node(expr.hir_id, expr.span);
-                self.init_empty(ln, succ);
-                let mut first_merge = true;
-                for arm in arms {
-                    let body_succ = self.propagate_through_expr(&arm.body, succ);
-
-                    let guard_succ = self.propagate_through_opt_expr(
-                        arm.guard.as_ref().map(|hir::Guard::If(e)| &**e),
-                        body_succ
-                    );
-                    let arm_succ = self.define_bindings_in_pat(&arm.pat, guard_succ);
-                    self.merge_from_succ(ln, arm_succ, first_merge);
-                    first_merge = false;
-                };
-                self.propagate_through_expr(&e, ln)
-            }
-
-            hir::ExprKind::Ret(ref o_e) => {
-                // ignore succ and subst exit_ln:
-                let exit_ln = self.s.exit_ln;
-                self.propagate_through_opt_expr(o_e.as_ref().map(|e| &**e), exit_ln)
-            }
-
-            hir::ExprKind::Break(label, ref opt_expr) => {
-                // Find which label this break jumps to
-                let target = match label.target_id {
-                    Ok(hir_id) => self.break_ln.get(&hir_id),
-                    Err(err) => span_bug!(expr.span, "loop scope error: {}", err),
-                }.cloned();
-
-                // Now that we know the label we're going to,
-                // look it up in the break loop nodes table
-
-                match target {
-                    Some(b) => self.propagate_through_opt_expr(opt_expr.as_ref().map(|e| &**e), b),
-                    None => span_bug!(expr.span, "break to unknown label")
-                }
-            }
-
-            hir::ExprKind::Continue(label) => {
-                // Find which label this expr continues to
-                let sc = label.target_id.unwrap_or_else(|err|
-                    span_bug!(expr.span, "loop scope error: {}", err));
-
-                // Now that we know the label we're going to,
-                // look it up in the continue loop nodes table
-                self.cont_ln.get(&sc).cloned().unwrap_or_else(||
-                    span_bug!(expr.span, "continue to unknown label"))
-            }
-
-            hir::ExprKind::Assign(ref l, ref r) => {
-                // see comment on places in
-                // propagate_through_place_components()
-                let succ = self.write_place(&l, succ, ACC_WRITE);
-                let succ = self.propagate_through_place_components(&l, succ);
-                self.propagate_through_expr(&r, succ)
-            }
-
-            hir::ExprKind::AssignOp(_, ref l, ref r) => {
-                // an overloaded assign op is like a method call
-                if self.tables.is_method_call(expr) {
-                    let succ = self.propagate_through_expr(&l, succ);
-                    self.propagate_through_expr(&r, succ)
-                } else {
-                    // see comment on places in
-                    // propagate_through_place_components()
-                    let succ = self.write_place(&l, succ, ACC_WRITE|ACC_READ);
-                    let succ = self.propagate_through_expr(&r, succ);
-                    self.propagate_through_place_components(&l, succ)
-                }
-            }
-
-            // Uninteresting cases: just propagate in rev exec order
-
-            hir::ExprKind::Array(ref exprs) => {
-                self.propagate_through_exprs(exprs, succ)
-            }
-
-            hir::ExprKind::Struct(_, ref fields, ref with_expr) => {
-                let succ = self.propagate_through_opt_expr(with_expr.as_ref().map(|e| &**e), succ);
-                fields.iter().rev().fold(succ, |succ, field| {
-                    self.propagate_through_expr(&field.expr, succ)
-                })
-            }
-
-            hir::ExprKind::Call(ref f, ref args) => {
-                let m = self.ir.tcx.hir().get_module_parent(expr.hir_id);
-                let succ = if self.ir.tcx.is_ty_uninhabited_from(m, self.tables.expr_ty(expr)) {
-                    self.s.exit_ln
-                } else {
-                    succ
-                };
-                let succ = self.propagate_through_exprs(args, succ);
-                self.propagate_through_expr(&f, succ)
-            }
-
-            hir::ExprKind::MethodCall(.., ref args) => {
-                let m = self.ir.tcx.hir().get_module_parent(expr.hir_id);
-                let succ = if self.ir.tcx.is_ty_uninhabited_from(m, self.tables.expr_ty(expr)) {
-                    self.s.exit_ln
-                } else {
-                    succ
-                };
-
-                self.propagate_through_exprs(args, succ)
-            }
-
-            hir::ExprKind::Tup(ref exprs) => {
-                self.propagate_through_exprs(exprs, succ)
-            }
-
-            hir::ExprKind::Binary(op, ref l, ref r) if op.node.is_lazy() => {
-                let r_succ = self.propagate_through_expr(&r, succ);
-
-                let ln = self.live_node(expr.hir_id, expr.span);
-                self.init_from_succ(ln, succ);
-                self.merge_from_succ(ln, r_succ, false);
-
-                self.propagate_through_expr(&l, ln)
-            }
-
-            hir::ExprKind::Index(ref l, ref r) |
-            hir::ExprKind::Binary(_, ref l, ref r) => {
-                let r_succ = self.propagate_through_expr(&r, succ);
-                self.propagate_through_expr(&l, r_succ)
-            }
-
-            hir::ExprKind::Box(ref e) |
-            hir::ExprKind::AddrOf(_, ref e) |
-            hir::ExprKind::Cast(ref e, _) |
-            hir::ExprKind::Type(ref e, _) |
-            hir::ExprKind::DropTemps(ref e) |
-            hir::ExprKind::Unary(_, ref e) |
-            hir::ExprKind::Yield(ref e, _) |
-            hir::ExprKind::Repeat(ref e, _) => {
-                self.propagate_through_expr(&e, succ)
-            }
-
-            hir::ExprKind::InlineAsm(ref ia, ref outputs, ref inputs) => {
-                let succ = ia.outputs.iter().zip(outputs).rev().fold(succ, |succ, (o, output)| {
-                // see comment on places
-                // in propagate_through_place_components()
-                if o.is_indirect {
-                    self.propagate_through_expr(output, succ)
-                } else {
-                    let acc = if o.is_rw { ACC_WRITE|ACC_READ } else { ACC_WRITE };
-                    let succ = self.write_place(output, succ, acc);
-                    self.propagate_through_place_components(output, succ)
-                }});
-
-                // Inputs are executed first. Propagate last because of rev order
-                self.propagate_through_exprs(inputs, succ)
-            }
-
-            hir::ExprKind::Lit(..) | hir::ExprKind::Err |
-            hir::ExprKind::Path(hir::QPath::TypeRelative(..)) => {
-                succ
-            }
-
-            // Note that labels have been resolved, so we don't need to look
-            // at the label ident
-            hir::ExprKind::Block(ref blk, _) => {
-                self.propagate_through_block(&blk, succ)
-            }
-        }
-    }
-
-    fn propagate_through_place_components(&mut self,
-                                          expr: &Expr,
-                                          succ: LiveNode)
-                                          -> LiveNode {
-        // # Places
-        //
-        // In general, the full flow graph structure for an
-        // assignment/move/etc can be handled in one of two ways,
-        // depending on whether what is being assigned is a "tracked
-        // value" or not. A tracked value is basically a local
-        // variable or argument.
-        //
-        // The two kinds of graphs are:
-        //
-        //    Tracked place          Untracked place
-        // ----------------------++-----------------------
-        //                       ||
-        //         |             ||           |
-        //         v             ||           v
-        //     (rvalue)          ||       (rvalue)
-        //         |             ||           |
-        //         v             ||           v
-        // (write of place)     ||   (place components)
-        //         |             ||           |
-        //         v             ||           v
-        //      (succ)           ||        (succ)
-        //                       ||
-        // ----------------------++-----------------------
-        //
-        // I will cover the two cases in turn:
-        //
-        // # Tracked places
-        //
-        // A tracked place is a local variable/argument `x`.  In
-        // these cases, the link_node where the write occurs is linked
-        // to node id of `x`.  The `write_place()` routine generates
-        // the contents of this node.  There are no subcomponents to
-        // consider.
-        //
-        // # Non-tracked places
-        //
-        // These are places like `x[5]` or `x.f`.  In that case, we
-        // basically ignore the value which is written to but generate
-        // reads for the components---`x` in these two examples.  The
-        // components reads are generated by
-        // `propagate_through_place_components()` (this fn).
-        //
-        // # Illegal places
-        //
-        // It is still possible to observe assignments to non-places;
-        // these errors are detected in the later borrowck pass.  We
-        // just ignore such cases and treat them as reads.
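(Editorial example, not from the original source: two invented assignments showing
the tracked/untracked distinction described above.)

    fn places(mut x: i32, mut arr: [i32; 3]) {
        x = 1;       // tracked place: `write_place` records a write of `x`
        arr[0] = 2;  // untracked place: the write itself is ignored, but this
                     // function generates a read of the base `arr`
    }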
-
-        match expr.kind {
-            hir::ExprKind::Path(_) => succ,
-            hir::ExprKind::Field(ref e, _) => self.propagate_through_expr(&e, succ),
-            _ => self.propagate_through_expr(expr, succ)
-        }
-    }
-
-    // see comment on propagate_through_place()
-    fn write_place(&mut self, expr: &Expr, succ: LiveNode, acc: u32) -> LiveNode {
-        match expr.kind {
-            hir::ExprKind::Path(hir::QPath::Resolved(_, ref path)) => {
-                self.access_path(expr.hir_id, path, succ, acc)
-            }
-
-            // We do not track other places, so just propagate through
-            // to their subcomponents.  Also, it may happen that
-            // non-places occur here, because those are detected in the
-            // later borrowck pass.
-            _ => succ
-        }
-    }
-
-    fn access_var(&mut self, hir_id: HirId, var_hid: HirId, succ: LiveNode, acc: u32, span: Span)
-                  -> LiveNode {
-        let ln = self.live_node(hir_id, span);
-        if acc != 0 {
-            self.init_from_succ(ln, succ);
-            let var = self.variable(var_hid, span);
-            self.acc(ln, var, acc);
-        }
-        ln
-    }
-
-    fn access_path(&mut self, hir_id: HirId, path: &hir::Path, succ: LiveNode, acc: u32)
-                   -> LiveNode {
-        match path.res {
-            Res::Local(hid) => {
-                let upvars = self.ir.tcx.upvars(self.ir.body_owner);
-                if !upvars.map_or(false, |upvars| upvars.contains_key(&hid)) {
-                    self.access_var(hir_id, hid, succ, acc, path.span)
-                } else {
-                    succ
-                }
-            }
-            _ => succ
-        }
-    }
-
-    fn propagate_through_loop(
-        &mut self,
-        expr: &Expr,
-        body: &hir::Block,
-        succ: LiveNode
-    ) -> LiveNode {
-        /*
-        We model control flow like this:
-
-              (expr) <-+
-                |      |
-                v      |
-              (body) --+
-
-        Note that a `continue` expression targeting the `loop` will have a successor of `expr`.
-        Meanwhile, a `break` expression will have a successor of `succ`.
-        */
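        // Editorial note (not in the original source): for a made-up loop like
        //     'l: loop { if stop() { break; } continue 'l; }
        // the `break` is wired to `succ` via `break_ln` (inserted just below),
        // while `continue` is wired back to this loop's `ln` via `cont_ln`.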
-
-        // first iteration:
-        let mut first_merge = true;
-        let ln = self.live_node(expr.hir_id, expr.span);
-        self.init_empty(ln, succ);
-        debug!("propagate_through_loop: using id for loop body {} {}",
-               expr.hir_id, self.ir.tcx.hir().hir_to_pretty_string(body.hir_id));
-
-        self.break_ln.insert(expr.hir_id, succ);
-
-        self.cont_ln.insert(expr.hir_id, ln);
-
-        let body_ln = self.propagate_through_block(body, ln);
-
-        // repeat until fixed point is reached:
-        while self.merge_from_succ(ln, body_ln, first_merge) {
-            first_merge = false;
-            assert_eq!(body_ln, self.propagate_through_block(body, ln));
-        }
-
-        ln
-    }
-}
-
-// _______________________________________________________________________
-// Checking for error conditions
-
-impl<'a, 'tcx> Visitor<'tcx> for Liveness<'a, 'tcx> {
-    fn nested_visit_map<'this>(&'this mut self) -> NestedVisitorMap<'this, 'tcx> {
-        NestedVisitorMap::None
-    }
-
-    fn visit_local(&mut self, local: &'tcx hir::Local) {
-        self.check_unused_vars_in_pat(&local.pat, None, |spans, hir_id, ln, var| {
-            if local.init.is_some() {
-                self.warn_about_dead_assign(spans, hir_id, ln, var);
-            }
-        });
-
-        intravisit::walk_local(self, local);
-    }
-
-    fn visit_expr(&mut self, ex: &'tcx Expr) {
-        check_expr(self, ex);
-    }
-
-    fn visit_arm(&mut self, arm: &'tcx hir::Arm) {
-        self.check_unused_vars_in_pat(&arm.pat, None, |_, _, _, _| {});
-        intravisit::walk_arm(self, arm);
-    }
-}
-
-fn check_expr<'tcx>(this: &mut Liveness<'_, 'tcx>, expr: &'tcx Expr) {
-    match expr.kind {
-        hir::ExprKind::Assign(ref l, _) => {
-            this.check_place(&l);
-        }
-
-        hir::ExprKind::AssignOp(_, ref l, _) => {
-            if !this.tables.is_method_call(expr) {
-                this.check_place(&l);
-            }
-        }
-
-        hir::ExprKind::InlineAsm(ref ia, ref outputs, ref inputs) => {
-            for input in inputs {
-                this.visit_expr(input);
-            }
-
-            // Output operands must be places
-            for (o, output) in ia.outputs.iter().zip(outputs) {
-                if !o.is_indirect {
-                    this.check_place(output);
-                }
-                this.visit_expr(output);
-            }
-        }
-
-        // no correctness conditions related to liveness
-        hir::ExprKind::Call(..) | hir::ExprKind::MethodCall(..) |
-        hir::ExprKind::Match(..) | hir::ExprKind::Loop(..) |
-        hir::ExprKind::Index(..) | hir::ExprKind::Field(..) |
-        hir::ExprKind::Array(..) | hir::ExprKind::Tup(..) | hir::ExprKind::Binary(..) |
-        hir::ExprKind::Cast(..) | hir::ExprKind::DropTemps(..) | hir::ExprKind::Unary(..) |
-        hir::ExprKind::Ret(..) | hir::ExprKind::Break(..) | hir::ExprKind::Continue(..) |
-        hir::ExprKind::Lit(_) | hir::ExprKind::Block(..) | hir::ExprKind::AddrOf(..) |
-        hir::ExprKind::Struct(..) | hir::ExprKind::Repeat(..) |
-        hir::ExprKind::Closure(..) | hir::ExprKind::Path(_) | hir::ExprKind::Yield(..) |
-        hir::ExprKind::Box(..) | hir::ExprKind::Type(..) | hir::ExprKind::Err => {}
-    }
-
-    intravisit::walk_expr(this, expr);
-}
-
-impl<'tcx> Liveness<'_, 'tcx> {
-    fn check_place(&mut self, expr: &'tcx Expr) {
-        match expr.kind {
-            hir::ExprKind::Path(hir::QPath::Resolved(_, ref path)) => {
-                if let Res::Local(var_hid) = path.res {
-                    let upvars = self.ir.tcx.upvars(self.ir.body_owner);
-                    if !upvars.map_or(false, |upvars| upvars.contains_key(&var_hid)) {
-                        // Assignment to an immutable variable or argument: only legal
-                        // if there is no later assignment. If this local is actually
-                        // mutable, then check for a reassignment to flag the mutability
-                        // as being used.
-                        let ln = self.live_node(expr.hir_id, expr.span);
-                        let var = self.variable(var_hid, expr.span);
-                        self.warn_about_dead_assign(vec![expr.span], expr.hir_id, ln, var);
-                    }
-                }
-            }
-            _ => {
-                // For other kinds of places, no checks are required,
-                // and any embedded expressions are actually rvalues
-                intravisit::walk_expr(self, expr);
-            }
-        }
-    }
-
-    fn should_warn(&self, var: Variable) -> Option<String> {
-        let name = self.ir.variable_name(var);
-        if name.is_empty() || name.as_bytes()[0] == b'_' {
-            None
-        } else {
-            Some(name)
-        }
-    }
-
-    fn warn_about_unused_args(&self, body: &hir::Body, entry_ln: LiveNode) {
-        for p in &body.params {
-            self.check_unused_vars_in_pat(&p.pat, Some(entry_ln), |spans, hir_id, ln, var| {
-                if self.live_on_entry(ln, var).is_none() {
-                    self.report_dead_assign(hir_id, spans, var, true);
-                }
-            });
-        }
-    }
-
-    fn check_unused_vars_in_pat(
-        &self,
-        pat: &hir::Pat,
-        entry_ln: Option<LiveNode>,
-        on_used_on_entry: impl Fn(Vec<Span>, HirId, LiveNode, Variable),
-    ) {
-        // In an or-pattern, only consider the first pattern; any later patterns must have the same
-        // bindings, and we also consider the first pattern to be the "authoritative" set of ids.
-        // However, we should take the spans of variables with the same name from the later
-        // patterns so the suggestions to prefix with underscores will apply to those too.
-        let mut vars: FxIndexMap<String, (LiveNode, Variable, HirId, Vec<Span>)> = <_>::default();
-
-        pat.each_binding(|_, hir_id, pat_sp, ident| {
-            let ln = entry_ln.unwrap_or_else(|| self.live_node(hir_id, pat_sp));
-            let var = self.variable(hir_id, ident.span);
-            vars.entry(self.ir.variable_name(var))
-                .and_modify(|(.., spans)| spans.push(ident.span))
-                .or_insert_with(|| (ln, var, hir_id, vec![ident.span]));
-        });
-
-        for (_, (ln, var, id, spans)) in vars {
-            if self.used_on_entry(ln, var) {
-                on_used_on_entry(spans, id, ln, var);
-            } else {
-                self.report_unused(spans, id, ln, var);
-            }
-        }
-    }
-
-    fn report_unused(&self, spans: Vec<Span>, hir_id: HirId, ln: LiveNode, var: Variable) {
-        if let Some(name) = self.should_warn(var).filter(|name| name != "self") {
-            // annoying: for parameters in funcs like `fn(x: i32)
-            // {ret}`, there is only one node, so asking about
-            // assigned_on_exit() is not meaningful.
-            let is_assigned = if ln == self.s.exit_ln {
-                false
-            } else {
-                self.assigned_on_exit(ln, var).is_some()
-            };
-
-            if is_assigned {
-                self.ir.tcx.lint_hir_note(
-                    lint::builtin::UNUSED_VARIABLES,
-                    hir_id,
-                    spans,
-                    &format!("variable `{}` is assigned to, but never used", name),
-                    &format!("consider using `_{}` instead", name),
-                );
-            } else {
-                let mut err = self.ir.tcx.struct_span_lint_hir(
-                    lint::builtin::UNUSED_VARIABLES,
-                    hir_id,
-                    spans.clone(),
-                    &format!("unused variable: `{}`", name),
-                );
-
-                if self.ir.variable_is_shorthand(var) {
-                    if let Node::Binding(pat) = self.ir.tcx.hir().get(hir_id) {
-                        // Handle `ref` and `ref mut`.
-                        let spans = spans.iter()
-                            .map(|_span| (pat.span, format!("{}: _", name)))
-                            .collect();
-
-                        err.multipart_suggestion(
-                            "try ignoring the field",
-                            spans,
-                            Applicability::MachineApplicable,
-                        );
-                    }
-                } else {
-                    err.multipart_suggestion(
-                        "consider prefixing with an underscore",
-                        spans.iter().map(|span| (*span, format!("_{}", name))).collect(),
-                        Applicability::MachineApplicable,
-                    );
-                }
-
-                err.emit()
-            }
-        }
-    }
-
-    fn warn_about_dead_assign(&self, spans: Vec<Span>, hir_id: HirId, ln: LiveNode, var: Variable) {
-        if self.live_on_exit(ln, var).is_none() {
-            self.report_dead_assign(hir_id, spans, var, false);
-        }
-    }
-
-    fn report_dead_assign(&self, hir_id: HirId, spans: Vec<Span>, var: Variable, is_param: bool) {
-        if let Some(name) = self.should_warn(var) {
-            if is_param {
-                self.ir.tcx.struct_span_lint_hir(lint::builtin::UNUSED_ASSIGNMENTS, hir_id, spans,
-                &format!("value passed to `{}` is never read", name))
-                .help("maybe it is overwritten before being read?")
-                .emit();
-            } else {
-                self.ir.tcx.struct_span_lint_hir(lint::builtin::UNUSED_ASSIGNMENTS, hir_id, spans,
-                &format!("value assigned to `{}` is never read", name))
-                .help("maybe it is overwritten before being read?")
-                .emit();
-            }
-        }
-    }
-}
index 31d250fa08215e59503d3326c524a0859a4b21c3..a122d84a5aa7ec84481f8a675d758075cb36723c 100644 (file)
@@ -708,15 +708,22 @@ fn visit_ty(&mut self, ty: &'tcx hir::Ty) {
                     match param.kind {
                         GenericParamKind::Lifetime { .. } => {
                             let (name, reg) = Region::early(&self.tcx.hir(), &mut index, &param);
+                            let def_id = if let Region::EarlyBound(_ ,def_id , _) = reg {
+                                def_id
+                            } else {
+                                bug!();
+                            };
                             if let hir::ParamName::Plain(param_name) = name {
                                 if param_name.name == kw::UnderscoreLifetime {
                                     // Pick the elided lifetime "definition" if one exists
                                     // and use it to make an elision scope.
+                                    self.lifetime_uses.insert(def_id.clone(), LifetimeUseSet::Many);
                                     elision = Some(reg);
                                 } else {
                                     lifetimes.insert(name, reg);
                                 }
                             } else {
+                                self.lifetime_uses.insert(def_id.clone(), LifetimeUseSet::Many);
                                 lifetimes.insert(name, reg);
                             }
                         }
@@ -1615,7 +1622,6 @@ fn check_uses_for_lifetimes_defined_by_scope(&mut self) {
                         _ => None,
                     } {
                         debug!("id = {:?} span = {:?} name = {:?}", id, span, name);
-
                         if name.name == kw::UnderscoreLifetime {
                             continue;
                         }
index 30a88d155f5f86fd4fe1b8a8ae3caa78662d5de4..302c11f309d90b21608d8fa6eaef81eb549f94de 100644 (file)
@@ -485,7 +485,13 @@ pub fn provide(providers: &mut Providers<'_>) {
 }
 
 pub fn report_unstable(
-    sess: &Session, feature: Symbol, reason: Option<Symbol>, issue: u32, is_soft: bool, span: Span
+    sess: &Session,
+    feature: Symbol,
+    reason: Option<Symbol>,
+    issue: u32,
+    is_soft: bool,
+    span: Span,
+    soft_handler: impl FnOnce(&'static lint::Lint, Span, &str),
 ) {
     let msg = match reason {
         Some(r) => format!("use of unstable library feature '{}': {}", feature, r),
@@ -511,7 +517,7 @@ pub fn report_unstable(
     let fresh = sess.one_time_diagnostics.borrow_mut().insert(error_id);
     if fresh {
         if is_soft {
-            sess.buffer_lint(lint::builtin::SOFT_UNSTABLE, CRATE_NODE_ID, span, &msg);
+            soft_handler(lint::builtin::SOFT_UNSTABLE, span, &msg)
         } else {
             emit_feature_err(
                 &sess.parse_sess, feature, span, GateIssue::Library(Some(issue)), &msg
@@ -779,10 +785,12 @@ pub fn eval_stability(self, def_id: DefId, id: Option<HirId>, span: Span) -> Eva
     /// Additionally, this function will also check if the item is deprecated. If so, and `id` is
     /// not `None`, a deprecated lint attached to `id` will be emitted.
     pub fn check_stability(self, def_id: DefId, id: Option<HirId>, span: Span) {
+        let soft_handler =
+            |lint, span, msg: &_| self.lint_hir(lint, id.unwrap_or(hir::CRATE_HIR_ID), span, msg);
         match self.eval_stability(def_id, id, span) {
             EvalResult::Allow => {}
             EvalResult::Deny { feature, reason, issue, is_soft } =>
-                report_unstable(self.sess, feature, reason, issue, is_soft, span),
+                report_unstable(self.sess, feature, reason, issue, is_soft, span, soft_handler),
             EvalResult::Unmarked => {
                 // The API could be uncallable for other reasons, for example when a private module
                 // was referenced.
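
Note on the hunk above: `report_unstable` now takes a `soft_handler` callback, so each caller decides how a soft `SOFT_UNSTABLE` failure is reported (a buffered crate-level lint, or a lint attached to a specific HIR node as in `check_stability`). A minimal standalone sketch of that callback-injection shape, using stand-in types rather than the real rustc ones:

    // Hypothetical stand-in for report_unstable: the caller injects how a
    // "soft" failure is surfaced, while hard failures stay centralized.
    fn report<F: FnOnce(&str)>(is_soft: bool, msg: &str, soft_handler: F) {
        if is_soft {
            soft_handler(msg); // e.g. buffer a lint or attach it to a node
        } else {
            panic!("hard error: {}", msg); // stands in for emit_feature_err
        }
    }

    fn main() {
        report(true, "use of unstable library feature 'foo'", |m| println!("lint: {}", m));
    }
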
index 71967b513a049455ec509bf8a730a6e45b63cf40..d918b9ee67347e8ef4be0409e75c1c35a78da0e4 100644 (file)
@@ -363,6 +363,8 @@ pub enum UndefinedBehaviorInfo {
     UbExperimental(String),
     /// Unreachable code was executed.
     Unreachable,
+    /// An enum discriminant was set to a value which was outside the range of valid values.
+    InvalidDiscriminant(ScalarMaybeUndef),
 }
 
 impl fmt::Debug for UndefinedBehaviorInfo {
@@ -373,6 +375,8 @@ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
                 write!(f, "{}", msg),
             Unreachable =>
                 write!(f, "entered unreachable code"),
+            InvalidDiscriminant(val) =>
+                write!(f, "encountered invalid enum discriminant {}", val),
         }
     }
 }
@@ -389,10 +393,6 @@ pub enum UnsupportedOpInfo<'tcx> {
     /// Free-form case. Only for errors that are never caught!
     Unsupported(String),
 
-    /// FIXME(#64506) Error used to work around accessing projections of
-    /// uninhabited types.
-    UninhabitedValue,
-
     // -- Everything below is not categorized yet --
     FunctionAbiMismatch(Abi, Abi),
     FunctionArgMismatch(Ty<'tcx>, Ty<'tcx>),
@@ -404,7 +404,6 @@ pub enum UnsupportedOpInfo<'tcx> {
     InvalidMemoryAccess,
     InvalidFunctionPointer,
     InvalidBool,
-    InvalidDiscriminant(ScalarMaybeUndef),
     PointerOutOfBounds {
         ptr: Pointer,
         msg: CheckInAllocMsg,
@@ -489,8 +488,6 @@ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
                 write!(f, "incorrect alloc info: expected size {} and align {}, \
                            got size {} and align {}",
                     size.bytes(), align.bytes(), size2.bytes(), align2.bytes()),
-            InvalidDiscriminant(val) =>
-                write!(f, "encountered invalid enum discriminant {}", val),
             InvalidMemoryAccess =>
                 write!(f, "tried to access memory through an invalid pointer"),
             DanglingPointerDeref =>
@@ -556,8 +553,6 @@ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
                     not a power of two"),
             Unsupported(ref msg) =>
                 write!(f, "{}", msg),
-            UninhabitedValue =>
-                write!(f, "tried to use an uninhabited value"),
         }
     }
 }
index e925d7429fff446f269d00b145b082a09aef63c3..6c31d54e081c42b9ee62c787ff815eb7e58f02c8 100644 (file)
@@ -470,6 +470,14 @@ pub fn unwrap_memory(&self, id: AllocId) -> &'tcx Allocation {
         }
     }
 
+    /// Panics if the `AllocId` does not refer to a function
+    pub fn unwrap_fn(&self, id: AllocId) -> Instance<'tcx> {
+        match self.get(id) {
+            Some(GlobalAlloc::Function(instance)) => instance,
+            _ => bug!("expected allocation ID {} to point to a function", id),
+        }
+    }
+
     /// Freezes an `AllocId` created with `reserve` by pointing it at an `Allocation`. Trying to
     /// call this function twice, even with the same `Allocation` will ICE the compiler.
     pub fn set_alloc_id_memory(&mut self, id: AllocId, mem: &'tcx Allocation) {
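
Note on the hunk above: `unwrap_fn` mirrors the existing `unwrap_memory`, a convenience accessor that ICEs (via `bug!`) when the `AllocId` resolves to the wrong kind of global allocation. A rough standalone sketch of that pattern, with hypothetical types standing in for the interned rustc ones:

    use std::collections::HashMap;

    #[derive(Clone)]
    enum GlobalAlloc {
        Function(String), // stands in for Instance<'tcx>
        Memory(Vec<u8>),  // stands in for &'tcx Allocation
    }

    struct AllocMap(HashMap<u64, GlobalAlloc>);

    impl AllocMap {
        fn get(&self, id: u64) -> Option<GlobalAlloc> {
            self.0.get(&id).cloned()
        }

        /// Panics if `id` does not refer to a function.
        fn unwrap_fn(&self, id: u64) -> String {
            match self.get(id) {
                Some(GlobalAlloc::Function(instance)) => instance,
                _ => panic!("expected allocation ID {} to point to a function", id),
            }
        }
    }
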
index 32f45cd9d472035ab601b8abc78a4b85396acbdd..bbf00cc23ae8835008e18b6fc596fdf440afe47a 100644 (file)
@@ -343,14 +343,19 @@ pub fn to_bits_or_ptr(
         }
     }
 
+    #[inline(always)]
+    pub fn check_raw(data: u128, size: u8, target_size: Size) {
+        assert_eq!(target_size.bytes(), size as u64);
+        assert_ne!(size, 0, "you should never look at the bits of a ZST");
+        Scalar::check_data(data, size);
+    }
+
     /// Do not call this method!  Use either `assert_bits` or `force_bits`.
     #[inline]
     pub fn to_bits(self, target_size: Size) -> InterpResult<'tcx, u128> {
         match self {
             Scalar::Raw { data, size } => {
-                assert_eq!(target_size.bytes(), size as u64);
-                assert_ne!(size, 0, "you should never look at the bits of a ZST");
-                Scalar::check_data(data, size);
+                Self::check_raw(data, size, target_size);
                 Ok(data)
             }
             Scalar::Ptr(_) => throw_unsup!(ReadPointerAsBytes),
index 59b4f24073398bfd2ae2f221793137b98aae414c..9ac1465cb0ba9a0f58dd773a7cdfe0591e28b73e 100644 (file)
@@ -15,7 +15,7 @@
 use crate::ty::print::{FmtPrinter, Printer};
 use crate::ty::subst::{Subst, SubstsRef};
 use crate::ty::{
-    self, AdtDef, CanonicalUserTypeAnnotations, GeneratorSubsts, Region, Ty, TyCtxt,
+    self, AdtDef, CanonicalUserTypeAnnotations, Region, Ty, TyCtxt,
     UserTypeAnnotationIndex,
 };
 
@@ -2189,7 +2189,7 @@ pub enum AggregateKind<'tcx> {
     Adt(&'tcx AdtDef, VariantIdx, SubstsRef<'tcx>, Option<UserTypeAnnotationIndex>, Option<usize>),
 
     Closure(DefId, SubstsRef<'tcx>),
-    Generator(DefId, GeneratorSubsts<'tcx>, hir::GeneratorMovability),
+    Generator(DefId, SubstsRef<'tcx>, hir::GeneratorMovability),
 }
 
 #[derive(Copy, Clone, Debug, PartialEq, Eq, RustcEncodable, RustcDecodable, HashStable)]
@@ -2602,7 +2602,14 @@ fn fmt(&self, fmt: &mut Formatter<'_>) -> fmt::Result {
 impl<'tcx> Display for Constant<'tcx> {
     fn fmt(&self, fmt: &mut Formatter<'_>) -> fmt::Result {
         write!(fmt, "const ")?;
-        write!(fmt, "{}", self.literal)
+        // FIXME: make the default pretty-printing of raw pointers more detailed. Here we output the
+        // debug representation of raw pointers, so that the raw pointers in the MIR dump output are
+        // detailed and not just '{pointer}'.
+        if let ty::RawPtr(_) = self.literal.ty.kind {
+            write!(fmt, "{:?} : {}", self.literal.val, self.literal.ty)
+        } else {
+            write!(fmt, "{}", self.literal)
+        }
     }
 }
 
index 313b2a5d50a305784df2deacd9fdc395c99b42ec..265ac975ed7a2c1f2e699da4b2f88af94584cf7a 100644 (file)
@@ -386,6 +386,7 @@ fn item_sort_key<'tcx>(tcx: TyCtxt<'tcx>, item: MonoItem<'tcx>) -> ItemSortKey {
                             tcx.hir().as_local_hir_id(def_id)
                         }
                         InstanceDef::VtableShim(..) |
+                        InstanceDef::ReifyShim(..) |
                         InstanceDef::Intrinsic(..) |
                         InstanceDef::FnPtrShim(..) |
                         InstanceDef::Virtual(..) |
index 26f718e858da8ea85b2c52bb935a280692b6ca91..e87aabf9a0566498cbe1cd615819a20a58c1a2b6 100644 (file)
@@ -197,7 +197,7 @@ pub fn ty<D>(&self, local_decls: &D, tcx: TyCtxt<'tcx>) -> Ty<'tcx>
                 let ty = place.ty(local_decls, tcx).ty;
                 match ty.kind {
                     ty::Adt(adt_def, _) => adt_def.repr.discr_type().to_ty(tcx),
-                    ty::Generator(_, substs, _) => substs.discr_ty(tcx),
+                    ty::Generator(_, substs, _) => substs.as_generator().discr_ty(tcx),
                     _ => {
                         // This can only be `0`, for now, so `u8` will suffice.
                         tcx.types.u8
index ed8086b8fafec5c200d9de0388681102237f9ce0..fef406e8987834328e3cf3aca97d58bec78e48a2 100644 (file)
@@ -1,5 +1,5 @@
 use crate::ty::subst::SubstsRef;
-use crate::ty::{CanonicalUserTypeAnnotation, GeneratorSubsts, Ty};
+use crate::ty::{CanonicalUserTypeAnnotation, Ty};
 use crate::mir::*;
 use syntax_pos::Span;
 
@@ -158,22 +158,7 @@ fn visit_place_base(&mut self,
                 self.super_place_base(base, context, location);
             }
 
-            fn visit_projection(&mut self,
-                                base: & $($mutability)? PlaceBase<'tcx>,
-                                projection: & $($mutability)? [PlaceElem<'tcx>],
-                                context: PlaceContext,
-                                location: Location) {
-                self.super_projection(base, projection, context, location);
-            }
-
-            fn visit_projection_elem(&mut self,
-                                     base: & $($mutability)? PlaceBase<'tcx>,
-                                     proj_base: & $($mutability)? [PlaceElem<'tcx>],
-                                     elem: & $($mutability)? PlaceElem<'tcx>,
-                                     context: PlaceContext,
-                                     location: Location) {
-                self.super_projection_elem(base, proj_base, elem, context, location);
-            }
+            visit_place_fns!($($mutability)?);
 
             fn visit_constant(&mut self,
                               constant: & $($mutability)? Constant<'tcx>,
@@ -230,12 +215,6 @@ fn visit_substs(&mut self,
                 self.super_substs(substs);
             }
 
-            fn visit_generator_substs(&mut self,
-                                      substs: & $($mutability)? GeneratorSubsts<'tcx>,
-                                    _: Location) {
-                self.super_generator_substs(substs);
-            }
-
             fn visit_local_decl(&mut self,
                                 local: Local,
                                 local_decl: & $($mutability)? LocalDecl<'tcx>) {
@@ -628,7 +607,7 @@ fn super_rvalue(&mut self,
                                 generator_substs,
                                 _movability,
                             ) => {
-                                self.visit_generator_substs(generator_substs, location);
+                                self.visit_substs(generator_substs, location);
                             }
                         }
 
@@ -687,28 +666,6 @@ fn super_retag(&mut self,
                 );
             }
 
-            fn super_place(&mut self,
-                            place: & $($mutability)? Place<'tcx>,
-                            context: PlaceContext,
-                            location: Location) {
-                let mut context = context;
-
-                if !place.projection.is_empty() {
-                    context = if context.is_mutating_use() {
-                        PlaceContext::MutatingUse(MutatingUseContext::Projection)
-                    } else {
-                        PlaceContext::NonMutatingUse(NonMutatingUseContext::Projection)
-                    };
-                }
-
-                self.visit_place_base(& $($mutability)? place.base, context, location);
-
-                self.visit_projection(& $($mutability)? place.base,
-                                      & $($mutability)? place.projection,
-                                      context,
-                                      location);
-            }
-
             fn super_place_base(&mut self,
                                 place_base: & $($mutability)? PlaceBase<'tcx>,
                                 context: PlaceContext,
@@ -723,45 +680,6 @@ fn super_place_base(&mut self,
                 }
             }
 
-            fn super_projection(&mut self,
-                                base: & $($mutability)? PlaceBase<'tcx>,
-                                projection: & $($mutability)? [PlaceElem<'tcx>],
-                                context: PlaceContext,
-                                location: Location) {
-                let mut cursor = projection;
-                while let [proj_base @ .., elem] = cursor {
-                    cursor = proj_base;
-                    self.visit_projection_elem(base, cursor, elem, context, location);
-                }
-            }
-
-            fn super_projection_elem(&mut self,
-                                     _base: & $($mutability)? PlaceBase<'tcx>,
-                                     _proj_base: & $($mutability)? [PlaceElem<'tcx>],
-                                     elem: & $($mutability)? PlaceElem<'tcx>,
-                                     _context: PlaceContext,
-                                     location: Location) {
-                match elem {
-                    ProjectionElem::Field(_field, ty) => {
-                        self.visit_ty(ty, TyContext::Location(location));
-                    }
-                    ProjectionElem::Index(local) => {
-                        self.visit_local(
-                            local,
-                            PlaceContext::NonMutatingUse(NonMutatingUseContext::Copy),
-                            location
-                        );
-                    }
-                    ProjectionElem::Deref |
-                    ProjectionElem::Subslice { from: _, to: _ } |
-                    ProjectionElem::ConstantIndex { offset: _,
-                                                    min_length: _,
-                                                    from_end: _ } |
-                    ProjectionElem::Downcast(_, _) => {
-                    }
-                }
-            }
-
             fn super_local_decl(&mut self,
                                 local: Local,
                                 local_decl: & $($mutability)? LocalDecl<'tcx>) {
@@ -846,10 +764,6 @@ fn super_const(&mut self, _const: & $($mutability)? &'tcx ty::Const<'tcx>) {
             fn super_substs(&mut self, _substs: & $($mutability)? SubstsRef<'tcx>) {
             }
 
-            fn super_generator_substs(&mut self,
-                                      _substs: & $($mutability)? GeneratorSubsts<'tcx>) {
-            }
-
             // Convenience methods
 
             fn visit_location(&mut self, body: & $($mutability)? Body<'tcx>, location: Location) {
@@ -868,6 +782,141 @@ fn visit_location(&mut self, body: & $($mutability)? Body<'tcx>, location: Locat
     }
 }
 
+macro_rules! visit_place_fns {
+    (mut) => (
+        fn super_place(
+            &mut self,
+            place: &mut Place<'tcx>,
+            context: PlaceContext,
+            location: Location,
+        ) {
+            self.visit_place_base(&mut place.base, context, location);
+
+            if let Some(new_projection) = self.process_projection(&place.projection) {
+                place.projection = new_projection;
+            }
+        }
+
+        fn process_projection(
+            &mut self,
+            projection: &'a [PlaceElem<'tcx>],
+        ) -> Option<Box<[PlaceElem<'tcx>]>> {
+            let mut projection = Cow::Borrowed(projection);
+
+            for i in 0..projection.len() {
+                if let Some(elem) = projection.get(i) {
+                    if let Some(elem) = self.process_projection_elem(elem) {
+                        let vec = projection.to_mut();
+                        vec[i] = elem;
+                    }
+                }
+            }
+
+            match projection {
+                Cow::Borrowed(_) => None,
+                Cow::Owned(vec) => Some(vec.into_boxed_slice()),
+            }
+        }
+
+        fn process_projection_elem(
+            &mut self,
+            _elem: &PlaceElem<'tcx>,
+        ) -> Option<PlaceElem<'tcx>> {
+            None
+        }
+    );
+
+    () => (
+        fn visit_projection(
+            &mut self,
+            base: &PlaceBase<'tcx>,
+            projection: &[PlaceElem<'tcx>],
+            context: PlaceContext,
+            location: Location,
+        ) {
+            self.super_projection(base, projection, context, location);
+        }
+
+        fn visit_projection_elem(
+            &mut self,
+            base: &PlaceBase<'tcx>,
+            proj_base: &[PlaceElem<'tcx>],
+            elem: &PlaceElem<'tcx>,
+            context: PlaceContext,
+            location: Location,
+        ) {
+            self.super_projection_elem(base, proj_base, elem, context, location);
+        }
+
+        fn super_place(
+            &mut self,
+            place: &Place<'tcx>,
+            context: PlaceContext,
+            location: Location,
+        ) {
+            let mut context = context;
+
+            if !place.projection.is_empty() {
+                context = if context.is_mutating_use() {
+                    PlaceContext::MutatingUse(MutatingUseContext::Projection)
+                } else {
+                    PlaceContext::NonMutatingUse(NonMutatingUseContext::Projection)
+                };
+            }
+
+            self.visit_place_base(&place.base, context, location);
+
+            self.visit_projection(&place.base,
+                                  &place.projection,
+                                  context,
+                                  location);
+        }
+
+        fn super_projection(
+            &mut self,
+            base: &PlaceBase<'tcx>,
+            projection: &[PlaceElem<'tcx>],
+            context: PlaceContext,
+            location: Location,
+        ) {
+            let mut cursor = projection;
+            while let [proj_base @ .., elem] = cursor {
+                cursor = proj_base;
+                self.visit_projection_elem(base, cursor, elem, context, location);
+            }
+        }
+
+        fn super_projection_elem(
+            &mut self,
+            _base: &PlaceBase<'tcx>,
+            _proj_base: &[PlaceElem<'tcx>],
+            elem: &PlaceElem<'tcx>,
+            _context: PlaceContext,
+            location: Location,
+        ) {
+            match elem {
+                ProjectionElem::Field(_field, ty) => {
+                    self.visit_ty(ty, TyContext::Location(location));
+                }
+                ProjectionElem::Index(local) => {
+                    self.visit_local(
+                        local,
+                        PlaceContext::NonMutatingUse(NonMutatingUseContext::Copy),
+                        location
+                    );
+                }
+                ProjectionElem::Deref |
+                ProjectionElem::Subslice { from: _, to: _ } |
+                ProjectionElem::ConstantIndex { offset: _,
+                                                min_length: _,
+                                                from_end: _ } |
+                ProjectionElem::Downcast(_, _) => {
+                }
+            }
+        }
+    );
+}
+
 make_mir_visitor!(Visitor,);
 make_mir_visitor!(MutVisitor,mut);
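
Note on the `visit_place_fns!` hunks above: the new `process_projection` on the mutable visitor uses a copy-on-write strategy, so the boxed projection slice is only reallocated when `process_projection_elem` actually rewrites an element; otherwise it returns `None` and the place is left untouched. A generic standalone sketch of that idiom (not rustc code):

    use std::borrow::Cow;

    // Returns a replacement slice only when `rewrite` changed at least one
    // element; otherwise nothing is allocated and `None` is returned.
    fn process<T: Clone>(
        items: &[T],
        mut rewrite: impl FnMut(&T) -> Option<T>,
    ) -> Option<Box<[T]>> {
        let mut items = Cow::Borrowed(items);
        for i in 0..items.len() {
            let replacement = rewrite(&items[i]);
            if let Some(new) = replacement {
                items.to_mut()[i] = new; // first change switches to an owned copy
            }
        }
        match items {
            Cow::Borrowed(_) => None,
            Cow::Owned(vec) => Some(vec.into_boxed_slice()),
        }
    }
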
 
index c95652f274e36540a24fd5f2f24fe04957588363..2c407a24493ffc956560b0287ab0dc8e88743bc6 100644 (file)
@@ -61,7 +61,7 @@
         /// predicate gets in the way of some checks, which are intended
         /// to operate over only the actual where-clauses written by the
         /// user.)
-        query predicates_of(key: DefId) -> &'tcx ty::GenericPredicates<'tcx> {
+        query predicates_of(key: DefId) -> ty::GenericPredicates<'tcx> {
             cache_on_disk_if { key.is_local() }
         }
 
         /// predicates (where-clauses) directly defined on it. This is
         /// equal to the `explicit_predicates_of` predicates plus the
         /// `inferred_outlives_of` predicates.
-        query predicates_defined_on(_: DefId)
-            -> &'tcx ty::GenericPredicates<'tcx> {}
+        query predicates_defined_on(_: DefId) -> ty::GenericPredicates<'tcx> {}
 
         /// Returns the predicates written explicitly by the user.
-        query explicit_predicates_of(_: DefId)
-            -> &'tcx ty::GenericPredicates<'tcx> {}
+        query explicit_predicates_of(_: DefId) -> ty::GenericPredicates<'tcx> {}
 
         /// Returns the inferred outlives predicates (e.g., for `struct
         /// Foo<'a, T> { x: &'a T }`, this would return `T: 'a`).
         /// evaluate them even during type conversion, often before the
         /// full predicates are available (note that supertraits have
         /// additional acyclicity requirements).
-        query super_predicates_of(key: DefId) -> &'tcx ty::GenericPredicates<'tcx> {
+        query super_predicates_of(key: DefId) -> ty::GenericPredicates<'tcx> {
             desc { |tcx| "computing the supertraits of `{}`", tcx.def_path_str(key) }
         }
 
         /// To avoid cycles within the predicates of a single item we compute
         /// per-type-parameter predicates for resolving `T::AssocTy`.
-        query type_param_predicates(key: (DefId, DefId))
-            -> &'tcx ty::GenericPredicates<'tcx> {
+        query type_param_predicates(key: (DefId, DefId)) -> ty::GenericPredicates<'tcx> {
             no_force
             desc { |tcx| "computing the bounds for type parameter `{}`", {
                 let id = tcx.hir().as_local_hir_id(key.1).unwrap();
             cycle_delay_bug
         }
 
+        query trivial_dropck_outlives(ty: Ty<'tcx>) -> bool {
+            anon
+            no_force
+            desc { "checking if `{:?}` has trivial dropck", ty }
+        }
+
         query adt_dtorck_constraint(
             _: DefId
         ) -> Result<DtorckConstraint<'tcx>, NoSolution> {}
index 2771ce69b9e0df747c6a4cac4f33bbe17b1e1666..675e3bbd002b0e46c975d6ad0ec56f103f5dee6c 100644 (file)
 use syntax::ast::{self, IntTy, UintTy, MetaItemKind};
 use syntax::source_map::{FileName, FilePathMapping};
 use syntax::edition::{Edition, EDITION_NAME_LIST, DEFAULT_EDITION};
-use syntax::parse::{ParseSess, new_parser_from_source_str};
+use syntax::parse::new_parser_from_source_str;
 use syntax::parse::token;
+use syntax::sess::ParseSess;
 use syntax::symbol::{sym, Symbol};
 use syntax::feature_gate::UnstableFeatures;
 use syntax::source_map::SourceMap;
 
 use errors::emitter::HumanReadableErrorType;
-use errors::{ColorConfig, FatalError, Handler};
+use errors::{ColorConfig, FatalError, Handler, SourceMapperDyn};
 
 use getopts;
 
@@ -1316,10 +1317,6 @@ fn parse_symbol_mangling_version(
         "dump the dependency graph to $RUST_DEP_GRAPH (default: /tmp/dep_graph.gv)"),
     query_dep_graph: bool = (false, parse_bool, [UNTRACKED],
         "enable queries of the dependency graph for regression testing"),
-    profile_queries: bool = (false, parse_bool, [UNTRACKED],
-        "trace and profile the queries of the incremental compilation framework"),
-    profile_queries_and_keys: bool = (false, parse_bool, [UNTRACKED],
-        "trace and profile the queries and keys of the incremental compilation framework"),
     no_analysis: bool = (false, parse_bool, [UNTRACKED],
         "parse and expand the source, but run no analysis"),
     extra_plugins: Vec<String> = (Vec::new(), parse_list, [TRACKED],
@@ -1345,7 +1342,7 @@ fn parse_symbol_mangling_version(
     mir_opt_level: usize = (1, parse_uint, [TRACKED],
         "set the MIR optimization level (0-3, default: 1)"),
     mutable_noalias: Option<bool> = (None, parse_opt_bool, [TRACKED],
-        "emit noalias metadata for mutable references (default: yes on LLVM >= 6)"),
+        "emit noalias metadata for mutable references (default: no)"),
     dump_mir: Option<String> = (None, parse_opt_string, [UNTRACKED],
         "dump MIR state to file.
         `val` is used to select which passes and functions to dump. For example:
@@ -1471,6 +1468,9 @@ fn parse_symbol_mangling_version(
         "which mangling version to use for symbol names"),
     binary_dep_depinfo: bool = (false, parse_bool, [TRACKED],
         "include artifacts (sysroot, crate dependencies) used during compilation in dep-info"),
+    insert_sideeffect: bool = (false, parse_bool, [TRACKED],
+        "fix undefined behavior when a thread doesn't eventually make progress \
+         (such as entering an empty infinite loop) by inserting llvm.sideeffect"),
 }
 
 pub fn default_lib_output() -> CrateType {
@@ -1514,22 +1514,25 @@ pub fn default_configuration(sess: &Session) -> ast::CrateConfig {
     }
     for &i in &[8, 16, 32, 64, 128] {
         if i >= min_atomic_width && i <= max_atomic_width {
-            let s = i.to_string();
-            ret.insert((
-                sym::target_has_atomic,
-                Some(Symbol::intern(&s)),
-            ));
-            if &s == wordsz {
+            let mut insert_atomic = |s| {
                 ret.insert((
-                    sym::target_has_atomic,
-                    Some(Symbol::intern("ptr")),
+                    sym::target_has_atomic_load_store,
+                    Some(Symbol::intern(s)),
                 ));
+                if atomic_cas {
+                    ret.insert((
+                        sym::target_has_atomic,
+                        Some(Symbol::intern(s))
+                    ));
+                }
+            };
+            let s = i.to_string();
+            insert_atomic(&s);
+            if &s == wordsz {
+                insert_atomic("ptr");
             }
         }
     }
-    if atomic_cas {
-        ret.insert((sym::target_has_atomic, Some(Symbol::intern("cas"))));
-    }
     if sess.opts.debug_assertions {
         ret.insert((Symbol::intern("debug_assertions"), None));
     }
@@ -1855,6 +1858,7 @@ pub fn rustc_optgroups() -> Vec<RustcOptGroup> {
 
 impl errors::emitter::Emitter for NullEmitter {
     fn emit_diagnostic(&mut self, _: &errors::Diagnostic) {}
+    fn source_map(&self) -> Option<&Lrc<SourceMapperDyn>> { None }
 }
 
 // Converts strings provided as `--cfg [cfgspec]` into a `crate_cfg`.
@@ -2036,11 +2040,7 @@ pub fn parse_error_format(
     return error_format;
 }
 
-pub fn build_session_options_and_crate_config(
-    matches: &getopts::Matches,
-) -> (Options, FxHashSet<(String, Option<String>)>) {
-    let color = parse_color(matches);
-
+fn parse_crate_edition(matches: &getopts::Matches) -> Edition {
     let edition = match matches.opt_str("edition") {
         Some(arg) => Edition::from_str(&arg).unwrap_or_else(|_|
             early_error(
@@ -2067,19 +2067,14 @@ pub fn build_session_options_and_crate_config(
         )
     }
 
-    let (json_rendered, json_artifact_notifications) = parse_json(matches);
-
-    let error_format = parse_error_format(matches, color, json_rendered);
-
-    let unparsed_crate_types = matches.opt_strs("crate-type");
-    let crate_types = parse_crate_types_from_list(unparsed_crate_types)
-        .unwrap_or_else(|e| early_error(error_format, &e[..]));
-
-
-    let (lint_opts, describe_lints, lint_cap) = get_cmd_lint_options(matches, error_format);
-
-    let mut debugging_opts = build_debugging_options(matches, error_format);
+    edition
+}
 
+fn check_debug_option_stability(
+    debugging_opts: &DebuggingOptions,
+    error_format: ErrorOutputType,
+    json_rendered: HumanReadableErrorType,
+) {
     if !debugging_opts.unstable_options {
         if let ErrorOutputType::Json { pretty: true, json_rendered } = error_format {
             early_error(
@@ -2095,7 +2090,13 @@ pub fn build_session_options_and_crate_config(
             );
         }
     }
+}
 
+fn parse_output_types(
+    debugging_opts: &DebuggingOptions,
+    matches: &getopts::Matches,
+    error_format: ErrorOutputType,
+) -> OutputTypes {
     let mut output_types = BTreeMap::new();
     if !debugging_opts.parse_only {
         for list in matches.opt_strs("emit") {
@@ -2120,14 +2121,19 @@ pub fn build_session_options_and_crate_config(
     if output_types.is_empty() {
         output_types.insert(OutputType::Exe, None);
     }
+    OutputTypes(output_types)
+}
 
-    let mut cg = build_codegen_options(matches, error_format);
-    let mut codegen_units = cg.codegen_units;
+fn should_override_cgus_and_disable_thinlto(
+    output_types: &OutputTypes,
+    matches: &getopts::Matches,
+    error_format: ErrorOutputType,
+    mut codegen_units: Option<usize>,
+) -> (bool, Option<usize>) {
     let mut disable_thinlto = false;
-
     // Issue #30063: if user requests LLVM-related output to one
     // particular path, disable codegen-units.
-    let incompatible: Vec<_> = output_types
+    let incompatible: Vec<_> = output_types.0
         .iter()
         .map(|ot_path| ot_path.0)
         .filter(|ot| !ot.is_compatible_with_codegen_units_and_single_output_file())
@@ -2159,29 +2165,39 @@ pub fn build_session_options_and_crate_config(
         }
     }
 
-    if debugging_opts.threads == 0 {
+    if codegen_units == Some(0) {
         early_error(
             error_format,
-            "value for threads must be a positive non-zero integer",
+            "value for codegen units must be a positive non-zero integer",
         );
     }
 
-    if debugging_opts.threads > 1 && debugging_opts.fuel.is_some() {
+    (disable_thinlto, codegen_units)
+}
+
+fn check_thread_count(debugging_opts: &DebuggingOptions, error_format: ErrorOutputType) {
+    if debugging_opts.threads == 0 {
         early_error(
             error_format,
-            "optimization fuel is incompatible with multiple threads",
+            "value for threads must be a positive non-zero integer",
         );
     }
 
-    if codegen_units == Some(0) {
+    if debugging_opts.threads > 1 && debugging_opts.fuel.is_some() {
         early_error(
             error_format,
-            "value for codegen units must be a positive non-zero integer",
+            "optimization fuel is incompatible with multiple threads",
         );
     }
+}
 
-    let incremental = match (&debugging_opts.incremental, &cg.incremental) {
-        (&Some(ref path1), &Some(ref path2)) => {
+fn select_incremental_path(
+    debugging_opts: &DebuggingOptions,
+    cg: &CodegenOptions,
+    error_format: ErrorOutputType,
+) -> Option<PathBuf> {
+    match (&debugging_opts.incremental, &cg.incremental) {
+        (Some(path1), Some(path2)) => {
             if path1 != path2 {
                 early_error(
                     error_format,
@@ -2195,25 +2211,19 @@ pub fn build_session_options_and_crate_config(
                 Some(path1)
             }
         }
-        (&Some(ref path), &None) => Some(path),
-        (&None, &Some(ref path)) => Some(path),
-        (&None, &None) => None,
-    }.map(|m| PathBuf::from(m));
-
-    if debugging_opts.profile && incremental.is_some() {
-        early_error(
-            error_format,
-            "can't instrument with gcov profiling when compiling incrementally",
-        );
-    }
-
-    if cg.profile_generate.enabled() && cg.profile_use.is_some() {
-        early_error(
-            error_format,
-            "options `-C profile-generate` and `-C profile-use` are exclusive",
-        );
-    }
+        (Some(path), None) => Some(path),
+        (None, Some(path)) => Some(path),
+        (None, None) => None,
+    }.map(|m| PathBuf::from(m))
+}
 
+fn collect_print_requests(
+    cg: &mut CodegenOptions,
+    dopts: &mut DebuggingOptions,
+    matches: &getopts::Matches,
+    is_unstable_enabled: bool,
+    error_format: ErrorOutputType,
+) -> Vec<PrintRequest> {
     let mut prints = Vec::<PrintRequest>::new();
     if cg.target_cpu.as_ref().map_or(false, |s| s == "help") {
         prints.push(PrintRequest::TargetCPUs);
@@ -2231,72 +2241,105 @@ pub fn build_session_options_and_crate_config(
         prints.push(PrintRequest::CodeModels);
         cg.code_model = None;
     }
-    if debugging_opts
+    if dopts
         .tls_model
         .as_ref()
         .map_or(false, |s| s == "help")
     {
         prints.push(PrintRequest::TlsModels);
-        debugging_opts.tls_model = None;
+        dopts.tls_model = None;
     }
 
-    let cg = cg;
+    prints.extend(matches.opt_strs("print").into_iter().map(|s| match &*s {
+        "crate-name" => PrintRequest::CrateName,
+        "file-names" => PrintRequest::FileNames,
+        "sysroot" => PrintRequest::Sysroot,
+        "cfg" => PrintRequest::Cfg,
+        "target-list" => PrintRequest::TargetList,
+        "target-cpus" => PrintRequest::TargetCPUs,
+        "target-features" => PrintRequest::TargetFeatures,
+        "relocation-models" => PrintRequest::RelocationModels,
+        "code-models" => PrintRequest::CodeModels,
+        "tls-models" => PrintRequest::TlsModels,
+        "native-static-libs" => PrintRequest::NativeStaticLibs,
+        "target-spec-json" => {
+            if is_unstable_enabled {
+                PrintRequest::TargetSpec
+            } else {
+                early_error(
+                    error_format,
+                    "the `-Z unstable-options` flag must also be passed to \
+                     enable the target-spec-json print option",
+                );
+            }
+        }
+        req => early_error(error_format, &format!("unknown print request `{}`", req)),
+    }));
 
-    let sysroot_opt = matches.opt_str("sysroot").map(|m| PathBuf::from(&m));
-    let target_triple = if let Some(target) = matches.opt_str("target") {
-        if target.ends_with(".json") {
+    prints
+}
+
+fn parse_target_triple(matches: &getopts::Matches, error_format: ErrorOutputType) -> TargetTriple {
+    match matches.opt_str("target") {
+        Some(target) if target.ends_with(".json") => {
             let path = Path::new(&target);
             TargetTriple::from_path(&path).unwrap_or_else(|_|
                 early_error(error_format, &format!("target file {:?} does not exist", path)))
+        }
+        Some(target) => TargetTriple::TargetTriple(target),
+        _ => TargetTriple::from_triple(host_triple()),
+    }
+}
+
+fn parse_opt_level(
+    matches: &getopts::Matches,
+    cg: &CodegenOptions,
+    error_format: ErrorOutputType,
+) -> OptLevel {
+    // The `-O` and `-C opt-level` flags specify the same setting, so we want to be able
+    // to use them interchangeably. However, because they're technically different flags,
+    // we need to work out manually which should take precedence if both are supplied (i.e.
+    // the rightmost flag). We do this by finding the (rightmost) position of both flags and
+    // comparing them. Note that if a flag is not found, its position will be `None`, which
+    // always compared less than `Some(_)`.
+    let max_o = matches.opt_positions("O").into_iter().max();
+    let max_c = matches.opt_strs_pos("C").into_iter().flat_map(|(i, s)| {
+        if let Some("opt-level") = s.splitn(2, '=').next() {
+            Some(i)
         } else {
-            TargetTriple::TargetTriple(target)
+            None
         }
+    }).max();
+    if max_o > max_c {
+        OptLevel::Default
     } else {
-        TargetTriple::from_triple(host_triple())
-    };
-    let opt_level = {
-        // The `-O` and `-C opt-level` flags specify the same setting, so we want to be able
-        // to use them interchangeably. However, because they're technically different flags,
-        // we need to work out manually which should take precedence if both are supplied (i.e.
-        // the rightmost flag). We do this by finding the (rightmost) position of both flags and
-        // comparing them. Note that if a flag is not found, its position will be `None`, which
-        // always compared less than `Some(_)`.
-        let max_o = matches.opt_positions("O").into_iter().max();
-        let max_c = matches.opt_strs_pos("C").into_iter().flat_map(|(i, s)| {
-            if let Some("opt-level") = s.splitn(2, '=').next() {
-                Some(i)
-            } else {
-                None
-            }
-        }).max();
-        if max_o > max_c {
-            OptLevel::Default
-        } else {
-            match cg.opt_level.as_ref().map(String::as_ref) {
-                None => OptLevel::No,
-                Some("0") => OptLevel::No,
-                Some("1") => OptLevel::Less,
-                Some("2") => OptLevel::Default,
-                Some("3") => OptLevel::Aggressive,
-                Some("s") => OptLevel::Size,
-                Some("z") => OptLevel::SizeMin,
-                Some(arg) => {
-                    early_error(
-                        error_format,
-                        &format!(
-                            "optimization level needs to be \
-                             between 0-3, s or z (instead was `{}`)",
-                            arg
-                        ),
-                    );
-                }
+        match cg.opt_level.as_ref().map(String::as_ref) {
+            None => OptLevel::No,
+            Some("0") => OptLevel::No,
+            Some("1") => OptLevel::Less,
+            Some("2") => OptLevel::Default,
+            Some("3") => OptLevel::Aggressive,
+            Some("s") => OptLevel::Size,
+            Some("z") => OptLevel::SizeMin,
+            Some(arg) => {
+                early_error(
+                    error_format,
+                    &format!(
+                        "optimization level needs to be \
+                            between 0-3, s or z (instead was `{}`)",
+                        arg
+                    ),
+                );
             }
         }
-    };
-    // The `-g` and `-C debuginfo` flags specify the same setting, so we want to be able
-    // to use them interchangeably. See the note above (regarding `-O` and `-C opt-level`)
-    // for more details.
-    let debug_assertions = cg.debug_assertions.unwrap_or(opt_level == OptLevel::No);
+    }
+}
+
+fn select_debuginfo(
+    matches: &getopts::Matches,
+    cg: &CodegenOptions,
+    error_format: ErrorOutputType,
+) -> DebugInfo {
     let max_g = matches.opt_positions("g").into_iter().max();
     let max_c = matches.opt_strs_pos("C").into_iter().flat_map(|(i, s)| {
         if let Some("debuginfo") = s.splitn(2, '=').next() {
@@ -2305,7 +2348,7 @@ pub fn build_session_options_and_crate_config(
             None
         }
     }).max();
-    let debuginfo = if max_g > max_c {
+    if max_g > max_c {
         DebugInfo::Full
     } else {
         match cg.debuginfo {
@@ -2323,14 +2366,14 @@ pub fn build_session_options_and_crate_config(
                 );
             }
         }
-    };
-
-    let mut search_paths = vec![];
-    for s in &matches.opt_strs("L") {
-        search_paths.push(SearchPath::from_cli_opt(&s[..], error_format));
     }
+}
 
-    let libs = matches
+fn parse_libs(
+    matches: &getopts::Matches,
+    error_format: ErrorOutputType,
+) -> Vec<(String, Option<String>, Option<cstore::NativeLibraryKind>)> {
+    matches
         .opt_strs("l")
         .into_iter()
         .map(|s| {
@@ -2369,52 +2412,23 @@ pub fn build_session_options_and_crate_config(
             let new_name = name_parts.next();
             (name.to_owned(), new_name.map(|n| n.to_owned()), kind)
         })
-        .collect();
-
-    let cfg = parse_cfgspecs(matches.opt_strs("cfg"));
-    let test = matches.opt_present("test");
-
-    let is_unstable_enabled = nightly_options::is_unstable_enabled(matches);
-
-    prints.extend(matches.opt_strs("print").into_iter().map(|s| match &*s {
-        "crate-name" => PrintRequest::CrateName,
-        "file-names" => PrintRequest::FileNames,
-        "sysroot" => PrintRequest::Sysroot,
-        "cfg" => PrintRequest::Cfg,
-        "target-list" => PrintRequest::TargetList,
-        "target-cpus" => PrintRequest::TargetCPUs,
-        "target-features" => PrintRequest::TargetFeatures,
-        "relocation-models" => PrintRequest::RelocationModels,
-        "code-models" => PrintRequest::CodeModels,
-        "tls-models" => PrintRequest::TlsModels,
-        "native-static-libs" => PrintRequest::NativeStaticLibs,
-        "target-spec-json" => {
-            if is_unstable_enabled {
-                PrintRequest::TargetSpec
-            } else {
-                early_error(
-                    error_format,
-                    "the `-Z unstable-options` flag must also be passed to \
-                     enable the target-spec-json print option",
-                );
-            }
-        }
-        req => early_error(error_format, &format!("unknown print request `{}`", req)),
-    }));
+        .collect()
+}
 
-    let borrowck_mode = match debugging_opts.borrowck.as_ref().map(|s| &s[..]) {
+fn parse_borrowck_mode(dopts: &DebuggingOptions, error_format: ErrorOutputType) -> BorrowckMode {
+    match dopts.borrowck.as_ref().map(|s| &s[..]) {
         None | Some("migrate") => BorrowckMode::Migrate,
         Some("mir") => BorrowckMode::Mir,
         Some(m) => early_error(error_format, &format!("unknown borrowck mode `{}`", m)),
-    };
-
-    if !cg.remark.is_empty() && debuginfo == DebugInfo::None {
-        early_warn(
-            error_format,
-            "-C remark requires \"-C debuginfo=n\" to show source locations",
-        );
     }
+}
 
+fn parse_externs(
+    matches: &getopts::Matches,
+    debugging_opts: &DebuggingOptions,
+    error_format: ErrorOutputType,
+    is_unstable_enabled: bool,
+) -> Externs {
     if matches.opt_present("extern-private") && !debugging_opts.unstable_options {
         early_error(
             ErrorOutputType::default(),
@@ -2455,10 +2469,14 @@ pub fn build_session_options_and_crate_config(
         // flag
         entry.is_private_dep |= private;
     }
+    Externs(externs)
+}
 
-    let crate_name = matches.opt_str("crate-name");
-
-    let remap_path_prefix = matches
+fn parse_remap_path_prefix(
+    matches: &getopts::Matches,
+    error_format: ErrorOutputType
+) -> Vec<(PathBuf, PathBuf)> {
+    matches
         .opt_strs("remap-path-prefix")
         .into_iter()
         .map(|remap| {
@@ -2473,42 +2491,130 @@ pub fn build_session_options_and_crate_config(
                 ),
             }
         })
-        .collect();
+        .collect()
+}
 
-    (
-        Options {
-            crate_types,
-            optimize: opt_level,
-            debuginfo,
-            lint_opts,
-            lint_cap,
-            describe_lints,
-            output_types: OutputTypes(output_types),
-            search_paths,
-            maybe_sysroot: sysroot_opt,
-            target_triple,
-            test,
-            incremental,
-            debugging_opts,
-            prints,
-            borrowck_mode,
-            cg,
+pub fn build_session_options(matches: &getopts::Matches) -> Options {
+    let color = parse_color(matches);
+
+    let edition = parse_crate_edition(matches);
+
+    let (json_rendered, json_artifact_notifications) = parse_json(matches);
+
+    let error_format = parse_error_format(matches, color, json_rendered);
+
+    let unparsed_crate_types = matches.opt_strs("crate-type");
+    let crate_types = parse_crate_types_from_list(unparsed_crate_types)
+        .unwrap_or_else(|e| early_error(error_format, &e[..]));
+
+    let (lint_opts, describe_lints, lint_cap) = get_cmd_lint_options(matches, error_format);
+
+    let mut debugging_opts = build_debugging_options(matches, error_format);
+    check_debug_option_stability(&debugging_opts, error_format, json_rendered);
+
+    let output_types = parse_output_types(&debugging_opts, matches, error_format);
+
+    let mut cg = build_codegen_options(matches, error_format);
+    let (disable_thinlto, codegen_units) = should_override_cgus_and_disable_thinlto(
+        &output_types,
+        matches,
+        error_format,
+        cg.codegen_units,
+    );
+
+    check_thread_count(&debugging_opts, error_format);
+
+    let incremental = select_incremental_path(&debugging_opts, &cg, error_format);
+
+    if debugging_opts.profile && incremental.is_some() {
+        early_error(
             error_format,
-            externs: Externs(externs),
-            crate_name,
-            alt_std_name: None,
-            libs,
-            unstable_features: UnstableFeatures::from_environment(),
-            debug_assertions,
-            actually_rustdoc: false,
-            cli_forced_codegen_units: codegen_units,
-            cli_forced_thinlto_off: disable_thinlto,
-            remap_path_prefix,
-            edition,
-            json_artifact_notifications,
-        },
-        cfg,
-    )
+            "can't instrument with gcov profiling when compiling incrementally",
+        );
+    }
+
+    if cg.profile_generate.enabled() && cg.profile_use.is_some() {
+        early_error(
+            error_format,
+            "options `-C profile-generate` and `-C profile-use` are exclusive",
+        );
+    }
+
+    let is_unstable_enabled = nightly_options::is_unstable_enabled(matches);
+    let prints = collect_print_requests(
+        &mut cg,
+        &mut debugging_opts,
+        matches,
+        is_unstable_enabled,
+        error_format,
+    );
+
+    let cg = cg;
+
+    let sysroot_opt = matches.opt_str("sysroot").map(|m| PathBuf::from(&m));
+    let target_triple = parse_target_triple(matches, error_format);
+    let opt_level = parse_opt_level(matches, &cg, error_format);
+    // The `-g` and `-C debuginfo` flags specify the same setting, so we want to be able
+    // to use them interchangeably. See the note above (regarding `-O` and `-C opt-level`)
+    // for more details.
+    let debug_assertions = cg.debug_assertions.unwrap_or(opt_level == OptLevel::No);
+    let debuginfo = select_debuginfo(matches, &cg, error_format);
+
+    let mut search_paths = vec![];
+    for s in &matches.opt_strs("L") {
+        search_paths.push(SearchPath::from_cli_opt(&s[..], error_format));
+    }
+
+    let libs = parse_libs(matches, error_format);
+
+    let test = matches.opt_present("test");
+
+    let borrowck_mode = parse_borrowck_mode(&debugging_opts, error_format);
+
+    if !cg.remark.is_empty() && debuginfo == DebugInfo::None {
+        early_warn(
+            error_format,
+            "-C remark requires \"-C debuginfo=n\" to show source locations",
+        );
+    }
+
+    let externs = parse_externs(matches, &debugging_opts, error_format, is_unstable_enabled);
+
+    let crate_name = matches.opt_str("crate-name");
+
+    let remap_path_prefix = parse_remap_path_prefix(matches, error_format);
+
+    Options {
+        crate_types,
+        optimize: opt_level,
+        debuginfo,
+        lint_opts,
+        lint_cap,
+        describe_lints,
+        output_types,
+        search_paths,
+        maybe_sysroot: sysroot_opt,
+        target_triple,
+        test,
+        incremental,
+        debugging_opts,
+        prints,
+        borrowck_mode,
+        cg,
+        error_format,
+        externs,
+        crate_name,
+        alt_std_name: None,
+        libs,
+        unstable_features: UnstableFeatures::from_environment(),
+        debug_assertions,
+        actually_rustdoc: false,
+        cli_forced_codegen_units: codegen_units,
+        cli_forced_thinlto_off: disable_thinlto,
+        remap_path_prefix,
+        edition,
+        json_artifact_notifications,
+    }
 }
 
 pub fn make_crate_type_option() -> RustcOptGroup {
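
Note on the `parse_opt_level` hunk above: `-O` and `-C opt-level` are resolved with a rightmost-flag-wins rule by comparing the last command-line position of each flag, where an absent flag (`None`) orders before any present one. A tiny standalone illustration of that comparison, using hypothetical flag positions rather than real `getopts` matches:

    // Rightmost flag wins; None (flag absent) compares less than Some(_).
    fn o_takes_precedence(o_positions: &[usize], opt_level_positions: &[usize]) -> bool {
        let max_o = o_positions.iter().copied().max();
        let max_c = opt_level_positions.iter().copied().max();
        max_o > max_c
    }

    fn main() {
        // `rustc -O -C opt-level=3 ...`: opt-level appears later, so it wins.
        assert!(!o_takes_precedence(&[0], &[1]));
        // `rustc -C opt-level=3 -O ...`: -O is rightmost, so OptLevel::Default is used.
        assert!(o_takes_precedence(&[1], &[0]));
    }
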
index c117418f63699bd1863b742a879ed789cf638e62..061bbdc307fc4bf4d9d03b7251fec8934c1602cf 100644 (file)
@@ -3,8 +3,9 @@
 use crate::middle::cstore;
 use crate::session::config::{
     build_configuration,
-    build_session_options_and_crate_config,
-    to_crate_config
+    build_session_options,
+    to_crate_config,
+    parse_cfgspecs,
 };
 use crate::session::config::{LtoCli, LinkerPluginLto, SwitchWithOptPath, ExternEntry};
 use crate::session::build_session;
 use syntax::edition::{Edition, DEFAULT_EDITION};
 use syntax;
 use super::Options;
+use rustc_data_structures::fx::FxHashSet;
+
+pub fn build_session_options_and_crate_config(
+    matches: &getopts::Matches,
+) -> (Options, FxHashSet<(String, Option<String>)>) {
+    (
+        build_session_options(matches),
+        parse_cfgspecs(matches.opt_strs("cfg")),
+    )
+}
 
 impl ExternEntry {
     fn new_public<S: Into<String>,
index f22445f5d4744765c8f8c705d914edec6e929548..b65bf2230b39d953681f3ec4d501b857081a1c4c 100644 (file)
@@ -7,11 +7,10 @@
 
 use crate::lint;
 use crate::lint::builtin::BuiltinLintDiagnostics;
-use crate::session::config::{OutputType, PrintRequest, SwitchWithOptPath};
+use crate::session::config::{OutputType, PrintRequest, Sanitizer, SwitchWithOptPath};
 use crate::session::search_paths::{PathKind, SearchPath};
 use crate::util::nodemap::{FxHashMap, FxHashSet};
 use crate::util::common::{duration_to_secs_str, ErrorReported};
-use crate::util::common::ProfileQueriesMsg;
 
 use rustc_data_structures::base_n;
 use rustc_data_structures::sync::{
 use errors::annotate_snippet_emitter_writer::{AnnotateSnippetEmitterWriter};
 use syntax::ast::{self, NodeId};
 use syntax::edition::Edition;
-use syntax::ext::allocator::AllocatorKind;
+use syntax_expand::allocator::AllocatorKind;
 use syntax::feature_gate::{self, AttributeType};
 use syntax::json::JsonEmitter;
 use syntax::source_map;
-use syntax::parse::{self, ParseSess};
+use syntax::sess::ParseSess;
 use syntax::symbol::Symbol;
 use syntax_pos::{MultiSpan, Span};
 use crate::util::profiling::{SelfProfiler, SelfProfilerRef};
@@ -46,7 +45,7 @@
 use std::io::Write;
 use std::path::PathBuf;
 use std::time::Duration;
-use std::sync::{Arc, mpsc};
+use std::sync::Arc;
 
 mod code_stats;
 pub mod config;
@@ -125,9 +124,6 @@ pub struct Session {
     /// `-Zquery-dep-graph` is specified.
     pub cgu_reuse_tracker: CguReuseTracker,
 
-    /// Used by `-Z profile-queries` in `util::common`.
-    pub profile_channel: Lock<Option<mpsc::Sender<ProfileQueriesMsg>>>,
-
     /// Used by `-Z self-profile`.
     pub prof: SelfProfilerRef,
 
@@ -509,13 +505,6 @@ pub fn time_passes(&self) -> bool {
     pub fn time_extended(&self) -> bool {
         self.opts.debugging_opts.time_passes
     }
-    pub fn profile_queries(&self) -> bool {
-        self.opts.debugging_opts.profile_queries
-            || self.opts.debugging_opts.profile_queries_and_keys
-    }
-    pub fn profile_queries_and_keys(&self) -> bool {
-        self.opts.debugging_opts.profile_queries_and_keys
-    }
     pub fn instrument_mcount(&self) -> bool {
         self.opts.debugging_opts.instrument_mcount
     }
@@ -637,6 +626,14 @@ pub fn fewer_names(&self) -> bool {
             .output_types
             .contains_key(&OutputType::LlvmAssembly)
             || self.opts.output_types.contains_key(&OutputType::Bitcode);
+
+        // Address sanitizer and memory sanitizer use the alloca name when reporting an issue.
+        let more_names = match self.opts.debugging_opts.sanitizer {
+            Some(Sanitizer::Address) => true,
+            Some(Sanitizer::Memory) => true,
+            _ => more_names,
+        };
+
         self.opts.debugging_opts.fewer_names || !more_names
     }
 
@@ -1162,7 +1159,7 @@ fn build_session_(
     );
     let target_cfg = config::build_target_config(&sopts, &span_diagnostic);
 
-    let parse_sess = parse::ParseSess::with_span_handler(
+    let parse_sess = ParseSess::with_span_handler(
         span_diagnostic,
         source_map,
     );
@@ -1234,7 +1231,6 @@ fn build_session_(
         incr_comp_session: OneThread::new(RefCell::new(IncrCompSession::NotInitialized)),
         cgu_reuse_tracker,
         prof: SelfProfilerRef::new(self_profiler),
-        profile_channel: Lock::new(None),
         perf_stats: PerfStats {
             symbol_hash_time: Lock::new(Duration::from_secs(0)),
             decode_def_path_tables_time: Lock::new(Duration::from_secs(0)),
index a1c97d6c68790017ed11e70740364f402f52d42a..daa4a215a238a4623f1f73685ce4e1bb112dc4b4 100644 (file)
@@ -453,21 +453,17 @@ fn on_unimplemented_note(
         }
     }
 
-    fn find_similar_impl_candidates(&self,
-                                    trait_ref: ty::PolyTraitRef<'tcx>)
-                                    -> Vec<ty::TraitRef<'tcx>>
-    {
-        let simp = fast_reject::simplify_type(self.tcx,
-                                              trait_ref.skip_binder().self_ty(),
-                                              true);
+    fn find_similar_impl_candidates(
+        &self,
+        trait_ref: ty::PolyTraitRef<'tcx>,
+    ) -> Vec<ty::TraitRef<'tcx>> {
+        let simp = fast_reject::simplify_type(self.tcx, trait_ref.skip_binder().self_ty(), true);
         let all_impls = self.tcx.all_impls(trait_ref.def_id());
 
         match simp {
             Some(simp) => all_impls.iter().filter_map(|&def_id| {
                 let imp = self.tcx.impl_trait_ref(def_id).unwrap();
-                let imp_simp = fast_reject::simplify_type(self.tcx,
-                                                          imp.self_ty(),
-                                                          true);
+                let imp_simp = fast_reject::simplify_type(self.tcx, imp.self_ty(), true);
                 if let Some(imp_simp) = imp_simp {
                     if simp != imp_simp {
                         return None
@@ -482,10 +478,11 @@ fn find_similar_impl_candidates(&self,
         }
     }
 
-    fn report_similar_impl_candidates(&self,
-                                      impl_candidates: Vec<ty::TraitRef<'tcx>>,
-                                      err: &mut DiagnosticBuilder<'_>)
-    {
+    fn report_similar_impl_candidates(
+        &self,
+        impl_candidates: Vec<ty::TraitRef<'tcx>>,
+        err: &mut DiagnosticBuilder<'_>,
+    ) {
         if impl_candidates.is_empty() {
             return;
         }
@@ -718,12 +715,23 @@ pub fn report_selection_error(
                             // these notes will often be of the form
                             //     "the type `T` can't be frobnicated"
                             // which is somewhat confusing.
-                            err.help(&format!("consider adding a `where {}` bound",
-                                              trait_ref.to_predicate()));
-                        } else if !have_alt_message {
-                            // Can't show anything else useful, try to find similar impls.
-                            let impl_candidates = self.find_similar_impl_candidates(trait_ref);
-                            self.report_similar_impl_candidates(impl_candidates, &mut err);
+                            self.suggest_restricting_param_bound(
+                                &mut err,
+                                &trait_ref,
+                                obligation.cause.body_id,
+                            );
+                        } else {
+                            if !have_alt_message {
+                                // Can't show anything else useful, try to find similar impls.
+                                let impl_candidates = self.find_similar_impl_candidates(trait_ref);
+                                self.report_similar_impl_candidates(impl_candidates, &mut err);
+                            }
+                            self.suggest_change_mut(
+                                &obligation,
+                                &mut err,
+                                &trait_ref,
+                                points_at_arg,
+                            );
                         }
 
                         // If this error is due to `!: Trait` not implemented but `(): Trait` is
@@ -955,6 +963,175 @@ pub fn report_selection_error(
         err.emit();
     }
 
+    fn suggest_restricting_param_bound(
+        &self,
+        err: &mut DiagnosticBuilder<'_>,
+        trait_ref: &ty::PolyTraitRef<'_>,
+        body_id: hir::HirId,
+    ) {
+        let self_ty = trait_ref.self_ty();
+        let (param_ty, projection) = match &self_ty.kind {
+            ty::Param(_) => (true, None),
+            ty::Projection(projection) => (false, Some(projection)),
+            _ => return,
+        };
+
+        let mut suggest_restriction = |generics: &hir::Generics, msg| {
+            let span = generics.where_clause.span_for_predicates_or_empty_place();
+            if !span.from_expansion() && span.desugaring_kind().is_none() {
+                err.span_suggestion(
+                    generics.where_clause.span_for_predicates_or_empty_place().shrink_to_hi(),
+                    &format!("consider further restricting {}", msg),
+                    format!(
+                        "{} {} ",
+                        if !generics.where_clause.predicates.is_empty() {
+                            ","
+                        } else {
+                            " where"
+                        },
+                        trait_ref.to_predicate(),
+                    ),
+                    Applicability::MachineApplicable,
+                );
+            }
+        };
+
+        // FIXME: Add check for trait bound that is already present, particularly `?Sized` so we
+        //        don't suggest `T: Sized + ?Sized`.
+        let mut hir_id = body_id;
+        while let Some(node) = self.tcx.hir().find(hir_id) {
+            match node {
+                hir::Node::TraitItem(hir::TraitItem {
+                    generics,
+                    kind: hir::TraitItemKind::Method(..), ..
+                }) if param_ty && self_ty == self.tcx.types.self_param => {
+                    // Restricting `Self` for a single method.
+                    suggest_restriction(&generics, "`Self`");
+                    return;
+                }
+
+                hir::Node::Item(hir::Item {
+                    kind: hir::ItemKind::Fn(_, _, generics, _), ..
+                }) |
+                hir::Node::TraitItem(hir::TraitItem {
+                    generics,
+                    kind: hir::TraitItemKind::Method(..), ..
+                }) |
+                hir::Node::ImplItem(hir::ImplItem {
+                    generics,
+                    kind: hir::ImplItemKind::Method(..), ..
+                }) |
+                hir::Node::Item(hir::Item {
+                    kind: hir::ItemKind::Trait(_, _, generics, _, _), ..
+                }) |
+                hir::Node::Item(hir::Item {
+                    kind: hir::ItemKind::Impl(_, _, _, generics, ..), ..
+                }) if projection.is_some() => {
+                    // Missing associated type bound.
+                    suggest_restriction(&generics, "the associated type");
+                    return;
+                }
+
+                hir::Node::Item(hir::Item { kind: hir::ItemKind::Struct(_, generics), span, .. }) |
+                hir::Node::Item(hir::Item { kind: hir::ItemKind::Enum(_, generics), span, .. }) |
+                hir::Node::Item(hir::Item { kind: hir::ItemKind::Union(_, generics), span, .. }) |
+                hir::Node::Item(hir::Item {
+                    kind: hir::ItemKind::Trait(_, _, generics, ..), span, ..
+                }) |
+                hir::Node::Item(hir::Item {
+                    kind: hir::ItemKind::Impl(_, _, _, generics, ..), span, ..
+                }) |
+                hir::Node::Item(hir::Item {
+                    kind: hir::ItemKind::Fn(_, _, generics, _), span, ..
+                }) |
+                hir::Node::Item(hir::Item {
+                    kind: hir::ItemKind::TyAlias(_, generics), span, ..
+                }) |
+                hir::Node::Item(hir::Item {
+                    kind: hir::ItemKind::TraitAlias(generics, _), span, ..
+                }) |
+                hir::Node::Item(hir::Item {
+                    kind: hir::ItemKind::OpaqueTy(hir::OpaqueTy { generics, .. }), span, ..
+                }) |
+                hir::Node::TraitItem(hir::TraitItem { generics, span, .. }) |
+                hir::Node::ImplItem(hir::ImplItem { generics, span, .. })
+                if param_ty => {
+                    // Missing generic type parameter bound.
+                    let restrict_msg = "consider further restricting this bound";
+                    let param_name = self_ty.to_string();
+                    for param in generics.params.iter().filter(|p| {
+                        &param_name == std::convert::AsRef::<str>::as_ref(&p.name.ident().as_str())
+                    }) {
+                        if param_name.starts_with("impl ") {
+                            // `impl Trait` in argument:
+                            // `fn foo(x: impl Trait) {}` → `fn foo(x: impl Trait + Trait2) {}`
+                            err.span_suggestion(
+                                param.span,
+                                restrict_msg,
+                                // `impl CurrentTrait + MissingTrait`
+                                format!("{} + {}", param.name.ident(), trait_ref),
+                                Applicability::MachineApplicable,
+                            );
+                        } else if generics.where_clause.predicates.is_empty() &&
+                                param.bounds.is_empty()
+                        {
+                            // If there are no bounds whatsoever, suggest adding a constraint
+                            // to the type parameter:
+                            // `fn foo<T>(t: T) {}` → `fn foo<T: Trait>(t: T) {}`
+                            err.span_suggestion(
+                                param.span,
+                                "consider restricting this bound",
+                                format!("{}", trait_ref.to_predicate()),
+                                Applicability::MachineApplicable,
+                            );
+                        } else if !generics.where_clause.predicates.is_empty() {
+                            // There is a `where` clause, so suggest expanding it:
+                            // `fn foo<T>(t: T) where T: Debug {}` →
+                            // `fn foo<T>(t: T) where T: Debug, T: Trait {}`
+                            err.span_suggestion(
+                                generics.where_clause.span().unwrap().shrink_to_hi(),
+                                &format!(
+                                    "consider further restricting type parameter `{}`",
+                                    param_name,
+                                ),
+                                format!(", {}", trait_ref.to_predicate()),
+                                Applicability::MachineApplicable,
+                            );
+                        } else {
+                            // If there is no `where` clause, lean towards constraining the
+                            // type parameter:
+                            // `fn foo<X: Bar, T>(t: T, x: X) {}` → `fn foo<X: Bar, T: Trait>(t: T, x: X) {}`
+                            // `fn foo<T: Bar>(t: T) {}` → `fn foo<T: Bar + Trait>(t: T) {}`
+                            let sp = param.span.with_hi(span.hi());
+                            let span = self.tcx.sess.source_map()
+                                .span_through_char(sp, ':');
+                            if sp != param.span && sp != span {
+                                // Only suggest if we have high certainty that the span
+                                // covers the colon in `foo<T: Trait>`.
+                                err.span_suggestion(span, restrict_msg, format!(
+                                    "{} + ",
+                                    trait_ref.to_predicate(),
+                                ), Applicability::MachineApplicable);
+                            } else {
+                                err.span_label(param.span, &format!(
+                                    "consider adding a `where {}` bound",
+                                    trait_ref.to_predicate(),
+                                ));
+                            }
+                        }
+                        return;
+                    }
+                }
+
+                hir::Node::Crate => return,
+
+                _ => {}
+            }
+
+            hir_id = self.tcx.hir().get_parent_item(hir_id);
+        }
+    }
+
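A stand-alone sketch (plain user code, not part of this patch; the names are invented for illustration) of the case the new `suggest_restricting_param_bound` helper targets: an unsatisfied bound on a generic parameter, now reported with a machine-applicable suggestion on the parameter itself rather than only a free-floating `help:` note.

```rust
// The failing input would look like this (kept as a comment so the snippet compiles):
//
//     fn wrapper<T>(value: T) {
//         needs_clone(value); // error[E0277]: the trait bound `T: Clone` is not satisfied
//     }                       // suggestion: "consider restricting this bound: `T: Clone`"
//
// The accepted fix, i.e. what applying the suggestion produces:
fn needs_clone<T: Clone>(_value: T) {}

fn wrapper<T: Clone>(value: T) {
    needs_clone(value);
}

fn main() {
    wrapper(42u8);
}
```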
     /// When encountering an assignment of an unsized value, like `let x = ""[..];`, provide a
     /// suggestion to borrow the initializer in order to have a slice instead.
     fn suggest_borrow_on_unsized_slice(
@@ -1081,9 +1258,11 @@ fn suggest_remove_reference(
 
                     let substs = self.tcx.mk_substs_trait(trait_type, &[]);
                     let new_trait_ref = ty::TraitRef::new(trait_ref.def_id, substs);
-                    let new_obligation = Obligation::new(ObligationCause::dummy(),
-                                                         obligation.param_env,
-                                                         new_trait_ref.to_predicate());
+                    let new_obligation = Obligation::new(
+                        ObligationCause::dummy(),
+                        obligation.param_env,
+                        new_trait_ref.to_predicate(),
+                    );
 
                     if self.predicate_may_hold(&new_obligation) {
                         let sp = self.tcx.sess.source_map()
@@ -1105,6 +1284,77 @@ fn suggest_remove_reference(
         }
     }
 
+    /// Checks if the trait bound is implemented for a different mutability and notes it in the
+    /// final error.
+    fn suggest_change_mut(
+        &self,
+        obligation: &PredicateObligation<'tcx>,
+        err: &mut DiagnosticBuilder<'tcx>,
+        trait_ref: &ty::Binder<ty::TraitRef<'tcx>>,
+        points_at_arg: bool,
+    ) {
+        let span = obligation.cause.span;
+        if let Ok(snippet) = self.tcx.sess.source_map().span_to_snippet(span) {
+            let refs_number = snippet.chars()
+                .filter(|c| !c.is_whitespace())
+                .take_while(|c| *c == '&')
+                .count();
+            if let Some('\'') = snippet.chars()
+                .filter(|c| !c.is_whitespace())
+                .skip(refs_number)
+                .next()
+            { // Do not suggest removal of borrow from type arguments.
+                return;
+            }
+            let trait_ref = self.resolve_vars_if_possible(trait_ref);
+            if trait_ref.has_infer_types() {
+                // Do not ICE while trying to find if a reborrow would succeed on a trait with
+                // unresolved bindings.
+                return;
+            }
+
+            if let ty::Ref(region, t_type, mutability) = trait_ref.skip_binder().self_ty().kind {
+                let trait_type = match mutability {
+                    hir::Mutability::MutMutable => self.tcx.mk_imm_ref(region, t_type),
+                    hir::Mutability::MutImmutable => self.tcx.mk_mut_ref(region, t_type),
+                };
+
+                let substs = self.tcx.mk_substs_trait(trait_type, &[]);
+                let new_trait_ref = ty::TraitRef::new(trait_ref.skip_binder().def_id, substs);
+                let new_obligation = Obligation::new(
+                    ObligationCause::dummy(),
+                    obligation.param_env,
+                    new_trait_ref.to_predicate(),
+                );
+
+                if self.evaluate_obligation_no_overflow(
+                    &new_obligation,
+                ).must_apply_modulo_regions() {
+                    let sp = self.tcx.sess.source_map()
+                        .span_take_while(span, |c| c.is_whitespace() || *c == '&');
+                    if points_at_arg &&
+                        mutability == hir::Mutability::MutImmutable &&
+                        refs_number > 0
+                    {
+                        err.span_suggestion(
+                            sp,
+                            "consider changing this borrow's mutability",
+                            "&mut ".to_string(),
+                            Applicability::MachineApplicable,
+                        );
+                    } else {
+                        err.note(&format!(
+                            "`{}` is implemented for `{:?}`, but not for `{:?}`",
+                            trait_ref,
+                            trait_type,
+                            trait_ref.skip_binder().self_ty(),
+                        ));
+                    }
+                }
+            }
+        }
+    }
+
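A small, self-contained sketch (not part of this patch) of the situation `suggest_change_mut` covers. It relies on the standard library implementing `std::fmt::Write` for `&mut String` but not for `&String`, so an immutable borrow in argument position fails with exactly the mutability mismatch this helper detects.

```rust
use std::fmt::Write;

fn log_to(mut sink: impl Write, msg: &str) {
    let _ = writeln!(sink, "{}", msg);
}

fn main() {
    let mut out = String::new();
    // Passing `&out` fails: `Write` is implemented for `&mut String` but not
    // for `&String`. Since the error span points at the argument, the helper
    // can emit "consider changing this borrow's mutability" with `&mut `:
    //
    //     log_to(&out, "hello"); // error[E0277]
    //
    log_to(&mut out, "hello");
    assert_eq!(out, "hello\n");
}
```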
     fn suggest_semicolon_removal(
         &self,
         obligation: &PredicateObligation<'tcx>,
index 7f194c2fbbc0f1b6099d3d68aa1dd1108d73b44c..d96330bf0a9b4680b88b16328cea577577d4749b 100644 (file)
@@ -610,7 +610,7 @@ pub struct VtableImplData<'tcx, N> {
 #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, HashStable)]
 pub struct VtableGeneratorData<'tcx, N> {
     pub generator_def_id: DefId,
-    pub substs: ty::GeneratorSubsts<'tcx>,
+    pub substs: SubstsRef<'tcx>,
     /// Nested obligations. This can be non-empty if the generator
     /// signature contains associated types.
     pub nested: Vec<N>
index a7bb29c699e0e59c7a50589806ef2577adba53b9..d88bbe145d1fd7be647528ec025f91e8e2a78c60 100644 (file)
@@ -1259,7 +1259,7 @@ fn confirm_generator_candidate<'cx, 'tcx>(
     obligation: &ProjectionTyObligation<'tcx>,
     vtable: VtableGeneratorData<'tcx, PredicateObligation<'tcx>>,
 ) -> Progress<'tcx> {
-    let gen_sig = vtable.substs.poly_sig(vtable.generator_def_id, selcx.tcx());
+    let gen_sig = vtable.substs.as_generator().poly_sig(vtable.generator_def_id, selcx.tcx());
     let Normalized {
         value: gen_sig,
         obligations
@@ -1505,8 +1505,8 @@ fn assoc_ty_def(
 
     if let Some(assoc_item) = trait_def
         .ancestors(tcx, impl_def_id)
-        .defs(tcx, assoc_ty_name, ty::AssocKind::Type, trait_def_id)
-        .next() {
+        .leaf_def(tcx, assoc_ty_name, ty::AssocKind::Type) {
+
         assoc_item
     } else {
         // This is saying that neither the trait nor
index eaf5971e4592f32044765236d47701f36a4e49a6..e84c91daf293f39fbbea8dfc04592a1384406264 100644 (file)
@@ -5,6 +5,7 @@
 use syntax::source_map::Span;
 use crate::ty::subst::GenericArg;
 use crate::ty::{self, Ty, TyCtxt};
+use crate::ty::query::Providers;
 
 impl<'cx, 'tcx> At<'cx, 'tcx> {
     /// Given a type `ty` of some value being dropped, computes a set
@@ -33,7 +34,7 @@ pub fn dropck_outlives(&self, ty: Ty<'tcx>) -> InferOk<'tcx, Vec<GenericArg<'tcx
         // Quick check: there are a number of cases that we know do not require
         // any destructor.
         let tcx = self.infcx.tcx;
-        if trivial_dropck_outlives(tcx, ty) {
+        if tcx.trivial_dropck_outlives(ty) {
             return InferOk {
                 value: vec![],
                 obligations: vec![],
@@ -207,15 +208,15 @@ pub fn trivial_dropck_outlives<'tcx>(tcx: TyCtxt<'tcx>, ty: Ty<'tcx>) -> bool {
         | ty::Error => true,
 
         // [T; N] and [T] have same properties as T.
-        ty::Array(ty, _) | ty::Slice(ty) => trivial_dropck_outlives(tcx, ty),
+        ty::Array(ty, _) | ty::Slice(ty) => tcx.trivial_dropck_outlives(ty),
 
         // (T1..Tn) and closures have same properties as T1..Tn --
         // check if *any* of those are trivial.
-        ty::Tuple(ref tys) => tys.iter().all(|t| trivial_dropck_outlives(tcx, t.expect_ty())),
+        ty::Tuple(ref tys) => tys.iter().all(|t| tcx.trivial_dropck_outlives(t.expect_ty())),
         ty::Closure(def_id, ref substs) => substs
             .as_closure()
             .upvar_tys(def_id, tcx)
-            .all(|t| trivial_dropck_outlives(tcx, t)),
+            .all(|t| tcx.trivial_dropck_outlives(t)),
 
         ty::Adt(def, _) => {
             if Some(def.did) == tcx.lang_items().manually_drop() {
@@ -243,3 +244,10 @@ pub fn trivial_dropck_outlives<'tcx>(tcx: TyCtxt<'tcx>, ty: Ty<'tcx>) -> bool {
         ty::UnnormalizedProjection(..) => bug!("only used with chalk-engine"),
     }
 }
+
+crate fn provide(p: &mut Providers<'_>) {
+    *p = Providers {
+        trivial_dropck_outlives,
+        ..*p
+    };
+}
index 17684df7e9b8eb79579abf8573c07f3c0a815de0..0d426cab9b751381f3106051b92e97ca269acdc2 100644 (file)
@@ -56,7 +56,7 @@ pub fn evaluate_obligation(
     // Helper function that canonicalizes and runs the query. If an
     // overflow results, we re-run it in the local context so we can
     // report a nice error.
-    fn evaluate_obligation_no_overflow(
+    crate fn evaluate_obligation_no_overflow(
         &self,
         obligation: &PredicateObligation<'tcx>,
     ) -> EvaluationResult {
index 9b956f3e55408a9d6bd4616eab4e317833829ff7..86a32d68fc09eca78d414b9f393028e39db53cae 100644 (file)
@@ -1,5 +1,4 @@
 use crate::infer::canonical::{Canonicalized, CanonicalizedQueryResponse};
-use crate::traits::query::dropck_outlives::trivial_dropck_outlives;
 use crate::traits::query::dropck_outlives::DropckOutlivesResult;
 use crate::traits::query::Fallible;
 use crate::ty::{ParamEnvAnd, Ty, TyCtxt};
@@ -22,7 +21,7 @@ fn try_fast_path(
         tcx: TyCtxt<'tcx>,
         key: &ParamEnvAnd<'tcx, Self>,
     ) -> Option<Self::QueryResponse> {
-        if trivial_dropck_outlives(tcx, key.value.dropped_ty) {
+        if tcx.trivial_dropck_outlives(key.value.dropped_ty) {
             Some(DropckOutlivesResult::default())
         } else {
             None
index 1fae2a2fe8dbffc783595f05aacbf58e5f2541a7..44d611ace77d017abf94ebce568eb950cac09fa3 100644 (file)
@@ -2761,8 +2761,9 @@ fn constituent_types_for_ty(&self, t: Ty<'tcx>) -> Vec<Ty<'tcx>> {
                 .collect(),
 
             ty::Generator(def_id, ref substs, _) => {
-                let witness = substs.witness(def_id, self.tcx());
+                let witness = substs.as_generator().witness(def_id, self.tcx());
                 substs
+                    .as_generator()
                     .upvar_tys(def_id, self.tcx())
                     .chain(iter::once(witness))
                     .collect()
@@ -2818,7 +2819,7 @@ fn collect_predicates_for_types(
                 // binder moved -\
                 let ty: ty::Binder<Ty<'tcx>> = ty::Binder::bind(ty); // <----/
 
-                self.infcx.in_snapshot(|_| {
+                self.infcx.commit_unconditionally(|_| {
                     let (skol_ty, _) = self.infcx
                         .replace_bound_vars_with_placeholders(&ty);
                     let Normalized {
@@ -2931,7 +2932,7 @@ fn confirm_candidate(
     }
 
     fn confirm_projection_candidate(&mut self, obligation: &TraitObligation<'tcx>) {
-        self.infcx.in_snapshot(|snapshot| {
+        self.infcx.commit_unconditionally(|snapshot| {
             let result =
                 self.match_projection_obligation_against_definition_bounds(
                     obligation,
@@ -3053,19 +3054,20 @@ fn vtable_auto_impl(
             nested,
         );
 
-        let trait_obligations: Vec<PredicateObligation<'_>> = self.infcx.in_snapshot(|_| {
-            let poly_trait_ref = obligation.predicate.to_poly_trait_ref();
-            let (trait_ref, _) = self.infcx
-                .replace_bound_vars_with_placeholders(&poly_trait_ref);
-            let cause = obligation.derived_cause(ImplDerivedObligation);
-            self.impl_or_trait_obligations(
-                cause,
-                obligation.recursion_depth + 1,
-                obligation.param_env,
-                trait_def_id,
-                &trait_ref.substs,
-            )
-        });
+        let trait_obligations: Vec<PredicateObligation<'_>> =
+            self.infcx.commit_unconditionally(|_| {
+                let poly_trait_ref = obligation.predicate.to_poly_trait_ref();
+                let (trait_ref, _) = self.infcx
+                    .replace_bound_vars_with_placeholders(&poly_trait_ref);
+                let cause = obligation.derived_cause(ImplDerivedObligation);
+                self.impl_or_trait_obligations(
+                    cause,
+                    obligation.recursion_depth + 1,
+                    obligation.param_env,
+                    trait_def_id,
+                    &trait_ref.substs,
+                )
+            });
 
         // Adds the predicates from the trait.  Note that this contains a `Self: Trait`
         // predicate as usual.  It won't have any effect since auto traits are coinductive.
@@ -3088,7 +3090,7 @@ fn confirm_impl_candidate(
 
         // First, create the substitutions by matching the impl again,
         // this time not in a probe.
-        self.infcx.in_snapshot(|snapshot| {
+        self.infcx.commit_unconditionally(|snapshot| {
             let substs = self.rematch_impl(impl_def_id, obligation, snapshot);
             debug!("confirm_impl_candidate: substs={:?}", substs);
             let cause = obligation.derived_cause(ImplDerivedObligation);
@@ -3252,7 +3254,7 @@ fn confirm_trait_alias_candidate(
             obligation, alias_def_id
         );
 
-        self.infcx.in_snapshot(|_| {
+        self.infcx.commit_unconditionally(|_| {
             let (predicate, _) = self.infcx()
                 .replace_bound_vars_with_placeholders(&obligation.predicate);
             let trait_ref = predicate.trait_ref;
@@ -3324,8 +3326,8 @@ fn confirm_generator_candidate(
         )?);
 
         Ok(VtableGeneratorData {
-            generator_def_id: generator_def_id,
-            substs: substs.clone(),
+            generator_def_id,
+            substs,
             nested: obligations,
         })
     }
@@ -3911,9 +3913,9 @@ fn generator_trait_ref_unnormalized(
         &mut self,
         obligation: &TraitObligation<'tcx>,
         closure_def_id: DefId,
-        substs: ty::GeneratorSubsts<'tcx>,
+        substs: SubstsRef<'tcx>,
     ) -> ty::PolyTraitRef<'tcx> {
-        let gen_sig = substs.poly_sig(closure_def_id, self.tcx());
+        let gen_sig = substs.as_generator().poly_sig(closure_def_id, self.tcx());
 
         // (1) Feels icky to skip the binder here, but OTOH we know
         // that the self-type is a generator type and hence is
index f0389bb037ac5b827aeb4e1e5bcc8e07f1d29d5a..c1c6eb850f591158a4a53fd63a053f7fc73b3261 100644 (file)
@@ -125,7 +125,7 @@ pub fn find_associated_item<'tcx>(
     let trait_def = tcx.trait_def(trait_def_id);
 
     let ancestors = trait_def.ancestors(tcx, impl_data.impl_def_id);
-    match ancestors.defs(tcx, item.ident, item.kind, trait_def_id).next() {
+    match ancestors.leaf_def(tcx, item.ident, item.kind) {
         Some(node_item) => {
             let substs = tcx.infer_ctxt().enter(|infcx| {
                 let param_env = param_env.with_reveal_all();
@@ -419,7 +419,7 @@ fn to_pretty_impl_header(tcx: TyCtxt<'_>, impl_def_id: DefId) -> Option<String>
 
     // The predicates will contain default bounds like `T: Sized`. We need to
     // remove these bounds, and add `T: ?Sized` to any untouched type parameters.
-    let predicates = &tcx.predicates_of(impl_def_id).predicates;
+    let predicates = tcx.predicates_of(impl_def_id).predicates;
     let mut pretty_predicates = Vec::with_capacity(
         predicates.len() + types_without_default_bounds.len());
 
index 43f558d64430e2f94e50c67510da7ae0c4db690f..c64d6748ea97d563e47b7de484320192875c4415 100644 (file)
@@ -7,7 +7,6 @@
 use crate::ty::{self, TyCtxt, TypeFoldable};
 use crate::ty::fast_reject::{self, SimplifiedType};
 use syntax::ast::Ident;
-use crate::util::captures::Captures;
 use crate::util::nodemap::{DefIdMap, FxHashMap};
 
 /// A per-trait graph of impls in specialization order. At the moment, this
@@ -419,6 +418,35 @@ pub fn items(&self, tcx: TyCtxt<'tcx>) -> ty::AssocItemsIterator<'tcx> {
         tcx.associated_items(self.def_id())
     }
 
+    /// Finds an associated item defined in this node.
+    ///
+    /// If this returns `None`, the item can potentially still be found in
+    /// parents of this node.
+    pub fn item(
+        &self,
+        tcx: TyCtxt<'tcx>,
+        trait_item_name: Ident,
+        trait_item_kind: ty::AssocKind,
+        trait_def_id: DefId,
+    ) -> Option<ty::AssocItem> {
+        use crate::ty::AssocKind::*;
+
+        tcx.associated_items(self.def_id())
+            .find(move |impl_item| match (trait_item_kind, impl_item.kind) {
+                | (Const, Const)
+                | (Method, Method)
+                | (Type, Type)
+                | (Type, OpaqueTy)  // assoc. types can be made opaque in impls
+                => tcx.hygienic_eq(impl_item.ident, trait_item_name, trait_def_id),
+
+                | (Const, _)
+                | (Method, _)
+                | (Type, _)
+                | (OpaqueTy, _)
+                => false,
+            })
+    }
+
     pub fn def_id(&self) -> DefId {
         match *self {
             Node::Impl(did) => did,
@@ -427,6 +455,7 @@ pub fn def_id(&self) -> DefId {
     }
 }
 
+#[derive(Copy, Clone)]
 pub struct Ancestors<'tcx> {
     trait_def_id: DefId,
     specialization_graph: &'tcx Graph,
@@ -465,32 +494,18 @@ pub fn map<U, F: FnOnce(T) -> U>(self, f: F) -> NodeItem<U> {
 }
 
 impl<'tcx> Ancestors<'tcx> {
-    /// Search the items from the given ancestors, returning each definition
-    /// with the given name and the given kind.
-    // FIXME(#35870): avoid closures being unexported due to `impl Trait`.
-    #[inline]
-    pub fn defs(
-        self,
+    /// Finds the bottom-most (i.e., most specialized) definition of an associated
+    /// item.
+    pub fn leaf_def(
+        mut self,
         tcx: TyCtxt<'tcx>,
         trait_item_name: Ident,
         trait_item_kind: ty::AssocKind,
-        trait_def_id: DefId,
-    ) -> impl Iterator<Item = NodeItem<ty::AssocItem>> + Captures<'tcx> + 'tcx {
-        self.flat_map(move |node| {
-            use crate::ty::AssocKind::*;
-            node.items(tcx).filter(move |impl_item| match (trait_item_kind, impl_item.kind) {
-                | (Const, Const)
-                | (Method, Method)
-                | (Type, Type)
-                | (Type, OpaqueTy)
-                => tcx.hygienic_eq(impl_item.ident, trait_item_name, trait_def_id),
-
-                | (Const, _)
-                | (Method, _)
-                | (Type, _)
-                | (OpaqueTy, _)
-                => false,
-            }).map(move |item| NodeItem { node: node, item: item })
+    ) -> Option<NodeItem<ty::AssocItem>> {
+        let trait_def_id = self.trait_def_id;
+        self.find_map(|node| {
+            node.item(tcx, trait_item_name, trait_item_kind, trait_def_id)
+                .map(|item| NodeItem { node, item })
         })
     }
 }
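For intuition, a stand-alone example (ordinary user code, unrelated to the compiler sources above) of what the bottom-most, most specialized definition means for `leaf_def`: when an impl provides its own body for an item, that body is the leaf; when it does not, the trait's default body is.

```rust
trait Greet {
    fn greet(&self) -> String {
        // Default body: the least specialized definition of `greet`.
        "hello".to_string()
    }
}

struct Foo;
struct Bar;

impl Greet for Foo {
    fn greet(&self) -> String {
        // Overrides the default: the leaf definition for `<Foo as Greet>::greet`.
        "hi from Foo".to_string()
    }
}

// No override: the trait default remains the leaf definition for `<Bar as Greet>::greet`.
impl Greet for Bar {}

fn main() {
    assert_eq!(Foo.greet(), "hi from Foo");
    assert_eq!(Bar.greet(), "hello");
}
```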
index 18ec2241b2df89605db8ceca1d7a78bf9ec49d49..d8b1effe09bf41de6cff5796c3e2856f2365991d 100644 (file)
@@ -4,7 +4,6 @@
 
 use crate::hir;
 use crate::hir::def_id::DefId;
-use crate::traits::specialize::specialization_graph::NodeItem;
 use crate::ty::{self, Ty, TyCtxt, ToPredicate, ToPolyTraitRef};
 use crate::ty::outlives::Component;
 use crate::ty::subst::{GenericArg, Subst, SubstsRef};
@@ -667,8 +666,8 @@ pub fn impl_is_default(self, node_item_def_id: DefId) -> bool {
         }
     }
 
-    pub fn impl_item_is_final(self, node_item: &NodeItem<hir::Defaultness>) -> bool {
-        node_item.item.is_final() && !self.impl_is_default(node_item.node.def_id())
+    pub fn impl_item_is_final(self, assoc_item: &ty::AssocItem) -> bool {
+        assoc_item.defaultness.is_final() && !self.impl_is_default(assoc_item.container.id())
     }
 }
 
index bd4913c88fd1f19686f0c87fd820a93285706bcb..03cb4775bd83f9958fb851128773e1b10261bced 100644 (file)
@@ -16,6 +16,7 @@
 use crate::ty::{self, Ty, TyCtxt};
 use crate::ty::subst::SubstsRef;
 use crate::mir::interpret::Allocation;
+use syntax_pos::Span;
 
 /// The shorthand encoding uses an enum's variant index `usize`
 /// and is offset by this value so it never matches a real variant.
@@ -92,16 +93,16 @@ pub fn encode_with_shorthand<E, T, M>(encoder: &mut E,
     Ok(())
 }
 
-pub fn encode_predicates<'tcx, E, C>(encoder: &mut E,
-                                     predicates: &ty::GenericPredicates<'tcx>,
-                                     cache: C)
-                                     -> Result<(), E::Error>
+pub fn encode_spanned_predicates<'tcx, E, C>(
+    encoder: &mut E,
+    predicates: &'tcx [(ty::Predicate<'tcx>, Span)],
+    cache: C,
+) -> Result<(), E::Error>
     where E: TyEncoder,
           C: for<'b> Fn(&'b mut E) -> &'b mut FxHashMap<ty::Predicate<'tcx>, usize>,
 {
-    predicates.parent.encode(encoder)?;
-    predicates.predicates.len().encode(encoder)?;
-    for (predicate, span) in &predicates.predicates {
+    predicates.len().encode(encoder)?;
+    for (predicate, span) in predicates {
         encode_with_shorthand(encoder, predicate, &cache)?;
         span.encode(encoder)?;
     }
@@ -182,13 +183,15 @@ pub fn decode_ty<D>(decoder: &mut D) -> Result<Ty<'tcx>, D::Error>
 }
 
 #[inline]
-pub fn decode_predicates<D>(decoder: &mut D) -> Result<ty::GenericPredicates<'tcx>, D::Error>
+pub fn decode_spanned_predicates<D>(
+    decoder: &mut D,
+) -> Result<&'tcx [(ty::Predicate<'tcx>, Span)], D::Error>
 where
     D: TyDecoder<'tcx>,
 {
-    Ok(ty::GenericPredicates {
-        parent: Decodable::decode(decoder)?,
-        predicates: (0..decoder.read_usize()?).map(|_| {
+    let tcx = decoder.tcx();
+    Ok(tcx.arena.alloc_from_iter(
+        (0..decoder.read_usize()?).map(|_| {
             // Handle shorthands first, if we have a usize > 0x80.
             let predicate = if decoder.positioned_at_shorthand() {
                 let pos = decoder.read_usize()?;
@@ -202,7 +205,7 @@ pub fn decode_predicates<D>(decoder: &mut D) -> Result<ty::GenericPredicates<'tc
             Ok((predicate, Decodable::decode(decoder)?))
         })
         .collect::<Result<Vec<_>, _>>()?,
-    })
+    ))
 }
 
 #[inline]
@@ -339,6 +342,8 @@ mod __ty_decoder_impl {
             use $crate::ty::subst::SubstsRef;
             use $crate::hir::def_id::{CrateNum};
 
+            use syntax_pos::Span;
+
             use super::$DecoderName;
 
             impl<$($typaram ),*> Decoder for $DecoderName<$($typaram),*> {
@@ -393,11 +398,11 @@ fn specialized_decode(&mut self) -> Result<ty::Ty<'tcx>, Self::Error> {
                 }
             }
 
-            impl<$($typaram),*> SpecializedDecoder<ty::GenericPredicates<'tcx>>
+            impl<$($typaram),*> SpecializedDecoder<&'tcx [(ty::Predicate<'tcx>, Span)]>
             for $DecoderName<$($typaram),*> {
                 fn specialized_decode(&mut self)
-                                      -> Result<ty::GenericPredicates<'tcx>, Self::Error> {
-                    decode_predicates(self)
+                                      -> Result<&'tcx [(ty::Predicate<'tcx>, Span)], Self::Error> {
+                    decode_spanned_predicates(self)
                 }
             }
 
index 5aad6c1dc18322825a760050565cb4701c5e0355..665d4c2d0696ababaf3fe164ee24ce21d534a088 100644 (file)
@@ -29,7 +29,7 @@
 use crate::traits::{Clause, Clauses, GoalKind, Goal, Goals};
 use crate::ty::{self, DefIdTree, Ty, TypeAndMut};
 use crate::ty::{TyS, TyKind, List};
-use crate::ty::{AdtKind, AdtDef, GeneratorSubsts, Region, Const};
+use crate::ty::{AdtKind, AdtDef, Region, Const};
 use crate::ty::{PolyFnSig, InferTy, ParamTy, ProjectionTy, ExistentialPredicate, Predicate};
 use crate::ty::RegionKind;
 use crate::ty::{TyVar, TyVid, IntVar, IntVid, FloatVar, FloatVid, ConstVid};
@@ -43,7 +43,7 @@
 use crate::ty::{BoundVar, BindingMode};
 use crate::ty::CanonicalPolyFnSig;
 use crate::util::common::ErrorReported;
-use crate::util::nodemap::{DefIdMap, DefIdSet, ItemLocalMap, ItemLocalSet};
+use crate::util::nodemap::{DefIdMap, DefIdSet, ItemLocalMap, ItemLocalSet, NodeMap};
 use crate::util::nodemap::{FxHashMap, FxHashSet};
 use crate::util::profiling::SelfProfilerRef;
 
@@ -148,10 +148,6 @@ fn intern_ty(&self,
     }
 }
 
-pub struct Common<'tcx> {
-    pub empty_predicates: ty::GenericPredicates<'tcx>,
-}
-
 pub struct CommonTypes<'tcx> {
     pub unit: Ty<'tcx>,
     pub bool: Ty<'tcx>,
@@ -1039,9 +1035,6 @@ pub struct GlobalCtxt<'tcx> {
 
     pub prof: SelfProfilerRef,
 
-    /// Common objects.
-    pub common: Common<'tcx>,
-
     /// Common types, pre-interned for your convenience.
     pub types: CommonTypes<'tcx>,
 
@@ -1051,6 +1044,9 @@ pub struct GlobalCtxt<'tcx> {
     /// Common consts, pre-interned for your convenience.
     pub consts: CommonConsts<'tcx>,
 
+    /// Resolutions of `extern crate` items produced by resolver.
+    extern_crate_map: NodeMap<CrateNum>,
+
     /// Map indicating what traits are in scope for places where this
     /// is relevant; generated by resolve.
     trait_map: FxHashMap<DefIndex,
@@ -1210,12 +1206,6 @@ pub fn create_global_ctxt(
             s.fatal(&err);
         });
         let interners = CtxtInterners::new(&arenas.interner);
-        let common = Common {
-            empty_predicates: ty::GenericPredicates {
-                parent: None,
-                predicates: vec![],
-            },
-        };
         let common_types = CommonTypes::new(&interners);
         let common_lifetimes = CommonLifetimes::new(&interners);
         let common_consts = CommonConsts::new(&interners, &common_types);
@@ -1270,10 +1260,10 @@ pub fn create_global_ctxt(
             interners,
             dep_graph,
             prof: s.prof.clone(),
-            common,
             types: common_types,
             lifetimes: common_lifetimes,
             consts: common_consts,
+            extern_crate_map: resolutions.extern_crate_map,
             trait_map,
             export_map: resolutions.export_map.into_iter().map(|(k, v)| {
                 let exports: Vec<_> = v.into_iter().map(|e| {
@@ -2510,7 +2500,7 @@ pub fn mk_closure(self, closure_id: DefId, closure_substs: SubstsRef<'tcx>)
     #[inline]
     pub fn mk_generator(self,
                         id: DefId,
-                        generator_substs: GeneratorSubsts<'tcx>,
+                        generator_substs: SubstsRef<'tcx>,
                         movability: hir::GeneratorMovability)
                         -> Ty<'tcx> {
         self.mk_ty(Generator(id, generator_substs, movability))
@@ -2868,8 +2858,29 @@ fn intern_with<I: Iterator<Item=Self>, F: FnOnce(&[T]) -> R>(iter: I, f: F) -> S
 
 impl<T, R, E> InternIteratorElement<T, R> for Result<T, E> {
     type Output = Result<R, E>;
-    fn intern_with<I: Iterator<Item=Self>, F: FnOnce(&[T]) -> R>(iter: I, f: F) -> Self::Output {
-        Ok(f(&iter.collect::<Result<SmallVec<[_; 8]>, _>>()?))
+    fn intern_with<I: Iterator<Item=Self>, F: FnOnce(&[T]) -> R>(mut iter: I, f: F)
+            -> Self::Output {
+        // This code is hot enough that it's worth specializing for the most
+        // common list lengths, to avoid the overhead of `SmallVec` creation.
+        // The match arms are in order of frequency. The 1, 2, and 0 cases are
+        // typically hit in ~95% of cases. We assume that if the upper and
+        // lower bounds from `size_hint` agree, they are correct.
+        Ok(match iter.size_hint() {
+            (1, Some(1)) => {
+                f(&[iter.next().unwrap()?])
+            }
+            (2, Some(2)) => {
+                let t0 = iter.next().unwrap()?;
+                let t1 = iter.next().unwrap()?;
+                f(&[t0, t1])
+            }
+            (0, Some(0)) => {
+                f(&[])
+            }
+            _ => {
+                f(&iter.collect::<Result<SmallVec<[_; 8]>, _>>()?)
+            }
+        })
     }
 }
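The same fast-path idea as a hedged, stand-alone sketch (the function is invented for illustration, not compiler code): when `size_hint()` reports matching lower and upper bounds, the 0-, 1-, and 2-element cases can be assembled on the stack instead of collecting into a buffer first.

```rust
fn try_collect_small<T, E>(
    mut iter: impl Iterator<Item = Result<T, E>>,
    f: impl FnOnce(&[T]) -> usize,
) -> Result<usize, E> {
    Ok(match iter.size_hint() {
        // Exact small sizes: build the slice on the stack.
        (0, Some(0)) => f(&[]),
        (1, Some(1)) => f(&[iter.next().unwrap()?]),
        (2, Some(2)) => {
            let a = iter.next().unwrap()?;
            let b = iter.next().unwrap()?;
            f(&[a, b])
        }
        // Anything else: fall back to collecting into a Vec first.
        _ => {
            let buf: Vec<T> = iter.collect::<Result<_, _>>()?;
            f(buf.as_slice())
        }
    })
}

fn main() {
    let items: Vec<Result<u32, ()>> = vec![Ok(1), Ok(2)];
    // `vec::IntoIter` reports (2, Some(2)), so this takes the stack-allocated path.
    assert_eq!(try_collect_small(items.into_iter(), |s| s.len()), Ok(2));
}
```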
 
@@ -2930,7 +2941,7 @@ pub fn provide(providers: &mut ty::query::Providers<'_>) {
     };
     providers.extern_mod_stmt_cnum = |tcx, id| {
         let id = tcx.hir().as_local_node_id(id).unwrap();
-        tcx.cstore.extern_mod_stmt_cnum_untracked(id)
+        tcx.extern_crate_map.get(&id).cloned()
     };
     providers.all_crate_nums = |tcx, cnum| {
         assert_eq!(cnum, LOCAL_CRATE);
index b513ef5a96670213afccf3b7a18f388526b0da18..d3a3f51cfa47b7a8abb7aa106cbec3556d0b91bd 100644 (file)
@@ -94,7 +94,7 @@ fn add_kind(&mut self, kind: &ty::TyKind<'_>) {
             &ty::Generator(_, ref substs, _) => {
                 self.add_flags(TypeFlags::HAS_TY_CLOSURE);
                 self.add_flags(TypeFlags::HAS_FREE_LOCAL_NAMES);
-                self.add_substs(&substs.substs);
+                self.add_substs(substs);
             }
 
             &ty::GeneratorWitness(ref ts) => {
@@ -250,7 +250,9 @@ fn add_const(&mut self, c: &ty::Const<'_>) {
             ConstValue::Placeholder(_) => {
                 self.add_flags(TypeFlags::HAS_FREE_REGIONS | TypeFlags::HAS_CT_PLACEHOLDER);
             }
-            _ => {},
+            ConstValue::Scalar(_) => { }
+            ConstValue::Slice { data: _, start: _, end: _ } => { }
+            ConstValue::ByRef { alloc: _, offset: _ } => { }
         }
     }
 
index f6a5092d30d407b51aa43770cfc99a757dbde4c8..5192075c26e9871068aedc5d7f17fda753228b73 100644 (file)
@@ -911,13 +911,15 @@ fn visit_region(&mut self, r: ty::Region<'tcx>) -> bool {
     }
 
     fn visit_const(&mut self, ct: &'tcx ty::Const<'tcx>) -> bool {
-        if let ty::Const {
-            val: ConstValue::Infer(ty::InferConst::Canonical(debruijn, _)),
-            ..
-        } = *ct {
-            debruijn >= self.outer_index
-        } else {
-            false
+        // we don't have a `visit_infer_const` callback, so we have to
+        // hook in here to catch this case (annoying...), but
+        // otherwise we do want to remember to visit the rest of the
+        // const, as it has types/regions embedded in a lot of other
+        // places.
+        match ct.val {
+            ConstValue::Infer(ty::InferConst::Canonical(debruijn, _))
+                if debruijn >= self.outer_index => true,
+            _ => ct.super_visit_with(self),
         }
     }
 }
index 34f806b15c0c6c32defa00f6855279fbab99eb36..5139c8085a583d7021a0b13fede6e04cdc590e34 100644 (file)
@@ -1,3 +1,4 @@
+use crate::hir::CodegenFnAttrFlags;
 use crate::hir::Unsafety;
 use crate::hir::def::Namespace;
 use crate::hir::def_id::DefId;
@@ -25,6 +26,14 @@ pub enum InstanceDef<'tcx> {
     /// `<T as Trait>::method` where `method` receives unsizeable `self: Self`.
     VtableShim(DefId),
 
+    /// `fn()` pointer where the function itself cannot be turned into a pointer.
+    ///
+    /// One example in the compiler today is functions annotated with `#[track_caller]`, which
+    /// must have their implicit caller location argument populated for a call. Because this is a
+    /// required part of the function's ABI but can't be tracked as a property of the function
+    /// pointer, we create a single "caller location" at the site where the function is reified.
+    ReifyShim(DefId),
+
     /// `<fn() as FnTrait>::call_*`
     /// `DefId` is `FnTrait::call_*`
     FnPtrShim(DefId, Ty<'tcx>),
@@ -71,7 +80,7 @@ fn fn_sig_noadjust(&self, tcx: TyCtxt<'tcx>) -> PolyFnSig<'tcx> {
                 ))
             }
             ty::Generator(def_id, substs, _) => {
-                let sig = substs.poly_sig(def_id, tcx);
+                let sig = substs.as_generator().poly_sig(def_id, tcx);
 
                 let env_region = ty::ReLateBound(ty::INNERMOST, ty::BrEnv);
                 let env_ty = tcx.mk_mut_ref(tcx.mk_region(env_region), ty);
@@ -123,6 +132,7 @@ pub fn def_id(&self) -> DefId {
         match *self {
             InstanceDef::Item(def_id) |
             InstanceDef::VtableShim(def_id) |
+            InstanceDef::ReifyShim(def_id) |
             InstanceDef::FnPtrShim(def_id, _) |
             InstanceDef::Virtual(def_id, _) |
             InstanceDef::Intrinsic(def_id, ) |
@@ -178,6 +188,9 @@ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
             InstanceDef::VtableShim(_) => {
                 write!(f, " - shim(vtable)")
             }
+            InstanceDef::ReifyShim(_) => {
+                write!(f, " - shim(reify)")
+            }
             InstanceDef::Intrinsic(_) => {
                 write!(f, " - intrinsic")
             }
@@ -290,6 +303,30 @@ pub fn resolve(
         result
     }
 
+    pub fn resolve_for_fn_ptr(
+        tcx: TyCtxt<'tcx>,
+        param_env: ty::ParamEnv<'tcx>,
+        def_id: DefId,
+        substs: SubstsRef<'tcx>,
+    ) -> Option<Instance<'tcx>> {
+        debug!("resolve_for_fn_ptr(def_id={:?}, substs={:?})", def_id, substs);
+        Instance::resolve(tcx, param_env, def_id, substs).map(|resolved| {
+            let has_track_caller = |def| tcx.codegen_fn_attrs(def).flags
+                .contains(CodegenFnAttrFlags::TRACK_CALLER);
+
+            match resolved.def {
+                InstanceDef::Item(def_id) if has_track_caller(def_id) => {
+                    debug!(" => fn pointer created for function with #[track_caller]");
+                    Instance {
+                        def: InstanceDef::ReifyShim(def_id),
+                        substs,
+                    }
+                },
+                _ => resolved,
+            }
+        })
+    }
+
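An illustrative sketch of why `ReifyShim` and `resolve_for_fn_ptr` exist, written against the later-stabilized surface of `#[track_caller]` (at the time of this change the attribute was still gated behind `#![feature(track_caller)]`): coercing a `#[track_caller]` function to a plain `fn` pointer needs a shim that supplies the implicit caller-location argument.

```rust
#[track_caller]
fn whereami() -> &'static std::panic::Location<'static> {
    std::panic::Location::caller()
}

fn main() {
    // Direct call: the implicit caller-location argument is passed as part of the ABI.
    println!("direct call from {}", whereami());

    // Reifying to a plain `fn` pointer: the pointer type has no slot for the implicit
    // location, so the compiler routes the coercion through `InstanceDef::ReifyShim`,
    // which fills the location in at the reification site.
    let f: fn() -> &'static std::panic::Location<'static> = whereami;
    println!("through fn pointer from {}", f());
}
```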
     pub fn resolve_for_vtable(
         tcx: TyCtxt<'tcx>,
         param_env: ty::ParamEnv<'tcx>,
@@ -395,7 +432,7 @@ fn resolve_associated_item<'tcx>(
         traits::VtableGenerator(generator_data) => {
             Some(Instance {
                 def: ty::InstanceDef::Item(generator_data.generator_def_id),
-                substs: generator_data.substs.substs
+                substs: generator_data.substs
             })
         }
         traits::VtableClosure(closure_data) => {
index 6e01e1bf26a53cd2363c61a91464b36888fce6d6..aed9e87a168ce8cdd0dfc81dc192d06885b3d847 100644 (file)
@@ -1,5 +1,5 @@
 use crate::session::{self, DataTypeKind};
-use crate::ty::{self, Ty, TyCtxt, TypeFoldable, ReprOptions};
+use crate::ty::{self, Ty, TyCtxt, TypeFoldable, ReprOptions, subst::SubstsRef};
 
 use syntax::ast::{self, Ident, IntTy, UintTy};
 use syntax::attr;
@@ -15,7 +15,6 @@
 use crate::hir;
 use crate::ich::StableHashingContext;
 use crate::mir::{GeneratorLayout, GeneratorSavedLocal};
-use crate::ty::GeneratorSubsts;
 use crate::ty::subst::Subst;
 use rustc_index::bit_set::BitSet;
 use rustc_data_structures::stable_hasher::{HashStable, StableHasher};
@@ -671,7 +670,7 @@ fn layout_raw_uncached(&self, ty: Ty<'tcx>) -> Result<&'tcx LayoutDetails, Layou
                 tcx.intern_layout(unit)
             }
 
-            ty::Generator(def_id, substs, _) => self.generator_layout(ty, def_id, &substs)?,
+            ty::Generator(def_id, substs, _) => self.generator_layout(ty, def_id, substs)?,
 
             ty::Closure(def_id, ref substs) => {
                 let tys = substs.as_closure().upvar_tys(def_id, tcx);
@@ -825,10 +824,14 @@ fn layout_raw_uncached(&self, ty: Ty<'tcx>) -> Result<&'tcx LayoutDetails, Layou
                     });
                     (present_variants.next(), present_variants.next())
                 };
-                if present_first.is_none() {
+                let present_first = match present_first {
+                    present_first @ Some(_) => present_first,
                     // Uninhabited because it has no variants, or only absent ones.
-                    return tcx.layout_raw(param_env.and(tcx.types.never));
-                }
+                    None if def.is_enum() => return tcx.layout_raw(param_env.and(tcx.types.never)),
+                    // If it's a struct, still compute a layout so that we can compute the
+                    // field offsets.
+                    None => Some(VariantIdx::new(0)),
+                };
 
                 let is_struct = !def.is_enum() ||
                     // Only one variant is present.
@@ -1406,12 +1409,12 @@ fn generator_layout(
         &self,
         ty: Ty<'tcx>,
         def_id: hir::def_id::DefId,
-        substs: &GeneratorSubsts<'tcx>,
+        substs: SubstsRef<'tcx>,
     ) -> Result<&'tcx LayoutDetails, LayoutError<'tcx>> {
         use SavedLocalEligibility::*;
         let tcx = self.tcx;
 
-        let subst_field = |ty: Ty<'tcx>| { ty.subst(tcx, substs.substs) };
+        let subst_field = |ty: Ty<'tcx>| { ty.subst(tcx, substs) };
 
         let info = tcx.generator_layout(def_id);
         let (ineligible_locals, assignments) = self.generator_saved_local_eligibility(&info);
@@ -1419,9 +1422,9 @@ fn generator_layout(
         // Build a prefix layout, including "promoting" all ineligible
         // locals as part of the prefix. We compute the layout of all of
         // these fields at once to get optimal packing.
-        let discr_index = substs.prefix_tys(def_id, tcx).count();
+        let discr_index = substs.as_generator().prefix_tys(def_id, tcx).count();
         // FIXME(eddyb) set the correct validity range for the discriminant.
-        let discr_layout = self.layout_of(substs.discr_ty(tcx))?;
+        let discr_layout = self.layout_of(substs.as_generator().discr_ty(tcx))?;
         let discr = match &discr_layout.abi {
             Abi::Scalar(s) => s.clone(),
             _ => bug!(),
@@ -1430,7 +1433,7 @@ fn generator_layout(
             .map(|local| subst_field(info.field_tys[local]))
             .map(|ty| tcx.mk_maybe_uninit(ty))
             .map(|ty| self.layout_of(ty));
-        let prefix_layouts = substs.prefix_tys(def_id, tcx)
+        let prefix_layouts = substs.as_generator().prefix_tys(def_id, tcx)
             .map(|ty| self.layout_of(ty))
             .chain(iter::once(Ok(discr_layout)))
             .chain(promoted_layouts)
@@ -2153,7 +2156,7 @@ fn field(this: TyLayout<'tcx>, cx: &C, i: usize) -> C::TyLayout {
             ty::Generator(def_id, ref substs, _) => {
                 match this.variants {
                     Variants::Single { index } => {
-                        substs.state_tys(def_id, tcx)
+                        substs.as_generator().state_tys(def_id, tcx)
                             .nth(index.as_usize()).unwrap()
                             .nth(i).unwrap()
                     }
@@ -2161,7 +2164,7 @@ fn field(this: TyLayout<'tcx>, cx: &C, i: usize) -> C::TyLayout {
                         if i == discr_index {
                             return discr_layout(discr);
                         }
-                        substs.prefix_tys(def_id, tcx).nth(i).unwrap()
+                        substs.as_generator().prefix_tys(def_id, tcx).nth(i).unwrap()
                     }
                 }
             }
index cfd859c33c2ef179c6242b0da1ed6d6e8cabf418..d46ab3769ad55063c88aff33da5e11af552de0e4 100644 (file)
@@ -28,7 +28,7 @@
 use crate::ty::util::{IntTypeExt, Discr};
 use crate::ty::walk::TypeWalker;
 use crate::util::captures::Captures;
-use crate::util::nodemap::{NodeSet, DefIdMap, FxHashMap};
+use crate::util::nodemap::{NodeMap, NodeSet, DefIdMap, FxHashMap};
 use arena::SyncDroplessArena;
 use crate::session::DataTypeKind;
 
@@ -45,7 +45,7 @@
 use std::ops::Range;
 use syntax::ast::{self, Name, Ident, NodeId};
 use syntax::attr;
-use syntax::ext::hygiene::ExpnId;
+use syntax_expand::hygiene::ExpnId;
 use syntax::symbol::{kw, sym, Symbol, InternedString};
 use syntax_pos::Span;
 
 
 #[derive(Clone)]
 pub struct Resolutions {
+    pub extern_crate_map: NodeMap<CrateNum>,
     pub trait_map: TraitMap,
     pub maybe_unused_trait_imports: NodeSet,
     pub maybe_unused_extern_crates: Vec<(NodeId, Span)>,
@@ -700,6 +701,13 @@ impl<T> Deref for List<T> {
     type Target = [T];
     #[inline(always)]
     fn deref(&self) -> &[T] {
+        self.as_ref()
+    }
+}
+
+impl<T> AsRef<[T]> for List<T> {
+    #[inline(always)]
+    fn as_ref(&self) -> &[T] {
         unsafe {
             slice::from_raw_parts(self.data.as_ptr(), self.len)
         }
@@ -1010,15 +1018,12 @@ pub fn const_param(&'tcx self, param: &ParamConst, tcx: TyCtxt<'tcx>) -> &Generi
 }
 
 /// Bounds on generics.
-#[derive(Clone, Default, Debug, HashStable)]
+#[derive(Copy, Clone, Default, Debug, RustcEncodable, RustcDecodable, HashStable)]
 pub struct GenericPredicates<'tcx> {
     pub parent: Option<DefId>,
-    pub predicates: Vec<(Predicate<'tcx>, Span)>,
+    pub predicates: &'tcx [(Predicate<'tcx>, Span)],
 }
 
-impl<'tcx> rustc_serialize::UseSpecializedEncodable for GenericPredicates<'tcx> {}
-impl<'tcx> rustc_serialize::UseSpecializedDecodable for GenericPredicates<'tcx> {}
-
 impl<'tcx> GenericPredicates<'tcx> {
     pub fn instantiate(
         &self,
@@ -2313,7 +2318,7 @@ pub fn non_enum_variant(&self) -> &VariantDef {
     }
 
     #[inline]
-    pub fn predicates(&self, tcx: TyCtxt<'tcx>) -> &'tcx GenericPredicates<'tcx> {
+    pub fn predicates(&self, tcx: TyCtxt<'tcx>) -> GenericPredicates<'tcx> {
         tcx.predicates_of(self.did)
     }
 
@@ -2553,7 +2558,7 @@ fn sized_constraint_for_ty(&self, tcx: TyCtxt<'tcx>, ty: Ty<'tcx>) -> Vec<Ty<'tc
                     def_id: sized_trait,
                     substs: tcx.mk_substs_trait(ty, &[])
                 }).to_predicate();
-                let predicates = &tcx.predicates_of(self.did).predicates;
+                let predicates = tcx.predicates_of(self.did).predicates;
                 if predicates.iter().any(|(p, _)| *p == sized_predicate) {
                     vec![]
                 } else {
@@ -3026,6 +3031,7 @@ pub fn instance_mir(self, instance: ty::InstanceDef<'tcx>) -> &'tcx Body<'tcx> {
                 self.optimized_mir(did)
             }
             ty::InstanceDef::VtableShim(..) |
+            ty::InstanceDef::ReifyShim(..) |
             ty::InstanceDef::Intrinsic(..) |
             ty::InstanceDef::FnPtrShim(..) |
             ty::InstanceDef::Virtual(..) |
@@ -3394,6 +3400,7 @@ pub fn provide(providers: &mut ty::query::Providers<'_>) {
     layout::provide(providers);
     util::provide(providers);
     constness::provide(providers);
+    crate::traits::query::dropck_outlives::provide(providers);
     *providers = ty::query::Providers {
         asyncness,
         associated_item,
index 3ea767d5115984e067baf243e82c2d2bb95abb25..80e77cdfad0b6ec97f5eaa990f2e3e88bb97eaf1 100644 (file)
@@ -69,7 +69,7 @@ pub fn push_outlives_components(&self, ty0: Ty<'tcx>,
 
             ty::Generator(def_id, ref substs, _) => {
                 // Same as the closure case
-                for upvar_ty in substs.upvar_tys(def_id, *self) {
+                for upvar_ty in substs.as_generator().upvar_tys(def_id, *self) {
                     self.compute_components(upvar_ty, out);
                 }
 
index 21c018d9ee6de5a3e2719f52ceacd8221be5beda..df39d0ccc9eed08eab509f2504669679056ad7c0 100644 (file)
@@ -8,7 +8,7 @@
 use rustc::hir::def_id::DefId;
 use rustc::mir::interpret::ConstValue;
 use rustc::ty::subst::SubstsRef;
-use rustc::ty::{self, Const, GeneratorSubsts, Instance, Ty, TyCtxt};
+use rustc::ty::{self, Const, Instance, Ty, TyCtxt};
 use rustc::{bug, hir};
 use std::fmt::Write;
 use std::iter;
@@ -154,7 +154,7 @@ pub fn push_type_name(&self, t: Ty<'tcx>, output: &mut String, debug: bool) {
                     self.push_type_name(sig.output(), output, debug);
                 }
             }
-            ty::Generator(def_id, GeneratorSubsts { substs }, _)
+            ty::Generator(def_id, substs, _)
             | ty::Closure(def_id, substs) => {
                 self.push_def_path(def_id, output);
                 let generics = self.tcx.generics_of(self.tcx.closure_base_def_id(def_id));
index ad4be788dae4e49cdde02124c17b5381d8b9c7a0..363109a0582df603c46fcea2c271505dabd1cfe6 100644 (file)
@@ -605,8 +605,8 @@ fn pretty_print_type(
             }
             ty::Str => p!(write("str")),
             ty::Generator(did, substs, movability) => {
-                let upvar_tys = substs.upvar_tys(did, self.tcx());
-                let witness = substs.witness(did, self.tcx());
+                let upvar_tys = substs.as_generator().upvar_tys(did, self.tcx());
+                let witness = substs.as_generator().witness(did, self.tcx());
                 if movability == hir::GeneratorMovability::Movable {
                     p!(write("[generator"));
                 } else {
@@ -689,7 +689,7 @@ fn pretty_print_type(
                 if self.tcx().sess.verbose() {
                     p!(write(
                         " closure_kind_ty={:?} closure_sig_ty={:?}",
-                        substs.as_closure().kind(did, self.tcx()),
+                        substs.as_closure().kind_ty(did, self.tcx()),
                         substs.as_closure().sig_ty(did, self.tcx())
                     ));
                 }
@@ -698,7 +698,9 @@ fn pretty_print_type(
             },
             ty::Array(ty, sz) => {
                 p!(write("["), print(ty), write("; "));
-                if let ConstValue::Unevaluated(..) = sz.val {
+                if self.tcx().sess.verbose() {
+                    p!(write("{:?}", sz));
+                } else if let ConstValue::Unevaluated(..) = sz.val {
                     // Do not try to evaluate unevaluated constants. If we are const evaluating an
                     // array length anon const, rustc will (with debug assertions) print the
                     // constant's path, which will end up here again.
@@ -855,126 +857,127 @@ fn pretty_print_const(
     ) -> Result<Self::Const, Self::Error> {
         define_scoped_cx!(self);
 
-        let u8 = self.tcx().types.u8;
-        if let ty::FnDef(did, substs) = ct.ty.kind {
-            p!(print_value_path(did, substs));
+        if self.tcx().sess.verbose() {
+            p!(write("Const({:?}: {:?})", ct.val, ct.ty));
             return Ok(self);
         }
-        if let ConstValue::Unevaluated(did, substs) = ct.val {
-            match self.tcx().def_kind(did) {
-                | Some(DefKind::Static)
-                | Some(DefKind::Const)
-                | Some(DefKind::AssocConst) => p!(print_value_path(did, substs)),
-                _ => if did.is_local() {
-                    let span = self.tcx().def_span(did);
-                    if let Ok(snip) = self.tcx().sess.source_map().span_to_snippet(span) {
-                        p!(write("{}", snip))
+
+        let u8 = self.tcx().types.u8;
+
+        match (ct.val, &ct.ty.kind) {
+            (_, ty::FnDef(did, substs)) => p!(print_value_path(*did, substs)),
+            (ConstValue::Unevaluated(did, substs), _) => {
+                match self.tcx().def_kind(did) {
+                    | Some(DefKind::Static)
+                    | Some(DefKind::Const)
+                    | Some(DefKind::AssocConst) => p!(print_value_path(did, substs)),
+                    _ => if did.is_local() {
+                        let span = self.tcx().def_span(did);
+                        if let Ok(snip) = self.tcx().sess.source_map().span_to_snippet(span) {
+                            p!(write("{}", snip))
+                        } else {
+                            p!(write("_: "), print(ct.ty))
+                        }
                     } else {
                         p!(write("_: "), print(ct.ty))
-                    }
+                    },
+                }
+            },
+            (ConstValue::Infer(..), _) => p!(write("_: "), print(ct.ty)),
+            (ConstValue::Param(ParamConst { name, .. }), _) => p!(write("{}", name)),
+            (ConstValue::Scalar(Scalar::Raw { data, .. }), ty::Bool) =>
+                p!(write("{}", if data == 0 { "false" } else { "true" })),
+            (ConstValue::Scalar(Scalar::Raw { data, .. }), ty::Float(ast::FloatTy::F32)) =>
+                p!(write("{}f32", Single::from_bits(data))),
+            (ConstValue::Scalar(Scalar::Raw { data, .. }), ty::Float(ast::FloatTy::F64)) =>
+                p!(write("{}f64", Double::from_bits(data))),
+            (ConstValue::Scalar(Scalar::Raw { data, .. }), ty::Uint(ui)) => {
+                let bit_size = Integer::from_attr(&self.tcx(), UnsignedInt(*ui)).size();
+                let max = truncate(u128::max_value(), bit_size);
+
+                if data == max {
+                    p!(write("std::{}::MAX", ui))
                 } else {
-                    p!(write("_: "), print(ct.ty))
-                },
-            }
-            return Ok(self);
-        }
-        if let ConstValue::Infer(..) = ct.val {
-            p!(write("_: "), print(ct.ty));
-            return Ok(self);
-        }
-        if let ConstValue::Param(ParamConst { name, .. }) = ct.val {
-            p!(write("{}", name));
-            return Ok(self);
-        }
-        if let ConstValue::Scalar(Scalar::Raw { data, .. }) = ct.val {
-            match ct.ty.kind {
-                ty::Bool => {
-                    p!(write("{}", if data == 0 { "false" } else { "true" }));
-                    return Ok(self);
-                },
-                ty::Float(ast::FloatTy::F32) => {
-                    p!(write("{}f32", Single::from_bits(data)));
-                    return Ok(self);
-                },
-                ty::Float(ast::FloatTy::F64) => {
-                    p!(write("{}f64", Double::from_bits(data)));
-                    return Ok(self);
-                },
-                ty::Uint(ui) => {
-                    let bit_size = Integer::from_attr(&self.tcx(), UnsignedInt(ui)).size();
-                    let max = truncate(u128::max_value(), bit_size);
+                    p!(write("{}{}", data, ui))
+                };
+            },
+            (ConstValue::Scalar(Scalar::Raw { data, .. }), ty::Int(i)) => {
+                let bit_size = Integer::from_attr(&self.tcx(), SignedInt(*i))
+                    .size().bits() as u128;
+                let min = 1u128 << (bit_size - 1);
+                let max = min - 1;
+
+                let ty = self.tcx().lift(&ct.ty).unwrap();
+                let size = self.tcx().layout_of(ty::ParamEnv::empty().and(ty))
+                    .unwrap()
+                    .size;
+                match data {
+                    d if d == min => p!(write("std::{}::MIN", i)),
+                    d if d == max => p!(write("std::{}::MAX", i)),
+                    _ => p!(write("{}{}", sign_extend(data, size) as i128, i))
+                }
+            },
+            (ConstValue::Scalar(Scalar::Raw { data, .. }), ty::Char) =>
+                p!(write("{:?}", ::std::char::from_u32(data as u32).unwrap())),
+            (ConstValue::Scalar(_), ty::RawPtr(_)) => p!(write("{{pointer}}")),
+            (ConstValue::Scalar(Scalar::Ptr(ptr)), ty::FnPtr(_)) => {
+                let instance = {
+                    let alloc_map = self.tcx().alloc_map.lock();
+                    alloc_map.unwrap_fn(ptr.alloc_id)
+                };
+                p!(print_value_path(instance.def_id(), instance.substs));
+            },
+            _ => {
+                let printed = if let ty::Ref(_, ref_ty, _) = ct.ty.kind {
+                    let byte_str = match (ct.val, &ref_ty.kind) {
+                        (ConstValue::Scalar(Scalar::Ptr(ptr)), ty::Array(t, n)) if *t == u8 => {
+                            let n = n.eval_usize(self.tcx(), ty::ParamEnv::empty());
+                            Some(self.tcx()
+                                .alloc_map.lock()
+                                .unwrap_memory(ptr.alloc_id)
+                                .get_bytes(&self.tcx(), ptr, Size::from_bytes(n)).unwrap())
+                        },
+                        (ConstValue::Slice { data, start, end }, ty::Slice(t)) if *t == u8 => {
+                            // The `inspect` here is okay since we checked the bounds, and there are
+                            // no relocations (we have an active slice reference here). We don't use
+                            // this result to affect interpreter execution.
+                            Some(data.inspect_with_undef_and_ptr_outside_interpreter(start..end))
+                        },
+                        _ => None,
+                    };
 
-                    if data == max {
-                        p!(write("std::{}::MAX", ui))
+                    if let Some(byte_str) = byte_str {
+                        p!(write("b\""));
+                        for &c in byte_str {
+                            for e in std::ascii::escape_default(c) {
+                                self.write_char(e as char)?;
+                            }
+                        }
+                        p!(write("\""));
+                        true
+                    } else if let (ConstValue::Slice { data, start, end }, ty::Str) =
+                        (ct.val, &ref_ty.kind)
+                    {
+                        // The `inspect` here is okay since we checked the bounds, and there are no
+                        // relocations (we have an active `str` reference here). We don't use this
+                        // result to affect interpreter execution.
+                        let slice = data.inspect_with_undef_and_ptr_outside_interpreter(start..end);
+                        let s = ::std::str::from_utf8(slice)
+                            .expect("non utf8 str from miri");
+                        p!(write("{:?}", s));
+                        true
                     } else {
-                        p!(write("{}{}", data, ui))
-                    };
-                    return Ok(self);
-                },
-                ty::Int(i) =>{
-                    let bit_size = Integer::from_attr(&self.tcx(), SignedInt(i))
-                        .size().bits() as u128;
-                    let min = 1u128 << (bit_size - 1);
-                    let max = min - 1;
-
-                    let ty = self.tcx().lift(&ct.ty).unwrap();
-                    let size = self.tcx().layout_of(ty::ParamEnv::empty().and(ty))
-                        .unwrap()
-                        .size;
-                    match data {
-                        d if d == min => p!(write("std::{}::MIN", i)),
-                        d if d == max => p!(write("std::{}::MAX", i)),
-                        _ => p!(write("{}{}", sign_extend(data, size) as i128, i))
-                    }
-                    return Ok(self);
-                },
-                ty::Char => {
-                    p!(write("{:?}", ::std::char::from_u32(data as u32).unwrap()));
-                    return Ok(self);
-                }
-                _ => {},
-            }
-        }
-        if let ty::Ref(_, ref_ty, _) = ct.ty.kind {
-            let byte_str = match (ct.val, &ref_ty.kind) {
-                (ConstValue::Scalar(Scalar::Ptr(ptr)), ty::Array(t, n)) if *t == u8 => {
-                    let n = n.eval_usize(self.tcx(), ty::ParamEnv::empty());
-                    Some(self.tcx()
-                        .alloc_map.lock()
-                        .unwrap_memory(ptr.alloc_id)
-                        .get_bytes(&self.tcx(), ptr, Size::from_bytes(n)).unwrap())
-                },
-                (ConstValue::Slice { data, start, end }, ty::Slice(t)) if *t == u8 => {
-                    // The `inspect` here is okay since we checked the bounds, and there are no
-                    // relocations (we have an active slice reference here). We don't use this
-                    // result to affect interpreter execution.
-                    Some(data.inspect_with_undef_and_ptr_outside_interpreter(start..end))
-                },
-                (ConstValue::Slice { data, start, end }, ty::Str) => {
-                    // The `inspect` here is okay since we checked the bounds, and there are no
-                    // relocations (we have an active `str` reference here). We don't use this
-                    // result to affect interpreter execution.
-                    let slice = data.inspect_with_undef_and_ptr_outside_interpreter(start..end);
-                    let s = ::std::str::from_utf8(slice)
-                        .expect("non utf8 str from miri");
-                    p!(write("{:?}", s));
-                    return Ok(self);
-                },
-                _ => None,
-            };
-            if let Some(byte_str) = byte_str {
-                p!(write("b\""));
-                for &c in byte_str {
-                    for e in std::ascii::escape_default(c) {
-                        self.write_char(e as char)?;
+                        false
                     }
+                } else {
+                    false
+                };
+                if !printed {
+                    // fallback
+                    p!(write("{:?} : ", ct.val), print(ct.ty))
                 }
-                p!(write("\""));
-                return Ok(self);
             }
-        }
-        p!(write("{:?} : ", ct.val), print(ct.ty));
-
+        };
         Ok(self)
     }
 }
@@ -1480,7 +1483,7 @@ fn name_by_region_index(index: usize) -> InternedString {
         }
 
         // Replace any anonymous late-bound regions with named
-        // variants, using gensym'd identifiers, so that we can
+        // variants, using new unique identifiers, so that we can
         // clearly differentiate between named and unnamed regions in
         // the output. We'll probably want to tweak this over time to
         // decide just how much information to give.
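
The printer change in the hunks above collapses a chain of early-return `if let` blocks into a single `match` over the `(value, type-kind)` pair, with one fallback arm at the end. The same shape in a tiny standalone sketch, using placeholder enums rather than rustc's `ConstValue`/`TyKind`:

    #[derive(Debug)]
    enum Val { Bool(bool), Uint(u128), Other }

    #[derive(Debug)]
    enum Kind { Bool, Uint, Other }

    fn pretty(val: &Val, kind: &Kind) -> String {
        match (val, kind) {
            (Val::Bool(b), Kind::Bool) => b.to_string(),
            (Val::Uint(n), Kind::Uint) => format!("{}usize", n),
            // Fallback arm, analogous to the `{:?} : <type>` fallback above.
            _ => format!("{:?} : {:?}", val, kind),
        }
    }
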
index 91082c59ba05ae7a23501902fb66c52eccd71ed4..c1c6a655d96a9132fe1e366f24b356a2582e1681 100644 (file)
@@ -73,6 +73,17 @@ impl<'tcx, M: QueryAccessors<'tcx, Key = DefId>> QueryDescription<'tcx> for M {
             format!("processing {:?} with query `{}`", def_id, name).into()
         }
     }
+
+    default fn cache_on_disk(_: TyCtxt<'tcx>, _: Self::Key, _: Option<&Self::Value>) -> bool {
+        false
+    }
+
+    default fn try_load_from_disk(
+        _: TyCtxt<'tcx>,
+        _: SerializedDepNodeIndex,
+    ) -> Option<Self::Value> {
+        bug!("QueryDescription::load_from_disk() called for an unsupported query.")
+    }
 }
 
 impl<'tcx> QueryDescription<'tcx> for queries::analysis<'tcx> {
index 97fafe341a3114ec5c82dc5f0cffce40741eb650..21a7cf00b283f54c43d5783e440697a090527d0f 100644 (file)
@@ -882,15 +882,16 @@ fn specialized_encode(&mut self, ty: &Ty<'tcx>) -> Result<(), Self::Error> {
     }
 }
 
-impl<'a, 'tcx, E> SpecializedEncoder<ty::GenericPredicates<'tcx>> for CacheEncoder<'a, 'tcx, E>
+impl<'a, 'tcx, E> SpecializedEncoder<&'tcx [(ty::Predicate<'tcx>, Span)]>
+    for CacheEncoder<'a, 'tcx, E>
 where
     E: 'a + TyEncoder,
 {
     #[inline]
     fn specialized_encode(&mut self,
-                          predicates: &ty::GenericPredicates<'tcx>)
+                          predicates: &&'tcx [(ty::Predicate<'tcx>, Span)])
                           -> Result<(), Self::Error> {
-        ty_codec::encode_predicates(self, predicates,
+        ty_codec::encode_spanned_predicates(self, predicates,
             |encoder| &mut encoder.predicate_shorthands)
     }
 }
@@ -1075,7 +1076,7 @@ fn encode_query_results<'a, 'tcx, Q, E>(
     let desc = &format!("encode_query_results for {}",
         ::std::any::type_name::<Q>());
 
-    time_ext(tcx.sess.time_extended(), Some(tcx.sess), desc, || {
+    time_ext(tcx.sess.time_extended(), desc, || {
         let shards = Q::query_cache(tcx).lock_shards();
         assert!(shards.iter().all(|shard| shard.active.is_empty()));
         for (key, entry) in shards.iter().flat_map(|shard| shard.results.iter()) {
index 955f1447c55b67c3d61aeec068a2860accd35b65..7f05e553bc9765273d91dfd4f7157cad4873bf68 100644 (file)
@@ -9,8 +9,6 @@
 use crate::ty::query::config::{QueryConfig, QueryDescription};
 use crate::ty::query::job::{QueryJob, QueryResult, QueryInfo};
 
-use crate::util::common::{profq_msg, ProfileQueriesMsg, QueryMsg};
-
 use errors::DiagnosticBuilder;
 use errors::Level;
 use errors::Diagnostic;
@@ -62,33 +60,6 @@ fn default() -> QueryCache<'tcx, M> {
     }
 }
 
-// If enabled, sends a message to the profile-queries thread.
-macro_rules! profq_msg {
-    ($tcx:expr, $msg:expr) => {
-        if cfg!(debug_assertions) {
-            if $tcx.sess.profile_queries() {
-                profq_msg($tcx.sess, $msg)
-            }
-        }
-    }
-}
-
-// If enabled, formats a key using its debug string, which can be
-// expensive to compute (in terms of time).
-macro_rules! profq_query_msg {
-    ($query:expr, $tcx:expr, $key:expr) => {{
-        let msg = if cfg!(debug_assertions) {
-            if $tcx.sess.profile_queries_and_keys() {
-                Some(format!("{:?}", $key))
-            } else { None }
-        } else { None };
-        QueryMsg {
-            query: $query,
-            msg,
-        }
-    }}
-}
-
 /// A type representing the responsibility to execute the job in the `job` field.
 /// This will poison the relevant query if dropped.
 pub(super) struct JobOwner<'a, 'tcx, Q: QueryDescription<'tcx>> {
@@ -111,7 +82,6 @@ pub(super) fn try_get(tcx: TyCtxt<'tcx>, span: Span, key: &Q::Key) -> TryGetJob<
         loop {
             let mut lock = cache.get_shard_by_value(key).lock();
             if let Some(value) = lock.results.get(key) {
-                profq_msg!(tcx, ProfileQueriesMsg::CacheHit);
                 tcx.prof.query_cache_hit(Q::NAME);
                 let result = (value.value.clone(), value.index);
                 #[cfg(debug_assertions)]
@@ -358,13 +328,6 @@ pub(super) fn get_query<Q: QueryDescription<'tcx>>(self, span: Span, key: Q::Key
                key,
                span);
 
-        profq_msg!(self,
-            ProfileQueriesMsg::QueryBegin(
-                span.data(),
-                profq_query_msg!(Q::NAME.as_str(), self, key),
-            )
-        );
-
         let job = match JobOwner::try_get(self, span, &key) {
             TryGetJob::NotYetStarted(job) => job,
             TryGetJob::Cycle(result) => return result,
@@ -383,7 +346,6 @@ pub(super) fn get_query<Q: QueryDescription<'tcx>>(self, span: Span, key: Q::Key
 
         if Q::ANON {
 
-            profq_msg!(self, ProfileQueriesMsg::ProviderBegin);
             let prof_timer = self.prof.query_provider(Q::NAME);
 
             let ((result, dep_node_index), diagnostics) = with_diagnostics(|diagnostics| {
@@ -395,7 +357,6 @@ pub(super) fn get_query<Q: QueryDescription<'tcx>>(self, span: Span, key: Q::Key
             });
 
             drop(prof_timer);
-            profq_msg!(self, ProfileQueriesMsg::ProviderEnd);
 
             self.dep_graph.read_index(dep_node_index);
 
@@ -468,7 +429,6 @@ fn load_from_disk_and_cache_in_memory<Q: QueryDescription<'tcx>>(
         };
 
         let result = if let Some(result) = result {
-            profq_msg!(self, ProfileQueriesMsg::CacheHit);
             result
         } else {
             // We could not load a result from the on-disk cache, so
@@ -489,10 +449,6 @@ fn load_from_disk_and_cache_in_memory<Q: QueryDescription<'tcx>>(
             self.incremental_verify_ich::<Q>(&result, dep_node, dep_node_index);
         }
 
-        if unlikely!(self.sess.opts.debugging_opts.query_dep_graph) {
-            self.dep_graph.mark_loaded_from_cache(dep_node_index, true);
-        }
-
         result
     }
 
@@ -546,7 +502,6 @@ fn force_query_with_job<Q: QueryDescription<'tcx>>(
                  - dep-node: {:?}",
                 key, dep_node);
 
-        profq_msg!(self, ProfileQueriesMsg::ProviderBegin);
         let prof_timer = self.prof.query_provider(Q::NAME);
 
         let ((result, dep_node_index), diagnostics) = with_diagnostics(|diagnostics| {
@@ -568,11 +523,6 @@ fn force_query_with_job<Q: QueryDescription<'tcx>>(
         });
 
         drop(prof_timer);
-        profq_msg!(self, ProfileQueriesMsg::ProviderEnd);
-
-        if unlikely!(self.sess.opts.debugging_opts.query_dep_graph) {
-            self.dep_graph.mark_loaded_from_cache(dep_node_index, false);
-        }
 
         if unlikely!(!diagnostics.is_empty()) {
             if dep_node.kind != crate::dep_graph::DepKind::Null {
@@ -614,19 +564,12 @@ pub(super) fn ensure_query<Q: QueryDescription<'tcx>>(self, key: Q::Key) -> () {
 
             let _ = self.get_query::<Q>(DUMMY_SP, key);
         } else {
-            profq_msg!(self, ProfileQueriesMsg::CacheHit);
             self.prof.query_cache_hit(Q::NAME);
         }
     }
 
     #[allow(dead_code)]
     fn force_query<Q: QueryDescription<'tcx>>(self, key: Q::Key, span: Span, dep_node: DepNode) {
-        profq_msg!(
-            self,
-            ProfileQueriesMsg::QueryBegin(span.data(),
-                                          profq_query_msg!(Q::NAME.as_str(), self, key))
-        );
-
         // We may be concurrently trying both execute and force a query.
         // Ensure that only one of them runs the query.
         let job = match JobOwner::try_get(self, span, &key) {
@@ -1191,37 +1134,6 @@ pub fn force_from_dep_node(tcx: TyCtxt<'_>, dep_node: &DepNode) -> bool {
         return false
     }
 
-    macro_rules! def_id {
-        () => {
-            if let Some(def_id) = dep_node.extract_def_id(tcx) {
-                def_id
-            } else {
-                // Return from the whole function.
-                return false
-            }
-        }
-    };
-
-    macro_rules! krate {
-        () => { (def_id!()).krate }
-    };
-
-    macro_rules! force_ex {
-        ($tcx:expr, $query:ident, $key:expr) => {
-            {
-                $tcx.force_query::<crate::ty::query::queries::$query<'_>>(
-                    $key,
-                    DUMMY_SP,
-                    *dep_node
-                );
-            }
-        }
-    };
-
-    macro_rules! force {
-        ($query:ident, $key:expr) => { force_ex!(tcx, $query, $key) }
-    };
-
     rustc_dep_node_force!([dep_node, tcx]
         // These are inputs that are expected to be pre-allocated and that
         // should therefore always be red or green already.
@@ -1240,7 +1152,19 @@ macro_rules! force {
             bug!("force_from_dep_node: encountered {:?}", dep_node)
         }
 
-        DepKind::Analysis => { force!(analysis, krate!()); }
+        DepKind::Analysis => {
+            let def_id = if let Some(def_id) = dep_node.extract_def_id(tcx) {
+                def_id
+            } else {
+                // Return from the whole function.
+                return false
+            };
+            tcx.force_query::<crate::ty::query::queries::analysis<'_>>(
+                def_id.krate,
+                DUMMY_SP,
+                *dep_node
+            );
+        }
     );
 
     true
index 5489c6f5d5afba67540fe5e12aaad357c952cb7c..41f34703622e7f9606828a90947243840a97429d 100644 (file)
@@ -8,7 +8,7 @@
 use crate::ty::subst::{GenericArg, GenericArgKind, SubstsRef};
 use crate::ty::{self, Ty, TyCtxt, TypeFoldable};
 use crate::ty::error::{ExpectedFound, TypeError};
-use crate::mir::interpret::{ConstValue, get_slice_bytes, Scalar};
+use crate::mir::interpret::{ConstValue, get_slice_bytes};
 use std::rc::Rc;
 use std::iter;
 use rustc_target::spec::abi;
@@ -561,37 +561,39 @@ pub fn super_relate_consts<R: TypeRelation<'tcx>>(
     // implement both `PartialEq` and `Eq`, corresponding to
     // `structural_match` types.
     // FIXME(const_generics): check for `structural_match` synthetic attribute.
-    match (eagerly_eval(a), eagerly_eval(b)) {
+    let new_const_val = match (eagerly_eval(a), eagerly_eval(b)) {
         (ConstValue::Infer(_), _) | (_, ConstValue::Infer(_)) => {
             // The caller should handle these cases!
             bug!("var types encountered in super_relate_consts: {:?} {:?}", a, b)
         }
         (ConstValue::Param(a_p), ConstValue::Param(b_p)) if a_p.index == b_p.index => {
-            Ok(a)
+            return Ok(a);
         }
         (ConstValue::Placeholder(p1), ConstValue::Placeholder(p2)) if p1 == p2 => {
-            Ok(a)
+            return Ok(a);
         }
-        (a_val @ ConstValue::Scalar(Scalar::Raw { .. }), b_val @ _)
-            if a.ty == b.ty && a_val == b_val =>
-        {
-            Ok(tcx.mk_const(ty::Const {
-                val: a_val,
-                ty: a.ty,
-            }))
+        (ConstValue::Scalar(a_val), ConstValue::Scalar(b_val)) if a.ty == b.ty => {
+            if a_val == b_val {
+                Ok(ConstValue::Scalar(a_val))
+            } else if let ty::FnPtr(_) = a.ty.kind {
+                let alloc_map = tcx.alloc_map.lock();
+                let a_instance = alloc_map.unwrap_fn(a_val.to_ptr().unwrap().alloc_id);
+                let b_instance = alloc_map.unwrap_fn(b_val.to_ptr().unwrap().alloc_id);
+                if a_instance == b_instance {
+                    Ok(ConstValue::Scalar(a_val))
+                } else {
+                    Err(TypeError::ConstMismatch(expected_found(relation, &a, &b)))
+                }
+            } else {
+                Err(TypeError::ConstMismatch(expected_found(relation, &a, &b)))
+            }
         }
 
-        // FIXME(const_generics): we should either handle `Scalar::Ptr` or add a comment
-        // saying that we're not handling it intentionally.
-
         (a_val @ ConstValue::Slice { .. }, b_val @ ConstValue::Slice { .. }) => {
             let a_bytes = get_slice_bytes(&tcx, a_val);
             let b_bytes = get_slice_bytes(&tcx, b_val);
             if a_bytes == b_bytes {
-                Ok(tcx.mk_const(ty::Const {
-                    val: a_val,
-                    ty: a.ty,
-                }))
+                Ok(a_val)
             } else {
                 Err(TypeError::ConstMismatch(expected_found(relation, &a, &b)))
             }
@@ -602,16 +604,16 @@ pub fn super_relate_consts<R: TypeRelation<'tcx>>(
         // FIXME(const_generics): this is wrong, as it is a projection
         (ConstValue::Unevaluated(a_def_id, a_substs),
             ConstValue::Unevaluated(b_def_id, b_substs)) if a_def_id == b_def_id => {
-                let substs =
-                    relation.relate_with_variance(ty::Variance::Invariant, &a_substs, &b_substs)?;
-                Ok(tcx.mk_const(ty::Const {
-                    val: ConstValue::Unevaluated(a_def_id, &substs),
-                    ty: a.ty,
-                }))
-            }
-
-        _ => Err(TypeError::ConstMismatch(expected_found(relation, &a, &b))),
-    }
+            let substs =
+                relation.relate_with_variance(ty::Variance::Invariant, &a_substs, &b_substs)?;
+            Ok(ConstValue::Unevaluated(a_def_id, &substs))
+        }
+        _ => Err(TypeError::ConstMismatch(expected_found(relation, &a, &b))),
+    };
+    new_const_val.map(|val| tcx.mk_const(ty::Const {
+        val,
+        ty: a.ty,
+    }))
 }
 
 impl<'tcx> Relate<'tcx> for &'tcx ty::List<ty::ExistentialPredicate<'tcx>> {
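
The `super_relate_consts` rewrite above has every arm produce a plain `ConstValue` (or an error) and performs the `tcx.mk_const` wrapping exactly once at the end. The refactoring pattern in isolation, with placeholder types rather than rustc's:

    #[derive(Debug, PartialEq)]
    struct Const { val: u32 }

    fn relate(a: u32, b: u32) -> Result<Const, String> {
        // Each arm only produces (or rejects) the raw value...
        let val = match (a, b) {
            (x, y) if x == y => Ok(x),
            _ => Err(format!("mismatch: {} vs {}", a, b)),
        };
        // ...and the wrapping into the final type happens in one place.
        val.map(|val| Const { val })
    }
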
index 6b0df7fb92a4a0c0a3779fab388f99ae1b665d5f..83ec98f9ddd2f2dfbd87ab39978e22681bfd9ade 100644 (file)
@@ -761,6 +761,8 @@ fn lift_to_tcx(&self, tcx: TyCtxt<'tcx>) -> Option<Self::Lifted> {
                 Some(ty::InstanceDef::Item(def_id)),
             ty::InstanceDef::VtableShim(def_id) =>
                 Some(ty::InstanceDef::VtableShim(def_id)),
+            ty::InstanceDef::ReifyShim(def_id) =>
+                Some(ty::InstanceDef::ReifyShim(def_id)),
             ty::InstanceDef::Intrinsic(def_id) =>
                 Some(ty::InstanceDef::Intrinsic(def_id)),
             ty::InstanceDef::FnPtrShim(def_id, ref ty) =>
@@ -966,6 +968,7 @@ fn super_fold_with<F: TypeFolder<'tcx>>(&self, folder: &mut F) -> Self {
             def: match self.def {
                 Item(did) => Item(did.fold_with(folder)),
                 VtableShim(did) => VtableShim(did.fold_with(folder)),
+                ReifyShim(did) => ReifyShim(did.fold_with(folder)),
                 Intrinsic(did) => Intrinsic(did.fold_with(folder)),
                 FnPtrShim(did, ty) => FnPtrShim(
                     did.fold_with(folder),
@@ -994,7 +997,7 @@ fn super_visit_with<V: TypeVisitor<'tcx>>(&self, visitor: &mut V) -> bool {
         use crate::ty::InstanceDef::*;
         self.substs.visit_with(visitor) ||
         match self.def {
-            Item(did) | VtableShim(did) | Intrinsic(did) | Virtual(did, _) => {
+            Item(did) | VtableShim(did) | ReifyShim(did) | Intrinsic(did) | Virtual(did, _) => {
                 did.visit_with(visitor)
             },
             FnPtrShim(did, ty) | CloneShim(did, ty) => {
@@ -1215,16 +1218,23 @@ impl<'tcx> TypeFoldable<'tcx> for ty::adjustment::AutoBorrow<'tcx> {
     }
 }
 
-BraceStructTypeFoldableImpl! {
-    impl<'tcx> TypeFoldable<'tcx> for ty::GenericPredicates<'tcx> {
-        parent, predicates
-    }
-}
-
 impl<'tcx> TypeFoldable<'tcx> for &'tcx ty::List<ty::Predicate<'tcx>> {
     fn super_fold_with<F: TypeFolder<'tcx>>(&self, folder: &mut F) -> Self {
-        let v = self.iter().map(|p| p.fold_with(folder)).collect::<SmallVec<[_; 8]>>();
-        folder.tcx().intern_predicates(&v)
+        // This code is hot enough that it's worth specializing for a list of
+        // length 0. (No other length is common enough to be worth singling
+        // out).
+        if self.len() == 0 {
+            self
+        } else {
+            // Don't bother interning if nothing changed, which is the common
+            // case.
+            let v = self.iter().map(|p| p.fold_with(folder)).collect::<SmallVec<[_; 8]>>();
+            if v[..] == self[..] {
+                self
+            } else {
+                folder.tcx().intern_predicates(&v)
+            }
+        }
     }
 
     fn super_visit_with<V: TypeVisitor<'tcx>>(&self, visitor: &mut V) -> bool {
index 3f09bf749edc8a6eb71b74c05c07b9bf488375c1..4af73fa389a7db87f26b353981a38697cab8020f 100644 (file)
@@ -13,7 +13,7 @@
 use crate::ty::subst::{InternalSubsts, Subst, SubstsRef, GenericArg, GenericArgKind};
 use crate::ty::{self, AdtDef, Discr, DefIdTree, TypeFlags, Ty, TyCtxt, TypeFoldable};
 use crate::ty::{List, TyS, ParamEnvAnd, ParamEnv};
-use crate::ty::layout::{Size, Integer, IntegerExt, VariantIdx};
+use crate::ty::layout::VariantIdx;
 use crate::util::captures::Captures;
 use crate::mir::interpret::{Scalar, GlobalId};
 
@@ -24,7 +24,6 @@
 use std::ops::Range;
 use rustc_target::spec::abi;
 use syntax::ast::{self, Ident};
-use syntax::attr::{SignedInt, UnsignedInt};
 use syntax::symbol::{kw, InternedString};
 
 use self::InferTy::*;
@@ -163,7 +162,7 @@ pub enum TyKind<'tcx> {
 
     /// The anonymous type of a generator. Used to represent the type of
     /// `|a| yield a`.
-    Generator(DefId, GeneratorSubsts<'tcx>, hir::GeneratorMovability),
+    Generator(DefId, SubstsRef<'tcx>, hir::GeneratorMovability),
 
     /// A type representing the types stored inside a generator.
     /// This should only appear in GeneratorInteriors.
@@ -512,7 +511,7 @@ pub fn discriminant_for_variant(
     /// variant indices.
     #[inline]
     pub fn discriminants(
-        &'tcx self,
+        self,
         def_id: DefId,
         tcx: TyCtxt<'tcx>,
     ) -> impl Iterator<Item = (VariantIdx, Discr<'tcx>)> + Captures<'tcx> {
@@ -524,7 +523,7 @@ pub fn discriminants(
     /// Calls `f` with a reference to the name of the enumerator for the given
     /// variant `v`.
     #[inline]
-    pub fn variant_name(&self, v: VariantIdx) -> Cow<'static, str> {
+    pub fn variant_name(self, v: VariantIdx) -> Cow<'static, str> {
         match v.as_usize() {
             Self::UNRESUMED => Cow::from(Self::UNRESUMED_NAME),
             Self::RETURNED => Cow::from(Self::RETURNED_NAME),
@@ -570,7 +569,7 @@ pub fn prefix_tys(self, def_id: DefId, tcx: TyCtxt<'tcx>) -> impl Iterator<Item
 #[derive(Debug, Copy, Clone)]
 pub enum UpvarSubsts<'tcx> {
     Closure(SubstsRef<'tcx>),
-    Generator(GeneratorSubsts<'tcx>),
+    Generator(SubstsRef<'tcx>),
 }
 
 impl<'tcx> UpvarSubsts<'tcx> {
@@ -582,7 +581,7 @@ pub fn upvar_tys(
     ) -> impl Iterator<Item = Ty<'tcx>> + 'tcx {
         let upvar_kinds = match self {
             UpvarSubsts::Closure(substs) => substs.as_closure().split(def_id, tcx).upvar_kinds,
-            UpvarSubsts::Generator(substs) => substs.split(def_id, tcx).upvar_kinds,
+            UpvarSubsts::Generator(substs) => substs.as_generator().split(def_id, tcx).upvar_kinds,
         };
         upvar_kinds.iter().map(|t| {
             if let GenericArgKind::Type(ty) = t.unpack() {
@@ -1776,6 +1775,10 @@ pub fn is_phantom_data(&self) -> bool {
     #[inline]
     pub fn is_bool(&self) -> bool { self.kind == Bool }
 
+    /// Returns `true` if this type is a `str`.
+    #[inline]
+    pub fn is_str(&self) -> bool { self.kind == Str }
+
     #[inline]
     pub fn is_param(&self, index: u32) -> bool {
         match self.kind {
@@ -2109,7 +2112,8 @@ pub fn tuple_fields(&self) -> impl DoubleEndedIterator<Item=Ty<'tcx>> {
     pub fn variant_range(&self, tcx: TyCtxt<'tcx>) -> Option<Range<VariantIdx>> {
         match self.kind {
             TyKind::Adt(adt, _) => Some(adt.variant_range()),
-            TyKind::Generator(def_id, substs, _) => Some(substs.variant_range(def_id, tcx)),
+            TyKind::Generator(def_id, substs, _) =>
+                Some(substs.as_generator().variant_range(def_id, tcx)),
             _ => None,
         }
     }
@@ -2126,7 +2130,7 @@ pub fn discriminant_for_variant(
         match self.kind {
             TyKind::Adt(adt, _) => Some(adt.discriminant_for_variant(tcx, variant_index)),
             TyKind::Generator(def_id, substs, _) =>
-                Some(substs.discriminant_for_variant(def_id, tcx, variant_index)),
+                Some(substs.as_generator().discriminant_for_variant(def_id, tcx, variant_index)),
             _ => None,
         }
     }
@@ -2149,7 +2153,7 @@ pub fn discriminant_for_variant(
                 out.extend(substs.regions())
             }
             Closure(_, ref substs ) |
-            Generator(_, GeneratorSubsts { ref substs }, _) => {
+            Generator(_, ref substs, _) => {
                 out.extend(substs.regions())
             }
             Projection(ref data) | UnnormalizedProjection(ref data) => {
@@ -2199,7 +2203,9 @@ pub fn to_opt_closure_kind(&self) -> Option<ty::ClosureKind> {
                 _ => bug!("cannot convert type `{:?}` to a closure kind", self),
             },
 
-            Infer(_) => None,
+            // "Bound" types appear in canonical queries when the
+            // closure type is not yet known
+            Bound(..) | Infer(_) => None,
 
             Error => Some(ty::ClosureKind::Fn),
 
@@ -2299,20 +2305,7 @@ pub fn try_eval_bits(
         ty: Ty<'tcx>,
     ) -> Option<u128> {
         assert_eq!(self.ty, ty);
-        // This is purely an optimization -- layout_of is a pretty expensive operation,
-        // but if we can determine the size without calling it, we don't need all that complexity
-        // (hashing, caching, etc.). As such, try to skip it.
-        let size = match ty.kind {
-            ty::Bool => Size::from_bytes(1),
-            ty::Char => Size::from_bytes(4),
-            ty::Int(ity) => {
-                Integer::from_attr(&tcx, SignedInt(ity)).size()
-            }
-            ty::Uint(uty) => {
-                Integer::from_attr(&tcx, UnsignedInt(uty)).size()
-            }
-            _ => tcx.layout_of(param_env.with_reveal_all().and(ty)).ok()?.size,
-        };
+        let size = tcx.layout_of(param_env.with_reveal_all().and(ty)).ok()?.size;
         // if `ty` does not depend on generic parameters, use an empty param_env
         self.eval(tcx, param_env).val.try_to_bits(size)
     }
index 537192b0a2e58dccf61c50aaa0a09a033b97ac46..4081c02a33ca41f87eecb7dc6fdcf69458b73579 100644 (file)
@@ -5,7 +5,7 @@
 use crate::ty::{self, Lift, List, Ty, TyCtxt, InferConst, ParamConst};
 use crate::ty::fold::{TypeFoldable, TypeFolder, TypeVisitor};
 use crate::mir::interpret::ConstValue;
-use crate::ty::sty::ClosureSubsts;
+use crate::ty::sty::{ClosureSubsts, GeneratorSubsts};
 
 use rustc_serialize::{self, Encodable, Encoder, Decodable, Decoder};
 use syntax_pos::{Span, DUMMY_SP};
@@ -194,6 +194,14 @@ pub fn as_closure(&'a self) -> ClosureSubsts<'a> {
         }
     }
 
+    /// Interpret these substitutions as the substitutions of a generator type.
+    /// Generator substitutions have a particular structure controlled by the
+    /// compiler that encodes information like the signature and generator kind;
+    /// see `ty::GeneratorSubsts` struct for more comments.
+    pub fn as_generator(&'tcx self) -> GeneratorSubsts<'tcx> {
+        GeneratorSubsts { substs: self }
+    }
+
     /// Creates a `InternalSubsts` that maps each generic parameter to itself.
     pub fn identity_for_item(tcx: TyCtxt<'tcx>, def_id: DefId) -> SubstsRef<'tcx> {
         Self::for_item(tcx, def_id, |param, _| {
@@ -394,14 +402,41 @@ pub fn truncate_to(&self, tcx: TyCtxt<'tcx>, generics: &ty::Generics) -> SubstsR
 
 impl<'tcx> TypeFoldable<'tcx> for SubstsRef<'tcx> {
     fn super_fold_with<F: TypeFolder<'tcx>>(&self, folder: &mut F) -> Self {
-        let params: SmallVec<[_; 8]> = self.iter().map(|k| k.fold_with(folder)).collect();
-
-        // If folding doesn't change the substs, it's faster to avoid
-        // calling `mk_substs` and instead reuse the existing substs.
-        if params[..] == self[..] {
-            self
-        } else {
-            folder.tcx().intern_substs(&params)
+        // This code is hot enough that it's worth specializing for the most
+        // common length lists, to avoid the overhead of `SmallVec` creation.
+        // The match arms are in order of frequency. The 1, 2, and 0 cases are
+        // typically hit in 90--99.99% of cases. When folding doesn't change
+        // the substs, it's faster to reuse the existing substs rather than
+        // calling `intern_substs`.
+        match self.len() {
+            1 => {
+                let param0 = self[0].fold_with(folder);
+                if param0 == self[0] {
+                    self
+                } else {
+                    folder.tcx().intern_substs(&[param0])
+                }
+            }
+            2 => {
+                let param0 = self[0].fold_with(folder);
+                let param1 = self[1].fold_with(folder);
+                if param0 == self[0] && param1 == self[1] {
+                    self
+                } else {
+                    folder.tcx().intern_substs(&[param0, param1])
+                }
+            }
+            0 => {
+                self
+            }
+            _ => {
+                let params: SmallVec<[_; 8]> = self.iter().map(|k| k.fold_with(folder)).collect();
+                if params[..] == self[..] {
+                    self
+                } else {
+                    folder.tcx().intern_substs(&params)
+                }
+            }
         }
     }
 
index d0e95a18c59fcb046656a17cbe36d376b25530a0..e1eab2c6579e1892df943f0629e79e34541b7220 100644 (file)
@@ -697,6 +697,9 @@ struct OpaqueTypeExpander<'tcx> {
             // that type, and when we finish expanding that type we remove
             // its DefId.
             seen_opaque_tys: FxHashSet<DefId>,
+            // Cache of all expansions we've seen so far. This is a critical
+            // optimization for some large types produced by async fn trees.
+            expanded_cache: FxHashMap<(DefId, SubstsRef<'tcx>), Ty<'tcx>>,
             primary_def_id: DefId,
             found_recursion: bool,
             tcx: TyCtxt<'tcx>,
@@ -713,9 +716,16 @@ fn expand_opaque_ty(
                 }
                 let substs = substs.fold_with(self);
                 if self.seen_opaque_tys.insert(def_id) {
-                    let generic_ty = self.tcx.type_of(def_id);
-                    let concrete_ty = generic_ty.subst(self.tcx, substs);
-                    let expanded_ty = self.fold_ty(concrete_ty);
+                    let expanded_ty = match self.expanded_cache.get(&(def_id, substs)) {
+                        Some(expanded_ty) => expanded_ty,
+                        None => {
+                            let generic_ty = self.tcx.type_of(def_id);
+                            let concrete_ty = generic_ty.subst(self.tcx, substs);
+                            let expanded_ty = self.fold_ty(concrete_ty);
+                            self.expanded_cache.insert((def_id, substs), expanded_ty);
+                            expanded_ty
+                        }
+                    };
                     self.seen_opaque_tys.remove(&def_id);
                     Some(expanded_ty)
                 } else {
@@ -735,14 +745,17 @@ fn tcx(&self) -> TyCtxt<'tcx> {
             fn fold_ty(&mut self, t: Ty<'tcx>) -> Ty<'tcx> {
                 if let ty::Opaque(def_id, substs) = t.kind {
                     self.expand_opaque_ty(def_id, substs).unwrap_or(t)
-                } else {
+                } else if t.has_projections() {
                     t.super_fold_with(self)
+                } else {
+                    t
                 }
             }
         }
 
         let mut visitor = OpaqueTypeExpander {
             seen_opaque_tys: FxHashSet::default(),
+            expanded_cache: FxHashMap::default(),
             primary_def_id: def_id,
             found_recursion: false,
             tcx: self,
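
The new `expanded_cache` field above memoizes opaque-type expansions keyed by `(DefId, SubstsRef)`, which the comment notes is critical for the large types produced by `async fn` trees. The general memoization shape, sketched with a plain `HashMap` and placeholder key/value types:

    use std::collections::HashMap;

    struct Expander {
        // Stand-in for FxHashMap<(DefId, SubstsRef<'tcx>), Ty<'tcx>>.
        cache: HashMap<(u32, u32), String>,
    }

    impl Expander {
        fn expand(&mut self, key: (u32, u32)) -> String {
            if let Some(hit) = self.cache.get(&key) {
                return hit.clone(); // cache hit: skip the expensive expansion
            }
            // Stand-in for the real substitute-and-fold work.
            let value = format!("expanded({}, {})", key.0, key.1);
            self.cache.insert(key, value.clone());
            value
        }
    }
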
@@ -1017,34 +1030,25 @@ pub fn peel_refs(&'tcx self) -> Ty<'tcx> {
 }
 
 fn is_copy_raw<'tcx>(tcx: TyCtxt<'tcx>, query: ty::ParamEnvAnd<'tcx, Ty<'tcx>>) -> bool {
-    let (param_env, ty) = query.into_parts();
-    let trait_def_id = tcx.require_lang_item(lang_items::CopyTraitLangItem, None);
-    tcx.infer_ctxt()
-        .enter(|infcx| traits::type_known_to_meet_bound_modulo_regions(
-            &infcx,
-            param_env,
-            ty,
-            trait_def_id,
-            DUMMY_SP,
-        ))
+    is_item_raw(tcx, query, lang_items::CopyTraitLangItem)
 }
 
 fn is_sized_raw<'tcx>(tcx: TyCtxt<'tcx>, query: ty::ParamEnvAnd<'tcx, Ty<'tcx>>) -> bool {
-    let (param_env, ty) = query.into_parts();
-    let trait_def_id = tcx.require_lang_item(lang_items::SizedTraitLangItem, None);
-    tcx.infer_ctxt()
-        .enter(|infcx| traits::type_known_to_meet_bound_modulo_regions(
-            &infcx,
-            param_env,
-            ty,
-            trait_def_id,
-            DUMMY_SP,
-        ))
+    is_item_raw(tcx, query, lang_items::SizedTraitLangItem)
 }
 
 fn is_freeze_raw<'tcx>(tcx: TyCtxt<'tcx>, query: ty::ParamEnvAnd<'tcx, Ty<'tcx>>) -> bool {
+    is_item_raw(tcx, query, lang_items::FreezeTraitLangItem)
+}
+
+fn is_item_raw<'tcx>(
+    tcx: TyCtxt<'tcx>,
+    query: ty::ParamEnvAnd<'tcx, Ty<'tcx>>,
+    item: lang_items::LangItem,
+) -> bool {
     let (param_env, ty) = query.into_parts();
-    let trait_def_id = tcx.require_lang_item(lang_items::FreezeTraitLangItem, None);
+    let trait_def_id = tcx.require_lang_item(item, None);
     tcx.infer_ctxt()
         .enter(|infcx| traits::type_known_to_meet_bound_modulo_regions(
             &infcx,
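
In the hunk above, `is_copy_raw`, `is_sized_raw`, and `is_freeze_raw` are collapsed into a single `is_item_raw` helper parameterized by the lang item. The deduplication pattern on its own, with a toy predicate standing in for the real trait-bound query:

    enum LangItem { Copy, Sized, Freeze }

    fn is_item_raw(ty_name: &str, item: LangItem) -> bool {
        // Placeholder check; the real helper asks the trait system.
        match item {
            LangItem::Copy => ty_name == "u32",
            LangItem::Sized => ty_name != "str",
            LangItem::Freeze => true,
        }
    }

    fn is_copy_raw(ty_name: &str) -> bool { is_item_raw(ty_name, LangItem::Copy) }
    fn is_sized_raw(ty_name: &str) -> bool { is_item_raw(ty_name, LangItem::Sized) }
    fn is_freeze_raw(ty_name: &str) -> bool { is_item_raw(ty_name, LangItem::Freeze) }
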
@@ -1105,6 +1109,9 @@ fn needs_drop_raw<'tcx>(tcx: TyCtxt<'tcx>, query: ty::ParamEnvAnd<'tcx, Ty<'tcx>
 
         ty::UnnormalizedProjection(..) => bug!("only used with chalk-engine"),
 
+        // Zero-length arrays never contain anything to drop.
+        ty::Array(_, len) if len.try_eval_usize(tcx, param_env) == Some(0) => false,
+
         // Structural recursion.
         ty::Array(ty, _) | ty::Slice(ty) => needs_drop(ty),
 
index 1895ab83674eb634384c0c53734db9fb1bfaa385..f5b1902e3cc8c5ab0085125d7c51f4c61351db96 100644 (file)
@@ -110,12 +110,10 @@ fn push_subtypes<'tcx>(stack: &mut TypeWalkerStack<'tcx>, parent_ty: Ty<'tcx>) {
         ty::Adt(_, substs) | ty::Opaque(_, substs) => {
             stack.extend(substs.types().rev());
         }
-        ty::Closure(_, ref substs) => {
+        ty::Closure(_, ref substs)
+        | ty::Generator(_, ref substs, _) => {
             stack.extend(substs.types().rev());
         }
-        ty::Generator(_, ref substs, _) => {
-            stack.extend(substs.substs.types().rev());
-        }
         ty::GeneratorWitness(ts) => {
             stack.extend(ts.skip_binder().iter().cloned().rev());
         }
index 0f472126695e0720cacd8138162c6d79e3707a56..3e52a6aa50850c12901e0f9b8c5cac6e418a5374 100644 (file)
@@ -6,11 +6,8 @@
 use std::fmt::Debug;
 use std::time::{Duration, Instant};
 
-use std::sync::mpsc::{Sender};
-use syntax_pos::{SpanData};
 use syntax::symbol::{Symbol, sym};
 use rustc_macros::HashStable;
-use crate::dep_graph::{DepNode};
 use crate::session::Session;
 
 #[cfg(test)]
 
 thread_local!(static TIME_DEPTH: Cell<usize> = Cell::new(0));
 
-/// Parameters to the `Dump` variant of type `ProfileQueriesMsg`.
-#[derive(Clone,Debug)]
-pub struct ProfQDumpParams {
-    /// A base path for the files we will dump.
-    pub path:String,
-    /// To ensure that the compiler waits for us to finish our dumps.
-    pub ack:Sender<()>,
-    /// Toggle dumping a log file with every `ProfileQueriesMsg`.
-    pub dump_profq_msg_log:bool,
-}
-
 #[allow(nonstandard_style)]
 #[derive(Clone, Debug, PartialEq, Eq)]
 pub struct QueryMsg {
@@ -44,53 +30,6 @@ pub struct QueryMsg {
     pub msg: Option<String>,
 }
 
-/// A sequence of these messages induce a trace of query-based incremental compilation.
-// FIXME(matthewhammer): Determine whether we should include cycle detection here or not.
-#[derive(Clone,Debug)]
-pub enum ProfileQueriesMsg {
-    /// Begin a timed pass.
-    TimeBegin(String),
-    /// End a timed pass.
-    TimeEnd,
-    /// Begin a task (see `dep_graph::graph::with_task`).
-    TaskBegin(DepNode),
-    /// End a task.
-    TaskEnd,
-    /// Begin a new query.
-    /// Cannot use `Span` because queries are sent to other thread.
-    QueryBegin(SpanData, QueryMsg),
-    /// Query is satisfied by using an already-known value for the given key.
-    CacheHit,
-    /// Query requires running a provider; providers may nest, permitting queries to nest.
-    ProviderBegin,
-    /// Query is satisfied by a provider terminating with a value.
-    ProviderEnd,
-    /// Dump a record of the queries to the given path.
-    Dump(ProfQDumpParams),
-    /// Halt the profiling/monitoring background thread.
-    Halt
-}
-
-/// If enabled, send a message to the profile-queries thread.
-pub fn profq_msg(sess: &Session, msg: ProfileQueriesMsg) {
-    if let Some(s) = sess.profile_channel.borrow().as_ref() {
-        s.send(msg).unwrap()
-    } else {
-        // Do nothing.
-    }
-}
-
-/// Set channel for profile queries channel.
-pub fn profq_set_chan(sess: &Session, s: Sender<ProfileQueriesMsg>) -> bool {
-    let mut channel = sess.profile_channel.borrow_mut();
-    if channel.is_none() {
-        *channel = Some(s);
-        true
-    } else {
-        false
-    }
-}
-
 /// Read the current depth of `time()` calls. This is used to
 /// encourage indentation across threads.
 pub fn time_depth() -> usize {
@@ -107,10 +46,10 @@ pub fn set_time_depth(depth: usize) {
 pub fn time<T, F>(sess: &Session, what: &str, f: F) -> T where
     F: FnOnce() -> T,
 {
-    time_ext(sess.time_passes(), Some(sess), what, f)
+    time_ext(sess.time_passes(), what, f)
 }
 
-pub fn time_ext<T, F>(do_it: bool, sess: Option<&Session>, what: &str, f: F) -> T where
+pub fn time_ext<T, F>(do_it: bool, what: &str, f: F) -> T where
     F: FnOnce() -> T,
 {
     if !do_it { return f(); }
@@ -121,19 +60,9 @@ pub fn time_ext<T, F>(do_it: bool, sess: Option<&Session>, what: &str, f: F) ->
         r
     });
 
-    if let Some(sess) = sess {
-        if cfg!(debug_assertions) {
-            profq_msg(sess, ProfileQueriesMsg::TimeBegin(what.to_string()))
-        }
-    }
     let start = Instant::now();
     let rv = f();
     let dur = start.elapsed();
-    if let Some(sess) = sess {
-        if cfg!(debug_assertions) {
-            profq_msg(sess, ProfileQueriesMsg::TimeEnd)
-        }
-    }
 
     print_time_passes_entry(true, what, dur);
 
index 5d43bf6ae28bf662c208365f483ac7732bf4790d..8c60c030eacdb3a1e864e4995ef925cfaccb8a86 100644 (file)
@@ -3,7 +3,7 @@
 use crate::attributes;
 use libc::c_uint;
 use rustc::ty::TyCtxt;
-use syntax::ext::allocator::{AllocatorKind, AllocatorTy, ALLOCATOR_METHODS};
+use syntax_expand::allocator::{AllocatorKind, AllocatorTy, ALLOCATOR_METHODS};
 
 use crate::ModuleLlvm;
 use crate::llvm::{self, False, True};
@@ -68,7 +68,7 @@ pub(crate) unsafe fn codegen(tcx: TyCtxt<'_>, mods: &mut ModuleLlvm, kind: Alloc
 
         let llbb = llvm::LLVMAppendBasicBlockInContext(llcx,
                                                        llfn,
-                                                       "entry\0".as_ptr() as *const _);
+                                                       "entry\0".as_ptr().cast());
 
         let llbuilder = llvm::LLVMCreateBuilderInContext(llcx);
         llvm::LLVMPositionBuilderAtEnd(llbuilder, llbb);
@@ -80,7 +80,7 @@ pub(crate) unsafe fn codegen(tcx: TyCtxt<'_>, mods: &mut ModuleLlvm, kind: Alloc
                                           args.as_ptr(),
                                           args.len() as c_uint,
                                           None,
-                                          "\0".as_ptr() as *const _);
+                                          "\0".as_ptr().cast());
         llvm::LLVMSetTailCall(ret, True);
         if output.is_some() {
             llvm::LLVMBuildRet(llbuilder, ret);
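
This file (and several below) replaces `as *const _` casts with `.cast()`, which leaves the target pointer type to inference instead of spelling it out. A minimal illustration of the idiom, outside any LLVM context:

    use std::os::raw::c_char;

    fn entry_block_name() -> *const c_char {
        // `.cast()` picks the pointee type from the expected return type.
        "entry\0".as_ptr().cast()
    }
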
index 33b50401b22f149ac9f11d8464f949433848e2e2..6a36a4a50cbf3f2210b81c9dbfd244d8778a5dcb 100644 (file)
@@ -96,10 +96,12 @@ pub fn set_probestack(cx: &CodegenCx<'ll, '_>, llfn: &'ll Value) {
     }
 
     // Currently stack probes seem somewhat incompatible with the address
-    // sanitizer. With asan we're already protected from stack overflow anyway
-    // so we don't really need stack probes regardless.
-    if let Some(Sanitizer::Address) = cx.sess().opts.debugging_opts.sanitizer {
-        return
+    // sanitizer and thread sanitizer. With asan we're already protected from
+    // stack overflow anyway so we don't really need stack probes regardless.
+    match cx.sess().opts.debugging_opts.sanitizer {
+        Some(Sanitizer::Address) |
+        Some(Sanitizer::Thread) => return,
+        _ => {},
     }
 
     // probestack doesn't play nice either with `-C profile-generate`.
@@ -268,31 +270,37 @@ pub fn from_fn_attrs(
         // optimize based on this!
         false
     } else if codegen_fn_attrs.flags.contains(CodegenFnAttrFlags::UNWIND) {
-        // If a specific #[unwind] attribute is present, use that
+        // If a specific #[unwind] attribute is present, use that.
         true
     } else if codegen_fn_attrs.flags.contains(CodegenFnAttrFlags::RUSTC_ALLOCATOR_NOUNWIND) {
-        // Special attribute for allocator functions, which can't unwind
+        // Special attribute for allocator functions, which can't unwind.
         false
-    } else if let Some(id) = id {
+    } else {
         let sig = cx.tcx.normalize_erasing_late_bound_regions(ty::ParamEnv::reveal_all(), &sig);
-        if cx.tcx.is_foreign_item(id) {
-            // Foreign items like `extern "C" { fn foo(); }` are assumed not to
-            // unwind
-            false
-        } else if sig.abi != Abi::Rust && sig.abi != Abi::RustCall {
-            // Any items defined in Rust that *don't* have the `extern` ABI are
-            // defined to not unwind. We insert shims to abort if an unwind
-            // happens to enforce this.
-            false
-        } else {
-            // Anything else defined in Rust is assumed that it can possibly
-            // unwind
+        if sig.abi == Abi::Rust || sig.abi == Abi::RustCall {
+            // Any Rust method (or `extern "Rust" fn` or `extern
+            // "rust-call" fn`) is explicitly allowed to unwind
+            // (unless it has no-unwind attribute, handled above).
             true
+        } else {
+            // Anything else is either:
+            //
+            //  1. A foreign item using a non-Rust ABI (like `extern "C" { fn foo(); }`), or
+            //
+            //  2. A Rust item using a non-Rust ABI (like `extern "C" fn foo() { ... }`).
+            //
+            // Foreign items (case 1) are assumed to not unwind; it is
+            // UB otherwise. (At least for now; see also
+            // rust-lang/rust#63909 and Rust RFC 2753.)
+            //
+            // Items defined in Rust with non-Rust ABIs (case 2) are also
+            // not supposed to unwind. Whether this should be enforced
+            // (versus stating it is UB) and *how* it would be enforced
+            // is currently under discussion; see rust-lang/rust#58794.
+            //
+            // In either case, we mark item as explicitly nounwind.
+            false
         }
-    } else {
-        // assume this can possibly unwind, avoiding the application of a
-        // `nounwind` attribute below.
-        true
     });
 
     // Always annotate functions with the target-cpu they are compiled for.
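
The rewritten branch above decides from the ABI whether an item may unwind: the Rust and `rust-call` ABIs may, everything else is marked nounwind. A compressed sketch of that decision (types and flags here are placeholders, and the surrounding code, partly outside this hunk, also handles the panic-strategy and attribute cases):

    enum Abi { Rust, RustCall, C, System }

    fn may_unwind(abi: Abi, has_unwind_attr: bool, is_allocator_shim: bool) -> bool {
        if has_unwind_attr {
            true  // an explicit #[unwind] attribute wins
        } else if is_allocator_shim {
            false // allocator entry points never unwind
        } else {
            // Only the Rust ABIs are allowed to unwind; foreign items and
            // non-Rust-ABI Rust items are treated as nounwind.
            matches!(abi, Abi::Rust | Abi::RustCall)
        }
    }
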
index c4368d2cb8b45df859f68b601512e617ae352702..7437b1e3c8a32e2f7c31599d57a117fd86015de7 100644 (file)
@@ -116,7 +116,7 @@ fn prepare_lto(cgcx: &CodegenContext<LlvmCodegenBackend>,
                 info!("adding bytecode {}", name);
                 let bc_encoded = data.data();
 
-                let (bc, id) = time_ext(cgcx.time_passes, None, &format!("decode {}", name), || {
+                let (bc, id) = time_ext(cgcx.time_passes, &format!("decode {}", name), || {
                     match DecodedBytecode::new(bc_encoded) {
                         Ok(b) => Ok((b.bytecode(), b.identifier().to_string())),
                         Err(e) => Err(diag_handler.fatal(&e)),
@@ -295,7 +295,7 @@ fn fat_lto(cgcx: &CodegenContext<LlvmCodegenBackend>,
         for (bc_decoded, name) in serialized_modules {
             let _timer = cgcx.prof.generic_activity("LLVM_fat_lto_link_module");
             info!("linking {:?}", name);
-            time_ext(cgcx.time_passes, None, &format!("ll link {:?}", name), || {
+            time_ext(cgcx.time_passes, &format!("ll link {:?}", name), || {
                 let data = bc_decoded.data();
                 linker.add(&data).map_err(|()| {
                     let msg = format!("failed to load bc of {:?}", name);
@@ -546,7 +546,7 @@ pub(crate) fn run_pass_manager(cgcx: &CodegenContext<LlvmCodegenBackend>,
         llvm::LLVMRustAddAnalysisPasses(module.module_llvm.tm, pm, module.module_llvm.llmod());
 
         if config.verify_llvm_ir {
-            let pass = llvm::LLVMRustFindAndCreatePass("verify\0".as_ptr() as *const _);
+            let pass = llvm::LLVMRustFindAndCreatePass("verify\0".as_ptr().cast());
             llvm::LLVMRustAddPass(pm, pass.unwrap());
         }
 
@@ -581,16 +581,16 @@ pub(crate) fn run_pass_manager(cgcx: &CodegenContext<LlvmCodegenBackend>,
         // We always generate bitcode through ThinLTOBuffers,
         // which do not support anonymous globals
         if config.bitcode_needed() {
-            let pass = llvm::LLVMRustFindAndCreatePass("name-anon-globals\0".as_ptr() as *const _);
+            let pass = llvm::LLVMRustFindAndCreatePass("name-anon-globals\0".as_ptr().cast());
             llvm::LLVMRustAddPass(pm, pass.unwrap());
         }
 
         if config.verify_llvm_ir {
-            let pass = llvm::LLVMRustFindAndCreatePass("verify\0".as_ptr() as *const _);
+            let pass = llvm::LLVMRustFindAndCreatePass("verify\0".as_ptr().cast());
             llvm::LLVMRustAddPass(pm, pass.unwrap());
         }
 
-        time_ext(cgcx.time_passes, None, "LTO passes", ||
+        time_ext(cgcx.time_passes, "LTO passes", ||
              llvm::LLVMRunPassManager(pm, module.module_llvm.llmod()));
 
         llvm::LLVMDisposePassManager(pm);
index 78db90b57b53d4ed968ccd1f5e21d96a775be45c..52f3a1cbb5c30b06959ddd66d120f79557fe2b4a 100644 (file)
@@ -221,8 +221,8 @@ pub fn new(cgcx: &'a CodegenContext<LlvmCodegenBackend>,
                llcx: &'a llvm::Context) -> Self {
         let data = Box::into_raw(Box::new((cgcx, handler)));
         unsafe {
-            llvm::LLVMRustSetInlineAsmDiagnosticHandler(llcx, inline_asm_handler, data as *mut _);
-            llvm::LLVMContextSetDiagnosticHandler(llcx, diagnostic_handler, data as *mut _);
+            llvm::LLVMRustSetInlineAsmDiagnosticHandler(llcx, inline_asm_handler, data.cast());
+            llvm::LLVMContextSetDiagnosticHandler(llcx, diagnostic_handler, data.cast());
         }
         DiagnosticHandlers { data, llcx }
     }
@@ -427,7 +427,6 @@ pub(crate) unsafe fn optimize(cgcx: &CodegenContext<LlvmCodegenBackend>,
         {
             let _timer = cgcx.prof.generic_activity("LLVM_module_optimize_function_passes");
             time_ext(config.time_passes,
-                        None,
                         &format!("llvm function passes [{}]", module_name.unwrap()),
                         || {
                 llvm::LLVMRustRunFunctionPassManager(fpm, llmod)
@@ -436,7 +435,6 @@ pub(crate) unsafe fn optimize(cgcx: &CodegenContext<LlvmCodegenBackend>,
         {
             let _timer = cgcx.prof.generic_activity("LLVM_module_optimize_module_passes");
             time_ext(config.time_passes,
-                    None,
                     &format!("llvm module passes [{}]", module_name.unwrap()),
                     || {
                 llvm::LLVMRunPassManager(mpm, llmod)
@@ -538,7 +536,7 @@ unsafe fn with_codegen<'ll, F, R>(tm: &'ll llvm::TargetMachine,
             embed_bitcode(cgcx, llcx, llmod, None);
         }
 
-        time_ext(config.time_passes, None, &format!("codegen passes [{}]", module_name.unwrap()),
+        time_ext(config.time_passes, &format!("codegen passes [{}]", module_name.unwrap()),
             || -> Result<(), FatalError> {
             if config.emit_ir {
                 let _timer = cgcx.prof.generic_activity("LLVM_module_codegen_emit_ir");
@@ -672,7 +670,7 @@ unsafe fn embed_bitcode(cgcx: &CodegenContext<LlvmCodegenBackend>,
     let llglobal = llvm::LLVMAddGlobal(
         llmod,
         common::val_ty(llconst),
-        "rustc.embedded.module\0".as_ptr() as *const _,
+        "rustc.embedded.module\0".as_ptr().cast(),
     );
     llvm::LLVMSetInitializer(llglobal, llconst);
 
@@ -684,7 +682,7 @@ unsafe fn embed_bitcode(cgcx: &CodegenContext<LlvmCodegenBackend>,
     } else {
         ".llvmbc\0"
     };
-    llvm::LLVMSetSection(llglobal, section.as_ptr() as *const _);
+    llvm::LLVMSetSection(llglobal, section.as_ptr().cast());
     llvm::LLVMRustSetLinkage(llglobal, llvm::Linkage::PrivateLinkage);
     llvm::LLVMSetGlobalConstant(llglobal, llvm::True);
 
@@ -692,7 +690,7 @@ unsafe fn embed_bitcode(cgcx: &CodegenContext<LlvmCodegenBackend>,
     let llglobal = llvm::LLVMAddGlobal(
         llmod,
         common::val_ty(llconst),
-        "rustc.embedded.cmdline\0".as_ptr() as *const _,
+        "rustc.embedded.cmdline\0".as_ptr().cast(),
     );
     llvm::LLVMSetInitializer(llglobal, llconst);
     let section = if  is_apple {
@@ -700,7 +698,7 @@ unsafe fn embed_bitcode(cgcx: &CodegenContext<LlvmCodegenBackend>,
     } else {
         ".llvmcmd\0"
     };
-    llvm::LLVMSetSection(llglobal, section.as_ptr() as *const _);
+    llvm::LLVMSetSection(llglobal, section.as_ptr().cast());
     llvm::LLVMRustSetLinkage(llglobal, llvm::Linkage::PrivateLinkage);
 }
 
@@ -842,7 +840,7 @@ fn create_msvc_imps(
         for (imp_name, val) in globals {
             let imp = llvm::LLVMAddGlobal(llmod,
                                           i8p_ty,
-                                          imp_name.as_ptr() as *const _);
+                                          imp_name.as_ptr().cast());
             llvm::LLVMSetInitializer(imp, consts::ptrcast(val, i8p_ty));
             llvm::LLVMRustSetLinkage(imp, llvm::Linkage::ExternalLinkage);
         }
index 71a6067fd48a14f89f8d58fb67bcc80aa084d398..98be0ae44335f14723e212d60012aa8a25d5f54c 100644 (file)
@@ -52,6 +52,7 @@ fn drop(&mut self) {
 
 impl BackendTypes for Builder<'_, 'll, 'tcx> {
     type Value = <CodegenCx<'ll, 'tcx> as BackendTypes>::Value;
+    type Function = <CodegenCx<'ll, 'tcx> as BackendTypes>::Function;
     type BasicBlock = <CodegenCx<'ll, 'tcx> as BackendTypes>::BasicBlock;
     type Type = <CodegenCx<'ll, 'tcx> as BackendTypes>::Type;
     type Funclet = <CodegenCx<'ll, 'tcx> as BackendTypes>::Funclet;
index 35d5107842d5b3464b2d7b6f8a86b6bffdb62f12..08fa23f2a7c9e31b09b539309692a655f9384926 100644 (file)
@@ -33,7 +33,7 @@ pub fn get_fn(
     assert!(!instance.substs.has_param_types());
 
     let sig = instance.fn_sig(cx.tcx());
-    if let Some(&llfn) = cx.instances().borrow().get(&instance) {
+    if let Some(&llfn) = cx.instances.borrow().get(&instance) {
         return llfn;
     }
 
index 6fbea9646b8a9fe524514000c23c70a6f3ee8f17..a1a5232d588327c7fc8d3eb95bcadd8bd8181557 100644 (file)
@@ -2,7 +2,7 @@
 
 //! Code that is useful in various codegen modules.
 
-use crate::llvm::{self, True, False, Bool, BasicBlock, OperandBundleDef};
+use crate::llvm::{self, True, False, Bool, BasicBlock, OperandBundleDef, ConstantInt};
 use crate::abi;
 use crate::consts;
 use crate::type_::Type;
@@ -86,6 +86,8 @@ pub fn bundle(&self) -> &OperandBundleDef<'ll> {
 
 impl BackendTypes for CodegenCx<'ll, 'tcx> {
     type Value = &'ll Value;
+    type Function = &'ll Value;
+
     type BasicBlock = &'ll BasicBlock;
     type Type = &'ll Type;
     type Funclet = Funclet<'ll>;
@@ -243,33 +245,23 @@ fn const_struct(
         struct_in_context(self.llcx, elts, packed)
     }
 
-    fn const_to_uint(&self, v: &'ll Value) -> u64 {
-        unsafe {
+    fn const_to_opt_uint(&self, v: &'ll Value) -> Option<u64> {
+        try_as_const_integral(v).map(|v| unsafe {
             llvm::LLVMConstIntGetZExtValue(v)
-        }
-    }
-
-    fn is_const_integral(&self, v: &'ll Value) -> bool {
-        unsafe {
-            llvm::LLVMIsAConstantInt(v).is_some()
-        }
+        })
     }
 
     fn const_to_opt_u128(&self, v: &'ll Value, sign_ext: bool) -> Option<u128> {
-        unsafe {
-            if self.is_const_integral(v) {
-                let (mut lo, mut hi) = (0u64, 0u64);
-                let success = llvm::LLVMRustConstInt128Get(v, sign_ext,
-                                                           &mut hi, &mut lo);
-                if success {
-                    Some(hi_lo_to_u128(lo, hi))
-                } else {
-                    None
-                }
+        try_as_const_integral(v).and_then(|v| unsafe {
+            let (mut lo, mut hi) = (0u64, 0u64);
+            let success = llvm::LLVMRustConstInt128Get(v, sign_ext,
+                                                        &mut hi, &mut lo);
+            if success {
+                Some(hi_lo_to_u128(lo, hi))
             } else {
                 None
             }
-        }
+        })
     }
 
     fn scalar_to_backend(
@@ -305,7 +297,7 @@ fn scalar_to_backend(
                         }
                     }
                     Some(GlobalAlloc::Function(fn_instance)) => {
-                        self.get_fn(fn_instance)
+                        self.get_fn_addr(fn_instance)
                     }
                     Some(GlobalAlloc::Static(def_id)) => {
                         assert!(self.tcx.is_static(def_id));
@@ -386,3 +378,9 @@ pub fn struct_in_context(
 fn hi_lo_to_u128(lo: u64, hi: u64) -> u128 {
     ((hi as u128) << 64) | (lo as u128)
 }
+
+fn try_as_const_integral(v: &'ll Value) -> Option<&'ll ConstantInt> {
+    unsafe {
+        llvm::LLVMIsAConstantInt(v)
+    }
+}
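The hunk above folds the old `is_const_integral` / `const_to_uint` pair into a single `const_to_opt_uint` returning `Option<u64>`, with `try_as_const_integral` doing the checked downcast to `ConstantInt`. A minimal, self-contained sketch of the same check-and-extract pattern; the `Const` enum and helper below are illustrative stand-ins, not rustc or LLVM types:

```
// Standalone illustration of folding "is it a constant integer?" and
// "get its value" into one Option-returning accessor.
#[derive(Debug)]
enum Const {
    Int(u64),
    Other,
}

fn try_as_const_integral(v: &Const) -> Option<u64> {
    match v {
        Const::Int(n) => Some(*n),
        Const::Other => None,
    }
}

fn main() {
    let values = [Const::Int(0), Const::Int(7), Const::Other];
    for v in &values {
        // Callers match on the Option instead of calling a separate
        // is_const_integral() predicate first.
        match try_as_const_integral(v) {
            Some(0) => println!("{:?}: zero constant", v),
            Some(n) => println!("{:?}: constant {}", v, n),
            None => println!("{:?}: not a constant integer", v),
        }
    }
}
```

Call sites later in this commit (for example `project_index` and the zero-array memset check in `codegen_rvalue`) switch from the predicate-plus-getter style to matching on the returned `Option`.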
index cba5ee3260c16a8ed58d9cd4aad28d56500b6280..d4df5b4a804ef8345e06ba20a87b1fcd496f1f8c 100644 (file)
@@ -488,7 +488,7 @@ fn codegen_static(
                 if let Some(section) = attrs.link_section {
                     let section = llvm::LLVMMDStringInContext(
                         self.llcx,
-                        section.as_str().as_ptr() as *const _,
+                        section.as_str().as_ptr().cast(),
                         section.as_str().len() as c_uint,
                     );
                     assert!(alloc.relocations().is_empty());
@@ -500,14 +500,14 @@ fn codegen_static(
                         0..alloc.len());
                     let alloc = llvm::LLVMMDStringInContext(
                         self.llcx,
-                        bytes.as_ptr() as *const _,
+                        bytes.as_ptr().cast(),
                         bytes.len() as c_uint,
                     );
                     let data = [section, alloc];
                     let meta = llvm::LLVMMDNodeInContext(self.llcx, data.as_ptr(), 2);
                     llvm::LLVMAddNamedMetadataOperand(
                         self.llmod,
-                        "wasm.custom_sections\0".as_ptr() as *const _,
+                        "wasm.custom_sections\0".as_ptr().cast(),
                         meta,
                     );
                 }
index 58ce97039099e7bc7dc0477a84e483d82ac6d268..2da93877172141890b20c3ba11cbc146e5c83733 100644 (file)
@@ -20,7 +20,6 @@
 use rustc::ty::{self, Ty, TyCtxt, Instance};
 use rustc::util::nodemap::FxHashMap;
 use rustc_target::spec::{HasTargetSpec, Target};
-use rustc_codegen_ssa::callee::resolve_and_get_fn;
 use rustc_codegen_ssa::base::wants_msvc_seh;
 use crate::callee::get_fn;
 
@@ -211,7 +210,7 @@ pub unsafe fn create_module(
     // If skipping the PLT is enabled, we need to add some module metadata
     // to ensure intrinsic calls don't use it.
     if !sess.needs_plt() {
-        let avoid_plt = "RtLibUseGOT\0".as_ptr() as *const _;
+        let avoid_plt = "RtLibUseGOT\0".as_ptr().cast();
         llvm::LLVMRustAddModuleFlag(llmod, avoid_plt, 1);
     }
 
@@ -327,11 +326,11 @@ fn vtables(&self) -> &RefCell<FxHashMap<(Ty<'tcx>,
         &self.vtables
     }
 
-    fn instances(&self) -> &RefCell<FxHashMap<Instance<'tcx>, &'ll Value>> {
-        &self.instances
+    fn get_fn(&self, instance: Instance<'tcx>) -> &'ll Value {
+        get_fn(self, instance)
     }
 
-    fn get_fn(&self, instance: Instance<'tcx>) -> &'ll Value {
+    fn get_fn_addr(&self, instance: Instance<'tcx>) -> &'ll Value {
         get_fn(self, instance)
     }
 
@@ -362,7 +361,14 @@ fn eh_personality(&self) -> &'ll Value {
         let tcx = self.tcx;
         let llfn = match tcx.lang_items().eh_personality() {
             Some(def_id) if !wants_msvc_seh(self.sess()) => {
-                resolve_and_get_fn(self, def_id, tcx.intern_substs(&[]))
+                self.get_fn_addr(
+                    ty::Instance::resolve(
+                        tcx,
+                        ty::ParamEnv::reveal_all(),
+                        def_id,
+                        tcx.intern_substs(&[]),
+                    ).unwrap()
+                )
             }
             _ => {
                 let name = if wants_msvc_seh(self.sess()) {
@@ -390,7 +396,14 @@ fn eh_unwind_resume(&self) -> &'ll Value {
         let tcx = self.tcx;
         assert!(self.sess().target.target.options.custom_unwind_resume);
         if let Some(def_id) = tcx.lang_items().eh_unwind_resume() {
-            let llfn = resolve_and_get_fn(self, def_id, tcx.intern_substs(&[]));
+            let llfn = self.get_fn_addr(
+                ty::Instance::resolve(
+                    tcx,
+                    ty::ParamEnv::reveal_all(),
+                    def_id,
+                    tcx.intern_substs(&[]),
+                ).unwrap()
+            );
             unwresume.set(Some(llfn));
             return llfn;
         }
@@ -537,6 +550,7 @@ macro_rules! vector_types {
         ifn!("llvm.trap", fn() -> void);
         ifn!("llvm.debugtrap", fn() -> void);
         ifn!("llvm.frameaddress", fn(t_i32) -> i8p);
+        ifn!("llvm.sideeffect", fn() -> void);
 
         ifn!("llvm.powi.f32", fn(t_f32, t_i32) -> t_f32);
         ifn!("llvm.powi.v2f32", fn(t_v2f32, t_i32) -> t_v2f32);
index 04c9e93c7a527cf79c41c18d035706cc740ca987..9ed1c1730a697b1a44307c5d72be2454adb8a0b7 100644 (file)
@@ -37,7 +37,7 @@ pub fn get_or_insert_gdb_debug_scripts_section_global(cx: &CodegenCx<'ll, '_>)
 
     let section_var = unsafe {
         llvm::LLVMGetNamedGlobal(cx.llmod,
-                                 c_section_var_name.as_ptr() as *const _)
+                                 c_section_var_name.as_ptr().cast())
     };
 
     section_var.unwrap_or_else(|| {
@@ -52,7 +52,7 @@ pub fn get_or_insert_gdb_debug_scripts_section_global(cx: &CodegenCx<'ll, '_>)
                                                      llvm_type).unwrap_or_else(||{
                 bug!("symbol `{}` is already defined", section_var_name)
             });
-            llvm::LLVMSetSection(section_var, section_name.as_ptr() as *const _);
+            llvm::LLVMSetSection(section_var, section_name.as_ptr().cast());
             llvm::LLVMSetInitializer(section_var, cx.const_bytes(section_contents));
             llvm::LLVMSetGlobalConstant(section_var, llvm::True);
             llvm::LLVMSetUnnamedAddr(section_var, llvm::True);
index e69f4b6aca19a9e9d45b596e44966fe5dc25858b..438a660b8a8673b5895737f732373da26868cbf3 100644 (file)
@@ -30,7 +30,7 @@
 use rustc::ty::{self, AdtKind, ParamEnv, Ty, TyCtxt};
 use rustc::ty::layout::{self, Align, Integer, IntegerExt, LayoutOf,
                         PrimitiveExt, Size, TyLayout, VariantIdx};
-use rustc::ty::subst::GenericArgKind;
+use rustc::ty::subst::{GenericArgKind, SubstsRef};
 use rustc::session::config::{self, DebugInfo};
 use rustc::util::nodemap::FxHashMap;
 use rustc_fs_util::path_to_c_string;
@@ -692,9 +692,10 @@ pub fn type_metadata(
                                    Some(containing_scope)).finalize(cx)
         }
         ty::Generator(def_id, substs,  _) => {
-            let upvar_tys : Vec<_> = substs.prefix_tys(def_id, cx.tcx).map(|t| {
-                cx.tcx.normalize_erasing_regions(ParamEnv::reveal_all(), t)
-            }).collect();
+            let upvar_tys : Vec<_> = substs
+                .as_generator().prefix_tys(def_id, cx.tcx).map(|t| {
+                    cx.tcx.normalize_erasing_regions(ParamEnv::reveal_all(), t)
+                }).collect();
             prepare_enum_metadata(cx,
                                   t,
                                   def_id,
@@ -960,9 +961,9 @@ pub fn compile_unit_metadata(
             file_metadata,
             producer.as_ptr(),
             tcx.sess.opts.optimize != config::OptLevel::No,
-            flags.as_ptr() as *const _,
+            flags.as_ptr().cast(),
             0,
-            split_name.as_ptr() as *const _,
+            split_name.as_ptr().cast(),
             kind);
 
         if tcx.sess.opts.debugging_opts.profile {
@@ -991,7 +992,7 @@ pub fn compile_unit_metadata(
         if tcx.sess.opts.target_triple.triple().starts_with("wasm32") {
             let name_metadata = llvm::LLVMMDStringInContext(
                 debug_context.llcontext,
-                rustc_producer.as_ptr() as *const _,
+                rustc_producer.as_ptr().cast(),
                 rustc_producer.as_bytes().len() as c_uint,
             );
             llvm::LLVMAddNamedMetadataOperand(
@@ -1338,7 +1339,7 @@ fn create_member_descriptions(&self, cx: &CodegenCx<'ll, 'tcx>)
                 ty::Adt(adt, _) => VariantInfo::Adt(&adt.variants[index]),
                 ty::Generator(def_id, substs, _) => {
                     let generator_layout = cx.tcx.generator_layout(*def_id);
-                    VariantInfo::Generator(*substs, generator_layout, index)
+                    VariantInfo::Generator(substs, generator_layout, index)
                 }
                 _ => bug!(),
             }
@@ -1611,7 +1612,7 @@ enum EnumDiscriminantInfo<'ll> {
 #[derive(Copy, Clone)]
 enum VariantInfo<'tcx> {
     Adt(&'tcx ty::VariantDef),
-    Generator(ty::GeneratorSubsts<'tcx>, &'tcx GeneratorLayout<'tcx>, VariantIdx),
+    Generator(SubstsRef<'tcx>, &'tcx GeneratorLayout<'tcx>, VariantIdx),
 }
 
 impl<'tcx> VariantInfo<'tcx> {
@@ -1619,7 +1620,7 @@ fn map_struct_name<R>(&self, f: impl FnOnce(&str) -> R) -> R {
         match self {
             VariantInfo::Adt(variant) => f(&variant.ident.as_str()),
             VariantInfo::Generator(substs, _, variant_index) =>
-                f(&substs.variant_name(*variant_index)),
+                f(&substs.as_generator().variant_name(*variant_index)),
         }
     }
 
@@ -1763,9 +1764,10 @@ fn prepare_enum_metadata(
                 })
                 .collect(),
             ty::Generator(_, substs, _) => substs
+                .as_generator()
                 .variant_range(enum_def_id, cx.tcx)
                 .map(|variant_index| {
-                    let name = SmallCStr::new(&substs.variant_name(variant_index));
+                    let name = SmallCStr::new(&substs.as_generator().variant_name(variant_index));
                     unsafe {
                         Some(llvm::LLVMRustDIBuilderCreateEnumerator(
                             DIB(cx),
index e0e0cd5f739e2718474d2b2828948cf8a2157901..6e4ed42c45e9792ad2e019c8a0d50457310b34b2 100644 (file)
@@ -127,20 +127,20 @@ pub fn finalize(cx: &CodegenCx<'_, '_>) {
         if cx.sess().target.target.options.is_like_osx ||
            cx.sess().target.target.options.is_like_android {
             llvm::LLVMRustAddModuleFlag(cx.llmod,
-                                        "Dwarf Version\0".as_ptr() as *const _,
+                                        "Dwarf Version\0".as_ptr().cast(),
                                         2)
         }
 
         // Indicate that we want CodeView debug information on MSVC
         if cx.sess().target.target.options.is_like_msvc {
             llvm::LLVMRustAddModuleFlag(cx.llmod,
-                                        "CodeView\0".as_ptr() as *const _,
+                                        "CodeView\0".as_ptr().cast(),
                                         1)
         }
 
         // Prevent bitcode readers from deleting the debug info.
         let ptr = "Debug Info Version\0".as_ptr();
-        llvm::LLVMRustAddModuleFlag(cx.llmod, ptr as *const _,
+        llvm::LLVMRustAddModuleFlag(cx.llmod, ptr.cast(),
                                     llvm::LLVMRustDebugMetadataVersion());
     };
 }
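Many hunks in this commit replace `as *const _` casts on raw pointers with `pointer::cast`. A small sketch of the difference, assuming a function that takes a C string pointer: `cast` changes only the pointee type, so it cannot silently alter mutability or the level of indirection the way an `as` cast to an inferred target can.

```
use std::os::raw::c_char;

fn takes_c_string(_s: *const c_char) {}

fn main() {
    let flag = "Debug Info Version\0";
    // Old style: an `as` cast with a fully inferred target type.
    takes_c_string(flag.as_ptr() as *const _);
    // New style: `cast` converts only the pointee type (u8 -> c_char here),
    // keeping the pointer-ness and constness of the original.
    takes_c_string(flag.as_ptr().cast());
}
```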
diff --git a/src/librustc_codegen_llvm/error_codes.rs b/src/librustc_codegen_llvm/error_codes.rs
deleted file mode 100644 (file)
index 042e51e..0000000
+++ /dev/null
@@ -1,38 +0,0 @@
-register_diagnostics! {
-
-E0511: r##"
-Invalid monomorphization of an intrinsic function was used. Erroneous code
-example:
-
-```ignore (error-emitted-at-codegen-which-cannot-be-handled-by-compile_fail)
-#![feature(platform_intrinsics)]
-
-extern "platform-intrinsic" {
-    fn simd_add<T>(a: T, b: T) -> T;
-}
-
-fn main() {
-    unsafe { simd_add(0, 1); }
-    // error: invalid monomorphization of `simd_add` intrinsic
-}
-```
-
-The generic type has to be a SIMD type. Example:
-
-```
-#![feature(repr_simd)]
-#![feature(platform_intrinsics)]
-
-#[repr(simd)]
-#[derive(Copy, Clone)]
-struct i32x2(i32, i32);
-
-extern "platform-intrinsic" {
-    fn simd_add<T>(a: T, b: T) -> T;
-}
-
-unsafe { simd_add(i32x2(0, 0), i32x2(1, 2)); } // ok!
-```
-"##,
-
-}
index b7a410c3760cdd5cfa7e300674c4da0bf9c0d888..3df8d4c26903bd43e859632e2c53a334b69689aa 100644 (file)
@@ -20,9 +20,9 @@
 use rustc::hir;
 use syntax::ast::{self, FloatTy};
 
+use rustc_codegen_ssa::common::span_invalid_monomorphization_error;
 use rustc_codegen_ssa::traits::*;
 
-use rustc::session::Session;
 use syntax_pos::Span;
 
 use std::cmp::Ordering;
@@ -724,6 +724,13 @@ fn expect(&mut self, cond: Self::Value, expected: bool) -> Self::Value {
         self.call(expect, &[cond, self.const_bool(expected)], None)
     }
 
+    fn sideeffect(&mut self) {
+        if self.tcx.sess.opts.debugging_opts.insert_sideeffect {
+            let fnname = self.get_intrinsic(&("llvm.sideeffect"));
+            self.call(fnname, &[], None);
+        }
+    }
+
     fn va_start(&mut self, va_list: &'ll Value) -> &'ll Value {
         let intrinsic = self.cx().get_intrinsic("llvm.va_start");
         self.call(intrinsic, &[va_list], None)
@@ -810,6 +817,7 @@ fn codegen_msvc_try(
 ) {
     let llfn = get_rust_try_fn(bx, &mut |mut bx| {
         bx.set_personality_fn(bx.eh_personality());
+        bx.sideeffect();
 
         let mut normal = bx.build_sibling_block("normal");
         let mut catchswitch = bx.build_sibling_block("catchswitch");
@@ -933,6 +941,8 @@ fn codegen_gnu_try(
         // expected to be `*mut *mut u8` for this to actually work, but that's
         // managed by the standard library.
 
+        bx.sideeffect();
+
         let mut then = bx.build_sibling_block("then");
         let mut catch = bx.build_sibling_block("catch");
 
@@ -1016,10 +1026,6 @@ fn get_rust_try_fn<'ll, 'tcx>(
     rust_try
 }
 
-fn span_invalid_monomorphization_error(a: &Session, b: Span, c: &str) {
-    span_err!(a, b, E0511, "{}", c);
-}
-
 fn generic_simd_intrinsic(
     bx: &mut Builder<'a, 'll, 'tcx>,
     name: &str,
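The `sideeffect` method added above emits a call to the `llvm.sideeffect` intrinsic (registered via `ifn!` earlier in this commit) only when the `insert_sideeffect` debugging option is enabled. A hedged, self-contained sketch of that guard-then-emit shape; the `Opts` and `Builder` types below are stand-ins, not the real rustc types:

```
// Illustrative stand-ins for the session flag and the IR builder.
struct Opts {
    insert_sideeffect: bool,
}

struct Builder {
    opts: Opts,
    emitted: Vec<String>,
}

impl Builder {
    // Mirrors the shape of the new `sideeffect` builder method:
    // only emit the intrinsic call when the debugging flag asks for it.
    fn sideeffect(&mut self) {
        if self.opts.insert_sideeffect {
            self.emitted.push("call void @llvm.sideeffect()".to_string());
        }
    }
}

fn main() {
    let mut bx = Builder { opts: Opts { insert_sideeffect: true }, emitted: vec![] };
    bx.sideeffect();
    assert_eq!(bx.emitted.len(), 1);

    let mut bx = Builder { opts: Opts { insert_sideeffect: false }, emitted: vec![] };
    bx.sideeffect();
    assert!(bx.emitted.is_empty());
}
```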
index 87eab484fafc697bddefc47f6273e59bee5b11b5..9b55bef0c514db7ae73520cdaa1b21fe247c5c6d 100644 (file)
@@ -19,7 +19,6 @@
 #![feature(link_args)]
 #![feature(static_nobundle)]
 #![feature(trusted_len)]
-#![feature(mem_take)]
 
 use back::write::{create_target_machine, create_informational_target_machine};
 use syntax_pos::symbol::Symbol;
@@ -39,7 +38,8 @@
 extern crate rustc_driver as _;
 
 #[macro_use] extern crate log;
-#[macro_use] extern crate syntax;
+extern crate syntax;
+extern crate syntax_expand;
 extern crate syntax_pos;
 extern crate rustc_errors as errors;
 
@@ -49,7 +49,7 @@
 use rustc_codegen_ssa::CompiledModule;
 use errors::{FatalError, Handler};
 use rustc::dep_graph::WorkProduct;
-use syntax::ext::allocator::AllocatorKind;
+use syntax_expand::allocator::AllocatorKind;
 use syntax_pos::symbol::InternedString;
 pub use llvm_util::target_features;
 use std::any::Any;
@@ -65,8 +65,6 @@
 use rustc_codegen_ssa::ModuleCodegen;
 use rustc_codegen_utils::codegen_backend::CodegenBackend;
 
-mod error_codes;
-
 mod back {
     pub mod archive;
     pub mod bytecode;
@@ -259,10 +257,6 @@ fn print_version(&self) {
         llvm_util::print_version();
     }
 
-    fn diagnostics(&self) -> &[(&'static str, &'static str)] {
-        &error_codes::DIAGNOSTICS
-    }
-
     fn target_features(&self, sess: &Session) -> Vec<Symbol> {
         target_features(sess)
     }
@@ -272,15 +266,10 @@ fn metadata_loader(&self) -> Box<dyn MetadataLoader + Sync> {
     }
 
     fn provide(&self, providers: &mut ty::query::Providers<'_>) {
-        rustc_codegen_utils::symbol_names::provide(providers);
-        rustc_codegen_ssa::back::symbol_export::provide(providers);
-        rustc_codegen_ssa::base::provide_both(providers);
         attributes::provide(providers);
     }
 
     fn provide_extern(&self, providers: &mut ty::query::Providers<'_>) {
-        rustc_codegen_ssa::back::symbol_export::provide_extern(providers);
-        rustc_codegen_ssa::base::provide_both(providers);
         attributes::provide_extern(providers);
     }
 
index b07214fdc03f3c22b4f93d2e462369ca87cd1bff..a2313b933a6c970b56e63811f4a904bd40a7a81e 100644 (file)
@@ -510,6 +510,7 @@ struct InvariantOpaque<'a> {
 extern { pub type Context; }
 extern { pub type Type; }
 extern { pub type Value; }
+extern { pub type ConstantInt; }
 extern { pub type Metadata; }
 extern { pub type BasicBlock; }
 #[repr(C)]
@@ -719,8 +720,8 @@ pub fn LLVMStructTypeInContext(C: &'a Context,
     pub fn LLVMConstInt(IntTy: &Type, N: c_ulonglong, SignExtend: Bool) -> &Value;
     pub fn LLVMConstIntOfArbitraryPrecision(IntTy: &Type, Wn: c_uint, Ws: *const u64) -> &Value;
     pub fn LLVMConstReal(RealTy: &Type, N: f64) -> &Value;
-    pub fn LLVMConstIntGetZExtValue(ConstantVal: &Value) -> c_ulonglong;
-    pub fn LLVMRustConstInt128Get(ConstantVal: &Value, SExt: bool,
+    pub fn LLVMConstIntGetZExtValue(ConstantVal: &ConstantInt) -> c_ulonglong;
+    pub fn LLVMRustConstInt128Get(ConstantVal: &ConstantInt, SExt: bool,
                                   high: &mut u64, low: &mut u64) -> bool;
 
 
@@ -1666,7 +1667,7 @@ pub fn LLVMRustDIBuilderCreateDebugLocation(Context: &'a Context,
     #[allow(improper_ctypes)]
     pub fn LLVMRustWriteValueToString(value_ref: &Value, s: &RustString);
 
-    pub fn LLVMIsAConstantInt(value_ref: &Value) -> Option<&Value>;
+    pub fn LLVMIsAConstantInt(value_ref: &Value) -> Option<&ConstantInt>;
 
     pub fn LLVMRustPassKind(Pass: &Pass) -> PassKind;
     pub fn LLVMRustFindAndCreatePass(Pass: *const c_char) -> Option<&'static mut Pass>;
index 81a99bc5019b3ece4fd7bfe9873132b84dd35f15..d921bbc96adeebeedcab5dfbeab0530ede3ee620 100644 (file)
@@ -65,7 +65,7 @@ fn uncached_llvm_type<'a, 'tcx>(cx: &CodegenCx<'a, 'tcx>,
             if let (&ty::Generator(_, substs, _), &layout::Variants::Single { index })
                  = (&layout.ty.kind, &layout.variants)
             {
-                write!(&mut name, "::{}", substs.variant_name(index)).unwrap();
+                write!(&mut name, "::{}", substs.as_generator().variant_name(index)).unwrap();
             }
             Some(name)
         }
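This hunk is part of the same refactor as the debuginfo changes above: generator substitutions are now carried as a plain `SubstsRef` and viewed through `as_generator()` only where generator-specific queries such as `variant_name` or `prefix_tys` are needed. A standalone sketch of that view-on-demand pattern, with purely illustrative types:

```
// Generic substitutions, stored without a generator-specific wrapper.
#[derive(Clone, Copy)]
struct Substs<'a> {
    tys: &'a [&'static str],
}

// A lightweight view that adds generator-specific helpers on demand.
#[derive(Clone, Copy)]
struct GeneratorSubsts<'a> {
    substs: Substs<'a>,
}

impl<'a> Substs<'a> {
    // Counterpart of `substs.as_generator()` in the hunks above.
    fn as_generator(self) -> GeneratorSubsts<'a> {
        GeneratorSubsts { substs: self }
    }
}

impl<'a> GeneratorSubsts<'a> {
    fn variant_name(self, index: usize) -> String {
        format!("Suspend{}", index)
    }

    fn prefix_tys(self) -> impl Iterator<Item = &'static str> + 'a {
        self.substs.tys.iter().copied()
    }
}

fn main() {
    let substs = Substs { tys: &["i32", "String"] };
    // Callers keep the plain substs around and only take the
    // generator view at the use site.
    println!("{}", substs.as_generator().variant_name(0));
    for ty in substs.as_generator().prefix_tys() {
        println!("captured: {}", ty);
    }
}
```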
index c7d09a423d5e3b7c2ec9aef13a13a4480c66c811..6992f93d9994950e425ac467e5c28d3066bf938e 100644 (file)
@@ -21,6 +21,7 @@ tempfile = "3.1"
 
 rustc_serialize = { path = "../libserialize", package = "serialize" }
 syntax = { path = "../libsyntax" }
+syntax_expand = { path = "../libsyntax_expand" }
 syntax_pos = { path = "../libsyntax_pos" }
 rustc = { path = "../librustc" }
 rustc_apfloat = { path = "../librustc_apfloat" }
index c8bb2e7ee9965ba5c1eb9475de66c1311c5af991..a09a0c22c1ba5ec8dbd1918c809ee71fbde5fa99 100644 (file)
@@ -84,7 +84,7 @@ pub trait BuilderMethods<'a, 'tcx>:
 {
     fn new_block<'b>(
         cx: &'a Self::CodegenCx,
-        llfn: Self::Value,
+        llfn: Self::Function,
         name: &'b str
     ) -> Self;
     /* ... */
index 3b7ae5e33d5e716e7d869d790617fa83a1ac969c..1c5d3b1a890ee76dfcfcd60966208b55aeed3bfe 100644 (file)
@@ -323,6 +323,7 @@ fn link_rlib<'a, B: ArchiveBuilder<'a>>(sess: &'a Session,
             NativeLibraryKind::NativeStatic => {}
             NativeLibraryKind::NativeStaticNobundle |
             NativeLibraryKind::NativeFramework |
+            NativeLibraryKind::NativeRawDylib |
             NativeLibraryKind::NativeUnknown => continue,
         }
         if let Some(name) = lib.name {
@@ -883,7 +884,8 @@ pub fn print_native_static_libs(sess: &Session, all_native_libs: &[NativeLibrary
                     Some(format!("-framework {}", name))
                 },
                 // These are included, no need to print them
-                NativeLibraryKind::NativeStatic => None,
+                NativeLibraryKind::NativeStatic |
+                NativeLibraryKind::NativeRawDylib => None,
             }
         })
         .collect();
@@ -1293,7 +1295,11 @@ pub fn add_local_native_libraries(cmd: &mut dyn Linker,
             NativeLibraryKind::NativeUnknown => cmd.link_dylib(name),
             NativeLibraryKind::NativeFramework => cmd.link_framework(name),
             NativeLibraryKind::NativeStaticNobundle => cmd.link_staticlib(name),
-            NativeLibraryKind::NativeStatic => cmd.link_whole_staticlib(name, &search_path)
+            NativeLibraryKind::NativeStatic => cmd.link_whole_staticlib(name, &search_path),
+            NativeLibraryKind::NativeRawDylib => {
+                // FIXME(#58713): Proper handling for raw dylibs.
+                bug!("raw_dylib feature not yet implemented");
+            },
         }
     }
 }
@@ -1385,7 +1391,9 @@ fn add_upstream_rust_crates<'a, B: ArchiveBuilder<'a>>(
             _ if codegen_results.crate_info.profiler_runtime == Some(cnum) => {
                 add_static_crate::<B>(cmd, sess, codegen_results, tmpdir, crate_type, cnum);
             }
-            _ if codegen_results.crate_info.sanitizer_runtime == Some(cnum) => {
+            _ if codegen_results.crate_info.sanitizer_runtime == Some(cnum) &&
+                  crate_type == config::CrateType::Executable => {
+                // Link the sanitizer runtimes only if we are actually producing an executable
                 link_sanitizer_runtime::<B>(cmd, sess, codegen_results, tmpdir, cnum);
             }
             // compiler-builtins are always placed last to ensure that they're
@@ -1527,7 +1535,7 @@ fn add_static_crate<'a, B: ArchiveBuilder<'a>>(cmd: &mut dyn Linker,
         let name = cratepath.file_name().unwrap().to_str().unwrap();
         let name = &name[3..name.len() - 5]; // chop off lib/.rlib
 
-        time_ext(sess.time_extended(), Some(sess), &format!("altering {}.rlib", name), || {
+        time_ext(sess.time_extended(), &format!("altering {}.rlib", name), || {
             let mut archive = <B as ArchiveBuilder>::new(sess, &dst, Some(cratepath));
             archive.update_symbols();
 
@@ -1678,7 +1686,11 @@ pub fn add_upstream_native_libraries(
                 // ignore statically included native libraries here as we've
                 // already included them when we included the rust library
                 // previously
-                NativeLibraryKind::NativeStatic => {}
+                NativeLibraryKind::NativeStatic => {},
+                NativeLibraryKind::NativeRawDylib => {
+                    // FIXME(#58713): Proper handling for raw dylibs.
+                    bug!("raw_dylib feature not yet implemented");
+                },
             }
         }
     }
index 9078f77f1f7a25db8580f9dc82e77b3b0d3e8172..d866a10f06935b55117f0a7b95469297add79afb 100644 (file)
@@ -14,7 +14,7 @@
 use rustc::ty::subst::SubstsRef;
 use rustc::util::nodemap::{FxHashMap, DefIdMap};
 use rustc_index::vec::IndexVec;
-use syntax::ext::allocator::ALLOCATOR_METHODS;
+use syntax_expand::allocator::ALLOCATOR_METHODS;
 
 pub type ExportedSymbols = FxHashMap<
     CrateNum,
index caaa500766d014a1f7c755e016bbc538decfa294..8bc815f2c622ed61406cab1ac0ce2e596cf14388 100644 (file)
 use rustc::util::profiling::SelfProfilerRef;
 use rustc_fs_util::link_or_copy;
 use rustc_data_structures::svh::Svh;
-use rustc_errors::{Handler, Level, FatalError, DiagnosticId};
+use rustc_data_structures::sync::Lrc;
+use rustc_errors::{Handler, Level, FatalError, DiagnosticId, SourceMapperDyn};
 use rustc_errors::emitter::{Emitter};
 use rustc_target::spec::MergeFunctions;
 use syntax::attr;
-use syntax::ext::hygiene::ExpnId;
+use syntax_expand::hygiene::ExpnId;
 use syntax_pos::symbol::{Symbol, sym};
 use jobserver::{Client, Acquired};
 
@@ -321,8 +322,6 @@ pub fn start_async_codegen<B: ExtraBackendMethods>(
     let (coordinator_send, coordinator_receive) = channel();
     let sess = tcx.sess;
 
-    sess.prof.generic_activity_start("codegen_and_optimize_crate");
-
     let crate_name = tcx.crate_name(LOCAL_CRATE);
     let crate_hash = tcx.crate_hash(LOCAL_CRATE);
     let no_builtins = attr::contains_name(&tcx.hir().krate().attrs, sym::no_builtins);
@@ -1665,13 +1664,13 @@ pub fn fatal(&self, msg: &str) {
 }
 
 impl Emitter for SharedEmitter {
-    fn emit_diagnostic(&mut self, db: &rustc_errors::Diagnostic) {
+    fn emit_diagnostic(&mut self, diag: &rustc_errors::Diagnostic) {
         drop(self.sender.send(SharedEmitterMessage::Diagnostic(Diagnostic {
-            msg: db.message(),
-            code: db.code.clone(),
-            lvl: db.level,
+            msg: diag.message(),
+            code: diag.code.clone(),
+            lvl: diag.level,
         })));
-        for child in &db.children {
+        for child in &diag.children {
             drop(self.sender.send(SharedEmitterMessage::Diagnostic(Diagnostic {
                 msg: child.message(),
                 code: None,
@@ -1680,6 +1679,9 @@ fn emit_diagnostic(&mut self, db: &rustc_errors::Diagnostic) {
         }
         drop(self.sender.send(SharedEmitterMessage::AbortIfErrors));
     }
+    fn source_map(&self) -> Option<&Lrc<SourceMapperDyn>> {
+        None
+    }
 }
 
 impl SharedEmitterMain {
@@ -1774,8 +1776,6 @@ pub fn join(
             self.backend.print_pass_timings()
         }
 
-        sess.prof.generic_activity_end("codegen_and_optimize_crate");
-
         (CodegenResults {
             crate_name: self.crate_name,
             crate_hash: self.crate_hash,
index 935087714a7ebb3b170b13f639cca83bbff43a58..f4f3dd4d2d295520629a458f2d320ed11a8071db 100644 (file)
@@ -36,7 +36,6 @@
 use crate::back::write::{OngoingCodegen, start_async_codegen, submit_pre_lto_module_to_llvm,
     submit_post_lto_module_to_llvm};
 use crate::{MemFlags, CrateInfo};
-use crate::callee;
 use crate::common::{RealPredicate, TypeKind, IntPredicate};
 use crate::meth;
 use crate::mir;
@@ -169,12 +168,6 @@ pub fn unsize_thin_ptr<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>(
             let ptr_ty = bx.cx().type_ptr_to(bx.cx().backend_type(bx.cx().layout_of(b)));
             (bx.pointercast(src, ptr_ty), unsized_info(bx.cx(), a, b, None))
         }
-        (&ty::Adt(def_a, _), &ty::Adt(def_b, _)) if def_a.is_box() && def_b.is_box() => {
-            let (a, b) = (src_ty.boxed_ty(), dst_ty.boxed_ty());
-            assert!(bx.cx().type_is_sized(a));
-            let ptr_ty = bx.cx().type_ptr_to(bx.cx().backend_type(bx.cx().layout_of(b)));
-            (bx.pointercast(src, ptr_ty), unsized_info(bx.cx(), a, b, None))
-        }
         (&ty::Adt(def_a, _), &ty::Adt(def_b, _)) => {
             assert_eq!(def_a, def_b);
 
@@ -197,6 +190,8 @@ pub fn unsize_thin_ptr<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>(
             }
             let (lldata, llextra) = result.unwrap();
             // HACK(eddyb) have to bitcast pointers until LLVM removes pointee types.
+            // FIXME(eddyb) move these out of this `match` arm, so they're always
+            // applied, uniformly, no matter the source/destination types.
             (bx.bitcast(lldata, bx.cx().scalar_pair_element_backend_type(dst_layout, 0, true)),
              bx.bitcast(llextra, bx.cx().scalar_pair_element_backend_type(dst_layout, 1, true)))
         }
@@ -213,31 +208,27 @@ pub fn coerce_unsized_into<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>(
 ) {
     let src_ty = src.layout.ty;
     let dst_ty = dst.layout.ty;
-    let mut coerce_ptr = || {
-        let (base, info) = match bx.load_operand(src).val {
-            OperandValue::Pair(base, info) => {
-                // fat-ptr to fat-ptr unsize preserves the vtable
-                // i.e., &'a fmt::Debug+Send => &'a fmt::Debug
-                // So we need to pointercast the base to ensure
-                // the types match up.
-                let thin_ptr = dst.layout.field(bx.cx(), FAT_PTR_ADDR);
-                (bx.pointercast(base, bx.cx().backend_type(thin_ptr)), info)
-            }
-            OperandValue::Immediate(base) => {
-                unsize_thin_ptr(bx, base, src_ty, dst_ty)
-            }
-            OperandValue::Ref(..) => bug!()
-        };
-        OperandValue::Pair(base, info).store(bx, dst);
-    };
     match (&src_ty.kind, &dst_ty.kind) {
         (&ty::Ref(..), &ty::Ref(..)) |
         (&ty::Ref(..), &ty::RawPtr(..)) |
         (&ty::RawPtr(..), &ty::RawPtr(..)) => {
-            coerce_ptr()
-        }
-        (&ty::Adt(def_a, _), &ty::Adt(def_b, _)) if def_a.is_box() && def_b.is_box() => {
-            coerce_ptr()
+            let (base, info) = match bx.load_operand(src).val {
+                OperandValue::Pair(base, info) => {
+                    // fat-ptr to fat-ptr unsize preserves the vtable
+                    // i.e., &'a fmt::Debug+Send => &'a fmt::Debug
+                    // So we need to pointercast the base to ensure
+                    // the types match up.
+                    // FIXME(eddyb) use `scalar_pair_element_backend_type` here,
+                    // like `unsize_thin_ptr` does.
+                    let thin_ptr = dst.layout.field(bx.cx(), FAT_PTR_ADDR);
+                    (bx.pointercast(base, bx.cx().backend_type(thin_ptr)), info)
+                }
+                OperandValue::Immediate(base) => {
+                    unsize_thin_ptr(bx, base, src_ty, dst_ty)
+                }
+                OperandValue::Ref(..) => bug!()
+            };
+            OperandValue::Pair(base, info).store(bx, dst);
         }
 
         (&ty::Adt(def_a, _), &ty::Adt(def_b, _)) => {
@@ -377,8 +368,7 @@ pub fn codegen_instance<'a, 'tcx: 'a, Bx: BuilderMethods<'a, 'tcx>>(
     let sig = instance.fn_sig(cx.tcx());
     let sig = cx.tcx().normalize_erasing_late_bound_regions(ty::ParamEnv::reveal_all(), &sig);
 
-    let lldecl = cx.instances().borrow().get(&instance).cloned().unwrap_or_else(||
-        bug!("Instance `{:?}` not already declared", instance));
+    let lldecl = cx.get_fn(instance);
 
     let mir = cx.tcx().instance_mir(instance.def);
     mir::codegen_mir::<Bx>(cx, lldecl, &mir, instance, sig);
@@ -400,7 +390,7 @@ pub fn maybe_create_entry_wrapper<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>(cx: &'
         return;
     }
 
-    let main_llfn = cx.get_fn(instance);
+    let main_llfn = cx.get_fn_addr(instance);
 
     let et = cx.tcx().entry_fn(LOCAL_CRATE).map(|e| e.1);
     match et {
@@ -416,8 +406,11 @@ fn create_entry_fn<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>(
         rust_main_def_id: DefId,
         use_start_lang_item: bool,
     ) {
-        let llfty =
-            cx.type_func(&[cx.type_int(), cx.type_ptr_to(cx.type_i8p())], cx.type_int());
+        let llfty = if cx.sess().target.target.options.main_needs_argc_argv {
+            cx.type_func(&[cx.type_int(), cx.type_ptr_to(cx.type_i8p())], cx.type_int())
+        } else {
+            cx.type_func(&[], cx.type_int())
+        };
 
         let main_ret_ty = cx.tcx().fn_sig(rust_main_def_id).output();
         // Given that `main()` has no arguments,
@@ -447,18 +440,29 @@ fn create_entry_fn<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>(
 
         bx.insert_reference_to_gdb_debug_scripts_section_global();
 
-        // Params from native main() used as args for rust start function
-        let param_argc = bx.get_param(0);
-        let param_argv = bx.get_param(1);
-        let arg_argc = bx.intcast(param_argc, cx.type_isize(), true);
-        let arg_argv = param_argv;
+        let (arg_argc, arg_argv) = if cx.sess().target.target.options.main_needs_argc_argv {
+            // Params from native main() used as args for rust start function
+            let param_argc = bx.get_param(0);
+            let param_argv = bx.get_param(1);
+            let arg_argc = bx.intcast(param_argc, cx.type_isize(), true);
+            let arg_argv = param_argv;
+            (arg_argc, arg_argv)
+        } else {
+            // The Rust start function doesn't need argc and argv, so just pass zeros.
+            let arg_argc = bx.const_int(cx.type_int(), 0);
+            let arg_argv = bx.const_null(cx.type_ptr_to(cx.type_i8p()));
+            (arg_argc, arg_argv)
+        };
 
         let (start_fn, args) = if use_start_lang_item {
             let start_def_id = cx.tcx().require_lang_item(StartFnLangItem, None);
-            let start_fn = callee::resolve_and_get_fn(
-                cx,
-                start_def_id,
-                cx.tcx().intern_substs(&[main_ret_ty.into()]),
+            let start_fn = cx.get_fn_addr(
+                ty::Instance::resolve(
+                    cx.tcx(),
+                    ty::ParamEnv::reveal_all(),
+                    start_def_id,
+                    cx.tcx().intern_substs(&[main_ret_ty.into()]),
+                ).unwrap()
             );
             (start_fn, vec![bx.pointercast(rust_main, cx.type_ptr_to(cx.type_i8p())),
                             arg_argc, arg_argv])
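The entry-wrapper change keys the generated `main` shim on a new `main_needs_argc_argv` target option: when the target has no meaningful argc/argv, the shim takes no parameters and the Rust start function is handed zeros instead. A simplified standalone sketch of that branching; the types and the `rust_start` stand-in are illustrative, not the generated LLVM IR:

```
struct TargetOptions {
    main_needs_argc_argv: bool,
}

// Stand-in for the Rust start function the shim eventually calls.
fn rust_start(argc: isize, argv: *const *const u8) -> isize {
    println!("start(argc = {}, argv = {:?})", argc, argv);
    0
}

fn entry_shim(opts: &TargetOptions, native_args: Option<(isize, *const *const u8)>) -> isize {
    let (arg_argc, arg_argv) = if opts.main_needs_argc_argv {
        // Forward the parameters of the native main().
        native_args.expect("target promised argc/argv")
    } else {
        // No argc/argv on this target (e.g. a freestanding runtime),
        // so just pass zeros, exactly as the new codegen path does.
        (0, std::ptr::null())
    };
    rust_start(arg_argc, arg_argv)
}

fn main() {
    let hosted = TargetOptions { main_needs_argc_argv: true };
    let freestanding = TargetOptions { main_needs_argc_argv: false };
    entry_shim(&hosted, Some((1, std::ptr::null())));
    entry_shim(&freestanding, None);
}
```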
diff --git a/src/librustc_codegen_ssa/callee.rs b/src/librustc_codegen_ssa/callee.rs
deleted file mode 100644 (file)
index 4744dd6..0000000
+++ /dev/null
@@ -1,36 +0,0 @@
-use crate::traits::*;
-use rustc::ty;
-use rustc::ty::subst::SubstsRef;
-use rustc::hir::def_id::DefId;
-
-pub fn resolve_and_get_fn<'tcx, Cx: CodegenMethods<'tcx>>(
-    cx: &Cx,
-    def_id: DefId,
-    substs: SubstsRef<'tcx>,
-) -> Cx::Value {
-    cx.get_fn(
-        ty::Instance::resolve(
-            cx.tcx(),
-            ty::ParamEnv::reveal_all(),
-            def_id,
-            substs
-        ).unwrap()
-    )
-}
-
-pub fn resolve_and_get_fn_for_vtable<'tcx,
-    Cx: Backend<'tcx> + MiscMethods<'tcx> + TypeMethods<'tcx>
->(
-    cx: &Cx,
-    def_id: DefId,
-    substs: SubstsRef<'tcx>,
-) -> Cx::Value {
-    cx.get_fn(
-        ty::Instance::resolve_for_vtable(
-            cx.tcx(),
-            ty::ParamEnv::reveal_all(),
-            def_id,
-            substs
-        ).unwrap()
-    )
-}
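With `callee.rs` deleted, callers no longer go through `resolve_and_get_fn`; they resolve the `ty::Instance` explicitly and hand it to the new `get_fn_addr`, as the replacement hunks elsewhere in this commit show. A minimal sketch of inlining such a thin wrapper at the call site, with illustrative stand-in types:

```
// Illustrative stand-ins for instance resolution and address lookup.
#[derive(Debug, Clone, Copy)]
struct Instance(u32);

fn resolve(def_id: u32) -> Option<Instance> {
    Some(Instance(def_id))
}

struct Cx;

impl Cx {
    fn get_fn_addr(&self, instance: Instance) -> String {
        format!("addr_of_fn_{}", instance.0)
    }
}

fn main() {
    let cx = Cx;
    // Before: a resolve_and_get_fn(&cx, def_id) wrapper hid the resolution.
    // After: the caller resolves the instance itself, then asks for its address.
    let addr = cx.get_fn_addr(resolve(42).unwrap());
    println!("{}", addr);
}
```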
index e3aa35ef4eb5e5f5b16925cdf8405b4fb9503950..ac39ca98476670ed2761ade3808667e35b4b2e51 100644 (file)
@@ -1,6 +1,7 @@
 #![allow(non_camel_case_types, non_snake_case)]
 
 use rustc::ty::{Ty, TyCtxt};
+use rustc::session::Session;
 use syntax_pos::Span;
 
 use rustc::hir::def_id::DefId;
@@ -200,3 +201,7 @@ pub fn shift_mask_val<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>(
         _ => bug!("shift_mask_val: expected Integer or Vector, found {:?}", kind),
     }
 }
+
+pub fn span_invalid_monomorphization_error(a: &Session, b: Span, c: &str) {
+    span_err!(a, b, E0511, "{}", c);
+}
index 8ff41c275a8f45283e387847e7d5861dd3abd94b..02e26d8f6ecf0f9bebb552fc5dbb11b3717c7120 100644 (file)
@@ -1,5 +1,40 @@
 syntax::register_diagnostics! {
 
+E0511: r##"
+Invalid monomorphization of an intrinsic function was used. Erroneous code
+example:
+
+```ignore (error-emitted-at-codegen-which-cannot-be-handled-by-compile_fail)
+#![feature(platform_intrinsics)]
+
+extern "platform-intrinsic" {
+    fn simd_add<T>(a: T, b: T) -> T;
+}
+
+fn main() {
+    unsafe { simd_add(0, 1); }
+    // error: invalid monomorphization of `simd_add` intrinsic
+}
+```
+
+The generic type has to be a SIMD type. Example:
+
+```
+#![feature(repr_simd)]
+#![feature(platform_intrinsics)]
+
+#[repr(simd)]
+#[derive(Copy, Clone)]
+struct i32x2(i32, i32);
+
+extern "platform-intrinsic" {
+    fn simd_add<T>(a: T, b: T) -> T;
+}
+
+unsafe { simd_add(i32x2(0, 0), i32x2(1, 2)); } // ok!
+```
+"##,
+
 E0668: r##"
 Malformed inline assembly rejected by LLVM.
 
index 5017a60ca699a2cd83303bc2ac92dec92e85a6f4..0221a04b045188840e07efef6cf66154fdfecb9c 100644 (file)
@@ -10,7 +10,6 @@
 #![feature(in_band_lifetimes)]
 #![feature(nll)]
 #![feature(trusted_len)]
-#![feature(mem_take)]
 #![feature(associated_type_bounds)]
 
 #![recursion_limit="256"]
@@ -28,6 +27,7 @@
 use rustc::session::config::{OutputFilenames, OutputType};
 use rustc::middle::lang_items::LangItem;
 use rustc::hir::def_id::CrateNum;
+use rustc::ty::query::Providers;
 use rustc_data_structures::fx::{FxHashMap, FxHashSet};
 use rustc_data_structures::sync::Lrc;
 use rustc_data_structures::svh::Svh;
@@ -42,7 +42,6 @@
 pub mod mir;
 pub mod debuginfo;
 pub mod base;
-pub mod callee;
 pub mod glue;
 pub mod meth;
 pub mod mono_item;
@@ -157,3 +156,13 @@ pub struct CodegenResults {
     pub linker_info: back::linker::LinkerInfo,
     pub crate_info: CrateInfo,
 }
+
+pub fn provide(providers: &mut Providers<'_>) {
+    crate::back::symbol_export::provide(providers);
+    crate::base::provide_both(providers);
+}
+
+pub fn provide_extern(providers: &mut Providers<'_>) {
+    crate::back::symbol_export::provide_extern(providers);
+    crate::base::provide_both(providers);
+}
index 7fe9f5f25130a90d2b46270380326f6f37226c0b..266d2e5b18d22bbbfe2aa256b472cb495bb02c1e 100644 (file)
@@ -1,6 +1,5 @@
 use rustc_target::abi::call::FnType;
 
-use crate::callee;
 use crate::traits::*;
 
 use rustc::ty::{self, Ty, Instance};
@@ -92,7 +91,14 @@ pub fn get_vtable<'tcx, Cx: CodegenMethods<'tcx>>(
 
     let methods = methods.cloned().map(|opt_mth| {
         opt_mth.map_or(nullptr, |(def_id, substs)| {
-            callee::resolve_and_get_fn_for_vtable(cx, def_id, substs)
+            cx.get_fn_addr(
+                ty::Instance::resolve_for_vtable(
+                    cx.tcx(),
+                    ty::ParamEnv::reveal_all(),
+                    def_id,
+                    substs,
+                ).unwrap()
+            )
         })
     });
 
@@ -102,7 +108,7 @@ pub fn get_vtable<'tcx, Cx: CodegenMethods<'tcx>>(
     // `get_vtable` in rust_mir/interpret/traits.rs
     // /////////////////////////////////////////////////////////////////////////////////////////////
     let components: Vec<_> = [
-        cx.get_fn(Instance::resolve_drop_in_place(cx.tcx(), ty)),
+        cx.get_fn_addr(Instance::resolve_drop_in_place(cx.tcx(), ty)),
         cx.const_usize(layout.size.bytes()),
         cx.const_usize(layout.align.abi.bytes())
     ].iter().cloned().chain(methods).collect();
index 3a1d0a2577521a09fc620690ecf5268d33e6c512..dc77d025c005fa4307d5dd77f187a95ffd31609f 100644 (file)
@@ -149,6 +149,26 @@ fn do_call<'c, 'b, Bx: BuilderMethods<'b, 'tcx>>(
             }
         }
     }
+
+    // Generate sideeffect intrinsic if jumping to any of the targets can form
+    // a loop.
+    fn maybe_sideeffect<'b, 'tcx2: 'b, Bx: BuilderMethods<'b, 'tcx2>>(
+        &self,
+        mir: &'b mir::Body<'tcx>,
+        bx: &mut Bx,
+        targets: &[mir::BasicBlock],
+    ) {
+        if bx.tcx().sess.opts.debugging_opts.insert_sideeffect {
+            if targets.iter().any(|target| {
+                *target <= *self.bb
+                    && target
+                        .start_location()
+                        .is_predecessor_of(self.bb.start_location(), mir)
+            }) {
+                bx.sideeffect();
+            }
+        }
+    }
 }
 
 /// Codegen implementations for some terminator variants.
@@ -197,6 +217,7 @@ fn codegen_switchint_terminator<'b>(
             let lltrue = helper.llblock(self, targets[0]);
             let llfalse = helper.llblock(self, targets[1]);
             if switch_ty == bx.tcx().types.bool {
+                helper.maybe_sideeffect(self.mir, &mut bx, targets.as_slice());
                 // Don't generate trivial icmps when switching on bool
                 if let [0] = values[..] {
                     bx.cond_br(discr.immediate(), llfalse, lltrue);
@@ -210,9 +231,11 @@ fn codegen_switchint_terminator<'b>(
                 );
                 let llval = bx.const_uint_big(switch_llty, values[0]);
                 let cmp = bx.icmp(IntPredicate::IntEQ, discr.immediate(), llval);
+                helper.maybe_sideeffect(self.mir, &mut bx, targets.as_slice());
                 bx.cond_br(cmp, lltrue, llfalse);
             }
         } else {
+            helper.maybe_sideeffect(self.mir, &mut bx, targets.as_slice());
             let (otherwise, targets) = targets.split_last().unwrap();
             bx.switch(
                 discr.immediate(),
@@ -308,6 +331,7 @@ fn codegen_drop_terminator<'b>(
 
         if let ty::InstanceDef::DropGlue(_, None) = drop_fn.def {
             // we don't actually need to drop anything.
+            helper.maybe_sideeffect(self.mir, &mut bx, &[target]);
             helper.funclet_br(self, &mut bx, target);
             return
         }
@@ -334,10 +358,11 @@ fn codegen_drop_terminator<'b>(
                 (meth::DESTRUCTOR.get_fn(&mut bx, vtable, &fn_ty), fn_ty)
             }
             _ => {
-                (bx.get_fn(drop_fn),
+                (bx.get_fn_addr(drop_fn),
                  FnType::of_instance(&bx, drop_fn))
             }
         };
+        helper.maybe_sideeffect(self.mir, &mut bx, &[target]);
         helper.do_call(self, &mut bx, fn_ty, drop_fn, args,
                        Some((ReturnDest::Nothing, target)),
                        unwind);
@@ -373,6 +398,7 @@ fn codegen_assert_terminator<'b>(
 
         // Don't codegen the panic block if success is known.
         if const_cond == Some(expected) {
+            helper.maybe_sideeffect(self.mir, &mut bx, &[target]);
             helper.funclet_br(self, &mut bx, target);
             return;
         }
@@ -383,6 +409,7 @@ fn codegen_assert_terminator<'b>(
         // Create the failure block and the conditional branch to it.
         let lltarget = helper.llblock(self, target);
         let panic_block = self.new_block("panic");
+        helper.maybe_sideeffect(self.mir, &mut bx, &[target]);
         if expected {
             bx.cond_br(cond, lltarget, panic_block.llbb());
         } else {
@@ -433,7 +460,7 @@ fn codegen_assert_terminator<'b>(
         let def_id = common::langcall(bx.tcx(), Some(span), "", lang_item);
         let instance = ty::Instance::mono(bx.tcx(), def_id);
         let fn_ty = FnType::of_instance(&bx, instance);
-        let llfn = bx.get_fn(instance);
+        let llfn = bx.get_fn_addr(instance);
 
         // Codegen the actual panic invoke/call.
         helper.do_call(self, &mut bx, fn_ty, llfn, &args, None, cleanup);
@@ -486,6 +513,7 @@ fn codegen_call_terminator<'b>(
             if let Some(destination_ref) = destination.as_ref() {
                 let &(ref dest, target) = destination_ref;
                 self.codegen_transmute(&mut bx, &args[0], dest);
+                helper.maybe_sideeffect(self.mir, &mut bx, &[target]);
                 helper.funclet_br(self, &mut bx, target);
             } else {
                 // If we are trying to transmute to an uninhabited type,
@@ -513,6 +541,7 @@ fn codegen_call_terminator<'b>(
             Some(ty::InstanceDef::DropGlue(_, None)) => {
                 // Empty drop glue; a no-op.
                 let &(_, target) = destination.as_ref().unwrap();
+                helper.maybe_sideeffect(self.mir, &mut bx, &[target]);
                 helper.funclet_br(self, &mut bx, target);
                 return;
             }
@@ -547,8 +576,11 @@ fn codegen_call_terminator<'b>(
                     common::langcall(bx.tcx(), Some(span), "", lang_items::PanicFnLangItem);
                 let instance = ty::Instance::mono(bx.tcx(), def_id);
                 let fn_ty = FnType::of_instance(&bx, instance);
-                let llfn = bx.get_fn(instance);
+                let llfn = bx.get_fn_addr(instance);
 
+                if let Some((_, target)) = destination.as_ref() {
+                    helper.maybe_sideeffect(self.mir, &mut bx, &[*target]);
+                }
                 // Codegen the actual panic invoke/call.
                 helper.do_call(
                     self,
@@ -561,7 +593,9 @@ fn codegen_call_terminator<'b>(
                 );
             } else {
                 // a NOP
-                helper.funclet_br(self, &mut bx, destination.as_ref().unwrap().1)
+                let target = destination.as_ref().unwrap().1;
+                helper.maybe_sideeffect(self.mir, &mut bx, &[target]);
+                helper.funclet_br(self, &mut bx, target);
             }
             return;
         }
@@ -670,6 +704,7 @@ fn codegen_call_terminator<'b>(
             }
 
             if let Some((_, target)) = *destination {
+                helper.maybe_sideeffect(self.mir, &mut bx, &[target]);
                 helper.funclet_br(self, &mut bx, target);
             } else {
                 bx.unreachable();
@@ -758,10 +793,13 @@ fn codegen_call_terminator<'b>(
 
         let fn_ptr = match (llfn, instance) {
             (Some(llfn), _) => llfn,
-            (None, Some(instance)) => bx.get_fn(instance),
+            (None, Some(instance)) => bx.get_fn_addr(instance),
             _ => span_bug!(span, "no llfn for call"),
         };
 
+        if let Some((_, target)) = destination.as_ref() {
+            helper.maybe_sideeffect(self.mir, &mut bx, &[*target]);
+        }
         helper.do_call(self, &mut bx, fn_ty, fn_ptr, &llargs,
                        destination.as_ref().map(|&(_, target)| (ret_dest, target)),
                        cleanup);
@@ -811,6 +849,7 @@ fn codegen_terminator(
             }
 
             mir::TerminatorKind::Goto { target } => {
+                helper.maybe_sideeffect(self.mir, &mut bx, &[target]);
                 helper.funclet_br(self, &mut bx, target);
             }
 
index 5787447d6230fc2aa100f72ff363a9afb7d05577..1a2e796a5b7be6adc44b375316295c035d5ed2c9 100644 (file)
@@ -30,7 +30,7 @@ pub struct FunctionCx<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> {
 
     debug_context: FunctionDebugContext<Bx::DIScope>,
 
-    llfn: Bx::Value,
+    llfn: Bx::Function,
 
     cx: &'a Bx::CodegenCx,
 
@@ -183,7 +183,7 @@ fn new_operand<Bx: BuilderMethods<'a, 'tcx, Value = V>>(
 
 pub fn codegen_mir<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>(
     cx: &'a Bx::CodegenCx,
-    llfn: Bx::Value,
+    llfn: Bx::Function,
     mir: &'a Body<'tcx>,
     instance: Instance<'tcx>,
     sig: ty::FnSig<'tcx>,
@@ -200,6 +200,8 @@ pub fn codegen_mir<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>(
         bx.set_personality_fn(cx.eh_personality());
     }
 
+    bx.sideeffect();
+
     let cleanup_kinds = analyze::cleanup_kinds(&mir);
     // Allocate a `Block` for every basic block, except
     // the start block, if nothing loops back to it.
@@ -636,7 +638,7 @@ fn arg_local_refs<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>(
                         ty::Generator(def_id, substs, _) => (def_id, substs),
                         _ => bug!("generator layout without generator substs"),
                     };
-                    let state_tys = gen_substs.state_tys(def_id, tcx);
+                    let state_tys = gen_substs.as_generator().state_tys(def_id, tcx);
 
                     generator_layout.variant_fields.iter()
                         .zip(state_tys)
index 2d97f828f073de5ab50238f799b78c3916efc390..1d1bc2a81a2ca7459205a34391f665b92ec35a4e 100644 (file)
@@ -394,8 +394,8 @@ pub fn project_index<Bx: BuilderMethods<'a, 'tcx, Value = V>>(
         // Statically compute the offset if we can, otherwise just use the element size,
         // as this will yield the lowest alignment.
         let layout = self.layout.field(bx, 0);
-        let offset = if bx.is_const_integral(llindex) {
-            layout.size.checked_mul(bx.const_to_uint(llindex), bx).unwrap_or(layout.size)
+        let offset = if let Some(llindex) = bx.const_to_opt_uint(llindex) {
+            layout.size.checked_mul(llindex, bx).unwrap_or(layout.size)
         } else {
             layout.size
         };
index 6ffa561f3fecf456627d8b29474e15cd26c45d33..27442bb6bff887f54d866f6fe090267d1e89e427 100644 (file)
@@ -10,7 +10,6 @@
 
 use crate::base;
 use crate::MemFlags;
-use crate::callee;
 use crate::common::{self, RealPredicate, IntPredicate};
 
 use crate::traits::*;
@@ -95,7 +94,7 @@ pub fn codegen_rvalue(
                     let size = bx.const_usize(dest.layout.size.bytes());
 
                     // Use llvm.memset.p0i8.* to initialize all zero arrays
-                    if bx.cx().is_const_integral(v) && bx.cx().const_to_uint(v) == 0 {
+                    if bx.cx().const_to_opt_uint(v) == Some(0) {
                         let fill = bx.cx().const_u8(0);
                         bx.memset(start, fill, size, dest.align, MemFlags::empty());
                         return bx;
@@ -190,7 +189,15 @@ pub fn codegen_rvalue_operand(
                                     bug!("reifying a fn ptr that requires const arguments");
                                 }
                                 OperandValue::Immediate(
-                                    callee::resolve_and_get_fn(bx.cx(), def_id, substs))
+                                    bx.get_fn_addr(
+                                        ty::Instance::resolve_for_fn_ptr(
+                                            bx.tcx(),
+                                            ty::ParamEnv::reveal_all(),
+                                            def_id,
+                                            substs
+                                        ).unwrap()
+                                    )
+                                )
                             }
                             _ => {
                                 bug!("{} cannot be reified to a fn ptr", operand.layout.ty)
@@ -205,7 +212,7 @@ pub fn codegen_rvalue_operand(
                                     def_id,
                                     substs,
                                     ty::ClosureKind::FnOnce);
-                                OperandValue::Immediate(bx.cx().get_fn(instance))
+                                OperandValue::Immediate(bx.cx().get_fn_addr(instance))
                             }
                             _ => {
                                 bug!("{} cannot be cast to a fn ptr", operand.layout.ty)
@@ -488,7 +495,7 @@ pub fn codegen_rvalue_operand(
                     }
                 };
                 let instance = ty::Instance::mono(bx.tcx(), def_id);
-                let r = bx.cx().get_fn(instance);
+                let r = bx.cx().get_fn_addr(instance);
                 let call = bx.call(r, &[llsize, llalign], None);
                 let val = bx.pointercast(call, llty_ptr);
 
index cb197f51460a1901c66efce9e2d15d47863fcb22..7cae3e9ade5903f3987434b3e6a2e59615568e3e 100644 (file)
@@ -9,11 +9,13 @@
 use rustc_codegen_utils::codegen_backend::CodegenBackend;
 use std::sync::Arc;
 use std::sync::mpsc;
-use syntax::ext::allocator::AllocatorKind;
+use syntax_expand::allocator::AllocatorKind;
 use syntax_pos::symbol::InternedString;
 
 pub trait BackendTypes {
     type Value: CodegenObject;
+    type Function: CodegenObject;
+
     type BasicBlock: Copy;
     type Type: CodegenObject;
     type Funclet;
index 1886701fb3a88d95e1c9d4c274c1afe2f64039b6..62b5bcbb6c9dcff67c89e7899ae4d0a50c0e7951 100644 (file)
@@ -34,7 +34,7 @@ pub trait BuilderMethods<'a, 'tcx>:
     + HasTargetSpec
 
 {
-    fn new_block<'b>(cx: &'a Self::CodegenCx, llfn: Self::Value, name: &'b str) -> Self;
+    fn new_block<'b>(cx: &'a Self::CodegenCx, llfn: Self::Function, name: &'b str) -> Self;
     fn with_cx(cx: &'a Self::CodegenCx) -> Self;
     fn build_sibling_block(&self, name: &str) -> Self;
     fn cx(&self) -> &Self::CodegenCx;
index e7ce03f1836198293376a553e1e5ee7027ae4f74..95ada60fae08dd74ffc6f926967232489a27e50d 100644 (file)
@@ -21,11 +21,9 @@ pub trait ConstMethods<'tcx>: BackendTypes {
 
     fn const_struct(&self, elts: &[Self::Value], packed: bool) -> Self::Value;
 
-    fn const_to_uint(&self, v: Self::Value) -> u64;
+    fn const_to_opt_uint(&self, v: Self::Value) -> Option<u64>;
     fn const_to_opt_u128(&self, v: Self::Value, sign_ext: bool) -> Option<u128>;
 
-    fn is_const_integral(&self, v: Self::Value) -> bool;
-
     fn scalar_to_backend(
         &self,
         cv: Scalar,
index e75f247da96135ed3a791f4359e7de62238474f3..989e6cf9dcaf1ea57aad2df096189b725a1adab8 100644 (file)
@@ -20,7 +20,7 @@ fn create_function_debug_context(
         &self,
         instance: Instance<'tcx>,
         sig: ty::FnSig<'tcx>,
-        llfn: Self::Value,
+        llfn: Self::Function,
         mir: &mir::Body<'_>,
     ) -> FunctionDebugContext<Self::DIScope>;
 
index 624a982b619ee7b2d26f298779897df95c444b39..cd42044e48df8cf6d6c5cf0dfa458acc50c31b85 100644 (file)
@@ -17,13 +17,13 @@ pub trait DeclareMethods<'tcx>: BackendTypes {
     ///
     /// If there’s a value with the same name already declared, the function will
     /// update the declaration and return existing Value instead.
-    fn declare_cfn(&self, name: &str, fn_type: Self::Type) -> Self::Value;
+    fn declare_cfn(&self, name: &str, fn_type: Self::Type) -> Self::Function;
 
     /// Declare a Rust function.
     ///
     /// If there’s a value with the same name already declared, the function will
     /// update the declaration and return existing Value instead.
-    fn declare_fn(&self, name: &str, sig: ty::PolyFnSig<'tcx>) -> Self::Value;
+    fn declare_fn(&self, name: &str, sig: ty::PolyFnSig<'tcx>) -> Self::Function;
 
     /// Declare a global with an intention to define it.
     ///
index 7c79cd6021031d7691328453e672272ffdb657b6..2c484084c4a20e063bc0c67d51423501b16eb787 100644 (file)
@@ -20,6 +20,7 @@ fn codegen_intrinsic_call(
     fn abort(&mut self);
     fn assume(&mut self, val: Self::Value);
     fn expect(&mut self, cond: Self::Value, expected: bool) -> Self::Value;
+    fn sideeffect(&mut self);
     /// Trait method used to inject `va_start` on the "spoofed" `VaListImpl` in
     /// Rust defined C-variadic functions.
     fn va_start(&mut self, val: Self::Value) -> Self::Value;
index 46c88a6113ebe486e853728ea09fefd6605749cd..658ddd0028076970726f1d05b38a9b39400b7699 100644 (file)
@@ -11,14 +11,14 @@ fn vtables(
         &self,
     ) -> &RefCell<FxHashMap<(Ty<'tcx>, Option<ty::PolyExistentialTraitRef<'tcx>>), Self::Value>>;
     fn check_overflow(&self) -> bool;
-    fn instances(&self) -> &RefCell<FxHashMap<Instance<'tcx>, Self::Value>>;
-    fn get_fn(&self, instance: Instance<'tcx>) -> Self::Value;
+    fn get_fn(&self, instance: Instance<'tcx>) -> Self::Function;
+    fn get_fn_addr(&self, instance: Instance<'tcx>) -> Self::Value;
     fn eh_personality(&self) -> Self::Value;
     fn eh_unwind_resume(&self) -> Self::Value;
     fn sess(&self) -> &Session;
     fn codegen_unit(&self) -> &Arc<CodegenUnit<'tcx>>;
     fn used_statics(&self) -> &RefCell<Vec<Self::Value>>;
-    fn set_frame_pointer_elimination(&self, llfn: Self::Value);
-    fn apply_target_cpu_attr(&self, llfn: Self::Value);
+    fn set_frame_pointer_elimination(&self, llfn: Self::Function);
+    fn apply_target_cpu_attr(&self, llfn: Self::Function);
     fn create_used_variable(&self);
 }
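This trait change splits function lookup in two: `get_fn` now returns `Self::Function` (the declared function, used when emitting its body or applying attributes), while `get_fn_addr` returns a `Self::Value` usable as an operand, and `BackendTypes` gains the matching `Function` associated type. In the LLVM backend both happen to be `&Value`, but the split lets a backend keep the two handles distinct. A standalone sketch of a backend trait drawing that distinction, with illustrative types:

```
// Two distinct handles: the function declaration vs. a value that can
// be used as a call operand (its address).
#[derive(Debug, Clone, Copy)]
struct FnDecl(&'static str);
#[derive(Debug, Clone, Copy)]
struct Value(&'static str);

trait BackendTypes {
    type Value;
    type Function;
}

trait MiscMethods: BackendTypes {
    // Counterpart of `get_fn`: the declaration, e.g. for codegenning its body.
    fn get_fn(&self, name: &'static str) -> Self::Function;
    // Counterpart of `get_fn_addr`: an operand, e.g. for calls or vtables.
    fn get_fn_addr(&self, name: &'static str) -> Self::Value;
}

struct DemoBackend;

impl BackendTypes for DemoBackend {
    type Value = Value;
    type Function = FnDecl;
}

impl MiscMethods for DemoBackend {
    fn get_fn(&self, name: &'static str) -> FnDecl {
        FnDecl(name)
    }
    fn get_fn_addr(&self, name: &'static str) -> Value {
        Value(name)
    }
}

fn main() {
    let cx = DemoBackend;
    println!("{:?}", cx.get_fn("main"));      // something to build a body into
    println!("{:?}", cx.get_fn_addr("main")); // something to call or store
}
```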
index efe4a25570104c4f8881c97119111bcc2a77d1e2..4318ef16494a11194af39f0c434f3488106909ec 100644 (file)
@@ -88,6 +88,7 @@ pub trait HasCodegen<'tcx>:
     type CodegenCx: CodegenMethods<'tcx>
         + BackendTypes<
             Value = Self::Value,
+            Function = Self::Function,
             BasicBlock = Self::BasicBlock,
             Type = Self::Type,
             Funclet = Self::Funclet,
index 2e3af8431eed0d7f4ddc730b3b82b805531a8c8a..1077c1f42637ebf85a00c5d41a9b3c5b378a0eef 100644 (file)
@@ -25,7 +25,6 @@ fn print(&self, _req: PrintRequest, _sess: &Session) {}
     fn target_features(&self, _sess: &Session) -> Vec<Symbol> { vec![] }
     fn print_passes(&self) {}
     fn print_version(&self) {}
-    fn diagnostics(&self) -> &[(&'static str, &'static str)] { &[] }
 
     fn metadata_loader(&self) -> Box<dyn MetadataLoader + Sync>;
     fn provide(&self, _providers: &mut Providers<'_>);
index 1201446afb53128dddbd1b0a52134801ae72d6ff..66920342ff6ba3b894ec3135af73e45a0257fa90 100644 (file)
@@ -18,6 +18,7 @@
 extern crate rustc;
 
 use rustc::ty::TyCtxt;
+use rustc::ty::query::Providers;
 use rustc::hir::def_id::LOCAL_CRATE;
 use syntax::symbol::sym;
 
@@ -37,3 +38,7 @@ pub fn check_for_rustc_errors_attr(tcx: TyCtxt<'_>) {
         }
     }
 }
+
+pub fn provide(providers: &mut Providers<'_>) {
+    crate::symbol_names::provide(providers);
+}
index 5bcb3b4ceb3ba33f06d3bc0331e52fad0fe4e3a8..cf575c54293c76ab690f86c98783088b5c42e189 100644 (file)
@@ -225,7 +225,7 @@ fn print_type(
             ty::Projection(ty::ProjectionTy { item_def_id: def_id, substs }) |
             ty::UnnormalizedProjection(ty::ProjectionTy { item_def_id: def_id, substs }) |
             ty::Closure(def_id, substs) |
-            ty::Generator(def_id, ty::GeneratorSubsts { substs }, _) => {
+            ty::Generator(def_id, substs, _) => {
                 self.print_def_path(def_id, substs)
             }
             _ => self.pretty_print_type(ty),
index 2ad699d7c6f08808ab46a3f060940556ca5e1d95..55b148fceb2177393101b9524258e9d8da68b64c 100644 (file)
@@ -415,7 +415,7 @@ fn print_type(
             ty::Projection(ty::ProjectionTy { item_def_id: def_id, substs }) |
             ty::UnnormalizedProjection(ty::ProjectionTy { item_def_id: def_id, substs }) |
             ty::Closure(def_id, substs) |
-            ty::Generator(def_id, ty::GeneratorSubsts { substs }, _) => {
+            ty::Generator(def_id, substs, _) => {
                 self = self.print_def_path(def_id, substs)?;
             }
             ty::Foreign(def_id) => {
index e020f2f8da940e19d4a27c8b93e473eb77734702..065c8436ae06aa54fc5c19e9e727e94cf7873177 100644 (file)
@@ -20,8 +20,8 @@ graphviz = { path = "../libgraphviz" }
 cfg-if = "0.1.2"
 crossbeam-utils = { version = "0.6.5", features = ["nightly"] }
 stable_deref_trait = "1.0.0"
-rayon = { version = "0.2.0", package = "rustc-rayon" }
-rayon-core = { version = "0.2.0", package = "rustc-rayon-core" }
+rayon = { version = "0.3.0", package = "rustc-rayon" }
+rayon-core = { version = "0.3.0", package = "rustc-rayon-core" }
 rustc-hash = "1.0.1"
 smallvec = { version = "0.6.7", features = ["union", "may_dangle"] }
 rustc_index = { path = "../librustc_index", package = "rustc_index" }
index 31cb22098b8e93b905fe23f07e287e9c3abdf3ca..d0ff6108d6ea6183fc3d035198b588585ac206e2 100644 (file)
@@ -2,6 +2,7 @@
 use std::mem;
 use std::borrow::Borrow;
 use std::collections::hash_map::RawEntryMut;
+use smallvec::SmallVec;
 use crate::fx::{FxHasher, FxHashMap};
 use crate::sync::{Lock, LockGuard};
 
@@ -18,7 +19,7 @@
 #[cfg(not(parallel_compiler))]
 const SHARD_BITS: usize = 0;
 
-const SHARDS: usize = 1 << SHARD_BITS;
+pub const SHARDS: usize = 1 << SHARD_BITS;
 
 /// An array of cache-line aligned inner locked structures with convenience methods.
 #[derive(Clone)]
@@ -29,21 +30,36 @@ pub struct Sharded<T> {
 impl<T: Default> Default for Sharded<T> {
     #[inline]
     fn default() -> Self {
+        Self::new(|| T::default())
+    }
+}
+
+impl<T> Sharded<T> {
+    #[inline]
+    pub fn new(mut value: impl FnMut() -> T) -> Self {
+        // Create a vector of the values we want
+        let mut values: SmallVec<[_; SHARDS]> = (0..SHARDS).map(|_| {
+            CacheAligned(Lock::new(value()))
+        }).collect();
+
+        // Create an uninitialized array
         let mut shards: mem::MaybeUninit<[CacheAligned<Lock<T>>; SHARDS]> =
             mem::MaybeUninit::uninit();
-        let first = shards.as_mut_ptr() as *mut CacheAligned<Lock<T>>;
+
         unsafe {
-            for i in 0..SHARDS {
-                first.add(i).write(CacheAligned(Lock::new(T::default())));
-            }
+            // Copy the values into our array
+            let first = shards.as_mut_ptr() as *mut CacheAligned<Lock<T>>;
+            values.as_ptr().copy_to_nonoverlapping(first, SHARDS);
+
+            // Ignore the content of the vector
+            values.set_len(0);
+
             Sharded {
                 shards: shards.assume_init(),
             }
         }
     }
-}
 
-impl<T> Sharded<T> {
     #[inline]
     pub fn get_shard_by_value<K: Hash + ?Sized>(&self, val: &K) -> &Lock<T> {
         if SHARDS == 1 {
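
The new `Sharded::new` above first collects the shard values into a `SmallVec`, then moves them into an uninitialized fixed-size array with `copy_to_nonoverlapping` and calls `set_len(0)` so the vector does not drop the moved-out values. A minimal standalone sketch of that initialization pattern, using a plain `Vec` and a hypothetical element type instead of the cache-aligned locks:

    use std::mem::MaybeUninit;

    const SHARDS: usize = 4;

    fn new_array<T>(mut value: impl FnMut() -> T) -> [T; SHARDS] {
        // Build all the values first; if `value()` panics here, the Vec cleans up.
        let mut values: Vec<T> = (0..SHARDS).map(|_| value()).collect();

        let mut array: MaybeUninit<[T; SHARDS]> = MaybeUninit::uninit();
        unsafe {
            // Move the values into the uninitialized array...
            let first = array.as_mut_ptr() as *mut T;
            values.as_ptr().copy_to_nonoverlapping(first, SHARDS);
            // ...then tell the Vec it owns nothing, so nothing is dropped twice.
            values.set_len(0);
            array.assume_init()
        }
    }

    fn main() {
        let mut counter = 0usize;
        let shards = new_array(|| { counter += 1; counter });
        assert_eq!(shards, [1, 2, 3, 4]);
    }
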
index 3277b85c281443278329629b81c1d6d7f859acae..9622c290039d5ddac11616b97714f55a47d8abe4 100644 (file)
@@ -1,6 +1,6 @@
 //! This module defines types which are thread safe if cfg!(parallel_compiler) is true.
 //!
-//! `Lrc` is an alias of either Rc or Arc.
+//! `Lrc` is an alias of `Arc` if cfg!(parallel_compiler) is true, `Rc` otherwise.
 //!
 //! `Lock` is a mutex.
 //! It internally uses `parking_lot::Mutex` if cfg!(parallel_compiler) is true,
@@ -12,7 +12,7 @@
 //!
 //! `MTLock` is a mutex which disappears if cfg!(parallel_compiler) is false.
 //!
-//! `MTRef` is a immutable reference if cfg!(parallel_compiler), and an mutable reference otherwise.
+//! `MTRef` is an immutable reference if cfg!(parallel_compiler), and a mutable reference otherwise.
 //!
 //! `rustc_erase_owner!` erases an OwningRef owner into Erased or Erased + Send + Sync
 //! depending on the value of cfg!(parallel_compiler).
 use std::ops::{Deref, DerefMut};
 use crate::owning_ref::{Erased, OwningRef};
 
-pub fn serial_join<A, B, RA, RB>(oper_a: A, oper_b: B) -> (RA, RB)
-    where A: FnOnce() -> RA,
-          B: FnOnce() -> RB
-{
-    (oper_a(), oper_b())
-}
-
-pub struct SerialScope;
-
-impl SerialScope {
-    pub fn spawn<F>(&self, f: F)
-        where F: FnOnce(&SerialScope)
-    {
-        f(self)
-    }
-}
-
-pub fn serial_scope<F, R>(f: F) -> R
-    where F: FnOnce(&SerialScope) -> R
-{
-    f(&SerialScope)
-}
-
 pub use std::sync::atomic::Ordering::SeqCst;
 pub use std::sync::atomic::Ordering;
 
@@ -176,8 +153,28 @@ pub fn fetch_add(&self, val: T, _: Ordering) -> T {
         pub type AtomicU32 = Atomic<u32>;
         pub type AtomicU64 = Atomic<u64>;
 
-        pub use self::serial_join as join;
-        pub use self::serial_scope as scope;
+        pub fn join<A, B, RA, RB>(oper_a: A, oper_b: B) -> (RA, RB)
+            where A: FnOnce() -> RA,
+                  B: FnOnce() -> RB
+        {
+            (oper_a(), oper_b())
+        }
+
+        pub struct SerialScope;
+
+        impl SerialScope {
+            pub fn spawn<F>(&self, f: F)
+                where F: FnOnce(&SerialScope)
+            {
+                f(self)
+            }
+        }
+
+        pub fn scope<F, R>(f: F) -> R
+            where F: FnOnce(&SerialScope) -> R
+        {
+            f(&SerialScope)
+        }
 
         #[macro_export]
         macro_rules! parallel {
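
As the module docs above note, `Lrc` (and the rest of this module) switches implementation based on `cfg!(parallel_compiler)`. A tiny standalone sketch of that aliasing pattern, reusing the same cfg name purely for illustration:

    #[cfg(parallel_compiler)]
    use std::sync::Arc as InnerRc;
    #[cfg(not(parallel_compiler))]
    use std::rc::Rc as InnerRc;

    // Shared, reference-counted pointer: atomic only when the compiler is parallel.
    pub type Lrc<T> = InnerRc<T>;

    fn main() {
        let shared: Lrc<String> = Lrc::new(String::from("shared"));
        let alias = Lrc::clone(&shared);
        assert_eq!(*shared, *alias);
    }
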
index dd088b68a239afe03b107d036168dbe3ca7d29e7..f33cb4e215d332a4b7edef4bc953d2d170233196 100644 (file)
@@ -166,7 +166,8 @@ pub fn run_compiler(
         None => return Ok(()),
     };
 
-    let (sopts, cfg) = config::build_session_options_and_crate_config(&matches);
+    let sopts = config::build_session_options(&matches);
+    let cfg = config::parse_cfgspecs(matches.opt_strs("cfg"));
 
     let mut dummy_config = |sopts, cfg, diagnostic_output| {
         let mut config = interface::Config {
@@ -615,7 +616,7 @@ pub fn list_metadata(sess: &Session,
                     let mut v = Vec::new();
                     locator::list_file_metadata(&sess.target.target,
                                                 path,
-                                                &*cstore.metadata_loader,
+                                                cstore,
                                                 &mut v)
                             .unwrap();
                     println!("{}", String::from_utf8(v).unwrap());
index 0281d10fd930ee1ac290f77a5848176e0201b52c..491bc2aa6a2eb7034669b33c4ae7cbba214164e5 100644 (file)
@@ -31,24 +31,28 @@ pub struct AnnotateSnippetEmitterWriter {
 
 impl Emitter for AnnotateSnippetEmitterWriter {
     /// The entry point for the diagnostics generation
-    fn emit_diagnostic(&mut self, db: &Diagnostic) {
-        let mut children = db.children.clone();
-        let (mut primary_span, suggestions) = self.primary_span_formatted(&db);
+    fn emit_diagnostic(&mut self, diag: &Diagnostic) {
+        let mut children = diag.children.clone();
+        let (mut primary_span, suggestions) = self.primary_span_formatted(&diag);
 
         self.fix_multispans_in_std_macros(&self.source_map,
                                           &mut primary_span,
                                           &mut children,
-                                          &db.level,
+                                          &diag.level,
                                           self.external_macro_backtrace);
 
-        self.emit_messages_default(&db.level,
-                                   db.message(),
-                                   &db.code,
+        self.emit_messages_default(&diag.level,
+                                   diag.message(),
+                                   &diag.code,
                                    &primary_span,
                                    &children,
                                    &suggestions);
     }
 
+    fn source_map(&self) -> Option<&Lrc<SourceMapperDyn>> {
+        self.source_map.as_ref()
+    }
+
     fn should_show_explain(&self) -> bool {
         !self.short_message
     }
index 3f1b91256c46811c5d3e7653155395ff80c966ef..fd74d8673da4dbc482430bd764caccf5d61a0dd6 100644 (file)
@@ -298,9 +298,31 @@ pub fn tool_only_multipart_suggestion(
     /// * may contain a name of a function, variable, or type, but not whole expressions
     ///
     /// See `CodeSuggestion` for more information.
-    pub fn span_suggestion(&mut self, sp: Span, msg: &str,
-                                       suggestion: String,
-                                       applicability: Applicability) -> &mut Self {
+    pub fn span_suggestion(
+        &mut self,
+        sp: Span,
+        msg: &str,
+        suggestion: String,
+        applicability: Applicability,
+    ) -> &mut Self {
+        self.span_suggestion_with_style(
+            sp,
+            msg,
+            suggestion,
+            applicability,
+            SuggestionStyle::ShowCode,
+        );
+        self
+    }
+
+    pub fn span_suggestion_with_style(
+        &mut self,
+        sp: Span,
+        msg: &str,
+        suggestion: String,
+        applicability: Applicability,
+        style: SuggestionStyle,
+    ) -> &mut Self {
         self.suggestions.push(CodeSuggestion {
             substitutions: vec![Substitution {
                 parts: vec![SubstitutionPart {
@@ -309,16 +331,37 @@ pub fn span_suggestion(&mut self, sp: Span, msg: &str,
                 }],
             }],
             msg: msg.to_owned(),
-            style: SuggestionStyle::ShowCode,
+            style,
             applicability,
         });
         self
     }
 
+    pub fn span_suggestion_verbose(
+        &mut self,
+        sp: Span,
+        msg: &str,
+        suggestion: String,
+        applicability: Applicability,
+    ) -> &mut Self {
+        self.span_suggestion_with_style(
+            sp,
+            msg,
+            suggestion,
+            applicability,
+            SuggestionStyle::ShowAlways,
+        );
+        self
+    }
+
     /// Prints out a message with multiple suggested edits of the code.
-    pub fn span_suggestions(&mut self, sp: Span, msg: &str,
-        suggestions: impl Iterator<Item = String>, applicability: Applicability) -> &mut Self
-    {
+    pub fn span_suggestions(
+        &mut self,
+        sp: Span,
+        msg: &str,
+        suggestions: impl Iterator<Item = String>,
+        applicability: Applicability,
+    ) -> &mut Self {
         self.suggestions.push(CodeSuggestion {
             substitutions: suggestions.map(|snippet| Substitution {
                 parts: vec![SubstitutionPart {
@@ -340,17 +383,13 @@ pub fn span_suggestions(&mut self, sp: Span, msg: &str,
     pub fn span_suggestion_short(
         &mut self, sp: Span, msg: &str, suggestion: String, applicability: Applicability
     ) -> &mut Self {
-        self.suggestions.push(CodeSuggestion {
-            substitutions: vec![Substitution {
-                parts: vec![SubstitutionPart {
-                    snippet: suggestion,
-                    span: sp,
-                }],
-            }],
-            msg: msg.to_owned(),
-            style: SuggestionStyle::HideCodeInline,
+        self.span_suggestion_with_style(
+            sp,
+            msg,
+            suggestion,
             applicability,
-        });
+            SuggestionStyle::HideCodeInline,
+        );
         self
     }
 
@@ -363,17 +402,13 @@ pub fn span_suggestion_short(
     pub fn span_suggestion_hidden(
         &mut self, sp: Span, msg: &str, suggestion: String, applicability: Applicability
     ) -> &mut Self {
-        self.suggestions.push(CodeSuggestion {
-            substitutions: vec![Substitution {
-                parts: vec![SubstitutionPart {
-                    snippet: suggestion,
-                    span: sp,
-                }],
-            }],
-            msg: msg.to_owned(),
-            style: SuggestionStyle::HideCodeAlways,
+        self.span_suggestion_with_style(
+            sp,
+            msg,
+            suggestion,
             applicability,
-        });
+            SuggestionStyle::HideCodeAlways,
+        );
         self
     }
 
@@ -384,17 +419,13 @@ pub fn span_suggestion_hidden(
     pub fn tool_only_span_suggestion(
         &mut self, sp: Span, msg: &str, suggestion: String, applicability: Applicability
     ) -> &mut Self {
-        self.suggestions.push(CodeSuggestion {
-            substitutions: vec![Substitution {
-                parts: vec![SubstitutionPart {
-                    snippet: suggestion,
-                    span: sp,
-                }],
-            }],
-            msg: msg.to_owned(),
-            style: SuggestionStyle::CompletelyHidden,
+        self.span_suggestion_with_style(
+            sp,
+            msg,
+            suggestion,
             applicability,
-        });
+            SuggestionStyle::CompletelyHidden,
+        );
         self
     }
 
index 9aea46da68b1a340631b88c6807754463f46f697..1e486786f6861a90dff68ed0b2b7e0a11dfb7e05 100644 (file)
@@ -13,7 +13,7 @@
 
 use crate::{
     Level, CodeSuggestion, Diagnostic, SubDiagnostic,
-    SuggestionStyle, SourceMapperDyn, DiagnosticId,
+    SuggestionStyle, SourceMapper, SourceMapperDyn, DiagnosticId,
 };
 use crate::Level::Error;
 use crate::snippet::{Annotation, AnnotationType, Line, MultilineAnnotation, StyledString, Style};
@@ -180,7 +180,7 @@ fn right(&self, line_len: usize) -> usize {
 /// Emitter trait for emitting errors.
 pub trait Emitter {
     /// Emit a structured diagnostic.
-    fn emit_diagnostic(&mut self, db: &Diagnostic);
+    fn emit_diagnostic(&mut self, diag: &Diagnostic);
 
     /// Emit a notification that an artifact has been output.
     /// This is currently only supported for the JSON format,
@@ -192,6 +192,8 @@ fn should_show_explain(&self) -> bool {
         true
     }
 
+    fn source_map(&self) -> Option<&Lrc<SourceMapperDyn>>;
+
     /// Formats the substitutions of the primary_span
     ///
     /// There are a lot of conditions to this method, but in short:
@@ -204,10 +206,10 @@ fn should_show_explain(&self) -> bool {
     ///   we return the original `primary_span` and the original suggestions.
     fn primary_span_formatted<'a>(
         &mut self,
-        db: &'a Diagnostic
+        diag: &'a Diagnostic,
     ) -> (MultiSpan, &'a [CodeSuggestion]) {
-        let mut primary_span = db.span.clone();
-        if let Some((sugg, rest)) = db.suggestions.split_first() {
+        let mut primary_span = diag.span.clone();
+        if let Some((sugg, rest)) = diag.suggestions.split_first() {
             if rest.is_empty() &&
                // ^ if there is only one suggestion
                // don't display multi-suggestions as labels
@@ -218,10 +220,14 @@ fn primary_span_formatted<'a>(
                sugg.msg.split_whitespace().count() < 10 &&
                // don't display multiline suggestions as labels
                !sugg.substitutions[0].parts[0].snippet.contains('\n') &&
-               // when this style is set we want the suggestion to be a message, not inline
-               sugg.style != SuggestionStyle::HideCodeAlways &&
-               // trivial suggestion for tooling's sake, never shown
-               sugg.style != SuggestionStyle::CompletelyHidden
+               ![
+                    // when this style is set we want the suggestion to be a message, not inline
+                    SuggestionStyle::HideCodeAlways,
+                    // trivial suggestion for tooling's sake, never shown
+                    SuggestionStyle::CompletelyHidden,
+                    // subtle suggestion, never shown inline
+                    SuggestionStyle::ShowAlways,
+               ].contains(&sugg.style)
             {
                 let substitution = &sugg.substitutions[0].parts[0].snippet.trim();
                 let msg = if substitution.len() == 0 || sugg.style.hide_inline() {
@@ -230,7 +236,20 @@ fn primary_span_formatted<'a>(
                     format!("help: {}", sugg.msg)
                 } else {
                     // Show the default suggestion text with the substitution
-                    format!("help: {}: `{}`", sugg.msg, substitution)
+                    format!(
+                        "help: {}{}: `{}`",
+                        sugg.msg,
+                        if self.source_map().map(|sm| is_case_difference(
+                            &**sm,
+                            substitution,
+                            sugg.substitutions[0].parts[0].span,
+                        )).unwrap_or(false) {
+                            " (notice the capitalization)"
+                        } else {
+                            ""
+                        },
+                        substitution,
+                    )
                 };
                 primary_span.push_span_label(sugg.substitutions[0].parts[0].span, msg);
 
@@ -241,10 +260,10 @@ fn primary_span_formatted<'a>(
                 // to be consistent. We could try to figure out if we can
                 // make one (or the first one) inline, but that would give
                 // undue importance to a semi-random suggestion
-                (primary_span, &db.suggestions)
+                (primary_span, &diag.suggestions)
             }
         } else {
-            (primary_span, &db.suggestions)
+            (primary_span, &diag.suggestions)
         }
     }
 
@@ -378,19 +397,23 @@ fn fix_multispan_in_std_macros(&self,
 }
 
 impl Emitter for EmitterWriter {
-    fn emit_diagnostic(&mut self, db: &Diagnostic) {
-        let mut children = db.children.clone();
-        let (mut primary_span, suggestions) = self.primary_span_formatted(&db);
+    fn source_map(&self) -> Option<&Lrc<SourceMapperDyn>> {
+        self.sm.as_ref()
+    }
+
+    fn emit_diagnostic(&mut self, diag: &Diagnostic) {
+        let mut children = diag.children.clone();
+        let (mut primary_span, suggestions) = self.primary_span_formatted(&diag);
 
         self.fix_multispans_in_std_macros(&self.sm,
                                           &mut primary_span,
                                           &mut children,
-                                          &db.level,
+                                          &diag.level,
                                           self.external_macro_backtrace);
 
-        self.emit_messages_default(&db.level,
-                                   &db.styled_message(),
-                                   &db.code,
+        self.emit_messages_default(&diag.level,
+                                   &diag.styled_message(),
+                                   &diag.code,
                                    &primary_span,
                                    &children,
                                    &suggestions);
@@ -1457,7 +1480,9 @@ fn emit_suggestion_default(
         let suggestions = suggestion.splice_lines(&**sm);
 
         let mut row_num = 2;
-        for &(ref complete, ref parts) in suggestions.iter().take(MAX_SUGGESTIONS) {
+        let mut notice_capitalization = false;
+        for (complete, parts, only_capitalization) in suggestions.iter().take(MAX_SUGGESTIONS) {
+            notice_capitalization |= only_capitalization;
             // Only show underline if the suggestion spans a single line and doesn't cover the
             // entirety of the code output. If you have multiple replacements in the same line
             // of code, show the underline.
@@ -1548,7 +1573,10 @@ fn emit_suggestion_default(
         }
         if suggestions.len() > MAX_SUGGESTIONS {
             let msg = format!("and {} other candidates", suggestions.len() - MAX_SUGGESTIONS);
-            buffer.puts(row_num, 0, &msg, Style::NoStyle);
+            buffer.puts(row_num, max_line_num_len + 3, &msg, Style::NoStyle);
+        } else if notice_capitalization {
+            let msg = "notice the capitalization difference";
+            buffer.puts(row_num, max_line_num_len + 3, &msg, Style::NoStyle);
         }
         emit_to_destination(&buffer.render(), level, &mut self.dst, self.short_message)?;
         Ok(())
@@ -2030,3 +2058,18 @@ fn drop(&mut self) {
         }
     }
 }
+
+/// Whether the original and suggested code are visually similar enough to warrant extra wording.
+pub fn is_case_difference(sm: &dyn SourceMapper, suggested: &str, sp: Span) -> bool {
+    // FIXME: this should probably be extended to also account for `FO0` → `FOO` and unicode.
+    let found = sm.span_to_snippet(sp).unwrap();
+    let ascii_confusables = &['c', 'f', 'i', 'k', 'o', 's', 'u', 'v', 'w', 'x', 'y', 'z'];
+    // All the chars that differ in capitalization are confusable (above):
+    let confusable = found.chars().zip(suggested.chars()).filter(|(f, s)| f != s).all(|(f, s)| {
+        (ascii_confusables.contains(&f) || ascii_confusables.contains(&s))
+    });
+    confusable && found.to_lowercase() == suggested.to_lowercase()
+            // FIXME: We sometimes suggest the same thing we already have, which is a
+            //        bug, but be defensive against that here.
+            && found != suggested
+}
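
`is_case_difference` above only adds the extra wording when every differing character is an ASCII letter whose upper- and lowercase forms are easy to confuse. A standalone sketch of the same predicate, taking the original snippet directly instead of resolving it through a `SourceMapper`:

    /// True when `found` and `suggested` differ only in the casing of
    /// visually confusable ASCII letters.
    fn is_case_difference(found: &str, suggested: &str) -> bool {
        let ascii_confusables = ['c', 'f', 'i', 'k', 'o', 's', 'u', 'v', 'w', 'x', 'y', 'z'];
        // Every position that differs must involve one of the confusable letters.
        let confusable = found
            .chars()
            .zip(suggested.chars())
            .filter(|(f, s)| f != s)
            .all(|(f, s)| ascii_confusables.contains(&f) || ascii_confusables.contains(&s));
        confusable
            && found.to_lowercase() == suggested.to_lowercase()
            // Defensive: identical snippets indicate a bug elsewhere, don't flag them.
            && found != suggested
    }

    fn main() {
        assert!(is_case_difference("foo", "Foo"));   // 'f' is in the confusable list
        assert!(!is_case_difference("bar", "Bar"));  // 'b'/'B' are visually distinct
        assert!(!is_case_difference("foo", "foo"));  // no difference at all
    }
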
index f9dc13ce97eea79c6fed0db0a275736922038518..63df052a225041095d263a75ae11e51d45dd33f9 100644 (file)
@@ -13,7 +13,7 @@
 
 use Level::*;
 
-use emitter::{Emitter, EmitterWriter};
+use emitter::{Emitter, EmitterWriter, is_case_difference};
 use registry::Registry;
 
 use rustc_data_structures::sync::{self, Lrc, Lock};
 mod styled_buffer;
 mod lock;
 
-use syntax_pos::{BytePos,
-                 Loc,
-                 FileLinesResult,
-                 SourceFile,
-                 FileName,
-                 MultiSpan,
-                 Span};
+use syntax_pos::{
+    BytePos,
+    FileLinesResult,
+    FileName,
+    Loc,
+    MultiSpan,
+    SourceFile,
+    Span,
+    SpanSnippetError,
+};
 
 /// Indicates the confidence in the correctness of a suggestion.
 ///
@@ -81,6 +84,8 @@ pub enum SuggestionStyle {
     /// This will *not* show the code if the suggestion is inline *and* the suggested code is
     /// empty.
     ShowCode,
+    /// Always show the suggested code independently.
+    ShowAlways,
 }
 
 impl SuggestionStyle {
@@ -145,6 +150,7 @@ pub trait SourceMapper {
     fn lookup_char_pos(&self, pos: BytePos) -> Loc;
     fn span_to_lines(&self, sp: Span) -> FileLinesResult;
     fn span_to_string(&self, sp: Span) -> String;
+    fn span_to_snippet(&self, sp: Span) -> Result<String, SpanSnippetError>;
     fn span_to_filename(&self, sp: Span) -> FileName;
     fn merge_spans(&self, sp_lhs: Span, sp_rhs: Span) -> Option<Span>;
     fn call_span_if_macro(&self, sp: Span) -> Span;
@@ -153,9 +159,12 @@ pub trait SourceMapper {
 }
 
 impl CodeSuggestion {
-    /// Returns the assembled code suggestions and whether they should be shown with an underline.
-    pub fn splice_lines(&self, cm: &SourceMapperDyn)
-                        -> Vec<(String, Vec<SubstitutionPart>)> {
+    /// Returns the assembled code suggestions, whether they should be shown with an underline
+    /// and whether the substitution only differs in capitalization.
+    pub fn splice_lines(
+        &self,
+        cm: &SourceMapperDyn,
+    ) -> Vec<(String, Vec<SubstitutionPart>, bool)> {
         use syntax_pos::{CharPos, Pos};
 
         fn push_trailing(buf: &mut String,
@@ -230,6 +239,7 @@ fn push_trailing(buf: &mut String,
                 prev_hi = cm.lookup_char_pos(part.span.hi());
                 prev_line = fm.get_line(prev_hi.line - 1);
             }
+            let only_capitalization = is_case_difference(cm, &buf, bounding_span);
             // if the replacement already ends with a newline, don't print the next line
             if !buf.ends_with('\n') {
                 push_trailing(&mut buf, prev_line.as_ref(), &prev_hi, None);
@@ -238,7 +248,7 @@ fn push_trailing(buf: &mut String,
             while buf.ends_with('\n') {
                 buf.pop();
             }
-            (buf, substitution.parts)
+            (buf, substitution.parts, only_capitalization)
         }).collect()
     }
 }
index 6e03618d2b0b5f24b6d2296d5e9f60317284aaff..b12ab9e4576025c8efb7d17777df90fe7e94458b 100644 (file)
@@ -111,7 +111,7 @@ pub fn puts(&mut self, line: usize, col: usize, string: &str, style: Style) {
 
     pub fn prepend(&mut self, line: usize, string: &str, style: Style) {
         self.ensure_lines(line);
-        let string_len = string.len();
+        let string_len = string.chars().count();
 
         // Push the old content over to make room for new content
         for _ in 0..string_len {
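
The fix above makes `prepend` shift by the number of characters rather than bytes, since the styled buffer is addressed per character and the two counts disagree for non-ASCII text. A quick illustration of the difference:

    fn main() {
        let s = "résumé";
        assert_eq!(s.len(), 8);            // bytes: the two 'é's take two bytes each
        assert_eq!(s.chars().count(), 6);  // characters: what the buffer actually needs
    }
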
index 90aefb0f32416f8bf56358ba695fb78df55ddb41..673da52c3250e62f0f27552a00952688f43f68a3 100644 (file)
@@ -101,6 +101,7 @@ pub fn load_dep_graph(sess: &Session) -> DepGraphFuture {
     // before we fire the background thread.
 
     let time_passes = sess.time_passes();
+    let prof = sess.prof.clone();
 
     if sess.opts.incremental.is_none() {
         // No incremental compilation.
@@ -160,7 +161,9 @@ pub fn load_dep_graph(sess: &Session) -> DepGraphFuture {
     }
 
     MaybeAsync::Async(std::thread::spawn(move || {
-        time_ext(time_passes, None, "background load prev dep-graph", move || {
+        time_ext(time_passes, "background load prev dep-graph", move || {
+            let _prof_timer = prof.generic_activity("incr_comp_load_dep_graph");
+
             match load_data(report_incremental_info, &path) {
                 LoadResult::DataOutOfDate => LoadResult::DataOutOfDate,
                 LoadResult::Error { message } => LoadResult::Error { message },
@@ -198,6 +201,8 @@ pub fn load_query_result_cache(sess: &Session) -> OnDiskCache<'_> {
         return OnDiskCache::new_empty(sess.source_map());
     }
 
+    let _prof_timer = sess.prof.generic_activity("incr_comp_load_query_result_cache");
+
     match load_data(sess.opts.debugging_opts.incremental_info, &query_cache_path(sess)) {
         LoadResult::Ok{ data: (bytes, start_pos) } => OnDiskCache::new(sess, bytes, start_pos),
         _ => OnDiskCache::new_empty(sess.source_map())
index 6af065513ee0dccf9f6f00343ea1a491bb335bb5..f5935c9969baa04b367cb654e4d1b2b524c5a63f 100644 (file)
@@ -241,6 +241,8 @@ fn encode_work_product_index(work_products: &FxHashMap<WorkProductId, WorkProduc
 
 fn encode_query_cache(tcx: TyCtxt<'_>, encoder: &mut Encoder) {
     time(tcx.sess, "serialize query result cache", || {
+        let _timer = tcx.prof.generic_activity("incr_comp_serialize_result_cache");
+
         tcx.serialize_query_result_cache(encoder).unwrap();
     })
 }
index 8c49e0dde0dcd3729468d4fa4a8e23f3f1a82db6..9ed5ef5a539a1bc01fd771e7c04df41e868c6196 100644 (file)
@@ -13,8 +13,9 @@
 pub const WORD_BYTES: usize = mem::size_of::<Word>();
 pub const WORD_BITS: usize = WORD_BYTES * 8;
 
-/// A fixed-size bitset type with a dense representation. It does not support
-/// resizing after creation; use `GrowableBitSet` for that.
+/// A fixed-size bitset type with a dense representation.
+///
+/// NOTE: Use [`GrowableBitSet`] if you need support for resizing after creation.
 ///
 /// `T` is an index type, typically a newtyped `usize` wrapper, but it can also
 /// just be `usize`.
@@ -22,6 +23,8 @@
 /// All operations that involve an element will panic if the element is equal
 /// to or greater than the domain size. All operations that involve two bitsets
 /// will panic if the bitsets have differing domain sizes.
+///
+/// [`GrowableBitSet`]: struct.GrowableBitSet.html
 #[derive(Clone, Eq, PartialEq, RustcDecodable, RustcEncodable)]
 pub struct BitSet<T: Idx> {
     domain_size: usize,
@@ -168,11 +171,7 @@ pub fn words(&self) -> &[Word] {
     /// Iterates over the indices of set bits in a sorted order.
     #[inline]
     pub fn iter(&self) -> BitIter<'_, T> {
-        BitIter {
-            cur: None,
-            iter: self.words.iter().enumerate(),
-            marker: PhantomData,
-        }
+        BitIter::new(&self.words)
     }
 
     /// Duplicates the set as a hybrid set.
@@ -291,26 +290,55 @@ fn to_string(&self) -> String {
 }
 
 pub struct BitIter<'a, T: Idx> {
-    cur: Option<(Word, usize)>,
-    iter: iter::Enumerate<slice::Iter<'a, Word>>,
+    /// A copy of the current word, but with any already-visited bits cleared.
+    /// (This lets us use `trailing_zeros()` to find the next set bit.) When it
+    /// is reduced to 0, we move onto the next word.
+    word: Word,
+
+    /// The offset (measured in bits) of the current word.
+    offset: usize,
+
+    /// Underlying iterator over the words.
+    iter: slice::Iter<'a, Word>,
+
     marker: PhantomData<T>
 }
 
+impl<'a, T: Idx> BitIter<'a, T> {
+    #[inline]
+    fn new(words: &'a [Word]) -> BitIter<'a, T> {
+        // We initialize `word` and `offset` to degenerate values. On the first
+        // call to `next()` we will fall through to getting the first word from
+        // `iter`, which sets `word` to the first word (if there is one) and
+        // `offset` to 0. Doing it this way saves us from having to maintain
+        // additional state about whether we have started.
+        BitIter {
+            word: 0,
+            offset: std::usize::MAX - (WORD_BITS - 1),
+            iter: words.iter(),
+            marker: PhantomData,
+        }
+    }
+}
+
 impl<'a, T: Idx> Iterator for BitIter<'a, T> {
     type Item = T;
     fn next(&mut self) -> Option<T> {
         loop {
-            if let Some((ref mut word, offset)) = self.cur {
-                let bit_pos = word.trailing_zeros() as usize;
-                if bit_pos != WORD_BITS {
-                    let bit = 1 << bit_pos;
-                    *word ^= bit;
-                    return Some(T::new(bit_pos + offset))
-                }
+            if self.word != 0 {
+                // Get the position of the next set bit in the current word,
+                // then clear the bit.
+                let bit_pos = self.word.trailing_zeros() as usize;
+                let bit = 1 << bit_pos;
+                self.word ^= bit;
+                return Some(T::new(bit_pos + self.offset))
             }
 
-            let (i, word) = self.iter.next()?;
-            self.cur = Some((*word, WORD_BITS * i));
+            // Move onto the next word. `wrapping_add()` is needed to handle
+            // the degenerate initial value given to `offset` in `new()`.
+            let word = self.iter.next()?;
+            self.word = *word;
+            self.offset = self.offset.wrapping_add(WORD_BITS);
         }
     }
 }
@@ -851,11 +879,7 @@ pub fn words(&self) -> &[Word] {
     pub fn iter(&self, row: R) -> BitIter<'_, C> {
         assert!(row.index() < self.num_rows);
         let (start, end) = self.range(row);
-        BitIter {
-            cur: None,
-            iter: self.words[start..end].iter().enumerate(),
-            marker: PhantomData,
-        }
+        BitIter::new(&self.words[start..end])
     }
 
     /// Returns the number of elements in `row`.
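
The rewritten `BitIter` above walks one word at a time, repeatedly taking `trailing_zeros()` of a scratch copy of the current word and clearing that bit until the copy reaches zero. A standalone sketch of the same word-walking idea over a plain `&[u64]`, without the `Idx` newtype or the degenerate initial offset:

    const WORD_BITS: usize = 64;

    /// Indices of the set bits in `words`, in ascending order.
    fn set_bits(words: &[u64]) -> Vec<usize> {
        let mut out = Vec::new();
        for (i, &w) in words.iter().enumerate() {
            let mut word = w;
            while word != 0 {
                // Lowest set bit of the scratch copy, then clear it.
                let bit_pos = word.trailing_zeros() as usize;
                word ^= 1 << bit_pos;
                out.push(i * WORD_BITS + bit_pos);
            }
        }
        out
    }

    fn main() {
        // Bits 0 and 3 in the first word, bit 64 + 1 in the second.
        let words = [0b1001u64, 0b10];
        assert_eq!(set_bits(&words), vec![0, 3, 65]);
    }
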
index 780f7a7ffa9edee38e6de8ec5a4084f3b72977ed..7a0d931ca732ae159a0e721857934fd1df0c80d9 100644 (file)
@@ -11,10 +11,11 @@ doctest = false
 
 [dependencies]
 log = "0.4"
-rayon = { version = "0.2.0", package = "rustc-rayon" }
+rayon = { version = "0.3.0", package = "rustc-rayon" }
 smallvec = { version = "0.6.7", features = ["union", "may_dangle"] }
 syntax = { path = "../libsyntax" }
 syntax_ext = { path = "../libsyntax_ext" }
+syntax_expand = { path = "../libsyntax_expand" }
 syntax_pos = { path = "../libsyntax_pos" }
 rustc_serialize = { path = "../libserialize", package = "serialize" }
 rustc = { path = "../librustc" }
index dae8fb242d58cd69cada05ce9ed638480bcfd3d7..b26bd75c974c6b67edae5cdc985ab475cf77d484 100644 (file)
@@ -1,6 +1,5 @@
 use crate::queries::Queries;
 use crate::util;
-use crate::profile;
 pub use crate::passes::BoxedResolver;
 
 use rustc::lint;
@@ -115,17 +114,7 @@ pub fn run_compiler_in_existing_thread_pool<F, R>(config: Config, f: F) -> R
         compiler.sess.diagnostic().print_error_count(&util::diagnostics_registry());
     });
 
-    if compiler.sess.profile_queries() {
-        profile::begin(&compiler.sess);
-    }
-
-    let r = f(&compiler);
-
-    if compiler.sess.profile_queries() {
-        profile::dump(&compiler.sess, "profile_queries".to_string())
-    }
-
-    r
+    f(&compiler)
 }
 
 pub fn run_compiler<F, R>(mut config: Config, f: F) -> R
index 2e593d441553a014c28a85bafb16e1b033e272f4..6be36e9b9001b80d608950547b3b07a7f7b865ad 100644 (file)
@@ -16,6 +16,5 @@
 mod queries;
 pub mod util;
 mod proc_macro_decls;
-mod profile;
 
 pub use interface::{run_compiler, Config};
index bf5e86017fc7b6e7d311fef436b12a7b04466c9a..89de5714695defea27ec132cf48446c9fc5d7259 100644 (file)
@@ -35,7 +35,7 @@
 use rustc_typeck as typeck;
 use syntax::{self, ast, visit};
 use syntax::early_buffered_lints::BufferedEarlyLint;
-use syntax::ext::base::{NamedSyntaxExtension, ExtCtxt};
+use syntax_expand::base::{NamedSyntaxExtension, ExtCtxt};
 use syntax::mut_visit::MutVisitor;
 use syntax::parse::{self, PResult};
 use syntax::util::node_count::NodeCounter;
@@ -130,7 +130,7 @@ pub fn configure_and_expand(
     let crate_name = crate_name.to_string();
     let (result, resolver) = BoxedResolver::new(static move || {
         let sess = &*sess;
-        let mut crate_loader = CrateLoader::new(sess, &*cstore, &crate_name);
+        let crate_loader = CrateLoader::new(sess, &*cstore, &crate_name);
         let resolver_arenas = Resolver::arenas();
         let res = configure_and_expand_inner(
             sess,
@@ -138,7 +138,7 @@ pub fn configure_and_expand(
             krate,
             &crate_name,
             &resolver_arenas,
-            &mut crate_loader,
+            &crate_loader,
             plugin_info,
         );
         let mut resolver = match res {
@@ -169,6 +169,7 @@ fn from_owned_resolver(
         ExpansionResult {
             defs: Steal::new(resolver.definitions),
             resolutions: Steal::new(Resolutions {
+                extern_crate_map: resolver.extern_crate_map,
                 export_map: resolver.export_map,
                 trait_map: resolver.trait_map,
                 glob_map: resolver.glob_map,
@@ -187,6 +188,7 @@ pub fn from_resolver_ref(
         ExpansionResult {
             defs: Steal::new(resolver.definitions.clone()),
             resolutions: Steal::new(Resolutions {
+                extern_crate_map: resolver.extern_crate_map.clone(),
                 export_map: resolver.export_map.clone(),
                 trait_map: resolver.trait_map.clone(),
                 glob_map: resolver.glob_map.clone(),
@@ -250,6 +252,8 @@ pub fn register_plugins<'a>(
 
     if sess.opts.incremental.is_some() {
         time(sess, "garbage-collect incremental cache directory", || {
+            let _prof_timer =
+                sess.prof.generic_activity("incr_comp_garbage_collect_session_directories");
             if let Err(e) = rustc_incremental::garbage_collect_session_directories(sess) {
                 warn!(
                     "Error while trying to garbage collect incremental \
@@ -317,7 +321,7 @@ fn configure_and_expand_inner<'a>(
     mut krate: ast::Crate,
     crate_name: &str,
     resolver_arenas: &'a ResolverArenas<'a>,
-    crate_loader: &'a mut CrateLoader<'a>,
+    crate_loader: &'a CrateLoader<'a>,
     plugin_info: PluginInfo,
 ) -> Result<(ast::Crate, Resolver<'a>)> {
     time(sess, "pre-AST-expansion lint checks", || {
@@ -393,12 +397,12 @@ fn configure_and_expand_inner<'a>(
 
         // Create the config for macro expansion
         let features = sess.features_untracked();
-        let cfg = syntax::ext::expand::ExpansionConfig {
+        let cfg = syntax_expand::expand::ExpansionConfig {
             features: Some(&features),
             recursion_limit: *sess.recursion_limit.get(),
             trace_mac: sess.opts.debugging_opts.trace_macros,
             should_test: sess.opts.test,
-            ..syntax::ext::expand::ExpansionConfig::default(crate_name.to_string())
+            ..syntax_expand::expand::ExpansionConfig::default(crate_name.to_string())
         };
 
         let mut ecx = ExtCtxt::new(&sess.parse_sess, cfg, &mut resolver);
@@ -539,7 +543,8 @@ pub fn lower_to_hir(
 ) -> Result<hir::map::Forest> {
     // Lower AST to HIR.
     let hir_forest = time(sess, "lowering AST -> HIR", || {
-        let hir_crate = lower_crate(sess, cstore, &dep_graph, &krate, resolver);
+        let nt_to_tokenstream = syntax::parse::nt_to_tokenstream;
+        let hir_crate = lower_crate(sess, cstore, &dep_graph, &krate, resolver, nt_to_tokenstream);
 
         if sess.opts.debugging_opts.hir_stats {
             hir_stats::print_hir_stats(&hir_crate);
@@ -554,7 +559,7 @@ pub fn lower_to_hir(
 
     // Discard hygiene data, which isn't required after lowering to HIR.
     if !sess.opts.debugging_opts.keep_hygiene_data {
-        syntax::ext::hygiene::clear_syntax_context_map();
+        syntax_expand::hygiene::clear_syntax_context_map();
     }
 
     Ok(hir_forest)
@@ -660,16 +665,15 @@ fn write_out_deps(compiler: &Compiler, outputs: &OutputFilenames, out_filenames:
 
         if sess.binary_dep_depinfo() {
             for cnum in compiler.cstore.crates_untracked() {
-                let metadata = compiler.cstore.crate_data_as_rc_any(cnum);
-                let metadata = metadata.downcast_ref::<cstore::CrateMetadata>().unwrap();
-                if let Some((path, _)) = &metadata.source.dylib {
-                    files.push(escape_dep_filename(&FileName::Real(path.clone())));
+                let source = compiler.cstore.crate_source_untracked(cnum);
+                if let Some((path, _)) = source.dylib {
+                    files.push(escape_dep_filename(&FileName::Real(path)));
                 }
-                if let Some((path, _)) = &metadata.source.rlib {
-                    files.push(escape_dep_filename(&FileName::Real(path.clone())));
+                if let Some((path, _)) = source.rlib {
+                    files.push(escape_dep_filename(&FileName::Real(path)));
                 }
-                if let Some((path, _)) = &metadata.source.rmeta {
-                    files.push(escape_dep_filename(&FileName::Real(path.clone())));
+                if let Some((path, _)) = source.rmeta {
+                    files.push(escape_dep_filename(&FileName::Real(path)));
                 }
             }
         }
@@ -780,20 +784,20 @@ pub fn default_provide(providers: &mut ty::query::Providers<'_>) {
     ty::provide(providers);
     traits::provide(providers);
     stability::provide(providers);
-    middle::intrinsicck::provide(providers);
-    middle::liveness::provide(providers);
     reachable::provide(providers);
     rustc_passes::provide(providers);
     rustc_traits::provide(providers);
     middle::region::provide(providers);
-    middle::entry::provide(providers);
     cstore::provide(providers);
     lint::provide(providers);
     rustc_lint::provide(providers);
+    rustc_codegen_utils::provide(providers);
+    rustc_codegen_ssa::provide(providers);
 }
 
 pub fn default_provide_extern(providers: &mut ty::query::Providers<'_>) {
     cstore::provide_extern(providers);
+    rustc_codegen_ssa::provide_extern(providers);
 }
 
 declare_box_region_type!(
@@ -892,7 +896,7 @@ fn analysis(tcx: TyCtxt<'_>, cnum: CrateNum) -> Result<()> {
     time(sess, "misc checking 1", || {
         parallel!({
             entry_point = time(sess, "looking for entry point", || {
-                middle::entry::find_entry_point(tcx)
+                rustc_passes::entry::find_entry_point(tcx)
             });
 
             time(sess, "looking for plugin registrar", || {
@@ -973,7 +977,7 @@ fn analysis(tcx: TyCtxt<'_>, cnum: CrateNum) -> Result<()> {
                     tcx.ensure().check_private_in_public(LOCAL_CRATE);
                 });
             }, {
-                time(sess, "death checking", || middle::dead::check_crate(tcx));
+                time(sess, "death checking", || rustc_passes::dead::check_crate(tcx));
             },  {
                 time(sess, "unused lib feature checking", || {
                     stability::check_unused_or_stable_features(tcx)
diff --git a/src/librustc_interface/profile/mod.rs b/src/librustc_interface/profile/mod.rs
deleted file mode 100644 (file)
index 2e71d46..0000000
+++ /dev/null
@@ -1,297 +0,0 @@
-use log::debug;
-use rustc::dep_graph::DepNode;
-use rustc::session::Session;
-use rustc::util::common::{ProfQDumpParams, ProfileQueriesMsg, profq_msg, profq_set_chan};
-use std::sync::mpsc::{Receiver};
-use std::io::{Write};
-use std::time::{Duration, Instant};
-
-pub mod trace;
-
-/// begin a profile thread, if not already running
-pub fn begin(sess: &Session) {
-    use std::thread;
-    use std::sync::mpsc::{channel};
-    let (tx, rx) = channel();
-    if profq_set_chan(sess, tx) {
-        thread::spawn(move || profile_queries_thread(rx));
-    }
-}
-
-/// dump files with profiling information to the given base path, and
-/// wait for this dump to complete.
-///
-/// wraps the RPC (send/recv channel logic) of requesting a dump.
-pub fn dump(sess: &Session, path: String) {
-    use std::sync::mpsc::{channel};
-    let (tx, rx) = channel();
-    let params = ProfQDumpParams {
-        path,
-        ack: tx,
-        // FIXME: Add another compiler flag to toggle whether this log
-        // is written; false for now
-        dump_profq_msg_log: true,
-    };
-    profq_msg(sess, ProfileQueriesMsg::Dump(params));
-    let _ = rx.recv().unwrap();
-}
-
-// State for parsing recursive trace structure in separate thread, via messages
-#[derive(Clone, Eq, PartialEq)]
-enum ParseState {
-    // No (local) parse state; may be parsing a tree, focused on a
-    // sub-tree that could be anything.
-    Clear,
-    // Have Query information from the last message
-    HaveQuery(trace::Query, Instant),
-    // Have "time-begin" information from the last message (doit flag, and message)
-    HaveTimeBegin(String, Instant),
-    // Have "task-begin" information from the last message
-    HaveTaskBegin(DepNode, Instant),
-}
-struct StackFrame {
-    pub parse_st: ParseState,
-    pub traces:   Vec<trace::Rec>,
-}
-
-fn total_duration(traces: &[trace::Rec]) -> Duration {
-    Duration::new(0, 0) + traces.iter().map(|t| t.dur_total).sum()
-}
-
-// profiling thread; retains state (in local variables) and dump traces, upon request.
-fn profile_queries_thread(r: Receiver<ProfileQueriesMsg>) {
-    use self::trace::*;
-    use std::fs::File;
-
-    let mut profq_msgs: Vec<ProfileQueriesMsg> = vec![];
-    let mut frame: StackFrame = StackFrame { parse_st: ParseState::Clear, traces: vec![] };
-    let mut stack: Vec<StackFrame> = vec![];
-    loop {
-        let msg = r.recv();
-        if let Err(_recv_err) = msg {
-            // FIXME: Perhaps do something smarter than simply quitting?
-            break
-        };
-        let msg = msg.unwrap();
-        debug!("profile_queries_thread: {:?}", msg);
-
-        // Meta-level versus _actual_ queries messages
-        match msg {
-            ProfileQueriesMsg::Halt => return,
-            ProfileQueriesMsg::Dump(params) => {
-                assert!(stack.is_empty());
-                assert!(frame.parse_st == ParseState::Clear);
-
-                // write log of all messages
-                if params.dump_profq_msg_log {
-                    let mut log_file =
-                        File::create(format!("{}.log.txt", params.path)).unwrap();
-                    for m in profq_msgs.iter() {
-                        writeln!(&mut log_file, "{:?}", m).unwrap()
-                    };
-                }
-
-                // write HTML file, and counts file
-                let html_path = format!("{}.html", params.path);
-                let mut html_file = File::create(&html_path).unwrap();
-
-                let counts_path = format!("{}.counts.txt", params.path);
-                let mut counts_file = File::create(&counts_path).unwrap();
-
-                writeln!(html_file,
-                    "<html>\n<head>\n<link rel=\"stylesheet\" type=\"text/css\" href=\"{}\">",
-                    "profile_queries.css").unwrap();
-                writeln!(html_file, "<style>").unwrap();
-                trace::write_style(&mut html_file);
-                writeln!(html_file, "</style>\n</head>\n<body>").unwrap();
-                trace::write_traces(&mut html_file, &mut counts_file, &frame.traces);
-                writeln!(html_file, "</body>\n</html>").unwrap();
-
-                let ack_path = format!("{}.ack", params.path);
-                let ack_file = File::create(&ack_path).unwrap();
-                drop(ack_file);
-
-                // Tell main thread that we are done, e.g., so it can exit
-                params.ack.send(()).unwrap();
-            }
-            // Actual query message:
-            msg => {
-                // Record msg in our log
-                profq_msgs.push(msg.clone());
-                // Respond to the message, knowing that we've already handled Halt and Dump, above.
-                match (frame.parse_st.clone(), msg) {
-                    (_, ProfileQueriesMsg::Halt) | (_, ProfileQueriesMsg::Dump(_)) => {
-                        unreachable!();
-                    },
-                    // Parse State: Clear
-                    (ParseState::Clear,
-                     ProfileQueriesMsg::QueryBegin(span, querymsg)) => {
-                        let start = Instant::now();
-                        frame.parse_st = ParseState::HaveQuery
-                            (Query { span, msg: querymsg }, start)
-                    },
-                    (ParseState::Clear,
-                     ProfileQueriesMsg::CacheHit) => {
-                        panic!("parse error: unexpected CacheHit; expected QueryBegin")
-                    },
-                    (ParseState::Clear,
-                     ProfileQueriesMsg::ProviderBegin) => {
-                        panic!("parse error: expected QueryBegin before beginning a provider")
-                    },
-                    (ParseState::Clear,
-                     ProfileQueriesMsg::ProviderEnd) => {
-                        let provider_extent = frame.traces;
-                        match stack.pop() {
-                            None =>
-                                panic!("parse error: expected a stack frame; found an empty stack"),
-                            Some(old_frame) => {
-                                match old_frame.parse_st {
-                                    ParseState::HaveQuery(q, start) => {
-                                        let duration = start.elapsed();
-                                        frame = StackFrame{
-                                            parse_st: ParseState::Clear,
-                                            traces: old_frame.traces
-                                        };
-                                        let dur_extent = total_duration(&provider_extent);
-                                        let trace = Rec {
-                                            effect: Effect::QueryBegin(q, CacheCase::Miss),
-                                            extent: Box::new(provider_extent),
-                                            start: start,
-                                            dur_self: duration - dur_extent,
-                                            dur_total: duration,
-                                        };
-                                        frame.traces.push( trace );
-                                    },
-                                    _ => panic!("internal parse error: malformed parse stack")
-                                }
-                            }
-                        }
-                    },
-                    (ParseState::Clear,
-                     ProfileQueriesMsg::TimeBegin(msg)) => {
-                        let start = Instant::now();
-                        frame.parse_st = ParseState::HaveTimeBegin(msg, start);
-                        stack.push(frame);
-                        frame = StackFrame{parse_st: ParseState::Clear, traces: vec![]};
-                    },
-                    (_, ProfileQueriesMsg::TimeBegin(_)) => {
-                        panic!("parse error; did not expect time begin here");
-                    },
-                    (ParseState::Clear,
-                     ProfileQueriesMsg::TimeEnd) => {
-                        let provider_extent = frame.traces;
-                        match stack.pop() {
-                            None =>
-                                panic!("parse error: expected a stack frame; found an empty stack"),
-                            Some(old_frame) => {
-                                match old_frame.parse_st {
-                                    ParseState::HaveTimeBegin(msg, start) => {
-                                        let duration = start.elapsed();
-                                        frame = StackFrame{
-                                            parse_st: ParseState::Clear,
-                                            traces: old_frame.traces
-                                        };
-                                        let dur_extent = total_duration(&provider_extent);
-                                        let trace = Rec {
-                                            effect: Effect::TimeBegin(msg),
-                                            extent: Box::new(provider_extent),
-                                            start: start,
-                                            dur_total: duration,
-                                            dur_self: duration - dur_extent,
-                                        };
-                                        frame.traces.push( trace );
-                                    },
-                                    _ => panic!("internal parse error: malformed parse stack")
-                                }
-                            }
-                        }
-                    },
-                    (_, ProfileQueriesMsg::TimeEnd) => {
-                        panic!("parse error")
-                    },
-                    (ParseState::Clear,
-                     ProfileQueriesMsg::TaskBegin(key)) => {
-                        let start = Instant::now();
-                        frame.parse_st = ParseState::HaveTaskBegin(key, start);
-                        stack.push(frame);
-                        frame = StackFrame{ parse_st: ParseState::Clear, traces: vec![] };
-                    },
-                    (_, ProfileQueriesMsg::TaskBegin(_)) => {
-                        panic!("parse error; did not expect time begin here");
-                    },
-                    (ParseState::Clear,
-                     ProfileQueriesMsg::TaskEnd) => {
-                        let provider_extent = frame.traces;
-                        match stack.pop() {
-                            None =>
-                                panic!("parse error: expected a stack frame; found an empty stack"),
-                            Some(old_frame) => {
-                                match old_frame.parse_st {
-                                    ParseState::HaveTaskBegin(key, start) => {
-                                        let duration = start.elapsed();
-                                        frame = StackFrame{
-                                            parse_st: ParseState::Clear,
-                                            traces: old_frame.traces
-                                        };
-                                        let dur_extent = total_duration(&provider_extent);
-                                        let trace = Rec {
-                                            effect: Effect::TaskBegin(key),
-                                            extent: Box::new(provider_extent),
-                                            start: start,
-                                            dur_total: duration,
-                                            dur_self: duration - dur_extent,
-                                        };
-                                        frame.traces.push( trace );
-                                    },
-                                    _ => panic!("internal parse error: malformed parse stack")
-                                }
-                            }
-                        }
-                    },
-                    (_, ProfileQueriesMsg::TaskEnd) => {
-                        panic!("parse error")
-                    },
-                    // Parse State: HaveQuery
-                    (ParseState::HaveQuery(q,start),
-                     ProfileQueriesMsg::CacheHit) => {
-                        let duration = start.elapsed();
-                        let trace : Rec = Rec{
-                            effect: Effect::QueryBegin(q, CacheCase::Hit),
-                            extent: Box::new(vec![]),
-                            start: start,
-                            dur_self: duration,
-                            dur_total: duration,
-                        };
-                        frame.traces.push( trace );
-                        frame.parse_st = ParseState::Clear;
-                    },
-                    (ParseState::HaveQuery(_, _),
-                     ProfileQueriesMsg::ProviderBegin) => {
-                        stack.push(frame);
-                        frame = StackFrame{ parse_st: ParseState::Clear, traces: vec![] };
-                    },
-
-                    // Parse errors:
-
-                    (ParseState::HaveQuery(q, _),
-                     ProfileQueriesMsg::ProviderEnd) => {
-                        panic!("parse error: unexpected ProviderEnd; \
-                                expected something else to follow BeginQuery for {:?}", q)
-                    },
-                    (ParseState::HaveQuery(q1, _),
-                     ProfileQueriesMsg::QueryBegin(span2, querymsg2)) => {
-                        panic!("parse error: unexpected QueryBegin; \
-                                earlier query is unfinished: {:?} and now {:?}",
-                               q1, Query{span:span2, msg: querymsg2})
-                    },
-                    (ParseState::HaveTimeBegin(_, _), _) => {
-                        unreachable!()
-                    },
-                    (ParseState::HaveTaskBegin(_, _), _) => {
-                        unreachable!()
-                    },
-                }
-            }
-        }
-    }
-}
diff --git a/src/librustc_interface/profile/trace.rs b/src/librustc_interface/profile/trace.rs
deleted file mode 100644 (file)
index 95c4ea6..0000000
+++ /dev/null
@@ -1,304 +0,0 @@
-use super::*;
-use syntax_pos::SpanData;
-use rustc_data_structures::fx::FxHashMap;
-use rustc::util::common::QueryMsg;
-use std::fs::File;
-use std::time::{Duration, Instant};
-use rustc::dep_graph::{DepNode};
-
-#[derive(Debug, Clone, Eq, PartialEq)]
-pub struct Query {
-    pub span: SpanData,
-    pub msg: QueryMsg,
-}
-pub enum Effect {
-    QueryBegin(Query, CacheCase),
-    TimeBegin(String),
-    TaskBegin(DepNode),
-}
-pub enum CacheCase {
-    Hit, Miss
-}
-/// Recursive trace structure
-pub struct Rec {
-    pub effect: Effect,
-    pub start: Instant,
-    pub dur_self: Duration,
-    pub dur_total: Duration,
-    pub extent: Box<Vec<Rec>>,
-}
-pub struct QueryMetric {
-    pub count: usize,
-    pub dur_self: Duration,
-    pub dur_total: Duration,
-}
-
-fn cons(s: &str) -> String {
-    let first = s.split(|d| d == '(' || d == '{').next();
-    assert!(first.is_some() && first != Some(""));
-    first.unwrap().to_owned()
-}
-
-pub fn cons_of_query_msg(q: &trace::Query) -> String {
-    cons(&format!("{:?}", q.msg))
-}
-
-pub fn cons_of_key(k: &DepNode) -> String {
-    cons(&format!("{:?}", k))
-}
-
-// First return value is text; second return value is a CSS class
-pub fn html_of_effect(eff: &Effect) -> (String, String) {
-    match *eff {
-        Effect::TimeBegin(ref msg) => {
-            (msg.clone(),
-             "time-begin".to_string())
-        },
-        Effect::TaskBegin(ref key) => {
-            let cons = cons_of_key(key);
-            (cons.clone(), format!("{} task-begin", cons))
-        },
-        Effect::QueryBegin(ref qmsg, ref cc) => {
-            let cons = cons_of_query_msg(qmsg);
-            (cons.clone(),
-             format!("{} {}",
-                     cons,
-                     match *cc {
-                         CacheCase::Hit => "hit",
-                         CacheCase::Miss => "miss",
-                     }))
-        }
-    }
-}
-
-// First return value is text; second return value is a CSS class
-fn html_of_duration(_start: &Instant, dur: &Duration) -> (String, String) {
-    use rustc::util::common::duration_to_secs_str;
-    (duration_to_secs_str(dur.clone()), String::new())
-}
-
-fn html_of_fraction(frac: f64) -> (String, &'static str) {
-    let css = {
-        if       frac > 0.50  { "frac-50" }
-        else if  frac > 0.40  { "frac-40" }
-        else if  frac > 0.30  { "frac-30" }
-        else if  frac > 0.20  { "frac-20" }
-        else if  frac > 0.10  { "frac-10" }
-        else if  frac > 0.05  { "frac-05" }
-        else if  frac > 0.02  { "frac-02" }
-        else if  frac > 0.01  { "frac-01" }
-        else if  frac > 0.001 { "frac-001" }
-        else                  { "frac-0" }
-    };
-    let percent = frac * 100.0;
-
-    if percent > 0.1 {
-        (format!("{:.1}%", percent), css)
-    } else {
-        ("< 0.1%".to_string(), css)
-    }
-}
-
-fn total_duration(traces: &[Rec]) -> Duration {
-    Duration::new(0, 0) + traces.iter().map(|t| t.dur_total).sum()
-}
-
-fn duration_div(nom: Duration, den: Duration) -> f64 {
-    fn to_nanos(d: Duration) -> u64 {
-        d.as_secs() * 1_000_000_000 + d.subsec_nanos() as u64
-    }
-
-    to_nanos(nom) as f64 / to_nanos(den) as f64
-}
-
-fn write_traces_rec(file: &mut File, traces: &[Rec], total: Duration, depth: usize) {
-    for t in traces {
-        let (eff_text, eff_css_classes) = html_of_effect(&t.effect);
-        let (dur_text, dur_css_classes) = html_of_duration(&t.start, &t.dur_total);
-        let fraction = duration_div(t.dur_total, total);
-        let percent = fraction * 100.0;
-        let (frc_text, frc_css_classes) = html_of_fraction(fraction);
-        writeln!(file, "<div class=\"trace depth-{} extent-{}{} {} {} {}\">",
-                 depth,
-                 t.extent.len(),
-                 /* Heuristic for 'important' CSS class: */
-                 if t.extent.len() > 5 || percent >= 1.0 { " important" } else { "" },
-                 eff_css_classes,
-                 dur_css_classes,
-                 frc_css_classes,
-        ).unwrap();
-        writeln!(file, "<div class=\"eff\">{}</div>", eff_text).unwrap();
-        writeln!(file, "<div class=\"dur\">{}</div>", dur_text).unwrap();
-        writeln!(file, "<div class=\"frc\">{}</div>", frc_text).unwrap();
-        write_traces_rec(file, &t.extent, total, depth + 1);
-        writeln!(file, "</div>").unwrap();
-    }
-}
-
-fn compute_counts_rec(counts: &mut FxHashMap<String,QueryMetric>, traces: &[Rec]) {
-    counts.reserve(traces.len());
-    for t in traces.iter() {
-        match t.effect {
-            Effect::TimeBegin(ref msg) => {
-                let qm = match counts.get(msg) {
-                    Some(_qm) => panic!("TimeBegin with non-unique, repeat message"),
-                    None => QueryMetric {
-                        count: 1,
-                        dur_self: t.dur_self,
-                        dur_total: t.dur_total,
-                    }
-                };
-                counts.insert(msg.clone(), qm);
-            },
-            Effect::TaskBegin(ref key) => {
-                let cons = cons_of_key(key);
-                let qm = match counts.get(&cons) {
-                    Some(qm) =>
-                        QueryMetric {
-                            count: qm.count + 1,
-                            dur_self: qm.dur_self + t.dur_self,
-                            dur_total: qm.dur_total + t.dur_total,
-                        },
-                    None => QueryMetric {
-                        count: 1,
-                        dur_self: t.dur_self,
-                        dur_total: t.dur_total,
-                    }
-                };
-                counts.insert(cons, qm);
-            },
-            Effect::QueryBegin(ref qmsg, ref _cc) => {
-                let qcons = cons_of_query_msg(qmsg);
-                let qm = match counts.get(&qcons) {
-                    Some(qm) =>
-                        QueryMetric {
-                            count: qm.count + 1,
-                            dur_total: qm.dur_total + t.dur_total,
-                            dur_self: qm.dur_self + t.dur_self
-                        },
-                    None => QueryMetric {
-                        count: 1,
-                        dur_total: t.dur_total,
-                        dur_self: t.dur_self,
-                    }
-                };
-                counts.insert(qcons, qm);
-            }
-        }
-        compute_counts_rec(counts, &t.extent)
-    }
-}
-
-pub fn write_counts(count_file: &mut File, counts: &mut FxHashMap<String, QueryMetric>) {
-    use rustc::util::common::duration_to_secs_str;
-    use std::cmp::Reverse;
-
-    let mut data = counts.iter().map(|(ref cons, ref qm)|
-        (cons.clone(), qm.count.clone(), qm.dur_total.clone(), qm.dur_self.clone())
-    ).collect::<Vec<_>>();
-
-    data.sort_by_key(|k| Reverse(k.3));
-    for (cons, count, dur_total, dur_self) in data {
-        writeln!(count_file, "{}, {}, {}, {}",
-                 cons, count,
-                 duration_to_secs_str(dur_total),
-                 duration_to_secs_str(dur_self)
-        ).unwrap();
-    }
-}
-
-pub fn write_traces(html_file: &mut File, counts_file: &mut File, traces: &[Rec]) {
-    let capacity = traces.iter().fold(0, |acc, t| acc + 1 + t.extent.len());
-    let mut counts = FxHashMap::with_capacity_and_hasher(capacity, Default::default());
-    compute_counts_rec(&mut counts, traces);
-    write_counts(counts_file, &mut counts);
-
-    let total: Duration = total_duration(traces);
-    write_traces_rec(html_file, traces, total, 0)
-}
-
-pub fn write_style(html_file: &mut File) {
-    write!(html_file, "{}", "
-body {
-    font-family: sans-serif;
-    background: black;
-}
-.trace {
-    color: black;
-    display: inline-block;
-    border-style: solid;
-    border-color: red;
-    border-width: 1px;
-    border-radius: 5px;
-    padding: 0px;
-    margin: 1px;
-    font-size: 0px;
-}
-.task-begin {
-    border-width: 1px;
-    color: white;
-    border-color: #ff8;
-    font-size: 0px;
-}
-.miss {
-    border-color: red;
-    border-width: 1px;
-}
-.extent-0 {
-    padding: 2px;
-}
-.time-begin {
-    border-width: 4px;
-    font-size: 12px;
-    color: white;
-    border-color: #afa;
-}
-.important {
-    border-width: 3px;
-    font-size: 12px;
-    color: white;
-    border-color: #f77;
-}
-.hit {
-    padding: 0px;
-    border-color: blue;
-    border-width: 3px;
-}
-.eff {
-  color: #fff;
-  display: inline-block;
-}
-.frc {
-  color: #7f7;
-  display: inline-block;
-}
-.dur {
-  display: none
-}
-.frac-50 {
-  padding: 10px;
-  border-width: 10px;
-  font-size: 32px;
-}
-.frac-40 {
-  padding: 8px;
-  border-width: 8px;
-  font-size: 24px;
-}
-.frac-30 {
-  padding: 6px;
-  border-width: 6px;
-  font-size: 18px;
-}
-.frac-20 {
-  padding: 4px;
-  border-width: 6px;
-  font-size: 16px;
-}
-.frac-10 {
-  padding: 2px;
-  border-width: 6px;
-  font-size: 14px;
-}
-").unwrap();
-}
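
The deleted `trace.rs` rendered the query-profiling trace as nested HTML boxes, sizing each box by its share of total compile time (`duration_div`) and bucketing that share into a `frac-*` CSS class (`html_of_fraction`). A minimal stand-alone sketch of that sizing logic, simplified to use `Duration::as_nanos` instead of the manual seconds/nanoseconds arithmetic above and only a few of the original buckets:

use std::time::Duration;

// Fraction of the total run time taken by one trace entry.
fn duration_div(nom: Duration, den: Duration) -> f64 {
    nom.as_nanos() as f64 / den.as_nanos() as f64
}

// Bucket the fraction into a CSS class (coarser than the original's ten buckets).
fn css_class_for(frac: f64) -> &'static str {
    if frac > 0.50 {
        "frac-50"
    } else if frac > 0.20 {
        "frac-20"
    } else if frac > 0.05 {
        "frac-05"
    } else {
        "frac-0"
    }
}

fn main() {
    let total = Duration::from_millis(800);
    let child = Duration::from_millis(200);
    let frac = duration_div(child, total);
    println!("{:.1}% -> {}", frac * 100.0, css_class_for(frac)); // 25.0% -> frac-20
}
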
index b05bad554f406e803213e4c0fbb134411601dfa2..0c272f0c4563b01b5dbb01125bfdecf7c8c8556b 100644 (file)
@@ -202,11 +202,12 @@ pub fn spawn_thread_pool<F: FnOnce() -> R + Send, R: Send>(
     stderr: &Option<Arc<Mutex<Vec<u8>>>>,
     f: F,
 ) -> R {
-    use rayon::{ThreadPool, ThreadPoolBuilder};
+    use rayon::{ThreadBuilder, ThreadPool, ThreadPoolBuilder};
 
     let gcx_ptr = &Lock::new(0);
 
     let mut config = ThreadPoolBuilder::new()
+        .thread_name(|_| "rustc".to_string())
         .acquire_thread_handler(jobserver::acquire_thread)
         .release_thread_handler(jobserver::release_thread)
         .num_threads(threads)
@@ -225,20 +226,20 @@ pub fn spawn_thread_pool<F: FnOnce() -> R + Send, R: Send>(
                 // the thread local rustc uses. syntax_globals and syntax_pos_globals are
                 // captured and set on the new threads. ty::tls::with_thread_locals sets up
                 // thread local callbacks from libsyntax
-                let main_handler = move |worker: &mut dyn FnMut()| {
+                let main_handler = move |thread: ThreadBuilder| {
                     syntax::GLOBALS.set(syntax_globals, || {
                         syntax_pos::GLOBALS.set(syntax_pos_globals, || {
                             if let Some(stderr) = stderr {
                                 io::set_panic(Some(box Sink(stderr.clone())));
                             }
                             ty::tls::with_thread_locals(|| {
-                                ty::tls::GCX_PTR.set(gcx_ptr, || worker())
+                                ty::tls::GCX_PTR.set(gcx_ptr, || thread.run())
                             })
                         })
                     })
                 };
 
-                ThreadPool::scoped_pool(config, main_handler, with_pool).unwrap()
+                config.build_scoped(main_handler, with_pool).unwrap()
             })
         })
     })
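
The `spawn_thread_pool` hunk above moves pool startup from the removed `ThreadPool::scoped_pool` to the builder's `build_scoped`: the wrapper closure now receives the pool's thread handle, performs per-thread setup, and enters the work loop via `run()`. A minimal sketch of that shape, assuming a rayon that exposes `ThreadPoolBuilder::build_scoped` (rustc uses its own `rustc-rayon` fork); the per-thread setup and the summing workload are placeholders:

use rayon::ThreadPoolBuilder;

fn main() {
    let sum = ThreadPoolBuilder::new()
        .num_threads(2)
        .thread_name(|i| format!("worker-{}", i))
        .build_scoped(
            |thread| {
                // Per-thread setup would go here (rustc installs its thread-locals),
                // then the thread enters the pool's work loop.
                thread.run()
            },
            |pool| pool.install(|| (1..=100u64).sum::<u64>()),
        )
        .unwrap();
    assert_eq!(sum, 5050);
}
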
index d0a7eab071c311de1e85c66bc8ed89829c15cfac..08f6f43ab0cff4831c94c6b421c4e03d97928188 100644 (file)
@@ -45,7 +45,7 @@
 use syntax_pos::{BytePos, Span};
 use syntax::symbol::{Symbol, kw, sym};
 use syntax::errors::{Applicability, DiagnosticBuilder};
-use syntax::print::pprust::expr_to_string;
+use syntax::print::pprust::{self, expr_to_string};
 use syntax::visit::FnKind;
 
 use rustc::hir::{self, GenericParamKind, PatKind};
@@ -701,7 +701,8 @@ fn check_attribute(&mut self, cx: &EarlyContext<'_>, attr: &ast::Attribute) {
             }
         }
         if attr.check_name(sym::no_start) || attr.check_name(sym::crate_id) {
-            let msg = format!("use of deprecated attribute `{}`: no longer used.", attr.path);
+            let path_str = pprust::path_to_string(&attr.path);
+            let msg = format!("use of deprecated attribute `{}`: no longer used.", path_str);
             lint_deprecated_attr(cx, attr, &msg, None);
         }
     }
@@ -1240,7 +1241,7 @@ fn check_item(
         if cx.tcx.features().trivial_bounds {
             let def_id = cx.tcx.hir().local_def_id(item.hir_id);
             let predicates = cx.tcx.predicates_of(def_id);
-            for &(predicate, span) in &predicates.predicates {
+            for &(predicate, span) in predicates.predicates {
                 let predicate_kind_name = match predicate {
                     Trait(..) => "Trait",
                     TypeOutlives(..) |
index ea2e1d9ecc53f46553207c643ec5eed7c560de96..2edc8fadf45684dae76eb1bce80429c4b97e548a 100644 (file)
@@ -1,4 +1,4 @@
 syntax::register_diagnostics! {
 ;
-    E0721, // `await` keyword
+//  E0721, // `await` keyword
 }
index 0e054013cd779ed30eab29353d0dd4c1c7cc9b33..e3860e229d6b5d4b89960a3e0fc87160baf372e4 100644 (file)
@@ -255,6 +255,7 @@ macro_rules! register_passes {
                     UNUSED_MUT,
                     UNREACHABLE_CODE,
                     UNREACHABLE_PATTERNS,
+                    OVERLAPPING_PATTERNS,
                     UNUSED_MUST_USE,
                     UNUSED_UNSAFE,
                     PATH_STATEMENTS,
index 3d14a78c33f7ea74975e877b3862d3f1f417747b..aa6dfa50dddf3030267ec8325078545196ae0296 100644 (file)
@@ -631,6 +631,16 @@ fn check_type_for_ffi(&self,
                             };
                         }
 
+                        let is_non_exhaustive =
+                            def.non_enum_variant().is_field_list_non_exhaustive();
+                        if is_non_exhaustive && !def.did.is_local() {
+                            return FfiUnsafe {
+                                ty,
+                                reason: "this struct is non-exhaustive",
+                                help: None,
+                            };
+                        }
+
                         if def.non_enum_variant().fields.is_empty() {
                             return FfiUnsafe {
                                 ty,
@@ -730,8 +740,25 @@ fn check_type_for_ffi(&self,
                             }
                         }
 
+                        if def.is_variant_list_non_exhaustive() && !def.did.is_local() {
+                            return FfiUnsafe {
+                                ty,
+                                reason: "this enum is non-exhaustive",
+                                help: None,
+                            };
+                        }
+
                         // Check the contained variants.
                         for variant in &def.variants {
+                            let is_non_exhaustive = variant.is_field_list_non_exhaustive();
+                            if is_non_exhaustive && !variant.def_id.is_local() {
+                                return FfiUnsafe {
+                                    ty,
+                                    reason: "this enum has non-exhaustive variants",
+                                    help: None,
+                                };
+                            }
+
                             for field in &variant.fields {
                                 let field_ty = cx.normalize_erasing_regions(
                                     ParamEnv::reveal_all(),
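
The new hunks in `check_type_for_ffi` (the check behind the `improper_ctypes` lint) reject `#[non_exhaustive]` structs, enums, and enum variants defined in another crate, since the foreign definition may gain fields or variants and its layout cannot be relied on across the FFI boundary. A sketch of the kind of declaration this flags, with hypothetical names; note the check skips local definitions, so the warning only fires when `Status` actually comes from a different crate:

#[repr(C)]
#[non_exhaustive]
pub enum Status {
    Ok,
    Err,
}

extern "C" {
    // warning: uses type `Status`, which is not FFI-safe
    // (reason: "this enum is non-exhaustive", when `Status` is non-local)
    pub fn last_status() -> Status;
}

fn main() {}
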
index 3b3995832cb4cb1f9663c6bad8ae604fbc8e3a7e..a93946df68f9235d484b50da6fae287bada285ab 100644 (file)
@@ -156,7 +156,7 @@ fn check_must_use_ty<'tcx>(
                 }
                 ty::Opaque(def, _) => {
                     let mut has_emitted = false;
-                    for (predicate, _) in &cx.tcx.predicates_of(def).predicates {
+                    for (predicate, _) in cx.tcx.predicates_of(def).predicates {
                         if let ty::Predicate::Trait(ref poly_trait_predicate) = predicate {
                             let trait_ref = poly_trait_predicate.skip_binder().trait_ref;
                             let def_id = trait_ref.def_id;
index f989ebc6dfd8e0210c707e4523b7a550f176b64f..c28fcb1a395ff999d2637908c08b8667ea3a164d 100644 (file)
@@ -8,8 +8,8 @@ edition = "2018"
 proc-macro = true
 
 [dependencies]
-synstructure = "0.10.2"
-syn = { version = "0.15.22", features = ["full"] }
-proc-macro2 = "0.4.24"
-quote = "0.6.10"
+synstructure = "0.12.1"
+syn = { version = "1", features = ["full"] }
+proc-macro2 = "1"
+quote = "1"
 itertools = "0.8"
index a708f3191dcf84e4bb427a107542e3a0b8992b96..735cfb11b365c4876c7d8720a98183a32ea5c734 100644 (file)
@@ -15,22 +15,22 @@ fn parse_attributes(field: &syn::Field) -> Attributes {
     };
     for attr in &field.attrs {
         if let Ok(meta) = attr.parse_meta() {
-            if &meta.name().to_string() != "stable_hasher" {
+            if !meta.path().is_ident("stable_hasher") {
                 continue;
             }
             let mut any_attr = false;
             if let Meta::List(list) = meta {
                 for nested in list.nested.iter() {
                     if let NestedMeta::Meta(meta) = nested {
-                        if &meta.name().to_string() == "ignore" {
+                        if meta.path().is_ident("ignore") {
                             attrs.ignore = true;
                             any_attr = true;
                         }
-                        if &meta.name().to_string() == "project" {
+                        if meta.path().is_ident("project") {
                             if let Meta::List(list) = meta {
                                 if let Some(nested) = list.nested.iter().next() {
                                     if let NestedMeta::Meta(meta) = nested {
-                                        attrs.project = Some(meta.name());
+                                        attrs.project = meta.path().get_ident().cloned();
                                         any_attr = true;
                                     }
                                 }
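
The `parse_attributes` hunk above tracks the syn 0.15 to 1 bump from the Cargo.toml change earlier in this diff: `Meta::name()` is gone, and attribute names are matched through `Meta::path()` / `Path::is_ident()`. A small stand-alone sketch of the same parsing pattern, assuming `syn = { version = "1", features = ["full"] }`, checking a struct's fields for `#[stable_hasher(ignore)]`:

use syn::{parse_quote, ItemStruct, Meta, NestedMeta};

// Returns true if the field carries `#[stable_hasher(ignore)]`.
fn has_ignore(field: &syn::Field) -> bool {
    field.attrs.iter().any(|attr| match attr.parse_meta() {
        Ok(Meta::List(list)) if list.path.is_ident("stable_hasher") => list
            .nested
            .iter()
            .any(|n| matches!(n, NestedMeta::Meta(m) if m.path().is_ident("ignore"))),
        _ => false,
    })
}

fn main() {
    let item: ItemStruct = parse_quote! {
        struct Example {
            #[stable_hasher(ignore)]
            span: u32,
            data: u32,
        }
    };
    for field in item.fields.iter() {
        println!("{:?}: ignore = {}", field.ident, has_ignore(field));
    }
}
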
index 9a68dd0f5e3ced95e1ee9b3fd42d7b042e8fbbf5..139e1b554cf90a2597a6ec606404b8034b3ceba0 100644 (file)
@@ -495,7 +495,11 @@ pub fn rustc_queries(input: TokenStream) -> TokenStream {
                 dep_node_force_stream.extend(quote! {
                     DepKind::#name => {
                         if let Some(key) = RecoverKey::recover($tcx, $dep_node) {
-                            force_ex!($tcx, #name, key);
+                            $tcx.force_query::<crate::ty::query::queries::#name<'_>>(
+                                key,
+                                DUMMY_SP,
+                                *$dep_node
+                            );
                         } else {
                             return false;
                         }
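
In the `rustc_queries` hunk above, the generated arm now calls `force_query` directly instead of going through the removed `force_ex!` helper; `#name` is a `quote!` interpolation resolved while the proc macro runs, while `$tcx` and `$dep_node` survive into the emitted `macro_rules!` body. A tiny stand-alone sketch of this accumulate-arms-with-`quote!` pattern, assuming `proc-macro2 = "1"` and `quote = "1"` and run outside a real proc macro (the query names and generated identifiers are placeholders):

use proc_macro2::TokenStream;
use quote::{format_ident, quote};

fn main() {
    let mut arms = TokenStream::new();
    for query in &["type_of", "predicates_of"] {
        let name = format_ident!("{}", query);
        arms.extend(quote! {
            DepKind::#name => {
                tcx.force_query::<queries::#name<'_>>(key, DUMMY_SP, *dep_node);
            }
        });
    }
    // Prints the token stream that would be spliced into the generated code.
    println!("{}", arms);
}
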
index 032470e1400abbb42455be173960d7f251b236e8..18192e35f8a209fc240618799811199174936ef9 100644 (file)
@@ -22,4 +22,5 @@ rustc_index = { path = "../librustc_index" }
 rustc_serialize = { path = "../libserialize", package = "serialize" }
 stable_deref_trait = "1.0.0"
 syntax = { path = "../libsyntax" }
+syntax_expand = { path = "../libsyntax_expand" }
 syntax_pos = { path = "../libsyntax_pos" }
index 042252bc13e615c9a052a6328e6567f6b4445e1b..6a2da5d49887d3f8da42968f946ed73a3559f5af 100644 (file)
@@ -1,19 +1,20 @@
 //! Validates all used crates and extern libraries and loads their metadata
 
-use crate::cstore::{self, CStore, CrateSource, MetadataBlob};
+use crate::cstore::{self, CStore, MetadataBlob};
 use crate::locator::{self, CratePaths};
 use crate::schema::{CrateRoot, CrateDep};
-use rustc_data_structures::sync::{Lrc, RwLock, Lock};
+use rustc_data_structures::sync::{Lrc, RwLock, Lock, AtomicCell};
 
 use rustc::hir::def_id::CrateNum;
 use rustc_data_structures::svh::Svh;
+use rustc::dep_graph::DepNodeIndex;
 use rustc::middle::cstore::DepKind;
 use rustc::mir::interpret::AllocDecodingState;
 use rustc::session::{Session, CrateDisambiguator};
 use rustc::session::config::{Sanitizer, self};
 use rustc_target::spec::{PanicStrategy, TargetTriple};
 use rustc::session::search_paths::PathKind;
-use rustc::middle::cstore::{ExternCrate, ExternCrateSource};
+use rustc::middle::cstore::{CrateSource, ExternCrate, ExternCrateSource};
 use rustc::util::common::record_time;
 use rustc::util::nodemap::FxHashSet;
 use rustc::hir::map::Definitions;
 
 use syntax::ast;
 use syntax::attr;
-use syntax::ext::allocator::{global_allocator_spans, AllocatorKind};
+use syntax_expand::allocator::{global_allocator_spans, AllocatorKind};
 use syntax::symbol::{Symbol, sym};
 use syntax::{span_err, span_fatal};
 use syntax_pos::{Span, DUMMY_SP};
 use log::{debug, info, log_enabled};
 use proc_macro::bridge::client::ProcMacro;
 
-pub struct Library {
-    pub dylib: Option<(PathBuf, PathKind)>,
-    pub rlib: Option<(PathBuf, PathKind)>,
-    pub rmeta: Option<(PathBuf, PathKind)>,
+crate struct Library {
+    pub source: CrateSource,
     pub metadata: MetadataBlob,
 }
 
 pub struct CrateLoader<'a> {
-    pub sess: &'a Session,
+    sess: &'a Session,
     cstore: &'a CStore,
     local_crate_name: Symbol,
 }
@@ -188,7 +187,7 @@ fn verify_no_symbol_conflicts(&self,
     }
 
     fn register_crate(
-        &mut self,
+        &self,
         host_lib: Option<Library>,
         root: Option<&CratePaths>,
         span: Span,
@@ -196,7 +195,10 @@ fn register_crate(
         dep_kind: DepKind,
         name: Symbol
     ) -> (CrateNum, Lrc<cstore::CrateMetadata>) {
-        let crate_root = lib.metadata.get_root();
+        let _prof_timer = self.sess.prof.generic_activity("metadata_register_crate");
+
+        let Library { source, metadata } = lib;
+        let crate_root = metadata.get_root();
         self.verify_no_symbol_conflicts(span, &crate_root);
 
         let private_dep = self.sess.opts.externs.get(&name.as_str())
@@ -214,28 +216,22 @@ fn register_crate(
         let root = if let Some(root) = root {
             root
         } else {
-            crate_paths = CratePaths {
-                ident: crate_root.name.to_string(),
-                dylib: lib.dylib.clone().map(|p| p.0),
-                rlib:  lib.rlib.clone().map(|p| p.0),
-                rmeta: lib.rmeta.clone().map(|p| p.0),
-            };
+            crate_paths = CratePaths { name: crate_root.name, source: source.clone() };
             &crate_paths
         };
 
-        let Library { dylib, rlib, rmeta, metadata } = lib;
         let cnum_map = self.resolve_crate_deps(root, &crate_root, &metadata, cnum, span, dep_kind);
 
         let dependencies: Vec<CrateNum> = cnum_map.iter().cloned().collect();
 
         let raw_proc_macros =  crate_root.proc_macro_data.map(|_| {
             let temp_root;
-            let (dlsym_dylib, dlsym_root) = match &host_lib {
+            let (dlsym_source, dlsym_root) = match &host_lib {
                 Some(host_lib) =>
-                    (&host_lib.dylib, { temp_root = host_lib.metadata.get_root(); &temp_root }),
-                None => (&dylib, &crate_root),
+                    (&host_lib.source, { temp_root = host_lib.metadata.get_root(); &temp_root }),
+                None => (&source, &crate_root),
             };
-            let dlsym_dylib = dlsym_dylib.as_ref().expect("no dylib for a proc-macro crate");
+            let dlsym_dylib = dlsym_source.dylib.as_ref().expect("no dylib for a proc-macro crate");
             self.dlsym_proc_macros(&dlsym_dylib.0, dlsym_root.disambiguator, span)
         });
 
@@ -264,14 +260,10 @@ fn register_crate(
             source_map_import_info: RwLock::new(vec![]),
             alloc_decoding_state: AllocDecodingState::new(interpret_alloc_index),
             dep_kind: Lock::new(dep_kind),
-            source: cstore::CrateSource {
-                dylib,
-                rlib,
-                rmeta,
-            },
+            source,
             private_dep,
-            span,
-            raw_proc_macros
+            raw_proc_macros,
+            dep_node_index: AtomicCell::new(DepNodeIndex::INVALID),
         };
 
         let cmeta = Lrc::new(cmeta);
@@ -280,7 +272,7 @@ fn register_crate(
     }
 
     fn load_proc_macro<'b>(
-        &mut self,
+        &self,
         locate_ctxt: &mut locator::Context<'b>,
         path_kind: PathKind,
     ) -> Option<(LoadResult, Option<Library>)>
@@ -335,7 +327,7 @@ fn load_proc_macro<'b>(
     }
 
     fn resolve_crate<'b>(
-        &'b mut self,
+        &'b self,
         name: Symbol,
         span: Span,
         dep_kind: DepKind,
@@ -345,7 +337,7 @@ fn resolve_crate<'b>(
     }
 
     fn maybe_resolve_crate<'b>(
-        &'b mut self,
+        &'b self,
         name: Symbol,
         span: Span,
         mut dep_kind: DepKind,
@@ -405,7 +397,7 @@ fn maybe_resolve_crate<'b>(
         }
     }
 
-    fn load(&mut self, locate_ctxt: &mut locator::Context<'_>) -> Option<LoadResult> {
+    fn load(&self, locate_ctxt: &mut locator::Context<'_>) -> Option<LoadResult> {
         let library = locate_ctxt.maybe_load_library_crate()?;
 
         // In the case that we're loading a crate, but not matching
@@ -432,7 +424,7 @@ fn load(&mut self, locate_ctxt: &mut locator::Context<'_>) -> Option<LoadResult>
         }
     }
 
-    fn update_extern_crate(&mut self,
+    fn update_extern_crate(&self,
                            cnum: CrateNum,
                            mut extern_crate: ExternCrate,
                            visited: &mut FxHashSet<(CrateNum, bool)>)
@@ -474,7 +466,7 @@ fn update_extern_crate(&mut self,
     }
 
     // Go through the crate metadata and load any crates that it references
-    fn resolve_crate_deps(&mut self,
+    fn resolve_crate_deps(&self,
                           root: &CratePaths,
                           crate_root: &CrateRoot<'_>,
                           metadata: &MetadataBlob,
@@ -504,7 +496,7 @@ fn resolve_crate_deps(&mut self,
         })).collect()
     }
 
-    fn read_extension_crate(&mut self, name: Symbol, span: Span) -> ExtensionCrate {
+    fn read_extension_crate(&self, name: Symbol, span: Span) -> ExtensionCrate {
         info!("read extension crate `{}`", name);
         let target_triple = self.sess.opts.target_triple.clone();
         let host_triple = TargetTriple::from_triple(config::host_triple());
@@ -554,7 +546,7 @@ fn read_extension_crate(&mut self, name: Symbol, span: Span) -> ExtensionCrate {
                 (data.source.dylib.clone(), PMDSource::Registered(data))
             }
             LoadResult::Loaded(library) => {
-                let dylib = library.dylib.clone();
+                let dylib = library.source.dylib.clone();
                 let metadata = PMDSource::Owned(library);
                 (dylib, metadata)
             }
@@ -600,7 +592,7 @@ fn dlsym_proc_macros(&self,
 
     /// Look for a plugin registrar. Returns library path, crate
     /// SVH and DefIndex of the registrar function.
-    pub fn find_plugin_registrar(&mut self,
+    pub fn find_plugin_registrar(&self,
                                  span: Span,
                                  name: Symbol)
                                  -> Option<(PathBuf, CrateDisambiguator)> {
@@ -633,7 +625,7 @@ pub fn find_plugin_registrar(&mut self,
         }
     }
 
-    fn inject_panic_runtime(&mut self, krate: &ast::Crate) {
+    fn inject_panic_runtime(&self, krate: &ast::Crate) {
         // If we're only compiling an rlib, then there's no need to select a
         // panic runtime, so we just skip this section entirely.
         let any_non_rlib = self.sess.crate_types.borrow().iter().any(|ct| {
@@ -714,7 +706,7 @@ fn inject_panic_runtime(&mut self, krate: &ast::Crate) {
                                   &|data| data.root.needs_panic_runtime);
     }
 
-    fn inject_sanitizer_runtime(&mut self) {
+    fn inject_sanitizer_runtime(&self) {
         if let Some(ref sanitizer) = self.sess.opts.debugging_opts.sanitizer {
             // Sanitizers can only be used on some tested platforms with
             // executables linked to `std`
@@ -812,7 +804,7 @@ fn inject_sanitizer_runtime(&mut self) {
         }
     }
 
-    fn inject_profiler_runtime(&mut self) {
+    fn inject_profiler_runtime(&self) {
         if self.sess.opts.debugging_opts.profile ||
            self.sess.opts.cg.profile_generate.enabled()
         {
@@ -829,7 +821,7 @@ fn inject_profiler_runtime(&mut self) {
         }
     }
 
-    fn inject_allocator_crate(&mut self, krate: &ast::Crate) {
+    fn inject_allocator_crate(&self, krate: &ast::Crate) {
         let has_global_allocator = match &*global_allocator_spans(krate) {
             [span1, span2, ..] => {
                 self.sess.struct_span_err(*span2, "cannot define multiple global allocators")
@@ -968,7 +960,7 @@ fn inject_dependency_if(&self,
 }
 
 impl<'a> CrateLoader<'a> {
-    pub fn postprocess(&mut self, krate: &ast::Crate) {
+    pub fn postprocess(&self, krate: &ast::Crate) {
         self.inject_sanitizer_runtime();
         self.inject_profiler_runtime();
         self.inject_allocator_crate(krate);
@@ -979,9 +971,7 @@ pub fn postprocess(&mut self, krate: &ast::Crate) {
         }
     }
 
-    pub fn process_extern_crate(
-        &mut self, item: &ast::Item, definitions: &Definitions,
-    ) -> CrateNum {
+    pub fn process_extern_crate(&self, item: &ast::Item, definitions: &Definitions) -> CrateNum {
         match item.kind {
             ast::ItemKind::ExternCrate(orig_name) => {
                 debug!("resolving extern crate stmt. ident: {} orig_name: {:?}",
@@ -1014,18 +1004,13 @@ pub fn process_extern_crate(
                     },
                     &mut FxHashSet::default(),
                 );
-                self.cstore.add_extern_mod_stmt_cnum(item.id, cnum);
                 cnum
             }
             _ => bug!(),
         }
     }
 
-    pub fn process_path_extern(
-        &mut self,
-        name: Symbol,
-        span: Span,
-    ) -> CrateNum {
+    pub fn process_path_extern(&self, name: Symbol, span: Span) -> CrateNum {
         let cnum = self.resolve_crate(name, span, DepKind::Explicit, None).0;
 
         self.update_extern_crate(
@@ -1043,11 +1028,7 @@ pub fn process_path_extern(
         cnum
     }
 
-    pub fn maybe_process_path_extern(
-        &mut self,
-        name: Symbol,
-        span: Span,
-    ) -> Option<CrateNum> {
+    pub fn maybe_process_path_extern(&self, name: Symbol, span: Span) -> Option<CrateNum> {
         let cnum = self.maybe_resolve_crate(name, span, DepKind::Explicit, None).ok()?.0;
 
         self.update_extern_crate(
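
Throughout the `CrateLoader` changes above, methods drop `&mut self` in favour of `&self`: the mutable state they touch (metadata tables, dependency lists, the new `dep_node_index`) already lives behind `Lock`, `RwLock`, or `AtomicCell`, so a shared reference is enough. A minimal sketch of the same interior-mutability pattern, using a std `Mutex` in place of the rustc-internal sync types and a simplified registry type:

use std::sync::Mutex;

struct CrateRegistry {
    // Interior mutability lets `register` take `&self`.
    crates: Mutex<Vec<String>>,
}

impl CrateRegistry {
    fn register(&self, name: &str) -> usize {
        let mut crates = self.crates.lock().unwrap();
        crates.push(name.to_string());
        crates.len() - 1 // crate number just assigned
    }
}

fn main() {
    let registry = CrateRegistry { crates: Mutex::new(Vec::new()) };
    println!("core -> {}", registry.register("core"));
    println!("std  -> {}", registry.register("std"));
}
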
index 833c846573f6339176a752d8b68cae94792921eb..9a0b98ffb73a9af45bf2a1231256ef0b6042630a 100644 (file)
@@ -2,21 +2,18 @@
 // crates and libraries
 
 use crate::schema;
+use rustc::dep_graph::DepNodeIndex;
 use rustc::hir::def_id::{CrateNum, DefIndex};
 use rustc::hir::map::definitions::DefPathTable;
-use rustc::middle::cstore::{DepKind, ExternCrate, MetadataLoader};
+use rustc::middle::cstore::{CrateSource, DepKind, ExternCrate, MetadataLoader};
 use rustc::mir::interpret::AllocDecodingState;
 use rustc_index::vec::IndexVec;
-use rustc::util::nodemap::{FxHashMap, NodeMap};
-
-use rustc_data_structures::sync::{Lrc, RwLock, Lock};
+use rustc::util::nodemap::FxHashMap;
+use rustc_data_structures::sync::{Lrc, RwLock, Lock, MetadataRef, AtomicCell};
 use syntax::ast;
-use syntax::ext::base::SyntaxExtension;
+use syntax_expand::base::SyntaxExtension;
 use syntax_pos;
-
-pub use rustc::middle::cstore::{NativeLibrary, NativeLibraryKind, LinkagePreference};
-pub use rustc::middle::cstore::NativeLibraryKind::*;
-pub use rustc::middle::cstore::{CrateSource, LibSource, ForeignModule};
+use proc_macro::bridge::client::ProcMacro;
 
 pub use crate::cstore_impl::{provide, provide_extern};
 
 // local crate numbers (as generated during this session). Each external
 // crate may refer to types in other external crates, and each has their
 // own crate numbers.
-pub type CrateNumMap = IndexVec<CrateNum, CrateNum>;
-
-pub use rustc_data_structures::sync::MetadataRef;
-use syntax_pos::Span;
-use proc_macro::bridge::client::ProcMacro;
+crate type CrateNumMap = IndexVec<CrateNum, CrateNum>;
 
-pub struct MetadataBlob(pub MetadataRef);
+crate struct MetadataBlob(pub MetadataRef);
 
 /// Holds information about a syntax_pos::SourceFile imported from another crate.
 /// See `imported_source_files()` for more information.
-pub struct ImportedSourceFile {
+crate struct ImportedSourceFile {
     /// This SourceFile's byte-offset within the source_map of its original crate
     pub original_start_pos: syntax_pos::BytePos,
     /// The end of this SourceFile within the source_map of its original crate
@@ -44,52 +37,66 @@ pub struct ImportedSourceFile {
 }
 
 pub struct CrateMetadata {
-    /// Information about the extern crate that caused this crate to
-    /// be loaded. If this is `None`, then the crate was injected
-    /// (e.g., by the allocator)
-    pub extern_crate: Lock<Option<ExternCrate>>,
-
-    pub blob: MetadataBlob,
-    pub cnum_map: CrateNumMap,
-    pub cnum: CrateNum,
-    pub dependencies: Lock<Vec<CrateNum>>,
-    pub source_map_import_info: RwLock<Vec<ImportedSourceFile>>,
-
-    /// Used for decoding interpret::AllocIds in a cached & thread-safe manner.
-    pub alloc_decoding_state: AllocDecodingState,
+    /// The primary crate data - binary metadata blob.
+    crate blob: MetadataBlob,
 
-    // NOTE(eddyb) we pass `'static` to a `'tcx` parameter because this
-    // lifetime is only used behind `Lazy`, and therefore acts like an
-    // universal (`for<'tcx>`), that is paired up with whichever `TyCtxt`
-    // is being used to decode those values.
-    pub root: schema::CrateRoot<'static>,
+    // --- Some data pre-decoded from the metadata blob, usually for performance ---
 
+    /// Properties of the whole crate.
+    /// NOTE(eddyb) we pass `'static` to a `'tcx` parameter because this
+    /// lifetime is only used behind `Lazy`, and therefore acts like a
+    /// universal (`for<'tcx>`) that is paired up with whichever `TyCtxt`
+    /// is being used to decode those values.
+    crate root: schema::CrateRoot<'static>,
     /// For each definition in this crate, we encode a key. When the
     /// crate is loaded, we read all the keys and put them in this
     /// hashmap, which gives the reverse mapping. This allows us to
     /// quickly retrace a `DefPath`, which is needed for incremental
     /// compilation support.
-    pub def_path_table: Lrc<DefPathTable>,
-
-    pub trait_impls: FxHashMap<(u32, DefIndex), schema::Lazy<[DefIndex]>>,
-
-    pub dep_kind: Lock<DepKind>,
-    pub source: CrateSource,
-
+    crate def_path_table: Lrc<DefPathTable>,
+    /// Trait impl data.
+    /// FIXME: Used only from queries and can use query cache,
+    /// so pre-decoding can probably be avoided.
+    crate trait_impls: FxHashMap<(u32, DefIndex), schema::Lazy<[DefIndex]>>,
+    /// Proc macro descriptions for this crate, if it's a proc macro crate.
+    crate raw_proc_macros: Option<&'static [ProcMacro]>,
+    /// Source maps for code from the crate.
+    crate source_map_import_info: RwLock<Vec<ImportedSourceFile>>,
+    /// Used for decoding interpret::AllocIds in a cached & thread-safe manner.
+    crate alloc_decoding_state: AllocDecodingState,
+    /// The `DepNodeIndex` of the `DepNode` representing this upstream crate.
+    /// It is initialized on the first access in `get_crate_dep_node_index()`.
+    /// Do not access the value directly, as it might not have been initialized yet.
+    /// The field must always be initialized to `DepNodeIndex::INVALID`.
+    crate dep_node_index: AtomicCell<DepNodeIndex>,
+
+    // --- Other significant crate properties ---
+
+    /// ID of this crate, from the current compilation session's point of view.
+    crate cnum: CrateNum,
+    /// Maps crate IDs as they were seen from this crate's compilation session into
+    /// IDs as they are seen from the current compilation session.
+    crate cnum_map: CrateNumMap,
+    /// Same ID set as `cnum_map` plus maybe some injected crates like panic runtime.
+    crate dependencies: Lock<Vec<CrateNum>>,
+    /// How to link (or not link) this crate to the currently compiled crate.
+    crate dep_kind: Lock<DepKind>,
+    /// Filesystem location of this crate.
+    crate source: CrateSource,
     /// Whether or not this crate should be considered a private dependency
     /// for purposes of the 'exported_private_dependencies' lint
-    pub private_dep: bool,
+    crate private_dep: bool,
 
-    pub span: Span,
+    // --- Data used only for improving diagnostics ---
 
-    pub raw_proc_macros: Option<&'static [ProcMacro]>,
+    /// Information about the `extern crate` item or path that caused this crate to be loaded.
+    /// If this is `None`, then the crate was injected (e.g., by the allocator).
+    crate extern_crate: Lock<Option<ExternCrate>>,
 }
 
 pub struct CStore {
     metas: RwLock<IndexVec<CrateNum, Option<Lrc<CrateMetadata>>>>,
-    /// Map from NodeId's of local extern crate statements to crate numbers
-    extern_mod_crate_map: Lock<NodeMap<CrateNum>>,
-    pub metadata_loader: Box<dyn MetadataLoader + Sync>,
+    crate metadata_loader: Box<dyn MetadataLoader + Sync>,
 }
 
 pub enum LoadedMacro {
@@ -105,30 +112,29 @@ pub fn new(metadata_loader: Box<dyn MetadataLoader + Sync>) -> CStore {
             // corresponding `CrateNum`. This first entry will always remain
             // `None`.
             metas: RwLock::new(IndexVec::from_elem_n(None, 1)),
-            extern_mod_crate_map: Default::default(),
             metadata_loader,
         }
     }
 
-    pub(super) fn alloc_new_crate_num(&self) -> CrateNum {
+    crate fn alloc_new_crate_num(&self) -> CrateNum {
         let mut metas = self.metas.borrow_mut();
         let cnum = CrateNum::new(metas.len());
         metas.push(None);
         cnum
     }
 
-    pub(super) fn get_crate_data(&self, cnum: CrateNum) -> Lrc<CrateMetadata> {
+    crate fn get_crate_data(&self, cnum: CrateNum) -> Lrc<CrateMetadata> {
         self.metas.borrow()[cnum].clone()
             .unwrap_or_else(|| panic!("Failed to get crate data for {:?}", cnum))
     }
 
-    pub(super) fn set_crate_data(&self, cnum: CrateNum, data: Lrc<CrateMetadata>) {
+    crate fn set_crate_data(&self, cnum: CrateNum, data: Lrc<CrateMetadata>) {
         let mut metas = self.metas.borrow_mut();
         assert!(metas[cnum].is_none(), "Overwriting crate metadata entry");
         metas[cnum] = Some(data);
     }
 
-    pub(super) fn iter_crate_data<I>(&self, mut i: I)
+    crate fn iter_crate_data<I>(&self, mut i: I)
         where I: FnMut(CrateNum, &Lrc<CrateMetadata>)
     {
         for (k, v) in self.metas.borrow().iter_enumerated() {
@@ -138,16 +144,14 @@ pub(super) fn iter_crate_data<I>(&self, mut i: I)
         }
     }
 
-    pub(super) fn crate_dependencies_in_rpo(&self, krate: CrateNum) -> Vec<CrateNum> {
+    crate fn crate_dependencies_in_rpo(&self, krate: CrateNum) -> Vec<CrateNum> {
         let mut ordering = Vec::new();
         self.push_dependencies_in_postorder(&mut ordering, krate);
         ordering.reverse();
         ordering
     }
 
-    pub(super) fn push_dependencies_in_postorder(&self,
-                                                 ordering: &mut Vec<CrateNum>,
-                                                 krate: CrateNum) {
+    crate fn push_dependencies_in_postorder(&self, ordering: &mut Vec<CrateNum>, krate: CrateNum) {
         if ordering.contains(&krate) {
             return;
         }
@@ -162,7 +166,7 @@ pub(super) fn push_dependencies_in_postorder(&self,
         ordering.push(krate);
     }
 
-    pub(super) fn do_postorder_cnums_untracked(&self) -> Vec<CrateNum> {
+    crate fn do_postorder_cnums_untracked(&self) -> Vec<CrateNum> {
         let mut ordering = Vec::new();
         for (num, v) in self.metas.borrow().iter_enumerated() {
             if let &Some(_) = v {
@@ -171,12 +175,4 @@ pub(super) fn do_postorder_cnums_untracked(&self) -> Vec<CrateNum> {
         }
         return ordering
     }
-
-    pub(super) fn add_extern_mod_stmt_cnum(&self, emod_id: ast::NodeId, cnum: CrateNum) {
-        self.extern_mod_crate_map.borrow_mut().insert(emod_id, cnum);
-    }
-
-    pub(super) fn do_extern_mod_stmt_cnum(&self, emod_id: ast::NodeId) -> Option<CrateNum> {
-        self.extern_mod_crate_map.borrow().get(&emod_id).cloned()
-    }
 }
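
The comment on the new `dep_node_index` field describes a lazily initialized cache guarded by a sentinel: the field starts at `DepNodeIndex::INVALID` and is filled in by `get_crate_dep_node_index()` on first use. A stand-alone sketch of that idiom, substituting a std `AtomicU32` for the `AtomicCell<DepNodeIndex>` used here, with `42` as a stand-in for the real dep-graph lookup:

use std::sync::atomic::{AtomicU32, Ordering};

const INVALID: u32 = u32::MAX; // sentinel: "not computed yet"

struct CrateMetadata {
    dep_node_index: AtomicU32,
}

impl CrateMetadata {
    fn get_crate_dep_node_index(&self) -> u32 {
        let mut index = self.dep_node_index.load(Ordering::Relaxed);
        if index == INVALID {
            // Stand-in for looking up this crate's DepNode in the dep graph.
            index = 42;
            self.dep_node_index.store(index, Ordering::Relaxed);
        }
        index
    }
}

fn main() {
    let meta = CrateMetadata { dep_node_index: AtomicU32::new(INVALID) };
    println!("{}", meta.get_crate_dep_node_index()); // computed on first access
    println!("{}", meta.get_crate_dep_node_index()); // cached afterwards
}

Racing threads may both perform the lookup, but they store the same index, so the cache stays consistent without a lock.
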
index 91532d84473ab173338a8b120bc7e283388afb9c..4cd1ff7b4a4ff8ed47ae02a8b3742a8df27f02a5 100644 (file)
@@ -6,8 +6,7 @@
 use crate::schema;
 
 use rustc::ty::query::QueryConfig;
-use rustc::middle::cstore::{CrateStore, DepKind,
-                            EncodedMetadata, NativeLibraryKind};
+use rustc::middle::cstore::{CrateSource, CrateStore, DepKind, EncodedMetadata, NativeLibraryKind};
 use rustc::middle::exported_symbols::ExportedSymbol;
 use rustc::middle::stability::DeprecationEntry;
 use rustc::middle::dependency_format::Linkage;
@@ -33,6 +32,7 @@
 use syntax::edition::Edition;
 use syntax::parse::source_file_to_stream;
 use syntax::parse::parser::emit_unclosed_delims;
+use syntax::source_map::Spanned;
 use syntax::symbol::Symbol;
 use syntax_pos::{Span, FileName};
 use rustc_index::bit_set::BitSet;
@@ -47,23 +47,22 @@ pub fn provide_extern<$lt>(providers: &mut Providers<$lt>) {
                 $tcx: TyCtxt<$lt>,
                 def_id_arg: T,
             ) -> <ty::queries::$name<$lt> as QueryConfig<$lt>>::Value {
+                let _prof_timer =
+                    $tcx.prof.generic_activity("metadata_decode_entry");
+
                 #[allow(unused_variables)]
                 let ($def_id, $other) = def_id_arg.into_args();
                 assert!(!$def_id.is_local());
 
-                let def_path_hash = $tcx.def_path_hash(DefId {
-                    krate: $def_id.krate,
-                    index: CRATE_DEF_INDEX
-                });
-                let dep_node = def_path_hash
-                    .to_dep_node(rustc::dep_graph::DepKind::CrateMetadata);
-                // The DepNodeIndex of the DepNode::CrateMetadata should be
-                // cached somewhere, so that we can use read_index().
-                $tcx.dep_graph.read(dep_node);
-
                 let $cdata = $tcx.crate_data_as_rc_any($def_id.krate);
                 let $cdata = $cdata.downcast_ref::<cstore::CrateMetadata>()
                     .expect("CrateStore created data is not a CrateMetadata");
+
+                if $tcx.dep_graph.is_fully_enabled() {
+                    let crate_dep_node_index = $cdata.get_crate_dep_node_index($tcx);
+                    $tcx.dep_graph.read_index(crate_dep_node_index);
+                }
+
                 $compute
             })*
 
@@ -98,11 +97,9 @@ fn into_args(self) -> (DefId, DefId) { (self.0.as_def_id(), self.1) }
     generics_of => {
         tcx.arena.alloc(cdata.get_generics(def_id.index, tcx.sess))
     }
-    predicates_of => { tcx.arena.alloc(cdata.get_predicates(def_id.index, tcx)) }
-    predicates_defined_on => {
-        tcx.arena.alloc(cdata.get_predicates_defined_on(def_id.index, tcx))
-    }
-    super_predicates_of => { tcx.arena.alloc(cdata.get_super_predicates(def_id.index, tcx)) }
+    predicates_of => { cdata.get_predicates(def_id.index, tcx) }
+    predicates_defined_on => { cdata.get_predicates_defined_on(def_id.index, tcx) }
+    super_predicates_of => { cdata.get_super_predicates(def_id.index, tcx) }
     trait_def => {
         tcx.arena.alloc(cdata.get_trait_def(def_id.index, tcx.sess))
     }
@@ -270,7 +267,11 @@ pub fn provide(providers: &mut Providers<'_>) {
     // resolve! Does this work? Unsure! That's what the issue is about
     *providers = Providers {
         is_dllimport_foreign_item: |tcx, id| {
-            tcx.native_library_kind(id) == Some(NativeLibraryKind::NativeUnknown)
+            match tcx.native_library_kind(id) {
+                Some(NativeLibraryKind::NativeUnknown) |
+                Some(NativeLibraryKind::NativeRawDylib) => true,
+                _ => false,
+            }
         },
         is_statically_included_foreign_item: |tcx, id| {
             match tcx.native_library_kind(id) {
@@ -410,26 +411,12 @@ pub fn export_macros_untracked(&self, cnum: CrateNum) {
         }
     }
 
-    pub fn dep_kind_untracked(&self, cnum: CrateNum) -> DepKind {
-        let data = self.get_crate_data(cnum);
-        let r = *data.dep_kind.lock();
-        r
-    }
-
     pub fn crate_edition_untracked(&self, cnum: CrateNum) -> Edition {
         self.get_crate_data(cnum).root.edition
     }
 
-    pub fn struct_field_names_untracked(&self, def: DefId) -> Vec<ast::Name> {
-        self.get_crate_data(def.krate).get_struct_field_names(def.index)
-    }
-
-    pub fn ctor_kind_untracked(&self, def: DefId) -> def::CtorKind {
-        self.get_crate_data(def.krate).get_ctor_kind(def.index)
-    }
-
-    pub fn item_attrs_untracked(&self, def: DefId, sess: &Session) -> Lrc<[ast::Attribute]> {
-        self.get_crate_data(def.krate).get_item_attrs(def.index, sess)
+    pub fn struct_field_names_untracked(&self, def: DefId, sess: &Session) -> Vec<Spanned<Symbol>> {
+        self.get_crate_data(def.krate).get_struct_field_names(def.index, sess)
     }
 
     pub fn item_children_untracked(
@@ -444,6 +431,8 @@ pub fn item_children_untracked(
     }
 
     pub fn load_macro_untracked(&self, id: DefId, sess: &Session) -> LoadedMacro {
+        let _prof_timer = sess.prof.generic_activity("metadata_load_macro");
+
         let data = self.get_crate_data(id.krate);
         if data.is_proc_macro_crate() {
             return LoadedMacro::ProcMacro(data.load_proc_macro(id.index, sess));
@@ -487,6 +476,10 @@ pub fn load_macro_untracked(&self, id: DefId, sess: &Session) -> LoadedMacro {
     pub fn associated_item_cloned_untracked(&self, def: DefId) -> ty::AssocItem {
         self.get_crate_data(def.krate).get_associated_item(def.index)
     }
+
+    pub fn crate_source_untracked(&self, cnum: CrateNum) -> CrateSource {
+        self.get_crate_data(cnum).source.clone()
+    }
 }
 
 impl CrateStore for cstore::CStore {
@@ -521,20 +514,10 @@ fn crate_hash_untracked(&self, cnum: CrateNum) -> Svh
     /// parent `DefId` as well as some idea of what kind of data the
     /// `DefId` refers to.
     fn def_key(&self, def: DefId) -> DefKey {
-        // Note: loading the def-key (or def-path) for a def-id is not
-        // a *read* of its metadata. This is because the def-id is
-        // really just an interned shorthand for a def-path, which is the
-        // canonical name for an item.
-        //
-        // self.dep_graph.read(DepNode::MetaData(def));
         self.get_crate_data(def.krate).def_key(def.index)
     }
 
     fn def_path(&self, def: DefId) -> DefPath {
-        // See `Note` above in `def_key()` for why this read is
-        // commented out:
-        //
-        // self.dep_graph.read(DepNode::MetaData(def));
         self.get_crate_data(def.krate).def_path(def.index)
     }
 
@@ -553,11 +536,6 @@ fn crates_untracked(&self) -> Vec<CrateNum>
         result
     }
 
-    fn extern_mod_stmt_cnum_untracked(&self, emod_id: ast::NodeId) -> Option<CrateNum>
-    {
-        self.do_extern_mod_stmt_cnum(emod_id)
-    }
-
     fn postorder_cnums_untracked(&self) -> Vec<CrateNum> {
         self.do_postorder_cnums_untracked()
     }
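
Two of the hunks above add `let _prof_timer = ...generic_activity("...")` guards so metadata decoding and macro loading show up in the compiler's self-profile. The idiom is an RAII timer: the guard is bound to `_prof_timer` so it lives until the end of the scope and records the elapsed activity when dropped. A simplified stand-in (the real self-profiler records structured events rather than printing):

use std::time::Instant;

struct ActivityTimer {
    label: &'static str,
    start: Instant,
}

impl Drop for ActivityTimer {
    fn drop(&mut self) {
        // Runs when the guard goes out of scope, i.e. when the activity ends.
        println!("{}: {:?}", self.label, self.start.elapsed());
    }
}

fn generic_activity(label: &'static str) -> ActivityTimer {
    ActivityTimer { label, start: Instant::now() }
}

fn main() {
    let _prof_timer = generic_activity("metadata_decode_entry");
    // ... decoding work would happen here ...
}
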
index 34f86707ad3222606bbb5960daf50db4c8d6fc28..b8b0030244028d25c343b8f9cd7aba7ffb93b058 100644 (file)
@@ -1,18 +1,20 @@
 // Decoding metadata from a single crate's metadata
 
-use crate::cstore::{self, CrateMetadata, MetadataBlob, NativeLibrary, ForeignModule};
+use crate::cstore::{self, CrateMetadata, MetadataBlob};
 use crate::schema::*;
+use crate::table::{FixedSizeEncoding, PerDefTable};
 
 use rustc_index::vec::IndexVec;
 use rustc_data_structures::sync::{Lrc, ReadGuard};
 use rustc::hir::map::{DefKey, DefPath, DefPathData, DefPathHash};
 use rustc::hir;
-use rustc::middle::cstore::LinkagePreference;
+use rustc::middle::cstore::{LinkagePreference, NativeLibrary, ForeignModule};
 use rustc::middle::exported_symbols::{ExportedSymbol, SymbolExportLevel};
 use rustc::hir::def::{self, Res, DefKind, CtorOf, CtorKind};
 use rustc::hir::def_id::{CrateNum, DefId, DefIndex, LocalDefId, CRATE_DEF_INDEX, LOCAL_CRATE};
 use rustc_data_structures::fingerprint::Fingerprint;
 use rustc_data_structures::fx::FxHashMap;
+use rustc::dep_graph::{DepNodeIndex, DepKind};
 use rustc::middle::lang_items;
 use rustc::mir::{self, interpret};
 use rustc::mir::interpret::AllocDecodingSession;
 
 use std::io;
 use std::mem;
+use std::num::NonZeroUsize;
 use std::u32;
 
-use rustc_serialize::{Decodable, Decoder, SpecializedDecoder, opaque};
+use rustc_serialize::{Decodable, Decoder, Encodable, SpecializedDecoder, opaque};
 use syntax::attr;
 use syntax::ast::{self, Ident};
-use syntax::source_map;
+use syntax::source_map::{self, respan, Spanned};
 use syntax::symbol::{Symbol, sym};
-use syntax::ext::base::{MacroKind, SyntaxExtensionKind, SyntaxExtension};
+use syntax_expand::base::{MacroKind, SyntaxExtensionKind, SyntaxExtension};
 use syntax_pos::{self, Span, BytePos, Pos, DUMMY_SP, symbol::{InternedString}};
 use log::debug;
 use proc_macro::bridge::client::ProcMacro;
-use syntax::ext::proc_macro::{AttrProcMacro, ProcMacroDerive, BangProcMacro};
+use syntax_expand::proc_macro::{AttrProcMacro, ProcMacroDerive, BangProcMacro};
 
-pub struct DecodeContext<'a, 'tcx> {
+crate struct DecodeContext<'a, 'tcx> {
     opaque: opaque::Decoder<'a>,
     cdata: Option<&'a CrateMetadata>,
     sess: Option<&'tcx Session>,
@@ -53,7 +56,7 @@ pub struct DecodeContext<'a, 'tcx> {
 }
 
 /// Abstract over the various ways one can create metadata decoders.
-pub trait Metadata<'a, 'tcx>: Copy {
+crate trait Metadata<'a, 'tcx>: Copy {
     fn raw_bytes(self) -> &'a [u8];
     fn cdata(self) -> Option<&'a CrateMetadata> { None }
     fn sess(self) -> Option<&'tcx Session> { None }
@@ -128,31 +131,31 @@ fn tcx(self) -> Option<TyCtxt<'tcx>> {
     }
 }
 
-impl<'a, 'tcx, T: Decodable> Lazy<T> {
-    pub fn decode<M: Metadata<'a, 'tcx>>(self, meta: M) -> T {
-        let mut dcx = meta.decoder(self.position);
+impl<'a, 'tcx, T: Encodable + Decodable> Lazy<T> {
+    crate fn decode<M: Metadata<'a, 'tcx>>(self, metadata: M) -> T {
+        let mut dcx = metadata.decoder(self.position.get());
         dcx.lazy_state = LazyState::NodeStart(self.position);
         T::decode(&mut dcx).unwrap()
     }
 }
 
-impl<'a: 'x, 'tcx: 'x, 'x, T: Decodable> Lazy<[T]> {
-    pub fn decode<M: Metadata<'a, 'tcx>>(
+impl<'a: 'x, 'tcx: 'x, 'x, T: Encodable + Decodable> Lazy<[T]> {
+    crate fn decode<M: Metadata<'a, 'tcx>>(
         self,
-        meta: M,
+        metadata: M,
     ) -> impl ExactSizeIterator<Item = T> + Captures<'a> + Captures<'tcx> + 'x {
-        let mut dcx = meta.decoder(self.position);
+        let mut dcx = metadata.decoder(self.position.get());
         dcx.lazy_state = LazyState::NodeStart(self.position);
         (0..self.meta).map(move |_| T::decode(&mut dcx).unwrap())
     }
 }
 
 impl<'a, 'tcx> DecodeContext<'a, 'tcx> {
-    pub fn tcx(&self) -> TyCtxt<'tcx> {
+    fn tcx(&self) -> TyCtxt<'tcx> {
         self.tcx.expect("missing TyCtxt in DecodeContext")
     }
 
-    pub fn cdata(&self) -> &'a CrateMetadata {
+    fn cdata(&self) -> &'a CrateMetadata {
         self.cdata.expect("missing CrateMetadata in DecodeContext")
     }
 
@@ -165,13 +168,14 @@ fn read_lazy_with_meta<T: ?Sized + LazyMeta>(
         let position = match self.lazy_state {
             LazyState::NoNode => bug!("read_lazy_with_meta: outside of a metadata node"),
             LazyState::NodeStart(start) => {
+                let start = start.get();
                 assert!(distance + min_size <= start);
                 start - distance - min_size
             }
-            LazyState::Previous(last_min_end) => last_min_end + distance,
+            LazyState::Previous(last_min_end) => last_min_end.get() + distance,
         };
-        self.lazy_state = LazyState::Previous(position + min_size);
-        Ok(Lazy::from_position_and_meta(position, meta))
+        self.lazy_state = LazyState::Previous(NonZeroUsize::new(position + min_size).unwrap());
+        Ok(Lazy::from_position_and_meta(NonZeroUsize::new(position).unwrap(), meta))
     }
 }
 
@@ -234,13 +238,13 @@ fn map_encoded_cnum_to_current(&self, cnum: CrateNum) -> CrateNum {
     }
 }
 
-impl<'a, 'tcx, T> SpecializedDecoder<Lazy<T>> for DecodeContext<'a, 'tcx> {
+impl<'a, 'tcx, T: Encodable> SpecializedDecoder<Lazy<T>> for DecodeContext<'a, 'tcx> {
     fn specialized_decode(&mut self) -> Result<Lazy<T>, Self::Error> {
         self.read_lazy_with_meta(())
     }
 }
 
-impl<'a, 'tcx, T> SpecializedDecoder<Lazy<[T]>> for DecodeContext<'a, 'tcx> {
+impl<'a, 'tcx, T: Encodable> SpecializedDecoder<Lazy<[T]>> for DecodeContext<'a, 'tcx> {
     fn specialized_decode(&mut self) -> Result<Lazy<[T]>, Self::Error> {
         let len = self.read_usize()?;
         if len == 0 {
@@ -251,6 +255,14 @@ fn specialized_decode(&mut self) -> Result<Lazy<[T]>, Self::Error> {
     }
 }
 
+impl<'a, 'tcx, T> SpecializedDecoder<Lazy<PerDefTable<T>>> for DecodeContext<'a, 'tcx>
+    where Option<T>: FixedSizeEncoding,
+{
+    fn specialized_decode(&mut self) -> Result<Lazy<PerDefTable<T>>, Self::Error> {
+        let len = self.read_usize()?;
+        self.read_lazy_with_meta(len)
+    }
+}
 
 impl<'a, 'tcx> SpecializedDecoder<DefId> for DecodeContext<'a, 'tcx> {
     #[inline]
@@ -378,24 +390,28 @@ fn specialized_decode(&mut self) -> Result<mir::ClearCrossCrate<T>, Self::Error>
 implement_ty_decoder!( DecodeContext<'a, 'tcx> );
 
 impl<'tcx> MetadataBlob {
-    pub fn is_compatible(&self) -> bool {
+    crate fn is_compatible(&self) -> bool {
         self.raw_bytes().starts_with(METADATA_HEADER)
     }
 
-    pub fn get_rustc_version(&self) -> String {
-        Lazy::<String>::from_position(METADATA_HEADER.len() + 4).decode(self)
+    crate fn get_rustc_version(&self) -> String {
+        Lazy::<String>::from_position(
+            NonZeroUsize::new(METADATA_HEADER.len() + 4).unwrap(),
+        ).decode(self)
     }
 
-    pub fn get_root(&self) -> CrateRoot<'tcx> {
+    crate fn get_root(&self) -> CrateRoot<'tcx> {
         let slice = self.raw_bytes();
         let offset = METADATA_HEADER.len();
         let pos = (((slice[offset + 0] as u32) << 24) | ((slice[offset + 1] as u32) << 16) |
                    ((slice[offset + 2] as u32) << 8) |
                    ((slice[offset + 3] as u32) << 0)) as usize;
-        Lazy::<CrateRoot<'tcx>>::from_position(pos).decode(self)
+        Lazy::<CrateRoot<'tcx>>::from_position(
+            NonZeroUsize::new(pos).unwrap(),
+        ).decode(self)
     }
 
-    pub fn list_crate_metadata(&self,
+    crate fn list_crate_metadata(&self,
                                out: &mut dyn io::Write) -> io::Result<()> {
         write!(out, "=External Dependencies=\n")?;
         let root = self.get_root();
@@ -448,7 +464,7 @@ fn def_kind(&self) -> Option<DefKind> {
 }
 
 impl<'a, 'tcx> CrateMetadata {
-    pub fn is_proc_macro_crate(&self) -> bool {
+    crate fn is_proc_macro_crate(&self) -> bool {
         self.root.proc_macro_decls_static.is_some()
     }
 
@@ -457,27 +473,20 @@ fn is_proc_macro(&self, id: DefIndex) -> bool {
             self.root.proc_macro_data.unwrap().decode(self).find(|x| *x == id).is_some()
     }
 
-    fn entry_unless_proc_macro(&self, id: DefIndex) -> Option<Entry<'tcx>> {
-        match self.is_proc_macro(id) {
-            true => None,
-            false => Some(self.entry(id)),
-        }
-    }
-
-    fn maybe_entry(&self, item_id: DefIndex) -> Option<Lazy<Entry<'tcx>>> {
-        self.root.entries_index.lookup(self.blob.raw_bytes(), item_id)
+    fn maybe_kind(&self, item_id: DefIndex) -> Option<EntryKind<'tcx>> {
+        self.root.per_def.kind.get(self, item_id).map(|k| k.decode(self))
     }
 
-    fn entry(&self, item_id: DefIndex) -> Entry<'tcx> {
-        match self.maybe_entry(item_id) {
-            None => {
-                bug!("entry: id not found: {:?} in crate {:?} with number {}",
-                     item_id,
-                     self.root.name,
-                     self.cnum)
-            }
-            Some(d) => d.decode(self),
-        }
+    fn kind(&self, item_id: DefIndex) -> EntryKind<'tcx> {
+        assert!(!self.is_proc_macro(item_id));
+        self.maybe_kind(item_id).unwrap_or_else(|| {
+            bug!(
+                "CrateMetadata::kind({:?}): id not found, in crate {:?} with number {}",
+                item_id,
+                self.root.name,
+                self.cnum,
+            )
+        })
     }
 
     fn local_def_id(&self, index: DefIndex) -> DefId {
@@ -498,7 +507,7 @@ fn raw_proc_macro(&self, id: DefIndex) -> &ProcMacro {
         &self.raw_proc_macros.unwrap()[pos]
     }
 
-    pub fn item_name(&self, item_index: DefIndex) -> Symbol {
+    crate fn item_name(&self, item_index: DefIndex) -> Symbol {
         if !self.is_proc_macro(item_index) {
             self.def_key(item_index)
                 .disambiguated_data
@@ -511,9 +520,9 @@ pub fn item_name(&self, item_index: DefIndex) -> Symbol {
         }
     }
 
-    pub fn def_kind(&self, index: DefIndex) -> Option<DefKind> {
+    crate fn def_kind(&self, index: DefIndex) -> Option<DefKind> {
         if !self.is_proc_macro(index) {
-            self.entry(index).kind.def_kind()
+            self.kind(index).def_kind()
         } else {
             Some(DefKind::Macro(
                 macro_kind(self.raw_proc_macro(index))
@@ -521,8 +530,8 @@ pub fn def_kind(&self, index: DefIndex) -> Option<DefKind> {
         }
     }
 
-    pub fn get_span(&self, index: DefIndex, sess: &Session) -> Span {
-        self.entry(index).span.decode((self, sess))
+    crate fn get_span(&self, index: DefIndex, sess: &Session) -> Span {
+        self.root.per_def.span.get(self, index).unwrap().decode((self, sess))
     }
 
     crate fn load_proc_macro(&self, id: DefIndex, sess: &Session) -> SyntaxExtension {
@@ -551,12 +560,12 @@ pub fn get_span(&self, index: DefIndex, sess: &Session) -> Span {
             helper_attrs,
             self.root.edition,
             Symbol::intern(name),
-            &self.get_attributes(&self.entry(id), sess),
+            &self.get_item_attrs(id, sess),
         )
     }
 
-    pub fn get_trait_def(&self, item_id: DefIndex, sess: &Session) -> ty::TraitDef {
-        match self.entry(item_id).kind {
+    crate fn get_trait_def(&self, item_id: DefIndex, sess: &Session) -> ty::TraitDef {
+        match self.kind(item_id) {
             EntryKind::Trait(data) => {
                 let data = data.decode((self, sess));
                 ty::TraitDef::new(self.local_def_id(item_id),
@@ -581,18 +590,24 @@ pub fn get_trait_def(&self, item_id: DefIndex, sess: &Session) -> ty::TraitDef {
     fn get_variant(
         &self,
         tcx: TyCtxt<'tcx>,
-        item: &Entry<'_>,
+        kind: &EntryKind<'_>,
         index: DefIndex,
         parent_did: DefId,
-        adt_kind: ty::AdtKind,
     ) -> ty::VariantDef {
-        let data = match item.kind {
+        let data = match kind {
             EntryKind::Variant(data) |
             EntryKind::Struct(data, _) |
             EntryKind::Union(data, _) => data.decode(self),
             _ => bug!(),
         };
 
+        let adt_kind = match kind {
+            EntryKind::Variant(_) => ty::AdtKind::Enum,
+            EntryKind::Struct(..) => ty::AdtKind::Struct,
+            EntryKind::Union(..) => ty::AdtKind::Union,
+            _ => bug!(),
+        };
+
         let variant_did = if adt_kind == ty::AdtKind::Enum {
             Some(self.local_def_id(index))
         } else {
@@ -606,14 +621,12 @@ fn get_variant(
             variant_did,
             ctor_did,
             data.discr,
-            item.children.decode(self).map(|index| {
-                let f = self.entry(index);
-                ty::FieldDef {
+            self.root.per_def.children.get(self, index).unwrap_or(Lazy::empty())
+                .decode(self).map(|index| ty::FieldDef {
                     did: self.local_def_id(index),
                     ident: Ident::with_dummy_span(self.item_name(index)),
-                    vis: f.visibility.decode(self)
-                }
-            }).collect(),
+                    vis: self.get_visibility(index),
+                }).collect(),
             data.ctor_kind,
             adt_kind,
             parent_did,
@@ -621,53 +634,53 @@ fn get_variant(
         )
     }
 
-    pub fn get_adt_def(&self, item_id: DefIndex, tcx: TyCtxt<'tcx>) -> &'tcx ty::AdtDef {
-        let item = self.entry(item_id);
+    crate fn get_adt_def(&self, item_id: DefIndex, tcx: TyCtxt<'tcx>) -> &'tcx ty::AdtDef {
+        let kind = self.kind(item_id);
         let did = self.local_def_id(item_id);
 
-        let (kind, repr) = match item.kind {
+        let (adt_kind, repr) = match kind {
             EntryKind::Enum(repr) => (ty::AdtKind::Enum, repr),
             EntryKind::Struct(_, repr) => (ty::AdtKind::Struct, repr),
             EntryKind::Union(_, repr) => (ty::AdtKind::Union, repr),
             _ => bug!("get_adt_def called on a non-ADT {:?}", did),
         };
 
-        let variants = if let ty::AdtKind::Enum = kind {
-            item.children
+        let variants = if let ty::AdtKind::Enum = adt_kind {
+            self.root.per_def.children.get(self, item_id).unwrap_or(Lazy::empty())
                 .decode(self)
                 .map(|index| {
-                    self.get_variant(tcx, &self.entry(index), index, did, kind)
+                    self.get_variant(tcx, &self.kind(index), index, did)
                 })
                 .collect()
         } else {
-            std::iter::once(self.get_variant(tcx, &item, item_id, did, kind)).collect()
+            std::iter::once(self.get_variant(tcx, &kind, item_id, did)).collect()
         };
 
-        tcx.alloc_adt_def(did, kind, variants, repr)
+        tcx.alloc_adt_def(did, adt_kind, variants, repr)
     }
 
-    pub fn get_predicates(
+    crate fn get_predicates(
         &self,
         item_id: DefIndex,
         tcx: TyCtxt<'tcx>,
     ) -> ty::GenericPredicates<'tcx> {
-        self.entry(item_id).predicates.unwrap().decode((self, tcx))
+        self.root.per_def.predicates.get(self, item_id).unwrap().decode((self, tcx))
     }
 
-    pub fn get_predicates_defined_on(
+    crate fn get_predicates_defined_on(
         &self,
         item_id: DefIndex,
         tcx: TyCtxt<'tcx>,
     ) -> ty::GenericPredicates<'tcx> {
-        self.entry(item_id).predicates_defined_on.unwrap().decode((self, tcx))
+        self.root.per_def.predicates_defined_on.get(self, item_id).unwrap().decode((self, tcx))
     }
 
-    pub fn get_super_predicates(
+    crate fn get_super_predicates(
         &self,
         item_id: DefIndex,
         tcx: TyCtxt<'tcx>,
     ) -> ty::GenericPredicates<'tcx> {
-        let super_predicates = match self.entry(item_id).kind {
+        let super_predicates = match self.kind(item_id) {
             EntryKind::Trait(data) => data.decode(self).super_predicates,
             EntryKind::TraitAlias(data) => data.decode(self).super_predicates,
             _ => bug!("def-index does not refer to trait or trait alias"),
@@ -676,67 +689,66 @@ pub fn get_super_predicates(
         super_predicates.decode((self, tcx))
     }
 
-    pub fn get_generics(&self,
-                        item_id: DefIndex,
-                        sess: &Session)
-                        -> ty::Generics {
-        self.entry(item_id).generics.unwrap().decode((self, sess))
+    crate fn get_generics(&self, item_id: DefIndex, sess: &Session) -> ty::Generics {
+        self.root.per_def.generics.get(self, item_id).unwrap().decode((self, sess))
     }
 
-    pub fn get_type(&self, id: DefIndex, tcx: TyCtxt<'tcx>) -> Ty<'tcx> {
-        self.entry(id).ty.unwrap().decode((self, tcx))
+    crate fn get_type(&self, id: DefIndex, tcx: TyCtxt<'tcx>) -> Ty<'tcx> {
+        self.root.per_def.ty.get(self, id).unwrap().decode((self, tcx))
     }
 
-    pub fn get_stability(&self, id: DefIndex) -> Option<attr::Stability> {
+    crate fn get_stability(&self, id: DefIndex) -> Option<attr::Stability> {
         match self.is_proc_macro(id) {
             true => self.root.proc_macro_stability.clone(),
-            false => self.entry(id).stability.map(|stab| stab.decode(self)),
+            false => self.root.per_def.stability.get(self, id).map(|stab| stab.decode(self)),
         }
     }
 
-    pub fn get_deprecation(&self, id: DefIndex) -> Option<attr::Deprecation> {
-        self.entry_unless_proc_macro(id)
-            .and_then(|entry| entry.deprecation.map(|depr| depr.decode(self)))
+    crate fn get_deprecation(&self, id: DefIndex) -> Option<attr::Deprecation> {
+        self.root.per_def.deprecation.get(self, id)
+            .filter(|_| !self.is_proc_macro(id))
+            .map(|depr| depr.decode(self))
     }
 
-    pub fn get_visibility(&self, id: DefIndex) -> ty::Visibility {
+    crate fn get_visibility(&self, id: DefIndex) -> ty::Visibility {
         match self.is_proc_macro(id) {
             true => ty::Visibility::Public,
-            false => self.entry(id).visibility.decode(self),
+            false => self.root.per_def.visibility.get(self, id).unwrap().decode(self),
         }
     }
 
     fn get_impl_data(&self, id: DefIndex) -> ImplData<'tcx> {
-        match self.entry(id).kind {
+        match self.kind(id) {
             EntryKind::Impl(data) => data.decode(self),
             _ => bug!(),
         }
     }
 
-    pub fn get_parent_impl(&self, id: DefIndex) -> Option<DefId> {
+    crate fn get_parent_impl(&self, id: DefIndex) -> Option<DefId> {
         self.get_impl_data(id).parent_impl
     }
 
-    pub fn get_impl_polarity(&self, id: DefIndex) -> ty::ImplPolarity {
+    crate fn get_impl_polarity(&self, id: DefIndex) -> ty::ImplPolarity {
         self.get_impl_data(id).polarity
     }
 
-    pub fn get_impl_defaultness(&self, id: DefIndex) -> hir::Defaultness {
+    crate fn get_impl_defaultness(&self, id: DefIndex) -> hir::Defaultness {
         self.get_impl_data(id).defaultness
     }
 
-    pub fn get_coerce_unsized_info(&self,
-                                   id: DefIndex)
-                                   -> Option<ty::adjustment::CoerceUnsizedInfo> {
+    crate fn get_coerce_unsized_info(
+        &self,
+        id: DefIndex,
+    ) -> Option<ty::adjustment::CoerceUnsizedInfo> {
         self.get_impl_data(id).coerce_unsized_info
     }
 
-    pub fn get_impl_trait(&self, id: DefIndex, tcx: TyCtxt<'tcx>) -> Option<ty::TraitRef<'tcx>> {
+    crate fn get_impl_trait(&self, id: DefIndex, tcx: TyCtxt<'tcx>) -> Option<ty::TraitRef<'tcx>> {
         self.get_impl_data(id).trait_ref.map(|tr| tr.decode((self, tcx)))
     }
 
     /// Iterates over all the stability attributes in the given crate.
-    pub fn get_lib_features(&self, tcx: TyCtxt<'tcx>) -> &'tcx [(ast::Name, Option<ast::Name>)] {
+    crate fn get_lib_features(&self, tcx: TyCtxt<'tcx>) -> &'tcx [(ast::Name, Option<ast::Name>)] {
         // FIXME: For a proc macro crate, not sure whether we should return the "host"
         // features or an empty Vec. Both don't cause ICEs.
         tcx.arena.alloc_from_iter(self.root
@@ -745,7 +757,7 @@ pub fn get_lib_features(&self, tcx: TyCtxt<'tcx>) -> &'tcx [(ast::Name, Option<a
     }
 
     /// Iterates over the language items in the given crate.
-    pub fn get_lang_items(&self, tcx: TyCtxt<'tcx>) -> &'tcx [(DefId, usize)] {
+    crate fn get_lang_items(&self, tcx: TyCtxt<'tcx>) -> &'tcx [(DefId, usize)] {
         if self.is_proc_macro_crate() {
             // Proc macro crates do not export any lang-items to the target.
             &[]
@@ -758,7 +770,7 @@ pub fn get_lang_items(&self, tcx: TyCtxt<'tcx>) -> &'tcx [(DefId, usize)] {
     }
 
     /// Iterates over the diagnostic items in the given crate.
-    pub fn get_diagnostic_items(
+    crate fn get_diagnostic_items(
         &self,
         tcx: TyCtxt<'tcx>,
     ) -> &'tcx FxHashMap<Symbol, DefId> {
@@ -775,7 +787,7 @@ pub fn get_diagnostic_items(
     }
 
     /// Iterates over each child of the given item.
-    pub fn each_child_of_item<F>(&self, id: DefIndex, mut callback: F, sess: &Session)
+    crate fn each_child_of_item<F>(&self, id: DefIndex, mut callback: F, sess: &Session)
         where F: FnMut(def::Export<hir::HirId>)
     {
         if let Some(proc_macros_ids) = self.root.proc_macro_data.map(|d| d.decode(self)) {
@@ -802,38 +814,42 @@ pub fn each_child_of_item<F>(&self, id: DefIndex, mut callback: F, sess: &Sessio
         }
 
         // Find the item.
-        let item = match self.maybe_entry(id) {
+        let kind = match self.maybe_kind(id) {
             None => return,
-            Some(item) => item.decode((self, sess)),
+            Some(kind) => kind,
         };
 
         // Iterate over all children.
         let macros_only = self.dep_kind.lock().macros_only();
-        for child_index in item.children.decode((self, sess)) {
+        let children = self.root.per_def.children.get(self, id).unwrap_or(Lazy::empty());
+        for child_index in children.decode((self, sess)) {
             if macros_only {
                 continue
             }
 
             // Get the item.
-            if let Some(child) = self.maybe_entry(child_index) {
-                let child = child.decode((self, sess));
-                match child.kind {
+            if let Some(child_kind) = self.maybe_kind(child_index) {
+                match child_kind {
                     EntryKind::MacroDef(..) => {}
                     _ if macros_only => continue,
                     _ => {}
                 }
 
                 // Hand off the item to the callback.
-                match child.kind {
+                match child_kind {
                     // FIXME(eddyb) Don't encode these in children.
                     EntryKind::ForeignMod => {
-                        for child_index in child.children.decode((self, sess)) {
+                        let child_children =
+                            self.root.per_def.children.get(self, child_index)
+                                .unwrap_or(Lazy::empty());
+                        for child_index in child_children.decode((self, sess)) {
                             if let Some(kind) = self.def_kind(child_index) {
                                 callback(def::Export {
                                     res: Res::Def(kind, self.local_def_id(child_index)),
                                     ident: Ident::with_dummy_span(self.item_name(child_index)),
                                     vis: self.get_visibility(child_index),
-                                    span: self.entry(child_index).span.decode((self, sess)),
+                                    span: self.root.per_def.span.get(self, child_index).unwrap()
+                                        .decode((self, sess)),
                                 });
                             }
                         }
@@ -845,7 +861,7 @@ pub fn each_child_of_item<F>(&self, id: DefIndex, mut callback: F, sess: &Sessio
                 }
 
                 let def_key = self.def_key(child_index);
-                let span = child.span.decode((self, sess));
+                let span = self.get_span(child_index, sess);
                 if let (Some(kind), Some(name)) =
                     (self.def_kind(child_index), def_key.disambiguated_data.data.get_opt_name()) {
                     let ident = Ident::from_interned_str(name);
@@ -898,7 +914,7 @@ pub fn each_child_of_item<F>(&self, id: DefIndex, mut callback: F, sess: &Sessio
             }
         }
 
-        if let EntryKind::Mod(data) = item.kind {
+        if let EntryKind::Mod(data) = kind {
             for exp in data.decode((self, sess)).reexports.decode((self, sess)) {
                 match exp.res {
                     Res::Def(DefKind::Macro(..), _) => {}
@@ -910,33 +926,35 @@ pub fn each_child_of_item<F>(&self, id: DefIndex, mut callback: F, sess: &Sessio
         }
     }
 
-    pub fn is_item_mir_available(&self, id: DefIndex) -> bool {
+    crate fn is_item_mir_available(&self, id: DefIndex) -> bool {
         !self.is_proc_macro(id) &&
-        self.maybe_entry(id).and_then(|item| item.decode(self).mir).is_some()
+            self.root.per_def.mir.get(self, id).is_some()
     }
 
-    pub fn get_optimized_mir(&self, tcx: TyCtxt<'tcx>, id: DefIndex) -> Body<'tcx> {
-        self.entry_unless_proc_macro(id)
-            .and_then(|entry| entry.mir.map(|mir| mir.decode((self, tcx))))
+    crate fn get_optimized_mir(&self, tcx: TyCtxt<'tcx>, id: DefIndex) -> Body<'tcx> {
+        self.root.per_def.mir.get(self, id)
+            .filter(|_| !self.is_proc_macro(id))
             .unwrap_or_else(|| {
-                bug!("get_optimized_mir: missing MIR for `{:?}", self.local_def_id(id))
+                bug!("get_optimized_mir: missing MIR for `{:?}`", self.local_def_id(id))
             })
+            .decode((self, tcx))
     }
 
-    pub fn get_promoted_mir(
+    crate fn get_promoted_mir(
         &self,
         tcx: TyCtxt<'tcx>,
         id: DefIndex,
     ) -> IndexVec<Promoted, Body<'tcx>> {
-        self.entry_unless_proc_macro(id)
-            .and_then(|entry| entry.promoted_mir.map(|promoted| promoted.decode((self, tcx))))
+        self.root.per_def.promoted_mir.get(self, id)
+            .filter(|_| !self.is_proc_macro(id))
             .unwrap_or_else(|| {
                 bug!("get_promoted_mir: missing MIR for `{:?}`", self.local_def_id(id))
             })
+            .decode((self, tcx))
     }
 
-    pub fn mir_const_qualif(&self, id: DefIndex) -> u8 {
-        match self.entry(id).kind {
+    crate fn mir_const_qualif(&self, id: DefIndex) -> u8 {
+        match self.kind(id) {
             EntryKind::Const(qualif, _) |
             EntryKind::AssocConst(AssocContainer::ImplDefault, qualif, _) |
             EntryKind::AssocConst(AssocContainer::ImplFinal, qualif, _) => {
@@ -946,13 +964,12 @@ pub fn mir_const_qualif(&self, id: DefIndex) -> u8 {
         }
     }
 
-    pub fn get_associated_item(&self, id: DefIndex) -> ty::AssocItem {
-        let item = self.entry(id);
+    crate fn get_associated_item(&self, id: DefIndex) -> ty::AssocItem {
         let def_key = self.def_key(id);
         let parent = self.local_def_id(def_key.parent.unwrap());
         let name = def_key.disambiguated_data.data.get_opt_name().unwrap();
 
-        let (kind, container, has_self) = match item.kind {
+        let (kind, container, has_self) = match self.kind(id) {
             EntryKind::AssocConst(container, _, _) => {
                 (ty::AssocKind::Const, container, false)
             }
@@ -972,7 +989,7 @@ pub fn get_associated_item(&self, id: DefIndex) -> ty::AssocItem {
         ty::AssocItem {
             ident: Ident::from_interned_str(name),
             kind,
-            vis: item.visibility.decode(self),
+            vis: self.get_visibility(id),
             defaultness: container.defaultness(),
             def_id: self.local_def_id(id),
             container: container.with_def_id(parent),
@@ -980,12 +997,13 @@ pub fn get_associated_item(&self, id: DefIndex) -> ty::AssocItem {
         }
     }
 
-    pub fn get_item_variances(&self, id: DefIndex) -> Vec<ty::Variance> {
-        self.entry(id).variances.decode(self).collect()
+    crate fn get_item_variances(&self, id: DefIndex) -> Vec<ty::Variance> {
+        self.root.per_def.variances.get(self, id).unwrap_or(Lazy::empty())
+            .decode(self).collect()
     }
 
-    pub fn get_ctor_kind(&self, node_id: DefIndex) -> CtorKind {
-        match self.entry(node_id).kind {
+    crate fn get_ctor_kind(&self, node_id: DefIndex) -> CtorKind {
+        match self.kind(node_id) {
             EntryKind::Struct(data, _) |
             EntryKind::Union(data, _) |
             EntryKind::Variant(data) => data.decode(self).ctor_kind,
@@ -993,8 +1011,8 @@ pub fn get_ctor_kind(&self, node_id: DefIndex) -> CtorKind {
         }
     }
 
-    pub fn get_ctor_def_id(&self, node_id: DefIndex) -> Option<DefId> {
-        match self.entry(node_id).kind {
+    crate fn get_ctor_def_id(&self, node_id: DefIndex) -> Option<DefId> {
+        match self.kind(node_id) {
             EntryKind::Struct(data, _) => {
                 data.decode(self).ctor.map(|index| self.local_def_id(index))
             }
@@ -1005,8 +1023,7 @@ pub fn get_ctor_def_id(&self, node_id: DefIndex) -> Option<DefId> {
         }
     }
 
-
-    pub fn get_item_attrs(&self, node_id: DefIndex, sess: &Session) -> Lrc<[ast::Attribute]> {
+    crate fn get_item_attrs(&self, node_id: DefIndex, sess: &Session) -> Lrc<[ast::Attribute]> {
         // The attributes for a tuple struct/variant are attached to the definition, not the ctor;
         // we assume that someone passing in a tuple struct ctor is actually wanting to
         // look at the definition
@@ -1017,22 +1034,22 @@ pub fn get_item_attrs(&self, node_id: DefIndex, sess: &Session) -> Lrc<[ast::Att
             node_id
         };
 
-        let item = self.entry(item_id);
-        Lrc::from(self.get_attributes(&item, sess))
+        Lrc::from(self.root.per_def.attributes.get(self, item_id).unwrap_or(Lazy::empty())
+            .decode((self, sess))
+            .collect::<Vec<_>>())
     }
 
-    pub fn get_struct_field_names(&self, id: DefIndex) -> Vec<ast::Name> {
-        self.entry(id)
-            .children
+    crate fn get_struct_field_names(
+        &self,
+        id: DefIndex,
+        sess: &Session,
+    ) -> Vec<Spanned<ast::Name>> {
+        self.root.per_def.children.get(self, id).unwrap_or(Lazy::empty())
             .decode(self)
-            .map(|index| self.item_name(index))
+            .map(|index| respan(self.get_span(index, sess), self.item_name(index)))
             .collect()
     }
 
-    fn get_attributes(&self, item: &Entry<'tcx>, sess: &Session) -> Vec<ast::Attribute> {
-        item.attributes.decode((self, sess)).collect()
-    }
-
     // Translate a DefId from the current compilation environment to a DefId
     // for an external crate.
     fn reverse_translate_def_id(&self, did: DefId) -> Option<DefId> {
@@ -1048,18 +1065,19 @@ fn reverse_translate_def_id(&self, did: DefId) -> Option<DefId> {
         None
     }
 
-    pub fn get_inherent_implementations_for_type(
+    crate fn get_inherent_implementations_for_type(
         &self,
         tcx: TyCtxt<'tcx>,
         id: DefIndex,
     ) -> &'tcx [DefId] {
-        tcx.arena.alloc_from_iter(self.entry(id)
-                                      .inherent_impls
-                                      .decode(self)
-                                      .map(|index| self.local_def_id(index)))
+        tcx.arena.alloc_from_iter(
+            self.root.per_def.inherent_impls.get(self, id).unwrap_or(Lazy::empty())
+                .decode(self)
+                .map(|index| self.local_def_id(index))
+        )
     }
 
-    pub fn get_implementations_for_trait(
+    crate fn get_implementations_for_trait(
         &self,
         tcx: TyCtxt<'tcx>,
         filter: Option<DefId>,
@@ -1090,7 +1108,7 @@ pub fn get_implementations_for_trait(
         }
     }
 
-    pub fn get_trait_of_item(&self, id: DefIndex) -> Option<DefId> {
+    crate fn get_trait_of_item(&self, id: DefIndex) -> Option<DefId> {
         let def_key = self.def_key(id);
         match def_key.disambiguated_data.data {
             DefPathData::TypeNs(..) | DefPathData::ValueNs(..) => (),
@@ -1098,7 +1116,7 @@ pub fn get_trait_of_item(&self, id: DefIndex) -> Option<DefId> {
             _ => return None,
         }
         def_key.parent.and_then(|parent_index| {
-            match self.entry(parent_index).kind {
+            match self.kind(parent_index) {
                 EntryKind::Trait(_) |
                 EntryKind::TraitAlias(_) => Some(self.local_def_id(parent_index)),
                 _ => None,
@@ -1107,7 +1125,7 @@ pub fn get_trait_of_item(&self, id: DefIndex) -> Option<DefId> {
     }
 
 
-    pub fn get_native_libraries(&self, sess: &Session) -> Vec<NativeLibrary> {
+    crate fn get_native_libraries(&self, sess: &Session) -> Vec<NativeLibrary> {
         if self.is_proc_macro_crate() {
             // Proc macro crates do not have any *target* native libraries.
             vec![]
@@ -1116,7 +1134,7 @@ pub fn get_native_libraries(&self, sess: &Session) -> Vec<NativeLibrary> {
         }
     }
 
-    pub fn get_foreign_modules(&self, tcx: TyCtxt<'tcx>) -> &'tcx [ForeignModule] {
+    crate fn get_foreign_modules(&self, tcx: TyCtxt<'tcx>) -> &'tcx [ForeignModule] {
         if self.is_proc_macro_crate() {
             // Proc macro crates do not have any *target* foreign modules.
             &[]
@@ -1125,7 +1143,7 @@ pub fn get_foreign_modules(&self, tcx: TyCtxt<'tcx>) -> &'tcx [ForeignModule] {
         }
     }
 
-    pub fn get_dylib_dependency_formats(
+    crate fn get_dylib_dependency_formats(
         &self,
         tcx: TyCtxt<'tcx>,
     ) -> &'tcx [(CrateNum, LinkagePreference)] {
@@ -1139,7 +1157,7 @@ pub fn get_dylib_dependency_formats(
             }))
     }
 
-    pub fn get_missing_lang_items(&self, tcx: TyCtxt<'tcx>) -> &'tcx [lang_items::LangItem] {
+    crate fn get_missing_lang_items(&self, tcx: TyCtxt<'tcx>) -> &'tcx [lang_items::LangItem] {
         if self.is_proc_macro_crate() {
             // Proc macro crates do not depend on any target weak lang-items.
             &[]
@@ -1150,8 +1168,8 @@ pub fn get_missing_lang_items(&self, tcx: TyCtxt<'tcx>) -> &'tcx [lang_items::La
         }
     }
 
-    pub fn get_fn_param_names(&self, id: DefIndex) -> Vec<ast::Name> {
-        let param_names = match self.entry(id).kind {
+    crate fn get_fn_param_names(&self, id: DefIndex) -> Vec<ast::Name> {
+        let param_names = match self.kind(id) {
             EntryKind::Fn(data) |
             EntryKind::ForeignFn(data) => data.decode(self).param_names,
             EntryKind::Method(data) => data.decode(self).fn_data.param_names,
@@ -1160,7 +1178,7 @@ pub fn get_fn_param_names(&self, id: DefIndex) -> Vec<ast::Name> {
         param_names.decode(self).collect()
     }
 
-    pub fn exported_symbols(
+    crate fn exported_symbols(
         &self,
         tcx: TyCtxt<'tcx>,
     ) -> Vec<(ExportedSymbol<'tcx>, SymbolExportLevel)> {
@@ -1173,24 +1191,23 @@ pub fn exported_symbols(
         }
     }
 
-    pub fn get_rendered_const(&self, id: DefIndex) -> String {
-        match self.entry(id).kind {
+    crate fn get_rendered_const(&self, id: DefIndex) -> String {
+        match self.kind(id) {
             EntryKind::Const(_, data) |
             EntryKind::AssocConst(_, _, data) => data.decode(self).0,
             _ => bug!(),
         }
     }
 
-    pub fn get_macro(&self, id: DefIndex) -> MacroDef {
-        let entry = self.entry(id);
-        match entry.kind {
+    crate fn get_macro(&self, id: DefIndex) -> MacroDef {
+        match self.kind(id) {
             EntryKind::MacroDef(macro_def) => macro_def.decode(self),
             _ => bug!(),
         }
     }
 
     crate fn is_const_fn_raw(&self, id: DefIndex) -> bool {
-        let constness = match self.entry(id).kind {
+        let constness = match self.kind(id) {
             EntryKind::Method(data) => data.decode(self).fn_data.constness,
             EntryKind::Fn(data) => data.decode(self).constness,
             EntryKind::Variant(..) | EntryKind::Struct(..) => hir::Constness::Const,
@@ -1199,17 +1216,17 @@ pub fn get_macro(&self, id: DefIndex) -> MacroDef {
         constness == hir::Constness::Const
     }
 
-    pub fn asyncness(&self, id: DefIndex) -> hir::IsAsync {
-         match self.entry(id).kind {
+    crate fn asyncness(&self, id: DefIndex) -> hir::IsAsync {
+         match self.kind(id) {
             EntryKind::Fn(data) => data.decode(self).asyncness,
             EntryKind::Method(data) => data.decode(self).fn_data.asyncness,
             EntryKind::ForeignFn(data) => data.decode(self).asyncness,
-            _ => bug!("asyncness: expect functions entry."),
+            _ => bug!("asyncness: expected function kind"),
         }
     }
 
-    pub fn is_foreign_item(&self, id: DefIndex) -> bool {
-        match self.entry(id).kind {
+    crate fn is_foreign_item(&self, id: DefIndex) -> bool {
+        match self.kind(id) {
             EntryKind::ForeignImmStatic |
             EntryKind::ForeignMutStatic |
             EntryKind::ForeignFn(_) => true,
@@ -1218,7 +1235,7 @@ pub fn is_foreign_item(&self, id: DefIndex) -> bool {
     }
 
     crate fn static_mutability(&self, id: DefIndex) -> Option<hir::Mutability> {
-        match self.entry(id).kind {
+        match self.kind(id) {
             EntryKind::ImmStatic |
             EntryKind::ForeignImmStatic => Some(hir::MutImmutable),
             EntryKind::MutStatic |
@@ -1227,8 +1244,8 @@ pub fn is_foreign_item(&self, id: DefIndex) -> bool {
         }
     }
 
-    pub fn fn_sig(&self, id: DefIndex, tcx: TyCtxt<'tcx>) -> ty::PolyFnSig<'tcx> {
-        let sig = match self.entry(id).kind {
+    crate fn fn_sig(&self, id: DefIndex, tcx: TyCtxt<'tcx>) -> ty::PolyFnSig<'tcx> {
+        let sig = match self.kind(id) {
             EntryKind::Fn(data) |
             EntryKind::ForeignFn(data) => data.decode(self).sig,
             EntryKind::Method(data) => data.decode(self).fn_data.sig,
@@ -1241,7 +1258,7 @@ pub fn fn_sig(&self, id: DefIndex, tcx: TyCtxt<'tcx>) -> ty::PolyFnSig<'tcx> {
     }
 
     #[inline]
-    pub fn def_key(&self, index: DefIndex) -> DefKey {
+    crate fn def_key(&self, index: DefIndex) -> DefKey {
         let mut key = self.def_path_table.def_key(index);
         if self.is_proc_macro(index) {
             let name = self.raw_proc_macro(index).name();
@@ -1251,13 +1268,13 @@ pub fn def_key(&self, index: DefIndex) -> DefKey {
     }
 
     // Returns the path leading to the thing with this `id`.
-    pub fn def_path(&self, id: DefIndex) -> DefPath {
+    crate fn def_path(&self, id: DefIndex) -> DefPath {
         debug!("def_path(cnum={:?}, id={:?})", self.cnum, id);
         DefPath::make(self.cnum, id, |parent| self.def_key(parent))
     }
 
     #[inline]
-    pub fn def_path_hash(&self, index: DefIndex) -> DefPathHash {
+    crate fn def_path_hash(&self, index: DefIndex) -> DefPathHash {
         self.def_path_table.def_path_hash(index)
     }
 
@@ -1286,9 +1303,10 @@ pub fn def_path_hash(&self, index: DefIndex) -> DefPathHash {
     ///
     /// Proc macro crates don't currently export spans, so this function does not have
     /// to work for them.
-    pub fn imported_source_files(&'a self,
-                                 local_source_map: &source_map::SourceMap)
-                                 -> ReadGuard<'a, Vec<cstore::ImportedSourceFile>> {
+    fn imported_source_files(
+        &'a self,
+        local_source_map: &source_map::SourceMap,
+    ) -> ReadGuard<'a, Vec<cstore::ImportedSourceFile>> {
         {
             let source_files = self.source_map_import_info.borrow();
             if !source_files.is_empty() {
@@ -1365,6 +1383,30 @@ pub fn imported_source_files(&'a self,
         // This shouldn't borrow twice, but there is no way to downgrade RefMut to Ref.
         self.source_map_import_info.borrow()
     }
+
+    /// Get the `DepNodeIndex` corresponding to this crate. The result of this
+    /// method is cached in the `dep_node_index` field.
+    pub(super) fn get_crate_dep_node_index(&self, tcx: TyCtxt<'tcx>) -> DepNodeIndex {
+        let mut dep_node_index = self.dep_node_index.load();
+
+        if unlikely!(dep_node_index == DepNodeIndex::INVALID) {
+            // We have not cached the DepNodeIndex for this upstream crate yet,
+            // so use the dep-graph to find it out and cache it.
+            // Note that multiple threads can enter this block concurrently.
+            // That is fine because the DepNodeIndex remains constant
+            // throughout the whole compilation session, and multiple stores
+            // would always write the same value.
+
+            let def_path_hash = self.def_path_hash(CRATE_DEF_INDEX);
+            let dep_node = def_path_hash.to_dep_node(DepKind::CrateMetadata);
+
+            dep_node_index = tcx.dep_graph.dep_node_index_of(&dep_node);
+            assert!(dep_node_index != DepNodeIndex::INVALID);
+            self.dep_node_index.store(dep_node_index);
+        }
+
+        dep_node_index
+    }
 }
 
 // Cannot be implemented on 'ProcMacro', as libproc_macro
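The doc comment on get_crate_dep_node_index above describes a race-tolerant cache: the index is constant for the whole compilation session, so concurrent redundant stores are harmless and no lock is needed. A minimal standalone sketch of that pattern, with std atomics and a hypothetical `compute` closure standing in for rustc's AtomicCell, DepNodeIndex, and dep-graph lookup:

    use std::sync::atomic::{AtomicU32, Ordering};

    /// Sentinel meaning "not computed yet", playing the role of DepNodeIndex::INVALID.
    const INVALID: u32 = u32::MAX;

    struct CachedIndex {
        cell: AtomicU32,
    }

    impl CachedIndex {
        fn new() -> Self {
            CachedIndex { cell: AtomicU32::new(INVALID) }
        }

        /// Returns the cached value, computing and storing it on first use.
        /// Several threads may race past the check; that is fine because
        /// `compute` always yields the same value for the whole session,
        /// so redundant stores all write the same thing.
        fn get(&self, compute: impl Fn() -> u32) -> u32 {
            let mut index = self.cell.load(Ordering::Relaxed);
            if index == INVALID {
                index = compute();
                assert!(index != INVALID);
                self.cell.store(index, Ordering::Relaxed);
            }
            index
        }
    }

    fn main() {
        let cached = CachedIndex::new();
        assert_eq!(cached.get(|| 42), 42);                      // computes once
        assert_eq!(cached.get(|| unreachable!("cached")), 42);  // hits the cache
    }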
index 9a30623b33d626a6f8c70f081d31888ef46baa2f..7f76a9730e198392cde7ac039bef100cc77ad373 100644 (file)
@@ -60,7 +60,7 @@
 use rustc::util::nodemap::FxHashMap;
 use rustc_target::spec::PanicStrategy;
 
-pub fn calculate(tcx: TyCtxt<'_>) -> Dependencies {
+crate fn calculate(tcx: TyCtxt<'_>) -> Dependencies {
     tcx.sess.crate_types.borrow().iter().map(|&ty| {
         let linkage = calculate_type(tcx, ty);
         verify_ok(tcx, &linkage);
index 4c279361ff5e1c657b2781670e53634a948b8e55..3871eb89f7b42c42e4c49dd5bc181d04659b00f2 100644 (file)
@@ -32,30 +32,6 @@ pub fn open(filename: Option<&Path>) -> Result<DynamicLibrary, String> {
         }
     }
 
-    /// Loads a dynamic library into the global namespace (RTLD_GLOBAL on Unix)
-    /// and do it now (don't use RTLD_LAZY on Unix).
-    pub fn open_global_now(filename: &Path) -> Result<DynamicLibrary, String> {
-        let maybe_library = dl::open_global_now(filename.as_os_str());
-        match maybe_library {
-            Err(err) => Err(err),
-            Ok(handle) => Ok(DynamicLibrary { handle })
-        }
-    }
-
-    /// Returns the environment variable for this process's dynamic library
-    /// search path
-    pub fn envvar() -> &'static str {
-        if cfg!(windows) {
-            "PATH"
-        } else if cfg!(target_os = "macos") {
-            "DYLD_LIBRARY_PATH"
-        } else if cfg!(target_os = "haiku") {
-            "LIBRARY_PATH"
-        } else {
-            "LD_LIBRARY_PATH"
-        }
-    }
-
     /// Accesses the value at the symbol of the dynamic library.
     pub unsafe fn symbol<T>(&self, symbol: &str) -> Result<*mut T, String> {
         // This function should have a lifetime constraint of 'a on
@@ -83,7 +59,7 @@ mod dl {
     use std::ptr;
     use std::str;
 
-    pub fn open(filename: Option<&OsStr>) -> Result<*mut u8, String> {
+    pub(super) fn open(filename: Option<&OsStr>) -> Result<*mut u8, String> {
         check_for_errors_in(|| {
             unsafe {
                 match filename {
@@ -94,13 +70,6 @@ pub fn open(filename: Option<&OsStr>) -> Result<*mut u8, String> {
         })
     }
 
-    pub fn open_global_now(filename: &OsStr) -> Result<*mut u8, String> {
-        check_for_errors_in(|| unsafe {
-            let s = CString::new(filename.as_bytes()).unwrap();
-            libc::dlopen(s.as_ptr(), libc::RTLD_GLOBAL | libc::RTLD_NOW) as *mut u8
-        })
-    }
-
     unsafe fn open_external(filename: &OsStr) -> *mut u8 {
         let s = CString::new(filename.as_bytes()).unwrap();
         libc::dlopen(s.as_ptr(), libc::RTLD_LAZY) as *mut u8
@@ -110,8 +79,8 @@ unsafe fn open_internal() -> *mut u8 {
         libc::dlopen(ptr::null(), libc::RTLD_LAZY) as *mut u8
     }
 
-    pub fn check_for_errors_in<T, F>(f: F) -> Result<T, String> where
-        F: FnOnce() -> T,
+    fn check_for_errors_in<T, F>(f: F) -> Result<T, String>
+        where F: FnOnce() -> T,
     {
         use std::sync::{Mutex, Once};
         static INIT: Once = Once::new();
@@ -139,14 +108,15 @@ pub fn check_for_errors_in<T, F>(f: F) -> Result<T, String> where
         }
     }
 
-    pub unsafe fn symbol(handle: *mut u8,
-                         symbol: *const libc::c_char)
-                         -> Result<*mut u8, String> {
+    pub(super) unsafe fn symbol(
+        handle: *mut u8,
+        symbol: *const libc::c_char,
+    ) -> Result<*mut u8, String> {
         check_for_errors_in(|| {
             libc::dlsym(handle as *mut libc::c_void, symbol) as *mut u8
         })
     }
-    pub unsafe fn close(handle: *mut u8) {
+    pub(super) unsafe fn close(handle: *mut u8) {
         libc::dlclose(handle as *mut libc::c_void); ()
     }
 }
@@ -178,11 +148,7 @@ fn GetProcAddress(handle: HMODULE,
         fn FreeLibrary(handle: HMODULE) -> BOOL;
     }
 
-    pub fn open_global_now(filename: &OsStr) -> Result<*mut u8, String> {
-        open(Some(filename))
-    }
-
-    pub fn open(filename: Option<&OsStr>) -> Result<*mut u8, String> {
+    pub(super) fn open(filename: Option<&OsStr>) -> Result<*mut u8, String> {
         // disable "dll load failed" error dialog.
         let prev_error_mode = unsafe {
             // SEM_FAILCRITICALERRORS 0x01
@@ -225,14 +191,15 @@ pub fn open(filename: Option<&OsStr>) -> Result<*mut u8, String> {
         result
     }
 
-    pub unsafe fn symbol(handle: *mut u8,
-                         symbol: *const c_char)
-                         -> Result<*mut u8, String> {
+    pub(super) unsafe fn symbol(
+        handle: *mut u8,
+        symbol: *const c_char,
+    ) -> Result<*mut u8, String> {
         let ptr = GetProcAddress(handle as HMODULE, symbol) as *mut u8;
         ptr_result(ptr)
     }
 
-    pub unsafe fn close(handle: *mut u8) {
+    pub(super) unsafe fn close(handle: *mut u8) {
         FreeLibrary(handle as HMODULE);
     }
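A pattern running through this commit is tightening visibility: `pub fn` becomes `crate fn` in the metadata decoder and encoder, and the platform shims above become `pub(super)`. `crate` is the (then-unstable) crate_visibility_modifier shorthand for `pub(crate)`. A small stable-Rust illustration of the two restricted visibilities involved (module and function names are made up):

    mod metadata {
        pub mod decoder {
            // Reachable anywhere inside the enclosing crate, but never
            // exported from it; `crate fn` in the patch means the same thing.
            pub(crate) fn item_name() -> &'static str { "example" }

            // Reachable only from the parent module (`metadata`), like the
            // `pub(super)` helpers in the dynamic-library shims.
            pub(super) fn helper() -> u32 { 1 }
        }

        pub fn use_both() -> u32 {
            let _ = decoder::item_name();
            decoder::helper()
        }
    }

    fn main() {
        // From outside `metadata`, only the `pub` item is reachable;
        // `metadata::decoder::helper()` would not compile here.
        assert_eq!(metadata::use_both(), 1);
    }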
 
index bbfbba2e0d8f4c7a6140d4f1b45c8f95764ad410..6ae8c2fc6c694b2b72d4cb0d943d5b60b3a5af12 100644 (file)
@@ -1,11 +1,11 @@
-use crate::index::Index;
 use crate::schema::*;
+use crate::table::{FixedSizeEncoding, PerDefTable};
 
 use rustc::middle::cstore::{LinkagePreference, NativeLibrary,
                             EncodedMetadata, ForeignModule};
 use rustc::hir::def::CtorKind;
 use rustc::hir::def_id::{CrateNum, CRATE_DEF_INDEX, DefIndex, DefId, LocalDefId, LOCAL_CRATE};
-use rustc::hir::GenericParamKind;
+use rustc::hir::{GenericParamKind, AnonConst};
 use rustc::hir::map::definitions::DefPathTable;
 use rustc_data_structures::fingerprint::Fingerprint;
 use rustc_index::vec::IndexVec;
@@ -15,7 +15,7 @@
 use rustc::middle::lang_items;
 use rustc::mir::{self, interpret};
 use rustc::traits::specialization_graph;
-use rustc::ty::{self, Ty, TyCtxt, ReprOptions, SymbolName};
+use rustc::ty::{self, Ty, TyCtxt, SymbolName};
 use rustc::ty::codec::{self as ty_codec, TyEncoder};
 use rustc::ty::layout::VariantIdx;
 
 use rustc::util::nodemap::FxHashMap;
 
 use rustc_data_structures::stable_hasher::StableHasher;
+use rustc_data_structures::sync::Lrc;
 use rustc_serialize::{Encodable, Encoder, SpecializedEncoder, opaque};
 
 use std::hash::Hash;
+use std::num::NonZeroUsize;
 use std::path::Path;
-use rustc_data_structures::sync::Lrc;
 use std::u32;
 use syntax::ast;
 use syntax::attr;
-use syntax::ext::proc_macro::is_proc_macro_attr;
+use syntax_expand::proc_macro::is_proc_macro_attr;
 use syntax::source_map::Spanned;
 use syntax::symbol::{kw, sym, Ident, Symbol};
 use syntax_pos::{self, FileName, SourceFile, Span};
 use rustc::hir::intravisit::{Visitor, NestedVisitorMap};
 use rustc::hir::intravisit;
 
-pub struct EncodeContext<'tcx> {
+struct EncodeContext<'tcx> {
     opaque: opaque::Encoder,
-    pub tcx: TyCtxt<'tcx>,
+    tcx: TyCtxt<'tcx>,
 
-    entries_index: Index<'tcx>,
+    per_def: PerDefTables<'tcx>,
 
     lazy_state: LazyState,
     type_shorthands: FxHashMap<Ty<'tcx>, usize>,
@@ -59,6 +60,27 @@ pub struct EncodeContext<'tcx> {
     source_file_cache: Lrc<SourceFile>,
 }
 
+#[derive(Default)]
+struct PerDefTables<'tcx> {
+    kind: PerDefTable<Lazy<EntryKind<'tcx>>>,
+    visibility: PerDefTable<Lazy<ty::Visibility>>,
+    span: PerDefTable<Lazy<Span>>,
+    attributes: PerDefTable<Lazy<[ast::Attribute]>>,
+    children: PerDefTable<Lazy<[DefIndex]>>,
+    stability: PerDefTable<Lazy<attr::Stability>>,
+    deprecation: PerDefTable<Lazy<attr::Deprecation>>,
+
+    ty: PerDefTable<Lazy<Ty<'tcx>>>,
+    inherent_impls: PerDefTable<Lazy<[DefIndex]>>,
+    variances: PerDefTable<Lazy<[ty::Variance]>>,
+    generics: PerDefTable<Lazy<ty::Generics>>,
+    predicates: PerDefTable<Lazy<ty::GenericPredicates<'tcx>>>,
+    predicates_defined_on: PerDefTable<Lazy<ty::GenericPredicates<'tcx>>>,
+
+    mir: PerDefTable<Lazy<mir::Body<'tcx>>>,
+    promoted_mir: PerDefTable<Lazy<IndexVec<mir::Promoted, mir::Body<'tcx>>>>,
+}
+
 macro_rules! encoder_methods {
     ($($name:ident($ty:ty);)*) => {
         $(fn $name(&mut self, value: $ty) -> Result<(), Self::Error> {
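The PerDefTables struct added above switches the encoder from one monolithic Entry per definition to one table ("column") per property, so each piece of metadata can be present or absent independently for a given DefIndex. A rough, self-contained sketch of that layout, using plain Vec<Option<T>> columns and a hypothetical DefIndex newtype in place of the real lazily-encoded PerDefTable:

    /// Hypothetical, simplified stand-in for the real index type.
    #[derive(Clone, Copy)]
    struct DefIndex(usize);

    /// One column of metadata: a slot per definition, possibly empty.
    struct Column<T> {
        slots: Vec<Option<T>>,
    }

    impl<T> Column<T> {
        fn new() -> Self { Column { slots: Vec::new() } }

        fn set(&mut self, def: DefIndex, value: T) {
            if self.slots.len() <= def.0 {
                self.slots.resize_with(def.0 + 1, || None);
            }
            self.slots[def.0] = Some(value);
        }

        fn get(&self, def: DefIndex) -> Option<&T> {
            self.slots.get(def.0).and_then(|slot| slot.as_ref())
        }
    }

    /// Column-oriented layout: one column per property, instead of one
    /// struct-of-everything per definition.
    struct PerDefTables {
        span: Column<(u32, u32)>, // e.g. (lo, hi) byte positions
        children: Column<Vec<DefIndex>>,
    }

    fn main() {
        let mut tables = PerDefTables { span: Column::new(), children: Column::new() };
        let def = DefIndex(3);
        tables.span.set(def, (10, 25));
        assert_eq!(tables.span.get(def), Some(&(10, 25)));
        // `children` was never recorded for this def, so the lookup is simply
        // None, mirroring `.get(...).unwrap_or(Lazy::empty())` in the decoder.
        assert!(tables.children.get(def).is_none());
    }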
@@ -97,13 +119,13 @@ fn emit_unit(&mut self) -> Result<(), Self::Error> {
     }
 }
 
-impl<'tcx, T> SpecializedEncoder<Lazy<T>> for EncodeContext<'tcx> {
+impl<'tcx, T: Encodable> SpecializedEncoder<Lazy<T>> for EncodeContext<'tcx> {
     fn specialized_encode(&mut self, lazy: &Lazy<T>) -> Result<(), Self::Error> {
         self.emit_lazy_distance(*lazy)
     }
 }
 
-impl<'tcx, T> SpecializedEncoder<Lazy<[T]>> for EncodeContext<'tcx> {
+impl<'tcx, T: Encodable> SpecializedEncoder<Lazy<[T]>> for EncodeContext<'tcx> {
     fn specialized_encode(&mut self, lazy: &Lazy<[T]>) -> Result<(), Self::Error> {
         self.emit_usize(lazy.meta)?;
         if lazy.meta == 0 {
@@ -113,6 +135,15 @@ fn specialized_encode(&mut self, lazy: &Lazy<[T]>) -> Result<(), Self::Error> {
     }
 }
 
+impl<'tcx, T> SpecializedEncoder<Lazy<PerDefTable<T>>> for EncodeContext<'tcx>
+    where Option<T>: FixedSizeEncoding,
+{
+    fn specialized_encode(&mut self, lazy: &Lazy<PerDefTable<T>>) -> Result<(), Self::Error> {
+        self.emit_usize(lazy.meta)?;
+        self.emit_lazy_distance(*lazy)
+    }
+}
+
 impl<'tcx> SpecializedEncoder<CrateNum> for EncodeContext<'tcx> {
     #[inline]
     fn specialized_encode(&mut self, cnum: &CrateNum) -> Result<(), Self::Error> {
@@ -212,11 +243,11 @@ fn specialized_encode(&mut self, alloc_id: &interpret::AllocId) -> Result<(), Se
     }
 }
 
-impl<'tcx> SpecializedEncoder<ty::GenericPredicates<'tcx>> for EncodeContext<'tcx> {
+impl<'tcx> SpecializedEncoder<&'tcx [(ty::Predicate<'tcx>, Span)]> for EncodeContext<'tcx> {
     fn specialized_encode(&mut self,
-                          predicates: &ty::GenericPredicates<'tcx>)
+                          predicates: &&'tcx [(ty::Predicate<'tcx>, Span)])
                           -> Result<(), Self::Error> {
-        ty_codec::encode_predicates(self, predicates, |ecx| &mut ecx.predicate_shorthands)
+        ty_codec::encode_spanned_predicates(self, predicates, |ecx| &mut ecx.predicate_shorthands)
     }
 }
 
@@ -257,7 +288,7 @@ fn encode_contents_for_lazy(self, ecx: &mut EncodeContext<'tcx>) {
     }
 }
 
-impl<I, T> EncodeContentsForLazy<[T]> for I
+impl<I, T: Encodable> EncodeContentsForLazy<[T]> for I
     where I: IntoIterator,
           I::Item: EncodeContentsForLazy<T>,
 {
@@ -266,15 +297,28 @@ fn encode_contents_for_lazy(self, ecx: &mut EncodeContext<'tcx>) -> usize {
     }
 }
 
+// Shorthand for `$self.$tables.$table.set($key, $self.lazy($value))`, which would
+// normally need extra variables to avoid errors about multiple mutable borrows.
+macro_rules! record {
+    ($self:ident.$tables:ident.$table:ident[$key:expr] <- $value:expr) => {{
+        {
+            let value = $value;
+            let lazy = $self.lazy(value);
+            $self.$tables.$table.set($key, lazy);
+        }
+    }}
+}
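The record! macro above is what the rest of the patch uses to populate the per-def tables. A hedged, self-contained imitation (toy Encoder/Tables types, not the real ones) showing why the intermediate let bindings matter: without them, `$self.lazy(..)` and the table update would both borrow the encoder mutably inside one expression.

    struct Tables {
        kind: Vec<(u32, String)>,
    }

    struct Encoder {
        buf: String,
        per_def: Tables,
    }

    impl Encoder {
        /// Stand-in for the real `lazy()`: serializes the value and returns
        /// a token describing where it landed.
        fn lazy(&mut self, value: &str) -> String {
            self.buf.push_str(value);
            format!("@{}", self.buf.len())
        }
    }

    macro_rules! record {
        ($self:ident.$tables:ident.$table:ident[$key:expr] <- $value:expr) => {{
            let value = $value;
            let lazy = $self.lazy(value);             // first mutable borrow ends here
            $self.$tables.$table.push(($key, lazy));  // second mutable borrow starts here
        }};
    }

    fn main() {
        let mut ecx = Encoder { buf: String::new(), per_def: Tables { kind: Vec::new() } };
        record!(ecx.per_def.kind[7] <- "EntryKind::Fn");
        assert_eq!(ecx.per_def.kind[0].0, 7);
    }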
+
 impl<'tcx> EncodeContext<'tcx> {
     fn emit_lazy_distance<T: ?Sized + LazyMeta>(
         &mut self,
         lazy: Lazy<T>,
     ) -> Result<(), <Self as Encoder>::Error> {
-        let min_end = lazy.position + T::min_size(lazy.meta);
+        let min_end = lazy.position.get() + T::min_size(lazy.meta);
         let distance = match self.lazy_state {
             LazyState::NoNode => bug!("emit_lazy_distance: outside of a metadata node"),
             LazyState::NodeStart(start) => {
+                let start = start.get();
                 assert!(min_end <= start);
                 start - min_end
             }
@@ -284,10 +328,10 @@ fn emit_lazy_distance<T: ?Sized + LazyMeta>(
                     "make sure that the calls to `lazy*` \
                     are in the same order as the metadata fields",
                 );
-                lazy.position - last_min_end
+                lazy.position.get() - last_min_end.get()
             }
         };
-        self.lazy_state = LazyState::Previous(min_end);
+        self.lazy_state = LazyState::Previous(NonZeroUsize::new(min_end).unwrap());
         self.emit_usize(distance)
     }
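The hunk above moves Lazy positions to NonZeroUsize, and the relative-distance encoding it adjusts deserves a concrete example. A small sketch of the same arithmetic with plain usizes (simplified stand-ins, not the real Lazy/LazyState types):

    #[derive(Clone, Copy)]
    enum LazyState {
        /// Position where the enclosing node's contents begin.
        NodeStart(usize),
        /// `min_end` of the previously referenced lazy value.
        Previous(usize),
    }

    /// Returns the distance to emit for one lazy reference, plus the next state.
    fn lazy_distance(state: LazyState, position: usize, min_size: usize) -> (usize, LazyState) {
        let min_end = position + min_size;
        let distance = match state {
            // First lazy in a node: gap between the end of this lazy value
            // and the start of the enclosing node.
            LazyState::NodeStart(start) => {
                assert!(min_end <= start);
                start - min_end
            }
            // Later lazies: gap from the end of the previous lazy value
            // to the start of this one.
            LazyState::Previous(last_min_end) => {
                assert!(last_min_end <= position);
                position - last_min_end
            }
        };
        (distance, LazyState::Previous(min_end))
    }

    fn main() {
        // Enclosing node starts at byte 100; two lazy values were written
        // earlier at positions 80 (min_size 8) and 92 (min_size 4).
        let (d1, state) = lazy_distance(LazyState::NodeStart(100), 80, 8);
        let (d2, _) = lazy_distance(state, 92, 4);
        assert_eq!((d1, d2), (12, 4));
    }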
 
@@ -295,42 +339,22 @@ fn lazy<T: ?Sized + LazyMeta>(
         &mut self,
         value: impl EncodeContentsForLazy<T>,
     ) -> Lazy<T> {
-        let pos = self.position();
+        let pos = NonZeroUsize::new(self.position()).unwrap();
 
         assert_eq!(self.lazy_state, LazyState::NoNode);
         self.lazy_state = LazyState::NodeStart(pos);
         let meta = value.encode_contents_for_lazy(self);
         self.lazy_state = LazyState::NoNode;
 
-        assert!(pos + <T>::min_size(meta) <= self.position());
+        assert!(pos.get() + <T>::min_size(meta) <= self.position());
 
         Lazy::from_position_and_meta(pos, meta)
     }
 
-    /// Emit the data for a `DefId` to the metadata. The function to
-    /// emit the data is `op`, and it will be given `data` as
-    /// arguments. This `record` function will call `op` to generate
-    /// the `Entry` (which may point to other encoded information)
-    /// and will then record the `Lazy<Entry>` for use in the index.
-    // FIXME(eddyb) remove this.
-    pub fn record<DATA>(&mut self,
-                        id: DefId,
-                        op: impl FnOnce(&mut Self, DATA) -> Entry<'tcx>,
-                        data: DATA)
-    {
-        assert!(id.is_local());
-
-        let entry = op(self, data);
-        let entry = self.lazy(entry);
-        self.entries_index.record(id, entry);
-    }
-
     fn encode_info_for_items(&mut self) {
         let krate = self.tcx.hir().krate();
         let vis = Spanned { span: syntax_pos::DUMMY_SP, node: hir::VisibilityKind::Public };
-        self.record(DefId::local(CRATE_DEF_INDEX),
-                     EncodeContext::encode_info_for_mod,
-                     (hir::CRATE_HIR_ID, &krate.module, &krate.attrs, &vis));
+        self.encode_info_for_mod(hir::CRATE_HIR_ID, &krate.module, &krate.attrs, &vis);
         krate.visit_all_item_likes(&mut self.as_deep_visitor());
         for macro_def in &krate.exported_macros {
             self.visit_macro_def(macro_def);
@@ -474,8 +498,26 @@ fn encode_crate_root(&mut self) -> Lazy<CrateRoot<'tcx>> {
 
 
         i = self.position();
-        let entries_index = self.entries_index.write_index(&mut self.opaque);
-        let entries_index_bytes = self.position() - i;
+        let per_def = LazyPerDefTables {
+            kind: self.per_def.kind.encode(&mut self.opaque),
+            visibility: self.per_def.visibility.encode(&mut self.opaque),
+            span: self.per_def.span.encode(&mut self.opaque),
+            attributes: self.per_def.attributes.encode(&mut self.opaque),
+            children: self.per_def.children.encode(&mut self.opaque),
+            stability: self.per_def.stability.encode(&mut self.opaque),
+            deprecation: self.per_def.deprecation.encode(&mut self.opaque),
+
+            ty: self.per_def.ty.encode(&mut self.opaque),
+            inherent_impls: self.per_def.inherent_impls.encode(&mut self.opaque),
+            variances: self.per_def.variances.encode(&mut self.opaque),
+            generics: self.per_def.generics.encode(&mut self.opaque),
+            predicates: self.per_def.predicates.encode(&mut self.opaque),
+            predicates_defined_on: self.per_def.predicates_defined_on.encode(&mut self.opaque),
+
+            mir: self.per_def.mir.encode(&mut self.opaque),
+            promoted_mir: self.per_def.promoted_mir.encode(&mut self.opaque),
+        };
+        let per_def_bytes = self.position() - i;
 
         // Encode the proc macro data
         i = self.position();
@@ -534,7 +576,7 @@ fn encode_crate_root(&mut self) -> Lazy<CrateRoot<'tcx>> {
             impls,
             exported_symbols,
             interpret_alloc_index,
-            entries_index,
+            per_def,
         });
 
         let total_bytes = self.position();
@@ -559,7 +601,7 @@ fn encode_crate_root(&mut self) -> Lazy<CrateRoot<'tcx>> {
             println!("  def-path table bytes: {}", def_path_table_bytes);
             println!(" proc-macro-data-bytes: {}", proc_macro_data_bytes);
             println!("            item bytes: {}", item_bytes);
-            println!("   entries index bytes: {}", entries_index_bytes);
+            println!("   per-def table bytes: {}", per_def_bytes);
             println!("            zero bytes: {}", zero_bytes);
             println!("           total bytes: {}", total_bytes);
         }
@@ -569,23 +611,21 @@ fn encode_crate_root(&mut self) -> Lazy<CrateRoot<'tcx>> {
 }
 
 impl EncodeContext<'tcx> {
-    fn encode_variances_of(&mut self, def_id: DefId) -> Lazy<[ty::Variance]> {
+    fn encode_variances_of(&mut self, def_id: DefId) {
         debug!("EncodeContext::encode_variances_of({:?})", def_id);
-        let tcx = self.tcx;
-        self.lazy(&tcx.variances_of(def_id)[..])
+        record!(self.per_def.variances[def_id] <- &self.tcx.variances_of(def_id)[..]);
     }
 
-    fn encode_item_type(&mut self, def_id: DefId) -> Lazy<Ty<'tcx>> {
-        let tcx = self.tcx;
-        let ty = tcx.type_of(def_id);
-        debug!("EncodeContext::encode_item_type({:?}) => {:?}", def_id, ty);
-        self.lazy(ty)
+    fn encode_item_type(&mut self, def_id: DefId) {
+        debug!("EncodeContext::encode_item_type({:?})", def_id);
+        record!(self.per_def.ty[def_id] <- self.tcx.type_of(def_id));
     }
 
     fn encode_enum_variant_info(
         &mut self,
-        (enum_did, index): (DefId, VariantIdx),
-    ) -> Entry<'tcx> {
+        enum_did: DefId,
+        index: VariantIdx,
+    ) {
         let tcx = self.tcx;
         let def = tcx.adt_def(enum_did);
         let variant = &def.variants[index];
@@ -607,38 +647,32 @@ fn encode_enum_variant_info(
         let enum_id = tcx.hir().as_local_hir_id(enum_did).unwrap();
         let enum_vis = &tcx.hir().expect_item(enum_id).vis;
 
-        Entry {
-            kind: EntryKind::Variant(self.lazy(data)),
-            visibility: self.lazy(ty::Visibility::from_hir(enum_vis, enum_id, tcx)),
-            span: self.lazy(tcx.def_span(def_id)),
-            attributes: self.encode_attributes(&tcx.get_attrs(def_id)),
-            children: self.lazy(variant.fields.iter().map(|f| {
-                assert!(f.did.is_local());
-                f.did.index
-            })),
-            stability: self.encode_stability(def_id),
-            deprecation: self.encode_deprecation(def_id),
-
-            ty: Some(self.encode_item_type(def_id)),
-            inherent_impls: Lazy::empty(),
-            variances: if variant.ctor_kind == CtorKind::Fn {
-                self.encode_variances_of(def_id)
-            } else {
-                Lazy::empty()
-            },
-            generics: Some(self.encode_generics(def_id)),
-            predicates: Some(self.encode_predicates(def_id)),
-            predicates_defined_on: None,
-
-            mir: self.encode_optimized_mir(def_id),
-            promoted_mir: self.encode_promoted_mir(def_id),
+        record!(self.per_def.kind[def_id] <- EntryKind::Variant(self.lazy(data)));
+        record!(self.per_def.visibility[def_id] <-
+            ty::Visibility::from_hir(enum_vis, enum_id, self.tcx));
+        record!(self.per_def.span[def_id] <- self.tcx.def_span(def_id));
+        record!(self.per_def.attributes[def_id] <- &self.tcx.get_attrs(def_id)[..]);
+        record!(self.per_def.children[def_id] <- variant.fields.iter().map(|f| {
+            assert!(f.did.is_local());
+            f.did.index
+        }));
+        self.encode_stability(def_id);
+        self.encode_deprecation(def_id);
+        self.encode_item_type(def_id);
+        if variant.ctor_kind == CtorKind::Fn {
+            self.encode_variances_of(def_id);
         }
+        self.encode_generics(def_id);
+        self.encode_predicates(def_id);
+        self.encode_optimized_mir(def_id);
+        self.encode_promoted_mir(def_id);
     }
 
     fn encode_enum_variant_ctor(
         &mut self,
-        (enum_did, index): (DefId, VariantIdx),
-    ) -> Entry<'tcx> {
+        enum_did: DefId,
+        index: VariantIdx,
+    ) {
         let tcx = self.tcx;
         let def = tcx.adt_def(enum_did);
         let variant = &def.variants[index];
@@ -665,35 +699,28 @@ fn encode_enum_variant_ctor(
             ctor_vis = ty::Visibility::Restricted(DefId::local(CRATE_DEF_INDEX));
         }
 
-        Entry {
-            kind: EntryKind::Variant(self.lazy(data)),
-            visibility: self.lazy(ctor_vis),
-            span: self.lazy(tcx.def_span(def_id)),
-            attributes: Lazy::empty(),
-            children: Lazy::empty(),
-            stability: self.encode_stability(def_id),
-            deprecation: self.encode_deprecation(def_id),
-
-            ty: Some(self.encode_item_type(def_id)),
-            inherent_impls: Lazy::empty(),
-            variances: if variant.ctor_kind == CtorKind::Fn {
-                self.encode_variances_of(def_id)
-            } else {
-                Lazy::empty()
-            },
-            generics: Some(self.encode_generics(def_id)),
-            predicates: Some(self.encode_predicates(def_id)),
-            predicates_defined_on: None,
-
-            mir: self.encode_optimized_mir(def_id),
-            promoted_mir: self.encode_promoted_mir(def_id),
+        record!(self.per_def.kind[def_id] <- EntryKind::Variant(self.lazy(data)));
+        record!(self.per_def.visibility[def_id] <- ctor_vis);
+        record!(self.per_def.span[def_id] <- self.tcx.def_span(def_id));
+        self.encode_stability(def_id);
+        self.encode_deprecation(def_id);
+        self.encode_item_type(def_id);
+        if variant.ctor_kind == CtorKind::Fn {
+            self.encode_variances_of(def_id);
         }
+        self.encode_generics(def_id);
+        self.encode_predicates(def_id);
+        self.encode_optimized_mir(def_id);
+        self.encode_promoted_mir(def_id);
     }
 
     fn encode_info_for_mod(
         &mut self,
-        (id, md, attrs, vis): (hir::HirId, &hir::Mod, &[ast::Attribute], &hir::Visibility),
-    ) -> Entry<'tcx> {
+        id: hir::HirId,
+        md: &hir::Mod,
+        attrs: &[ast::Attribute],
+        vis: &hir::Visibility,
+    ) {
         let tcx = self.tcx;
         let def_id = tcx.hir().local_def_id(id);
         debug!("EncodeContext::encode_info_for_mod({:?})", def_id);
@@ -705,33 +732,23 @@ fn encode_info_for_mod(
             },
         };
 
-        Entry {
-            kind: EntryKind::Mod(self.lazy(data)),
-            visibility: self.lazy(ty::Visibility::from_hir(vis, id, tcx)),
-            span: self.lazy(tcx.def_span(def_id)),
-            attributes: self.encode_attributes(attrs),
-            children: self.lazy(md.item_ids.iter().map(|item_id| {
-                tcx.hir().local_def_id(item_id.id).index
-            })),
-            stability: self.encode_stability(def_id),
-            deprecation: self.encode_deprecation(def_id),
-
-            ty: None,
-            inherent_impls: Lazy::empty(),
-            variances: Lazy::empty(),
-            generics: None,
-            predicates: None,
-            predicates_defined_on: None,
-
-            mir: None,
-            promoted_mir: None,
-        }
+        record!(self.per_def.kind[def_id] <- EntryKind::Mod(self.lazy(data)));
+        record!(self.per_def.visibility[def_id] <- ty::Visibility::from_hir(vis, id, self.tcx));
+        record!(self.per_def.span[def_id] <- self.tcx.def_span(def_id));
+        record!(self.per_def.attributes[def_id] <- attrs);
+        record!(self.per_def.children[def_id] <- md.item_ids.iter().map(|item_id| {
+            tcx.hir().local_def_id(item_id.id).index
+        }));
+        self.encode_stability(def_id);
+        self.encode_deprecation(def_id);
     }
 
     fn encode_field(
         &mut self,
-        (adt_def_id, variant_index, field_index): (DefId, VariantIdx, usize),
-    ) -> Entry<'tcx> {
+        adt_def_id: DefId,
+        variant_index: VariantIdx,
+        field_index: usize,
+    ) {
         let tcx = self.tcx;
         let variant = &tcx.adt_def(adt_def_id).variants[variant_index];
         let field = &variant.fields[field_index];
@@ -742,28 +759,18 @@ fn encode_field(
         let variant_id = tcx.hir().as_local_hir_id(variant.def_id).unwrap();
         let variant_data = tcx.hir().expect_variant_data(variant_id);
 
-        Entry {
-            kind: EntryKind::Field,
-            visibility: self.lazy(field.vis),
-            span: self.lazy(tcx.def_span(def_id)),
-            attributes: self.encode_attributes(&variant_data.fields()[field_index].attrs),
-            children: Lazy::empty(),
-            stability: self.encode_stability(def_id),
-            deprecation: self.encode_deprecation(def_id),
-
-            ty: Some(self.encode_item_type(def_id)),
-            inherent_impls: Lazy::empty(),
-            variances: Lazy::empty(),
-            generics: Some(self.encode_generics(def_id)),
-            predicates: Some(self.encode_predicates(def_id)),
-            predicates_defined_on: None,
-
-            mir: None,
-            promoted_mir: None,
-        }
+        record!(self.per_def.kind[def_id] <- EntryKind::Field);
+        record!(self.per_def.visibility[def_id] <- field.vis);
+        record!(self.per_def.span[def_id] <- self.tcx.def_span(def_id));
+        record!(self.per_def.attributes[def_id] <- &variant_data.fields()[field_index].attrs);
+        self.encode_stability(def_id);
+        self.encode_deprecation(def_id);
+        self.encode_item_type(def_id);
+        self.encode_generics(def_id);
+        self.encode_predicates(def_id);
     }
 
-    fn encode_struct_ctor(&mut self, (adt_def_id, def_id): (DefId, DefId)) -> Entry<'tcx> {
+    fn encode_struct_ctor(&mut self, adt_def_id: DefId, def_id: DefId) {
         debug!("EncodeContext::encode_struct_ctor({:?})", def_id);
         let tcx = self.tcx;
         let adt_def = tcx.adt_def(adt_def_id);
@@ -797,52 +804,38 @@ fn encode_struct_ctor(&mut self, (adt_def_id, def_id): (DefId, DefId)) -> Entry<
             ctor_vis = ty::Visibility::Restricted(DefId::local(CRATE_DEF_INDEX));
         }
 
-        let repr_options = get_repr_options(tcx, adt_def_id);
-
-        Entry {
-            kind: EntryKind::Struct(self.lazy(data), repr_options),
-            visibility: self.lazy(ctor_vis),
-            span: self.lazy(tcx.def_span(def_id)),
-            attributes: Lazy::empty(),
-            children: Lazy::empty(),
-            stability: self.encode_stability(def_id),
-            deprecation: self.encode_deprecation(def_id),
-
-            ty: Some(self.encode_item_type(def_id)),
-            inherent_impls: Lazy::empty(),
-            variances: if variant.ctor_kind == CtorKind::Fn {
-                self.encode_variances_of(def_id)
-            } else {
-                Lazy::empty()
-            },
-            generics: Some(self.encode_generics(def_id)),
-            predicates: Some(self.encode_predicates(def_id)),
-            predicates_defined_on: None,
-
-            mir: self.encode_optimized_mir(def_id),
-            promoted_mir: self.encode_promoted_mir(def_id),
+        record!(self.per_def.kind[def_id] <- EntryKind::Struct(self.lazy(data), adt_def.repr));
+        record!(self.per_def.visibility[def_id] <- ctor_vis);
+        record!(self.per_def.span[def_id] <- self.tcx.def_span(def_id));
+        self.encode_stability(def_id);
+        self.encode_deprecation(def_id);
+        self.encode_item_type(def_id);
+        if variant.ctor_kind == CtorKind::Fn {
+            self.encode_variances_of(def_id);
         }
+        self.encode_generics(def_id);
+        self.encode_predicates(def_id);
+        self.encode_optimized_mir(def_id);
+        self.encode_promoted_mir(def_id);
     }
 
-    fn encode_generics(&mut self, def_id: DefId) -> Lazy<ty::Generics> {
+    fn encode_generics(&mut self, def_id: DefId) {
         debug!("EncodeContext::encode_generics({:?})", def_id);
-        let tcx = self.tcx;
-        self.lazy(tcx.generics_of(def_id))
+        record!(self.per_def.generics[def_id] <- self.tcx.generics_of(def_id));
     }
 
-    fn encode_predicates(&mut self, def_id: DefId) -> Lazy<ty::GenericPredicates<'tcx>> {
+    fn encode_predicates(&mut self, def_id: DefId) {
         debug!("EncodeContext::encode_predicates({:?})", def_id);
-        let tcx = self.tcx;
-        self.lazy(&*tcx.predicates_of(def_id))
+        record!(self.per_def.predicates[def_id] <- self.tcx.predicates_of(def_id));
     }
 
-    fn encode_predicates_defined_on(&mut self, def_id: DefId) -> Lazy<ty::GenericPredicates<'tcx>> {
+    fn encode_predicates_defined_on(&mut self, def_id: DefId) {
         debug!("EncodeContext::encode_predicates_defined_on({:?})", def_id);
-        let tcx = self.tcx;
-        self.lazy(&*tcx.predicates_defined_on(def_id))
+        record!(self.per_def.predicates_defined_on[def_id] <-
+            self.tcx.predicates_defined_on(def_id))
     }
 
-    fn encode_info_for_trait_item(&mut self, def_id: DefId) -> Entry<'tcx> {
+    fn encode_info_for_trait_item(&mut self, def_id: DefId) {
         debug!("EncodeContext::encode_info_for_trait_item({:?})", def_id);
         let tcx = self.tcx;
 
@@ -859,7 +852,7 @@ fn encode_info_for_trait_item(&mut self, def_id: DefId) -> Entry<'tcx> {
                 span_bug!(ast_item.span, "traits cannot have final items"),
         };
 
-        let kind = match trait_item.kind {
+        record!(self.per_def.kind[def_id] <- match trait_item.kind {
             ty::AssocKind::Const => {
                 let rendered =
                     hir::print::to_string(self.tcx.hir(), |s| s.print_trait_item(ast_item));
@@ -868,7 +861,7 @@ fn encode_info_for_trait_item(&mut self, def_id: DefId) -> Entry<'tcx> {
                 EntryKind::AssocConst(container, ConstQualif { mir: 0 }, rendered_const)
             }
             ty::AssocKind::Method => {
-                let fn_data = if let hir::TraitItemKind::Method(method_sig, m) = &ast_item.kind {
+                let fn_data = if let hir::TraitItemKind::Method(m_sig, m) = &ast_item.kind {
                     let param_names = match *m {
                         hir::TraitMethod::Required(ref names) => {
                             self.encode_fn_param_names(names)
@@ -878,10 +871,10 @@ fn encode_info_for_trait_item(&mut self, def_id: DefId) -> Entry<'tcx> {
                         }
                     };
                     FnData {
-                        asyncness: method_sig.header.asyncness,
+                        asyncness: m_sig.header.asyncness,
                         constness: hir::Constness::NotConst,
                         param_names,
-                        sig: self.lazy(&tcx.fn_sig(def_id)),
+                        sig: self.lazy(tcx.fn_sig(def_id)),
                     }
                 } else {
                     bug!()
@@ -894,44 +887,31 @@ fn encode_info_for_trait_item(&mut self, def_id: DefId) -> Entry<'tcx> {
             }
             ty::AssocKind::Type => EntryKind::AssocType(container),
             ty::AssocKind::OpaqueTy => span_bug!(ast_item.span, "opaque type in trait"),
-        };
-
-        Entry {
-            kind,
-            visibility: self.lazy(trait_item.vis),
-            span: self.lazy(ast_item.span),
-            attributes: self.encode_attributes(&ast_item.attrs),
-            children: Lazy::empty(),
-            stability: self.encode_stability(def_id),
-            deprecation: self.encode_deprecation(def_id),
-
-            ty: match trait_item.kind {
-                ty::AssocKind::Const |
-                ty::AssocKind::Method => {
-                    Some(self.encode_item_type(def_id))
-                }
-                ty::AssocKind::Type => {
-                    if trait_item.defaultness.has_value() {
-                        Some(self.encode_item_type(def_id))
-                    } else {
-                        None
-                    }
+        });
+        record!(self.per_def.visibility[def_id] <- trait_item.vis);
+        record!(self.per_def.span[def_id] <- ast_item.span);
+        record!(self.per_def.attributes[def_id] <- &ast_item.attrs);
+        self.encode_stability(def_id);
+        self.encode_deprecation(def_id);
+        match trait_item.kind {
+            ty::AssocKind::Const |
+            ty::AssocKind::Method => {
+                self.encode_item_type(def_id);
+            }
+            ty::AssocKind::Type => {
+                if trait_item.defaultness.has_value() {
+                    self.encode_item_type(def_id);
                 }
-                ty::AssocKind::OpaqueTy => unreachable!(),
-            },
-            inherent_impls: Lazy::empty(),
-            variances: if trait_item.kind == ty::AssocKind::Method {
-                self.encode_variances_of(def_id)
-            } else {
-                Lazy::empty()
-            },
-            generics: Some(self.encode_generics(def_id)),
-            predicates: Some(self.encode_predicates(def_id)),
-            predicates_defined_on: None,
-
-            mir: self.encode_optimized_mir(def_id),
-            promoted_mir: self.encode_promoted_mir(def_id),
+            }
+            ty::AssocKind::OpaqueTy => unreachable!(),
+        }
+        if trait_item.kind == ty::AssocKind::Method {
+            self.encode_variances_of(def_id);
         }
+        self.encode_generics(def_id);
+        self.encode_predicates(def_id);
+        self.encode_optimized_mir(def_id);
+        self.encode_promoted_mir(def_id);
     }
 
     fn metadata_output_only(&self) -> bool {
@@ -939,7 +919,7 @@ fn metadata_output_only(&self) -> bool {
         !self.tcx.sess.opts.output_types.should_codegen()
     }
 
-    fn encode_info_for_impl_item(&mut self, def_id: DefId) -> Entry<'tcx> {
+    fn encode_info_for_impl_item(&mut self, def_id: DefId) {
         debug!("EncodeContext::encode_info_for_impl_item({:?})", def_id);
         let tcx = self.tcx;
 
@@ -954,7 +934,7 @@ fn encode_info_for_impl_item(&mut self, def_id: DefId) -> Entry<'tcx> {
                 span_bug!(ast_item.span, "impl items always have values (currently)"),
         };
 
-        let kind = match impl_item.kind {
+        record!(self.per_def.kind[def_id] <- match impl_item.kind {
             ty::AssocKind::Const => {
                 if let hir::ImplItemKind::Const(_, body_id) = ast_item.kind {
                     let mir = self.tcx.at(ast_item.span).mir_const_qualif(def_id).0;
@@ -972,7 +952,7 @@ fn encode_info_for_impl_item(&mut self, def_id: DefId) -> Entry<'tcx> {
                         asyncness: sig.header.asyncness,
                         constness: sig.header.constness,
                         param_names: self.encode_fn_param_names_for_body(body),
-                        sig: self.lazy(&tcx.fn_sig(def_id)),
+                        sig: self.lazy(tcx.fn_sig(def_id)),
                     }
                 } else {
                     bug!()
@@ -985,8 +965,18 @@ fn encode_info_for_impl_item(&mut self, def_id: DefId) -> Entry<'tcx> {
             }
             ty::AssocKind::OpaqueTy => EntryKind::AssocOpaqueTy(container),
             ty::AssocKind::Type => EntryKind::AssocType(container)
-        };
-
+        });
+        record!(self.per_def.visibility[def_id] <- impl_item.vis);
+        record!(self.per_def.span[def_id] <- ast_item.span);
+        record!(self.per_def.attributes[def_id] <- &ast_item.attrs);
+        self.encode_stability(def_id);
+        self.encode_deprecation(def_id);
+        self.encode_item_type(def_id);
+        if impl_item.kind == ty::AssocKind::Method {
+            self.encode_variances_of(def_id);
+        }
+        self.encode_generics(def_id);
+        self.encode_predicates(def_id);
         let mir = match ast_item.kind {
             hir::ImplItemKind::Const(..) => true,
             hir::ImplItemKind::Method(ref sig, _) => {
@@ -1001,29 +991,9 @@ fn encode_info_for_impl_item(&mut self, def_id: DefId) -> Entry<'tcx> {
             hir::ImplItemKind::OpaqueTy(..) |
             hir::ImplItemKind::TyAlias(..) => false,
         };
-
-        Entry {
-            kind,
-            visibility: self.lazy(impl_item.vis),
-            span: self.lazy(ast_item.span),
-            attributes: self.encode_attributes(&ast_item.attrs),
-            children: Lazy::empty(),
-            stability: self.encode_stability(def_id),
-            deprecation: self.encode_deprecation(def_id),
-
-            ty: Some(self.encode_item_type(def_id)),
-            inherent_impls: Lazy::empty(),
-            variances: if impl_item.kind == ty::AssocKind::Method {
-                self.encode_variances_of(def_id)
-            } else {
-                Lazy::empty()
-            },
-            generics: Some(self.encode_generics(def_id)),
-            predicates: Some(self.encode_predicates(def_id)),
-            predicates_defined_on: None,
-
-            mir: if mir { self.encode_optimized_mir(def_id) } else { None },
-            promoted_mir: if mir { self.encode_promoted_mir(def_id) } else { None },
+        if mir {
+            self.encode_optimized_mir(def_id);
+            self.encode_promoted_mir(def_id);
         }
     }
 
@@ -1044,51 +1014,44 @@ fn encode_fn_param_names(&mut self, param_names: &[ast::Ident]) -> Lazy<[ast::Na
         self.lazy(param_names.iter().map(|ident| ident.name))
     }
 
-    fn encode_optimized_mir(&mut self, def_id: DefId) -> Option<Lazy<mir::Body<'tcx>>> {
+    fn encode_optimized_mir(&mut self, def_id: DefId) {
         debug!("EntryBuilder::encode_mir({:?})", def_id);
         if self.tcx.mir_keys(LOCAL_CRATE).contains(&def_id) {
-            let mir = self.tcx.optimized_mir(def_id);
-            Some(self.lazy(mir))
-        } else {
-            None
+            record!(self.per_def.mir[def_id] <- self.tcx.optimized_mir(def_id));
         }
     }
 
-    fn encode_promoted_mir(
-        &mut self,
-        def_id: DefId,
-    ) -> Option<Lazy<IndexVec<mir::Promoted, mir::Body<'tcx>>>> {
+    fn encode_promoted_mir(&mut self, def_id: DefId) {
         debug!("EncodeContext::encode_promoted_mir({:?})", def_id);
         if self.tcx.mir_keys(LOCAL_CRATE).contains(&def_id) {
-            let promoted = self.tcx.promoted_mir(def_id);
-            Some(self.lazy(promoted))
-        } else {
-            None
+            record!(self.per_def.promoted_mir[def_id] <- self.tcx.promoted_mir(def_id));
         }
     }
 
     // Encodes the inherent implementations of a structure, enumeration, or trait.
-    fn encode_inherent_implementations(&mut self, def_id: DefId) -> Lazy<[DefIndex]> {
+    fn encode_inherent_implementations(&mut self, def_id: DefId) {
         debug!("EncodeContext::encode_inherent_implementations({:?})", def_id);
         let implementations = self.tcx.inherent_impls(def_id);
-        if implementations.is_empty() {
-            Lazy::empty()
-        } else {
-            self.lazy(implementations.iter().map(|&def_id| {
+        if !implementations.is_empty() {
+            record!(self.per_def.inherent_impls[def_id] <- implementations.iter().map(|&def_id| {
                 assert!(def_id.is_local());
                 def_id.index
-            }))
+            }));
         }
     }
 
-    fn encode_stability(&mut self, def_id: DefId) -> Option<Lazy<attr::Stability>> {
+    fn encode_stability(&mut self, def_id: DefId) {
         debug!("EncodeContext::encode_stability({:?})", def_id);
-        self.tcx.lookup_stability(def_id).map(|stab| self.lazy(stab))
+        if let Some(stab) = self.tcx.lookup_stability(def_id) {
+            record!(self.per_def.stability[def_id] <- stab)
+        }
     }
 
-    fn encode_deprecation(&mut self, def_id: DefId) -> Option<Lazy<attr::Deprecation>> {
+    fn encode_deprecation(&mut self, def_id: DefId) {
         debug!("EncodeContext::encode_deprecation({:?})", def_id);
-        self.tcx.lookup_deprecation(def_id).map(|depr| self.lazy(depr))
+        if let Some(depr) = self.tcx.lookup_deprecation(def_id) {
+            record!(self.per_def.deprecation[def_id] <- depr);
+        }
     }
 
     fn encode_rendered_const_for_body(&mut self, body_id: hir::BodyId) -> Lazy<RenderedConst> {
@@ -1098,16 +1061,16 @@ fn encode_rendered_const_for_body(&mut self, body_id: hir::BodyId) -> Lazy<Rende
         self.lazy(rendered_const)
     }
 
-    fn encode_info_for_item(&mut self, (def_id, item): (DefId, &'tcx hir::Item)) -> Entry<'tcx> {
+    fn encode_info_for_item(&mut self, def_id: DefId, item: &'tcx hir::Item) {
         let tcx = self.tcx;
 
         debug!("EncodeContext::encode_info_for_item({:?})", def_id);
 
-        let kind = match item.kind {
+        record!(self.per_def.kind[def_id] <- match item.kind {
             hir::ItemKind::Static(_, hir::MutMutable, _) => EntryKind::MutStatic,
             hir::ItemKind::Static(_, hir::MutImmutable, _) => EntryKind::ImmStatic,
             hir::ItemKind::Const(_, body_id) => {
-                let mir = tcx.at(item.span).mir_const_qualif(def_id).0;
+                let mir = self.tcx.at(item.span).mir_const_qualif(def_id).0;
                 EntryKind::Const(
                     ConstQualif { mir },
                     self.encode_rendered_const_for_body(body_id)
@@ -1124,48 +1087,48 @@ fn encode_info_for_item(&mut self, (def_id, item): (DefId, &'tcx hir::Item)) ->
                 EntryKind::Fn(self.lazy(data))
             }
             hir::ItemKind::Mod(ref m) => {
-                return self.encode_info_for_mod((item.hir_id, m, &item.attrs, &item.vis));
+                return self.encode_info_for_mod(item.hir_id, m, &item.attrs, &item.vis);
             }
             hir::ItemKind::ForeignMod(_) => EntryKind::ForeignMod,
             hir::ItemKind::GlobalAsm(..) => EntryKind::GlobalAsm,
             hir::ItemKind::TyAlias(..) => EntryKind::Type,
             hir::ItemKind::OpaqueTy(..) => EntryKind::OpaqueTy,
-            hir::ItemKind::Enum(..) => EntryKind::Enum(get_repr_options(tcx, def_id)),
+            hir::ItemKind::Enum(..) => EntryKind::Enum(self.tcx.adt_def(def_id).repr),
             hir::ItemKind::Struct(ref struct_def, _) => {
-                let variant = tcx.adt_def(def_id).non_enum_variant();
+                let adt_def = self.tcx.adt_def(def_id);
+                let variant = adt_def.non_enum_variant();
 
                 // Encode def_ids for each field and method
                 // for methods, write all the stuff get_trait_method
                 // needs to know
-                let ctor = struct_def.ctor_hir_id()
-                    .map(|ctor_hir_id| tcx.hir().local_def_id(ctor_hir_id).index);
-
-                let repr_options = get_repr_options(tcx, def_id);
+                let ctor = struct_def.ctor_hir_id().map(|ctor_hir_id| {
+                    self.tcx.hir().local_def_id(ctor_hir_id).index
+                });
 
                 EntryKind::Struct(self.lazy(VariantData {
                     ctor_kind: variant.ctor_kind,
                     discr: variant.discr,
                     ctor,
                     ctor_sig: None,
-                }), repr_options)
+                }), adt_def.repr)
             }
             hir::ItemKind::Union(..) => {
-                let variant = tcx.adt_def(def_id).non_enum_variant();
-                let repr_options = get_repr_options(tcx, def_id);
+                let adt_def = self.tcx.adt_def(def_id);
+                let variant = adt_def.non_enum_variant();
 
                 EntryKind::Union(self.lazy(VariantData {
                     ctor_kind: variant.ctor_kind,
                     discr: variant.discr,
                     ctor: None,
                     ctor_sig: None,
-                }), repr_options)
+                }), adt_def.repr)
             }
             hir::ItemKind::Impl(_, _, defaultness, ..) => {
-                let trait_ref = tcx.impl_trait_ref(def_id);
-                let polarity = tcx.impl_polarity(def_id);
+                let trait_ref = self.tcx.impl_trait_ref(def_id);
+                let polarity = self.tcx.impl_polarity(def_id);
                 let parent = if let Some(trait_ref) = trait_ref {
-                    let trait_def = tcx.trait_def(trait_ref.def_id);
-                    trait_def.ancestors(tcx, def_id).nth(1).and_then(|node| {
+                    let trait_def = self.tcx.trait_def(trait_ref.def_id);
+                    trait_def.ancestors(self.tcx, def_id).nth(1).and_then(|node| {
                         match node {
                             specialization_graph::Node::Impl(parent) => Some(parent),
                             _ => None,
@@ -1179,8 +1142,8 @@ fn encode_info_for_item(&mut self, (def_id, item): (DefId, &'tcx hir::Item)) ->
                 // "unsized info", else just store None
                 let coerce_unsized_info =
                     trait_ref.and_then(|t| {
-                        if Some(t.def_id) == tcx.lang_items().coerce_unsized_trait() {
-                            Some(tcx.at(item.span).coerce_unsized_info(def_id))
+                        if Some(t.def_id) == self.tcx.lang_items().coerce_unsized_trait() {
+                            Some(self.tcx.at(item.span).coerce_unsized_info(def_id))
                         } else {
                             None
                         }
@@ -1197,27 +1160,115 @@ fn encode_info_for_item(&mut self, (def_id, item): (DefId, &'tcx hir::Item)) ->
                 EntryKind::Impl(self.lazy(data))
             }
             hir::ItemKind::Trait(..) => {
-                let trait_def = tcx.trait_def(def_id);
+                let trait_def = self.tcx.trait_def(def_id);
                 let data = TraitData {
                     unsafety: trait_def.unsafety,
                     paren_sugar: trait_def.paren_sugar,
-                    has_auto_impl: tcx.trait_is_auto(def_id),
+                    has_auto_impl: self.tcx.trait_is_auto(def_id),
                     is_marker: trait_def.is_marker,
-                    super_predicates: self.lazy(&*tcx.super_predicates_of(def_id)),
+                    super_predicates: self.lazy(tcx.super_predicates_of(def_id)),
                 };
 
                 EntryKind::Trait(self.lazy(data))
             }
             hir::ItemKind::TraitAlias(..) => {
                 let data = TraitAliasData {
-                    super_predicates: self.lazy(&*tcx.super_predicates_of(def_id)),
+                    super_predicates: self.lazy(tcx.super_predicates_of(def_id)),
                 };
 
                 EntryKind::TraitAlias(self.lazy(data))
             }
             hir::ItemKind::ExternCrate(_) |
             hir::ItemKind::Use(..) => bug!("cannot encode info for item {:?}", item),
-        };
+        });
+        record!(self.per_def.visibility[def_id] <-
+            ty::Visibility::from_hir(&item.vis, item.hir_id, tcx));
+        record!(self.per_def.span[def_id] <- item.span);
+        record!(self.per_def.attributes[def_id] <- &item.attrs);
+        // FIXME(eddyb) there should be a nicer way to do this.
+        match item.kind {
+            hir::ItemKind::ForeignMod(ref fm) => record!(self.per_def.children[def_id] <-
+                fm.items
+                    .iter()
+                    .map(|foreign_item| tcx.hir().local_def_id(
+                        foreign_item.hir_id).index)
+            ),
+            hir::ItemKind::Enum(..) => record!(self.per_def.children[def_id] <-
+                self.tcx.adt_def(def_id).variants.iter().map(|v| {
+                    assert!(v.def_id.is_local());
+                    v.def_id.index
+                })
+            ),
+            hir::ItemKind::Struct(..) |
+            hir::ItemKind::Union(..) => record!(self.per_def.children[def_id] <-
+                self.tcx.adt_def(def_id).non_enum_variant().fields.iter().map(|f| {
+                    assert!(f.did.is_local());
+                    f.did.index
+                })
+            ),
+            hir::ItemKind::Impl(..) |
+            hir::ItemKind::Trait(..) => {
+                let associated_item_def_ids = self.tcx.associated_item_def_ids(def_id);
+                record!(self.per_def.children[def_id] <-
+                    associated_item_def_ids.iter().map(|&def_id| {
+                        assert!(def_id.is_local());
+                        def_id.index
+                    })
+                );
+            }
+            _ => {}
+        }
+        self.encode_stability(def_id);
+        self.encode_deprecation(def_id);
+        match item.kind {
+            hir::ItemKind::Static(..) |
+            hir::ItemKind::Const(..) |
+            hir::ItemKind::Fn(..) |
+            hir::ItemKind::TyAlias(..) |
+            hir::ItemKind::OpaqueTy(..) |
+            hir::ItemKind::Enum(..) |
+            hir::ItemKind::Struct(..) |
+            hir::ItemKind::Union(..) |
+            hir::ItemKind::Impl(..) => self.encode_item_type(def_id),
+            _ => {}
+        }
+        self.encode_inherent_implementations(def_id);
+        match item.kind {
+            hir::ItemKind::Enum(..) |
+            hir::ItemKind::Struct(..) |
+            hir::ItemKind::Union(..) |
+            hir::ItemKind::Fn(..) => self.encode_variances_of(def_id),
+            _ => {}
+        }
+        match item.kind {
+            hir::ItemKind::Static(..) |
+            hir::ItemKind::Const(..) |
+            hir::ItemKind::Fn(..) |
+            hir::ItemKind::TyAlias(..) |
+            hir::ItemKind::Enum(..) |
+            hir::ItemKind::Struct(..) |
+            hir::ItemKind::Union(..) |
+            hir::ItemKind::Impl(..) |
+            hir::ItemKind::OpaqueTy(..) |
+            hir::ItemKind::Trait(..) |
+            hir::ItemKind::TraitAlias(..) => {
+                self.encode_generics(def_id);
+                self.encode_predicates(def_id);
+            }
+            _ => {}
+        }
+        // The only time that `predicates_defined_on` is used (on
+        // an external item) is for traits, during chalk lowering,
+        // so only encode it in that case as an efficiency
+        // hack. (No reason not to expand it in the future if
+        // necessary.)
+        match item.kind {
+            hir::ItemKind::Trait(..) |
+            hir::ItemKind::TraitAlias(..) => {
+                self.encode_predicates_defined_on(def_id);
+            }
+            _ => {} // not *wrong* for other kinds of items, but not needed
+        }
 
         let mir = match item.kind {
             hir::ItemKind::Static(..) | hir::ItemKind::Const(..) => true,
@@ -1232,188 +1283,48 @@ fn encode_info_for_item(&mut self, (def_id, item): (DefId, &'tcx hir::Item)) ->
             }
             _ => false,
         };
-
-        Entry {
-            kind,
-            visibility: self.lazy(ty::Visibility::from_hir(&item.vis, item.hir_id, tcx)),
-            span: self.lazy(item.span),
-            attributes: self.encode_attributes(&item.attrs),
-            children: match item.kind {
-                hir::ItemKind::ForeignMod(ref fm) => {
-                    self.lazy(fm.items
-                        .iter()
-                        .map(|foreign_item| tcx.hir().local_def_id(
-                            foreign_item.hir_id).index))
-                }
-                hir::ItemKind::Enum(..) => {
-                    let def = self.tcx.adt_def(def_id);
-                    self.lazy(def.variants.iter().map(|v| {
-                        assert!(v.def_id.is_local());
-                        v.def_id.index
-                    }))
-                }
-                hir::ItemKind::Struct(..) |
-                hir::ItemKind::Union(..) => {
-                    let def = self.tcx.adt_def(def_id);
-                    self.lazy(def.non_enum_variant().fields.iter().map(|f| {
-                        assert!(f.did.is_local());
-                        f.did.index
-                    }))
-                }
-                hir::ItemKind::Impl(..) |
-                hir::ItemKind::Trait(..) => {
-                    self.lazy(tcx.associated_item_def_ids(def_id).iter().map(|&def_id| {
-                        assert!(def_id.is_local());
-                        def_id.index
-                    }))
-                }
-                _ => Lazy::empty(),
-            },
-            stability: self.encode_stability(def_id),
-            deprecation: self.encode_deprecation(def_id),
-
-            ty: match item.kind {
-                hir::ItemKind::Static(..) |
-                hir::ItemKind::Const(..) |
-                hir::ItemKind::Fn(..) |
-                hir::ItemKind::TyAlias(..) |
-                hir::ItemKind::OpaqueTy(..) |
-                hir::ItemKind::Enum(..) |
-                hir::ItemKind::Struct(..) |
-                hir::ItemKind::Union(..) |
-                hir::ItemKind::Impl(..) => Some(self.encode_item_type(def_id)),
-                _ => None,
-            },
-            inherent_impls: self.encode_inherent_implementations(def_id),
-            variances: match item.kind {
-                hir::ItemKind::Enum(..) |
-                hir::ItemKind::Struct(..) |
-                hir::ItemKind::Union(..) |
-                hir::ItemKind::Fn(..) => self.encode_variances_of(def_id),
-                _ => Lazy::empty(),
-            },
-            generics: match item.kind {
-                hir::ItemKind::Static(..) |
-                hir::ItemKind::Const(..) |
-                hir::ItemKind::Fn(..) |
-                hir::ItemKind::TyAlias(..) |
-                hir::ItemKind::Enum(..) |
-                hir::ItemKind::Struct(..) |
-                hir::ItemKind::Union(..) |
-                hir::ItemKind::Impl(..) |
-                hir::ItemKind::OpaqueTy(..) |
-                hir::ItemKind::Trait(..) => Some(self.encode_generics(def_id)),
-                hir::ItemKind::TraitAlias(..) => Some(self.encode_generics(def_id)),
-                _ => None,
-            },
-            predicates: match item.kind {
-                hir::ItemKind::Static(..) |
-                hir::ItemKind::Const(..) |
-                hir::ItemKind::Fn(..) |
-                hir::ItemKind::TyAlias(..) |
-                hir::ItemKind::Enum(..) |
-                hir::ItemKind::Struct(..) |
-                hir::ItemKind::Union(..) |
-                hir::ItemKind::Impl(..) |
-                hir::ItemKind::OpaqueTy(..) |
-                hir::ItemKind::Trait(..) |
-                hir::ItemKind::TraitAlias(..) => Some(self.encode_predicates(def_id)),
-                _ => None,
-            },
-
-            // The only time that `predicates_defined_on` is used (on
-            // an external item) is for traits, during chalk lowering,
-            // so only encode it in that case as an efficiency
-            // hack. (No reason not to expand it in the future if
-            // necessary.)
-            predicates_defined_on: match item.kind {
-                hir::ItemKind::Trait(..) |
-                hir::ItemKind::TraitAlias(..) => Some(self.encode_predicates_defined_on(def_id)),
-                _ => None, // not *wrong* for other kinds of items, but not needed
-            },
-
-            mir: if mir { self.encode_optimized_mir(def_id) } else { None },
-            promoted_mir: if mir { self.encode_promoted_mir(def_id) } else { None },
+        if mir {
+            self.encode_optimized_mir(def_id);
+            self.encode_promoted_mir(def_id);
         }
     }
 
     /// Serialize the text of exported macros
-    fn encode_info_for_macro_def(&mut self, macro_def: &hir::MacroDef) -> Entry<'tcx> {
+    fn encode_info_for_macro_def(&mut self, macro_def: &hir::MacroDef) {
         use syntax::print::pprust;
         let def_id = self.tcx.hir().local_def_id(macro_def.hir_id);
-        Entry {
-            kind: EntryKind::MacroDef(self.lazy(MacroDef {
-                body: pprust::tts_to_string(macro_def.body.clone()),
-                legacy: macro_def.legacy,
-            })),
-            visibility: self.lazy(ty::Visibility::Public),
-            span: self.lazy(macro_def.span),
-            attributes: self.encode_attributes(&macro_def.attrs),
-            stability: self.encode_stability(def_id),
-            deprecation: self.encode_deprecation(def_id),
-
-            children: Lazy::empty(),
-            ty: None,
-            inherent_impls: Lazy::empty(),
-            variances: Lazy::empty(),
-            generics: None,
-            predicates: None,
-            predicates_defined_on: None,
-            mir: None,
-            promoted_mir: None,
-        }
+        record!(self.per_def.kind[def_id] <- EntryKind::MacroDef(self.lazy(MacroDef {
+            body: pprust::tts_to_string(macro_def.body.clone()),
+            legacy: macro_def.legacy,
+        })));
+        record!(self.per_def.visibility[def_id] <- ty::Visibility::Public);
+        record!(self.per_def.span[def_id] <- macro_def.span);
+        record!(self.per_def.attributes[def_id] <- &macro_def.attrs);
+        self.encode_stability(def_id);
+        self.encode_deprecation(def_id);
     }
 
     fn encode_info_for_generic_param(
         &mut self,
         def_id: DefId,
-        entry_kind: EntryKind<'tcx>,
+        kind: EntryKind<'tcx>,
         encode_type: bool,
-    ) -> Entry<'tcx> {
-        let tcx = self.tcx;
-        Entry {
-            kind: entry_kind,
-            visibility: self.lazy(ty::Visibility::Public),
-            span: self.lazy(tcx.def_span(def_id)),
-            attributes: Lazy::empty(),
-            children: Lazy::empty(),
-            stability: None,
-            deprecation: None,
-            ty: if encode_type { Some(self.encode_item_type(def_id)) } else { None },
-            inherent_impls: Lazy::empty(),
-            variances: Lazy::empty(),
-            generics: None,
-            predicates: None,
-            predicates_defined_on: None,
-
-            mir: None,
-            promoted_mir: None,
+    ) {
+        record!(self.per_def.kind[def_id] <- kind);
+        record!(self.per_def.visibility[def_id] <- ty::Visibility::Public);
+        record!(self.per_def.span[def_id] <- self.tcx.def_span(def_id));
+        if encode_type {
+            self.encode_item_type(def_id);
         }
     }
 
-    fn encode_info_for_ty_param(
-        &mut self,
-        (def_id, encode_type): (DefId, bool),
-    ) -> Entry<'tcx> {
-        debug!("EncodeContext::encode_info_for_ty_param({:?})", def_id);
-        self.encode_info_for_generic_param(def_id, EntryKind::TypeParam, encode_type)
-    }
-
-    fn encode_info_for_const_param(
-        &mut self,
-        def_id: DefId,
-    ) -> Entry<'tcx> {
-        debug!("EncodeContext::encode_info_for_const_param({:?})", def_id);
-        self.encode_info_for_generic_param(def_id, EntryKind::ConstParam, true)
-    }
-
-    fn encode_info_for_closure(&mut self, def_id: DefId) -> Entry<'tcx> {
+    fn encode_info_for_closure(&mut self, def_id: DefId) {
         debug!("EncodeContext::encode_info_for_closure({:?})", def_id);
-        let tcx = self.tcx;
 
         let tables = self.tcx.typeck_tables_of(def_id);
         let hir_id = self.tcx.hir().as_local_hir_id(def_id).unwrap();
-        let kind = match tables.node_type(hir_id).kind {
+
+        record!(self.per_def.kind[def_id] <- match tables.node_type(hir_id).kind {
             ty::Generator(def_id, ..) => {
                 let layout = self.tcx.generator_layout(def_id);
                 let data = GeneratorData {
@@ -1428,61 +1339,32 @@ fn encode_info_for_closure(&mut self, def_id: DefId) -> Entry<'tcx> {
                 EntryKind::Closure(self.lazy(data))
             }
 
-            _ => bug!("closure that is neither generator nor closure")
-        };
-
-        Entry {
-            kind,
-            visibility: self.lazy(ty::Visibility::Public),
-            span: self.lazy(tcx.def_span(def_id)),
-            attributes: self.encode_attributes(&tcx.get_attrs(def_id)),
-            children: Lazy::empty(),
-            stability: None,
-            deprecation: None,
-
-            ty: Some(self.encode_item_type(def_id)),
-            inherent_impls: Lazy::empty(),
-            variances: Lazy::empty(),
-            generics: Some(self.encode_generics(def_id)),
-            predicates: None,
-            predicates_defined_on: None,
-
-            mir: self.encode_optimized_mir(def_id),
-            promoted_mir: self.encode_promoted_mir(def_id),
-        }
+            _ => bug!("closure that is neither generator nor closure"),
+        });
+        record!(self.per_def.visibility[def_id] <- ty::Visibility::Public);
+        record!(self.per_def.span[def_id] <- self.tcx.def_span(def_id));
+        record!(self.per_def.attributes[def_id] <- &self.tcx.get_attrs(def_id)[..]);
+        self.encode_item_type(def_id);
+        self.encode_generics(def_id);
+        self.encode_optimized_mir(def_id);
+        self.encode_promoted_mir(def_id);
     }
 
-    fn encode_info_for_anon_const(&mut self, def_id: DefId) -> Entry<'tcx> {
+    fn encode_info_for_anon_const(&mut self, def_id: DefId) {
         debug!("EncodeContext::encode_info_for_anon_const({:?})", def_id);
-        let tcx = self.tcx;
-        let id = tcx.hir().as_local_hir_id(def_id).unwrap();
-        let body_id = tcx.hir().body_owned_by(id);
+        let id = self.tcx.hir().as_local_hir_id(def_id).unwrap();
+        let body_id = self.tcx.hir().body_owned_by(id);
         let const_data = self.encode_rendered_const_for_body(body_id);
-        let mir = tcx.mir_const_qualif(def_id).0;
-
-        Entry {
-            kind: EntryKind::Const(ConstQualif { mir }, const_data),
-            visibility: self.lazy(ty::Visibility::Public),
-            span: self.lazy(tcx.def_span(def_id)),
-            attributes: Lazy::empty(),
-            children: Lazy::empty(),
-            stability: None,
-            deprecation: None,
-
-            ty: Some(self.encode_item_type(def_id)),
-            inherent_impls: Lazy::empty(),
-            variances: Lazy::empty(),
-            generics: Some(self.encode_generics(def_id)),
-            predicates: Some(self.encode_predicates(def_id)),
-            predicates_defined_on: None,
-
-            mir: self.encode_optimized_mir(def_id),
-            promoted_mir: self.encode_promoted_mir(def_id),
-        }
-    }
+        let mir = self.tcx.mir_const_qualif(def_id).0;
 
-    fn encode_attributes(&mut self, attrs: &[ast::Attribute]) -> Lazy<[ast::Attribute]> {
-        self.lazy(attrs)
+        record!(self.per_def.kind[def_id] <- EntryKind::Const(ConstQualif { mir }, const_data));
+        record!(self.per_def.visibility[def_id] <- ty::Visibility::Public);
+        record!(self.per_def.span[def_id] <- self.tcx.def_span(def_id));
+        self.encode_item_type(def_id);
+        self.encode_generics(def_id);
+        self.encode_predicates(def_id);
+        self.encode_optimized_mir(def_id);
+        self.encode_promoted_mir(def_id);
     }
 
     fn encode_native_libraries(&mut self) -> Lazy<[NativeLibrary]> {
@@ -1656,14 +1538,16 @@ fn encode_dylib_dependency_formats(&mut self) -> Lazy<[Option<LinkagePreference>
         Lazy::empty()
     }
 
-    fn encode_info_for_foreign_item(&mut self,
-                                    (def_id, nitem): (DefId, &hir::ForeignItem))
-                                    -> Entry<'tcx> {
+    fn encode_info_for_foreign_item(
+        &mut self,
+        def_id: DefId,
+        nitem: &hir::ForeignItem,
+    ) {
         let tcx = self.tcx;
 
         debug!("EncodeContext::encode_info_for_foreign_item({:?})", def_id);
 
-        let kind = match nitem.kind {
+        record!(self.per_def.kind[def_id] <- match nitem.kind {
             hir::ForeignItemKind::Fn(_, ref names, _) => {
                 let data = FnData {
                     asyncness: hir::IsAsync::NotAsync,
@@ -1676,33 +1560,23 @@ fn encode_info_for_foreign_item(&mut self,
             hir::ForeignItemKind::Static(_, hir::MutMutable) => EntryKind::ForeignMutStatic,
             hir::ForeignItemKind::Static(_, hir::MutImmutable) => EntryKind::ForeignImmStatic,
             hir::ForeignItemKind::Type => EntryKind::ForeignType,
-        };
-
-        Entry {
-            kind,
-            visibility: self.lazy(ty::Visibility::from_hir(&nitem.vis, nitem.hir_id, tcx)),
-            span: self.lazy(nitem.span),
-            attributes: self.encode_attributes(&nitem.attrs),
-            children: Lazy::empty(),
-            stability: self.encode_stability(def_id),
-            deprecation: self.encode_deprecation(def_id),
-
-            ty: Some(self.encode_item_type(def_id)),
-            inherent_impls: Lazy::empty(),
-            variances: match nitem.kind {
-                hir::ForeignItemKind::Fn(..) => self.encode_variances_of(def_id),
-                _ => Lazy::empty(),
-            },
-            generics: Some(self.encode_generics(def_id)),
-            predicates: Some(self.encode_predicates(def_id)),
-            predicates_defined_on: None,
-
-            mir: None,
-            promoted_mir: None,
+        });
+        record!(self.per_def.visibility[def_id] <-
+            ty::Visibility::from_hir(&nitem.vis, nitem.hir_id, self.tcx));
+        record!(self.per_def.span[def_id] <- nitem.span);
+        record!(self.per_def.attributes[def_id] <- &nitem.attrs);
+        self.encode_stability(def_id);
+        self.encode_deprecation(def_id);
+        self.encode_item_type(def_id);
+        if let hir::ForeignItemKind::Fn(..) = nitem.kind {
+            self.encode_variances_of(def_id);
         }
+        self.encode_generics(def_id);
+        self.encode_predicates(def_id);
     }
 }
 
+// FIXME(eddyb) make metadata encoding walk over all definitions, instead of HIR.
 impl Visitor<'tcx> for EncodeContext<'tcx> {
     fn nested_visit_map<'this>(&'this mut self) -> NestedVisitorMap<'this, 'tcx> {
         NestedVisitorMap::OnlyBodies(&self.tcx.hir())
@@ -1711,45 +1585,32 @@ fn visit_expr(&mut self, ex: &'tcx hir::Expr) {
         intravisit::walk_expr(self, ex);
         self.encode_info_for_expr(ex);
     }
+    fn visit_anon_const(&mut self, c: &'tcx AnonConst) {
+        intravisit::walk_anon_const(self, c);
+        let def_id = self.tcx.hir().local_def_id(c.hir_id);
+        self.encode_info_for_anon_const(def_id);
+    }
     fn visit_item(&mut self, item: &'tcx hir::Item) {
         intravisit::walk_item(self, item);
         let def_id = self.tcx.hir().local_def_id(item.hir_id);
         match item.kind {
             hir::ItemKind::ExternCrate(_) |
             hir::ItemKind::Use(..) => {} // ignore these
-            _ => self.record(def_id, EncodeContext::encode_info_for_item, (def_id, item)),
+            _ => self.encode_info_for_item(def_id, item),
         }
         self.encode_addl_info_for_item(item);
     }
     fn visit_foreign_item(&mut self, ni: &'tcx hir::ForeignItem) {
         intravisit::walk_foreign_item(self, ni);
         let def_id = self.tcx.hir().local_def_id(ni.hir_id);
-        self.record(def_id,
-                          EncodeContext::encode_info_for_foreign_item,
-                          (def_id, ni));
-    }
-    fn visit_variant(&mut self,
-                     v: &'tcx hir::Variant,
-                     g: &'tcx hir::Generics,
-                     id: hir::HirId) {
-        intravisit::walk_variant(self, v, g, id);
-
-        if let Some(ref discr) = v.disr_expr {
-            let def_id = self.tcx.hir().local_def_id(discr.hir_id);
-            self.record(def_id, EncodeContext::encode_info_for_anon_const, def_id);
-        }
+        self.encode_info_for_foreign_item(def_id, ni);
     }
     fn visit_generics(&mut self, generics: &'tcx hir::Generics) {
         intravisit::walk_generics(self, generics);
         self.encode_info_for_generics(generics);
     }
-    fn visit_ty(&mut self, ty: &'tcx hir::Ty) {
-        intravisit::walk_ty(self, ty);
-        self.encode_info_for_ty(ty);
-    }
     fn visit_macro_def(&mut self, macro_def: &'tcx hir::MacroDef) {
-        let def_id = self.tcx.hir().local_def_id(macro_def.hir_id);
-        self.record(def_id, EncodeContext::encode_info_for_macro_def, macro_def);
+        self.encode_info_for_macro_def(macro_def);
     }
 }
 
@@ -1757,10 +1618,10 @@ impl EncodeContext<'tcx> {
     fn encode_fields(&mut self, adt_def_id: DefId) {
         let def = self.tcx.adt_def(adt_def_id);
         for (variant_index, variant) in def.variants.iter_enumerated() {
-            for (field_index, field) in variant.fields.iter().enumerate() {
-                self.record(field.did,
-                            EncodeContext::encode_field,
-                            (adt_def_id, variant_index, field_index));
+            for (field_index, _field) in variant.fields.iter().enumerate() {
+                // FIXME(eddyb) `adt_def_id` is leftover from incremental isolation,
+                // pass `def`, `variant` or `field` instead.
+                self.encode_field(adt_def_id, variant_index, field_index);
             }
         }
     }
@@ -1771,34 +1632,24 @@ fn encode_info_for_generics(&mut self, generics: &hir::Generics) {
             match param.kind {
                 GenericParamKind::Lifetime { .. } => continue,
                 GenericParamKind::Type { ref default, .. } => {
-                    self.record(
+                    self.encode_info_for_generic_param(
                         def_id,
-                        EncodeContext::encode_info_for_ty_param,
-                        (def_id, default.is_some()),
+                        EntryKind::TypeParam,
+                        default.is_some(),
                     );
                 }
                 GenericParamKind::Const { .. } => {
-                    self.record(def_id, EncodeContext::encode_info_for_const_param, def_id);
+                    self.encode_info_for_generic_param(def_id, EntryKind::ConstParam, true);
                 }
             }
         }
     }
 
-    fn encode_info_for_ty(&mut self, ty: &hir::Ty) {
-        match ty.kind {
-            hir::TyKind::Array(_, ref length) => {
-                let def_id = self.tcx.hir().local_def_id(length.hir_id);
-                self.record(def_id, EncodeContext::encode_info_for_anon_const, def_id);
-            }
-            _ => {}
-        }
-    }
-
     fn encode_info_for_expr(&mut self, expr: &hir::Expr) {
         match expr.kind {
             hir::ExprKind::Closure(..) => {
                 let def_id = self.tcx.hir().local_def_id(expr.hir_id);
-                self.record(def_id, EncodeContext::encode_info_for_closure, def_id);
+                self.encode_info_for_closure(def_id);
             }
             _ => {}
         }
@@ -1829,14 +1680,14 @@ fn encode_addl_info_for_item(&mut self, item: &hir::Item) {
 
                 let def = self.tcx.adt_def(def_id);
                 for (i, variant) in def.variants.iter_enumerated() {
-                    self.record(variant.def_id,
-                                EncodeContext::encode_enum_variant_info,
-                                (def_id, i));
-
-                    if let Some(ctor_def_id) = variant.ctor_def_id {
-                        self.record(ctor_def_id,
-                                    EncodeContext::encode_enum_variant_ctor,
-                                    (def_id, i));
+                    // FIXME(eddyb) `def_id` is leftover from incremental isolation,
+                    // pass `def` or `variant` instead.
+                    self.encode_enum_variant_info(def_id, i);
+
+                    // FIXME(eddyb) `def_id` is leftover from incremental isolation,
+                    // pass `def`, `variant` or `ctor_def_id` instead.
+                    if let Some(_ctor_def_id) = variant.ctor_def_id {
+                        self.encode_enum_variant_ctor(def_id, i);
                     }
                 }
             }
@@ -1846,9 +1697,7 @@ fn encode_addl_info_for_item(&mut self, item: &hir::Item) {
                 // If the struct has a constructor, encode it.
                 if let Some(ctor_hir_id) = struct_def.ctor_hir_id() {
                     let ctor_def_id = self.tcx.hir().local_def_id(ctor_hir_id);
-                    self.record(ctor_def_id,
-                                EncodeContext::encode_struct_ctor,
-                                (def_id, ctor_def_id));
+                    self.encode_struct_ctor(def_id, ctor_def_id);
                 }
             }
             hir::ItemKind::Union(..) => {
@@ -1856,16 +1705,12 @@ fn encode_addl_info_for_item(&mut self, item: &hir::Item) {
             }
             hir::ItemKind::Impl(..) => {
                 for &trait_item_def_id in self.tcx.associated_item_def_ids(def_id).iter() {
-                    self.record(trait_item_def_id,
-                                EncodeContext::encode_info_for_impl_item,
-                                trait_item_def_id);
+                    self.encode_info_for_impl_item(trait_item_def_id);
                 }
             }
             hir::ItemKind::Trait(..) => {
                 for &item_def_id in self.tcx.associated_item_def_ids(def_id).iter() {
-                    self.record(item_def_id,
-                                EncodeContext::encode_info_for_trait_item,
-                                item_def_id);
+                    self.encode_info_for_trait_item(item_def_id);
                 }
             }
         }
@@ -1920,7 +1765,7 @@ fn visit_impl_item(&mut self, _impl_item: &'v hir::ImplItem) {
 // will allow us to slice the metadata to the precise length that we just
 // generated regardless of trailing bytes that end up in it.
 
-pub fn encode_metadata(tcx: TyCtxt<'_>) -> EncodedMetadata {
+crate fn encode_metadata(tcx: TyCtxt<'_>) -> EncodedMetadata {
     let mut encoder = opaque::Encoder::new(vec![]);
     encoder.emit_raw_bytes(METADATA_HEADER);
 
@@ -1933,7 +1778,7 @@ pub fn encode_metadata(tcx: TyCtxt<'_>) -> EncodedMetadata {
         let mut ecx = EncodeContext {
             opaque: encoder,
             tcx,
-            entries_index: Index::new(tcx.hir().definitions().def_index_count()),
+            per_def: Default::default(),
             lazy_state: LazyState::NoNode,
             type_shorthands: Default::default(),
             predicate_shorthands: Default::default(),
@@ -1953,7 +1798,7 @@ pub fn encode_metadata(tcx: TyCtxt<'_>) -> EncodedMetadata {
 
     // Encode the root position.
     let header = METADATA_HEADER.len();
-    let pos = root.position;
+    let pos = root.position.get();
     result[header + 0] = (pos >> 24) as u8;
     result[header + 1] = (pos >> 16) as u8;
     result[header + 2] = (pos >> 8) as u8;
@@ -1961,11 +1806,3 @@ pub fn encode_metadata(tcx: TyCtxt<'_>) -> EncodedMetadata {
 
     EncodedMetadata { raw_data: result }
 }
-
-pub fn get_repr_options(tcx: TyCtxt<'_>, did: DefId) -> ReprOptions {
-    let ty = tcx.type_of(did);
-    match ty.kind {
-        ty::Adt(ref def, _) => return def.repr,
-        _ => bug!("{} is not an ADT", ty),
-    }
-}
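
The encoder hunks above drop the monolithic Entry<'tcx> value in favour of per-def-id side tables that each encode_* method fills through record!(...), with optional data simply omitted instead of stored as None. As a rough standalone sketch of that shape (assumed names and deliberately simplified types, not the actual librustc_metadata API or record! macro):

// Minimal sketch: one table per property, keyed by a def index.
use std::collections::HashMap;

#[derive(Default)]
struct PerDefTables {
    kind: HashMap<u32, String>,      // def index -> encoded kind
    span: HashMap<u32, (u32, u32)>,  // def index -> (lo, hi)
    stability: HashMap<u32, String>, // only present for items that have stability
}

struct Encoder {
    per_def: PerDefTables,
}

impl Encoder {
    fn encode_stability(&mut self, def_index: u32, stab: Option<&str>) {
        // Optional attributes are recorded only when present; there is no
        // `stability: None` field to fill in, unlike the old Entry struct.
        if let Some(stab) = stab {
            self.per_def.stability.insert(def_index, stab.to_string());
        }
    }

    fn encode_item(&mut self, def_index: u32, kind: &str, span: (u32, u32), stab: Option<&str>) {
        self.per_def.kind.insert(def_index, kind.to_string());
        self.per_def.span.insert(def_index, span);
        self.encode_stability(def_index, stab);
    }
}

fn main() {
    let mut enc = Encoder { per_def: PerDefTables::default() };
    enc.encode_item(0, "Fn", (10, 42), None);
    enc.encode_item(1, "Struct", (50, 80), Some("stable since 1.0"));
    assert!(enc.per_def.stability.get(&0).is_none());
    assert_eq!(enc.per_def.kind.len(), 2);
}
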
index 8a4f6e6f17a5112733fc69a4ffe2e3dcd082e4fd..fa1402584eda50f7ba83eead17510d5b97e0b88e 100644 (file)
@@ -3,7 +3,7 @@
 use rustc::middle::cstore::ForeignModule;
 use rustc::ty::TyCtxt;
 
-pub fn collect(tcx: TyCtxt<'_>) -> Vec<ForeignModule> {
+crate fn collect(tcx: TyCtxt<'_>) -> Vec<ForeignModule> {
     let mut collector = Collector {
         tcx,
         modules: Vec::new(),
diff --git a/src/librustc_metadata/index.rs b/src/librustc_metadata/index.rs
deleted file mode 100644 (file)
index 6f248f2..0000000
+++ /dev/null
@@ -1,141 +0,0 @@
-use crate::schema::*;
-
-use rustc::hir::def_id::{DefId, DefIndex};
-use rustc_serialize::opaque::Encoder;
-use std::marker::PhantomData;
-use std::u32;
-use log::debug;
-
-/// Helper trait, for encoding to, and decoding from, a fixed number of bytes.
-pub trait FixedSizeEncoding {
-    const BYTE_LEN: usize;
-
-    // FIXME(eddyb) convert to and from `[u8; Self::BYTE_LEN]` instead,
-    // once that starts being allowed by the compiler (i.e. lazy normalization).
-    fn from_bytes(b: &[u8]) -> Self;
-    fn write_to_bytes(self, b: &mut [u8]);
-
-    // FIXME(eddyb) make these generic functions, or at least defaults here.
-    // (same problem as above, needs `[u8; Self::BYTE_LEN]`)
-    // For now, a macro (`fixed_size_encoding_byte_len_and_defaults`) is used.
-    fn read_from_bytes_at(b: &[u8], i: usize) -> Self;
-    fn write_to_bytes_at(self, b: &mut [u8], i: usize);
-}
-
-// HACK(eddyb) this shouldn't be needed (see comments on the methods above).
-macro_rules! fixed_size_encoding_byte_len_and_defaults {
-    ($byte_len:expr) => {
-        const BYTE_LEN: usize = $byte_len;
-        fn read_from_bytes_at(b: &[u8], i: usize) -> Self {
-            const BYTE_LEN: usize = $byte_len;
-            // HACK(eddyb) ideally this would be done with fully safe code,
-            // but slicing `[u8]` with `i * N..` is optimized worse, due to the
-            // possibility of `i * N` overflowing, than indexing `[[u8; N]]`.
-            let b = unsafe {
-                std::slice::from_raw_parts(
-                    b.as_ptr() as *const [u8; BYTE_LEN],
-                    b.len() / BYTE_LEN,
-                )
-            };
-            Self::from_bytes(&b[i])
-        }
-        fn write_to_bytes_at(self, b: &mut [u8], i: usize) {
-            const BYTE_LEN: usize = $byte_len;
-            // HACK(eddyb) ideally this would be done with fully safe code,
-            // see similar comment in `read_from_bytes_at` for why it can't yet.
-            let b = unsafe {
-                std::slice::from_raw_parts_mut(
-                    b.as_mut_ptr() as *mut [u8; BYTE_LEN],
-                    b.len() / BYTE_LEN,
-                )
-            };
-            self.write_to_bytes(&mut b[i]);
-        }
-    }
-}
-
-impl FixedSizeEncoding for u32 {
-    fixed_size_encoding_byte_len_and_defaults!(4);
-
-    fn from_bytes(b: &[u8]) -> Self {
-        let mut bytes = [0; Self::BYTE_LEN];
-        bytes.copy_from_slice(&b[..Self::BYTE_LEN]);
-        Self::from_le_bytes(bytes)
-    }
-
-    fn write_to_bytes(self, b: &mut [u8]) {
-        b[..Self::BYTE_LEN].copy_from_slice(&self.to_le_bytes());
-    }
-}
-
-/// While we are generating the metadata, we also track the position
-/// of each DefIndex. It is not required that all definitions appear
-/// in the metadata, nor that they are serialized in order, and
-/// therefore we first allocate the vector here and fill it with
-/// `u32::MAX`. Whenever an index is visited, we fill in the
-/// appropriate spot by calling `record_position`. We should never
-/// visit the same index twice.
-pub struct Index<'tcx> {
-    positions: Vec<u8>,
-    _marker: PhantomData<&'tcx ()>,
-}
-
-impl Index<'tcx> {
-    pub fn new(max_index: usize) -> Self {
-        Index {
-            positions: vec![0xff; max_index * 4],
-            _marker: PhantomData,
-        }
-    }
-
-    pub fn record(&mut self, def_id: DefId, entry: Lazy<Entry<'tcx>>) {
-        assert!(def_id.is_local());
-        self.record_index(def_id.index, entry);
-    }
-
-    pub fn record_index(&mut self, item: DefIndex, entry: Lazy<Entry<'tcx>>) {
-        assert!(entry.position < (u32::MAX as usize));
-        let position = entry.position as u32;
-        let array_index = item.index();
-
-        let positions = &mut self.positions;
-        assert!(u32::read_from_bytes_at(positions, array_index) == u32::MAX,
-                "recorded position for item {:?} twice, first at {:?} and now at {:?}",
-                item,
-                u32::read_from_bytes_at(positions, array_index),
-                position);
-
-        position.write_to_bytes_at(positions, array_index)
-    }
-
-    pub fn write_index(&self, buf: &mut Encoder) -> Lazy<[Self]> {
-        let pos = buf.position();
-
-        // First we write the length of the lower range ...
-        buf.emit_raw_bytes(&(self.positions.len() as u32 / 4).to_le_bytes());
-        // ... then the values.
-        buf.emit_raw_bytes(&self.positions);
-        Lazy::from_position_and_meta(pos as usize, self.positions.len() / 4 + 1)
-    }
-}
-
-impl Lazy<[Index<'tcx>]> {
-    /// Given the metadata, extract out the offset of a particular
-    /// DefIndex (if any).
-    #[inline(never)]
-    pub fn lookup(&self, bytes: &[u8], def_index: DefIndex) -> Option<Lazy<Entry<'tcx>>> {
-        let bytes = &bytes[self.position..];
-        debug!("Index::lookup: index={:?} len={:?}",
-               def_index,
-               self.meta);
-
-        let position = u32::read_from_bytes_at(bytes, 1 + def_index.index());
-        if position == u32::MAX {
-            debug!("Index::lookup: position=u32::MAX");
-            None
-        } else {
-            debug!("Index::lookup: position={:?}", position);
-            Some(Lazy::from_position(position as usize))
-        }
-    }
-}
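
The deleted index.rs above kept a single flat offset array: one little-endian u32 slot per DefIndex, pre-filled with u32::MAX to mean "absent" and overwritten exactly once with the byte position of that definition's Entry (the role now taken over by the per-table indices in the new table module). A minimal self-contained sketch of that scheme, with simplified names and without the Lazy/Encoder plumbing:

// Sketch of the fixed-size position index: absent entries stay at u32::MAX.
struct PositionIndex {
    positions: Vec<u8>, // 4 bytes per DefIndex, little-endian u32
}

impl PositionIndex {
    fn new(def_count: usize) -> Self {
        PositionIndex { positions: vec![0xff; def_count * 4] }
    }

    fn record(&mut self, def_index: usize, position: u32) {
        // Each definition's slot may only be written once.
        assert_eq!(self.lookup(def_index), None, "position recorded twice");
        let start = def_index * 4;
        self.positions[start..start + 4].copy_from_slice(&position.to_le_bytes());
    }

    fn lookup(&self, def_index: usize) -> Option<u32> {
        let start = def_index * 4;
        let mut bytes = [0u8; 4];
        bytes.copy_from_slice(&self.positions[start..start + 4]);
        let pos = u32::from_le_bytes(bytes);
        if pos == u32::MAX { None } else { Some(pos) }
    }
}

fn main() {
    let mut index = PositionIndex::new(3);
    index.record(1, 1234);
    assert_eq!(index.lookup(1), Some(1234));
    assert_eq!(index.lookup(0), None);
}
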
index 9273b064ba9ceecc08821cc5adada0c484d699d7..291ee23ff7262d73917adb7dffcf3714888d8cec 100644 (file)
@@ -1,6 +1,7 @@
 #![doc(html_root_url = "https://doc.rust-lang.org/nightly/")]
 
 #![feature(box_patterns)]
+#![feature(core_intrinsics)]
 #![feature(crate_visibility_modifier)]
 #![feature(drain_filter)]
 #![feature(in_band_lifetimes)]
@@ -11,6 +12,7 @@
 #![feature(rustc_private)]
 #![feature(slice_patterns)]
 #![feature(specialization)]
+#![feature(stmt_expr_attributes)]
 
 #![recursion_limit="256"]
 
 
 pub mod error_codes;
 
-mod index;
 mod encoder;
 mod decoder;
+mod dependency_format;
 mod cstore_impl;
-mod schema;
-mod native_libs;
-mod link_args;
 mod foreign_modules;
-mod dependency_format;
+mod link_args;
+mod native_libs;
+mod schema;
+mod table;
 
 pub mod creader;
 pub mod cstore;
index 527d4421fca656d9a9a4250eba539713f707fe66..4291f3a4ae34e74358513fcd37f3cf9d931a9160 100644 (file)
@@ -4,7 +4,7 @@
 use rustc_target::spec::abi::Abi;
 use syntax::symbol::sym;
 
-pub fn collect(tcx: TyCtxt<'_>) -> Vec<String> {
+crate fn collect(tcx: TyCtxt<'_>) -> Vec<String> {
     let mut collector = Collector {
         args: Vec::new(),
     };
index 8df236c41cfb888a88bf0556bec35a2d1046ee1b..05676dad3340c7553772ea73f362222ced55f033 100644 (file)
 //! no means all of the necessary details. Take a look at the rest of
 //! metadata::locator or metadata::creader for all the juicy details!
 
-use crate::cstore::{MetadataRef, MetadataBlob};
+use crate::cstore::{MetadataBlob, CStore};
 use crate::creader::Library;
 use crate::schema::{METADATA_HEADER, rustc_version};
 
 use rustc_data_structures::fx::FxHashSet;
 use rustc_data_structures::svh::Svh;
-use rustc::middle::cstore::MetadataLoader;
+use rustc_data_structures::sync::MetadataRef;
+use rustc::middle::cstore::{CrateSource, MetadataLoader};
 use rustc::session::{config, Session};
 use rustc::session::filesearch::{FileSearch, FileMatches, FileDoesntMatch};
 use rustc::session::search_paths::PathKind;
 use log::{debug, info, warn};
 
 #[derive(Clone)]
-pub struct CrateMismatch {
+crate struct CrateMismatch {
     path: PathBuf,
     got: String,
 }
 
 #[derive(Clone)]
-pub struct Context<'a> {
+crate struct Context<'a> {
     pub sess: &'a Session,
     pub span: Span,
     pub crate_name: Symbol,
@@ -272,11 +273,9 @@ pub struct Context<'a> {
     pub metadata_loader: &'a dyn MetadataLoader,
 }
 
-pub struct CratePaths {
-    pub ident: String,
-    pub dylib: Option<PathBuf>,
-    pub rlib: Option<PathBuf>,
-    pub rmeta: Option<PathBuf>,
+crate struct CratePaths {
+    pub name: Symbol,
+    pub source: CrateSource,
 }
 
 #[derive(Copy, Clone, PartialEq)]
@@ -296,14 +295,8 @@ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
     }
 }
 
-impl CratePaths {
-    fn paths(&self) -> Vec<PathBuf> {
-        self.dylib.iter().chain(self.rlib.iter()).chain(self.rmeta.iter()).cloned().collect()
-    }
-}
-
 impl<'a> Context<'a> {
-    pub fn reset(&mut self) {
+    crate fn reset(&mut self) {
         self.rejected_via_hash.clear();
         self.rejected_via_triple.clear();
         self.rejected_via_kind.clear();
@@ -311,7 +304,7 @@ pub fn reset(&mut self) {
         self.rejected_via_filename.clear();
     }
 
-    pub fn maybe_load_library_crate(&mut self) -> Option<Library> {
+    crate fn maybe_load_library_crate(&mut self) -> Option<Library> {
         let mut seen_paths = FxHashSet::default();
         match self.extra_filename {
             Some(s) => self.find_library_crate(s, &mut seen_paths)
@@ -320,10 +313,10 @@ pub fn maybe_load_library_crate(&mut self) -> Option<Library> {
         }
     }
 
-    pub fn report_errs(self) -> ! {
+    crate fn report_errs(self) -> ! {
         let add = match self.root {
             None => String::new(),
-            Some(r) => format!(" which `{}` depends on", r.ident),
+            Some(r) => format!(" which `{}` depends on", r.name),
         };
         let mut msg = "the following crate versions were found:".to_string();
         let mut err = if !self.rejected_via_hash.is_empty() {
@@ -341,8 +334,8 @@ pub fn report_errs(self) -> ! {
             match self.root {
                 None => {}
                 Some(r) => {
-                    for path in r.paths().iter() {
-                        msg.push_str(&format!("\ncrate `{}`: {}", r.ident, path.display()));
+                    for path in r.source.paths() {
+                        msg.push_str(&format!("\ncrate `{}`: {}", r.name, path.display()));
                     }
                 }
             }
@@ -534,18 +527,8 @@ fn find_library_crate(&mut self,
         // search is being performed for.
         let mut libraries = FxHashMap::default();
         for (_hash, (rlibs, rmetas, dylibs)) in candidates {
-            let mut slot = None;
-            let rlib = self.extract_one(rlibs, CrateFlavor::Rlib, &mut slot);
-            let rmeta = self.extract_one(rmetas, CrateFlavor::Rmeta, &mut slot);
-            let dylib = self.extract_one(dylibs, CrateFlavor::Dylib, &mut slot);
-            if let Some((h, m)) = slot {
-                libraries.insert(h,
-                                 Library {
-                                     dylib,
-                                     rlib,
-                                     rmeta,
-                                     metadata: m,
-                                 });
+            if let Some((svh, lib)) = self.extract_lib(rlibs, rmetas, dylibs) {
+                libraries.insert(svh, lib);
             }
         }
 
@@ -563,7 +546,7 @@ fn find_library_crate(&mut self,
                                                self.crate_name);
                 let candidates = libraries.iter().filter_map(|(_, lib)| {
                     let crate_name = &lib.metadata.get_root().name.as_str();
-                    match &(&lib.dylib, &lib.rlib) {
+                    match &(&lib.source.dylib, &lib.source.rlib) {
                         &(&Some((ref pd, _)), &Some((ref pr, _))) => {
                             Some(format!("\ncrate `{}`: {}\n{:>padding$}",
                                          crate_name,
@@ -584,6 +567,21 @@ fn find_library_crate(&mut self,
         }
     }
 
+    fn extract_lib(
+        &mut self,
+        rlibs: FxHashMap<PathBuf, PathKind>,
+        rmetas: FxHashMap<PathBuf, PathKind>,
+        dylibs: FxHashMap<PathBuf, PathKind>,
+    ) -> Option<(Svh, Library)> {
+        let mut slot = None;
+        let source = CrateSource {
+            rlib: self.extract_one(rlibs, CrateFlavor::Rlib, &mut slot),
+            rmeta: self.extract_one(rmetas, CrateFlavor::Rmeta, &mut slot),
+            dylib: self.extract_one(dylibs, CrateFlavor::Dylib, &mut slot),
+        };
+        slot.map(|(svh, metadata)| (svh, Library { source, metadata }))
+    }
+
     // Attempts to extract *one* library from the set `m`. If the set has no
     // elements, `None` is returned. If the set has more than one element, then
     // the errors and notes are emitted about the set of libraries.
@@ -828,23 +826,8 @@ fn find_commandline_library<'b, LOCS>(&mut self, locs: LOCS) -> Option<Library>
             }
         };
 
-        // Extract the rlib/dylib pair.
-        let mut slot = None;
-        let rlib = self.extract_one(rlibs, CrateFlavor::Rlib, &mut slot);
-        let rmeta = self.extract_one(rmetas, CrateFlavor::Rmeta, &mut slot);
-        let dylib = self.extract_one(dylibs, CrateFlavor::Dylib, &mut slot);
-
-        if rlib.is_none() && rmeta.is_none() && dylib.is_none() {
-            return None;
-        }
-        slot.map(|(_, metadata)|
-            Library {
-                dylib,
-                rlib,
-                rmeta,
-                metadata,
-            }
-        )
+        // Extract the dylib/rlib/rmeta triple.
+        self.extract_lib(rlibs, rmetas, dylibs).map(|(_, lib)| lib)
     }
 }
 
@@ -931,7 +914,7 @@ fn get_metadata_section_imp(target: &Target,
 /// A diagnostic function for dumping crate metadata to an output stream.
 pub fn list_file_metadata(target: &Target,
                           path: &Path,
-                          loader: &dyn MetadataLoader,
+                          cstore: &CStore,
                           out: &mut dyn io::Write)
                           -> io::Result<()> {
     let filename = path.file_name().unwrap().to_str().unwrap();
@@ -942,7 +925,7 @@ pub fn list_file_metadata(target: &Target,
     } else {
         CrateFlavor::Dylib
     };
-    match get_metadata_section(target, flavor, path, loader) {
+    match get_metadata_section(target, flavor, path, &*cstore.metadata_loader) {
         Ok(metadata) => metadata.list_crate_metadata(out),
         Err(msg) => write!(out, "{}\n", msg),
     }
index fe215d9c7999ea66c453d20689fa2da730f2fafc..9e4c2685f11626f6744cf8f0b197d9de2a530145 100644 (file)
@@ -11,7 +11,7 @@
 use syntax::symbol::{kw, sym, Symbol};
 use syntax::{span_err, struct_span_err};
 
-pub fn collect(tcx: TyCtxt<'_>) -> Vec<NativeLibrary> {
+crate fn collect(tcx: TyCtxt<'_>) -> Vec<NativeLibrary> {
     let mut collector = Collector {
         tcx,
         libs: Vec::new(),
@@ -21,7 +21,7 @@ pub fn collect(tcx: TyCtxt<'_>) -> Vec<NativeLibrary> {
     return collector.libs;
 }
 
-pub fn relevant_lib(sess: &Session, lib: &NativeLibrary) -> bool {
+crate fn relevant_lib(sess: &Session, lib: &NativeLibrary) -> bool {
     match lib.cfg {
         Some(ref cfg) => attr::cfg_matches(cfg, &sess.parse_sess, None),
         None => true,
@@ -73,6 +73,7 @@ fn visit_item(&mut self, it: &'tcx hir::Item) {
                         "static-nobundle" => cstore::NativeStaticNobundle,
                         "dylib" => cstore::NativeUnknown,
                         "framework" => cstore::NativeFramework,
+                        "raw-dylib" => cstore::NativeRawDylib,
                         k => {
                             struct_span_err!(self.tcx.sess, item.span(), E0458,
                                       "unknown kind: `{}`", k)
@@ -169,6 +170,14 @@ fn register_native_lib(&mut self, span: Option<Span>, lib: NativeLibrary) {
                                            GateIssue::Language,
                                            "kind=\"static-nobundle\" is unstable");
         }
+        if lib.kind == cstore::NativeRawDylib &&
+           !self.tcx.features().raw_dylib {
+            feature_gate::emit_feature_err(&self.tcx.sess.parse_sess,
+                                           sym::raw_dylib,
+                                           span.unwrap_or_else(|| syntax_pos::DUMMY_SP),
+                                           GateIssue::Language,
+                                           "kind=\"raw-dylib\" is unstable");
+        }
         self.libs.push(lib);
     }
 
index 4fe9c466cb6dafa0dd568526de2edfe856a1c14f..96f35783278fa5784d21821ce3b949944cabb262 100644 (file)
@@ -1,4 +1,4 @@
-use crate::index;
+use crate::table::PerDefTable;
 
 use rustc::hir;
 use rustc::hir::def::{self, CtorKind};
 use rustc_index::vec::IndexVec;
 use rustc_data_structures::svh::Svh;
 
+use rustc_serialize::Encodable;
 use syntax::{ast, attr};
 use syntax::edition::Edition;
 use syntax::symbol::Symbol;
 use syntax_pos::{self, Span};
 
 use std::marker::PhantomData;
+use std::num::NonZeroUsize;
 
-pub fn rustc_version() -> String {
+crate fn rustc_version() -> String {
     format!("rustc {}",
             option_env!("CFG_VERSION").unwrap_or("unknown version"))
 }
@@ -29,7 +31,7 @@ pub fn rustc_version() -> String {
 /// Metadata encoding version.
 /// N.B., increment this if you change the format of metadata such that
 /// the rustc version can't be found to compare with `rustc_version()`.
-pub const METADATA_VERSION: u8 = 4;
+const METADATA_VERSION: u8 = 4;
 
 /// Metadata header which includes `METADATA_VERSION`.
 /// To get older versions of rustc to ignore this metadata,
@@ -39,12 +41,12 @@ pub fn rustc_version() -> String {
 /// This header is followed by the position of the `CrateRoot`,
 /// which is encoded as a 32-bit big-endian unsigned integer,
 /// and further followed by the rustc version string.
-pub const METADATA_HEADER: &[u8; 12] =
+crate const METADATA_HEADER: &[u8; 12] =
     &[0, 0, 0, 0, b'r', b'u', b's', b't', 0, 0, 0, METADATA_VERSION];
 
 /// Additional metadata for a `Lazy<T>` where `T` may not be `Sized`,
 /// e.g. for `Lazy<[T]>`, this is the length (count of `T` values).
-pub trait LazyMeta {
+crate trait LazyMeta {
     type Meta: Copy + 'static;
 
     /// Returns the minimum encoded size.
@@ -52,7 +54,7 @@ pub trait LazyMeta {
     fn min_size(meta: Self::Meta) -> usize;
 }
 
-impl<T> LazyMeta for T {
+impl<T: Encodable> LazyMeta for T {
     type Meta = ();
 
     fn min_size(_: ()) -> usize {
@@ -61,7 +63,7 @@ fn min_size(_: ()) -> usize {
     }
 }
 
-impl<T> LazyMeta for [T] {
+impl<T: Encodable> LazyMeta for [T] {
     type Meta = usize;
 
     fn min_size(len: usize) -> usize {
@@ -98,17 +100,17 @@ fn min_size(len: usize) -> usize {
 #[must_use]
 // FIXME(#59875) the `Meta` parameter only exists to dodge
 // invariance wrt `T` (coming from the `meta: T::Meta` field).
-pub struct Lazy<T, Meta = <T as LazyMeta>::Meta>
+crate struct Lazy<T, Meta = <T as LazyMeta>::Meta>
     where T: ?Sized + LazyMeta<Meta = Meta>,
           Meta: 'static + Copy,
 {
-    pub position: usize,
+    pub position: NonZeroUsize,
     pub meta: Meta,
     _marker: PhantomData<T>,
 }
 
 impl<T: ?Sized + LazyMeta> Lazy<T> {
-    pub fn from_position_and_meta(position: usize, meta: T::Meta) -> Lazy<T> {
+    crate fn from_position_and_meta(position: NonZeroUsize, meta: T::Meta) -> Lazy<T> {
         Lazy {
             position,
             meta,
@@ -117,15 +119,15 @@ pub fn from_position_and_meta(position: usize, meta: T::Meta) -> Lazy<T> {
     }
 }
 
-impl<T> Lazy<T> {
-    pub fn from_position(position: usize) -> Lazy<T> {
+impl<T: Encodable> Lazy<T> {
+    crate fn from_position(position: NonZeroUsize) -> Lazy<T> {
         Lazy::from_position_and_meta(position, ())
     }
 }
 
-impl<T> Lazy<[T]> {
-    pub fn empty() -> Lazy<[T]> {
-        Lazy::from_position_and_meta(0, 0)
+impl<T: Encodable> Lazy<[T]> {
+    crate fn empty() -> Lazy<[T]> {
+        Lazy::from_position_and_meta(NonZeroUsize::new(1).unwrap(), 0)
     }
 }
 
@@ -141,22 +143,32 @@ impl<T: ?Sized + LazyMeta> rustc_serialize::UseSpecializedDecodable for Lazy<T>
 
 /// Encoding / decoding state for `Lazy`.
 #[derive(Copy, Clone, PartialEq, Eq, Debug)]
-pub enum LazyState {
+crate enum LazyState {
     /// Outside of a metadata node.
     NoNode,
 
     /// Inside a metadata node, and before any `Lazy`.
     /// The position is that of the node itself.
-    NodeStart(usize),
+    NodeStart(NonZeroUsize),
 
     /// Inside a metadata node, with a previous `Lazy`.
     /// The position is a conservative estimate of where that
     /// previous `Lazy` would end (see their comments).
-    Previous(usize),
+    Previous(NonZeroUsize),
+}
+
+// FIXME(#59875) `Lazy!(T)` replaces `Lazy<T>`, passing the `Meta` parameter
+// manually, instead of relying on the default, to get the correct variance.
+// Only needed when `T` itself contains a parameter (e.g. `'tcx`).
+macro_rules! Lazy {
+    (Table<$T:ty>) => {Lazy<Table<$T>, usize>};
+    (PerDefTable<$T:ty>) => {Lazy<PerDefTable<$T>, usize>};
+    ([$T:ty]) => {Lazy<[$T], usize>};
+    ($T:ty) => {Lazy<$T, ()>};
 }
 
 #[derive(RustcEncodable, RustcDecodable)]
-pub struct CrateRoot<'tcx> {
+crate struct CrateRoot<'tcx> {
     pub name: Symbol,
     pub triple: TargetTriple,
     pub extra_filename: String,
@@ -182,10 +194,10 @@ pub struct CrateRoot<'tcx> {
     pub source_map: Lazy<[syntax_pos::SourceFile]>,
     pub def_path_table: Lazy<hir::map::definitions::DefPathTable>,
     pub impls: Lazy<[TraitImpls]>,
-    pub exported_symbols: Lazy<[(ExportedSymbol<'tcx>, SymbolExportLevel)]>,
+    pub exported_symbols: Lazy!([(ExportedSymbol<'tcx>, SymbolExportLevel)]),
     pub interpret_alloc_index: Lazy<[u32]>,
 
-    pub entries_index: Lazy<[index::Index<'tcx>]>,
+    pub per_def: LazyPerDefTables<'tcx>,
 
     /// The DefIndex's of any proc macros declared by
     /// this crate
@@ -202,7 +214,7 @@ pub struct CrateRoot<'tcx> {
 }
 
 #[derive(RustcEncodable, RustcDecodable)]
-pub struct CrateDep {
+crate struct CrateDep {
     pub name: ast::Name,
     pub hash: Svh,
     pub kind: DepKind,
@@ -210,34 +222,34 @@ pub struct CrateDep {
 }
 
 #[derive(RustcEncodable, RustcDecodable)]
-pub struct TraitImpls {
+crate struct TraitImpls {
     pub trait_id: (u32, DefIndex),
     pub impls: Lazy<[DefIndex]>,
 }
 
 #[derive(RustcEncodable, RustcDecodable)]
-pub struct Entry<'tcx> {
-    pub kind: EntryKind<'tcx>,
-    pub visibility: Lazy<ty::Visibility>,
-    pub span: Lazy<Span>,
-    pub attributes: Lazy<[ast::Attribute]>,
-    pub children: Lazy<[DefIndex]>,
-    pub stability: Option<Lazy<attr::Stability>>,
-    pub deprecation: Option<Lazy<attr::Deprecation>>,
-
-    pub ty: Option<Lazy<Ty<'tcx>>>,
-    pub inherent_impls: Lazy<[DefIndex]>,
-    pub variances: Lazy<[ty::Variance]>,
-    pub generics: Option<Lazy<ty::Generics>>,
-    pub predicates: Option<Lazy<ty::GenericPredicates<'tcx>>>,
-    pub predicates_defined_on: Option<Lazy<ty::GenericPredicates<'tcx>>>,
-
-    pub mir: Option<Lazy<mir::Body<'tcx>>>,
-    pub promoted_mir: Option<Lazy<IndexVec<mir::Promoted, mir::Body<'tcx>>>>,
+crate struct LazyPerDefTables<'tcx> {
+    pub kind: Lazy!(PerDefTable<Lazy!(EntryKind<'tcx>)>),
+    pub visibility: Lazy!(PerDefTable<Lazy<ty::Visibility>>),
+    pub span: Lazy!(PerDefTable<Lazy<Span>>),
+    pub attributes: Lazy!(PerDefTable<Lazy<[ast::Attribute]>>),
+    pub children: Lazy!(PerDefTable<Lazy<[DefIndex]>>),
+    pub stability: Lazy!(PerDefTable<Lazy<attr::Stability>>),
+    pub deprecation: Lazy!(PerDefTable<Lazy<attr::Deprecation>>),
+
+    pub ty: Lazy!(PerDefTable<Lazy!(Ty<'tcx>)>),
+    pub inherent_impls: Lazy!(PerDefTable<Lazy<[DefIndex]>>),
+    pub variances: Lazy!(PerDefTable<Lazy<[ty::Variance]>>),
+    pub generics: Lazy!(PerDefTable<Lazy<ty::Generics>>),
+    pub predicates: Lazy!(PerDefTable<Lazy!(ty::GenericPredicates<'tcx>)>),
+    pub predicates_defined_on: Lazy!(PerDefTable<Lazy!(ty::GenericPredicates<'tcx>)>),
+
+    pub mir: Lazy!(PerDefTable<Lazy!(mir::Body<'tcx>)>),
+    pub promoted_mir: Lazy!(PerDefTable<Lazy!(IndexVec<mir::Promoted, mir::Body<'tcx>>)>),
 }
 
 #[derive(Copy, Clone, RustcEncodable, RustcDecodable)]
-pub enum EntryKind<'tcx> {
+crate enum EntryKind<'tcx> {
     Const(ConstQualif, Lazy<RenderedConst>),
     ImmStatic,
     MutStatic,
@@ -252,88 +264,88 @@ pub enum EntryKind<'tcx> {
     OpaqueTy,
     Enum(ReprOptions),
     Field,
-    Variant(Lazy<VariantData<'tcx>>),
-    Struct(Lazy<VariantData<'tcx>>, ReprOptions),
-    Union(Lazy<VariantData<'tcx>>, ReprOptions),
-    Fn(Lazy<FnData<'tcx>>),
-    ForeignFn(Lazy<FnData<'tcx>>),
+    Variant(Lazy!(VariantData<'tcx>)),
+    Struct(Lazy!(VariantData<'tcx>), ReprOptions),
+    Union(Lazy!(VariantData<'tcx>), ReprOptions),
+    Fn(Lazy!(FnData<'tcx>)),
+    ForeignFn(Lazy!(FnData<'tcx>)),
     Mod(Lazy<ModData>),
     MacroDef(Lazy<MacroDef>),
-    Closure(Lazy<ClosureData<'tcx>>),
-    Generator(Lazy<GeneratorData<'tcx>>),
-    Trait(Lazy<TraitData<'tcx>>),
-    Impl(Lazy<ImplData<'tcx>>),
-    Method(Lazy<MethodData<'tcx>>),
+    Closure(Lazy!(ClosureData<'tcx>)),
+    Generator(Lazy!(GeneratorData<'tcx>)),
+    Trait(Lazy!(TraitData<'tcx>)),
+    Impl(Lazy!(ImplData<'tcx>)),
+    Method(Lazy!(MethodData<'tcx>)),
     AssocType(AssocContainer),
     AssocOpaqueTy(AssocContainer),
     AssocConst(AssocContainer, ConstQualif, Lazy<RenderedConst>),
-    TraitAlias(Lazy<TraitAliasData<'tcx>>),
+    TraitAlias(Lazy!(TraitAliasData<'tcx>)),
 }
 
 /// Additional data for EntryKind::Const and EntryKind::AssocConst
 #[derive(Clone, Copy, RustcEncodable, RustcDecodable)]
-pub struct ConstQualif {
+crate struct ConstQualif {
     pub mir: u8,
 }
 
 /// Contains a constant which has been rendered to a String.
 /// Used by rustdoc.
 #[derive(RustcEncodable, RustcDecodable)]
-pub struct RenderedConst(pub String);
+crate struct RenderedConst(pub String);
 
 #[derive(RustcEncodable, RustcDecodable)]
-pub struct ModData {
+crate struct ModData {
     pub reexports: Lazy<[def::Export<hir::HirId>]>,
 }
 
 #[derive(RustcEncodable, RustcDecodable)]
-pub struct MacroDef {
+crate struct MacroDef {
     pub body: String,
     pub legacy: bool,
 }
 
 #[derive(RustcEncodable, RustcDecodable)]
-pub struct FnData<'tcx> {
+crate struct FnData<'tcx> {
     pub asyncness: hir::IsAsync,
     pub constness: hir::Constness,
     pub param_names: Lazy<[ast::Name]>,
-    pub sig: Lazy<ty::PolyFnSig<'tcx>>,
+    pub sig: Lazy!(ty::PolyFnSig<'tcx>),
 }
 
 #[derive(RustcEncodable, RustcDecodable)]
-pub struct VariantData<'tcx> {
+crate struct VariantData<'tcx> {
     pub ctor_kind: CtorKind,
     pub discr: ty::VariantDiscr,
     /// If this is unit or tuple-variant/struct, then this is the index of the ctor id.
     pub ctor: Option<DefIndex>,
     /// If this is a tuple struct or variant
     /// ctor, this is its "function" signature.
-    pub ctor_sig: Option<Lazy<ty::PolyFnSig<'tcx>>>,
+    pub ctor_sig: Option<Lazy!(ty::PolyFnSig<'tcx>)>,
 }
 
 #[derive(RustcEncodable, RustcDecodable)]
-pub struct TraitData<'tcx> {
+crate struct TraitData<'tcx> {
     pub unsafety: hir::Unsafety,
     pub paren_sugar: bool,
     pub has_auto_impl: bool,
     pub is_marker: bool,
-    pub super_predicates: Lazy<ty::GenericPredicates<'tcx>>,
+    pub super_predicates: Lazy!(ty::GenericPredicates<'tcx>),
 }
 
 #[derive(RustcEncodable, RustcDecodable)]
-pub struct TraitAliasData<'tcx> {
-    pub super_predicates: Lazy<ty::GenericPredicates<'tcx>>,
+crate struct TraitAliasData<'tcx> {
+    pub super_predicates: Lazy!(ty::GenericPredicates<'tcx>),
 }
 
 #[derive(RustcEncodable, RustcDecodable)]
-pub struct ImplData<'tcx> {
+crate struct ImplData<'tcx> {
     pub polarity: ty::ImplPolarity,
     pub defaultness: hir::Defaultness,
     pub parent_impl: Option<DefId>,
 
     /// This is `Some` only for impls of `CoerceUnsized`.
     pub coerce_unsized_info: Option<ty::adjustment::CoerceUnsizedInfo>,
-    pub trait_ref: Option<Lazy<ty::TraitRef<'tcx>>>,
+    pub trait_ref: Option<Lazy!(ty::TraitRef<'tcx>)>,
 }
 
 
@@ -341,7 +353,7 @@ pub struct ImplData<'tcx> {
 /// is a trait or an impl and whether, in a trait, it has
 /// a default, or, in an impl, whether it's marked "default".
 #[derive(Copy, Clone, RustcEncodable, RustcDecodable)]
-pub enum AssocContainer {
+crate enum AssocContainer {
     TraitRequired,
     TraitWithDefault,
     ImplDefault,
@@ -349,7 +361,7 @@ pub enum AssocContainer {
 }
 
 impl AssocContainer {
-    pub fn with_def_id(&self, def_id: DefId) -> ty::AssocItemContainer {
+    crate fn with_def_id(&self, def_id: DefId) -> ty::AssocItemContainer {
         match *self {
             AssocContainer::TraitRequired |
             AssocContainer::TraitWithDefault => ty::TraitContainer(def_id),
@@ -359,7 +371,7 @@ pub fn with_def_id(&self, def_id: DefId) -> ty::AssocItemContainer {
         }
     }
 
-    pub fn defaultness(&self) -> hir::Defaultness {
+    crate fn defaultness(&self) -> hir::Defaultness {
         match *self {
             AssocContainer::TraitRequired => hir::Defaultness::Default {
                 has_value: false,
@@ -376,22 +388,22 @@ pub fn defaultness(&self) -> hir::Defaultness {
 }
 
 #[derive(RustcEncodable, RustcDecodable)]
-pub struct MethodData<'tcx> {
+crate struct MethodData<'tcx> {
     pub fn_data: FnData<'tcx>,
     pub container: AssocContainer,
     pub has_self: bool,
 }
 
 #[derive(RustcEncodable, RustcDecodable)]
-pub struct ClosureData<'tcx> {
-    pub sig: Lazy<ty::PolyFnSig<'tcx>>,
+crate struct ClosureData<'tcx> {
+    pub sig: Lazy!(ty::PolyFnSig<'tcx>),
 }
 
 #[derive(RustcEncodable, RustcDecodable)]
-pub struct GeneratorData<'tcx> {
+crate struct GeneratorData<'tcx> {
     pub layout: mir::GeneratorLayout<'tcx>,
 }
 
 // Tags used for encoding Spans:
-pub const TAG_VALID_SPAN: u8 = 0;
-pub const TAG_INVALID_SPAN: u8 = 1;
+crate const TAG_VALID_SPAN: u8 = 0;
+crate const TAG_INVALID_SPAN: u8 = 1;
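The move from `usize` to `NonZeroUsize` positions above, with `Lazy::empty()` now living at position 1, is what frees up 0 to act as the "no entry" sentinel when an `Option<Lazy<T>>` is written out as a single little-endian `u32` in the new `table.rs` below. A minimal standalone sketch of that property, in plain Rust rather than the rustc-internal types:

use std::mem::size_of;
use std::num::NonZeroU32;

fn main() {
    // Positions are never 0, so 0 is free to mean "missing" in the byte
    // encoding, and the in-memory Option costs no extra space either.
    assert_eq!(size_of::<Option<NonZeroU32>>(), size_of::<u32>());

    // Decoding mirrors the shape of `Option<Lazy<T>>::from_bytes` in the
    // new `table.rs` below: a raw 0 becomes None, anything else becomes a
    // valid (1-based) position.
    let decode = |raw: u32| NonZeroU32::new(raw);
    assert_eq!(decode(0), None);
    assert_eq!(decode(1).map(|p| p.get()), Some(1));
}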
diff --git a/src/librustc_metadata/table.rs b/src/librustc_metadata/table.rs
new file mode 100644 (file)
index 0000000..e164c28
--- /dev/null
@@ -0,0 +1,239 @@
+use crate::decoder::Metadata;
+use crate::schema::*;
+
+use rustc::hir::def_id::{DefId, DefIndex};
+use rustc_serialize::{Encodable, opaque::Encoder};
+use std::convert::TryInto;
+use std::marker::PhantomData;
+use std::num::NonZeroUsize;
+use log::debug;
+
+/// Helper trait, for encoding to, and decoding from, a fixed number of bytes.
+/// Used mainly for Lazy positions and lengths.
+/// Unchecked invariant: `Self::default()` should encode as `[0; BYTE_LEN]`,
+/// but this has no impact on safety.
+crate trait FixedSizeEncoding: Default {
+    const BYTE_LEN: usize;
+
+    // FIXME(eddyb) convert to and from `[u8; Self::BYTE_LEN]` instead,
+    // once that starts being allowed by the compiler (i.e. lazy normalization).
+    fn from_bytes(b: &[u8]) -> Self;
+    fn write_to_bytes(self, b: &mut [u8]);
+
+    // FIXME(eddyb) make these generic functions, or at least defaults here.
+    // (same problem as above, needs `[u8; Self::BYTE_LEN]`)
+    // For now, a macro (`fixed_size_encoding_byte_len_and_defaults`) is used.
+
+    /// Read a `Self` value (encoded as `Self::BYTE_LEN` bytes),
+    /// from `&b[i * Self::BYTE_LEN..]`, returning `None` if `i`
+    /// is not in bounds, or `Some(Self::from_bytes(...))` otherwise.
+    fn maybe_read_from_bytes_at(b: &[u8], i: usize) -> Option<Self>;
+    /// Write a `Self` value (encoded as `Self::BYTE_LEN` bytes),
+    /// at `&mut b[i * Self::BYTE_LEN..]`, using `Self::write_to_bytes`.
+    fn write_to_bytes_at(self, b: &mut [u8], i: usize);
+}
+
+// HACK(eddyb) this shouldn't be needed (see comments on the methods above).
+macro_rules! fixed_size_encoding_byte_len_and_defaults {
+    ($byte_len:expr) => {
+        const BYTE_LEN: usize = $byte_len;
+        fn maybe_read_from_bytes_at(b: &[u8], i: usize) -> Option<Self> {
+            const BYTE_LEN: usize = $byte_len;
+            // HACK(eddyb) ideally this would be done with fully safe code,
+            // but slicing `[u8]` with `i * N..` is optimized worse, due to the
+            // possibility of `i * N` overflowing, than indexing `[[u8; N]]`.
+            let b = unsafe {
+                std::slice::from_raw_parts(
+                    b.as_ptr() as *const [u8; BYTE_LEN],
+                    b.len() / BYTE_LEN,
+                )
+            };
+            b.get(i).map(|b| FixedSizeEncoding::from_bytes(b))
+        }
+        fn write_to_bytes_at(self, b: &mut [u8], i: usize) {
+            const BYTE_LEN: usize = $byte_len;
+            // HACK(eddyb) ideally this would be done with fully safe code,
+            // see similar comment in `maybe_read_from_bytes_at` for why it can't yet.
+            let b = unsafe {
+                std::slice::from_raw_parts_mut(
+                    b.as_mut_ptr() as *mut [u8; BYTE_LEN],
+                    b.len() / BYTE_LEN,
+                )
+            };
+            self.write_to_bytes(&mut b[i]);
+        }
+    }
+}
+
+impl FixedSizeEncoding for u32 {
+    fixed_size_encoding_byte_len_and_defaults!(4);
+
+    fn from_bytes(b: &[u8]) -> Self {
+        let mut bytes = [0; Self::BYTE_LEN];
+        bytes.copy_from_slice(&b[..Self::BYTE_LEN]);
+        Self::from_le_bytes(bytes)
+    }
+
+    fn write_to_bytes(self, b: &mut [u8]) {
+        b[..Self::BYTE_LEN].copy_from_slice(&self.to_le_bytes());
+    }
+}
+
+// NOTE(eddyb) there could be an impl for `usize`, which would enable a more
+// generic `Lazy<T>` impl, but in the general case we might not need / want to
+// fit every `usize` in `u32`.
+impl<T: Encodable> FixedSizeEncoding for Option<Lazy<T>> {
+    fixed_size_encoding_byte_len_and_defaults!(u32::BYTE_LEN);
+
+    fn from_bytes(b: &[u8]) -> Self {
+        Some(Lazy::from_position(NonZeroUsize::new(u32::from_bytes(b) as usize)?))
+    }
+
+    fn write_to_bytes(self, b: &mut [u8]) {
+        let position = self.map_or(0, |lazy| lazy.position.get());
+        let position: u32 = position.try_into().unwrap();
+
+        position.write_to_bytes(b)
+    }
+}
+
+impl<T: Encodable> FixedSizeEncoding for Option<Lazy<[T]>> {
+    fixed_size_encoding_byte_len_and_defaults!(u32::BYTE_LEN * 2);
+
+    fn from_bytes(b: &[u8]) -> Self {
+        Some(Lazy::from_position_and_meta(
+            <Option<Lazy<T>>>::from_bytes(b)?.position,
+            u32::from_bytes(&b[u32::BYTE_LEN..]) as usize,
+        ))
+    }
+
+    fn write_to_bytes(self, b: &mut [u8]) {
+        self.map(|lazy| Lazy::<T>::from_position(lazy.position))
+            .write_to_bytes(b);
+
+        let len = self.map_or(0, |lazy| lazy.meta);
+        let len: u32 = len.try_into().unwrap();
+
+        len.write_to_bytes(&mut b[u32::BYTE_LEN..]);
+    }
+}
+
+/// Random-access table (i.e. offering constant-time `get`/`set`), similar to
+/// `Vec<Option<T>>`, but without requiring encoding or decoding all the values
+/// eagerly and in-order.
+/// A total of `(max_idx + 1) * <Option<T> as FixedSizeEncoding>::BYTE_LEN` bytes
+/// are used for a table, where `max_idx` is the largest index passed to `set`.
+// FIXME(eddyb) replace `Vec` with `[_]` here, such that `Box<Table<T>>` would be used
+// when building it, and `Lazy<Table<T>>` or `&Table<T>` when reading it.
+// (not sure if that is possible given that the `Vec` is being resized now)
+crate struct Table<T> where Option<T>: FixedSizeEncoding {
+    // FIXME(eddyb) store `[u8; <Option<T>>::BYTE_LEN]` instead of `u8` in `Vec`,
+    // once that starts being allowed by the compiler (i.e. lazy normalization).
+    bytes: Vec<u8>,
+    _marker: PhantomData<T>,
+}
+
+impl<T> Default for Table<T> where Option<T>: FixedSizeEncoding {
+    fn default() -> Self {
+        Table {
+            bytes: vec![],
+            _marker: PhantomData,
+        }
+    }
+}
+
+impl<T> Table<T> where Option<T>: FixedSizeEncoding {
+    crate fn set(&mut self, i: usize, value: T) {
+        // FIXME(eddyb) investigate more compact encodings for sparse tables.
+        // On the PR @michaelwoerister mentioned:
+        // > Space requirements could perhaps be optimized by using the HAMT `popcnt`
+        // > trick (i.e. divide things into buckets of 32 or 64 items and then
+        // > store bit-masks of which item in each bucket is actually serialized).
+        let needed = (i + 1) * <Option<T>>::BYTE_LEN;
+        if self.bytes.len() < needed {
+            self.bytes.resize(needed, 0);
+        }
+
+        Some(value).write_to_bytes_at(&mut self.bytes, i);
+    }
+
+    crate fn encode(&self, buf: &mut Encoder) -> Lazy<Self> {
+        let pos = buf.position();
+        buf.emit_raw_bytes(&self.bytes);
+        Lazy::from_position_and_meta(
+            NonZeroUsize::new(pos as usize).unwrap(),
+            self.bytes.len(),
+        )
+    }
+}
+
+impl<T> LazyMeta for Table<T> where Option<T>: FixedSizeEncoding {
+    type Meta = usize;
+
+    fn min_size(len: usize) -> usize {
+        len
+    }
+}
+
+impl<T> Lazy<Table<T>> where Option<T>: FixedSizeEncoding {
+    /// Given the metadata, extract out the value at a particular index (if any).
+    #[inline(never)]
+    crate fn get<'a, 'tcx, M: Metadata<'a, 'tcx>>(
+        &self,
+        metadata: M,
+        i: usize,
+    ) -> Option<T> {
+        debug!("Table::lookup: index={:?} len={:?}", i, self.meta);
+
+        let start = self.position.get();
+        let bytes = &metadata.raw_bytes()[start..start + self.meta];
+        <Option<T>>::maybe_read_from_bytes_at(bytes, i)?
+    }
+}
+
+/// Like a `Table` but using `DefIndex` instead of `usize` as keys.
+// FIXME(eddyb) replace by making `Table` behave like `IndexVec`,
+// and by using `newtype_index!` to define `DefIndex`.
+crate struct PerDefTable<T>(Table<T>) where Option<T>: FixedSizeEncoding;
+
+impl<T> Default for PerDefTable<T> where Option<T>: FixedSizeEncoding {
+    fn default() -> Self {
+        PerDefTable(Table::default())
+    }
+}
+
+impl<T> PerDefTable<T> where Option<T>: FixedSizeEncoding {
+    crate fn set(&mut self, def_id: DefId, value: T) {
+        assert!(def_id.is_local());
+        self.0.set(def_id.index.index(), value);
+    }
+
+    crate fn encode(&self, buf: &mut Encoder) -> Lazy<Self> {
+        let lazy = self.0.encode(buf);
+        Lazy::from_position_and_meta(lazy.position, lazy.meta)
+    }
+}
+
+impl<T> LazyMeta for PerDefTable<T> where Option<T>: FixedSizeEncoding {
+    type Meta = <Table<T> as LazyMeta>::Meta;
+
+    fn min_size(meta: Self::Meta) -> usize {
+        Table::<T>::min_size(meta)
+    }
+}
+
+impl<T> Lazy<PerDefTable<T>> where Option<T>: FixedSizeEncoding {
+    fn as_table(&self) -> Lazy<Table<T>> {
+        Lazy::from_position_and_meta(self.position, self.meta)
+    }
+
+    /// Given the metadata, extract out the value at a particular DefIndex (if any).
+    #[inline(never)]
+    crate fn get<'a, 'tcx, M: Metadata<'a, 'tcx>>(
+        &self,
+        metadata: M,
+        def_index: DefIndex,
+    ) -> Option<T> {
+        self.as_table().get(metadata, def_index.index())
+    }
+}
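The new `Table` above is essentially a flat byte buffer indexed in constant time: each slot holds a fixed-size encoding of `Option<T>`, and slots that were never `set` stay zeroed, which decodes back to `None`. A simplified, self-contained sketch of the same idea, using a hypothetical `ByteTable` over `u32` entries rather than the rustc structures above:

use std::convert::TryInto;

/// Toy stand-in for `Table<T>`: one 4-byte little-endian slot per index,
/// with 0 reserved to mean "no entry".
struct ByteTable {
    bytes: Vec<u8>,
}

impl ByteTable {
    const SLOT: usize = 4;

    fn new() -> Self {
        ByteTable { bytes: Vec::new() }
    }

    /// Store a non-zero value at index `i`, growing the buffer on demand.
    fn set(&mut self, i: usize, value: u32) {
        assert!(value != 0, "0 is reserved for missing entries");
        let needed = (i + 1) * Self::SLOT;
        if self.bytes.len() < needed {
            self.bytes.resize(needed, 0);
        }
        self.bytes[i * Self::SLOT..needed].copy_from_slice(&value.to_le_bytes());
    }

    /// Constant-time lookup: `None` if `i` is out of bounds or never set.
    fn get(&self, i: usize) -> Option<u32> {
        let slot = self.bytes.get(i * Self::SLOT..(i + 1) * Self::SLOT)?;
        let raw = u32::from_le_bytes(slot.try_into().unwrap());
        if raw == 0 { None } else { Some(raw) }
    }
}

fn main() {
    let mut table = ByteTable::new();
    table.set(3, 42);
    assert_eq!(table.get(3), Some(42));
    assert_eq!(table.get(1), None);  // gap: still zero bytes
    assert_eq!(table.get(10), None); // out of bounds
}

As with `Table::set`, the buffer grows to `(max_index + 1) * slot_size` bytes, so sparse tables pay for their gaps; the FIXME above already points at a possible popcnt-bucket encoding to shrink that.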
index ef459ef0c1b702c6692317b39dbb7a5ee5fcb8c0..098258994f4e2a8c31f0ba65163ceb47568ca934 100644 (file)
@@ -1,5 +1,6 @@
 use rustc::hir;
 use rustc::hir::def_id::DefId;
+use rustc::hir::{AsyncGeneratorKind, GeneratorKind};
 use rustc::mir::{
     self, AggregateKind, BindingForm, BorrowKind, ClearCrossCrate, ConstraintCategory, Local,
     LocalDecl, LocalKind, Location, Operand, Place, PlaceBase, PlaceRef, ProjectionElem, Rvalue,
@@ -750,6 +751,11 @@ pub(super) fn report_borrowed_value_does_not_live_long_enough(
         let kind_place = kind.filter(|_| place_desc.is_some()).map(|k| (k, place_span.0));
         let explanation = self.explain_why_borrow_contains_point(location, &borrow, kind_place);
 
+        debug!(
+            "report_borrowed_value_does_not_live_long_enough(place_desc: {:?}, explanation: {:?})",
+            place_desc,
+            explanation
+        );
         let err = match (place_desc, explanation) {
             (Some(_), _) if self.is_place_thread_local(root_place) => {
                 self.report_thread_local_value_does_not_live_long_enough(drop_span, borrow_span)
@@ -783,7 +789,25 @@ pub(super) fn report_borrowed_value_does_not_live_long_enough(
                     ..
                 },
             ) if borrow_spans.for_closure() => self.report_escaping_closure_capture(
-                borrow_spans.args_or_use(),
+                borrow_spans,
+                borrow_span,
+                region_name,
+                category,
+                span,
+                &format!("`{}`", name),
+            ),
+            (
+                Some(ref name),
+                BorrowExplanation::MustBeValidFor {
+                    category: category @ ConstraintCategory::OpaqueType,
+                    from_closure: false,
+                    ref region_name,
+                    span,
+                    ..
+                },
+
+            ) if borrow_spans.for_generator() => self.report_escaping_closure_capture(
+                borrow_spans,
                 borrow_span,
                 region_name,
                 category,
@@ -1172,7 +1196,7 @@ fn try_report_cannot_return_reference_to_local(
 
     fn report_escaping_closure_capture(
         &mut self,
-        args_span: Span,
+        use_span: UseSpans,
         var_span: Span,
         fr_name: &RegionName,
         category: ConstraintCategory,
@@ -1180,7 +1204,7 @@ fn report_escaping_closure_capture(
         captured_var: &str,
     ) -> DiagnosticBuilder<'cx> {
         let tcx = self.infcx.tcx;
-
+        let args_span = use_span.args_or_use();
         let mut err = self.cannot_capture_in_long_lived_closure(
             args_span,
             captured_var,
@@ -1200,12 +1224,25 @@ fn report_escaping_closure_capture(
             },
             Err(_) => "move |<args>| <body>".to_string()
         };
-
+        let kind = match use_span.generator_kind() {
+            Some(generator_kind) => match generator_kind {
+                GeneratorKind::Async(async_kind) => match async_kind {
+                    AsyncGeneratorKind::Block => "async block",
+                    AsyncGeneratorKind::Closure => "async closure",
+                    _ => bug!("async block/closure expected, but async function found."),
+                },
+                GeneratorKind::Gen => "generator",
+            }
+            None => "closure",
+        };
         err.span_suggestion(
             args_span,
-            &format!("to force the closure to take ownership of {} (and any \
-                      other referenced variables), use the `move` keyword",
-                      captured_var),
+            &format!(
+                "to force the {} to take ownership of {} (and any \
+                 other referenced variables), use the `move` keyword",
+                 kind,
+                 captured_var
+            ),
             suggestion,
             Applicability::MachineApplicable,
         );
@@ -1214,6 +1251,9 @@ fn report_escaping_closure_capture(
             ConstraintCategory::Return => {
                 err.span_note(constraint_span, "closure is returned here");
             }
+            ConstraintCategory::OpaqueType => {
+                err.span_note(constraint_span, "generator is returned here");
+            }
             ConstraintCategory::CallArgument => {
                 fr_name.highlight_region_name(&mut err);
                 err.span_note(
index dc7e4b220658851df3c88386e59d1353e5ad381f..5e0727d51579fd70d518903f7d690816e0994d35 100644 (file)
@@ -1,6 +1,7 @@
 use rustc::hir;
 use rustc::hir::def::Namespace;
 use rustc::hir::def_id::DefId;
+use rustc::hir::GeneratorKind;
 use rustc::mir::{
     AggregateKind, Constant, Field, Local, LocalKind, Location, Operand,
     Place, PlaceBase, PlaceRef, ProjectionElem, Rvalue, Statement, StatementKind,
@@ -14,7 +15,7 @@
 use syntax::symbol::sym;
 
 use super::borrow_set::BorrowData;
-use super::{MirBorrowckCtxt};
+use super::MirBorrowckCtxt;
 use crate::dataflow::move_paths::{InitLocation, LookupResult};
 
 pub(super) struct IncludingDowncast(pub(super) bool);
@@ -604,7 +605,7 @@ pub(super) enum UseSpans {
     // The access is caused by capturing a variable for a closure.
     ClosureUse {
         // This is true if the captured variable was from a generator.
-        is_generator: bool,
+        generator_kind: Option<GeneratorKind>,
         // The span of the args of the closure, including the `move` keyword if
         // it's present.
         args_span: Span,
@@ -631,6 +632,13 @@ pub(super) fn var_or_use(self) -> Span {
         }
     }
 
+    pub(super) fn generator_kind(self) -> Option<GeneratorKind> {
+        match self {
+            UseSpans::ClosureUse { generator_kind, .. } => generator_kind,
+            _ => None,
+        }
+    }
+
     // Add a span label to the arguments of the closure, if it exists.
     pub(super) fn args_span_label(
         self,
@@ -656,7 +664,7 @@ pub(super) fn var_span_label(
     /// Returns `false` if this place is not used in a closure.
     pub(super) fn for_closure(&self) -> bool {
         match *self {
-            UseSpans::ClosureUse { is_generator, .. } => !is_generator,
+            UseSpans::ClosureUse { generator_kind, .. } => generator_kind.is_none(),
             _ => false,
         }
     }
@@ -664,7 +672,7 @@ pub(super) fn for_closure(&self) -> bool {
     /// Returns `false` if this place is not used in a generator.
     pub(super) fn for_generator(&self) -> bool {
         match *self {
-            UseSpans::ClosureUse { is_generator, .. } => is_generator,
+            UseSpans::ClosureUse { generator_kind, .. } => generator_kind.is_some(),
             _ => false,
         }
     }
@@ -672,7 +680,7 @@ pub(super) fn for_generator(&self) -> bool {
     /// Describe the span associated with a use of a place.
     pub(super) fn describe(&self) -> String {
         match *self {
-            UseSpans::ClosureUse { is_generator, .. } => if is_generator {
+            UseSpans::ClosureUse { generator_kind, .. } => if generator_kind.is_some() {
                 " in generator".to_string()
             } else {
                 " in closure".to_string()
@@ -794,19 +802,20 @@ pub(super) fn move_spans(
         if let  StatementKind::Assign(
             box(_, Rvalue::Aggregate(ref kind, ref places))
         ) = stmt.kind {
-            let (def_id, is_generator) = match kind {
-                box AggregateKind::Closure(def_id, _) => (def_id, false),
-                box AggregateKind::Generator(def_id, _, _) => (def_id, true),
+            let def_id = match kind {
+                box AggregateKind::Closure(def_id, _)
+                | box AggregateKind::Generator(def_id, _, _) => def_id,
                 _ => return OtherUse(stmt.source_info.span),
             };
 
             debug!(
-                "move_spans: def_id={:?} is_generator={:?} places={:?}",
-                def_id, is_generator, places
+                "move_spans: def_id={:?} places={:?}",
+                def_id, places
             );
-            if let Some((args_span, var_span)) = self.closure_span(*def_id, moved_place, places) {
+            if let Some((args_span, generator_kind, var_span))
+                = self.closure_span(*def_id, moved_place, places) {
                 return ClosureUse {
-                    is_generator,
+                    generator_kind,
                     args_span,
                     var_span,
                 };
@@ -857,11 +866,11 @@ pub(super) fn borrow_spans(&self, use_span: Span, location: Location) -> UseSpan
                     "borrow_spans: def_id={:?} is_generator={:?} places={:?}",
                     def_id, is_generator, places
                 );
-                if let Some((args_span, var_span)) = self.closure_span(
+                if let Some((args_span, generator_kind, var_span)) = self.closure_span(
                     *def_id, Place::from(target).as_ref(), places
                 ) {
                     return ClosureUse {
-                        is_generator,
+                        generator_kind,
                         args_span,
                         var_span,
                     };
@@ -884,7 +893,7 @@ fn closure_span(
         def_id: DefId,
         target_place: PlaceRef<'cx, 'tcx>,
         places: &Vec<Operand<'tcx>>,
-    ) -> Option<(Span, Span)> {
+    ) -> Option<(Span, Option<GeneratorKind>, Span)> {
         debug!(
             "closure_span: def_id={:?} target_place={:?} places={:?}",
             def_id, target_place, places
@@ -893,14 +902,16 @@ fn closure_span(
         let expr = &self.infcx.tcx.hir().expect_expr(hir_id).kind;
         debug!("closure_span: hir_id={:?} expr={:?}", hir_id, expr);
         if let hir::ExprKind::Closure(
-            .., args_span, _
+            .., body_id, args_span, _
         ) = expr {
             for (upvar, place) in self.infcx.tcx.upvars(def_id)?.values().zip(places) {
                 match place {
                     Operand::Copy(place) |
                     Operand::Move(place) if target_place == place.as_ref() => {
                         debug!("closure_span: found captured local {:?}", place);
-                        return Some((*args_span, upvar.span));
+                        let body = self.infcx.tcx.hir().body(*body_id);
+                        let generator_kind = body.generator_kind();
+                        return Some((*args_span, generator_kind, upvar.span));
                     },
                     _ => {}
                 }
index cfa211ad5afdb9a28f4c324eb56113c1665b2144..75d4b56fdb7c20480890b3e62c44380ef7ac0651 100644 (file)
@@ -1944,14 +1944,16 @@ fn check_access_permissions(
                     self.is_mutable(place.as_ref(), is_local_mutation_allowed),
                     self.errors_buffer.is_empty()
                 ) {
-                    // rust-lang/rust#46908: In pure NLL mode this code path should
-                    // be unreachable (and thus we signal an ICE in the else branch here).
-                    span_bug!(
-                        span,
+                    // rust-lang/rust#46908: In pure NLL mode this code path should be
+                    // unreachable, but we use `delay_span_bug` because we can hit this when
+                    // dereferencing a non-Copy raw pointer *and* have `-Ztreat-err-as-bug`
+                    // enabled. We don't want to ICE for that case, as other errors will have
+                    // been emitted (#52262).
+                    self.infcx.tcx.sess.delay_span_bug(span, &format!(
                         "Accessing `{:?}` with the kind `{:?}` shouldn't be possible",
                         place,
                         kind,
-                    );
+                    ));
                 }
                 return false;
             }
index 0e22ead62d1c0b747b6976e71b77ffab2ecd65b7..b105664399a5cbb6bd1ef6ea36adc1048f73388e 100644 (file)
@@ -12,7 +12,7 @@
     SourceInfo, Statement, StatementKind, Terminator, TerminatorKind, UserTypeProjection,
 };
 use rustc::ty::fold::TypeFoldable;
-use rustc::ty::{self, GeneratorSubsts, RegionVid, Ty};
+use rustc::ty::{self, RegionVid, Ty};
 use rustc::ty::subst::SubstsRef;
 
 pub(super) fn generate_constraints<'cx, 'tcx>(
@@ -91,13 +91,6 @@ fn visit_ty(&mut self, ty: Ty<'tcx>, ty_context: TyContext) {
         self.super_ty(ty);
     }
 
-    /// We sometimes have `generator_substs` within an rvalue, or within a
-    /// call. Make them live at the location where they appear.
-    fn visit_generator_substs(&mut self, substs: &GeneratorSubsts<'tcx>, location: Location) {
-        self.add_regular_live_constraint(*substs, location);
-        self.super_generator_substs(substs);
-    }
-
     fn visit_statement(
         &mut self,
         statement: &Statement<'tcx>,
index ff4243df6e9b869cd9bd648de705c68fe040ff7f..59b2796db7abe2c6f9fded0e539a41d97612720c 100644 (file)
@@ -17,6 +17,7 @@
 
 mod find_use;
 
+#[derive(Debug)]
 pub(in crate::borrow_check) enum BorrowExplanation {
     UsedLater(LaterUseKind, Span),
     UsedLaterInLoop(LaterUseKind, Span),
@@ -35,7 +36,7 @@ pub(in crate::borrow_check) enum BorrowExplanation {
     Unexplained,
 }
 
-#[derive(Clone, Copy)]
+#[derive(Clone, Copy, Debug)]
 pub(in crate::borrow_check) enum LaterUseKind {
     TraitCapture,
     ClosureCapture,
@@ -457,7 +458,7 @@ fn find_loop_head_dfs(
     /// True if an edge `source -> target` is a backedge -- in other words, if the target
     /// dominates the source.
     fn is_back_edge(&self, source: Location, target: Location) -> bool {
-        target.dominates(source, &self.body.dominators())
+        target.dominates(source, &self.dominators)
     }
 
     /// Determine how the borrow was later used.
index 014335711a5114bba2f1ad119645a356c9cc2d39..9ecd6f837750e797365b467b434a4d818dcaf5ad 100644 (file)
@@ -1,6 +1,6 @@
 use rustc::ty::subst::SubstsRef;
-use rustc::ty::{self, GeneratorSubsts, Ty, TypeFoldable};
-use rustc::mir::{Location, Body, Promoted};
+use rustc::ty::{self, Ty, TypeFoldable};
+use rustc::mir::{Body, Location, PlaceElem, Promoted};
 use rustc::mir::visit::{MutVisitor, TyContext};
 use rustc::infer::{InferCtxt, NLLRegionVariableOrigin};
 use rustc_index::vec::IndexVec;
@@ -62,6 +62,21 @@ fn visit_ty(&mut self, ty: &mut Ty<'tcx>, ty_context: TyContext) {
         debug!("visit_ty: ty={:?}", ty);
     }
 
+    fn process_projection_elem(
+        &mut self,
+        elem: &PlaceElem<'tcx>,
+    ) -> Option<PlaceElem<'tcx>> {
+        if let PlaceElem::Field(field, ty) = elem {
+            let new_ty = self.renumber_regions(ty);
+
+            if new_ty != *ty {
+                return Some(PlaceElem::Field(*field, new_ty));
+            }
+        }
+
+        None
+    }
+
     fn visit_substs(&mut self, substs: &mut SubstsRef<'tcx>, location: Location) {
         debug!("visit_substs(substs={:?}, location={:?})", substs, location);
 
@@ -82,18 +97,4 @@ fn visit_region(&mut self, region: &mut ty::Region<'tcx>, location: Location) {
     fn visit_const(&mut self, constant: &mut &'tcx ty::Const<'tcx>, _location: Location) {
         *constant = self.renumber_regions(&*constant);
     }
-
-    fn visit_generator_substs(&mut self,
-                              substs: &mut GeneratorSubsts<'tcx>,
-                              location: Location) {
-        debug!(
-            "visit_generator_substs(substs={:?}, location={:?})",
-            substs,
-            location,
-        );
-
-        *substs = self.renumber_regions(substs);
-
-        debug!("visit_generator_substs: substs={:?}", substs);
-    }
 }
index f55889794c24db9e386ff86b9a7862702de5e877..ed639e8eee7747efe171f506a731b0033d6d1b17 100644 (file)
@@ -759,13 +759,13 @@ fn field_ty(
             PlaceTy { ty, variant_index: Some(variant_index) } => match ty.kind {
                 ty::Adt(adt_def, substs) => (&adt_def.variants[variant_index], substs),
                 ty::Generator(def_id, substs, _) => {
-                    let mut variants = substs.state_tys(def_id, tcx);
+                    let mut variants = substs.as_generator().state_tys(def_id, tcx);
                     let mut variant = match variants.nth(variant_index.into()) {
                         Some(v) => v,
                         None => {
                             bug!("variant_index of generator out of range: {:?}/{:?}",
                                  variant_index,
-                                 substs.state_tys(def_id, tcx).count())
+                                 substs.as_generator().state_tys(def_id, tcx).count())
                         }
                     };
                     return match variant.nth(field.index()) {
@@ -791,10 +791,10 @@ fn field_ty(
                 ty::Generator(def_id, substs, _) => {
                     // Only prefix fields (upvars and current state) are
                     // accessible without a variant index.
-                    return match substs.prefix_tys(def_id, tcx).nth(field.index()) {
+                    return match substs.as_generator().prefix_tys(def_id, tcx).nth(field.index()) {
                         Some(ty) => Ok(ty),
                         None => Err(FieldAccessError::OutOfRange {
-                            field_count: substs.prefix_tys(def_id, tcx).count(),
+                            field_count: substs.as_generator().prefix_tys(def_id, tcx).count(),
                         }),
                     }
                 }
@@ -1396,7 +1396,9 @@ fn check_stmt(&mut self, body: &Body<'tcx>, stmt: &Statement<'tcx>, location: Lo
                 };
 
                 let place_ty = place.ty(body, tcx).ty;
+                let place_ty = self.normalize(place_ty, location);
                 let rv_ty = rv.ty(body, tcx);
+                let rv_ty = self.normalize(rv_ty, location);
                 if let Err(terr) =
                     self.sub_types_or_anon(rv_ty, place_ty, location.to_locations(), category)
                 {
@@ -1672,6 +1674,7 @@ fn check_call_dest(
         match *destination {
             Some((ref dest, _target_block)) => {
                 let dest_ty = dest.ty(body, tcx).ty;
+                let dest_ty = self.normalize(dest_ty, term_location);
                 let category = match *dest {
                     Place {
                         base: PlaceBase::Local(RETURN_PLACE),
@@ -1963,10 +1966,10 @@ fn aggregate_field_ty(
                 // It doesn't make sense to look at a field beyond the prefix;
                 // these require a variant index, and are not initialized in
                 // aggregate rvalues.
-                match substs.prefix_tys(def_id, tcx).nth(field_index) {
+                match substs.as_generator().prefix_tys(def_id, tcx).nth(field_index) {
                     Some(ty) => Ok(ty),
                     None => Err(FieldAccessError::OutOfRange {
-                        field_count: substs.prefix_tys(def_id, tcx).count(),
+                        field_count: substs.as_generator().prefix_tys(def_id, tcx).count(),
                     }),
                 }
             }
@@ -2541,7 +2544,7 @@ fn prove_aggregate_predicates(
             // these extra requirements are basically like where
             // clauses on the struct.
             AggregateKind::Closure(def_id, substs)
-            | AggregateKind::Generator(def_id, ty::GeneratorSubsts { substs }, _) => {
+            | AggregateKind::Generator(def_id, substs, _) => {
                 self.prove_closure_bounds(tcx, *def_id, substs, location)
             }
 
index e3efacff66a2fa50b9afafe9b1461dcfc4fb4fda..5f6951856434e8021bbf7866ec51356fb452ea73 100644 (file)
@@ -19,7 +19,7 @@
 use rustc::middle::lang_items;
 use rustc::ty::fold::TypeFoldable;
 use rustc::ty::subst::{InternalSubsts, SubstsRef, Subst};
-use rustc::ty::{self, GeneratorSubsts, RegionVid, Ty, TyCtxt};
+use rustc::ty::{self, RegionVid, Ty, TyCtxt};
 use rustc::util::nodemap::FxHashMap;
 use rustc_index::vec::{Idx, IndexVec};
 use rustc_errors::DiagnosticBuilder;
@@ -90,7 +90,7 @@ pub enum DefiningTy<'tcx> {
     /// The MIR is a generator. The signature is that generators take
     /// no parameters and return the result of
     /// `ClosureSubsts::generator_return_ty`.
-    Generator(DefId, ty::GeneratorSubsts<'tcx>, hir::GeneratorMovability),
+    Generator(DefId, SubstsRef<'tcx>, hir::GeneratorMovability),
 
     /// The MIR is a fn item with the given `DefId` and substs. The signature
     /// of the function can be bound then with the `fn_sig` query.
@@ -113,7 +113,7 @@ pub fn upvar_tys(self, tcx: TyCtxt<'tcx>) -> impl Iterator<Item = Ty<'tcx>> + 't
                 substs.as_closure().upvar_tys(def_id, tcx)
             ),
             DefiningTy::Generator(def_id, substs, _) => {
-                Either::Right(Either::Left(substs.upvar_tys(def_id, tcx)))
+                Either::Right(Either::Left(substs.as_generator().upvar_tys(def_id, tcx)))
             }
             DefiningTy::FnDef(..) | DefiningTy::Const(..) => {
                 Either::Right(Either::Right(iter::empty()))
@@ -334,7 +334,7 @@ pub fn to_region_vid(&self, r: ty::Region<'tcx>) -> RegionVid {
                 err.note(&format!(
                     "defining type: {:?} with generator substs {:#?}",
                     def_id,
-                    &substs.substs[..]
+                    &substs[..]
                 ));
 
                 // FIXME: As above, we'd like to print out the region
@@ -470,7 +470,7 @@ fn build(self) -> UniversalRegions<'tcx> {
 
         let yield_ty = match defining_ty {
             DefiningTy::Generator(def_id, substs, _) => {
-                Some(substs.yield_ty(def_id, self.infcx.tcx))
+                Some(substs.as_generator().yield_ty(def_id, self.infcx.tcx))
             }
             _ => None,
         };
@@ -549,7 +549,7 @@ fn compute_indices(
         let identity_substs = InternalSubsts::identity_for_item(tcx, closure_base_def_id);
         let fr_substs = match defining_ty {
             DefiningTy::Closure(_, ref substs)
-            | DefiningTy::Generator(_, GeneratorSubsts { ref substs }, _) => {
+            | DefiningTy::Generator(_, ref substs, _) => {
                 // In the case of closures, we rely on the fact that
                 // the first N elements in the ClosureSubsts are
                 // inherited from the `closure_base_def_id`.
@@ -612,7 +612,7 @@ fn compute_inputs_and_output(
 
             DefiningTy::Generator(def_id, substs, movability) => {
                 assert_eq!(self.mir_def_id, def_id);
-                let output = substs.return_ty(def_id, tcx);
+                let output = substs.as_generator().return_ty(def_id, tcx);
                 let generator_ty = tcx.mk_generator(def_id, substs, movability);
                 let inputs_and_output = self.infcx.tcx.intern_type_list(&[generator_ty, output]);
                 ty::Binder::dummy(inputs_and_output)
index b4d93a4493fd9b14b8648f75d60608ad10f7e0fb..87d95a751534d1d54750a6ec7ae3167fdd8bc239 100644 (file)
@@ -128,7 +128,6 @@ fn expr_as_rvalue(
                         expr_span,
                         scope,
                         result,
-                        expr.ty,
                     );
                 }
 
@@ -569,7 +568,6 @@ fn limit_capture_mutability(
                 upvar_span,
                 temp_lifetime,
                 temp,
-                upvar_ty,
             );
         }
 
index dbcc330eca382cc2e1d444fcb8e6856f23d3f037..18332ed68f8bd15ae65d7dba2c36fd7121bc5aef 100644 (file)
@@ -103,7 +103,6 @@ fn expr_as_temp(
                         expr_span,
                         temp_lifetime,
                         temp,
-                        expr_ty,
                         DropKind::Storage,
                     );
                 }
@@ -117,7 +116,6 @@ fn expr_as_temp(
                 expr_span,
                 temp_lifetime,
                 temp,
-                expr_ty,
                 DropKind::Value,
             );
         }
index 861de07c612e9bbaaa9b2f23616a946167161c99..2e451fc88d95c37fa29153521d63c074689ca4c5 100644 (file)
@@ -535,21 +535,18 @@ pub fn storage_live_binding(
                 kind: StatementKind::StorageLive(local_id),
             },
         );
-        let var_ty = self.local_decls[local_id].ty;
         let region_scope = self.hir.region_scope_tree.var_scope(var.local_id);
-        self.schedule_drop(span, region_scope, local_id, var_ty, DropKind::Storage);
+        self.schedule_drop(span, region_scope, local_id, DropKind::Storage);
         Place::from(local_id)
     }
 
     pub fn schedule_drop_for_binding(&mut self, var: HirId, span: Span, for_guard: ForGuard) {
         let local_id = self.var_local_id(var, for_guard);
-        let var_ty = self.local_decls[local_id].ty;
         let region_scope = self.hir.region_scope_tree.var_scope(var.local_id);
         self.schedule_drop(
             span,
             region_scope,
             local_id,
-            var_ty,
             DropKind::Value,
         );
     }
index 9c30d9509908b7e89520929a7aa26beda4cc0659..ffb70180bbb4b7676f1c88098e7fcf21071a756c 100644 (file)
@@ -146,7 +146,7 @@ pub fn mir_build(tcx: TyCtxt<'_>, def_id: DefId) -> Body<'_> {
             let (yield_ty, return_ty) = if body.generator_kind.is_some() {
                 let gen_sig = match ty.kind {
                     ty::Generator(gen_def_id, gen_substs, ..) =>
-                        gen_substs.sig(gen_def_id, tcx),
+                        gen_substs.as_generator().sig(gen_def_id, tcx),
                     _ =>
                         span_bug!(tcx.hir().span(id),
                                   "generator w/o generator type: {:?}", ty),
@@ -502,24 +502,21 @@ macro_rules! unpack {
     };
 }
 
-fn should_abort_on_panic(tcx: TyCtxt<'_>, fn_def_id: DefId, abi: Abi) -> bool {
-    // Not callable from C, so we can safely unwind through these
-    if abi == Abi::Rust || abi == Abi::RustCall { return false; }
-
-    // Validate `#[unwind]` syntax regardless of platform-specific panic strategy
+fn should_abort_on_panic(tcx: TyCtxt<'_>, fn_def_id: DefId, _abi: Abi) -> bool {
+    // Validate `#[unwind]` syntax regardless of platform-specific panic strategy.
     let attrs = &tcx.get_attrs(fn_def_id);
     let unwind_attr = attr::find_unwind_attr(Some(tcx.sess.diagnostic()), attrs);
 
-    // We never unwind, so it's not relevant to stop an unwind
+    // We never unwind, so it's not relevant to stop an unwind.
     if tcx.sess.panic_strategy() != PanicStrategy::Unwind { return false; }
 
-    // We cannot add landing pads, so don't add one
+    // We cannot add landing pads, so don't add one.
     if tcx.sess.no_landing_pads() { return false; }
 
     // This is a special case: some functions have a C abi but are meant to
     // unwind anyway. Don't stop them.
     match unwind_attr {
-        None => false, // FIXME(#58794)
+        None => false, // FIXME(#58794); should be `!(abi == Abi::Rust || abi == Abi::RustCall)`
         Some(UnwindAttr::Allowed) => false,
         Some(UnwindAttr::Aborts) => true,
     }
@@ -829,12 +826,12 @@ fn args_and_body(&mut self,
             // Function arguments always get the first Local indices after the return place
             let local = Local::new(index + 1);
             let place = Place::from(local);
-            let &ArgInfo(ty, opt_ty_info, arg_opt, ref self_binding) = arg_info;
+            let &ArgInfo(_, opt_ty_info, arg_opt, ref self_binding) = arg_info;
 
             // Make sure we drop (parts of) the argument even when not matched on.
             self.schedule_drop(
                 arg_opt.as_ref().map_or(ast_body.span, |arg| arg.pat.span),
-                argument_scope, local, ty, DropKind::Value,
+                argument_scope, local, DropKind::Value,
             );
 
             if let Some(arg) = arg_opt {
index a26ec72584bda72b86ef4d814094e81ca40ce6e4..a749b4263ea64abe73a5e4a5981519ec35995648 100644 (file)
@@ -85,7 +85,6 @@
 use crate::build::{BlockAnd, BlockAndExtension, BlockFrame, Builder, CFG};
 use crate::hair::{Expr, ExprRef, LintLevel};
 use rustc::middle::region;
-use rustc::ty::Ty;
 use rustc::hir;
 use rustc::mir::*;
 use syntax_pos::{DUMMY_SP, Span};
@@ -173,11 +172,11 @@ struct BreakableScope<'tcx> {
     region_scope: region::Scope,
     /// Where the body of the loop begins. `None` if block
     continue_block: Option<BasicBlock>,
-    /// Block to branch into when the loop or block terminates (either by being `break`-en out
-    /// from, or by having its condition to become false)
+    /// Block to branch into when the loop or block terminates (either by being
+    /// `break`-en out from, or by having its condition become false)
     break_block: BasicBlock,
-    /// The destination of the loop/block expression itself (i.e., where to put the result of a
-    /// `break` expression)
+    /// The destination of the loop/block expression itself (i.e., where to put
+    /// the result of a `break` expression)
     break_destination: Place<'tcx>,
 }
 
@@ -728,10 +727,9 @@ pub fn schedule_drop_storage_and_value(
         span: Span,
         region_scope: region::Scope,
         local: Local,
-        place_ty: Ty<'tcx>,
     ) {
-        self.schedule_drop(span, region_scope, local, place_ty, DropKind::Storage);
-        self.schedule_drop(span, region_scope, local, place_ty, DropKind::Value);
+        self.schedule_drop(span, region_scope, local, DropKind::Storage);
+        self.schedule_drop(span, region_scope, local, DropKind::Value);
     }
 
     /// Indicates that `place` should be dropped on exit from
@@ -744,12 +742,13 @@ pub fn schedule_drop(
         span: Span,
         region_scope: region::Scope,
         local: Local,
-        place_ty: Ty<'tcx>,
         drop_kind: DropKind,
     ) {
-        let needs_drop = self.hir.needs_drop(place_ty);
-        match drop_kind {
-            DropKind::Value => if !needs_drop { return },
+        let needs_drop = match drop_kind {
+            DropKind::Value => {
+                if !self.hir.needs_drop(self.local_decls[local].ty) { return }
+                true
+            },
             DropKind::Storage => {
                 if local.index() <= self.arg_count {
                     span_bug!(
@@ -758,8 +757,9 @@ pub fn schedule_drop(
                         self.arg_count,
                     )
                 }
+                false
             }
-        }
+        };
 
         for scope in self.scopes.iter_mut() {
             let this_scope = scope.region_scope == region_scope;
index 990425c3252e0ecab664b93cc2903ef9a04cec57..bc09e327179268e12f351bae0c8030252497fee5 100644 (file)
@@ -104,25 +104,16 @@ fn borrow_allows_mutation(
         kind: mir::BorrowKind,
         borrowed_place: &mir::Place<'tcx>,
     ) -> bool {
-        let borrowed_ty = borrowed_place.ty(self.body, self.tcx).ty;
-
-        // Zero-sized types cannot be mutated, since there is nothing inside to mutate.
-        //
-        // FIXME: For now, we only exempt arrays of length zero. We need to carefully
-        // consider the effects before extending this to all ZSTs.
-        if let ty::Array(_, len) = borrowed_ty.kind {
-            if len.try_eval_usize(self.tcx, self.param_env) == Some(0) {
-                return false;
-            }
-        }
-
         match kind {
             mir::BorrowKind::Mut { .. } => true,
 
             | mir::BorrowKind::Shared
             | mir::BorrowKind::Shallow
             | mir::BorrowKind::Unique
-            => !borrowed_ty.is_freeze(self.tcx, self.param_env, DUMMY_SP),
+            => !borrowed_place
+                .ty(self.body, self.tcx)
+                .ty
+                .is_freeze(self.tcx, self.param_env, DUMMY_SP),
         }
     }
 }
index 0f66b13fdc51af33d7404e3113583334aec274d9..c1695ba66d0d536e54142f907043c66ddbe975e3 100644 (file)
@@ -109,15 +109,13 @@ fn start_block_effect(&self, _sets: &mut BitSet<Local>) {
         assert_eq!(1, self.body.arg_count);
     }
 
-    fn statement_effect(&self,
-                        sets: &mut GenKillSet<Local>,
-                        loc: Location) {
-        self.check_for_move(sets, loc);
+    fn before_statement_effect(&self, sets: &mut GenKillSet<Self::Idx>, loc: Location) {
+        // If we borrow or assign to a place then it needs storage for that
+        // statement.
         self.check_for_borrow(sets, loc);
 
         let stmt = &self.body[loc.block].statements[loc.statement_index];
         match stmt.kind {
-            StatementKind::StorageLive(l) => sets.gen(l),
             StatementKind::StorageDead(l) => sets.kill(l),
             StatementKind::Assign(box(ref place, _))
             | StatementKind::SetDiscriminant { box ref place, .. } => {
@@ -136,11 +134,35 @@ fn statement_effect(&self,
         }
     }
 
-    fn terminator_effect(&self,
-                         sets: &mut GenKillSet<Local>,
-                         loc: Location) {
+    fn statement_effect(&self, sets: &mut GenKillSet<Local>, loc: Location) {
+        // If we move from a place then it only stops needing storage *after*
+        // that statement.
         self.check_for_move(sets, loc);
+    }
+
+    fn before_terminator_effect(&self, sets: &mut GenKillSet<Local>, loc: Location) {
         self.check_for_borrow(sets, loc);
+
+        if let TerminatorKind::Call {
+            destination: Some((Place { base: PlaceBase::Local(local), .. }, _)),
+            ..
+        } = self.body[loc.block].terminator().kind {
+            sets.gen(local);
+        }
+    }
+
+    fn terminator_effect(&self, sets: &mut GenKillSet<Local>, loc: Location) {
+        // For call terminators, the destination needs storage during the call
+        // and after the call returns successfully, but not after a panic.
+        // Since `propagate_call_unwind` doesn't exist, we have to kill the
+        // destination here, and then gen it again in `propagate_call_return`.
+        if let TerminatorKind::Call {
+            destination: Some((Place { base: PlaceBase::Local(local), projection: box [] }, _)),
+            ..
+        } = self.body[loc.block].terminator().kind {
+            sets.kill(local);
+        }
+        self.check_for_move(sets, loc);
     }
 
     fn propagate_call_return(
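For illustration only: a minimal standalone sketch of the gen/kill ordering described in the comments above, using a hypothetical `TransferFn` in place of rustc's `GenKillSet<Local>` (assumed names, not part of the patch). The point is that effects registered in a "before" transfer are visible during the statement, while effects in the main transfer only take hold afterwards.

```rust
// Editorial sketch only -- not rustc code. `TransferFn` is a hypothetical
// stand-in for `GenKillSet<Local>`; locals are plain u32 indices here.
use std::collections::HashSet;

#[derive(Default)]
struct TransferFn {
    gens: HashSet<u32>,
    kills: HashSet<u32>,
}

impl TransferFn {
    fn add_gen(&mut self, local: u32) {
        self.kills.remove(&local);
        self.gens.insert(local);
    }
    fn add_kill(&mut self, local: u32) {
        self.gens.remove(&local);
        self.kills.insert(local);
    }
    // Apply this transfer function to the set of storage-live locals.
    fn apply(&self, live: &mut HashSet<u32>) {
        for l in &self.kills {
            live.remove(l);
        }
        for l in &self.gens {
            live.insert(*l);
        }
    }
}

fn main() {
    // Suppose local 3 is borrowed by a statement S (so it needs storage
    // *during* S) and also moved out of by S (so it stops needing storage
    // *after* S). The borrow goes in the "before" effect and the move in
    // the main effect, matching the split described in the comments above.
    let mut before_effect = TransferFn::default();
    before_effect.add_gen(3);

    let mut main_effect = TransferFn::default();
    main_effect.add_kill(3);

    let mut live = HashSet::new();
    before_effect.apply(&mut live);
    assert!(live.contains(&3)); // storage is live during S
    main_effect.apply(&mut live);
    assert!(!live.contains(&3)); // and dead immediately after S
}
```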
index 06999abdc8b264025f1136dddb88153e67f14c9e..ad0f75d772548b7208bf942b21d03e1280e9461b 100644 (file)
@@ -1,4 +1,5 @@
 use syntax::ast::{self, MetaItem};
+use syntax::print::pprust;
 use syntax::symbol::{Symbol, sym};
 
 use rustc_index::bit_set::{BitSet, HybridBitSet};
@@ -159,9 +160,8 @@ pub(crate) fn run<P>(
                 if let Some(s) = item.value_str() {
                     return Some(s.to_string())
                 } else {
-                    sess.span_err(
-                        item.span,
-                        &format!("{} attribute requires a path", item.path));
+                    let path = pprust::path_to_string(&item.path);
+                    sess.span_err(item.span, &format!("{} attribute requires a path", path));
                     return None;
                 }
             }
index fb1311de9a70659bf14855cfc9fa9f1bc302e176..419c905cb5127a070530064f1b056b4630f7cb21 100644 (file)
@@ -64,7 +64,9 @@ fn foo(x: Option<String>) {
 This error indicates that the compiler cannot guarantee a matching pattern for
 one or more possible inputs to a match expression. Guaranteed matches are
 required in order to assign values to match expressions, or alternatively,
-determine the flow of execution. Erroneous code example:
+determine the flow of execution.
+
+Erroneous code example:
 
 ```compile_fail,E0004
 enum Terminator {
@@ -109,7 +111,9 @@ enum Terminator {
 
 E0005: r##"
 Patterns used to bind names must be irrefutable, that is, they must guarantee
-that a name will be extracted in all cases. Erroneous code example:
+that a name will be extracted in all cases.
+
+Erroneous code example:
 
 ```compile_fail,E0005
 let x = Some(1);
@@ -145,6 +149,8 @@ enum Terminator {
 moved into a variable called `op_string` while simultaneously requiring the
 inner `String` to be moved into a variable called `s`.
 
+Erroneous code example:
+
 ```compile_fail,E0007
 let x = Some("s".to_string());
 
@@ -208,15 +214,130 @@ struct X { x: (), }
 ```
 "##,
 
+E0010: r##"
+The value of statics and constants must be known at compile time, and they live
+for the entire lifetime of a program. Creating a boxed value allocates memory on
+the heap at runtime, and therefore cannot be done at compile time.
+
+Erroneous code example:
+
+```compile_fail,E0010
+#![feature(box_syntax)]
+
+const CON : Box<i32> = box 0;
+```
+"##,
+
+E0013: r##"
+Static and const variables can refer to other const variables. But a const
+variable cannot refer to a static variable.
+
+Erroneous code example:
+
+```compile_fail,E0013
+static X: i32 = 42;
+const Y: i32 = X;
+```
+
+In this example, `Y` cannot refer to `X`. To fix this, the value can be
+extracted as a const and then used:
+
+```
+const A: i32 = 42;
+static X: i32 = A;
+const Y: i32 = A;
+```
+"##,
+
+// FIXME(#57563) Change the language here when const fn stabilizes
+E0015: r##"
+The only functions that can be called in static or constant expressions are
+`const` functions, and struct/enum constructors. `const` functions are only
+available on a nightly compiler. Rust currently does not support more general
+compile-time function execution.
+
+```
+const FOO: Option<u8> = Some(1); // enum constructor
+struct Bar {x: u8}
+const BAR: Bar = Bar {x: 1}; // struct constructor
+```
+
+See [RFC 911] for more details on the design of `const fn`s.
+
+[RFC 911]: https://github.com/rust-lang/rfcs/blob/master/text/0911-const-fn.md
+"##,
+
+E0017: r##"
+References in statics and constants may only refer to immutable values.
+
+Erroneous code example:
+
+```compile_fail,E0017
+static X: i32 = 1;
+const C: i32 = 2;
+
+// these three are not allowed:
+const CR: &mut i32 = &mut C;
+static STATIC_REF: &'static mut i32 = &mut X;
+static CONST_REF: &'static mut i32 = &mut C;
+```
+
+Statics are shared everywhere, and if they refer to mutable data one might
+violate memory safety since holding multiple mutable references to shared data
+is not allowed.
+
+If you really want global mutable state, try using `static mut` or a global
+`UnsafeCell`.
+"##,
+
+E0019: r##"
+A function call isn't allowed in the const's initialization expression
+because the expression's value must be known at compile-time.
+
+Erroneous code example:
+
+```compile_fail,E0019
+#![feature(box_syntax)]
+
+fn main() {
+    struct MyOwned;
+
+    static STATIC11: Box<MyOwned> = box MyOwned; // error!
+}
+```
+
+Remember: you can't use a function call inside a const's initialization
+expression! However, you can totally use it anywhere else:
+
+```
+enum Test {
+    V1
+}
+
+impl Test {
+    fn func(&self) -> i32 {
+        12
+    }
+}
+
+fn main() {
+    const FOO: Test = Test::V1;
+
+    FOO.func(); // here is good
+    let x = FOO.func(); // or even here!
+}
+```
+"##,
+
 E0030: r##"
 When matching against a range, the compiler verifies that the range is
-non-empty.  Range patterns include both end-points, so this is equivalent to
+non-empty. Range patterns include both end-points, so this is equivalent to
 requiring the start of the range to be less than or equal to the end of the
 range.
 
-For example:
+Erroneous code example:
 
-```compile_fail
+```compile_fail,E0030
 match 5u32 {
     // This range is ok, albeit pointless.
     1 ..= 1 => {}
@@ -226,7 +347,61 @@ struct X { x: (), }
 ```
 "##,
 
+E0133: r##"
+Unsafe code was used outside of an unsafe function or block.
+
+Erroneous code example:
+
+```compile_fail,E0133
+unsafe fn f() { return; } // This is the unsafe code
+
+fn main() {
+    f(); // error: call to unsafe function requires unsafe function or block
+}
+```
+
+Using unsafe functionality is potentially dangerous and disallowed by safety
+checks. Examples:
+
+* Dereferencing raw pointers
+* Calling functions via FFI
+* Calling functions marked unsafe
+
+These safety checks can be relaxed for a section of the code by wrapping the
+unsafe instructions with an `unsafe` block. For instance:
+
+```
+unsafe fn f() { return; }
+
+fn main() {
+    unsafe { f(); } // ok!
+}
+```
+
+See also https://doc.rust-lang.org/book/ch19-01-unsafe-rust.html
+"##,
+
 E0158: r##"
+An associated const has been referenced in a pattern.
+
+Erroneous code example:
+
+```compile_fail,E0158
+enum EFoo { A, B, C, D }
+
+trait Foo {
+    const X: EFoo;
+}
+
+fn test<A: Foo>(arg: EFoo) {
+    match arg {
+        A::X => { // error!
+            println!("A::X");
+        }
+    }
+}
+```
+
 `const` and `static` mean different things. A `const` is a compile-time
 constant, an alias for a literal value. This property means you can match it
 directly within a pattern.
@@ -247,6 +422,39 @@ struct X { x: (), }
 ```
 "##,
 
+E0161: r##"
+A value was moved. However, its size was not known at compile time, and only
+values of a known size can be moved.
+
+Erroneous code example:
+
+```compile_fail,E0161
+#![feature(box_syntax)]
+
+fn main() {
+    let array: &[isize] = &[1, 2, 3];
+    let _x: Box<[isize]> = box *array;
+    // error: cannot move a value of type [isize]: the size of [isize] cannot
+    //        be statically determined
+}
+```
+
+In Rust, you can only move a value when its size is known at compile time.
+
+To work around this restriction, consider "hiding" the value behind a reference:
+either `&x` or `&mut x`. Since a reference has a fixed size, this lets you move
+it around as usual. Example:
+
+```
+#![feature(box_syntax)]
+
+fn main() {
+    let array: &[isize] = &[1, 2, 3];
+    let _x: Box<&[isize]> = box array; // ok!
+}
+```
+"##,
+
 E0162: r##"
 #### Note: this error code is no longer emitted by the compiler.
 
@@ -468,158 +676,6 @@ pub enum Method { GET, POST }
 See also https://github.com/rust-lang/rust/issues/14587
 "##,
 
-E0010: r##"
-The value of statics and constants must be known at compile time, and they live
-for the entire lifetime of a program. Creating a boxed value allocates memory on
-the heap at runtime, and therefore cannot be done at compile time. Erroneous
-code example:
-
-```compile_fail,E0010
-#![feature(box_syntax)]
-
-const CON : Box<i32> = box 0;
-```
-"##,
-
-E0013: r##"
-Static and const variables can refer to other const variables. But a const
-variable cannot refer to a static variable. For example, `Y` cannot refer to
-`X` here:
-
-```compile_fail,E0013
-static X: i32 = 42;
-const Y: i32 = X;
-```
-
-To fix this, the value can be extracted as a const and then used:
-
-```
-const A: i32 = 42;
-static X: i32 = A;
-const Y: i32 = A;
-```
-"##,
-
-// FIXME(#57563) Change the language here when const fn stabilizes
-E0015: r##"
-The only functions that can be called in static or constant expressions are
-`const` functions, and struct/enum constructors. `const` functions are only
-available on a nightly compiler. Rust currently does not support more general
-compile-time function execution.
-
-```
-const FOO: Option<u8> = Some(1); // enum constructor
-struct Bar {x: u8}
-const BAR: Bar = Bar {x: 1}; // struct constructor
-```
-
-See [RFC 911] for more details on the design of `const fn`s.
-
-[RFC 911]: https://github.com/rust-lang/rfcs/blob/master/text/0911-const-fn.md
-"##,
-
-E0017: r##"
-References in statics and constants may only refer to immutable values.
-Erroneous code example:
-
-```compile_fail,E0017
-static X: i32 = 1;
-const C: i32 = 2;
-
-// these three are not allowed:
-const CR: &mut i32 = &mut C;
-static STATIC_REF: &'static mut i32 = &mut X;
-static CONST_REF: &'static mut i32 = &mut C;
-```
-
-Statics are shared everywhere, and if they refer to mutable data one might
-violate memory safety since holding multiple mutable references to shared data
-is not allowed.
-
-If you really want global mutable state, try using `static mut` or a global
-`UnsafeCell`.
-"##,
-
-E0019: r##"
-A function call isn't allowed in the const's initialization expression
-because the expression's value must be known at compile-time. Erroneous code
-example:
-
-```compile_fail
-enum Test {
-    V1
-}
-
-impl Test {
-    fn test(&self) -> i32 {
-        12
-    }
-}
-
-fn main() {
-    const FOO: Test = Test::V1;
-
-    const A: i32 = FOO.test(); // You can't call Test::func() here!
-}
-```
-
-Remember: you can't use a function call inside a const's initialization
-expression! However, you can totally use it anywhere else:
-
-```
-enum Test {
-    V1
-}
-
-impl Test {
-    fn func(&self) -> i32 {
-        12
-    }
-}
-
-fn main() {
-    const FOO: Test = Test::V1;
-
-    FOO.func(); // here is good
-    let x = FOO.func(); // or even here!
-}
-```
-"##,
-
-E0133: r##"
-Unsafe code was used outside of an unsafe function or block.
-
-Erroneous code example:
-
-```compile_fail,E0133
-unsafe fn f() { return; } // This is the unsafe code
-
-fn main() {
-    f(); // error: call to unsafe function requires unsafe function or block
-}
-```
-
-Using unsafe functionality is potentially dangerous and disallowed by safety
-checks. Examples:
-
-* Dereferencing raw pointers
-* Calling functions via FFI
-* Calling functions marked unsafe
-
-These safety checks can be relaxed for a section of the code by wrapping the
-unsafe instructions with an `unsafe` block. For instance:
-
-```
-unsafe fn f() { return; }
-
-fn main() {
-    unsafe { f(); } // ok!
-}
-```
-
-See also https://doc.rust-lang.org/book/ch19-01-unsafe-rust.html
-"##,
-
 E0373: r##"
 This error occurs when an attempt is made to use data captured by a closure,
 when that data may no longer exist. It's most commonly seen when attempting to
@@ -672,7 +728,9 @@ fn foo() -> Box<Fn(u32) -> u32> {
 "##,
 
 E0381: r##"
-It is not allowed to use or capture an uninitialized variable. For example:
+It is not allowed to use or capture an uninitialized variable.
+
+Erroneous code example:
 
 ```compile_fail,E0381
 fn main() {
@@ -694,7 +752,9 @@ fn main() {
 
 E0382: r##"
 This error occurs when an attempt is made to use a variable after its contents
-have been moved elsewhere. For example:
+have been moved elsewhere.
+
+Erroneous code example:
 
 ```compile_fail,E0382
 struct MyStruct { s: u32 }
@@ -842,7 +902,8 @@ fn drop(&mut self) { /* ... */ }
 
 E0384: r##"
 This error occurs when an attempt is made to reassign an immutable variable.
-For example:
+
+Erroneous code example:
 
 ```compile_fail,E0384
 fn main() {
@@ -862,13 +923,15 @@ fn main() {
 ```
 "##,
 
-/*E0386: r##"
+E0386: r##"
+#### Note: this error code is no longer emitted by the compiler.
+
 This error occurs when an attempt is made to mutate the target of a mutable
 reference stored inside an immutable container.
 
 For example, this can happen when storing a `&mut` inside an immutable `Box`:
 
-```compile_fail,E0386
+```
 let mut x: i64 = 1;
 let y: Box<_> = Box::new(&mut x);
 **y = 2; // error, cannot assign to data in an immutable container
@@ -892,13 +955,15 @@ fn main() {
 let y: Box<Cell<_>> = Box::new(Cell::new(x));
 y.set(2);
 ```
-"##,*/
+"##,
 
 E0387: r##"
 #### Note: this error code is no longer emitted by the compiler.
 
 This error occurs when an attempt is made to mutate or mutably reference data
-that a closure has captured immutably. Examples of this error are shown below:
+that a closure has captured immutably.
+
+Erroneous code example:
 
 ```compile_fail
 // Accepts a function or a closure that captures its environment immutably.
@@ -953,7 +1018,7 @@ fn mutable() {
 "##,
 
 E0388: r##"
-E0388 was removed and is no longer issued.
+#### Note: this error code is no longer emitted by the compiler.
 "##,
 
 E0389: r##"
@@ -963,7 +1028,7 @@ fn mutable() {
 commonly occurs when attempting to assign to a non-mutable reference of a
 mutable reference (`&(&mut T)`).
 
-Example of erroneous code:
+Erroneous code example:
 
 ```compile_fail
 struct FancyNum {
@@ -1022,43 +1087,11 @@ fn main() {
 ```
 "##,
 
-E0161: r##"
-A value was moved. However, its size was not known at compile time, and only
-values of a known size can be moved.
+E0492: r##"
+A borrow of a constant containing interior mutability was attempted.
 
 Erroneous code example:
 
-```compile_fail
-#![feature(box_syntax)]
-
-fn main() {
-    let array: &[isize] = &[1, 2, 3];
-    let _x: Box<[isize]> = box *array;
-    // error: cannot move a value of type [isize]: the size of [isize] cannot
-    //        be statically determined
-}
-```
-
-In Rust, you can only move a value when its size is known at compile time.
-
-To work around this restriction, consider "hiding" the value behind a reference:
-either `&x` or `&mut x`. Since a reference has a fixed size, this lets you move
-it around as usual. Example:
-
-```
-#![feature(box_syntax)]
-
-fn main() {
-    let array: &[isize] = &[1, 2, 3];
-    let _x: Box<&[isize]> = box array; // ok!
-}
-```
-"##,
-
-E0492: r##"
-A borrow of a constant containing interior mutability was attempted. Erroneous
-code example:
-
 ```compile_fail,E0492
 use std::sync::atomic::AtomicUsize;
 
@@ -1174,7 +1207,9 @@ struct Foo {
 "##,
 
 E0499: r##"
-A variable was borrowed as mutable more than once. Erroneous code example:
+A variable was borrowed as mutable more than once.
+
+Erroneous code example:
 
 ```compile_fail,E0499
 let mut i = 0;
@@ -1205,7 +1240,9 @@ struct Foo {
 "##,
 
 E0500: r##"
-A borrowed variable was used by a closure. Example of erroneous code:
+A borrowed variable was used by a closure.
+
+Erroneous code example:
 
 ```compile_fail,E0500
 fn you_know_nothing(jon_snow: &mut i32) {
@@ -1256,7 +1293,7 @@ fn you_know_nothing(jon_snow: &mut i32) {
 http://rustbyexample.com/fn/closures/capture.html for more information about
 capturing.
 
-Example of erroneous code:
+Erroneous code example:
 
 ```compile_fail,E0501
 fn inside_closure(x: &mut i32) {
@@ -1329,7 +1366,7 @@ fn foo(a: &mut i32) {
 This error indicates that you are trying to borrow a variable as mutable when it
 has already been borrowed as immutable.
 
-Example of erroneous code:
+Erroneous code example:
 
 ```compile_fail,E0502
 fn bar(x: &mut i32) {}
@@ -1360,7 +1397,7 @@ fn foo(a: &mut i32) {
 E0503: r##"
 A value was used after it was mutably borrowed.
 
-Example of erroneous code:
+Erroneous code example:
 
 ```compile_fail,E0503
 fn main() {
@@ -1418,7 +1455,7 @@ fn main() {
 This error occurs when an attempt is made to move a borrowed variable into a
 closure.
 
-Example of erroneous code:
+Erroneous code example:
 
 ```compile_fail
 struct FancyNum {
@@ -1609,7 +1646,7 @@ fn main() {
 E0506: r##"
 This error occurs when an attempt is made to assign to a borrowed value.
 
-Example of erroneous code:
+Erroneous code example:
 
 ```compile_fail,E0506
 struct FancyNum {
@@ -1827,7 +1864,7 @@ fn main() {
 E0508: r##"
 A value was moved out of a non-copy fixed-size array.
 
-Example of erroneous code:
+Erroneous code example:
 
 ```compile_fail,E0508
 struct NonCopy;
@@ -1872,7 +1909,7 @@ fn main() {
 This error occurs when an attempt is made to move out of a value whose type
 implements the `Drop` trait.
 
-Example of erroneous code:
+Erroneous code example:
 
 ```compile_fail,E0509
 struct FancyNum {
@@ -1982,30 +2019,14 @@ fn main() {
 to go "back in time" to the `None` arm.
 "##,
 
-E0579: r##"
-When matching against an exclusive range, the compiler verifies that the range
-is non-empty. Exclusive range patterns include the start point but not the end
-point, so this is equivalent to requiring the start of the range to be less
-than the end of the range.
-
-For example:
-
-```compile_fail
-match 5u32 {
-    // This range is ok, albeit pointless.
-    1 .. 2 => {}
-    // This range is empty, and the compiler can tell.
-    5 .. 5 => {}
-}
-```
-"##,
-
 E0515: r##"
 Cannot return value that references local variable
 
 Local variables, function parameters and temporaries are all dropped before the
 end of the function body. So a reference to them cannot be returned.
 
+Erroneous code example:
+
 ```compile_fail,E0515
 fn get_dangling_reference() -> &'static i32 {
     let x = 0;
@@ -2101,6 +2122,28 @@ fn dragoooon(x: &mut isize) {
 ```
 "##,
 
+E0579: r##"
+When matching against an exclusive range, the compiler verifies that the range
+is non-empty. Exclusive range patterns include the start point but not the end
+point, so this is equivalent to requiring the start of the range to be less
+than the end of the range.
+
+Erroneous code example:
+
+```compile_fail,E0579
+#![feature(exclusive_range_pattern)]
+
+fn main() {
+    match 5u32 {
+        // This range is ok, albeit pointless.
+        1 .. 2 => {}
+        // This range is empty, and the compiler can tell.
+        5 .. 5 => {} // error!
+    }
+}
+```
+"##,
+
 E0595: r##"
 #### Note: this error code is no longer emitted by the compiler.
 
@@ -2124,7 +2167,7 @@ fn dragoooon(x: &mut isize) {
 E0596: r##"
 This error occurs because you tried to mutably borrow a non-mutable variable.
 
-Example of erroneous code:
+Erroneous code example:
 
 ```compile_fail,E0596
 let x = 1;
@@ -2143,7 +2186,7 @@ fn dragoooon(x: &mut isize) {
 E0597: r##"
 This error occurs because a value was dropped while it was still borrowed
 
-Example of erroneous code:
+Erroneous code example:
 
 ```compile_fail,E0597
 struct Foo<'a> {
@@ -2180,6 +2223,8 @@ struct Foo<'a> {
 This error occurs because a borrow in a generator persists across a
 yield point.
 
+Erroneous code example:
+
 ```compile_fail,E0626
 # #![feature(generators, generator_trait, pin)]
 # use std::ops::Generator;
@@ -2271,7 +2316,7 @@ struct Foo<'a> {
 This error occurs because a borrow of a thread-local variable was made inside a
 function which outlived the lifetime of the function.
 
-Example of erroneous code:
+Erroneous code example:
 
 ```compile_fail,E0712
 #![feature(thread_local)]
@@ -2293,7 +2338,7 @@ fn main() {
 This error occurs when an attempt is made to borrow state past the end of the
 lifetime of a type that implements the `Drop` trait.
 
-Example of erroneous code:
+Erroneous code example:
 
 ```compile_fail,E0713
 #![feature(nll)]
index d3bc61a4dde560b69cc62125108ed2ded5c4f963..1d83b104177e23ead5bbcc9df2439ef66970f0aa 100644 (file)
 use super::{PatternFoldable, PatternFolder, compare_const_vals};
 
 use rustc::hir::def_id::DefId;
-use rustc::hir::RangeEnd;
+use rustc::hir::{RangeEnd, HirId};
 use rustc::ty::{self, Ty, TyCtxt, TypeFoldable, Const};
 use rustc::ty::layout::{Integer, IntegerExt, VariantIdx, Size};
 
 use rustc::mir::Field;
 use rustc::mir::interpret::{ConstValue, Scalar, truncate, AllocId, Pointer};
 use rustc::util::common::ErrorReported;
+use rustc::lint;
 
 use syntax::attr::{SignedInt, UnsignedInt};
 use syntax_pos::{Span, DUMMY_SP};
@@ -418,7 +419,7 @@ fn is_local(&self, ty: Ty<'tcx>) -> bool {
     }
 }
 
-#[derive(Clone, Debug, PartialEq)]
+#[derive(Clone, Debug)]
 enum Constructor<'tcx> {
     /// The constructor of all patterns that don't vary by constructor,
     /// e.g., struct patterns and fixed-length arrays.
@@ -426,29 +427,71 @@ enum Constructor<'tcx> {
     /// Enum variants.
     Variant(DefId),
     /// Literal values.
-    ConstantValue(&'tcx ty::Const<'tcx>),
+    ConstantValue(&'tcx ty::Const<'tcx>, Span),
     /// Ranges of literal values (`2..=5` and `2..5`).
-    ConstantRange(u128, u128, Ty<'tcx>, RangeEnd),
+    ConstantRange(u128, u128, Ty<'tcx>, RangeEnd, Span),
     /// Array patterns of length n.
     Slice(u64),
 }
 
+// Ignore spans when comparing; they don't carry semantic information, as they are only for lints.
+impl<'tcx> std::cmp::PartialEq for Constructor<'tcx> {
+    fn eq(&self, other: &Self) -> bool {
+        match (self, other) {
+            (Constructor::Single, Constructor::Single) => true,
+            (Constructor::Variant(a), Constructor::Variant(b)) => a == b,
+            (Constructor::ConstantValue(a, _), Constructor::ConstantValue(b, _)) => a == b,
+            (
+                Constructor::ConstantRange(a_start, a_end, a_ty, a_range_end, _),
+                Constructor::ConstantRange(b_start, b_end, b_ty, b_range_end, _),
+            ) => a_start == b_start && a_end == b_end && a_ty == b_ty && a_range_end == b_range_end,
+            (Constructor::Slice(a), Constructor::Slice(b)) => a == b,
+            _ => false,
+        }
+    }
+}
+
 impl<'tcx> Constructor<'tcx> {
+    fn is_slice(&self) -> bool {
+        match self {
+            Slice { .. } => true,
+            _ => false,
+        }
+    }
+
     fn variant_index_for_adt<'a>(
         &self,
         cx: &MatchCheckCtxt<'a, 'tcx>,
         adt: &'tcx ty::AdtDef,
     ) -> VariantIdx {
         match self {
-            &Variant(id) => adt.variant_index_with_id(id),
-            &Single => {
+            Variant(id) => adt.variant_index_with_id(*id),
+            Single => {
                 assert!(!adt.is_enum());
                 VariantIdx::new(0)
             }
-            &ConstantValue(c) => crate::const_eval::const_variant_index(cx.tcx, cx.param_env, c),
+            ConstantValue(c, _) => crate::const_eval::const_variant_index(cx.tcx, cx.param_env, c),
             _ => bug!("bad constructor {:?} for adt {:?}", self, adt)
         }
     }
+
+    fn display(&self, tcx: TyCtxt<'tcx>) -> String {
+        match self {
+            Constructor::ConstantValue(val, _) => format!("{}", val),
+            Constructor::ConstantRange(lo, hi, ty, range_end, _) => {
+                // Get the right sign on the output:
+                let ty = ty::ParamEnv::empty().and(*ty);
+                format!(
+                    "{}{}{}",
+                    ty::Const::from_bits(tcx, *lo, ty),
+                    range_end,
+                    ty::Const::from_bits(tcx, *hi, ty),
+                )
+            }
+            Constructor::Slice(val) => format!("[{}]", val),
+            _ => bug!("bad constructor being displayed: `{:?}", self),
+        }
+    }
 }
 
 #[derive(Clone, Debug)]
@@ -477,6 +520,7 @@ pub enum WitnessPreference {
 struct PatCtxt<'tcx> {
     ty: Ty<'tcx>,
     max_slice_length: u64,
+    span: Span,
 }
 
 /// A witness of non-exhaustiveness for error reporting, represented
@@ -603,8 +647,8 @@ fn apply_constructor<'a>(
 
                 _ => {
                     match *ctor {
-                        ConstantValue(value) => PatKind::Constant { value },
-                        ConstantRange(lo, hi, ty, end) => PatKind::Range(PatRange {
+                        ConstantValue(value, _) => PatKind::Constant { value },
+                        ConstantRange(lo, hi, ty, end, _) => PatKind::Range(PatRange {
                             lo: ty::Const::from_bits(cx.tcx, lo, ty::ParamEnv::empty().and(ty)),
                             hi: ty::Const::from_bits(cx.tcx, hi, ty::ParamEnv::empty().and(ty)),
                             end,
@@ -640,7 +684,7 @@ fn all_constructors<'a, 'tcx>(
     let ctors = match pcx.ty.kind {
         ty::Bool => {
             [true, false].iter().map(|&b| {
-                ConstantValue(ty::Const::from_bool(cx.tcx, b))
+                ConstantValue(ty::Const::from_bool(cx.tcx, b), pcx.span)
             }).collect()
         }
         ty::Array(ref sub_ty, len) if len.try_eval_usize(cx.tcx, cx.param_env).is_some() => {
@@ -672,15 +716,19 @@ fn all_constructors<'a, 'tcx>(
         ty::Char => {
             vec![
                 // The valid Unicode Scalar Value ranges.
-                ConstantRange('\u{0000}' as u128,
-                              '\u{D7FF}' as u128,
-                              cx.tcx.types.char,
-                              RangeEnd::Included
+                ConstantRange(
+                    '\u{0000}' as u128,
+                    '\u{D7FF}' as u128,
+                    cx.tcx.types.char,
+                    RangeEnd::Included,
+                    pcx.span,
                 ),
-                ConstantRange('\u{E000}' as u128,
-                              '\u{10FFFF}' as u128,
-                              cx.tcx.types.char,
-                              RangeEnd::Included
+                ConstantRange(
+                    '\u{E000}' as u128,
+                    '\u{10FFFF}' as u128,
+                    cx.tcx.types.char,
+                    RangeEnd::Included,
+                    pcx.span,
                 ),
             ]
         }
@@ -688,12 +736,12 @@ fn all_constructors<'a, 'tcx>(
             let bits = Integer::from_attr(&cx.tcx, SignedInt(ity)).size().bits() as u128;
             let min = 1u128 << (bits - 1);
             let max = min - 1;
-            vec![ConstantRange(min, max, pcx.ty, RangeEnd::Included)]
+            vec![ConstantRange(min, max, pcx.ty, RangeEnd::Included, pcx.span)]
         }
         ty::Uint(uty) => {
             let size = Integer::from_attr(&cx.tcx, UnsignedInt(uty)).size();
             let max = truncate(u128::max_value(), size);
-            vec![ConstantRange(0, max, pcx.ty, RangeEnd::Included)]
+            vec![ConstantRange(0, max, pcx.ty, RangeEnd::Included, pcx.span)]
         }
         _ => {
             if cx.is_uninhabited(pcx.ty) {
@@ -820,13 +868,90 @@ fn max_slice_length<'p, 'a, 'tcx, I>(cx: &mut MatchCheckCtxt<'a, 'tcx>, patterns
 ///
 /// `IntRange` is never used to encode an empty range or a "range" that wraps
 /// around the (offset) space: i.e., `range.lo <= range.hi`.
-#[derive(Clone)]
+#[derive(Clone, Debug)]
 struct IntRange<'tcx> {
     pub range: RangeInclusive<u128>,
     pub ty: Ty<'tcx>,
+    pub span: Span,
 }
 
 impl<'tcx> IntRange<'tcx> {
+    #[inline]
+    fn is_integral(ty: Ty<'_>) -> bool {
+        match ty.kind {
+            ty::Char | ty::Int(_) | ty::Uint(_) => true,
+            _ => false,
+        }
+    }
+
+    #[inline]
+    fn integral_size_and_signed_bias(tcx: TyCtxt<'tcx>, ty: Ty<'_>) -> Option<(Size, u128)> {
+        match ty.kind {
+            ty::Char => Some((Size::from_bytes(4), 0)),
+            ty::Int(ity) => {
+                let size = Integer::from_attr(&tcx, SignedInt(ity)).size();
+                Some((size, 1u128 << (size.bits() as u128 - 1)))
+            }
+            ty::Uint(uty) => Some((Integer::from_attr(&tcx, UnsignedInt(uty)).size(), 0)),
+            _ => None,
+        }
+    }
+
+    #[inline]
+    fn from_const(
+        tcx: TyCtxt<'tcx>,
+        param_env: ty::ParamEnv<'tcx>,
+        value: &Const<'tcx>,
+        span: Span,
+    ) -> Option<IntRange<'tcx>> {
+        if let Some((target_size, bias)) = Self::integral_size_and_signed_bias(tcx, value.ty) {
+            let ty = value.ty;
+            let val = if let ConstValue::Scalar(Scalar::Raw { data, size }) = value.val {
+                // For this specific pattern we can skip a lot of effort and go
+                // straight to the result, after doing a bit of checking. (We
+                // could remove this branch and just use the next branch, which
+                // is more general but much slower.)
+                Scalar::<()>::check_raw(data, size, target_size);
+                data
+            } else if let Some(val) = value.try_eval_bits(tcx, param_env, ty) {
+                // This is a more general form of the previous branch.
+                val
+            } else {
+                return None
+            };
+            let val = val ^ bias;
+            Some(IntRange { range: val..=val, ty, span })
+        } else {
+            None
+        }
+    }
+
+    #[inline]
+    fn from_range(
+        tcx: TyCtxt<'tcx>,
+        lo: u128,
+        hi: u128,
+        ty: Ty<'tcx>,
+        end: &RangeEnd,
+        span: Span,
+    ) -> Option<IntRange<'tcx>> {
+        if Self::is_integral(ty) {
+            // Perform a shift if the underlying types are signed,
+            // which makes the interval arithmetic simpler.
+            let bias = IntRange::signed_bias(tcx, ty);
+            let (lo, hi) = (lo ^ bias, hi ^ bias);
+            // Make sure the interval is well-formed.
+            if lo > hi || lo == hi && *end == RangeEnd::Excluded {
+                None
+            } else {
+                let offset = (*end == RangeEnd::Excluded) as u128;
+                Some(IntRange { range: lo..=(hi - offset), ty, span })
+            }
+        } else {
+            None
+        }
+    }
+
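For illustration only: `from_range` and `signed_bias` rely on XORing away the sign bit so that signed ranges can be handled with unsigned interval arithmetic. A tiny standalone sketch of that encoding (assumed names, separate from the patch itself):

```rust
// Editorial sketch, not rustc code: the "signed bias" encoding.
// XORing an i8's bit pattern with 1 << 7 maps -128..=127 monotonically
// onto 0..=255, so `lo <= hi` checks and intersections can be done in
// unsigned space (u8 here for brevity, u128 in the compiler).
fn encode_i8(x: i8) -> u8 {
    (x as u8) ^ 0x80 // bias = 1 << (bits - 1)
}

fn main() {
    assert_eq!(encode_i8(-128), 0);
    assert_eq!(encode_i8(-1), 127);
    assert_eq!(encode_i8(0), 128);
    assert_eq!(encode_i8(127), 255);
    // Order is preserved by the encoding.
    assert!(encode_i8(-5) < encode_i8(3));
}
```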
     fn from_ctor(
         tcx: TyCtxt<'tcx>,
         param_env: ty::ParamEnv<'tcx>,
@@ -834,37 +959,9 @@ fn from_ctor(
     ) -> Option<IntRange<'tcx>> {
         // Floating-point ranges are permitted and we don't want
         // to consider them when constructing integer ranges.
-        fn is_integral(ty: Ty<'_>) -> bool {
-            match ty.kind {
-                ty::Char | ty::Int(_) | ty::Uint(_) => true,
-                _ => false,
-            }
-        }
-
         match ctor {
-            ConstantRange(lo, hi, ty, end) if is_integral(ty) => {
-                // Perform a shift if the underlying types are signed,
-                // which makes the interval arithmetic simpler.
-                let bias = IntRange::signed_bias(tcx, ty);
-                let (lo, hi) = (lo ^ bias, hi ^ bias);
-                // Make sure the interval is well-formed.
-                if lo > hi || lo == hi && *end == RangeEnd::Excluded {
-                    None
-                } else {
-                    let offset = (*end == RangeEnd::Excluded) as u128;
-                    Some(IntRange { range: lo..=(hi - offset), ty })
-                }
-            }
-            ConstantValue(val) if is_integral(val.ty) => {
-                let ty = val.ty;
-                if let Some(val) = val.try_eval_bits(tcx, param_env, ty) {
-                    let bias = IntRange::signed_bias(tcx, ty);
-                    let val = val ^ bias;
-                    Some(IntRange { range: val..=val, ty })
-                } else {
-                    None
-                }
-            }
+            ConstantRange(lo, hi, ty, end, span) => Self::from_range(tcx, *lo, *hi, ty, end, *span),
+            ConstantValue(val, span) => Self::from_const(tcx, param_env, val, *span),
             _ => None,
         }
     }
@@ -874,22 +971,27 @@ fn from_pat(
         param_env: ty::ParamEnv<'tcx>,
         mut pat: &Pat<'tcx>,
     ) -> Option<IntRange<'tcx>> {
-        let range = loop {
+        loop {
             match pat.kind {
-                box PatKind::Constant { value } => break ConstantValue(value),
-                box PatKind::Range(PatRange { lo, hi, end }) => break ConstantRange(
-                    lo.eval_bits(tcx, param_env, lo.ty),
-                    hi.eval_bits(tcx, param_env, hi.ty),
-                    lo.ty,
-                    end,
-                ),
+                box PatKind::Constant { value } => {
+                    return Self::from_const(tcx, param_env, value, pat.span);
+                }
+                box PatKind::Range(PatRange { lo, hi, end }) => {
+                    return Self::from_range(
+                        tcx,
+                        lo.eval_bits(tcx, param_env, lo.ty),
+                        hi.eval_bits(tcx, param_env, hi.ty),
+                        &lo.ty,
+                        &end,
+                        pat.span,
+                    );
+                }
                 box PatKind::AscribeUserType { ref subpattern, .. } => {
                     pat = subpattern;
                 },
                 _ => return None,
             }
-        };
-        Self::from_ctor(tcx, param_env, &range)
+        }
     }
 
     // The return value of `signed_bias` should be XORed with an endpoint to encode/decode it.
@@ -908,14 +1010,15 @@ fn range_to_ctor(
         tcx: TyCtxt<'tcx>,
         ty: Ty<'tcx>,
         r: RangeInclusive<u128>,
+        span: Span,
     ) -> Constructor<'tcx> {
         let bias = IntRange::signed_bias(tcx, ty);
         let (lo, hi) = r.into_inner();
         if lo == hi {
             let ty = ty::ParamEnv::empty().and(ty);
-            ConstantValue(ty::Const::from_bits(tcx, lo ^ bias, ty))
+            ConstantValue(ty::Const::from_bits(tcx, lo ^ bias, ty), span)
         } else {
-            ConstantRange(lo ^ bias, hi ^ bias, ty, RangeEnd::Included)
+            ConstantRange(lo ^ bias, hi ^ bias, ty, RangeEnd::Included, span)
         }
     }
 
@@ -938,17 +1041,23 @@ fn subtract_from(
             if lo > subrange_hi || subrange_lo > hi  {
                 // The pattern doesn't intersect with the subrange at all,
                 // so the subrange remains untouched.
-                remaining_ranges.push(Self::range_to_ctor(tcx, ty, subrange_lo..=subrange_hi));
+                remaining_ranges.push(
+                    Self::range_to_ctor(tcx, ty, subrange_lo..=subrange_hi, self.span),
+                );
             } else {
                 if lo > subrange_lo {
                     // The pattern intersects an upper section of the
                     // subrange, so a lower section will remain.
-                    remaining_ranges.push(Self::range_to_ctor(tcx, ty, subrange_lo..=(lo - 1)));
+                    remaining_ranges.push(
+                        Self::range_to_ctor(tcx, ty, subrange_lo..=(lo - 1), self.span),
+                    );
                 }
                 if hi < subrange_hi {
                     // The pattern intersects a lower section of the
                     // subrange, so an upper section will remain.
-                    remaining_ranges.push(Self::range_to_ctor(tcx, ty, (hi + 1)..=subrange_hi));
+                    remaining_ranges.push(
+                        Self::range_to_ctor(tcx, ty, (hi + 1)..=subrange_hi, self.span),
+                    );
                 }
             }
         }
@@ -960,11 +1069,29 @@ fn intersection(&self, other: &Self) -> Option<Self> {
         let (lo, hi) = (*self.range.start(), *self.range.end());
         let (other_lo, other_hi) = (*other.range.start(), *other.range.end());
         if lo <= other_hi && other_lo <= hi {
-            Some(IntRange { range: max(lo, other_lo)..=min(hi, other_hi), ty })
+            let span = other.span;
+            Some(IntRange { range: max(lo, other_lo)..=min(hi, other_hi), ty, span })
         } else {
             None
         }
     }
+
+    fn suspicious_intersection(&self, other: &Self) -> bool {
+        // `false` in the following cases:
+        // 1     ----      // 1  ----------   // 1 ----        // 1       ----
+        // 2  ----------   // 2     ----      // 2       ----  // 2 ----
+        //
+        // The following are currently `false`, but could be `true` in the future (#64007):
+        // 1 ---------       // 1     ---------
+        // 2     ----------  // 2 ----------
+        //
+        // `true` in the following cases:
+        // 1 -------          // 1       -------
+        // 2       --------   // 2 -------
+        let (lo, hi) = (*self.range.start(), *self.range.end());
+        let (other_lo, other_hi) = (*other.range.start(), *other.range.end());
+        (lo == other_hi || hi == other_lo)
+    }
 }
 
 // A request for missing constructor data in terms of either:
@@ -1070,6 +1197,7 @@ pub fn is_useful<'p, 'a, 'tcx>(
     matrix: &Matrix<'p, 'tcx>,
     v: &[&Pat<'tcx>],
     witness: WitnessPreference,
+    hir_id: HirId,
 ) -> Usefulness<'tcx> {
     let &Matrix(ref rows) = matrix;
     debug!("is_useful({:#?}, {:#?})", matrix, v);
@@ -1092,6 +1220,10 @@ pub fn is_useful<'p, 'a, 'tcx>(
 
     assert!(rows.iter().all(|r| r.len() == v.len()));
 
+    let (ty, span) = rows.iter()
+        .map(|r| (r[0].ty, r[0].span))
+        .find(|(ty, _)| !ty.references_error())
+        .unwrap_or((v[0].ty, v[0].span));
     let pcx = PatCtxt {
         // TyErr is used to represent the type of wildcard patterns matching
         // against inaccessible (private) fields of structs, so that we won't
@@ -1112,8 +1244,9 @@ pub fn is_useful<'p, 'a, 'tcx>(
         // FIXME: this might lead to "unstable" behavior with macro hygiene
         // introducing uninhabited patterns for inaccessible fields. We
         // need to figure out how to model that.
-        ty: rows.iter().map(|r| r[0].ty).find(|ty| !ty.references_error()).unwrap_or(v[0].ty),
-        max_slice_length: max_slice_length(cx, rows.iter().map(|r| r[0]).chain(Some(v[0])))
+        ty,
+        max_slice_length: max_slice_length(cx, rows.iter().map(|r| r[0]).chain(Some(v[0]))),
+        span,
     };
 
     debug!("is_useful_expand_first_col: pcx={:#?}, expanding {:#?}", pcx, v[0]);
@@ -1127,9 +1260,9 @@ pub fn is_useful<'p, 'a, 'tcx>(
             Useful
         } else {
             split_grouped_constructors(
-                cx.tcx, cx.param_env, constructors, matrix, pcx.ty,
+                cx.tcx, cx.param_env, constructors, matrix, pcx.ty, pcx.span, Some(hir_id),
             ).into_iter().map(|c|
-                is_useful_specialized(cx, matrix, v, c, pcx.ty, witness)
+                is_useful_specialized(cx, matrix, v, c, pcx.ty, witness, hir_id)
             ).find(|result| result.is_useful()).unwrap_or(NotUseful)
         }
     } else {
@@ -1182,8 +1315,11 @@ pub fn is_useful<'p, 'a, 'tcx>(
             (pcx.ty.is_ptr_sized_integral() && !cx.tcx.features().precise_pointer_size_matching);
 
         if cheap_missing_ctors == MissingCtors::Empty && !is_non_exhaustive {
-            split_grouped_constructors(cx.tcx, cx.param_env, all_ctors, matrix, pcx.ty)
-                .into_iter().map(|c| is_useful_specialized(cx, matrix, v, c, pcx.ty, witness))
+            split_grouped_constructors(
+                cx.tcx, cx.param_env, all_ctors, matrix, pcx.ty, DUMMY_SP, None,
+            )
+                .into_iter()
+                .map(|c| is_useful_specialized(cx, matrix, v, c, pcx.ty, witness, hir_id))
                 .find(|result| result.is_useful())
                 .unwrap_or(NotUseful)
         } else {
@@ -1194,7 +1330,7 @@ pub fn is_useful<'p, 'a, 'tcx>(
                     None
                 }
             }).collect();
-            match is_useful(cx, &matrix, &v[1..], witness) {
+            match is_useful(cx, &matrix, &v[1..], witness, hir_id) {
                 UsefulWithWitness(pats) => {
                     let cx = &*cx;
                     // In this case, there's at least one "free"
@@ -1287,6 +1423,7 @@ fn is_useful_specialized<'p, 'a, 'tcx>(
     ctor: Constructor<'tcx>,
     lty: Ty<'tcx>,
     witness: WitnessPreference,
+    hir_id: HirId,
 ) -> Usefulness<'tcx> {
     debug!("is_useful_specialized({:#?}, {:#?}, {:?})", v, ctor, lty);
     let sub_pat_tys = constructor_sub_pattern_tys(cx, &ctor, lty);
@@ -1298,11 +1435,13 @@ fn is_useful_specialized<'p, 'a, 'tcx>(
         }
     }).collect();
     let wild_patterns: Vec<_> = wild_patterns_owned.iter().collect();
-    let matrix = Matrix(m.iter().flat_map(|r| {
-        specialize(cx, &r, &ctor, &wild_patterns)
-    }).collect());
+    let matrix = Matrix(
+        m.iter()
+            .filter_map(|r| specialize(cx, &r, &ctor, &wild_patterns))
+            .collect()
+    );
     match specialize(cx, v, &ctor, &wild_patterns) {
-        Some(v) => match is_useful(cx, &matrix, &v, witness) {
+        Some(v) => match is_useful(cx, &matrix, &v, witness, hir_id) {
             UsefulWithWitness(witnesses) => UsefulWithWitness(
                 witnesses.into_iter()
                     .map(|witness| witness.apply_constructor(cx, &ctor, lty))
@@ -1322,11 +1461,11 @@ fn is_useful_specialized<'p, 'a, 'tcx>(
 /// `[a, b, ..tail]` can match a slice of length 2, 3, 4 and so on.
 ///
 /// Returns `None` in case of a catch-all, which can't be specialized.
-fn pat_constructors<'tcx>(cx: &mut MatchCheckCtxt<'_, 'tcx>,
-                          pat: &Pat<'tcx>,
-                          pcx: PatCtxt<'tcx>)
-                          -> Option<Vec<Constructor<'tcx>>>
-{
+fn pat_constructors<'tcx>(
+    cx: &mut MatchCheckCtxt<'_, 'tcx>,
+    pat: &Pat<'tcx>,
+    pcx: PatCtxt<'tcx>,
+) -> Option<Vec<Constructor<'tcx>>> {
     match *pat.kind {
         PatKind::AscribeUserType { ref subpattern, .. } =>
             pat_constructors(cx, subpattern, pcx),
@@ -1335,13 +1474,14 @@ fn pat_constructors<'tcx>(cx: &mut MatchCheckCtxt<'_, 'tcx>,
         PatKind::Variant { adt_def, variant_index, .. } => {
             Some(vec![Variant(adt_def.variants[variant_index].def_id)])
         }
-        PatKind::Constant { value } => Some(vec![ConstantValue(value)]),
+        PatKind::Constant { value } => Some(vec![ConstantValue(value, pat.span)]),
         PatKind::Range(PatRange { lo, hi, end }) =>
             Some(vec![ConstantRange(
                 lo.eval_bits(cx.tcx, cx.param_env, lo.ty),
                 hi.eval_bits(cx.tcx, cx.param_env, hi.ty),
                 lo.ty,
                 end,
+                pat.span,
             )]),
         PatKind::Array { .. } => match pcx.ty.kind {
             ty::Array(_, length) => Some(vec![
@@ -1374,7 +1514,7 @@ fn constructor_arity(cx: &MatchCheckCtxt<'a, 'tcx>, ctor: &Constructor<'tcx>, ty
         ty::Tuple(ref fs) => fs.len() as u64,
         ty::Slice(..) | ty::Array(..) => match *ctor {
             Slice(length) => length,
-            ConstantValue(_) => 0,
+            ConstantValue(..) => 0,
             _ => bug!("bad slice pattern {:?} {:?}", ctor, ty)
         }
         ty::Ref(..) => 1,
@@ -1399,7 +1539,7 @@ fn constructor_sub_pattern_tys<'a, 'tcx>(
         ty::Tuple(ref fs) => fs.into_iter().map(|t| t.expect_ty()).collect(),
         ty::Slice(ty) | ty::Array(ty, _) => match *ctor {
             Slice(length) => (0..length).map(|_| ty).collect(),
-            ConstantValue(_) => vec![],
+            ConstantValue(..) => vec![],
             _ => bug!("bad slice pattern {:?} {:?}", ctor, ty)
         }
         ty::Ref(_, rty, _) => vec![rty],
@@ -1497,8 +1637,8 @@ fn slice_pat_covered_by_const<'tcx>(
 // constructor is a range or constant with an integer type.
 fn should_treat_range_exhaustively(tcx: TyCtxt<'tcx>, ctor: &Constructor<'tcx>) -> bool {
     let ty = match ctor {
-        ConstantValue(value) => value.ty,
-        ConstantRange(_, _, ty, _) => ty,
+        ConstantValue(value, _) => value.ty,
+        ConstantRange(_, _, ty, _, _) => ty,
         _ => return false,
     };
     if let ty::Char | ty::Int(_) | ty::Uint(_) = ty.kind {
@@ -1540,12 +1680,17 @@ fn should_treat_range_exhaustively(tcx: TyCtxt<'tcx>, ctor: &Constructor<'tcx>)
 /// boundaries for each interval range, sort them, then create constructors for each new interval
 /// between every pair of boundary points. (This essentially sums up to performing the intuitive
 /// merging operation depicted above.)
+///
+/// `hir_id` is `None` when we're evaluating the wildcard pattern; in that case, do not lint for
+/// overlapping ranges.
 fn split_grouped_constructors<'p, 'tcx>(
     tcx: TyCtxt<'tcx>,
     param_env: ty::ParamEnv<'tcx>,
     ctors: Vec<Constructor<'tcx>>,
     &Matrix(ref m): &Matrix<'p, 'tcx>,
     ty: Ty<'tcx>,
+    span: Span,
+    hir_id: Option<HirId>,
 ) -> Vec<Constructor<'tcx>> {
     let mut split_ctors = Vec::with_capacity(ctors.len());
 
@@ -1562,7 +1707,7 @@ fn split_grouped_constructors<'p, 'tcx>(
                 /// Represents a border between 2 integers. Because the intervals spanning borders
                 /// must be able to cover every integer, we need to be able to represent
                 /// 2^128 + 1 such borders.
-                #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]
+                #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Debug)]
                 enum Border {
                     JustBefore(u128),
                     AfterMax,
@@ -1579,16 +1724,38 @@ fn range_borders(r: IntRange<'_>) -> impl Iterator<Item = Border> {
                     vec![from, to].into_iter()
                 }
 
+                // Collect the span and range of all the intersecting ranges to lint on likely
+                // incorrect range patterns. (#63987)
+                let mut overlaps = vec![];
                 // `borders` is the set of borders between equivalence classes: each equivalence
                 // class lies between 2 borders.
                 let row_borders = m.iter()
-                    .flat_map(|row| IntRange::from_pat(tcx, param_env, row[0]))
-                    .flat_map(|range| ctor_range.intersection(&range))
+                    .flat_map(|row| {
+                        IntRange::from_pat(tcx, param_env, row[0]).map(|r| (r, row.len()))
+                    })
+                    .flat_map(|(range, row_len)| {
+                        let intersection = ctor_range.intersection(&range);
+                        let should_lint = ctor_range.suspicious_intersection(&range);
+                        if let (Some(range), 1, true) = (&intersection, row_len, should_lint) {
+                            // FIXME: for now, only check for overlapping ranges on simple range
+                            // patterns. Otherwise with the current logic the following is detected
+                            // as overlapping:
+                            //   match (10u8, true) {
+                            //    (0 ..= 125, false) => {}
+                            //    (126 ..= 255, false) => {}
+                            //    (0 ..= 255, true) => {}
+                            //  }
+                            overlaps.push(range.clone());
+                        }
+                        intersection
+                    })
                     .flat_map(|range| range_borders(range));
                 let ctor_borders = range_borders(ctor_range.clone());
                 let mut borders: Vec<_> = row_borders.chain(ctor_borders).collect();
                 borders.sort_unstable();
 
+                lint_overlapping_patterns(tcx, hir_id, ctor_range, ty, overlaps);
+
                 // We're going to iterate through every pair of borders, making sure that each
                 // represents an interval of nonnegative length, and convert each such interval
                 // into a constructor.
@@ -1596,18 +1763,18 @@ fn range_borders(r: IntRange<'_>) -> impl Iterator<Item = Border> {
                     match (window[0], window[1]) {
                         (Border::JustBefore(n), Border::JustBefore(m)) => {
                             if n < m {
-                                Some(IntRange { range: n..=(m - 1), ty })
+                                Some(IntRange { range: n..=(m - 1), ty, span })
                             } else {
                                 None
                             }
                         }
                         (Border::JustBefore(n), Border::AfterMax) => {
-                            Some(IntRange { range: n..=u128::MAX, ty })
+                            Some(IntRange { range: n..=u128::MAX, ty, span })
                         }
                         (Border::AfterMax, _) => None,
                     }
                 }) {
-                    split_ctors.push(IntRange::range_to_ctor(tcx, ty, range));
+                    split_ctors.push(IntRange::range_to_ctor(tcx, ty, range, span));
                 }
             }
             // Any other constructor can be used unchanged.
@@ -1618,33 +1785,29 @@ fn range_borders(r: IntRange<'_>) -> impl Iterator<Item = Border> {
     split_ctors
 }
 
-/// Checks whether there exists any shared value in either `ctor` or `pat` by intersecting them.
-fn constructor_intersects_pattern<'p, 'tcx>(
+fn lint_overlapping_patterns(
     tcx: TyCtxt<'tcx>,
-    param_env: ty::ParamEnv<'tcx>,
-    ctor: &Constructor<'tcx>,
-    pat: &'p Pat<'tcx>,
-) -> Option<SmallVec<[&'p Pat<'tcx>; 2]>> {
-    if should_treat_range_exhaustively(tcx, ctor) {
-        match (IntRange::from_ctor(tcx, param_env, ctor), IntRange::from_pat(tcx, param_env, pat)) {
-            (Some(ctor), Some(pat)) => {
-                ctor.intersection(&pat).map(|_| {
-                    let (pat_lo, pat_hi) = pat.range.into_inner();
-                    let (ctor_lo, ctor_hi) = ctor.range.into_inner();
-                    assert!(pat_lo <= ctor_lo && ctor_hi <= pat_hi);
-                    smallvec![]
-                })
-            }
-            _ => None,
-        }
-    } else {
-        // Fallback for non-ranges and ranges that involve floating-point numbers, which are not
-        // conveniently handled by `IntRange`. For these cases, the constructor may not be a range
-        // so intersection actually devolves into being covered by the pattern.
-        match constructor_covered_by_range(tcx, param_env, ctor, pat) {
-            Ok(true) => Some(smallvec![]),
-            Ok(false) | Err(ErrorReported) => None,
+    hir_id: Option<HirId>,
+    ctor_range: IntRange<'tcx>,
+    ty: Ty<'tcx>,
+    overlaps: Vec<IntRange<'tcx>>,
+) {
+    if let (true, Some(hir_id)) = (!overlaps.is_empty(), hir_id) {
+        let mut err = tcx.struct_span_lint_hir(
+            lint::builtin::OVERLAPPING_PATTERNS,
+            hir_id,
+            ctor_range.span,
+            "multiple patterns covering the same range",
+        );
+        err.span_label(ctor_range.span, "overlapping patterns");
+        for int_range in overlaps {
+            // Use the real type for user display of the ranges:
+            err.span_label(int_range.span, &format!(
+                "this range overlaps on `{}`",
+                IntRange::range_to_ctor(tcx, ty, int_range.range, DUMMY_SP).display(tcx),
+            ));
         }
+        err.emit();
     }
 }
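
For context, a minimal example (assumed, not taken from this patch or its tests) of the kind of `match` the new `overlapping_patterns` lint is meant to flag; the first two arms both cover the values 3 through 5:

    fn classify(x: u8) -> &'static str {
        match x {
            0..=5 => "low",
            3..=10 => "mid",   // lint: this range overlaps `0..=5` on `3..=5`
            _ => "high",
        }
    }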
 
@@ -1672,13 +1835,13 @@ macro_rules! some_or_ok {
         };
     }
     match *ctor {
-        ConstantValue(value) => {
+        ConstantValue(value, _) => {
             let to = some_or_ok!(cmp_to(value));
             let end = (to == Ordering::Less) ||
                       (end == RangeEnd::Included && to == Ordering::Equal);
             Ok(some_or_ok!(cmp_from(value)) && end)
         },
-        ConstantRange(from, to, ty, RangeEnd::Included) => {
+        ConstantRange(from, to, ty, RangeEnd::Included, _) => {
             let to = some_or_ok!(cmp_to(ty::Const::from_bits(
                 tcx,
                 to,
@@ -1692,7 +1855,7 @@ macro_rules! some_or_ok {
                 ty::ParamEnv::empty().and(ty),
             ))) && end)
         },
-        ConstantRange(from, to, ty, RangeEnd::Excluded) => {
+        ConstantRange(from, to, ty, RangeEnd::Excluded, _) => {
             let to = some_or_ok!(cmp_to(ty::Const::from_bits(
                 tcx,
                 to,
@@ -1766,90 +1929,104 @@ fn specialize<'p, 'a: 'p, 'tcx>(
             Some(smallvec![subpattern])
         }
 
-        PatKind::Constant { value } => {
-            match *constructor {
-                Slice(..) => {
-                    // we extract an `Option` for the pointer because slices of zero elements don't
-                    // necessarily point to memory, they are usually just integers. The only time
-                    // they should be pointing to memory is when they are subslices of nonzero
-                    // slices
-                    let (alloc, offset, n, ty) = match value.ty.kind {
-                        ty::Array(t, n) => {
-                            match value.val {
-                                ConstValue::ByRef { offset, alloc, .. } => (
-                                    alloc,
-                                    offset,
-                                    n.eval_usize(cx.tcx, cx.param_env),
-                                    t,
-                                ),
-                                _ => span_bug!(
-                                    pat.span,
-                                    "array pattern is {:?}", value,
-                                ),
-                            }
-                        },
-                        ty::Slice(t) => {
-                            match value.val {
-                                ConstValue::Slice { data, start, end } => (
-                                    data,
-                                    Size::from_bytes(start as u64),
-                                    (end - start) as u64,
-                                    t,
-                                ),
-                                ConstValue::ByRef { .. } => {
-                                    // FIXME(oli-obk): implement `deref` for `ConstValue`
-                                    return None;
-                                },
-                                _ => span_bug!(
-                                    pat.span,
-                                    "slice pattern constant must be scalar pair but is {:?}",
-                                    value,
-                                ),
-                            }
+        PatKind::Constant { value } if constructor.is_slice() => {
+            // We extract an `Option` for the pointer because slices of zero
+            // elements don't necessarily point to memory; they are usually
+            // just integers. The only time they should be pointing to memory
+            // is when they are subslices of nonzero slices.
+            let (alloc, offset, n, ty) = match value.ty.kind {
+                ty::Array(t, n) => {
+                    match value.val {
+                        ConstValue::ByRef { offset, alloc, .. } => (
+                            alloc,
+                            offset,
+                            n.eval_usize(cx.tcx, cx.param_env),
+                            t,
+                        ),
+                        _ => span_bug!(
+                            pat.span,
+                            "array pattern is {:?}", value,
+                        ),
+                    }
+                },
+                ty::Slice(t) => {
+                    match value.val {
+                        ConstValue::Slice { data, start, end } => (
+                            data,
+                            Size::from_bytes(start as u64),
+                            (end - start) as u64,
+                            t,
+                        ),
+                        ConstValue::ByRef { .. } => {
+                            // FIXME(oli-obk): implement `deref` for `ConstValue`
+                            return None;
                         },
                         _ => span_bug!(
                             pat.span,
-                            "unexpected const-val {:?} with ctor {:?}",
+                            "slice pattern constant must be scalar pair but is {:?}",
                             value,
-                            constructor,
                         ),
-                    };
-                    if wild_patterns.len() as u64 == n {
-                        // convert a constant slice/array pattern to a list of patterns.
-                        let layout = cx.tcx.layout_of(cx.param_env.and(ty)).ok()?;
-                        let ptr = Pointer::new(AllocId(0), offset);
-                        (0..n).map(|i| {
-                            let ptr = ptr.offset(layout.size * i, &cx.tcx).ok()?;
-                            let scalar = alloc.read_scalar(
-                                &cx.tcx, ptr, layout.size,
-                            ).ok()?;
-                            let scalar = scalar.not_undef().ok()?;
-                            let value = ty::Const::from_scalar(cx.tcx, scalar, ty);
-                            let pattern = Pat {
-                                ty,
-                                span: pat.span,
-                                kind: box PatKind::Constant { value },
-                            };
-                            Some(&*cx.pattern_arena.alloc(pattern))
-                        }).collect()
-                    } else {
-                        None
                     }
-                }
-                _ => {
-                    // If the constructor is a:
-                    //      Single value: add a row if the constructor equals the pattern.
-                    //      Range: add a row if the constructor contains the pattern.
-                    constructor_intersects_pattern(cx.tcx, cx.param_env, constructor, pat)
-                }
+                },
+                _ => span_bug!(
+                    pat.span,
+                    "unexpected const-val {:?} with ctor {:?}",
+                    value,
+                    constructor,
+                ),
+            };
+            if wild_patterns.len() as u64 == n {
+                // convert a constant slice/array pattern to a list of patterns.
+                let layout = cx.tcx.layout_of(cx.param_env.and(ty)).ok()?;
+                let ptr = Pointer::new(AllocId(0), offset);
+                (0..n).map(|i| {
+                    let ptr = ptr.offset(layout.size * i, &cx.tcx).ok()?;
+                    let scalar = alloc.read_scalar(
+                        &cx.tcx, ptr, layout.size,
+                    ).ok()?;
+                    let scalar = scalar.not_undef().ok()?;
+                    let value = ty::Const::from_scalar(cx.tcx, scalar, ty);
+                    let pattern = Pat {
+                        ty,
+                        span: pat.span,
+                        kind: box PatKind::Constant { value },
+                    };
+                    Some(&*cx.pattern_arena.alloc(pattern))
+                }).collect()
+            } else {
+                None
             }
         }
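
Loosely, what "convert a constant slice/array pattern to a list of patterns" means for the usefulness check, sketched with assumed example code (not from this patch): a constant array pattern is treated as if each element had been spelled out.

    const AB: [u8; 2] = *b"ab";

    fn is_ab(bytes: [u8; 2]) -> bool {
        match bytes {
            AB => true,   // checked as if written `[b'a', b'b']`
            _ => false,
        }
    }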
 
+        PatKind::Constant { .. } |
         PatKind::Range { .. } => {
             // If the constructor is a:
-            //      Single value: add a row if the pattern contains the constructor.
-            //      Range: add a row if the constructor intersects the pattern.
-            constructor_intersects_pattern(cx.tcx, cx.param_env, constructor, pat)
+            // - Single value: add a row if the pattern contains the constructor.
+            // - Range: add a row if the constructor intersects the pattern.
+            if should_treat_range_exhaustively(cx.tcx, constructor) {
+                match (IntRange::from_ctor(cx.tcx, cx.param_env, constructor),
+                       IntRange::from_pat(cx.tcx, cx.param_env, pat)) {
+                    (Some(ctor), Some(pat)) => {
+                        ctor.intersection(&pat).map(|_| {
+                            let (pat_lo, pat_hi) = pat.range.into_inner();
+                            let (ctor_lo, ctor_hi) = ctor.range.into_inner();
+                            assert!(pat_lo <= ctor_lo && ctor_hi <= pat_hi);
+                            smallvec![]
+                        })
+                    }
+                    _ => None,
+                }
+            } else {
+                // Fallback for non-ranges and ranges that involve
+                // floating-point numbers, which are not conveniently handled
+                // by `IntRange`. For these cases, the constructor may not be a
+                // range so intersection actually devolves into being covered
+                // by the pattern.
+                match constructor_covered_by_range(cx.tcx, cx.param_env, constructor, pat) {
+                    Ok(true) => Some(smallvec![]),
+                    Ok(false) | Err(ErrorReported) => None,
+                }
+            }
         }
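
The intersection test above is ordinary interval arithmetic; a hypothetical standalone version (not the compiler's `IntRange::intersection`) for inclusive integer ranges:

    fn intersect(a: (u128, u128), b: (u128, u128)) -> Option<(u128, u128)> {
        let lo = a.0.max(b.0);
        let hi = a.1.min(b.1);
        if lo <= hi { Some((lo, hi)) } else { None }   // empty overlap: no row is added
    }

    // intersect((0, 125), (100, 255)) == Some((100, 125))
    // intersect((0, 125), (126, 255)) == None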
 
         PatKind::Array { ref prefix, ref slice, ref suffix } |
@@ -1872,7 +2049,7 @@ fn specialize<'p, 'a: 'p, 'tcx>(
                         None
                     }
                 }
-                ConstantValue(cv) => {
+                ConstantValue(cv, _) => {
                     match slice_pat_covered_by_const(
                         cx.tcx, pat.span, cv, prefix, slice, suffix, cx.param_env,
                     ) {
index c521b7352214bdf47841a5e7235179361206dd9d..7bc4bf291ee488f5b61543b5b90ec71c197749f2 100644 (file)
@@ -10,6 +10,7 @@
 use rustc::lint;
 use rustc_errors::{Applicability, DiagnosticBuilder};
 
+use rustc::hir::HirId;
 use rustc::hir::def::*;
 use rustc::hir::def_id::DefId;
 use rustc::hir::intravisit::{self, Visitor, NestedVisitorMap};
@@ -62,12 +63,13 @@ fn visit_expr(&mut self, ex: &'tcx hir::Expr) {
     fn visit_local(&mut self, loc: &'tcx hir::Local) {
         intravisit::walk_local(self, loc);
 
-        self.check_irrefutable(&loc.pat, match loc.source {
-            hir::LocalSource::Normal => "local binding",
-            hir::LocalSource::ForLoopDesugar => "`for` loop binding",
-            hir::LocalSource::AsyncFn => "async fn binding",
-            hir::LocalSource::AwaitDesugar => "`await` future binding",
-        });
+        let (msg, sp) = match loc.source {
+            hir::LocalSource::Normal => ("local binding", Some(loc.span)),
+            hir::LocalSource::ForLoopDesugar => ("`for` loop binding", None),
+            hir::LocalSource::AsyncFn => ("async fn binding", None),
+            hir::LocalSource::AwaitDesugar => ("`await` future binding", None),
+        };
+        self.check_irrefutable(&loc.pat, msg, sp);
 
         // Check legality of move bindings and `@` patterns.
         self.check_patterns(false, &loc.pat);
@@ -77,7 +79,7 @@ fn visit_body(&mut self, body: &'tcx hir::Body) {
         intravisit::walk_body(self, body);
 
         for param in &body.params {
-            self.check_irrefutable(&param.pat, "function argument");
+            self.check_irrefutable(&param.pat, "function argument", None);
             self.check_patterns(false, &param.pat);
         }
     }
@@ -238,11 +240,11 @@ fn check_match(
                 .map(|pat| smallvec![pat.0])
                 .collect();
             let scrut_ty = self.tables.node_type(scrut.hir_id);
-            check_exhaustive(cx, scrut_ty, scrut.span, &matrix);
+            check_exhaustive(cx, scrut_ty, scrut.span, &matrix, scrut.hir_id);
         })
     }
 
-    fn check_irrefutable(&self, pat: &'tcx Pat, origin: &str) {
+    fn check_irrefutable(&self, pat: &'tcx Pat, origin: &str, sp: Option<Span>) {
         let module = self.tcx.hir().get_module_parent(pat.hir_id);
         MatchCheckCtxt::create_and_enter(self.tcx, self.param_env, module, |ref mut cx| {
             let mut patcx = PatCtxt::new(self.tcx,
@@ -255,7 +257,7 @@ fn check_irrefutable(&self, pat: &'tcx Pat, origin: &str) {
                 expand_pattern(cx, pattern)
             ]].into_iter().collect();
 
-            let witnesses = match check_not_useful(cx, pattern_ty, &pats) {
+            let witnesses = match check_not_useful(cx, pattern_ty, &pats, pat.hir_id) {
                 Ok(_) => return,
                 Err(err) => err,
             };
@@ -266,18 +268,35 @@ fn check_irrefutable(&self, pat: &'tcx Pat, origin: &str) {
                 "refutable pattern in {}: {} not covered",
                 origin, joined_patterns
             );
-            match &pat.kind {
+            let suggest_if_let = match &pat.kind {
                 hir::PatKind::Path(hir::QPath::Resolved(None, path))
                     if path.segments.len() == 1 && path.segments[0].args.is_none() =>
                 {
                     const_not_var(&mut err, cx.tcx, pat, path);
+                    false
                 }
                 _ => {
                     err.span_label(
                         pat.span,
                         pattern_not_covered_label(&witnesses, &joined_patterns),
                     );
+                    true
+                }
+            };
+
+            if let (Some(span), true) = (sp, suggest_if_let) {
+                err.note("`let` bindings require an \"irrefutable pattern\", like a `struct` or \
+                          an `enum` with only one variant");
+                if let Ok(snippet) = self.tcx.sess.source_map().span_to_snippet(span) {
+                    err.span_suggestion(
+                        span,
+                        "you might want to use `if let` to ignore the variant that isn't matched",
+                        format!("if {} {{ /* */ }}", &snippet[..snippet.len() - 1]),
+                        Applicability::HasPlaceholders,
+                    );
                 }
+                err.note("for more information, visit \
+                          https://doc.rust-lang.org/book/ch18-02-refutability.html");
             }
 
             adt_defined_here(cx, &mut err, pattern_ty, &witnesses);
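
To illustrate the new suggestion (example code assumed, not part of the patch): a refutable pattern in a `let` now points at the `if let` form instead of only reporting the uncovered patterns.

    enum Setting { Value(i32), Unset }

    fn demo(s: Setting) {
        // let Setting::Value(n) = s;      // error: refutable pattern in local binding
        if let Setting::Value(n) = s {     // suggested replacement
            println!("{}", n);
        }
    }
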
@@ -371,7 +390,7 @@ fn check_arms<'tcx>(
         for &(pat, hir_pat) in pats {
             let v = smallvec![pat];
 
-            match is_useful(cx, &seen, &v, LeaveOutWitness) {
+            match is_useful(cx, &seen, &v, LeaveOutWitness, hir_pat.hir_id) {
                 NotUseful => {
                     match source {
                         hir::MatchSource::IfDesugar { .. } |
@@ -447,9 +466,10 @@ fn check_not_useful(
     cx: &mut MatchCheckCtxt<'_, 'tcx>,
     ty: Ty<'tcx>,
     matrix: &Matrix<'_, 'tcx>,
+    hir_id: HirId,
 ) -> Result<(), Vec<super::Pat<'tcx>>> {
     let wild_pattern = super::Pat { ty, span: DUMMY_SP, kind: box PatKind::Wild };
-    match is_useful(cx, matrix, &[&wild_pattern], ConstructWitness) {
+    match is_useful(cx, matrix, &[&wild_pattern], ConstructWitness, hir_id) {
         NotUseful => Ok(()), // This is good, wildcard pattern isn't reachable.
         UsefulWithWitness(pats) => Err(if pats.is_empty() {
             vec![wild_pattern]
@@ -465,8 +485,9 @@ fn check_exhaustive<'tcx>(
     scrut_ty: Ty<'tcx>,
     sp: Span,
     matrix: &Matrix<'_, 'tcx>,
+    hir_id: HirId,
 ) {
-    let witnesses = match check_not_useful(cx, scrut_ty, matrix) {
+    let witnesses = match check_not_useful(cx, scrut_ty, matrix, hir_id) {
         Ok(_) => return,
         Err(err) => err,
     };
index 58d741b9295a3b209dd4652b75a50f5996f8fcfe..7e17162dfb3ef0df1d79afabee9e8d98707b5ec4 100644 (file)
@@ -312,10 +312,7 @@ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
             }
             PatKind::Range(PatRange { lo, hi, end }) => {
                 write!(f, "{}", lo)?;
-                match end {
-                    RangeEnd::Included => write!(f, "..=")?,
-                    RangeEnd::Excluded => write!(f, "..")?,
-                }
+                write!(f, "{}", end)?;
                 write!(f, "{}", hi)
             }
             PatKind::Slice { ref prefix, ref slice, ref suffix } |
index d120412c901a692d3d3d8c8bf026ea27c73ce9c9..9ab347957f97ac8480ff5ac7f57f4ddbac941887 100644 (file)
@@ -44,7 +44,14 @@ pub fn cast(
                         if self.tcx.has_attr(def_id, sym::rustc_args_required_const) {
                             bug!("reifying a fn ptr that requires const arguments");
                         }
-                        let instance = self.resolve(def_id, substs)?;
+
+                        let instance = ty::Instance::resolve_for_fn_ptr(
+                            *self.tcx,
+                            self.param_env,
+                            def_id,
+                            substs,
+                        ).ok_or_else(|| err_inval!(TooGeneric))?;
+
                         let fn_ptr = self.memory.create_fn_alloc(FnVal::Instance(instance));
                         self.write_scalar(Scalar::Ptr(fn_ptr.into()), dest)?;
                     }
index e1c45132103b450049f70cd48db64fb62be236d9..06fdd407951c1f76c6bb3c27ee3426ad52445943 100644 (file)
@@ -35,7 +35,7 @@ pub struct InterpCx<'mir, 'tcx, M: Machine<'mir, 'tcx>> {
     pub(crate) param_env: ty::ParamEnv<'tcx>,
 
     /// The virtual memory system.
-    pub(crate) memory: Memory<'mir, 'tcx, M>,
+    pub memory: Memory<'mir, 'tcx, M>,
 
     /// The virtual call stack.
     pub(crate) stack: Vec<Frame<'mir, 'tcx, M::PointerTag, M::FrameExtra>>,
@@ -211,16 +211,6 @@ pub fn new(
         }
     }
 
-    #[inline(always)]
-    pub fn memory(&self) -> &Memory<'mir, 'tcx, M> {
-        &self.memory
-    }
-
-    #[inline(always)]
-    pub fn memory_mut(&mut self) -> &mut Memory<'mir, 'tcx, M> {
-        &mut self.memory
-    }
-
     #[inline(always)]
     pub fn force_ptr(
         &self,
index ec06b6298e11229e14593474c6ffd089fb2f6800..646d1783c8ec96356a97120f3d379b424a0d6ea2 100644 (file)
@@ -73,8 +73,7 @@ fn intern_shallow<'rt, 'mir, 'tcx>(
     );
     // remove allocation
     let tcx = ecx.tcx;
-    let memory = ecx.memory_mut();
-    let (kind, mut alloc) = match memory.alloc_map.remove(&alloc_id) {
+    let (kind, mut alloc) = match ecx.memory.alloc_map.remove(&alloc_id) {
         Some(entry) => entry,
         None => {
             // Pointer not found in local memory map. It is either a pointer to the global
@@ -332,7 +331,7 @@ pub fn intern_const_alloc_recursive(
 
     let mut todo: Vec<_> = leftover_allocations.iter().cloned().collect();
     while let Some(alloc_id) = todo.pop() {
-        if let Some((_, mut alloc)) = ecx.memory_mut().alloc_map.remove(&alloc_id) {
+        if let Some((_, mut alloc)) = ecx.memory.alloc_map.remove(&alloc_id) {
             // We can't call the `intern_shallow` method here, as its logic is tailored to safe
             // references and a `leftover_allocations` set (where we only have a todo-list here).
             // So we hand-roll the interning logic here again.
@@ -350,7 +349,7 @@ pub fn intern_const_alloc_recursive(
                     todo.push(reloc);
                 }
             }
-        } else if ecx.memory().dead_alloc_map.contains_key(&alloc_id) {
+        } else if ecx.memory.dead_alloc_map.contains_key(&alloc_id) {
             // dangling pointer
             throw_unsup!(ValidationFailure("encountered dangling pointer in final constant".into()))
         }
index dc3b7694c35c9b3800f588fab44a00d8da7ec210..f9200f8c1c04239c87ba03610dee0027ff50f31f 100644 (file)
@@ -68,8 +68,7 @@ fn print_type(mut self, ty: Ty<'tcx>) -> Result<Self::Type, Self::Error> {
             | ty::Projection(ty::ProjectionTy { item_def_id: def_id, substs })
             | ty::UnnormalizedProjection(ty::ProjectionTy { item_def_id: def_id, substs })
             | ty::Closure(def_id, substs)
-            | ty::Generator(def_id, ty::GeneratorSubsts { substs }, _)
-            => self.print_def_path(def_id, substs),
+            | ty::Generator(def_id, substs, _) => self.print_def_path(def_id, substs),
             ty::Foreign(def_id) => self.print_def_path(def_id, &[]),
 
             ty::GeneratorWitness(_) => {
index 861e5ebef877deb024ed8654ff39c56d4b706b5e..4d9be55945e0279ef3acfb4ec0ced2efd156f9b3 100644 (file)
@@ -647,7 +647,7 @@ pub fn read_discriminant(
                 let bits_discr = raw_discr
                     .not_undef()
                     .and_then(|raw_discr| self.force_bits(raw_discr, discr_val.layout.size))
-                    .map_err(|_| err_unsup!(InvalidDiscriminant(raw_discr.erase_tag())))?;
+                    .map_err(|_| err_ub!(InvalidDiscriminant(raw_discr.erase_tag())))?;
                 let real_discr = if discr_val.layout.ty.is_signed() {
                     // going from layout tag type to typeck discriminant type
                     // requires first sign extending with the discriminant layout
@@ -664,16 +664,20 @@ pub fn read_discriminant(
                     bits_discr
                 };
                 // Make sure we catch invalid discriminants
-                let index = match &rval.layout.ty.kind {
+                let index = match rval.layout.ty.kind {
                     ty::Adt(adt, _) => adt
                         .discriminants(self.tcx.tcx)
                         .find(|(_, var)| var.val == real_discr),
-                    ty::Generator(def_id, substs, _) => substs
-                        .discriminants(*def_id, self.tcx.tcx)
-                        .find(|(_, var)| var.val == real_discr),
+                    ty::Generator(def_id, substs, _) => {
+                        let substs = substs.as_generator();
+                        substs
+                            .discriminants(def_id, self.tcx.tcx)
+                            .find(|(_, var)| var.val == real_discr)
+                    }
                     _ => bug!("tagged layout for non-adt non-generator"),
                 }.ok_or_else(
-                    || err_unsup!(InvalidDiscriminant(raw_discr.erase_tag()))
+                    || err_ub!(InvalidDiscriminant(raw_discr.erase_tag()))
                 )?;
                 (real_discr, index.0)
             },
@@ -685,7 +689,7 @@ pub fn read_discriminant(
                 let variants_start = niche_variants.start().as_u32();
                 let variants_end = niche_variants.end().as_u32();
                 let raw_discr = raw_discr.not_undef().map_err(|_| {
-                    err_unsup!(InvalidDiscriminant(ScalarMaybeUndef::Undef))
+                    err_ub!(InvalidDiscriminant(ScalarMaybeUndef::Undef))
                 })?;
                 match raw_discr.to_bits_or_ptr(discr_val.layout.size, self) {
                     Err(ptr) => {
@@ -693,7 +697,7 @@ pub fn read_discriminant(
                         let ptr_valid = niche_start == 0 && variants_start == variants_end &&
                             !self.memory.ptr_may_be_null(ptr);
                         if !ptr_valid {
-                            throw_unsup!(InvalidDiscriminant(raw_discr.erase_tag().into()))
+                            throw_ub!(InvalidDiscriminant(raw_discr.erase_tag().into()))
                         }
                         (dataful_variant.as_u32() as u128, dataful_variant)
                     },
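
Roughly the situation the reclassified `InvalidDiscriminant` error (now undefined behaviour rather than "unsupported") describes, sketched with assumed example code; Miri and compile-time evaluation reject the discriminant read below:

    #[repr(u8)]
    enum Flag { Off = 0, On = 1 }

    fn main() {
        // UB: 2 is not a valid discriminant for `Flag`.
        let bad: Flag = unsafe { std::mem::transmute(2u8) };
        match bad {
            Flag::Off => println!("off"),
            Flag::On => println!("on"),
        }
    }
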
index 1166ca9bf24443b30c8a550aad2f0708b8ca7021..0289c52fd37446bbd7578bbc3865786618170369 100644 (file)
@@ -9,7 +9,7 @@
 use rustc::mir::interpret::truncate;
 use rustc::ty::{self, Ty};
 use rustc::ty::layout::{
-    self, Size, Abi, Align, LayoutOf, TyLayout, HasDataLayout, VariantIdx, PrimitiveExt
+    self, Size, Align, LayoutOf, TyLayout, HasDataLayout, VariantIdx, PrimitiveExt
 };
 use rustc::ty::TypeFoldable;
 
@@ -377,20 +377,17 @@ pub fn mplace_field(
             layout::FieldPlacement::Array { stride, .. } => {
                 let len = base.len(self)?;
                 if field >= len {
-                    // This can be violated because this runs during promotion on code where the
-                    // type system has not yet ensured that such things don't happen.
+                    // This can be violated because the index (field) can be a runtime value
+                    // provided by the user.
                     debug!("tried to access element {} of array/slice with length {}", field, len);
                     throw_panic!(BoundsCheck { len, index: field });
                 }
                 stride * field
             }
             layout::FieldPlacement::Union(count) => {
-                // FIXME(#64506) `UninhabitedValue` can be removed when this issue is resolved
-                if base.layout.abi == Abi::Uninhabited {
-                    throw_unsup!(UninhabitedValue);
-                }
                 assert!(field < count as u64,
-                        "Tried to access field {} of union with {} fields", field, count);
+                        "Tried to access field {} of union {:#?} with {} fields",
+                        field, base.layout, count);
                 // Offset is always 0
                 Size::from_bytes(0)
             }
@@ -594,6 +591,13 @@ pub(super) fn eval_static_to_mplace(
             StaticKind::Promoted(promoted, promoted_substs) => {
                 let substs = self.subst_from_frame_and_normalize_erasing_regions(promoted_substs);
                 let instance = ty::Instance::new(place_static.def_id, substs);
+
+                // Even after getting `substs` from the frame, this instance may still be
+                // polymorphic because `ConstProp` will try to promote polymorphic MIR.
+                if instance.needs_subst() {
+                    throw_inval!(TooGeneric);
+                }
+
                 self.const_eval_raw(GlobalId {
                     instance,
                     promoted: Some(promoted),
@@ -1027,9 +1031,13 @@ pub fn write_discriminant_index(
         variant_index: VariantIdx,
         dest: PlaceTy<'tcx, M::PointerTag>,
     ) -> InterpResult<'tcx> {
+        let variant_scalar = Scalar::from_u32(variant_index.as_u32()).into();
+
         match dest.layout.variants {
             layout::Variants::Single { index } => {
-                assert_eq!(index, variant_index);
+                if index != variant_index {
+                    throw_ub!(InvalidDiscriminant(variant_scalar));
+                }
             }
             layout::Variants::Multiple {
                 discr_kind: layout::DiscriminantKind::Tag,
@@ -1037,7 +1045,9 @@ pub fn write_discriminant_index(
                 discr_index,
                 ..
             } => {
-                assert!(dest.layout.ty.variant_range(*self.tcx).unwrap().contains(&variant_index));
+                if !dest.layout.ty.variant_range(*self.tcx).unwrap().contains(&variant_index) {
+                    throw_ub!(InvalidDiscriminant(variant_scalar));
+                }
                 let discr_val =
                     dest.layout.ty.discriminant_for_variant(*self.tcx, variant_index).unwrap().val;
 
@@ -1060,9 +1070,9 @@ pub fn write_discriminant_index(
                 discr_index,
                 ..
             } => {
-                assert!(
-                    variant_index.as_usize() < dest.layout.ty.ty_adt_def().unwrap().variants.len(),
-                );
+                if !(variant_index.as_usize() < dest.layout.ty.ty_adt_def().unwrap().variants.len()) {
+                    throw_ub!(InvalidDiscriminant(variant_scalar));
+                }
                 if variant_index != dataful_variant {
                     let variants_start = niche_variants.start().as_u32();
                     let variant_index_relative = variant_index.as_u32()
index ef6b7d626e7a456a4819263355a21137cd683211..7f6baf0bb49c13e34c7af4ae2b77720c41ed3983 100644 (file)
@@ -140,12 +140,12 @@ pub(super) fn eval_terminator(
                                 .read_immediate(self.eval_operand(len, None)?)
                                 .expect("can't eval len")
                                 .to_scalar()?
-                                .to_bits(self.memory().pointer_size())? as u64;
+                                .to_bits(self.memory.pointer_size())? as u64;
                             let index = self
                                 .read_immediate(self.eval_operand(index, None)?)
                                 .expect("can't eval index")
                                 .to_scalar()?
-                                .to_bits(self.memory().pointer_size())? as u64;
+                                .to_bits(self.memory.pointer_size())? as u64;
                             err_panic!(BoundsCheck { len, index })
                         }
                         Overflow(op) => err_panic!(Overflow(*op)),
@@ -263,6 +263,7 @@ fn eval_fn_call(
                 Ok(())
             }
             ty::InstanceDef::VtableShim(..) |
+            ty::InstanceDef::ReifyShim(..) |
             ty::InstanceDef::ClosureOnceShim { .. } |
             ty::InstanceDef::FnPtrShim(..) |
             ty::InstanceDef::DropGlue(..) |
index 853fcb1beabf574b8e45976fe0758885bfa018ec..3444fb60f333b9d35fcf5c33cbe2f7bfbb265065 100644 (file)
@@ -344,7 +344,7 @@ fn visit_value(&mut self, op: OpTy<'tcx, M::PointerTag>) -> InterpResult<'tcx>
         match self.walk_value(op) {
             Ok(()) => Ok(()),
             Err(err) => match err.kind {
-                err_unsup!(InvalidDiscriminant(val)) =>
+                err_ub!(InvalidDiscriminant(val)) =>
                     throw_validation_failure!(
                         val, self.path, "a valid enum discriminant"
                     ),
index 81c08ee87e985ecab89b9f4b78ac868ef94d1282..98d5487870a4de609eb01dfc2ce466d57415198e 100644 (file)
@@ -14,6 +14,7 @@
 #![feature(core_intrinsics)]
 #![feature(const_fn)]
 #![feature(decl_macro)]
+#![feature(drain_filter)]
 #![feature(exhaustive_patterns)]
 #![feature(never_type)]
 #![feature(specialization)]
@@ -22,7 +23,6 @@
 #![feature(slice_concat_ext)]
 #![feature(trusted_len)]
 #![feature(try_blocks)]
-#![feature(mem_take)]
 #![feature(associated_type_bounds)]
 #![feature(range_is_empty)]
 #![feature(stmt_expr_attributes)]
index 3ac837dd330fd0ffe22adec26fcfadeca21613c0..ee7452d3e8b460cae476dbb3fae5e9e227cbe4c1 100644 (file)
@@ -285,7 +285,11 @@ pub fn collect_crate_mono_items(
     tcx: TyCtxt<'_>,
     mode: MonoItemCollectionMode,
 ) -> (FxHashSet<MonoItem<'_>>, InliningMap<'_>) {
+    let _prof_timer = tcx.prof.generic_activity("monomorphization_collector");
+
     let roots = time(tcx.sess, "collecting roots", || {
+        let _prof_timer = tcx.prof
+            .generic_activity("monomorphization_collector_root_collections");
         collect_roots(tcx, mode)
     });
 
@@ -295,6 +299,9 @@ pub fn collect_crate_mono_items(
     let mut inlining_map = MTLock::new(InliningMap::new());
 
     {
+        let _prof_timer = tcx.prof
+            .generic_activity("monomorphization_collector_graph_walk");
+
         let visited: MTRef<'_, _> = &mut visited;
         let inlining_map: MTRef<'_, _> = &mut inlining_map;
 
@@ -714,10 +721,12 @@ fn visit_fn_use<'tcx>(
     output: &mut Vec<MonoItem<'tcx>>,
 ) {
     if let ty::FnDef(def_id, substs) = ty.kind {
-        let instance = ty::Instance::resolve(tcx,
-                                             ty::ParamEnv::reveal_all(),
-                                             def_id,
-                                             substs).unwrap();
+        let resolver = if is_direct_call {
+            ty::Instance::resolve
+        } else {
+            ty::Instance::resolve_for_fn_ptr
+        };
+        let instance = resolver(tcx, ty::ParamEnv::reveal_all(), def_id, substs).unwrap();
         visit_instance_use(tcx, instance, is_direct_call, output);
     }
 }
@@ -740,6 +749,7 @@ fn visit_instance_use<'tcx>(
             }
         }
         ty::InstanceDef::VtableShim(..) |
+        ty::InstanceDef::ReifyShim(..) |
         ty::InstanceDef::Virtual(..) |
         ty::InstanceDef::DropGlue(_, None) => {
             // don't need to emit shim if we are calling directly.
@@ -766,6 +776,7 @@ fn should_monomorphize_locally<'tcx>(tcx: TyCtxt<'tcx>, instance: &Instance<'tcx
     let def_id = match instance.def {
         ty::InstanceDef::Item(def_id) => def_id,
         ty::InstanceDef::VtableShim(..) |
+        ty::InstanceDef::ReifyShim(..) |
         ty::InstanceDef::ClosureOnceShim { .. } |
         ty::InstanceDef::Virtual(..) |
         ty::InstanceDef::FnPtrShim(..) |
@@ -1265,7 +1276,14 @@ fn collect_const<'tcx>(
 ) {
     debug!("visiting const {:?}", constant);
 
-    match constant.val {
+    let param_env = ty::ParamEnv::reveal_all();
+    let substituted_constant = tcx.subst_and_normalize_erasing_regions(
+        param_substs,
+        param_env,
+        &constant,
+    );
+
+    match substituted_constant.val {
         ConstValue::Scalar(Scalar::Ptr(ptr)) =>
             collect_miri(tcx, ptr.alloc_id, output),
         ConstValue::Slice { data: alloc, start: _, end: _ } |
@@ -1275,12 +1293,6 @@ fn collect_const<'tcx>(
             }
         }
         ConstValue::Unevaluated(def_id, substs) => {
-            let param_env = ty::ParamEnv::reveal_all();
-            let substs = tcx.subst_and_normalize_erasing_regions(
-                param_substs,
-                param_env,
-                &substs,
-            );
             let instance = ty::Instance::resolve(tcx,
                                                 param_env,
                                                 def_id,
@@ -1297,7 +1309,7 @@ fn collect_const<'tcx>(
                     tcx.def_span(def_id), "collection encountered polymorphic constant",
                 ),
             }
-        }
+        },
         _ => {},
     }
 }
diff --git a/src/librustc_mir/monomorphize/item.rs b/src/librustc_mir/monomorphize/item.rs
deleted file mode 100644 (file)
index e634262..0000000
+++ /dev/null
@@ -1,204 +0,0 @@
-use rustc::hir::def_id::LOCAL_CRATE;
-use rustc::mir::mono::MonoItem;
-use rustc::session::config::OptLevel;
-use rustc::ty::{self, TyCtxt, Instance};
-use rustc::ty::subst::InternalSubsts;
-use rustc::ty::print::obsolete::DefPathBasedNames;
-use syntax::attr::InlineAttr;
-use std::fmt;
-use rustc::mir::mono::Linkage;
-use syntax_pos::symbol::InternedString;
-use syntax::source_map::Span;
-
-/// Describes how a monomorphization will be instantiated in object files.
-#[derive(PartialEq, Eq, Clone, Copy, Debug, Hash)]
-pub enum InstantiationMode {
-    /// There will be exactly one instance of the given MonoItem. It will have
-    /// external linkage so that it can be linked to from other codegen units.
-    GloballyShared {
-        /// In some compilation scenarios we may decide to take functions that
-        /// are typically `LocalCopy` and instead move them to `GloballyShared`
-        /// to avoid codegenning them a bunch of times. In this situation,
-        /// however, our local copy may conflict with other crates also
-        /// inlining the same function.
-        ///
-        /// This flag indicates that this situation is occurring, and informs
-        /// symbol name calculation that some extra mangling is needed to
-        /// avoid conflicts. Note that this may eventually go away entirely if
-        /// ThinLTO enables us to *always* have a globally shared instance of a
-        /// function within one crate's compilation.
-        may_conflict: bool,
-    },
-
-    /// Each codegen unit containing a reference to the given MonoItem will
-    /// have its own private copy of the function (with internal linkage).
-    LocalCopy,
-}
-
-pub trait MonoItemExt<'tcx>: fmt::Debug {
-    fn as_mono_item(&self) -> &MonoItem<'tcx>;
-
-    fn is_generic_fn(&self) -> bool {
-        match *self.as_mono_item() {
-            MonoItem::Fn(ref instance) => {
-                instance.substs.non_erasable_generics().next().is_some()
-            }
-            MonoItem::Static(..) |
-            MonoItem::GlobalAsm(..) => false,
-        }
-    }
-
-    fn symbol_name(&self, tcx: TyCtxt<'tcx>) -> ty::SymbolName {
-        match *self.as_mono_item() {
-            MonoItem::Fn(instance) => tcx.symbol_name(instance),
-            MonoItem::Static(def_id) => {
-                tcx.symbol_name(Instance::mono(tcx, def_id))
-            }
-            MonoItem::GlobalAsm(hir_id) => {
-                let def_id = tcx.hir().local_def_id(hir_id);
-                ty::SymbolName {
-                    name: InternedString::intern(&format!("global_asm_{:?}", def_id))
-                }
-            }
-        }
-    }
-    fn instantiation_mode(&self, tcx: TyCtxt<'tcx>) -> InstantiationMode {
-        let inline_in_all_cgus =
-            tcx.sess.opts.debugging_opts.inline_in_all_cgus.unwrap_or_else(|| {
-                tcx.sess.opts.optimize != OptLevel::No
-            }) && !tcx.sess.opts.cg.link_dead_code;
-
-        match *self.as_mono_item() {
-            MonoItem::Fn(ref instance) => {
-                let entry_def_id = tcx.entry_fn(LOCAL_CRATE).map(|(id, _)| id);
-                // If this function isn't inlined or otherwise has explicit
-                // linkage, then we'll be creating a globally shared version.
-                if self.explicit_linkage(tcx).is_some() ||
-                    !instance.def.requires_local(tcx) ||
-                    Some(instance.def_id()) == entry_def_id
-                {
-                    return InstantiationMode::GloballyShared  { may_conflict: false }
-                }
-
-                // At this point we don't have explicit linkage and we're an
-                // inlined function. If we're inlining into all CGUs then we'll
-                // be creating a local copy per CGU
-                if inline_in_all_cgus {
-                    return InstantiationMode::LocalCopy
-                }
-
-                // Finally, if this is `#[inline(always)]` we're sure to respect
-                // that with an inline copy per CGU, but otherwise we'll be
-                // creating one copy of this `#[inline]` function which may
-                // conflict with upstream crates as it could be an exported
-                // symbol.
-                match tcx.codegen_fn_attrs(instance.def_id()).inline {
-                    InlineAttr::Always => InstantiationMode::LocalCopy,
-                    _ => {
-                        InstantiationMode::GloballyShared  { may_conflict: true }
-                    }
-                }
-            }
-            MonoItem::Static(..) |
-            MonoItem::GlobalAsm(..) => {
-                InstantiationMode::GloballyShared { may_conflict: false }
-            }
-        }
-    }
-
-    fn explicit_linkage(&self, tcx: TyCtxt<'tcx>) -> Option<Linkage> {
-        let def_id = match *self.as_mono_item() {
-            MonoItem::Fn(ref instance) => instance.def_id(),
-            MonoItem::Static(def_id) => def_id,
-            MonoItem::GlobalAsm(..) => return None,
-        };
-
-        let codegen_fn_attrs = tcx.codegen_fn_attrs(def_id);
-        codegen_fn_attrs.linkage
-    }
-
-    /// Returns `true` if this instance is instantiable - whether it has no unsatisfied
-    /// predicates.
-    ///
-    /// In order to codegen an item, all of its predicates must hold, because
-    /// otherwise the item does not make sense. Type-checking ensures that
-    /// the predicates of every item that is *used by* a valid item *do*
-    /// hold, so we can rely on that.
-    ///
-    /// However, we codegen collector roots (reachable items) and functions
-    /// in vtables when they are seen, even if they are not used, and so they
-    /// might not be instantiable. For example, a programmer can define this
-    /// public function:
-    ///
-    ///     pub fn foo<'a>(s: &'a mut ()) where &'a mut (): Clone {
-    ///         <&mut () as Clone>::clone(&s);
-    ///     }
-    ///
-    /// That function can't be codegened, because the method `<&mut () as Clone>::clone`
-    /// does not exist. Luckily for us, that function can't ever be used,
-    /// because that would require for `&'a mut (): Clone` to hold, so we
-    /// can just not emit any code, or even a linker reference for it.
-    ///
-    /// Similarly, if a vtable method has such a signature, and therefore can't
-    /// be used, we can just not emit it and have a placeholder (a null pointer,
-    /// which will never be accessed) in its place.
-    fn is_instantiable(&self, tcx: TyCtxt<'tcx>) -> bool {
-        debug!("is_instantiable({:?})", self);
-        let (def_id, substs) = match *self.as_mono_item() {
-            MonoItem::Fn(ref instance) => (instance.def_id(), instance.substs),
-            MonoItem::Static(def_id) => (def_id, InternalSubsts::empty()),
-            // global asm never has predicates
-            MonoItem::GlobalAsm(..) => return true
-        };
-
-        tcx.substitute_normalize_and_test_predicates((def_id, &substs))
-    }
-
-    fn to_string(&self, tcx: TyCtxt<'tcx>, debug: bool) -> String {
-        return match *self.as_mono_item() {
-            MonoItem::Fn(instance) => {
-                to_string_internal(tcx, "fn ", instance, debug)
-            },
-            MonoItem::Static(def_id) => {
-                let instance = Instance::new(def_id, tcx.intern_substs(&[]));
-                to_string_internal(tcx, "static ", instance, debug)
-            },
-            MonoItem::GlobalAsm(..) => {
-                "global_asm".to_string()
-            }
-        };
-
-        fn to_string_internal<'a, 'tcx>(
-            tcx: TyCtxt<'tcx>,
-            prefix: &str,
-            instance: Instance<'tcx>,
-            debug: bool,
-        ) -> String {
-            let mut result = String::with_capacity(32);
-            result.push_str(prefix);
-            let printer = DefPathBasedNames::new(tcx, false, false);
-            printer.push_instance_as_string(instance, &mut result, debug);
-            result
-        }
-    }
-
-    fn local_span(&self, tcx: TyCtxt<'tcx>) -> Option<Span> {
-        match *self.as_mono_item() {
-            MonoItem::Fn(Instance { def, .. }) => {
-                tcx.hir().as_local_hir_id(def.def_id())
-            }
-            MonoItem::Static(def_id) => {
-                tcx.hir().as_local_hir_id(def_id)
-            }
-            MonoItem::GlobalAsm(hir_id) => {
-                Some(hir_id)
-            }
-        }.map(|hir_id| tcx.hir().span(hir_id))
-    }
-}
-
-impl MonoItemExt<'tcx> for MonoItem<'tcx> {
-    fn as_mono_item(&self) -> &MonoItem<'tcx> {
-        self
-    }
-}
index c193911247e2bea5bb4219c1461214ab7476bf5d..b9d38028b72a81f21e8a3cdd4db3317fda5d2410 100644 (file)
@@ -134,10 +134,15 @@ pub fn partition<'tcx, I>(
 where
     I: Iterator<Item = MonoItem<'tcx>>,
 {
+    let _prof_timer = tcx.prof.generic_activity("cgu_partitioning");
+
     // In the first step, we place all regular monomorphizations into their
     // respective 'home' codegen unit. Regular monomorphizations are all
     // functions and statics defined in the local crate.
-    let mut initial_partitioning = place_root_mono_items(tcx, mono_items);
+    let mut initial_partitioning = {
+        let _prof_timer = tcx.prof.generic_activity("cgu_partitioning_place_roots");
+        place_root_mono_items(tcx, mono_items)
+    };
 
     initial_partitioning.codegen_units.iter_mut().for_each(|cgu| cgu.estimate_size(tcx));
 
@@ -146,8 +151,8 @@ pub fn partition<'tcx, I>(
     // If the partitioning should produce a fixed count of codegen units, merge
     // until that count is reached.
     if let PartitioningStrategy::FixedUnitCount(count) = strategy {
+        let _prof_timer = tcx.prof.generic_activity("cgu_partitioning_merge_cgus");
         merge_codegen_units(tcx, &mut initial_partitioning, count);
-
         debug_dump(tcx, "POST MERGING:", initial_partitioning.codegen_units.iter());
     }
 
@@ -155,8 +160,11 @@ pub fn partition<'tcx, I>(
     // monomorphizations have to go into each codegen unit. These additional
     // monomorphizations can be drop-glue, functions from external crates, and
     // local functions the definition of which is marked with `#[inline]`.
-    let mut post_inlining = place_inlined_mono_items(initial_partitioning,
-                                                            inlining_map);
+    let mut post_inlining = {
+        let _prof_timer =
+            tcx.prof.generic_activity("cgu_partitioning_place_inline_items");
+        place_inlined_mono_items(initial_partitioning, inlining_map)
+    };
 
     post_inlining.codegen_units.iter_mut().for_each(|cgu| cgu.estimate_size(tcx));
 
@@ -165,6 +173,8 @@ pub fn partition<'tcx, I>(
     // Next we try to make as many symbols "internal" as possible, so LLVM has
     // more freedom to optimize.
     if !tcx.sess.opts.cg.link_dead_code {
+        let _prof_timer =
+            tcx.prof.generic_activity("cgu_partitioning_internalize_symbols");
         internalize_symbols(tcx, &mut post_inlining, inlining_map);
     }
 
@@ -329,6 +339,7 @@ fn mono_item_visibility(
 
         // These are all compiler glue and such, never exported, always hidden.
         InstanceDef::VtableShim(..) |
+        InstanceDef::ReifyShim(..) |
         InstanceDef::FnPtrShim(..) |
         InstanceDef::Virtual(..) |
         InstanceDef::Intrinsic(..) |
@@ -494,6 +505,9 @@ fn merge_codegen_units<'tcx>(
         for (k, v) in smallest.items_mut().drain() {
             second_smallest.items_mut().insert(k, v);
         }
+        debug!("CodegenUnit {} merged into CodegenUnit {}",
+               smallest.name(),
+               second_smallest.name());
     }
 
     let cgu_name_builder = &mut CodegenUnitNameBuilder::new(tcx);
@@ -664,6 +678,7 @@ fn characteristic_def_id_of_mono_item<'tcx>(
             let def_id = match instance.def {
                 ty::InstanceDef::Item(def_id) => def_id,
                 ty::InstanceDef::VtableShim(..) |
+                ty::InstanceDef::ReifyShim(..) |
                 ty::InstanceDef::FnPtrShim(..) |
                 ty::InstanceDef::ClosureOnceShim { .. } |
                 ty::InstanceDef::Intrinsic(..) |
@@ -774,7 +789,7 @@ fn debug_dump<'a, 'tcx, I>(tcx: TyCtxt<'tcx>, label: &str, cgus: I)
     if cfg!(debug_assertions) {
         debug!("{}", label);
         for cgu in cgus {
-            debug!("CodegenUnit {}:", cgu.name());
+            debug!("CodegenUnit {} estimated size {}:", cgu.name(), cgu.size_estimate());
 
             for (mono_item, linkage) in cgu.items() {
                 let symbol_name = mono_item.symbol_name(tcx).name.as_str();
@@ -782,10 +797,11 @@ fn debug_dump<'a, 'tcx, I>(tcx: TyCtxt<'tcx>, label: &str, cgus: I)
                 let symbol_hash = symbol_hash_start.map(|i| &symbol_name[i ..])
                                                    .unwrap_or("<no hash>");
 
-                debug!(" - {} [{:?}] [{}]",
+                debug!(" - {} [{:?}] [{}] estimated size {}",
                        mono_item.to_string(tcx, true),
                        linkage,
-                       symbol_hash);
+                       symbol_hash,
+                       mono_item.size_estimate(tcx));
             }
 
             debug!("");
index d089eafbb0798138acd0a854cc4ac56221d7f697..f532a18072fbda480bf227f709e635689f64441d 100644 (file)
@@ -66,9 +66,12 @@ fn make_shim<'tcx>(tcx: TyCtxt<'tcx>, instance: ty::InstanceDef<'tcx>) -> &'tcx
                 Some(arg_tys)
             )
         }
-        ty::InstanceDef::Virtual(def_id, _) => {
-            // We are generating a call back to our def-id, which the
-            // codegen backend knows to turn to an actual virtual call.
+        // We are generating a call back to our def-id, which the
+        // codegen backend knows to turn to an actual virtual call.
+        ty::InstanceDef::Virtual(def_id, _) |
+        // ...or we are generating a direct call to a function for which indirect calls must be
+        // codegen'd differently than direct ones (example: #[track_caller])
+        ty::InstanceDef::ReifyShim(def_id) => {
             build_call_shim(
                 tcx,
                 def_id,
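
A small assumed example (not from this patch) of why the reify shim exists: once a `#[track_caller]` function is reified to a plain `fn` pointer, the pointer type has no slot for the implicit caller-location argument, so calls through it must go via a shim that supplies a `Location` itself.

    use std::panic::Location;

    #[track_caller]
    fn where_am_i() -> &'static Location<'static> {
        Location::caller()
    }

    fn main() {
        let direct = where_am_i();                                  // reports this call site
        let f: fn() -> &'static Location<'static> = where_am_i;    // reified: goes through a shim
        let indirect = f();
        println!("direct: {}, indirect: {}", direct, indirect);
    }
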
@@ -169,7 +172,7 @@ fn build_drop_shim<'tcx>(tcx: TyCtxt<'tcx>, def_id: DefId, ty: Option<Ty<'tcx>>)
     // Check if this is a generator, if so, return the drop glue for it
     if let Some(&ty::TyS { kind: ty::Generator(gen_def_id, substs, _), .. }) = ty {
         let body = &**tcx.optimized_mir(gen_def_id).generator_drop.as_ref().unwrap();
-        return body.subst(tcx, substs.substs);
+        return body.subst(tcx, substs);
     }
 
     let substs = if let Some(ty) = ty {
index 49ac1de8fef64fe59b1cd1974f76965f3a06327d..f0c0e57344388f8e09d410c118a099e5158a7e82 100644 (file)
@@ -8,7 +8,7 @@
 use rustc::hir::def_id::DefId;
 use rustc::mir::{
     AggregateKind, Constant, Location, Place, PlaceBase, Body, Operand, Rvalue,
-    Local, NullOp, UnOp, StatementKind, Statement, LocalKind,
+    Local, UnOp, StatementKind, Statement, LocalKind,
     TerminatorKind, Terminator,  ClearCrossCrate, SourceInfo, BinOp,
     SourceScope, SourceScopeLocalData, LocalDecl, BasicBlock,
 };
@@ -118,7 +118,7 @@ fn run_pass(&self, tcx: TyCtxt<'tcx>, source: MirSource<'tcx>, body: &mut Body<'
 struct ConstPropMachine;
 
 impl<'mir, 'tcx> interpret::Machine<'mir, 'tcx> for ConstPropMachine {
-    type MemoryKinds= !;
+    type MemoryKinds = !;
     type PointerTag = ();
     type ExtraFnVal = !;
 
@@ -434,32 +434,23 @@ fn const_prop(
     ) -> Option<Const<'tcx>> {
         let span = source_info.span;
 
-        // if this isn't a supported operation, then return None
-        match rvalue {
-            Rvalue::Repeat(..) |
-            Rvalue::Aggregate(..) |
-            Rvalue::NullaryOp(NullOp::Box, _) |
-            Rvalue::Discriminant(..) => return None,
-
-            Rvalue::Use(_) |
-            Rvalue::Len(_) |
-            Rvalue::Cast(..) |
-            Rvalue::NullaryOp(..) |
-            Rvalue::CheckedBinaryOp(..) |
-            Rvalue::Ref(..) |
-            Rvalue::UnaryOp(..) |
-            Rvalue::BinaryOp(..) => { }
-        }
+        let overflow_check = self.tcx.sess.overflow_checks();
 
-        // perform any special checking for specific Rvalue types
-        if let Rvalue::UnaryOp(op, arg) = rvalue {
-            trace!("checking UnaryOp(op = {:?}, arg = {:?})", op, arg);
-            let overflow_check = self.tcx.sess.overflow_checks();
+        // Perform any special handling for specific Rvalue types.
+        // Generally, checks here fall into one of two categories:
+        //   1. Additional checking to provide useful lints to the user
+        //        - In this case, we will do some validation and then fall through to the
+        //          end of the function which evals the assignment.
+        //   2. Working around bugs in other parts of the compiler
+        //        - In this case, we'll return `None` from this function to stop evaluation.
+        match rvalue {
+            // Additional checking: if overflow checks are disabled (which is usually the case in
+            // release mode), then we need to do additional checking here to give lints to the user
+            // if an overflow would occur.
+            Rvalue::UnaryOp(UnOp::Neg, arg) if !overflow_check => {
+                trace!("checking UnaryOp(op = Neg, arg = {:?})", arg);
 
-            self.use_ecx(source_info, |this| {
-                // We check overflow in debug mode already
-                // so should only check in release mode.
-                if *op == UnOp::Neg && !overflow_check {
+                self.use_ecx(source_info, |this| {
                     let ty = arg.ty(&this.local_decls, this.tcx);
 
                     if ty.is_integral() {
@@ -471,60 +462,70 @@ fn const_prop(
                             throw_panic!(OverflowNeg)
                         }
                     }
+
+                    Ok(())
+                })?;
+            }
+
+            // Additional checking: check for overflows on integer binary operations and report
+            // them to the user as lints.
+            Rvalue::BinaryOp(op, left, right) => {
+                trace!("checking BinaryOp(op = {:?}, left = {:?}, right = {:?})", op, left, right);
+
+                let r = self.use_ecx(source_info, |this| {
+                    this.ecx.read_immediate(this.ecx.eval_operand(right, None)?)
+                })?;
+                if *op == BinOp::Shr || *op == BinOp::Shl {
+                    let left_bits = place_layout.size.bits();
+                    let right_size = r.layout.size;
+                    let r_bits = r.to_scalar().and_then(|r| r.to_bits(right_size));
+                    if r_bits.ok().map_or(false, |b| b >= left_bits as u128) {
+                        let source_scope_local_data = match self.source_scope_local_data {
+                            ClearCrossCrate::Set(ref data) => data,
+                            ClearCrossCrate::Clear => return None,
+                        };
+                        let dir = if *op == BinOp::Shr {
+                            "right"
+                        } else {
+                            "left"
+                        };
+                        let hir_id = source_scope_local_data[source_info.scope].lint_root;
+                        self.tcx.lint_hir(
+                            ::rustc::lint::builtin::EXCEEDING_BITSHIFTS,
+                            hir_id,
+                            span,
+                            &format!("attempt to shift {} with overflow", dir));
+                        return None;
+                    }
                 }
 
-                Ok(())
-            })?;
-        } else if let Rvalue::BinaryOp(op, left, right) = rvalue {
-            trace!("checking BinaryOp(op = {:?}, left = {:?}, right = {:?})", op, left, right);
-
-            let r = self.use_ecx(source_info, |this| {
-                this.ecx.read_immediate(this.ecx.eval_operand(right, None)?)
-            })?;
-            if *op == BinOp::Shr || *op == BinOp::Shl {
-                let left_bits = place_layout.size.bits();
-                let right_size = r.layout.size;
-                let r_bits = r.to_scalar().and_then(|r| r.to_bits(right_size));
-                if r_bits.ok().map_or(false, |b| b >= left_bits as u128) {
-                    let source_scope_local_data = match self.source_scope_local_data {
-                        ClearCrossCrate::Set(ref data) => data,
-                        ClearCrossCrate::Clear => return None,
-                    };
-                    let dir = if *op == BinOp::Shr {
-                        "right"
-                    } else {
-                        "left"
-                    };
-                    let hir_id = source_scope_local_data[source_info.scope].lint_root;
-                    self.tcx.lint_hir(
-                        ::rustc::lint::builtin::EXCEEDING_BITSHIFTS,
-                        hir_id,
-                        span,
-                        &format!("attempt to shift {} with overflow", dir));
-                    return None;
+                // If overflow checking is enabled (like in debug mode by default),
+                // then we'll already catch overflow when we evaluate the `Assert` statement
+                // in MIR. However, if overflow checking is disabled, then there won't be any
+                // `Assert` statement and so we have to do additional checking here.
+                if !overflow_check {
+                    self.use_ecx(source_info, |this| {
+                        let l = this.ecx.read_immediate(this.ecx.eval_operand(left, None)?)?;
+                        let (_, overflow, _ty) = this.ecx.overflowing_binary_op(*op, l, r)?;
+
+                        if overflow {
+                            let err = err_panic!(Overflow(*op)).into();
+                            return Err(err);
+                        }
+
+                        Ok(())
+                    })?;
                 }
             }
-            self.use_ecx(source_info, |this| {
-                let l = this.ecx.read_immediate(this.ecx.eval_operand(left, None)?)?;
-                let (_, overflow, _ty) = this.ecx.overflowing_binary_op(*op, l, r)?;
-
-                // We check overflow in debug mode already
-                // so should only check in release mode.
-                if !this.tcx.sess.overflow_checks() && overflow {
-                    let err = err_panic!(Overflow(*op)).into();
-                    return Err(err);
-                }
 
-                Ok(())
-            })?;
-        } else if let Rvalue::Ref(_, _, place) = rvalue {
-            trace!("checking Ref({:?})", place);
+            // Workaround: avoid an ICE in miri.
             // FIXME(wesleywiser) we don't currently handle the case where we try to make a ref
-            // from a function argument that hasn't been assigned to in this function.
-            if let Place {
-                base: PlaceBase::Local(local),
-                projection: box []
-            } = place {
+            // from a function argument that hasn't been assigned to in this function. The main
+            // issue is if an arg is a fat-pointer, miri `expects()` to be able to read the value
+            // of that pointer to get size info. However, since this is `ConstProp`, that argument
+            // doesn't actually have a backing value and so this causes an ICE.
+            Rvalue::Ref(_, _, Place { base: PlaceBase::Local(local), projection: box [] }) => {
+                trace!("checking Ref({:?})", place);
                 let alive =
                     if let LocalValue::Live(_) = self.ecx.frame().locals[*local].value {
                         true
@@ -535,6 +536,15 @@ fn const_prop(
                     return None;
                 }
             }
+
+            // Workaround: avoid extra unnecessary locals.
+            // FIXME(wesleywiser): const eval will turn this into a `const Scalar(<ZST>)` that
+            // `SimplifyLocals` doesn't know it can remove.
+            Rvalue::Aggregate(_, operands) if operands.len() == 0 => {
+                return None;
+            }
+
+            _ => { }
         }
 
         self.use_ecx(source_info, |this| {
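For readers skimming the hunk above: the rewritten arms report oversized shifts through the `EXCEEDING_BITSHIFTS` lint and, when overflow checks are disabled, evaluate the binary operation themselves to catch overflow. A minimal sketch of user code that would be flagged (an editor's illustration, not part of the commit; assumes the default, deny-by-default lint levels):

```rust
// Both statements below are the kind of thing the pass reports. The shift is
// rejected by the deny-by-default `exceeding_bitshifts` lint; the addition is
// only caught by this pass when no `Assert` statement exists, i.e. when the
// crate is built with overflow checks disabled (`-C overflow-checks=off`).
// This example is expected to fail to compile, by design.
fn main() {
    let x: u8 = 1;
    let _shifted = x << 9;     // shift amount (9) >= bit width of `u8` (8)
    let _sum: u8 = 200 + 100;  // 300 does not fit in a `u8`
}
```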
index 21ca339eb968bdfb6f644a14011725a021daee75..439cae2093ae5361ec82b136f369d862cd03f796 100644 (file)
@@ -25,7 +25,6 @@ pub fn new(tcx: TyCtxt<'tcx>) -> Self {
 impl MutVisitor<'tcx> for EraseRegionsVisitor<'tcx> {
     fn visit_ty(&mut self, ty: &mut Ty<'tcx>, _: TyContext) {
         *ty = self.tcx.erase_regions(ty);
-        self.super_ty(ty);
     }
 
     fn visit_region(&mut self, region: &mut ty::Region<'tcx>, _: Location) {
@@ -40,10 +39,19 @@ fn visit_substs(&mut self, substs: &mut SubstsRef<'tcx>, _: Location) {
         *substs = self.tcx.erase_regions(substs);
     }
 
-    fn visit_statement(&mut self,
-                       statement: &mut Statement<'tcx>,
-                       location: Location) {
-        self.super_statement(statement, location);
+    fn process_projection_elem(
+        &mut self,
+        elem: &PlaceElem<'tcx>,
+    ) -> Option<PlaceElem<'tcx>> {
+        if let PlaceElem::Field(field, ty) = elem {
+            let new_ty = self.tcx.erase_regions(ty);
+
+            if new_ty != *ty {
+                return Some(PlaceElem::Field(*field, new_ty));
+            }
+        }
+
+        None
     }
 }
 
index bfc5eb5a94ef88ea37410dc1a6e3f9ffb96c4b5e..6533e3c5ba81fcc8180252c5bb8299eb09fadb82 100644 (file)
@@ -88,6 +88,18 @@ fn visit_local(&mut self,
             *local = self.to;
         }
     }
+
+    fn process_projection_elem(
+        &mut self,
+        elem: &PlaceElem<'tcx>,
+    ) -> Option<PlaceElem<'tcx>> {
+        match elem {
+            PlaceElem::Index(local) if *local == self.from => {
+                Some(PlaceElem::Index(self.to))
+            }
+            _ => None,
+        }
+    }
 }
 
 struct DerefArgVisitor;
@@ -110,7 +122,13 @@ fn visit_place(&mut self,
                 projection: Box::new([ProjectionElem::Deref]),
             });
         } else {
-            self.super_place(place, context, location);
+            self.visit_place_base(&mut place.base, context, location);
+
+            for elem in place.projection.iter() {
+                if let PlaceElem::Index(local) = elem {
+                    assert_ne!(*local, self_arg());
+                }
+            }
         }
     }
 }
@@ -137,7 +155,13 @@ fn visit_place(&mut self,
                 projection: Box::new([ProjectionElem::Field(Field::new(0), self.ref_gen_ty)]),
             });
         } else {
-            self.super_place(place, context, location);
+            self.visit_place_base(&mut place.base, context, location);
+
+            for elem in place.projection.iter() {
+                if let PlaceElem::Index(local) = elem {
+                    assert_ne!(*local, self_arg());
+                }
+            }
         }
     }
 }
@@ -247,17 +271,25 @@ fn visit_local(&mut self,
         assert_eq!(self.remap.get(local), None);
     }
 
-    fn visit_place(&mut self,
-                    place: &mut Place<'tcx>,
-                    context: PlaceContext,
-                    location: Location) {
+    fn visit_place(
+        &mut self,
+        place: &mut Place<'tcx>,
+        context: PlaceContext,
+        location: Location,
+    ) {
         if let PlaceBase::Local(l) = place.base {
             // Replace an Local in the remap with a generator struct access
             if let Some(&(ty, variant_index, idx)) = self.remap.get(&l) {
                 replace_base(place, self.make_field(variant_index, idx, ty));
             }
         } else {
-            self.super_place(place, context, location);
+            self.visit_place_base(&mut place.base, context, location);
+
+            for elem in place.projection.iter() {
+                if let PlaceElem::Index(local) = elem {
+                    assert_ne!(*local, self_arg());
+                }
+            }
         }
     }
 
@@ -508,10 +540,7 @@ fn locals_live_across_suspend_points(
             storage_liveness_map.insert(block, storage_liveness.clone());
 
             requires_storage_cursor.seek(loc);
-            let mut storage_required = requires_storage_cursor.get().clone();
-
-            // Mark locals without storage statements as always requiring storage
-            storage_required.union(&ignored.0);
+            let storage_required = requires_storage_cursor.get().clone();
 
             // Locals live are live at this point only if they are used across
             // suspension points (the `liveness` variable)
@@ -1126,6 +1155,7 @@ fn run_pass(&self, tcx: TyCtxt<'tcx>, source: MirSource<'tcx>, body: &mut Body<'
         // Get the interior types and substs which typeck computed
         let (upvars, interior, discr_ty, movable) = match gen_ty.kind {
             ty::Generator(_, substs, movability) => {
+                let substs = substs.as_generator();
                 (substs.upvar_tys(def_id, tcx).collect(),
                  substs.witness(def_id, tcx),
                  substs.discr_ty(tcx),
index 9830ed35ffc3ed0b10ed8bfc890b6cfe37fb51f6..0cbdcedff4780dea11618d9a609c18f2436ff196 100644 (file)
@@ -647,38 +647,45 @@ fn update_target(&self, tgt: BasicBlock) -> BasicBlock {
         debug!("updating target `{:?}`, new: `{:?}`", tgt, new);
         new
     }
-}
 
-impl<'a, 'tcx> MutVisitor<'tcx> for Integrator<'a, 'tcx> {
-    fn visit_local(&mut self,
-                   local: &mut Local,
-                   _ctxt: PlaceContext,
-                   _location: Location) {
+    fn make_integrate_local(&self, local: &Local) -> Local {
         if *local == RETURN_PLACE {
             match self.destination {
                 Place {
                     base: PlaceBase::Local(l),
                     projection: box [],
                 } => {
-                    *local = l;
-                    return;
+                    return l;
                 },
                 ref place => bug!("Return place is {:?}, not local", place)
             }
         }
+
         let idx = local.index() - 1;
         if idx < self.args.len() {
-            *local = self.args[idx];
-            return;
+            return self.args[idx];
         }
-        *local = self.local_map[Local::new(idx - self.args.len())];
+
+        self.local_map[Local::new(idx - self.args.len())]
     }
+}
 
-    fn visit_place(&mut self,
-                    place: &mut Place<'tcx>,
-                    _ctxt: PlaceContext,
-                    _location: Location) {
+impl<'a, 'tcx> MutVisitor<'tcx> for Integrator<'a, 'tcx> {
+    fn visit_local(
+        &mut self,
+        local: &mut Local,
+        _ctxt: PlaceContext,
+        _location: Location,
+    ) {
+        *local = self.make_integrate_local(local);
+    }
 
+    fn visit_place(
+        &mut self,
+        place: &mut Place<'tcx>,
+        context: PlaceContext,
+        location: Location,
+    ) {
         match place {
             Place {
                 base: PlaceBase::Local(RETURN_PLACE),
@@ -687,10 +694,27 @@ fn visit_place(&mut self,
                 // Return pointer; update the place itself
                 *place = self.destination.clone();
             },
-            _ => self.super_place(place, _ctxt, _location)
+            _ => {
+                self.super_place(place, context, location);
+            }
         }
     }
 
+    fn process_projection_elem(
+        &mut self,
+        elem: &PlaceElem<'tcx>,
+    ) -> Option<PlaceElem<'tcx>> {
+        if let PlaceElem::Index(local) = elem {
+            let new_local = self.make_integrate_local(local);
+
+            if new_local != *local {
+                return Some(PlaceElem::Index(new_local))
+            }
+        }
+
+        None
+    }
+
     fn visit_basic_block_data(&mut self, block: BasicBlock, data: &mut BasicBlockData<'tcx>) {
         self.in_cleanup_block = data.is_cleanup;
         self.super_basic_block_data(block, data);
index 5d241ffe1c06a6860c98c42b40cb34538fdaac98..ad1785417cd93409633603c784e9b8735143e92c 100644 (file)
@@ -191,6 +191,10 @@ fn assign(&mut self, dest: Local, rvalue: Rvalue<'tcx>, span: Span) {
         });
     }
 
+    fn is_temp_kind(&self, local: Local) -> bool {
+        self.source.local_kind(local) == LocalKind::Temp
+    }
+
     /// Copies the initialization of this temp to the
     /// promoted MIR, recursing through temps.
     fn promote_temp(&mut self, temp: Local) -> Local {
@@ -396,10 +400,22 @@ fn visit_local(&mut self,
                    local: &mut Local,
                    _: PlaceContext,
                    _: Location) {
-        if self.source.local_kind(*local) == LocalKind::Temp {
+        if self.is_temp_kind(*local) {
             *local = self.promote_temp(*local);
         }
     }
+
+    fn process_projection_elem(
+        &mut self,
+        elem: &PlaceElem<'tcx>,
+    ) -> Option<PlaceElem<'tcx>> {
+        match elem {
+            PlaceElem::Index(local) if self.is_temp_kind(*local) => {
+                Some(PlaceElem::Index(self.promote_temp(*local)))
+            }
+            _ => None,
+        }
+    }
 }
 
 pub fn promote_candidates<'tcx>(
index fbcf9c8cb5eba5063f6087e10d2ca3f007081a4b..da1abb9747c1a7ef5cff597176234cc5c8905557 100644 (file)
@@ -1024,23 +1024,12 @@ fn check_const(&mut self) -> (u8, &'tcx BitSet<Local>) {
             new_errors.dedup();
 
             if self.errors != new_errors {
-                error!("old validator: {:?}", self.errors);
-                error!("new validator: {:?}", new_errors);
-
-                // ICE on nightly if the validators do not emit exactly the same errors.
-                // Users can supress this panic with an unstable compiler flag (hopefully after
-                // filing an issue).
-                let opts = &self.tcx.sess.opts;
-                let trigger_ice = opts.unstable_features.is_nightly_build()
-                    && !opts.debugging_opts.suppress_const_validation_back_compat_ice;
-
-                if trigger_ice {
-                    span_bug!(
-                        body.span,
-                        "{}",
-                        VALIDATOR_MISMATCH_ERR,
-                    );
-                }
+                validator_mismatch(
+                    self.tcx,
+                    body,
+                    std::mem::replace(&mut self.errors, vec![]),
+                    new_errors,
+                );
             }
         }
 
@@ -1870,6 +1859,58 @@ fn args_required_const(tcx: TyCtxt<'_>, def_id: DefId) -> Option<FxHashSet<usize
     Some(ret)
 }
 
+fn validator_mismatch(
+    tcx: TyCtxt<'tcx>,
+    body: &Body<'tcx>,
+    mut old_errors: Vec<(Span, String)>,
+    mut new_errors: Vec<(Span, String)>,
+) {
+    error!("old validator: {:?}", old_errors);
+    error!("new validator: {:?}", new_errors);
+
+    // ICE on nightly if the validators do not emit exactly the same errors.
+    // Users can suppress this panic with an unstable compiler flag (hopefully after
+    // filing an issue).
+    let opts = &tcx.sess.opts;
+    let strict_validation_enabled = opts.unstable_features.is_nightly_build()
+        && !opts.debugging_opts.suppress_const_validation_back_compat_ice;
+
+    if !strict_validation_enabled {
+        return;
+    }
+
+    // If this difference would cause a regression from the old to the new or vice versa, trigger
+    // the ICE.
+    if old_errors.is_empty() || new_errors.is_empty() {
+        span_bug!(body.span, "{}", VALIDATOR_MISMATCH_ERR);
+    }
+
+    // HACK: Borrows that would allow mutation are forbidden in const contexts, but they cause the
+    // new validator to be more conservative about when a dropped local has been moved out of.
+    //
+    // Suppress the mismatch ICE in cases where the validators disagree only on the number of
+    // `LiveDrop` errors and both observe the same sequence of `MutBorrow`s.
+
+    let is_live_drop = |(_, s): &mut (_, String)| s.starts_with("LiveDrop");
+    let is_mut_borrow = |(_, s): &&(_, String)| s.starts_with("MutBorrow");
+
+    let old_live_drops: Vec<_> = old_errors.drain_filter(is_live_drop).collect();
+    let new_live_drops: Vec<_> = new_errors.drain_filter(is_live_drop).collect();
+
+    let only_live_drops_differ = old_live_drops != new_live_drops && old_errors == new_errors;
+
+    let old_mut_borrows = old_errors.iter().filter(is_mut_borrow);
+    let new_mut_borrows = new_errors.iter().filter(is_mut_borrow);
+
+    let at_least_one_mut_borrow = old_mut_borrows.clone().next().is_some();
+
+    if only_live_drops_differ && at_least_one_mut_borrow && old_mut_borrows.eq(new_mut_borrows) {
+        return;
+    }
+
+    span_bug!(body.span, "{}", VALIDATOR_MISMATCH_ERR);
+}
+
 const VALIDATOR_MISMATCH_ERR: &str =
     r"Disagreement between legacy and dataflow-based const validators.
     After filing an issue, use `-Zsuppress-const-validation-back-compat-ice` to compile your code.";
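A rough model of the comparison `validator_mismatch` performs, using stable `Vec::retain` in place of the unstable `drain_filter` and bare `String`s standing in for `(Span, String)` pairs (an editor's sketch, not the compiler's code): the ICE is suppressed only when the two error lists differ solely in `LiveDrop` entries and agree on a non-empty sequence of `MutBorrow`s.

```rust
// Split off "LiveDrop" entries, leaving the rest in place
// (a stable-Rust stand-in for `drain_filter`).
fn split_live_drops(errors: &mut Vec<String>) -> Vec<String> {
    let mut live_drops = Vec::new();
    errors.retain(|e| {
        if e.starts_with("LiveDrop") {
            live_drops.push(e.clone());
            false
        } else {
            true
        }
    });
    live_drops
}

fn mismatch_is_benign(mut old: Vec<String>, mut new: Vec<String>) -> bool {
    let old_live_drops = split_live_drops(&mut old);
    let new_live_drops = split_live_drops(&mut new);
    // The validators may only disagree on LiveDrop errors...
    let only_live_drops_differ = old_live_drops != new_live_drops && old == new;

    // ...and must observe the same, non-empty sequence of MutBorrows.
    let old_mut_borrows: Vec<&String> =
        old.iter().filter(|e| e.starts_with("MutBorrow")).collect();
    let new_mut_borrows: Vec<&String> =
        new.iter().filter(|e| e.starts_with("MutBorrow")).collect();

    only_live_drops_differ
        && !old_mut_borrows.is_empty()
        && old_mut_borrows == new_mut_borrows
}

fn main() {
    let old = vec!["MutBorrow".to_string(), "LiveDrop".to_string()];
    let new = vec!["MutBorrow".to_string()];
    assert!(mismatch_is_benign(old, new));
}
```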
index cf0ee1bf092223b03b61a98fe93608c1f80e6e5f..7b6255defd148cae58f4dcd0d5150bef7586188f 100644 (file)
@@ -14,7 +14,7 @@ pub fn is_min_const_fn(tcx: TyCtxt<'tcx>, def_id: DefId, body: &'a Body<'tcx>) -
     let mut current = def_id;
     loop {
         let predicates = tcx.predicates_of(current);
-        for (predicate, _) in &predicates.predicates {
+        for (predicate, _) in predicates.predicates {
             match predicate {
                 | Predicate::RegionOutlives(_)
                 | Predicate::TypeOutlives(_)
index 9ffff9a92fa53d041255af2ac45ce90c17f1af05..606c1a3a1cc09dd9deb761c42b0fe8f819340410 100644 (file)
@@ -366,7 +366,20 @@ fn visit_basic_block_data(&mut self, block: BasicBlock, data: &mut BasicBlockDat
         });
         self.super_basic_block_data(block, data);
     }
+
     fn visit_local(&mut self, l: &mut Local, _: PlaceContext, _: Location) {
         *l = self.map[*l].unwrap();
     }
+
+    fn process_projection_elem(
+        &mut self,
+        elem: &PlaceElem<'tcx>,
+    ) -> Option<PlaceElem<'tcx>> {
+        match elem {
+            PlaceElem::Index(local) => {
+                Some(PlaceElem::Index(self.map[*local].unwrap()))
+            }
+            _ => None
+        }
+    }
 }
index 3aea25fa8769fe708623e42c8b86a97c97ee9f7b..cdd07ad4b8ff4b88ba81689ddbf0aca0b71a7055 100644 (file)
@@ -1,6 +1,6 @@
 //! Def-use analysis.
 
-use rustc::mir::{Local, Location, Body};
+use rustc::mir::{Body, Local, Location, PlaceElem};
 use rustc::mir::visit::{PlaceContext, MutVisitor, Visitor};
 use rustc_index::vec::IndexVec;
 use std::mem;
@@ -47,13 +47,10 @@ pub fn local_info(&self, local: Local) -> &Info {
         &self.info[local]
     }
 
-    fn mutate_defs_and_uses<F>(&self, local: Local, body: &mut Body<'_>, mut callback: F)
-                               where F: for<'a> FnMut(&'a mut Local,
-                                                      PlaceContext,
-                                                      Location) {
+    fn mutate_defs_and_uses(&self, local: Local, body: &mut Body<'_>, new_local: Local) {
         for place_use in &self.info[local].defs_and_uses {
             MutateUseVisitor::new(local,
-                                  &mut callback,
+                                  new_local,
                                   body).visit_location(body, place_use.location)
         }
     }
@@ -63,7 +60,7 @@ pub fn replace_all_defs_and_uses_with(&self,
                                           local: Local,
                                           body: &mut Body<'_>,
                                           new_local: Local) {
-        self.mutate_defs_and_uses(local, body, |local, _, _| *local = new_local)
+        self.mutate_defs_and_uses(local, body, new_local)
     }
 }
 
@@ -117,30 +114,39 @@ pub fn use_count(&self) -> usize {
     }
 }
 
-struct MutateUseVisitor<F> {
+struct MutateUseVisitor {
     query: Local,
-    callback: F,
+    new_local: Local,
 }
 
-impl<F> MutateUseVisitor<F> {
-    fn new(query: Local, callback: F, _: &Body<'_>)
-           -> MutateUseVisitor<F>
-           where F: for<'a> FnMut(&'a mut Local, PlaceContext, Location) {
+impl MutateUseVisitor {
+    fn new(query: Local, new_local: Local, _: &Body<'_>) -> MutateUseVisitor {
         MutateUseVisitor {
             query,
-            callback,
+            new_local,
         }
     }
 }
 
-impl<F> MutVisitor<'_> for MutateUseVisitor<F>
-              where F: for<'a> FnMut(&'a mut Local, PlaceContext, Location) {
+impl MutVisitor<'_> for MutateUseVisitor {
     fn visit_local(&mut self,
                     local: &mut Local,
-                    context: PlaceContext,
-                    location: Location) {
+                    _context: PlaceContext,
+                    _location: Location) {
         if *local == self.query {
-            (self.callback)(local, context, location)
+            *local = self.new_local;
+        }
+    }
+
+    fn process_projection_elem(
+        &mut self,
+        elem: &PlaceElem<'tcx>,
+    ) -> Option<PlaceElem<'tcx>> {
+        match elem {
+            PlaceElem::Index(local) if *local == self.query => {
+                Some(PlaceElem::Index(self.new_local))
+            }
+            _ => None,
         }
     }
 }
index bdbf040276d22eee0df4adcab73e994f1da244fd..f7ba6f1ec6993994d8595ab43d4d44721db210f8 100644 (file)
@@ -798,7 +798,7 @@ fn open_drop(&mut self) -> BasicBlock {
             // It effectively only contains upvars until the generator transformation runs.
             // See librustc_body/transform/generator.rs for more details.
             ty::Generator(def_id, substs, _) => {
-                let tys : Vec<_> = substs.upvar_tys(def_id, self.tcx()).collect();
+                let tys : Vec<_> = substs.as_generator().upvar_tys(def_id, self.tcx()).collect();
                 self.open_drop_for_tuple(&tys)
             }
             ty::Tuple(..) => {
index 596ec6c19bcbfd0b5e419173ca1d6541a47270df..118deb560d62f1dff29252f4782fa640b6399716 100644 (file)
@@ -13,5 +13,8 @@ log = "0.4"
 rustc = { path = "../librustc" }
 rustc_data_structures = { path = "../librustc_data_structures" }
 syntax = { path = "../libsyntax" }
+syntax_expand = { path = "../libsyntax_expand" }
 syntax_pos = { path = "../libsyntax_pos" }
 errors = { path = "../librustc_errors", package = "rustc_errors" }
+rustc_target = { path = "../librustc_target" }
+rustc_index = { path = "../librustc_index" }
index 0339b85ca55e31029b9335a3b86ab473b83caa44..74de31263d3944104009f40cf886fc3b67de4ef1 100644 (file)
@@ -14,7 +14,7 @@
 use rustc_data_structures::fx::FxHashMap;
 use syntax::ast::*;
 use syntax::attr;
-use syntax::ext::proc_macro::is_proc_macro_attr;
+use syntax_expand::proc_macro::is_proc_macro_attr;
 use syntax::feature_gate::is_builtin_attr;
 use syntax::source_map::Spanned;
 use syntax::symbol::{kw, sym};
@@ -263,7 +263,8 @@ fn no_questions_in_bounds(&self, bounds: &GenericBounds, where_: &str, is_trait:
                 let mut err = self.err_handler().struct_span_err(poly.span,
                     &format!("`?Trait` is not permitted in {}", where_));
                 if is_trait {
-                    err.note(&format!("traits are `?{}` by default", poly.trait_ref.path));
+                    let path_str = pprust::path_to_string(&poly.trait_ref.path);
+                    err.note(&format!("traits are `?{}` by default", path_str));
                 }
                 err.emit();
             }
diff --git a/src/librustc_passes/dead.rs b/src/librustc_passes/dead.rs
new file mode 100644 (file)
index 0000000..f2aef2c
--- /dev/null
@@ -0,0 +1,676 @@
+// This implements the dead-code warning pass. It follows middle::reachable
+// closely. The idea is that all reachable symbols are live; code called
+// from live code is live, and everything else is dead.
+
+use rustc::hir::Node;
+use rustc::hir::{self, PatKind, TyKind};
+use rustc::hir::intravisit::{self, Visitor, NestedVisitorMap};
+use rustc::hir::itemlikevisit::ItemLikeVisitor;
+
+use rustc::hir::def::{CtorOf, Res, DefKind};
+use rustc::hir::CodegenFnAttrFlags;
+use rustc::hir::def_id::{DefId, LOCAL_CRATE};
+use rustc::lint;
+use rustc::middle::privacy;
+use rustc::ty::{self, DefIdTree, TyCtxt};
+use rustc::util::nodemap::FxHashSet;
+
+use rustc_data_structures::fx::FxHashMap;
+
+use syntax::{ast, attr};
+use syntax::symbol::sym;
+use syntax_pos;
+
+// Any local node that may call something in its body block should be
+// explored. For example, if it's a live Node::Item that is a
+// function, then we should explore its block to check for code that
+// may need to be marked as live.
+fn should_explore(tcx: TyCtxt<'_>, hir_id: hir::HirId) -> bool {
+    match tcx.hir().find(hir_id) {
+        Some(Node::Item(..)) |
+        Some(Node::ImplItem(..)) |
+        Some(Node::ForeignItem(..)) |
+        Some(Node::TraitItem(..)) |
+        Some(Node::Variant(..)) |
+        Some(Node::AnonConst(..)) |
+        Some(Node::Pat(..)) => true,
+        _ => false
+    }
+}
+
+struct MarkSymbolVisitor<'a, 'tcx> {
+    worklist: Vec<hir::HirId>,
+    tcx: TyCtxt<'tcx>,
+    tables: &'a ty::TypeckTables<'tcx>,
+    live_symbols: FxHashSet<hir::HirId>,
+    repr_has_repr_c: bool,
+    in_pat: bool,
+    inherited_pub_visibility: bool,
+    ignore_variant_stack: Vec<DefId>,
+    // maps from tuple struct constructors to tuple struct items
+    struct_constructors: FxHashMap<hir::HirId, hir::HirId>,
+}
+
+impl<'a, 'tcx> MarkSymbolVisitor<'a, 'tcx> {
+    fn check_def_id(&mut self, def_id: DefId) {
+        if let Some(hir_id) = self.tcx.hir().as_local_hir_id(def_id) {
+            if should_explore(self.tcx, hir_id) || self.struct_constructors.contains_key(&hir_id) {
+                self.worklist.push(hir_id);
+            }
+            self.live_symbols.insert(hir_id);
+        }
+    }
+
+    fn insert_def_id(&mut self, def_id: DefId) {
+        if let Some(hir_id) = self.tcx.hir().as_local_hir_id(def_id) {
+            debug_assert!(!should_explore(self.tcx, hir_id));
+            self.live_symbols.insert(hir_id);
+        }
+    }
+
+    fn handle_res(&mut self, res: Res) {
+        match res {
+            Res::Def(DefKind::Const, _)
+            | Res::Def(DefKind::AssocConst, _)
+            | Res::Def(DefKind::TyAlias, _) => {
+                self.check_def_id(res.def_id());
+            }
+            _ if self.in_pat => {},
+            Res::PrimTy(..) | Res::SelfCtor(..) |
+            Res::Local(..) => {}
+            Res::Def(DefKind::Ctor(CtorOf::Variant, ..), ctor_def_id) => {
+                let variant_id = self.tcx.parent(ctor_def_id).unwrap();
+                let enum_id = self.tcx.parent(variant_id).unwrap();
+                self.check_def_id(enum_id);
+                if !self.ignore_variant_stack.contains(&ctor_def_id) {
+                    self.check_def_id(variant_id);
+                }
+            }
+            Res::Def(DefKind::Variant, variant_id) => {
+                let enum_id = self.tcx.parent(variant_id).unwrap();
+                self.check_def_id(enum_id);
+                if !self.ignore_variant_stack.contains(&variant_id) {
+                    self.check_def_id(variant_id);
+                }
+            }
+            Res::SelfTy(t, i) => {
+                if let Some(t) = t {
+                    self.check_def_id(t);
+                }
+                if let Some(i) = i {
+                    self.check_def_id(i);
+                }
+            }
+            Res::ToolMod | Res::NonMacroAttr(..) | Res::Err => {}
+            _ => {
+                self.check_def_id(res.def_id());
+            }
+        }
+    }
+
+    fn lookup_and_handle_method(&mut self, id: hir::HirId) {
+        if let Some(def_id) = self.tables.type_dependent_def_id(id) {
+            self.check_def_id(def_id);
+        } else {
+            bug!("no type-dependent def for method");
+        }
+    }
+
+    fn handle_field_access(&mut self, lhs: &hir::Expr, hir_id: hir::HirId) {
+        match self.tables.expr_ty_adjusted(lhs).kind {
+            ty::Adt(def, _) => {
+                let index = self.tcx.field_index(hir_id, self.tables);
+                self.insert_def_id(def.non_enum_variant().fields[index].did);
+            }
+            ty::Tuple(..) => {}
+            _ => span_bug!(lhs.span, "named field access on non-ADT"),
+        }
+    }
+
+    fn handle_field_pattern_match(&mut self, lhs: &hir::Pat, res: Res, pats: &[hir::FieldPat]) {
+        let variant = match self.tables.node_type(lhs.hir_id).kind {
+            ty::Adt(adt, _) => adt.variant_of_res(res),
+            _ => span_bug!(lhs.span, "non-ADT in struct pattern")
+        };
+        for pat in pats {
+            if let PatKind::Wild = pat.pat.kind {
+                continue;
+            }
+            let index = self.tcx.field_index(pat.hir_id, self.tables);
+            self.insert_def_id(variant.fields[index].did);
+        }
+    }
+
+    fn mark_live_symbols(&mut self) {
+        let mut scanned = FxHashSet::default();
+        while let Some(id) = self.worklist.pop() {
+            if !scanned.insert(id) {
+                continue
+            }
+
+            // in the case of tuple struct constructors we want to check the item, not the generated
+            // tuple struct constructor function
+            let id = self.struct_constructors.get(&id).cloned().unwrap_or(id);
+
+            if let Some(node) = self.tcx.hir().find(id) {
+                self.live_symbols.insert(id);
+                self.visit_node(node);
+            }
+        }
+    }
+
+    fn visit_node(&mut self, node: Node<'tcx>) {
+        let had_repr_c = self.repr_has_repr_c;
+        self.repr_has_repr_c = false;
+        let had_inherited_pub_visibility = self.inherited_pub_visibility;
+        self.inherited_pub_visibility = false;
+        match node {
+            Node::Item(item) => {
+                match item.kind {
+                    hir::ItemKind::Struct(..) | hir::ItemKind::Union(..) => {
+                        let def_id = self.tcx.hir().local_def_id(item.hir_id);
+                        let def = self.tcx.adt_def(def_id);
+                        self.repr_has_repr_c = def.repr.c();
+
+                        intravisit::walk_item(self, &item);
+                    }
+                    hir::ItemKind::Enum(..) => {
+                        self.inherited_pub_visibility = item.vis.node.is_pub();
+
+                        intravisit::walk_item(self, &item);
+                    }
+                    hir::ItemKind::ForeignMod(..) => {}
+                    _ => {
+                        intravisit::walk_item(self, &item);
+                    }
+                }
+            }
+            Node::TraitItem(trait_item) => {
+                intravisit::walk_trait_item(self, trait_item);
+            }
+            Node::ImplItem(impl_item) => {
+                intravisit::walk_impl_item(self, impl_item);
+            }
+            Node::ForeignItem(foreign_item) => {
+                intravisit::walk_foreign_item(self, &foreign_item);
+            }
+            _ => {}
+        }
+        self.repr_has_repr_c = had_repr_c;
+        self.inherited_pub_visibility = had_inherited_pub_visibility;
+    }
+
+    fn mark_as_used_if_union(&mut self, adt: &ty::AdtDef, fields: &hir::HirVec<hir::Field>) {
+        if adt.is_union() && adt.non_enum_variant().fields.len() > 1 && adt.did.is_local() {
+            for field in fields {
+                let index = self.tcx.field_index(field.hir_id, self.tables);
+                self.insert_def_id(adt.non_enum_variant().fields[index].did);
+            }
+        }
+    }
+}
+
+impl<'a, 'tcx> Visitor<'tcx> for MarkSymbolVisitor<'a, 'tcx> {
+    fn nested_visit_map<'this>(&'this mut self) -> NestedVisitorMap<'this, 'tcx> {
+        NestedVisitorMap::None
+    }
+
+    fn visit_nested_body(&mut self, body: hir::BodyId) {
+        let old_tables = self.tables;
+        self.tables = self.tcx.body_tables(body);
+        let body = self.tcx.hir().body(body);
+        self.visit_body(body);
+        self.tables = old_tables;
+    }
+
+    fn visit_variant_data(&mut self, def: &'tcx hir::VariantData, _: ast::Name,
+                          _: &hir::Generics, _: hir::HirId, _: syntax_pos::Span) {
+        let has_repr_c = self.repr_has_repr_c;
+        let inherited_pub_visibility = self.inherited_pub_visibility;
+        let live_fields = def.fields().iter().filter(|f| {
+            has_repr_c || inherited_pub_visibility || f.vis.node.is_pub()
+        });
+        self.live_symbols.extend(live_fields.map(|f| f.hir_id));
+
+        intravisit::walk_struct_def(self, def);
+    }
+
+    fn visit_expr(&mut self, expr: &'tcx hir::Expr) {
+        match expr.kind {
+            hir::ExprKind::Path(ref qpath @ hir::QPath::TypeRelative(..)) => {
+                let res = self.tables.qpath_res(qpath, expr.hir_id);
+                self.handle_res(res);
+            }
+            hir::ExprKind::MethodCall(..) => {
+                self.lookup_and_handle_method(expr.hir_id);
+            }
+            hir::ExprKind::Field(ref lhs, ..) => {
+                self.handle_field_access(&lhs, expr.hir_id);
+            }
+            hir::ExprKind::Struct(_, ref fields, _) => {
+                if let ty::Adt(ref adt, _) = self.tables.expr_ty(expr).kind {
+                    self.mark_as_used_if_union(adt, fields);
+                }
+            }
+            _ => ()
+        }
+
+        intravisit::walk_expr(self, expr);
+    }
+
+    fn visit_arm(&mut self, arm: &'tcx hir::Arm) {
+        // Inside the body, ignore constructions of variants
+        // necessary for the pattern to match. Those construction sites
+        // can't be reached unless the variant is constructed elsewhere.
+        let len = self.ignore_variant_stack.len();
+        self.ignore_variant_stack.extend(arm.pat.necessary_variants());
+        intravisit::walk_arm(self, arm);
+        self.ignore_variant_stack.truncate(len);
+    }
+
+    fn visit_pat(&mut self, pat: &'tcx hir::Pat) {
+        match pat.kind {
+            PatKind::Struct(ref path, ref fields, _) => {
+                let res = self.tables.qpath_res(path, pat.hir_id);
+                self.handle_field_pattern_match(pat, res, fields);
+            }
+            PatKind::Path(ref qpath) => {
+                let res = self.tables.qpath_res(qpath, pat.hir_id);
+                self.handle_res(res);
+            }
+            _ => ()
+        }
+
+        self.in_pat = true;
+        intravisit::walk_pat(self, pat);
+        self.in_pat = false;
+    }
+
+    fn visit_path(&mut self, path: &'tcx hir::Path, _: hir::HirId) {
+        self.handle_res(path.res);
+        intravisit::walk_path(self, path);
+    }
+
+    fn visit_ty(&mut self, ty: &'tcx hir::Ty) {
+        match ty.kind {
+            TyKind::Def(item_id, _) => {
+                let item = self.tcx.hir().expect_item(item_id.id);
+                intravisit::walk_item(self, item);
+            }
+            _ => ()
+        }
+        intravisit::walk_ty(self, ty);
+    }
+
+    fn visit_anon_const(&mut self, c: &'tcx hir::AnonConst) {
+        self.live_symbols.insert(c.hir_id);
+        intravisit::walk_anon_const(self, c);
+    }
+}
+
+fn has_allow_dead_code_or_lang_attr(
+    tcx: TyCtxt<'_>,
+    id: hir::HirId,
+    attrs: &[ast::Attribute],
+) -> bool {
+    if attr::contains_name(attrs, sym::lang) {
+        return true;
+    }
+
+    // Stable attribute for #[lang = "panic_impl"]
+    if attr::contains_name(attrs, sym::panic_handler) {
+        return true;
+    }
+
+    // (To be) stable attribute for #[lang = "oom"]
+    if attr::contains_name(attrs, sym::alloc_error_handler) {
+        return true;
+    }
+
+    let def_id = tcx.hir().local_def_id(id);
+    let cg_attrs = tcx.codegen_fn_attrs(def_id);
+
+    // #[used], #[no_mangle], #[export_name], etc. also keep the item alive
+    // forcefully, e.g., for placing it in a specific section.
+    if cg_attrs.contains_extern_indicator() ||
+        cg_attrs.flags.contains(CodegenFnAttrFlags::USED) {
+        return true;
+    }
+
+    tcx.lint_level_at_node(lint::builtin::DEAD_CODE, id).0 == lint::Allow
+}
+
+// This visitor seeds items that
+//   1) We want to explicitly consider as live:
+//     * Item annotated with #[allow(dead_code)]
+//         - This is done so that if we want to suppress warnings for a
+//           group of dead functions, we only have to annotate the "root".
+//           For example, if both `f` and `g` are dead and `f` calls `g`,
+//           then annotating `f` with `#[allow(dead_code)]` will suppress
+//           warning for both `f` and `g`.
+//     * Item annotated with #[lang=".."]
+//         - This is because lang items are always callable from elsewhere.
+//   or
+//   2) We are not sure to be live or not
+//     * Implementation of a trait method
+struct LifeSeeder<'k, 'tcx> {
+    worklist: Vec<hir::HirId>,
+    krate: &'k hir::Crate,
+    tcx: TyCtxt<'tcx>,
+    // see `MarkSymbolVisitor::struct_constructors`
+    struct_constructors: FxHashMap<hir::HirId, hir::HirId>,
+}
+
+impl<'v, 'k, 'tcx> ItemLikeVisitor<'v> for LifeSeeder<'k, 'tcx> {
+    fn visit_item(&mut self, item: &hir::Item) {
+        let allow_dead_code = has_allow_dead_code_or_lang_attr(self.tcx,
+                                                               item.hir_id,
+                                                               &item.attrs);
+        if allow_dead_code {
+            self.worklist.push(item.hir_id);
+        }
+        match item.kind {
+            hir::ItemKind::Enum(ref enum_def, _) => {
+                if allow_dead_code {
+                    self.worklist.extend(enum_def.variants.iter().map(|variant| variant.id));
+                }
+
+                for variant in &enum_def.variants {
+                    if let Some(ctor_hir_id) = variant.data.ctor_hir_id() {
+                        self.struct_constructors.insert(ctor_hir_id, variant.id);
+                    }
+                }
+            }
+            hir::ItemKind::Trait(.., ref trait_item_refs) => {
+                for trait_item_ref in trait_item_refs {
+                    let trait_item = self.krate.trait_item(trait_item_ref.id);
+                    match trait_item.kind {
+                        hir::TraitItemKind::Const(_, Some(_)) |
+                        hir::TraitItemKind::Method(_, hir::TraitMethod::Provided(_)) => {
+                            if has_allow_dead_code_or_lang_attr(self.tcx,
+                                                                trait_item.hir_id,
+                                                                &trait_item.attrs) {
+                                self.worklist.push(trait_item.hir_id);
+                            }
+                        }
+                        _ => {}
+                    }
+                }
+            }
+            hir::ItemKind::Impl(.., ref opt_trait, _, ref impl_item_refs) => {
+                for impl_item_ref in impl_item_refs {
+                    let impl_item = self.krate.impl_item(impl_item_ref.id);
+                    if opt_trait.is_some() ||
+                            has_allow_dead_code_or_lang_attr(self.tcx,
+                                                             impl_item.hir_id,
+                                                             &impl_item.attrs) {
+                        self.worklist.push(impl_item_ref.id.hir_id);
+                    }
+                }
+            }
+            hir::ItemKind::Struct(ref variant_data, _) => {
+                if let Some(ctor_hir_id) = variant_data.ctor_hir_id() {
+                    self.struct_constructors.insert(ctor_hir_id, item.hir_id);
+                }
+            }
+            _ => ()
+        }
+    }
+
+    fn visit_trait_item(&mut self, _item: &hir::TraitItem) {
+        // ignore: we are handling this in `visit_item` above
+    }
+
+    fn visit_impl_item(&mut self, _item: &hir::ImplItem) {
+        // ignore: we are handling this in `visit_item` above
+    }
+}
+
+fn create_and_seed_worklist<'tcx>(
+    tcx: TyCtxt<'tcx>,
+    access_levels: &privacy::AccessLevels,
+    krate: &hir::Crate,
+) -> (Vec<hir::HirId>, FxHashMap<hir::HirId, hir::HirId>) {
+    let worklist = access_levels.map.iter().filter_map(|(&id, level)| {
+        if level >= &privacy::AccessLevel::Reachable {
+            Some(id)
+        } else {
+            None
+        }
+    }).chain(
+        // Seed entry point
+        tcx.entry_fn(LOCAL_CRATE).map(|(def_id, _)| tcx.hir().as_local_hir_id(def_id).unwrap())
+    ).collect::<Vec<_>>();
+
+    // Seed implemented trait items
+    let mut life_seeder = LifeSeeder {
+        worklist,
+        krate,
+        tcx,
+        struct_constructors: Default::default(),
+    };
+    krate.visit_all_item_likes(&mut life_seeder);
+
+    (life_seeder.worklist, life_seeder.struct_constructors)
+}
+
+fn find_live<'tcx>(
+    tcx: TyCtxt<'tcx>,
+    access_levels: &privacy::AccessLevels,
+    krate: &hir::Crate,
+) -> FxHashSet<hir::HirId> {
+    let (worklist, struct_constructors) = create_and_seed_worklist(tcx, access_levels, krate);
+    let mut symbol_visitor = MarkSymbolVisitor {
+        worklist,
+        tcx,
+        tables: &ty::TypeckTables::empty(None),
+        live_symbols: Default::default(),
+        repr_has_repr_c: false,
+        in_pat: false,
+        inherited_pub_visibility: false,
+        ignore_variant_stack: vec![],
+        struct_constructors,
+    };
+    symbol_visitor.mark_live_symbols();
+    symbol_visitor.live_symbols
+}
+
+struct DeadVisitor<'tcx> {
+    tcx: TyCtxt<'tcx>,
+    live_symbols: FxHashSet<hir::HirId>,
+}
+
+impl DeadVisitor<'tcx> {
+    fn should_warn_about_item(&mut self, item: &hir::Item) -> bool {
+        let should_warn = match item.kind {
+            hir::ItemKind::Static(..)
+            | hir::ItemKind::Const(..)
+            | hir::ItemKind::Fn(..)
+            | hir::ItemKind::TyAlias(..)
+            | hir::ItemKind::Enum(..)
+            | hir::ItemKind::Struct(..)
+            | hir::ItemKind::Union(..) => true,
+            _ => false
+        };
+        should_warn && !self.symbol_is_live(item.hir_id)
+    }
+
+    fn should_warn_about_field(&mut self, field: &hir::StructField) -> bool {
+        let field_type = self.tcx.type_of(self.tcx.hir().local_def_id(field.hir_id));
+        !field.is_positional()
+            && !self.symbol_is_live(field.hir_id)
+            && !field_type.is_phantom_data()
+            && !has_allow_dead_code_or_lang_attr(self.tcx, field.hir_id, &field.attrs)
+    }
+
+    fn should_warn_about_variant(&mut self, variant: &hir::Variant) -> bool {
+        !self.symbol_is_live(variant.id)
+            && !has_allow_dead_code_or_lang_attr(self.tcx,
+                                                 variant.id,
+                                                 &variant.attrs)
+    }
+
+    fn should_warn_about_foreign_item(&mut self, fi: &hir::ForeignItem) -> bool {
+        !self.symbol_is_live(fi.hir_id)
+            && !has_allow_dead_code_or_lang_attr(self.tcx, fi.hir_id, &fi.attrs)
+    }
+
+    // id := HIR id of an item's definition.
+    fn symbol_is_live(
+        &mut self,
+        id: hir::HirId,
+    ) -> bool {
+        if self.live_symbols.contains(&id) {
+            return true;
+        }
+        // If it's a type whose items are live, then it's live, too.
+        // This is done to handle the case where, for example, the static
+        // method of a private type is used, but the type itself is never
+        // called directly.
+        let def_id = self.tcx.hir().local_def_id(id);
+        let inherent_impls = self.tcx.inherent_impls(def_id);
+        for &impl_did in inherent_impls.iter() {
+            for &item_did in &self.tcx.associated_item_def_ids(impl_did)[..] {
+                if let Some(item_hir_id) = self.tcx.hir().as_local_hir_id(item_did) {
+                    if self.live_symbols.contains(&item_hir_id) {
+                        return true;
+                    }
+                }
+            }
+        }
+        false
+    }
+
+    fn warn_dead_code(&mut self,
+                      id: hir::HirId,
+                      span: syntax_pos::Span,
+                      name: ast::Name,
+                      node_type: &str,
+                      participle: &str) {
+        if !name.as_str().starts_with("_") {
+            self.tcx
+                .lint_hir(lint::builtin::DEAD_CODE,
+                          id,
+                          span,
+                          &format!("{} is never {}: `{}`",
+                                   node_type, participle, name));
+        }
+    }
+}
+
+impl Visitor<'tcx> for DeadVisitor<'tcx> {
+    /// Walk nested items in place so that we don't report dead-code
+    /// on inner functions when the outer function is already getting
+    /// an error. We could do this also by checking the parents, but
+    /// this is how the code is set up and it seems harmless enough.
+    fn nested_visit_map<'this>(&'this mut self) -> NestedVisitorMap<'this, 'tcx> {
+        NestedVisitorMap::All(&self.tcx.hir())
+    }
+
+    fn visit_item(&mut self, item: &'tcx hir::Item) {
+        if self.should_warn_about_item(item) {
+            // For items that have a definition with a signature followed by a
+            // block, point only at the signature.
+            let span = match item.kind {
+                hir::ItemKind::Fn(..) |
+                hir::ItemKind::Mod(..) |
+                hir::ItemKind::Enum(..) |
+                hir::ItemKind::Struct(..) |
+                hir::ItemKind::Union(..) |
+                hir::ItemKind::Trait(..) |
+                hir::ItemKind::Impl(..) => self.tcx.sess.source_map().def_span(item.span),
+                _ => item.span,
+            };
+            let participle = match item.kind {
+                hir::ItemKind::Struct(..) => "constructed", // Issue #52325
+                _ => "used"
+            };
+            self.warn_dead_code(
+                item.hir_id,
+                span,
+                item.ident.name,
+                item.kind.descriptive_variant(),
+                participle,
+            );
+        } else {
+            // Only continue if we didn't warn
+            intravisit::walk_item(self, item);
+        }
+    }
+
+    fn visit_variant(&mut self,
+                     variant: &'tcx hir::Variant,
+                     g: &'tcx hir::Generics,
+                     id: hir::HirId) {
+        if self.should_warn_about_variant(&variant) {
+            self.warn_dead_code(variant.id, variant.span, variant.ident.name,
+                                "variant", "constructed");
+        } else {
+            intravisit::walk_variant(self, variant, g, id);
+        }
+    }
+
+    fn visit_foreign_item(&mut self, fi: &'tcx hir::ForeignItem) {
+        if self.should_warn_about_foreign_item(fi) {
+            self.warn_dead_code(fi.hir_id, fi.span, fi.ident.name,
+                                fi.kind.descriptive_variant(), "used");
+        }
+        intravisit::walk_foreign_item(self, fi);
+    }
+
+    fn visit_struct_field(&mut self, field: &'tcx hir::StructField) {
+        if self.should_warn_about_field(&field) {
+            self.warn_dead_code(field.hir_id, field.span, field.ident.name, "field", "used");
+        }
+        intravisit::walk_struct_field(self, field);
+    }
+
+    fn visit_impl_item(&mut self, impl_item: &'tcx hir::ImplItem) {
+        match impl_item.kind {
+            hir::ImplItemKind::Const(_, body_id) => {
+                if !self.symbol_is_live(impl_item.hir_id) {
+                    self.warn_dead_code(impl_item.hir_id,
+                                        impl_item.span,
+                                        impl_item.ident.name,
+                                        "associated const",
+                                        "used");
+                }
+                self.visit_nested_body(body_id)
+            }
+            hir::ImplItemKind::Method(_, body_id) => {
+                if !self.symbol_is_live(impl_item.hir_id) {
+                    let span = self.tcx.sess.source_map().def_span(impl_item.span);
+                    self.warn_dead_code(impl_item.hir_id, span, impl_item.ident.name, "method",
+                        "used");
+                }
+                self.visit_nested_body(body_id)
+            }
+            hir::ImplItemKind::OpaqueTy(..) |
+            hir::ImplItemKind::TyAlias(..) => {}
+        }
+    }
+
+    // Override so that we don't warn about the trait item itself.
+    fn visit_trait_item(&mut self, trait_item: &'tcx hir::TraitItem) {
+        match trait_item.kind {
+            hir::TraitItemKind::Const(_, Some(body_id)) |
+            hir::TraitItemKind::Method(_, hir::TraitMethod::Provided(body_id)) => {
+                self.visit_nested_body(body_id)
+            }
+            hir::TraitItemKind::Const(_, None) |
+            hir::TraitItemKind::Method(_, hir::TraitMethod::Required(_)) |
+            hir::TraitItemKind::Type(..) => {}
+        }
+    }
+}
+
+pub fn check_crate(tcx: TyCtxt<'_>) {
+    let access_levels = &tcx.privacy_access_levels(LOCAL_CRATE);
+    let krate = tcx.hir().krate();
+    let live_symbols = find_live(tcx, access_levels, krate);
+    let mut visitor = DeadVisitor {
+        tcx,
+        live_symbols,
+    };
+    intravisit::walk_crate(&mut visitor, krate);
+}
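What the pass above reports, as a small sketch (an editor's illustration, default `dead_code` lint level assumed): items never reached from a live root are warned about, and annotating only the root of a dead group with `#[allow(dead_code)]` silences the whole group, as described in the `LifeSeeder` comment.

```rust
// Compiles, but the `dead_code` lint warns that `unused` is never used:
// it is not reachable from `main` or any other live root.
fn unused() {}

// Seeded as live by the allow attribute, so neither it nor anything it
// calls is reported.
#[allow(dead_code)]
fn quiet_root() {
    quietly_used();
}

fn quietly_used() {}

fn main() {
    println!("only `main` is live here");
}
```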
diff --git a/src/librustc_passes/entry.rs b/src/librustc_passes/entry.rs
new file mode 100644 (file)
index 0000000..bf68807
--- /dev/null
@@ -0,0 +1,202 @@
+use rustc::hir::map as hir_map;
+use rustc::hir::def_id::{CrateNum, CRATE_DEF_INDEX, DefId, LOCAL_CRATE};
+use rustc::session::{config, Session};
+use rustc::session::config::EntryFnType;
+use syntax::attr;
+use syntax::entry::EntryPointType;
+use syntax::symbol::sym;
+use syntax_pos::Span;
+use rustc::hir::{HirId, Item, ItemKind, ImplItem, TraitItem};
+use rustc::hir::itemlikevisit::ItemLikeVisitor;
+use rustc::ty::TyCtxt;
+use rustc::ty::query::Providers;
+
+struct EntryContext<'a, 'tcx> {
+    session: &'a Session,
+
+    map: &'a hir_map::Map<'tcx>,
+
+    /// The top-level function called `main`.
+    main_fn: Option<(HirId, Span)>,
+
+    /// The function that has attribute named `main`.
+    attr_main_fn: Option<(HirId, Span)>,
+
+    /// The function that has the attribute 'start' on it.
+    start_fn: Option<(HirId, Span)>,
+
+    /// The functions that one might think are `main` but aren't, e.g.
+    /// main functions not defined at the top level. For diagnostics.
+    non_main_fns: Vec<(HirId, Span)>,
+}
+
+impl<'a, 'tcx> ItemLikeVisitor<'tcx> for EntryContext<'a, 'tcx> {
+    fn visit_item(&mut self, item: &'tcx Item) {
+        let def_id = self.map.local_def_id(item.hir_id);
+        let def_key = self.map.def_key(def_id);
+        let at_root = def_key.parent == Some(CRATE_DEF_INDEX);
+        find_item(item, self, at_root);
+    }
+
+    fn visit_trait_item(&mut self, _trait_item: &'tcx TraitItem) {
+        // Entry fn is never a trait item.
+    }
+
+    fn visit_impl_item(&mut self, _impl_item: &'tcx ImplItem) {
+        // Entry fn is never an impl item.
+    }
+}
+
+fn entry_fn(tcx: TyCtxt<'_>, cnum: CrateNum) -> Option<(DefId, EntryFnType)> {
+    assert_eq!(cnum, LOCAL_CRATE);
+
+    let any_exe = tcx.sess.crate_types.borrow().iter().any(|ty| {
+        *ty == config::CrateType::Executable
+    });
+    if !any_exe {
+        // No need to find a main function.
+        return None;
+    }
+
+    // If the user wants no main function at all, then stop here.
+    if attr::contains_name(&tcx.hir().krate().attrs, sym::no_main) {
+        return None;
+    }
+
+    let mut ctxt = EntryContext {
+        session: tcx.sess,
+        map: tcx.hir(),
+        main_fn: None,
+        attr_main_fn: None,
+        start_fn: None,
+        non_main_fns: Vec::new(),
+    };
+
+    tcx.hir().krate().visit_all_item_likes(&mut ctxt);
+
+    configure_main(tcx, &ctxt)
+}
+
+// Beware, this is duplicated in `libsyntax/entry.rs`, so make sure to keep
+// them in sync.
+fn entry_point_type(item: &Item, at_root: bool) -> EntryPointType {
+    match item.kind {
+        ItemKind::Fn(..) => {
+            if attr::contains_name(&item.attrs, sym::start) {
+                EntryPointType::Start
+            } else if attr::contains_name(&item.attrs, sym::main) {
+                EntryPointType::MainAttr
+            } else if item.ident.name == sym::main {
+                if at_root {
+                    // This is a top-level function so can be `main`.
+                    EntryPointType::MainNamed
+                } else {
+                    EntryPointType::OtherMain
+                }
+            } else {
+                EntryPointType::None
+            }
+        }
+        _ => EntryPointType::None,
+    }
+}
+
+
+fn find_item(item: &Item, ctxt: &mut EntryContext<'_, '_>, at_root: bool) {
+    match entry_point_type(item, at_root) {
+        EntryPointType::MainNamed => {
+            if ctxt.main_fn.is_none() {
+                ctxt.main_fn = Some((item.hir_id, item.span));
+            } else {
+                span_err!(ctxt.session, item.span, E0136,
+                          "multiple `main` functions");
+            }
+        },
+        EntryPointType::OtherMain => {
+            ctxt.non_main_fns.push((item.hir_id, item.span));
+        },
+        EntryPointType::MainAttr => {
+            if ctxt.attr_main_fn.is_none() {
+                ctxt.attr_main_fn = Some((item.hir_id, item.span));
+            } else {
+                struct_span_err!(ctxt.session, item.span, E0137,
+                                 "multiple functions with a `#[main]` attribute")
+                .span_label(item.span, "additional `#[main]` function")
+                .span_label(ctxt.attr_main_fn.unwrap().1, "first `#[main]` function")
+                .emit();
+            }
+        },
+        EntryPointType::Start => {
+            if ctxt.start_fn.is_none() {
+                ctxt.start_fn = Some((item.hir_id, item.span));
+            } else {
+                struct_span_err!(ctxt.session, item.span, E0138, "multiple `start` functions")
+                    .span_label(ctxt.start_fn.unwrap().1, "previous `start` function here")
+                    .span_label(item.span, "multiple `start` functions")
+                    .emit();
+            }
+        }
+        EntryPointType::None => (),
+    }
+}
+
+fn configure_main(tcx: TyCtxt<'_>, visitor: &EntryContext<'_, '_>) -> Option<(DefId, EntryFnType)> {
+    if let Some((hir_id, _)) = visitor.start_fn {
+        Some((tcx.hir().local_def_id(hir_id), EntryFnType::Start))
+    } else if let Some((hir_id, _)) = visitor.attr_main_fn {
+        Some((tcx.hir().local_def_id(hir_id), EntryFnType::Main))
+    } else if let Some((hir_id, _)) = visitor.main_fn {
+        Some((tcx.hir().local_def_id(hir_id), EntryFnType::Main))
+    } else {
+        no_main_err(tcx, visitor);
+        None
+    }
+}
+
+fn no_main_err(tcx: TyCtxt<'_>, visitor: &EntryContext<'_, '_>) {
+    // There is no main function.
+    let mut err = struct_err!(tcx.sess, E0601,
+        "`main` function not found in crate `{}`", tcx.crate_name(LOCAL_CRATE));
+    let filename = &tcx.sess.local_crate_source_file;
+    let note = if !visitor.non_main_fns.is_empty() {
+        for &(_, span) in &visitor.non_main_fns {
+            err.span_note(span, "here is a function named `main`");
+        }
+        err.note("you have one or more functions named `main` not defined at the crate level");
+        err.help("either move the `main` function definitions or attach the `#[main]` attribute \
+                  to one of them");
+        // There were some functions named `main` though. Try to give the user a hint.
+        format!("the main function must be defined at the crate level{}",
+                 filename.as_ref().map(|f| format!(" (in `{}`)", f.display())).unwrap_or_default())
+    } else if let Some(filename) = filename {
+        format!("consider adding a `main` function to `{}`", filename.display())
+    } else {
+        String::from("consider adding a `main` function at the crate level")
+    };
+    let sp = tcx.hir().krate().span;
+    // The file may be empty, which leads to the diagnostic machinery not emitting this
+    // note. This is a relatively simple way to detect that case and emit a span-less
+    // note instead.
+    if let Ok(_) = tcx.sess.source_map().lookup_line(sp.lo()) {
+        err.set_span(sp);
+        err.span_label(sp, &note);
+    } else {
+        err.note(&note);
+    }
+    if tcx.sess.teach(&err.get_code().unwrap()) {
+        err.note("If you don't know the basics of Rust, you can go look to the Rust Book \
+                  to get started: https://doc.rust-lang.org/book/");
+    }
+    err.emit();
+}
+
+pub fn find_entry_point(tcx: TyCtxt<'_>) -> Option<(DefId, EntryFnType)> {
+    tcx.entry_fn(LOCAL_CRATE)
+}
+
+pub fn provide(providers: &mut Providers<'_>) {
+    *providers = Providers {
+        entry_fn,
+        ..*providers
+    };
+}
index af07c790e2a8799a35e82878719ab7d1af97d0f4..a2626617afec33f880c11b431a50a54d5d40e730 100644 (file)
@@ -1,12 +1,15 @@
 syntax::register_diagnostics! {
-/*
 E0014: r##"
+#### Note: this error code is no longer emitted by the compiler.
+
 Constants can only be initialized by a constant value or, in a future
 version of Rust, a call to a const function. This error indicates the use
 of a path (like a::b, or x) denoting something other than one of these
-allowed items. Erroneous code xample:
+allowed items.
 
-```compile_fail
+Erroneous code example:
+
+```
 const FOO: i32 = { let x = 0; x }; // 'x' isn't a constant nor a function!
 ```
 
 const FOO2: i32 = { 0 }; // but brackets are useless here
 ```
 "##,
-*/
 
 E0130: r##"
 You declared a pattern as an argument in a foreign function declaration.
+
 Erroneous code example:
 
 ```compile_fail
@@ -53,6 +56,81 @@ struct SomeStruct {
 ```
 "##,
 
+// This shouldn't really ever trigger since the repeated value error comes first
+E0136: r##"
+A binary can only have one entry point, and by default that entry point is the
+function `main()`. If there are multiple such functions, please rename one.
+
+Erroneous code example:
+
+```compile_fail,E0136
+fn main() {
+    // ...
+}
+
+// ...
+
+fn main() { // error!
+    // ...
+}
+```
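+
+One way to fix this is to keep a single `main` function and give any other
+function a different name:
+
+```
+fn main() {
+    // ...
+}
+
+fn other() {
+    // ...
+}
+```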
+"##,
+
+E0137: r##"
+More than one function was declared with the `#[main]` attribute.
+
+Erroneous code example:
+
+```compile_fail,E0137
+#![feature(main)]
+
+#[main]
+fn foo() {}
+
+#[main]
+fn f() {} // error: multiple functions with a `#[main]` attribute
+```
+
+This error indicates that the compiler found multiple functions with the
+`#[main]` attribute. This is an error because there must be a unique entry
+point into a Rust program. Example:
+
+```
+#![feature(main)]
+
+#[main]
+fn f() {} // ok!
+```
+"##,
+
+E0138: r##"
+More than one function was declared with the `#[start]` attribute.
+
+Erroneous code example:
+
+```compile_fail,E0138
+#![feature(start)]
+
+#[start]
+fn foo(argc: isize, argv: *const *const u8) -> isize {}
+
+#[start]
+fn f(argc: isize, argv: *const *const u8) -> isize {}
+// error: multiple 'start' functions
+```
+
+This error indicates that the compiler found multiple functions with the
+`#[start]` attribute. This is an error because there must be a unique entry
+point into a Rust program. Example:
+
+```
+#![feature(start)]
+
+#[start]
+fn foo(argc: isize, argv: *const *const u8) -> isize { 0 } // ok!
+```
+"##,
+
 E0197: r##"
 Inherent implementations (ones that do not implement a trait but provide
 methods associated with a type) are always safe because they are not
@@ -198,20 +276,115 @@ fn foo() {}
 ```
 "##,
 
+E0512: r##"
+Transmute with two differently sized types was attempted. Erroneous code
+example:
 
-E0590: r##"
-`break` or `continue` must include a label when used in the condition of a
-`while` loop.
+```compile_fail,E0512
+fn takes_u8(_: u8) {}
 
-Example of erroneous code:
+fn main() {
+    unsafe { takes_u8(::std::mem::transmute(0u16)); }
+    // error: cannot transmute between types of different sizes,
+    //        or dependently-sized types
+}
+```
+
+Please use types with the same size or use the expected type directly. Example:
 
-```compile_fail
-while break {}
 ```
+fn takes_u8(_: u8) {}
 
-To fix this, add a label specifying which loop is being broken out of:
+fn main() {
+    unsafe { takes_u8(::std::mem::transmute(0i8)); } // ok!
+    // or:
+    unsafe { takes_u8(0u8); } // ok!
+}
 ```
-'foo: while break 'foo {}
+"##,
+
+E0561: r##"
+A non-ident or non-wildcard pattern has been used as a parameter of a function
+pointer type.
+
+Erroneous code example:
+
+```compile_fail,E0561
+type A1 = fn(mut param: u8); // error!
+type A2 = fn(&param: u32); // error!
+```
+
+When using an alias over a function type, you cannot e.g. denote a parameter as
+being mutable.
+
+To fix the issue, remove patterns (`_` is allowed though). Example:
+
+```
+type A1 = fn(param: u8); // ok!
+type A2 = fn(_: u32); // ok!
+```
+
+You can also omit the parameter name:
+
+```
+type A3 = fn(i16); // ok!
+```
+"##,
+
+E0567: r##"
+Generics have been used on an auto trait.
+
+Erroneous code example:
+
+```compile_fail,E0567
+#![feature(optin_builtin_traits)]
+
+auto trait Generic<T> {} // error!
+
+fn main() {}
+```
+
+Since an auto trait is implemented on all existing types, the
+compiler would not be able to infer the types of the trait's generic
+parameters.
+
+To fix this issue, just remove the generics:
+
+```
+#![feature(optin_builtin_traits)]
+
+auto trait Generic {} // ok!
+
+fn main() {}
+```
+"##,
+
+E0568: r##"
+A super trait has been added to an auto trait.
+
+Erroneous code example:
+
+```compile_fail,E0568
+#![feature(optin_builtin_traits)]
+
+auto trait Bound : Copy {} // error!
+
+fn main() {}
+```
+
+Since an auto trait is implemented on all existing types, adding a super trait
+would filter out a lot of those types. In the example above, almost none of
+the existing types could implement `Bound` because very few of them have the
+`Copy` trait.
+
+To fix this issue, just remove the super trait:
+
+```
+#![feature(optin_builtin_traits)]
+
+auto trait Bound {} // ok!
+
+fn main() {}
 ```
 "##,
 
@@ -249,6 +422,115 @@ fn foo() {}
 ```
 "##,
 
+E0590: r##"
+`break` or `continue` must include a label when used in the condition of a
+`while` loop.
+
+Example of erroneous code:
+
+```compile_fail
+while break {}
+```
+
+To fix this, add a label specifying which loop is being broken out of:
+```
+'foo: while break 'foo {}
+```
+"##,
+
+E0591: r##"
+Per [RFC 401][rfc401], if you have a function declaration `foo`:
+
+```
+// For the purposes of this explanation, all of these
+// different kinds of `fn` declarations are equivalent:
+struct S;
+fn foo(x: S) { /* ... */ }
+# #[cfg(for_demonstration_only)]
+extern "C" { fn foo(x: S); }
+# #[cfg(for_demonstration_only)]
+impl S { fn foo(self) { /* ... */ } }
+```
+
+the type of `foo` is **not** `fn(S)`, as one might expect.
+Rather, it is a unique, zero-sized marker type written here as `typeof(foo)`.
+However, `typeof(foo)` can be _coerced_ to a function pointer `fn(S)`,
+so you rarely notice this:
+
+```
+# struct S;
+# fn foo(_: S) {}
+let x: fn(S) = foo; // OK, coerces
+```
+
+The reason that this matters is that the type `fn(S)` is not specific to
+any particular function: it's a function _pointer_. So calling `x()` results
+in a virtual call, whereas `foo()` is statically dispatched, because the type
+of `foo` tells us precisely what function is being called.
+
+As noted above, coercions mean that most code doesn't have to be
+concerned with this distinction. However, you can tell the difference
+when using **transmute** to convert a fn item into a fn pointer.
+
+This is sometimes done as part of an FFI:
+
+```compile_fail,E0591
+extern "C" fn foo(userdata: Box<i32>) {
+    /* ... */
+}
+
+# fn callback(_: extern "C" fn(*mut i32)) {}
+# use std::mem::transmute;
+# unsafe {
+let f: extern "C" fn(*mut i32) = transmute(foo);
+callback(f);
+# }
+```
+
+Here, transmute is being used to convert the types of the fn arguments.
+This pattern is incorrect because the type of `foo` is a function
+**item** (`typeof(foo)`), which is zero-sized, while the target type (`fn()`)
+is a function pointer, which is not zero-sized.
+This pattern should be rewritten. There are a few possible ways to do this:
+
+- change the original fn declaration to match the expected signature,
+  and do the cast in the fn body (the preferred option)
+- cast the fn item to a fn pointer before calling transmute, as shown here:
+
+    ```
+    # extern "C" fn foo(_: Box<i32>) {}
+    # use std::mem::transmute;
+    # unsafe {
+    let f: extern "C" fn(*mut i32) = transmute(foo as extern "C" fn(_));
+    let f: extern "C" fn(*mut i32) = transmute(foo as usize); // works too
+    # }
+    ```
+
+The same applies to transmutes to `*mut fn()`, which were observed in practice.
+Note though that use of this type is generally incorrect.
+The intention is typically to describe a function pointer, but just `fn()`
+alone suffices for that. `*mut fn()` is a pointer to a fn pointer.
+(Since these values are typically just passed to C code, however, this rarely
+makes a difference in practice.)
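+
+As a purely illustrative comparison (the alias names below are made up):
+
+```
+// illustrative aliases only
+type Callback = extern "C" fn(i32);         // a function pointer
+type CallbackPtr = *mut extern "C" fn(i32); // a pointer to a function pointer
+```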
+
+[rfc401]: https://github.com/rust-lang/rfcs/blob/master/text/0401-coercions.md
+"##,
+
+E0601: r##"
+No `main` function was found in a binary crate. To fix this error, add a
+`main` function. For example:
+
+```
+fn main() {
+    // Your program will start here.
+    println!("Hello world!");
+}
+```
+
+If you don't know the basics of Rust, you can have a look at the Rust Book to
+get started: https://doc.rust-lang.org/book/
+"##,
+
 E0642: r##"
 Trait methods currently cannot take patterns as arguments.
 
@@ -319,12 +601,10 @@ async fn foo() {}
 
 Switch to the Rust 2018 edition to use `async fn`.
 "##,
+
 ;
     E0226, // only a single explicit lifetime bound is permitted
     E0472, // asm! is unsupported on this target
-    E0561, // patterns aren't allowed in function pointer types
-    E0567, // auto traits can not have generic parameters
-    E0568, // auto traits can not have super traits
     E0666, // nested `impl Trait` is illegal
     E0667, // `impl Trait` in projections
     E0696, // `continue` pointing to a labeled block
diff --git a/src/librustc_passes/intrinsicck.rs b/src/librustc_passes/intrinsicck.rs
new file mode 100644 (file)
index 0000000..91a7e9f
--- /dev/null
@@ -0,0 +1,170 @@
+use rustc::hir::def::{Res, DefKind};
+use rustc::hir::def_id::DefId;
+use rustc::ty::{self, Ty, TyCtxt};
+use rustc::ty::layout::{LayoutError, Pointer, SizeSkeleton, VariantIdx};
+use rustc::ty::query::Providers;
+
+use rustc_target::spec::abi::Abi::RustIntrinsic;
+use rustc_index::vec::Idx;
+use syntax_pos::{Span, sym};
+use rustc::hir::intravisit::{self, Visitor, NestedVisitorMap};
+use rustc::hir;
+
+fn check_mod_intrinsics(tcx: TyCtxt<'_>, module_def_id: DefId) {
+    tcx.hir().visit_item_likes_in_module(
+        module_def_id,
+        &mut ItemVisitor { tcx }.as_deep_visitor()
+    );
+}
+
+pub fn provide(providers: &mut Providers<'_>) {
+    *providers = Providers {
+        check_mod_intrinsics,
+        ..*providers
+    };
+}
+
+struct ItemVisitor<'tcx> {
+    tcx: TyCtxt<'tcx>,
+}
+
+struct ExprVisitor<'tcx> {
+    tcx: TyCtxt<'tcx>,
+    tables: &'tcx ty::TypeckTables<'tcx>,
+    param_env: ty::ParamEnv<'tcx>,
+}
+
+/// If the type is `Option<T>`, it will return `T`, otherwise
+/// the type itself. Works on most `Option`-like types.
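+/// For example (illustrative only): `Option<&T>` unpacks to `&T`, and an
+/// `enum E { A, B(u32) }` unpacks to `u32`; types with a `#[repr(C)]` or an
+/// integer repr, or with fields in both variants, are returned unchanged.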
+fn unpack_option_like<'tcx>(tcx: TyCtxt<'tcx>, ty: Ty<'tcx>) -> Ty<'tcx> {
+    let (def, substs) = match ty.kind {
+        ty::Adt(def, substs) => (def, substs),
+        _ => return ty
+    };
+
+    if def.variants.len() == 2 && !def.repr.c() && def.repr.int.is_none() {
+        let data_idx;
+
+        let one = VariantIdx::new(1);
+        let zero = VariantIdx::new(0);
+
+        if def.variants[zero].fields.is_empty() {
+            data_idx = one;
+        } else if def.variants[one].fields.is_empty() {
+            data_idx = zero;
+        } else {
+            return ty;
+        }
+
+        if def.variants[data_idx].fields.len() == 1 {
+            return def.variants[data_idx].fields[0].ty(tcx, substs);
+        }
+    }
+
+    ty
+}
+
+impl ExprVisitor<'tcx> {
+    fn def_id_is_transmute(&self, def_id: DefId) -> bool {
+        self.tcx.fn_sig(def_id).abi() == RustIntrinsic &&
+        self.tcx.item_name(def_id) == sym::transmute
+    }
+
+    fn check_transmute(&self, span: Span, from: Ty<'tcx>, to: Ty<'tcx>) {
+        let sk_from = SizeSkeleton::compute(from, self.tcx, self.param_env);
+        let sk_to = SizeSkeleton::compute(to, self.tcx, self.param_env);
+
+        // Check for same size using the skeletons.
+        if let (Ok(sk_from), Ok(sk_to)) = (sk_from, sk_to) {
+            if sk_from.same_size(sk_to) {
+                return;
+            }
+
+            // Special-case transmuting from `typeof(function)` and
+            // `Option<typeof(function)>` to present a clearer error.
+            let from = unpack_option_like(self.tcx, from);
+            if let (&ty::FnDef(..), SizeSkeleton::Known(size_to)) = (&from.kind, sk_to) {
+                if size_to == Pointer.size(&self.tcx) {
+                    struct_span_err!(self.tcx.sess, span, E0591,
+                                     "can't transmute zero-sized type")
+                        .note(&format!("source type: {}", from))
+                        .note(&format!("target type: {}", to))
+                        .help("cast with `as` to a pointer instead")
+                        .emit();
+                    return;
+                }
+            }
+        }
+
+        // Try to display a sensible error with as much information as possible.
+        let skeleton_string = |ty: Ty<'tcx>, sk| {
+            match sk {
+                Ok(SizeSkeleton::Known(size)) => {
+                    format!("{} bits", size.bits())
+                }
+                Ok(SizeSkeleton::Pointer { tail, .. }) => {
+                    format!("pointer to `{}`", tail)
+                }
+                Err(LayoutError::Unknown(bad)) => {
+                    if bad == ty {
+                        "this type does not have a fixed size".to_owned()
+                    } else {
+                        format!("size can vary because of {}", bad)
+                    }
+                }
+                Err(err) => err.to_string()
+            }
+        };
+
+        let mut err = struct_span_err!(self.tcx.sess, span, E0512,
+                                       "cannot transmute between types of different sizes, \
+                                        or dependently-sized types");
+        if from == to {
+            err.note(&format!("`{}` does not have a fixed size", from));
+        } else {
+            err.note(&format!("source type: `{}` ({})", from, skeleton_string(from, sk_from)))
+                .note(&format!("target type: `{}` ({})", to, skeleton_string(to, sk_to)));
+        }
+        err.emit()
+    }
+}
+
+impl Visitor<'tcx> for ItemVisitor<'tcx> {
+    fn nested_visit_map<'this>(&'this mut self) -> NestedVisitorMap<'this, 'tcx> {
+        NestedVisitorMap::None
+    }
+
+    fn visit_nested_body(&mut self, body_id: hir::BodyId) {
+        let owner_def_id = self.tcx.hir().body_owner_def_id(body_id);
+        let body = self.tcx.hir().body(body_id);
+        let param_env = self.tcx.param_env(owner_def_id);
+        let tables = self.tcx.typeck_tables_of(owner_def_id);
+        ExprVisitor { tcx: self.tcx, param_env, tables }.visit_body(body);
+        self.visit_body(body);
+    }
+}
+
+impl Visitor<'tcx> for ExprVisitor<'tcx> {
+    fn nested_visit_map<'this>(&'this mut self) -> NestedVisitorMap<'this, 'tcx> {
+        NestedVisitorMap::None
+    }
+
+    fn visit_expr(&mut self, expr: &'tcx hir::Expr) {
+        let res = if let hir::ExprKind::Path(ref qpath) = expr.kind {
+            self.tables.qpath_res(qpath, expr.hir_id)
+        } else {
+            Res::Err
+        };
+        if let Res::Def(DefKind::Fn, did) = res {
+            if self.def_id_is_transmute(did) {
+                let typ = self.tables.node_type(expr.hir_id);
+                let sig = typ.fn_sig(self.tcx);
+                let from = sig.inputs().skip_binder()[0];
+                let to = *sig.output().skip_binder();
+                self.check_transmute(expr.span, from, to);
+            }
+        }
+
+        intravisit::walk_expr(self, expr);
+    }
+}
index 6c7958fb365dd1ddac040a0b202583642a59a1e4..db59d8e101f77df07c0655f3e7bc5c761196cd7a 100644 (file)
 
 #[macro_use]
 extern crate rustc;
+#[macro_use]
+extern crate log;
+#[macro_use]
+extern crate syntax;
 
 use rustc::ty::query::Providers;
 
 pub mod hir_stats;
 pub mod layout_test;
 pub mod loops;
+pub mod dead;
+pub mod entry;
+mod liveness;
+mod intrinsicck;
 
 pub fn provide(providers: &mut Providers<'_>) {
+    entry::provide(providers);
     loops::provide(providers);
+    liveness::provide(providers);
+    intrinsicck::provide(providers);
 }
diff --git a/src/librustc_passes/liveness.rs b/src/librustc_passes/liveness.rs
new file mode 100644 (file)
index 0000000..fb06808
--- /dev/null
@@ -0,0 +1,1568 @@
+//! A classic liveness analysis based on dataflow over the AST. Computes,
+//! for each local variable in a function, whether that variable is live
+//! at a given point. Program execution points are identified by their
+//! IDs.
+//!
+//! # Basic idea
+//!
+//! The basic model is that each local variable is assigned an index. We
+//! represent sets of local variables using a vector indexed by this
+//! index. The value in the vector is either 0, indicating the variable
+//! is dead, or the ID of an expression that uses the variable.
+//!
+//! We conceptually walk over the AST in reverse execution order. If we
+//! find a use of a variable, we add it to the set of live variables. If
+//! we find an assignment to a variable, we remove it from the set of live
+//! variables. When we have to merge two flows, we take the union of
+//! those two flows -- if the variable is live on both paths, we simply
+//! pick one ID. In the event of loops, we continue doing this until a
+//! fixed point is reached.
+//!
+//! ## Checking initialization
+//!
+//! At the function entry point, all variables must be dead. If this is
+//! not the case, we can report an error using the ID found in the set of
+//! live variables, which identifies a use of the variable which is not
+//! dominated by an assignment.
+//!
+//! ## Checking moves
+//!
+//! After each explicit move, the variable must be dead.
+//!
+//! ## Computing last uses
+//!
+//! Any use of the variable where the variable is dead afterwards is a
+//! last use.
+//!
+//! # Implementation details
+//!
+//! The actual implementation contains two (nested) walks over the AST.
+//! The outer walk has the job of building up the ir_maps instance for the
+//! enclosing function. On the way down the tree, it identifies those AST
+//! nodes and variable IDs that will be needed for the liveness analysis
+//! and assigns them contiguous IDs. The liveness ID for an AST node is
+//! called a `live_node` (it's a newtype'd `u32`) and the ID for a variable
+//! is called a `variable` (another newtype'd `u32`).
+//!
+//! On the way back up the tree, as we are about to exit from a function
+//! declaration we allocate a `liveness` instance. Now that we know
+//! precisely how many nodes and variables we need, we can allocate all
+//! the various arrays that we will need to precisely the right size. We then
+//! perform the actual propagation on the `liveness` instance.
+//!
+//! This propagation is encoded in the various `propagate_through_*()`
+//! methods. It effectively does a reverse walk of the AST; whenever we
+//! reach a loop node, we iterate until a fixed point is reached.
+//!
+//! ## The `RWU` struct
+//!
+//! At each live node `N`, we track three pieces of information for each
+//! variable `V` (these are encapsulated in the `RWU` struct):
+//!
+//! - `reader`: the `LiveNode` ID of some node which will read the value
+//!    that `V` holds on entry to `N`. Formally: a node `M` such
+//!    that there exists a path `P` from `N` to `M` where `P` does not
+//!    write `V`. If the `reader` is `invalid_node()`, then the current
+//!    value will never be read (the variable is dead, essentially).
+//!
+//! - `writer`: the `LiveNode` ID of some node which will write the
+//!    variable `V` and which is reachable from `N`. Formally: a node `M`
+//!    such that there exists a path `P` from `N` to `M` and `M` writes
+//!    `V`. If the `writer` is `invalid_node()`, then there is no writer
+//!    of `V` that follows `N`.
+//!
+//! - `used`: a boolean value indicating whether `V` is *used*. We
+//!   distinguish a *read* from a *use* in that a *use* is some read that
+//!   is not just used to generate a new value. For example, `x += 1` is
+//!   a read but not a use. This is used to generate better warnings.
+//!
+//! ## Special Variables
+//!
+//! We generate various special variables for various, well, special purposes.
+//! These are described in the `specials` struct:
+//!
+//! - `exit_ln`: a live node that is generated to represent every 'exit' from
+//!   the function, whether it be by explicit return, panic, or other means.
+//!
+//! - `fallthrough_ln`: a live node that represents a fallthrough
+//!
+//! - `clean_exit_var`: a synthetic variable that is only 'read' from the
+//!   fallthrough node. It is only live if the function could converge
+//!   via means other than an explicit `return` expression. That is, it is
+//!   only dead if the end of the function's block can never be reached.
+//!   It is the responsibility of typeck to ensure that there are no
+//!   `return` expressions in a function declared as diverging.
+
+use self::LiveNodeKind::*;
+use self::VarKind::*;
+
+use rustc::hir;
+use rustc::hir::{Expr, HirId};
+use rustc::hir::def::*;
+use rustc::hir::def_id::DefId;
+use rustc::hir::intravisit::{self, Visitor, FnKind, NestedVisitorMap};
+use rustc::hir::Node;
+use rustc::hir::ptr::P;
+use rustc::ty::{self, TyCtxt};
+use rustc::ty::query::Providers;
+use rustc::lint;
+use rustc::util::nodemap::{HirIdMap, HirIdSet};
+
+use errors::Applicability;
+use rustc_data_structures::fx::FxIndexMap;
+use std::collections::VecDeque;
+use std::{fmt, u32};
+use std::io::prelude::*;
+use std::io;
+use std::rc::Rc;
+use syntax::ast;
+use syntax::symbol::sym;
+use syntax_pos::Span;
+
+#[derive(Copy, Clone, PartialEq)]
+struct Variable(u32);
+
+#[derive(Copy, Clone, PartialEq)]
+struct LiveNode(u32);
+
+impl Variable {
+    fn get(&self) -> usize { self.0 as usize }
+}
+
+impl LiveNode {
+    fn get(&self) -> usize { self.0 as usize }
+}
+
+#[derive(Copy, Clone, PartialEq, Debug)]
+enum LiveNodeKind {
+    UpvarNode(Span),
+    ExprNode(Span),
+    VarDefNode(Span),
+    ExitNode
+}
+
+fn live_node_kind_to_string(lnk: LiveNodeKind, tcx: TyCtxt<'_>) -> String {
+    let cm = tcx.sess.source_map();
+    match lnk {
+        UpvarNode(s) => {
+            format!("Upvar node [{}]", cm.span_to_string(s))
+        }
+        ExprNode(s) => {
+            format!("Expr node [{}]", cm.span_to_string(s))
+        }
+        VarDefNode(s) => {
+            format!("Var def node [{}]", cm.span_to_string(s))
+        }
+        ExitNode => "Exit node".to_owned(),
+    }
+}
+
+impl<'tcx> Visitor<'tcx> for IrMaps<'tcx> {
+    fn nested_visit_map<'this>(&'this mut self) -> NestedVisitorMap<'this, 'tcx> {
+        NestedVisitorMap::OnlyBodies(&self.tcx.hir())
+    }
+
+    fn visit_fn(&mut self, fk: FnKind<'tcx>, fd: &'tcx hir::FnDecl,
+                b: hir::BodyId, s: Span, id: HirId) {
+        visit_fn(self, fk, fd, b, s, id);
+    }
+
+    fn visit_local(&mut self, l: &'tcx hir::Local) { visit_local(self, l); }
+    fn visit_expr(&mut self, ex: &'tcx Expr) { visit_expr(self, ex); }
+    fn visit_arm(&mut self, a: &'tcx hir::Arm) { visit_arm(self, a); }
+}
+
+fn check_mod_liveness(tcx: TyCtxt<'_>, module_def_id: DefId) {
+    tcx.hir().visit_item_likes_in_module(
+        module_def_id,
+        &mut IrMaps::new(tcx, module_def_id).as_deep_visitor(),
+    );
+}
+
+pub fn provide(providers: &mut Providers<'_>) {
+    *providers = Providers {
+        check_mod_liveness,
+        ..*providers
+    };
+}
+
+impl fmt::Debug for LiveNode {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        write!(f, "ln({})", self.get())
+    }
+}
+
+impl fmt::Debug for Variable {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        write!(f, "v({})", self.get())
+    }
+}
+
+// ______________________________________________________________________
+// Creating ir_maps
+//
+// This is the first pass and the one that drives the main
+// computation.  It walks up and down the IR once.  On the way down,
+// we count for each function the number of variables as well as
+// liveness nodes.  A liveness node is basically an expression or
+// capture clause that does something of interest: either it has
+// interesting control flow or it uses/defines a local variable.
+//
+// On the way back up, at each function node we create liveness sets
+// (we now know precisely how big to make our various vectors and so
+// forth) and then do the data-flow propagation to compute the set
+// of live variables at each program point.
+//
+// Finally, we run back over the IR one last time and, using the
+// computed liveness, check various safety conditions.  For example,
+// there must be no live nodes at the definition site for a variable
+// unless it has an initializer.  Similarly, each non-mutable local
+// variable must not be assigned if there is some successor
+// assignment.  And so forth.
+
+impl LiveNode {
+    fn is_valid(&self) -> bool {
+        self.0 != u32::MAX
+    }
+}
+
+fn invalid_node() -> LiveNode { LiveNode(u32::MAX) }
+
+struct CaptureInfo {
+    ln: LiveNode,
+    var_hid: HirId
+}
+
+#[derive(Copy, Clone, Debug)]
+struct LocalInfo {
+    id: HirId,
+    name: ast::Name,
+    is_shorthand: bool,
+}
+
+#[derive(Copy, Clone, Debug)]
+enum VarKind {
+    Param(HirId, ast::Name),
+    Local(LocalInfo),
+    CleanExit
+}
+
+struct IrMaps<'tcx> {
+    tcx: TyCtxt<'tcx>,
+    body_owner: DefId,
+    num_live_nodes: usize,
+    num_vars: usize,
+    live_node_map: HirIdMap<LiveNode>,
+    variable_map: HirIdMap<Variable>,
+    capture_info_map: HirIdMap<Rc<Vec<CaptureInfo>>>,
+    var_kinds: Vec<VarKind>,
+    lnks: Vec<LiveNodeKind>,
+}
+
+impl IrMaps<'tcx> {
+    fn new(tcx: TyCtxt<'tcx>, body_owner: DefId) -> IrMaps<'tcx> {
+        IrMaps {
+            tcx,
+            body_owner,
+            num_live_nodes: 0,
+            num_vars: 0,
+            live_node_map: HirIdMap::default(),
+            variable_map: HirIdMap::default(),
+            capture_info_map: Default::default(),
+            var_kinds: Vec::new(),
+            lnks: Vec::new(),
+        }
+    }
+
+    fn add_live_node(&mut self, lnk: LiveNodeKind) -> LiveNode {
+        let ln = LiveNode(self.num_live_nodes as u32);
+        self.lnks.push(lnk);
+        self.num_live_nodes += 1;
+
+        debug!("{:?} is of kind {}", ln,
+               live_node_kind_to_string(lnk, self.tcx));
+
+        ln
+    }
+
+    fn add_live_node_for_node(&mut self, hir_id: HirId, lnk: LiveNodeKind) {
+        let ln = self.add_live_node(lnk);
+        self.live_node_map.insert(hir_id, ln);
+
+        debug!("{:?} is node {:?}", ln, hir_id);
+    }
+
+    fn add_variable(&mut self, vk: VarKind) -> Variable {
+        let v = Variable(self.num_vars as u32);
+        self.var_kinds.push(vk);
+        self.num_vars += 1;
+
+        match vk {
+            Local(LocalInfo { id: node_id, .. }) | Param(node_id, _) => {
+                self.variable_map.insert(node_id, v);
+            },
+            CleanExit => {}
+        }
+
+        debug!("{:?} is {:?}", v, vk);
+
+        v
+    }
+
+    fn variable(&self, hir_id: HirId, span: Span) -> Variable {
+        match self.variable_map.get(&hir_id) {
+            Some(&var) => var,
+            None => {
+                span_bug!(span, "no variable registered for id {:?}", hir_id);
+            }
+        }
+    }
+
+    fn variable_name(&self, var: Variable) -> String {
+        match self.var_kinds[var.get()] {
+            Local(LocalInfo { name, .. }) | Param(_, name) => {
+                name.to_string()
+            },
+            CleanExit => "<clean-exit>".to_owned()
+        }
+    }
+
+    fn variable_is_shorthand(&self, var: Variable) -> bool {
+        match self.var_kinds[var.get()] {
+            Local(LocalInfo { is_shorthand, .. }) => is_shorthand,
+            Param(..) | CleanExit => false
+        }
+    }
+
+    fn set_captures(&mut self, hir_id: HirId, cs: Vec<CaptureInfo>) {
+        self.capture_info_map.insert(hir_id, Rc::new(cs));
+    }
+
+    fn lnk(&self, ln: LiveNode) -> LiveNodeKind {
+        self.lnks[ln.get()]
+    }
+}
+
+fn visit_fn<'tcx>(
+    ir: &mut IrMaps<'tcx>,
+    fk: FnKind<'tcx>,
+    decl: &'tcx hir::FnDecl,
+    body_id: hir::BodyId,
+    sp: Span,
+    id: hir::HirId,
+) {
+    debug!("visit_fn");
+
+    // swap in a new set of IR maps for this function body:
+    let def_id = ir.tcx.hir().local_def_id(id);
+    let mut fn_maps = IrMaps::new(ir.tcx, def_id);
+
+    // Don't run unused pass for #[derive()]
+    if let FnKind::Method(..) = fk {
+        let parent = ir.tcx.hir().get_parent_item(id);
+        if let Some(Node::Item(i)) = ir.tcx.hir().find(parent) {
+            if i.attrs.iter().any(|a| a.check_name(sym::automatically_derived)) {
+                return;
+            }
+        }
+    }
+
+    debug!("creating fn_maps: {:p}", &fn_maps);
+
+    let body = ir.tcx.hir().body(body_id);
+
+    for param in &body.params {
+        let is_shorthand = match param.pat.kind {
+            rustc::hir::PatKind::Struct(..) => true,
+            _ => false,
+        };
+        param.pat.each_binding(|_bm, hir_id, _x, ident| {
+            debug!("adding parameters {:?}", hir_id);
+            let var = if is_shorthand {
+                Local(LocalInfo {
+                    id: hir_id,
+                    name: ident.name,
+                    is_shorthand: true,
+                })
+            } else {
+                Param(hir_id, ident.name)
+            };
+            fn_maps.add_variable(var);
+        })
+    };
+
+    // gather up the various local variables, significant expressions,
+    // and so forth:
+    intravisit::walk_fn(&mut fn_maps, fk, decl, body_id, sp, id);
+
+    // compute liveness
+    let mut lsets = Liveness::new(&mut fn_maps, body_id);
+    let entry_ln = lsets.compute(&body.value);
+
+    // check for various error conditions
+    lsets.visit_body(body);
+    lsets.warn_about_unused_args(body, entry_ln);
+}
+
+fn add_from_pat(ir: &mut IrMaps<'_>, pat: &P<hir::Pat>) {
+    // For struct patterns, take note of which fields used shorthand
+    // (`x` rather than `x: x`).
+    let mut shorthand_field_ids = HirIdSet::default();
+    let mut pats = VecDeque::new();
+    pats.push_back(pat);
+    while let Some(pat) = pats.pop_front() {
+        use rustc::hir::PatKind::*;
+        match &pat.kind {
+            Binding(.., inner_pat) => {
+                pats.extend(inner_pat.iter());
+            }
+            Struct(_, fields, _) => {
+                let ids = fields.iter().filter(|f| f.is_shorthand).map(|f| f.pat.hir_id);
+                shorthand_field_ids.extend(ids);
+            }
+            Ref(inner_pat, _) | Box(inner_pat) => {
+                pats.push_back(inner_pat);
+            }
+            TupleStruct(_, inner_pats, _) | Tuple(inner_pats, _) | Or(inner_pats) => {
+                pats.extend(inner_pats.iter());
+            }
+            Slice(pre_pats, inner_pat, post_pats) => {
+                pats.extend(pre_pats.iter());
+                pats.extend(inner_pat.iter());
+                pats.extend(post_pats.iter());
+            }
+            _ => {}
+        }
+    }
+
+    pat.each_binding(|_, hir_id, _, ident| {
+        ir.add_live_node_for_node(hir_id, VarDefNode(ident.span));
+        ir.add_variable(Local(LocalInfo {
+            id: hir_id,
+            name: ident.name,
+            is_shorthand: shorthand_field_ids.contains(&hir_id)
+        }));
+    });
+}
+
+fn visit_local<'tcx>(ir: &mut IrMaps<'tcx>, local: &'tcx hir::Local) {
+    add_from_pat(ir, &local.pat);
+    intravisit::walk_local(ir, local);
+}
+
+fn visit_arm<'tcx>(ir: &mut IrMaps<'tcx>, arm: &'tcx hir::Arm) {
+    add_from_pat(ir, &arm.pat);
+    intravisit::walk_arm(ir, arm);
+}
+
+fn visit_expr<'tcx>(ir: &mut IrMaps<'tcx>, expr: &'tcx Expr) {
+    match expr.kind {
+      // live nodes required for uses or definitions of variables:
+      hir::ExprKind::Path(hir::QPath::Resolved(_, ref path)) => {
+        debug!("expr {}: path that leads to {:?}", expr.hir_id, path.res);
+        if let Res::Local(var_hir_id) = path.res {
+            let upvars = ir.tcx.upvars(ir.body_owner);
+            if !upvars.map_or(false, |upvars| upvars.contains_key(&var_hir_id)) {
+                ir.add_live_node_for_node(expr.hir_id, ExprNode(expr.span));
+            }
+        }
+        intravisit::walk_expr(ir, expr);
+      }
+      hir::ExprKind::Closure(..) => {
+        // Interesting control flow (for loops can contain labeled
+        // breaks or continues)
+        ir.add_live_node_for_node(expr.hir_id, ExprNode(expr.span));
+
+        // Make a live_node for each captured variable, with the span
+        // being the location that the variable is used.  This results
+        // in better error messages than just pointing at the closure
+        // construction site.
+        let mut call_caps = Vec::new();
+        let closure_def_id = ir.tcx.hir().local_def_id(expr.hir_id);
+        if let Some(upvars) = ir.tcx.upvars(closure_def_id) {
+            let parent_upvars = ir.tcx.upvars(ir.body_owner);
+            call_caps.extend(upvars.iter().filter_map(|(&var_id, upvar)| {
+                let has_parent = parent_upvars
+                    .map_or(false, |upvars| upvars.contains_key(&var_id));
+                if !has_parent {
+                    let upvar_ln = ir.add_live_node(UpvarNode(upvar.span));
+                    Some(CaptureInfo { ln: upvar_ln, var_hid: var_id })
+                } else {
+                    None
+                }
+            }));
+        }
+        ir.set_captures(expr.hir_id, call_caps);
+        let old_body_owner = ir.body_owner;
+        ir.body_owner = closure_def_id;
+        intravisit::walk_expr(ir, expr);
+        ir.body_owner = old_body_owner;
+      }
+
+      // live nodes required for interesting control flow:
+      hir::ExprKind::Match(..) |
+      hir::ExprKind::Loop(..) => {
+        ir.add_live_node_for_node(expr.hir_id, ExprNode(expr.span));
+        intravisit::walk_expr(ir, expr);
+      }
+      hir::ExprKind::Binary(op, ..) if op.node.is_lazy() => {
+        ir.add_live_node_for_node(expr.hir_id, ExprNode(expr.span));
+        intravisit::walk_expr(ir, expr);
+      }
+
+      // otherwise, live nodes are not required:
+      hir::ExprKind::Index(..) |
+      hir::ExprKind::Field(..) |
+      hir::ExprKind::Array(..) |
+      hir::ExprKind::Call(..) |
+      hir::ExprKind::MethodCall(..) |
+      hir::ExprKind::Tup(..) |
+      hir::ExprKind::Binary(..) |
+      hir::ExprKind::AddrOf(..) |
+      hir::ExprKind::Cast(..) |
+      hir::ExprKind::DropTemps(..) |
+      hir::ExprKind::Unary(..) |
+      hir::ExprKind::Break(..) |
+      hir::ExprKind::Continue(_) |
+      hir::ExprKind::Lit(_) |
+      hir::ExprKind::Ret(..) |
+      hir::ExprKind::Block(..) |
+      hir::ExprKind::Assign(..) |
+      hir::ExprKind::AssignOp(..) |
+      hir::ExprKind::Struct(..) |
+      hir::ExprKind::Repeat(..) |
+      hir::ExprKind::InlineAsm(..) |
+      hir::ExprKind::Box(..) |
+      hir::ExprKind::Yield(..) |
+      hir::ExprKind::Type(..) |
+      hir::ExprKind::Err |
+      hir::ExprKind::Path(hir::QPath::TypeRelative(..)) => {
+          intravisit::walk_expr(ir, expr);
+      }
+    }
+}
+
+// ______________________________________________________________________
+// Computing liveness sets
+//
+// Actually we compute just a bit more than just liveness, but we use
+// the same basic propagation framework in all cases.
+
+#[derive(Clone, Copy)]
+struct RWU {
+    reader: LiveNode,
+    writer: LiveNode,
+    used: bool
+}
+
+/// Conceptually, this is like a `Vec<RWU>`. But the number of `RWU`s can get
+/// very large, so it uses a more compact representation that takes advantage
+/// of the fact that when the number of `RWU`s is large, most of them have an
+/// invalid reader and an invalid writer.
+struct RWUTable {
+    /// Each entry in `packed_rwus` is either INV_INV_FALSE, INV_INV_TRUE, or
+    /// an index into `unpacked_rwus`. In the common cases, this compacts the
+    /// 65 bits of data into 32; in the uncommon cases, it expands the 65 bits
+    /// into 96.
+    ///
+    /// More compact representations are possible -- e.g., use only 2 bits per
+    /// packed `RWU` and make the secondary table a HashMap that maps from
+    /// indices to `RWU`s -- but this one strikes a good balance between size
+    /// and speed.
+    packed_rwus: Vec<u32>,
+    unpacked_rwus: Vec<RWU>,
+}
+
+// A constant representing `RWU { reader: invalid_node(), writer: invalid_node(), used: false }`.
+const INV_INV_FALSE: u32 = u32::MAX;
+
+// A constant representing `RWU { reader: invalid_node(), writer: invalid_node(), used: true }`.
+const INV_INV_TRUE: u32 = u32::MAX - 1;
+
+impl RWUTable {
+    fn new(num_rwus: usize) -> RWUTable {
+        Self {
+            packed_rwus: vec![INV_INV_FALSE; num_rwus],
+            unpacked_rwus: vec![],
+        }
+    }
+
+    fn get(&self, idx: usize) -> RWU {
+        let packed_rwu = self.packed_rwus[idx];
+        match packed_rwu {
+            INV_INV_FALSE => RWU { reader: invalid_node(), writer: invalid_node(), used: false },
+            INV_INV_TRUE => RWU { reader: invalid_node(), writer: invalid_node(), used: true },
+            _ => self.unpacked_rwus[packed_rwu as usize],
+        }
+    }
+
+    fn get_reader(&self, idx: usize) -> LiveNode {
+        let packed_rwu = self.packed_rwus[idx];
+        match packed_rwu {
+            INV_INV_FALSE | INV_INV_TRUE => invalid_node(),
+            _ => self.unpacked_rwus[packed_rwu as usize].reader,
+        }
+    }
+
+    fn get_writer(&self, idx: usize) -> LiveNode {
+        let packed_rwu = self.packed_rwus[idx];
+        match packed_rwu {
+            INV_INV_FALSE | INV_INV_TRUE => invalid_node(),
+            _ => self.unpacked_rwus[packed_rwu as usize].writer,
+        }
+    }
+
+    fn get_used(&self, idx: usize) -> bool {
+        let packed_rwu = self.packed_rwus[idx];
+        match packed_rwu {
+            INV_INV_FALSE => false,
+            INV_INV_TRUE => true,
+            _ => self.unpacked_rwus[packed_rwu as usize].used,
+        }
+    }
+
+    #[inline]
+    fn copy_packed(&mut self, dst_idx: usize, src_idx: usize) {
+        self.packed_rwus[dst_idx] = self.packed_rwus[src_idx];
+    }
+
+    fn assign_unpacked(&mut self, idx: usize, rwu: RWU) {
+        if rwu.reader == invalid_node() && rwu.writer == invalid_node() {
+            // When we overwrite an indexing entry in `self.packed_rwus` with
+            // `INV_INV_{TRUE,FALSE}` we don't remove the corresponding entry
+            // from `self.unpacked_rwus`; it's not worth the effort, and we
+            // can't have entries shifting around anyway.
+            self.packed_rwus[idx] = if rwu.used {
+                INV_INV_TRUE
+            } else {
+                INV_INV_FALSE
+            }
+        } else {
+            // Add a new RWU to `unpacked_rwus` and make `packed_rwus[idx]`
+            // point to it.
+            self.packed_rwus[idx] = self.unpacked_rwus.len() as u32;
+            self.unpacked_rwus.push(rwu);
+        }
+    }
+
+    fn assign_inv_inv(&mut self, idx: usize) {
+        self.packed_rwus[idx] = if self.get_used(idx) {
+            INV_INV_TRUE
+        } else {
+            INV_INV_FALSE
+        };
+    }
+}
+
+#[derive(Copy, Clone)]
+struct Specials {
+    exit_ln: LiveNode,
+    fallthrough_ln: LiveNode,
+    clean_exit_var: Variable
+}
+
+const ACC_READ: u32 = 1;
+const ACC_WRITE: u32 = 2;
+const ACC_USE: u32 = 4;
+
+struct Liveness<'a, 'tcx> {
+    ir: &'a mut IrMaps<'tcx>,
+    tables: &'a ty::TypeckTables<'tcx>,
+    s: Specials,
+    successors: Vec<LiveNode>,
+    rwu_table: RWUTable,
+
+    // mappings from loop node ID to LiveNode
+    // ("break" label should map to loop node ID,
+    // it probably doesn't now)
+    break_ln: HirIdMap<LiveNode>,
+    cont_ln: HirIdMap<LiveNode>,
+}
+
+impl<'a, 'tcx> Liveness<'a, 'tcx> {
+    fn new(ir: &'a mut IrMaps<'tcx>, body: hir::BodyId) -> Liveness<'a, 'tcx> {
+        // Special nodes and variables:
+        // - exit_ln represents the end of the fn, either by return or panic
+        // - implicit_ret_var is a pseudo-variable that represents
+        //   an implicit return
+        let specials = Specials {
+            exit_ln: ir.add_live_node(ExitNode),
+            fallthrough_ln: ir.add_live_node(ExitNode),
+            clean_exit_var: ir.add_variable(CleanExit)
+        };
+
+        let tables = ir.tcx.body_tables(body);
+
+        let num_live_nodes = ir.num_live_nodes;
+        let num_vars = ir.num_vars;
+
+        Liveness {
+            ir,
+            tables,
+            s: specials,
+            successors: vec![invalid_node(); num_live_nodes],
+            rwu_table: RWUTable::new(num_live_nodes * num_vars),
+            break_ln: Default::default(),
+            cont_ln: Default::default(),
+        }
+    }
+
+    fn live_node(&self, hir_id: HirId, span: Span) -> LiveNode {
+        match self.ir.live_node_map.get(&hir_id) {
+          Some(&ln) => ln,
+          None => {
+            // This must be a mismatch between the ir_map construction
+            // above and the propagation code below; the two sets of
+            // code have to agree about which AST nodes are worth
+            // creating liveness nodes for.
+            span_bug!(
+                span,
+                "no live node registered for node {:?}",
+                hir_id);
+          }
+        }
+    }
+
+    fn variable(&self, hir_id: HirId, span: Span) -> Variable {
+        self.ir.variable(hir_id, span)
+    }
+
+    fn define_bindings_in_pat(&mut self, pat: &hir::Pat, mut succ: LiveNode) -> LiveNode {
+        // In an or-pattern, only consider the first pattern; any later patterns
+        // must have the same bindings, and we also consider the first pattern
+        // to be the "authoritative" set of ids.
+        pat.each_binding_or_first(&mut |_, hir_id, pat_sp, ident| {
+            let ln = self.live_node(hir_id, pat_sp);
+            let var = self.variable(hir_id, ident.span);
+            self.init_from_succ(ln, succ);
+            self.define(ln, var);
+            succ = ln;
+        });
+        succ
+    }
+
+    fn idx(&self, ln: LiveNode, var: Variable) -> usize {
+        ln.get() * self.ir.num_vars + var.get()
+    }
+
+    fn live_on_entry(&self, ln: LiveNode, var: Variable) -> Option<LiveNodeKind> {
+        assert!(ln.is_valid());
+        let reader = self.rwu_table.get_reader(self.idx(ln, var));
+        if reader.is_valid() { Some(self.ir.lnk(reader)) } else { None }
+    }
+
+    // Is this variable live on entry to any of its successor nodes?
+    fn live_on_exit(&self, ln: LiveNode, var: Variable)
+                    -> Option<LiveNodeKind> {
+        let successor = self.successors[ln.get()];
+        self.live_on_entry(successor, var)
+    }
+
+    fn used_on_entry(&self, ln: LiveNode, var: Variable) -> bool {
+        assert!(ln.is_valid());
+        self.rwu_table.get_used(self.idx(ln, var))
+    }
+
+    fn assigned_on_entry(&self, ln: LiveNode, var: Variable)
+                         -> Option<LiveNodeKind> {
+        assert!(ln.is_valid());
+        let writer = self.rwu_table.get_writer(self.idx(ln, var));
+        if writer.is_valid() { Some(self.ir.lnk(writer)) } else { None }
+    }
+
+    fn assigned_on_exit(&self, ln: LiveNode, var: Variable)
+                        -> Option<LiveNodeKind> {
+        let successor = self.successors[ln.get()];
+        self.assigned_on_entry(successor, var)
+    }
+
+    fn indices2<F>(&mut self, ln: LiveNode, succ_ln: LiveNode, mut op: F) where
+        F: FnMut(&mut Liveness<'a, 'tcx>, usize, usize),
+    {
+        let node_base_idx = self.idx(ln, Variable(0));
+        let succ_base_idx = self.idx(succ_ln, Variable(0));
+        for var_idx in 0..self.ir.num_vars {
+            op(self, node_base_idx + var_idx, succ_base_idx + var_idx);
+        }
+    }
+
+    fn write_vars<F>(&self,
+                     wr: &mut dyn Write,
+                     ln: LiveNode,
+                     mut test: F)
+                     -> io::Result<()> where
+        F: FnMut(usize) -> LiveNode,
+    {
+        let node_base_idx = self.idx(ln, Variable(0));
+        for var_idx in 0..self.ir.num_vars {
+            let idx = node_base_idx + var_idx;
+            if test(idx).is_valid() {
+                write!(wr, " {:?}", Variable(var_idx as u32))?;
+            }
+        }
+        Ok(())
+    }
+
+
+    #[allow(unused_must_use)]
+    fn ln_str(&self, ln: LiveNode) -> String {
+        let mut wr = Vec::new();
+        {
+            let wr = &mut wr as &mut dyn Write;
+            write!(wr, "[ln({:?}) of kind {:?} reads", ln.get(), self.ir.lnk(ln));
+            self.write_vars(wr, ln, |idx| self.rwu_table.get_reader(idx));
+            write!(wr, "  writes");
+            self.write_vars(wr, ln, |idx| self.rwu_table.get_writer(idx));
+            write!(wr, "  precedes {:?}]", self.successors[ln.get()]);
+        }
+        String::from_utf8(wr).unwrap()
+    }
+
+    fn init_empty(&mut self, ln: LiveNode, succ_ln: LiveNode) {
+        self.successors[ln.get()] = succ_ln;
+
+        // It is not necessary to initialize the RWUs here because they are all
+        // set to INV_INV_FALSE when they are created, and the sets only grow
+        // during iterations.
+    }
+
+    fn init_from_succ(&mut self, ln: LiveNode, succ_ln: LiveNode) {
+        // more efficient version of init_empty() / merge_from_succ()
+        self.successors[ln.get()] = succ_ln;
+
+        self.indices2(ln, succ_ln, |this, idx, succ_idx| {
+            this.rwu_table.copy_packed(idx, succ_idx);
+        });
+        debug!("init_from_succ(ln={}, succ={})",
+               self.ln_str(ln), self.ln_str(succ_ln));
+    }
+
+    fn merge_from_succ(&mut self,
+                       ln: LiveNode,
+                       succ_ln: LiveNode,
+                       first_merge: bool)
+                       -> bool {
+        if ln == succ_ln { return false; }
+
+        let mut changed = false;
+        self.indices2(ln, succ_ln, |this, idx, succ_idx| {
+            let mut rwu = this.rwu_table.get(idx);
+            let succ_rwu = this.rwu_table.get(succ_idx);
+            if succ_rwu.reader.is_valid() && !rwu.reader.is_valid() {
+                rwu.reader = succ_rwu.reader;
+                changed = true
+            }
+
+            if succ_rwu.writer.is_valid() && !rwu.writer.is_valid() {
+                rwu.writer = succ_rwu.writer;
+                changed = true
+            }
+
+            if succ_rwu.used && !rwu.used {
+                rwu.used = true;
+                changed = true;
+            }
+
+            if changed {
+                this.rwu_table.assign_unpacked(idx, rwu);
+            }
+        });
+
+        debug!("merge_from_succ(ln={:?}, succ={}, first_merge={}, changed={})",
+               ln, self.ln_str(succ_ln), first_merge, changed);
+        return changed;
+    }
+
+    // Indicates that a local variable was *defined*; we know that no
+    // uses of the variable can precede the definition (resolve checks
+    // this) so we just clear out all the data.
+    fn define(&mut self, writer: LiveNode, var: Variable) {
+        let idx = self.idx(writer, var);
+        self.rwu_table.assign_inv_inv(idx);
+
+        debug!("{:?} defines {:?} (idx={}): {}", writer, var,
+               idx, self.ln_str(writer));
+    }
+
+    // Either read, write, or both depending on the acc bitset
+    fn acc(&mut self, ln: LiveNode, var: Variable, acc: u32) {
+        debug!("{:?} accesses[{:x}] {:?}: {}",
+               ln, acc, var, self.ln_str(ln));
+
+        let idx = self.idx(ln, var);
+        let mut rwu = self.rwu_table.get(idx);
+
+        if (acc & ACC_WRITE) != 0 {
+            rwu.reader = invalid_node();
+            rwu.writer = ln;
+        }
+
+        // Important: if we both read/write, must do read second
+        // or else the write will override.
+        if (acc & ACC_READ) != 0 {
+            rwu.reader = ln;
+        }
+
+        if (acc & ACC_USE) != 0 {
+            rwu.used = true;
+        }
+
+        self.rwu_table.assign_unpacked(idx, rwu);
+    }
+
+    fn compute(&mut self, body: &hir::Expr) -> LiveNode {
+        debug!("compute: using id for body, {}",
+               self.ir.tcx.hir().hir_to_pretty_string(body.hir_id));
+
+        // the fallthrough exit is only for those cases where we do not
+        // explicitly return:
+        let s = self.s;
+        self.init_from_succ(s.fallthrough_ln, s.exit_ln);
+        self.acc(s.fallthrough_ln, s.clean_exit_var, ACC_READ);
+
+        let entry_ln = self.propagate_through_expr(body, s.fallthrough_ln);
+
+        // hack to skip the loop unless debug! is enabled:
+        debug!("^^ liveness computation results for body {} (entry={:?})", {
+                   for ln_idx in 0..self.ir.num_live_nodes {
+                        debug!("{:?}", self.ln_str(LiveNode(ln_idx as u32)));
+                   }
+                   body.hir_id
+               },
+               entry_ln);
+
+        entry_ln
+    }
+
+    fn propagate_through_block(&mut self, blk: &hir::Block, succ: LiveNode)
+                               -> LiveNode {
+        if blk.targeted_by_break {
+            self.break_ln.insert(blk.hir_id, succ);
+        }
+        let succ = self.propagate_through_opt_expr(blk.expr.as_ref().map(|e| &**e), succ);
+        blk.stmts.iter().rev().fold(succ, |succ, stmt| {
+            self.propagate_through_stmt(stmt, succ)
+        })
+    }
+
+    fn propagate_through_stmt(&mut self, stmt: &hir::Stmt, succ: LiveNode)
+                              -> LiveNode {
+        match stmt.kind {
+            hir::StmtKind::Local(ref local) => {
+                // Note: we mark the variable as defined regardless of whether
+                // there is an initializer.  Initially I had thought to only mark
+                // the live variable as defined if it was initialized, and then we
+                // could check for uninit variables just by scanning what is live
+                // at the start of the function. But that doesn't work so well for
+                // immutable variables defined in a loop:
+                //     loop { let x; x = 5; }
+                // because the "assignment" loops back around and generates an error.
+                //
+                // So now we just check that variables defined w/o an
+                // initializer are not live at the point of their
+                // initialization, which is mildly more complex than checking
+                // once at the func header but otherwise equivalent.
+
+                let succ = self.propagate_through_opt_expr(local.init.as_ref().map(|e| &**e), succ);
+                self.define_bindings_in_pat(&local.pat, succ)
+            }
+            hir::StmtKind::Item(..) => succ,
+            hir::StmtKind::Expr(ref expr) | hir::StmtKind::Semi(ref expr) => {
+                self.propagate_through_expr(&expr, succ)
+            }
+        }
+    }
+
+    fn propagate_through_exprs(&mut self, exprs: &[Expr], succ: LiveNode)
+                               -> LiveNode {
+        exprs.iter().rev().fold(succ, |succ, expr| {
+            self.propagate_through_expr(&expr, succ)
+        })
+    }
+
+    fn propagate_through_opt_expr(&mut self,
+                                  opt_expr: Option<&Expr>,
+                                  succ: LiveNode)
+                                  -> LiveNode {
+        opt_expr.map_or(succ, |expr| self.propagate_through_expr(expr, succ))
+    }
+
+    fn propagate_through_expr(&mut self, expr: &Expr, succ: LiveNode)
+                              -> LiveNode {
+        debug!("propagate_through_expr: {}", self.ir.tcx.hir().hir_to_pretty_string(expr.hir_id));
+
+        match expr.kind {
+            // Interesting cases with control flow or which gen/kill
+            hir::ExprKind::Path(hir::QPath::Resolved(_, ref path)) => {
+                self.access_path(expr.hir_id, path, succ, ACC_READ | ACC_USE)
+            }
+
+            hir::ExprKind::Field(ref e, _) => {
+                self.propagate_through_expr(&e, succ)
+            }
+
+            hir::ExprKind::Closure(..) => {
+                debug!("{} is an ExprKind::Closure",
+                       self.ir.tcx.hir().hir_to_pretty_string(expr.hir_id));
+
+                // the construction of a closure itself is not important,
+                // but we have to consider the closed over variables.
+                let caps = self.ir.capture_info_map.get(&expr.hir_id).cloned().unwrap_or_else(||
+                    span_bug!(expr.span, "no registered caps"));
+
+                caps.iter().rev().fold(succ, |succ, cap| {
+                    self.init_from_succ(cap.ln, succ);
+                    let var = self.variable(cap.var_hid, expr.span);
+                    self.acc(cap.ln, var, ACC_READ | ACC_USE);
+                    cap.ln
+                })
+            }
+
+            // Note that labels have been resolved, so we don't need to look
+            // at the label ident
+            hir::ExprKind::Loop(ref blk, _, _) => {
+                self.propagate_through_loop(expr, &blk, succ)
+            }
+
+            hir::ExprKind::Match(ref e, ref arms, _) => {
+                //
+                //      (e)
+                //       |
+                //       v
+                //     (expr)
+                //     / | \
+                //    |  |  |
+                //    v  v  v
+                //   (..arms..)
+                //    |  |  |
+                //    v  v  v
+                //   (  succ  )
+                //
+                //
+                let ln = self.live_node(expr.hir_id, expr.span);
+                self.init_empty(ln, succ);
+                let mut first_merge = true;
+                for arm in arms {
+                    let body_succ = self.propagate_through_expr(&arm.body, succ);
+
+                    let guard_succ = self.propagate_through_opt_expr(
+                        arm.guard.as_ref().map(|hir::Guard::If(e)| &**e),
+                        body_succ
+                    );
+                    let arm_succ = self.define_bindings_in_pat(&arm.pat, guard_succ);
+                    self.merge_from_succ(ln, arm_succ, first_merge);
+                    first_merge = false;
+                };
+                self.propagate_through_expr(&e, ln)
+            }
+
+            hir::ExprKind::Ret(ref o_e) => {
+                // ignore succ and subst exit_ln:
+                let exit_ln = self.s.exit_ln;
+                self.propagate_through_opt_expr(o_e.as_ref().map(|e| &**e), exit_ln)
+            }
+
+            hir::ExprKind::Break(label, ref opt_expr) => {
+                // Find which label this break jumps to
+                let target = match label.target_id {
+                    Ok(hir_id) => self.break_ln.get(&hir_id),
+                    Err(err) => span_bug!(expr.span, "loop scope error: {}", err),
+                }.cloned();
+
+                // Now that we know the label we're going to,
+                // look it up in the break loop nodes table
+
+                match target {
+                    Some(b) => self.propagate_through_opt_expr(opt_expr.as_ref().map(|e| &**e), b),
+                    None => span_bug!(expr.span, "break to unknown label")
+                }
+            }
+
+            hir::ExprKind::Continue(label) => {
+                // Find which label this expr continues to
+                let sc = label.target_id.unwrap_or_else(|err|
+                    span_bug!(expr.span, "loop scope error: {}", err));
+
+                // Now that we know the label we're going to,
+                // look it up in the continue loop nodes table
+                self.cont_ln.get(&sc).cloned().unwrap_or_else(||
+                    span_bug!(expr.span, "continue to unknown label"))
+            }
+
+            hir::ExprKind::Assign(ref l, ref r) => {
+                // see comment on places in
+                // propagate_through_place_components()
+                let succ = self.write_place(&l, succ, ACC_WRITE);
+                let succ = self.propagate_through_place_components(&l, succ);
+                self.propagate_through_expr(&r, succ)
+            }
+
+            hir::ExprKind::AssignOp(_, ref l, ref r) => {
+                // an overloaded assign op is like a method call
+                if self.tables.is_method_call(expr) {
+                    let succ = self.propagate_through_expr(&l, succ);
+                    self.propagate_through_expr(&r, succ)
+                } else {
+                    // see comment on places in
+                    // propagate_through_place_components()
+                    let succ = self.write_place(&l, succ, ACC_WRITE|ACC_READ);
+                    let succ = self.propagate_through_expr(&r, succ);
+                    self.propagate_through_place_components(&l, succ)
+                }
+            }
+
+            // Uninteresting cases: just propagate in rev exec order
+
+            hir::ExprKind::Array(ref exprs) => {
+                self.propagate_through_exprs(exprs, succ)
+            }
+
+            hir::ExprKind::Struct(_, ref fields, ref with_expr) => {
+                let succ = self.propagate_through_opt_expr(with_expr.as_ref().map(|e| &**e), succ);
+                fields.iter().rev().fold(succ, |succ, field| {
+                    self.propagate_through_expr(&field.expr, succ)
+                })
+            }
+
+            hir::ExprKind::Call(ref f, ref args) => {
+                let m = self.ir.tcx.hir().get_module_parent(expr.hir_id);
+                let succ = if self.ir.tcx.is_ty_uninhabited_from(m, self.tables.expr_ty(expr)) {
+                    self.s.exit_ln
+                } else {
+                    succ
+                };
+                let succ = self.propagate_through_exprs(args, succ);
+                self.propagate_through_expr(&f, succ)
+            }
+
+            hir::ExprKind::MethodCall(.., ref args) => {
+                let m = self.ir.tcx.hir().get_module_parent(expr.hir_id);
+                let succ = if self.ir.tcx.is_ty_uninhabited_from(m, self.tables.expr_ty(expr)) {
+                    self.s.exit_ln
+                } else {
+                    succ
+                };
+
+                self.propagate_through_exprs(args, succ)
+            }
+
+            hir::ExprKind::Tup(ref exprs) => {
+                self.propagate_through_exprs(exprs, succ)
+            }
+
+            hir::ExprKind::Binary(op, ref l, ref r) if op.node.is_lazy() => {
+                let r_succ = self.propagate_through_expr(&r, succ);
+
+                let ln = self.live_node(expr.hir_id, expr.span);
+                self.init_from_succ(ln, succ);
+                self.merge_from_succ(ln, r_succ, false);
+
+                self.propagate_through_expr(&l, ln)
+            }
+
+            hir::ExprKind::Index(ref l, ref r) |
+            hir::ExprKind::Binary(_, ref l, ref r) => {
+                let r_succ = self.propagate_through_expr(&r, succ);
+                self.propagate_through_expr(&l, r_succ)
+            }
+
+            hir::ExprKind::Box(ref e) |
+            hir::ExprKind::AddrOf(_, ref e) |
+            hir::ExprKind::Cast(ref e, _) |
+            hir::ExprKind::Type(ref e, _) |
+            hir::ExprKind::DropTemps(ref e) |
+            hir::ExprKind::Unary(_, ref e) |
+            hir::ExprKind::Yield(ref e, _) |
+            hir::ExprKind::Repeat(ref e, _) => {
+                self.propagate_through_expr(&e, succ)
+            }
+
+            hir::ExprKind::InlineAsm(ref ia, ref outputs, ref inputs) => {
+                let succ = ia.outputs.iter().zip(outputs).rev().fold(succ, |succ, (o, output)| {
+                    // see comment on places
+                    // in propagate_through_place_components()
+                    if o.is_indirect {
+                        self.propagate_through_expr(output, succ)
+                    } else {
+                        let acc = if o.is_rw { ACC_WRITE|ACC_READ } else { ACC_WRITE };
+                        let succ = self.write_place(output, succ, acc);
+                        self.propagate_through_place_components(output, succ)
+                    }
+                });
+
+                // Inputs are executed first. Propagate last because of rev order
+                self.propagate_through_exprs(inputs, succ)
+            }
+
+            hir::ExprKind::Lit(..) | hir::ExprKind::Err |
+            hir::ExprKind::Path(hir::QPath::TypeRelative(..)) => {
+                succ
+            }
+
+            // Note that labels have been resolved, so we don't need to look
+            // at the label ident
+            hir::ExprKind::Block(ref blk, _) => {
+                self.propagate_through_block(&blk, succ)
+            }
+        }
+    }
+
+    fn propagate_through_place_components(&mut self,
+                                          expr: &Expr,
+                                          succ: LiveNode)
+                                          -> LiveNode {
+        // # Places
+        //
+        // In general, the full flow graph structure for an
+        // assignment/move/etc can be handled in one of two ways,
+        // depending on whether what is being assigned is a "tracked
+        // value" or not. A tracked value is basically a local
+        // variable or argument.
+        //
+        // The two kinds of graphs are:
+        //
+        //    Tracked place          Untracked place
+        // ----------------------++-----------------------
+        //                       ||
+        //         |             ||           |
+        //         v             ||           v
+        //     (rvalue)          ||       (rvalue)
+        //         |             ||           |
+        //         v             ||           v
+        // (write of place)     ||   (place components)
+        //         |             ||           |
+        //         v             ||           v
+        //      (succ)           ||        (succ)
+        //                       ||
+        // ----------------------++-----------------------
+        //
+        // I will cover the two cases in turn:
+        //
+        // # Tracked places
+        //
+        // A tracked place is a local variable/argument `x`.  In
+        // these cases, the link_node where the write occurs is linked
+        // to node id of `x`.  The `write_place()` routine generates
+        // the contents of this node.  There are no subcomponents to
+        // consider.
+        //
+        // # Non-tracked places
+        //
+        // These are places like `x[5]` or `x.f`.  In that case, we
+        // basically ignore the value which is written to but generate
+        // reads for the components---`x` in these two examples.  The
+        // components reads are generated by
+        // `propagate_through_place_components()` (this fn).
+        //
+        // # Illegal places
+        //
+        // It is still possible to observe assignments to non-places;
+        // these errors are detected in the later pass borrowck.  We
+        // just ignore such cases and treat them as reads.
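+        //
+        // For example (an illustrative sketch, not code from this crate):
+        //
+        //     let mut x = (0, 0);
+        //     x = (1, 2);      // tracked place: the write is linked to `x`'s node
+        //     x.0 = 1;         // untracked place: read of `x`, write ignored
+        //     v[i] = 1;        // untracked place: reads of `v` and `i`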
+
+        match expr.kind {
+            hir::ExprKind::Path(_) => succ,
+            hir::ExprKind::Field(ref e, _) => self.propagate_through_expr(&e, succ),
+            _ => self.propagate_through_expr(expr, succ)
+        }
+    }
+
+    // see comment on propagate_through_place()
+    fn write_place(&mut self, expr: &Expr, succ: LiveNode, acc: u32) -> LiveNode {
+        match expr.kind {
+            hir::ExprKind::Path(hir::QPath::Resolved(_, ref path)) => {
+                self.access_path(expr.hir_id, path, succ, acc)
+            }
+
+            // We do not track other places, so just propagate through
+            // to their subcomponents.  Also, it may happen that
+            // non-places occur here, because those are detected in the
+            // later pass borrowck.
+            _ => succ
+        }
+    }
+
+    fn access_var(&mut self, hir_id: HirId, var_hid: HirId, succ: LiveNode, acc: u32, span: Span)
+                  -> LiveNode {
+        let ln = self.live_node(hir_id, span);
+        if acc != 0 {
+            self.init_from_succ(ln, succ);
+            let var = self.variable(var_hid, span);
+            self.acc(ln, var, acc);
+        }
+        ln
+    }
+
+    fn access_path(&mut self, hir_id: HirId, path: &hir::Path, succ: LiveNode, acc: u32)
+                   -> LiveNode {
+        match path.res {
+            Res::Local(hid) => {
+                let upvars = self.ir.tcx.upvars(self.ir.body_owner);
+                if !upvars.map_or(false, |upvars| upvars.contains_key(&hid)) {
+                    self.access_var(hir_id, hid, succ, acc, path.span)
+                } else {
+                    succ
+                }
+            }
+            _ => succ
+        }
+    }
+
+    fn propagate_through_loop(
+        &mut self,
+        expr: &Expr,
+        body: &hir::Block,
+        succ: LiveNode
+    ) -> LiveNode {
+        /*
+        We model control flow like this:
+
+              (expr) <-+
+                |      |
+                v      |
+              (body) --+
+
+        Note that a `continue` expression targeting the `loop` will have a successor of `expr`.
+        Meanwhile, a `break` expression will have a successor of `succ`.
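+
+        For example (an illustrative sketch, not code from this crate): in
+        `loop { if done() { break; } work(); }`, the `break` has successor `succ`,
+        while the end of the body and any `continue` flow back to `expr`.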
+        */
+
+        // first iteration:
+        let mut first_merge = true;
+        let ln = self.live_node(expr.hir_id, expr.span);
+        self.init_empty(ln, succ);
+        debug!("propagate_through_loop: using id for loop body {} {}",
+               expr.hir_id, self.ir.tcx.hir().hir_to_pretty_string(body.hir_id));
+
+        self.break_ln.insert(expr.hir_id, succ);
+
+        self.cont_ln.insert(expr.hir_id, ln);
+
+        let body_ln = self.propagate_through_block(body, ln);
+
+        // repeat until fixed point is reached:
+        while self.merge_from_succ(ln, body_ln, first_merge) {
+            first_merge = false;
+            assert_eq!(body_ln, self.propagate_through_block(body, ln));
+        }
+
+        ln
+    }
+}
+
+// _______________________________________________________________________
+// Checking for error conditions
+
+impl<'a, 'tcx> Visitor<'tcx> for Liveness<'a, 'tcx> {
+    fn nested_visit_map<'this>(&'this mut self) -> NestedVisitorMap<'this, 'tcx> {
+        NestedVisitorMap::None
+    }
+
+    fn visit_local(&mut self, local: &'tcx hir::Local) {
+        self.check_unused_vars_in_pat(&local.pat, None, |spans, hir_id, ln, var| {
+            if local.init.is_some() {
+                self.warn_about_dead_assign(spans, hir_id, ln, var);
+            }
+        });
+
+        intravisit::walk_local(self, local);
+    }
+
+    fn visit_expr(&mut self, ex: &'tcx Expr) {
+        check_expr(self, ex);
+    }
+
+    fn visit_arm(&mut self, arm: &'tcx hir::Arm) {
+        self.check_unused_vars_in_pat(&arm.pat, None, |_, _, _, _| {});
+        intravisit::walk_arm(self, arm);
+    }
+}
+
+fn check_expr<'tcx>(this: &mut Liveness<'_, 'tcx>, expr: &'tcx Expr) {
+    match expr.kind {
+        hir::ExprKind::Assign(ref l, _) => {
+            this.check_place(&l);
+        }
+
+        hir::ExprKind::AssignOp(_, ref l, _) => {
+            if !this.tables.is_method_call(expr) {
+                this.check_place(&l);
+            }
+        }
+
+        hir::ExprKind::InlineAsm(ref ia, ref outputs, ref inputs) => {
+            for input in inputs {
+                this.visit_expr(input);
+            }
+
+            // Output operands must be places
+            for (o, output) in ia.outputs.iter().zip(outputs) {
+                if !o.is_indirect {
+                    this.check_place(output);
+                }
+                this.visit_expr(output);
+            }
+        }
+
+        // no correctness conditions related to liveness
+        hir::ExprKind::Call(..) | hir::ExprKind::MethodCall(..) |
+        hir::ExprKind::Match(..) | hir::ExprKind::Loop(..) |
+        hir::ExprKind::Index(..) | hir::ExprKind::Field(..) |
+        hir::ExprKind::Array(..) | hir::ExprKind::Tup(..) | hir::ExprKind::Binary(..) |
+        hir::ExprKind::Cast(..) | hir::ExprKind::DropTemps(..) | hir::ExprKind::Unary(..) |
+        hir::ExprKind::Ret(..) | hir::ExprKind::Break(..) | hir::ExprKind::Continue(..) |
+        hir::ExprKind::Lit(_) | hir::ExprKind::Block(..) | hir::ExprKind::AddrOf(..) |
+        hir::ExprKind::Struct(..) | hir::ExprKind::Repeat(..) |
+        hir::ExprKind::Closure(..) | hir::ExprKind::Path(_) | hir::ExprKind::Yield(..) |
+        hir::ExprKind::Box(..) | hir::ExprKind::Type(..) | hir::ExprKind::Err => {}
+    }
+
+    intravisit::walk_expr(this, expr);
+}
+
+impl<'tcx> Liveness<'_, 'tcx> {
+    fn check_place(&mut self, expr: &'tcx Expr) {
+        match expr.kind {
+            hir::ExprKind::Path(hir::QPath::Resolved(_, ref path)) => {
+                if let Res::Local(var_hid) = path.res {
+                    let upvars = self.ir.tcx.upvars(self.ir.body_owner);
+                    if !upvars.map_or(false, |upvars| upvars.contains_key(&var_hid)) {
+                        // Assignment to an immutable variable or argument: only legal
+                        // if there is no later assignment. If this local is actually
+                        // mutable, then check for a reassignment to flag the mutability
+                        // as being used.
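+                        //
+                        // For example (an illustrative sketch, not from this
+                        // crate): given `let mut x; x = 1; x = 2; f(x);`, the
+                        // assignment `x = 1` is never read, so the call below
+                        // reports it as an unused assignment.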
+                        let ln = self.live_node(expr.hir_id, expr.span);
+                        let var = self.variable(var_hid, expr.span);
+                        self.warn_about_dead_assign(vec![expr.span], expr.hir_id, ln, var);
+                    }
+                }
+            }
+            _ => {
+                // For other kinds of places, no checks are required,
+                // and any embedded expressions are actually rvalues
+                intravisit::walk_expr(self, expr);
+            }
+        }
+    }
+
+    fn should_warn(&self, var: Variable) -> Option<String> {
+        let name = self.ir.variable_name(var);
+        if name.is_empty() || name.as_bytes()[0] == b'_' {
+            None
+        } else {
+            Some(name)
+        }
+    }
+
+    fn warn_about_unused_args(&self, body: &hir::Body, entry_ln: LiveNode) {
+        for p in &body.params {
+            self.check_unused_vars_in_pat(&p.pat, Some(entry_ln), |spans, hir_id, ln, var| {
+                if self.live_on_entry(ln, var).is_none() {
+                    self.report_dead_assign(hir_id, spans, var, true);
+                }
+            });
+        }
+    }
+
+    fn check_unused_vars_in_pat(
+        &self,
+        pat: &hir::Pat,
+        entry_ln: Option<LiveNode>,
+        on_used_on_entry: impl Fn(Vec<Span>, HirId, LiveNode, Variable),
+    ) {
+        // In an or-pattern, only consider the first pattern; any later patterns must have the
+        // same bindings, and we also consider the first pattern to be the "authoritative" set of
+        // ids. However, we should take the spans of variables with the same name from the later
+        // patterns so the suggestions to prefix with underscores will apply to those too.
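+        //
+        // For example (an illustrative sketch, not from this crate): in
+        // `match t { (x, 0) | (0, x) => {} }` with `x` unused, the first `x` supplies the
+        // authoritative ids, while both spans are collected so one suggestion can rename
+        // both occurrences to `_x`.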
+        let mut vars: FxIndexMap<String, (LiveNode, Variable, HirId, Vec<Span>)> = <_>::default();
+
+        pat.each_binding(|_, hir_id, pat_sp, ident| {
+            let ln = entry_ln.unwrap_or_else(|| self.live_node(hir_id, pat_sp));
+            let var = self.variable(hir_id, ident.span);
+            vars.entry(self.ir.variable_name(var))
+                .and_modify(|(.., spans)| spans.push(ident.span))
+                .or_insert_with(|| (ln, var, hir_id, vec![ident.span]));
+        });
+
+        for (_, (ln, var, id, spans)) in vars {
+            if self.used_on_entry(ln, var) {
+                on_used_on_entry(spans, id, ln, var);
+            } else {
+                self.report_unused(spans, id, ln, var);
+            }
+        }
+    }
+
+    fn report_unused(&self, spans: Vec<Span>, hir_id: HirId, ln: LiveNode, var: Variable) {
+        if let Some(name) = self.should_warn(var).filter(|name| name != "self") {
+            // annoying: for parameters in funcs like `fn(x: i32)
+            // {ret}`, there is only one node, so asking about
+            // assigned_on_exit() is not meaningful.
+            let is_assigned = if ln == self.s.exit_ln {
+                false
+            } else {
+                self.assigned_on_exit(ln, var).is_some()
+            };
+
+            if is_assigned {
+                self.ir.tcx.lint_hir_note(
+                    lint::builtin::UNUSED_VARIABLES,
+                    hir_id,
+                    spans,
+                    &format!("variable `{}` is assigned to, but never used", name),
+                    &format!("consider using `_{}` instead", name),
+                );
+            } else {
+                let mut err = self.ir.tcx.struct_span_lint_hir(
+                    lint::builtin::UNUSED_VARIABLES,
+                    hir_id,
+                    spans.clone(),
+                    &format!("unused variable: `{}`", name),
+                );
+
+                if self.ir.variable_is_shorthand(var) {
+                    if let Node::Binding(pat) = self.ir.tcx.hir().get(hir_id) {
+                        // Handle `ref` and `ref mut`.
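+                        //
+                        // For example (an illustrative sketch, not from this
+                        // crate): an unused shorthand binding in
+                        // `let S { ref field, .. } = s;` has its pattern span
+                        // rewritten to `field: _` rather than `_field`.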
+                        let spans = spans.iter()
+                            .map(|_span| (pat.span, format!("{}: _", name)))
+                            .collect();
+
+                        err.multipart_suggestion(
+                            "try ignoring the field",
+                            spans,
+                            Applicability::MachineApplicable,
+                        );
+                    }
+                } else {
+                    err.multipart_suggestion(
+                        "consider prefixing with an underscore",
+                        spans.iter().map(|span| (*span, format!("_{}", name))).collect(),
+                        Applicability::MachineApplicable,
+                    );
+                }
+
+                err.emit()
+            }
+        }
+    }
+
+    fn warn_about_dead_assign(&self, spans: Vec<Span>, hir_id: HirId, ln: LiveNode, var: Variable) {
+        if self.live_on_exit(ln, var).is_none() {
+            self.report_dead_assign(hir_id, spans, var, false);
+        }
+    }
+
+    fn report_dead_assign(&self, hir_id: HirId, spans: Vec<Span>, var: Variable, is_param: bool) {
+        if let Some(name) = self.should_warn(var) {
+            if is_param {
+                self.ir.tcx.struct_span_lint_hir(lint::builtin::UNUSED_ASSIGNMENTS, hir_id, spans,
+                        &format!("value passed to `{}` is never read", name))
+                    .help("maybe it is overwritten before being read?")
+                    .emit();
+            } else {
+                self.ir.tcx.struct_span_lint_hir(lint::builtin::UNUSED_ASSIGNMENTS, hir_id, spans,
+                        &format!("value assigned to `{}` is never read", name))
+                    .help("maybe it is overwritten before being read?")
+                    .emit();
+            }
+        }
+    }
+}
index 3f11430dc82cba3bd5e0eefe94ef29b53d4c57da..e8bf4e7ea8ff67d94da7c47a02e7cc598c4d6759 100644 (file)
@@ -14,4 +14,5 @@ doctest = false
 rustc = { path = "../librustc" }
 rustc_metadata = { path = "../librustc_metadata" }
 syntax = { path = "../libsyntax" }
+syntax_expand = { path = "../libsyntax_expand" }
 syntax_pos = { path = "../libsyntax_pos" }
index 4e1a47c503e59fd3675e8f576714dbbf74b8a3e2..38738e20630c912f895859479b7d84e098ed0920 100644 (file)
@@ -21,7 +21,7 @@
 //! extern crate syntax_pos;
 //!
 //! use rustc_driver::plugin::Registry;
-//! use syntax::ext::base::{ExtCtxt, MacResult};
+//! use syntax_expand::base::{ExtCtxt, MacResult};
 //! use syntax_pos::Span;
 //! use syntax::tokenstream::TokenTree;
 //!
index bb3c950edae3bb292de3869f599b9d7f4d39addc..b826dd911983862a78479a66045b95980fa2249d 100644 (file)
@@ -4,8 +4,8 @@
 use rustc::session::Session;
 use rustc::util::nodemap::FxHashMap;
 
-use syntax::ext::base::{SyntaxExtension, SyntaxExtensionKind, NamedSyntaxExtension};
-use syntax::ext::base::MacroExpanderFn;
+use syntax_expand::base::{SyntaxExtension, SyntaxExtensionKind, NamedSyntaxExtension};
+use syntax_expand::base::MacroExpanderFn;
 use syntax::symbol::Symbol;
 use syntax::ast;
 use syntax::feature_gate::AttributeType;
index 67066466f1d222d0d0af63a786a193a46f45f137..03afb547d3a226a54faa166b91bf8673bb20b2ad 100644 (file)
@@ -1,8 +1,9 @@
 syntax::register_diagnostics! {
 
 E0445: r##"
-A private trait was used on a public type parameter bound. Erroneous code
-examples:
+A private trait was used on a public type parameter bound.
+
+Erroneous code examples:
 
 ```compile_fail,E0445
 #![deny(private_in_public)]
@@ -32,7 +33,9 @@ pub fn foo<T: Foo> (t: T) {} // ok!
 "##,
 
 E0446: r##"
-A private type was used in a public type signature. Erroneous code example:
+A private type was used in a public type signature.
+
+Erroneous code example:
 
 ```compile_fail,E0446
 #![deny(private_in_public)]
@@ -65,7 +68,9 @@ pub fn bar() -> Bar { // ok!
 E0447: r##"
 #### Note: this error code is no longer emitted by the compiler.
 
-The `pub` keyword was used inside a function. Erroneous code example:
+The `pub` keyword was used inside a function.
+
+Erroneous code example:
 
 ```
 fn foo() {
@@ -79,7 +84,11 @@ fn foo() {
 "##,
 
 E0448: r##"
-The `pub` keyword was used inside a public enum. Erroneous code example:
+#### Note: this error code is no longer emitted by the compiler.
+
+The `pub` keyword was used inside a public enum.
+
+Erroneous code example:
 
 ```compile_fail
 pub enum Foo {
@@ -106,7 +115,9 @@ pub enum Foo {
 "##,
 
 E0451: r##"
-A struct constructor with private fields was invoked. Erroneous code example:
+A struct constructor with private fields was invoked.
+
+Erroneous code example:
 
 ```compile_fail,E0451
 mod Bar {
index f44692b7aea7d6193dff4f35d160d017c00d5e7a..34cdec229af4318f329225cbecce252b842a8e76 100644 (file)
@@ -64,7 +64,7 @@ fn visit(&mut self, ty_fragment: impl TypeFoldable<'tcx>) -> bool {
     fn visit_trait(&mut self, trait_ref: TraitRef<'tcx>) -> bool {
         self.skeleton().visit_trait(trait_ref)
     }
-    fn visit_predicates(&mut self, predicates: &ty::GenericPredicates<'tcx>) -> bool {
+    fn visit_predicates(&mut self, predicates: ty::GenericPredicates<'tcx>) -> bool {
         self.skeleton().visit_predicates(predicates)
     }
 }
@@ -88,7 +88,7 @@ fn visit_trait(&mut self, trait_ref: TraitRef<'tcx>) -> bool {
         (!self.def_id_visitor.shallow() && substs.visit_with(self))
     }
 
-    fn visit_predicates(&mut self, predicates: &ty::GenericPredicates<'tcx>) -> bool {
+    fn visit_predicates(&mut self, predicates: ty::GenericPredicates<'tcx>) -> bool {
         let ty::GenericPredicates { parent: _, predicates } = predicates;
         for (predicate, _span) in predicates {
             match predicate {
@@ -880,11 +880,11 @@ fn visit_macro_def(&mut self, md: &'tcx hir::MacroDef) {
             self.tcx,
             self.tcx.hir().local_def_id(md.hir_id)
         ).unwrap();
-        let mut module_id = self.tcx.hir().as_local_hir_id(macro_module_def_id).unwrap();
-        if !self.tcx.hir().is_hir_id_module(module_id) {
-            // `module_id` doesn't correspond to a `mod`, return early (#63164).
-            return;
-        }
+        let mut module_id = match self.tcx.hir().as_local_hir_id(macro_module_def_id) {
+            Some(module_id) if self.tcx.hir().is_hir_id_module(module_id) => module_id,
+            // `module_id` doesn't correspond to a `mod`, return early (#63164, #65252).
+            _ => return,
+        };
         let level = if md.vis.node.is_pub() { self.get(module_id) } else { None };
         let new_level = self.update(md.hir_id, level);
         if new_level.is_none() {
index 936e72ef2c571a4f620a6839be5f3048246640fa..06bf30859898a2af2fa1a4348ee06ed7b747b44b 100644 (file)
@@ -14,6 +14,7 @@ doctest = false
 bitflags = "1.0"
 log = "0.4"
 syntax = { path = "../libsyntax" }
+syntax_expand = { path = "../libsyntax_expand" }
 rustc = { path = "../librustc" }
 arena = { path = "../libarena" }
 errors = { path = "../librustc_errors", package = "rustc_errors" }
index f76aa95dd2cc8e40642dfc89669714dfe40a819d..e261d3af61ff8c8372b7b464e1a65fe10ee6a5a3 100644 (file)
 
 use syntax::ast::{self, Block, ForeignItem, ForeignItemKind, Item, ItemKind, NodeId};
 use syntax::ast::{MetaItemKind, StmtKind, TraitItem, TraitItemKind};
-use syntax::ext::base::{MacroKind, SyntaxExtension};
-use syntax::ext::expand::AstFragment;
-use syntax::ext::hygiene::ExpnId;
+use syntax_expand::base::{MacroKind, SyntaxExtension};
+use syntax_expand::expand::AstFragment;
+use syntax_expand::hygiene::ExpnId;
 use syntax::feature_gate::is_builtin_attr;
 use syntax::parse::token::{self, Token};
+use syntax::print::pprust;
 use syntax::{span_err, struct_span_err};
+use syntax::source_map::{respan, Spanned};
 use syntax::symbol::{kw, sym};
 use syntax::visit::{self, Visitor};
 
@@ -92,7 +94,8 @@ impl<'a> Resolver<'a> {
         where T: ToNameBinding<'a>,
     {
         let binding = def.to_name_binding(self.arenas);
-        if let Err(old_binding) = self.try_define(parent, ident, ns, binding) {
+        let key = self.new_key(ident, ns);
+        if let Err(old_binding) = self.try_define(parent, key, binding) {
             self.report_conflict(parent, ident, ns, old_binding, &binding);
         }
     }
@@ -102,8 +105,7 @@ impl<'a> Resolver<'a> {
             return self.module_map[&def_id]
         }
 
-        let macros_only = self.cstore.dep_kind_untracked(def_id.krate).macros_only();
-        if let Some(&module) = self.extern_module_map.get(&(def_id, macros_only)) {
+        if let Some(&module) = self.extern_module_map.get(&def_id) {
             return module;
         }
 
@@ -119,7 +121,7 @@ impl<'a> Resolver<'a> {
         let module = self.arenas.alloc_module(ModuleData::new(
             parent, kind, def_id, ExpnId::root(), DUMMY_SP
         ));
-        self.extern_module_map.insert((def_id, macros_only), module);
+        self.extern_module_map.insert(def_id, module);
         module
     }
 
@@ -161,25 +163,15 @@ impl<'a> Resolver<'a> {
         Some(ext)
     }
 
-    // FIXME: `extra_placeholders` should be included into the `fragment` as regular placeholders.
     crate fn build_reduced_graph(
         &mut self,
         fragment: &AstFragment,
-        extra_placeholders: &[NodeId],
         parent_scope: ParentScope<'a>,
     ) -> LegacyScope<'a> {
         let mut def_collector = DefCollector::new(&mut self.definitions, parent_scope.expansion);
         fragment.visit_with(&mut def_collector);
-        for placeholder in extra_placeholders {
-            def_collector.visit_macro_invoc(*placeholder);
-        }
-
         let mut visitor = BuildReducedGraphVisitor { r: self, parent_scope };
         fragment.visit_with(&mut visitor);
-        for placeholder in extra_placeholders {
-            visitor.parent_scope.legacy = visitor.visit_invoc(*placeholder);
-        }
-
         visitor.parent_scope.legacy
     }
 
@@ -227,7 +219,7 @@ fn resolve_visibility(&mut self, vis: &ast::Visibility) -> ty::Visibility {
                         .span_suggestion(
                             path.span,
                             "try",
-                            format!("crate::{}", path),
+                            format!("crate::{}", pprust::path_to_string(&path)),
                             Applicability::MaybeIncorrect,
                         )
                         .emit();
@@ -301,7 +293,7 @@ fn resolve_visibility(&mut self, vis: &ast::Visibility) -> ty::Visibility {
         }
     }
 
-    fn insert_field_names(&mut self, def_id: DefId, field_names: Vec<Name>) {
+    fn insert_field_names(&mut self, def_id: DefId, field_names: Vec<Spanned<Name>>) {
         if !field_names.is_empty() {
             self.r.field_names.insert(def_id, field_names);
         }
@@ -348,9 +340,12 @@ fn add_import_directive(
 
         self.r.indeterminate_imports.push(directive);
         match directive.subclass {
+            // Don't add unresolved underscore imports to modules
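+            // (e.g., a hypothetical `use some_mod::SomeTrait as _;`: the `_` can never be
+            // referred to by name, so no resolution entry is added for it).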
+            SingleImport { target: Ident { name: kw::Underscore, .. }, .. } => {}
             SingleImport { target, type_ns_only, .. } => {
                 self.r.per_ns(|this, ns| if !type_ns_only || ns == TypeNS {
-                    let mut resolution = this.resolution(current_module, target, ns).borrow_mut();
+                    let key = this.new_key(target, ns);
+                    let mut resolution = this.resolution(current_module, key).borrow_mut();
                     resolution.add_single_import(directive);
                 });
             }
@@ -406,7 +401,7 @@ fn build_reduced_graph_for_use_tree(
         };
         match use_tree.kind {
             ast::UseTreeKind::Simple(rename, ..) => {
-                let mut ident = use_tree.ident().gensym_if_underscore();
+                let mut ident = use_tree.ident();
                 let mut module_path = prefix;
                 let mut source = module_path.pop().unwrap();
                 let mut type_ns_only = false;
@@ -584,7 +579,7 @@ fn build_reduced_graph_for_item(&mut self, item: &'b Item) {
         let parent_scope = &self.parent_scope;
         let parent = parent_scope.module;
         let expansion = parent_scope.expansion;
-        let ident = item.ident.gensym_if_underscore();
+        let ident = item.ident;
         let sp = item.span;
         let vis = self.resolve_visibility(&item.vis);
 
@@ -616,6 +611,7 @@ fn build_reduced_graph_for_item(&mut self, item: &'b Item) {
                     let crate_id = self.r.crate_loader.process_extern_crate(
                         item, &self.r.definitions
                     );
+                    self.r.extern_crate_map.insert(item.id, crate_id);
                     self.r.get_module(DefId { krate: crate_id, index: CRATE_DEF_INDEX })
                 };
 
@@ -752,12 +748,12 @@ fn build_reduced_graph_for_item(&mut self, item: &'b Item) {
                 }
 
                 // Record field names for error reporting.
-                let field_names = struct_def.fields().iter().filter_map(|field| {
+                let field_names = struct_def.fields().iter().map(|field| {
                     let field_vis = self.resolve_visibility(&field.vis);
                     if ctor_vis.is_at_least(field_vis, &*self.r) {
                         ctor_vis = field_vis;
                     }
-                    field.ident.map(|ident| ident.name)
+                    respan(field.span, field.ident.map_or(kw::Invalid, |ident| ident.name))
                 }).collect();
                 let item_def_id = self.r.definitions.local_def_id(item.id);
                 self.insert_field_names(item_def_id, field_names);
@@ -779,9 +775,9 @@ fn build_reduced_graph_for_item(&mut self, item: &'b Item) {
                 self.r.define(parent, ident, TypeNS, (res, vis, sp, expansion));
 
                 // Record field names for error reporting.
-                let field_names = vdata.fields().iter().filter_map(|field| {
+                let field_names = vdata.fields().iter().map(|field| {
                     self.resolve_visibility(&field.vis);
-                    field.ident.map(|ident| ident.name)
+                    respan(field.span, field.ident.map_or(kw::Invalid, |ident| ident.name))
                 }).collect();
                 let item_def_id = self.r.definitions.local_def_id(item.id);
                 self.insert_field_names(item_def_id, field_names);
@@ -849,10 +845,6 @@ fn build_reduced_graph_for_block(&mut self, block: &Block) {
     fn build_reduced_graph_for_external_crate_res(&mut self, child: Export<NodeId>) {
         let parent = self.parent_scope.module;
         let Export { ident, res, vis, span } = child;
-        // FIXME: We shouldn't create the gensym here, it should come from metadata,
-        // but metadata cannot encode gensyms currently, so we create it here.
-        // This is only a guess, two equivalent idents may incorrectly get different gensyms here.
-        let ident = ident.gensym_if_underscore();
         let expansion = ExpnId::root(); // FIXME(jseyfried) intercrate hygiene
         // Record primary definitions.
         match res {
@@ -895,7 +887,8 @@ fn build_reduced_graph_for_external_crate_res(&mut self, child: Export<NodeId>)
         // Record some extra data for better diagnostics.
         match res {
             Res::Def(DefKind::Struct, def_id) | Res::Def(DefKind::Union, def_id) => {
-                let field_names = self.r.cstore.struct_field_names_untracked(def_id);
+                let field_names =
+                    self.r.cstore.struct_field_names_untracked(def_id, self.r.session);
                 self.insert_field_names(def_id, field_names);
             }
             Res::Def(DefKind::Method, def_id) => {
@@ -1061,8 +1054,17 @@ fn proc_macro_stub(item: &ast::Item) -> Option<(MacroKind, Ident, Span)> {
         None
     }
 
+    // Mark the given macro as unused unless its name starts with `_`.
+    // Macro uses will remove items from this set, and the remaining
+    // items will be reported as `unused_macros`.
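+    //
+    // For example (hypothetical): an unused `macro_rules! _scratch { () => (); }` is not
+    // reported, while an unused `macro_rules! scratch { () => (); }` would be.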
+    fn insert_unused_macro(&mut self, ident: Ident, node_id: NodeId, span: Span) {
+        if !ident.as_str().starts_with("_") {
+            self.r.unused_macros.insert(node_id, span);
+        }
+    }
+
     fn define_macro(&mut self, item: &ast::Item) -> LegacyScope<'a> {
-        let parent_scope = &self.parent_scope;
+        let parent_scope = self.parent_scope;
         let expansion = parent_scope.expansion;
         let (ext, ident, span, is_legacy) = match &item.kind {
             ItemKind::MacroDef(def) => {
@@ -1102,7 +1104,7 @@ fn define_macro(&mut self, item: &ast::Item) -> LegacyScope<'a> {
                             (res, vis, span, expansion, IsMacroExport));
             } else {
                 self.r.check_reserved_macro_name(ident, res);
-                self.r.unused_macros.insert(item.id, span);
+                self.insert_unused_macro(ident, item.id, span);
             }
             LegacyScope::Binding(self.r.arenas.alloc_legacy_binding(LegacyBinding {
                 parent_legacy_scope: parent_scope.legacy, binding, ident
@@ -1111,7 +1113,7 @@ fn define_macro(&mut self, item: &ast::Item) -> LegacyScope<'a> {
             let module = parent_scope.module;
             let vis = self.resolve_visibility(&item.vis);
             if vis != ty::Visibility::Public {
-                self.r.unused_macros.insert(item.id, span);
+                self.insert_unused_macro(ident, item.id, span);
             }
             self.r.define(module, ident, MacroNS, (res, vis, span, expansion));
             self.parent_scope.legacy
index 7f819486f5bd3af609c66464df97703cc721966b..5647d5b2794afb024aa14e3498c32f41f8092296 100644 (file)
@@ -10,7 +10,7 @@
 use rustc::ty::{self, DefIdTree};
 use rustc::util::nodemap::FxHashSet;
 use syntax::ast::{self, Ident, Path};
-use syntax::ext::base::MacroKind;
+use syntax_expand::base::MacroKind;
 use syntax::feature_gate::BUILTIN_ATTRIBUTES;
 use syntax::source_map::SourceMap;
 use syntax::struct_span_err;
@@ -20,7 +20,7 @@
 
 use crate::resolve_imports::{ImportDirective, ImportDirectiveSubclass, ImportResolver};
 use crate::{path_names_to_string, KNOWN_TOOLS};
-use crate::{BindingError, CrateLint, LegacyScope, Module, ModuleOrUniformRoot};
+use crate::{BindingError, CrateLint, HasGenericParams, LegacyScope, Module, ModuleOrUniformRoot};
 use crate::{PathResult, ParentScope, ResolutionError, Resolver, Scope, ScopeSet, Segment};
 
 type Res = def::Res<ast::NodeId>;
@@ -80,11 +80,11 @@ impl<'a> Resolver<'a> {
         names: &mut Vec<TypoSuggestion>,
         filter_fn: &impl Fn(Res) -> bool,
     ) {
-        for (&(ident, _), resolution) in self.resolutions(module).borrow().iter() {
+        for (key, resolution) in self.resolutions(module).borrow().iter() {
             if let Some(binding) = resolution.borrow().binding {
                 let res = binding.res();
                 if filter_fn(res) {
-                    names.push(TypoSuggestion::from_res(ident.name, res));
+                    names.push(TypoSuggestion::from_res(key.ident.name, res));
                 }
             }
         }
@@ -102,7 +102,7 @@ impl<'a> Resolver<'a> {
         &self, span: Span, resolution_error: ResolutionError<'_>
     ) -> DiagnosticBuilder<'_> {
         match resolution_error {
-            ResolutionError::GenericParamsFromOuterFunction(outer_res) => {
+            ResolutionError::GenericParamsFromOuterFunction(outer_res, has_generic_params) => {
                 let mut err = struct_span_err!(self.session,
                     span,
                     E0401,
@@ -148,22 +148,24 @@ impl<'a> Resolver<'a> {
                     }
                 }
 
-                // Try to retrieve the span of the function signature and generate a new message
-                // with a local type or const parameter.
-                let sugg_msg = &format!("try using a local generic parameter instead");
-                if let Some((sugg_span, new_snippet)) = cm.generate_local_type_param_snippet(span) {
-                    // Suggest the modification to the user
-                    err.span_suggestion(
-                        sugg_span,
-                        sugg_msg,
-                        new_snippet,
-                        Applicability::MachineApplicable,
-                    );
-                } else if let Some(sp) = cm.generate_fn_name_span(span) {
-                    err.span_label(sp,
-                        format!("try adding a local generic parameter in this method instead"));
-                } else {
-                    err.help(&format!("try using a local generic parameter instead"));
+                if has_generic_params == HasGenericParams::Yes {
+                    // Try to retrieve the span of the function signature and generate a new
+                    // message with a local type or const parameter.
+                    let sugg_msg = &format!("try using a local generic parameter instead");
+                    if let Some((sugg_span, snippet)) = cm.generate_local_type_param_snippet(span) {
+                        // Suggest the modification to the user
+                        err.span_suggestion(
+                            sugg_span,
+                            sugg_msg,
+                            snippet,
+                            Applicability::MachineApplicable,
+                        );
+                    } else if let Some(sp) = cm.generate_fn_name_span(span) {
+                        err.span_label(sp,
+                            format!("try adding a local generic parameter in this method instead"));
+                    } else {
+                        err.help(&format!("try using a local generic parameter instead"));
+                    }
                 }
 
                 err
@@ -527,7 +529,7 @@ fn lookup_import_candidates_from_module<FilterFn>(&mut self,
                         in_module_is_extern)) = worklist.pop() {
             // We have to visit module children in deterministic order to avoid
             // instabilities in reported imports (#43552).
-            in_module.for_each_child_stable(self, |this, ident, ns, name_binding| {
+            in_module.for_each_child(self, |this, ident, ns, name_binding| {
                 // avoid imports entirely
                 if name_binding.is_import() && !name_binding.is_extern_crate() { return; }
                 // avoid non-importable candidates as well
@@ -847,7 +849,7 @@ pub(crate) fn check_for_module_export_macro(
         }
 
         let resolutions = self.r.resolutions(crate_module).borrow();
-        let resolution = resolutions.get(&(ident, MacroNS))?;
+        let resolution = resolutions.get(&self.r.new_key(ident, MacroNS))?;
         let binding = resolution.borrow().binding()?;
         if let Res::Def(DefKind::Macro(MacroKind::Bang), _) = binding.res() {
             let module_name = crate_module.kind.name().unwrap();
index 47346774180fe4ae091533b6951dd2d2dc0075f0..cd6189c681da14db13693d9eb178bb0dcd6da050 100644 (file)
@@ -1013,7 +1013,8 @@ fn h1() -> i32 {
 "##,
 
 E0424: r##"
-The `self` keyword was used in a static method.
+The `self` keyword was used inside of an associated function without a "`self`
+receiver" parameter.
 
 Erroneous code example:
 
@@ -1021,25 +1022,33 @@ fn h1() -> i32 {
 struct Foo;
 
 impl Foo {
-    fn bar(self) {}
+    // `bar` is a method, because it has a receiver parameter.
+    fn bar(&self) {}
 
+    // `foo` is not a method, because it has no receiver parameter.
     fn foo() {
-        self.bar(); // error: `self` is not available in a static method.
+        self.bar(); // error: `self` value is a keyword only available in
+                    //        methods with a `self` parameter
     }
 }
 ```
 
-Please check if the method's argument list should have contained `self`,
-`&self`, or `&mut self` (in case you didn't want to create a static
-method), and add it if so. Example:
+The `self` keyword can only be used inside methods, which are associated
+functions (functions defined inside of a `trait` or `impl` block) that have a
+`self` receiver as their first parameter, like `self`, `&self`, `&mut self` or
+`self: &mut Pin<Self>` (this last one is an example of an ["arbitrary `self`
+type"](https://github.com/rust-lang/rust/issues/44874)).
+
+Check if the associated function's parameter list should have contained a `self`
+receiver for it to be a method, and add it if so. Example:
 
 ```
 struct Foo;
 
 impl Foo {
-    fn bar(self) {}
+    fn bar(&self) {}
 
-    fn foo(self) {
+    fn foo(self) { // `foo` is now a method.
         self.bar(); // ok!
     }
 }
@@ -1611,6 +1620,183 @@ fn print_on_failure(state: &State) {
 ```
 "##,
 
+E0573: r##"
+Something other than a type has been used when one was expected.
+
+Erroneous code examples:
+
+```compile_fail,E0573
+enum Dragon {
+    Born,
+}
+
+fn oblivion() -> Dragon::Born { // error!
+    Dragon::Born
+}
+
+const HOBBIT: u32 = 2;
+impl HOBBIT {} // error!
+
+enum Wizard {
+    Gandalf,
+    Saruman,
+}
+
+trait Isengard {
+    fn wizard(_: Wizard::Saruman); // error!
+}
+```
+
+In all these errors, a type was expected. For example, in the first error, if
+we want to return the `Born` variant from the `Dragon` enum, we must set the
+function to return the enum and not its variant:
+
+```
+enum Dragon {
+    Born,
+}
+
+fn oblivion() -> Dragon { // ok!
+    Dragon::Born
+}
+```
+
+In the second error, you can't implement something on an item, only on types.
+We would need to create a new type if we wanted to do something similar:
+
+```
+struct Hobbit(u32); // we create a new type
+
+const HOBBIT: Hobbit = Hobbit(2);
+impl Hobbit {} // ok!
+```
+
+In the third case, we tried to accept only one variant of the `Wizard` enum,
+which is not possible. To make this work, we need to use pattern matching
+over the `Wizard` enum:
+
+```
+enum Wizard {
+    Gandalf,
+    Saruman,
+}
+
+trait Isengard {
+    fn wizard(w: Wizard) { // ok!
+        match w {
+            Wizard::Saruman => {
+                // do something
+            }
+            _ => {} // ignore everything else
+        }
+    }
+}
+```
+"##,
+
+E0574: r##"
+Something other than a struct, variant or union has been used when one was
+expected.
+
+Erroneous code example:
+
+```compile_fail,E0574
+mod Mordor {}
+
+let sauron = Mordor { x: () }; // error!
+
+enum Jak {
+    Daxter { i: isize },
+}
+
+let eco = Jak::Daxter { i: 1 };
+match eco {
+    Jak { i } => {} // error!
+}
+```
+
+In both errors, a struct, variant or union was expected. For example, in the
+first error, we tried to instantiate the `Mordor` module, which is impossible.
+If you want to instantiate a type inside a module, you can do it as follows:
+
+```
+mod Mordor {
+    pub struct TheRing {
+        pub x: usize,
+    }
+}
+
+let sauron = Mordor::TheRing { x: 1 }; // ok!
+```
+
+In the second error, we tried to bind the `Jak` enum directly, which is not
+possible: you can only bind one of its variants. To do so:
+
+```
+enum Jak {
+    Daxter { i: isize },
+}
+
+let eco = Jak::Daxter { i: 1 };
+match eco {
+    Jak::Daxter { i } => {} // ok!
+}
+```
+"##,
+
+E0575: r##"
+Something other than a type or an associated type was given.
+
+Erroneous code example:
+
+```compile_fail,E0575
+enum Rick { Morty }
+
+let _: <u8 as Rick>::Morty; // error!
+
+trait Age {
+    type Empire;
+    fn Mythology() {}
+}
+
+impl Age for u8 {
+    type Empire = u16;
+}
+
+let _: <u8 as Age>::Mythology; // error!
+```
+
+In both cases, we're declaring a variable (called `_`) and we're giving it a
+type. However, `<u8 as Rick>::Morty` and `<u8 as Age>::Mythology` aren't types,
+so the compiler throws an error.
+
+`<u8 as Rick>::Morty` is an enum variant, and a variant cannot be used as a
+type; you have to use the enum directly:
+
+```
+enum Rick { Morty }
+
+let _: Rick; // ok!
+```
+
+`<u8 as Age>::Mythology` is a trait method, which is definitely not a type.
+However, the `Age` trait provides an associated type `Empire` which can be
+used as a type:
+
+```
+trait Age {
+    type Empire;
+    fn Mythology() {}
+}
+
+impl Age for u8 {
+    type Empire = u16;
+}
+
+let _: <u8 as Age>::Empire; // ok!
+```
+"##,
+
 E0603: r##"
 A private item was used outside its scope.
 
@@ -1738,9 +1924,6 @@ struct Foo<X = Box<Self>> {
 //  E0427, merged into 530
 //  E0467, removed
 //  E0470, removed
-    E0573,
-    E0574,
-    E0575,
     E0576,
     E0577,
     E0578,
index 93c96b9f75ba20c6de1d283dd063c20d732b1d26..73a282b1a0ec1e9e159133b675d85d3f2bdbcb66 100644 (file)
@@ -5,7 +5,6 @@
 //! If you wonder why there's no `early.rs`, that's because it's split into three files -
 //! `build_reduced_graph.rs`, `macros.rs` and `resolve_imports.rs`.
 
-use GenericParameters::*;
 use RibKind::*;
 
 use crate::{path_names_to_string, BindingError, CrateLint, LexicalScopeBinding};
@@ -46,16 +45,6 @@ struct BindingInfo {
     binding_mode: BindingMode,
 }
 
-#[derive(Copy, Clone)]
-enum GenericParameters<'a, 'b> {
-    NoGenericParams,
-    HasGenericParams(// Type parameters.
-                      &'b Generics,
-
-                      // The kind of the rib used for type parameters.
-                      RibKind<'a>),
-}
-
 #[derive(Copy, Clone, PartialEq, Eq, Debug)]
 enum PatternSource {
     Match,
@@ -85,6 +74,10 @@ enum PatBoundCtx {
     Or,
 }
 
+/// Does the item (from the item rib scope) allow generic parameters?
+#[derive(Copy, Clone, Debug, Eq, PartialEq)]
+crate enum HasGenericParams { Yes, No }
+
 /// The rib kind restricts certain accesses,
 /// e.g. to a `Res::Local` of an outer item.
 #[derive(Copy, Clone, Debug)]
@@ -103,7 +96,7 @@ enum PatBoundCtx {
     FnItemRibKind,
 
     /// We passed through an item scope. Disallow upvars.
-    ItemRibKind,
+    ItemRibKind(HasGenericParams),
 
     /// We're in a constant item. Can't refer to dynamic stuff.
     ConstantItemRibKind,
@@ -134,7 +127,7 @@ impl RibKind<'_> {
             | ModuleRibKind(_)
             | MacroDefinition(_) => false,
             AssocItemRibKind
-            | ItemRibKind
+            | ItemRibKind(_)
             | ForwardTyParamBanRibKind
             | TyParamAsConstParamTy => true,
         }
@@ -352,6 +345,9 @@ struct LateResolutionVisitor<'a, 'b> {
     /// The current self item if inside an ADT (used for better errors).
     current_self_item: Option<NodeId>,
 
+    /// The current enclosing function (used for better errors).
+    current_function: Option<Span>,
+
     /// A list of labels as of yet unused. Labels will be removed from this map when
     /// they are used (in a `break` or `continue` statement)
     unused_labels: FxHashMap<NodeId, Span>,
@@ -406,19 +402,24 @@ fn visit_poly_trait_ref(&mut self,
         visit::walk_poly_trait_ref(self, tref, m);
     }
     fn visit_foreign_item(&mut self, foreign_item: &'tcx ForeignItem) {
-        let generic_params = match foreign_item.kind {
+        match foreign_item.kind {
             ForeignItemKind::Fn(_, ref generics) => {
-                HasGenericParams(generics, ItemRibKind)
+                self.with_generic_param_rib(generics, ItemRibKind(HasGenericParams::Yes), |this| {
+                    visit::walk_foreign_item(this, foreign_item);
+                });
             }
-            ForeignItemKind::Static(..) => NoGenericParams,
-            ForeignItemKind::Ty => NoGenericParams,
-            ForeignItemKind::Macro(..) => NoGenericParams,
-        };
-        self.with_generic_param_rib(generic_params, |this| {
-            visit::walk_foreign_item(this, foreign_item);
-        });
+            ForeignItemKind::Static(..) => {
+                self.with_item_rib(HasGenericParams::No, |this| {
+                    visit::walk_foreign_item(this, foreign_item);
+                });
+            }
+            ForeignItemKind::Ty | ForeignItemKind::Macro(..) => {
+                visit::walk_foreign_item(self, foreign_item);
+            }
+        }
     }
-    fn visit_fn(&mut self, fn_kind: FnKind<'tcx>, declaration: &'tcx FnDecl, _: Span, _: NodeId) {
+    fn visit_fn(&mut self, fn_kind: FnKind<'tcx>, declaration: &'tcx FnDecl, sp: Span, _: NodeId) {
+        let previous_value = replace(&mut self.current_function, Some(sp));
         debug!("(resolving function) entering function");
         let rib_kind = match fn_kind {
             FnKind::ItemFn(..) => FnItemRibKind,
@@ -444,6 +445,7 @@ fn visit_fn(&mut self, fn_kind: FnKind<'tcx>, declaration: &'tcx FnDecl, _: Span
                 debug!("(resolving function) leaving function");
             })
         });
+        self.current_function = previous_value;
     }
 
     fn visit_generics(&mut self, generics: &'tcx Generics) {
@@ -549,6 +551,7 @@ fn new(resolver: &'b mut Resolver<'a>) -> LateResolutionVisitor<'a, 'b> {
             current_trait_assoc_types: Vec::new(),
             current_self_type: None,
             current_self_item: None,
+            current_function: None,
             unused_labels: Default::default(),
             current_type_ascription: Vec::new(),
         }
@@ -660,7 +663,7 @@ fn search_label<P, R>(&self, mut ident: Ident, pred: P) -> Option<R>
     fn resolve_adt(&mut self, item: &Item, generics: &Generics) {
         debug!("resolve_adt");
         self.with_current_self_item(item, |this| {
-            this.with_generic_param_rib(HasGenericParams(generics, ItemRibKind), |this| {
+            this.with_generic_param_rib(generics, ItemRibKind(HasGenericParams::Yes), |this| {
                 let item_def_id = this.r.definitions.local_def_id(item.id);
                 this.with_self_rib(Res::SelfTy(None, Some(item_def_id)), |this| {
                     visit::walk_item(this, item);
@@ -719,10 +722,8 @@ fn resolve_item(&mut self, item: &Item) {
             ItemKind::TyAlias(_, ref generics) |
             ItemKind::OpaqueTy(_, ref generics) |
             ItemKind::Fn(_, _, ref generics, _) => {
-                self.with_generic_param_rib(
-                    HasGenericParams(generics, ItemRibKind),
-                    |this| visit::walk_item(this, item)
-                );
+                self.with_generic_param_rib(generics, ItemRibKind(HasGenericParams::Yes),
+                                            |this| visit::walk_item(this, item));
             }
 
             ItemKind::Enum(_, ref generics) |
@@ -740,7 +741,7 @@ fn resolve_item(&mut self, item: &Item) {
 
             ItemKind::Trait(.., ref generics, ref bounds, ref trait_items) => {
                 // Create a new rib for the trait-wide type parameters.
-                self.with_generic_param_rib(HasGenericParams(generics, ItemRibKind), |this| {
+                self.with_generic_param_rib(generics, ItemRibKind(HasGenericParams::Yes), |this| {
                     let local_def_id = this.r.definitions.local_def_id(item.id);
                     this.with_self_rib(Res::SelfTy(Some(local_def_id), None), |this| {
                         this.visit_generics(generics);
@@ -748,35 +749,32 @@ fn resolve_item(&mut self, item: &Item) {
 
                         for trait_item in trait_items {
                             this.with_trait_items(trait_items, |this| {
-                                let generic_params = HasGenericParams(
-                                    &trait_item.generics,
-                                    AssocItemRibKind,
-                                );
-                                this.with_generic_param_rib(generic_params, |this| {
-                                    match trait_item.kind {
-                                        TraitItemKind::Const(ref ty, ref default) => {
-                                            this.visit_ty(ty);
-
-                                            // Only impose the restrictions of
-                                            // ConstRibKind for an actual constant
-                                            // expression in a provided default.
-                                            if let Some(ref expr) = *default{
-                                                this.with_constant_rib(|this| {
-                                                    this.visit_expr(expr);
-                                                });
+                                this.with_generic_param_rib(&trait_item.generics, AssocItemRibKind,
+                                    |this| {
+                                        match trait_item.kind {
+                                            TraitItemKind::Const(ref ty, ref default) => {
+                                                this.visit_ty(ty);
+
+                                                // Only impose the restrictions of
+                                                // ConstRibKind for an actual constant
+                                                // expression in a provided default.
+                                                if let Some(ref expr) = *default{
+                                                    this.with_constant_rib(|this| {
+                                                        this.visit_expr(expr);
+                                                    });
+                                                }
                                             }
-                                        }
-                                        TraitItemKind::Method(_, _) => {
-                                            visit::walk_trait_item(this, trait_item)
-                                        }
-                                        TraitItemKind::Type(..) => {
-                                            visit::walk_trait_item(this, trait_item)
-                                        }
-                                        TraitItemKind::Macro(_) => {
-                                            panic!("unexpanded macro in resolve!")
-                                        }
-                                    };
-                                });
+                                            TraitItemKind::Method(_, _) => {
+                                                visit::walk_trait_item(this, trait_item)
+                                            }
+                                            TraitItemKind::Type(..) => {
+                                                visit::walk_trait_item(this, trait_item)
+                                            }
+                                            TraitItemKind::Macro(_) => {
+                                                panic!("unexpanded macro in resolve!")
+                                            }
+                                        };
+                                    });
                             });
                         }
                     });
@@ -785,7 +783,7 @@ fn resolve_item(&mut self, item: &Item) {
 
             ItemKind::TraitAlias(ref generics, ref bounds) => {
                 // Create a new rib for the trait-wide type parameters.
-                self.with_generic_param_rib(HasGenericParams(generics, ItemRibKind), |this| {
+                self.with_generic_param_rib(generics, ItemRibKind(HasGenericParams::Yes), |this| {
                     let local_def_id = this.r.definitions.local_def_id(item.id);
                     this.with_self_rib(Res::SelfTy(Some(local_def_id), None), |this| {
                         this.visit_generics(generics);
@@ -803,7 +801,7 @@ fn resolve_item(&mut self, item: &Item) {
             ItemKind::Static(ref ty, _, ref expr) |
             ItemKind::Const(ref ty, ref expr) => {
                 debug!("resolve_item ItemKind::Const");
-                self.with_item_rib(|this| {
+                self.with_item_rib(HasGenericParams::No, |this| {
                     this.visit_ty(ty);
                     this.with_constant_rib(|this| {
                         this.visit_expr(expr);
@@ -824,91 +822,75 @@ fn resolve_item(&mut self, item: &Item) {
         }
     }
 
-    fn with_generic_param_rib<'c, F>(&'c mut self, generic_params: GenericParameters<'a, 'c>, f: F)
+    fn with_generic_param_rib<'c, F>(&'c mut self, generics: &'c Generics, kind: RibKind<'a>, f: F)
         where F: FnOnce(&mut Self)
     {
         debug!("with_generic_param_rib");
-        match generic_params {
-            HasGenericParams(generics, rib_kind) => {
-                let mut function_type_rib = Rib::new(rib_kind);
-                let mut function_value_rib = Rib::new(rib_kind);
-                let mut seen_bindings = FxHashMap::default();
-                // We also can't shadow bindings from the parent item
-                if let AssocItemRibKind = rib_kind {
-                    let mut add_bindings_for_ns = |ns| {
-                        let parent_rib = self.ribs[ns].iter()
-                            .rfind(|rib| if let ItemRibKind = rib.kind { true } else { false })
-                            .expect("associated item outside of an item");
-                        seen_bindings.extend(
-                            parent_rib.bindings.iter().map(|(ident, _)| (*ident, ident.span)),
-                        );
-                    };
-                    add_bindings_for_ns(ValueNS);
-                    add_bindings_for_ns(TypeNS);
-                }
-                for param in &generics.params {
-                    match param.kind {
-                        GenericParamKind::Lifetime { .. } => {}
-                        GenericParamKind::Type { .. } => {
-                            let ident = param.ident.modern();
-                            debug!("with_generic_param_rib: {}", param.id);
-
-                            if seen_bindings.contains_key(&ident) {
-                                let span = seen_bindings.get(&ident).unwrap();
-                                let err = ResolutionError::NameAlreadyUsedInParameterList(
-                                    ident.name,
-                                    *span,
-                                );
-                                self.r.report_error(param.ident.span, err);
-                            }
-                            seen_bindings.entry(ident).or_insert(param.ident.span);
-
-                            // Plain insert (no renaming).
-                            let res = Res::Def(
-                                DefKind::TyParam,
-                                self.r.definitions.local_def_id(param.id),
-                            );
-                            function_type_rib.bindings.insert(ident, res);
-                            self.r.record_partial_res(param.id, PartialRes::new(res));
-                        }
-                        GenericParamKind::Const { .. } => {
-                            let ident = param.ident.modern();
-                            debug!("with_generic_param_rib: {}", param.id);
-
-                            if seen_bindings.contains_key(&ident) {
-                                let span = seen_bindings.get(&ident).unwrap();
-                                let err = ResolutionError::NameAlreadyUsedInParameterList(
-                                    ident.name,
-                                    *span,
-                                );
-                                self.r.report_error(param.ident.span, err);
-                            }
-                            seen_bindings.entry(ident).or_insert(param.ident.span);
-
-                            let res = Res::Def(
-                                DefKind::ConstParam,
-                                self.r.definitions.local_def_id(param.id),
-                            );
-                            function_value_rib.bindings.insert(ident, res);
-                            self.r.record_partial_res(param.id, PartialRes::new(res));
-                        }
-                    }
-                }
-                self.ribs[ValueNS].push(function_value_rib);
-                self.ribs[TypeNS].push(function_type_rib);
+        let mut function_type_rib = Rib::new(kind);
+        let mut function_value_rib = Rib::new(kind);
+        let mut seen_bindings = FxHashMap::default();
+
+        // We also can't shadow bindings from the parent item
+        if let AssocItemRibKind = kind {
+            let mut add_bindings_for_ns = |ns| {
+                let parent_rib = self.ribs[ns].iter()
+                    .rfind(|r| if let ItemRibKind(_) = r.kind { true } else { false })
+                    .expect("associated item outside of an item");
+                seen_bindings.extend(
+                    parent_rib.bindings.iter().map(|(ident, _)| (*ident, ident.span)),
+                );
+            };
+            add_bindings_for_ns(ValueNS);
+            add_bindings_for_ns(TypeNS);
+        }
+
+        for param in &generics.params {
+            if let GenericParamKind::Lifetime { .. } = param.kind {
+                continue;
             }
 
-            NoGenericParams => {
-                // Nothing to do.
+            let def_kind = match param.kind {
+                GenericParamKind::Type { .. } => DefKind::TyParam,
+                GenericParamKind::Const { .. } => DefKind::ConstParam,
+                _ => unreachable!(),
+            };
+
+            let ident = param.ident.modern();
+            debug!("with_generic_param_rib: {}", param.id);
+
+            if seen_bindings.contains_key(&ident) {
+                let span = seen_bindings.get(&ident).unwrap();
+                let err = ResolutionError::NameAlreadyUsedInParameterList(
+                    ident.name,
+                    *span,
+                );
+                self.r.report_error(param.ident.span, err);
+            }
+            seen_bindings.entry(ident).or_insert(param.ident.span);
+
+            // Plain insert (no renaming).
+            let res = Res::Def(def_kind, self.r.definitions.local_def_id(param.id));
+
+            match param.kind {
+                GenericParamKind::Type { .. } => {
+                    function_type_rib.bindings.insert(ident, res);
+                    self.r.record_partial_res(param.id, PartialRes::new(res));
+                }
+                GenericParamKind::Const { .. } => {
+                    function_value_rib.bindings.insert(ident, res);
+                    self.r.record_partial_res(param.id, PartialRes::new(res));
+                }
+                _ => unreachable!(),
             }
         }
 
+        self.ribs[ValueNS].push(function_value_rib);
+        self.ribs[TypeNS].push(function_type_rib);
+
         f(self);
 
-        if let HasGenericParams(..) = generic_params {
-            self.ribs[TypeNS].pop();
-            self.ribs[ValueNS].pop();
-        }
+        self.ribs[TypeNS].pop();
+        self.ribs[ValueNS].pop();
     }
 
     fn with_label_rib(&mut self, kind: RibKind<'a>, f: impl FnOnce(&mut Self)) {
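Note on the consolidated duplicate-name check above: `NameAlreadyUsedInParameterList` corresponds to E0403. A minimal sketch (not part of the patch) of the user code it rejects:

    // Declaring two generic parameters with the same name trips the
    // `seen_bindings` check above and is reported as E0403.
    fn duplicate<T, T>(value: T) -> T { value }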
@@ -917,8 +899,9 @@ fn with_label_rib(&mut self, kind: RibKind<'a>, f: impl FnOnce(&mut Self)) {
         self.label_ribs.pop();
     }
 
-    fn with_item_rib(&mut self, f: impl FnOnce(&mut Self)) {
-        self.with_rib(ValueNS, ItemRibKind, |this| this.with_rib(TypeNS, ItemRibKind, f))
+    fn with_item_rib(&mut self, has_generic_params: HasGenericParams, f: impl FnOnce(&mut Self)) {
+        let kind = ItemRibKind(has_generic_params);
+        self.with_rib(ValueNS, kind, |this| this.with_rib(TypeNS, kind, f))
     }
 
     fn with_constant_rib(&mut self, f: impl FnOnce(&mut Self)) {
@@ -1023,7 +1006,7 @@ fn resolve_implementation(&mut self,
                               impl_items: &[ImplItem]) {
         debug!("resolve_implementation");
         // If applicable, create a rib for the type parameters.
-        self.with_generic_param_rib(HasGenericParams(generics, ItemRibKind), |this| {
+        self.with_generic_param_rib(generics, ItemRibKind(HasGenericParams::Yes), |this| {
             // Dummy self type for better errors if `Self` is used in the trait path.
             this.with_self_rib(Res::SelfTy(None, None), |this| {
                 // Resolve the trait reference, if necessary.
@@ -1044,9 +1027,9 @@ fn resolve_implementation(&mut self,
                                 debug!("resolve_implementation with_self_rib_ns(ValueNS, ...)");
                                 for impl_item in impl_items {
                                     // We also need a new scope for the impl item type parameters.
-                                    let generic_params = HasGenericParams(&impl_item.generics,
-                                                                          AssocItemRibKind);
-                                    this.with_generic_param_rib(generic_params, |this| {
+                                    this.with_generic_param_rib(&impl_item.generics,
+                                                                AssocItemRibKind,
+                                                                |this| {
                                         use crate::ResolutionError::*;
                                         match impl_item.kind {
                                             ImplItemKind::Const(..) => {
index d3bf82b66ad1cf163a9310ecf28db381196ddd01..2721df4c687636d7b10924e0acae6e328a6664db 100644 (file)
@@ -13,7 +13,7 @@
 use rustc::session::config::nightly_options;
 use rustc::util::nodemap::FxHashSet;
 use syntax::ast::{self, Expr, ExprKind, Ident, NodeId, Path, Ty, TyKind};
-use syntax::ext::base::MacroKind;
+use syntax_expand::base::MacroKind;
 use syntax::symbol::kw;
 use syntax::util::lev_distance::find_best_match_for_name;
 use syntax_pos::Span;
@@ -115,8 +115,10 @@ pub(crate) fn smart_resolve_report_errors(
         if is_self_type(path, ns) {
             syntax::diagnostic_used!(E0411);
             err.code(DiagnosticId::Error("E0411".into()));
-            err.span_label(span, format!("`Self` is only available in impls, traits, \
-                                          and type definitions"));
+            err.span_label(
+                span,
+                format!("`Self` is only available in impls, traits, and type definitions"),
+            );
             return (err, Vec::new());
         }
         if is_self_value(path, ns) {
@@ -125,17 +127,16 @@ pub(crate) fn smart_resolve_report_errors(
             syntax::diagnostic_used!(E0424);
             err.code(DiagnosticId::Error("E0424".into()));
             err.span_label(span, match source {
-                PathSource::Pat => {
-                    format!("`self` value is a keyword \
-                             and may not be bound to \
-                             variables or shadowed")
-                }
-                _ => {
-                    format!("`self` value is a keyword \
-                             only available in methods \
-                             with `self` parameter")
-                }
+                PathSource::Pat => format!(
+                    "`self` value is a keyword and may not be bound to variables or shadowed",
+                ),
+                _ => format!(
+                    "`self` value is a keyword only available in methods with a `self` parameter",
+                ),
             });
+            if let Some(span) = &self.current_function {
+                err.span_label(*span, "this function doesn't have a `self` parameter");
+            }
             return (err, Vec::new());
         }
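For reference, the new `current_function` label added above targets code along these lines (a minimal sketch; the exact rendering of the diagnostic may differ):

    struct Counter { n: u32 }

    impl Counter {
        // An associated function without a `self` parameter; using `self` in
        // its body triggers E0424, and the added label points at this function.
        fn reset() {
            self.n = 0;
        }
    }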
 
@@ -497,7 +498,8 @@ fn extract_node_id(t: &Ty) -> Option<NodeId> {
                         Res::Def(DefKind::Struct, did) | Res::Def(DefKind::Union, did)
                                 if resolution.unresolved_segments() == 0 => {
                             if let Some(field_names) = self.r.field_names.get(&did) {
-                                if field_names.iter().any(|&field_name| ident.name == field_name) {
+                                if field_names.iter()
+                                        .any(|&field_name| ident.name == field_name.node) {
                                     return Some(AssocSuggestion::Field);
                                 }
                             }
@@ -728,7 +730,7 @@ fn find_module(&mut self, def_id: DefId) -> Option<(Module<'a>, ImportSuggestion
             // abort if the module is already found
             if result.is_some() { break; }
 
-            in_module.for_each_child_stable(self.r, |_, ident, _, name_binding| {
+            in_module.for_each_child(self.r, |_, ident, _, name_binding| {
                 // abort if the module is already found or if name_binding is private external
                 if result.is_some() || !name_binding.vis.is_visible_locally() {
                     return
@@ -760,7 +762,7 @@ fn find_module(&mut self, def_id: DefId) -> Option<(Module<'a>, ImportSuggestion
     fn collect_enum_variants(&mut self, def_id: DefId) -> Option<Vec<Path>> {
         self.find_module(def_id).map(|(enum_module, enum_import_suggestion)| {
             let mut variants = Vec::new();
-            enum_module.for_each_child_stable(self.r, |_, ident, _, name_binding| {
+            enum_module.for_each_child(self.r, |_, ident, _, name_binding| {
                 if let Res::Def(DefKind::Variant, _) = name_binding.res() {
                     let mut segms = enum_import_suggestion.path.segments.clone();
                     segms.push(ast::PathSegment::from_ident(ident));
index e7292b52ab3e88c512dbafab43797192f40a5cd8..17d8f0f211a9288a061ad8f02056eac9869aaf2b 100644 (file)
@@ -9,10 +9,8 @@
 
 #![doc(html_root_url = "https://doc.rust-lang.org/nightly/")]
 
-#![feature(inner_deref)]
 #![feature(crate_visibility_modifier)]
 #![feature(label_break_value)]
-#![feature(mem_take)]
 #![feature(nll)]
 
 #![recursion_limit="256"]
 use rustc::lint;
 use rustc::hir::def::{self, DefKind, PartialRes, CtorKind, CtorOf, NonMacroAttrKind, ExportMap};
 use rustc::hir::def::Namespace::*;
-use rustc::hir::def_id::{CRATE_DEF_INDEX, LOCAL_CRATE, DefId};
+use rustc::hir::def_id::{CRATE_DEF_INDEX, LOCAL_CRATE, CrateNum, DefId};
 use rustc::hir::{TraitMap, GlobMap};
-use rustc::ty;
+use rustc::ty::{self, DefIdTree};
 use rustc::util::nodemap::{NodeMap, NodeSet, FxHashMap, FxHashSet, DefIdMap};
 use rustc::span_bug;
 
 use rustc_metadata::creader::CrateLoader;
 use rustc_metadata::cstore::CStore;
 
-use syntax::ext::hygiene::{ExpnId, Transparency, SyntaxContext};
+use syntax_expand::hygiene::{ExpnId, Transparency, SyntaxContext};
+use syntax_expand::base::{SyntaxExtension, MacroKind, SpecialDerives};
+use syntax::{struct_span_err, unwrap_or};
+use syntax::attr;
 use syntax::ast::{self, Name, NodeId, Ident, FloatTy, IntTy, UintTy};
-use syntax::ext::base::{SyntaxExtension, MacroKind, SpecialDerives};
+use syntax::ast::{ItemKind, Path, CRATE_NODE_ID, Crate};
+use syntax::print::pprust;
 use syntax::symbol::{kw, sym};
-
+use syntax::source_map::Spanned;
 use syntax::visit::{self, Visitor};
-use syntax::attr;
-use syntax::ast::{CRATE_NODE_ID, Crate};
-use syntax::ast::{ItemKind, Path};
-use syntax::{struct_span_err, unwrap_or};
 
 use syntax_pos::{Span, DUMMY_SP};
 use errors::{Applicability, DiagnosticBuilder};
 use std::collections::BTreeSet;
 use rustc_data_structures::ptr_key::PtrKey;
 use rustc_data_structures::sync::Lrc;
+use rustc_data_structures::fx::FxIndexMap;
 
 use diagnostics::{Suggestion, ImportSuggestion};
 use diagnostics::{find_span_of_binding_until_next_binding, extend_span_to_previous_binding};
-use late::{PathSource, Rib, RibKind::*};
+use late::{HasGenericParams, PathSource, Rib, RibKind::*};
 use resolve_imports::{ImportDirective, ImportDirectiveSubclass, NameResolution, ImportResolver};
 use macros::{LegacyBinding, LegacyScope};
 
@@ -178,7 +177,7 @@ fn cmp(&self, other: &BindingError) -> cmp::Ordering {
 
 enum ResolutionError<'a> {
     /// Error E0401: can't use type or const parameters from outer function.
-    GenericParamsFromOuterFunction(Res),
+    GenericParamsFromOuterFunction(Res, HasGenericParams),
     /// Error E0403: the name is already used for a type or const parameter in this generic
     /// parameter list.
     NameAlreadyUsedInParameterList(Name, Span),
@@ -431,7 +430,22 @@ pub fn name(&self) -> Option<Name> {
     }
 }
 
-type Resolutions<'a> = RefCell<FxHashMap<(Ident, Namespace), &'a RefCell<NameResolution<'a>>>>;
+/// A key that identifies a binding in a given `Module`.
+///
+/// Multiple bindings in the same module can have the same key (in a valid
+/// program) if all but one of them come from glob imports.
+#[derive(Copy, Clone, PartialEq, Eq, Hash)]
+struct BindingKey {
+    /// The identifier for the binding, always the `modern` version of the
+    /// identifier.
+    ident: Ident,
+    ns: Namespace,
+    /// 0 if ident is not `_`, otherwise a value that's unique to the specific
+    /// `_` in the expanded AST that introduced this binding.
+    disambiguator: u32,
+}
+
+type Resolutions<'a> = RefCell<FxIndexMap<BindingKey, &'a RefCell<NameResolution<'a>>>>;
 
 /// One node in the tree of modules.
 pub struct ModuleData<'a> {
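The `disambiguator` field exists because several `_` bindings may legally coexist in one module; a minimal user-level sketch of the situation it has to keep apart:

    // Both imports bind the identifier `_` in the type namespace. With a plain
    // `(Ident, Namespace)` key they would collide; the per-`_` disambiguator in
    // `BindingKey` keeps them distinct.
    use std::fmt::Debug as _;
    use std::fmt::Display as _;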
@@ -491,19 +505,8 @@ fn new(parent: Option<Module<'a>>,
     fn for_each_child<R, F>(&'a self, resolver: &mut R, mut f: F)
         where R: AsMut<Resolver<'a>>, F: FnMut(&mut R, Ident, Namespace, &'a NameBinding<'a>)
     {
-        for (&(ident, ns), name_resolution) in resolver.as_mut().resolutions(self).borrow().iter() {
-            name_resolution.borrow().binding.map(|binding| f(resolver, ident, ns, binding));
-        }
-    }
-
-    fn for_each_child_stable<R, F>(&'a self, resolver: &mut R, mut f: F)
-        where R: AsMut<Resolver<'a>>, F: FnMut(&mut R, Ident, Namespace, &'a NameBinding<'a>)
-    {
-        let resolutions = resolver.as_mut().resolutions(self).borrow();
-        let mut resolutions = resolutions.iter().collect::<Vec<_>>();
-        resolutions.sort_by_cached_key(|&(&(ident, ns), _)| (ident.as_str(), ns));
-        for &(&(ident, ns), &resolution) in resolutions.iter() {
-            resolution.borrow().binding.map(|binding| f(resolver, ident, ns, binding));
+        for (key, name_resolution) in resolver.as_mut().resolutions(self).borrow().iter() {
+            name_resolution.borrow().binding.map(|binding| f(resolver, key.ident, key.ns, binding));
         }
     }
 
@@ -840,7 +843,7 @@ pub struct Resolver<'a> {
 
     /// Names of fields of an item `DefId` accessible with dot syntax.
     /// Used for hints during error reporting.
-    field_names: FxHashMap<DefId, Vec<Name>>,
+    field_names: FxHashMap<DefId, Vec<Spanned<Name>>>,
 
     /// All imports known to succeed or fail.
     determined_imports: Vec<&'a ImportDirective<'a>>,
@@ -865,6 +868,8 @@ pub struct Resolver<'a> {
     /// Resolutions for labels (node IDs of their corresponding blocks or loops).
     label_res_map: NodeMap<NodeId>,
 
+    /// `CrateNum` resolutions of `extern crate` items.
+    pub extern_crate_map: NodeMap<CrateNum>,
     pub export_map: ExportMap<NodeId>,
     pub trait_map: TraitMap,
 
@@ -888,8 +893,9 @@ pub struct Resolver<'a> {
     /// language items.
     empty_module: Module<'a>,
     module_map: FxHashMap<DefId, Module<'a>>,
-    extern_module_map: FxHashMap<(DefId, bool /* MacrosOnly? */), Module<'a>>,
+    extern_module_map: FxHashMap<DefId, Module<'a>>,
     binding_parent_modules: FxHashMap<PtrKey<'a, NameBinding<'a>>, Module<'a>>,
+    underscore_disambiguator: u32,
 
     /// Maps glob imports to the names of items actually imported.
     pub glob_map: GlobMap,
@@ -910,7 +916,7 @@ pub struct Resolver<'a> {
     arenas: &'a ResolverArenas<'a>,
     dummy_binding: &'a NameBinding<'a>,
 
-    crate_loader: &'a mut CrateLoader<'a>,
+    crate_loader: &'a CrateLoader<'a>,
     macro_names: FxHashSet<Ident>,
     builtin_macros: FxHashMap<Name, SyntaxExtension>,
     macro_use_prelude: FxHashMap<Name, &'a NameBinding<'a>>,
@@ -1005,7 +1011,7 @@ impl<'a> AsMut<Resolver<'a>> for Resolver<'a> {
     fn as_mut(&mut self) -> &mut Resolver<'a> { self }
 }
 
-impl<'a, 'b> ty::DefIdTree for &'a Resolver<'b> {
+impl<'a, 'b> DefIdTree for &'a Resolver<'b> {
     fn parent(self, id: DefId) -> Option<DefId> {
         match id.krate {
             LOCAL_CRATE => self.definitions.def_key(id.index).parent,
@@ -1080,7 +1086,7 @@ pub fn new(session: &'a Session,
                cstore: &'a CStore,
                krate: &Crate,
                crate_name: &str,
-               crate_loader: &'a mut CrateLoader<'a>,
+               crate_loader: &'a CrateLoader<'a>,
                arenas: &'a ResolverArenas<'a>)
                -> Resolver<'a> {
         let root_def_id = DefId::local(CRATE_DEF_INDEX);
@@ -1165,8 +1171,10 @@ pub fn new(session: &'a Session,
             partial_res_map: Default::default(),
             import_res_map: Default::default(),
             label_res_map: Default::default(),
+            extern_crate_map: Default::default(),
             export_map: FxHashMap::default(),
             trait_map: Default::default(),
+            underscore_disambiguator: 0,
             empty_module,
             module_map,
             block_map: Default::default(),
@@ -1266,6 +1274,9 @@ fn has_derives(&self, expn_id: ExpnId, markers: SpecialDerives) -> bool {
 
     /// Entry point to crate resolution.
     pub fn resolve_crate(&mut self, krate: &Crate) {
+        let _prof_timer =
+            self.session.prof.generic_activity("resolve_crate");
+
         ImportResolver { r: self }.finalize_imports();
         self.finalize_macro_resolutions();
 
@@ -1288,6 +1299,17 @@ fn new_module(
         self.arenas.alloc_module(module)
     }
 
+    fn new_key(&mut self, ident: Ident, ns: Namespace) -> BindingKey {
+        let ident = ident.modern();
+        let disambiguator = if ident.name == kw::Underscore {
+            self.underscore_disambiguator += 1;
+            self.underscore_disambiguator
+        } else {
+            0
+        };
+        BindingKey { ident, ns, disambiguator }
+    }
+
     fn resolutions(&mut self, module: Module<'a>) -> &'a Resolutions<'a> {
         if module.populate_on_access.get() {
             module.populate_on_access.set(false);
@@ -1296,9 +1318,9 @@ fn resolutions(&mut self, module: Module<'a>) -> &'a Resolutions<'a> {
         &module.lazy_resolutions
     }
 
-    fn resolution(&mut self, module: Module<'a>, ident: Ident, ns: Namespace)
+    fn resolution(&mut self, module: Module<'a>, key: BindingKey)
                   -> &'a RefCell<NameResolution<'a>> {
-        *self.resolutions(module).borrow_mut().entry((ident.modern(), ns))
+        *self.resolutions(module).borrow_mut().entry(key)
                .or_insert_with(|| self.arenas.alloc_name_resolution())
     }
 
@@ -2019,13 +2041,13 @@ fn resolve_path_with_ribs(
                         let mut candidates =
                             self.lookup_import_candidates(ident, TypeNS, is_mod);
                         candidates.sort_by_cached_key(|c| {
-                            (c.path.segments.len(), c.path.to_string())
+                            (c.path.segments.len(), pprust::path_to_string(&c.path))
                         });
                         if let Some(candidate) = candidates.get(0) {
                             (
                                 String::from("unresolved import"),
                                 Some((
-                                    vec![(ident.span, candidate.path.to_string())],
+                                    vec![(ident.span, pprust::path_to_string(&candidate.path))],
                                     String::from("a similar path exists"),
                                     Applicability::MaybeIncorrect,
                                 )),
@@ -2167,7 +2189,7 @@ fn validate_res_from_ribs(
                         ForwardTyParamBanRibKind | TyParamAsConstParamTy => {
                             // Nothing to do. Continue.
                         }
-                        ItemRibKind | FnItemRibKind | AssocItemRibKind => {
+                        ItemRibKind(_) | FnItemRibKind | AssocItemRibKind => {
                             // This was an attempt to access an upvar inside a
                             // named function item. This is not allowed, so we
                             // report an error.
@@ -2195,22 +2217,23 @@ fn validate_res_from_ribs(
             }
             Res::Def(DefKind::TyParam, _) | Res::SelfTy(..) => {
                 for rib in ribs {
-                    match rib.kind {
+                    let has_generic_params = match rib.kind {
                         NormalRibKind | AssocItemRibKind |
                         ModuleRibKind(..) | MacroDefinition(..) | ForwardTyParamBanRibKind |
                         ConstantItemRibKind | TyParamAsConstParamTy => {
                             // Nothing to do. Continue.
+                            continue;
                         }
-                        ItemRibKind | FnItemRibKind => {
-                            // This was an attempt to use a type parameter outside its scope.
-                            if record_used {
-                                self.report_error(
-                                    span, ResolutionError::GenericParamsFromOuterFunction(res)
-                                );
-                            }
-                            return Res::Err;
-                        }
+                        // This was an attempt to use a type parameter outside its scope.
+                        ItemRibKind(has_generic_params) => has_generic_params,
+                        FnItemRibKind => HasGenericParams::Yes,
+                    };
+
+                    if record_used {
+                        self.report_error(span, ResolutionError::GenericParamsFromOuterFunction(
+                            res, has_generic_params));
                     }
+                    return Res::Err;
                 }
             }
             Res::Def(DefKind::ConstParam, _) => {
@@ -2222,15 +2245,18 @@ fn validate_res_from_ribs(
                     ribs.next();
                 }
                 for rib in ribs {
-                    if let ItemRibKind | FnItemRibKind = rib.kind {
-                        // This was an attempt to use a const parameter outside its scope.
-                        if record_used {
-                            self.report_error(
-                                span, ResolutionError::GenericParamsFromOuterFunction(res)
-                            );
-                        }
-                        return Res::Err;
+                    let has_generic_params = match rib.kind {
+                        ItemRibKind(has_generic_params) => has_generic_params,
+                        FnItemRibKind => HasGenericParams::Yes,
+                        _ => continue,
+                    };
+
+                    // This was an attempt to use a const parameter outside its scope.
+                    if record_used {
+                        self.report_error(span, ResolutionError::GenericParamsFromOuterFunction(
+                            res, has_generic_params));
                     }
+                    return Res::Err;
                 }
             }
             _ => {}
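Both arms above now feed `GenericParamsFromOuterFunction` (E0401); a minimal sketch of the kind of code they reject, assuming the usual nested-function case:

    fn outer<T>(value: T) {
        // Nested items get their own `ItemRibKind`/`FnItemRibKind` scope, so
        // the outer `T` is not in scope here and E0401 is reported.
        fn inner(x: T) {}
        let _ = value;
    }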
@@ -2382,32 +2408,38 @@ fn report_errors(&mut self, krate: &Crate) {
         let mut reported_spans = FxHashSet::default();
         for &PrivacyError(dedup_span, ident, binding) in &self.privacy_errors {
             if reported_spans.insert(dedup_span) {
-                let mut err = struct_span_err!(
-                    self.session,
-                    ident.span,
-                    E0603,
-                    "{} `{}` is private",
-                    binding.res().descr(),
-                    ident.name,
-                );
-                // FIXME: use the ctor's `def_id` to check wether any of the fields is not visible
-                match binding.kind {
-                    NameBindingKind::Res(Res::Def(DefKind::Ctor(
-                        CtorOf::Struct,
-                        CtorKind::Fn,
-                    ), _def_id), _) => {
-                        err.note("a tuple struct constructor is private if any of its fields \
-                                  is private");
-                    }
-                    NameBindingKind::Res(Res::Def(DefKind::Ctor(
-                        CtorOf::Variant,
-                        CtorKind::Fn,
-                    ), _def_id), _) => {
-                        err.note("a tuple variant constructor is private if any of its fields \
-                                  is private");
+                let session = &self.session;
+                let mk_struct_span_error = |is_constructor| {
+                    struct_span_err!(
+                        session,
+                        ident.span,
+                        E0603,
+                        "{}{} `{}` is private",
+                        binding.res().descr(),
+                        if is_constructor { " constructor"} else { "" },
+                        ident.name,
+                    )
+                };
+
+                let mut err = if let NameBindingKind::Res(
+                    Res::Def(DefKind::Ctor(CtorOf::Struct, CtorKind::Fn), ctor_def_id), _
+                ) = binding.kind {
+                    let def_id = (&*self).parent(ctor_def_id).expect("no parent for a constructor");
+                    if let Some(fields) = self.field_names.get(&def_id) {
+                        let mut err = mk_struct_span_error(true);
+                        let first_field = fields.first().expect("empty field list in the map");
+                        err.span_label(
+                            fields.iter().fold(first_field.span, |acc, field| acc.to(field.span)),
+                            "a constructor is private if any of the fields is private",
+                        );
+                        err
+                    } else {
+                        mk_struct_span_error(false)
                     }
-                    _ => {}
-                }
+                } else {
+                    mk_struct_span_error(false)
+                };
+
                 err.emit();
             }
         }
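The reworked E0603 message above concerns private tuple-struct constructors; a minimal sketch of code that hits it (the new field-span label is assumed to point at `i32` here):

    mod m {
        // The field is private, so the implicitly generated tuple-struct
        // constructor is private outside `m` as well.
        pub struct S(i32);
    }

    fn main() {
        let _ = m::S(0); // rejected with E0603: the constructor is private
    }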
index 73ad0670659b5fd08633b44304b8a52ec32a6500..94fe0cc57403e7a376a5d66384839e1a484b529e 100644 (file)
 use syntax::ast::{self, NodeId, Ident};
 use syntax::attr::StabilityLevel;
 use syntax::edition::Edition;
-use syntax::ext::base::{self, InvocationRes, Indeterminate, SpecialDerives};
-use syntax::ext::base::{MacroKind, SyntaxExtension};
-use syntax::ext::expand::{AstFragment, AstFragmentKind, Invocation, InvocationKind};
-use syntax::ext::hygiene::{self, ExpnId, ExpnData, ExpnKind};
-use syntax::ext::compile_declarative_macro;
+use syntax_expand::base::{self, InvocationRes, Indeterminate, SpecialDerives};
+use syntax_expand::base::{MacroKind, SyntaxExtension};
+use syntax_expand::expand::{AstFragment, AstFragmentKind, Invocation, InvocationKind};
+use syntax_expand::hygiene::{self, ExpnId, ExpnData, ExpnKind};
+use syntax_expand::compile_declarative_macro;
 use syntax::feature_gate::{emit_feature_err, is_builtin_attr_name};
 use syntax::feature_gate::GateIssue;
+use syntax::print::pprust;
 use syntax::symbol::{Symbol, kw, sym};
 use syntax_pos::{Span, DUMMY_SP};
 
@@ -107,15 +108,11 @@ fn resolve_dollar_crates(&mut self) {
         });
     }
 
-    // FIXME: `extra_placeholders` should be included into the `fragment` as regular placeholders.
-    fn visit_ast_fragment_with_placeholders(
-        &mut self, expansion: ExpnId, fragment: &AstFragment, extra_placeholders: &[NodeId]
-    ) {
+    fn visit_ast_fragment_with_placeholders(&mut self, expansion: ExpnId, fragment: &AstFragment) {
         // Integrate the new AST fragment into all the definition and module structures.
         // We are inside the `expansion` now, but other parent scope components are still the same.
         let parent_scope = ParentScope { expansion, ..self.invocation_parent_scopes[&expansion] };
-        let output_legacy_scope =
-            self.build_reduced_graph(fragment, extra_placeholders, parent_scope);
+        let output_legacy_scope = self.build_reduced_graph(fragment, parent_scope);
         self.output_legacy_scopes.insert(expansion, output_legacy_scope);
 
         parent_scope.module.unexpanded_invocations.borrow_mut().remove(&expansion);
@@ -324,7 +321,8 @@ fn smart_resolve_macro_path(
 
         Ok(if ext.macro_kind() != kind {
             let expected = kind.descr_expected();
-            let msg = format!("expected {}, found {} `{}`", expected, res.descr(), path);
+            let path_str = pprust::path_to_string(path);
+            let msg = format!("expected {}, found {} `{}`", expected, res.descr(), path_str);
             self.session.struct_span_err(path.span, &msg)
                         .span_label(path.span, format!("not {} {}", kind.article(), expected))
                         .emit();
@@ -773,7 +771,6 @@ struct Flags: u8 {
                     check_consistency(self, &[seg], ident.span, kind, initial_res, res);
                 }
                 Err(..) => {
-                    assert!(initial_binding.is_none());
                     let expected = kind.descr_expected();
                     let msg = format!("cannot find {} `{}` in this scope", expected, ident);
                     let mut err = self.session.struct_span_err(ident.span, &msg);
@@ -797,18 +794,25 @@ fn check_stability_and_deprecation(&self, ext: &SyntaxExtension, path: &ast::Pat
             if let StabilityLevel::Unstable { reason, issue, is_soft } = stability.level {
                 let feature = stability.feature;
                 if !self.active_features.contains(&feature) && !span.allows_unstable(feature) {
-                    stability::report_unstable(self.session, feature, reason, issue, is_soft, span);
+                    let node_id = ast::CRATE_NODE_ID;
+                    let soft_handler =
+                        |lint, span, msg: &_| self.session.buffer_lint(lint, node_id, span, msg);
+                    stability::report_unstable(
+                        self.session, feature, reason, issue, is_soft, span, soft_handler
+                    );
                 }
             }
             if let Some(depr) = &stability.rustc_depr {
-                let (message, lint) = stability::rustc_deprecation_message(depr, &path.to_string());
+                let path = pprust::path_to_string(path);
+                let (message, lint) = stability::rustc_deprecation_message(depr, &path);
                 stability::early_report_deprecation(
                     self.session, &message, depr.suggestion, lint, span
                 );
             }
         }
         if let Some(depr) = &ext.deprecation {
-            let (message, lint) = stability::deprecation_message(depr, &path.to_string());
+            let path = pprust::path_to_string(&path);
+            let (message, lint) = stability::deprecation_message(depr, &path);
             stability::early_report_deprecation(self.session, &message, None, lint, span);
         }
     }
index 360343169bc3d83dc0b09e1e815f46b050fb79be..424bf31a785056e3a3f0c5a3c5d383b24e61d9a0 100644 (file)
@@ -7,7 +7,7 @@
 use crate::Determinacy::{self, *};
 use crate::Namespace::{self, TypeNS, MacroNS};
 use crate::{NameBinding, NameBindingKind, ToNameBinding, PathResult, PrivacyError};
-use crate::{Resolver, ResolutionError, Segment, ModuleKind};
+use crate::{Resolver, ResolutionError, BindingKey, Segment, ModuleKind};
 use crate::{names_to_string, module_to_string};
 use crate::diagnostics::Suggestion;
 
@@ -28,7 +28,7 @@
 use rustc::{bug, span_bug};
 
 use syntax::ast::{Ident, Name, NodeId, CRATE_NODE_ID};
-use syntax::ext::hygiene::ExpnId;
+use syntax_expand::hygiene::ExpnId;
 use syntax::symbol::kw;
 use syntax::util::lev_distance::find_best_match_for_name;
 use syntax::{struct_span_err, unwrap_or};
@@ -235,7 +235,8 @@ impl<'a> Resolver<'a> {
             }
         };
 
-        let resolution = self.resolution(module, ident, ns)
+        let key = self.new_key(ident, ns);
+        let resolution = self.resolution(module, key)
             .try_borrow_mut()
             .map_err(|_| (Determined, Weak::No))?; // This happens when there is a cycle of imports.
 
@@ -447,17 +448,16 @@ impl<'a> Resolver<'a> {
     }
 
     // Define the name or return the existing binding if there is a collision.
-    pub fn try_define(
+    crate fn try_define(
         &mut self,
         module: Module<'a>,
-        ident: Ident,
-        ns: Namespace,
+        key: BindingKey,
         binding: &'a NameBinding<'a>,
     ) -> Result<(), &'a NameBinding<'a>> {
         let res = binding.res();
-        self.check_reserved_macro_name(ident, res);
+        self.check_reserved_macro_name(key.ident, res);
         self.set_binding_parent_module(binding, module);
-        self.update_resolution(module, ident, ns, |this, resolution| {
+        self.update_resolution(module, key, |this, resolution| {
             if let Some(old_binding) = resolution.binding {
                 if res == Res::Err {
                     // Do not override real bindings with `Res::Err`s from error recovery.
@@ -479,8 +479,9 @@ pub fn try_define(
                         } else {
                             (binding, old_binding)
                         };
-                        if glob_binding.res() != nonglob_binding.res() &&
-                           ns == MacroNS && nonglob_binding.expansion != ExpnId::root() {
+                        if glob_binding.res() != nonglob_binding.res()
+                            && key.ns == MacroNS && nonglob_binding.expansion != ExpnId::root()
+                        {
                             resolution.binding = Some(this.ambiguity(
                                 AmbiguityKind::GlobVsExpanded,
                                 nonglob_binding,
@@ -499,9 +500,9 @@ pub fn try_define(
                                 DUPLICATE_MACRO_EXPORTS,
                                 CRATE_NODE_ID,
                                 binding.span,
-                                &format!("a macro named `{}` has already been exported", ident),
+                                &format!("a macro named `{}` has already been exported", key.ident),
                                 BuiltinLintDiagnostics::DuplicatedMacroExports(
-                                    ident, old_binding.span, binding.span));
+                                    key.ident, old_binding.span, binding.span));
 
                             resolution.binding = Some(binding);
                         } else {
@@ -531,9 +532,9 @@ fn ambiguity(
     // Use `f` to mutate the resolution of the name in the module.
     // If the resolution becomes a success, define it in the module's glob importers.
     fn update_resolution<T, F>(
-        &mut self, module: Module<'a>,
-        ident: Ident,
-        ns: Namespace,
+        &mut self,
+        module: Module<'a>,
+        key: BindingKey,
         f: F,
     ) -> T
         where F: FnOnce(&mut Resolver<'a>, &mut NameResolution<'a>) -> T
@@ -541,7 +542,7 @@ fn update_resolution<T, F>(
         // Ensure that `resolution` isn't borrowed when defining in the module's glob importers,
         // during which the resolution might end up getting re-defined via a glob cycle.
         let (binding, t) = {
-            let resolution = &mut *self.resolution(module, ident, ns).borrow_mut();
+            let resolution = &mut *self.resolution(module, key).borrow_mut();
             let old_binding = resolution.binding();
 
             let t = f(self, resolution);
@@ -558,7 +559,7 @@ fn update_resolution<T, F>(
 
         // Define `binding` in `module`s glob importers.
         for directive in module.glob_importers.borrow_mut().iter() {
-            let mut ident = ident.modern();
+            let mut ident = key.ident;
             let scope = match ident.span.reverse_glob_adjust(module.expansion, directive.span) {
                 Some(Some(def)) => self.macro_def_scope(def),
                 Some(None) => directive.parent_scope.module,
@@ -566,7 +567,8 @@ fn update_resolution<T, F>(
             };
             if self.is_accessible_from(binding.vis, scope) {
                 let imported_binding = self.import(binding, directive);
-                let _ = self.try_define(directive.parent_scope.module, ident, ns, imported_binding);
+                let key = BindingKey { ident, ..key };
+                let _ = self.try_define(directive.parent_scope.module, key, imported_binding);
             }
         }
 
@@ -580,7 +582,8 @@ fn import_dummy_binding(&mut self, directive: &'a ImportDirective<'a>) {
             let dummy_binding = self.dummy_binding;
             let dummy_binding = self.import(dummy_binding, directive);
             self.per_ns(|this, ns| {
-                let _ = this.try_define(directive.parent_scope.module, target, ns, dummy_binding);
+                let key = this.new_key(target, ns);
+                let _ = this.try_define(directive.parent_scope.module, key, dummy_binding);
                 // Consider erroneous imports used to avoid duplicate diagnostics.
                 this.record_use(target, ns, dummy_binding, false);
             });
@@ -820,8 +823,11 @@ fn resolve_import(&mut self, directive: &'b ImportDirective<'b>) -> bool {
             let parent = directive.parent_scope.module;
             match source_bindings[ns].get() {
                 Err(Undetermined) => indeterminate = true,
+                // Don't update the resolution, because it was never added.
+                Err(Determined) if target.name == kw::Underscore => {}
                 Err(Determined) => {
-                    this.update_resolution(parent, target, ns, |_, resolution| {
+                    let key = this.new_key(target, ns);
+                    this.update_resolution(parent, key, |_, resolution| {
                         resolution.single_imports.remove(&PtrKey(directive));
                     });
                 }
@@ -1052,7 +1058,7 @@ fn finalize_import(
                     _ => None,
                 };
                 let resolutions = resolutions.as_ref().into_iter().flat_map(|r| r.iter());
-                let names = resolutions.filter_map(|(&(ref i, _), resolution)| {
+                let names = resolutions.filter_map(|(BindingKey { ident: i, .. }, resolution)| {
                     if *i == ident { return None; } // Never suggest the same name
                     match *resolution.borrow() {
                         NameResolution { binding: Some(name_binding), .. } => {
@@ -1301,19 +1307,18 @@ fn resolve_glob_import(&mut self, directive: &'b ImportDirective<'b>) {
 
         // Ensure that `resolutions` isn't borrowed during `try_define`,
         // since it might get updated via a glob cycle.
-        let bindings = self.r.resolutions(module).borrow().iter().filter_map(|(ident, resolution)| {
-            resolution.borrow().binding().map(|binding| (*ident, binding))
+        let bindings = self.r.resolutions(module).borrow().iter().filter_map(|(key, resolution)| {
+            resolution.borrow().binding().map(|binding| (*key, binding))
         }).collect::<Vec<_>>();
-        for ((mut ident, ns), binding) in bindings {
-            let scope = match ident.span.reverse_glob_adjust(module.expansion, directive.span) {
+        for (mut key, binding) in bindings {
+            let scope = match key.ident.span.reverse_glob_adjust(module.expansion, directive.span) {
                 Some(Some(def)) => self.r.macro_def_scope(def),
                 Some(None) => directive.parent_scope.module,
                 None => continue,
             };
             if self.r.is_accessible_from(binding.pseudo_vis(), scope) {
                 let imported_binding = self.r.import(binding, directive);
-                let _ =
-                    self.r.try_define(directive.parent_scope.module, ident, ns, imported_binding);
+                let _ = self.r.try_define(directive.parent_scope.module, key, imported_binding);
             }
         }
 
@@ -1329,29 +1334,23 @@ fn finalize_resolutions_in(&mut self, module: Module<'b>) {
 
         let mut reexports = Vec::new();
 
-        for (&(ident, ns), resolution) in self.r.resolutions(module).borrow().iter() {
-            let resolution = &mut *resolution.borrow_mut();
-            let binding = match resolution.binding {
-                Some(binding) => binding,
-                None => continue,
-            };
-
+        module.for_each_child(self.r, |this, ident, ns, binding| {
             // Filter away ambiguous imports and anything that has def-site
             // hygiene.
             // FIXME: Implement actual cross-crate hygiene.
             let is_good_import = binding.is_import() && !binding.is_ambiguity()
-                && !ident.span.modern().from_expansion();
+                && !ident.span.from_expansion();
             if is_good_import || binding.is_macro_def() {
                 let res = binding.res();
                 if res != Res::Err {
                     if let Some(def_id) = res.opt_def_id() {
                         if !def_id.is_local() {
-                            self.r.cstore.export_macros_untracked(def_id.krate);
+                            this.cstore.export_macros_untracked(def_id.krate);
                         }
                     }
                     reexports.push(Export {
-                        ident: ident.modern(),
-                        res: res,
+                        ident,
+                        res,
                         span: binding.span,
                         vis: binding.vis,
                     });
@@ -1360,7 +1359,7 @@ fn finalize_resolutions_in(&mut self, module: Module<'b>) {
 
             if let NameBindingKind::Import { binding: orig_binding, directive, .. } = binding.kind {
                 if ns == TypeNS && orig_binding.is_variant() &&
-                    !orig_binding.vis.is_at_least(binding.vis, &*self) {
+                    !orig_binding.vis.is_at_least(binding.vis, &*this) {
                         let msg = match directive.subclass {
                             ImportDirectiveSubclass::SingleImport { .. } => {
                                 format!("variant `{}` is private and cannot be re-exported",
@@ -1372,33 +1371,34 @@ fn finalize_resolutions_in(&mut self, module: Module<'b>) {
                                 let error_id = (DiagnosticMessageId::ErrorId(0), // no code?!
                                                 Some(binding.span),
                                                 msg.clone());
-                                let fresh = self.r.session.one_time_diagnostics
+                                let fresh = this.session.one_time_diagnostics
                                     .borrow_mut().insert(error_id);
                                 if !fresh {
-                                    continue;
+                                    return;
                                 }
                                 msg
                             },
                             ref s @ _ => bug!("unexpected import subclass {:?}", s)
                         };
-                        let mut err = self.r.session.struct_span_err(binding.span, &msg);
+                        let mut err = this.session.struct_span_err(binding.span, &msg);
 
                         let imported_module = match directive.imported_module.get() {
                             Some(ModuleOrUniformRoot::Module(module)) => module,
                             _ => bug!("module should exist"),
                         };
                         let parent_module = imported_module.parent.expect("parent should exist");
-                        let resolutions = self.r.resolutions(parent_module).borrow();
+                        let resolutions = this.resolutions(parent_module).borrow();
                         let enum_path_segment_index = directive.module_path.len() - 1;
                         let enum_ident = directive.module_path[enum_path_segment_index].ident;
 
-                        let enum_resolution = resolutions.get(&(enum_ident, TypeNS))
+                        let key = this.new_key(enum_ident, TypeNS);
+                        let enum_resolution = resolutions.get(&key)
                             .expect("resolution should exist");
                         let enum_span = enum_resolution.borrow()
                             .binding.expect("binding should exist")
                             .span;
-                        let enum_def_span = self.r.session.source_map().def_span(enum_span);
-                        let enum_def_snippet = self.r.session.source_map()
+                        let enum_def_span = this.session.source_map().def_span(enum_span);
+                        let enum_def_snippet = this.session.source_map()
                             .span_to_snippet(enum_def_span).expect("snippet should exist");
                         // potentially need to strip extant `crate`/`pub(path)` for suggestion
                         let after_vis_index = enum_def_snippet.find("enum")
@@ -1406,7 +1406,7 @@ fn finalize_resolutions_in(&mut self, module: Module<'b>) {
                         let suggestion = format!("pub {}",
                                                  &enum_def_snippet[after_vis_index..]);
 
-                        self.r.session
+                        this.session
                             .diag_span_suggestion_once(&mut err,
                                                        DiagnosticMessageId::ErrorId(0),
                                                        enum_def_span,
@@ -1415,7 +1415,7 @@ fn finalize_resolutions_in(&mut self, module: Module<'b>) {
                         err.emit();
                 }
             }
-        }
+        });
 
         if reexports.len() > 0 {
             if let Some(def_id) = module.def_id() {
index edd2db3c8f738bb2546d704d3010e1d17585e231..e282936b5d9e7e762938fc483af2dabddc0f315b 100644 (file)
@@ -115,15 +115,17 @@ fn nest_tables<F>(&mut self, item_id: NodeId, f: F)
         F: FnOnce(&mut Self),
     {
         let item_def_id = self.tcx.hir().local_def_id_from_node_id(item_id);
-        if self.tcx.has_typeck_tables(item_def_id) {
-            let tables = self.tcx.typeck_tables_of(item_def_id);
-            let old_tables = self.save_ctxt.tables;
-            self.save_ctxt.tables = tables;
-            f(self);
-            self.save_ctxt.tables = old_tables;
+
+        let tables = if self.tcx.has_typeck_tables(item_def_id) {
+            self.tcx.typeck_tables_of(item_def_id)
         } else {
-            f(self);
-        }
+            self.save_ctxt.empty_tables
+        };
+
+        let old_tables = self.save_ctxt.tables;
+        self.save_ctxt.tables = tables;
+        f(self);
+        self.save_ctxt.tables = old_tables;
     }
 
     fn span_from_span(&self, span: Span) -> SpanData {
@@ -530,12 +532,14 @@ fn process_struct(
             );
         }
 
-        for field in def.fields() {
-            self.process_struct_field_def(field, item.id);
-            self.visit_ty(&field.ty);
-        }
+        self.nest_tables(item.id, |v| {
+            for field in def.fields() {
+                v.process_struct_field_def(field, item.id);
+                v.visit_ty(&field.ty);
+            }
 
-        self.process_generic_params(ty_params, &qualname, item.id);
+            v.process_generic_params(ty_params, &qualname, item.id);
+        });
     }
 
     fn process_enum(
@@ -665,15 +669,18 @@ fn process_impl(
                 }
             }
         }
-        self.visit_ty(&typ);
-        if let &Some(ref trait_ref) = trait_ref {
-            self.process_path(trait_ref.ref_id, &trait_ref.path);
-        }
-        self.process_generic_params(generics, "", item.id);
-        for impl_item in impl_items {
-            let map = &self.tcx.hir();
-            self.process_impl_item(impl_item, map.local_def_id_from_node_id(item.id));
-        }
+
+        let map = &self.tcx.hir();
+        self.nest_tables(item.id, |v| {
+            v.visit_ty(&typ);
+            if let &Some(ref trait_ref) = trait_ref {
+                v.process_path(trait_ref.ref_id, &trait_ref.path);
+            }
+            v.process_generic_params(generics, "", item.id);
+            for impl_item in impl_items {
+                v.process_impl_item(impl_item, map.local_def_id_from_node_id(item.id));
+            }
+        });
     }
 
     fn process_trait(
index e3c898610cde0c8be1575cef14ab6c76d2c09441..1cfb84bb511e4b4ca5c8c16d73235790f35b4db8 100644 (file)
@@ -1,6 +1,5 @@
 #![doc(html_root_url = "https://doc.rust-lang.org/nightly/")]
 #![feature(nll)]
-#![feature(inner_deref)]
 
 #![recursion_limit="256"]
 
@@ -49,6 +48,9 @@
 pub struct SaveContext<'l, 'tcx> {
     tcx: TyCtxt<'tcx>,
     tables: &'l ty::TypeckTables<'tcx>,
+    /// Used as a fallback when nesting the typeck tables during item processing
+    /// (if these are not available for that item, e.g. don't own a body)
+    empty_tables: &'l ty::TypeckTables<'tcx>,
     access_levels: &'l AccessLevels,
     span_utils: SpanUtils<'tcx>,
     config: Config,
@@ -1115,6 +1117,7 @@ pub fn process_crate<'l, 'tcx, H: SaveHandler>(
         let save_ctxt = SaveContext {
             tcx,
             tables: &ty::TypeckTables::empty(None),
+            empty_tables: &ty::TypeckTables::empty(None),
             access_levels: &access_levels,
             span_utils: SpanUtils::new(&tcx.sess),
             config: find_config(config),
index 26d37f196befac3b9f15ed3b8e0d25af295043cc..fde5c5bed4d91a02b35b42d8c2ed78ca3a241e38 100644 (file)
@@ -738,7 +738,11 @@ pub fn count(&self) -> usize {
 
     pub fn offset(&self, i: usize) -> Size {
         match *self {
-            FieldPlacement::Union(_) => Size::ZERO,
+            FieldPlacement::Union(count) => {
+                assert!(i < count,
+                        "Tried to access field {} of union with {} fields", i, count);
+                Size::ZERO
+            },
             FieldPlacement::Array { stride, count } => {
                 let i = i as u64;
                 assert!(i < count);
index cf1a84dec97bccf625742399a4d9c5e0bd2f8b4e..c5277c4f90e7ae42e908715938c4b79afbcebbcf 100644 (file)
@@ -691,6 +691,9 @@ pub struct TargetOptions {
     /// defined in libgcc. If this option is enabled, the target must provide
     /// `eh_unwind_resume` lang item.
     pub custom_unwind_resume: bool,
+    /// Whether the runtime startup code requires the `main` function be passed
+    /// `argc` and `argv` values.
+    pub main_needs_argc_argv: bool,
 
     /// Flag indicating whether ELF TLS (e.g., #[thread_local]) is available for
     /// this target.
@@ -849,6 +852,7 @@ fn default() -> TargetOptions {
             link_env_remove: Vec::new(),
             archive_format: "gnu".to_string(),
             custom_unwind_resume: false,
+            main_needs_argc_argv: true,
             allow_asm: true,
             has_elf_tls: false,
             obj_is_bitcode: false,
@@ -1159,6 +1163,7 @@ macro_rules! key {
         key!(archive_format);
         key!(allow_asm, bool);
         key!(custom_unwind_resume, bool);
+        key!(main_needs_argc_argv, bool);
         key!(has_elf_tls, bool);
         key!(obj_is_bitcode, bool);
         key!(no_integrated_as, bool);
@@ -1376,6 +1381,7 @@ macro_rules! target_option_val {
         target_option_val!(archive_format);
         target_option_val!(allow_asm);
         target_option_val!(custom_unwind_resume);
+        target_option_val!(main_needs_argc_argv);
         target_option_val!(has_elf_tls);
         target_option_val!(obj_is_bitcode);
         target_option_val!(no_integrated_as);
index 86978c05b15d0dbef4f3fd9fdd87518a7320c0ad..d5ef230dcf7d23c5da16e34a71684249c6be4bc5 100644 (file)
@@ -101,6 +101,10 @@ pub fn target() -> Result<Target, String> {
     // without a main function.
     options.crt_static_allows_dylibs = true;
 
+    // WASI's `sys::args::init` function ignores its arguments; instead,
+    // `args::args()` makes the WASI API calls itself.
+    options.main_needs_argc_argv = false;
+
     Ok(Target {
         llvm_target: "wasm32-wasi".to_string(),
         target_endian: "little".to_string(),
index 7db1a7413c7be27351560ea7a5555cdeda478388..c1316f415a559182709fc15dd9b75e809046224f 100644 (file)
@@ -80,22 +80,30 @@ fn dropck_outlives<'tcx>(
             let mut fulfill_cx = TraitEngine::new(infcx.tcx);
 
             let cause = ObligationCause::dummy();
+            let mut constraints = DtorckConstraint::empty();
             while let Some((ty, depth)) = ty_stack.pop() {
-                let DtorckConstraint {
-                    dtorck_types,
-                    outlives,
-                    overflows,
-                } = dtorck_constraint_for_ty(tcx, DUMMY_SP, for_ty, depth, ty)?;
+                info!("{} kinds, {} overflows, {} ty_stack",
+                    result.kinds.len(), result.overflows.len(), ty_stack.len());
+                dtorck_constraint_for_ty(tcx, DUMMY_SP, for_ty, depth, ty, &mut constraints)?;
 
                 // "outlives" represent types/regions that may be touched
                 // by a destructor.
-                result.kinds.extend(outlives);
-                result.overflows.extend(overflows);
+                result.kinds.extend(constraints.outlives.drain(..));
+                result.overflows.extend(constraints.overflows.drain(..));
+
+                // If we hit even one overflow, stop evaluating further: the remaining
+                // overflows for this evaluation are unlikely to add useful information,
+                // and emitting them only slows error reporting down (the types that
+                // overflow here are often enormous, so printing them takes a while).
+                if !result.overflows.is_empty() {
+                    break;
+                }
 
                 // dtorck types are "types that will get dropped but which
                 // do not themselves define a destructor", more or less. We have
                 // to push them onto the stack to be expanded.
-                for ty in dtorck_types {
+                for ty in constraints.dtorck_types.drain(..) {
                     match infcx.at(&cause, param_env).normalize(&ty) {
                         Ok(Normalized {
                             value: ty,
@@ -152,21 +160,23 @@ fn dtorck_constraint_for_ty<'tcx>(
     for_ty: Ty<'tcx>,
     depth: usize,
     ty: Ty<'tcx>,
-) -> Result<DtorckConstraint<'tcx>, NoSolution> {
+    constraints: &mut DtorckConstraint<'tcx>,
+) -> Result<(), NoSolution> {
     debug!(
         "dtorck_constraint_for_ty({:?}, {:?}, {:?}, {:?})",
         span, for_ty, depth, ty
     );
 
     if depth >= *tcx.sess.recursion_limit.get() {
-        return Ok(DtorckConstraint {
-            outlives: vec![],
-            dtorck_types: vec![],
-            overflows: vec![ty],
-        });
+        constraints.overflows.push(ty);
+        return Ok(());
     }
 
-    let result = match ty.kind {
+    if tcx.trivial_dropck_outlives(ty) {
+        return Ok(());
+    }
+
+    match ty.kind {
         ty::Bool
         | ty::Char
         | ty::Int(_)
@@ -181,22 +191,20 @@ fn dtorck_constraint_for_ty<'tcx>(
         | ty::FnPtr(_)
         | ty::GeneratorWitness(..) => {
             // these types never have a destructor
-            Ok(DtorckConstraint::empty())
         }
 
         ty::Array(ety, _) | ty::Slice(ety) => {
             // single-element containers, behave like their element
-            dtorck_constraint_for_ty(tcx, span, for_ty, depth + 1, ety)
+            dtorck_constraint_for_ty(tcx, span, for_ty, depth + 1, ety, constraints)?;
         }
 
-        ty::Tuple(tys) => tys.iter()
-            .map(|ty| dtorck_constraint_for_ty(tcx, span, for_ty, depth + 1, ty.expect_ty()))
-            .collect(),
+        ty::Tuple(tys) => for ty in tys.iter() {
+            dtorck_constraint_for_ty(tcx, span, for_ty, depth + 1, ty.expect_ty(), constraints)?;
+        },
 
-        ty::Closure(def_id, substs) => substs.as_closure()
-            .upvar_tys(def_id, tcx)
-            .map(|ty| dtorck_constraint_for_ty(tcx, span, for_ty, depth + 1, ty))
-            .collect(),
+        ty::Closure(def_id, substs) => for ty in substs.as_closure().upvar_tys(def_id, tcx) {
+            dtorck_constraint_for_ty(tcx, span, for_ty, depth + 1, ty, constraints)?;
+        }
 
         ty::Generator(def_id, substs, _movability) => {
             // rust-lang/rust#49918: types can be constructed, stored
@@ -222,17 +230,8 @@ fn dtorck_constraint_for_ty<'tcx>(
             // derived from lifetimes attached to the upvars, and we
             // *do* incorporate the upvars here.
 
-            let constraint = DtorckConstraint {
-                outlives: substs.upvar_tys(def_id, tcx).map(|t| t.into()).collect(),
-                dtorck_types: vec![],
-                overflows: vec![],
-            };
-            debug!(
-                "dtorck_constraint: generator {:?} => {:?}",
-                def_id, constraint
-            );
-
-            Ok(constraint)
+            constraints.outlives.extend(substs.as_generator().upvar_tys(def_id, tcx)
+                .map(|t| -> ty::subst::GenericArg<'tcx> { t.into() }));
         }
 
         ty::Adt(def, substs) => {
@@ -241,41 +240,34 @@ fn dtorck_constraint_for_ty<'tcx>(
                 outlives,
                 overflows,
             } = tcx.at(span).adt_dtorck_constraint(def.did)?;
-            Ok(DtorckConstraint {
-                // FIXME: we can try to recursively `dtorck_constraint_on_ty`
-                // there, but that needs some way to handle cycles.
-                dtorck_types: dtorck_types.subst(tcx, substs),
-                outlives: outlives.subst(tcx, substs),
-                overflows: overflows.subst(tcx, substs),
-            })
+            // FIXME: we can try to recursively `dtorck_constraint_on_ty`
+            // there, but that needs some way to handle cycles.
+            constraints.dtorck_types.extend(dtorck_types.subst(tcx, substs));
+            constraints.outlives.extend(outlives.subst(tcx, substs));
+            constraints.overflows.extend(overflows.subst(tcx, substs));
         }
 
         // Objects must be alive in order for their destructor
         // to be called.
-        ty::Dynamic(..) => Ok(DtorckConstraint {
-            outlives: vec![ty.into()],
-            dtorck_types: vec![],
-            overflows: vec![],
-        }),
+        ty::Dynamic(..) => {
+            constraints.outlives.push(ty.into());
+        },
 
         // Types that can't be resolved. Pass them forward.
-        ty::Projection(..) | ty::Opaque(..) | ty::Param(..) => Ok(DtorckConstraint {
-            outlives: vec![],
-            dtorck_types: vec![ty],
-            overflows: vec![],
-        }),
+        ty::Projection(..) | ty::Opaque(..) | ty::Param(..) => {
+            constraints.dtorck_types.push(ty);
+        },
 
         ty::UnnormalizedProjection(..) => bug!("only used with chalk-engine"),
 
         ty::Placeholder(..) | ty::Bound(..) | ty::Infer(..) | ty::Error => {
             // By the time this code runs, all type variables ought to
             // be fully resolved.
-            Err(NoSolution)
+            return Err(NoSolution)
         }
-    };
+    }
 
-    debug!("dtorck_constraint_for_ty({:?}) = {:?}", ty, result);
-    result
+    Ok(())
 }
 
 /// Calculates the dtorck constraint for a type.
@@ -301,10 +293,11 @@ fn dtorck_constraint_for_ty<'tcx>(
         return Ok(result);
     }
 
-    let mut result = def.all_fields()
-        .map(|field| tcx.type_of(field.did))
-        .map(|fty| dtorck_constraint_for_ty(tcx, span, fty, 0, fty))
-        .collect::<Result<DtorckConstraint<'_>, NoSolution>>()?;
+    let mut result = DtorckConstraint::empty();
+    for field in def.all_fields() {
+        let fty = tcx.type_of(field.did);
+        dtorck_constraint_for_ty(tcx, span, fty, 0, fty, &mut result)?;
+    }
     result.outlives.extend(tcx.destructor_constraints(def));
     dedup_dtorck_constraint(&mut result);
 
index 30a1814d0f83ef697d6a1fdc26ed779718ac3765..dc7eeead30cbeb639657770cf614293a75913618 100644 (file)
@@ -17,10 +17,12 @@ fn evaluate_obligation<'tcx>(
     tcx: TyCtxt<'tcx>,
     canonical_goal: CanonicalPredicateGoal<'tcx>,
 ) -> Result<EvaluationResult, OverflowError> {
+    debug!("evaluate_obligation(canonical_goal={:#?})", canonical_goal);
     tcx.infer_ctxt().enter_with_canonical(
         DUMMY_SP,
         &canonical_goal,
         |ref infcx, goal, _canonical_inference_vars| {
+            debug!("evaluate_obligation: goal={:#?}", goal);
             let ParamEnvAnd {
                 param_env,
                 value: predicate,
index 91ca6415bdcb94b84e0d9307ebbbb0ce6aef6f51..cc5348623fabda0dfdf0eda61c36b30fafc7e935 100644 (file)
@@ -73,7 +73,9 @@
 }
 
 crate fn generator(tcx: TyCtxt<'tcx>, def_id: DefId) -> Ty<'tcx> {
-    tcx.mk_generator(def_id, ty::GeneratorSubsts {
-        substs: InternalSubsts::bound_vars_for_item(tcx, def_id),
-    }, hir::GeneratorMovability::Movable)
+    tcx.mk_generator(
+        def_id,
+        InternalSubsts::bound_vars_for_item(tcx, def_id),
+        hir::GeneratorMovability::Movable
+    )
 }
index 4c30227150fb1850fb371bd24ee1c70aac678ded..0df367fcca83c307fb4d5dc73c9d81311115c1c2 100644 (file)
@@ -218,7 +218,7 @@ fn program_clauses_for_trait(tcx: TyCtxt<'_>, def_id: DefId) -> Clauses<'_> {
 
     let implemented_from_env = Clause::ForAll(ty::Binder::bind(implemented_from_env));
 
-    let predicates = &tcx.predicates_defined_on(def_id).predicates;
+    let predicates = tcx.predicates_defined_on(def_id).predicates;
 
     // Warning: these where clauses are not substituted for bound vars yet,
     // so that we don't need to adjust binders in the `FromEnv` rules below
@@ -319,7 +319,7 @@ fn program_clauses_for_impl(tcx: TyCtxt<'tcx>, def_id: DefId) -> Clauses<'tcx> {
     let trait_pred = ty::TraitPredicate { trait_ref }.lower();
 
     // `WC`
-    let predicates = &tcx.predicates_of(def_id).predicates;
+    let predicates = tcx.predicates_of(def_id).predicates;
     let where_clauses = predicates
         .iter()
         .map(|(wc, _)| wc.lower())
index b8e2700803a5d4a16ad6dcf25db94a604a4b6317..7e0a9bc4011c0fdc4240549b8940470d012d87e3 100644 (file)
@@ -54,8 +54,7 @@ pub trait AstConv<'tcx> {
     /// but this can lead to cycle errors. The problem is that we have
     /// to do this resolution *in order to create the predicates in
     /// the first place*. Hence, we have this "special pass".
-    fn get_type_parameter_bounds(&self, span: Span, def_id: DefId)
-                                 -> &'tcx ty::GenericPredicates<'tcx>;
+    fn get_type_parameter_bounds(&self, span: Span, def_id: DefId) -> ty::GenericPredicates<'tcx>;
 
     /// Returns the lifetime to use when a lifetime is omitted (and not elided).
     fn re_infer(
index 03f0860c6605589637bcc7ba68bbd77621bc2a94..4f4133954cf1d359e7b51f33bab81c980f77f0af 100644 (file)
@@ -113,21 +113,21 @@ fn check_closure(
             }
         });
         if let Some(GeneratorTypes { yield_ty, interior, movability }) = generator_types {
-            let substs = ty::GeneratorSubsts { substs };
+            let generator_substs = substs.as_generator();
             self.demand_eqtype(
                 expr.span,
                 yield_ty,
-                substs.yield_ty(expr_def_id, self.tcx),
+                generator_substs.yield_ty(expr_def_id, self.tcx),
             );
             self.demand_eqtype(
                 expr.span,
                 liberated_sig.output(),
-                substs.return_ty(expr_def_id, self.tcx),
+                generator_substs.return_ty(expr_def_id, self.tcx),
             );
             self.demand_eqtype(
                 expr.span,
                 interior,
-                substs.witness(expr_def_id, self.tcx),
+                generator_substs.witness(expr_def_id, self.tcx),
             );
             return self.tcx.mk_generator(expr_def_id, substs, movability);
         }
@@ -611,6 +611,16 @@ fn supplied_sig_of_closure(
                 Some(hir::GeneratorKind::Async(hir::AsyncGeneratorKind::Fn)) => {
                     debug!("supplied_sig_of_closure: closure is async fn body");
                     self.deduce_future_output_from_obligations(expr_def_id)
+                        .unwrap_or_else(|| {
+                            // AFAIK, deducing the future output
+                            // always succeeds *except* in error cases
+                            // like #65159. I'd like to return Error
+                            // here, but I can't because I can't
+                            // easily (and locally) prove that we
+                            // *have* reported an
+                            // error. --nikomatsakis
+                            astconv.ty_infer(None, decl.output.span())
+                        })
                 }
 
                 _ => astconv.ty_infer(None, decl.output.span()),
@@ -645,7 +655,7 @@ fn supplied_sig_of_closure(
     fn deduce_future_output_from_obligations(
         &self,
         expr_def_id: DefId,
-    ) -> Ty<'tcx> {
+    ) -> Option<Ty<'tcx>> {
         debug!("deduce_future_output_from_obligations(expr_def_id={:?})", expr_def_id);
 
         let ret_coercion =
@@ -688,8 +698,7 @@ fn deduce_future_output_from_obligations(
                 } else {
                     None
                 }
-            })
-            .unwrap();
+            });
 
         debug!("deduce_future_output_from_obligations: output_ty={:?}", output_ty);
         output_ty
index 2ea0afb1793562d47582a6e71143dc9d8c8e2fd5..677e2ea3566281fa0800b1c9bacd6e9ef04d980f 100644 (file)
@@ -115,6 +115,7 @@ pub fn demand_coerce_diag(&self,
             Err(e) => e
         };
 
+        let expr = expr.peel_drop_temps();
         let cause = self.misc(expr.span);
         let expr_ty = self.resolve_type_vars_with_obligations(checked_ty);
         let mut err = self.report_mismatched_types(&cause, expected, expr_ty, e);
@@ -349,7 +350,10 @@ pub fn check_ref(
 
         // If the span is from a macro, then it's hard to extract the text
         // and make a good suggestion, so don't bother.
-        let is_macro = sp.from_expansion();
+        let is_macro = sp.from_expansion() && sp.desugaring_kind().is_none();
+
+        // `ExprKind::DropTemps` is semantically irrelevant for these suggestions.
+        let expr = expr.peel_drop_temps();
 
         match (&expr.kind, &expected.kind, &checked_ty.kind) {
             (_, &ty::Ref(_, exp, _), &ty::Ref(_, check, _)) => match (&exp.kind, &check.kind) {
index d46ac4a39a33765866decdc23cf54e1016a7f5f3..0c8df9bad448fdce38f9058397424495a11893e6 100644 (file)
@@ -44,7 +44,7 @@ pub fn check_drop_impl(tcx: TyCtxt<'_>, drop_impl_did: DefId) -> Result<(), Erro
             ensure_drop_predicates_are_implied_by_item_defn(
                 tcx,
                 drop_impl_did,
-                &dtor_predicates,
+                dtor_predicates,
                 adt_def.did,
                 self_to_impl_substs,
             )
@@ -140,7 +140,7 @@ fn ensure_drop_params_and_item_params_correspond<'tcx>(
 fn ensure_drop_predicates_are_implied_by_item_defn<'tcx>(
     tcx: TyCtxt<'tcx>,
     drop_impl_did: DefId,
-    dtor_predicates: &ty::GenericPredicates<'tcx>,
+    dtor_predicates: ty::GenericPredicates<'tcx>,
     self_type_did: DefId,
     self_to_impl_substs: SubstsRef<'tcx>,
 ) -> Result<(), ErrorReported> {
@@ -199,7 +199,7 @@ fn ensure_drop_predicates_are_implied_by_item_defn<'tcx>(
     // just to look for all the predicates directly.
 
     assert_eq!(dtor_predicates.parent, None);
-    for (predicate, _) in &dtor_predicates.predicates {
+    for (predicate, _) in dtor_predicates.predicates {
         // (We do not need to worry about deep analysis of type
         // expressions etc because the Drop impls are already forced
         // to take on a structure that is roughly an alpha-renaming of
index 7a6fe9560fbfff4a6ed5fb4d182090df8ec73133..ad46a443b8ffa9e886969f8409266a13d30b4d7a 100644 (file)
@@ -87,10 +87,8 @@ fn check_expr_meets_expectation_or_error(
         }
 
         if let Some(mut err) = self.demand_suptype_diag(expr.span, expected_ty, ty) {
-            let expr = match &expr.kind {
-                ExprKind::DropTemps(expr) => expr,
-                _ => expr,
-            };
+            let expr = expr.peel_drop_temps();
+            self.suggest_ref_or_into(&mut err, expr, expected_ty, ty);
             extend_err(&mut err);
             // Error possibly reported in `check_assign` so avoid emitting error again.
             err.emit_unless(self.is_assign_to_bool(expr, expected_ty));
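
Several call sites in this commit replace a one-level `ExprKind::DropTemps(expr) => expr` match with `expr.peel_drop_temps()`. A hedged sketch of what such a helper does, on a miniature invented expression type (the real method lives on `hir::Expr`):

```rust
// Invented miniature HIR, for illustration only.
enum ExprKind {
    DropTemps(Box<Expr>),
    Lit(i64),
}

struct Expr {
    kind: ExprKind,
}

impl Expr {
    // Strip any number of `DropTemps` wrappers and return the innermost
    // expression, which the call sites above previously did by hand with a
    // single-level match.
    fn peel_drop_temps(&self) -> &Expr {
        let mut expr = self;
        while let ExprKind::DropTemps(inner) = &expr.kind {
            expr = inner;
        }
        expr
    }
}

fn main() {
    let e = Expr { kind: ExprKind::DropTemps(Box::new(Expr { kind: ExprKind::Lit(7) })) };
    if let ExprKind::Lit(n) = e.peel_drop_temps().kind {
        println!("inner literal: {}", n);
    }
}
```
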
index 2be311127214d2b6c3b6fd804e4b15c63d079bd5..e57cc809c347989488f7ba5e150ec4b05782141c 100644 (file)
@@ -58,7 +58,7 @@ pub enum MethodError<'tcx> {
 
     // Found a `Self: Sized` bound where `Self` is a trait object, also the caller may have
     // forgotten to import a trait.
-    IllegalSizedBound(Vec<DefId>),
+    IllegalSizedBound(Vec<DefId>, bool),
 
     // Found a match, but the return type is wrong
     BadReturnType,
@@ -214,32 +214,49 @@ pub fn lookup_method(
         );
 
         if result.illegal_sized_bound {
+            let mut needs_mut = false;
+            if let ty::Ref(region, t_type, mutability) = self_ty.kind {
+                let trait_type = self.tcx.mk_ref(region, ty::TypeAndMut {
+                    ty: t_type,
+                    mutbl: mutability.invert(),
+                });
+                // We probe again to see if there might be a borrow mutability discrepancy.
+                match self.lookup_probe(
+                    span,
+                    segment.ident,
+                    trait_type,
+                    call_expr,
+                    ProbeScope::TraitsInScope
+                ) {
+                    Ok(ref new_pick) if *new_pick != pick => {
+                        needs_mut = true;
+                    }
+                    _ => {}
+                }
+            }
+
             // We probe again, taking all traits into account (not only those in scope).
-            let candidates =
-                match self.lookup_probe(span,
-                                        segment.ident,
-                                        self_ty,
-                                        call_expr,
-                                        ProbeScope::AllTraits) {
-
-                    // If we find a different result the caller probably forgot to import a trait.
-                    Ok(ref new_pick) if *new_pick != pick => vec![new_pick.item.container.id()],
-                    Err(Ambiguity(ref sources)) => {
-                        sources.iter()
-                               .filter_map(|source| {
-                                   match *source {
-                                       // Note: this cannot come from an inherent impl,
-                                       // because the first probing succeeded.
-                                       ImplSource(def) => self.tcx.trait_id_of_impl(def),
-                                       TraitSource(_) => None,
-                                   }
-                               })
-                               .collect()
+            let candidates = match self.lookup_probe(
+                span,
+                segment.ident,
+                self_ty,
+                call_expr,
+                ProbeScope::AllTraits,
+            ) {
+                // If we find a different result the caller probably forgot to import a trait.
+                Ok(ref new_pick) if *new_pick != pick => vec![new_pick.item.container.id()],
+                Err(Ambiguity(ref sources)) => sources.iter().filter_map(|source| {
+                    match *source {
+                        // Note: this cannot come from an inherent impl,
+                        // because the first probing succeeded.
+                        ImplSource(def) => self.tcx.trait_id_of_impl(def),
+                        TraitSource(_) => None,
                     }
-                    _ => Vec::new(),
-                };
+                }).collect(),
+                _ => Vec::new(),
+            };
 
-            return Err(IllegalSizedBound(candidates));
+            return Err(IllegalSizedBound(candidates, needs_mut));
         }
 
         Ok(result.callee)
index 2b34c24b266d0d29d4e927dcc50c0b28f667c8c1..f2d001eadedde1cf9c543479a9795b38185d8b6d 100644 (file)
@@ -461,16 +461,36 @@ macro_rules! report_function {
                     err.span_label(span, "this is an associated function, not a method");
                 }
                 if static_sources.len() == 1 {
+                    let ty_str = if let Some(CandidateSource::ImplSource(
+                        impl_did,
+                    )) = static_sources.get(0) {
+                        // When the "method" is resolved through dereferencing, we really want the
+                        // original type that has the associated function for accurate suggestions.
+                        // (#61411)
+                        let ty = self.impl_self_ty(span, *impl_did).ty;
+                        match (&ty.peel_refs().kind, &actual.peel_refs().kind) {
+                            (ty::Adt(def, _), ty::Adt(def_actual, _)) if def == def_actual => {
+                                // Use `actual` as it will have more `substs` filled in.
+                                self.ty_to_value_string(actual.peel_refs())
+                            }
+                            _ => self.ty_to_value_string(ty.peel_refs()),
+                        }
+                    } else {
+                        self.ty_to_value_string(actual.peel_refs())
+                    };
                     if let SelfSource::MethodCall(expr) = source {
-                        err.span_suggestion(expr.span.to(span),
-                                            "use associated function syntax instead",
-                                            format!("{}::{}",
-                                                    self.ty_to_string(actual),
-                                                    item_name),
-                                            Applicability::MachineApplicable);
+                        err.span_suggestion(
+                            expr.span.to(span),
+                            "use associated function syntax instead",
+                            format!("{}::{}", ty_str, item_name),
+                            Applicability::MachineApplicable,
+                        );
                     } else {
-                        err.help(&format!("try with `{}::{}`",
-                                          self.ty_to_string(actual), item_name));
+                        err.help(&format!(
+                            "try with `{}::{}`",
+                            ty_str,
+                            item_name,
+                        ));
                     }
 
                     report_candidates(span, &mut err, static_sources);
@@ -518,7 +538,27 @@ macro_rules! report_function {
                     }
                 }
 
-                if let Some(lev_candidate) = lev_candidate {
+                let mut fallback_span = true;
+                let msg = "remove this method call";
+                if item_name.as_str() == "as_str" && actual.peel_refs().is_str() {
+                    if let SelfSource::MethodCall(expr) = source {
+                        let call_expr = self.tcx.hir().expect_expr(
+                            self.tcx.hir().get_parent_node(expr.hir_id),
+                        );
+                        if let Some(span) = call_expr.span.trim_start(expr.span) {
+                            err.span_suggestion(
+                                span,
+                                msg,
+                                String::new(),
+                                Applicability::MachineApplicable,
+                            );
+                            fallback_span = false;
+                        }
+                    }
+                    if fallback_span {
+                        err.span_label(span, msg);
+                    }
+                } else if let Some(lev_candidate) = lev_candidate {
                     let def_kind = lev_candidate.def_kind();
                     err.span_suggestion(
                         span,
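
The new `as_str` special case above emits a "remove this method call" suggestion when the receiver is already a string slice. A hedged sketch of the erroneous input it is aimed at:

```compile_fail
// Intentionally erroneous: string slices have no `as_str` method, so the
// compiler errors and, with this change, suggests removing the call entirely.
fn print_name(name: &str) {
    println!("{}", name.as_str()); // error: no method named `as_str` found for type `&str`
                                   // suggestion: remove this method call
}

fn main() {
    print_name("ferris");
}
```
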
@@ -553,22 +593,33 @@ macro_rules! report_function {
                 err.emit();
             }
 
-            MethodError::IllegalSizedBound(candidates) => {
+            MethodError::IllegalSizedBound(candidates, needs_mut) => {
                 let msg = format!("the `{}` method cannot be invoked on a trait object", item_name);
                 let mut err = self.sess().struct_span_err(span, &msg);
                 if !candidates.is_empty() {
-                    let help = format!("{an}other candidate{s} {were} found in the following \
-                                        trait{s}, perhaps add a `use` for {one_of_them}:",
-                                    an = if candidates.len() == 1 {"an" } else { "" },
-                                    s = pluralise!(candidates.len()),
-                                    were = if candidates.len() == 1 { "was" } else { "were" },
-                                    one_of_them = if candidates.len() == 1 {
-                                        "it"
-                                    } else {
-                                        "one_of_them"
-                                    });
+                    let help = format!(
+                        "{an}other candidate{s} {were} found in the following trait{s}, perhaps \
+                         add a `use` for {one_of_them}:",
+                        an = if candidates.len() == 1 {"an" } else { "" },
+                        s = pluralise!(candidates.len()),
+                        were = if candidates.len() == 1 { "was" } else { "were" },
+                        one_of_them = if candidates.len() == 1 {
+                            "it"
+                        } else {
+                            "one of them"
+                        },
+                    );
                     self.suggest_use_candidates(&mut err, help, candidates);
                 }
+                if let ty::Ref(region, t_type, mutability) = rcvr_ty.kind {
+                    if needs_mut {
+                        let trait_type = self.tcx.mk_ref(region, ty::TypeAndMut {
+                            ty: t_type,
+                            mutbl: mutability.invert(),
+                        });
+                        err.note(&format!("you need `{}` instead of `{}`", trait_type, rcvr_ty));
+                    }
+                }
                 err.emit();
             }
 
@@ -579,6 +630,14 @@ macro_rules! report_function {
         None
     }
 
+    /// Prints the type for use in the value namespace.
+    fn ty_to_value_string(&self, ty: Ty<'tcx>) -> String {
+        match ty.kind {
+            ty::Adt(def, substs) => format!("{}", ty::Instance::new(def.did, substs)),
+            _ => self.ty_to_string(ty),
+        }
+    }
+
     fn suggest_use_candidates(&self,
                               err: &mut DiagnosticBuilder<'_>,
                               mut msg: String,
@@ -718,7 +777,7 @@ fn suggest_traits_to_import<'b>(
             } else {
                 "items from traits can only be used if the trait is implemented and in scope"
             });
-            let mut msg = format!(
+            let message = |action| format!(
                 "the following {traits_define} an item `{name}`, perhaps you need to {action} \
                  {one_of_them}:",
                 traits_define = if candidates.len() == 1 {
@@ -726,11 +785,7 @@ fn suggest_traits_to_import<'b>(
                 } else {
                     "traits define"
                 },
-                action = if let Some(param) = param_type {
-                    format!("restrict type parameter `{}` with", param)
-                } else {
-                    "implement".to_string()
-                },
+                action = action,
                 one_of_them = if candidates.len() == 1 {
                     "it"
                 } else {
@@ -750,56 +805,81 @@ fn suggest_traits_to_import<'b>(
                         // Get the `hir::Param` to verify whether it already has any bounds.
                         // We do this to avoid suggesting code that ends up as `T: FooBar`,
                         // instead we suggest `T: Foo + Bar` in that case.
-                        let mut has_bounds = false;
-                        let mut impl_trait = false;
-                        if let Node::GenericParam(ref param) = hir.get(id) {
-                            match param.kind {
-                                hir::GenericParamKind::Type { synthetic: Some(_), .. } => {
+                        match hir.get(id) {
+                            Node::GenericParam(ref param) => {
+                                let mut impl_trait = false;
+                                let has_bounds = if let hir::GenericParamKind::Type {
+                                    synthetic: Some(_), ..
+                                } = &param.kind {
                                     // We've found `fn foo(x: impl Trait)` instead of
                                     // `fn foo<T>(x: T)`. We want to suggest the correct
                                     // `fn foo(x: impl Trait + TraitBound)` instead of
                                     // `fn foo<T: TraitBound>(x: T)`. (#63706)
                                     impl_trait = true;
-                                    has_bounds = param.bounds.len() > 1;
-                                }
-                                _ => {
-                                    has_bounds = !param.bounds.is_empty();
-                                }
+                                    param.bounds.get(1)
+                                } else {
+                                    param.bounds.get(0)
+                                };
+                                let sp = hir.span(id);
+                                let sp = if let Some(first_bound) = has_bounds {
+                                    // `sp` only covers `T`, change it so that it covers
+                                    // `T:` when appropriate
+                                    sp.until(first_bound.span())
+                                } else {
+                                    sp
+                                };
+                                // FIXME: contrast `t.def_id` against `param.bounds` to not suggest
+                                // traits already there. That can happen when the cause is that
+                                // we're in a const scope or associated function used as a method.
+                                err.span_suggestions(
+                                    sp,
+                                    &message(format!(
+                                        "restrict type parameter `{}` with",
+                                        param.name.ident().as_str(),
+                                    )),
+                                    candidates.iter().map(|t| format!(
+                                        "{}{} {}{}",
+                                        param.name.ident().as_str(),
+                                        if impl_trait { " +" } else { ":" },
+                                        self.tcx.def_path_str(t.def_id),
+                                        if has_bounds.is_some() { " + "} else { "" },
+                                    )),
+                                    Applicability::MaybeIncorrect,
+                                );
+                                suggested = true;
+                            }
+                            Node::Item(hir::Item {
+                                kind: hir::ItemKind::Trait(.., bounds, _), ident, ..
+                            }) => {
+                                let (sp, sep, article) = if bounds.is_empty() {
+                                    (ident.span.shrink_to_hi(), ":", "a")
+                                } else {
+                                    (bounds.last().unwrap().span().shrink_to_hi(), " +", "another")
+                                };
+                                err.span_suggestions(
+                                    sp,
+                                    &message(format!("add {} supertrait for", article)),
+                                    candidates.iter().map(|t| format!(
+                                        "{} {}",
+                                        sep,
+                                        self.tcx.def_path_str(t.def_id),
+                                    )),
+                                    Applicability::MaybeIncorrect,
+                                );
+                                suggested = true;
                             }
+                            _ => {}
                         }
-                        let sp = hir.span(id);
-                        // `sp` only covers `T`, change it so that it covers
-                        // `T:` when appropriate
-                        let sp = if has_bounds {
-                            sp.to(self.tcx
-                                .sess
-                                .source_map()
-                                .next_point(self.tcx.sess.source_map().next_point(sp)))
-                        } else {
-                            sp
-                        };
-
-                        // FIXME: contrast `t.def_id` against `param.bounds` to not suggest traits
-                        // already there. That can happen when the cause is that we're in a const
-                        // scope or associated function used as a method.
-                        err.span_suggestions(
-                            sp,
-                            &msg[..],
-                            candidates.iter().map(|t| format!(
-                                "{}{} {}{}",
-                                param,
-                                if impl_trait { " +" } else { ":" },
-                                self.tcx.def_path_str(t.def_id),
-                                if has_bounds { " +"} else { "" },
-                            )),
-                            Applicability::MaybeIncorrect,
-                        );
-                        suggested = true;
                     }
                 };
             }
 
             if !suggested {
+                let mut msg = message(if let Some(param) = param_type {
+                    format!("restrict type parameter `{}` with", param)
+                } else {
+                    "implement".to_string()
+                });
                 for (i, trait_info) in candidates.iter().enumerate() {
                     msg.push_str(&format!(
                         "\ncandidate #{}: `{}`",
index 7380bf7536de587f76ddd6cebbb51cedd2b832cd..152edf8dd0e5ad2989be33191a22e7cd5e67f704 100644 (file)
@@ -1713,8 +1713,6 @@ fn check_specialization_validity<'tcx>(
     impl_id: DefId,
     impl_item: &hir::ImplItem,
 ) {
-    let ancestors = trait_def.ancestors(tcx, impl_id);
-
     let kind = match impl_item.kind {
         hir::ImplItemKind::Const(..) => ty::AssocKind::Const,
         hir::ImplItemKind::Method(..) => ty::AssocKind::Method,
@@ -1722,15 +1720,53 @@ fn check_specialization_validity<'tcx>(
         hir::ImplItemKind::TyAlias(_) => ty::AssocKind::Type,
     };
 
-    let parent = ancestors.defs(tcx, trait_item.ident, kind, trait_def.def_id).nth(1)
-        .map(|node_item| node_item.map(|parent| parent.defaultness));
+    let mut ancestor_impls = trait_def.ancestors(tcx, impl_id)
+        .skip(1)
+        .filter_map(|parent| {
+            if parent.is_from_trait() {
+                None
+            } else {
+                Some((parent, parent.item(tcx, trait_item.ident, kind, trait_def.def_id)))
+            }
+        })
+        .peekable();
 
-    if let Some(parent) = parent {
-        if tcx.impl_item_is_final(&parent) {
-            report_forbidden_specialization(tcx, impl_item, parent.node.def_id());
-        }
+    if ancestor_impls.peek().is_none() {
+        // No parent, nothing to specialize.
+        return;
     }
 
+    let opt_result = ancestor_impls.find_map(|(parent_impl, parent_item)| {
+        match parent_item {
+            // Parent impl exists, and contains the parent item we're trying to specialize, but
+            // doesn't mark it `default`.
+            Some(parent_item) if tcx.impl_item_is_final(&parent_item) => {
+                Some(Err(parent_impl.def_id()))
+            }
+
+            // Parent impl contains item and makes it specializable.
+            Some(_) => {
+                Some(Ok(()))
+            }
+
+            // Parent impl doesn't mention the item. This means it's inherited from the
+            // grandparent. In that case, if parent is a `default impl`, inherited items use the
+            // "defaultness" from the grandparent, else they are final.
+            None => if tcx.impl_is_default(parent_impl.def_id()) {
+                None
+            } else {
+                Some(Err(parent_impl.def_id()))
+            }
+        }
+    });
+
+    // If `opt_result` is `None`, we have only encountered `default impl`s that don't contain the
+    // item. This is allowed; the item isn't actually getting specialized here.
+    let result = opt_result.unwrap_or(Ok(()));
+
+    if let Err(parent_impl) = result {
+        report_forbidden_specialization(tcx, impl_item, parent_impl);
+    }
 }
 
 fn check_impl_items_against_trait<'tcx>(
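
`check_specialization_validity` now walks every ancestor impl rather than only the direct parent, so that items inherited through a `default impl` keep the grandparent's defaultness. For context, a hedged sketch of the kind of code `report_forbidden_specialization` rejects (nightly-only `specialization` feature; names invented):

```compile_fail
// Intentionally erroneous sketch (requires the unstable `specialization` feature).
#![feature(specialization)]

trait Identify {
    fn id(&self) -> &'static str;
}

impl<T> Identify for T {
    // Not marked `default fn`, so more specific impls may not override it.
    fn id(&self) -> &'static str { "anything" }
}

impl Identify for String {
    // error: `id` specializes an item from a parent `impl`,
    // but that item is not marked `default`
    fn id(&self) -> &'static str { "a string" }
}

fn main() {}
```
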
@@ -1846,8 +1882,7 @@ fn check_impl_items_against_trait<'tcx>(
     let associated_type_overridden = overridden_associated_type.is_some();
     for trait_item in tcx.associated_items(impl_trait_ref.def_id) {
         let is_implemented = trait_def.ancestors(tcx, impl_id)
-            .defs(tcx, trait_item.ident, trait_item.kind, impl_trait_ref.def_id)
-            .next()
+            .leaf_def(tcx, trait_item.ident, trait_item.kind)
             .map(|node_item| !node_item.node.is_from_trait())
             .unwrap_or(false);
 
@@ -2210,19 +2245,17 @@ fn tcx<'b>(&'b self) -> TyCtxt<'tcx> {
         self.tcx
     }
 
-    fn get_type_parameter_bounds(&self, _: Span, def_id: DefId)
-                                 -> &'tcx ty::GenericPredicates<'tcx>
-    {
+    fn get_type_parameter_bounds(&self, _: Span, def_id: DefId) -> ty::GenericPredicates<'tcx> {
         let tcx = self.tcx;
         let hir_id = tcx.hir().as_local_hir_id(def_id).unwrap();
         let item_id = tcx.hir().ty_param_owner(hir_id);
         let item_def_id = tcx.hir().local_def_id(item_id);
         let generics = tcx.generics_of(item_def_id);
         let index = generics.param_def_id_to_index[&def_id];
-        tcx.arena.alloc(ty::GenericPredicates {
+        ty::GenericPredicates {
             parent: None,
-            predicates: self.param_env.caller_bounds.iter().filter_map(|&predicate| {
-                match predicate {
+            predicates: tcx.arena.alloc_from_iter(
+                self.param_env.caller_bounds.iter().filter_map(|&predicate| match predicate {
                     ty::Predicate::Trait(ref data)
                     if data.skip_binder().self_ty().is_param(index) => {
                         // HACK(eddyb) should get the original `Span`.
@@ -2230,9 +2263,9 @@ fn get_type_parameter_bounds(&self, _: Span, def_id: DefId)
                         Some((predicate, span))
                     }
                     _ => None
-                }
-            }).collect()
-        })
+                }),
+            ),
+        }
     }
 
     fn re_infer(
@@ -4181,20 +4214,21 @@ pub fn get_fn_decl(&self, blk_id: hir::HirId) -> Option<(&'tcx hir::FnDecl, bool
     pub fn suggest_mismatched_types_on_tail(
         &self,
         err: &mut DiagnosticBuilder<'tcx>,
-        expression: &'tcx hir::Expr,
+        expr: &'tcx hir::Expr,
         expected: Ty<'tcx>,
         found: Ty<'tcx>,
         cause_span: Span,
         blk_id: hir::HirId,
     ) -> bool {
-        self.suggest_missing_semicolon(err, expression, expected, cause_span);
+        let expr = expr.peel_drop_temps();
+        self.suggest_missing_semicolon(err, expr, expected, cause_span);
         let mut pointing_at_return_type = false;
         if let Some((fn_decl, can_suggest)) = self.get_fn_decl(blk_id) {
             pointing_at_return_type = self.suggest_missing_return_type(
                 err, &fn_decl, expected, found, can_suggest);
         }
-        self.suggest_ref_or_into(err, expression, expected, found);
-        self.suggest_boxing_when_appropriate(err, expression, expected, found);
+        self.suggest_ref_or_into(err, expr, expected, found);
+        self.suggest_boxing_when_appropriate(err, expr, expected, found);
         pointing_at_return_type
     }
 
index e736a55a5f51cead6cbbfd6c23ce733c387cdfec..18b103960c74595b5f684ea77eeb9434b58d9bf6 100644 (file)
@@ -172,6 +172,18 @@ pub fn check_trait_item(tcx: TyCtxt<'_>, def_id: DefId) {
         _ => None
     };
     check_associated_item(tcx, trait_item.hir_id, trait_item.span, method_sig);
+
+    // Prohibits applying `#[track_caller]` to trait decls
+    for attr in &trait_item.attrs {
+        if attr.check_name(sym::track_caller) {
+            struct_span_err!(
+                tcx.sess,
+                attr.span,
+                E0738,
+                "`#[track_caller]` is not supported in trait declarations."
+            ).emit();
+        }
+    }
 }
 
 pub fn check_impl_item(tcx: TyCtxt<'_>, def_id: DefId) {
@@ -182,6 +194,30 @@ pub fn check_impl_item(tcx: TyCtxt<'_>, def_id: DefId) {
         hir::ImplItemKind::Method(ref sig, _) => Some(sig),
         _ => None
     };
+
+    // Prohibits applying `#[track_caller]` to trait impls
+    if method_sig.is_some() {
+        let track_caller_attr = impl_item.attrs.iter()
+            .find(|a| a.check_name(sym::track_caller));
+        if let Some(tc_attr) = track_caller_attr {
+            let parent_hir_id = tcx.hir().get_parent_item(hir_id);
+            let containing_item = tcx.hir().expect_item(parent_hir_id);
+            let containing_impl_is_for_trait = match &containing_item.kind {
+                hir::ItemKind::Impl(_, _, _, _, tr, _, _) => tr.is_some(),
+                _ => bug!("parent of an ImplItem must be an Impl"),
+            };
+
+            if containing_impl_is_for_trait {
+                struct_span_err!(
+                    tcx.sess,
+                    tc_attr.span,
+                    E0738,
+                    "`#[track_caller]` is not supported in traits yet."
+                ).emit();
+            }
+        }
+    }
+
     check_associated_item(tcx, impl_item.hir_id, impl_item.span, method_sig);
 }
 
@@ -755,7 +791,7 @@ fn check_opaque_types<'fcx, 'tcx>(
                         "check_opaque_types: may define, predicates={:#?}",
                         predicates,
                     );
-                    for &(pred, _) in predicates.predicates.iter() {
+                    for &(pred, _) in predicates.predicates {
                         let substituted_pred = pred.subst(fcx.tcx, substs);
                         // Avoid duplication of predicates that contain no parameters, for example.
                         if !predicates.predicates.iter().any(|&(p, _)| p == substituted_pred) {
@@ -975,7 +1011,7 @@ fn check_variances_for_type_defn<'tcx>(
 
     identify_constrained_generic_params(
         tcx,
-        &ty_predicates,
+        ty_predicates,
         None,
         &mut constrained_parameters,
     );
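
The two checks added above reject `#[track_caller]` on trait declarations and on methods inside trait impls (E0738). A hedged sketch of code the second check rejects (names invented; requires the unstable `track_caller` feature):

```compile_fail
// Intentionally erroneous sketch.
#![feature(track_caller)]

trait Speak {
    fn speak(&self);
}

struct Dog;

impl Speak for Dog {
    #[track_caller] // error[E0738]: `#[track_caller]` is not supported in traits yet.
    fn speak(&self) {
        println!("woof");
    }
}

fn main() {
    Dog.speak();
}
```
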
index d973106058eafc37b5bc2ffe5ef615bf59780294..1749fd1075e0524a86f0a904f8dbf3cfb8dd1f9b 100644 (file)
@@ -182,8 +182,7 @@ fn tcx(&self) -> TyCtxt<'tcx> {
         self.tcx
     }
 
-    fn get_type_parameter_bounds(&self, span: Span, def_id: DefId)
-                                 -> &'tcx ty::GenericPredicates<'tcx> {
+    fn get_type_parameter_bounds(&self, span: Span, def_id: DefId) -> ty::GenericPredicates<'tcx> {
         self.tcx
             .at(span)
             .type_param_predicates((self.item_def_id, def_id))
@@ -254,7 +253,7 @@ fn record_ty(&self, _hir_id: hir::HirId, _ty: Ty<'tcx>, _span: Span) {
 fn type_param_predicates(
     tcx: TyCtxt<'_>,
     (item_def_id, def_id): (DefId, DefId),
-) -> &ty::GenericPredicates<'_> {
+) -> ty::GenericPredicates<'_> {
     use rustc::hir::*;
 
     // In the AST, bounds can derive from two places. Either
@@ -275,10 +274,10 @@ fn type_param_predicates(
         tcx.generics_of(item_def_id).parent
     };
 
-    let result = parent.map_or(&tcx.common.empty_predicates, |parent| {
+    let mut result = parent.map(|parent| {
         let icx = ItemCtxt::new(tcx, parent);
         icx.get_type_parameter_bounds(DUMMY_SP, def_id)
-    });
+    }).unwrap_or_default();
     let mut extend = None;
 
     let item_hir_id = tcx.hir().as_local_hir_id(item_def_id).unwrap();
@@ -321,9 +320,7 @@ fn type_param_predicates(
     };
 
     let icx = ItemCtxt::new(tcx, item_def_id);
-    let mut result = (*result).clone();
-    result.predicates.extend(extend.into_iter());
-    result.predicates.extend(
+    let extra_predicates = extend.into_iter().chain(
         icx.type_parameter_bounds_in_generics(ast_generics, param_id, ty, OnlySelfBounds(true))
             .into_iter()
             .filter(|(predicate, _)| {
@@ -331,9 +328,12 @@ fn type_param_predicates(
                     ty::Predicate::Trait(ref data) => data.skip_binder().self_ty().is_param(index),
                     _ => false,
                 }
-            })
+            }),
+    );
+    result.predicates = tcx.arena.alloc_from_iter(
+        result.predicates.iter().copied().chain(extra_predicates),
     );
-    tcx.arena.alloc(result)
+    result
 }
 
 impl ItemCtxt<'tcx> {
@@ -698,7 +698,7 @@ fn adt_def(tcx: TyCtxt<'_>, def_id: DefId) -> &ty::AdtDef {
 fn super_predicates_of(
     tcx: TyCtxt<'_>,
     trait_def_id: DefId,
-) -> &ty::GenericPredicates<'_> {
+) -> ty::GenericPredicates<'_> {
     debug!("super_predicates(trait_def_id={:?})", trait_def_id);
     let trait_hir_id = tcx.hir().as_local_hir_id(trait_def_id).unwrap();
 
@@ -732,21 +732,23 @@ fn super_predicates_of(
         generics, item.hir_id, self_param_ty, OnlySelfBounds(!is_trait_alias));
 
     // Combine the two lists to form the complete set of superbounds:
-    let superbounds: Vec<_> = superbounds1.into_iter().chain(superbounds2).collect();
+    let superbounds = &*tcx.arena.alloc_from_iter(
+        superbounds1.into_iter().chain(superbounds2)
+    );
 
     // Now require that immediate supertraits are converted,
     // which will, in turn, reach indirect supertraits.
-    for &(pred, span) in &superbounds {
+    for &(pred, span) in superbounds {
         debug!("superbound: {:?}", pred);
         if let ty::Predicate::Trait(bound) = pred {
             tcx.at(span).super_predicates_of(bound.def_id());
         }
     }
 
-    tcx.arena.alloc(ty::GenericPredicates {
+    ty::GenericPredicates {
         parent: None,
         predicates: superbounds,
-    })
+    }
 }
 
 fn trait_def(tcx: TyCtxt<'_>, def_id: DefId) -> &ty::TraitDef {
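
Throughout this commit, `GenericPredicates` is returned by value instead of as `&'tcx GenericPredicates`, with only the `predicates` slice arena-allocated through `alloc_from_iter`. A standalone sketch of that pattern with an invented arena type (the real one is rustc's `tcx.arena`):

```rust
// Standalone sketch; `Arena` and the predicate representation are invented.
struct Arena {
    storage: Vec<Vec<(u32, &'static str)>>,
}

impl Arena {
    fn alloc_from_iter<I>(&mut self, iter: I) -> &[(u32, &'static str)]
    where
        I: IntoIterator<Item = (u32, &'static str)>,
    {
        self.storage.push(iter.into_iter().collect());
        self.storage.last().unwrap()
    }
}

// Small, cheaply copied header: only the slice lives in the arena.
#[derive(Clone, Copy, Debug)]
struct GenericPredicates<'a> {
    parent: Option<u32>,
    predicates: &'a [(u32, &'static str)],
}

fn predicates_of(arena: &mut Arena) -> GenericPredicates<'_> {
    GenericPredicates {
        parent: None,
        predicates: arena.alloc_from_iter(vec![(0, "Self: Foo"), (1, "T: Clone")]),
    }
}

fn main() {
    let mut arena = Arena { storage: Vec::new() };
    let preds = predicates_of(&mut arena);
    println!("{} predicates, parent = {:?}", preds.predicates.len(), preds.parent);
}
```
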
@@ -1508,9 +1510,29 @@ pub fn checked_type_of(tcx: TyCtxt<'_>, def_id: DefId, fail: bool) -> Option<Ty<
         }
 
         Node::GenericParam(param) => match &param.kind {
-            hir::GenericParamKind::Type { default: Some(ref ty), .. } |
-            hir::GenericParamKind::Const { ref ty, .. } => {
-                icx.to_ty(ty)
+            hir::GenericParamKind::Type { default: Some(ref ty), .. } => icx.to_ty(ty),
+            hir::GenericParamKind::Const { ty: ref hir_ty, .. } => {
+                let ty = icx.to_ty(hir_ty);
+                if !tcx.features().const_compare_raw_pointers {
+                    let err = match ty.peel_refs().kind {
+                        ty::FnPtr(_) => Some("function pointers"),
+                        ty::RawPtr(_) => Some("raw pointers"),
+                        _ => None,
+                    };
+                    if let Some(unsupported_type) = err {
+                        feature_gate::emit_feature_err(
+                            &tcx.sess.parse_sess,
+                            sym::const_compare_raw_pointers,
+                            hir_ty.span,
+                            feature_gate::GateIssue::Language,
+                            &format!(
+                                "using {} as const generic parameters is unstable",
+                                unsupported_type
+                            ),
+                        );
+                    };
+                }
+                ty
             }
             x => {
                 if !fail {
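
The new branch above feature-gates function pointers and raw pointers as const generic parameter types behind `const_compare_raw_pointers`. A hedged sketch of code that now needs the extra feature (nightly-only `const_generics` at the time; the struct name is invented):

```compile_fail
// Intentionally erroneous sketch.
#![feature(const_generics)]

// error: using function pointers as const generic parameters is unstable
// (requires `#![feature(const_compare_raw_pointers)]`)
struct Validated<const CHECK: fn(usize) -> bool>;

fn main() {}
```
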
@@ -1938,7 +1960,7 @@ fn early_bound_lifetimes_from_generics<'a, 'tcx: 'a>(
 fn predicates_defined_on(
     tcx: TyCtxt<'_>,
     def_id: DefId,
-) -> &ty::GenericPredicates<'_> {
+) -> ty::GenericPredicates<'_> {
     debug!("predicates_defined_on({:?})", def_id);
     let mut result = tcx.explicit_predicates_of(def_id);
     debug!(
@@ -1954,9 +1976,13 @@ fn predicates_defined_on(
             def_id,
             inferred_outlives,
         );
-        let mut predicates = (*result).clone();
-        predicates.predicates.extend(inferred_outlives.iter().map(|&p| (p, span)));
-        result = tcx.arena.alloc(predicates);
+        result.predicates = tcx.arena.alloc_from_iter(
+            result.predicates.iter().copied().chain(
+                // FIXME(eddyb) use better spans - maybe add `Span`s
+                // to `inferred_outlives_of` predicates as well?
+                inferred_outlives.iter().map(|&p| (p, span)),
+            ),
+        );
     }
     debug!("predicates_defined_on({:?}) = {:?}", def_id, result);
     result
@@ -1965,7 +1991,7 @@ fn predicates_defined_on(
 /// Returns a list of all type predicates (explicit and implicit) for the definition with
 /// ID `def_id`. This includes all predicates returned by `predicates_defined_on`, plus
 /// `Self: Trait` predicates for traits.
-fn predicates_of(tcx: TyCtxt<'_>, def_id: DefId) -> &ty::GenericPredicates<'_> {
+fn predicates_of(tcx: TyCtxt<'_>, def_id: DefId) -> ty::GenericPredicates<'_> {
     let mut result = tcx.predicates_defined_on(def_id);
 
     if tcx.is_trait(def_id) {
@@ -1982,9 +2008,11 @@ fn predicates_of(tcx: TyCtxt<'_>, def_id: DefId) -> &ty::GenericPredicates<'_> {
         // used, and adding the predicate into this list ensures
         // that this is done.
         let span = tcx.def_span(def_id);
-        let mut predicates = (*result).clone();
-        predicates.predicates.push((ty::TraitRef::identity(tcx, def_id).to_predicate(), span));
-        result = tcx.arena.alloc(predicates);
+        result.predicates = tcx.arena.alloc_from_iter(
+            result.predicates.iter().copied().chain(
+                std::iter::once((ty::TraitRef::identity(tcx, def_id).to_predicate(), span))
+            ),
+        );
     }
     debug!("predicates_of(def_id={:?}) = {:?}", def_id, result);
     result
@@ -1995,7 +2023,7 @@ fn predicates_of(tcx: TyCtxt<'_>, def_id: DefId) -> &ty::GenericPredicates<'_> {
 fn explicit_predicates_of(
     tcx: TyCtxt<'_>,
     def_id: DefId,
-) -> &ty::GenericPredicates<'_> {
+) -> ty::GenericPredicates<'_> {
     use rustc::hir::*;
     use rustc_data_structures::fx::FxHashSet;
 
@@ -2004,6 +2032,7 @@ fn explicit_predicates_of(
     /// A data structure with unique elements, which preserves order of insertion.
     /// Preserving the order of insertion is important here so as not to break
     /// compile-fail UI tests.
+    // FIXME(eddyb) just use `IndexSet` from `indexmap`.
     struct UniquePredicates<'tcx> {
         predicates: Vec<(ty::Predicate<'tcx>, Span)>,
         uniques: FxHashSet<(ty::Predicate<'tcx>, Span)>,
@@ -2113,10 +2142,10 @@ fn extend<I: IntoIterator<Item = (ty::Predicate<'tcx>, Span)>>(&mut self, iter:
                     let bounds_predicates = bounds.predicates(tcx, opaque_ty);
                     if impl_trait_fn.is_some() {
                         // opaque types
-                        return tcx.arena.alloc(ty::GenericPredicates {
+                        return ty::GenericPredicates {
                             parent: None,
-                            predicates: bounds_predicates,
-                        });
+                            predicates: tcx.arena.alloc_from_iter(bounds_predicates),
+                        };
                     } else {
                         // named opaque types
                         predicates.extend(bounds_predicates);
@@ -2319,10 +2348,10 @@ fn extend<I: IntoIterator<Item = (ty::Predicate<'tcx>, Span)>>(&mut self, iter:
         );
     }
 
-    let result = tcx.arena.alloc(ty::GenericPredicates {
+    let result = ty::GenericPredicates {
         parent: generics.parent,
-        predicates,
-    });
+        predicates: tcx.arena.alloc_from_iter(predicates),
+    };
     debug!("explicit_predicates_of(def_id={:?}) = {:?}", def_id, result);
     result
 }
@@ -2560,6 +2589,7 @@ fn codegen_fn_attrs(tcx: TyCtxt<'_>, id: DefId) -> CodegenFnAttrs {
     let whitelist = tcx.target_features_whitelist(LOCAL_CRATE);
 
     let mut inline_span = None;
+    let mut link_ordinal_span = None;
     for attr in attrs.iter() {
         if attr.check_name(sym::cold) {
             codegen_fn_attrs.flags |= CodegenFnAttrFlags::COLD;
@@ -2593,6 +2623,16 @@ fn codegen_fn_attrs(tcx: TyCtxt<'_>, id: DefId) -> CodegenFnAttrs {
             codegen_fn_attrs.flags |= CodegenFnAttrFlags::USED;
         } else if attr.check_name(sym::thread_local) {
             codegen_fn_attrs.flags |= CodegenFnAttrFlags::THREAD_LOCAL;
+        } else if attr.check_name(sym::track_caller) {
+            if tcx.fn_sig(id).abi() != abi::Abi::Rust {
+                struct_span_err!(
+                    tcx.sess,
+                    attr.span,
+                    E0737,
+                    "Rust ABI is required to use `#[track_caller]`"
+                ).emit();
+            }
+            codegen_fn_attrs.flags |= CodegenFnAttrFlags::TRACK_CALLER;
         } else if attr.check_name(sym::export_name) {
             if let Some(s) = attr.value_str() {
                 if s.as_str().contains("\0") {
@@ -2641,6 +2681,11 @@ fn codegen_fn_attrs(tcx: TyCtxt<'_>, id: DefId) -> CodegenFnAttrs {
             }
         } else if attr.check_name(sym::link_name) {
             codegen_fn_attrs.link_name = attr.value_str();
+        } else if attr.check_name(sym::link_ordinal) {
+            link_ordinal_span = Some(attr.span);
+            if let ordinal @ Some(_) = check_link_ordinal(tcx, attr) {
+                codegen_fn_attrs.link_ordinal = ordinal;
+            }
         }
     }
 
@@ -2718,6 +2763,7 @@ fn codegen_fn_attrs(tcx: TyCtxt<'_>, id: DefId) -> CodegenFnAttrs {
     // purpose functions as they wouldn't have the right target features
     // enabled. For that reason we also forbid #[inline(always)] as it can't be
     // respected.
+
     if codegen_fn_attrs.target_features.len() > 0 {
         if codegen_fn_attrs.inline == InlineAttr::Always {
             if let Some(span) = inline_span {
@@ -2742,6 +2788,7 @@ fn codegen_fn_attrs(tcx: TyCtxt<'_>, id: DefId) -> CodegenFnAttrs {
         codegen_fn_attrs.export_name = Some(name);
         codegen_fn_attrs.link_name = Some(name);
     }
+    check_link_name_xor_ordinal(tcx, &codegen_fn_attrs, link_ordinal_span);
 
     // Internal symbols to the standard library all have no_mangle semantics in
     // that they have defined symbol names present in the function name. This
@@ -2752,3 +2799,48 @@ fn codegen_fn_attrs(tcx: TyCtxt<'_>, id: DefId) -> CodegenFnAttrs {
 
     codegen_fn_attrs
 }
+
+fn check_link_ordinal(tcx: TyCtxt<'_>, attr: &ast::Attribute) -> Option<usize> {
+    use syntax::ast::{Lit, LitIntType, LitKind};
+    let meta_item_list = attr.meta_item_list();
+    let meta_item_list: Option<&[ast::NestedMetaItem]> = meta_item_list.as_ref().map(Vec::as_ref);
+    let sole_meta_list = match meta_item_list {
+        Some([item]) => item.literal(),
+        _ => None,
+    };
+    if let Some(Lit { kind: LitKind::Int(ordinal, LitIntType::Unsuffixed), .. }) = sole_meta_list {
+        if *ordinal <= std::usize::MAX as u128 {
+            Some(*ordinal as usize)
+        } else {
+            let msg = format!(
+                "ordinal value in `link_ordinal` is too large: `{}`",
+                &ordinal
+            );
+            tcx.sess.struct_span_err(attr.span, &msg)
+                .note("the value may not exceed `std::usize::MAX`")
+                .emit();
+            None
+        }
+    } else {
+        tcx.sess.struct_span_err(attr.span, "illegal ordinal format in `link_ordinal`")
+            .note("an unsuffixed integer value, e.g., `1`, is expected")
+            .emit();
+        None
+    }
+}
+
+fn check_link_name_xor_ordinal(
+    tcx: TyCtxt<'_>,
+    codegen_fn_attrs: &CodegenFnAttrs,
+    link_ordinal_span: Option<Span>,
+) {
+    if codegen_fn_attrs.link_name.is_none() || codegen_fn_attrs.link_ordinal.is_none() {
+        return;
+    }
+    let msg = "cannot use `#[link_name]` with `#[link_ordinal]`";
+    if let Some(span) = link_ordinal_span {
+        tcx.sess.span_err(span, msg);
+    } else {
+        tcx.sess.err(msg);
+    }
+}
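
`check_link_ordinal` accepts exactly one unsuffixed integer literal, and `check_link_name_xor_ordinal` rejects combining the attribute with `#[link_name]`. A hedged, non-compiling sketch of the forms those checks accept and reject (the attribute itself is unstable, and the function names below are invented):

```compile_fail
// Intentionally not a working program; it only illustrates the attribute forms.
extern "C" {
    #[link_ordinal(1)]         // accepted: a single unsuffixed integer literal
    fn imported_by_ordinal();

    #[link_ordinal(2u32)]      // rejected: an unsuffixed integer value is expected
    fn suffixed_ordinal();

    #[link_name = "real_name"] // rejected together with the ordinal below:
    #[link_ordinal(3)]         // cannot use `#[link_name]` with `#[link_ordinal]`
    fn name_and_ordinal();
}

fn main() {}
```
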
index 31476eb73179857bb866fa6f7c5009535eab26bb..1fdf49fde55b53082907be472a3865d543e88fc9 100644 (file)
@@ -86,11 +86,11 @@ fn visit_const(&mut self, c: &'tcx ty::Const<'tcx>) -> bool {
 
 pub fn identify_constrained_generic_params<'tcx>(
     tcx: TyCtxt<'tcx>,
-    predicates: &ty::GenericPredicates<'tcx>,
+    predicates: ty::GenericPredicates<'tcx>,
     impl_trait_ref: Option<ty::TraitRef<'tcx>>,
     input_parameters: &mut FxHashSet<Parameter>,
 ) {
-    let mut predicates = predicates.predicates.clone();
+    let mut predicates = predicates.predicates.to_vec();
     setup_constraining_predicates(tcx, &mut predicates, impl_trait_ref, input_parameters);
 }
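
A rough standalone sketch (with stand-in types, not the compiler's) of the pattern behind this signature change: as the surrounding hunks suggest, `GenericPredicates` now exposes its predicates as a borrowed slice, so it is cheap to pass by value and a mutable working copy comes from `to_vec()` rather than `clone()`:

```rust
#[derive(Clone, Copy)]
struct Predicates<'a> {
    predicates: &'a [(u32, u32)], // stand-in for (Predicate<'tcx>, Span) pairs
}

fn identify(preds: Predicates<'_>) -> Vec<(u32, u32)> {
    // Take a local mutable copy with to_vec(), as the diff does, then reorder it.
    let mut work = preds.predicates.to_vec();
    work.sort();
    work
}

fn main() {
    let owned = vec![(2, 0), (1, 0)];
    let p = Predicates { predicates: &owned };
    assert_eq!(identify(p), vec![(1, 0), (2, 0)]);
}
```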
 
index 3a07171b12fb80160016eb7a83463f41d141aa5f..ef08e8d4f0b7ab341d428a998b7061f653ebe29e 100644 (file)
@@ -1873,13 +1873,14 @@ struct Foo<'a> {
 differs from the behavior for `&T`, which is always `Copy`).
 "##,
 
-/*
 E0205: r##"
+#### Note: this error code is no longer emitted by the compiler.
+
 An attempt to implement the `Copy` trait for an enum failed because one of the
 variants does not implement `Copy`. To fix this, you must implement `Copy` for
 the mentioned variant. Note that this may not be possible, as in the example of
 
-```compile_fail,E0205
+```compile_fail,E0204
 enum Foo {
     Bar(Vec<u32>),
     Baz,
@@ -1892,7 +1893,7 @@ impl Copy for Foo { }
 
 Here's another example that will fail:
 
-```compile_fail,E0205
+```compile_fail,E0204
 #[derive(Copy)]
 enum Foo<'a> {
     Bar(&'a mut bool),
@@ -1903,7 +1904,6 @@ enum Foo<'a> {
 This fails because `&mut T` is not `Copy`, even when `T` is `Copy` (this
 differs from the behavior for `&T`, which is always `Copy`).
 "##,
-*/
 
 E0206: r##"
 You can only implement `Copy` for a struct or enum. Both of the following
@@ -2126,8 +2126,9 @@ impl<P1, ..., Pm> ForeignTrait<T1, ..., Tn> for T0 { ... }
 [RFC 1023]: https://github.com/rust-lang/rfcs/blob/master/text/1023-rebalancing-coherence.md
 "##,
 
-/*
 E0211: r##"
+#### Note: this error code is no longer emitted by the compiler.
+
 You used a function or type which doesn't fit the requirements for where it was
 used. Erroneous code examples:
 
@@ -2174,7 +2175,7 @@ fn x(self: Rc<Foo>) {}
 }
 ```
 
-The second case example is a bit particular : the main function must always
+The second case example is a bit particular: the main function must always
 have this definition:
 
 ```compile_fail
@@ -2206,7 +2207,6 @@ fn x(self: Box<Foo>) {} // ok!
 }
 ```
 "##,
-     */
 
 E0220: r##"
 You used an associated type which isn't defined in the trait.
@@ -2727,14 +2727,9 @@ impl<T, U> CoerceUnsized<MyType<U>> for MyType<T>
 [`CoerceUnsized`]: https://doc.rust-lang.org/std/ops/trait.CoerceUnsized.html
 "##,
 
-/*
-// Associated consts can now be accessed through generic type parameters, and
-// this error is no longer emitted.
-//
-// FIXME: consider whether to leave it in the error index, or remove it entirely
-//        as associated consts is not stabilized yet.
-
 E0329: r##"
+#### Note: this error code is no longer emitted by the compiler.
+
 An attempt was made to access an associated constant through either a generic
 type parameter or `Self`. This is not supported yet. An example causing this
 error is shown below:
@@ -2765,12 +2760,15 @@ trait Foo {
 
 struct MyStruct;
 
+impl Foo for MyStruct {
+    const BAR: f64 = 0f64;
+}
+
 fn get_bar_good() -> f64 {
     <MyStruct as Foo>::BAR
 }
 ```
 "##,
-*/
 
 E0366: r##"
 An attempt was made to implement `Drop` on a concrete specialization of a
@@ -4907,6 +4905,75 @@ fn foo_recursive(n: usize) -> Pin<Box<dyn Future<Output = ()>>> {
 The `Box<...>` ensures that the result is of known size,
 and the pin is required to keep it in the same place in memory.
 "##,
+
+E0737: r##"
+#[track_caller] requires functions to have the "Rust" ABI for implicitly
+receiving caller location. See [RFC 2091] for details on this and other
+restrictions.
+
+Erroneous code example:
+
+```compile_fail,E0737
+#![feature(track_caller)]
+
+#[track_caller]
+extern "C" fn foo() {}
+```
+
+[RFC 2091]: https://github.com/rust-lang/rfcs/blob/master/text/2091-inline-semantic.md
+"##,
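
For contrast with the erroneous example, a minimal sketch of code this check accepts, assuming only what the diagnostic states (the attribute is fine on a plain function with the default "Rust" ABI and no other restriction applies):

```rust
#![feature(track_caller)]

// Accepted: the default ("Rust") ABI satisfies the E0737 requirement.
#[track_caller]
fn foo() {}

fn main() {
    foo();
}
```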
+
+E0738: r##"
+#[track_caller] cannot be used in traits yet. This is due to limitations in the
+compiler which are likely to be temporary. See [RFC 2091] for details on this
+and other restrictions.
+
+Erroneous example with a trait method implementation:
+
+```compile_fail,E0738
+#![feature(track_caller)]
+
+trait Foo {
+    fn bar(&self);
+}
+
+impl Foo for u64 {
+    #[track_caller]
+    fn bar(&self) {}
+}
+```
+
+Erroneous example with a blanket trait method implementation:
+
+```compile_fail,E0738
+#![feature(track_caller)]
+
+trait Foo {
+    #[track_caller]
+    fn bar(&self) {}
+    fn baz(&self);
+}
+```
+
+Erroneous example with a trait method declaration:
+
+```compile_fail,E0738
+#![feature(track_caller)]
+
+trait Foo {
+    fn bar(&self) {}
+
+    #[track_caller]
+    fn baz(&self);
+}
+```
+
+Note that while the compiler may be able to support the attribute in traits in
+the future, [RFC 2091] prohibits implementing it there without a follow-up RFC.
+
+[RFC 2091]: https://github.com/rust-lang/rfcs/blob/master/text/2091-inline-semantic.md
+"##,
+
 ;
 //  E0035, merged into E0087/E0089
 //  E0036, merged into E0087/E0089
@@ -4973,7 +5040,7 @@ fn foo_recursive(n: usize) -> Pin<Box<dyn Future<Output = ()>>> {
            // between structures with the same definition
 //  E0558, // replaced with a generic attribute input check
 //  E0563, // cannot determine a type for this `impl Trait` removed in 6383de15
-    E0564, // only named lifetimes are allowed in `impl Trait`,
+//  E0564, // only named lifetimes are allowed in `impl Trait`,
            // but `{}` was found in the type `{}`
     E0587, // type has conflicting packed and align representation hints
     E0588, // packed type cannot transitively contain a `[repr(align)]` type
@@ -4986,7 +5053,7 @@ fn foo_recursive(n: usize) -> Pin<Box<dyn Future<Output = ()>>> {
     E0634, // type has conflicting packed representation hints
     E0640, // infer outlives requirements
     E0641, // cannot cast to/from a pointer with an unknown kind
-    E0645, // trait aliases not finished
+//  E0645, // trait aliases not finished
     E0719, // duplicate values for associated type binding
     E0722, // Malformed `#[optimize]` attribute
     E0724, // `#[ffi_returns_twice]` is only allowed in foreign functions
index ab660caa222ae800a49364888288c5ba4f29fbca..2d188007712ad376c4bb570e3cec41177fed30a1 100644 (file)
@@ -114,7 +114,7 @@ fn enforce_impl_params_are_constrained(
 
     let mut input_parameters = cgp::parameters_for_impl(impl_self_ty, impl_trait_ref);
     cgp::identify_constrained_generic_params(
-        tcx, &impl_predicates, impl_trait_ref, &mut input_parameters);
+        tcx, impl_predicates, impl_trait_ref, &mut input_parameters);
 
     // Disallow unconstrained lifetimes, but only if they appear in assoc types.
     let lifetimes_in_associated_types: FxHashSet<_> = impl_item_refs.iter()
index 26a8f79b8d8315df83321766d6f8b72b3cadc161..9374113e1c95042f61872001a8a4722e46af3755 100644 (file)
@@ -67,8 +67,6 @@
 #![feature(nll)]
 #![feature(slice_patterns)]
 #![feature(never_type)]
-#![feature(inner_deref)]
-#![feature(mem_take)]
 
 #![recursion_limit="256"]
 
index 40a57788c0710b9f73289bc5b7a2b93cfd2db55f..83194144216ee8d7177a0b0eb1d2c8d450f96a1c 100644 (file)
@@ -30,7 +30,7 @@ pub fn explicit_predicates_of(
             let mut required_predicates = RequiredPredicates::default();
 
             // process predicates and convert to `RequiredPredicates` entry, see below
-            for (pred, _) in predicates.predicates.iter() {
+            for (pred, _) in predicates.predicates {
                 match pred {
                     ty::Predicate::TypeOutlives(predicate) => {
                         let OutlivesPredicate(ref ty, ref reg) = predicate.skip_binder();
index 0eb8b73016d1048493b682d6b33f86cee31ed05f..e3de7fe20493e8353dbb0f729ab2390a58d47f37 100644 (file)
@@ -11,5 +11,5 @@ path = "lib.rs"
 [dependencies]
 pulldown-cmark = { version = "0.5.3", default-features = false }
 minifier = "0.0.33"
-rayon = { version = "0.2.0", package = "rustc-rayon" }
+rayon = { version = "0.3.0", package = "rustc-rayon" }
 tempfile = "3"
index 18a84cd0eeb76fe24c0b7eaee03f1b53324c0b83..b7f5ed9d004d49506deaf897f26c1bda3f466131 100644 (file)
@@ -104,7 +104,7 @@ pub fn get_auto_trait_impls(
                     // regardless of the choice of `T`.
                     let params = (
                         self.cx.tcx.generics_of(param_env_def_id),
-                        &&self.cx.tcx.common.empty_predicates,
+                        ty::GenericPredicates::default(),
                     ).clean(self.cx).params;
 
                     Generics {
@@ -489,7 +489,7 @@ fn param_env_to_generics(
 
         let mut generic_params = (
             tcx.generics_of(param_env_def_id),
-            &tcx.explicit_predicates_of(param_env_def_id),
+            tcx.explicit_predicates_of(param_env_def_id),
         ).clean(self.cx).params;
 
         let mut has_sized = FxHashSet::default();
index afed11e7fab26c607c288135398583a78868e41c..ff59dcab672f37b49642f5d67aa7c0057b0d5fcc 100644 (file)
@@ -107,7 +107,7 @@ pub fn get_blanket_impls(
                         unsafety: hir::Unsafety::Normal,
                         generics: (
                             self.cx.tcx.generics_of(impl_def_id),
-                            &self.cx.tcx.explicit_predicates_of(impl_def_id),
+                            self.cx.tcx.explicit_predicates_of(impl_def_id),
                         ).clean(self.cx),
                         provided_trait_methods,
                         // FIXME(eddyb) compute both `trait_` and `for_` from
index cf440924b2e63cc6d92b001a2d95e52513a70175..11f45c5f6d01c4ff1d849916daf91ec4100b1b9f 100644 (file)
@@ -9,7 +9,7 @@
 
 use syntax::symbol::{Symbol, sym};
 use syntax::ast::{MetaItem, MetaItemKind, NestedMetaItem, LitKind};
-use syntax::parse::ParseSess;
+use syntax::sess::ParseSess;
 use syntax::feature_gate::Features;
 
 use syntax_pos::Span;
index 532c5f67bf3baecf036ec5908acfba8f34e5efcb..e7cc8b76e485d0a16b7bc48747f6ae16fe041ad8 100644 (file)
@@ -3,7 +3,7 @@
 use std::iter::once;
 
 use syntax::ast;
-use syntax::ext::base::MacroKind;
+use syntax_expand::base::MacroKind;
 use syntax::symbol::sym;
 use syntax_pos::Span;
 
@@ -193,7 +193,7 @@ pub fn build_external_trait(cx: &DocContext<'_>, did: DefId) -> clean::Trait {
     let auto_trait = cx.tcx.trait_def(did).has_auto_impl;
     let trait_items = cx.tcx.associated_items(did).map(|item| item.clean(cx)).collect();
     let predicates = cx.tcx.predicates_of(did);
-    let generics = (cx.tcx.generics_of(did), &predicates).clean(cx);
+    let generics = (cx.tcx.generics_of(did), predicates).clean(cx);
     let generics = filter_non_trait_generics(did, generics);
     let (generics, supertrait_bounds) = separate_supertrait_bounds(generics);
     let is_spotlight = load_attrs(cx, did).clean(cx).has_doc_flag(sym::spotlight);
@@ -220,7 +220,7 @@ fn build_external_function(cx: &DocContext<'_>, did: DefId) -> clean::Function {
     let asyncness =  cx.tcx.asyncness(did);
     let predicates = cx.tcx.predicates_of(did);
     let (generics, decl) = clean::enter_impl_trait(cx, || {
-        ((cx.tcx.generics_of(did), &predicates).clean(cx), (did, sig).clean(cx))
+        ((cx.tcx.generics_of(did), predicates).clean(cx), (did, sig).clean(cx))
     });
     let (all_types, ret_types) = clean::get_all_types(&generics, &decl, cx);
     clean::Function {
@@ -241,7 +241,7 @@ fn build_enum(cx: &DocContext<'_>, did: DefId) -> clean::Enum {
     let predicates = cx.tcx.explicit_predicates_of(did);
 
     clean::Enum {
-        generics: (cx.tcx.generics_of(did), &predicates).clean(cx),
+        generics: (cx.tcx.generics_of(did), predicates).clean(cx),
         variants_stripped: false,
         variants: cx.tcx.adt_def(did).variants.clean(cx),
     }
@@ -257,7 +257,7 @@ fn build_struct(cx: &DocContext<'_>, did: DefId) -> clean::Struct {
             CtorKind::Fn => doctree::Tuple,
             CtorKind::Const => doctree::Unit,
         },
-        generics: (cx.tcx.generics_of(did), &predicates).clean(cx),
+        generics: (cx.tcx.generics_of(did), predicates).clean(cx),
         fields: variant.fields.clean(cx),
         fields_stripped: false,
     }
@@ -269,7 +269,7 @@ fn build_union(cx: &DocContext<'_>, did: DefId) -> clean::Union {
 
     clean::Union {
         struct_type: doctree::Plain,
-        generics: (cx.tcx.generics_of(did), &predicates).clean(cx),
+        generics: (cx.tcx.generics_of(did), predicates).clean(cx),
         fields: variant.fields.clean(cx),
         fields_stripped: false,
     }
@@ -280,7 +280,7 @@ fn build_type_alias(cx: &DocContext<'_>, did: DefId) -> clean::Typedef {
 
     clean::Typedef {
         type_: cx.tcx.type_of(did).clean(cx),
-        generics: (cx.tcx.generics_of(did), &predicates).clean(cx),
+        generics: (cx.tcx.generics_of(did), predicates).clean(cx),
     }
 }
 
@@ -376,7 +376,7 @@ pub fn build_impl(cx: &DocContext<'_>, did: DefId, attrs: Option<Attrs<'_>>,
                 }
             }).collect::<Vec<_>>(),
             clean::enter_impl_trait(cx, || {
-                (tcx.generics_of(did), &predicates).clean(cx)
+                (tcx.generics_of(did), predicates).clean(cx)
             }),
         )
     };
index 8f35ca01f79df2c8349109e9fa2ad95aca54374c..c355f661410e55e7b6d386a42d29b2ea4bf54768 100644 (file)
@@ -28,7 +28,7 @@
 use rustc::util::nodemap::{FxHashMap, FxHashSet};
 use syntax::ast::{self, AttrStyle, Ident};
 use syntax::attr;
-use syntax::ext::base::MacroKind;
+use syntax_expand::base::MacroKind;
 use syntax::source_map::DUMMY_SP;
 use syntax::symbol::{Symbol, kw, sym};
 use syntax::symbol::InternedString;
@@ -198,7 +198,7 @@ pub fn krate(mut cx: &mut DocContext<'_>) -> Crate {
             Item {
                 source: Span::empty(),
                 name: Some(kw.clone()),
-                attrs: attrs,
+                attrs,
                 visibility: Public,
                 stability: get_stability(cx, def_id),
                 deprecation: get_deprecation(cx, def_id),
@@ -1570,7 +1570,7 @@ fn clean(&self, cx: &DocContext<'_>) -> GenericParamDef {
                     did: cx.tcx.hir().local_def_id(self.hir_id),
                     bounds: self.bounds.clean(cx),
                     default: default.clean(cx),
-                    synthetic: synthetic,
+                    synthetic,
                 })
             }
             hir::GenericParamKind::Const { ref ty } => {
@@ -1664,8 +1664,7 @@ fn is_impl_trait(param: &hir::GenericParam) -> bool {
     }
 }
 
-impl<'a, 'tcx> Clean<Generics> for (&'a ty::Generics,
-                                    &'a &'tcx ty::GenericPredicates<'tcx>) {
+impl<'a, 'tcx> Clean<Generics> for (&'a ty::Generics, ty::GenericPredicates<'tcx>) {
     fn clean(&self, cx: &DocContext<'_>) -> Generics {
         use self::WherePredicate as WP;
         use std::collections::BTreeMap;
@@ -2213,7 +2212,7 @@ fn clean(&self, cx: &DocContext<'_>) -> Item {
         let is_spotlight = attrs.has_doc_flag(sym::spotlight);
         Item {
             name: Some(self.name.clean(cx)),
-            attrs: attrs,
+            attrs,
             source: self.whence.clean(cx),
             def_id: cx.tcx.hir().local_def_id(self.id),
             visibility: self.vis.clean(cx),
@@ -2369,7 +2368,7 @@ fn clean(&self, cx: &DocContext<'_>) -> Item {
             }
             ty::AssocKind::Method => {
                 let generics = (cx.tcx.generics_of(self.def_id),
-                                &cx.tcx.explicit_predicates_of(self.def_id)).clean(cx);
+                                cx.tcx.explicit_predicates_of(self.def_id)).clean(cx);
                 let sig = cx.tcx.fn_sig(self.def_id);
                 let mut decl = (self.def_id, sig).clean(cx);
 
@@ -2448,7 +2447,7 @@ fn clean(&self, cx: &DocContext<'_>) -> Item {
                     // all of the generics from there and then look for bounds that are
                     // applied to this associated type in question.
                     let predicates = cx.tcx.explicit_predicates_of(did);
-                    let generics = (cx.tcx.generics_of(did), &predicates).clean(cx);
+                    let generics = (cx.tcx.generics_of(did), predicates).clean(cx);
                     let mut bounds = generics.where_predicates.iter().filter_map(|pred| {
                         let (name, self_type, trait_, bounds) = match *pred {
                             WherePredicate::BoundPredicate {
@@ -2844,7 +2843,7 @@ fn clean(&self, cx: &DocContext<'_>) -> Type {
                 } else {
                     Some(l.clean(cx))
                 };
-                BorrowedRef {lifetime: lifetime, mutability: m.mutbl.clean(cx),
+                BorrowedRef {lifetime, mutability: m.mutbl.clean(cx),
                              type_: box m.ty.clean(cx)}
             }
             TyKind::Slice(ref ty) => Slice(box ty.clean(cx)),
@@ -3102,9 +3101,9 @@ fn clean(&self, cx: &DocContext<'_>) -> Type {
                 let path = external_path(cx, cx.tcx.item_name(did),
                                          None, false, vec![], InternalSubsts::empty());
                 ResolvedPath {
-                    path: path,
+                    path,
                     param_names: None,
-                    did: did,
+                    did,
                     is_generic: false,
                 }
             }
@@ -4274,7 +4273,7 @@ fn resolve_type(cx: &DocContext<'_>,
         _ => false,
     };
     let did = register_res(&*cx, path.res);
-    ResolvedPath { path: path, param_names: None, did: did, is_generic: is_generic }
+    ResolvedPath { path, param_names: None, did, is_generic }
 }
 
 pub fn register_res(cx: &DocContext<'_>, res: Res) -> DefId {
index fe4e2bd091519b9e912d3a2fe5fb8854af8fd473..1c0d1b327373195c49c337e25aa84c25486651bf 100644 (file)
@@ -343,10 +343,7 @@ pub fn from_matches(matches: &getopts::Matches) -> Result<Options, i32> {
         let output = matches.opt_str("o")
                             .map(|s| PathBuf::from(&s))
                             .unwrap_or_else(|| PathBuf::from("doc"));
-        let mut cfgs = matches.opt_strs("cfg");
-        if should_test {
-            cfgs.push("doctest".to_string());
-        }
+        let cfgs = matches.opt_strs("cfg");
 
         let extension_css = matches.opt_str("e").map(|s| PathBuf::from(&s));
 
index 6e453561f6da20f06d283b1e666dbc958f4a3032..0dc094ae329fb414eb26d12fa1c0145f27677aea 100644 (file)
@@ -4,7 +4,7 @@
 
 use syntax::ast;
 use syntax::ast::Name;
-use syntax::ext::base::MacroKind;
+use syntax_expand::base::MacroKind;
 use syntax_pos::{self, Span};
 
 use rustc::hir;
index 5d86ee9721b75711aa73b12290c2493c15f82eac..30c9453a643d48bde9a5c9754229ca057ba39607 100644 (file)
@@ -14,7 +14,7 @@
 use syntax::source_map::{SourceMap, FilePathMapping};
 use syntax::parse::lexer;
 use syntax::parse::token::{self, Token};
-use syntax::parse;
+use syntax::sess::ParseSess;
 use syntax::symbol::{kw, sym};
 use syntax_pos::{Span, FileName};
 
@@ -33,7 +33,7 @@ pub fn render_with_highlighting(
                class, tooltip).unwrap();
     }
 
-    let sess = parse::ParseSess::new(FilePathMapping::empty());
+    let sess = ParseSess::new(FilePathMapping::empty());
     let fm = sess.source_map().new_source_file(
         FileName::Custom(String::from("rustdoc-highlighting")),
         src.to_owned(),
index 5fb9afd6c49a04abd4f5c2cf7c727267c138bca9..e015739b03c40b358c10b0822bfd9d4106d9e161 100644 (file)
@@ -1,7 +1,7 @@
 //! Item types.
 
 use std::fmt;
-use syntax::ext::base::MacroKind;
+use syntax_expand::base::MacroKind;
 use crate::clean;
 
 /// Item type. Corresponds to `clean::ItemEnum` variants.
index 1ff71a0024b28c853d54285ddd5e2b60a084fd2e..72a72e892814a97e88507a11d7d7a3b8620b0ba6 100644 (file)
 use serialize::json::{ToJson, Json, as_json};
 use syntax::ast;
 use syntax::edition::Edition;
-use syntax::ext::base::MacroKind;
-use syntax::source_map::FileName;
 use syntax::feature_gate::UnstableFeatures;
+use syntax::print::pprust;
+use syntax::source_map::FileName;
 use syntax::symbol::{Symbol, sym};
+use syntax_expand::base::MacroKind;
 use rustc::hir::def_id::DefId;
 use rustc::middle::privacy::AccessLevels;
 use rustc::middle::stability;
@@ -2957,7 +2958,7 @@ fn item_enum(w: &mut Buffer, cx: &Context, it: &clean::Item, e: &clean::Enum) {
 }
 
 fn render_attribute(attr: &ast::MetaItem) -> Option<String> {
-    let path = attr.path.to_string();
+    let path = pprust::path_to_string(&attr.path);
 
     if attr.is_word() {
         Some(path)
index 539bf5dfe28e629fff59367218888c7853f6ad0e..8cd32a3d1b508cc3e55e9ccbab6a53319fed9da2 100644 (file)
@@ -14,9 +14,7 @@
 #![feature(crate_visibility_modifier)]
 #![feature(const_fn)]
 #![feature(drain_filter)]
-#![feature(inner_deref)]
 #![feature(never_type)]
-#![feature(mem_take)]
 #![feature(unicode_internals)]
 
 #![recursion_limit="256"]
@@ -36,6 +34,7 @@
 extern crate rustc_lexer;
 extern crate serialize;
 extern crate syntax;
+extern crate syntax_expand;
 extern crate syntax_pos;
 extern crate test as testing;
 #[macro_use] extern crate log;
@@ -487,8 +486,8 @@ fn rust_input<R, F>(options: config::Options, f: F) -> R
         krate.version = crate_version;
 
         f(Output {
-            krate: krate,
-            renderinfo: renderinfo,
+            krate,
+            renderinfo,
             renderopts,
         })
     });
index 32044e48b6f99207f94a071ea0d396724922a7a0..10e15ab8881afa29c7c5b55ffa54f3bf210e08ef 100644 (file)
@@ -1,6 +1,7 @@
 use errors::Applicability;
 use syntax::parse::lexer::{StringReader as Lexer};
-use syntax::parse::{ParseSess, token};
+use syntax::parse::token;
+use syntax::sess::ParseSess;
 use syntax::source_map::FilePathMapping;
 use syntax_pos::{InnerSpan, FileName};
 
index 9186ed514202ff736ccb63cea9e67098e69ed86e..4270b162bafa4837aab4fef529914252a93ac5d2 100644 (file)
@@ -7,7 +7,7 @@
 use rustc_resolve::ParentScope;
 use syntax;
 use syntax::ast::{self, Ident};
-use syntax::ext::base::SyntaxExtensionKind;
+use syntax_expand::base::SyntaxExtensionKind;
 use syntax::feature_gate::UnstableFeatures;
 use syntax::symbol::Symbol;
 use syntax_pos::DUMMY_SP;
index 3e77ca47e8a6a887feb2a67a87715acbf00f399e..0be6340df96e87d5b946b5a2d086e31bbe975461 100644 (file)
@@ -64,6 +64,7 @@ pub fn run(options: Options) -> i32 {
 
     let mut cfgs = options.cfgs.clone();
     cfgs.push("rustdoc".to_owned());
+    cfgs.push("doctest".to_owned());
     let config = interface::Config {
         opts: sessopts,
         crate_cfg: config::parse_cfgspecs(cfgs),
@@ -393,7 +394,7 @@ pub fn make_test(s: &str,
     // Uses libsyntax to parse the doctest and find if there's a main fn and the extern
     // crate already is included.
     let (already_has_main, already_has_extern_crate, found_macro) = with_globals(edition, || {
-        use crate::syntax::{parse::{self, ParseSess}, source_map::FilePathMapping};
+        use crate::syntax::{parse, sess::ParseSess, source_map::FilePathMapping};
         use errors::emitter::EmitterWriter;
         use errors::Handler;
 
@@ -704,6 +705,7 @@ fn add_test(&mut self, test: String, config: LangString, line: usize) {
                 // compiler failures are test failures
                 should_panic: testing::ShouldPanic::No,
                 allow_fail: config.allow_fail,
+                test_type: testing::TestType::DocTest,
             },
             testfn: testing::DynTestFn(box move || {
                 let res = run_test(
index b6a90e1fb988bf5f901a629dfd512bc9c49e9b61..70c30687dabca6b45abb87f7751ba3bac87b215b 100644 (file)
@@ -8,7 +8,7 @@
 use rustc::util::nodemap::{FxHashSet, FxHashMap};
 use rustc::ty::TyCtxt;
 use syntax::ast;
-use syntax::ext::base::MacroKind;
+use syntax_expand::base::MacroKind;
 use syntax::source_map::Spanned;
 use syntax::symbol::sym;
 use syntax_pos::{self, Span};
index 0de9e6fcca5da970552fc8b28e85cdb5c5a5d389..1c99b091329f473b01084bfe6305e899e64f58c6 100644 (file)
@@ -23,7 +23,7 @@ libc = { version = "0.2.51", default-features = false, features = ['rustc-dep-of
 compiler_builtins = { version = "0.1.16" }
 profiler_builtins = { path = "../libprofiler_builtins", optional = true }
 unwind = { path = "../libunwind" }
-hashbrown = { version = "0.5.0", features = ['rustc-dep-of-std'] }
+hashbrown = { version = "0.6.1", default-features = false, features = ['rustc-dep-of-std'] }
 
 [dependencies.backtrace_rs]
 package = "backtrace"
index ff50051ef504043d673d3f3bdec1466093fa0850..6b0225a1b443ae489c7cf09e1aa615fb81dea166 100644 (file)
@@ -2030,6 +2030,31 @@ pub fn and_modify<F>(self, f: F) -> Self
             Vacant(entry) => Vacant(entry),
         }
     }
+
+    /// Sets the value of the entry, and returns an `OccupiedEntry`.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// #![feature(entry_insert)]
+    /// use std::collections::HashMap;
+    ///
+    /// let mut map: HashMap<&str, String> = HashMap::new();
+    /// let entry = map.entry("poneyland").insert("hoho".to_string());
+    ///
+    /// assert_eq!(entry.key(), &"poneyland");
+    /// ```
+    #[inline]
+    #[unstable(feature = "entry_insert", issue = "65225")]
+    pub fn insert(self, value: V) -> OccupiedEntry<'a, K, V> {
+        match self {
+            Occupied(mut entry) => {
+                entry.insert(value);
+                entry
+            },
+            Vacant(entry) => entry.insert_entry(value),
+        }
+    }
 }
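
A hedged usage sketch for the new `Entry::insert` added above (nightly-only behind `entry_insert` at the time of this change): unlike `or_insert`, it always overwrites and hands back an `OccupiedEntry` for further use.

```rust
#![feature(entry_insert)]
use std::collections::HashMap;

fn main() {
    let mut map: HashMap<&str, u32> = HashMap::new();
    map.insert("k", 1);

    // Overwrites the existing value and keeps the occupied entry around.
    let entry = map.entry("k").insert(2);
    assert_eq!(entry.key(), &"k");
    assert_eq!(map["k"], 2);
}
```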
 
 impl<'a, K, V: Default> Entry<'a, K, V> {
@@ -2347,6 +2372,28 @@ pub fn into_key(self) -> K {
     pub fn insert(self, value: V) -> &'a mut V {
         self.base.insert(value)
     }
+
+    /// Sets the value of the entry with the `VacantEntry`'s key,
+    /// and returns an `OccupiedEntry`.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use std::collections::HashMap;
+    /// use std::collections::hash_map::Entry;
+    ///
+    /// let mut map: HashMap<&str, u32> = HashMap::new();
+    ///
+    /// if let Entry::Vacant(o) = map.entry("poneyland") {
+    ///     o.insert(37);
+    /// }
+    /// assert_eq!(map["poneyland"], 37);
+    /// ```
+    #[inline]
+    fn insert_entry(self, value: V) -> OccupiedEntry<'a, K, V> {
+        let base = self.base.insert_entry(value);
+        OccupiedEntry { base }
+    }
 }
 
 #[stable(feature = "rust1", since = "1.0.0")]
@@ -2362,6 +2409,8 @@ fn from_iter<T: IntoIterator<Item = (K, V)>>(iter: T) -> HashMap<K, V, S> {
     }
 }
 
+/// Inserts all new key-value pairs from the iterator into the map. If a key
+/// already exists, its value is replaced by the corresponding value from the iterator.
 #[stable(feature = "rust1", since = "1.0.0")]
 impl<K, V, S> Extend<(K, V)> for HashMap<K, V, S>
 where
index 4a1bb75d588c9c008c4b670124ffd87f2a895cd6..6b9a35fccc4dcaa6278a75ffaca46ebdaff4bc34 100644 (file)
@@ -269,8 +269,8 @@ fn from(err: E) -> Box<dyn Error + 'a> {
 
 #[stable(feature = "rust1", since = "1.0.0")]
 impl<'a, E: Error + Send + Sync + 'a> From<E> for Box<dyn Error + Send + Sync + 'a> {
-    /// Converts a type of [`Error`] + [`trait@Send`] + [`trait@Sync`] into a box of
-    /// dyn [`Error`] + [`trait@Send`] + [`trait@Sync`].
+    /// Converts a type of [`Error`] + [`Send`] + [`Sync`] into a box of
+    /// dyn [`Error`] + [`Send`] + [`Sync`].
     ///
     /// [`Error`]: ../error/trait.Error.html
     ///
@@ -313,7 +313,7 @@ fn from(err: E) -> Box<dyn Error + Send + Sync + 'a> {
 
 #[stable(feature = "rust1", since = "1.0.0")]
 impl From<String> for Box<dyn Error + Send + Sync> {
-    /// Converts a [`String`] into a box of dyn [`Error`] + [`trait@Send`] + [`trait@Sync`].
+    /// Converts a [`String`] into a box of dyn [`Error`] + [`Send`] + [`Sync`].
     ///
     /// [`Error`]: ../error/trait.Error.html
     ///
@@ -377,7 +377,7 @@ fn from(str_err: String) -> Box<dyn Error> {
 
 #[stable(feature = "rust1", since = "1.0.0")]
 impl<'a> From<&str> for Box<dyn Error + Send + Sync + 'a> {
-    /// Converts a [`str`] into a box of dyn [`Error`] + [`trait@Send`] + [`trait@Sync`].
+    /// Converts a [`str`] into a box of dyn [`Error`] + [`Send`] + [`Sync`].
     ///
     /// [`Error`]: ../error/trait.Error.html
     ///
@@ -420,7 +420,7 @@ fn from(err: &str) -> Box<dyn Error> {
 
 #[stable(feature = "cow_box_error", since = "1.22.0")]
 impl<'a, 'b> From<Cow<'b, str>> for Box<dyn Error + Send + Sync + 'a> {
-    /// Converts a [`Cow`] into a box of dyn [`Error`] + [`trait@Send`] + [`trait@Sync`].
+    /// Converts a [`Cow`] into a box of dyn [`Error`] + [`Send`] + [`Sync`].
     ///
     /// [`Cow`]: ../borrow/enum.Cow.html
     /// [`Error`]: ../error/trait.Error.html
index d7f4cc5d1fdaafc15d572f05c67ce262bd97e771..483f2ba52eca658c5516f27a7e0adab2213d96ef 100644 (file)
@@ -919,7 +919,7 @@ fn description(&self) -> &str {
         "C string contained non-utf8 bytes"
     }
 
-    fn cause(&self) -> Option<&dyn Error> {
+    fn source(&self) -> Option<&(dyn Error + 'static)> {
         Some(&self.error)
     }
 }
index 8933f027a065f8c0b727d085c4f976bfb565a354..6595f54162f0a8a1ceeca203cf93ba5205937622 100644 (file)
@@ -1090,13 +1090,14 @@ pub fn accessed(&self) -> io::Result<SystemTime> {
 
     /// Returns the creation time listed in this metadata.
     ///
-    /// The returned value corresponds to the `birthtime` field of `stat` on
-    /// Unix platforms and the `ftCreationTime` field on Windows platforms.
+    /// The returned value corresponds to the `btime` field of `statx` on
+    /// Linux kernels starting from 4.11, the `birthtime` field of `stat` on other
+    /// Unix platforms, and the `ftCreationTime` field on Windows platforms.
     ///
     /// # Errors
     ///
     /// This field may not be available on all platforms, and will return an
-    /// `Err` on platforms where it is not available.
+    /// `Err` on platforms or filesystems where it is not available.
     ///
     /// # Examples
     ///
@@ -1109,7 +1110,7 @@ pub fn accessed(&self) -> io::Result<SystemTime> {
     ///     if let Ok(time) = metadata.created() {
     ///         println!("{:?}", time);
     ///     } else {
-    ///         println!("Not supported on this platform");
+    ///         println!("Not supported on this platform or filesystem");
     ///     }
     ///     Ok(())
     /// }
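
The updated wording above says creation time can be missing per platform or per filesystem; a hedged sketch of the fallback pattern that implies:

```rust
use std::fs;
use std::io;
use std::time::SystemTime;

fn created_or_modified(path: &str) -> io::Result<SystemTime> {
    let meta = fs::metadata(path)?;
    // Prefer the creation time (btime via statx on newer Linux kernels),
    // fall back to the modification time where it is unavailable.
    meta.created().or_else(|_| meta.modified())
}

fn main() -> io::Result<()> {
    println!("{:?}", created_or_modified(".")?);
    Ok(())
}
```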
@@ -3112,8 +3113,10 @@ fn c<T: Clone>(t: &T) -> T { t.clone() }
 
         #[cfg(windows)]
         let invalid_options = 87; // ERROR_INVALID_PARAMETER
-        #[cfg(unix)]
+        #[cfg(all(unix, not(target_os = "vxworks")))]
         let invalid_options = "Invalid argument";
+        #[cfg(target_os = "vxworks")]
+        let invalid_options = "invalid argument";
 
         // Test various combinations of creation modes and access modes.
         //
@@ -3441,5 +3444,18 @@ fn metadata_access_times() {
             check!(a.created());
             check!(b.created());
         }
+
+        if cfg!(target_os = "linux") {
+            // Not always available
+            match (a.created(), b.created()) {
+                (Ok(t1), Ok(t2)) => assert!(t1 <= t2),
+                (Err(e1), Err(e2)) if e1.kind() == ErrorKind::Other &&
+                                      e2.kind() == ErrorKind::Other => {}
+                (a, b) => panic!(
+                    "creation time must be always supported or not supported: {:?} {:?}",
+                    a, b,
+                ),
+            }
+        }
     }
 }
index 5ff32d7adafc23f9b8d89b852623435a49620048..af6cb656444d425fb2805f247f35c60e521373fe 100644 (file)
 #![feature(log_syntax)]
 #![feature(maybe_uninit_ref)]
 #![feature(maybe_uninit_slice)]
-#![feature(mem_take)]
 #![feature(needs_panic_runtime)]
 #![feature(never_type)]
 #![feature(nll)]
index a5e7cd992f2272d226ef606c489b02f34183fac0..46bbd8855dedabdf098e2b7b90139ae12befe940 100644 (file)
@@ -185,7 +185,6 @@ pub fn send_to<A: ToSocketAddrs>(&self, buf: &[u8], addr: A)
     /// # Examples
     ///
     /// ```no_run
-    /// #![feature(udp_peer_addr)]
     /// use std::net::{Ipv4Addr, SocketAddr, SocketAddrV4, UdpSocket};
     ///
     /// let socket = UdpSocket::bind("127.0.0.1:34254").expect("couldn't bind to address");
@@ -199,14 +198,13 @@ pub fn send_to<A: ToSocketAddrs>(&self, buf: &[u8], addr: A)
     /// [`NotConnected`]: ../../std/io/enum.ErrorKind.html#variant.NotConnected
     ///
     /// ```no_run
-    /// #![feature(udp_peer_addr)]
     /// use std::net::UdpSocket;
     ///
     /// let socket = UdpSocket::bind("127.0.0.1:34254").expect("couldn't bind to address");
     /// assert_eq!(socket.peer_addr().unwrap_err().kind(),
     ///            ::std::io::ErrorKind::NotConnected);
     /// ```
-    #[unstable(feature = "udp_peer_addr", issue = "59127")]
+    #[stable(feature = "udp_peer_addr", since = "1.40.0")]
     pub fn peer_addr(&self) -> io::Result<SocketAddr> {
         self.0.peer_addr()
     }
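
A short usage sketch for `UdpSocket::peer_addr`, stabilized here for 1.40.0; the addresses are arbitrary, and no packet is sent by `connect`, so the peer need not exist:

```rust
use std::net::UdpSocket;

fn main() -> std::io::Result<()> {
    let socket = UdpSocket::bind("127.0.0.1:0")?;
    assert!(socket.peer_addr().is_err()); // NotConnected until connect() is called
    socket.connect("127.0.0.1:8080")?;    // UDP connect only sets the default peer
    println!("peer: {:?}", socket.peer_addr()?);
    Ok(())
}
```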
index 1d4fd98dd754f2991846a5330f59263a1403b0d5..24c693790e84bcf239a57e3bff233604b595e36d 100644 (file)
@@ -12,7 +12,9 @@
 use crate::panicking;
 use crate::ptr::{Unique, NonNull};
 use crate::rc::Rc;
-use crate::sync::{Arc, Mutex, RwLock, atomic};
+use crate::sync::{Arc, Mutex, RwLock};
+#[cfg(not(bootstrap))]
+use crate::sync::atomic;
 use crate::task::{Context, Poll};
 use crate::thread::Result;
 
@@ -240,49 +242,49 @@ impl<T: ?Sized> RefUnwindSafe for Mutex<T> {}
 #[stable(feature = "unwind_safe_lock_refs", since = "1.12.0")]
 impl<T: ?Sized> RefUnwindSafe for RwLock<T> {}
 
-#[cfg(target_has_atomic = "ptr")]
+#[cfg(target_has_atomic_load_store = "ptr")]
 #[stable(feature = "unwind_safe_atomic_refs", since = "1.14.0")]
 impl RefUnwindSafe for atomic::AtomicIsize {}
-#[cfg(target_has_atomic = "8")]
+#[cfg(target_has_atomic_load_store = "8")]
 #[unstable(feature = "integer_atomics", issue = "32976")]
 impl RefUnwindSafe for atomic::AtomicI8 {}
-#[cfg(target_has_atomic = "16")]
+#[cfg(target_has_atomic_load_store = "16")]
 #[unstable(feature = "integer_atomics", issue = "32976")]
 impl RefUnwindSafe for atomic::AtomicI16 {}
-#[cfg(target_has_atomic = "32")]
+#[cfg(target_has_atomic_load_store = "32")]
 #[unstable(feature = "integer_atomics", issue = "32976")]
 impl RefUnwindSafe for atomic::AtomicI32 {}
-#[cfg(target_has_atomic = "64")]
+#[cfg(target_has_atomic_load_store = "64")]
 #[unstable(feature = "integer_atomics", issue = "32976")]
 impl RefUnwindSafe for atomic::AtomicI64 {}
-#[cfg(target_has_atomic = "128")]
+#[cfg(target_has_atomic_load_store = "128")]
 #[unstable(feature = "integer_atomics", issue = "32976")]
 impl RefUnwindSafe for atomic::AtomicI128 {}
 
-#[cfg(target_has_atomic = "ptr")]
+#[cfg(target_has_atomic_load_store = "ptr")]
 #[stable(feature = "unwind_safe_atomic_refs", since = "1.14.0")]
 impl RefUnwindSafe for atomic::AtomicUsize {}
-#[cfg(target_has_atomic = "8")]
+#[cfg(target_has_atomic_load_store = "8")]
 #[unstable(feature = "integer_atomics", issue = "32976")]
 impl RefUnwindSafe for atomic::AtomicU8 {}
-#[cfg(target_has_atomic = "16")]
+#[cfg(target_has_atomic_load_store = "16")]
 #[unstable(feature = "integer_atomics", issue = "32976")]
 impl RefUnwindSafe for atomic::AtomicU16 {}
-#[cfg(target_has_atomic = "32")]
+#[cfg(target_has_atomic_load_store = "32")]
 #[unstable(feature = "integer_atomics", issue = "32976")]
 impl RefUnwindSafe for atomic::AtomicU32 {}
-#[cfg(target_has_atomic = "64")]
+#[cfg(target_has_atomic_load_store = "64")]
 #[unstable(feature = "integer_atomics", issue = "32976")]
 impl RefUnwindSafe for atomic::AtomicU64 {}
-#[cfg(target_has_atomic = "128")]
+#[cfg(target_has_atomic_load_store = "128")]
 #[unstable(feature = "integer_atomics", issue = "32976")]
 impl RefUnwindSafe for atomic::AtomicU128 {}
 
-#[cfg(target_has_atomic = "8")]
+#[cfg(target_has_atomic_load_store = "8")]
 #[stable(feature = "unwind_safe_atomic_refs", since = "1.14.0")]
 impl RefUnwindSafe for atomic::AtomicBool {}
 
-#[cfg(target_has_atomic = "ptr")]
+#[cfg(target_has_atomic_load_store = "ptr")]
 #[stable(feature = "unwind_safe_atomic_refs", since = "1.14.0")]
 impl<T> RefUnwindSafe for atomic::AtomicPtr<T> {}
 
index 28fb40244043e5aa46e534b9b3a1131060d9b66a..638ce1679b8e98dc0c4db2f37b0462f9516d4a8c 100644 (file)
 use crate::mem;
 use crate::ptr;
 use crate::raw;
+use crate::sync::atomic::{AtomicBool, Ordering};
 use crate::sys::stdio::panic_output;
 use crate::sys_common::rwlock::RWLock;
-use crate::sys_common::{thread_info, util, backtrace};
+use crate::sys_common::{thread_info, util};
+use crate::sys_common::backtrace::{self, RustBacktrace};
 use crate::thread;
 
 #[cfg(not(test))]
@@ -158,16 +160,10 @@ pub fn take_hook() -> Box<dyn Fn(&PanicInfo<'_>) + 'static + Sync + Send> {
 fn default_hook(info: &PanicInfo<'_>) {
     // If this is a double panic, make sure that we print a backtrace
     // for this panic. Otherwise only print it if logging is enabled.
-    let log_backtrace = if cfg!(feature = "backtrace") {
-        let panics = update_panic_count(0);
-
-        if panics >= 2 {
-            Some(backtrace_rs::PrintFmt::Full)
-        } else {
-            backtrace::log_enabled()
-        }
+    let backtrace_env = if update_panic_count(0) >= 2 {
+        RustBacktrace::Print(backtrace_rs::PrintFmt::Full)
     } else {
-        None
+        backtrace::rust_backtrace_env()
     };
 
     // The current implementation always returns `Some`.
@@ -187,16 +183,16 @@ fn default_hook(info: &PanicInfo<'_>) {
         let _ = writeln!(err, "thread '{}' panicked at '{}', {}",
                          name, msg, location);
 
-        if cfg!(feature = "backtrace") {
-            use crate::sync::atomic::{AtomicBool, Ordering};
-
-            static FIRST_PANIC: AtomicBool = AtomicBool::new(true);
+        static FIRST_PANIC: AtomicBool = AtomicBool::new(true);
 
-            if let Some(format) = log_backtrace {
-                let _ = backtrace::print(err, format);
-            } else if FIRST_PANIC.compare_and_swap(true, false, Ordering::SeqCst) {
-                let _ = writeln!(err, "note: run with `RUST_BACKTRACE=1` \
-                                       environment variable to display a backtrace.");
+        match backtrace_env {
+            RustBacktrace::Print(format) => drop(backtrace::print(err, format)),
+            RustBacktrace::Disabled => {}
+            RustBacktrace::RuntimeDisabled => {
+                if FIRST_PANIC.swap(false, Ordering::SeqCst) {
+                    let _ = writeln!(err, "note: run with `RUST_BACKTRACE=1` \
+                                           environment variable to display a backtrace.");
+                }
             }
         }
     };
index fd6ff1032bb81da2a22097645ba66636938a2e4d..ca81044ee85609502722665ebebca67ae50962bc 100644 (file)
@@ -1627,7 +1627,7 @@ fn from(p: Cow<'a, Path>) -> Self {
 
 #[stable(feature = "shared_from_slice2", since = "1.24.0")]
 impl From<PathBuf> for Arc<Path> {
-    /// Converts a Path into a Rc by copying the Path data into a new Rc buffer.
+    /// Converts a `PathBuf` into an `Arc` by moving the `PathBuf` data into a new `Arc` buffer.
     #[inline]
     fn from(s: PathBuf) -> Arc<Path> {
         let arc: Arc<OsStr> = Arc::from(s.into_os_string());
@@ -1637,7 +1637,7 @@ fn from(s: PathBuf) -> Arc<Path> {
 
 #[stable(feature = "shared_from_slice2", since = "1.24.0")]
 impl From<&Path> for Arc<Path> {
-    /// Converts a Path into a Rc by copying the Path data into a new Rc buffer.
+    /// Converts a `Path` into an `Arc` by copying the `Path` data into a new `Arc` buffer.
     #[inline]
     fn from(s: &Path) -> Arc<Path> {
         let arc: Arc<OsStr> = Arc::from(s.as_os_str());
@@ -1647,7 +1647,7 @@ fn from(s: &Path) -> Arc<Path> {
 
 #[stable(feature = "shared_from_slice2", since = "1.24.0")]
 impl From<PathBuf> for Rc<Path> {
-    /// Converts a Path into a Rc by copying the Path data into a new Rc buffer.
+    /// Converts a `PathBuf` into an `Rc` by moving the `PathBuf` data into a new `Rc` buffer.
     #[inline]
     fn from(s: PathBuf) -> Rc<Path> {
         let rc: Rc<OsStr> = Rc::from(s.into_os_string());
@@ -1657,7 +1657,7 @@ fn from(s: PathBuf) -> Rc<Path> {
 
 #[stable(feature = "shared_from_slice2", since = "1.24.0")]
 impl From<&Path> for Rc<Path> {
-    /// Converts a Path into a Rc by copying the Path data into a new Rc buffer.
+    /// Converts a `Path` into an `Rc` by copying the `Path` data into a new `Rc` buffer.
     #[inline]
     fn from(s: &Path) -> Rc<Path> {
         let rc: Rc<OsStr> = Rc::from(s.as_os_str());
@@ -2219,6 +2219,7 @@ pub fn extension(&self) -> Option<&OsStr> {
     /// assert_eq!(Path::new("/etc").join("passwd"), PathBuf::from("/etc/passwd"));
     /// ```
     #[stable(feature = "rust1", since = "1.0.0")]
+    #[must_use]
     pub fn join<P: AsRef<Path>>(&self, path: P) -> PathBuf {
         self._join(path.as_ref())
     }
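
A brief illustration of why `join` gains `#[must_use]` above: it returns a new `PathBuf` and leaves the receiver untouched, so an ignored result is almost always a mistake that the lint can now flag.

```rust
use std::path::{Path, PathBuf};

fn main() {
    let base = Path::new("/etc");
    base.join("passwd"); // result ignored: now warned about via #[must_use]
    let joined: PathBuf = base.join("passwd"); // intended usage
    assert_eq!(joined, PathBuf::from("/etc/passwd"));
}
```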
index cf45eb0daba3988b5626730c3988ca8a732dcd80..63e35d5ed919a294fa1c191479b008650b363aac 100644 (file)
@@ -44,12 +44,9 @@ fn lang_start_internal(main: &(dyn Fn() -> i32 + Sync + crate::panic::RefUnwindS
         sys::args::init(argc, argv);
 
         // Let's run some code!
-        #[cfg(feature = "backtrace")]
         let exit_code = panic::catch_unwind(|| {
             sys_common::backtrace::__rust_begin_short_backtrace(move || main())
         });
-        #[cfg(not(feature = "backtrace"))]
-        let exit_code = panic::catch_unwind(move || main());
 
         sys_common::cleanup();
         exit_code.unwrap_or(101) as isize
index 69ecd201063b034baf0f8d5df0074823a89ecda6..c2884a28f3ccdb57ceda20b368f3cfcea59d9c59 100644 (file)
@@ -1581,10 +1581,6 @@ impl<T: Send> error::Error for SendError<T> {
     fn description(&self) -> &str {
         "sending on a closed channel"
     }
-
-    fn cause(&self) -> Option<&dyn error::Error> {
-        None
-    }
 }
 
 #[stable(feature = "rust1", since = "1.0.0")]
@@ -1624,10 +1620,6 @@ fn description(&self) -> &str {
             }
         }
     }
-
-    fn cause(&self) -> Option<&dyn error::Error> {
-        None
-    }
 }
 
 #[stable(feature = "mpsc_error_conversions", since = "1.24.0")]
@@ -1652,10 +1644,6 @@ impl error::Error for RecvError {
     fn description(&self) -> &str {
         "receiving on a closed channel"
     }
-
-    fn cause(&self) -> Option<&dyn error::Error> {
-        None
-    }
 }
 
 #[stable(feature = "rust1", since = "1.0.0")]
@@ -1685,10 +1673,6 @@ fn description(&self) -> &str {
             }
         }
     }
-
-    fn cause(&self) -> Option<&dyn error::Error> {
-        None
-    }
 }
 
 #[stable(feature = "mpsc_error_conversions", since = "1.24.0")]
@@ -1726,10 +1710,6 @@ fn description(&self) -> &str {
             }
         }
     }
-
-    fn cause(&self) -> Option<&dyn error::Error> {
-        None
-    }
 }
 
 #[stable(feature = "mpsc_error_conversions", since = "1.24.0")]
index e529b8c4227fa7a1d4c6c5ca14f0cb070529380e..e28fbca7fa1c2ab10b934dae0f740e599fd1ec3a 100644 (file)
 
 /// A synchronization primitive which can be used to run a one-time global
 /// initialization. Useful for one-time initialization for FFI or related
-/// functionality. This type can only be constructed with the [`ONCE_INIT`]
-/// value or the equivalent [`Once::new`] constructor.
+/// functionality. This type can only be constructed with the [`Once::new`]
+/// constructor.
 ///
-/// [`ONCE_INIT`]: constant.ONCE_INIT.html
 /// [`Once::new`]: struct.Once.html#method.new
 ///
 /// # Examples
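
A minimal example matching the reworded documentation above, using only the `Once::new` constructor it now points to:

```rust
use std::sync::Once;

static INIT: Once = Once::new();

fn init() {
    INIT.call_once(|| {
        println!("runs exactly once");
    });
}

fn main() {
    init();
    init(); // second call is a no-op
}
```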
index c2064fca23eac8821437048c0597512a5a2e5eba..4054cb1f9c586d974d94a74f24118593d4e7c3c5 100644 (file)
 
 pub struct File(FileDesc);
 
-#[derive(Clone)]
-pub struct FileAttr {
-    stat: stat64,
+// FIXME: This should be available on Linux with all `target_arch` and `target_env`.
+// https://github.com/rust-lang/libc/issues/1545
+macro_rules! cfg_has_statx {
+    ({ $($then_tt:tt)* } else { $($else_tt:tt)* }) => {
+        cfg_if::cfg_if! {
+            if #[cfg(all(target_os = "linux", target_env = "gnu", any(
+                target_arch = "x86",
+                target_arch = "arm",
+                // target_arch = "mips",
+                target_arch = "powerpc",
+                target_arch = "x86_64",
+                // target_arch = "aarch64",
+                target_arch = "powerpc64",
+                // target_arch = "mips64",
+                // target_arch = "s390x",
+                target_arch = "sparc64",
+            )))] {
+                $($then_tt)*
+            } else {
+                $($else_tt)*
+            }
+        }
+    };
+    ($($block_inner:tt)*) => {
+        #[cfg(all(target_os = "linux", target_env = "gnu", any(
+            target_arch = "x86",
+            target_arch = "arm",
+            // target_arch = "mips",
+            target_arch = "powerpc",
+            target_arch = "x86_64",
+            // target_arch = "aarch64",
+            target_arch = "powerpc64",
+            // target_arch = "mips64",
+            // target_arch = "s390x",
+            target_arch = "sparc64",
+        )))]
+        {
+            $($block_inner)*
+        }
+    };
 }
 
+cfg_has_statx! {{
+    #[derive(Clone)]
+    pub struct FileAttr {
+        stat: stat64,
+        statx_extra_fields: Option<StatxExtraFields>,
+    }
+
+    #[derive(Clone)]
+    struct StatxExtraFields {
+        // This is needed to check if btime is supported by the filesystem.
+        stx_mask: u32,
+        stx_btime: libc::statx_timestamp,
+    }
+
+    // We prefer `statx` on Linux if available, which contains file creation time.
+    // Default `stat64` contains no creation time.
+    unsafe fn try_statx(
+        fd: c_int,
+        path: *const libc::c_char,
+        flags: i32,
+        mask: u32,
+    ) -> Option<io::Result<FileAttr>> {
+        use crate::sync::atomic::{AtomicBool, Ordering};
+
+        // Linux kernels prior to 4.11 or glibc versions prior to 2.28 don't support `statx`.
+        // We store the availability in a global to avoid unnecessary syscalls
+        static HAS_STATX: AtomicBool = AtomicBool::new(true);
+        syscall! {
+            fn statx(
+                fd: c_int,
+                pathname: *const libc::c_char,
+                flags: c_int,
+                mask: libc::c_uint,
+                statxbuf: *mut libc::statx
+            ) -> c_int
+        }
+
+        if !HAS_STATX.load(Ordering::Relaxed) {
+            return None;
+        }
+
+        let mut buf: libc::statx = mem::zeroed();
+        let ret = cvt(statx(fd, path, flags, mask, &mut buf));
+        match ret {
+            Err(err) => match err.raw_os_error() {
+                Some(libc::ENOSYS) => {
+                    HAS_STATX.store(false, Ordering::Relaxed);
+                    return None;
+                }
+                _ => return Some(Err(err)),
+            }
+            Ok(_) => {
+                // We cannot fill `stat64` exhaustively because of private padding fields.
+                let mut stat: stat64 = mem::zeroed();
+                // `c_ulong` on gnu-mips, `dev_t` otherwise
+                stat.st_dev = libc::makedev(buf.stx_dev_major, buf.stx_dev_minor) as _;
+                stat.st_ino = buf.stx_ino as libc::ino64_t;
+                stat.st_nlink = buf.stx_nlink as libc::nlink_t;
+                stat.st_mode = buf.stx_mode as libc::mode_t;
+                stat.st_uid = buf.stx_uid as libc::uid_t;
+                stat.st_gid = buf.stx_gid as libc::gid_t;
+                stat.st_rdev = libc::makedev(buf.stx_rdev_major, buf.stx_rdev_minor) as _;
+                stat.st_size = buf.stx_size as off64_t;
+                stat.st_blksize = buf.stx_blksize as libc::blksize_t;
+                stat.st_blocks = buf.stx_blocks as libc::blkcnt64_t;
+                stat.st_atime = buf.stx_atime.tv_sec as libc::time_t;
+                // `i64` on gnu-x86_64-x32, `c_ulong` otherwise.
+                stat.st_atime_nsec = buf.stx_atime.tv_nsec as _;
+                stat.st_mtime = buf.stx_mtime.tv_sec as libc::time_t;
+                stat.st_mtime_nsec = buf.stx_mtime.tv_nsec as _;
+                stat.st_ctime = buf.stx_ctime.tv_sec as libc::time_t;
+                stat.st_ctime_nsec = buf.stx_ctime.tv_nsec as _;
+
+                let extra = StatxExtraFields {
+                    stx_mask: buf.stx_mask,
+                    stx_btime: buf.stx_btime,
+                };
+
+                Some(Ok(FileAttr { stat, statx_extra_fields: Some(extra) }))
+            }
+        }
+    }
+
+} else {
+    #[derive(Clone)]
+    pub struct FileAttr {
+        stat: stat64,
+    }
+}}
+
 // all DirEntry's will have a reference to this struct
 struct InnerReadDir {
     dirp: Dir,
@@ -97,6 +224,20 @@ pub struct FileType { mode: mode_t }
 #[derive(Debug)]
 pub struct DirBuilder { mode: mode_t }
 
+cfg_has_statx! {{
+    impl FileAttr {
+        fn from_stat64(stat: stat64) -> Self {
+            Self { stat, statx_extra_fields: None }
+        }
+    }
+} else {
+    impl FileAttr {
+        fn from_stat64(stat: stat64) -> Self {
+            Self { stat }
+        }
+    }
+}}
+
 impl FileAttr {
     pub fn size(&self) -> u64 { self.stat.st_size as u64 }
     pub fn perm(&self) -> FilePermissions {
@@ -164,6 +305,22 @@ pub fn created(&self) -> io::Result<SystemTime> {
                   target_os = "macos",
                   target_os = "ios")))]
     pub fn created(&self) -> io::Result<SystemTime> {
+        cfg_has_statx! {
+            if let Some(ext) = &self.statx_extra_fields {
+                return if (ext.stx_mask & libc::STATX_BTIME) != 0 {
+                    Ok(SystemTime::from(libc::timespec {
+                        tv_sec: ext.stx_btime.tv_sec as libc::time_t,
+                        tv_nsec: ext.stx_btime.tv_nsec as _,
+                    }))
+                } else {
+                    Err(io::Error::new(
+                        io::ErrorKind::Other,
+                        "creation time is not available for the filesystem",
+                    ))
+                };
+            }
+        }
+
         Err(io::Error::new(io::ErrorKind::Other,
                            "creation time is not available on this platform \
                             currently"))
@@ -306,12 +463,25 @@ pub fn file_name(&self) -> OsString {
 
     #[cfg(any(target_os = "linux", target_os = "emscripten", target_os = "android"))]
     pub fn metadata(&self) -> io::Result<FileAttr> {
-        let fd = cvt(unsafe {dirfd(self.dir.inner.dirp.0)})?;
+        let fd = cvt(unsafe { dirfd(self.dir.inner.dirp.0) })?;
+        let name = self.entry.d_name.as_ptr();
+
+        cfg_has_statx! {
+            if let Some(ret) = unsafe { try_statx(
+                fd,
+                name,
+                libc::AT_SYMLINK_NOFOLLOW | libc::AT_STATX_SYNC_AS_STAT,
+                libc::STATX_ALL,
+            ) } {
+                return ret;
+            }
+        }
+
         let mut stat: stat64 = unsafe { mem::zeroed() };
         cvt(unsafe {
-            fstatat64(fd, self.entry.d_name.as_ptr(), &mut stat, libc::AT_SYMLINK_NOFOLLOW)
+            fstatat64(fd, name, &mut stat, libc::AT_SYMLINK_NOFOLLOW)
         })?;
-        Ok(FileAttr { stat })
+        Ok(FileAttr::from_stat64(stat))
     }
 
     #[cfg(not(any(target_os = "linux", target_os = "emscripten", target_os = "android")))]
@@ -515,11 +685,24 @@ fn ensure_cloexec(_: &FileDesc) -> io::Result<()> {
     }
 
     pub fn file_attr(&self) -> io::Result<FileAttr> {
+        let fd = self.0.raw();
+
+        cfg_has_statx! {
+            if let Some(ret) = unsafe { try_statx(
+                fd,
+                b"\0" as *const _ as *const libc::c_char,
+                libc::AT_EMPTY_PATH | libc::AT_STATX_SYNC_AS_STAT,
+                libc::STATX_ALL,
+            ) } {
+                return ret;
+            }
+        }
+
         let mut stat: stat64 = unsafe { mem::zeroed() };
         cvt(unsafe {
-            fstat64(self.0.raw(), &mut stat)
+            fstat64(fd, &mut stat)
         })?;
-        Ok(FileAttr { stat })
+        Ok(FileAttr::from_stat64(stat))
     }
 
     pub fn fsync(&self) -> io::Result<()> {
@@ -796,20 +979,44 @@ pub fn link(src: &Path, dst: &Path) -> io::Result<()> {
 
 pub fn stat(p: &Path) -> io::Result<FileAttr> {
     let p = cstr(p)?;
+
+    cfg_has_statx! {
+        if let Some(ret) = unsafe { try_statx(
+            libc::AT_FDCWD,
+            p.as_ptr(),
+            libc::AT_STATX_SYNC_AS_STAT,
+            libc::STATX_ALL,
+        ) } {
+            return ret;
+        }
+    }
+
     let mut stat: stat64 = unsafe { mem::zeroed() };
     cvt(unsafe {
         stat64(p.as_ptr(), &mut stat)
     })?;
-    Ok(FileAttr { stat })
+    Ok(FileAttr::from_stat64(stat))
 }
 
 pub fn lstat(p: &Path) -> io::Result<FileAttr> {
     let p = cstr(p)?;
+
+    cfg_has_statx! {
+        if let Some(ret) = unsafe { try_statx(
+            libc::AT_FDCWD,
+            p.as_ptr(),
+            libc::AT_SYMLINK_NOFOLLOW | libc::AT_STATX_SYNC_AS_STAT,
+            libc::STATX_ALL,
+        ) } {
+            return ret;
+        }
+    }
+
     let mut stat: stat64 = unsafe { mem::zeroed() };
     cvt(unsafe {
         lstat64(p.as_ptr(), &mut stat)
     })?;
-    Ok(FileAttr { stat })
+    Ok(FileAttr::from_stat64(stat))
 }
 
 pub fn canonicalize(p: &Path) -> io::Result<PathBuf> {
index 51fdb1c0e55ec9dbbd1596424424a31b58fd5e03..adb08d8005ad486ed3c7701de7812a90c498b9fb 100644 (file)
@@ -400,13 +400,27 @@ fn from_inner(fd: c_int) -> File {
 
 impl fmt::Debug for File {
     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
-        fn get_path(_fd: c_int) -> Option<PathBuf> {
-            // FIXME(#:(): implement this for VxWorks
-            None
+        fn get_path(fd: c_int) -> Option<PathBuf> {
+            let mut buf = vec![0;libc::PATH_MAX as usize];
+            let n = unsafe { libc::ioctl(fd, libc::FIOGETNAME, buf.as_ptr()) };
+            if n == -1 {
+                return None;
+            }
+            let l = buf.iter().position(|&c| c == 0).unwrap();
+            buf.truncate(l as usize);
+            Some(PathBuf::from(OsString::from_vec(buf)))
         }
-        fn get_mode(_fd: c_int) -> Option<(bool, bool)> {
-            // FIXME(#:(): implement this for VxWorks
-            None
+        fn get_mode(fd: c_int) -> Option<(bool, bool)> {
+            let mode = unsafe { libc::fcntl(fd, libc::F_GETFL) };
+            if mode == -1 {
+                return None;
+            }
+            match mode & libc::O_ACCMODE {
+                libc::O_RDONLY => Some((true, false)),
+                libc::O_RDWR => Some((true, true)),
+                libc::O_WRONLY => Some((false, true)),
+                _ => None
+            }
         }
 
         let fd = self.0.raw();
index 718f422ed11e635b7fdc5d47b51ee9b11691e79d..19b123f2b6131aab14c9ee14b2f0a7ec780d2c72 100644 (file)
@@ -25,7 +25,7 @@ pub unsafe fn read(&self) {
         let r = libc::pthread_rwlock_rdlock(self.inner.get());
         if r == libc::EAGAIN {
             panic!("rwlock maximum reader count exceeded");
-        } else if r == libc::EDEADLK || *self.write_locked.get() {
+        } else if r == libc::EDEADLK || (r == 0 && *self.write_locked.get()) {
             if r == 0 {
                 self.raw_unlock();
             }
index 204f6af5fc1a0432788922315838da2deb70b86e..4160123c9a2abebfa960ab43b072a5d0d5b1c338 100644 (file)
@@ -412,7 +412,7 @@ pub fn seek(&self, pos: SeekFrom) -> io::Result<u64> {
 
     pub fn duplicate(&self) -> io::Result<File> {
         Ok(File {
-            handle: self.handle.duplicate(0, true, c::DUPLICATE_SAME_ACCESS)?,
+            handle: self.handle.duplicate(0, false, c::DUPLICATE_SAME_ACCESS)?,
         })
     }
 
index 01711d415d86c28f6655d2f49956bc4685a65eb2..9c406ec39cc45a3afebe9154db865346318337a2 100644 (file)
@@ -7,6 +7,7 @@
 use crate::borrow::Cow;
 use crate::io::prelude::*;
 use crate::path::{self, Path, PathBuf};
+use crate::sync::atomic::{self, Ordering};
 use crate::sys::mutex::Mutex;
 
 use backtrace_rs::{BacktraceFmt, BytesOrWideString, PrintFmt};
@@ -115,8 +116,10 @@ unsafe fn _print_fmt(fmt: &mut fmt::Formatter<'_>, print_fmt: PrintFmt) -> fmt::
     Ok(())
 }
 
-/// Fixed frame used to clean the backtrace with `RUST_BACKTRACE=1`.
-#[inline(never)]
+/// Fixed frame used to clean the backtrace with `RUST_BACKTRACE=1`. Note that
+/// this is only inline(never) when backtraces in libstd are enabled, otherwise
+/// it's fine to optimize away.
+#[cfg_attr(feature = "backtrace", inline(never))]
 pub fn __rust_begin_short_backtrace<F, T>(f: F) -> T
 where
     F: FnOnce() -> T,
@@ -126,42 +129,49 @@ pub fn __rust_begin_short_backtrace<F, T>(f: F) -> T
     f()
 }
 
+pub enum RustBacktrace {
+    Print(PrintFmt),
+    Disabled,
+    RuntimeDisabled,
+}
+
 // For now logging is turned off by default, and this function checks to see
 // whether the magical environment variable is present to see if it's turned on.
-pub fn log_enabled() -> Option<PrintFmt> {
-    use crate::sync::atomic::{self, Ordering};
+pub fn rust_backtrace_env() -> RustBacktrace {
+    // If the `backtrace` feature of this crate isn't enabled, quickly return
+    // `RustBacktrace::Disabled` so this can be constant-propagated all over
+    // the place to optimize away callers.
+    if !cfg!(feature = "backtrace") {
+        return RustBacktrace::Disabled;
+    }
 
     // Setting environment variables for Fuchsia components isn't a standard
     // or easily supported workflow. For now, always display backtraces.
     if cfg!(target_os = "fuchsia") {
-        return Some(PrintFmt::Full);
+        return RustBacktrace::Print(PrintFmt::Full);
     }
 
     static ENABLED: atomic::AtomicIsize = atomic::AtomicIsize::new(0);
     match ENABLED.load(Ordering::SeqCst) {
         0 => {}
-        1 => return None,
-        2 => return Some(PrintFmt::Short),
-        _ => return Some(PrintFmt::Full),
+        1 => return RustBacktrace::RuntimeDisabled,
+        2 => return RustBacktrace::Print(PrintFmt::Short),
+        _ => return RustBacktrace::Print(PrintFmt::Full),
     }
 
-    let val = env::var_os("RUST_BACKTRACE").and_then(|x| {
-        if &x == "0" {
-            None
-        } else if &x == "full" {
-            Some(PrintFmt::Full)
-        } else {
-            Some(PrintFmt::Short)
-        }
-    });
-    ENABLED.store(
-        match val {
-            Some(v) => v as isize,
-            None => 1,
-        },
-        Ordering::SeqCst,
-    );
-    val
+    let (format, cache) = env::var_os("RUST_BACKTRACE")
+        .map(|x| {
+            if &x == "0" {
+                (RustBacktrace::RuntimeDisabled, 1)
+            } else if &x == "full" {
+                (RustBacktrace::Print(PrintFmt::Full), 3)
+            } else {
+                (RustBacktrace::Print(PrintFmt::Short), 2)
+            }
+        })
+        .unwrap_or((RustBacktrace::RuntimeDisabled, 1));
+    ENABLED.store(cache, Ordering::SeqCst);
+    format
 }
 
 /// Prints the filename of the backtrace frame.
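
A self-contained sketch of the caching scheme introduced above (the `Backtrace` enum, `CACHE` static, and `backtrace_env` function are simplified stand-ins, not libstd's items): `RUST_BACKTRACE` is read at most once, the decision is memoized in an atomic, and later calls just decode the cached value.

```rust
use std::env;
use std::sync::atomic::{AtomicIsize, Ordering};

#[derive(Clone, Copy, Debug)]
enum Backtrace { RuntimeDisabled, Short, Full }

static CACHE: AtomicIsize = AtomicIsize::new(0);

fn backtrace_env() -> Backtrace {
    // Fast path: a previous call already decoded RUST_BACKTRACE.
    match CACHE.load(Ordering::SeqCst) {
        1 => return Backtrace::RuntimeDisabled,
        2 => return Backtrace::Short,
        3 => return Backtrace::Full,
        _ => {}
    }
    // Slow path: read the environment once and memoize the decision.
    let (format, cache) = env::var_os("RUST_BACKTRACE")
        .map(|x| {
            if &x == "0" {
                (Backtrace::RuntimeDisabled, 1)
            } else if &x == "full" {
                (Backtrace::Full, 3)
            } else {
                (Backtrace::Short, 2)
            }
        })
        .unwrap_or((Backtrace::RuntimeDisabled, 1));
    CACHE.store(cache, Ordering::SeqCst);
    format
}

fn main() {
    println!("{:?}", backtrace_env());
}
```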
index d734f412bf88650115881856f86ea744c1aa1d04..3753269adfe45c7034c96632656d531407f84104 100644 (file)
@@ -193,7 +193,7 @@ pub fn into_rc(&self) -> Rc<Slice> {
 pub trait OsStringExt {
     /// Creates an [`OsString`] from a byte vector.
     ///
-    /// See the module docmentation for an example.
+    /// See the module documentation for an example.
     ///
     /// [`OsString`]: ../../../ffi/struct.OsString.html
     #[stable(feature = "rust1", since = "1.0.0")]
@@ -201,7 +201,7 @@ pub trait OsStringExt {
 
     /// Yields the underlying byte vector of this [`OsString`].
     ///
-    /// See the module docmentation for an example.
+    /// See the module documentation for an example.
     ///
     /// [`OsString`]: ../../../ffi/struct.OsString.html
     #[stable(feature = "rust1", since = "1.0.0")]
@@ -226,14 +226,14 @@ pub trait OsStrExt {
     #[stable(feature = "rust1", since = "1.0.0")]
     /// Creates an [`OsStr`] from a byte slice.
     ///
-    /// See the module docmentation for an example.
+    /// See the module documentation for an example.
     ///
     /// [`OsStr`]: ../../../ffi/struct.OsStr.html
     fn from_bytes(slice: &[u8]) -> &Self;
 
     /// Gets the underlying byte view of the [`OsStr`] slice.
     ///
-    /// See the module docmentation for an example.
+    /// See the module documentation for an example.
     ///
     /// [`OsStr`]: ../../../ffi/struct.OsStr.html
     #[stable(feature = "rust1", since = "1.0.0")]
index 764041d2f4239f668c9efafc3a99bac075dcca83..0ffa6ace2e4d2ea73ce1824b2149152c2bd90b70 100644 (file)
@@ -465,12 +465,9 @@ pub unsafe fn spawn_unchecked<'a, F, T>(self, f: F) -> io::Result<JoinHandle<T>>
             }
 
             thread_info::set(imp::guard::current(), their_thread);
-            #[cfg(feature = "backtrace")]
             let try_result = panic::catch_unwind(panic::AssertUnwindSafe(|| {
                 crate::sys_common::backtrace::__rust_begin_short_backtrace(f)
             }));
-            #[cfg(not(feature = "backtrace"))]
-            let try_result = panic::catch_unwind(panic::AssertUnwindSafe(f));
             *their_packet.get() = Some(try_result);
         };
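
A simplified, hedged sketch of what the spawned closure now always does (helper names are made up, and libstd hands the result back through a packet rather than a return value): run the user closure inside a fixed short-backtrace frame under `catch_unwind`, and pass the resulting `Result` to whoever joins the thread.

```rust
use std::panic::{self, AssertUnwindSafe};

// Hypothetical stand-in for __rust_begin_short_backtrace: a fixed frame the
// backtrace printer can recognize and trim at.
fn begin_short_backtrace<T, F: FnOnce() -> T>(f: F) -> T {
    f()
}

fn run_thread_main<T, F: FnOnce() -> T>(f: F) -> std::thread::Result<T> {
    panic::catch_unwind(AssertUnwindSafe(|| begin_short_backtrace(f)))
}

fn main() {
    assert_eq!(run_thread_main(|| 1 + 1).unwrap(), 2);
    // The panic is caught and surfaced as an Err, as join() would report it.
    assert!(run_thread_main(|| panic!("boom")).is_err());
}
```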
 
index 3bf2b8be1fe8e32d68a43f7301a0dcae418773be..e1ae01b602a8db8d97e1620837b79af23a485d04 100644 (file)
 use crate::cmp;
 use crate::error::Error;
 use crate::fmt;
-use crate::ops::{Add, Sub, AddAssign, SubAssign};
+use crate::ops::{Add, AddAssign, Sub, SubAssign};
 use crate::sys::time;
-use crate::sys_common::FromInner;
 use crate::sys_common::mutex::Mutex;
+use crate::sys_common::FromInner;
 
 #[stable(feature = "time", since = "1.3.0")]
 pub use core::time::Duration;
@@ -216,17 +216,17 @@ pub fn now() -> Instant {
         // * https://bugzilla.mozilla.org/show_bug.cgi?id=1487778 - a similar
         //   Firefox bug
         //
-        // It simply seems that this it just happens so that a lot in the wild
-        // we're seeing panics across various platforms where consecutive calls
+        // It seems that this just happens a lot in the wild.
+        // We're seeing panics across various platforms where consecutive calls
         // to `Instant::now`, such as via the `elapsed` function, are panicking
         // as they're going backwards. Placed here is a last-ditch effort to try
         // to fix things up. We keep a global "latest now" instance which is
         // returned instead of what the OS says if the OS goes backwards.
         //
-        // To hopefully mitigate the impact of this though a few platforms are
+        // To hopefully mitigate the impact of this, a few platforms are
         // whitelisted as "these at least haven't gone backwards yet".
         if time::Instant::actually_monotonic() {
-            return Instant(os_now)
+            return Instant(os_now);
         }
 
         static LOCK: Mutex = Mutex::new();
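
A hedged, standalone sketch of the "latest now" clamp described in the comment above (using plain `std` types instead of libstd's internal mutex and raw OS instant; here "now" is just a `Duration` since some fixed epoch): each caller publishes the newest value it has seen, and no caller is ever handed an earlier one.

```rust
use std::sync::Mutex;
use std::time::Duration;

/// Clamp a possibly non-monotonic OS reading against the newest value seen
/// so far, so consecutive calls never appear to go backwards.
fn monotonize(os_now: Duration, latest: &Mutex<Duration>) -> Duration {
    let mut latest = latest.lock().unwrap();
    if os_now > *latest {
        *latest = os_now;
    }
    *latest
}

fn main() {
    let latest = Mutex::new(Duration::new(0, 0));
    let a = monotonize(Duration::from_millis(10), &latest);
    // A clock step backwards is hidden from the caller:
    let b = monotonize(Duration::from_millis(5), &latest);
    assert!(b >= a);
    println!("{:?} {:?}", a, b);
}
```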
@@ -353,8 +353,7 @@ impl Add<Duration> for Instant {
     ///
     /// [`checked_add`]: ../../std/time/struct.Instant.html#method.checked_add
     fn add(self, other: Duration) -> Instant {
-        self.checked_add(other)
-            .expect("overflow when adding duration to instant")
+        self.checked_add(other).expect("overflow when adding duration to instant")
     }
 }
 
@@ -370,8 +369,7 @@ impl Sub<Duration> for Instant {
     type Output = Instant;
 
     fn sub(self, other: Duration) -> Instant {
-        self.checked_sub(other)
-            .expect("overflow when subtracting duration from instant")
+        self.checked_sub(other).expect("overflow when subtracting duration from instant")
     }
 }
 
@@ -464,8 +462,7 @@ pub fn now() -> SystemTime {
     /// println!("{:?}", difference);
     /// ```
     #[stable(feature = "time2", since = "1.8.0")]
-    pub fn duration_since(&self, earlier: SystemTime)
-                          -> Result<Duration, SystemTimeError> {
+    pub fn duration_since(&self, earlier: SystemTime) -> Result<Duration, SystemTimeError> {
         self.0.sub_time(&earlier.0).map_err(SystemTimeError)
     }
 
@@ -532,8 +529,7 @@ impl Add<Duration> for SystemTime {
     ///
     /// [`checked_add`]: ../../std/time/struct.SystemTime.html#method.checked_add
     fn add(self, dur: Duration) -> SystemTime {
-        self.checked_add(dur)
-            .expect("overflow when adding duration to instant")
+        self.checked_add(dur).expect("overflow when adding duration to instant")
     }
 }
 
@@ -549,8 +545,7 @@ impl Sub<Duration> for SystemTime {
     type Output = SystemTime;
 
     fn sub(self, dur: Duration) -> SystemTime {
-        self.checked_sub(dur)
-            .expect("overflow when subtracting duration from instant")
+        self.checked_sub(dur).expect("overflow when subtracting duration from instant")
     }
 }
 
@@ -626,7 +621,9 @@ pub fn duration(&self) -> Duration {
 
 #[stable(feature = "time2", since = "1.8.0")]
 impl Error for SystemTimeError {
-    fn description(&self) -> &str { "other time was not earlier than self" }
+    fn description(&self) -> &str {
+        "other time was not earlier than self"
+    }
 }
 
 #[stable(feature = "time2", since = "1.8.0")]
@@ -644,17 +641,16 @@ fn from_inner(time: time::SystemTime) -> SystemTime {
 
 #[cfg(test)]
 mod tests {
-    use super::{Instant, SystemTime, Duration, UNIX_EPOCH};
+    use super::{Duration, Instant, SystemTime, UNIX_EPOCH};
 
     macro_rules! assert_almost_eq {
-        ($a:expr, $b:expr) => ({
+        ($a:expr, $b:expr) => {{
             let (a, b) = ($a, $b);
             if a != b {
-                let (a, b) = if a > b {(a, b)} else {(b, a)};
-                assert!(a - Duration::new(0, 1000) <= b,
-                        "{:?} is not almost equal to {:?}", a, b);
+                let (a, b) = if a > b { (a, b) } else { (b, a) };
+                assert!(a - Duration::new(0, 1000) <= b, "{:?} is not almost equal to {:?}", a, b);
             }
-        })
+        }};
     }
 
     #[test]
@@ -729,7 +725,7 @@ fn instant_checked_duration_since_nopanic() {
     fn instant_saturating_duration_since_nopanic() {
         let a = Instant::now();
         let ret = (a - Duration::new(1, 0)).saturating_duration_since(a);
-        assert_eq!(ret, Duration::new(0,0));
+        assert_eq!(ret, Duration::new(0, 0));
     }
 
     #[test]
@@ -755,15 +751,14 @@ fn system_time_math() {
 
         let second = Duration::new(1, 0);
         assert_almost_eq!(a.duration_since(a - second).unwrap(), second);
-        assert_almost_eq!(a.duration_since(a + second).unwrap_err()
-                           .duration(), second);
+        assert_almost_eq!(a.duration_since(a + second).unwrap_err().duration(), second);
 
         assert_almost_eq!(a - second + second, a);
         assert_almost_eq!(a.checked_sub(second).unwrap().checked_add(second).unwrap(), a);
 
         let one_second_from_epoch = UNIX_EPOCH + Duration::new(1, 0);
-        let one_second_from_epoch2 = UNIX_EPOCH + Duration::new(0, 500_000_000)
-            + Duration::new(0, 500_000_000);
+        let one_second_from_epoch2 =
+            UNIX_EPOCH + Duration::new(0, 500_000_000) + Duration::new(0, 500_000_000);
         assert_eq!(one_second_from_epoch, one_second_from_epoch2);
 
         // checked_add_duration will not panic on overflow
index 023952042e6d478ce13b0d3d3e1d629144a9a0b1..0408d7d1bc2aade8615dc2284b98c0a51ad74206 100644 (file)
@@ -2,31 +2,29 @@
 
 pub use GenericArgs::*;
 pub use UnsafeSource::*;
-pub use crate::symbol::{Ident, Symbol as Name};
 pub use crate::util::parser::ExprPrecedence;
 
-use crate::ext::hygiene::ExpnId;
 use crate::parse::token::{self, DelimToken};
-use crate::print::pprust;
 use crate::ptr::P;
 use crate::source_map::{dummy_spanned, respan, Spanned};
-use crate::symbol::{kw, sym, Symbol};
 use crate::tokenstream::TokenStream;
-use crate::ThinVec;
+
+use rustc_target::spec::abi::Abi;
+pub use rustc_target::abi::FloatTy;
+
+use syntax_pos::{Span, DUMMY_SP, ExpnId};
+use syntax_pos::symbol::{kw, sym, Symbol};
+pub use syntax_pos::symbol::{Ident, Symbol as Name};
 
 use rustc_index::vec::Idx;
 #[cfg(target_arch = "x86_64")]
 use rustc_data_structures::static_assert_size;
-use rustc_target::spec::abi::Abi;
-use syntax_pos::{Span, DUMMY_SP};
-
 use rustc_data_structures::fx::FxHashSet;
 use rustc_data_structures::sync::Lrc;
+use rustc_data_structures::thin_vec::ThinVec;
 use rustc_serialize::{self, Decoder, Encoder};
 use std::fmt;
 
-pub use rustc_target::abi::FloatTy;
-
 #[cfg(test)]
 mod tests;
 
@@ -70,7 +68,7 @@ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
 /// along with a bunch of supporting information.
 ///
 /// E.g., `std::cmp::PartialEq`.
-#[derive(Clone, RustcEncodable, RustcDecodable)]
+#[derive(Clone, RustcEncodable, RustcDecodable, Debug)]
 pub struct Path {
     pub span: Span,
     /// The segments in the path: the things separated by `::`.
@@ -86,18 +84,6 @@ fn eq(&self, symbol: &Symbol) -> bool {
     }
 }
 
-impl fmt::Debug for Path {
-    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
-        write!(f, "path({})", pprust::path_to_string(self))
-    }
-}
-
-impl fmt::Display for Path {
-    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
-        write!(f, "{}", pprust::path_to_string(self))
-    }
-}
-
 impl Path {
     // Convert a span and an identifier to the corresponding
     // one-segment path.
@@ -507,19 +493,13 @@ pub struct Block {
     pub span: Span,
 }
 
-#[derive(Clone, RustcEncodable, RustcDecodable)]
+#[derive(Clone, RustcEncodable, RustcDecodable, Debug)]
 pub struct Pat {
     pub id: NodeId,
     pub kind: PatKind,
     pub span: Span,
 }
 
-impl fmt::Debug for Pat {
-    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
-        write!(f, "pat({}: {})", self.id, pprust::pat_to_string(self))
-    }
-}
-
 impl Pat {
     /// Attempt reparsing the pattern as a type.
     /// This is intended for use by diagnostics.
@@ -831,7 +811,7 @@ pub fn to_string(op: UnOp) -> &'static str {
 }
 
 /// A statement
-#[derive(Clone, RustcEncodable, RustcDecodable)]
+#[derive(Clone, RustcEncodable, RustcDecodable, Debug)]
 pub struct Stmt {
     pub id: NodeId,
     pub kind: StmtKind,
@@ -865,18 +845,7 @@ pub fn is_expr(&self) -> bool {
     }
 }
 
-impl fmt::Debug for Stmt {
-    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
-        write!(
-            f,
-            "stmt({}: {})",
-            self.id.to_string(),
-            pprust::stmt_to_string(self)
-        )
-    }
-}
-
-#[derive(Clone, RustcEncodable, RustcDecodable)]
+#[derive(Clone, RustcEncodable, RustcDecodable, Debug)]
 pub enum StmtKind {
     /// A local (let) binding.
     Local(P<Local>),
@@ -973,7 +942,7 @@ pub struct AnonConst {
 }
 
 /// An expression.
-#[derive(Clone, RustcEncodable, RustcDecodable)]
+#[derive(Clone, RustcEncodable, RustcDecodable, Debug)]
 pub struct Expr {
     pub id: NodeId,
     pub kind: ExprKind,
@@ -1100,12 +1069,6 @@ pub fn precedence(&self) -> ExprPrecedence {
     }
 }
 
-impl fmt::Debug for Expr {
-    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
-        write!(f, "expr({}: {})", self.id, pprust::expr_to_string(self))
-    }
-}
-
 /// Limit types of a range (inclusive or exclusive)
 #[derive(Copy, Clone, PartialEq, RustcEncodable, RustcDecodable, Debug)]
 pub enum RangeLimits {
@@ -1660,19 +1623,13 @@ pub enum AssocTyConstraintKind {
     },
 }
 
-#[derive(Clone, RustcEncodable, RustcDecodable)]
+#[derive(Clone, RustcEncodable, RustcDecodable, Debug)]
 pub struct Ty {
     pub id: NodeId,
     pub kind: TyKind,
     pub span: Span,
 }
 
-impl fmt::Debug for Ty {
-    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
-        write!(f, "type({})", pprust::ty_to_string(self))
-    }
-}
-
 #[derive(Clone, RustcEncodable, RustcDecodable, Debug)]
 pub struct BareFnTy {
     pub unsafety: Unsafety,
index 2a8e6b2cc95109c6f6ff5354734b2dddaf977f7e..28b61c5aa77ccfc08561046623a73330c14330cc 100644 (file)
@@ -2,9 +2,9 @@
 
 use crate::ast::{self, Attribute, MetaItem, NestedMetaItem};
 use crate::early_buffered_lints::BufferedEarlyLintId;
-use crate::ext::base::ExtCtxt;
 use crate::feature_gate::{Features, GatedCfg};
-use crate::parse::ParseSess;
+use crate::print::pprust;
+use crate::sess::ParseSess;
 
 use errors::{Applicability, Handler};
 use syntax_pos::hygiene::Transparency;
@@ -31,6 +31,10 @@ pub struct AttributeTemplate {
 }
 
 impl AttributeTemplate {
+    pub fn only_word() -> Self {
+        Self { word: true, list: None, name_value_str: None }
+    }
+
     /// Checks that the given meta-item is compatible with this template.
     fn compatible(&self, meta_item_kind: &ast::MetaItemKind) -> bool {
         match meta_item_kind {
@@ -243,7 +247,11 @@ fn find_stability_generic<'a, I>(sess: &ParseSess,
             let meta = meta.as_ref().unwrap();
             let get = |meta: &MetaItem, item: &mut Option<Symbol>| {
                 if item.is_some() {
-                    handle_errors(sess, meta.span, AttrError::MultipleItem(meta.path.to_string()));
+                    handle_errors(
+                        sess,
+                        meta.span,
+                        AttrError::MultipleItem(pprust::path_to_string(&meta.path)),
+                    );
                     return false
                 }
                 if let Some(v) = meta.value_str() {
@@ -271,7 +279,10 @@ macro_rules! get_meta {
                                     handle_errors(
                                         sess,
                                         mi.span,
-                                        AttrError::UnknownMetaItem(mi.path.to_string(), expected),
+                                        AttrError::UnknownMetaItem(
+                                            pprust::path_to_string(&mi.path),
+                                            expected,
+                                        ),
                                     );
                                     continue 'outer
                                 }
@@ -362,7 +373,7 @@ macro_rules! get_meta {
                                         sess,
                                         meta.span(),
                                         AttrError::UnknownMetaItem(
-                                            mi.path.to_string(),
+                                            pprust::path_to_string(&mi.path),
                                             &["feature", "reason", "issue", "soft"]
                                         ),
                                     );
@@ -434,7 +445,8 @@ macro_rules! get_meta {
                                             sess,
                                             meta.span(),
                                             AttrError::UnknownMetaItem(
-                                                mi.path.to_string(), &["since", "note"],
+                                                pprust::path_to_string(&mi.path),
+                                                &["since", "note"],
                                             ),
                                         );
                                         continue 'outer
@@ -597,8 +609,11 @@ pub fn eval_condition<F>(cfg: &ast::MetaItem, sess: &ParseSess, eval: &mut F)
                     !eval_condition(mis[0].meta_item().unwrap(), sess, eval)
                 },
                 _ => {
-                    span_err!(sess.span_diagnostic, cfg.span, E0537,
-                              "invalid predicate `{}`", cfg.path);
+                    span_err!(
+                        sess.span_diagnostic, cfg.span, E0537,
+                        "invalid predicate `{}`",
+                        pprust::path_to_string(&cfg.path)
+                    );
                     false
                 }
             }
@@ -653,7 +668,9 @@ fn find_deprecation_generic<'a, I>(sess: &ParseSess,
                 let get = |meta: &MetaItem, item: &mut Option<Symbol>| {
                     if item.is_some() {
                         handle_errors(
-                            sess, meta.span, AttrError::MultipleItem(meta.path.to_string())
+                            sess,
+                            meta.span,
+                            AttrError::MultipleItem(pprust::path_to_string(&meta.path)),
                         );
                         return false
                     }
@@ -691,8 +708,10 @@ fn find_deprecation_generic<'a, I>(sess: &ParseSess,
                                     handle_errors(
                                         sess,
                                         meta.span(),
-                                        AttrError::UnknownMetaItem(mi.path.to_string(),
-                                                                   &["since", "note"]),
+                                        AttrError::UnknownMetaItem(
+                                            pprust::path_to_string(&mi.path),
+                                            &["since", "note"],
+                                        ),
                                     );
                                     continue 'outer
                                 }
@@ -921,14 +940,7 @@ pub fn find_transparency(
     (transparency.map_or(fallback, |t| t.0), error)
 }
 
-pub fn check_builtin_macro_attribute(ecx: &ExtCtxt<'_>, meta_item: &MetaItem, name: Symbol) {
-    // All the built-in macro attributes are "words" at the moment.
-    let template = AttributeTemplate { word: true, list: None, name_value_str: None };
-    let attr = ecx.attribute(meta_item.clone());
-    check_builtin_attribute(ecx.parse_sess, &attr, name, template);
-}
-
-crate fn check_builtin_attribute(
+pub fn check_builtin_attribute(
     sess: &ParseSess, attr: &ast::Attribute, name: Symbol, template: AttributeTemplate
 ) {
     // Some special attributes like `cfg` must be checked
index 7bef693a5be4cce1bb3fcf4d202add442587d515..4aec50408812f27a0fed880b3a095a68d8efe0ba 100644 (file)
 use crate::source_map::{BytePos, Spanned, DUMMY_SP};
 use crate::parse::lexer::comments::{doc_comment_style, strip_doc_comment_decoration};
 use crate::parse::parser::Parser;
-use crate::parse::{ParseSess, PResult};
+use crate::parse::PResult;
 use crate::parse::token::{self, Token};
 use crate::ptr::P;
+use crate::sess::ParseSess;
 use crate::symbol::{sym, Symbol};
 use crate::ThinVec;
-use crate::tokenstream::{TokenStream, TokenTree, DelimSpan};
+use crate::tokenstream::{DelimSpan, TokenStream, TokenTree, TreeAndJoint};
 use crate::GLOBALS;
 
 use log::debug;
@@ -279,7 +280,7 @@ pub fn meta(&self) -> Option<MetaItem> {
         self.item.meta(self.span)
     }
 
-    pub fn parse<'a, T, F>(&self, sess: &'a ParseSess, mut f: F) -> PResult<'a, T>
+    crate fn parse<'a, T, F>(&self, sess: &'a ParseSess, mut f: F) -> PResult<'a, T>
         where F: FnMut(&mut Parser<'a>) -> PResult<'a, T>,
     {
         let mut parser = Parser::new(
@@ -297,24 +298,11 @@ pub fn parse<'a, T, F>(&self, sess: &'a ParseSess, mut f: F) -> PResult<'a, T>
         Ok(result)
     }
 
-    pub fn parse_list<'a, T, F>(&self, sess: &'a ParseSess, mut f: F) -> PResult<'a, Vec<T>>
-        where F: FnMut(&mut Parser<'a>) -> PResult<'a, T>,
-    {
+    pub fn parse_derive_paths<'a>(&self, sess: &'a ParseSess) -> PResult<'a, Vec<Path>> {
         if self.tokens.is_empty() {
             return Ok(Vec::new());
         }
-        self.parse(sess, |parser| {
-            parser.expect(&token::OpenDelim(token::Paren))?;
-            let mut list = Vec::new();
-            while !parser.eat(&token::CloseDelim(token::Paren)) {
-                list.push(f(parser)?);
-                if !parser.eat(&token::Comma) {
-                   parser.expect(&token::CloseDelim(token::Paren))?;
-                    break
-                }
-            }
-            Ok(list)
-        })
+        self.parse(sess, |p| p.parse_derive_paths())
     }
 
     pub fn parse_meta<'a>(&self, sess: &'a ParseSess) -> PResult<'a, MetaItem> {
@@ -475,7 +463,7 @@ pub fn first_attr_value_str_by_name(attrs: &[Attribute], name: Symbol) -> Option
 }
 
 impl MetaItem {
-    fn tokens(&self) -> TokenStream {
+    fn token_trees_and_joints(&self) -> Vec<TreeAndJoint> {
         let mut idents = vec![];
         let mut last_pos = BytePos(0 as u32);
         for (i, segment) in self.path.segments.iter().enumerate() {
@@ -489,8 +477,8 @@ fn tokens(&self) -> TokenStream {
             idents.push(TokenTree::Token(Token::from_ast_ident(segment.ident)).into());
             last_pos = segment.ident.span.hi();
         }
-        self.kind.tokens(self.span).append_to_tree_and_joint_vec(&mut idents);
-        TokenStream::new(idents)
+        idents.extend(self.kind.token_trees_and_joints(self.span));
+        idents
     }
 
     fn from_tokens<I>(tokens: &mut iter::Peekable<I>) -> Option<MetaItem>
@@ -549,13 +537,14 @@ fn from_tokens<I>(tokens: &mut iter::Peekable<I>) -> Option<MetaItem>
 }
 
 impl MetaItemKind {
-    pub fn tokens(&self, span: Span) -> TokenStream {
+    pub fn token_trees_and_joints(&self, span: Span) -> Vec<TreeAndJoint> {
         match *self {
-            MetaItemKind::Word => TokenStream::empty(),
+            MetaItemKind::Word => vec![],
             MetaItemKind::NameValue(ref lit) => {
-                let mut vec = vec![TokenTree::token(token::Eq, span).into()];
-                lit.tokens().append_to_tree_and_joint_vec(&mut vec);
-                TokenStream::new(vec)
+                vec![
+                    TokenTree::token(token::Eq, span).into(),
+                    lit.token_tree().into(),
+                ]
             }
             MetaItemKind::List(ref list) => {
                 let mut tokens = Vec::new();
@@ -563,17 +552,26 @@ pub fn tokens(&self, span: Span) -> TokenStream {
                     if i > 0 {
                         tokens.push(TokenTree::token(token::Comma, span).into());
                     }
-                    item.tokens().append_to_tree_and_joint_vec(&mut tokens);
+                    tokens.extend(item.token_trees_and_joints())
                 }
-                TokenTree::Delimited(
-                    DelimSpan::from_single(span),
-                    token::Paren,
-                    TokenStream::new(tokens).into(),
-                ).into()
+                vec![
+                    TokenTree::Delimited(
+                        DelimSpan::from_single(span),
+                        token::Paren,
+                        TokenStream::new(tokens).into(),
+                    ).into()
+                ]
             }
         }
     }
 
+    // Premature conversions of `TokenTree`s to `TokenStream`s can hurt
+    // performance. Do not use this function if `token_trees_and_joints()` can
+    // be used instead.
+    pub fn tokens(&self, span: Span) -> TokenStream {
+        TokenStream::new(self.token_trees_and_joints(span))
+    }
+
     fn from_tokens<I>(tokens: &mut iter::Peekable<I>) -> Option<MetaItemKind>
         where I: Iterator<Item = TokenTree>,
     {
@@ -615,10 +613,10 @@ pub fn span(&self) -> Span {
         }
     }
 
-    fn tokens(&self) -> TokenStream {
+    fn token_trees_and_joints(&self) -> Vec<TreeAndJoint> {
         match *self {
-            NestedMetaItem::MetaItem(ref item) => item.tokens(),
-            NestedMetaItem::Literal(ref lit) => lit.tokens(),
+            NestedMetaItem::MetaItem(ref item) => item.token_trees_and_joints(),
+            NestedMetaItem::Literal(ref lit) => vec![lit.token_tree().into()],
         }
     }
 
index 2923cc86ba029a16c9b3cb8d6347bbd545e12b34..54dc95291d67f5d07ae31ac7374c88add4a02f5a 100644 (file)
@@ -10,8 +10,8 @@
 use crate::ast;
 use crate::edition::Edition;
 use crate::mut_visit::*;
-use crate::parse::{token, ParseSess};
 use crate::ptr::P;
+use crate::sess::ParseSess;
 use crate::symbol::sym;
 use crate::util::map_in_place::MapInPlace;
 
@@ -56,6 +56,7 @@ pub fn features(mut krate: ast::Crate, sess: &ParseSess, edition: Edition,
     (krate, features)
 }
 
+#[macro_export]
 macro_rules! configure {
     ($this:ident, $node:ident) => {
         match $this.configure($node) {
@@ -111,25 +112,7 @@ fn process_cfg_attr(&mut self, attr: ast::Attribute) -> Vec<ast::Attribute> {
             return vec![];
         }
 
-        let (cfg_predicate, expanded_attrs) = match attr.parse(self.sess, |parser| {
-            parser.expect(&token::OpenDelim(token::Paren))?;
-
-            let cfg_predicate = parser.parse_meta_item()?;
-            parser.expect(&token::Comma)?;
-
-            // Presumably, the majority of the time there will only be one attr.
-            let mut expanded_attrs = Vec::with_capacity(1);
-
-            while !parser.check(&token::CloseDelim(token::Paren)) {
-                let lo = parser.token.span.lo();
-                let item = parser.parse_attr_item()?;
-                expanded_attrs.push((item, parser.prev_span.with_lo(lo)));
-                parser.expect_one_of(&[token::Comma], &[token::CloseDelim(token::Paren)])?;
-            }
-
-            parser.expect(&token::CloseDelim(token::Paren))?;
-            Ok((cfg_predicate, expanded_attrs))
-        }) {
+        let (cfg_predicate, expanded_attrs) = match attr.parse(self.sess, |p| p.parse_cfg_attr()) {
             Ok(result) => result,
             Err(mut e) => {
                 e.emit();
index 36c1da2929975a3ec15ac7f4efbd58a7d0a16a99..5cc953b9066282400bd11c5493f90c1bf4a0873b 100644 (file)
@@ -11,6 +11,7 @@
 pub enum BufferedEarlyLintId {
     IllFormedAttributeInput,
     MetaVariableMisuse,
+    IncompleteInclude,
 }
 
 /// Stores buffered lint info which can later be passed to `librustc`.
index c0963026945b0f9039bb669f5c6ebd59c518b00a..17ea4767520646ae18be10ca27e26eb183302583 100644 (file)
@@ -163,6 +163,25 @@ fn the_banished() {} // ok!
 ```
 "##,
 
+E0551: r##"
+An invalid meta-item was used inside an attribute.
+
+Erroneous code example:
+
+```compile_fail,E0551
+#[deprecated(note)] // error!
+fn i_am_deprecated() {}
+```
+
+Meta items are the key-value pairs inside of an attribute. To fix this issue,
+you need to give a value to the `note` key. Example:
+
+```
+#[deprecated(note = "because")] // ok!
+fn i_am_deprecated() {}
+```
+"##,
+
 E0552: r##"
 An unrecognized representation attribute was used.
 
@@ -276,6 +295,33 @@ fn main() {}
 same directory.
 "##,
 
+E0584: r##"
+A doc comment that is not attached to anything has been encountered.
+
+Erroneous code example:
+
+```compile_fail,E0584
+trait Island {
+    fn lost();
+
+    /// I'm lost!
+}
+```
+
+A little reminder: a doc comment has to be placed before the item it's supposed
+to document. So if you want to document the `Island` trait, you need to put a
+doc comment before it, not inside it. Same goes for the `lost` method: the doc
+comment needs to be before it:
+
+```
+/// I'm THE island!
+trait Island {
+    /// I'm lost!
+    fn lost();
+}
+```
+"##,
+
 E0585: r##"
 A documentation comment that doesn't document anything was found.
 
@@ -473,10 +519,8 @@ pub(in crate::foo) struct Bar {
     // rustc_deprecated attribute must be paired with either stable or unstable
     // attribute
     E0549,
-    E0551, // incorrect meta item
     E0553, // multiple rustc_const_unstable attributes
 //  E0555, // replaced with a generic attribute input check
-    E0584, // file for module `..` found at both .. and ..
     E0629, // missing 'feature' (rustc_const_unstable)
     // rustc_const_unstable attribute must be paired with stable/unstable
     // attribute
diff --git a/src/libsyntax/ext/allocator.rs b/src/libsyntax/ext/allocator.rs
deleted file mode 100644 (file)
index 99aeb54..0000000
+++ /dev/null
@@ -1,75 +0,0 @@
-use crate::{ast, attr, visit};
-use crate::symbol::{sym, Symbol};
-use syntax_pos::Span;
-
-#[derive(Clone, Copy)]
-pub enum AllocatorKind {
-    Global,
-    DefaultLib,
-    DefaultExe,
-}
-
-impl AllocatorKind {
-    pub fn fn_name(&self, base: &str) -> String {
-        match *self {
-            AllocatorKind::Global => format!("__rg_{}", base),
-            AllocatorKind::DefaultLib => format!("__rdl_{}", base),
-            AllocatorKind::DefaultExe => format!("__rde_{}", base),
-        }
-    }
-}
-
-pub enum AllocatorTy {
-    Layout,
-    Ptr,
-    ResultPtr,
-    Unit,
-    Usize,
-}
-
-pub struct AllocatorMethod {
-    pub name: &'static str,
-    pub inputs: &'static [AllocatorTy],
-    pub output: AllocatorTy,
-}
-
-pub static ALLOCATOR_METHODS: &[AllocatorMethod] = &[
-    AllocatorMethod {
-        name: "alloc",
-        inputs: &[AllocatorTy::Layout],
-        output: AllocatorTy::ResultPtr,
-    },
-    AllocatorMethod {
-        name: "dealloc",
-        inputs: &[AllocatorTy::Ptr, AllocatorTy::Layout],
-        output: AllocatorTy::Unit,
-    },
-    AllocatorMethod {
-        name: "realloc",
-        inputs: &[AllocatorTy::Ptr, AllocatorTy::Layout, AllocatorTy::Usize],
-        output: AllocatorTy::ResultPtr,
-    },
-    AllocatorMethod {
-        name: "alloc_zeroed",
-        inputs: &[AllocatorTy::Layout],
-        output: AllocatorTy::ResultPtr,
-    },
-];
-
-pub fn global_allocator_spans(krate: &ast::Crate) -> Vec<Span> {
-    struct Finder { name: Symbol, spans: Vec<Span> }
-    impl<'ast> visit::Visitor<'ast> for Finder {
-        fn visit_item(&mut self, item: &'ast ast::Item) {
-            if item.ident.name == self.name &&
-               attr::contains_name(&item.attrs, sym::rustc_std_internal_symbol) {
-                self.spans.push(item.span);
-            }
-            visit::walk_item(self, item)
-        }
-    }
-
-    let name = Symbol::intern(&AllocatorKind::Global.fn_name("alloc"));
-    let mut f = Finder { name, spans: Vec::new() };
-    visit::walk_crate(&mut f, krate);
-    f.spans
-}
diff --git a/src/libsyntax/ext/base.rs b/src/libsyntax/ext/base.rs
deleted file mode 100644 (file)
index 583fb3f..0000000
+++ /dev/null
@@ -1,1187 +0,0 @@
-use crate::ast::{self, NodeId, Attribute, Name, PatKind};
-use crate::attr::{self, HasAttrs, Stability, Deprecation};
-use crate::source_map::SourceMap;
-use crate::edition::Edition;
-use crate::ext::expand::{self, AstFragment, Invocation};
-use crate::ext::hygiene::ExpnId;
-use crate::mut_visit::{self, MutVisitor};
-use crate::parse::{self, parser, ParseSess, DirectoryOwnership};
-use crate::parse::token;
-use crate::ptr::P;
-use crate::symbol::{kw, sym, Ident, Symbol};
-use crate::{ThinVec, MACRO_ARGUMENTS};
-use crate::tokenstream::{self, TokenStream};
-use crate::visit::Visitor;
-
-use errors::{DiagnosticBuilder, DiagnosticId};
-use smallvec::{smallvec, SmallVec};
-use syntax_pos::{FileName, Span, MultiSpan, DUMMY_SP};
-use syntax_pos::hygiene::{AstPass, ExpnData, ExpnKind};
-
-use rustc_data_structures::fx::FxHashMap;
-use rustc_data_structures::sync::{self, Lrc};
-use std::iter;
-use std::path::PathBuf;
-use std::rc::Rc;
-use std::default::Default;
-
-pub use syntax_pos::hygiene::MacroKind;
-
-#[derive(Debug,Clone)]
-pub enum Annotatable {
-    Item(P<ast::Item>),
-    TraitItem(P<ast::TraitItem>),
-    ImplItem(P<ast::ImplItem>),
-    ForeignItem(P<ast::ForeignItem>),
-    Stmt(P<ast::Stmt>),
-    Expr(P<ast::Expr>),
-    Arm(ast::Arm),
-    Field(ast::Field),
-    FieldPat(ast::FieldPat),
-    GenericParam(ast::GenericParam),
-    Param(ast::Param),
-    StructField(ast::StructField),
-    Variant(ast::Variant),
-}
-
-impl HasAttrs for Annotatable {
-    fn attrs(&self) -> &[Attribute] {
-        match *self {
-            Annotatable::Item(ref item) => &item.attrs,
-            Annotatable::TraitItem(ref trait_item) => &trait_item.attrs,
-            Annotatable::ImplItem(ref impl_item) => &impl_item.attrs,
-            Annotatable::ForeignItem(ref foreign_item) => &foreign_item.attrs,
-            Annotatable::Stmt(ref stmt) => stmt.attrs(),
-            Annotatable::Expr(ref expr) => &expr.attrs,
-            Annotatable::Arm(ref arm) => &arm.attrs,
-            Annotatable::Field(ref field) => &field.attrs,
-            Annotatable::FieldPat(ref fp) => &fp.attrs,
-            Annotatable::GenericParam(ref gp) => &gp.attrs,
-            Annotatable::Param(ref p) => &p.attrs,
-            Annotatable::StructField(ref sf) => &sf.attrs,
-            Annotatable::Variant(ref v) => &v.attrs(),
-        }
-    }
-
-    fn visit_attrs<F: FnOnce(&mut Vec<Attribute>)>(&mut self, f: F) {
-        match self {
-            Annotatable::Item(item) => item.visit_attrs(f),
-            Annotatable::TraitItem(trait_item) => trait_item.visit_attrs(f),
-            Annotatable::ImplItem(impl_item) => impl_item.visit_attrs(f),
-            Annotatable::ForeignItem(foreign_item) => foreign_item.visit_attrs(f),
-            Annotatable::Stmt(stmt) => stmt.visit_attrs(f),
-            Annotatable::Expr(expr) => expr.visit_attrs(f),
-            Annotatable::Arm(arm) => arm.visit_attrs(f),
-            Annotatable::Field(field) => field.visit_attrs(f),
-            Annotatable::FieldPat(fp) => fp.visit_attrs(f),
-            Annotatable::GenericParam(gp) => gp.visit_attrs(f),
-            Annotatable::Param(p) => p.visit_attrs(f),
-            Annotatable::StructField(sf) => sf.visit_attrs(f),
-            Annotatable::Variant(v) => v.visit_attrs(f),
-        }
-    }
-}
-
-impl Annotatable {
-    pub fn span(&self) -> Span {
-        match *self {
-            Annotatable::Item(ref item) => item.span,
-            Annotatable::TraitItem(ref trait_item) => trait_item.span,
-            Annotatable::ImplItem(ref impl_item) => impl_item.span,
-            Annotatable::ForeignItem(ref foreign_item) => foreign_item.span,
-            Annotatable::Stmt(ref stmt) => stmt.span,
-            Annotatable::Expr(ref expr) => expr.span,
-            Annotatable::Arm(ref arm) => arm.span,
-            Annotatable::Field(ref field) => field.span,
-            Annotatable::FieldPat(ref fp) => fp.pat.span,
-            Annotatable::GenericParam(ref gp) => gp.ident.span,
-            Annotatable::Param(ref p) => p.span,
-            Annotatable::StructField(ref sf) => sf.span,
-            Annotatable::Variant(ref v) => v.span,
-        }
-    }
-
-    pub fn visit_with<'a, V: Visitor<'a>>(&'a self, visitor: &mut V) {
-        match self {
-            Annotatable::Item(item) => visitor.visit_item(item),
-            Annotatable::TraitItem(trait_item) => visitor.visit_trait_item(trait_item),
-            Annotatable::ImplItem(impl_item) => visitor.visit_impl_item(impl_item),
-            Annotatable::ForeignItem(foreign_item) => visitor.visit_foreign_item(foreign_item),
-            Annotatable::Stmt(stmt) => visitor.visit_stmt(stmt),
-            Annotatable::Expr(expr) => visitor.visit_expr(expr),
-            Annotatable::Arm(arm) => visitor.visit_arm(arm),
-            Annotatable::Field(field) => visitor.visit_field(field),
-            Annotatable::FieldPat(fp) => visitor.visit_field_pattern(fp),
-            Annotatable::GenericParam(gp) => visitor.visit_generic_param(gp),
-            Annotatable::Param(p) => visitor.visit_param(p),
-            Annotatable::StructField(sf) =>visitor.visit_struct_field(sf),
-            Annotatable::Variant(v) => visitor.visit_variant(v),
-        }
-    }
-
-    pub fn expect_item(self) -> P<ast::Item> {
-        match self {
-            Annotatable::Item(i) => i,
-            _ => panic!("expected Item")
-        }
-    }
-
-    pub fn map_item_or<F, G>(self, mut f: F, mut or: G) -> Annotatable
-        where F: FnMut(P<ast::Item>) -> P<ast::Item>,
-              G: FnMut(Annotatable) -> Annotatable
-    {
-        match self {
-            Annotatable::Item(i) => Annotatable::Item(f(i)),
-            _ => or(self)
-        }
-    }
-
-    pub fn expect_trait_item(self) -> ast::TraitItem {
-        match self {
-            Annotatable::TraitItem(i) => i.into_inner(),
-            _ => panic!("expected Item")
-        }
-    }
-
-    pub fn expect_impl_item(self) -> ast::ImplItem {
-        match self {
-            Annotatable::ImplItem(i) => i.into_inner(),
-            _ => panic!("expected Item")
-        }
-    }
-
-    pub fn expect_foreign_item(self) -> ast::ForeignItem {
-        match self {
-            Annotatable::ForeignItem(i) => i.into_inner(),
-            _ => panic!("expected foreign item")
-        }
-    }
-
-    pub fn expect_stmt(self) -> ast::Stmt {
-        match self {
-            Annotatable::Stmt(stmt) => stmt.into_inner(),
-            _ => panic!("expected statement"),
-        }
-    }
-
-    pub fn expect_expr(self) -> P<ast::Expr> {
-        match self {
-            Annotatable::Expr(expr) => expr,
-            _ => panic!("expected expression"),
-        }
-    }
-
-    pub fn expect_arm(self) -> ast::Arm {
-        match self {
-            Annotatable::Arm(arm) => arm,
-            _ => panic!("expected match arm")
-        }
-    }
-
-    pub fn expect_field(self) -> ast::Field {
-        match self {
-            Annotatable::Field(field) => field,
-            _ => panic!("expected field")
-        }
-    }
-
-    pub fn expect_field_pattern(self) -> ast::FieldPat {
-        match self {
-            Annotatable::FieldPat(fp) => fp,
-            _ => panic!("expected field pattern")
-        }
-    }
-
-    pub fn expect_generic_param(self) -> ast::GenericParam {
-        match self {
-            Annotatable::GenericParam(gp) => gp,
-            _ => panic!("expected generic parameter")
-        }
-    }
-
-    pub fn expect_param(self) -> ast::Param {
-        match self {
-            Annotatable::Param(param) => param,
-            _ => panic!("expected parameter")
-        }
-    }
-
-    pub fn expect_struct_field(self) -> ast::StructField {
-        match self {
-            Annotatable::StructField(sf) => sf,
-            _ => panic!("expected struct field")
-        }
-    }
-
-    pub fn expect_variant(self) -> ast::Variant {
-        match self {
-            Annotatable::Variant(v) => v,
-            _ => panic!("expected variant")
-        }
-    }
-
-    pub fn derive_allowed(&self) -> bool {
-        match *self {
-            Annotatable::Item(ref item) => match item.kind {
-                ast::ItemKind::Struct(..) |
-                ast::ItemKind::Enum(..) |
-                ast::ItemKind::Union(..) => true,
-                _ => false,
-            },
-            _ => false,
-        }
-    }
-}
-
-// `meta_item` is the annotation, and `item` is the item being modified.
-// FIXME Decorators should follow the same pattern too.
-pub trait MultiItemModifier {
-    fn expand(&self,
-              ecx: &mut ExtCtxt<'_>,
-              span: Span,
-              meta_item: &ast::MetaItem,
-              item: Annotatable)
-              -> Vec<Annotatable>;
-}
-
-impl<F, T> MultiItemModifier for F
-    where F: Fn(&mut ExtCtxt<'_>, Span, &ast::MetaItem, Annotatable) -> T,
-          T: Into<Vec<Annotatable>>,
-{
-    fn expand(&self,
-              ecx: &mut ExtCtxt<'_>,
-              span: Span,
-              meta_item: &ast::MetaItem,
-              item: Annotatable)
-              -> Vec<Annotatable> {
-        (*self)(ecx, span, meta_item, item).into()
-    }
-}
-
-impl Into<Vec<Annotatable>> for Annotatable {
-    fn into(self) -> Vec<Annotatable> {
-        vec![self]
-    }
-}
-
-pub trait ProcMacro {
-    fn expand<'cx>(&self,
-                   ecx: &'cx mut ExtCtxt<'_>,
-                   span: Span,
-                   ts: TokenStream)
-                   -> TokenStream;
-}
-
-impl<F> ProcMacro for F
-    where F: Fn(TokenStream) -> TokenStream
-{
-    fn expand<'cx>(&self,
-                   _ecx: &'cx mut ExtCtxt<'_>,
-                   _span: Span,
-                   ts: TokenStream)
-                   -> TokenStream {
-        // FIXME setup implicit context in TLS before calling self.
-        (*self)(ts)
-    }
-}
-
-pub trait AttrProcMacro {
-    fn expand<'cx>(&self,
-                   ecx: &'cx mut ExtCtxt<'_>,
-                   span: Span,
-                   annotation: TokenStream,
-                   annotated: TokenStream)
-                   -> TokenStream;
-}
-
-impl<F> AttrProcMacro for F
-    where F: Fn(TokenStream, TokenStream) -> TokenStream
-{
-    fn expand<'cx>(&self,
-                   _ecx: &'cx mut ExtCtxt<'_>,
-                   _span: Span,
-                   annotation: TokenStream,
-                   annotated: TokenStream)
-                   -> TokenStream {
-        // FIXME setup implicit context in TLS before calling self.
-        (*self)(annotation, annotated)
-    }
-}
-
-/// Represents a thing that maps token trees to Macro Results
-pub trait TTMacroExpander {
-    fn expand<'cx>(
-        &self,
-        ecx: &'cx mut ExtCtxt<'_>,
-        span: Span,
-        input: TokenStream,
-    ) -> Box<dyn MacResult+'cx>;
-}
-
-pub type MacroExpanderFn =
-    for<'cx> fn(&'cx mut ExtCtxt<'_>, Span, TokenStream)
-                -> Box<dyn MacResult+'cx>;
-
-impl<F> TTMacroExpander for F
-    where F: for<'cx> Fn(&'cx mut ExtCtxt<'_>, Span, TokenStream)
-    -> Box<dyn MacResult+'cx>
-{
-    fn expand<'cx>(
-        &self,
-        ecx: &'cx mut ExtCtxt<'_>,
-        span: Span,
-        mut input: TokenStream,
-    ) -> Box<dyn MacResult+'cx> {
-        struct AvoidInterpolatedIdents;
-
-        impl MutVisitor for AvoidInterpolatedIdents {
-            fn visit_tt(&mut self, tt: &mut tokenstream::TokenTree) {
-                if let tokenstream::TokenTree::Token(token) = tt {
-                    if let token::Interpolated(nt) = &token.kind {
-                        if let token::NtIdent(ident, is_raw) = **nt {
-                            *tt = tokenstream::TokenTree::token(
-                                token::Ident(ident.name, is_raw), ident.span
-                            );
-                        }
-                    }
-                }
-                mut_visit::noop_visit_tt(tt, self)
-            }
-
-            fn visit_mac(&mut self, mac: &mut ast::Mac) {
-                mut_visit::noop_visit_mac(mac, self)
-            }
-        }
-        AvoidInterpolatedIdents.visit_tts(&mut input);
-        (*self)(ecx, span, input)
-    }
-}
-
-// Use a macro because forwarding to a simple function has type system issues
-macro_rules! make_stmts_default {
-    ($me:expr) => {
-        $me.make_expr().map(|e| smallvec![ast::Stmt {
-            id: ast::DUMMY_NODE_ID,
-            span: e.span,
-            kind: ast::StmtKind::Expr(e),
-        }])
-    }
-}
-
-/// The result of a macro expansion. The return values of the various
-/// methods are spliced into the AST at the callsite of the macro.
-pub trait MacResult {
-    /// Creates an expression.
-    fn make_expr(self: Box<Self>) -> Option<P<ast::Expr>> {
-        None
-    }
-    /// Creates zero or more items.
-    fn make_items(self: Box<Self>) -> Option<SmallVec<[P<ast::Item>; 1]>> {
-        None
-    }
-
-    /// Creates zero or more impl items.
-    fn make_impl_items(self: Box<Self>) -> Option<SmallVec<[ast::ImplItem; 1]>> {
-        None
-    }
-
-    /// Creates zero or more trait items.
-    fn make_trait_items(self: Box<Self>) -> Option<SmallVec<[ast::TraitItem; 1]>> {
-        None
-    }
-
-    /// Creates zero or more items in an `extern {}` block
-    fn make_foreign_items(self: Box<Self>) -> Option<SmallVec<[ast::ForeignItem; 1]>> { None }
-
-    /// Creates a pattern.
-    fn make_pat(self: Box<Self>) -> Option<P<ast::Pat>> {
-        None
-    }
-
-    /// Creates zero or more statements.
-    ///
-    /// By default this attempts to create an expression statement,
-    /// returning None if that fails.
-    fn make_stmts(self: Box<Self>) -> Option<SmallVec<[ast::Stmt; 1]>> {
-        make_stmts_default!(self)
-    }
-
-    fn make_ty(self: Box<Self>) -> Option<P<ast::Ty>> {
-        None
-    }
-
-    fn make_arms(self: Box<Self>) -> Option<SmallVec<[ast::Arm; 1]>> {
-        None
-    }
-
-    fn make_fields(self: Box<Self>) -> Option<SmallVec<[ast::Field; 1]>> {
-        None
-    }
-
-    fn make_field_patterns(self: Box<Self>) -> Option<SmallVec<[ast::FieldPat; 1]>> {
-        None
-    }
-
-    fn make_generic_params(self: Box<Self>) -> Option<SmallVec<[ast::GenericParam; 1]>> {
-        None
-    }
-
-    fn make_params(self: Box<Self>) -> Option<SmallVec<[ast::Param; 1]>> {
-        None
-    }
-
-    fn make_struct_fields(self: Box<Self>) -> Option<SmallVec<[ast::StructField; 1]>> {
-        None
-    }
-
-    fn make_variants(self: Box<Self>) -> Option<SmallVec<[ast::Variant; 1]>> {
-        None
-    }
-}
-
-macro_rules! make_MacEager {
-    ( $( $fld:ident: $t:ty, )* ) => {
-        /// `MacResult` implementation for the common case where you've already
-        /// built each form of AST that you might return.
-        #[derive(Default)]
-        pub struct MacEager {
-            $(
-                pub $fld: Option<$t>,
-            )*
-        }
-
-        impl MacEager {
-            $(
-                pub fn $fld(v: $t) -> Box<dyn MacResult> {
-                    Box::new(MacEager {
-                        $fld: Some(v),
-                        ..Default::default()
-                    })
-                }
-            )*
-        }
-    }
-}
-
-make_MacEager! {
-    expr: P<ast::Expr>,
-    pat: P<ast::Pat>,
-    items: SmallVec<[P<ast::Item>; 1]>,
-    impl_items: SmallVec<[ast::ImplItem; 1]>,
-    trait_items: SmallVec<[ast::TraitItem; 1]>,
-    foreign_items: SmallVec<[ast::ForeignItem; 1]>,
-    stmts: SmallVec<[ast::Stmt; 1]>,
-    ty: P<ast::Ty>,
-}
-
-impl MacResult for MacEager {
-    fn make_expr(self: Box<Self>) -> Option<P<ast::Expr>> {
-        self.expr
-    }
-
-    fn make_items(self: Box<Self>) -> Option<SmallVec<[P<ast::Item>; 1]>> {
-        self.items
-    }
-
-    fn make_impl_items(self: Box<Self>) -> Option<SmallVec<[ast::ImplItem; 1]>> {
-        self.impl_items
-    }
-
-    fn make_trait_items(self: Box<Self>) -> Option<SmallVec<[ast::TraitItem; 1]>> {
-        self.trait_items
-    }
-
-    fn make_foreign_items(self: Box<Self>) -> Option<SmallVec<[ast::ForeignItem; 1]>> {
-        self.foreign_items
-    }
-
-    fn make_stmts(self: Box<Self>) -> Option<SmallVec<[ast::Stmt; 1]>> {
-        match self.stmts.as_ref().map_or(0, |s| s.len()) {
-            0 => make_stmts_default!(self),
-            _ => self.stmts,
-        }
-    }
-
-    fn make_pat(self: Box<Self>) -> Option<P<ast::Pat>> {
-        if let Some(p) = self.pat {
-            return Some(p);
-        }
-        if let Some(e) = self.expr {
-            if let ast::ExprKind::Lit(_) = e.kind {
-                return Some(P(ast::Pat {
-                    id: ast::DUMMY_NODE_ID,
-                    span: e.span,
-                    kind: PatKind::Lit(e),
-                }));
-            }
-        }
-        None
-    }
-
-    fn make_ty(self: Box<Self>) -> Option<P<ast::Ty>> {
-        self.ty
-    }
-}
-
-/// Fill-in macro expansion result, to allow compilation to continue
-/// after hitting errors.
-#[derive(Copy, Clone)]
-pub struct DummyResult {
-    is_error: bool,
-    span: Span,
-}
-
-impl DummyResult {
-    /// Creates a default MacResult that can be anything.
-    ///
-    /// Use this as a return value after hitting any errors and
-    /// calling `span_err`.
-    pub fn any(span: Span) -> Box<dyn MacResult+'static> {
-        Box::new(DummyResult { is_error: true, span })
-    }
-
-    /// Same as `any`, but must be a valid fragment, not error.
-    pub fn any_valid(span: Span) -> Box<dyn MacResult+'static> {
-        Box::new(DummyResult { is_error: false, span })
-    }
-
-    /// A plain dummy expression.
-    pub fn raw_expr(sp: Span, is_error: bool) -> P<ast::Expr> {
-        P(ast::Expr {
-            id: ast::DUMMY_NODE_ID,
-            kind: if is_error { ast::ExprKind::Err } else { ast::ExprKind::Tup(Vec::new()) },
-            span: sp,
-            attrs: ThinVec::new(),
-        })
-    }
-
-    /// A plain dummy pattern.
-    pub fn raw_pat(sp: Span) -> ast::Pat {
-        ast::Pat {
-            id: ast::DUMMY_NODE_ID,
-            kind: PatKind::Wild,
-            span: sp,
-        }
-    }
-
-    /// A plain dummy type.
-    pub fn raw_ty(sp: Span, is_error: bool) -> P<ast::Ty> {
-        P(ast::Ty {
-            id: ast::DUMMY_NODE_ID,
-            kind: if is_error { ast::TyKind::Err } else { ast::TyKind::Tup(Vec::new()) },
-            span: sp
-        })
-    }
-}
-
-impl MacResult for DummyResult {
-    fn make_expr(self: Box<DummyResult>) -> Option<P<ast::Expr>> {
-        Some(DummyResult::raw_expr(self.span, self.is_error))
-    }
-
-    fn make_pat(self: Box<DummyResult>) -> Option<P<ast::Pat>> {
-        Some(P(DummyResult::raw_pat(self.span)))
-    }
-
-    fn make_items(self: Box<DummyResult>) -> Option<SmallVec<[P<ast::Item>; 1]>> {
-        Some(SmallVec::new())
-    }
-
-    fn make_impl_items(self: Box<DummyResult>) -> Option<SmallVec<[ast::ImplItem; 1]>> {
-        Some(SmallVec::new())
-    }
-
-    fn make_trait_items(self: Box<DummyResult>) -> Option<SmallVec<[ast::TraitItem; 1]>> {
-        Some(SmallVec::new())
-    }
-
-    fn make_foreign_items(self: Box<Self>) -> Option<SmallVec<[ast::ForeignItem; 1]>> {
-        Some(SmallVec::new())
-    }
-
-    fn make_stmts(self: Box<DummyResult>) -> Option<SmallVec<[ast::Stmt; 1]>> {
-        Some(smallvec![ast::Stmt {
-            id: ast::DUMMY_NODE_ID,
-            kind: ast::StmtKind::Expr(DummyResult::raw_expr(self.span, self.is_error)),
-            span: self.span,
-        }])
-    }
-
-    fn make_ty(self: Box<DummyResult>) -> Option<P<ast::Ty>> {
-        Some(DummyResult::raw_ty(self.span, self.is_error))
-    }
-
-    fn make_arms(self: Box<DummyResult>) -> Option<SmallVec<[ast::Arm; 1]>> {
-       Some(SmallVec::new())
-    }
-
-    fn make_fields(self: Box<DummyResult>) -> Option<SmallVec<[ast::Field; 1]>> {
-        Some(SmallVec::new())
-    }
-
-    fn make_field_patterns(self: Box<DummyResult>) -> Option<SmallVec<[ast::FieldPat; 1]>> {
-        Some(SmallVec::new())
-    }
-
-    fn make_generic_params(self: Box<DummyResult>) -> Option<SmallVec<[ast::GenericParam; 1]>> {
-        Some(SmallVec::new())
-    }
-
-    fn make_params(self: Box<DummyResult>) -> Option<SmallVec<[ast::Param; 1]>> {
-        Some(SmallVec::new())
-    }
-
-    fn make_struct_fields(self: Box<DummyResult>) -> Option<SmallVec<[ast::StructField; 1]>> {
-        Some(SmallVec::new())
-    }
-
-    fn make_variants(self: Box<DummyResult>) -> Option<SmallVec<[ast::Variant; 1]>> {
-        Some(SmallVec::new())
-    }
-}
-
-/// A syntax extension kind.
-pub enum SyntaxExtensionKind {
-    /// A token-based function-like macro.
-    Bang(
-        /// An expander with signature TokenStream -> TokenStream.
-        Box<dyn ProcMacro + sync::Sync + sync::Send>,
-    ),
-
-    /// An AST-based function-like macro.
-    LegacyBang(
-        /// An expander with signature TokenStream -> AST.
-        Box<dyn TTMacroExpander + sync::Sync + sync::Send>,
-    ),
-
-    /// A token-based attribute macro.
-    Attr(
-        /// An expander with signature (TokenStream, TokenStream) -> TokenStream.
-        /// The first TokenSteam is the attribute itself, the second is the annotated item.
-        /// The produced TokenSteam replaces the input TokenSteam.
-        Box<dyn AttrProcMacro + sync::Sync + sync::Send>,
-    ),
-
-    /// An AST-based attribute macro.
-    LegacyAttr(
-        /// An expander with signature (AST, AST) -> AST.
-        /// The first AST fragment is the attribute itself, the second is the annotated item.
-        /// The produced AST fragment replaces the input AST fragment.
-        Box<dyn MultiItemModifier + sync::Sync + sync::Send>,
-    ),
-
-    /// A trivial attribute "macro" that does nothing,
-    /// only keeps the attribute and marks it as inert,
-    /// thus making it ineligible for further expansion.
-    NonMacroAttr {
-        /// Suppresses the `unused_attributes` lint for this attribute.
-        mark_used: bool,
-    },
-
-    /// A token-based derive macro.
-    Derive(
-        /// An expander with signature TokenStream -> TokenStream (not yet).
-        /// The produced TokenSteam is appended to the input TokenSteam.
-        Box<dyn MultiItemModifier + sync::Sync + sync::Send>,
-    ),
-
-    /// An AST-based derive macro.
-    LegacyDerive(
-        /// An expander with signature AST -> AST.
-        /// The produced AST fragment is appended to the input AST fragment.
-        Box<dyn MultiItemModifier + sync::Sync + sync::Send>,
-    ),
-}
-
-/// A struct representing a macro definition in "lowered" form ready for expansion.
-pub struct SyntaxExtension {
-    /// A syntax extension kind.
-    pub kind: SyntaxExtensionKind,
-    /// Span of the macro definition.
-    pub span: Span,
-    /// Whitelist of unstable features that are treated as stable inside this macro.
-    pub allow_internal_unstable: Option<Lrc<[Symbol]>>,
-    /// Suppresses the `unsafe_code` lint for code produced by this macro.
-    pub allow_internal_unsafe: bool,
-    /// Enables the macro helper hack (`ident!(...)` -> `$crate::ident!(...)`) for this macro.
-    pub local_inner_macros: bool,
-    /// The macro's stability info.
-    pub stability: Option<Stability>,
-    /// The macro's deprecation info.
-    pub deprecation: Option<Deprecation>,
-    /// Names of helper attributes registered by this macro.
-    pub helper_attrs: Vec<Symbol>,
-    /// Edition of the crate in which this macro is defined.
-    pub edition: Edition,
-    /// Built-in macros have a couple of special properties like availability
-    /// in `#[no_implicit_prelude]` modules, so we have to keep this flag.
-    pub is_builtin: bool,
-    /// We have to identify macros providing a `Copy` impl early for compatibility reasons.
-    pub is_derive_copy: bool,
-}
-
-impl SyntaxExtension {
-    /// Returns which kind of macro calls this syntax extension.
-    pub fn macro_kind(&self) -> MacroKind {
-        match self.kind {
-            SyntaxExtensionKind::Bang(..) |
-            SyntaxExtensionKind::LegacyBang(..) => MacroKind::Bang,
-            SyntaxExtensionKind::Attr(..) |
-            SyntaxExtensionKind::LegacyAttr(..) |
-            SyntaxExtensionKind::NonMacroAttr { .. } => MacroKind::Attr,
-            SyntaxExtensionKind::Derive(..) |
-            SyntaxExtensionKind::LegacyDerive(..) => MacroKind::Derive,
-        }
-    }
-
-    /// Constructs a syntax extension with default properties.
-    pub fn default(kind: SyntaxExtensionKind, edition: Edition) -> SyntaxExtension {
-        SyntaxExtension {
-            span: DUMMY_SP,
-            allow_internal_unstable: None,
-            allow_internal_unsafe: false,
-            local_inner_macros: false,
-            stability: None,
-            deprecation: None,
-            helper_attrs: Vec::new(),
-            edition,
-            is_builtin: false,
-            is_derive_copy: false,
-            kind,
-        }
-    }
-
-    /// Constructs a syntax extension with the given properties
-    /// and other properties converted from attributes.
-    pub fn new(
-        sess: &ParseSess,
-        kind: SyntaxExtensionKind,
-        span: Span,
-        helper_attrs: Vec<Symbol>,
-        edition: Edition,
-        name: Name,
-        attrs: &[ast::Attribute],
-    ) -> SyntaxExtension {
-        let allow_internal_unstable = attr::allow_internal_unstable(
-            &attrs, &sess.span_diagnostic,
-        ).map(|features| features.collect::<Vec<Symbol>>().into());
-
-        let mut local_inner_macros = false;
-        if let Some(macro_export) = attr::find_by_name(attrs, sym::macro_export) {
-            if let Some(l) = macro_export.meta_item_list() {
-                local_inner_macros = attr::list_contains_name(&l, sym::local_inner_macros);
-            }
-        }
-
-        let is_builtin = attr::contains_name(attrs, sym::rustc_builtin_macro);
-
-        SyntaxExtension {
-            kind,
-            span,
-            allow_internal_unstable,
-            allow_internal_unsafe: attr::contains_name(attrs, sym::allow_internal_unsafe),
-            local_inner_macros,
-            stability: attr::find_stability(&sess, attrs, span),
-            deprecation: attr::find_deprecation(&sess, attrs, span),
-            helper_attrs,
-            edition,
-            is_builtin,
-            is_derive_copy: is_builtin && name == sym::Copy,
-        }
-    }
-
-    pub fn dummy_bang(edition: Edition) -> SyntaxExtension {
-        fn expander<'cx>(_: &'cx mut ExtCtxt<'_>, span: Span, _: TokenStream)
-                         -> Box<dyn MacResult + 'cx> {
-            DummyResult::any(span)
-        }
-        SyntaxExtension::default(SyntaxExtensionKind::LegacyBang(Box::new(expander)), edition)
-    }
-
-    pub fn dummy_derive(edition: Edition) -> SyntaxExtension {
-        fn expander(_: &mut ExtCtxt<'_>, _: Span, _: &ast::MetaItem, _: Annotatable)
-                    -> Vec<Annotatable> {
-            Vec::new()
-        }
-        SyntaxExtension::default(SyntaxExtensionKind::Derive(Box::new(expander)), edition)
-    }
-
-    pub fn non_macro_attr(mark_used: bool, edition: Edition) -> SyntaxExtension {
-        SyntaxExtension::default(SyntaxExtensionKind::NonMacroAttr { mark_used }, edition)
-    }
-
-    pub fn expn_data(&self, parent: ExpnId, call_site: Span, descr: Symbol) -> ExpnData {
-        ExpnData {
-            kind: ExpnKind::Macro(self.macro_kind(), descr),
-            parent,
-            call_site,
-            def_site: self.span,
-            allow_internal_unstable: self.allow_internal_unstable.clone(),
-            allow_internal_unsafe: self.allow_internal_unsafe,
-            local_inner_macros: self.local_inner_macros,
-            edition: self.edition,
-        }
-    }
-}
-
-pub type NamedSyntaxExtension = (Name, SyntaxExtension);
-
-/// Result of resolving a macro invocation.
-pub enum InvocationRes {
-    Single(Lrc<SyntaxExtension>),
-    DeriveContainer(Vec<Lrc<SyntaxExtension>>),
-}
-
-/// Error type that denotes indeterminacy.
-pub struct Indeterminate;
-
-bitflags::bitflags! {
-    /// Built-in derives that need some extra tracking beyond the usual macro functionality.
-    #[derive(Default)]
-    pub struct SpecialDerives: u8 {
-        const PARTIAL_EQ = 1 << 0;
-        const EQ         = 1 << 1;
-        const COPY       = 1 << 2;
-    }
-}
-
-pub trait Resolver {
-    fn next_node_id(&mut self) -> NodeId;
-
-    fn resolve_dollar_crates(&mut self);
-    fn visit_ast_fragment_with_placeholders(&mut self, expn_id: ExpnId, fragment: &AstFragment,
-                                            extra_placeholders: &[NodeId]);
-    fn register_builtin_macro(&mut self, ident: ast::Ident, ext: SyntaxExtension);
-
-    fn expansion_for_ast_pass(
-        &mut self,
-        call_site: Span,
-        pass: AstPass,
-        features: &[Symbol],
-        parent_module_id: Option<NodeId>,
-    ) -> ExpnId;
-
-    fn resolve_imports(&mut self);
-
-    fn resolve_macro_invocation(
-        &mut self, invoc: &Invocation, eager_expansion_root: ExpnId, force: bool
-    ) -> Result<InvocationRes, Indeterminate>;
-
-    fn check_unused_macros(&self);
-
-    fn has_derives(&self, expn_id: ExpnId, derives: SpecialDerives) -> bool;
-    fn add_derives(&mut self, expn_id: ExpnId, derives: SpecialDerives);
-}
-
-#[derive(Clone)]
-pub struct ModuleData {
-    pub mod_path: Vec<ast::Ident>,
-    pub directory: PathBuf,
-}
-
-#[derive(Clone)]
-pub struct ExpansionData {
-    pub id: ExpnId,
-    pub depth: usize,
-    pub module: Rc<ModuleData>,
-    pub directory_ownership: DirectoryOwnership,
-    pub prior_type_ascription: Option<(Span, bool)>,
-}
-
-/// One of these is made during expansion and incrementally updated as we go;
-/// when a macro expansion occurs, the resulting nodes have the `backtrace()
-/// -> expn_data` of their expansion context stored into their span.
-pub struct ExtCtxt<'a> {
-    pub parse_sess: &'a parse::ParseSess,
-    pub ecfg: expand::ExpansionConfig<'a>,
-    pub root_path: PathBuf,
-    pub resolver: &'a mut dyn Resolver,
-    pub current_expansion: ExpansionData,
-    pub expansions: FxHashMap<Span, Vec<String>>,
-}
-
-impl<'a> ExtCtxt<'a> {
-    pub fn new(parse_sess: &'a parse::ParseSess,
-               ecfg: expand::ExpansionConfig<'a>,
-               resolver: &'a mut dyn Resolver)
-               -> ExtCtxt<'a> {
-        ExtCtxt {
-            parse_sess,
-            ecfg,
-            root_path: PathBuf::new(),
-            resolver,
-            current_expansion: ExpansionData {
-                id: ExpnId::root(),
-                depth: 0,
-                module: Rc::new(ModuleData { mod_path: Vec::new(), directory: PathBuf::new() }),
-                directory_ownership: DirectoryOwnership::Owned { relative: None },
-                prior_type_ascription: None,
-            },
-            expansions: FxHashMap::default(),
-        }
-    }
-
-    /// Returns a `Folder` for deeply expanding all macros in an AST node.
-    pub fn expander<'b>(&'b mut self) -> expand::MacroExpander<'b, 'a> {
-        expand::MacroExpander::new(self, false)
-    }
-
-    /// Returns a `Folder` that deeply expands all macros and assigns all `NodeId`s in an AST node.
-    /// Once `NodeId`s are assigned, the node may not be expanded, removed, or otherwise modified.
-    pub fn monotonic_expander<'b>(&'b mut self) -> expand::MacroExpander<'b, 'a> {
-        expand::MacroExpander::new(self, true)
-    }
-    pub fn new_parser_from_tts(&self, stream: TokenStream) -> parser::Parser<'a> {
-        parse::stream_to_parser(self.parse_sess, stream, MACRO_ARGUMENTS)
-    }
-    pub fn source_map(&self) -> &'a SourceMap { self.parse_sess.source_map() }
-    pub fn parse_sess(&self) -> &'a parse::ParseSess { self.parse_sess }
-    pub fn cfg(&self) -> &ast::CrateConfig { &self.parse_sess.config }
-    pub fn call_site(&self) -> Span {
-        self.current_expansion.id.expn_data().call_site
-    }
-
-    /// Equivalent of `Span::def_site` from the proc macro API,
-    /// except that the location is taken from the span passed as an argument.
-    pub fn with_def_site_ctxt(&self, span: Span) -> Span {
-        span.with_def_site_ctxt(self.current_expansion.id)
-    }
-
-    /// Equivalent of `Span::call_site` from the proc macro API,
-    /// except that the location is taken from the span passed as an argument.
-    pub fn with_call_site_ctxt(&self, span: Span) -> Span {
-        span.with_call_site_ctxt(self.current_expansion.id)
-    }
-
-    /// Equivalent of `Span::mixed_site` from the proc macro API,
-    /// except that the location is taken from the span passed as an argument.
-    pub fn with_mixed_site_ctxt(&self, span: Span) -> Span {
-        span.with_mixed_site_ctxt(self.current_expansion.id)
-    }
-
-    /// Returns span for the macro which originally caused the current expansion to happen.
-    ///
-    /// Stops backtracing at the `include!` boundary.
-    pub fn expansion_cause(&self) -> Option<Span> {
-        let mut expn_id = self.current_expansion.id;
-        let mut last_macro = None;
-        loop {
-            let expn_data = expn_id.expn_data();
-            // Stop going up the backtrace once include! is encountered
-            if expn_data.is_root() || expn_data.kind.descr() == sym::include {
-                break;
-            }
-            expn_id = expn_data.call_site.ctxt().outer_expn();
-            last_macro = Some(expn_data.call_site);
-        }
-        last_macro
-    }
-
-    pub fn struct_span_warn<S: Into<MultiSpan>>(&self,
-                                                sp: S,
-                                                msg: &str)
-                                                -> DiagnosticBuilder<'a> {
-        self.parse_sess.span_diagnostic.struct_span_warn(sp, msg)
-    }
-    pub fn struct_span_err<S: Into<MultiSpan>>(&self,
-                                               sp: S,
-                                               msg: &str)
-                                               -> DiagnosticBuilder<'a> {
-        self.parse_sess.span_diagnostic.struct_span_err(sp, msg)
-    }
-    pub fn struct_span_fatal<S: Into<MultiSpan>>(&self,
-                                                 sp: S,
-                                                 msg: &str)
-                                                 -> DiagnosticBuilder<'a> {
-        self.parse_sess.span_diagnostic.struct_span_fatal(sp, msg)
-    }
-
-    /// Emit `msg` attached to `sp`, and stop compilation immediately.
-    ///
-    /// `span_err` should be strongly preferred wherever possible:
-    /// this should *only* be used when:
-    ///
-    /// - continuing has a high risk of flow-on errors (e.g., errors in
-    ///   declaring a macro would cause all uses of that macro to
-    ///   complain about "undefined macro"), or
-    /// - there is literally nothing else that can be done (however,
-    ///   in most cases one can construct a dummy expression/item to
-    ///   substitute; we never hit resolve/type-checking so the dummy
-    ///   value doesn't have to match anything)
-    pub fn span_fatal<S: Into<MultiSpan>>(&self, sp: S, msg: &str) -> ! {
-        self.parse_sess.span_diagnostic.span_fatal(sp, msg).raise();
-    }
-
-    /// Emit `msg` attached to `sp`, without immediately stopping
-    /// compilation.
-    ///
-    /// Compilation will be stopped in the near future (at the end of
-    /// the macro expansion phase).
-    pub fn span_err<S: Into<MultiSpan>>(&self, sp: S, msg: &str) {
-        self.parse_sess.span_diagnostic.span_err(sp, msg);
-    }
-    pub fn span_err_with_code<S: Into<MultiSpan>>(&self, sp: S, msg: &str, code: DiagnosticId) {
-        self.parse_sess.span_diagnostic.span_err_with_code(sp, msg, code);
-    }
-    pub fn span_warn<S: Into<MultiSpan>>(&self, sp: S, msg: &str) {
-        self.parse_sess.span_diagnostic.span_warn(sp, msg);
-    }
-    pub fn span_bug<S: Into<MultiSpan>>(&self, sp: S, msg: &str) -> ! {
-        self.parse_sess.span_diagnostic.span_bug(sp, msg);
-    }
-    pub fn trace_macros_diag(&mut self) {
-        for (sp, notes) in self.expansions.iter() {
-            let mut db = self.parse_sess.span_diagnostic.span_note_diag(*sp, "trace_macro");
-            for note in notes {
-                db.note(note);
-            }
-            db.emit();
-        }
-        // FIXME: does this result in errors?
-        self.expansions.clear();
-    }
-    pub fn bug(&self, msg: &str) -> ! {
-        self.parse_sess.span_diagnostic.bug(msg);
-    }
-    pub fn trace_macros(&self) -> bool {
-        self.ecfg.trace_mac
-    }
-    pub fn set_trace_macros(&mut self, x: bool) {
-        self.ecfg.trace_mac = x
-    }
-    pub fn ident_of(&self, st: &str, sp: Span) -> ast::Ident {
-        ast::Ident::from_str_and_span(st, sp)
-    }
-    pub fn std_path(&self, components: &[Symbol]) -> Vec<ast::Ident> {
-        let def_site = self.with_def_site_ctxt(DUMMY_SP);
-        iter::once(Ident::new(kw::DollarCrate, def_site))
-            .chain(components.iter().map(|&s| Ident::with_dummy_span(s)))
-            .collect()
-    }
-    pub fn name_of(&self, st: &str) -> ast::Name {
-        Symbol::intern(st)
-    }
-
-    pub fn check_unused_macros(&self) {
-        self.resolver.check_unused_macros();
-    }
-
-    /// Resolves a path mentioned inside Rust code.
-    ///
-    /// This unifies the logic used for resolving `include_X!` and `#[doc(include)]` file paths.
-    ///
-    /// Returns an absolute path to the file that `path` refers to.
-    pub fn resolve_path(&self, path: impl Into<PathBuf>, span: Span) -> PathBuf {
-        let path = path.into();
-
-        // Relative paths are resolved relative to the file in which they are found
-        // after macro expansion (that is, they are unhygienic).
-        if !path.is_absolute() {
-            let callsite = span.source_callsite();
-            let mut result = match self.source_map().span_to_unmapped_path(callsite) {
-                FileName::Real(path) => path,
-                FileName::DocTest(path, _) => path,
-                other => panic!("cannot resolve relative path in non-file source `{}`", other),
-            };
-            result.pop();
-            result.push(path);
-            result
-        } else {
-            path
-        }
-    }
-}
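For illustration, a minimal sketch (not part of this diff) of how an expander might call `resolve_path`; `cx: &ExtCtxt<'_>` and `sp: Span` are assumed to be in scope, and the literal paths are hypothetical:

    // A relative argument, such as the one given to `include_str!("data/msg.txt")`,
    // is resolved against the directory of the file containing the call site.
    let full: PathBuf = cx.resolve_path("data/msg.txt", sp);
    // An absolute argument is returned unchanged.
    let abs: PathBuf = cx.resolve_path("/etc/hosts", sp);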
-
-/// Extracts a string literal from the macro-expanded version of `expr`,
-/// emitting `err_msg` if `expr` is not a string literal. This does not stop
-/// compilation on error; it merely emits a non-fatal error and returns `None`.
-pub fn expr_to_spanned_string<'a>(
-    cx: &'a mut ExtCtxt<'_>,
-    expr: P<ast::Expr>,
-    err_msg: &str,
-) -> Result<(Symbol, ast::StrStyle, Span), Option<DiagnosticBuilder<'a>>> {
-    // Perform eager expansion on the expression.
-    // We want to be able to handle e.g., `concat!("foo", "bar")`.
-    let expr = cx.expander().fully_expand_fragment(AstFragment::Expr(expr)).make_expr();
-
-    Err(match expr.kind {
-        ast::ExprKind::Lit(ref l) => match l.kind {
-            ast::LitKind::Str(s, style) => return Ok((s, style, expr.span)),
-            ast::LitKind::Err(_) => None,
-            _ => Some(cx.struct_span_err(l.span, err_msg))
-        },
-        ast::ExprKind::Err => None,
-        _ => Some(cx.struct_span_err(expr.span, err_msg))
-    })
-}
-
-pub fn expr_to_string(cx: &mut ExtCtxt<'_>, expr: P<ast::Expr>, err_msg: &str)
-                      -> Option<(Symbol, ast::StrStyle)> {
-    expr_to_spanned_string(cx, expr, err_msg)
-        .map_err(|err| err.map(|mut err| err.emit()))
-        .ok()
-        .map(|(symbol, style, _)| (symbol, style))
-}
-
-/// Non-fatally assert that `tts` is empty. Note that this function
-/// returns even when `tts` is non-empty; macros that *need* to stop
-/// compilation should call
-/// `cx.parse_sess.span_diagnostic.abort_if_errors()` (this should be
-/// done as rarely as possible).
-pub fn check_zero_tts(cx: &ExtCtxt<'_>,
-                      sp: Span,
-                      tts: TokenStream,
-                      name: &str) {
-    if !tts.is_empty() {
-        cx.span_err(sp, &format!("{} takes no arguments", name));
-    }
-}
-
-/// Interpreting `tts` as a comma-separated sequence of expressions,
-/// expect exactly one string literal, or emit an error and return `None`.
-pub fn get_single_str_from_tts(cx: &mut ExtCtxt<'_>,
-                               sp: Span,
-                               tts: TokenStream,
-                               name: &str)
-                               -> Option<String> {
-    let mut p = cx.new_parser_from_tts(tts);
-    if p.token == token::Eof {
-        cx.span_err(sp, &format!("{} takes 1 argument", name));
-        return None
-    }
-    let ret = panictry!(p.parse_expr());
-    let _ = p.eat(&token::Comma);
-
-    if p.token != token::Eof {
-        cx.span_err(sp, &format!("{} takes 1 argument", name));
-    }
-    expr_to_string(cx, ret, "argument must be a string literal").map(|(s, _)| {
-        s.to_string()
-    })
-}
-
-/// Extracts comma-separated expressions from `tts`. If there is a
-/// parsing error, emit a non-fatal error and return `None`.
-pub fn get_exprs_from_tts(cx: &mut ExtCtxt<'_>,
-                          sp: Span,
-                          tts: TokenStream) -> Option<Vec<P<ast::Expr>>> {
-    let mut p = cx.new_parser_from_tts(tts);
-    let mut es = Vec::new();
-    while p.token != token::Eof {
-        let expr = panictry!(p.parse_expr());
-
-        // Perform eager expansion on the expression.
-        // We want to be able to handle e.g., `concat!("foo", "bar")`.
-        let expr = cx.expander().fully_expand_fragment(AstFragment::Expr(expr)).make_expr();
-
-        es.push(expr);
-        if p.eat(&token::Comma) {
-            continue;
-        }
-        if p.token != token::Eof {
-            cx.span_err(sp, "expected token: `,`");
-            return None;
-        }
-    }
-    Some(es)
-}
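For illustration, a minimal sketch (not part of this diff) of how a built-in bang macro could combine the argument helpers above; `example!` and `expand_example` are hypothetical names, and `MacEager::expr` is assumed from an earlier part of this file:

    fn expand_example<'cx>(cx: &'cx mut ExtCtxt<'_>, sp: Span, tts: TokenStream)
                           -> Box<dyn MacResult + 'cx> {
        // Expect exactly one string-literal argument, e.g. `example!("hello")`.
        let s = match get_single_str_from_tts(cx, sp, tts, "example!") {
            Some(s) => s,
            // The helper has already emitted an error; recover with a dummy result.
            None => return DummyResult::any(sp),
        };
        // Re-emit the string as a literal expression using the AST builders.
        MacEager::expr(cx.expr_str(sp, Symbol::intern(&s)))
    }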
diff --git a/src/libsyntax/ext/build.rs b/src/libsyntax/ext/build.rs
deleted file mode 100644 (file)
index 8c52896..0000000
+++ /dev/null
@@ -1,639 +0,0 @@
-use crate::ast::{self, Ident, Expr, BlockCheckMode, UnOp, PatKind};
-use crate::attr;
-use crate::source_map::{respan, Spanned};
-use crate::ext::base::ExtCtxt;
-use crate::ptr::P;
-use crate::symbol::{kw, sym, Symbol};
-use crate::ThinVec;
-
-use syntax_pos::{Pos, Span};
-
-impl<'a> ExtCtxt<'a> {
-    pub fn path(&self, span: Span, strs: Vec<ast::Ident> ) -> ast::Path {
-        self.path_all(span, false, strs, vec![])
-    }
-    pub fn path_ident(&self, span: Span, id: ast::Ident) -> ast::Path {
-        self.path(span, vec![id])
-    }
-    pub fn path_global(&self, span: Span, strs: Vec<ast::Ident> ) -> ast::Path {
-        self.path_all(span, true, strs, vec![])
-    }
-    pub fn path_all(&self,
-                span: Span,
-                global: bool,
-                mut idents: Vec<ast::Ident> ,
-                args: Vec<ast::GenericArg>)
-                -> ast::Path {
-        assert!(!idents.is_empty());
-        let add_root = global && !idents[0].is_path_segment_keyword();
-        let mut segments = Vec::with_capacity(idents.len() + add_root as usize);
-        if add_root {
-            segments.push(ast::PathSegment::path_root(span));
-        }
-        let last_ident = idents.pop().unwrap();
-        segments.extend(idents.into_iter().map(|ident| {
-            ast::PathSegment::from_ident(ident.with_span_pos(span))
-        }));
-        let args = if !args.is_empty() {
-            ast::AngleBracketedArgs { args, constraints: Vec::new(), span }.into()
-        } else {
-            None
-        };
-        segments.push(ast::PathSegment {
-            ident: last_ident.with_span_pos(span),
-            id: ast::DUMMY_NODE_ID,
-            args,
-        });
-        ast::Path { span, segments }
-    }
-
-    pub fn ty_mt(&self, ty: P<ast::Ty>, mutbl: ast::Mutability) -> ast::MutTy {
-        ast::MutTy {
-            ty,
-            mutbl,
-        }
-    }
-
-    pub fn ty(&self, span: Span, kind: ast::TyKind) -> P<ast::Ty> {
-        P(ast::Ty {
-            id: ast::DUMMY_NODE_ID,
-            span,
-            kind,
-        })
-    }
-
-    pub fn ty_path(&self, path: ast::Path) -> P<ast::Ty> {
-        self.ty(path.span, ast::TyKind::Path(None, path))
-    }
-
-    // Might need to take bounds as an argument in the future, if you ever want
-    // to generate a bounded existential trait type.
-    pub fn ty_ident(&self, span: Span, ident: ast::Ident)
-        -> P<ast::Ty> {
-        self.ty_path(self.path_ident(span, ident))
-    }
-
-    pub fn anon_const(&self, span: Span, kind: ast::ExprKind) -> ast::AnonConst {
-        ast::AnonConst {
-            id: ast::DUMMY_NODE_ID,
-            value: P(ast::Expr {
-                id: ast::DUMMY_NODE_ID,
-                kind,
-                span,
-                attrs: ThinVec::new(),
-            })
-        }
-    }
-
-    pub fn const_ident(&self, span: Span, ident: ast::Ident) -> ast::AnonConst {
-        self.anon_const(span, ast::ExprKind::Path(None, self.path_ident(span, ident)))
-    }
-
-    pub fn ty_rptr(&self,
-               span: Span,
-               ty: P<ast::Ty>,
-               lifetime: Option<ast::Lifetime>,
-               mutbl: ast::Mutability)
-        -> P<ast::Ty> {
-        self.ty(span,
-                ast::TyKind::Rptr(lifetime, self.ty_mt(ty, mutbl)))
-    }
-
-    pub fn ty_ptr(&self,
-              span: Span,
-              ty: P<ast::Ty>,
-              mutbl: ast::Mutability)
-        -> P<ast::Ty> {
-        self.ty(span,
-                ast::TyKind::Ptr(self.ty_mt(ty, mutbl)))
-    }
-
-    pub fn typaram(&self,
-               span: Span,
-               ident: ast::Ident,
-               attrs: Vec<ast::Attribute>,
-               bounds: ast::GenericBounds,
-               default: Option<P<ast::Ty>>) -> ast::GenericParam {
-        ast::GenericParam {
-            ident: ident.with_span_pos(span),
-            id: ast::DUMMY_NODE_ID,
-            attrs: attrs.into(),
-            bounds,
-            kind: ast::GenericParamKind::Type {
-                default,
-            },
-            is_placeholder: false
-        }
-    }
-
-    pub fn trait_ref(&self, path: ast::Path) -> ast::TraitRef {
-        ast::TraitRef {
-            path,
-            ref_id: ast::DUMMY_NODE_ID,
-        }
-    }
-
-    pub fn poly_trait_ref(&self, span: Span, path: ast::Path) -> ast::PolyTraitRef {
-        ast::PolyTraitRef {
-            bound_generic_params: Vec::new(),
-            trait_ref: self.trait_ref(path),
-            span,
-        }
-    }
-
-    pub fn trait_bound(&self, path: ast::Path) -> ast::GenericBound {
-        ast::GenericBound::Trait(self.poly_trait_ref(path.span, path),
-                                 ast::TraitBoundModifier::None)
-    }
-
-    pub fn lifetime(&self, span: Span, ident: ast::Ident) -> ast::Lifetime {
-        ast::Lifetime { id: ast::DUMMY_NODE_ID, ident: ident.with_span_pos(span) }
-    }
-
-    pub fn lifetime_def(&self,
-                    span: Span,
-                    ident: ast::Ident,
-                    attrs: Vec<ast::Attribute>,
-                    bounds: ast::GenericBounds)
-                    -> ast::GenericParam {
-        let lifetime = self.lifetime(span, ident);
-        ast::GenericParam {
-            ident: lifetime.ident,
-            id: lifetime.id,
-            attrs: attrs.into(),
-            bounds,
-            kind: ast::GenericParamKind::Lifetime,
-            is_placeholder: false
-        }
-    }
-
-    pub fn stmt_expr(&self, expr: P<ast::Expr>) -> ast::Stmt {
-        ast::Stmt {
-            id: ast::DUMMY_NODE_ID,
-            span: expr.span,
-            kind: ast::StmtKind::Expr(expr),
-        }
-    }
-
-    pub fn stmt_let(&self, sp: Span, mutbl: bool, ident: ast::Ident,
-                ex: P<ast::Expr>) -> ast::Stmt {
-        let pat = if mutbl {
-            let binding_mode = ast::BindingMode::ByValue(ast::Mutability::Mutable);
-            self.pat_ident_binding_mode(sp, ident, binding_mode)
-        } else {
-            self.pat_ident(sp, ident)
-        };
-        let local = P(ast::Local {
-            pat,
-            ty: None,
-            init: Some(ex),
-            id: ast::DUMMY_NODE_ID,
-            span: sp,
-            attrs: ThinVec::new(),
-        });
-        ast::Stmt {
-            id: ast::DUMMY_NODE_ID,
-            kind: ast::StmtKind::Local(local),
-            span: sp,
-        }
-    }
-
-    // Generates `let _: Type;`, which is usually used for type assertions.
-    pub fn stmt_let_type_only(&self, span: Span, ty: P<ast::Ty>) -> ast::Stmt {
-        let local = P(ast::Local {
-            pat: self.pat_wild(span),
-            ty: Some(ty),
-            init: None,
-            id: ast::DUMMY_NODE_ID,
-            span,
-            attrs: ThinVec::new(),
-        });
-        ast::Stmt {
-            id: ast::DUMMY_NODE_ID,
-            kind: ast::StmtKind::Local(local),
-            span,
-        }
-    }
-
-    pub fn stmt_item(&self, sp: Span, item: P<ast::Item>) -> ast::Stmt {
-        ast::Stmt {
-            id: ast::DUMMY_NODE_ID,
-            kind: ast::StmtKind::Item(item),
-            span: sp,
-        }
-    }
-
-    pub fn block_expr(&self, expr: P<ast::Expr>) -> P<ast::Block> {
-        self.block(expr.span, vec![ast::Stmt {
-            id: ast::DUMMY_NODE_ID,
-            span: expr.span,
-            kind: ast::StmtKind::Expr(expr),
-        }])
-    }
-    pub fn block(&self, span: Span, stmts: Vec<ast::Stmt>) -> P<ast::Block> {
-        P(ast::Block {
-           stmts,
-           id: ast::DUMMY_NODE_ID,
-           rules: BlockCheckMode::Default,
-           span,
-        })
-    }
-
-    pub fn expr(&self, span: Span, kind: ast::ExprKind) -> P<ast::Expr> {
-        P(ast::Expr {
-            id: ast::DUMMY_NODE_ID,
-            kind,
-            span,
-            attrs: ThinVec::new(),
-        })
-    }
-
-    pub fn expr_path(&self, path: ast::Path) -> P<ast::Expr> {
-        self.expr(path.span, ast::ExprKind::Path(None, path))
-    }
-
-    pub fn expr_ident(&self, span: Span, id: ast::Ident) -> P<ast::Expr> {
-        self.expr_path(self.path_ident(span, id))
-    }
-    pub fn expr_self(&self, span: Span) -> P<ast::Expr> {
-        self.expr_ident(span, Ident::with_dummy_span(kw::SelfLower))
-    }
-
-    pub fn expr_binary(&self, sp: Span, op: ast::BinOpKind,
-                   lhs: P<ast::Expr>, rhs: P<ast::Expr>) -> P<ast::Expr> {
-        self.expr(sp, ast::ExprKind::Binary(Spanned { node: op, span: sp }, lhs, rhs))
-    }
-
-    pub fn expr_deref(&self, sp: Span, e: P<ast::Expr>) -> P<ast::Expr> {
-        self.expr(sp, ast::ExprKind::Unary(UnOp::Deref, e))
-    }
-
-    pub fn expr_addr_of(&self, sp: Span, e: P<ast::Expr>) -> P<ast::Expr> {
-        self.expr(sp, ast::ExprKind::AddrOf(ast::Mutability::Immutable, e))
-    }
-
-    pub fn expr_call(
-        &self, span: Span, expr: P<ast::Expr>, args: Vec<P<ast::Expr>>,
-    ) -> P<ast::Expr> {
-        self.expr(span, ast::ExprKind::Call(expr, args))
-    }
-    pub fn expr_call_ident(&self, span: Span, id: ast::Ident,
-                       args: Vec<P<ast::Expr>>) -> P<ast::Expr> {
-        self.expr(span, ast::ExprKind::Call(self.expr_ident(span, id), args))
-    }
-    pub fn expr_call_global(&self, sp: Span, fn_path: Vec<ast::Ident> ,
-                      args: Vec<P<ast::Expr>> ) -> P<ast::Expr> {
-        let pathexpr = self.expr_path(self.path_global(sp, fn_path));
-        self.expr_call(sp, pathexpr, args)
-    }
-    pub fn expr_method_call(&self, span: Span,
-                        expr: P<ast::Expr>,
-                        ident: ast::Ident,
-                        mut args: Vec<P<ast::Expr>> ) -> P<ast::Expr> {
-        args.insert(0, expr);
-        let segment = ast::PathSegment::from_ident(ident.with_span_pos(span));
-        self.expr(span, ast::ExprKind::MethodCall(segment, args))
-    }
-    pub fn expr_block(&self, b: P<ast::Block>) -> P<ast::Expr> {
-        self.expr(b.span, ast::ExprKind::Block(b, None))
-    }
-    pub fn field_imm(&self, span: Span, ident: Ident, e: P<ast::Expr>) -> ast::Field {
-        ast::Field {
-            ident: ident.with_span_pos(span),
-            expr: e,
-            span,
-            is_shorthand: false,
-            attrs: ThinVec::new(),
-            id: ast::DUMMY_NODE_ID,
-            is_placeholder: false,
-        }
-    }
-    pub fn expr_struct(
-        &self, span: Span, path: ast::Path, fields: Vec<ast::Field>
-    ) -> P<ast::Expr> {
-        self.expr(span, ast::ExprKind::Struct(path, fields, None))
-    }
-    pub fn expr_struct_ident(&self, span: Span,
-                         id: ast::Ident, fields: Vec<ast::Field>) -> P<ast::Expr> {
-        self.expr_struct(span, self.path_ident(span, id), fields)
-    }
-
-    pub fn expr_lit(&self, span: Span, lit_kind: ast::LitKind) -> P<ast::Expr> {
-        let lit = ast::Lit::from_lit_kind(lit_kind, span);
-        self.expr(span, ast::ExprKind::Lit(lit))
-    }
-    pub fn expr_usize(&self, span: Span, i: usize) -> P<ast::Expr> {
-        self.expr_lit(span, ast::LitKind::Int(i as u128,
-                                              ast::LitIntType::Unsigned(ast::UintTy::Usize)))
-    }
-    pub fn expr_u32(&self, sp: Span, u: u32) -> P<ast::Expr> {
-        self.expr_lit(sp, ast::LitKind::Int(u as u128,
-                                            ast::LitIntType::Unsigned(ast::UintTy::U32)))
-    }
-    pub fn expr_bool(&self, sp: Span, value: bool) -> P<ast::Expr> {
-        self.expr_lit(sp, ast::LitKind::Bool(value))
-    }
-
-    pub fn expr_vec(&self, sp: Span, exprs: Vec<P<ast::Expr>>) -> P<ast::Expr> {
-        self.expr(sp, ast::ExprKind::Array(exprs))
-    }
-    pub fn expr_vec_slice(&self, sp: Span, exprs: Vec<P<ast::Expr>>) -> P<ast::Expr> {
-        self.expr_addr_of(sp, self.expr_vec(sp, exprs))
-    }
-    pub fn expr_str(&self, sp: Span, s: Symbol) -> P<ast::Expr> {
-        self.expr_lit(sp, ast::LitKind::Str(s, ast::StrStyle::Cooked))
-    }
-
-    pub fn expr_cast(&self, sp: Span, expr: P<ast::Expr>, ty: P<ast::Ty>) -> P<ast::Expr> {
-        self.expr(sp, ast::ExprKind::Cast(expr, ty))
-    }
-
-    pub fn expr_some(&self, sp: Span, expr: P<ast::Expr>) -> P<ast::Expr> {
-        let some = self.std_path(&[sym::option, sym::Option, sym::Some]);
-        self.expr_call_global(sp, some, vec![expr])
-    }
-
-    pub fn expr_tuple(&self, sp: Span, exprs: Vec<P<ast::Expr>>) -> P<ast::Expr> {
-        self.expr(sp, ast::ExprKind::Tup(exprs))
-    }
-
-    pub fn expr_fail(&self, span: Span, msg: Symbol) -> P<ast::Expr> {
-        let loc = self.source_map().lookup_char_pos(span.lo());
-        let expr_file = self.expr_str(span, Symbol::intern(&loc.file.name.to_string()));
-        let expr_line = self.expr_u32(span, loc.line as u32);
-        let expr_col = self.expr_u32(span, loc.col.to_usize() as u32 + 1);
-        let expr_loc_tuple = self.expr_tuple(span, vec![expr_file, expr_line, expr_col]);
-        let expr_loc_ptr = self.expr_addr_of(span, expr_loc_tuple);
-        self.expr_call_global(
-            span,
-            [sym::std, sym::rt, sym::begin_panic].iter().map(|s| Ident::new(*s, span)).collect(),
-            vec![
-                self.expr_str(span, msg),
-                expr_loc_ptr])
-    }
-
-    pub fn expr_unreachable(&self, span: Span) -> P<ast::Expr> {
-        self.expr_fail(span, Symbol::intern("internal error: entered unreachable code"))
-    }
-
-    pub fn expr_ok(&self, sp: Span, expr: P<ast::Expr>) -> P<ast::Expr> {
-        let ok = self.std_path(&[sym::result, sym::Result, sym::Ok]);
-        self.expr_call_global(sp, ok, vec![expr])
-    }
-
-    pub fn expr_try(&self, sp: Span, head: P<ast::Expr>) -> P<ast::Expr> {
-        let ok = self.std_path(&[sym::result, sym::Result, sym::Ok]);
-        let ok_path = self.path_global(sp, ok);
-        let err = self.std_path(&[sym::result, sym::Result, sym::Err]);
-        let err_path = self.path_global(sp, err);
-
-        let binding_variable = self.ident_of("__try_var", sp);
-        let binding_pat = self.pat_ident(sp, binding_variable);
-        let binding_expr = self.expr_ident(sp, binding_variable);
-
-        // `Ok(__try_var)` pattern
-        let ok_pat = self.pat_tuple_struct(sp, ok_path, vec![binding_pat.clone()]);
-
-        // `Err(__try_var)` (pattern and expression respectively)
-        let err_pat = self.pat_tuple_struct(sp, err_path.clone(), vec![binding_pat]);
-        let err_inner_expr = self.expr_call(sp, self.expr_path(err_path),
-                                            vec![binding_expr.clone()]);
-        // `return Err(__try_var)`
-        let err_expr = self.expr(sp, ast::ExprKind::Ret(Some(err_inner_expr)));
-
-        // `Ok(__try_var) => __try_var`
-        let ok_arm = self.arm(sp, ok_pat, binding_expr);
-        // `Err(__try_var) => return Err(__try_var)`
-        let err_arm = self.arm(sp, err_pat, err_expr);
-
-        // `match head { Ok() => ..., Err() => ... }`
-        self.expr_match(sp, head, vec![ok_arm, err_arm])
-    }
-
-
-    pub fn pat(&self, span: Span, kind: PatKind) -> P<ast::Pat> {
-        P(ast::Pat { id: ast::DUMMY_NODE_ID, kind, span })
-    }
-    pub fn pat_wild(&self, span: Span) -> P<ast::Pat> {
-        self.pat(span, PatKind::Wild)
-    }
-    pub fn pat_lit(&self, span: Span, expr: P<ast::Expr>) -> P<ast::Pat> {
-        self.pat(span, PatKind::Lit(expr))
-    }
-    pub fn pat_ident(&self, span: Span, ident: ast::Ident) -> P<ast::Pat> {
-        let binding_mode = ast::BindingMode::ByValue(ast::Mutability::Immutable);
-        self.pat_ident_binding_mode(span, ident, binding_mode)
-    }
-
-    pub fn pat_ident_binding_mode(&self,
-                              span: Span,
-                              ident: ast::Ident,
-                              bm: ast::BindingMode) -> P<ast::Pat> {
-        let pat = PatKind::Ident(bm, ident.with_span_pos(span), None);
-        self.pat(span, pat)
-    }
-    pub fn pat_path(&self, span: Span, path: ast::Path) -> P<ast::Pat> {
-        self.pat(span, PatKind::Path(None, path))
-    }
-    pub fn pat_tuple_struct(&self, span: Span, path: ast::Path,
-                        subpats: Vec<P<ast::Pat>>) -> P<ast::Pat> {
-        self.pat(span, PatKind::TupleStruct(path, subpats))
-    }
-    pub fn pat_struct(&self, span: Span, path: ast::Path,
-                      field_pats: Vec<ast::FieldPat>) -> P<ast::Pat> {
-        self.pat(span, PatKind::Struct(path, field_pats, false))
-    }
-    pub fn pat_tuple(&self, span: Span, pats: Vec<P<ast::Pat>>) -> P<ast::Pat> {
-        self.pat(span, PatKind::Tuple(pats))
-    }
-
-    pub fn pat_some(&self, span: Span, pat: P<ast::Pat>) -> P<ast::Pat> {
-        let some = self.std_path(&[sym::option, sym::Option, sym::Some]);
-        let path = self.path_global(span, some);
-        self.pat_tuple_struct(span, path, vec![pat])
-    }
-
-    pub fn pat_none(&self, span: Span) -> P<ast::Pat> {
-        let some = self.std_path(&[sym::option, sym::Option, sym::None]);
-        let path = self.path_global(span, some);
-        self.pat_path(span, path)
-    }
-
-    pub fn pat_ok(&self, span: Span, pat: P<ast::Pat>) -> P<ast::Pat> {
-        let some = self.std_path(&[sym::result, sym::Result, sym::Ok]);
-        let path = self.path_global(span, some);
-        self.pat_tuple_struct(span, path, vec![pat])
-    }
-
-    pub fn pat_err(&self, span: Span, pat: P<ast::Pat>) -> P<ast::Pat> {
-        let some = self.std_path(&[sym::result, sym::Result, sym::Err]);
-        let path = self.path_global(span, some);
-        self.pat_tuple_struct(span, path, vec![pat])
-    }
-
-    pub fn arm(&self, span: Span, pat: P<ast::Pat>, expr: P<ast::Expr>) -> ast::Arm {
-        ast::Arm {
-            attrs: vec![],
-            pat,
-            guard: None,
-            body: expr,
-            span,
-            id: ast::DUMMY_NODE_ID,
-            is_placeholder: false,
-        }
-    }
-
-    pub fn arm_unreachable(&self, span: Span) -> ast::Arm {
-        self.arm(span, self.pat_wild(span), self.expr_unreachable(span))
-    }
-
-    pub fn expr_match(&self, span: Span, arg: P<ast::Expr>, arms: Vec<ast::Arm>) -> P<Expr> {
-        self.expr(span, ast::ExprKind::Match(arg, arms))
-    }
-
-    pub fn expr_if(&self, span: Span, cond: P<ast::Expr>,
-               then: P<ast::Expr>, els: Option<P<ast::Expr>>) -> P<ast::Expr> {
-        let els = els.map(|x| self.expr_block(self.block_expr(x)));
-        self.expr(span, ast::ExprKind::If(cond, self.block_expr(then), els))
-    }
-
-    pub fn lambda_fn_decl(&self,
-                      span: Span,
-                      fn_decl: P<ast::FnDecl>,
-                      body: P<ast::Expr>,
-                      fn_decl_span: Span) // span of the `|...|` part
-                      -> P<ast::Expr> {
-        self.expr(span, ast::ExprKind::Closure(ast::CaptureBy::Ref,
-                                               ast::IsAsync::NotAsync,
-                                               ast::Movability::Movable,
-                                               fn_decl,
-                                               body,
-                                               fn_decl_span))
-    }
-
-    pub fn lambda(&self,
-              span: Span,
-              ids: Vec<ast::Ident>,
-              body: P<ast::Expr>)
-              -> P<ast::Expr> {
-        let fn_decl = self.fn_decl(
-            ids.iter().map(|id| self.param(span, *id, self.ty(span, ast::TyKind::Infer))).collect(),
-            ast::FunctionRetTy::Default(span));
-
-        // FIXME -- We are using `span` as the span of the `|...|`
-        // part of the lambda, but it probably (maybe?) corresponds to
-        // the entire lambda body. Probably we should extend the API
-        // here, but that's not entirely clear.
-        self.expr(span, ast::ExprKind::Closure(ast::CaptureBy::Ref,
-                                               ast::IsAsync::NotAsync,
-                                               ast::Movability::Movable,
-                                               fn_decl,
-                                               body,
-                                               span))
-    }
-
-    pub fn lambda0(&self, span: Span, body: P<ast::Expr>) -> P<ast::Expr> {
-        self.lambda(span, Vec::new(), body)
-    }
-
-    pub fn lambda1(&self, span: Span, body: P<ast::Expr>, ident: ast::Ident) -> P<ast::Expr> {
-        self.lambda(span, vec![ident], body)
-    }
-
-    pub fn lambda_stmts_1(&self, span: Span, stmts: Vec<ast::Stmt>,
-                      ident: ast::Ident) -> P<ast::Expr> {
-        self.lambda1(span, self.expr_block(self.block(span, stmts)), ident)
-    }
-
-    pub fn param(&self, span: Span, ident: ast::Ident, ty: P<ast::Ty>) -> ast::Param {
-        let arg_pat = self.pat_ident(span, ident);
-        ast::Param {
-            attrs: ThinVec::default(),
-            id: ast::DUMMY_NODE_ID,
-            pat: arg_pat,
-            span,
-            ty,
-            is_placeholder: false,
-        }
-    }
-
-    // FIXME: unused `self`
-    pub fn fn_decl(&self, inputs: Vec<ast::Param>, output: ast::FunctionRetTy) -> P<ast::FnDecl> {
-        P(ast::FnDecl {
-            inputs,
-            output,
-        })
-    }
-
-    pub fn item(&self, span: Span, name: Ident,
-            attrs: Vec<ast::Attribute>, kind: ast::ItemKind) -> P<ast::Item> {
-        // FIXME: Would be nice if our generated code didn't violate
-        // Rust coding conventions
-        P(ast::Item {
-            ident: name,
-            attrs,
-            id: ast::DUMMY_NODE_ID,
-            kind,
-            vis: respan(span.shrink_to_lo(), ast::VisibilityKind::Inherited),
-            span,
-            tokens: None,
-        })
-    }
-
-    pub fn variant(&self, span: Span, ident: Ident, tys: Vec<P<ast::Ty>> ) -> ast::Variant {
-        let fields: Vec<_> = tys.into_iter().map(|ty| {
-            ast::StructField {
-                span: ty.span,
-                ty,
-                ident: None,
-                vis: respan(span.shrink_to_lo(), ast::VisibilityKind::Inherited),
-                attrs: Vec::new(),
-                id: ast::DUMMY_NODE_ID,
-                is_placeholder: false,
-            }
-        }).collect();
-
-        let vdata = if fields.is_empty() {
-            ast::VariantData::Unit(ast::DUMMY_NODE_ID)
-        } else {
-            ast::VariantData::Tuple(fields, ast::DUMMY_NODE_ID)
-        };
-
-        ast::Variant {
-            attrs: Vec::new(),
-            data: vdata,
-            disr_expr: None,
-            id: ast::DUMMY_NODE_ID,
-            ident,
-            span,
-            is_placeholder: false,
-        }
-    }
-
-    pub fn item_static(&self,
-                   span: Span,
-                   name: Ident,
-                   ty: P<ast::Ty>,
-                   mutbl: ast::Mutability,
-                   expr: P<ast::Expr>)
-                   -> P<ast::Item> {
-        self.item(span, name, Vec::new(), ast::ItemKind::Static(ty, mutbl, expr))
-    }
-
-    pub fn item_const(&self,
-                  span: Span,
-                  name: Ident,
-                  ty: P<ast::Ty>,
-                  expr: P<ast::Expr>)
-                  -> P<ast::Item> {
-        self.item(span, name, Vec::new(), ast::ItemKind::Const(ty, expr))
-    }
-
-    pub fn attribute(&self, mi: ast::MetaItem) -> ast::Attribute {
-        attr::mk_attr_outer(mi)
-    }
-
-    pub fn meta_word(&self, sp: Span, w: ast::Name) -> ast::MetaItem {
-        attr::mk_word_item(Ident::new(w, sp))
-    }
-}
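For illustration, a minimal sketch (not part of this diff) of composing the AST builders above inside an expander; `cx: &ExtCtxt<'_>` and `sp: Span` are assumed to be in scope:

    // Roughly builds the AST for: match Some(42) { Some(x) => x, _ => <panic> }
    let scrutinee = cx.expr_some(sp, cx.expr_usize(sp, 42));
    let x = cx.ident_of("x", sp);
    let some_arm = cx.arm(sp, cx.pat_some(sp, cx.pat_ident(sp, x)), cx.expr_ident(sp, x));
    let wild_arm = cx.arm_unreachable(sp);
    let m = cx.expr_match(sp, scrutinee, vec![some_arm, wild_arm]);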
diff --git a/src/libsyntax/ext/expand.rs b/src/libsyntax/ext/expand.rs
deleted file mode 100644 (file)
index bbd8da2..0000000
+++ /dev/null
@@ -1,1579 +0,0 @@
-use crate::ast::{self, AttrItem, Block, Ident, LitKind, NodeId, PatKind, Path};
-use crate::ast::{MacStmtStyle, StmtKind, ItemKind};
-use crate::attr::{self, HasAttrs};
-use crate::source_map::respan;
-use crate::config::StripUnconfigured;
-use crate::ext::base::*;
-use crate::ext::proc_macro::{collect_derives, MarkAttrs};
-use crate::ext::hygiene::{ExpnId, SyntaxContext, ExpnData, ExpnKind};
-use crate::ext::mbe::macro_rules::annotate_err_with_kind;
-use crate::ext::placeholders::{placeholder, PlaceholderExpander};
-use crate::feature_gate::{self, Features, GateIssue, is_builtin_attr, emit_feature_err};
-use crate::mut_visit::*;
-use crate::parse::{DirectoryOwnership, PResult, ParseSess};
-use crate::parse::token;
-use crate::parse::parser::Parser;
-use crate::ptr::P;
-use crate::symbol::{sym, Symbol};
-use crate::tokenstream::{TokenStream, TokenTree};
-use crate::visit::{self, Visitor};
-use crate::util::map_in_place::MapInPlace;
-
-use errors::{Applicability, FatalError};
-use smallvec::{smallvec, SmallVec};
-use syntax_pos::{Span, DUMMY_SP, FileName};
-
-use rustc_data_structures::fx::FxHashMap;
-use rustc_data_structures::sync::Lrc;
-use std::io::ErrorKind;
-use std::{iter, mem, slice};
-use std::ops::DerefMut;
-use std::rc::Rc;
-use std::path::PathBuf;
-
-macro_rules! ast_fragments {
-    (
-        $($Kind:ident($AstTy:ty) {
-            $kind_name:expr;
-            $(one fn $mut_visit_ast:ident; fn $visit_ast:ident;)?
-            $(many fn $flat_map_ast_elt:ident; fn $visit_ast_elt:ident;)?
-            fn $make_ast:ident;
-        })*
-    ) => {
-        /// A fragment of AST that can be produced by a single macro expansion.
-        /// Can also serve as an input and intermediate result for macro expansion operations.
-        pub enum AstFragment {
-            OptExpr(Option<P<ast::Expr>>),
-            $($Kind($AstTy),)*
-        }
-
-        /// "Discriminant" of an AST fragment.
-        #[derive(Copy, Clone, PartialEq, Eq)]
-        pub enum AstFragmentKind {
-            OptExpr,
-            $($Kind,)*
-        }
-
-        impl AstFragmentKind {
-            pub fn name(self) -> &'static str {
-                match self {
-                    AstFragmentKind::OptExpr => "expression",
-                    $(AstFragmentKind::$Kind => $kind_name,)*
-                }
-            }
-
-            fn make_from<'a>(self, result: Box<dyn MacResult + 'a>) -> Option<AstFragment> {
-                match self {
-                    AstFragmentKind::OptExpr =>
-                        result.make_expr().map(Some).map(AstFragment::OptExpr),
-                    $(AstFragmentKind::$Kind => result.$make_ast().map(AstFragment::$Kind),)*
-                }
-            }
-        }
-
-        impl AstFragment {
-            pub fn make_opt_expr(self) -> Option<P<ast::Expr>> {
-                match self {
-                    AstFragment::OptExpr(expr) => expr,
-                    _ => panic!("AstFragment::make_* called on the wrong kind of fragment"),
-                }
-            }
-
-            $(pub fn $make_ast(self) -> $AstTy {
-                match self {
-                    AstFragment::$Kind(ast) => ast,
-                    _ => panic!("AstFragment::make_* called on the wrong kind of fragment"),
-                }
-            })*
-
-            pub fn mut_visit_with<F: MutVisitor>(&mut self, vis: &mut F) {
-                match self {
-                    AstFragment::OptExpr(opt_expr) => {
-                        visit_clobber(opt_expr, |opt_expr| {
-                            if let Some(expr) = opt_expr {
-                                vis.filter_map_expr(expr)
-                            } else {
-                                None
-                            }
-                        });
-                    }
-                    $($(AstFragment::$Kind(ast) => vis.$mut_visit_ast(ast),)?)*
-                    $($(AstFragment::$Kind(ast) =>
-                        ast.flat_map_in_place(|ast| vis.$flat_map_ast_elt(ast)),)?)*
-                }
-            }
-
-            pub fn visit_with<'a, V: Visitor<'a>>(&'a self, visitor: &mut V) {
-                match *self {
-                    AstFragment::OptExpr(Some(ref expr)) => visitor.visit_expr(expr),
-                    AstFragment::OptExpr(None) => {}
-                    $($(AstFragment::$Kind(ref ast) => visitor.$visit_ast(ast),)?)*
-                    $($(AstFragment::$Kind(ref ast) => for ast_elt in &ast[..] {
-                        visitor.$visit_ast_elt(ast_elt);
-                    })?)*
-                }
-            }
-        }
-
-        impl<'a> MacResult for crate::ext::mbe::macro_rules::ParserAnyMacro<'a> {
-            $(fn $make_ast(self: Box<crate::ext::mbe::macro_rules::ParserAnyMacro<'a>>)
-                           -> Option<$AstTy> {
-                Some(self.make(AstFragmentKind::$Kind).$make_ast())
-            })*
-        }
-    }
-}
-
-ast_fragments! {
-    Expr(P<ast::Expr>) { "expression"; one fn visit_expr; fn visit_expr; fn make_expr; }
-    Pat(P<ast::Pat>) { "pattern"; one fn visit_pat; fn visit_pat; fn make_pat; }
-    Ty(P<ast::Ty>) { "type"; one fn visit_ty; fn visit_ty; fn make_ty; }
-    Stmts(SmallVec<[ast::Stmt; 1]>) {
-        "statement"; many fn flat_map_stmt; fn visit_stmt; fn make_stmts;
-    }
-    Items(SmallVec<[P<ast::Item>; 1]>) {
-        "item"; many fn flat_map_item; fn visit_item; fn make_items;
-    }
-    TraitItems(SmallVec<[ast::TraitItem; 1]>) {
-        "trait item"; many fn flat_map_trait_item; fn visit_trait_item; fn make_trait_items;
-    }
-    ImplItems(SmallVec<[ast::ImplItem; 1]>) {
-        "impl item"; many fn flat_map_impl_item; fn visit_impl_item; fn make_impl_items;
-    }
-    ForeignItems(SmallVec<[ast::ForeignItem; 1]>) {
-        "foreign item";
-        many fn flat_map_foreign_item;
-        fn visit_foreign_item;
-        fn make_foreign_items;
-    }
-    Arms(SmallVec<[ast::Arm; 1]>) {
-        "match arm"; many fn flat_map_arm; fn visit_arm; fn make_arms;
-    }
-    Fields(SmallVec<[ast::Field; 1]>) {
-        "field expression"; many fn flat_map_field; fn visit_field; fn make_fields;
-    }
-    FieldPats(SmallVec<[ast::FieldPat; 1]>) {
-        "field pattern";
-        many fn flat_map_field_pattern;
-        fn visit_field_pattern;
-        fn make_field_patterns;
-    }
-    GenericParams(SmallVec<[ast::GenericParam; 1]>) {
-        "generic parameter";
-        many fn flat_map_generic_param;
-        fn visit_generic_param;
-        fn make_generic_params;
-    }
-    Params(SmallVec<[ast::Param; 1]>) {
-        "function parameter"; many fn flat_map_param; fn visit_param; fn make_params;
-    }
-    StructFields(SmallVec<[ast::StructField; 1]>) {
-        "field";
-        many fn flat_map_struct_field;
-        fn visit_struct_field;
-        fn make_struct_fields;
-    }
-    Variants(SmallVec<[ast::Variant; 1]>) {
-        "variant"; many fn flat_map_variant; fn visit_variant; fn make_variants;
-    }
-}
-
-impl AstFragmentKind {
-    fn dummy(self, span: Span) -> AstFragment {
-        self.make_from(DummyResult::any(span)).expect("couldn't create a dummy AST fragment")
-    }
-
-    fn expect_from_annotatables<I: IntoIterator<Item = Annotatable>>(self, items: I)
-                                                                     -> AstFragment {
-        let mut items = items.into_iter();
-        match self {
-            AstFragmentKind::Arms =>
-                AstFragment::Arms(items.map(Annotatable::expect_arm).collect()),
-            AstFragmentKind::Fields =>
-                AstFragment::Fields(items.map(Annotatable::expect_field).collect()),
-            AstFragmentKind::FieldPats =>
-                AstFragment::FieldPats(items.map(Annotatable::expect_field_pattern).collect()),
-            AstFragmentKind::GenericParams =>
-                AstFragment::GenericParams(items.map(Annotatable::expect_generic_param).collect()),
-            AstFragmentKind::Params =>
-                AstFragment::Params(items.map(Annotatable::expect_param).collect()),
-            AstFragmentKind::StructFields => AstFragment::StructFields(
-                items.map(Annotatable::expect_struct_field).collect()
-            ),
-            AstFragmentKind::Variants =>
-                AstFragment::Variants(items.map(Annotatable::expect_variant).collect()),
-            AstFragmentKind::Items =>
-                AstFragment::Items(items.map(Annotatable::expect_item).collect()),
-            AstFragmentKind::ImplItems =>
-                AstFragment::ImplItems(items.map(Annotatable::expect_impl_item).collect()),
-            AstFragmentKind::TraitItems =>
-                AstFragment::TraitItems(items.map(Annotatable::expect_trait_item).collect()),
-            AstFragmentKind::ForeignItems =>
-                AstFragment::ForeignItems(items.map(Annotatable::expect_foreign_item).collect()),
-            AstFragmentKind::Stmts =>
-                AstFragment::Stmts(items.map(Annotatable::expect_stmt).collect()),
-            AstFragmentKind::Expr => AstFragment::Expr(
-                items.next().expect("expected exactly one expression").expect_expr()
-            ),
-            AstFragmentKind::OptExpr =>
-                AstFragment::OptExpr(items.next().map(Annotatable::expect_expr)),
-            AstFragmentKind::Pat | AstFragmentKind::Ty =>
-                panic!("patterns and types aren't annotatable"),
-        }
-    }
-}
-
-pub struct Invocation {
-    pub kind: InvocationKind,
-    pub fragment_kind: AstFragmentKind,
-    pub expansion_data: ExpansionData,
-}
-
-pub enum InvocationKind {
-    Bang {
-        mac: ast::Mac,
-        span: Span,
-    },
-    Attr {
-        attr: ast::Attribute,
-        item: Annotatable,
-        // Required for resolving derive helper attributes.
-        derives: Vec<Path>,
-        // We temporarily report errors for attribute macros placed after derives
-        after_derive: bool,
-    },
-    Derive {
-        path: Path,
-        item: Annotatable,
-    },
-    /// "Invocation" that contains all derives from an item,
-    /// broken into multiple `Derive` invocations when expanded.
-    /// FIXME: Find a way to remove it.
-    DeriveContainer {
-        derives: Vec<Path>,
-        item: Annotatable,
-    },
-}
-
-impl Invocation {
-    pub fn span(&self) -> Span {
-        match &self.kind {
-            InvocationKind::Bang { span, .. } => *span,
-            InvocationKind::Attr { attr, .. } => attr.span,
-            InvocationKind::Derive { path, .. } => path.span,
-            InvocationKind::DeriveContainer { item, .. } => item.span(),
-        }
-    }
-}
-
-pub struct MacroExpander<'a, 'b> {
-    pub cx: &'a mut ExtCtxt<'b>,
-    monotonic: bool, // cf. `cx.monotonic_expander()`
-}
-
-impl<'a, 'b> MacroExpander<'a, 'b> {
-    pub fn new(cx: &'a mut ExtCtxt<'b>, monotonic: bool) -> Self {
-        MacroExpander { cx, monotonic }
-    }
-
-    pub fn expand_crate(&mut self, mut krate: ast::Crate) -> ast::Crate {
-        let mut module = ModuleData {
-            mod_path: vec![Ident::from_str(&self.cx.ecfg.crate_name)],
-            directory: match self.cx.source_map().span_to_unmapped_path(krate.span) {
-                FileName::Real(path) => path,
-                other => PathBuf::from(other.to_string()),
-            },
-        };
-        module.directory.pop();
-        self.cx.root_path = module.directory.clone();
-        self.cx.current_expansion.module = Rc::new(module);
-
-        let orig_mod_span = krate.module.inner;
-
-        let krate_item = AstFragment::Items(smallvec![P(ast::Item {
-            attrs: krate.attrs,
-            span: krate.span,
-            kind: ast::ItemKind::Mod(krate.module),
-            ident: Ident::invalid(),
-            id: ast::DUMMY_NODE_ID,
-            vis: respan(krate.span.shrink_to_lo(), ast::VisibilityKind::Public),
-            tokens: None,
-        })]);
-
-        match self.fully_expand_fragment(krate_item).make_items().pop().map(P::into_inner) {
-            Some(ast::Item { attrs, kind: ast::ItemKind::Mod(module), .. }) => {
-                krate.attrs = attrs;
-                krate.module = module;
-            },
-            None => {
-                // Resolution failed so we return an empty expansion
-                krate.attrs = vec![];
-                krate.module = ast::Mod {
-                    inner: orig_mod_span,
-                    items: vec![],
-                    inline: true,
-                };
-            },
-            _ => unreachable!(),
-        };
-        self.cx.trace_macros_diag();
-        krate
-    }
-
-    // Recursively expand all macro invocations in this AST fragment.
-    pub fn fully_expand_fragment(&mut self, input_fragment: AstFragment) -> AstFragment {
-        let orig_expansion_data = self.cx.current_expansion.clone();
-        self.cx.current_expansion.depth = 0;
-
-        // Collect all macro invocations and replace them with placeholders.
-        let (mut fragment_with_placeholders, mut invocations)
-            = self.collect_invocations(input_fragment, &[]);
-
-        // Optimization: if we resolve all imports now,
-        // we'll be able to immediately resolve most imported macros.
-        self.resolve_imports();
-
-        // Resolve paths in all invocations and produce output expanded fragments for them, but
-        // do not insert them into our input AST fragment yet; only store them in `expanded_fragments`.
-        // The output fragments also go through expansion recursively until no invocations are left.
-        // Unresolved macros produce dummy outputs as a recovery measure.
-        invocations.reverse();
-        let mut expanded_fragments = Vec::new();
-        let mut all_derive_placeholders: FxHashMap<ExpnId, Vec<_>> = FxHashMap::default();
-        let mut undetermined_invocations = Vec::new();
-        let (mut progress, mut force) = (false, !self.monotonic);
-        loop {
-            let invoc = if let Some(invoc) = invocations.pop() {
-                invoc
-            } else {
-                self.resolve_imports();
-                if undetermined_invocations.is_empty() { break }
-                invocations = mem::take(&mut undetermined_invocations);
-                force = !mem::replace(&mut progress, false);
-                continue
-            };
-
-            let eager_expansion_root =
-                if self.monotonic { invoc.expansion_data.id } else { orig_expansion_data.id };
-            let res = match self.cx.resolver.resolve_macro_invocation(
-                &invoc, eager_expansion_root, force
-            ) {
-                Ok(res) => res,
-                Err(Indeterminate) => {
-                    undetermined_invocations.push(invoc);
-                    continue
-                }
-            };
-
-            progress = true;
-            let ExpansionData { depth, id: expn_id, .. } = invoc.expansion_data;
-            self.cx.current_expansion = invoc.expansion_data.clone();
-
-            // FIXME(jseyfried): Refactor out the following logic
-            let (expanded_fragment, new_invocations) = match res {
-                InvocationRes::Single(ext) => {
-                    let fragment = self.expand_invoc(invoc, &ext.kind);
-                    self.collect_invocations(fragment, &[])
-                }
-                InvocationRes::DeriveContainer(exts) => {
-                    let (derives, item) = match invoc.kind {
-                        InvocationKind::DeriveContainer { derives, item } => (derives, item),
-                        _ => unreachable!(),
-                    };
-                    if !item.derive_allowed() {
-                        let attr = attr::find_by_name(item.attrs(), sym::derive)
-                            .expect("`derive` attribute should exist");
-                        let span = attr.span;
-                        let mut err = self.cx.struct_span_err(span,
-                            "`derive` may only be applied to structs, enums and unions");
-                        if let ast::AttrStyle::Inner = attr.style {
-                            let trait_list = derives.iter()
-                                .map(|t| t.to_string()).collect::<Vec<_>>();
-                            let suggestion = format!("#[derive({})]", trait_list.join(", "));
-                            err.span_suggestion(
-                                span, "try an outer attribute", suggestion,
-                                // We don't 𝑘𝑛𝑜𝑤 that the following item is an ADT
-                                Applicability::MaybeIncorrect
-                            );
-                        }
-                        err.emit();
-                    }
-
-                    let mut item = self.fully_configure(item);
-                    item.visit_attrs(|attrs| attrs.retain(|a| a.path != sym::derive));
-                    let mut helper_attrs = Vec::new();
-                    let mut has_copy = false;
-                    for ext in exts {
-                        helper_attrs.extend(&ext.helper_attrs);
-                        has_copy |= ext.is_derive_copy;
-                    }
-                    // Mark derive helpers inside this item as known and used.
-                    // FIXME: This is a hack, derive helpers should be integrated with regular name
-                    // resolution instead. For example, helpers introduced by a derive container
-                    // can be in scope for all code produced by that container's expansion.
-                    item.visit_with(&mut MarkAttrs(&helper_attrs));
-                    if has_copy {
-                        self.cx.resolver.add_derives(invoc.expansion_data.id, SpecialDerives::COPY);
-                    }
-
-                    let derive_placeholders =
-                        all_derive_placeholders.entry(invoc.expansion_data.id).or_default();
-                    derive_placeholders.reserve(derives.len());
-                    invocations.reserve(derives.len());
-                    for path in derives {
-                        let expn_id = ExpnId::fresh(None);
-                        derive_placeholders.push(NodeId::placeholder_from_expn_id(expn_id));
-                        invocations.push(Invocation {
-                            kind: InvocationKind::Derive { path, item: item.clone() },
-                            fragment_kind: invoc.fragment_kind,
-                            expansion_data: ExpansionData {
-                                id: expn_id,
-                                ..invoc.expansion_data.clone()
-                            },
-                        });
-                    }
-                    let fragment = invoc.fragment_kind
-                        .expect_from_annotatables(::std::iter::once(item));
-                    self.collect_invocations(fragment, derive_placeholders)
-                }
-            };
-
-            if expanded_fragments.len() < depth {
-                expanded_fragments.push(Vec::new());
-            }
-            expanded_fragments[depth - 1].push((expn_id, expanded_fragment));
-            if !self.cx.ecfg.single_step {
-                invocations.extend(new_invocations.into_iter().rev());
-            }
-        }
-
-        self.cx.current_expansion = orig_expansion_data;
-
-        // Finally incorporate all the expanded macros into the input AST fragment.
-        let mut placeholder_expander = PlaceholderExpander::new(self.cx, self.monotonic);
-        while let Some(expanded_fragments) = expanded_fragments.pop() {
-            for (expn_id, expanded_fragment) in expanded_fragments.into_iter().rev() {
-                let derive_placeholders =
-                    all_derive_placeholders.remove(&expn_id).unwrap_or_else(Vec::new);
-                placeholder_expander.add(NodeId::placeholder_from_expn_id(expn_id),
-                                         expanded_fragment, derive_placeholders);
-            }
-        }
-        fragment_with_placeholders.mut_visit_with(&mut placeholder_expander);
-        fragment_with_placeholders
-    }
-
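For a user-level illustration of why the loop above must iterate: a macro's output can itself contain macro invocations, which are collected as fresh invocations and expanded on a later pass. A minimal sketch:

    // `outer!()` first expands to `inner!()`; that invocation is replaced by a
    // placeholder, collected, and expanded to `42` on a subsequent iteration.
    macro_rules! inner { () => { 42 } }
    macro_rules! outer { () => { inner!() } }

    fn main() {
        let x = outer!();
        assert_eq!(x, 42);
    }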
-    fn resolve_imports(&mut self) {
-        if self.monotonic {
-            self.cx.resolver.resolve_imports();
-        }
-    }
-
-    /// Collects all macro invocations reachable at this time in this AST fragment and replaces
-    /// them with "placeholders" - dummy macro invocations with specially crafted `NodeId`s.
-    /// Then calls into the resolver, which builds a skeleton ("reduced graph") of the fragment
-    /// and prepares data for resolving paths of macro invocations.
-    fn collect_invocations(&mut self, mut fragment: AstFragment, extra_placeholders: &[NodeId])
-                           -> (AstFragment, Vec<Invocation>) {
-        // Resolve `$crate`s in the fragment for pretty-printing.
-        self.cx.resolver.resolve_dollar_crates();
-
-        let invocations = {
-            let mut collector = InvocationCollector {
-                cfg: StripUnconfigured {
-                    sess: self.cx.parse_sess,
-                    features: self.cx.ecfg.features,
-                },
-                cx: self.cx,
-                invocations: Vec::new(),
-                monotonic: self.monotonic,
-            };
-            fragment.mut_visit_with(&mut collector);
-            collector.invocations
-        };
-
-        // FIXME: Merge `extra_placeholders` into the `fragment` as regular placeholders.
-        if self.monotonic {
-            self.cx.resolver.visit_ast_fragment_with_placeholders(
-                self.cx.current_expansion.id, &fragment, extra_placeholders);
-        }
-
-        (fragment, invocations)
-    }
-
-    fn fully_configure(&mut self, item: Annotatable) -> Annotatable {
-        let mut cfg = StripUnconfigured {
-            sess: self.cx.parse_sess,
-            features: self.cx.ecfg.features,
-        };
-        // Since the item itself has already been configured by the InvocationCollector,
-        // we know that the fold result vector will contain exactly one element.
-        match item {
-            Annotatable::Item(item) => {
-                Annotatable::Item(cfg.flat_map_item(item).pop().unwrap())
-            }
-            Annotatable::TraitItem(item) => {
-                Annotatable::TraitItem(
-                    item.map(|item| cfg.flat_map_trait_item(item).pop().unwrap()))
-            }
-            Annotatable::ImplItem(item) => {
-                Annotatable::ImplItem(item.map(|item| cfg.flat_map_impl_item(item).pop().unwrap()))
-            }
-            Annotatable::ForeignItem(item) => {
-                Annotatable::ForeignItem(
-                    item.map(|item| cfg.flat_map_foreign_item(item).pop().unwrap())
-                )
-            }
-            Annotatable::Stmt(stmt) => {
-                Annotatable::Stmt(stmt.map(|stmt| cfg.flat_map_stmt(stmt).pop().unwrap()))
-            }
-            Annotatable::Expr(mut expr) => {
-                Annotatable::Expr({ cfg.visit_expr(&mut expr); expr })
-            }
-            Annotatable::Arm(arm) => {
-                Annotatable::Arm(cfg.flat_map_arm(arm).pop().unwrap())
-            }
-            Annotatable::Field(field) => {
-                Annotatable::Field(cfg.flat_map_field(field).pop().unwrap())
-            }
-            Annotatable::FieldPat(fp) => {
-                Annotatable::FieldPat(cfg.flat_map_field_pattern(fp).pop().unwrap())
-            }
-            Annotatable::GenericParam(param) => {
-                Annotatable::GenericParam(cfg.flat_map_generic_param(param).pop().unwrap())
-            }
-            Annotatable::Param(param) => {
-                Annotatable::Param(cfg.flat_map_param(param).pop().unwrap())
-            }
-            Annotatable::StructField(sf) => {
-                Annotatable::StructField(cfg.flat_map_struct_field(sf).pop().unwrap())
-            }
-            Annotatable::Variant(v) => {
-                Annotatable::Variant(cfg.flat_map_variant(v).pop().unwrap())
-            }
-        }
-    }
-
-    fn expand_invoc(&mut self, invoc: Invocation, ext: &SyntaxExtensionKind) -> AstFragment {
-        if self.cx.current_expansion.depth > self.cx.ecfg.recursion_limit {
-            let expn_data = self.cx.current_expansion.id.expn_data();
-            let suggested_limit = self.cx.ecfg.recursion_limit * 2;
-            let mut err = self.cx.struct_span_err(expn_data.call_site,
-                &format!("recursion limit reached while expanding the macro `{}`",
-                         expn_data.kind.descr()));
-            err.help(&format!(
-                "consider adding a `#![recursion_limit=\"{}\"]` attribute to your crate",
-                suggested_limit));
-            err.emit();
-            self.cx.trace_macros_diag();
-            FatalError.raise();
-        }
-
-        let (fragment_kind, span) = (invoc.fragment_kind, invoc.span());
-        match invoc.kind {
-            InvocationKind::Bang { mac, .. } => match ext {
-                SyntaxExtensionKind::Bang(expander) => {
-                    self.gate_proc_macro_expansion_kind(span, fragment_kind);
-                    let tok_result = expander.expand(self.cx, span, mac.stream());
-                    let result =
-                        self.parse_ast_fragment(tok_result, fragment_kind, &mac.path, span);
-                    self.gate_proc_macro_expansion(span, &result);
-                    result
-                }
-                SyntaxExtensionKind::LegacyBang(expander) => {
-                    let prev = self.cx.current_expansion.prior_type_ascription;
-                    self.cx.current_expansion.prior_type_ascription = mac.prior_type_ascription;
-                    let tok_result = expander.expand(self.cx, span, mac.stream());
-                    let result = if let Some(result) = fragment_kind.make_from(tok_result) {
-                        result
-                    } else {
-                        let msg = format!("non-{kind} macro in {kind} position: {path}",
-                                          kind = fragment_kind.name(), path = mac.path);
-                        self.cx.span_err(span, &msg);
-                        self.cx.trace_macros_diag();
-                        fragment_kind.dummy(span)
-                    };
-                    self.cx.current_expansion.prior_type_ascription = prev;
-                    result
-                }
-                _ => unreachable!()
-            }
-            InvocationKind::Attr { attr, mut item, .. } => match ext {
-                SyntaxExtensionKind::Attr(expander) => {
-                    self.gate_proc_macro_attr_item(span, &item);
-                    let item_tok = TokenTree::token(token::Interpolated(Lrc::new(match item {
-                        Annotatable::Item(item) => token::NtItem(item),
-                        Annotatable::TraitItem(item) => token::NtTraitItem(item.into_inner()),
-                        Annotatable::ImplItem(item) => token::NtImplItem(item.into_inner()),
-                        Annotatable::ForeignItem(item) => token::NtForeignItem(item.into_inner()),
-                        Annotatable::Stmt(stmt) => token::NtStmt(stmt.into_inner()),
-                        Annotatable::Expr(expr) => token::NtExpr(expr),
-                        Annotatable::Arm(..)
-                        | Annotatable::Field(..)
-                        | Annotatable::FieldPat(..)
-                        | Annotatable::GenericParam(..)
-                        | Annotatable::Param(..)
-                        | Annotatable::StructField(..)
-                        | Annotatable::Variant(..)
-                            => panic!("unexpected annotatable"),
-                    })), DUMMY_SP).into();
-                    let input = self.extract_proc_macro_attr_input(attr.item.tokens, span);
-                    let tok_result = expander.expand(self.cx, span, input, item_tok);
-                    let res =
-                        self.parse_ast_fragment(tok_result, fragment_kind, &attr.item.path, span);
-                    self.gate_proc_macro_expansion(span, &res);
-                    res
-                }
-                SyntaxExtensionKind::LegacyAttr(expander) => {
-                    match attr.parse_meta(self.cx.parse_sess) {
-                        Ok(meta) => {
-                            let item = expander.expand(self.cx, span, &meta, item);
-                            fragment_kind.expect_from_annotatables(item)
-                        }
-                        Err(mut err) => {
-                            err.emit();
-                            fragment_kind.dummy(span)
-                        }
-                    }
-                }
-                SyntaxExtensionKind::NonMacroAttr { mark_used } => {
-                    attr::mark_known(&attr);
-                    if *mark_used {
-                        attr::mark_used(&attr);
-                    }
-                    item.visit_attrs(|attrs| attrs.push(attr));
-                    fragment_kind.expect_from_annotatables(iter::once(item))
-                }
-                _ => unreachable!()
-            }
-            InvocationKind::Derive { path, item } => match ext {
-                SyntaxExtensionKind::Derive(expander) |
-                SyntaxExtensionKind::LegacyDerive(expander) => {
-                    if !item.derive_allowed() {
-                        return fragment_kind.dummy(span);
-                    }
-                    let meta = ast::MetaItem { kind: ast::MetaItemKind::Word, span, path };
-                    let items = expander.expand(self.cx, span, &meta, item);
-                    fragment_kind.expect_from_annotatables(items)
-                }
-                _ => unreachable!()
-            }
-            InvocationKind::DeriveContainer { .. } => unreachable!()
-        }
-    }
-
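A hedged, user-level sketch of the recursion-limit diagnostic emitted at the top of `expand_invoc` (the exact limit depends on how `ExpansionConfig::recursion_limit` and the crate-level attribute are configured):

    // Every expansion of `rec!()` produces another `rec!()`, so the depth grows
    // until compilation stops with:
    //   error: recursion limit reached while expanding the macro `rec`
    //   help: consider adding a `#![recursion_limit="..."]` attribute to your
    //         crate (the suggested value doubles the current limit)
    macro_rules! rec { () => { rec!() } }

    fn main() {
        rec!();
    }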
-    fn extract_proc_macro_attr_input(&self, tokens: TokenStream, span: Span) -> TokenStream {
-        let mut trees = tokens.trees();
-        match trees.next() {
-            Some(TokenTree::Delimited(_, _, tts)) => {
-                if trees.next().is_none() {
-                    return tts.into()
-                }
-            }
-            Some(TokenTree::Token(..)) => {}
-            None => return TokenStream::empty(),
-        }
-        self.cx.span_err(span, "custom attribute invocations must be \
-            of the form `#[foo]` or `#[foo(..)]`, the macro name must only be \
-            followed by a delimiter token");
-        TokenStream::empty()
-    }
-
-    fn gate_proc_macro_attr_item(&self, span: Span, item: &Annotatable) {
-        let (kind, gate) = match *item {
-            Annotatable::Item(ref item) => {
-                match item.kind {
-                    ItemKind::Mod(_) if self.cx.ecfg.proc_macro_hygiene() => return,
-                    ItemKind::Mod(_) => ("modules", sym::proc_macro_hygiene),
-                    _ => return,
-                }
-            }
-            Annotatable::TraitItem(_) => return,
-            Annotatable::ImplItem(_) => return,
-            Annotatable::ForeignItem(_) => return,
-            Annotatable::Stmt(_) |
-            Annotatable::Expr(_) if self.cx.ecfg.proc_macro_hygiene() => return,
-            Annotatable::Stmt(_) => ("statements", sym::proc_macro_hygiene),
-            Annotatable::Expr(_) => ("expressions", sym::proc_macro_hygiene),
-            Annotatable::Arm(..)
-            | Annotatable::Field(..)
-            | Annotatable::FieldPat(..)
-            | Annotatable::GenericParam(..)
-            | Annotatable::Param(..)
-            | Annotatable::StructField(..)
-            | Annotatable::Variant(..)
-            => panic!("unexpected annotatable"),
-        };
-        emit_feature_err(
-            self.cx.parse_sess,
-            gate,
-            span,
-            GateIssue::Language,
-            &format!("custom attributes cannot be applied to {}", kind),
-        );
-    }
-
-    fn gate_proc_macro_expansion(&self, span: Span, fragment: &AstFragment) {
-        if self.cx.ecfg.proc_macro_hygiene() {
-            return
-        }
-
-        fragment.visit_with(&mut DisallowMacros {
-            span,
-            parse_sess: self.cx.parse_sess,
-        });
-
-        struct DisallowMacros<'a> {
-            span: Span,
-            parse_sess: &'a ParseSess,
-        }
-
-        impl<'ast, 'a> Visitor<'ast> for DisallowMacros<'a> {
-            fn visit_item(&mut self, i: &'ast ast::Item) {
-                if let ast::ItemKind::MacroDef(_) = i.kind {
-                    emit_feature_err(
-                        self.parse_sess,
-                        sym::proc_macro_hygiene,
-                        self.span,
-                        GateIssue::Language,
-                        "procedural macros cannot expand to macro definitions",
-                    );
-                }
-                visit::walk_item(self, i);
-            }
-
-            fn visit_mac(&mut self, _mac: &'ast ast::Mac) {
-                // Intentionally a no-op: the fragment may still contain macro
-                // invocations that will be expanded later, and the default
-                // `visit_mac` panics on them.
-            }
-        }
-    }
-
-    fn gate_proc_macro_expansion_kind(&self, span: Span, kind: AstFragmentKind) {
-        let kind = match kind {
-            AstFragmentKind::Expr |
-            AstFragmentKind::OptExpr => "expressions",
-            AstFragmentKind::Pat => "patterns",
-            AstFragmentKind::Stmts => "statements",
-            AstFragmentKind::Ty |
-            AstFragmentKind::Items |
-            AstFragmentKind::TraitItems |
-            AstFragmentKind::ImplItems |
-            AstFragmentKind::ForeignItems => return,
-            AstFragmentKind::Arms
-            | AstFragmentKind::Fields
-            | AstFragmentKind::FieldPats
-            | AstFragmentKind::GenericParams
-            | AstFragmentKind::Params
-            | AstFragmentKind::StructFields
-            | AstFragmentKind::Variants
-                => panic!("unexpected AST fragment kind"),
-        };
-        if self.cx.ecfg.proc_macro_hygiene() {
-            return
-        }
-        emit_feature_err(
-            self.cx.parse_sess,
-            sym::proc_macro_hygiene,
-            span,
-            GateIssue::Language,
-            &format!("procedural macros cannot be expanded to {}", kind),
-        );
-    }
-
-    fn parse_ast_fragment(
-        &mut self,
-        toks: TokenStream,
-        kind: AstFragmentKind,
-        path: &Path,
-        span: Span,
-    ) -> AstFragment {
-        let mut parser = self.cx.new_parser_from_tts(toks);
-        match parser.parse_ast_fragment(kind, false) {
-            Ok(fragment) => {
-                parser.ensure_complete_parse(path, kind.name(), span);
-                fragment
-            }
-            Err(mut err) => {
-                err.set_span(span);
-                annotate_err_with_kind(&mut err, kind, span);
-                err.emit();
-                self.cx.trace_macros_diag();
-                kind.dummy(span)
-            }
-        }
-    }
-}
-
-impl<'a> Parser<'a> {
-    pub fn parse_ast_fragment(&mut self, kind: AstFragmentKind, macro_legacy_warnings: bool)
-                              -> PResult<'a, AstFragment> {
-        Ok(match kind {
-            AstFragmentKind::Items => {
-                let mut items = SmallVec::new();
-                while let Some(item) = self.parse_item()? {
-                    items.push(item);
-                }
-                AstFragment::Items(items)
-            }
-            AstFragmentKind::TraitItems => {
-                let mut items = SmallVec::new();
-                while self.token != token::Eof {
-                    items.push(self.parse_trait_item(&mut false)?);
-                }
-                AstFragment::TraitItems(items)
-            }
-            AstFragmentKind::ImplItems => {
-                let mut items = SmallVec::new();
-                while self.token != token::Eof {
-                    items.push(self.parse_impl_item(&mut false)?);
-                }
-                AstFragment::ImplItems(items)
-            }
-            AstFragmentKind::ForeignItems => {
-                let mut items = SmallVec::new();
-                while self.token != token::Eof {
-                    items.push(self.parse_foreign_item(DUMMY_SP)?);
-                }
-                AstFragment::ForeignItems(items)
-            }
-            AstFragmentKind::Stmts => {
-                let mut stmts = SmallVec::new();
-                while self.token != token::Eof &&
-                      // won't make progress on a `}`
-                      self.token != token::CloseDelim(token::Brace) {
-                    if let Some(stmt) = self.parse_full_stmt(macro_legacy_warnings)? {
-                        stmts.push(stmt);
-                    }
-                }
-                AstFragment::Stmts(stmts)
-            }
-            AstFragmentKind::Expr => AstFragment::Expr(self.parse_expr()?),
-            AstFragmentKind::OptExpr => {
-                if self.token != token::Eof {
-                    AstFragment::OptExpr(Some(self.parse_expr()?))
-                } else {
-                    AstFragment::OptExpr(None)
-                }
-            },
-            AstFragmentKind::Ty => AstFragment::Ty(self.parse_ty()?),
-            AstFragmentKind::Pat => AstFragment::Pat(self.parse_pat(None)?),
-            AstFragmentKind::Arms
-            | AstFragmentKind::Fields
-            | AstFragmentKind::FieldPats
-            | AstFragmentKind::GenericParams
-            | AstFragmentKind::Params
-            | AstFragmentKind::StructFields
-            | AstFragmentKind::Variants
-                => panic!("unexpected AST fragment kind"),
-        })
-    }
-
-    pub fn ensure_complete_parse(&mut self, macro_path: &Path, kind_name: &str, span: Span) {
-        if self.token != token::Eof {
-            let msg = format!("macro expansion ignores token `{}` and any following",
-                              self.this_token_to_string());
-            // Avoid emitting backtrace info twice.
-            let def_site_span = self.token.span.with_ctxt(SyntaxContext::root());
-            let mut err = self.diagnostic().struct_span_err(def_site_span, &msg);
-            err.span_label(span, "caused by the macro expansion here");
-            let msg = format!(
-                "the usage of `{}!` is likely invalid in {} context",
-                macro_path,
-                kind_name,
-            );
-            err.note(&msg);
-            let semi_span = self.sess.source_map().next_point(span);
-
-            let semi_full_span = semi_span.to(self.sess.source_map().next_point(semi_span));
-            match self.sess.source_map().span_to_snippet(semi_full_span) {
-                Ok(ref snippet) if &snippet[..] != ";" && kind_name == "expression" => {
-                    err.span_suggestion(
-                        semi_span,
-                        "you might be missing a semicolon here",
-                        ";".to_owned(),
-                        Applicability::MaybeIncorrect,
-                    );
-                }
-                _ => {}
-            }
-            err.emit();
-        }
-    }
-}
-
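A user-level sketch of the leftover-token diagnostic produced by `ensure_complete_parse` when a macro expanded in expression position yields more tokens than a single expression:

    macro_rules! two { () => { 1; 2 } }

    fn main() {
        // Only `1` is parsed as the expression; the remaining `; 2` triggers:
        //   error: macro expansion ignores token `;` and any following
        //   note: the usage of `two!` is likely invalid in expression context
        let _x = two!();
    }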
-struct InvocationCollector<'a, 'b> {
-    cx: &'a mut ExtCtxt<'b>,
-    cfg: StripUnconfigured<'a>,
-    invocations: Vec<Invocation>,
-    monotonic: bool,
-}
-
-impl<'a, 'b> InvocationCollector<'a, 'b> {
-    fn collect(&mut self, fragment_kind: AstFragmentKind, kind: InvocationKind) -> AstFragment {
-        // Expansion data for all the collected invocations is set upon their resolution,
-        // with the exception of the derive container case, which is not resolved and can get
-        // its expansion data immediately.
-        let expn_data = match &kind {
-            InvocationKind::DeriveContainer { item, .. } => Some(ExpnData {
-                parent: self.cx.current_expansion.id,
-                ..ExpnData::default(
-                    ExpnKind::Macro(MacroKind::Attr, sym::derive),
-                    item.span(), self.cx.parse_sess.edition,
-                )
-            }),
-            _ => None,
-        };
-        let expn_id = ExpnId::fresh(expn_data);
-        self.invocations.push(Invocation {
-            kind,
-            fragment_kind,
-            expansion_data: ExpansionData {
-                id: expn_id,
-                depth: self.cx.current_expansion.depth + 1,
-                ..self.cx.current_expansion.clone()
-            },
-        });
-        placeholder(fragment_kind, NodeId::placeholder_from_expn_id(expn_id))
-    }
-
-    fn collect_bang(&mut self, mac: ast::Mac, span: Span, kind: AstFragmentKind) -> AstFragment {
-        self.collect(kind, InvocationKind::Bang { mac, span })
-    }
-
-    fn collect_attr(&mut self,
-                    attr: Option<ast::Attribute>,
-                    derives: Vec<Path>,
-                    item: Annotatable,
-                    kind: AstFragmentKind,
-                    after_derive: bool)
-                    -> AstFragment {
-        self.collect(kind, match attr {
-            Some(attr) => InvocationKind::Attr { attr, item, derives, after_derive },
-            None => InvocationKind::DeriveContainer { derives, item },
-        })
-    }
-
-    fn find_attr_invoc(&self, attrs: &mut Vec<ast::Attribute>, after_derive: &mut bool)
-                       -> Option<ast::Attribute> {
-        let attr = attrs.iter()
-                        .position(|a| {
-                            if a.path == sym::derive {
-                                *after_derive = true;
-                            }
-                            !attr::is_known(a) && !is_builtin_attr(a)
-                        })
-                        .map(|i| attrs.remove(i));
-        if let Some(attr) = &attr {
-            if !self.cx.ecfg.custom_inner_attributes() &&
-               attr.style == ast::AttrStyle::Inner && attr.path != sym::test {
-                emit_feature_err(&self.cx.parse_sess, sym::custom_inner_attributes,
-                                 attr.span, GateIssue::Language,
-                                 "non-builtin inner attributes are unstable");
-            }
-        }
-        attr
-    }
-
-    /// If `item` is an attr invocation, remove and return the macro attribute and derive traits.
-    fn classify_item<T>(&mut self, item: &mut T)
-                        -> (Option<ast::Attribute>, Vec<Path>, /* after_derive */ bool)
-        where T: HasAttrs,
-    {
-        let (mut attr, mut traits, mut after_derive) = (None, Vec::new(), false);
-
-        item.visit_attrs(|mut attrs| {
-            attr = self.find_attr_invoc(&mut attrs, &mut after_derive);
-            traits = collect_derives(&mut self.cx, &mut attrs);
-        });
-
-        (attr, traits, after_derive)
-    }
-
-    /// Alternative to `classify_item()` that ignores `#[derive]` so invocations fall through
-    /// to the unused-attributes lint (making it an error on statements and expressions
-    /// is a breaking change).
-    fn classify_nonitem<T: HasAttrs>(&mut self, nonitem: &mut T)
-                                     -> (Option<ast::Attribute>, /* after_derive */ bool) {
-        let (mut attr, mut after_derive) = (None, false);
-
-        nonitem.visit_attrs(|mut attrs| {
-            attr = self.find_attr_invoc(&mut attrs, &mut after_derive);
-        });
-
-        (attr, after_derive)
-    }
-
-    fn configure<T: HasAttrs>(&mut self, node: T) -> Option<T> {
-        self.cfg.configure(node)
-    }
-
-    // Detect use of feature-gated or invalid attributes on macro invocations
-    // since they will not be detected after macro expansion.
-    fn check_attributes(&mut self, attrs: &[ast::Attribute]) {
-        let features = self.cx.ecfg.features.unwrap();
-        for attr in attrs.iter() {
-            feature_gate::check_attribute(attr, self.cx.parse_sess, features);
-
-            // macros are expanded before any lint passes, so this warning has to be hardcoded
-            if attr.path == sym::derive {
-                self.cx.struct_span_warn(attr.span, "`#[derive]` does nothing on macro invocations")
-                    .note("this may become a hard error in a future release")
-                    .emit();
-            }
-        }
-    }
-}
-
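For illustration, the hard-coded warning in `check_attributes` fires on code like the following (a sketch of the behavior in this snapshot):

    macro_rules! make_struct { () => { struct S; } }

    // warning: `#[derive]` does nothing on macro invocations
    // note: this may become a hard error in a future release
    #[derive(Debug)]
    make_struct!();

    fn main() {}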
-impl<'a, 'b> MutVisitor for InvocationCollector<'a, 'b> {
-    fn visit_expr(&mut self, expr: &mut P<ast::Expr>) {
-        self.cfg.configure_expr(expr);
-        visit_clobber(expr.deref_mut(), |mut expr| {
-            self.cfg.configure_expr_kind(&mut expr.kind);
-
-            // ignore derives so they remain unused
-            let (attr, after_derive) = self.classify_nonitem(&mut expr);
-
-            if attr.is_some() {
-                // Collect the invocation regardless of whether or not attributes are permitted
-                // here; expansion will eat the attribute so it won't error later.
-                attr.as_ref().map(|a| self.cfg.maybe_emit_expr_attr_err(a));
-
-                // AstFragmentKind::Expr requires the macro to emit an expression.
-                return self.collect_attr(attr, vec![], Annotatable::Expr(P(expr)),
-                                          AstFragmentKind::Expr, after_derive)
-                    .make_expr()
-                    .into_inner()
-            }
-
-            if let ast::ExprKind::Mac(mac) = expr.kind {
-                self.check_attributes(&expr.attrs);
-                self.collect_bang(mac, expr.span, AstFragmentKind::Expr)
-                    .make_expr()
-                    .into_inner()
-            } else {
-                noop_visit_expr(&mut expr, self);
-                expr
-            }
-        });
-    }
-
-    fn flat_map_arm(&mut self, arm: ast::Arm) -> SmallVec<[ast::Arm; 1]> {
-        let mut arm = configure!(self, arm);
-
-        let (attr, traits, after_derive) = self.classify_item(&mut arm);
-        if attr.is_some() || !traits.is_empty() {
-            return self.collect_attr(attr, traits, Annotatable::Arm(arm),
-                                     AstFragmentKind::Arms, after_derive)
-                                     .make_arms();
-        }
-
-        noop_flat_map_arm(arm, self)
-    }
-
-    fn flat_map_field(&mut self, field: ast::Field) -> SmallVec<[ast::Field; 1]> {
-        let mut field = configure!(self, field);
-
-        let (attr, traits, after_derive) = self.classify_item(&mut field);
-        if attr.is_some() || !traits.is_empty() {
-            return self.collect_attr(attr, traits, Annotatable::Field(field),
-                                     AstFragmentKind::Fields, after_derive)
-                                     .make_fields();
-        }
-
-        noop_flat_map_field(field, self)
-    }
-
-    fn flat_map_field_pattern(&mut self, fp: ast::FieldPat) -> SmallVec<[ast::FieldPat; 1]> {
-        let mut fp = configure!(self, fp);
-
-        let (attr, traits, after_derive) = self.classify_item(&mut fp);
-        if attr.is_some() || !traits.is_empty() {
-            return self.collect_attr(attr, traits, Annotatable::FieldPat(fp),
-                                     AstFragmentKind::FieldPats, after_derive)
-                                     .make_field_patterns();
-        }
-
-        noop_flat_map_field_pattern(fp, self)
-    }
-
-    fn flat_map_param(&mut self, p: ast::Param) -> SmallVec<[ast::Param; 1]> {
-        let mut p = configure!(self, p);
-
-        let (attr, traits, after_derive) = self.classify_item(&mut p);
-        if attr.is_some() || !traits.is_empty() {
-            return self.collect_attr(attr, traits, Annotatable::Param(p),
-                                     AstFragmentKind::Params, after_derive)
-                                     .make_params();
-        }
-
-        noop_flat_map_param(p, self)
-    }
-
-    fn flat_map_struct_field(&mut self, sf: ast::StructField) -> SmallVec<[ast::StructField; 1]> {
-        let mut sf = configure!(self, sf);
-
-        let (attr, traits, after_derive) = self.classify_item(&mut sf);
-        if attr.is_some() || !traits.is_empty() {
-            return self.collect_attr(attr, traits, Annotatable::StructField(sf),
-                                     AstFragmentKind::StructFields, after_derive)
-                                     .make_struct_fields();
-        }
-
-        noop_flat_map_struct_field(sf, self)
-    }
-
-    fn flat_map_variant(&mut self, variant: ast::Variant) -> SmallVec<[ast::Variant; 1]> {
-        let mut variant = configure!(self, variant);
-
-        let (attr, traits, after_derive) = self.classify_item(&mut variant);
-        if attr.is_some() || !traits.is_empty() {
-            return self.collect_attr(attr, traits, Annotatable::Variant(variant),
-                                     AstFragmentKind::Variants, after_derive)
-                                     .make_variants();
-        }
-
-        noop_flat_map_variant(variant, self)
-    }
-
-    fn filter_map_expr(&mut self, expr: P<ast::Expr>) -> Option<P<ast::Expr>> {
-        let expr = configure!(self, expr);
-        expr.filter_map(|mut expr| {
-            self.cfg.configure_expr_kind(&mut expr.kind);
-
-            // Ignore derives so they remain unused.
-            let (attr, after_derive) = self.classify_nonitem(&mut expr);
-
-            if attr.is_some() {
-                attr.as_ref().map(|a| self.cfg.maybe_emit_expr_attr_err(a));
-
-                return self.collect_attr(attr, vec![], Annotatable::Expr(P(expr)),
-                                         AstFragmentKind::OptExpr, after_derive)
-                    .make_opt_expr()
-                    .map(|expr| expr.into_inner())
-            }
-
-            if let ast::ExprKind::Mac(mac) = expr.kind {
-                self.check_attributes(&expr.attrs);
-                self.collect_bang(mac, expr.span, AstFragmentKind::OptExpr)
-                    .make_opt_expr()
-                    .map(|expr| expr.into_inner())
-            } else {
-                Some({ noop_visit_expr(&mut expr, self); expr })
-            }
-        })
-    }
-
-    fn visit_pat(&mut self, pat: &mut P<ast::Pat>) {
-        self.cfg.configure_pat(pat);
-        match pat.kind {
-            PatKind::Mac(_) => {}
-            _ => return noop_visit_pat(pat, self),
-        }
-
-        visit_clobber(pat, |mut pat| {
-            match mem::replace(&mut pat.kind, PatKind::Wild) {
-                PatKind::Mac(mac) =>
-                    self.collect_bang(mac, pat.span, AstFragmentKind::Pat).make_pat(),
-                _ => unreachable!(),
-            }
-        });
-    }
-
-    fn flat_map_stmt(&mut self, stmt: ast::Stmt) -> SmallVec<[ast::Stmt; 1]> {
-        let mut stmt = configure!(self, stmt);
-
-        // we'll expand attributes on expressions separately
-        if !stmt.is_expr() {
-            let (attr, derives, after_derive) = if stmt.is_item() {
-                self.classify_item(&mut stmt)
-            } else {
-                // Ignore derives on non-item statements so the attribute falls through
-                // to the unused-attributes lint.
-                let (attr, after_derive) = self.classify_nonitem(&mut stmt);
-                (attr, vec![], after_derive)
-            };
-
-            if attr.is_some() || !derives.is_empty() {
-                return self.collect_attr(attr, derives, Annotatable::Stmt(P(stmt)),
-                                         AstFragmentKind::Stmts, after_derive).make_stmts();
-            }
-        }
-
-        if let StmtKind::Mac(mac) = stmt.kind {
-            let (mac, style, attrs) = mac.into_inner();
-            self.check_attributes(&attrs);
-            let mut placeholder = self.collect_bang(mac, stmt.span, AstFragmentKind::Stmts)
-                                        .make_stmts();
-
-            // If this is a macro invocation with a semicolon, then apply that
-            // semicolon to the final statement produced by expansion.
-            if style == MacStmtStyle::Semicolon {
-                if let Some(stmt) = placeholder.pop() {
-                    placeholder.push(stmt.add_trailing_semicolon());
-                }
-            }
-
-            return placeholder;
-        }
-
-        // The placeholder expander gives ids to statements, so we avoid folding the id here.
-        let ast::Stmt { id, kind, span } = stmt;
-        noop_flat_map_stmt_kind(kind, self).into_iter().map(|kind| {
-            ast::Stmt { id, kind, span }
-        }).collect()
-
-    }
-
-    fn visit_block(&mut self, block: &mut P<Block>) {
-        let old_directory_ownership = self.cx.current_expansion.directory_ownership;
-        self.cx.current_expansion.directory_ownership = DirectoryOwnership::UnownedViaBlock;
-        noop_visit_block(block, self);
-        self.cx.current_expansion.directory_ownership = old_directory_ownership;
-    }
-
-    fn flat_map_item(&mut self, item: P<ast::Item>) -> SmallVec<[P<ast::Item>; 1]> {
-        let mut item = configure!(self, item);
-
-        let (attr, traits, after_derive) = self.classify_item(&mut item);
-        if attr.is_some() || !traits.is_empty() {
-            return self.collect_attr(attr, traits, Annotatable::Item(item),
-                                     AstFragmentKind::Items, after_derive).make_items();
-        }
-
-        match item.kind {
-            ast::ItemKind::Mac(..) => {
-                self.check_attributes(&item.attrs);
-                item.and_then(|item| match item.kind {
-                    ItemKind::Mac(mac) => self.collect(
-                        AstFragmentKind::Items, InvocationKind::Bang { mac, span: item.span }
-                    ).make_items(),
-                    _ => unreachable!(),
-                })
-            }
-            ast::ItemKind::Mod(ast::Mod { inner, .. }) => {
-                if item.ident == Ident::invalid() {
-                    return noop_flat_map_item(item, self);
-                }
-
-                let orig_directory_ownership = self.cx.current_expansion.directory_ownership;
-                let mut module = (*self.cx.current_expansion.module).clone();
-                module.mod_path.push(item.ident);
-
-                // Detect if this is an inline module (`mod m { ... }` as opposed to `mod m;`).
-                // In the non-inline case, `inner` is never the dummy span (cf. `parse_item_mod`).
-                // Thus, if `inner` is the dummy span, we know the module is inline.
-                let inline_module = item.span.contains(inner) || inner.is_dummy();
-
-                if inline_module {
-                    if let Some(path) = attr::first_attr_value_str_by_name(&item.attrs, sym::path) {
-                        self.cx.current_expansion.directory_ownership =
-                            DirectoryOwnership::Owned { relative: None };
-                        module.directory.push(&*path.as_str());
-                    } else {
-                        module.directory.push(&*item.ident.as_str());
-                    }
-                } else {
-                    let path = self.cx.parse_sess.source_map().span_to_unmapped_path(inner);
-                    let mut path = match path {
-                        FileName::Real(path) => path,
-                        other => PathBuf::from(other.to_string()),
-                    };
-                    let directory_ownership = match path.file_name().unwrap().to_str() {
-                        Some("mod.rs") => DirectoryOwnership::Owned { relative: None },
-                        Some(_) => DirectoryOwnership::Owned {
-                            relative: Some(item.ident),
-                        },
-                        None => DirectoryOwnership::UnownedViaMod(false),
-                    };
-                    path.pop();
-                    module.directory = path;
-                    self.cx.current_expansion.directory_ownership = directory_ownership;
-                }
-
-                let orig_module =
-                    mem::replace(&mut self.cx.current_expansion.module, Rc::new(module));
-                let result = noop_flat_map_item(item, self);
-                self.cx.current_expansion.module = orig_module;
-                self.cx.current_expansion.directory_ownership = orig_directory_ownership;
-                result
-            }
-
-            _ => noop_flat_map_item(item, self),
-        }
-    }
-
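A short sketch of the two module forms distinguished by the `ItemKind::Mod` arm above (the file name is illustrative):

    // Inline module: the body lies inside the item's span, so expansion keeps the
    // current directory and pushes the module name (or a `#[path = "..."]` value).
    mod inline_example {
        pub fn f() {}
    }

    // Out-of-line module: `mod out_of_line;` is parsed from `out_of_line.rs` or
    // `out_of_line/mod.rs`, and the expansion directory switches to that file's
    // parent (left commented out here because no such file exists).
    // mod out_of_line;

    fn main() { inline_example::f() }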
-    fn flat_map_trait_item(&mut self, item: ast::TraitItem) -> SmallVec<[ast::TraitItem; 1]> {
-        let mut item = configure!(self, item);
-
-        let (attr, traits, after_derive) = self.classify_item(&mut item);
-        if attr.is_some() || !traits.is_empty() {
-            return self.collect_attr(attr, traits, Annotatable::TraitItem(P(item)),
-                                     AstFragmentKind::TraitItems, after_derive).make_trait_items()
-        }
-
-        match item.kind {
-            ast::TraitItemKind::Macro(mac) => {
-                let ast::TraitItem { attrs, span, .. } = item;
-                self.check_attributes(&attrs);
-                self.collect_bang(mac, span, AstFragmentKind::TraitItems).make_trait_items()
-            }
-            _ => noop_flat_map_trait_item(item, self),
-        }
-    }
-
-    fn flat_map_impl_item(&mut self, item: ast::ImplItem) -> SmallVec<[ast::ImplItem; 1]> {
-        let mut item = configure!(self, item);
-
-        let (attr, traits, after_derive) = self.classify_item(&mut item);
-        if attr.is_some() || !traits.is_empty() {
-            return self.collect_attr(attr, traits, Annotatable::ImplItem(P(item)),
-                                     AstFragmentKind::ImplItems, after_derive).make_impl_items();
-        }
-
-        match item.kind {
-            ast::ImplItemKind::Macro(mac) => {
-                let ast::ImplItem { attrs, span, .. } = item;
-                self.check_attributes(&attrs);
-                self.collect_bang(mac, span, AstFragmentKind::ImplItems).make_impl_items()
-            }
-            _ => noop_flat_map_impl_item(item, self),
-        }
-    }
-
-    fn visit_ty(&mut self, ty: &mut P<ast::Ty>) {
-        match ty.kind {
-            ast::TyKind::Mac(_) => {}
-            _ => return noop_visit_ty(ty, self),
-        };
-
-        visit_clobber(ty, |mut ty| {
-            match mem::replace(&mut ty.kind, ast::TyKind::Err) {
-                ast::TyKind::Mac(mac) =>
-                    self.collect_bang(mac, ty.span, AstFragmentKind::Ty).make_ty(),
-                _ => unreachable!(),
-            }
-        });
-    }
-
-    fn visit_foreign_mod(&mut self, foreign_mod: &mut ast::ForeignMod) {
-        self.cfg.configure_foreign_mod(foreign_mod);
-        noop_visit_foreign_mod(foreign_mod, self);
-    }
-
-    fn flat_map_foreign_item(&mut self, mut foreign_item: ast::ForeignItem)
-        -> SmallVec<[ast::ForeignItem; 1]>
-    {
-        let (attr, traits, after_derive) = self.classify_item(&mut foreign_item);
-
-        if attr.is_some() || !traits.is_empty() {
-            return self.collect_attr(attr, traits, Annotatable::ForeignItem(P(foreign_item)),
-                                     AstFragmentKind::ForeignItems, after_derive)
-                                     .make_foreign_items();
-        }
-
-        if let ast::ForeignItemKind::Macro(mac) = foreign_item.kind {
-            self.check_attributes(&foreign_item.attrs);
-            return self.collect_bang(mac, foreign_item.span, AstFragmentKind::ForeignItems)
-                .make_foreign_items();
-        }
-
-        noop_flat_map_foreign_item(foreign_item, self)
-    }
-
-    fn visit_item_kind(&mut self, item: &mut ast::ItemKind) {
-        match item {
-            ast::ItemKind::MacroDef(..) => {}
-            _ => {
-                self.cfg.configure_item_kind(item);
-                noop_visit_item_kind(item, self);
-            }
-        }
-    }
-
-    fn flat_map_generic_param(
-        &mut self,
-        param: ast::GenericParam
-    ) -> SmallVec<[ast::GenericParam; 1]>
-    {
-        let mut param = configure!(self, param);
-
-        let (attr, traits, after_derive) = self.classify_item(&mut param);
-        if attr.is_some() || !traits.is_empty() {
-            return self.collect_attr(attr, traits, Annotatable::GenericParam(param),
-                                     AstFragmentKind::GenericParams, after_derive)
-                                     .make_generic_params();
-        }
-
-        noop_flat_map_generic_param(param, self)
-    }
-
-    fn visit_attribute(&mut self, at: &mut ast::Attribute) {
-        // Turn `#[doc(include = "filename")]` attributes into
-        // `#[doc(include(file = "filename", contents = "file contents"))]` attributes.
-        if !at.check_name(sym::doc) {
-            return noop_visit_attribute(at, self);
-        }
-
-        if let Some(list) = at.meta_item_list() {
-            if !list.iter().any(|it| it.check_name(sym::include)) {
-                return noop_visit_attribute(at, self);
-            }
-
-            let mut items = vec![];
-
-            for mut it in list {
-                if !it.check_name(sym::include) {
-                    items.push({ noop_visit_meta_list_item(&mut it, self); it });
-                    continue;
-                }
-
-                if let Some(file) = it.value_str() {
-                    let err_count = self.cx.parse_sess.span_diagnostic.err_count();
-                    self.check_attributes(slice::from_ref(at));
-                    if self.cx.parse_sess.span_diagnostic.err_count() > err_count {
-                        // avoid loading the file if they haven't enabled the feature
-                        return noop_visit_attribute(at, self);
-                    }
-
-                    let filename = self.cx.resolve_path(&*file.as_str(), it.span());
-                    match self.cx.source_map().load_file(&filename) {
-                        Ok(source_file) => {
-                            let src = source_file.src.as_ref()
-                                .expect("freshly loaded file should have a source");
-                            let src_interned = Symbol::intern(src.as_str());
-
-                            let include_info = vec![
-                                ast::NestedMetaItem::MetaItem(
-                                    attr::mk_name_value_item_str(
-                                        Ident::with_dummy_span(sym::file),
-                                        file,
-                                        DUMMY_SP,
-                                    ),
-                                ),
-                                ast::NestedMetaItem::MetaItem(
-                                    attr::mk_name_value_item_str(
-                                        Ident::with_dummy_span(sym::contents),
-                                        src_interned,
-                                        DUMMY_SP,
-                                    ),
-                                ),
-                            ];
-
-                            let include_ident = Ident::with_dummy_span(sym::include);
-                            let item = attr::mk_list_item(include_ident, include_info);
-                            items.push(ast::NestedMetaItem::MetaItem(item));
-                        }
-                        Err(e) => {
-                            let lit = it
-                                .meta_item()
-                                .and_then(|item| item.name_value_literal())
-                                .unwrap();
-
-                            if e.kind() == ErrorKind::InvalidData {
-                                self.cx
-                                    .struct_span_err(
-                                        lit.span,
-                                        &format!("{} wasn't a utf-8 file", filename.display()),
-                                    )
-                                    .span_label(lit.span, "contains invalid utf-8")
-                                    .emit();
-                            } else {
-                                let mut err = self.cx.struct_span_err(
-                                    lit.span,
-                                    &format!("couldn't read {}: {}", filename.display(), e),
-                                );
-                                err.span_label(lit.span, "couldn't read file");
-
-                                err.emit();
-                            }
-                        }
-                    }
-                } else {
-                    let mut err = self.cx.struct_span_err(
-                        it.span(),
-                        &format!("expected path to external documentation"),
-                    );
-
-                    // Check if the user erroneously used `doc(include(...))` syntax.
-                    let literal = it.meta_item_list().and_then(|list| {
-                        if list.len() == 1 {
-                            list[0].literal().map(|literal| &literal.kind)
-                        } else {
-                            None
-                        }
-                    });
-
-                    let (path, applicability) = match &literal {
-                        Some(LitKind::Str(path, ..)) => {
-                            (path.to_string(), Applicability::MachineApplicable)
-                        }
-                        _ => (String::from("<path>"), Applicability::HasPlaceholders),
-                    };
-
-                    err.span_suggestion(
-                        it.span(),
-                        "provide a file path with `=`",
-                        format!("include = \"{}\"", path),
-                        applicability,
-                    );
-
-                    err.emit();
-                }
-            }
-
-            let meta = attr::mk_list_item(Ident::with_dummy_span(sym::doc), items);
-            *at = attr::Attribute {
-                item: AttrItem { path: meta.path, tokens: meta.kind.tokens(meta.span) },
-                span: at.span,
-                id: at.id,
-                style: at.style,
-                is_sugared_doc: false,
-            };
-        } else {
-            noop_visit_attribute(at, self)
-        }
-    }
-
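A sketch of the rewrite `visit_attribute` performs for `#[doc(include)]` (the path is illustrative, and the `external_doc` feature gate named here is an assumption of this example):

    #![feature(external_doc)]          // assumed gate for `doc(include = ...)`
    #![doc(include = "../README.md")]  // illustrative path

    // After expansion the attribute carries the file contents inline, roughly:
    //   #![doc(include(file = "../README.md", contents = "<contents of README.md>"))]

    fn main() {}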
-    fn visit_id(&mut self, id: &mut ast::NodeId) {
-        if self.monotonic {
-            debug_assert_eq!(*id, ast::DUMMY_NODE_ID);
-            *id = self.cx.resolver.next_node_id()
-        }
-    }
-
-    fn visit_fn_decl(&mut self, mut fn_decl: &mut P<ast::FnDecl>) {
-        self.cfg.configure_fn_decl(&mut fn_decl);
-        noop_visit_fn_decl(fn_decl, self);
-    }
-}
-
-pub struct ExpansionConfig<'feat> {
-    pub crate_name: String,
-    pub features: Option<&'feat Features>,
-    pub recursion_limit: usize,
-    pub trace_mac: bool,
-    pub should_test: bool, // If false, strip `#[test]` nodes
-    pub single_step: bool,
-    pub keep_macs: bool,
-}
-
-impl<'feat> ExpansionConfig<'feat> {
-    pub fn default(crate_name: String) -> ExpansionConfig<'static> {
-        ExpansionConfig {
-            crate_name,
-            features: None,
-            recursion_limit: 1024,
-            trace_mac: false,
-            should_test: false,
-            single_step: false,
-            keep_macs: false,
-        }
-    }
-
-    fn proc_macro_hygiene(&self) -> bool {
-        self.features.map_or(false, |features| features.proc_macro_hygiene)
-    }
-    fn custom_inner_attributes(&self) -> bool {
-        self.features.map_or(false, |features| features.custom_inner_attributes)
-    }
-}
diff --git a/src/libsyntax/ext/mbe.rs b/src/libsyntax/ext/mbe.rs
deleted file mode 100644 (file)
index a87da79..0000000
+++ /dev/null
@@ -1,166 +0,0 @@
-//! This module implements declarative macros: old `macro_rules` and the newer
-//! `macro`. Declarative macros are also known as "macro by example", and that's
-//! why we call this module `mbe`. For external documentation, prefer the
-//! official terminology: "declarative macros".
-
-crate mod transcribe;
-crate mod macro_check;
-crate mod macro_parser;
-crate mod macro_rules;
-crate mod quoted;
-
-use crate::ast;
-use crate::parse::token::{self, Token, TokenKind};
-use crate::tokenstream::{DelimSpan};
-
-use syntax_pos::{BytePos, Span};
-
-use rustc_data_structures::sync::Lrc;
-
-/// Contains the sub-token-trees of a "delimited" token tree, such as the contents of a
-/// parenthesized group `( ... )`. Note that the delimiter itself might be `NoDelim`.
-#[derive(Clone, PartialEq, RustcEncodable, RustcDecodable, Debug)]
-struct Delimited {
-    delim: token::DelimToken,
-    tts: Vec<TokenTree>,
-}
-
-impl Delimited {
-    /// Returns a `self::TokenTree` with a `Span` corresponding to the opening delimiter.
-    fn open_tt(&self, span: Span) -> TokenTree {
-        let open_span = if span.is_dummy() {
-            span
-        } else {
-            span.with_hi(span.lo() + BytePos(self.delim.len() as u32))
-        };
-        TokenTree::token(token::OpenDelim(self.delim), open_span)
-    }
-
-    /// Returns a `self::TokenTree` with a `Span` corresponding to the closing delimiter.
-    fn close_tt(&self, span: Span) -> TokenTree {
-        let close_span = if span.is_dummy() {
-            span
-        } else {
-            span.with_lo(span.hi() - BytePos(self.delim.len() as u32))
-        };
-        TokenTree::token(token::CloseDelim(self.delim), close_span)
-    }
-}
-
-#[derive(Clone, PartialEq, RustcEncodable, RustcDecodable, Debug)]
-struct SequenceRepetition {
-    /// The sequence of token trees
-    tts: Vec<TokenTree>,
-    /// The optional separator
-    separator: Option<Token>,
-    /// Whether the sequence can be repeated zero or more times (*), one or more times (+),
-    /// or at most once (?)
-    kleene: KleeneToken,
-    /// The number of `Match`s that appear in the sequence (and subsequences)
-    num_captures: usize,
-}
-
-#[derive(Clone, PartialEq, RustcEncodable, RustcDecodable, Debug, Copy)]
-struct KleeneToken {
-    span: Span,
-    op: KleeneOp,
-}
-
-impl KleeneToken {
-    fn new(op: KleeneOp, span: Span) -> KleeneToken {
-        KleeneToken { span, op }
-    }
-}
-
-/// A Kleene-style [repetition operator](http://en.wikipedia.org/wiki/Kleene_star)
-/// for token sequences.
-#[derive(Clone, PartialEq, RustcEncodable, RustcDecodable, Hash, Debug, Copy)]
-enum KleeneOp {
-    /// Kleene star (`*`) for zero or more repetitions
-    ZeroOrMore,
-    /// Kleene plus (`+`) for one or more repetitions
-    OneOrMore,
-    /// Kleene optional (`?`) for zero or one repetitions
-    ZeroOrOne,
-}
-
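For reference, the three Kleene operators as they appear in a `macro_rules!` matcher and transcriber:

    macro_rules! kleene_demo {
        (star $($x:expr),*) => { 0 $(+ $x)* }; // `*`: zero or more repetitions
        (plus $($x:expr),+) => { 0 $(+ $x)+ }; // `+`: one or more repetitions
        (opt  $($x:expr)?)  => { 0 $(+ $x)? }; // `?`: zero or one repetition
    }

    fn main() {
        assert_eq!(kleene_demo!(star), 0);
        assert_eq!(kleene_demo!(plus 1, 2, 3), 6);
        assert_eq!(kleene_demo!(opt 7), 7);
    }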
-/// Similar to `tokenstream::TokenTree`, except that `$i`, `$i:ident`, and `$(...)`
-/// are "first-class" token trees. Useful for parsing macros.
-#[derive(Debug, Clone, PartialEq, RustcEncodable, RustcDecodable)]
-enum TokenTree {
-    Token(Token),
-    Delimited(DelimSpan, Lrc<Delimited>),
-    /// A kleene-style repetition sequence
-    Sequence(DelimSpan, Lrc<SequenceRepetition>),
-    /// e.g., `$var`
-    MetaVar(Span, ast::Ident),
-    /// e.g., `$var:expr`. This is only used in the left hand side of MBE macros.
-    MetaVarDecl(
-        Span,
-        ast::Ident, /* name to bind */
-        ast::Ident, /* kind of nonterminal */
-    ),
-}
-
-impl TokenTree {
-    /// Return the number of tokens in the tree.
-    fn len(&self) -> usize {
-        match *self {
-            TokenTree::Delimited(_, ref delimed) => match delimed.delim {
-                token::NoDelim => delimed.tts.len(),
-                _ => delimed.tts.len() + 2,
-            },
-            TokenTree::Sequence(_, ref seq) => seq.tts.len(),
-            _ => 0,
-        }
-    }
-
-    /// Returns `true` if the given token tree is delimited.
-    fn is_delimited(&self) -> bool {
-        match *self {
-            TokenTree::Delimited(..) => true,
-            _ => false,
-        }
-    }
-
-    /// Returns `true` if the given token tree is a token of the given kind.
-    fn is_token(&self, expected_kind: &TokenKind) -> bool {
-        match self {
-            TokenTree::Token(Token { kind: actual_kind, .. }) => actual_kind == expected_kind,
-            _ => false,
-        }
-    }
-
-    /// Gets the `index`-th sub-token-tree. This only makes sense for delimited trees and sequences.
-    fn get_tt(&self, index: usize) -> TokenTree {
-        match (self, index) {
-            (&TokenTree::Delimited(_, ref delimed), _) if delimed.delim == token::NoDelim => {
-                delimed.tts[index].clone()
-            }
-            (&TokenTree::Delimited(span, ref delimed), _) => {
-                if index == 0 {
-                    return delimed.open_tt(span.open);
-                }
-                if index == delimed.tts.len() + 1 {
-                    return delimed.close_tt(span.close);
-                }
-                delimed.tts[index - 1].clone()
-            }
-            (&TokenTree::Sequence(_, ref seq), _) => seq.tts[index].clone(),
-            _ => panic!("Cannot expand a token tree"),
-        }
-    }
-
-    /// Retrieves the `TokenTree`'s span.
-    fn span(&self) -> Span {
-        match *self {
-            TokenTree::Token(Token { span, .. })
-            | TokenTree::MetaVar(span, _)
-            | TokenTree::MetaVarDecl(span, _, _) => span,
-            TokenTree::Delimited(span, _) | TokenTree::Sequence(span, _) => span.entire(),
-        }
-    }
-
-    fn token(kind: TokenKind, span: Span) -> TokenTree {
-        TokenTree::Token(Token::new(kind, span))
-    }
-}
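For orientation, the sketch below is a standalone simplification, not the real `syntax` crate types; the enum shapes and field names are illustrative assumptions only. It shows roughly how a matcher fragment such as `$( $x:expr ),*` decomposes into the representation described above: a sequence repetition carrying a separator and a `ZeroOrMore` Kleene operator, wrapping a meta-variable declaration.

```rust
// Simplified, self-contained stand-ins for the mbe token-tree shapes above.
// These are NOT the compiler's types; they only mirror their structure.
#[derive(Debug, Clone, Copy, PartialEq)]
enum KleeneOp {
    ZeroOrMore,
    OneOrMore,
    ZeroOrOne,
}

#[derive(Debug)]
enum TokenTree {
    // A plain token, e.g. the `,` separator.
    Token(char),
    // A meta-variable declaration such as `$x:expr` (name, fragment kind).
    MetaVarDecl(String, String),
    // A Kleene repetition `$( ... ) sep op`.
    Sequence {
        tts: Vec<TokenTree>,
        separator: Option<char>,
        kleene: KleeneOp,
    },
}

fn main() {
    // Roughly how `$( $x:expr ),*` would look in this simplified shape.
    let matcher = TokenTree::Sequence {
        tts: vec![TokenTree::MetaVarDecl("x".to_string(), "expr".to_string())],
        separator: Some(','),
        kleene: KleeneOp::ZeroOrMore,
    };
    println!("{:?}", matcher);
}
```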
diff --git a/src/libsyntax/ext/mbe/macro_check.rs b/src/libsyntax/ext/mbe/macro_check.rs
deleted file mode 100644 (file)
index 97074f5..0000000
+++ /dev/null
@@ -1,626 +0,0 @@
-//! Checks that meta-variables in macro definition are correctly declared and used.
-//!
-//! # What is checked
-//!
-//! ## Meta-variables must not be bound twice
-//!
-//! ```
-//! macro_rules! foo { ($x:tt $x:tt) => { $x }; }
-//! ```
-//!
-//! This check is sound (no false-negative) and complete (no false-positive).
-//!
-//! ## Meta-variables must not be free
-//!
-//! ```
-//! macro_rules! foo { () => { $x }; }
-//! ```
-//!
-//! This check is also done at macro instantiation but only if the branch is taken.
-//!
-//! ## Meta-variables must repeat at least as many times as their binder
-//!
-//! ```
-//! macro_rules! foo { ($($x:tt)*) => { $x }; }
-//! ```
-//!
-//! This check is also done at macro instantiation but only if the branch is taken.
-//!
-//! ## Meta-variables must repeat with the same Kleene operators as their binder
-//!
-//! ```
-//! macro_rules! foo { ($($x:tt)+) => { $($x)* }; }
-//! ```
-//!
-//! This check is not done at macro instantiation.
-//!
-//! # Disclaimer
-//!
-//! In the presence of nested macros (a macro defined in a macro), those checks may have false
-//! positives and false negatives. We try to detect those cases by recognizing potential macro
-//! definitions in RHSes, but nested macros may be hidden through the use of particular values of
-//! meta-variables.
-//!
-//! ## Examples of false positive
-//!
-//! False positives can come from cases where we don't recognize a nested macro, because it depends
-//! on particular values of meta-variables. In the following example, we think both instances of
-//! `$x` are free, which is a correct statement if `$name` is anything but `macro_rules`. But when
-//! `$name` is `macro_rules`, like in the instantiation below, then `$x:tt` is actually a binder of
-//! the nested macro and `$x` is bound to it.
-//!
-//! ```
-//! macro_rules! foo { ($name:ident) => { $name! bar { ($x:tt) => { $x }; } }; }
-//! foo!(macro_rules);
-//! ```
-//!
-//! False positives can also come from cases where we think there is a nested macro while there
-//! isn't. In the following example, we think `$x` is free, which is incorrect because `bar` is not
-//! a nested macro since it is not evaluated as code by `stringify!`.
-//!
-//! ```
-//! macro_rules! foo { () => { stringify!(macro_rules! bar { () => { $x }; }) }; }
-//! ```
-//!
-//! ## Examples of false negative
-//!
-//! False negatives can come from cases where we don't recognize a meta-variable, because it depends
-//! on particular values of meta-variables. In the following example, we don't see that if `$d` is
-//! instantiated with `$` then `$d z` becomes `$z` in the nested macro definition and is thus a free
-//! meta-variable. Note however, that if `foo` is instantiated, then we would check the definition
-//! of `bar` and would see the issue.
-//!
-//! ```
-//! macro_rules! foo { ($d:tt) => { macro_rules! bar { ($y:tt) => { $d z }; } }; }
-//! ```
-//!
-//! # How it is checked
-//!
-//! There are 3 main functions: `check_binders`, `check_occurrences`, and `check_nested_macro`. They
-//! all need some kind of environment.
-//!
-//! ## Environments
-//!
-//! Environments are used to pass information.
-//!
-//! ### From LHS to RHS
-//!
-//! When checking a LHS with `check_binders`, we produce (and use) an environment for binders,
-//! namely `Binders`. This is a mapping from binder name to information about that binder: the span
-//! of the binder for error messages and the stack of Kleene operators under which it was bound in
-//! the LHS.
-//!
-//! This environment is used by both the LHS and RHS. The LHS uses it to detect duplicate binders.
-//! The RHS uses it to detect the other errors.
-//!
-//! ### From outer macro to inner macro
-//!
-//! When checking the RHS of an outer macro and we detect a nested macro definition, we push the
-//! current state, namely `MacroState`, to an environment of nested macro definitions. Each state
-//! stores the LHS binders when entering the macro definition as well as the stack of Kleene
-//! operators under which the inner macro is defined in the RHS.
-//!
-//! This environment is a stack representing the nesting of macro definitions. As such, the stack of
-//! Kleene operators under which a meta-variable is repeating is the concatenation of the stacks
-//! stored when entering a macro definition starting from the state in which the meta-variable is
-//! bound.
-use crate::ast::NodeId;
-use crate::early_buffered_lints::BufferedEarlyLintId;
-use crate::ext::mbe::{KleeneToken, TokenTree};
-use crate::parse::token::TokenKind;
-use crate::parse::token::{DelimToken, Token};
-use crate::parse::ParseSess;
-use crate::symbol::{kw, sym};
-
-use rustc_data_structures::fx::FxHashMap;
-use smallvec::SmallVec;
-use syntax_pos::{symbol::Ident, MultiSpan, Span};
-
-/// A stack represented as a linked list.
-///
-/// Such stacks are used for environments because they grow incrementally and are not mutable.
-enum Stack<'a, T> {
-    /// Empty stack.
-    Empty,
-    /// A non-empty stack.
-    Push {
-        /// The top element.
-        top: T,
-        /// The previous elements.
-        prev: &'a Stack<'a, T>,
-    },
-}
-
-impl<'a, T> Stack<'a, T> {
-    /// Returns whether a stack is empty.
-    fn is_empty(&self) -> bool {
-        match *self {
-            Stack::Empty => true,
-            _ => false,
-        }
-    }
-
-    /// Returns a new stack with an element on top.
-    fn push(&'a self, top: T) -> Stack<'a, T> {
-        Stack::Push { top, prev: self }
-    }
-}
-
-impl<'a, T> Iterator for &'a Stack<'a, T> {
-    type Item = &'a T;
-
-    // Iterates from top to bottom of the stack.
-    fn next(&mut self) -> Option<&'a T> {
-        match *self {
-            Stack::Empty => None,
-            Stack::Push { ref top, ref prev } => {
-                *self = prev;
-                Some(top)
-            }
-        }
-    }
-}
-
-impl From<&Stack<'_, KleeneToken>> for SmallVec<[KleeneToken; 1]> {
-    fn from(ops: &Stack<'_, KleeneToken>) -> SmallVec<[KleeneToken; 1]> {
-        let mut ops: SmallVec<[KleeneToken; 1]> = ops.cloned().collect();
-        // The stack is innermost on top. We want outermost first.
-        ops.reverse();
-        ops
-    }
-}
-
-/// Information attached to a meta-variable binder in LHS.
-struct BinderInfo {
-    /// The span of the meta-variable in LHS.
-    span: Span,
-    /// The stack of Kleene operators (outermost first).
-    ops: SmallVec<[KleeneToken; 1]>,
-}
-
-/// An environment of meta-variables to their binder information.
-type Binders = FxHashMap<Ident, BinderInfo>;
-
-/// The state at which we entered a macro definition in the RHS of another macro definition.
-struct MacroState<'a> {
-    /// The binders of the branch where we entered the macro definition.
-    binders: &'a Binders,
-    /// The stack of Kleene operators (outermost first) where we entered the macro definition.
-    ops: SmallVec<[KleeneToken; 1]>,
-}
-
-/// Checks that meta-variables are used correctly in a macro definition.
-///
-/// Arguments:
-/// - `sess` is used to emit diagnostics and lints
-/// - `node_id` is used to emit lints
-/// - `span` is used when no spans are available
-/// - `lhses` and `rhses` should have the same length and represent the macro definition
-pub(super) fn check_meta_variables(
-    sess: &ParseSess,
-    node_id: NodeId,
-    span: Span,
-    lhses: &[TokenTree],
-    rhses: &[TokenTree],
-) -> bool {
-    if lhses.len() != rhses.len() {
-        sess.span_diagnostic.span_bug(span, "length mismatch between LHSes and RHSes")
-    }
-    let mut valid = true;
-    for (lhs, rhs) in lhses.iter().zip(rhses.iter()) {
-        let mut binders = Binders::default();
-        check_binders(sess, node_id, lhs, &Stack::Empty, &mut binders, &Stack::Empty, &mut valid);
-        check_occurrences(sess, node_id, rhs, &Stack::Empty, &binders, &Stack::Empty, &mut valid);
-    }
-    valid
-}
-
-/// Checks `lhs` as part of the LHS of a macro definition, extends `binders` with new binders, and
-/// sets `valid` to false in case of errors.
-///
-/// Arguments:
-/// - `sess` is used to emit diagnostics and lints
-/// - `node_id` is used to emit lints
-/// - `lhs` is checked as part of a LHS
-/// - `macros` is the stack of possible outer macros
-/// - `binders` contains the binders of the LHS
-/// - `ops` is the stack of Kleene operators from the LHS
-/// - `valid` is set in case of errors
-fn check_binders(
-    sess: &ParseSess,
-    node_id: NodeId,
-    lhs: &TokenTree,
-    macros: &Stack<'_, MacroState<'_>>,
-    binders: &mut Binders,
-    ops: &Stack<'_, KleeneToken>,
-    valid: &mut bool,
-) {
-    match *lhs {
-        TokenTree::Token(..) => {}
-        // This can only happen when checking a nested macro because this LHS is then in the RHS of
-        // the outer macro. See ui/macros/macro-of-higher-order.rs where $y:$fragment in the
-        // LHS of the nested macro (and RHS of the outer macro) is parsed as MetaVar(y) Colon
-        // MetaVar(fragment) and not as MetaVarDecl(y, fragment).
-        TokenTree::MetaVar(span, name) => {
-            if macros.is_empty() {
-                sess.span_diagnostic.span_bug(span, "unexpected MetaVar in lhs");
-            }
-            // There are 3 possibilities:
-            if let Some(prev_info) = binders.get(&name) {
-                // 1. The meta-variable is already bound in the current LHS: This is an error.
-                let mut span = MultiSpan::from_span(span);
-                span.push_span_label(prev_info.span, "previous declaration".into());
-                buffer_lint(sess, span, node_id, "duplicate matcher binding");
-            } else if get_binder_info(macros, binders, name).is_none() {
-                // 2. The meta-variable is free: This is a binder.
-                binders.insert(name, BinderInfo { span, ops: ops.into() });
-            } else {
-                // 3. The meta-variable is bound: This is an occurrence.
-                check_occurrences(sess, node_id, lhs, macros, binders, ops, valid);
-            }
-        }
-        // Similarly, this can only happen when checking a toplevel macro.
-        TokenTree::MetaVarDecl(span, name, _kind) => {
-            if !macros.is_empty() {
-                sess.span_diagnostic.span_bug(span, "unexpected MetaVarDecl in nested lhs");
-            }
-            if let Some(prev_info) = get_binder_info(macros, binders, name) {
-                // Duplicate binders at the top-level macro definition are errors. The lint is only
-                // for nested macro definitions.
-                sess.span_diagnostic
-                    .struct_span_err(span, "duplicate matcher binding")
-                    .span_note(prev_info.span, "previous declaration was here")
-                    .emit();
-                *valid = false;
-            } else {
-                binders.insert(name, BinderInfo { span, ops: ops.into() });
-            }
-        }
-        TokenTree::Delimited(_, ref del) => {
-            for tt in &del.tts {
-                check_binders(sess, node_id, tt, macros, binders, ops, valid);
-            }
-        }
-        TokenTree::Sequence(_, ref seq) => {
-            let ops = ops.push(seq.kleene);
-            for tt in &seq.tts {
-                check_binders(sess, node_id, tt, macros, binders, &ops, valid);
-            }
-        }
-    }
-}
-
-/// Returns the binder information of a meta-variable.
-///
-/// Arguments:
-/// - `macros` is the stack of possible outer macros
-/// - `binders` contains the current binders
-/// - `name` is the name of the meta-variable we are looking for
-fn get_binder_info<'a>(
-    mut macros: &'a Stack<'a, MacroState<'a>>,
-    binders: &'a Binders,
-    name: Ident,
-) -> Option<&'a BinderInfo> {
-    binders.get(&name).or_else(|| macros.find_map(|state| state.binders.get(&name)))
-}
-
-/// Checks `rhs` as part of the RHS of a macro definition and sets `valid` to false in case of
-/// errors.
-///
-/// Arguments:
-/// - `sess` is used to emit diagnostics and lints
-/// - `node_id` is used to emit lints
-/// - `rhs` is checked as part of a RHS
-/// - `macros` is the stack of possible outer macros
-/// - `binders` contains the binders of the associated LHS
-/// - `ops` is the stack of Kleene operators from the RHS
-/// - `valid` is set in case of errors
-fn check_occurrences(
-    sess: &ParseSess,
-    node_id: NodeId,
-    rhs: &TokenTree,
-    macros: &Stack<'_, MacroState<'_>>,
-    binders: &Binders,
-    ops: &Stack<'_, KleeneToken>,
-    valid: &mut bool,
-) {
-    match *rhs {
-        TokenTree::Token(..) => {}
-        TokenTree::MetaVarDecl(span, _name, _kind) => {
-            sess.span_diagnostic.span_bug(span, "unexpected MetaVarDecl in rhs")
-        }
-        TokenTree::MetaVar(span, name) => {
-            check_ops_is_prefix(sess, node_id, macros, binders, ops, span, name);
-        }
-        TokenTree::Delimited(_, ref del) => {
-            check_nested_occurrences(sess, node_id, &del.tts, macros, binders, ops, valid);
-        }
-        TokenTree::Sequence(_, ref seq) => {
-            let ops = ops.push(seq.kleene);
-            check_nested_occurrences(sess, node_id, &seq.tts, macros, binders, &ops, valid);
-        }
-    }
-}
-
-/// Represents the processed prefix of a nested macro.
-#[derive(Clone, Copy, PartialEq, Eq)]
-enum NestedMacroState {
-    /// Nothing that matches a nested macro definition was processed yet.
-    Empty,
-    /// The token `macro_rules` was processed.
-    MacroRules,
-    /// The tokens `macro_rules!` were processed.
-    MacroRulesNot,
-    /// The tokens `macro_rules!` followed by a name were processed. The name may be either directly
-    /// an identifier or a meta-variable (that hopefully would be instantiated by an identifier).
-    MacroRulesNotName,
-    /// The keyword `macro` was processed.
-    Macro,
-    /// The keyword `macro` followed by a name was processed.
-    MacroName,
-    /// The keyword `macro` followed by a name and a token delimited by parentheses was processed.
-    MacroNameParen,
-}
-
-/// Checks `tts` as part of the RHS of a macro definition, tries to recognize nested macro
-/// definitions, and sets `valid` to false in case of errors.
-///
-/// Arguments:
-/// - `sess` is used to emit diagnostics and lints
-/// - `node_id` is used to emit lints
-/// - `tts` is checked as part of a RHS and may contain macro definitions
-/// - `macros` is the stack of possible outer macros
-/// - `binders` contains the binders of the associated LHS
-/// - `ops` is the stack of Kleene operators from the RHS
-/// - `valid` is set in case of errors
-fn check_nested_occurrences(
-    sess: &ParseSess,
-    node_id: NodeId,
-    tts: &[TokenTree],
-    macros: &Stack<'_, MacroState<'_>>,
-    binders: &Binders,
-    ops: &Stack<'_, KleeneToken>,
-    valid: &mut bool,
-) {
-    let mut state = NestedMacroState::Empty;
-    let nested_macros = macros.push(MacroState { binders, ops: ops.into() });
-    let mut nested_binders = Binders::default();
-    for tt in tts {
-        match (state, tt) {
-            (
-                NestedMacroState::Empty,
-                &TokenTree::Token(Token { kind: TokenKind::Ident(name, false), .. }),
-            ) => {
-                if name == sym::macro_rules {
-                    state = NestedMacroState::MacroRules;
-                } else if name == kw::Macro {
-                    state = NestedMacroState::Macro;
-                }
-            }
-            (
-                NestedMacroState::MacroRules,
-                &TokenTree::Token(Token { kind: TokenKind::Not, .. }),
-            ) => {
-                state = NestedMacroState::MacroRulesNot;
-            }
-            (
-                NestedMacroState::MacroRulesNot,
-                &TokenTree::Token(Token { kind: TokenKind::Ident(..), .. }),
-            ) => {
-                state = NestedMacroState::MacroRulesNotName;
-            }
-            (NestedMacroState::MacroRulesNot, &TokenTree::MetaVar(..)) => {
-                state = NestedMacroState::MacroRulesNotName;
-                // We check that the meta-variable is correctly used.
-                check_occurrences(sess, node_id, tt, macros, binders, ops, valid);
-            }
-            (NestedMacroState::MacroRulesNotName, &TokenTree::Delimited(_, ref del))
-            | (NestedMacroState::MacroName, &TokenTree::Delimited(_, ref del))
-                if del.delim == DelimToken::Brace =>
-            {
-                let legacy = state == NestedMacroState::MacroRulesNotName;
-                state = NestedMacroState::Empty;
-                let rest =
-                    check_nested_macro(sess, node_id, legacy, &del.tts, &nested_macros, valid);
-                // If we did not check the whole macro definition, then check the rest as if outside
-                // the macro definition.
-                check_nested_occurrences(
-                    sess,
-                    node_id,
-                    &del.tts[rest..],
-                    macros,
-                    binders,
-                    ops,
-                    valid,
-                );
-            }
-            (
-                NestedMacroState::Macro,
-                &TokenTree::Token(Token { kind: TokenKind::Ident(..), .. }),
-            ) => {
-                state = NestedMacroState::MacroName;
-            }
-            (NestedMacroState::Macro, &TokenTree::MetaVar(..)) => {
-                state = NestedMacroState::MacroName;
-                // We check that the meta-variable is correctly used.
-                check_occurrences(sess, node_id, tt, macros, binders, ops, valid);
-            }
-            (NestedMacroState::MacroName, &TokenTree::Delimited(_, ref del))
-                if del.delim == DelimToken::Paren =>
-            {
-                state = NestedMacroState::MacroNameParen;
-                nested_binders = Binders::default();
-                check_binders(
-                    sess,
-                    node_id,
-                    tt,
-                    &nested_macros,
-                    &mut nested_binders,
-                    &Stack::Empty,
-                    valid,
-                );
-            }
-            (NestedMacroState::MacroNameParen, &TokenTree::Delimited(_, ref del))
-                if del.delim == DelimToken::Brace =>
-            {
-                state = NestedMacroState::Empty;
-                check_occurrences(
-                    sess,
-                    node_id,
-                    tt,
-                    &nested_macros,
-                    &nested_binders,
-                    &Stack::Empty,
-                    valid,
-                );
-            }
-            (_, ref tt) => {
-                state = NestedMacroState::Empty;
-                check_occurrences(sess, node_id, tt, macros, binders, ops, valid);
-            }
-        }
-    }
-}
-
-/// Checks the body of a nested macro, returns where the check stopped, and sets `valid` to false in
-/// case of errors.
-///
-/// The token trees are checked as long as they look like a list of (LHS) => {RHS} token trees. This
-/// check is a best-effort to detect a macro definition. It returns the position in `tts` where we
-/// stopped checking because we detected we were not in a macro definition anymore.
-///
-/// Arguments:
-/// - `sess` is used to emit diagnostics and lints
-/// - `node_id` is used to emit lints
-/// - `legacy` specifies whether the macro is legacy
-/// - `tts` is checked as a list of (LHS) => {RHS}
-/// - `macros` is the stack of outer macros
-/// - `valid` is set in case of errors
-fn check_nested_macro(
-    sess: &ParseSess,
-    node_id: NodeId,
-    legacy: bool,
-    tts: &[TokenTree],
-    macros: &Stack<'_, MacroState<'_>>,
-    valid: &mut bool,
-) -> usize {
-    let n = tts.len();
-    let mut i = 0;
-    let separator = if legacy { TokenKind::Semi } else { TokenKind::Comma };
-    loop {
-        // We expect 3 token trees: `(LHS) => {RHS}`. The separator is checked after.
-        if i + 2 >= n
-            || !tts[i].is_delimited()
-            || !tts[i + 1].is_token(&TokenKind::FatArrow)
-            || !tts[i + 2].is_delimited()
-        {
-            break;
-        }
-        let lhs = &tts[i];
-        let rhs = &tts[i + 2];
-        let mut binders = Binders::default();
-        check_binders(sess, node_id, lhs, macros, &mut binders, &Stack::Empty, valid);
-        check_occurrences(sess, node_id, rhs, macros, &binders, &Stack::Empty, valid);
-        // Since the last semicolon is optional for legacy macros and `decl_macro`s are not terminated,
-        // we increment our checked position by how many token trees we already checked (the 3
-        // above) before checking for the separator.
-        i += 3;
-        if i == n || !tts[i].is_token(&separator) {
-            break;
-        }
-        // We increment our checked position for the semicolon.
-        i += 1;
-    }
-    i
-}
-
-/// Checks that a meta-variable occurrence is valid.
-///
-/// Arguments:
-/// - `sess` is used to emit diagnostics and lints
-/// - `node_id` is used to emit lints
-/// - `macros` is the stack of possible outer macros
-/// - `binders` contains the binders of the associated LHS
-/// - `ops` is the stack of Kleene operators from the RHS
-/// - `span` is the span of the meta-variable to check
-/// - `name` is the name of the meta-variable to check
-fn check_ops_is_prefix(
-    sess: &ParseSess,
-    node_id: NodeId,
-    macros: &Stack<'_, MacroState<'_>>,
-    binders: &Binders,
-    ops: &Stack<'_, KleeneToken>,
-    span: Span,
-    name: Ident,
-) {
-    let macros = macros.push(MacroState { binders, ops: ops.into() });
-    // Accumulates the stacks the operators of each state until (and including when) the
-    // meta-variable is found. The innermost stack is first.
-    let mut acc: SmallVec<[&SmallVec<[KleeneToken; 1]>; 1]> = SmallVec::new();
-    for state in &macros {
-        acc.push(&state.ops);
-        if let Some(binder) = state.binders.get(&name) {
-            // This variable concatenates the stack of operators from the RHS of the LHS where the
-            // meta-variable was defined to where it is used (in possibly nested macros). The
-            // outermost operator is first.
-            let mut occurrence_ops: SmallVec<[KleeneToken; 2]> = SmallVec::new();
-            // We need to iterate from the end so that we start with the outermost stack.
-            for ops in acc.iter().rev() {
-                occurrence_ops.extend_from_slice(ops);
-            }
-            ops_is_prefix(sess, node_id, span, name, &binder.ops, &occurrence_ops);
-            return;
-        }
-    }
-    buffer_lint(sess, span.into(), node_id, &format!("unknown macro variable `{}`", name));
-}
-
-/// Returns whether `binder_ops` is a prefix of `occurrence_ops`.
-///
-/// The stack of Kleene operators of a meta-variable occurrence just needs to have the stack of
-/// Kleene operators of its binder as a prefix.
-///
-/// Consider $i in the following example:
-///
-///     ( $( $i:ident = $($j:ident),+ );* ) => { $($( $i += $j; )+)* }
-///
-/// It occurs under the Kleene stack ["*", "+"] and is bound under ["*"] only.
-///
-/// Arguments:
-/// - `sess` is used to emit diagnostics and lints
-/// - `node_id` is used to emit lints
-/// - `span` is the span of the meta-variable being checked
-/// - `name` is the name of the meta-variable being checked
-/// - `binder_ops` is the stack of Kleene operators for the binder
-/// - `occurrence_ops` is the stack of Kleene operators for the occurrence
-fn ops_is_prefix(
-    sess: &ParseSess,
-    node_id: NodeId,
-    span: Span,
-    name: Ident,
-    binder_ops: &[KleeneToken],
-    occurrence_ops: &[KleeneToken],
-) {
-    for (i, binder) in binder_ops.iter().enumerate() {
-        if i >= occurrence_ops.len() {
-            let mut span = MultiSpan::from_span(span);
-            span.push_span_label(binder.span, "expected repetition".into());
-            let message = &format!("variable '{}' is still repeating at this depth", name);
-            buffer_lint(sess, span, node_id, message);
-            return;
-        }
-        let occurrence = &occurrence_ops[i];
-        if occurrence.op != binder.op {
-            let mut span = MultiSpan::from_span(span);
-            span.push_span_label(binder.span, "expected repetition".into());
-            span.push_span_label(occurrence.span, "conflicting repetition".into());
-            let message = "meta-variable repeats with different Kleene operator";
-            buffer_lint(sess, span, node_id, message);
-            return;
-        }
-    }
-}
-
-fn buffer_lint(sess: &ParseSess, span: MultiSpan, node_id: NodeId, message: &str) {
-    sess.buffer_lint(BufferedEarlyLintId::MetaVariableMisuse, span, node_id, message);
-}
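To make the checks above concrete, here are a few `macro_rules!` definitions of the kinds described in the module documentation (duplicate binders, free meta-variables, mismatched Kleene operators), plus one that passes. The offending definitions are left commented out so the snippet still compiles as a sketch; the notes in the comments are paraphrases of the documented behaviour, not verbatim compiler output.

```rust
// Duplicate matcher binding: `$x` is bound twice in the same LHS.
// macro_rules! dup { ($x:tt $x:tt) => { $x }; }

// Free meta-variable: `$x` is used in the RHS but never bound in the LHS.
// macro_rules! free { () => { $x }; }

// Kleene-operator mismatch: bound under `+` but repeated under `*`.
// macro_rules! mismatch { ($($x:tt)+) => { $($x)* }; }

// A definition that satisfies all three checks: the occurrence repeats with
// the same Kleene operator and nesting depth as its binder.
macro_rules! ok {
    ($($x:expr),*) => { [ $($x),* ] };
}

fn main() {
    let xs = ok!(1, 2, 3);
    assert_eq!(xs, [1, 2, 3]);
}
```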
diff --git a/src/libsyntax/ext/mbe/macro_parser.rs b/src/libsyntax/ext/mbe/macro_parser.rs
deleted file mode 100644 (file)
index d1c50fd..0000000
+++ /dev/null
@@ -1,940 +0,0 @@
-//! This is an NFA-based parser, which calls out to the main Rust parser for named non-terminals
-//! (which it commits to fully when it hits one in a grammar). There's a set of current NFA threads
-//! and a set of next ones. Instead of NTs, we have a special case for Kleene star. The big-O, in
-//! pathological cases, is worse than traditional use of NFA or Earley parsing, but it's an easier
-//! fit for Macro-by-Example-style rules.
-//!
-//! (In order to prevent the pathological case, we'd need to lazily construct the resulting
-//! `NamedMatch`es at the very end. It'd be a pain, and require more memory to keep around old
-//! items, but it would also save overhead)
-//!
-//! We don't say this parser uses the Earley algorithm, because that description would be inaccurate.
-//! The macro parser restricts itself to the features of finite state automata, while Earley parsers
-//! can be described as an extension of NFAs with completion rules, prediction rules, and recursion.
-//!
-//! Quick intro to how the parser works:
-//!
-//! A 'position' marks a point in the middle of a matcher, usually represented as a
-//! dot (`·`). For example `· a $( a )* a b` is a position, as is `a $( · a )* a b`.
-//!
-//! The parser walks through the input one token at a time, maintaining a list
-//! of threads consistent with the current position in the input: `cur_items`.
-//!
-//! As it processes them, it fills up `eof_items` with threads that would be valid if
-//! the macro invocation is now over, `bb_items` with threads that are waiting on
-//! a Rust non-terminal like `$e:expr`, and `next_items` with threads that are waiting
-//! on a particular token. Most of the logic concerns moving the · through the
-//! repetitions indicated by Kleene stars. The rules for moving the · without
-//! consuming any input are called epsilon transitions. It only advances or calls
-//! out to the real Rust parser when no `cur_items` threads remain.
-//!
-//! Example:
-//!
-//! ```text, ignore
-//! Start parsing a a a a b against [· a $( a )* a b].
-//!
-//! Remaining input: a a a a b
-//! next: [· a $( a )* a b]
-//!
-//! - - - Advance over an a. - - -
-//!
-//! Remaining input: a a a b
-//! cur: [a · $( a )* a b]
-//! Descend/Skip (first item).
-//! next: [a $( · a )* a b]  [a $( a )* · a b].
-//!
-//! - - - Advance over an a. - - -
-//!
-//! Remaining input: a a b
-//! cur: [a $( a · )* a b]  [a $( a )* a · b]
-//! Follow epsilon transition: Finish/Repeat (first item)
-//! next: [a $( a )* · a b]  [a $( · a )* a b]  [a $( a )* a · b]
-//!
-//! - - - Advance over an a. - - - (this looks exactly like the last step)
-//!
-//! Remaining input: a b
-//! cur: [a $( a · )* a b]  [a $( a )* a · b]
-//! Follow epsilon transition: Finish/Repeat (first item)
-//! next: [a $( a )* · a b]  [a $( · a )* a b]  [a $( a )* a · b]
-//!
-//! - - - Advance over an a. - - - (this looks exactly like the last step)
-//!
-//! Remaining input: b
-//! cur: [a $( a · )* a b]  [a $( a )* a · b]
-//! Follow epsilon transition: Finish/Repeat (first item)
-//! next: [a $( a )* · a b]  [a $( · a )* a b]  [a $( a )* a · b]
-//!
-//! - - - Advance over a b. - - -
-//!
-//! Remaining input: ''
-//! eof: [a $( a )* a b ·]
-//! ```
-
-crate use NamedMatch::*;
-crate use ParseResult::*;
-use TokenTreeOrTokenTreeSlice::*;
-
-use crate::ast::{Ident, Name};
-use crate::ext::mbe::{self, TokenTree};
-use crate::parse::{Directory, ParseSess};
-use crate::parse::parser::{Parser, PathStyle};
-use crate::parse::token::{self, DocComment, Nonterminal, Token};
-use crate::print::pprust;
-use crate::symbol::{kw, sym, Symbol};
-use crate::tokenstream::{DelimSpan, TokenStream};
-
-use errors::FatalError;
-use smallvec::{smallvec, SmallVec};
-use syntax_pos::Span;
-
-use rustc_data_structures::fx::FxHashMap;
-use rustc_data_structures::sync::Lrc;
-use std::collections::hash_map::Entry::{Occupied, Vacant};
-use std::mem;
-use std::ops::{Deref, DerefMut};
-
-// To avoid costly uniqueness checks, we require that `MatchSeq` always has a nonempty body.
-
-/// Either a sequence of token trees or a single one. This is used as the representation of the
-/// sequence of tokens that make up a matcher.
-#[derive(Clone)]
-enum TokenTreeOrTokenTreeSlice<'tt> {
-    Tt(TokenTree),
-    TtSeq(&'tt [TokenTree]),
-}
-
-impl<'tt> TokenTreeOrTokenTreeSlice<'tt> {
-    /// Returns the number of constituent top-level token trees of `self` (top-level in that it
-    /// will not recursively descend into subtrees).
-    fn len(&self) -> usize {
-        match *self {
-            TtSeq(ref v) => v.len(),
-            Tt(ref tt) => tt.len(),
-        }
-    }
-
-    /// The `index`-th token tree of `self`.
-    fn get_tt(&self, index: usize) -> TokenTree {
-        match *self {
-            TtSeq(ref v) => v[index].clone(),
-            Tt(ref tt) => tt.get_tt(index),
-        }
-    }
-}
-
-/// An unzipping of `TokenTree`s... see the `stack` field of `MatcherPos`.
-///
-/// This is used by `inner_parse_loop` to keep track of delimited submatchers that we have
-/// descended into.
-#[derive(Clone)]
-struct MatcherTtFrame<'tt> {
-    /// The "parent" matcher that we are descending into.
-    elts: TokenTreeOrTokenTreeSlice<'tt>,
-    /// The position of the "dot" in `elts` at the time we descended.
-    idx: usize,
-}
-
-type NamedMatchVec = SmallVec<[NamedMatch; 4]>;
-
-/// Represents a single "position" (aka "matcher position", aka "item"), as
-/// described in the module documentation.
-///
-/// Here:
-///
-/// - `'root` represents the lifetime of the stack slot that holds the root
-///   `MatcherPos`. As described in `MatcherPosHandle`, the root `MatcherPos`
-///   structure is stored on the stack, but subsequent instances are put into
-///   the heap.
-/// - `'tt` represents the lifetime of the token trees that this matcher
-///   position refers to.
-///
-/// It is important to distinguish these two lifetimes because we have a
-/// `SmallVec<TokenTreeOrTokenTreeSlice<'tt>>` below, and the destructor of
-/// that is considered to possibly access the data from its elements (it lacks
-/// a `#[may_dangle]` attribute). As a result, the compiler needs to know that
-/// all the elements in that `SmallVec` strictly outlive the root stack slot
-/// lifetime. By separating `'tt` from `'root`, we can show that.
-#[derive(Clone)]
-struct MatcherPos<'root, 'tt> {
-    /// The token or sequence of tokens that make up the matcher
-    top_elts: TokenTreeOrTokenTreeSlice<'tt>,
-
-    /// The position of the "dot" in this matcher
-    idx: usize,
-
-    /// The first span of source that the beginning of this matcher corresponds to. In other
-    /// words, the token in the source whose span is `sp_open` is matched against the first token of
-    /// the matcher.
-    sp_open: Span,
-
-    /// For each named metavar in the matcher, we keep track of token trees matched against the
-    /// metavar by the black box parser. In particular, there may be more than one match per
-    /// metavar if we are in a repetition (each repetition matches each of the variables).
-    /// Moreover, matchers and repetitions can be nested; the `matches` field is shared (hence the
-    /// `Rc`) among all "nested" matchers. `match_lo`, `match_cur`, and `match_hi` keep track of
-    /// the current position of the `self` matcher position in the shared `matches` list.
-    ///
-    /// Also, note that while we are descending into a sequence, matchers are given their own
-    /// `matches` vector. Only once we reach the end of a full repetition of the sequence do we add
-    /// all bound matches from the submatcher into the shared top-level `matches` vector. If `sep`
-    /// and `up` are `Some`, then `matches` is _not_ the shared top-level list. Instead, if one
-    /// wants the shared `matches`, one should use `up.matches`.
-    matches: Box<[Lrc<NamedMatchVec>]>,
-    /// The position in `matches` corresponding to the first metavar in this matcher's sequence of
-    /// token trees. In other words, the first metavar in the first token of `top_elts` corresponds
-    /// to `matches[match_lo]`.
-    match_lo: usize,
-    /// The position in `matches` corresponding to the metavar we are currently trying to match
-    /// against the source token stream. `match_lo <= match_cur <= match_hi`.
-    match_cur: usize,
-    /// Similar to `match_lo` except `match_hi` is the position in `matches` of the _last_ metavar
-    /// in this matcher.
-    match_hi: usize,
-
-    // The following fields are used if we are matching a repetition. If we aren't, they should be
-    // `None`.
-
-    /// The KleeneOp of this sequence if we are in a repetition.
-    seq_op: Option<mbe::KleeneOp>,
-
-    /// The separator if we are in a repetition.
-    sep: Option<Token>,
-
-    /// The "parent" matcher position if we are in a repetition. That is, the matcher position just
-    /// before we enter the sequence.
-    up: Option<MatcherPosHandle<'root, 'tt>>,
-
-    /// Specifically used to "unzip" token trees. By "unzip", we mean to unwrap the delimiters from
-    /// a delimited token tree (e.g., something wrapped in `(` `)`) or to get the contents of a doc
-    /// comment...
-    ///
-    /// When matching against matchers with nested delimited submatchers (e.g., `pat ( pat ( .. )
-    /// pat ) pat`), we need to keep track of the matchers we are descending into. This stack does
-    /// that where the bottom of the stack is the outermost matcher.
-    /// Also, throughout the comments, this "descent" is often referred to as "unzipping"...
-    stack: SmallVec<[MatcherTtFrame<'tt>; 1]>,
-}
-
-impl<'root, 'tt> MatcherPos<'root, 'tt> {
-    /// Adds `m` as a named match for the `idx`-th metavar.
-    fn push_match(&mut self, idx: usize, m: NamedMatch) {
-        let matches = Lrc::make_mut(&mut self.matches[idx]);
-        matches.push(m);
-    }
-}
-
-// Lots of MatcherPos instances are created at runtime. Allocating them on the
-// heap is slow. Furthermore, using SmallVec<MatcherPos> to allocate them all
-// on the stack is also slow, because MatcherPos is quite a large type and
-// instances get moved around a lot between vectors, which requires lots of
-// slow memcpy calls.
-//
-// Therefore, the initial MatcherPos is always allocated on the stack,
-// subsequent ones (of which there aren't that many) are allocated on the heap,
-// and this type is used to encapsulate both cases.
-enum MatcherPosHandle<'root, 'tt> {
-    Ref(&'root mut MatcherPos<'root, 'tt>),
-    Box(Box<MatcherPos<'root, 'tt>>),
-}
-
-impl<'root, 'tt> Clone for MatcherPosHandle<'root, 'tt> {
-    // This always produces a new Box.
-    fn clone(&self) -> Self {
-        MatcherPosHandle::Box(match *self {
-            MatcherPosHandle::Ref(ref r) => Box::new((**r).clone()),
-            MatcherPosHandle::Box(ref b) => b.clone(),
-        })
-    }
-}
-
-impl<'root, 'tt> Deref for MatcherPosHandle<'root, 'tt> {
-    type Target = MatcherPos<'root, 'tt>;
-    fn deref(&self) -> &Self::Target {
-        match *self {
-            MatcherPosHandle::Ref(ref r) => r,
-            MatcherPosHandle::Box(ref b) => b,
-        }
-    }
-}
-
-impl<'root, 'tt> DerefMut for MatcherPosHandle<'root, 'tt> {
-    fn deref_mut(&mut self) -> &mut MatcherPos<'root, 'tt> {
-        match *self {
-            MatcherPosHandle::Ref(ref mut r) => r,
-            MatcherPosHandle::Box(ref mut b) => b,
-        }
-    }
-}
-
-/// Represents the possible results of an attempted parse.
-crate enum ParseResult<T> {
-    /// Parsed successfully.
-    Success(T),
-    /// Arm failed to match. If the second parameter is `token::Eof`, it indicates an unexpected
-    /// end of macro invocation. Otherwise, it indicates that no rules expected the given token.
-    Failure(Token, &'static str),
-    /// Fatal error (malformed macro?). Abort compilation.
-    Error(syntax_pos::Span, String),
-}
-
-/// A `ParseResult` where the `Success` variant contains a mapping of `Ident`s to `NamedMatch`es.
-/// This represents the mapping of metavars to the token trees they bind to.
-crate type NamedParseResult = ParseResult<FxHashMap<Ident, NamedMatch>>;
-
-/// Count how many metavars are named in the given matcher `ms`.
-pub(super) fn count_names(ms: &[TokenTree]) -> usize {
-    ms.iter().fold(0, |count, elt| {
-        count + match *elt {
-            TokenTree::Sequence(_, ref seq) => seq.num_captures,
-            TokenTree::Delimited(_, ref delim) => count_names(&delim.tts),
-            TokenTree::MetaVar(..) => 0,
-            TokenTree::MetaVarDecl(..) => 1,
-            TokenTree::Token(..) => 0,
-        }
-    })
-}
-
-/// `len` `Vec`s (initially shared and empty) that will store matches of metavars.
-fn create_matches(len: usize) -> Box<[Lrc<NamedMatchVec>]> {
-    if len == 0 {
-        vec![]
-    } else {
-        let empty_matches = Lrc::new(SmallVec::new());
-        vec![empty_matches; len]
-    }.into_boxed_slice()
-}
-
-/// Generates the top-level matcher position in which the "dot" is before the first token of the
-/// matcher `ms` and we are going to start matching at the span `open` in the source.
-fn initial_matcher_pos<'root, 'tt>(ms: &'tt [TokenTree], open: Span) -> MatcherPos<'root, 'tt> {
-    let match_idx_hi = count_names(ms);
-    let matches = create_matches(match_idx_hi);
-    MatcherPos {
-        // Start with the top level matcher given to us
-        top_elts: TtSeq(ms), // "elts" is an abbr. for "elements"
-        // The "dot" is before the first token of the matcher
-        idx: 0,
-        // We start matching at the span `open` in the source code
-        sp_open: open,
-
-        // Initialize `matches` to a bunch of empty `Vec`s -- one for each metavar in `top_elts`.
-        // `match_lo` for `top_elts` is 0 and `match_hi` is `matches.len()`. `match_cur` is 0 since
-        // we haven't actually matched anything yet.
-        matches,
-        match_lo: 0,
-        match_cur: 0,
-        match_hi: match_idx_hi,
-
-        // Haven't descended into any delimiters, so empty stack
-        stack: smallvec![],
-
-        // Haven't descended into any sequences, so both of these are `None`.
-        seq_op: None,
-        sep: None,
-        up: None,
-    }
-}
-
-/// `NamedMatch` is a pattern-match result for a single `token::MATCH_NONTERMINAL`:
-/// so it is associated with a single ident in a parse, and all
-/// `MatchedNonterminal`s in the `NamedMatch` have the same non-terminal type
-/// (expr, item, etc). Each leaf in a single `NamedMatch` corresponds to a
-/// single `token::MATCH_NONTERMINAL` in the `TokenTree` that produced it.
-///
-/// The in-memory structure of a particular `NamedMatch` represents the match
-/// that occurred when a particular subset of a matcher was applied to a
-/// particular token tree.
-///
-/// The width of each `MatchedSeq` in the `NamedMatch`, and the identity of
-/// the `MatchedNonterminal`s, will depend on the token tree it was applied
-/// to: each `MatchedSeq` corresponds to a single `TTSeq` in the originating
-/// token tree. The depth of the `NamedMatch` structure will therefore depend
-/// only on the nesting depth of `ast::TTSeq`s in the originating
-/// token tree it was derived from.
-#[derive(Debug, Clone)]
-crate enum NamedMatch {
-    MatchedSeq(Lrc<NamedMatchVec>, DelimSpan),
-    MatchedNonterminal(Lrc<Nonterminal>),
-}
-
-/// Takes a sequence of token trees `ms` representing a matcher which successfully matched input
-/// and an iterator of items that matched input and produces a `NamedParseResult`.
-fn nameize<I: Iterator<Item = NamedMatch>>(
-    sess: &ParseSess,
-    ms: &[TokenTree],
-    mut res: I,
-) -> NamedParseResult {
-    // Recursively descend into each type of matcher (e.g., sequences, delimited, metavars) and make
-    // sure that each metavar has _exactly one_ binding. If a metavar does not have exactly one
-    // binding, then there is an error. If it does, then we insert the binding into the
-    // `NamedParseResult`.
-    fn n_rec<I: Iterator<Item = NamedMatch>>(
-        sess: &ParseSess,
-        m: &TokenTree,
-        res: &mut I,
-        ret_val: &mut FxHashMap<Ident, NamedMatch>,
-    ) -> Result<(), (syntax_pos::Span, String)> {
-        match *m {
-            TokenTree::Sequence(_, ref seq) => for next_m in &seq.tts {
-                n_rec(sess, next_m, res.by_ref(), ret_val)?
-            },
-            TokenTree::Delimited(_, ref delim) => for next_m in &delim.tts {
-                n_rec(sess, next_m, res.by_ref(), ret_val)?;
-            },
-            TokenTree::MetaVarDecl(span, _, id) if id.name == kw::Invalid => {
-                if sess.missing_fragment_specifiers.borrow_mut().remove(&span) {
-                    return Err((span, "missing fragment specifier".to_string()));
-                }
-            }
-            TokenTree::MetaVarDecl(sp, bind_name, _) => {
-                match ret_val.entry(bind_name) {
-                    Vacant(spot) => {
-                        spot.insert(res.next().unwrap());
-                    }
-                    Occupied(..) => {
-                        return Err((sp, format!("duplicated bind name: {}", bind_name)))
-                    }
-                }
-            }
-            TokenTree::MetaVar(..) | TokenTree::Token(..) => (),
-        }
-
-        Ok(())
-    }
-
-    let mut ret_val = FxHashMap::default();
-    for m in ms {
-        match n_rec(sess, m, res.by_ref(), &mut ret_val) {
-            Ok(_) => {}
-            Err((sp, msg)) => return Error(sp, msg),
-        }
-    }
-
-    Success(ret_val)
-}
-
-/// Performs a token equality check, ignoring syntax context (that is, an unhygienic comparison)
-fn token_name_eq(t1: &Token, t2: &Token) -> bool {
-    if let (Some((ident1, is_raw1)), Some((ident2, is_raw2))) = (t1.ident(), t2.ident()) {
-        ident1.name == ident2.name && is_raw1 == is_raw2
-    } else if let (Some(ident1), Some(ident2)) = (t1.lifetime(), t2.lifetime()) {
-        ident1.name == ident2.name
-    } else {
-        t1.kind == t2.kind
-    }
-}
-
-/// Process the matcher positions of `cur_items` until it is empty. In the process, this will
-/// produce more items in `next_items`, `eof_items`, and `bb_items`.
-///
-/// For more info about how this happens, see the module-level doc comments and the inline
-/// comments of this function.
-///
-/// # Parameters
-///
-/// - `sess`: the parsing session into which errors are emitted.
-/// - `cur_items`: the set of current items to be processed. This should be empty by the end of a
-///   successful execution of this function.
-/// - `next_items`: the set of newly generated items. These are used to replenish `cur_items` in
-///   the function `parse`.
-/// - `eof_items`: the set of items that would be valid if this was the EOF.
-/// - `bb_items`: the set of items that are waiting for the black-box parser.
-/// - `token`: the current token of the parser.
-/// - `span`: the `Span` in the source code corresponding to the token trees we are trying to match
-///   against the matcher positions in `cur_items`.
-///
-/// # Returns
-///
-/// A `ParseResult`. Note that matches are kept track of through the items generated.
-fn inner_parse_loop<'root, 'tt>(
-    sess: &ParseSess,
-    cur_items: &mut SmallVec<[MatcherPosHandle<'root, 'tt>; 1]>,
-    next_items: &mut Vec<MatcherPosHandle<'root, 'tt>>,
-    eof_items: &mut SmallVec<[MatcherPosHandle<'root, 'tt>; 1]>,
-    bb_items: &mut SmallVec<[MatcherPosHandle<'root, 'tt>; 1]>,
-    token: &Token,
-) -> ParseResult<()> {
-    // Pop items from `cur_items` until it is empty.
-    while let Some(mut item) = cur_items.pop() {
-        // When unzipped trees end, remove them. This corresponds to backtracking out of a
-        // delimited submatcher into which we already descended. In backtracking out again, we need
-        // to advance the "dot" past the delimiters in the outer matcher.
-        while item.idx >= item.top_elts.len() {
-            match item.stack.pop() {
-                Some(MatcherTtFrame { elts, idx }) => {
-                    item.top_elts = elts;
-                    item.idx = idx + 1;
-                }
-                None => break,
-            }
-        }
-
-        // Get the current position of the "dot" (`idx`) in `item` and the number of token trees in
-        // the matcher (`len`).
-        let idx = item.idx;
-        let len = item.top_elts.len();
-
-        // If `idx >= len`, then we are at or past the end of the matcher of `item`.
-        if idx >= len {
-            // We are repeating iff there is a parent. If the matcher is inside of a repetition,
-            // then we could be at the end of a sequence or at the beginning of the next
-            // repetition.
-            if item.up.is_some() {
-                // At this point, regardless of whether there is a separator, we should add all
-                // matches from the complete repetition of the sequence to the shared, top-level
-                // `matches` list (actually, `up.matches`, which could itself not be the top-level,
-                // but anyway...). Moreover, we add another item to `cur_items` in which the "dot"
-                // is at the end of the `up` matcher. This ensures that the "dot" in the `up`
-                // matcher is also advanced sufficiently.
-                //
-                // NOTE: removing the condition `idx == len` allows trailing separators.
-                if idx == len {
-                    // Get the `up` matcher
-                    let mut new_pos = item.up.clone().unwrap();
-
-                    // Add matches from this repetition to the `matches` of `up`
-                    for idx in item.match_lo..item.match_hi {
-                        let sub = item.matches[idx].clone();
-                        let span = DelimSpan::from_pair(item.sp_open, token.span);
-                        new_pos.push_match(idx, MatchedSeq(sub, span));
-                    }
-
-                    // Move the "dot" past the repetition in `up`
-                    new_pos.match_cur = item.match_hi;
-                    new_pos.idx += 1;
-                    cur_items.push(new_pos);
-                }
-
-                // Check if we need a separator.
-                if idx == len && item.sep.is_some() {
-                    // We have a separator, and it is the current token. We can advance past the
-                    // separator token.
-                    if item.sep
-                        .as_ref()
-                        .map(|sep| token_name_eq(token, sep))
-                        .unwrap_or(false)
-                    {
-                        item.idx += 1;
-                        next_items.push(item);
-                    }
-                }
-                // We don't need a separator. Move the "dot" back to the beginning of the matcher
-                // and try to match again UNLESS we are only allowed to have _one_ repetition.
-                else if item.seq_op != Some(mbe::KleeneOp::ZeroOrOne) {
-                    item.match_cur = item.match_lo;
-                    item.idx = 0;
-                    cur_items.push(item);
-                }
-            }
-            // If we are not in a repetition, then being at the end of a matcher means that we have
-            // reached the potential end of the input.
-            else {
-                eof_items.push(item);
-            }
-        }
-        // We are in the middle of a matcher.
-        else {
-            // Look at what token in the matcher we are trying to match the current token (`token`)
-            // against. Depending on that, we may generate new items.
-            match item.top_elts.get_tt(idx) {
-                // Need to descend into a sequence
-                TokenTree::Sequence(sp, seq) => {
-                    // Examine the case where there are 0 matches of this sequence. We are
-                    // implicitly disallowing OneOrMore from having 0 matches here. Thus, that will
-                    // result in a "no rules expected token" error by virtue of this matcher not
-                    // working.
-                    if seq.kleene.op == mbe::KleeneOp::ZeroOrMore
-                        || seq.kleene.op == mbe::KleeneOp::ZeroOrOne
-                    {
-                        let mut new_item = item.clone();
-                        new_item.match_cur += seq.num_captures;
-                        new_item.idx += 1;
-                        for idx in item.match_cur..item.match_cur + seq.num_captures {
-                            new_item.push_match(idx, MatchedSeq(Lrc::new(smallvec![]), sp));
-                        }
-                        cur_items.push(new_item);
-                    }
-
-                    let matches = create_matches(item.matches.len());
-                    cur_items.push(MatcherPosHandle::Box(Box::new(MatcherPos {
-                        stack: smallvec![],
-                        sep: seq.separator.clone(),
-                        seq_op: Some(seq.kleene.op),
-                        idx: 0,
-                        matches,
-                        match_lo: item.match_cur,
-                        match_cur: item.match_cur,
-                        match_hi: item.match_cur + seq.num_captures,
-                        up: Some(item),
-                        sp_open: sp.open,
-                        top_elts: Tt(TokenTree::Sequence(sp, seq)),
-                    })));
-                }
-
-                // We need to match a metavar (but the identifier is invalid)... this is an error
-                TokenTree::MetaVarDecl(span, _, id) if id.name == kw::Invalid => {
-                    if sess.missing_fragment_specifiers.borrow_mut().remove(&span) {
-                        return Error(span, "missing fragment specifier".to_string());
-                    }
-                }
-
-                // We need to match a metavar with a valid ident... call out to the black-box
-                // parser by adding an item to `bb_items`.
-                TokenTree::MetaVarDecl(_, _, id) => {
-                    // Built-in nonterminals never start with these tokens,
-                    // so we can eliminate them from consideration.
-                    if may_begin_with(token, id.name) {
-                        bb_items.push(item);
-                    }
-                }
-
-                // We need to descend into a delimited submatcher or a doc comment. To do this, we
-                // push the current matcher onto a stack and push a new item containing the
-                // submatcher onto `cur_items`.
-                //
-                // At the beginning of the loop, if we reach the end of the delimited submatcher,
-                // we pop the stack to backtrack out of the descent.
-                seq @ TokenTree::Delimited(..) |
-                seq @ TokenTree::Token(Token { kind: DocComment(..), .. }) => {
-                    let lower_elts = mem::replace(&mut item.top_elts, Tt(seq));
-                    let idx = item.idx;
-                    item.stack.push(MatcherTtFrame {
-                        elts: lower_elts,
-                        idx,
-                    });
-                    item.idx = 0;
-                    cur_items.push(item);
-                }
-
-                // We just matched a normal token. We can just advance the parser.
-                TokenTree::Token(t) if token_name_eq(&t, token) => {
-                    item.idx += 1;
-                    next_items.push(item);
-                }
-
-                // There was another token that was not `token`... This means we can't add any
-                // rules. NOTE that this is not necessarily an error unless _all_ items in
-                // `cur_items` end up doing this. There may still be some other matchers that do
-                // end up working out.
-                TokenTree::Token(..) | TokenTree::MetaVar(..) => {}
-            }
-        }
-    }
-
-    // Yay a successful parse (so far)!
-    Success(())
-}
-
-/// Use the given sequence of token trees (`ms`) as a matcher. Match the given token stream `tts`
-/// against it and return the match.
-///
-/// # Parameters
-///
-/// - `sess`: The session into which errors are emitted
-/// - `tts`: The tokenstream we are matching against the pattern `ms`
-/// - `ms`: A sequence of token trees representing a pattern against which we are matching
-/// - `directory`: Information about the file locations (needed for the black-box parser)
-/// - `recurse_into_modules`: Whether or not to recurse into modules (needed for the black-box
-///   parser)
-pub(super) fn parse(
-    sess: &ParseSess,
-    tts: TokenStream,
-    ms: &[TokenTree],
-    directory: Option<Directory<'_>>,
-    recurse_into_modules: bool,
-) -> NamedParseResult {
-    // Create a parser that can be used for the "black box" parts.
-    let mut parser = Parser::new(
-        sess,
-        tts,
-        directory,
-        recurse_into_modules,
-        true,
-        crate::MACRO_ARGUMENTS,
-    );
-
-    // A queue of possible matcher positions. We initialize it with the matcher position in which
-    // the "dot" is before the first token of the first token tree in `ms`. `inner_parse_loop` then
-    // processes all of these possible matcher positions and produces possible next positions into
-    // `next_items`. After some post-processing, the contents of `next_items` replenish `cur_items`
-    // and we start over again.
-    //
-    // This MatcherPos instance is allocated on the stack. All others -- and
-    // there are frequently *no* others! -- are allocated on the heap.
-    let mut initial = initial_matcher_pos(ms, parser.token.span);
-    let mut cur_items = smallvec![MatcherPosHandle::Ref(&mut initial)];
-    let mut next_items = Vec::new();
-
-    loop {
-        // Matcher positions black-box parsed by parser.rs (`parser`)
-        let mut bb_items = SmallVec::new();
-
-        // Matcher positions that would be valid if the macro invocation was over now
-        let mut eof_items = SmallVec::new();
-        assert!(next_items.is_empty());
-
-        // Process `cur_items` until either we have finished the input or we need to get some
-        // parsing from the black-box parser done. The result is that `next_items` will contain a
-        // bunch of possible next matcher positions.
-        match inner_parse_loop(
-            sess,
-            &mut cur_items,
-            &mut next_items,
-            &mut eof_items,
-            &mut bb_items,
-            &parser.token,
-        ) {
-            Success(_) => {}
-            Failure(token, msg) => return Failure(token, msg),
-            Error(sp, msg) => return Error(sp, msg),
-        }
-
-        // inner parse loop handled all cur_items, so it's empty
-        assert!(cur_items.is_empty());
-
-        // We need to do some post-processing after the `inner_parse_loop`.
-        //
-        // Error messages here could be improved with links to original rules.
-
-        // If we reached the EOF, check that there is EXACTLY ONE possible matcher. Otherwise,
-        // either the parse is ambiguous (which should never happen) or there is a syntax error.
-        if parser.token == token::Eof {
-            if eof_items.len() == 1 {
-                let matches = eof_items[0]
-                    .matches
-                    .iter_mut()
-                    .map(|dv| Lrc::make_mut(dv).pop().unwrap());
-                return nameize(sess, ms, matches);
-            } else if eof_items.len() > 1 {
-                return Error(
-                    parser.token.span,
-                    "ambiguity: multiple successful parses".to_string(),
-                );
-            } else {
-                return Failure(
-                    Token::new(token::Eof, if parser.token.span.is_dummy() {
-                        parser.token.span
-                    } else {
-                        sess.source_map().next_point(parser.token.span)
-                    }),
-                    "missing tokens in macro arguments",
-                );
-            }
-        }
-        // Performance hack: eof_items may share matchers via Rc with other things that we want
-        // to modify. Dropping eof_items now may drop these refcounts to 1, preventing an
-        // unnecessary implicit clone later in Rc::make_mut.
-        drop(eof_items);
-
-        // Another possibility is that we need to call out to the black-box parser to parse some
-        // Rust nonterminal. However, if there is not EXACTLY ONE of these, something is wrong.
-        if (!bb_items.is_empty() && !next_items.is_empty()) || bb_items.len() > 1 {
-            let nts = bb_items
-                .iter()
-                .map(|item| match item.top_elts.get_tt(item.idx) {
-                    TokenTree::MetaVarDecl(_, bind, name) => format!("{} ('{}')", name, bind),
-                    _ => panic!(),
-                })
-                .collect::<Vec<String>>()
-                .join(" or ");
-
-            return Error(
-                parser.token.span,
-                format!(
-                    "local ambiguity: multiple parsing options: {}",
-                    match next_items.len() {
-                        0 => format!("built-in NTs {}.", nts),
-                        1 => format!("built-in NTs {} or 1 other option.", nts),
-                        n => format!("built-in NTs {} or {} other options.", nts, n),
-                    }
-                ),
-            );
-        }
-        // If there are no possible next positions AND we aren't waiting for the black-box parser,
-        // then there is a syntax error.
-        else if bb_items.is_empty() && next_items.is_empty() {
-            return Failure(
-                parser.token.take(),
-                "no rules expected this token in macro call",
-            );
-        }
-        // Dump all possible `next_items` into `cur_items` for the next iteration.
-        else if !next_items.is_empty() {
-            // Now process the next token
-            cur_items.extend(next_items.drain(..));
-            parser.bump();
-        }
-        // Finally, we have the case where we need to call the black-box parser to get some
-        // nonterminal.
-        else {
-            assert_eq!(bb_items.len(), 1);
-
-            let mut item = bb_items.pop().unwrap();
-            if let TokenTree::MetaVarDecl(span, _, ident) = item.top_elts.get_tt(item.idx) {
-                let match_cur = item.match_cur;
-                item.push_match(
-                    match_cur,
-                    MatchedNonterminal(Lrc::new(parse_nt(&mut parser, span, ident.name))),
-                );
-                item.idx += 1;
-                item.match_cur += 1;
-            } else {
-                unreachable!()
-            }
-            cur_items.push(item);
-        }
-
-        assert!(!cur_items.is_empty());
-    }
-}
-
-/// The token is an identifier, but not `_`.
-/// We prohibit passing `_` to macros expecting `ident` for now.
-fn get_macro_name(token: &Token) -> Option<(Name, bool)> {
-    match token.kind {
-        token::Ident(name, is_raw) if name != kw::Underscore => Some((name, is_raw)),
-        _ => None,
-    }
-}
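
For illustration only (a made-up macro, assuming stable `macro_rules!` behavior rather than anything in this deleted file): an `ident` matcher accepts ordinary identifiers, while `_` is rejected as documented above.

    macro_rules! name_of {
        ($i:ident) => { stringify!($i) };
    }

    fn main() {
        assert_eq!(name_of!(foo), "foo");
        // `name_of!(_)` would be rejected: `_` is not accepted where `ident` is expected.
    }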
-
-/// Checks whether a non-terminal may begin with a particular token.
-///
-/// Returning `false` is a *stability guarantee* that such a matcher will *never* begin with that
-/// token. Be conservative (return true) if not sure.
-fn may_begin_with(token: &Token, name: Name) -> bool {
-    /// Checks whether the non-terminal may contain a single (non-keyword) identifier.
-    fn may_be_ident(nt: &token::Nonterminal) -> bool {
-        match *nt {
-            token::NtItem(_) | token::NtBlock(_) | token::NtVis(_) => false,
-            _ => true,
-        }
-    }
-
-    match name {
-        sym::expr => token.can_begin_expr()
-            // This exception is here for backwards compatibility.
-            && !token.is_keyword(kw::Let),
-        sym::ty => token.can_begin_type(),
-        sym::ident => get_macro_name(token).is_some(),
-        sym::literal => token.can_begin_literal_or_bool(),
-        sym::vis => match token.kind {
-            // The follow-set of :vis + "priv" keyword + interpolated
-            token::Comma | token::Ident(..) | token::Interpolated(_) => true,
-            _ => token.can_begin_type(),
-        },
-        sym::block => match token.kind {
-            token::OpenDelim(token::Brace) => true,
-            token::Interpolated(ref nt) => match **nt {
-                token::NtItem(_)
-                | token::NtPat(_)
-                | token::NtTy(_)
-                | token::NtIdent(..)
-                | token::NtMeta(_)
-                | token::NtPath(_)
-                | token::NtVis(_) => false, // none of these may start with '{'.
-                _ => true,
-            },
-            _ => false,
-        },
-        sym::path | sym::meta => match token.kind {
-            token::ModSep | token::Ident(..) => true,
-            token::Interpolated(ref nt) => match **nt {
-                token::NtPath(_) | token::NtMeta(_) => true,
-                _ => may_be_ident(&nt),
-            },
-            _ => false,
-        },
-        sym::pat => match token.kind {
-            token::Ident(..) |               // box, ref, mut, and other identifiers (can be made stricter)
-            token::OpenDelim(token::Paren) |    // tuple pattern
-            token::OpenDelim(token::Bracket) |  // slice pattern
-            token::BinOp(token::And) |          // reference
-            token::BinOp(token::Minus) |        // negative literal
-            token::AndAnd |                     // double reference
-            token::Literal(..) |                // literal
-            token::DotDot |                     // range pattern (future compat)
-            token::DotDotDot |                  // range pattern (future compat)
-            token::ModSep |                     // path
-            token::Lt |                         // path (UFCS constant)
-            token::BinOp(token::Shl) => true,   // path (double UFCS)
-            token::Interpolated(ref nt) => may_be_ident(nt),
-            _ => false,
-        },
-        sym::lifetime => match token.kind {
-            token::Lifetime(_) => true,
-            token::Interpolated(ref nt) => match **nt {
-                token::NtLifetime(_) | token::NtTT(_) => true,
-                _ => false,
-            },
-            _ => false,
-        },
-        _ => match token.kind {
-            token::CloseDelim(_) => false,
-            _ => true,
-        },
-    }
-}
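
A hedged sketch (hypothetical macro, not from this file) of the `expr`/`let` exception above: because `expr` is declared never to begin with `let`, an arm starting with the `let` keyword remains reachable even when listed after a plain `$e:expr` arm.

    macro_rules! eval {
        ($e:expr) => { $e };
        // reachable because `expr` never begins with `let`, so the first arm is
        // not attempted for `eval!(let ...)` input
        (let $i:ident = $e:expr) => { let $i = $e; };
    }

    fn main() {
        eval!(let x = 2 + 2);
        assert_eq!(eval!(x * 2), 8);
    }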
-
-/// A call to the "black-box" parser to parse some Rust non-terminal.
-///
-/// # Parameters
-///
-/// - `p`: the "black-box" parser to use
-/// - `sp`: the `Span` we want to parse
-/// - `name`: the name of the metavar _matcher_ we want to match (e.g., `tt`, `ident`, `block`,
-///   etc...)
-///
-/// # Returns
-///
-/// The parsed non-terminal.
-fn parse_nt(p: &mut Parser<'_>, sp: Span, name: Symbol) -> Nonterminal {
-    if name == sym::tt {
-        return token::NtTT(p.parse_token_tree());
-    }
-    // check at the beginning and the parser checks after each bump
-    p.process_potential_macro_variable();
-    match name {
-        sym::item => match panictry!(p.parse_item()) {
-            Some(i) => token::NtItem(i),
-            None => {
-                p.fatal("expected an item keyword").emit();
-                FatalError.raise();
-            }
-        },
-        sym::block => token::NtBlock(panictry!(p.parse_block())),
-        sym::stmt => match panictry!(p.parse_stmt()) {
-            Some(s) => token::NtStmt(s),
-            None => {
-                p.fatal("expected a statement").emit();
-                FatalError.raise();
-            }
-        },
-        sym::pat => token::NtPat(panictry!(p.parse_pat(None))),
-        sym::expr => token::NtExpr(panictry!(p.parse_expr())),
-        sym::literal => token::NtLiteral(panictry!(p.parse_literal_maybe_minus())),
-        sym::ty => token::NtTy(panictry!(p.parse_ty())),
-        // this could be handled like a token, since it is one
-        sym::ident => if let Some((name, is_raw)) = get_macro_name(&p.token) {
-            let span = p.token.span;
-            p.bump();
-            token::NtIdent(Ident::new(name, span), is_raw)
-        } else {
-            let token_str = pprust::token_to_string(&p.token);
-            p.fatal(&format!("expected ident, found {}", &token_str)).emit();
-            FatalError.raise()
-        }
-        sym::path => token::NtPath(panictry!(p.parse_path(PathStyle::Type))),
-        sym::meta => token::NtMeta(panictry!(p.parse_attr_item())),
-        sym::vis => token::NtVis(panictry!(p.parse_visibility(true))),
-        sym::lifetime => if p.check_lifetime() {
-            token::NtLifetime(p.expect_lifetime().ident)
-        } else {
-            let token_str = pprust::token_to_string(&p.token);
-            p.fatal(&format!("expected a lifetime, found `{}`", &token_str)).emit();
-            FatalError.raise();
-        }
-        // this is not supposed to happen, since it has been checked
-        // when compiling the macro.
-        _ => p.span_bug(sp, "invalid fragment specifier"),
-    }
-}
diff --git a/src/libsyntax/ext/mbe/macro_rules.rs b/src/libsyntax/ext/mbe/macro_rules.rs
deleted file mode 100644 (file)
index aec4a68..0000000
+++ /dev/null
@@ -1,1191 +0,0 @@
-use crate::ast;
-use crate::attr::{self, TransparencyError};
-use crate::edition::Edition;
-use crate::ext::base::{DummyResult, ExtCtxt, MacResult, TTMacroExpander};
-use crate::ext::base::{SyntaxExtension, SyntaxExtensionKind};
-use crate::ext::expand::{AstFragment, AstFragmentKind};
-use crate::ext::mbe;
-use crate::ext::mbe::macro_check;
-use crate::ext::mbe::macro_parser::parse;
-use crate::ext::mbe::macro_parser::{Error, Failure, Success};
-use crate::ext::mbe::macro_parser::{MatchedNonterminal, MatchedSeq, NamedParseResult};
-use crate::ext::mbe::transcribe::transcribe;
-use crate::feature_gate::Features;
-use crate::parse::parser::Parser;
-use crate::parse::token::TokenKind::*;
-use crate::parse::token::{self, NtTT, Token};
-use crate::parse::{Directory, ParseSess};
-use crate::print::pprust;
-use crate::symbol::{kw, sym, Symbol};
-use crate::tokenstream::{DelimSpan, TokenStream, TokenTree};
-
-use errors::{DiagnosticBuilder, FatalError};
-use log::debug;
-use syntax_pos::hygiene::Transparency;
-use syntax_pos::Span;
-
-use rustc_data_structures::fx::FxHashMap;
-use std::borrow::Cow;
-use std::collections::hash_map::Entry;
-use std::slice;
-
-use errors::Applicability;
-use rustc_data_structures::sync::Lrc;
-
-const VALID_FRAGMENT_NAMES_MSG: &str = "valid fragment specifiers are \
-                                        `ident`, `block`, `stmt`, `expr`, `pat`, `ty`, `lifetime`, \
-                                        `literal`, `path`, `meta`, `tt`, `item` and `vis`";
-
-crate struct ParserAnyMacro<'a> {
-    parser: Parser<'a>,
-
-    /// Span of the expansion site of the macro this parser is for
-    site_span: Span,
-    /// The ident of the macro we're parsing
-    macro_ident: ast::Ident,
-    arm_span: Span,
-}
-
-crate fn annotate_err_with_kind(
-    err: &mut DiagnosticBuilder<'_>,
-    kind: AstFragmentKind,
-    span: Span,
-) {
-    match kind {
-        AstFragmentKind::Ty => {
-            err.span_label(span, "this macro call doesn't expand to a type");
-        }
-        AstFragmentKind::Pat => {
-            err.span_label(span, "this macro call doesn't expand to a pattern");
-        }
-        _ => {}
-    };
-}
-
-impl<'a> ParserAnyMacro<'a> {
-    crate fn make(mut self: Box<ParserAnyMacro<'a>>, kind: AstFragmentKind) -> AstFragment {
-        let ParserAnyMacro { site_span, macro_ident, ref mut parser, arm_span } = *self;
-        let fragment = panictry!(parser.parse_ast_fragment(kind, true).map_err(|mut e| {
-            if parser.token == token::Eof && e.message().ends_with(", found `<eof>`") {
-                if !e.span.is_dummy() {
-                    // early end of macro arm (#52866)
-                    e.replace_span_with(parser.sess.source_map().next_point(parser.token.span));
-                }
-                let msg = &e.message[0];
-                e.message[0] = (
-                    format!(
-                        "macro expansion ends with an incomplete expression: {}",
-                        msg.0.replace(", found `<eof>`", ""),
-                    ),
-                    msg.1,
-                );
-            }
-            if e.span.is_dummy() {
-                // Get around lack of span in error (#30128)
-                e.replace_span_with(site_span);
-                if parser.sess.source_map().span_to_filename(arm_span).is_real() {
-                    e.span_label(arm_span, "in this macro arm");
-                }
-            } else if !parser.sess.source_map().span_to_filename(parser.token.span).is_real() {
-                e.span_label(site_span, "in this macro invocation");
-            }
-            match kind {
-                AstFragmentKind::Pat if macro_ident.name == sym::vec => {
-                    let mut suggestion = None;
-                    if let Ok(code) = parser.sess.source_map().span_to_snippet(site_span) {
-                        if let Some(bang) = code.find('!') {
-                            suggestion = Some(code[bang + 1..].to_string());
-                        }
-                    }
-                    if let Some(suggestion) = suggestion {
-                        e.span_suggestion(
-                            site_span,
-                            "use a slice pattern here instead",
-                            suggestion,
-                            Applicability::MachineApplicable,
-                        );
-                    } else {
-                        e.span_label(
-                            site_span,
-                            "use a slice pattern here instead",
-                        );
-                    }
-                    e.help("for more information, see https://doc.rust-lang.org/edition-guide/\
-                            rust-2018/slice-patterns.html");
-                }
-                _ => annotate_err_with_kind(&mut e, kind, site_span),
-            };
-            e
-        }));
-
-        // We allow semicolons at the end of expressions -- e.g., the semicolon in
-        // `macro_rules! m { () => { panic!(); } }` isn't parsed by `.parse_expr()`,
-        // but `m!()` is allowed in expression positions (cf. issue #34706).
-        if kind == AstFragmentKind::Expr && parser.token == token::Semi {
-            parser.bump();
-        }
-
-        // Make sure we don't have any tokens left to parse so we don't silently drop anything.
-        let path = ast::Path::from_ident(macro_ident.with_span_pos(site_span));
-        parser.ensure_complete_parse(&path, kind.name(), site_span);
-        fragment
-    }
-}
-
-struct MacroRulesMacroExpander {
-    name: ast::Ident,
-    span: Span,
-    transparency: Transparency,
-    lhses: Vec<mbe::TokenTree>,
-    rhses: Vec<mbe::TokenTree>,
-    valid: bool,
-}
-
-impl TTMacroExpander for MacroRulesMacroExpander {
-    fn expand<'cx>(
-        &self,
-        cx: &'cx mut ExtCtxt<'_>,
-        sp: Span,
-        input: TokenStream,
-    ) -> Box<dyn MacResult + 'cx> {
-        if !self.valid {
-            return DummyResult::any(sp);
-        }
-        generic_extension(
-            cx, sp, self.span, self.name, self.transparency, input, &self.lhses, &self.rhses
-        )
-    }
-}
-
-fn trace_macros_note(cx: &mut ExtCtxt<'_>, sp: Span, message: String) {
-    let sp = sp.macro_backtrace().last().map(|trace| trace.call_site).unwrap_or(sp);
-    cx.expansions.entry(sp).or_default().push(message);
-}
-
-/// Expands the rule-based macro defined by `lhses` and `rhses` for the given input `arg`.
-fn generic_extension<'cx>(
-    cx: &'cx mut ExtCtxt<'_>,
-    sp: Span,
-    def_span: Span,
-    name: ast::Ident,
-    transparency: Transparency,
-    arg: TokenStream,
-    lhses: &[mbe::TokenTree],
-    rhses: &[mbe::TokenTree],
-) -> Box<dyn MacResult + 'cx> {
-    if cx.trace_macros() {
-        trace_macros_note(cx, sp, format!("expanding `{}! {{ {} }}`", name, arg));
-    }
-
-    // Which arm's failure should we report? (the one furthest along)
-    let mut best_failure: Option<(Token, &str)> = None;
-
-    for (i, lhs) in lhses.iter().enumerate() {
-        // try each arm's matchers
-        let lhs_tt = match *lhs {
-            mbe::TokenTree::Delimited(_, ref delim) => &delim.tts[..],
-            _ => cx.span_bug(sp, "malformed macro lhs"),
-        };
-
-        match TokenTree::parse(cx, lhs_tt, arg.clone()) {
-            Success(named_matches) => {
-                let rhs = match rhses[i] {
-                    // ignore delimiters
-                    mbe::TokenTree::Delimited(_, ref delimed) => delimed.tts.clone(),
-                    _ => cx.span_bug(sp, "malformed macro rhs"),
-                };
-                let arm_span = rhses[i].span();
-
-                let rhs_spans = rhs.iter().map(|t| t.span()).collect::<Vec<_>>();
-                // rhs has holes (`$id` and `$(...)`) that need to be filled
-                let mut tts = transcribe(cx, &named_matches, rhs, transparency);
-
-                // Replace all the tokens for the corresponding positions in the macro, to keep
-                // proper positions in error reporting, while preserving the macro_backtrace.
-                if rhs_spans.len() == tts.len() {
-                    tts = tts.map_enumerated(|i, mut tt| {
-                        let mut sp = rhs_spans[i];
-                        sp = sp.with_ctxt(tt.span().ctxt());
-                        tt.set_span(sp);
-                        tt
-                    });
-                }
-
-                if cx.trace_macros() {
-                    trace_macros_note(cx, sp, format!("to `{}`", tts));
-                }
-
-                let directory = Directory {
-                    path: Cow::from(cx.current_expansion.module.directory.as_path()),
-                    ownership: cx.current_expansion.directory_ownership,
-                };
-                let mut p = Parser::new(cx.parse_sess(), tts, Some(directory), true, false, None);
-                p.root_module_name =
-                    cx.current_expansion.module.mod_path.last().map(|id| id.as_str().to_string());
-                p.last_type_ascription = cx.current_expansion.prior_type_ascription;
-
-                p.process_potential_macro_variable();
-                // Let the context choose how to interpret the result.
-                // Weird, but useful for X-macros.
-                return Box::new(ParserAnyMacro {
-                    parser: p,
-
-                    // Pass along the original expansion site and the name of the macro
-                    // so we can print a useful error message if the parse of the expanded
-                    // macro leaves unparsed tokens.
-                    site_span: sp,
-                    macro_ident: name,
-                    arm_span,
-                });
-            }
-            Failure(token, msg) => match best_failure {
-                Some((ref best_token, _)) if best_token.span.lo() >= token.span.lo() => {}
-                _ => best_failure = Some((token, msg)),
-            },
-            Error(err_sp, ref msg) => cx.span_fatal(err_sp.substitute_dummy(sp), &msg[..]),
-        }
-    }
-
-    let (token, label) = best_failure.expect("ran no matchers");
-    let span = token.span.substitute_dummy(sp);
-    let mut err = cx.struct_span_err(span, &parse_failure_msg(&token));
-    err.span_label(span, label);
-    if !def_span.is_dummy() && cx.source_map().span_to_filename(def_span).is_real() {
-        err.span_label(cx.source_map().def_span(def_span), "when calling this macro");
-    }
-
-    // Check whether there's a missing comma in this macro call, like `println!("{}" a);`
-    if let Some((arg, comma_span)) = arg.add_comma() {
-        for lhs in lhses {
-            // try each arm's matchers
-            let lhs_tt = match *lhs {
-                mbe::TokenTree::Delimited(_, ref delim) => &delim.tts[..],
-                _ => continue,
-            };
-            match TokenTree::parse(cx, lhs_tt, arg.clone()) {
-                Success(_) => {
-                    if comma_span.is_dummy() {
-                        err.note("you might be missing a comma");
-                    } else {
-                        err.span_suggestion_short(
-                            comma_span,
-                            "missing comma here",
-                            ", ".to_string(),
-                            Applicability::MachineApplicable,
-                        );
-                    }
-                }
-                _ => {}
-            }
-        }
-    }
-    err.emit();
-    cx.trace_macros_diag();
-    DummyResult::any(sp)
-}
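
An assumed example (illustrative macro, not part of the diff) of the missing-comma recovery above: when no arm matches but one does match once a comma is inserted, the error carries a "missing comma here" suggestion.

    macro_rules! point {
        ($x:expr, $y:expr) => { ($x, $y) };
    }

    fn main() {
        assert_eq!(point!(1, 2), (1, 2));
        // `point!(1 2)` matches no arm as written, but does once a comma is added,
        // so the resulting error suggests inserting one.
    }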
-
-// Note that macro-by-example's input is also matched against a token tree:
-//                   $( $lhs:tt => $rhs:tt );+
-//
-// Holy self-referential!
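
As a purely illustrative aside (assuming stable `macro_rules!` syntax, not taken from this file): each arm supplies one `$lhs => $rhs` pair and arms are separated by `;`, exactly the shape the matcher above describes.

    macro_rules! pair {
        ($a:expr, $b:expr) => { ($a, $b) };
        ($a:expr) => { ($a, $a) };
    }

    fn main() {
        assert_eq!(pair!(1, 2), (1, 2));
        assert_eq!(pair!(3), (3, 3));
    }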
-
-/// Converts a macro item into a syntax extension.
-pub fn compile_declarative_macro(
-    sess: &ParseSess,
-    features: &Features,
-    def: &ast::Item,
-    edition: Edition,
-) -> SyntaxExtension {
-    let diag = &sess.span_diagnostic;
-    let lhs_nm = ast::Ident::new(sym::lhs, def.span);
-    let rhs_nm = ast::Ident::new(sym::rhs, def.span);
-    let tt_spec = ast::Ident::new(sym::tt, def.span);
-
-    // Parse the macro_rules! invocation
-    let body = match def.kind {
-        ast::ItemKind::MacroDef(ref body) => body,
-        _ => unreachable!(),
-    };
-
-    // The pattern that macro_rules matches.
-    // The grammar for macro_rules! is:
-    // $( $lhs:tt => $rhs:tt );+
-    // ...quasiquoting this would be nice.
-    // These spans won't matter here, anyway
-    let argument_gram = vec![
-        mbe::TokenTree::Sequence(
-            DelimSpan::dummy(),
-            Lrc::new(mbe::SequenceRepetition {
-                tts: vec![
-                    mbe::TokenTree::MetaVarDecl(def.span, lhs_nm, tt_spec),
-                    mbe::TokenTree::token(token::FatArrow, def.span),
-                    mbe::TokenTree::MetaVarDecl(def.span, rhs_nm, tt_spec),
-                ],
-                separator: Some(Token::new(
-                    if body.legacy { token::Semi } else { token::Comma },
-                    def.span,
-                )),
-                kleene: mbe::KleeneToken::new(mbe::KleeneOp::OneOrMore, def.span),
-                num_captures: 2,
-            }),
-        ),
-        // to phase into semicolon-termination instead of semicolon-separation
-        mbe::TokenTree::Sequence(
-            DelimSpan::dummy(),
-            Lrc::new(mbe::SequenceRepetition {
-                tts: vec![mbe::TokenTree::token(
-                    if body.legacy { token::Semi } else { token::Comma },
-                    def.span,
-                )],
-                separator: None,
-                kleene: mbe::KleeneToken::new(mbe::KleeneOp::ZeroOrMore, def.span),
-                num_captures: 0,
-            }),
-        ),
-    ];
-
-    let argument_map = match parse(sess, body.stream(), &argument_gram, None, true) {
-        Success(m) => m,
-        Failure(token, msg) => {
-            let s = parse_failure_msg(&token);
-            let sp = token.span.substitute_dummy(def.span);
-            let mut err = sess.span_diagnostic.struct_span_fatal(sp, &s);
-            err.span_label(sp, msg);
-            err.emit();
-            FatalError.raise();
-        }
-        Error(sp, s) => {
-            sess.span_diagnostic.span_fatal(sp.substitute_dummy(def.span), &s).raise();
-        }
-    };
-
-    let mut valid = true;
-
-    // Extract the arguments:
-    let lhses = match argument_map[&lhs_nm] {
-        MatchedSeq(ref s, _) => s
-            .iter()
-            .map(|m| {
-                if let MatchedNonterminal(ref nt) = *m {
-                    if let NtTT(ref tt) = **nt {
-                        let tt = mbe::quoted::parse(
-                            tt.clone().into(),
-                            true,
-                            sess,
-                        )
-                        .pop()
-                        .unwrap();
-                        valid &= check_lhs_nt_follows(sess, features, &def.attrs, &tt);
-                        return tt;
-                    }
-                }
-                sess.span_diagnostic.span_bug(def.span, "wrong-structured lhs")
-            })
-            .collect::<Vec<mbe::TokenTree>>(),
-        _ => sess.span_diagnostic.span_bug(def.span, "wrong-structured lhs"),
-    };
-
-    let rhses = match argument_map[&rhs_nm] {
-        MatchedSeq(ref s, _) => s
-            .iter()
-            .map(|m| {
-                if let MatchedNonterminal(ref nt) = *m {
-                    if let NtTT(ref tt) = **nt {
-                        return mbe::quoted::parse(
-                            tt.clone().into(),
-                            false,
-                            sess,
-                        )
-                        .pop()
-                        .unwrap();
-                    }
-                }
-                sess.span_diagnostic.span_bug(def.span, "wrong-structured lhs")
-            })
-            .collect::<Vec<mbe::TokenTree>>(),
-        _ => sess.span_diagnostic.span_bug(def.span, "wrong-structured rhs"),
-    };
-
-    for rhs in &rhses {
-        valid &= check_rhs(sess, rhs);
-    }
-
-    // don't abort iteration early, so that errors for multiple lhses can be reported
-    for lhs in &lhses {
-        valid &= check_lhs_no_empty_seq(sess, slice::from_ref(lhs));
-    }
-
-    // We use CRATE_NODE_ID instead of `def.id` otherwise we may emit buffered lints for a node id
-    // that is not lint-checked and trigger the "failed to process buffered lint here" bug.
-    valid &= macro_check::check_meta_variables(sess, ast::CRATE_NODE_ID, def.span, &lhses, &rhses);
-
-    let (transparency, transparency_error) = attr::find_transparency(&def.attrs, body.legacy);
-    match transparency_error {
-        Some(TransparencyError::UnknownTransparency(value, span)) =>
-            diag.span_err(span, &format!("unknown macro transparency: `{}`", value)),
-        Some(TransparencyError::MultipleTransparencyAttrs(old_span, new_span)) =>
-            diag.span_err(vec![old_span, new_span], "multiple macro transparency attributes"),
-        None => {}
-    }
-
-    let expander: Box<_> = Box::new(MacroRulesMacroExpander {
-        name: def.ident, span: def.span, transparency, lhses, rhses, valid
-    });
-
-    SyntaxExtension::new(
-        sess,
-        SyntaxExtensionKind::LegacyBang(expander),
-        def.span,
-        Vec::new(),
-        edition,
-        def.ident.name,
-        &def.attrs,
-    )
-}
-
-fn check_lhs_nt_follows(
-    sess: &ParseSess,
-    features: &Features,
-    attrs: &[ast::Attribute],
-    lhs: &mbe::TokenTree,
-) -> bool {
-    // lhs is going to be like TokenTree::Delimited(...), where the
-    // entire lhs is those tts. Or, it can be a "bare sequence", not wrapped in parens.
-    if let mbe::TokenTree::Delimited(_, ref tts) = *lhs {
-        check_matcher(sess, features, attrs, &tts.tts)
-    } else {
-        let msg = "invalid macro matcher; matchers must be contained in balanced delimiters";
-        sess.span_diagnostic.span_err(lhs.span(), msg);
-        false
-    }
-    // we don't abort on errors on rejection, the driver will do that for us
-    // after parsing/expansion. we can report every error in every macro this way.
-}
-
-/// Checks that the lhs contains no repetition which could match an empty token
-/// tree, because then the matcher would hang indefinitely.
-fn check_lhs_no_empty_seq(sess: &ParseSess, tts: &[mbe::TokenTree]) -> bool {
-    use mbe::TokenTree;
-    for tt in tts {
-        match *tt {
-            TokenTree::Token(..) | TokenTree::MetaVar(..) | TokenTree::MetaVarDecl(..) => (),
-            TokenTree::Delimited(_, ref del) => {
-                if !check_lhs_no_empty_seq(sess, &del.tts) {
-                    return false;
-                }
-            }
-            TokenTree::Sequence(span, ref seq) => {
-                if seq.separator.is_none()
-                    && seq.tts.iter().all(|seq_tt| match *seq_tt {
-                        TokenTree::MetaVarDecl(_, _, id) => id.name == sym::vis,
-                        TokenTree::Sequence(_, ref sub_seq) => {
-                            sub_seq.kleene.op == mbe::KleeneOp::ZeroOrMore
-                                || sub_seq.kleene.op == mbe::KleeneOp::ZeroOrOne
-                        }
-                        _ => false,
-                    })
-                {
-                    let sp = span.entire();
-                    sess.span_diagnostic.span_err(sp, "repetition matches empty token tree");
-                    return false;
-                }
-                if !check_lhs_no_empty_seq(sess, &seq.tts) {
-                    return false;
-                }
-            }
-        }
-    }
-
-    true
-}
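
For illustration only (hypothetical macro, assuming stable behavior): a repetition in which every pass consumes at least one token is accepted, whereas a matcher like `$($v:vis)*` is rejected by the check above because `vis` can match nothing and the matcher would loop forever.

    macro_rules! names {
        // each repetition consumes at least one token (`$i:ident`), so this matcher is accepted
        ($($i:ident),*) => { [$(stringify!($i)),*] };
    }

    fn main() {
        assert_eq!(names!(a, b), ["a", "b"]);
    }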
-
-fn check_rhs(sess: &ParseSess, rhs: &mbe::TokenTree) -> bool {
-    match *rhs {
-        mbe::TokenTree::Delimited(..) => return true,
-        _ => sess.span_diagnostic.span_err(rhs.span(), "macro rhs must be delimited"),
-    }
-    false
-}
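
A small assumed example (not from the diff) of the delimiter requirement checked above:

    macro_rules! one {
        // the rhs is wrapped in `{ ... }`; writing `() => 1;` instead would be
        // rejected with "macro rhs must be delimited"
        () => { 1 };
    }

    fn main() {
        assert_eq!(one!(), 1);
    }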
-
-fn check_matcher(
-    sess: &ParseSess,
-    features: &Features,
-    attrs: &[ast::Attribute],
-    matcher: &[mbe::TokenTree],
-) -> bool {
-    let first_sets = FirstSets::new(matcher);
-    let empty_suffix = TokenSet::empty();
-    let err = sess.span_diagnostic.err_count();
-    check_matcher_core(sess, features, attrs, &first_sets, matcher, &empty_suffix);
-    err == sess.span_diagnostic.err_count()
-}
-
-// The `FirstSets` for a matcher is a mapping from subsequences in the
-// matcher to the FIRST set for that subsequence.
-//
-// This mapping is partially precomputed via a backwards scan over the
-// token trees of the matcher, which provides a mapping from each
-// repetition sequence to its *first* set.
-//
-// (Hypothetically, sequences should be uniquely identifiable via their
-// spans, though perhaps that is false, e.g., for macro-generated macros
-// that do not try to inject artificial span information. My plan is
-// to try to catch such cases ahead of time and not include them in
-// the precomputed mapping.)
-struct FirstSets {
-    // this maps each TokenTree::Sequence `$(tt ...) SEP OP` that is uniquely identified by its
-    // span in the original matcher to the First set for the inner sequence `tt ...`.
-    //
-    // If two sequences have the same span in a matcher, then map that
-    // span to None (invalidating the mapping here and forcing the code to
-    // use a slow path).
-    first: FxHashMap<Span, Option<TokenSet>>,
-}
-
-impl FirstSets {
-    fn new(tts: &[mbe::TokenTree]) -> FirstSets {
-        use mbe::TokenTree;
-
-        let mut sets = FirstSets { first: FxHashMap::default() };
-        build_recur(&mut sets, tts);
-        return sets;
-
-        // walks backward over `tts`, returning the FIRST for `tts`
-        // and updating `sets` at the same time for all sequence
-        // substructure we find within `tts`.
-        fn build_recur(sets: &mut FirstSets, tts: &[TokenTree]) -> TokenSet {
-            let mut first = TokenSet::empty();
-            for tt in tts.iter().rev() {
-                match *tt {
-                    TokenTree::Token(..) | TokenTree::MetaVar(..) | TokenTree::MetaVarDecl(..) => {
-                        first.replace_with(tt.clone());
-                    }
-                    TokenTree::Delimited(span, ref delimited) => {
-                        build_recur(sets, &delimited.tts[..]);
-                        first.replace_with(delimited.open_tt(span.open));
-                    }
-                    TokenTree::Sequence(sp, ref seq_rep) => {
-                        let subfirst = build_recur(sets, &seq_rep.tts[..]);
-
-                        match sets.first.entry(sp.entire()) {
-                            Entry::Vacant(vac) => {
-                                vac.insert(Some(subfirst.clone()));
-                            }
-                            Entry::Occupied(mut occ) => {
-                                // if there is already an entry, then a span must have collided.
-                                // This should not happen with typical macro_rules macros,
-                                // but syntax extensions need not maintain distinct spans,
-                                // so distinct syntax trees can be assigned the same span.
-                                // In such a case, the map cannot be trusted; so mark this
-                                // entry as unusable.
-                                occ.insert(None);
-                            }
-                        }
-
-                        // If the sequence contents can be empty, then the first
-                        // token could be the separator token itself.
-
-                        if let (Some(sep), true) = (&seq_rep.separator, subfirst.maybe_empty) {
-                            first.add_one_maybe(TokenTree::Token(sep.clone()));
-                        }
-
-                        // Reverse scan: Sequence comes before `first`.
-                        if subfirst.maybe_empty
-                            || seq_rep.kleene.op == mbe::KleeneOp::ZeroOrMore
-                            || seq_rep.kleene.op == mbe::KleeneOp::ZeroOrOne
-                        {
-                            // If sequence is potentially empty, then
-                            // union them (preserving first emptiness).
-                            first.add_all(&TokenSet { maybe_empty: true, ..subfirst });
-                        } else {
-                            // Otherwise, sequence guaranteed
-                            // non-empty; replace first.
-                            first = subfirst;
-                        }
-                    }
-                }
-            }
-
-            first
-        }
-    }
-
-    // walks forward over `tts` until all potential FIRST tokens are
-    // identified.
-    fn first(&self, tts: &[mbe::TokenTree]) -> TokenSet {
-        use mbe::TokenTree;
-
-        let mut first = TokenSet::empty();
-        for tt in tts.iter() {
-            assert!(first.maybe_empty);
-            match *tt {
-                TokenTree::Token(..) | TokenTree::MetaVar(..) | TokenTree::MetaVarDecl(..) => {
-                    first.add_one(tt.clone());
-                    return first;
-                }
-                TokenTree::Delimited(span, ref delimited) => {
-                    first.add_one(delimited.open_tt(span.open));
-                    return first;
-                }
-                TokenTree::Sequence(sp, ref seq_rep) => {
-                    let subfirst_owned;
-                    let subfirst = match self.first.get(&sp.entire()) {
-                        Some(&Some(ref subfirst)) => subfirst,
-                        Some(&None) => {
-                            subfirst_owned = self.first(&seq_rep.tts[..]);
-                            &subfirst_owned
-                        }
-                        None => {
-                            panic!("We missed a sequence during FirstSets construction");
-                        }
-                    };
-
-                    // If the sequence contents can be empty, then the first
-                    // token could be the separator token itself.
-                    if let (Some(sep), true) = (&seq_rep.separator, subfirst.maybe_empty) {
-                        first.add_one_maybe(TokenTree::Token(sep.clone()));
-                    }
-
-                    assert!(first.maybe_empty);
-                    first.add_all(subfirst);
-                    if subfirst.maybe_empty
-                        || seq_rep.kleene.op == mbe::KleeneOp::ZeroOrMore
-                        || seq_rep.kleene.op == mbe::KleeneOp::ZeroOrOne
-                    {
-                        // Continue scanning for more first
-                        // tokens, but also make sure we
-                        // restore empty-tracking state.
-                        first.maybe_empty = true;
-                        continue;
-                    } else {
-                        return first;
-                    }
-                }
-            }
-        }
-
-        // we only exit the loop if `tts` was empty or if every
-        // element of `tts` matches the empty sequence.
-        assert!(first.maybe_empty);
-        first
-    }
-}
-
-// A set of `mbe::TokenTree`s, which may include `TokenTree::MetaVarDecl`s
-// (for macro-by-example syntactic variables). It also carries the
-// `maybe_empty` flag; that is true if and only if the matcher can
-// match an empty token sequence.
-//
-// The First set is computed on submatchers like `$($a:expr b),* $(c)* d`,
-// which has corresponding FIRST = {$a:expr, c, d}.
-// Likewise, `$($a:expr b),* $(c)+ d` has FIRST = {$a:expr, c}.
-//
-// (Notably, we must allow for *-op to occur zero times.)
-#[derive(Clone, Debug)]
-struct TokenSet {
-    tokens: Vec<mbe::TokenTree>,
-    maybe_empty: bool,
-}
-
-impl TokenSet {
-    // Returns a set for the empty sequence.
-    fn empty() -> Self {
-        TokenSet { tokens: Vec::new(), maybe_empty: true }
-    }
-
-    // Returns the set `{ tok }` for the single-token (and thus
-    // non-empty) sequence [tok].
-    fn singleton(tok: mbe::TokenTree) -> Self {
-        TokenSet { tokens: vec![tok], maybe_empty: false }
-    }
-
-    // Changes self to be the set `{ tok }`.
-    // Since `tok` is always present, marks self as non-empty.
-    fn replace_with(&mut self, tok: mbe::TokenTree) {
-        self.tokens.clear();
-        self.tokens.push(tok);
-        self.maybe_empty = false;
-    }
-
-    // Changes self to be the empty set `{}`; meant for use when
-    // the particular token does not matter, but we want to
-    // record that it occurs.
-    fn replace_with_irrelevant(&mut self) {
-        self.tokens.clear();
-        self.maybe_empty = false;
-    }
-
-    // Adds `tok` to the set for `self`, marking the sequence as non-empty.
-    fn add_one(&mut self, tok: mbe::TokenTree) {
-        if !self.tokens.contains(&tok) {
-            self.tokens.push(tok);
-        }
-        self.maybe_empty = false;
-    }
-
-    // Adds `tok` to the set for `self`. (Leaves `maybe_empty` flag alone.)
-    fn add_one_maybe(&mut self, tok: mbe::TokenTree) {
-        if !self.tokens.contains(&tok) {
-            self.tokens.push(tok);
-        }
-    }
-
-    // Adds all elements of `other` to this.
-    //
-    // (Since this is a set, we filter out duplicates.)
-    //
-    // If `other` is potentially empty, then preserves the previous
-    // setting of the empty flag of `self`. If `other` is guaranteed
-    // non-empty, then `self` is marked non-empty.
-    fn add_all(&mut self, other: &Self) {
-        for tok in &other.tokens {
-            if !self.tokens.contains(tok) {
-                self.tokens.push(tok.clone());
-            }
-        }
-        if !other.maybe_empty {
-            self.maybe_empty = false;
-        }
-    }
-}
-
-// Checks that `matcher` is internally consistent and that it
-// can legally be followed by a token `N`, for all `N` in `follow`.
-// (If `follow` is empty, then it imposes no constraint on
-// the `matcher`.)
-//
-// Returns the set of NT tokens that could possibly come last in
-// `matcher`. (If `matcher` matches the empty sequence, then
-// `maybe_empty` will be set to true.)
-//
-// Requires that `first_sets` is pre-computed for `matcher`;
-// see `FirstSets::new`.
-fn check_matcher_core(
-    sess: &ParseSess,
-    features: &Features,
-    attrs: &[ast::Attribute],
-    first_sets: &FirstSets,
-    matcher: &[mbe::TokenTree],
-    follow: &TokenSet,
-) -> TokenSet {
-    use mbe::TokenTree;
-
-    let mut last = TokenSet::empty();
-
-    // 2. For each token and suffix  [T, SUFFIX] in M:
-    // ensure that T can be followed by SUFFIX, and if SUFFIX may be empty,
-    // then ensure T can also be followed by any element of FOLLOW.
-    'each_token: for i in 0..matcher.len() {
-        let token = &matcher[i];
-        let suffix = &matcher[i + 1..];
-
-        let build_suffix_first = || {
-            let mut s = first_sets.first(suffix);
-            if s.maybe_empty {
-                s.add_all(follow);
-            }
-            s
-        };
-
-        // (we build `suffix_first` on demand below; you can tell
-        // which cases are supposed to fall through by looking for the
-        // initialization of this variable.)
-        let suffix_first;
-
-        // First, update `last` so that it corresponds to the set
-        // of NT tokens that might end the sequence `... token`.
-        match *token {
-            TokenTree::Token(..) | TokenTree::MetaVar(..) | TokenTree::MetaVarDecl(..) => {
-                let can_be_followed_by_any;
-                if let Err(bad_frag) = has_legal_fragment_specifier(sess, features, attrs, token) {
-                    let msg = format!("invalid fragment specifier `{}`", bad_frag);
-                    sess.span_diagnostic
-                        .struct_span_err(token.span(), &msg)
-                        .help(VALID_FRAGMENT_NAMES_MSG)
-                        .emit();
-                    // (This eliminates false positives and duplicates
-                    // from error messages.)
-                    can_be_followed_by_any = true;
-                } else {
-                    can_be_followed_by_any = token_can_be_followed_by_any(token);
-                }
-
-                if can_be_followed_by_any {
-                    // don't need to track tokens that work with any,
-                    last.replace_with_irrelevant();
-                    // ... and don't need to check tokens that can be
-                    // followed by anything against SUFFIX.
-                    continue 'each_token;
-                } else {
-                    last.replace_with(token.clone());
-                    suffix_first = build_suffix_first();
-                }
-            }
-            TokenTree::Delimited(span, ref d) => {
-                let my_suffix = TokenSet::singleton(d.close_tt(span.close));
-                check_matcher_core(sess, features, attrs, first_sets, &d.tts, &my_suffix);
-                // don't track non NT tokens
-                last.replace_with_irrelevant();
-
-                // also, we don't need to check delimited sequences
-                // against SUFFIX
-                continue 'each_token;
-            }
-            TokenTree::Sequence(_, ref seq_rep) => {
-                suffix_first = build_suffix_first();
-                // The trick here: when we check the interior, we want
-                // to include the separator (if any) as a potential
-                // (but not guaranteed) element of FOLLOW. So in that
-                // case, we make a temp copy of suffix and stuff
-                // delimiter in there.
-                //
-                // FIXME: Should I first scan suffix_first to see if
-                // delimiter is already in it before I go through the
-                // work of cloning it? But then again, this way I may
-                // get a "tighter" span?
-                let mut new;
-                let my_suffix = if let Some(sep) = &seq_rep.separator {
-                    new = suffix_first.clone();
-                    new.add_one_maybe(TokenTree::Token(sep.clone()));
-                    &new
-                } else {
-                    &suffix_first
-                };
-
-                // At this point, `suffix_first` is built, and
-                // `my_suffix` is some TokenSet that we can use
-                // for checking the interior of `seq_rep`.
-                let next =
-                    check_matcher_core(sess, features, attrs, first_sets, &seq_rep.tts, my_suffix);
-                if next.maybe_empty {
-                    last.add_all(&next);
-                } else {
-                    last = next;
-                }
-
-                // the recursive call to check_matcher_core already ran the 'each_last
-                // check below, so we can just keep going forward here.
-                continue 'each_token;
-            }
-        }
-
-        // (`suffix_first` guaranteed initialized once reaching here.)
-
-        // Now `last` holds the complete set of NT tokens that could
-        // end the sequence before SUFFIX. Check that every one works with `suffix`.
-        'each_last: for token in &last.tokens {
-            if let TokenTree::MetaVarDecl(_, name, frag_spec) = *token {
-                for next_token in &suffix_first.tokens {
-                    match is_in_follow(next_token, frag_spec.name) {
-                        IsInFollow::Invalid(msg, help) => {
-                            sess.span_diagnostic
-                                .struct_span_err(next_token.span(), &msg)
-                                .help(help)
-                                .emit();
-                            // don't bother reporting every source of
-                            // conflict for a particular element of `last`.
-                            continue 'each_last;
-                        }
-                        IsInFollow::Yes => {}
-                        IsInFollow::No(possible) => {
-                            let may_be = if last.tokens.len() == 1 && suffix_first.tokens.len() == 1
-                            {
-                                "is"
-                            } else {
-                                "may be"
-                            };
-
-                            let sp = next_token.span();
-                            let mut err = sess.span_diagnostic.struct_span_err(
-                                sp,
-                                &format!(
-                                    "`${name}:{frag}` {may_be} followed by `{next}`, which \
-                                     is not allowed for `{frag}` fragments",
-                                    name = name,
-                                    frag = frag_spec,
-                                    next = quoted_tt_to_string(next_token),
-                                    may_be = may_be
-                                ),
-                            );
-                            err.span_label(
-                                sp,
-                                format!("not allowed after `{}` fragments", frag_spec),
-                            );
-                            let msg = "allowed there are: ";
-                            match possible {
-                                &[] => {}
-                                &[t] => {
-                                    err.note(&format!(
-                                        "only {} is allowed after `{}` fragments",
-                                        t, frag_spec,
-                                    ));
-                                }
-                                ts => {
-                                    err.note(&format!(
-                                        "{}{} or {}",
-                                        msg,
-                                        ts[..ts.len() - 1]
-                                            .iter()
-                                            .map(|s| *s)
-                                            .collect::<Vec<_>>()
-                                            .join(", "),
-                                        ts[ts.len() - 1],
-                                    ));
-                                }
-                            }
-                            err.emit();
-                        }
-                    }
-                }
-            }
-        }
-    }
-    last
-}
-
-fn token_can_be_followed_by_any(tok: &mbe::TokenTree) -> bool {
-    if let mbe::TokenTree::MetaVarDecl(_, _, frag_spec) = *tok {
-        frag_can_be_followed_by_any(frag_spec.name)
-    } else {
-        // (Non-NTs can always be followed by anything in matchers.)
-        true
-    }
-}
-
-/// Returns `true` if a fragment of type `frag` can be followed by any sort of
-/// token. We use this (among other things) as a useful approximation
-/// for when `frag` can be followed by a repetition like `$(...)*` or
-/// `$(...)+`. In general, these can be a bit tricky to reason about,
-/// so we adopt a conservative position that says that any fragment
-/// specifier which consumes at most one token tree can be followed by
-/// a fragment specifier (indeed, these fragments can be followed by
-/// ANYTHING without fear of future compatibility hazards).
-fn frag_can_be_followed_by_any(frag: Symbol) -> bool {
-    match frag {
-        sym::item     | // always terminated by `}` or `;`
-        sym::block    | // exactly one token tree
-        sym::ident    | // exactly one token tree
-        sym::literal  | // exactly one token tree
-        sym::meta     | // exactly one token tree
-        sym::lifetime | // exactly one token tree
-        sym::tt =>   // exactly one token tree
-            true,
-
-        _ =>
-            false,
-    }
-}
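
An assumed illustration (made-up macro, not from this file): because `tt` consumes exactly one token tree, the rule above lets a repetition follow it directly.

    macro_rules! after_tt {
        ($first:tt $($rest:tt)*) => { stringify!($first $($rest)*) };
    }

    fn main() {
        assert_eq!(after_tt!(a b c), "a b c");
    }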
-
-enum IsInFollow {
-    Yes,
-    No(&'static [&'static str]),
-    Invalid(String, &'static str),
-}
-
-/// Returns whether `frag` can legally be followed by the token `tok`. For
-/// fragments that can consume an unbounded number of tokens, `tok`
-/// must be within a well-defined follow set. This is intended to
-/// guarantee future compatibility: for example, without this rule, if
-/// we expanded `expr` to include a new binary operator, we might
-/// break macros that were relying on that binary operator as a
-/// separator.
-// when changing this do not forget to update doc/book/macros.md!
-fn is_in_follow(tok: &mbe::TokenTree, frag: Symbol) -> IsInFollow {
-    use mbe::TokenTree;
-
-    if let TokenTree::Token(Token { kind: token::CloseDelim(_), .. }) = *tok {
-        // closing a token tree can never be matched by any fragment;
-        // in other words, we always require that `(` and `)` match, etc.
-        IsInFollow::Yes
-    } else {
-        match frag {
-            sym::item => {
-                // since items *must* be followed by either a `;` or a `}`, we can
-                // accept anything after them
-                IsInFollow::Yes
-            }
-            sym::block => {
-                // anything can follow a block; the braces provide an easy boundary to
-                // maintain
-                IsInFollow::Yes
-            }
-            sym::stmt | sym::expr => {
-                const TOKENS: &[&str] = &["`=>`", "`,`", "`;`"];
-                match tok {
-                    TokenTree::Token(token) => match token.kind {
-                        FatArrow | Comma | Semi => IsInFollow::Yes,
-                        _ => IsInFollow::No(TOKENS),
-                    },
-                    _ => IsInFollow::No(TOKENS),
-                }
-            }
-            sym::pat => {
-                const TOKENS: &[&str] = &["`=>`", "`,`", "`=`", "`|`", "`if`", "`in`"];
-                match tok {
-                    TokenTree::Token(token) => match token.kind {
-                        FatArrow | Comma | Eq | BinOp(token::Or) => IsInFollow::Yes,
-                        Ident(name, false) if name == kw::If || name == kw::In => IsInFollow::Yes,
-                        _ => IsInFollow::No(TOKENS),
-                    },
-                    _ => IsInFollow::No(TOKENS),
-                }
-            }
-            sym::path | sym::ty => {
-                const TOKENS: &[&str] = &[
-                    "`{`", "`[`", "`=>`", "`,`", "`>`", "`=`", "`:`", "`;`", "`|`", "`as`",
-                    "`where`",
-                ];
-                match tok {
-                    TokenTree::Token(token) => match token.kind {
-                        OpenDelim(token::DelimToken::Brace)
-                        | OpenDelim(token::DelimToken::Bracket)
-                        | Comma
-                        | FatArrow
-                        | Colon
-                        | Eq
-                        | Gt
-                        | BinOp(token::Shr)
-                        | Semi
-                        | BinOp(token::Or) => IsInFollow::Yes,
-                        Ident(name, false) if name == kw::As || name == kw::Where => {
-                            IsInFollow::Yes
-                        }
-                        _ => IsInFollow::No(TOKENS),
-                    },
-                    TokenTree::MetaVarDecl(_, _, frag) if frag.name == sym::block => {
-                        IsInFollow::Yes
-                    }
-                    _ => IsInFollow::No(TOKENS),
-                }
-            }
-            sym::ident | sym::lifetime => {
-                // being a single token, idents and lifetimes are harmless
-                IsInFollow::Yes
-            }
-            sym::literal => {
-                // literals may be a single token, or two tokens (negative numbers)
-                IsInFollow::Yes
-            }
-            sym::meta | sym::tt => {
-                // being either a single token or a delimited sequence, tt is
-                // harmless
-                IsInFollow::Yes
-            }
-            sym::vis => {
-                // Explicitly disallow `priv`, on the off chance it comes back.
-                const TOKENS: &[&str] = &["`,`", "an ident", "a type"];
-                match tok {
-                    TokenTree::Token(token) => match token.kind {
-                        Comma => IsInFollow::Yes,
-                        Ident(name, is_raw) if is_raw || name != kw::Priv => IsInFollow::Yes,
-                        _ => {
-                            if token.can_begin_type() {
-                                IsInFollow::Yes
-                            } else {
-                                IsInFollow::No(TOKENS)
-                            }
-                        }
-                    },
-                    TokenTree::MetaVarDecl(_, _, frag)
-                        if frag.name == sym::ident
-                            || frag.name == sym::ty
-                            || frag.name == sym::path =>
-                    {
-                        IsInFollow::Yes
-                    }
-                    _ => IsInFollow::No(TOKENS),
-                }
-            }
-            kw::Invalid => IsInFollow::Yes,
-            _ => IsInFollow::Invalid(
-                format!("invalid fragment specifier `{}`", frag),
-                VALID_FRAGMENT_NAMES_MSG,
-            ),
-        }
-    }
-}
-
-fn has_legal_fragment_specifier(
-    sess: &ParseSess,
-    features: &Features,
-    attrs: &[ast::Attribute],
-    tok: &mbe::TokenTree,
-) -> Result<(), String> {
-    debug!("has_legal_fragment_specifier({:?})", tok);
-    if let mbe::TokenTree::MetaVarDecl(_, _, ref frag_spec) = *tok {
-        let frag_span = tok.span();
-        if !is_legal_fragment_specifier(sess, features, attrs, frag_spec.name, frag_span) {
-            return Err(frag_spec.to_string());
-        }
-    }
-    Ok(())
-}
-
-fn is_legal_fragment_specifier(
-    _sess: &ParseSess,
-    _features: &Features,
-    _attrs: &[ast::Attribute],
-    frag_name: Symbol,
-    _frag_span: Span,
-) -> bool {
-    /*
-     * If new fragment specifiers are invented in nightly, `_sess`,
-     * `_features`, `_attrs`, and `_frag_span` will be useful here
-     * for checking against feature gates. See past versions of
-     * this function.
-     */
-    match frag_name {
-        sym::item
-        | sym::block
-        | sym::stmt
-        | sym::expr
-        | sym::pat
-        | sym::lifetime
-        | sym::path
-        | sym::ty
-        | sym::ident
-        | sym::meta
-        | sym::tt
-        | sym::vis
-        | sym::literal
-        | kw::Invalid => true,
-        _ => false,
-    }
-}
-
-fn quoted_tt_to_string(tt: &mbe::TokenTree) -> String {
-    match *tt {
-        mbe::TokenTree::Token(ref token) => crate::print::pprust::token_to_string(&token),
-        mbe::TokenTree::MetaVar(_, name) => format!("${}", name),
-        mbe::TokenTree::MetaVarDecl(_, name, kind) => format!("${}:{}", name, kind),
-        _ => panic!(
-            "unexpected mbe::TokenTree::{{Sequence or Delimited}} \
-             in follow set checker"
-        ),
-    }
-}
-
-impl TokenTree {
-    /// Use this token tree as a matcher to parse given tts.
-    fn parse(cx: &ExtCtxt<'_>, mtch: &[mbe::TokenTree], tts: TokenStream)
-             -> NamedParseResult {
-        // `None` is because we're not interpolating
-        let directory = Directory {
-            path: Cow::from(cx.current_expansion.module.directory.as_path()),
-            ownership: cx.current_expansion.directory_ownership,
-        };
-        parse(cx.parse_sess(), tts, mtch, Some(directory), true)
-    }
-}
-
-/// Generates an appropriate parsing failure message. For EOF, this is "unexpected end...". For
-/// other tokens, this is "no rules expected the token...".
-fn parse_failure_msg(tok: &Token) -> String {
-    match tok.kind {
-        token::Eof => "unexpected end of macro invocation".to_string(),
-        _ => format!(
-            "no rules expected the token `{}`",
-            pprust::token_to_string(tok),
-        ),
-    }
-}
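
For context, the follow-set analysis above is what accepts or rejects macro matchers at definition time. A minimal, illustrative sketch (the diagnostic wording is approximate and not quoted from this commit):

// Accepted: `,` is in the follow set of `expr` (see the FOLLOW tokens above).
macro_rules! pair {
    ($a:expr, $b:expr) => { ($a, $b) };
}

// Rejected at definition time: `+` is not in FOLLOW(expr), so rustc emits an
// error along the lines of "`$a:expr` is followed by `+`, which is not allowed
// for `expr` fragments".
// macro_rules! bad_sum {
//     ($a:expr + $b:expr) => { $a + $b };
// }

fn main() {
    assert_eq!(pair!(1 + 1, 2), (2, 2));
}
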
diff --git a/src/libsyntax/ext/mbe/quoted.rs b/src/libsyntax/ext/mbe/quoted.rs
deleted file mode 100644 (file)
index 8cb85bd..0000000
+++ /dev/null
@@ -1,263 +0,0 @@
-use crate::ast;
-use crate::ext::mbe::macro_parser;
-use crate::ext::mbe::{TokenTree, KleeneOp, KleeneToken, SequenceRepetition, Delimited};
-use crate::parse::token::{self, Token};
-use crate::parse::ParseSess;
-use crate::print::pprust;
-use crate::symbol::kw;
-use crate::tokenstream;
-
-use syntax_pos::Span;
-
-use rustc_data_structures::sync::Lrc;
-
-/// Takes a `tokenstream::TokenStream` and returns a `Vec<self::TokenTree>`. Specifically, this
-/// takes a generic `TokenStream`, such as is used in the rest of the compiler, and returns a
-/// collection of `TokenTree` for use in parsing a macro.
-///
-/// # Parameters
-///
-/// - `input`: a token stream to read from, the contents of which we are parsing.
-/// - `expect_matchers`: `parse` can be used to parse either the "patterns" or the "body" of a
-///   macro. Both take roughly the same form _except_ that in a pattern, metavars are declared with
-///   their "matcher" type. For example `$var:expr` or `$id:ident`. In this example, `expr` and
-///   `ident` are "matchers". They are not present in the body of a macro rule -- just in the
-///   pattern, so we pass a parameter to indicate whether to expect them or not.
-/// - `sess`: the parsing session. Any errors will be emitted to this session.
-///
-/// # Returns
-///
-/// A collection of `self::TokenTree`. There may also be some errors emitted to `sess`.
-pub(super) fn parse(
-    input: tokenstream::TokenStream,
-    expect_matchers: bool,
-    sess: &ParseSess,
-) -> Vec<TokenTree> {
-    // Will contain the final collection of `self::TokenTree`
-    let mut result = Vec::new();
-
-    // For each token tree in `input`, parse the token into a `self::TokenTree`, consuming
-    // additional trees if need be.
-    let mut trees = input.trees();
-    while let Some(tree) = trees.next() {
-        // Given the parsed tree, if there is a metavar and we are expecting matchers, actually
-        // parse out the matcher (i.e., in `$id:ident` this would parse the `:` and `ident`).
-        let tree = parse_tree(
-            tree,
-            &mut trees,
-            expect_matchers,
-            sess,
-        );
-        match tree {
-            TokenTree::MetaVar(start_sp, ident) if expect_matchers => {
-                let span = match trees.next() {
-                    Some(tokenstream::TokenTree::Token(Token { kind: token::Colon, span })) => {
-                        match trees.next() {
-                            Some(tokenstream::TokenTree::Token(token)) => match token.ident() {
-                                Some((kind, _)) => {
-                                    let span = token.span.with_lo(start_sp.lo());
-                                    result.push(TokenTree::MetaVarDecl(span, ident, kind));
-                                    continue;
-                                }
-                                _ => token.span,
-                            },
-                            tree => tree.as_ref().map(tokenstream::TokenTree::span).unwrap_or(span),
-                        }
-                    }
-                    tree => tree.as_ref().map(tokenstream::TokenTree::span).unwrap_or(start_sp),
-                };
-                sess.missing_fragment_specifiers.borrow_mut().insert(span);
-                result.push(TokenTree::MetaVarDecl(span, ident, ast::Ident::invalid()));
-            }
-
-            // Not a metavar or no matchers allowed, so just return the tree
-            _ => result.push(tree),
-        }
-    }
-    result
-}
-
-/// Takes a `tokenstream::TokenTree` and returns a `self::TokenTree`. Specifically, this takes a
-/// generic `TokenTree`, such as is used in the rest of the compiler, and returns a `TokenTree`
-/// for use in parsing a macro.
-///
-/// Converting the given tree may involve reading more tokens.
-///
-/// # Parameters
-///
-/// - `tree`: the tree we wish to convert.
-/// - `trees`: an iterator over trees. We may need to read more tokens from it in order to finish
-///   converting `tree`
-/// - `expect_matchers`: same as for `parse` (see above).
-/// - `sess`: the parsing session. Any errors will be emitted to this session.
-fn parse_tree(
-    tree: tokenstream::TokenTree,
-    trees: &mut impl Iterator<Item = tokenstream::TokenTree>,
-    expect_matchers: bool,
-    sess: &ParseSess,
-) -> TokenTree {
-    // Depending on what `tree` is, we could be parsing different parts of a macro
-    match tree {
-        // `tree` is a `$` token. Look at the next token in `trees`
-        tokenstream::TokenTree::Token(Token { kind: token::Dollar, span }) => match trees.next() {
-            // `tree` is followed by a delimited set of token trees. This indicates the beginning
-            // of a repetition sequence in the macro (e.g. `$(pat)*`).
-            Some(tokenstream::TokenTree::Delimited(span, delim, tts)) => {
-                // Must have `(` not `{` or `[`
-                if delim != token::Paren {
-                    let tok = pprust::token_kind_to_string(&token::OpenDelim(delim));
-                    let msg = format!("expected `(`, found `{}`", tok);
-                    sess.span_diagnostic.span_err(span.entire(), &msg);
-                }
-                // Parse the contents of the sequence itself
-                let sequence = parse(
-                    tts.into(),
-                    expect_matchers,
-                    sess,
-                );
-                // Get the Kleene operator and optional separator
-                let (separator, kleene) = parse_sep_and_kleene_op(trees, span.entire(), sess);
-                // Count the number of captured "names" (i.e., named metavars)
-                let name_captures = macro_parser::count_names(&sequence);
-                TokenTree::Sequence(
-                    span,
-                    Lrc::new(SequenceRepetition {
-                        tts: sequence,
-                        separator,
-                        kleene,
-                        num_captures: name_captures,
-                    }),
-                )
-            }
-
-            // `tree` is followed by an `ident`. This could be `$meta_var` or the `$crate` special
-            // metavariable that names the crate of the invocation.
-            Some(tokenstream::TokenTree::Token(token)) if token.is_ident() => {
-                let (ident, is_raw) = token.ident().unwrap();
-                let span = ident.span.with_lo(span.lo());
-                if ident.name == kw::Crate && !is_raw {
-                    TokenTree::token(token::Ident(kw::DollarCrate, is_raw), span)
-                } else {
-                    TokenTree::MetaVar(span, ident)
-                }
-            }
-
-            // `tree` is followed by a random token. This is an error.
-            Some(tokenstream::TokenTree::Token(token)) => {
-                let msg =
-                    format!("expected identifier, found `{}`", pprust::token_to_string(&token),);
-                sess.span_diagnostic.span_err(token.span, &msg);
-                TokenTree::MetaVar(token.span, ast::Ident::invalid())
-            }
-
-            // There are no more tokens. Just return the `$` we already have.
-            None => TokenTree::token(token::Dollar, span),
-        },
-
-        // `tree` is an arbitrary token. Keep it.
-        tokenstream::TokenTree::Token(token) => TokenTree::Token(token),
-
-        // `tree` is the beginning of a delimited set of tokens (e.g., `(` or `{`). We need to
-        // descend into the delimited set and further parse it.
-        tokenstream::TokenTree::Delimited(span, delim, tts) => TokenTree::Delimited(
-            span,
-            Lrc::new(Delimited {
-                delim,
-                tts: parse(
-                    tts.into(),
-                    expect_matchers,
-                    sess,
-                ),
-            }),
-        ),
-    }
-}
-
-/// Takes a token and returns `Some(KleeneOp)` if the token is `+`, `*`, or `?`. Otherwise, returns
-/// `None`.
-fn kleene_op(token: &Token) -> Option<KleeneOp> {
-    match token.kind {
-        token::BinOp(token::Star) => Some(KleeneOp::ZeroOrMore),
-        token::BinOp(token::Plus) => Some(KleeneOp::OneOrMore),
-        token::Question => Some(KleeneOp::ZeroOrOne),
-        _ => None,
-    }
-}
-
-/// Parse the next token tree of the input looking for a KleeneOp. Returns
-///
-/// - Ok(Ok((op, span))) if the next token tree is a KleeneOp
-/// - Ok(Err(token)) if the next token tree is a token but not a KleeneOp
-/// - Err(span) if the next token tree is not a token
-fn parse_kleene_op(
-    input: &mut impl Iterator<Item = tokenstream::TokenTree>,
-    span: Span,
-) -> Result<Result<(KleeneOp, Span), Token>, Span> {
-    match input.next() {
-        Some(tokenstream::TokenTree::Token(token)) => match kleene_op(&token) {
-            Some(op) => Ok(Ok((op, token.span))),
-            None => Ok(Err(token)),
-        },
-        tree => Err(tree.as_ref().map(tokenstream::TokenTree::span).unwrap_or(span)),
-    }
-}
-
-/// Attempt to parse a single Kleene operator, possibly with a separator.
-///
-/// For example, in a pattern such as `$(a),*`, `a` is the pattern to be repeated, `,` is the
-/// separator, and `*` is the Kleene operator. This function is specifically concerned with parsing
-/// the last two tokens of such a pattern: namely, the optional separator and the Kleene operator
-/// itself. Note that here we are parsing the _macro_ itself, rather than trying to match some
-/// stream of tokens in an invocation of a macro.
-///
-/// This function will take some input iterator `input` corresponding to `span` and a parsing
-/// session `sess`. If the next one (or possibly two) tokens in `input` correspond to a Kleene
-/// operator and separator, then a tuple with `(separator, KleeneOp)` is returned. Otherwise, an
-/// error with the appropriate span is emitted to `sess` and a dummy value is returned.
-fn parse_sep_and_kleene_op(
-    input: &mut impl Iterator<Item = tokenstream::TokenTree>,
-    span: Span,
-    sess: &ParseSess,
-) -> (Option<Token>, KleeneToken) {
-    // We basically look at two token trees here, denoted as #1 and #2 below
-    let span = match parse_kleene_op(input, span) {
-        // #1 is a `?`, `+`, or `*` KleeneOp
-        Ok(Ok((op, span))) => return (None, KleeneToken::new(op, span)),
-
-        // #1 is a separator followed by #2, a KleeneOp
-        Ok(Err(token)) => match parse_kleene_op(input, token.span) {
-            // #2 is the `?` Kleene op, which does not take a separator (error)
-            Ok(Ok((KleeneOp::ZeroOrOne, span))) => {
-                // Error!
-                sess.span_diagnostic.span_err(
-                    token.span,
-                    "the `?` macro repetition operator does not take a separator",
-                );
-
-                // Return a dummy
-                return (None, KleeneToken::new(KleeneOp::ZeroOrMore, span));
-            }
-
-            // #2 is a KleeneOp :D
-            Ok(Ok((op, span))) => return (Some(token), KleeneToken::new(op, span)),
-
-            // #2 is a random token or not a token at all :(
-            Ok(Err(Token { span, .. })) | Err(span) => span,
-        },
-
-        // #1 is not a token
-        Err(span) => span,
-    };
-
-    // If we ever get to this point, we have experienced an "unexpected token" error
-    sess.span_diagnostic.span_err(span, "expected one of: `*`, `+`, or `?`");
-
-    // Return a dummy
-    (None, KleeneToken::new(KleeneOp::ZeroOrMore, span))
-}
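
As a usage-level illustration of the separator and Kleene-operator parsing above (a sketch; only the quoted error string comes from the code in this diff):

// The `,` before `*` is parsed as the separator, `*` as the Kleene operator.
macro_rules! sum {
    ($($x:expr),*) => { 0 $(+ $x)* };
}

// A separator before `?` is rejected at definition time with
// "the `?` macro repetition operator does not take a separator".
// macro_rules! maybe {
//     ($($x:expr),?) => { $($x)? };
// }

fn main() {
    assert_eq!(sum!(1, 2, 3), 6);
    assert_eq!(sum!(), 0);
}
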
diff --git a/src/libsyntax/ext/mbe/transcribe.rs b/src/libsyntax/ext/mbe/transcribe.rs
deleted file mode 100644 (file)
index ba818eb..0000000
+++ /dev/null
@@ -1,398 +0,0 @@
-use crate::ast::{Ident, Mac};
-use crate::ext::base::ExtCtxt;
-use crate::ext::mbe;
-use crate::ext::mbe::macro_parser::{MatchedNonterminal, MatchedSeq, NamedMatch};
-use crate::mut_visit::{self, MutVisitor};
-use crate::parse::token::{self, NtTT, Token};
-use crate::tokenstream::{DelimSpan, TokenStream, TokenTree, TreeAndJoint};
-
-use smallvec::{smallvec, SmallVec};
-
-use errors::pluralise;
-use rustc_data_structures::fx::FxHashMap;
-use rustc_data_structures::sync::Lrc;
-use syntax_pos::hygiene::{ExpnId, Transparency};
-use syntax_pos::Span;
-
-use std::mem;
-
-// A Marker adds the given mark to the syntax context.
-struct Marker(ExpnId, Transparency);
-
-impl MutVisitor for Marker {
-    fn visit_span(&mut self, span: &mut Span) {
-        *span = span.apply_mark(self.0, self.1)
-    }
-
-    fn visit_mac(&mut self, mac: &mut Mac) {
-        mut_visit::noop_visit_mac(mac, self)
-    }
-}
-
-impl Marker {
-    fn visit_delim_span(&mut self, dspan: &mut DelimSpan) {
-        self.visit_span(&mut dspan.open);
-        self.visit_span(&mut dspan.close);
-    }
-}
-
-/// An iterator over the token trees in a delimited token tree (`{ ... }`) or a sequence (`$(...)`).
-enum Frame {
-    Delimited { forest: Lrc<mbe::Delimited>, idx: usize, span: DelimSpan },
-    Sequence { forest: Lrc<mbe::SequenceRepetition>, idx: usize, sep: Option<Token> },
-}
-
-impl Frame {
-    /// Construct a new frame around the delimited set of tokens.
-    fn new(tts: Vec<mbe::TokenTree>) -> Frame {
-        let forest = Lrc::new(mbe::Delimited { delim: token::NoDelim, tts });
-        Frame::Delimited { forest, idx: 0, span: DelimSpan::dummy() }
-    }
-}
-
-impl Iterator for Frame {
-    type Item = mbe::TokenTree;
-
-    fn next(&mut self) -> Option<mbe::TokenTree> {
-        match *self {
-            Frame::Delimited { ref forest, ref mut idx, .. } => {
-                *idx += 1;
-                forest.tts.get(*idx - 1).cloned()
-            }
-            Frame::Sequence { ref forest, ref mut idx, .. } => {
-                *idx += 1;
-                forest.tts.get(*idx - 1).cloned()
-            }
-        }
-    }
-}
-
-/// This can do Macro-By-Example transcription.
-/// - `interp` is a map of meta-variables to the tokens (non-terminals) they matched in the
-///   invocation. We are assuming we already know there is a match.
-/// - `src` is the RHS of the MBE, that is, the "example" we are filling in.
-///
-/// For example,
-///
-/// ```rust
-/// macro_rules! foo {
-///     ($id:ident) => { println!("{}", stringify!($id)); }
-/// }
-///
-/// foo!(bar);
-/// ```
-///
-/// `interp` would contain `$id => bar` and `src` would contain `println!("{}", stringify!($id));`.
-///
-/// `transcribe` would return a `TokenStream` containing `println!("{}", stringify!(bar));`.
-///
-/// Along the way, we do some additional error checking.
-pub(super) fn transcribe(
-    cx: &ExtCtxt<'_>,
-    interp: &FxHashMap<Ident, NamedMatch>,
-    src: Vec<mbe::TokenTree>,
-    transparency: Transparency,
-) -> TokenStream {
-    // Nothing for us to transcribe...
-    if src.is_empty() {
-        return TokenStream::empty();
-    }
-
-    // We descend into the RHS (`src`), expanding things as we go. This stack contains the things
-    // we have yet to expand/are still expanding. We start the stack off with the whole RHS.
-    let mut stack: SmallVec<[Frame; 1]> = smallvec![Frame::new(src)];
-
-    // As we descend in the RHS, we will need to be able to match nested sequences of matchers.
-    // `repeats` keeps track of where we are in matching at each level, with the last element being
-    // the most deeply nested sequence. This is used as a stack.
-    let mut repeats = Vec::new();
-
-    // `result` contains the resulting token stream from the TokenTree we just finished processing. At
-    // the end, this will contain the full result of transcription, but at arbitrary points during
-    // `transcribe`, `result` will contain subsets of the final result.
-    //
-    // Specifically, as we descend into each TokenTree, we will push the existing results onto the
-    // `result_stack` and clear `results`. We will then produce the results of transcribing the
-    // TokenTree into `results`. Then, as we unwind back out of the `TokenTree`, we will pop the
-    // `result_stack` and append `results` to it to produce the new `results` up to that point.
-    //
-    // Thus, if we try to pop the `result_stack` and it is empty, we have reached the top-level
-    // again, and we are done transcribing.
-    let mut result: Vec<TreeAndJoint> = Vec::new();
-    let mut result_stack = Vec::new();
-    let mut marker = Marker(cx.current_expansion.id, transparency);
-
-    loop {
-        // Look at the last frame on the stack.
-        let tree = if let Some(tree) = stack.last_mut().unwrap().next() {
-            // If it still has a TokenTree we have not looked at yet, use that tree.
-            tree
-        }
-        // The else-case never produces a value for `tree` (it `continue`s or `return`s).
-        else {
-            // Otherwise, if we have just reached the end of a sequence and we can keep repeating,
-            // go back to the beginning of the sequence.
-            if let Frame::Sequence { idx, sep, .. } = stack.last_mut().unwrap() {
-                let (repeat_idx, repeat_len) = repeats.last_mut().unwrap();
-                *repeat_idx += 1;
-                if repeat_idx < repeat_len {
-                    *idx = 0;
-                    if let Some(sep) = sep {
-                        result.push(TokenTree::Token(sep.clone()).into());
-                    }
-                    continue;
-                }
-            }
-
-            // We are done with the top of the stack. Pop it. Depending on what it was, we do
-            // different things. Note that the outermost item must be the delimited, wrapped RHS
-            // that was passed in originally to `transcribe`.
-            match stack.pop().unwrap() {
-                // Done with a sequence. Pop from repeats.
-                Frame::Sequence { .. } => {
-                    repeats.pop();
-                }
-
-                // We are done processing a Delimited. If this is the top-level delimited, we are
-                // done. Otherwise, we unwind the result_stack to append what we have produced to
-                // any previous results.
-                Frame::Delimited { forest, span, .. } => {
-                    if result_stack.is_empty() {
-                        // No results left to compute! We are back at the top-level.
-                        return TokenStream::new(result);
-                    }
-
-                    // Step back into the parent Delimited.
-                    let tree =
-                        TokenTree::Delimited(span, forest.delim, TokenStream::new(result).into());
-                    result = result_stack.pop().unwrap();
-                    result.push(tree.into());
-                }
-            }
-            continue;
-        };
-
-        // At this point, we know we are in the middle of a TokenTree (the last one on `stack`).
-        // `tree` contains the next `TokenTree` to be processed.
-        match tree {
-            // We are descending into a sequence. We first make sure that the matchers in the RHS
-            // and the matches in `interp` have the same shape. Otherwise, either the caller or the
-            // macro writer has made a mistake.
-            seq @ mbe::TokenTree::Sequence(..) => {
-                match lockstep_iter_size(&seq, interp, &repeats) {
-                    LockstepIterSize::Unconstrained => {
-                        cx.span_fatal(
-                            seq.span(), /* blame macro writer */
-                            "attempted to repeat an expression containing no syntax variables \
-                             matched as repeating at this depth",
-                        );
-                    }
-
-                    LockstepIterSize::Contradiction(ref msg) => {
-                        // FIXME: this really ought to be caught at macro definition time... It
-                        // happens when two meta-variables are used in the same repetition in a
-                        // sequence, but they come from different sequence matchers and repeat
-                        // different amounts.
-                        cx.span_fatal(seq.span(), &msg[..]);
-                    }
-
-                    LockstepIterSize::Constraint(len, _) => {
-                        // We do this to avoid an extra clone above. We know that this is a
-                        // sequence already.
-                        let (sp, seq) = if let mbe::TokenTree::Sequence(sp, seq) = seq {
-                            (sp, seq)
-                        } else {
-                            unreachable!()
-                        };
-
-                        // Is the repetition empty?
-                        if len == 0 {
-                            if seq.kleene.op == mbe::KleeneOp::OneOrMore {
-                                // FIXME: this really ought to be caught at macro definition
-                                // time... It happens when the Kleene operator in the matcher and
-                                // the body for the same meta-variable do not match.
-                                cx.span_fatal(sp.entire(), "this must repeat at least once");
-                            }
-                        } else {
-                            // 0 is the initial counter (we have done 0 repetitions so far). `len`
-                            // is the total number of repetitions we should generate.
-                            repeats.push((0, len));
-
-                            // The first time we encounter the sequence we push it to the stack. It
-                            // then gets reused (see the beginning of the loop) until we are done
-                            // repeating.
-                            stack.push(Frame::Sequence {
-                                idx: 0,
-                                sep: seq.separator.clone(),
-                                forest: seq,
-                            });
-                        }
-                    }
-                }
-            }
-
-            // Replace the meta-var with the matched token tree from the invocation.
-            mbe::TokenTree::MetaVar(mut sp, mut ident) => {
-                // Find the matched nonterminal from the macro invocation, and use it to replace
-                // the meta-var.
-                if let Some(cur_matched) = lookup_cur_matched(ident, interp, &repeats) {
-                    if let MatchedNonterminal(ref nt) = cur_matched {
-                        // FIXME #2887: why do we apply a mark when matching a token tree meta-var
-                        // (e.g. `$x:tt`), but not when we are matching any other type of token
-                        // tree?
-                        if let NtTT(ref tt) = **nt {
-                            result.push(tt.clone().into());
-                        } else {
-                            marker.visit_span(&mut sp);
-                            let token = TokenTree::token(token::Interpolated(nt.clone()), sp);
-                            result.push(token.into());
-                        }
-                    } else {
-                        // We were unable to descend far enough. This is an error.
-                        cx.span_fatal(
-                            sp, /* blame the macro writer */
-                            &format!("variable '{}' is still repeating at this depth", ident),
-                        );
-                    }
-                } else {
-                    // If we aren't able to match the meta-var, we push it back into the result but
-                    // with modified syntax context. (I believe this supports nested macros).
-                    marker.visit_span(&mut sp);
-                    marker.visit_ident(&mut ident);
-                    result.push(TokenTree::token(token::Dollar, sp).into());
-                    result.push(TokenTree::Token(Token::from_ast_ident(ident)).into());
-                }
-            }
-
-            // If we are entering a new delimiter, we push its contents to the `stack` to be
-            // processed, and we push all of the currently produced results to the `result_stack`.
-            // We will produce all of the results of the inside of the `Delimited` and then we will
-            // jump back out of the Delimited, pop the result_stack and add the new results back to
-            // the previous results (from outside the Delimited).
-            mbe::TokenTree::Delimited(mut span, delimited) => {
-                marker.visit_delim_span(&mut span);
-                stack.push(Frame::Delimited { forest: delimited, idx: 0, span });
-                result_stack.push(mem::take(&mut result));
-            }
-
-            // Nothing much to do here. Just push the token to the result, being careful to
-            // preserve syntax context.
-            mbe::TokenTree::Token(token) => {
-                let mut tt = TokenTree::Token(token);
-                marker.visit_tt(&mut tt);
-                result.push(tt.into());
-            }
-
-            // There should be no meta-var declarations in the invocation of a macro.
-            mbe::TokenTree::MetaVarDecl(..) => panic!("unexpected `TokenTree::MetaVarDecl`"),
-        }
-    }
-}
-
-/// Lookup the meta-var named `ident` and return the matched token tree from the invocation using
-/// the set of matches `interpolations`.
-///
-/// See the definition of `repeats` in the `transcribe` function. `repeats` is used to descend
-/// into the right place in nested matchers. If we attempt to descend too far, the macro writer has
-/// made a mistake, and we return `None`.
-fn lookup_cur_matched<'a>(
-    ident: Ident,
-    interpolations: &'a FxHashMap<Ident, NamedMatch>,
-    repeats: &[(usize, usize)],
-) -> Option<&'a NamedMatch> {
-    interpolations.get(&ident).map(|matched| {
-        let mut matched = matched;
-        for &(idx, _) in repeats {
-            match matched {
-                MatchedNonterminal(_) => break,
-                MatchedSeq(ref ads, _) => matched = ads.get(idx).unwrap(),
-            }
-        }
-
-        matched
-    })
-}
-
-/// An accumulator over a TokenTree to be used with `fold`. During transcription, we need to make
-/// sure that the size of each sequence and all of its nested sequences are the same as the sizes
-/// of all the matched (nested) sequences in the macro invocation. If they don't match, somebody
-/// has made a mistake (either the macro writer or caller).
-#[derive(Clone)]
-enum LockstepIterSize {
-    /// No constraints on length of matcher. This is true for any TokenTree variant except a
-    /// `MetaVar` with an actual `MatchedSeq` (as opposed to a `MatchedNonterminal`).
-    Unconstrained,
-
-    /// A `MetaVar` with an actual `MatchedSeq`. The length of the match and the name of the
-    /// meta-var are returned.
-    Constraint(usize, Ident),
-
-    /// Two `Constraint`s on the same sequence had different lengths. This is an error.
-    Contradiction(String),
-}
-
-impl LockstepIterSize {
-    /// Find incompatibilities in matcher/invocation sizes.
-    /// - `Unconstrained` is compatible with everything.
-    /// - `Contradiction` is incompatible with everything.
-    /// - `Constraint(len)` is only compatible with other constraints of the same length.
-    fn with(self, other: LockstepIterSize) -> LockstepIterSize {
-        match self {
-            LockstepIterSize::Unconstrained => other,
-            LockstepIterSize::Contradiction(_) => self,
-            LockstepIterSize::Constraint(l_len, ref l_id) => match other {
-                LockstepIterSize::Unconstrained => self,
-                LockstepIterSize::Contradiction(_) => other,
-                LockstepIterSize::Constraint(r_len, _) if l_len == r_len => self,
-                LockstepIterSize::Constraint(r_len, r_id) => {
-                    let msg = format!(
-                        "meta-variable `{}` repeats {} time{}, but `{}` repeats {} time{}",
-                        l_id,
-                        l_len,
-                        pluralise!(l_len),
-                        r_id,
-                        r_len,
-                        pluralise!(r_len),
-                    );
-                    LockstepIterSize::Contradiction(msg)
-                }
-            },
-        }
-    }
-}
-
-/// Given a `tree`, make sure that all sequences have the same length as the matches for the
-/// appropriate meta-vars in `interpolations`.
-///
-/// Note that if `repeats` does not match the exact correct depth of a meta-var,
-/// `lookup_cur_matched` will return `None`, which is why this still works even in the presence of
-/// multiple nested matcher sequences.
-fn lockstep_iter_size(
-    tree: &mbe::TokenTree,
-    interpolations: &FxHashMap<Ident, NamedMatch>,
-    repeats: &[(usize, usize)],
-) -> LockstepIterSize {
-    use mbe::TokenTree;
-    match *tree {
-        TokenTree::Delimited(_, ref delimed) => {
-            delimed.tts.iter().fold(LockstepIterSize::Unconstrained, |size, tt| {
-                size.with(lockstep_iter_size(tt, interpolations, repeats))
-            })
-        }
-        TokenTree::Sequence(_, ref seq) => {
-            seq.tts.iter().fold(LockstepIterSize::Unconstrained, |size, tt| {
-                size.with(lockstep_iter_size(tt, interpolations, repeats))
-            })
-        }
-        TokenTree::MetaVar(_, name) | TokenTree::MetaVarDecl(_, name, _) => {
-            match lookup_cur_matched(name, interpolations, repeats) {
-                Some(matched) => match matched {
-                    MatchedNonterminal(_) => LockstepIterSize::Unconstrained,
-                    MatchedSeq(ref ads, _) => LockstepIterSize::Constraint(ads.len(), name),
-                },
-                _ => LockstepIterSize::Unconstrained,
-            }
-        }
-        TokenTree::Token(..) => LockstepIterSize::Unconstrained,
-    }
-}
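
The lockstep-size bookkeeping above is what detects mismatched repetition lengths during transcription. A hedged sketch of the user-facing behaviour (the quoted message format comes from `LockstepIterSize::with` above; exact spans and wording may vary):

// `$a` and `$b` are transcribed in the same repetition, so the number of
// matches for each must agree (two `Constraint`s of equal length).
macro_rules! zip {
    ($($a:expr),* ; $($b:expr),*) => {
        [ $( ($a, $b) ),* ]
    };
}

fn main() {
    // OK: both sequences matched two items.
    assert_eq!(zip!(1, 2 ; 3, 4), [(1, 3), (2, 4)]);

    // A mismatch is a `Contradiction`, reported roughly as
    // "meta-variable `a` repeats 2 times, but `b` repeats 3 times":
    // let _ = zip!(1, 2 ; 3, 4, 5);
}
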
diff --git a/src/libsyntax/ext/placeholders.rs b/src/libsyntax/ext/placeholders.rs
deleted file mode 100644 (file)
index 8eecef1..0000000
+++ /dev/null
@@ -1,349 +0,0 @@
-use crate::ast::{self, NodeId};
-use crate::source_map::{DUMMY_SP, dummy_spanned};
-use crate::ext::base::ExtCtxt;
-use crate::ext::expand::{AstFragment, AstFragmentKind};
-use crate::tokenstream::TokenStream;
-use crate::mut_visit::*;
-use crate::ptr::P;
-use crate::ThinVec;
-
-use smallvec::{smallvec, SmallVec};
-
-use rustc_data_structures::fx::FxHashMap;
-
-pub fn placeholder(kind: AstFragmentKind, id: ast::NodeId) -> AstFragment {
-    fn mac_placeholder() -> ast::Mac {
-        ast::Mac {
-            path: ast::Path { span: DUMMY_SP, segments: Vec::new() },
-            tts: TokenStream::empty().into(),
-            delim: ast::MacDelimiter::Brace,
-            span: DUMMY_SP,
-            prior_type_ascription: None,
-        }
-    }
-
-    let ident = ast::Ident::invalid();
-    let attrs = Vec::new();
-    let generics = ast::Generics::default();
-    let vis = dummy_spanned(ast::VisibilityKind::Inherited);
-    let span = DUMMY_SP;
-    let expr_placeholder = || P(ast::Expr {
-        id, span,
-        attrs: ThinVec::new(),
-        kind: ast::ExprKind::Mac(mac_placeholder()),
-    });
-    let ty = P(ast::Ty {
-        id,
-        kind: ast::TyKind::Mac(mac_placeholder()),
-        span,
-    });
-    let pat = P(ast::Pat {
-        id,
-        kind: ast::PatKind::Mac(mac_placeholder()),
-        span,
-    });
-
-    match kind {
-        AstFragmentKind::Expr => AstFragment::Expr(expr_placeholder()),
-        AstFragmentKind::OptExpr => AstFragment::OptExpr(Some(expr_placeholder())),
-        AstFragmentKind::Items => AstFragment::Items(smallvec![P(ast::Item {
-            id, span, ident, vis, attrs,
-            kind: ast::ItemKind::Mac(mac_placeholder()),
-            tokens: None,
-        })]),
-        AstFragmentKind::TraitItems => AstFragment::TraitItems(smallvec![ast::TraitItem {
-            id, span, ident, attrs, generics,
-            kind: ast::TraitItemKind::Macro(mac_placeholder()),
-            tokens: None,
-        }]),
-        AstFragmentKind::ImplItems => AstFragment::ImplItems(smallvec![ast::ImplItem {
-            id, span, ident, vis, attrs, generics,
-            kind: ast::ImplItemKind::Macro(mac_placeholder()),
-            defaultness: ast::Defaultness::Final,
-            tokens: None,
-        }]),
-        AstFragmentKind::ForeignItems =>
-            AstFragment::ForeignItems(smallvec![ast::ForeignItem {
-                id, span, ident, vis, attrs,
-                kind: ast::ForeignItemKind::Macro(mac_placeholder()),
-            }]),
-        AstFragmentKind::Pat => AstFragment::Pat(P(ast::Pat {
-            id, span, kind: ast::PatKind::Mac(mac_placeholder()),
-        })),
-        AstFragmentKind::Ty => AstFragment::Ty(P(ast::Ty {
-            id, span, kind: ast::TyKind::Mac(mac_placeholder()),
-        })),
-        AstFragmentKind::Stmts => AstFragment::Stmts(smallvec![{
-            let mac = P((mac_placeholder(), ast::MacStmtStyle::Braces, ThinVec::new()));
-            ast::Stmt { id, span, kind: ast::StmtKind::Mac(mac) }
-        }]),
-        AstFragmentKind::Arms => AstFragment::Arms(smallvec![
-            ast::Arm {
-                attrs: Default::default(),
-                body: expr_placeholder(),
-                guard: None,
-                id,
-                pat,
-                span,
-                is_placeholder: true,
-            }
-        ]),
-        AstFragmentKind::Fields => AstFragment::Fields(smallvec![
-            ast::Field {
-                attrs: Default::default(),
-                expr: expr_placeholder(),
-                id,
-                ident,
-                is_shorthand: false,
-                span,
-                is_placeholder: true,
-            }
-        ]),
-        AstFragmentKind::FieldPats => AstFragment::FieldPats(smallvec![
-            ast::FieldPat {
-                attrs: Default::default(),
-                id,
-                ident,
-                is_shorthand: false,
-                pat,
-                span,
-                is_placeholder: true,
-            }
-        ]),
-        AstFragmentKind::GenericParams => AstFragment::GenericParams(smallvec![{
-            ast::GenericParam {
-                attrs: Default::default(),
-                bounds: Default::default(),
-                id,
-                ident,
-                is_placeholder: true,
-                kind: ast::GenericParamKind::Lifetime,
-            }
-        }]),
-        AstFragmentKind::Params => AstFragment::Params(smallvec![
-            ast::Param {
-                attrs: Default::default(),
-                id,
-                pat,
-                span,
-                ty,
-                is_placeholder: true,
-            }
-        ]),
-        AstFragmentKind::StructFields => AstFragment::StructFields(smallvec![
-            ast::StructField {
-                attrs: Default::default(),
-                id,
-                ident: None,
-                span,
-                ty,
-                vis,
-                is_placeholder: true,
-            }
-        ]),
-        AstFragmentKind::Variants => AstFragment::Variants(smallvec![
-            ast::Variant {
-                attrs: Default::default(),
-                data: ast::VariantData::Struct(Default::default(), false),
-                disr_expr: None,
-                id,
-                ident,
-                span,
-                is_placeholder: true,
-            }
-        ])
-    }
-}
-
-pub struct PlaceholderExpander<'a, 'b> {
-    expanded_fragments: FxHashMap<ast::NodeId, AstFragment>,
-    cx: &'a mut ExtCtxt<'b>,
-    monotonic: bool,
-}
-
-impl<'a, 'b> PlaceholderExpander<'a, 'b> {
-    pub fn new(cx: &'a mut ExtCtxt<'b>, monotonic: bool) -> Self {
-        PlaceholderExpander {
-            cx,
-            expanded_fragments: FxHashMap::default(),
-            monotonic,
-        }
-    }
-
-    pub fn add(&mut self, id: ast::NodeId, mut fragment: AstFragment, placeholders: Vec<NodeId>) {
-        fragment.mut_visit_with(self);
-        if let AstFragment::Items(mut items) = fragment {
-            for placeholder in placeholders {
-                match self.remove(placeholder) {
-                    AstFragment::Items(derived_items) => items.extend(derived_items),
-                    _ => unreachable!(),
-                }
-            }
-            fragment = AstFragment::Items(items);
-        }
-        self.expanded_fragments.insert(id, fragment);
-    }
-
-    fn remove(&mut self, id: ast::NodeId) -> AstFragment {
-        self.expanded_fragments.remove(&id).unwrap()
-    }
-}
-
-impl<'a, 'b> MutVisitor for PlaceholderExpander<'a, 'b> {
-    fn flat_map_arm(&mut self, arm: ast::Arm) -> SmallVec<[ast::Arm; 1]> {
-        if arm.is_placeholder {
-            self.remove(arm.id).make_arms()
-        } else {
-            noop_flat_map_arm(arm, self)
-        }
-    }
-
-    fn flat_map_field(&mut self, field: ast::Field) -> SmallVec<[ast::Field; 1]> {
-        if field.is_placeholder {
-            self.remove(field.id).make_fields()
-        } else {
-            noop_flat_map_field(field, self)
-        }
-    }
-
-    fn flat_map_field_pattern(&mut self, fp: ast::FieldPat) -> SmallVec<[ast::FieldPat; 1]> {
-        if fp.is_placeholder {
-            self.remove(fp.id).make_field_patterns()
-        } else {
-            noop_flat_map_field_pattern(fp, self)
-        }
-    }
-
-    fn flat_map_generic_param(
-        &mut self,
-        param: ast::GenericParam
-    ) -> SmallVec<[ast::GenericParam; 1]>
-    {
-        if param.is_placeholder {
-            self.remove(param.id).make_generic_params()
-        } else {
-            noop_flat_map_generic_param(param, self)
-        }
-    }
-
-    fn flat_map_param(&mut self, p: ast::Param) -> SmallVec<[ast::Param; 1]> {
-        if p.is_placeholder {
-            self.remove(p.id).make_params()
-        } else {
-            noop_flat_map_param(p, self)
-        }
-    }
-
-    fn flat_map_struct_field(&mut self, sf: ast::StructField) -> SmallVec<[ast::StructField; 1]> {
-        if sf.is_placeholder {
-            self.remove(sf.id).make_struct_fields()
-        } else {
-            noop_flat_map_struct_field(sf, self)
-        }
-    }
-
-    fn flat_map_variant(&mut self, variant: ast::Variant) -> SmallVec<[ast::Variant; 1]> {
-        if variant.is_placeholder {
-            self.remove(variant.id).make_variants()
-        } else {
-            noop_flat_map_variant(variant, self)
-        }
-    }
-
-    fn flat_map_item(&mut self, item: P<ast::Item>) -> SmallVec<[P<ast::Item>; 1]> {
-        match item.kind {
-            ast::ItemKind::Mac(_) => return self.remove(item.id).make_items(),
-            ast::ItemKind::MacroDef(_) => return smallvec![item],
-            _ => {}
-        }
-
-        noop_flat_map_item(item, self)
-    }
-
-    fn flat_map_trait_item(&mut self, item: ast::TraitItem) -> SmallVec<[ast::TraitItem; 1]> {
-        match item.kind {
-            ast::TraitItemKind::Macro(_) => self.remove(item.id).make_trait_items(),
-            _ => noop_flat_map_trait_item(item, self),
-        }
-    }
-
-    fn flat_map_impl_item(&mut self, item: ast::ImplItem) -> SmallVec<[ast::ImplItem; 1]> {
-        match item.kind {
-            ast::ImplItemKind::Macro(_) => self.remove(item.id).make_impl_items(),
-            _ => noop_flat_map_impl_item(item, self),
-        }
-    }
-
-    fn flat_map_foreign_item(&mut self, item: ast::ForeignItem) -> SmallVec<[ast::ForeignItem; 1]> {
-        match item.kind {
-            ast::ForeignItemKind::Macro(_) => self.remove(item.id).make_foreign_items(),
-            _ => noop_flat_map_foreign_item(item, self),
-        }
-    }
-
-    fn visit_expr(&mut self, expr: &mut P<ast::Expr>) {
-        match expr.kind {
-            ast::ExprKind::Mac(_) => *expr = self.remove(expr.id).make_expr(),
-            _ => noop_visit_expr(expr, self),
-        }
-    }
-
-    fn filter_map_expr(&mut self, expr: P<ast::Expr>) -> Option<P<ast::Expr>> {
-        match expr.kind {
-            ast::ExprKind::Mac(_) => self.remove(expr.id).make_opt_expr(),
-            _ => noop_filter_map_expr(expr, self),
-        }
-    }
-
-    fn flat_map_stmt(&mut self, stmt: ast::Stmt) -> SmallVec<[ast::Stmt; 1]> {
-        let (style, mut stmts) = match stmt.kind {
-            ast::StmtKind::Mac(mac) => (mac.1, self.remove(stmt.id).make_stmts()),
-            _ => return noop_flat_map_stmt(stmt, self),
-        };
-
-        if style == ast::MacStmtStyle::Semicolon {
-            if let Some(stmt) = stmts.pop() {
-                stmts.push(stmt.add_trailing_semicolon());
-            }
-        }
-
-        stmts
-    }
-
-    fn visit_pat(&mut self, pat: &mut P<ast::Pat>) {
-        match pat.kind {
-            ast::PatKind::Mac(_) => *pat = self.remove(pat.id).make_pat(),
-            _ => noop_visit_pat(pat, self),
-        }
-    }
-
-    fn visit_ty(&mut self, ty: &mut P<ast::Ty>) {
-        match ty.kind {
-            ast::TyKind::Mac(_) => *ty = self.remove(ty.id).make_ty(),
-            _ => noop_visit_ty(ty, self),
-        }
-    }
-
-    fn visit_block(&mut self, block: &mut P<ast::Block>) {
-        noop_visit_block(block, self);
-
-        for stmt in block.stmts.iter_mut() {
-            if self.monotonic {
-                assert_eq!(stmt.id, ast::DUMMY_NODE_ID);
-                stmt.id = self.cx.resolver.next_node_id();
-            }
-        }
-    }
-
-    fn visit_mod(&mut self, module: &mut ast::Mod) {
-        noop_visit_mod(module, self);
-        module.items.retain(|item| match item.kind {
-            ast::ItemKind::Mac(_) if !self.cx.ecfg.keep_macs => false, // remove macro definitions
-            _ => true,
-        });
-    }
-
-    fn visit_mac(&mut self, _mac: &mut ast::Mac) {
-        // Do nothing.
-    }
-}
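
The placeholder mechanism above follows a simple pattern: unexpanded nodes are replaced by placeholders keyed by `NodeId`, their expansions are recorded out of band, and a final visitor swaps each placeholder for its recorded fragment. A toy, self-contained sketch of that pattern (none of these types are the compiler's own):

use std::collections::HashMap;

// A toy AST: either a plain item or a placeholder for a not-yet-expanded macro.
#[derive(Debug, Clone, PartialEq)]
enum Item {
    Plain(String),
    Placeholder(u32), // keyed by a node id, like `placeholder(kind, id)` above
}

struct Expander {
    expanded: HashMap<u32, Vec<Item>>, // analogous to `expanded_fragments`
}

impl Expander {
    fn add(&mut self, id: u32, fragment: Vec<Item>) {
        self.expanded.insert(id, fragment);
    }

    // Analogous to `PlaceholderExpander::flat_map_item`: placeholders are
    // replaced by their recorded expansion, everything else is kept as-is.
    fn expand(&mut self, items: Vec<Item>) -> Vec<Item> {
        items
            .into_iter()
            .flat_map(|item| match item {
                Item::Placeholder(id) => self.expanded.remove(&id).unwrap(),
                other => vec![other],
            })
            .collect()
    }
}

fn main() {
    let module = vec![Item::Plain("fn a() {}".into()), Item::Placeholder(7)];
    let mut expander = Expander { expanded: HashMap::new() };
    expander.add(7, vec![Item::Plain("fn b() {}".into()), Item::Plain("fn c() {}".into())]);
    let expanded = expander.expand(module);
    assert_eq!(expanded.len(), 3);
}
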
diff --git a/src/libsyntax/ext/proc_macro.rs b/src/libsyntax/ext/proc_macro.rs
deleted file mode 100644 (file)
index e17bbf7..0000000
+++ /dev/null
@@ -1,221 +0,0 @@
-use crate::ast::{self, ItemKind, Attribute, Mac};
-use crate::attr::{mark_used, mark_known};
-use crate::errors::{Applicability, FatalError};
-use crate::ext::base::{self, *};
-use crate::ext::proc_macro_server;
-use crate::parse::{self, token};
-use crate::parse::parser::PathStyle;
-use crate::symbol::sym;
-use crate::tokenstream::{self, TokenStream};
-use crate::visit::Visitor;
-
-use rustc_data_structures::sync::Lrc;
-use syntax_pos::{Span, DUMMY_SP};
-
-const EXEC_STRATEGY: proc_macro::bridge::server::SameThread =
-    proc_macro::bridge::server::SameThread;
-
-pub struct BangProcMacro {
-    pub client: proc_macro::bridge::client::Client<
-        fn(proc_macro::TokenStream) -> proc_macro::TokenStream,
-    >,
-}
-
-impl base::ProcMacro for BangProcMacro {
-    fn expand<'cx>(&self,
-                   ecx: &'cx mut ExtCtxt<'_>,
-                   span: Span,
-                   input: TokenStream)
-                   -> TokenStream {
-        let server = proc_macro_server::Rustc::new(ecx);
-        match self.client.run(&EXEC_STRATEGY, server, input) {
-            Ok(stream) => stream,
-            Err(e) => {
-                let msg = "proc macro panicked";
-                let mut err = ecx.struct_span_fatal(span, msg);
-                if let Some(s) = e.as_str() {
-                    err.help(&format!("message: {}", s));
-                }
-
-                err.emit();
-                FatalError.raise();
-            }
-        }
-    }
-}
-
-pub struct AttrProcMacro {
-    pub client: proc_macro::bridge::client::Client<
-        fn(proc_macro::TokenStream, proc_macro::TokenStream) -> proc_macro::TokenStream,
-    >,
-}
-
-impl base::AttrProcMacro for AttrProcMacro {
-    fn expand<'cx>(&self,
-                   ecx: &'cx mut ExtCtxt<'_>,
-                   span: Span,
-                   annotation: TokenStream,
-                   annotated: TokenStream)
-                   -> TokenStream {
-        let server = proc_macro_server::Rustc::new(ecx);
-        match self.client.run(&EXEC_STRATEGY, server, annotation, annotated) {
-            Ok(stream) => stream,
-            Err(e) => {
-                let msg = "custom attribute panicked";
-                let mut err = ecx.struct_span_fatal(span, msg);
-                if let Some(s) = e.as_str() {
-                    err.help(&format!("message: {}", s));
-                }
-
-                err.emit();
-                FatalError.raise();
-            }
-        }
-    }
-}
-
-pub struct ProcMacroDerive {
-    pub client: proc_macro::bridge::client::Client<
-        fn(proc_macro::TokenStream) -> proc_macro::TokenStream,
-    >,
-}
-
-impl MultiItemModifier for ProcMacroDerive {
-    fn expand(&self,
-              ecx: &mut ExtCtxt<'_>,
-              span: Span,
-              _meta_item: &ast::MetaItem,
-              item: Annotatable)
-              -> Vec<Annotatable> {
-        let item = match item {
-            Annotatable::Arm(..) |
-            Annotatable::Field(..) |
-            Annotatable::FieldPat(..) |
-            Annotatable::GenericParam(..) |
-            Annotatable::Param(..) |
-            Annotatable::StructField(..) |
-            Annotatable::Variant(..)
-                => panic!("unexpected annotatable"),
-            Annotatable::Item(item) => item,
-            Annotatable::ImplItem(_) |
-            Annotatable::TraitItem(_) |
-            Annotatable::ForeignItem(_) |
-            Annotatable::Stmt(_) |
-            Annotatable::Expr(_) => {
-                ecx.span_err(span, "proc-macro derives may only be \
-                                    applied to a struct, enum, or union");
-                return Vec::new()
-            }
-        };
-        match item.kind {
-            ItemKind::Struct(..) |
-            ItemKind::Enum(..) |
-            ItemKind::Union(..) => {},
-            _ => {
-                ecx.span_err(span, "proc-macro derives may only be \
-                                    applied to a struct, enum, or union");
-                return Vec::new()
-            }
-        }
-
-        let token = token::Interpolated(Lrc::new(token::NtItem(item)));
-        let input = tokenstream::TokenTree::token(token, DUMMY_SP).into();
-
-        let server = proc_macro_server::Rustc::new(ecx);
-        let stream = match self.client.run(&EXEC_STRATEGY, server, input) {
-            Ok(stream) => stream,
-            Err(e) => {
-                let msg = "proc-macro derive panicked";
-                let mut err = ecx.struct_span_fatal(span, msg);
-                if let Some(s) = e.as_str() {
-                    err.help(&format!("message: {}", s));
-                }
-
-                err.emit();
-                FatalError.raise();
-            }
-        };
-
-        let error_count_before = ecx.parse_sess.span_diagnostic.err_count();
-        let msg = "proc-macro derive produced unparseable tokens";
-
-        let mut parser = parse::stream_to_parser(ecx.parse_sess, stream, Some("proc-macro derive"));
-        let mut items = vec![];
-
-        loop {
-            match parser.parse_item() {
-                Ok(None) => break,
-                Ok(Some(item)) => {
-                    items.push(Annotatable::Item(item))
-                }
-                Err(mut err) => {
-                    // FIXME: handle this better
-                    err.cancel();
-                    ecx.struct_span_fatal(span, msg).emit();
-                    FatalError.raise();
-                }
-            }
-        }
-
-
-        // fail if there have been errors emitted
-        if ecx.parse_sess.span_diagnostic.err_count() > error_count_before {
-            ecx.struct_span_fatal(span, msg).emit();
-            FatalError.raise();
-        }
-
-        items
-    }
-}
-
-crate struct MarkAttrs<'a>(crate &'a [ast::Name]);
-
-impl<'a> Visitor<'a> for MarkAttrs<'a> {
-    fn visit_attribute(&mut self, attr: &Attribute) {
-        if let Some(ident) = attr.ident() {
-            if self.0.contains(&ident.name) {
-                mark_used(attr);
-                mark_known(attr);
-            }
-        }
-    }
-
-    fn visit_mac(&mut self, _mac: &Mac) {}
-}
-
-pub fn is_proc_macro_attr(attr: &Attribute) -> bool {
-    [sym::proc_macro, sym::proc_macro_attribute, sym::proc_macro_derive]
-        .iter().any(|kind| attr.check_name(*kind))
-}
-
-crate fn collect_derives(cx: &mut ExtCtxt<'_>, attrs: &mut Vec<ast::Attribute>) -> Vec<ast::Path> {
-    let mut result = Vec::new();
-    attrs.retain(|attr| {
-        if attr.path != sym::derive {
-            return true;
-        }
-        if !attr.is_meta_item_list() {
-            cx.struct_span_err(attr.span, "malformed `derive` attribute input")
-                .span_suggestion(
-                    attr.span,
-                    "missing traits to be derived",
-                    "#[derive(Trait1, Trait2, ...)]".to_owned(),
-                    Applicability::HasPlaceholders,
-                ).emit();
-            return false;
-        }
-
-        match attr.parse_list(cx.parse_sess,
-                              |parser| parser.parse_path_allowing_meta(PathStyle::Mod)) {
-            Ok(traits) => {
-                result.extend(traits);
-                true
-            }
-            Err(mut e) => {
-                e.emit();
-                false
-            }
-        }
-    });
-    result
-}
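
For context on `collect_derives` above, its user-facing behaviour looks roughly like this (the quoted diagnostic and suggestion text come from the code above; everything else is an illustrative sketch):

// Well-formed: the attribute is a meta-item list, so each path is collected
// and dispatched to the corresponding derive macro.
#[derive(Clone, Debug)]
struct Point {
    x: i32,
    y: i32,
}

// Malformed: `derive` without a list is rejected with
// "malformed `derive` attribute input" and the suggestion
// "#[derive(Trait1, Trait2, ...)]".
// #[derive]
// struct Broken;

fn main() {
    let p = Point { x: 1, y: 2 };
    let q = p.clone();
    println!("{:?}", q);
}
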
diff --git a/src/libsyntax/ext/proc_macro_server.rs b/src/libsyntax/ext/proc_macro_server.rs
deleted file mode 100644 (file)
index 021ec46..0000000
+++ /dev/null
@@ -1,711 +0,0 @@
-use crate::ast;
-use crate::ext::base::ExtCtxt;
-use crate::parse::{self, token, ParseSess};
-use crate::parse::lexer::comments;
-use crate::tokenstream::{self, DelimSpan, IsJoint::*, TokenStream, TreeAndJoint};
-
-use errors::Diagnostic;
-use rustc_data_structures::sync::Lrc;
-use syntax_pos::{BytePos, FileName, MultiSpan, Pos, SourceFile, Span};
-use syntax_pos::symbol::{kw, sym, Symbol};
-
-use proc_macro::{Delimiter, Level, LineColumn, Spacing};
-use proc_macro::bridge::{server, TokenTree};
-use std::{ascii, panic};
-use std::ops::Bound;
-
-trait FromInternal<T> {
-    fn from_internal(x: T) -> Self;
-}
-
-trait ToInternal<T> {
-    fn to_internal(self) -> T;
-}
-
-impl FromInternal<token::DelimToken> for Delimiter {
-    fn from_internal(delim: token::DelimToken) -> Delimiter {
-        match delim {
-            token::Paren => Delimiter::Parenthesis,
-            token::Brace => Delimiter::Brace,
-            token::Bracket => Delimiter::Bracket,
-            token::NoDelim => Delimiter::None,
-        }
-    }
-}
-
-impl ToInternal<token::DelimToken> for Delimiter {
-    fn to_internal(self) -> token::DelimToken {
-        match self {
-            Delimiter::Parenthesis => token::Paren,
-            Delimiter::Brace => token::Brace,
-            Delimiter::Bracket => token::Bracket,
-            Delimiter::None => token::NoDelim,
-        }
-    }
-}
-
-impl FromInternal<(TreeAndJoint, &'_ ParseSess, &'_ mut Vec<Self>)>
-    for TokenTree<Group, Punct, Ident, Literal>
-{
-    fn from_internal(((tree, is_joint), sess, stack): (TreeAndJoint, &ParseSess, &mut Vec<Self>))
-                    -> Self {
-        use crate::parse::token::*;
-
-        let joint = is_joint == Joint;
-        let Token { kind, span } = match tree {
-            tokenstream::TokenTree::Delimited(span, delim, tts) => {
-                let delimiter = Delimiter::from_internal(delim);
-                return TokenTree::Group(Group {
-                    delimiter,
-                    stream: tts.into(),
-                    span,
-                });
-            }
-            tokenstream::TokenTree::Token(token) => token,
-        };
-
-        macro_rules! tt {
-            ($ty:ident { $($field:ident $(: $value:expr)*),+ $(,)? }) => (
-                TokenTree::$ty(self::$ty {
-                    $($field $(: $value)*,)+
-                    span,
-                })
-            );
-            ($ty:ident::$method:ident($($value:expr),*)) => (
-                TokenTree::$ty(self::$ty::$method($($value,)* span))
-            );
-        }
-        macro_rules! op {
-            ($a:expr) => {
-                tt!(Punct::new($a, joint))
-            };
-            ($a:expr, $b:expr) => {{
-                stack.push(tt!(Punct::new($b, joint)));
-                tt!(Punct::new($a, true))
-            }};
-            ($a:expr, $b:expr, $c:expr) => {{
-                stack.push(tt!(Punct::new($c, joint)));
-                stack.push(tt!(Punct::new($b, true)));
-                tt!(Punct::new($a, true))
-            }};
-        }
-
-        match kind {
-            Eq => op!('='),
-            Lt => op!('<'),
-            Le => op!('<', '='),
-            EqEq => op!('=', '='),
-            Ne => op!('!', '='),
-            Ge => op!('>', '='),
-            Gt => op!('>'),
-            AndAnd => op!('&', '&'),
-            OrOr => op!('|', '|'),
-            Not => op!('!'),
-            Tilde => op!('~'),
-            BinOp(Plus) => op!('+'),
-            BinOp(Minus) => op!('-'),
-            BinOp(Star) => op!('*'),
-            BinOp(Slash) => op!('/'),
-            BinOp(Percent) => op!('%'),
-            BinOp(Caret) => op!('^'),
-            BinOp(And) => op!('&'),
-            BinOp(Or) => op!('|'),
-            BinOp(Shl) => op!('<', '<'),
-            BinOp(Shr) => op!('>', '>'),
-            BinOpEq(Plus) => op!('+', '='),
-            BinOpEq(Minus) => op!('-', '='),
-            BinOpEq(Star) => op!('*', '='),
-            BinOpEq(Slash) => op!('/', '='),
-            BinOpEq(Percent) => op!('%', '='),
-            BinOpEq(Caret) => op!('^', '='),
-            BinOpEq(And) => op!('&', '='),
-            BinOpEq(Or) => op!('|', '='),
-            BinOpEq(Shl) => op!('<', '<', '='),
-            BinOpEq(Shr) => op!('>', '>', '='),
-            At => op!('@'),
-            Dot => op!('.'),
-            DotDot => op!('.', '.'),
-            DotDotDot => op!('.', '.', '.'),
-            DotDotEq => op!('.', '.', '='),
-            Comma => op!(','),
-            Semi => op!(';'),
-            Colon => op!(':'),
-            ModSep => op!(':', ':'),
-            RArrow => op!('-', '>'),
-            LArrow => op!('<', '-'),
-            FatArrow => op!('=', '>'),
-            Pound => op!('#'),
-            Dollar => op!('$'),
-            Question => op!('?'),
-            SingleQuote => op!('\''),
-
-            Ident(name, false) if name == kw::DollarCrate => tt!(Ident::dollar_crate()),
-            Ident(name, is_raw) => tt!(Ident::new(name, is_raw)),
-            Lifetime(name) => {
-                let ident = ast::Ident::new(name, span).without_first_quote();
-                stack.push(tt!(Ident::new(ident.name, false)));
-                tt!(Punct::new('\'', true))
-            }
-            Literal(lit) => tt!(Literal { lit }),
-            DocComment(c) => {
-                let style = comments::doc_comment_style(&c.as_str());
-                let stripped = comments::strip_doc_comment_decoration(&c.as_str());
-                let mut escaped = String::new();
-                for ch in stripped.chars() {
-                    escaped.extend(ch.escape_debug());
-                }
-                let stream = vec![
-                    Ident(sym::doc, false),
-                    Eq,
-                    TokenKind::lit(token::Str, Symbol::intern(&escaped), None),
-                ]
-                .into_iter()
-                .map(|kind| tokenstream::TokenTree::token(kind, span))
-                .collect();
-                stack.push(TokenTree::Group(Group {
-                    delimiter: Delimiter::Bracket,
-                    stream,
-                    span: DelimSpan::from_single(span),
-                }));
-                if style == ast::AttrStyle::Inner {
-                    stack.push(tt!(Punct::new('!', false)));
-                }
-                tt!(Punct::new('#', false))
-            }
-
-            Interpolated(nt) => {
-                let stream = nt.to_tokenstream(sess, span);
-                TokenTree::Group(Group {
-                    delimiter: Delimiter::None,
-                    stream,
-                    span: DelimSpan::from_single(span),
-                })
-            }
-
-            OpenDelim(..) | CloseDelim(..) => unreachable!(),
-            Whitespace | Comment | Shebang(..) | Unknown(..) | Eof => unreachable!(),
-        }
-    }
-}
-
-impl ToInternal<TokenStream> for TokenTree<Group, Punct, Ident, Literal> {
-    fn to_internal(self) -> TokenStream {
-        use crate::parse::token::*;
-
-        let (ch, joint, span) = match self {
-            TokenTree::Punct(Punct { ch, joint, span }) => (ch, joint, span),
-            TokenTree::Group(Group {
-                delimiter,
-                stream,
-                span,
-            }) => {
-                return tokenstream::TokenTree::Delimited(
-                    span,
-                    delimiter.to_internal(),
-                    stream.into(),
-                )
-                .into();
-            }
-            TokenTree::Ident(self::Ident { sym, is_raw, span }) => {
-                return tokenstream::TokenTree::token(Ident(sym, is_raw), span).into();
-            }
-            TokenTree::Literal(self::Literal {
-                lit: token::Lit { kind: token::Integer, symbol, suffix },
-                span,
-            }) if symbol.as_str().starts_with("-") => {
-                let minus = BinOp(BinOpToken::Minus);
-                let symbol = Symbol::intern(&symbol.as_str()[1..]);
-                let integer = TokenKind::lit(token::Integer, symbol, suffix);
-                let a = tokenstream::TokenTree::token(minus, span);
-                let b = tokenstream::TokenTree::token(integer, span);
-                return vec![a, b].into_iter().collect();
-            }
-            TokenTree::Literal(self::Literal {
-                lit: token::Lit { kind: token::Float, symbol, suffix },
-                span,
-            }) if symbol.as_str().starts_with("-") => {
-                let minus = BinOp(BinOpToken::Minus);
-                let symbol = Symbol::intern(&symbol.as_str()[1..]);
-                let float = TokenKind::lit(token::Float, symbol, suffix);
-                let a = tokenstream::TokenTree::token(minus, span);
-                let b = tokenstream::TokenTree::token(float, span);
-                return vec![a, b].into_iter().collect();
-            }
-            TokenTree::Literal(self::Literal { lit, span }) => {
-                return tokenstream::TokenTree::token(Literal(lit), span).into()
-            }
-        };
-
-        let kind = match ch {
-            '=' => Eq,
-            '<' => Lt,
-            '>' => Gt,
-            '!' => Not,
-            '~' => Tilde,
-            '+' => BinOp(Plus),
-            '-' => BinOp(Minus),
-            '*' => BinOp(Star),
-            '/' => BinOp(Slash),
-            '%' => BinOp(Percent),
-            '^' => BinOp(Caret),
-            '&' => BinOp(And),
-            '|' => BinOp(Or),
-            '@' => At,
-            '.' => Dot,
-            ',' => Comma,
-            ';' => Semi,
-            ':' => Colon,
-            '#' => Pound,
-            '$' => Dollar,
-            '?' => Question,
-            '\'' => SingleQuote,
-            _ => unreachable!(),
-        };
-
-        let tree = tokenstream::TokenTree::token(kind, span);
-        TokenStream::new(vec![(tree, if joint { Joint } else { NonJoint })])
-    }
-}
-
-impl ToInternal<errors::Level> for Level {
-    fn to_internal(self) -> errors::Level {
-        match self {
-            Level::Error => errors::Level::Error,
-            Level::Warning => errors::Level::Warning,
-            Level::Note => errors::Level::Note,
-            Level::Help => errors::Level::Help,
-            _ => unreachable!("unknown proc_macro::Level variant: {:?}", self),
-        }
-    }
-}
-
-#[derive(Clone)]
-pub struct TokenStreamIter {
-    cursor: tokenstream::Cursor,
-    stack: Vec<TokenTree<Group, Punct, Ident, Literal>>,
-}
-
-#[derive(Clone)]
-pub struct Group {
-    delimiter: Delimiter,
-    stream: TokenStream,
-    span: DelimSpan,
-}
-
-#[derive(Copy, Clone, PartialEq, Eq, Hash)]
-pub struct Punct {
-    ch: char,
-    // NB. not using `Spacing` here because it doesn't implement `Hash`.
-    joint: bool,
-    span: Span,
-}
-
-impl Punct {
-    fn new(ch: char, joint: bool, span: Span) -> Punct {
-        const LEGAL_CHARS: &[char] = &['=', '<', '>', '!', '~', '+', '-', '*', '/', '%', '^',
-                                       '&', '|', '@', '.', ',', ';', ':', '#', '$', '?', '\''];
-        if !LEGAL_CHARS.contains(&ch) {
-            panic!("unsupported character `{:?}`", ch)
-        }
-        Punct { ch, joint, span }
-    }
-}
-
-#[derive(Copy, Clone, PartialEq, Eq, Hash)]
-pub struct Ident {
-    sym: Symbol,
-    is_raw: bool,
-    span: Span,
-}
-
-impl Ident {
-    fn is_valid(string: &str) -> bool {
-        let mut chars = string.chars();
-        if let Some(start) = chars.next() {
-            rustc_lexer::is_id_start(start) && chars.all(rustc_lexer::is_id_continue)
-        } else {
-            false
-        }
-    }
-    fn new(sym: Symbol, is_raw: bool, span: Span) -> Ident {
-        let string = sym.as_str();
-        if !Self::is_valid(&string) {
-            panic!("`{:?}` is not a valid identifier", string)
-        }
-        // Get rid of gensyms to conservatively check rawness on the string contents only.
-        if is_raw && !sym.as_interned_str().as_symbol().can_be_raw() {
-            panic!("`{}` cannot be a raw identifier", string);
-        }
-        Ident { sym, is_raw, span }
-    }
-    fn dollar_crate(span: Span) -> Ident {
-        // `$crate` is accepted as an ident only if it comes from the compiler.
-        Ident { sym: kw::DollarCrate, is_raw: false, span }
-    }
-}
-
-// FIXME(eddyb) `Literal` should not expose internal `Debug` impls.
-#[derive(Clone, Debug)]
-pub struct Literal {
-    lit: token::Lit,
-    span: Span,
-}
-
-pub(crate) struct Rustc<'a> {
-    sess: &'a ParseSess,
-    def_site: Span,
-    call_site: Span,
-    mixed_site: Span,
-}
-
-impl<'a> Rustc<'a> {
-    pub fn new(cx: &'a ExtCtxt<'_>) -> Self {
-        let expn_data = cx.current_expansion.id.expn_data();
-        Rustc {
-            sess: cx.parse_sess,
-            def_site: cx.with_def_site_ctxt(expn_data.def_site),
-            call_site: cx.with_call_site_ctxt(expn_data.call_site),
-            mixed_site: cx.with_mixed_site_ctxt(expn_data.call_site),
-        }
-    }
-
-    fn lit(&mut self, kind: token::LitKind, symbol: Symbol, suffix: Option<Symbol>) -> Literal {
-        Literal {
-            lit: token::Lit::new(kind, symbol, suffix),
-            span: server::Span::call_site(self),
-        }
-    }
-}
-
-impl server::Types for Rustc<'_> {
-    type TokenStream = TokenStream;
-    type TokenStreamBuilder = tokenstream::TokenStreamBuilder;
-    type TokenStreamIter = TokenStreamIter;
-    type Group = Group;
-    type Punct = Punct;
-    type Ident = Ident;
-    type Literal = Literal;
-    type SourceFile = Lrc<SourceFile>;
-    type MultiSpan = Vec<Span>;
-    type Diagnostic = Diagnostic;
-    type Span = Span;
-}
-
-impl server::TokenStream for Rustc<'_> {
-    fn new(&mut self) -> Self::TokenStream {
-        TokenStream::empty()
-    }
-    fn is_empty(&mut self, stream: &Self::TokenStream) -> bool {
-        stream.is_empty()
-    }
-    fn from_str(&mut self, src: &str) -> Self::TokenStream {
-        parse::parse_stream_from_source_str(
-            FileName::proc_macro_source_code(src),
-            src.to_string(),
-            self.sess,
-            Some(self.call_site),
-        )
-    }
-    fn to_string(&mut self, stream: &Self::TokenStream) -> String {
-        stream.to_string()
-    }
-    fn from_token_tree(
-        &mut self,
-        tree: TokenTree<Self::Group, Self::Punct, Self::Ident, Self::Literal>,
-    ) -> Self::TokenStream {
-        tree.to_internal()
-    }
-    fn into_iter(&mut self, stream: Self::TokenStream) -> Self::TokenStreamIter {
-        TokenStreamIter {
-            cursor: stream.trees(),
-            stack: vec![],
-        }
-    }
-}
-
-impl server::TokenStreamBuilder for Rustc<'_> {
-    fn new(&mut self) -> Self::TokenStreamBuilder {
-        tokenstream::TokenStreamBuilder::new()
-    }
-    fn push(&mut self, builder: &mut Self::TokenStreamBuilder, stream: Self::TokenStream) {
-        builder.push(stream);
-    }
-    fn build(&mut self, builder: Self::TokenStreamBuilder) -> Self::TokenStream {
-        builder.build()
-    }
-}
-
-impl server::TokenStreamIter for Rustc<'_> {
-    fn next(
-        &mut self,
-        iter: &mut Self::TokenStreamIter,
-    ) -> Option<TokenTree<Self::Group, Self::Punct, Self::Ident, Self::Literal>> {
-        loop {
-            let tree = iter.stack.pop().or_else(|| {
-                let next = iter.cursor.next_with_joint()?;
-                Some(TokenTree::from_internal((next, self.sess, &mut iter.stack)))
-            })?;
-            // HACK: The condition "dummy span + group with empty delimiter" represents an AST
-            // fragment approximately converted into a token stream. This may happen, for
-            // example, with inputs to proc macro attributes, including derives. Such "groups"
-            // need to be flattened during iteration over the stream's token trees.
-            // Eventually this needs to be removed in favor of keeping original token trees
-            // and not doing the roundtrip through AST.
-            if let TokenTree::Group(ref group) = tree {
-                if group.delimiter == Delimiter::None && group.span.entire().is_dummy() {
-                    iter.cursor.append(group.stream.clone());
-                    continue;
-                }
-            }
-            return Some(tree);
-        }
-    }
-}
-
-impl server::Group for Rustc<'_> {
-    fn new(&mut self, delimiter: Delimiter, stream: Self::TokenStream) -> Self::Group {
-        Group {
-            delimiter,
-            stream,
-            span: DelimSpan::from_single(server::Span::call_site(self)),
-        }
-    }
-    fn delimiter(&mut self, group: &Self::Group) -> Delimiter {
-        group.delimiter
-    }
-    fn stream(&mut self, group: &Self::Group) -> Self::TokenStream {
-        group.stream.clone()
-    }
-    fn span(&mut self, group: &Self::Group) -> Self::Span {
-        group.span.entire()
-    }
-    fn span_open(&mut self, group: &Self::Group) -> Self::Span {
-        group.span.open
-    }
-    fn span_close(&mut self, group: &Self::Group) -> Self::Span {
-        group.span.close
-    }
-    fn set_span(&mut self, group: &mut Self::Group, span: Self::Span) {
-        group.span = DelimSpan::from_single(span);
-    }
-}
-
-impl server::Punct for Rustc<'_> {
-    fn new(&mut self, ch: char, spacing: Spacing) -> Self::Punct {
-        Punct::new(ch, spacing == Spacing::Joint, server::Span::call_site(self))
-    }
-    fn as_char(&mut self, punct: Self::Punct) -> char {
-        punct.ch
-    }
-    fn spacing(&mut self, punct: Self::Punct) -> Spacing {
-        if punct.joint {
-            Spacing::Joint
-        } else {
-            Spacing::Alone
-        }
-    }
-    fn span(&mut self, punct: Self::Punct) -> Self::Span {
-        punct.span
-    }
-    fn with_span(&mut self, punct: Self::Punct, span: Self::Span) -> Self::Punct {
-        Punct { span, ..punct }
-    }
-}
-
-impl server::Ident for Rustc<'_> {
-    fn new(&mut self, string: &str, span: Self::Span, is_raw: bool) -> Self::Ident {
-        Ident::new(Symbol::intern(string), is_raw, span)
-    }
-    fn span(&mut self, ident: Self::Ident) -> Self::Span {
-        ident.span
-    }
-    fn with_span(&mut self, ident: Self::Ident, span: Self::Span) -> Self::Ident {
-        Ident { span, ..ident }
-    }
-}
-
-impl server::Literal for Rustc<'_> {
-    // FIXME(eddyb) `Literal` should not expose internal `Debug` impls.
-    fn debug(&mut self, literal: &Self::Literal) -> String {
-        format!("{:?}", literal)
-    }
-    fn integer(&mut self, n: &str) -> Self::Literal {
-        self.lit(token::Integer, Symbol::intern(n), None)
-    }
-    fn typed_integer(&mut self, n: &str, kind: &str) -> Self::Literal {
-        self.lit(token::Integer, Symbol::intern(n), Some(Symbol::intern(kind)))
-    }
-    fn float(&mut self, n: &str) -> Self::Literal {
-        self.lit(token::Float, Symbol::intern(n), None)
-    }
-    fn f32(&mut self, n: &str) -> Self::Literal {
-        self.lit(token::Float, Symbol::intern(n), Some(sym::f32))
-    }
-    fn f64(&mut self, n: &str) -> Self::Literal {
-        self.lit(token::Float, Symbol::intern(n), Some(sym::f64))
-    }
-    fn string(&mut self, string: &str) -> Self::Literal {
-        let mut escaped = String::new();
-        for ch in string.chars() {
-            escaped.extend(ch.escape_debug());
-        }
-        self.lit(token::Str, Symbol::intern(&escaped), None)
-    }
-    fn character(&mut self, ch: char) -> Self::Literal {
-        let mut escaped = String::new();
-        escaped.extend(ch.escape_unicode());
-        self.lit(token::Char, Symbol::intern(&escaped), None)
-    }
-    fn byte_string(&mut self, bytes: &[u8]) -> Self::Literal {
-        let string = bytes
-            .iter()
-            .cloned()
-            .flat_map(ascii::escape_default)
-            .map(Into::<char>::into)
-            .collect::<String>();
-        self.lit(token::ByteStr, Symbol::intern(&string), None)
-    }
-    fn span(&mut self, literal: &Self::Literal) -> Self::Span {
-        literal.span
-    }
-    fn set_span(&mut self, literal: &mut Self::Literal, span: Self::Span) {
-        literal.span = span;
-    }
-    fn subspan(
-        &mut self,
-        literal: &Self::Literal,
-        start: Bound<usize>,
-        end: Bound<usize>,
-    ) -> Option<Self::Span> {
-        let span = literal.span;
-        let length = span.hi().to_usize() - span.lo().to_usize();
-
-        let start = match start {
-            Bound::Included(lo) => lo,
-            Bound::Excluded(lo) => lo + 1,
-            Bound::Unbounded => 0,
-        };
-
-        let end = match end {
-            Bound::Included(hi) => hi + 1,
-            Bound::Excluded(hi) => hi,
-            Bound::Unbounded => length,
-        };
-
-        // Bounds check the values, preventing addition overflow and OOB spans.
-        if start > u32::max_value() as usize
-            || end > u32::max_value() as usize
-            || (u32::max_value() - start as u32) < span.lo().to_u32()
-            || (u32::max_value() - end as u32) < span.lo().to_u32()
-            || start >= end
-            || end > length
-        {
-            return None;
-        }
-
-        let new_lo = span.lo() + BytePos::from_usize(start);
-        let new_hi = span.lo() + BytePos::from_usize(end);
-        Some(span.with_lo(new_lo).with_hi(new_hi))
-    }
-}
-
-impl server::SourceFile for Rustc<'_> {
-    fn eq(&mut self, file1: &Self::SourceFile, file2: &Self::SourceFile) -> bool {
-        Lrc::ptr_eq(file1, file2)
-    }
-    fn path(&mut self, file: &Self::SourceFile) -> String {
-        match file.name {
-            FileName::Real(ref path) => path
-                .to_str()
-                .expect("non-UTF8 file path in `proc_macro::SourceFile::path`")
-                .to_string(),
-            _ => file.name.to_string(),
-        }
-    }
-    fn is_real(&mut self, file: &Self::SourceFile) -> bool {
-        file.is_real_file()
-    }
-}
-
-impl server::MultiSpan for Rustc<'_> {
-    fn new(&mut self) -> Self::MultiSpan {
-        vec![]
-    }
-    fn push(&mut self, spans: &mut Self::MultiSpan, span: Self::Span) {
-        spans.push(span)
-    }
-}
-
-impl server::Diagnostic for Rustc<'_> {
-    fn new(&mut self, level: Level, msg: &str, spans: Self::MultiSpan) -> Self::Diagnostic {
-        let mut diag = Diagnostic::new(level.to_internal(), msg);
-        diag.set_span(MultiSpan::from_spans(spans));
-        diag
-    }
-    fn sub(
-        &mut self,
-        diag: &mut Self::Diagnostic,
-        level: Level,
-        msg: &str,
-        spans: Self::MultiSpan,
-    ) {
-        diag.sub(level.to_internal(), msg, MultiSpan::from_spans(spans), None);
-    }
-    fn emit(&mut self, diag: Self::Diagnostic) {
-        self.sess.span_diagnostic.emit_diagnostic(&diag);
-    }
-}
-
-impl server::Span for Rustc<'_> {
-    fn debug(&mut self, span: Self::Span) -> String {
-        format!("{:?} bytes({}..{})", span.ctxt(), span.lo().0, span.hi().0)
-    }
-    fn def_site(&mut self) -> Self::Span {
-        self.def_site
-    }
-    fn call_site(&mut self) -> Self::Span {
-        self.call_site
-    }
-    fn mixed_site(&mut self) -> Self::Span {
-        self.mixed_site
-    }
-    fn source_file(&mut self, span: Self::Span) -> Self::SourceFile {
-        self.sess.source_map().lookup_char_pos(span.lo()).file
-    }
-    fn parent(&mut self, span: Self::Span) -> Option<Self::Span> {
-        span.parent()
-    }
-    fn source(&mut self, span: Self::Span) -> Self::Span {
-        span.source_callsite()
-    }
-    fn start(&mut self, span: Self::Span) -> LineColumn {
-        let loc = self.sess.source_map().lookup_char_pos(span.lo());
-        LineColumn {
-            line: loc.line,
-            column: loc.col.to_usize(),
-        }
-    }
-    fn end(&mut self, span: Self::Span) -> LineColumn {
-        let loc = self.sess.source_map().lookup_char_pos(span.hi());
-        LineColumn {
-            line: loc.line,
-            column: loc.col.to_usize(),
-        }
-    }
-    fn join(&mut self, first: Self::Span, second: Self::Span) -> Option<Self::Span> {
-        let self_loc = self.sess.source_map().lookup_char_pos(first.lo());
-        let other_loc = self.sess.source_map().lookup_char_pos(second.lo());
-
-        if self_loc.file.name != other_loc.file.name {
-            return None;
-        }
-
-        Some(first.to(second))
-    }
-    fn resolved_at(&mut self, span: Self::Span, at: Self::Span) -> Self::Span {
-        span.with_ctxt(at.ctxt())
-    }
-    fn source_text(&mut self,  span: Self::Span) -> Option<String> {
-        self.sess.source_map().span_to_snippet(span).ok()
-    }
-}
index 47ee41f0adc1690b0d0de4e1437ba1906b61c1a1..94f0995566f5289fa6f66229528281ac0a1c0e8c 100644 (file)
@@ -519,6 +519,15 @@ pub fn set(&self, features: &mut Features, span: Span) {
     /// Allows the use of or-patterns (e.g., `0 | 1`).
     (active, or_patterns, "1.38.0", Some(54883), None),
 
+    /// Allows the definition of `const extern fn` and `const unsafe extern fn`.
+    (active, const_extern_fn, "1.40.0", Some(64926), None),
+
+    // Allows the use of raw-dylibs (RFC 2627).
+    (active, raw_dylib, "1.40.0", Some(58713), None),
+
+    /// Enable accurate caller location reporting during panic (RFC 2091).
+    (active, track_caller, "1.40.0", Some(47809), None),
+
     // -------------------------------------------------------------------------
     // feature-group-end: actual feature gates
     // -------------------------------------------------------------------------
@@ -533,4 +542,6 @@ pub fn set(&self, features: &mut Features, span: Span) {
     sym::const_generics,
     sym::or_patterns,
     sym::let_chains,
+    sym::raw_dylib,
+    sym::track_caller,
 ];
index 80a80ff0a0d47b100ad4d97c994751be84adb420..7dd6ae90d9a46af0464383bbb999318705a4dc32 100644 (file)
@@ -9,8 +9,8 @@
 
 use crate::ast;
 use crate::attr::AttributeTemplate;
+use crate::sess::ParseSess;
 use crate::symbol::{Symbol, sym};
-use crate::parse::ParseSess;
 
 use syntax_pos::Span;
 use rustc_data_structures::fx::FxHashMap;
@@ -29,6 +29,7 @@ macro_rules! cfg_fn {
     // (name in cfg, feature, function to check if the feature is enabled)
     (sym::target_thread_local, sym::cfg_target_thread_local, cfg_fn!(cfg_target_thread_local)),
     (sym::target_has_atomic, sym::cfg_target_has_atomic, cfg_fn!(cfg_target_has_atomic)),
+    (sym::target_has_atomic_load_store, sym::cfg_target_has_atomic, cfg_fn!(cfg_target_has_atomic)),
     (sym::rustdoc, sym::doc_cfg, cfg_fn!(doc_cfg)),
     (sym::doctest, sym::cfg_doctest, cfg_fn!(cfg_doctest)),
 ];
@@ -276,12 +277,19 @@ macro_rules! experimental {
         "the `link_args` attribute is experimental and not portable across platforms, \
         it is recommended to use `#[link(name = \"foo\")]` instead",
     ),
+    gated!(
+        link_ordinal, Whitelisted, template!(List: "ordinal"), raw_dylib,
+        experimental!(link_ordinal)
+    ),
 
     // Plugins:
     (
         sym::plugin_registrar, Normal, template!(Word),
         Gated(
-            Stability::Deprecated("https://github.com/rust-lang/rust/issues/29597", None),
+            Stability::Deprecated(
+                "https://github.com/rust-lang/rust/pull/64675",
+                Some("may be removed in a future compiler version"),
+            ),
             sym::plugin_registrar,
             "compiler plugins are deprecated",
             cfg_fn!(plugin_registrar)
@@ -290,7 +298,10 @@ macro_rules! experimental {
     (
         sym::plugin, CrateLevel, template!(List: "name|name(args)"),
         Gated(
-            Stability::Deprecated("https://github.com/rust-lang/rust/issues/29597", None),
+            Stability::Deprecated(
+                "https://github.com/rust-lang/rust/pull/64675",
+                Some("may be removed in a future compiler version"),
+            ),
             sym::plugin,
             "compiler plugins are deprecated",
             cfg_fn!(plugin)
@@ -320,6 +331,7 @@ macro_rules! experimental {
     ),
 
     gated!(ffi_returns_twice, Whitelisted, template!(Word), experimental!(ffi_returns_twice)),
+    gated!(track_caller, Whitelisted, template!(Word), experimental!(track_caller)),
 
     // ==========================================================================
     // Internal attributes: Stability, deprecation, and unsafe:
index d7fc74955bbbde1c421c5c1e86226f594df07b97..172511f0f099b0bd0d713c94249cb3f1edbba47c 100644 (file)
@@ -5,14 +5,14 @@
 
 use crate::ast::{
     self, AssocTyConstraint, AssocTyConstraintKind, NodeId, GenericParam, GenericParamKind,
-    PatKind, RangeEnd,
+    PatKind, RangeEnd, VariantData,
 };
 use crate::attr::{self, check_builtin_attribute};
 use crate::source_map::Spanned;
 use crate::edition::{ALL_EDITIONS, Edition};
 use crate::visit::{self, FnKind, Visitor};
-use crate::parse::{token, ParseSess};
-use crate::parse::parser::Parser;
+use crate::parse::token;
+use crate::sess::ParseSess;
 use crate::symbol::{Symbol, sym};
 use crate::tokenstream::TokenTree;
 
@@ -56,7 +56,7 @@ macro_rules! gate_feature {
     };
 }
 
-crate fn check_attribute(attr: &ast::Attribute, parse_sess: &ParseSess, features: &Features) {
+pub fn check_attribute(attr: &ast::Attribute, parse_sess: &ParseSess, features: &Features) {
     PostExpansionVisitor { parse_sess, features }.visit_attribute(attr)
 }
 
@@ -246,6 +246,51 @@ fn check_abi(&self, abi: Abi, span: Span) {
             Abi::System => {}
         }
     }
+
+    fn maybe_report_invalid_custom_discriminants(&self, variants: &[ast::Variant]) {
+        let has_fields = variants.iter().any(|variant| match variant.data {
+            VariantData::Tuple(..) | VariantData::Struct(..) => true,
+            VariantData::Unit(..) => false,
+        });
+
+        let discriminant_spans = variants.iter().filter(|variant| match variant.data {
+            VariantData::Tuple(..) | VariantData::Struct(..) => false,
+            VariantData::Unit(..) => true,
+        })
+        .filter_map(|variant| variant.disr_expr.as_ref().map(|c| c.value.span))
+        .collect::<Vec<_>>();
+
+        if !discriminant_spans.is_empty() && has_fields {
+            let mut err = feature_err(
+                self.parse_sess,
+                sym::arbitrary_enum_discriminant,
+                discriminant_spans.clone(),
+                crate::feature_gate::GateIssue::Language,
+                "custom discriminant values are not allowed in enums with tuple or struct variants",
+            );
+            for sp in discriminant_spans {
+                err.span_label(sp, "disallowed custom discriminant");
+            }
+            for variant in variants.iter() {
+                match &variant.data {
+                    VariantData::Struct(..) => {
+                        err.span_label(
+                            variant.span,
+                            "struct variant defined here",
+                        );
+                    }
+                    VariantData::Tuple(..) => {
+                        err.span_label(
+                            variant.span,
+                            "tuple variant defined here",
+                        );
+                    }
+                    VariantData::Unit(..) => {}
+                }
+            }
+            err.emit();
+        }
+    }
 }
 
 impl<'a> Visitor<'a> for PostExpansionVisitor<'a> {
@@ -353,7 +398,7 @@ fn visit_item(&mut self, i: &'a ast::Item) {
 
                 let has_feature = self.features.arbitrary_enum_discriminant;
                 if !has_feature && !i.span.allows_unstable(sym::arbitrary_enum_discriminant) {
-                    Parser::maybe_report_invalid_custom_discriminants(self.parse_sess, &variants);
+                    self.maybe_report_invalid_custom_discriminants(&variants);
                 }
             }
 
@@ -769,7 +814,7 @@ fn feature_removed(span_handler: &Handler, span: Span, reason: Option<&str>) {
             }
 
             if let Some(allowed) = allow_features.as_ref() {
-                if allowed.iter().find(|f| *f == name.as_str()).is_none() {
+                if allowed.iter().find(|&f| f == &name.as_str() as &str).is_none() {
                     span_err!(span_handler, mi.span(), E0725,
                               "the feature `{}` is not in the list of allowed features",
                               name);
@@ -821,6 +866,7 @@ macro_rules! gate_all {
     gate_all!(async_closure, "async closures are unstable");
     gate_all!(yields, generators, "yield syntax is experimental");
     gate_all!(or_patterns, "or-patterns syntax is experimental");
+    gate_all!(const_extern_fn, "`const extern fn` definitions are unstable");
 
     visit::walk_crate(&mut visitor, krate);
 }
@@ -854,25 +900,19 @@ pub fn from_environment() -> UnstableFeatures {
     pub fn is_nightly_build(&self) -> bool {
         match *self {
             UnstableFeatures::Allow | UnstableFeatures::Cheat => true,
-            _ => false,
+            UnstableFeatures::Disallow => false,
         }
     }
 }
 
 fn maybe_stage_features(span_handler: &Handler, krate: &ast::Crate, unstable: UnstableFeatures) {
-    let allow_features = match unstable {
-        UnstableFeatures::Allow => true,
-        UnstableFeatures::Disallow => false,
-        UnstableFeatures::Cheat => true
-    };
-    if !allow_features {
-        for attr in &krate.attrs {
-            if attr.check_name(sym::feature) {
-                let release_channel = option_env!("CFG_RELEASE_CHANNEL").unwrap_or("(unknown)");
-                span_err!(span_handler, attr.span, E0554,
-                          "`#![feature]` may not be used on the {} release channel",
-                          release_channel);
-            }
+    if !unstable.is_nightly_build() {
+        for attr in krate.attrs.iter().filter(|attr| attr.check_name(sym::feature)) {
+            span_err!(
+                span_handler, attr.span, E0554,
+                "`#![feature]` may not be used on the {} release channel",
+                option_env!("CFG_RELEASE_CHANNEL").unwrap_or("(unknown)")
+            );
         }
     }
 }
index ca13ab3620508b833cf0724c2d3ecad2d1d0be86..ba970618c0e139d35ff7dfe5d1a3c79de18a6bac 100644 (file)
@@ -58,8 +58,7 @@ pub struct Feature {
     deprecated_attributes, is_builtin_attr,  is_builtin_attr_name,
 };
 pub use check::{
-    check_crate, get_features, feature_err, emit_feature_err,
+    check_crate, check_attribute, get_features, feature_err, emit_feature_err,
     Stability, GateIssue, UnstableFeatures,
     EXPLAIN_STMT_ATTR_SYNTAX, EXPLAIN_UNSIZED_TUPLE_COERCION,
 };
-crate use check::check_attribute;
index 2423e1070fc3eb432cd5abf684df8f2365154375..e3296788d9fa78530ab1dd528a9e55e63076b417 100644 (file)
@@ -12,7 +12,7 @@
 use crate::source_map::{SourceMap, FilePathMapping};
 
 use errors::registry::Registry;
-use errors::{SubDiagnostic, CodeSuggestion, SourceMapper};
+use errors::{SubDiagnostic, CodeSuggestion, SourceMapper, SourceMapperDyn};
 use errors::{DiagnosticId, Applicability};
 use errors::emitter::{Emitter, HumanReadableErrorType};
 
@@ -89,8 +89,8 @@ pub fn ui_testing(self, ui_testing: bool) -> Self {
 }
 
 impl Emitter for JsonEmitter {
-    fn emit_diagnostic(&mut self, db: &errors::Diagnostic) {
-        let data = Diagnostic::from_errors_diagnostic(db, self);
+    fn emit_diagnostic(&mut self, diag: &errors::Diagnostic) {
+        let data = Diagnostic::from_errors_diagnostic(diag, self);
         let result = if self.pretty {
             writeln!(&mut self.dst, "{}", as_pretty_json(&data))
         } else {
@@ -113,6 +113,10 @@ fn emit_artifact_notification(&mut self, path: &Path, artifact_type: &str) {
         }
     }
 
+    fn source_map(&self) -> Option<&Lrc<SourceMapperDyn>> {
+        Some(&self.sm)
+    }
+
     fn should_show_explain(&self) -> bool {
         match self.json_rendered {
             HumanReadableErrorType::Short(_) => false,
@@ -205,10 +209,10 @@ struct ArtifactNotification<'a> {
 }
 
 impl Diagnostic {
-    fn from_errors_diagnostic(db: &errors::Diagnostic,
+    fn from_errors_diagnostic(diag: &errors::Diagnostic,
                                je: &JsonEmitter)
                                -> Diagnostic {
-        let sugg = db.suggestions.iter().map(|sugg| {
+        let sugg = diag.suggestions.iter().map(|sugg| {
             Diagnostic {
                 message: sugg.msg.clone(),
                 code: None,
@@ -237,30 +241,30 @@ fn flush(&mut self) -> io::Result<()> {
         let output = buf.clone();
         je.json_rendered.new_emitter(
             Box::new(buf), Some(je.sm.clone()), false, None, je.external_macro_backtrace
-        ).ui_testing(je.ui_testing).emit_diagnostic(db);
+        ).ui_testing(je.ui_testing).emit_diagnostic(diag);
         let output = Arc::try_unwrap(output.0).unwrap().into_inner().unwrap();
         let output = String::from_utf8(output).unwrap();
 
         Diagnostic {
-            message: db.message(),
-            code: DiagnosticCode::map_opt_string(db.code.clone(), je),
-            level: db.level.to_str(),
-            spans: DiagnosticSpan::from_multispan(&db.span, je),
-            children: db.children.iter().map(|c| {
+            message: diag.message(),
+            code: DiagnosticCode::map_opt_string(diag.code.clone(), je),
+            level: diag.level.to_str(),
+            spans: DiagnosticSpan::from_multispan(&diag.span, je),
+            children: diag.children.iter().map(|c| {
                 Diagnostic::from_sub_diagnostic(c, je)
             }).chain(sugg).collect(),
             rendered: Some(output),
         }
     }
 
-    fn from_sub_diagnostic(db: &SubDiagnostic, je: &JsonEmitter) -> Diagnostic {
+    fn from_sub_diagnostic(diag: &SubDiagnostic, je: &JsonEmitter) -> Diagnostic {
         Diagnostic {
-            message: db.message(),
+            message: diag.message(),
             code: None,
-            level: db.level.to_str(),
-            spans: db.render_span.as_ref()
+            level: diag.level.to_str(),
+            spans: diag.render_span.as_ref()
                      .map(|sp| DiagnosticSpan::from_multispan(sp, je))
-                     .unwrap_or_else(|| DiagnosticSpan::from_multispan(&db.span, je)),
+                     .unwrap_or_else(|| DiagnosticSpan::from_multispan(&diag.span, je)),
             children: vec![],
             rendered: None,
         }
index d2c76b669dd5fef54014896c2e84ad4d20325c0d..a68b7fdf931a460d5bf50a2c576132b6e2bfbe51 100644 (file)
 #![feature(const_transmute)]
 #![feature(crate_visibility_modifier)]
 #![feature(label_break_value)]
-#![feature(mem_take)]
 #![feature(nll)]
-#![feature(proc_macro_diagnostic)]
-#![feature(proc_macro_internals)]
-#![feature(proc_macro_span)]
 #![feature(try_trait)]
+#![feature(slice_patterns)]
 #![feature(unicode_internals)]
 
 #![recursion_limit="256"]
 
-extern crate proc_macro;
-
 pub use errors;
 use rustc_data_structures::sync::Lock;
 use rustc_index::bit_set::GrowableBitSet;
 #[cfg(test)]
 mod tests;
 
-const MACRO_ARGUMENTS: Option<&'static str> = Some("macro arguments");
-
-// A variant of 'try!' that panics on an Err. This is used as a crutch on the
-// way towards a non-panic!-prone parser. It should be used for fatal parsing
-// errors; eventually we plan to convert all code using panictry to just use
-// normal try.
-#[macro_export]
-macro_rules! panictry {
-    ($e:expr) => ({
-        use std::result::Result::{Ok, Err};
-        use errors::FatalError;
-        match $e {
-            Ok(e) => e,
-            Err(mut e) => {
-                e.emit();
-                FatalError.raise()
-            }
-        }
-    })
-}
+pub const MACRO_ARGUMENTS: Option<&'static str> = Some("macro arguments");
 
 // A variant of 'panictry!' that works on a Vec<Diagnostic> instead of a single DiagnosticBuilder.
 macro_rules! panictry_buffer {
@@ -147,6 +123,7 @@ pub mod util {
 pub mod show_span;
 pub use syntax_pos::edition;
 pub use syntax_pos::symbol;
+pub mod sess;
 pub mod tokenstream;
 pub mod visit;
 
@@ -156,19 +133,4 @@ pub mod print {
     mod helpers;
 }
 
-pub mod ext {
-    mod placeholders;
-    mod proc_macro_server;
-
-    pub use syntax_pos::hygiene;
-    pub use mbe::macro_rules::compile_declarative_macro;
-    pub mod allocator;
-    pub mod base;
-    pub mod build;
-    pub mod expand;
-    pub mod proc_macro;
-
-    crate mod mbe;
-}
-
 pub mod early_buffered_lints;
index 3923b9f297b9ffaf897feb6b3b547529853a56b8..60ee17d09b7557dcb42cbceb6fe1abdd0632737d 100644 (file)
@@ -610,10 +610,8 @@ pub fn noop_visit_tt<T: MutVisitor>(tt: &mut TokenTree, vis: &mut T) {
 }
 
 pub fn noop_visit_tts<T: MutVisitor>(TokenStream(tts): &mut TokenStream, vis: &mut T) {
-    visit_opt(tts, |tts| {
-        let tts = Lrc::make_mut(tts);
-        visit_vec(tts, |(tree, _is_joint)| vis.visit_tt(tree));
-    })
+    let tts = Lrc::make_mut(tts);
+    visit_vec(tts, |(tree, _is_joint)| vis.visit_tt(tree));
 }
 
 // Applies ident visitor if it's an ident; applies other visits to interpolated nodes.
diff --git a/src/libsyntax/parse/attr.rs b/src/libsyntax/parse/attr.rs
deleted file mode 100644 (file)
index e74f304..0000000
+++ /dev/null
@@ -1,330 +0,0 @@
-use crate::attr;
-use crate::ast;
-use crate::parse::{SeqSep, PResult};
-use crate::parse::token::{self, Nonterminal, DelimToken};
-use crate::parse::parser::{Parser, TokenType, PathStyle};
-use crate::tokenstream::{TokenStream, TokenTree};
-use crate::source_map::Span;
-
-use log::debug;
-use smallvec::smallvec;
-
-#[derive(Debug)]
-enum InnerAttributeParsePolicy<'a> {
-    Permitted,
-    NotPermitted { reason: &'a str, saw_doc_comment: bool, prev_attr_sp: Option<Span> },
-}
-
-const DEFAULT_UNEXPECTED_INNER_ATTR_ERR_MSG: &str = "an inner attribute is not \
-                                                     permitted in this context";
-
-impl<'a> Parser<'a> {
-    /// Parses attributes that appear before an item.
-    crate fn parse_outer_attributes(&mut self) -> PResult<'a, Vec<ast::Attribute>> {
-        let mut attrs: Vec<ast::Attribute> = Vec::new();
-        let mut just_parsed_doc_comment = false;
-        loop {
-            debug!("parse_outer_attributes: self.token={:?}", self.token);
-            match self.token.kind {
-                token::Pound => {
-                    let inner_error_reason = if just_parsed_doc_comment {
-                        "an inner attribute is not permitted following an outer doc comment"
-                    } else if !attrs.is_empty() {
-                        "an inner attribute is not permitted following an outer attribute"
-                    } else {
-                        DEFAULT_UNEXPECTED_INNER_ATTR_ERR_MSG
-                    };
-                    let inner_parse_policy =
-                        InnerAttributeParsePolicy::NotPermitted {
-                            reason: inner_error_reason,
-                            saw_doc_comment: just_parsed_doc_comment,
-                            prev_attr_sp: attrs.last().and_then(|a| Some(a.span))
-                        };
-                    let attr = self.parse_attribute_with_inner_parse_policy(inner_parse_policy)?;
-                    attrs.push(attr);
-                    just_parsed_doc_comment = false;
-                }
-                token::DocComment(s) => {
-                    let attr = attr::mk_sugared_doc_attr(s, self.token.span);
-                    if attr.style != ast::AttrStyle::Outer {
-                        let mut err = self.fatal("expected outer doc comment");
-                        err.note("inner doc comments like this (starting with \
-                                  `//!` or `/*!`) can only appear before items");
-                        return Err(err);
-                    }
-                    attrs.push(attr);
-                    self.bump();
-                    just_parsed_doc_comment = true;
-                }
-                _ => break,
-            }
-        }
-        Ok(attrs)
-    }
-
-    /// Matches `attribute = # ! [ meta_item ]`.
-    ///
-    /// If `permit_inner` is `true`, then a leading `!` indicates an inner
-    /// attribute.
-    pub fn parse_attribute(&mut self, permit_inner: bool) -> PResult<'a, ast::Attribute> {
-        debug!("parse_attribute: permit_inner={:?} self.token={:?}",
-               permit_inner,
-               self.token);
-        let inner_parse_policy = if permit_inner {
-            InnerAttributeParsePolicy::Permitted
-        } else {
-            InnerAttributeParsePolicy::NotPermitted {
-                reason: DEFAULT_UNEXPECTED_INNER_ATTR_ERR_MSG,
-                saw_doc_comment: false,
-                prev_attr_sp: None
-            }
-        };
-        self.parse_attribute_with_inner_parse_policy(inner_parse_policy)
-    }
-
-    /// The same as `parse_attribute`, except it takes in an `InnerAttributeParsePolicy`
-    /// that prescribes how to handle inner attributes.
-    fn parse_attribute_with_inner_parse_policy(&mut self,
-                                               inner_parse_policy: InnerAttributeParsePolicy<'_>)
-                                               -> PResult<'a, ast::Attribute> {
-        debug!("parse_attribute_with_inner_parse_policy: inner_parse_policy={:?} self.token={:?}",
-               inner_parse_policy,
-               self.token);
-        let (span, item, style) = match self.token.kind {
-            token::Pound => {
-                let lo = self.token.span;
-                self.bump();
-
-                if let InnerAttributeParsePolicy::Permitted = inner_parse_policy {
-                    self.expected_tokens.push(TokenType::Token(token::Not));
-                }
-
-                let style = if self.token == token::Not {
-                    self.bump();
-                    ast::AttrStyle::Inner
-                } else {
-                    ast::AttrStyle::Outer
-                };
-
-                self.expect(&token::OpenDelim(token::Bracket))?;
-                let item = self.parse_attr_item()?;
-                self.expect(&token::CloseDelim(token::Bracket))?;
-                let hi = self.prev_span;
-
-                let attr_sp = lo.to(hi);
-
-                // Emit error if inner attribute is encountered and not permitted
-                if style == ast::AttrStyle::Inner {
-                    if let InnerAttributeParsePolicy::NotPermitted { reason,
-                        saw_doc_comment, prev_attr_sp } = inner_parse_policy {
-                        let prev_attr_note = if saw_doc_comment {
-                            "previous doc comment"
-                        } else {
-                            "previous outer attribute"
-                        };
-
-                        let mut diagnostic = self
-                            .diagnostic()
-                            .struct_span_err(attr_sp, reason);
-
-                        if let Some(prev_attr_sp) = prev_attr_sp {
-                            diagnostic
-                                .span_label(attr_sp, "not permitted following an outer attribute")
-                                .span_label(prev_attr_sp, prev_attr_note);
-                        }
-
-                        diagnostic
-                            .note("inner attributes, like `#![no_std]`, annotate the item \
-                                   enclosing them, and are usually found at the beginning of \
-                                   source files. Outer attributes, like `#[test]`, annotate the \
-                                   item following them.")
-                            .emit()
-                    }
-                }
-
-                (attr_sp, item, style)
-            }
-            _ => {
-                let token_str = self.this_token_to_string();
-                return Err(self.fatal(&format!("expected `#`, found `{}`", token_str)));
-            }
-        };
-
-        Ok(ast::Attribute {
-            item,
-            id: attr::mk_attr_id(),
-            style,
-            is_sugared_doc: false,
-            span,
-        })
-    }
-
-    /// Parses an inner part of an attribute (the path and following tokens).
-    /// The tokens must be either a delimited token stream, or empty token stream,
-    /// or the "legacy" key-value form.
-    ///     PATH `(` TOKEN_STREAM `)`
-    ///     PATH `[` TOKEN_STREAM `]`
-    ///     PATH `{` TOKEN_STREAM `}`
-    ///     PATH
-    ///     PATH `=` UNSUFFIXED_LIT
-    /// The delimiters or `=` are still put into the resulting token stream.
-    pub fn parse_attr_item(&mut self) -> PResult<'a, ast::AttrItem> {
-        let item = match self.token.kind {
-            token::Interpolated(ref nt) => match **nt {
-                Nonterminal::NtMeta(ref item) => Some(item.clone()),
-                _ => None,
-            },
-            _ => None,
-        };
-        Ok(if let Some(item) = item {
-            self.bump();
-            item
-        } else {
-            let path = self.parse_path(PathStyle::Mod)?;
-            let tokens = if self.check(&token::OpenDelim(DelimToken::Paren)) ||
-               self.check(&token::OpenDelim(DelimToken::Bracket)) ||
-               self.check(&token::OpenDelim(DelimToken::Brace)) {
-                   self.parse_token_tree().into()
-            } else if self.eat(&token::Eq) {
-                let eq = TokenTree::token(token::Eq, self.prev_span);
-                let mut is_interpolated_expr = false;
-                if let token::Interpolated(nt) = &self.token.kind {
-                    if let token::NtExpr(..) = **nt {
-                        is_interpolated_expr = true;
-                    }
-                }
-                let tokens = if is_interpolated_expr {
-                    // We need to accept arbitrary interpolated expressions to continue
-                    // supporting things like `doc = $expr` that work on stable.
-                    // Non-literal interpolated expressions are rejected after expansion.
-                    self.parse_token_tree().into()
-                } else {
-                    self.parse_unsuffixed_lit()?.tokens()
-                };
-                TokenStream::from_streams(smallvec![eq.into(), tokens])
-            } else {
-                TokenStream::empty()
-            };
-            ast::AttrItem { path, tokens }
-        })
-    }
-
-    /// Parses attributes that appear after the opening of an item. These should
-    /// be preceded by an exclamation mark, but we accept and warn about one
-    /// terminated by a semicolon.
-    ///
-    /// Matches `inner_attrs*`.
-    crate fn parse_inner_attributes(&mut self) -> PResult<'a, Vec<ast::Attribute>> {
-        let mut attrs: Vec<ast::Attribute> = vec![];
-        loop {
-            match self.token.kind {
-                token::Pound => {
-                    // Don't even try to parse if it's not an inner attribute.
-                    if !self.look_ahead(1, |t| t == &token::Not) {
-                        break;
-                    }
-
-                    let attr = self.parse_attribute(true)?;
-                    assert_eq!(attr.style, ast::AttrStyle::Inner);
-                    attrs.push(attr);
-                }
-                token::DocComment(s) => {
-                    // We need to get the position of this token before we bump.
-                    let attr = attr::mk_sugared_doc_attr(s, self.token.span);
-                    if attr.style == ast::AttrStyle::Inner {
-                        attrs.push(attr);
-                        self.bump();
-                    } else {
-                        break;
-                    }
-                }
-                _ => break,
-            }
-        }
-        Ok(attrs)
-    }
-
-    fn parse_unsuffixed_lit(&mut self) -> PResult<'a, ast::Lit> {
-        let lit = self.parse_lit()?;
-        debug!("checking if {:?} is unusuffixed", lit);
-
-        if !lit.kind.is_unsuffixed() {
-            let msg = "suffixed literals are not allowed in attributes";
-            self.diagnostic().struct_span_err(lit.span, msg)
-                             .help("instead of using a suffixed literal \
-                                    (1u8, 1.0f32, etc.), use an unsuffixed version \
-                                    (1, 1.0, etc.).")
-                             .emit()
-        }
-
-        Ok(lit)
-    }
-
-    /// Matches the following grammar (per RFC 1559).
-    ///
-    ///     meta_item : PATH ( '=' UNSUFFIXED_LIT | '(' meta_item_inner? ')' )? ;
-    ///     meta_item_inner : (meta_item | UNSUFFIXED_LIT) (',' meta_item_inner)? ;
-    pub fn parse_meta_item(&mut self) -> PResult<'a, ast::MetaItem> {
-        let nt_meta = match self.token.kind {
-            token::Interpolated(ref nt) => match **nt {
-                token::NtMeta(ref e) => Some(e.clone()),
-                _ => None,
-            },
-            _ => None,
-        };
-
-        if let Some(item) = nt_meta {
-            return match item.meta(item.path.span) {
-                Some(meta) => {
-                    self.bump();
-                    Ok(meta)
-                }
-                None => self.unexpected(),
-            }
-        }
-
-        let lo = self.token.span;
-        let path = self.parse_path(PathStyle::Mod)?;
-        let kind = self.parse_meta_item_kind()?;
-        let span = lo.to(self.prev_span);
-        Ok(ast::MetaItem { path, kind, span })
-    }
-
-    crate fn parse_meta_item_kind(&mut self) -> PResult<'a, ast::MetaItemKind> {
-        Ok(if self.eat(&token::Eq) {
-            ast::MetaItemKind::NameValue(self.parse_unsuffixed_lit()?)
-        } else if self.eat(&token::OpenDelim(token::Paren)) {
-            ast::MetaItemKind::List(self.parse_meta_seq()?)
-        } else {
-            ast::MetaItemKind::Word
-        })
-    }
-
-    /// Matches `meta_item_inner : (meta_item | UNSUFFIXED_LIT) ;`.
-    fn parse_meta_item_inner(&mut self) -> PResult<'a, ast::NestedMetaItem> {
-        match self.parse_unsuffixed_lit() {
-            Ok(lit) => {
-                return Ok(ast::NestedMetaItem::Literal(lit))
-            }
-            Err(ref mut err) => err.cancel(),
-        }
-
-        match self.parse_meta_item() {
-            Ok(mi) => {
-                return Ok(ast::NestedMetaItem::MetaItem(mi))
-            }
-            Err(ref mut err) => err.cancel(),
-        }
-
-        let found = self.this_token_to_string();
-        let msg = format!("expected unsuffixed literal or identifier, found `{}`", found);
-        Err(self.diagnostic().struct_span_err(self.token.span, &msg))
-    }
-
-    /// Matches `meta_seq = ( COMMASEP(meta_item_inner) )`.
-    fn parse_meta_seq(&mut self) -> PResult<'a, Vec<ast::NestedMetaItem>> {
-        self.parse_seq_to_end(&token::CloseDelim(token::Paren),
-                              SeqSep::trailing_allowed(token::Comma),
-                              |p: &mut Parser<'a>| p.parse_meta_item_inner())
-    }
-}
diff --git a/src/libsyntax/parse/diagnostics.rs b/src/libsyntax/parse/diagnostics.rs
deleted file mode 100644 (file)
index 4ad0bd0..0000000
+++ /dev/null
@@ -1,1393 +0,0 @@
-use crate::ast::{
-    self, Param, BinOpKind, BindingMode, BlockCheckMode, Expr, ExprKind, Ident, Item, ItemKind,
-    Mutability, Pat, PatKind, PathSegment, QSelf, Ty, TyKind, VariantData,
-};
-use crate::feature_gate::{feature_err, UnstableFeatures};
-use crate::parse::{SeqSep, PResult, Parser, ParseSess};
-use crate::parse::parser::{BlockMode, PathStyle, SemiColonMode, TokenType, TokenExpectType};
-use crate::parse::token::{self, TokenKind};
-use crate::print::pprust;
-use crate::ptr::P;
-use crate::symbol::{kw, sym};
-use crate::ThinVec;
-use crate::util::parser::AssocOp;
-use errors::{Applicability, DiagnosticBuilder, DiagnosticId, pluralise};
-use rustc_data_structures::fx::FxHashSet;
-use syntax_pos::{Span, DUMMY_SP, MultiSpan, SpanSnippetError};
-use log::{debug, trace};
-use std::mem;
-
-/// Creates a placeholder argument.
-crate fn dummy_arg(ident: Ident) -> Param {
-    let pat = P(Pat {
-        id: ast::DUMMY_NODE_ID,
-        kind: PatKind::Ident(BindingMode::ByValue(Mutability::Immutable), ident, None),
-        span: ident.span,
-    });
-    let ty = Ty {
-        kind: TyKind::Err,
-        span: ident.span,
-        id: ast::DUMMY_NODE_ID
-    };
-    Param {
-        attrs: ThinVec::default(),
-        id: ast::DUMMY_NODE_ID,
-        pat,
-        span: ident.span,
-        ty: P(ty),
-        is_placeholder: false,
-    }
-}
-
-pub enum Error {
-    FileNotFoundForModule {
-        mod_name: String,
-        default_path: String,
-        secondary_path: String,
-        dir_path: String,
-    },
-    DuplicatePaths {
-        mod_name: String,
-        default_path: String,
-        secondary_path: String,
-    },
-    UselessDocComment,
-    InclusiveRangeWithNoEnd,
-}
-
-impl Error {
-    fn span_err<S: Into<MultiSpan>>(
-        self,
-        sp: S,
-        handler: &errors::Handler,
-    ) -> DiagnosticBuilder<'_> {
-        match self {
-            Error::FileNotFoundForModule {
-                ref mod_name,
-                ref default_path,
-                ref secondary_path,
-                ref dir_path,
-            } => {
-                let mut err = struct_span_err!(
-                    handler,
-                    sp,
-                    E0583,
-                    "file not found for module `{}`",
-                    mod_name,
-                );
-                err.help(&format!(
-                    "name the file either {} or {} inside the directory \"{}\"",
-                    default_path,
-                    secondary_path,
-                    dir_path,
-                ));
-                err
-            }
-            Error::DuplicatePaths { ref mod_name, ref default_path, ref secondary_path } => {
-                let mut err = struct_span_err!(
-                    handler,
-                    sp,
-                    E0584,
-                    "file for module `{}` found at both {} and {}",
-                    mod_name,
-                    default_path,
-                    secondary_path,
-                );
-                err.help("delete or rename one of them to remove the ambiguity");
-                err
-            }
-            Error::UselessDocComment => {
-                let mut err = struct_span_err!(
-                    handler,
-                    sp,
-                    E0585,
-                    "found a documentation comment that doesn't document anything",
-                );
-                err.help("doc comments must come before what they document, maybe a comment was \
-                          intended with `//`?");
-                err
-            }
-            Error::InclusiveRangeWithNoEnd => {
-                let mut err = struct_span_err!(
-                    handler,
-                    sp,
-                    E0586,
-                    "inclusive range with no end",
-                );
-                err.help("inclusive ranges must be bounded at the end (`..=b` or `a..=b`)");
-                err
-            }
-        }
-    }
-}
-
-pub trait RecoverQPath: Sized + 'static {
-    const PATH_STYLE: PathStyle = PathStyle::Expr;
-    fn to_ty(&self) -> Option<P<Ty>>;
-    fn recovered(qself: Option<QSelf>, path: ast::Path) -> Self;
-}
-
-impl RecoverQPath for Ty {
-    const PATH_STYLE: PathStyle = PathStyle::Type;
-    fn to_ty(&self) -> Option<P<Ty>> {
-        Some(P(self.clone()))
-    }
-    fn recovered(qself: Option<QSelf>, path: ast::Path) -> Self {
-        Self {
-            span: path.span,
-            kind: TyKind::Path(qself, path),
-            id: ast::DUMMY_NODE_ID,
-        }
-    }
-}
-
-impl RecoverQPath for Pat {
-    fn to_ty(&self) -> Option<P<Ty>> {
-        self.to_ty()
-    }
-    fn recovered(qself: Option<QSelf>, path: ast::Path) -> Self {
-        Self {
-            span: path.span,
-            kind: PatKind::Path(qself, path),
-            id: ast::DUMMY_NODE_ID,
-        }
-    }
-}
-
-impl RecoverQPath for Expr {
-    fn to_ty(&self) -> Option<P<Ty>> {
-        self.to_ty()
-    }
-    fn recovered(qself: Option<QSelf>, path: ast::Path) -> Self {
-        Self {
-            span: path.span,
-            kind: ExprKind::Path(qself, path),
-            attrs: ThinVec::new(),
-            id: ast::DUMMY_NODE_ID,
-        }
-    }
-}
-
-impl<'a> Parser<'a> {
-    pub fn fatal(&self, m: &str) -> DiagnosticBuilder<'a> {
-        self.span_fatal(self.token.span, m)
-    }
-
-    pub fn span_fatal<S: Into<MultiSpan>>(&self, sp: S, m: &str) -> DiagnosticBuilder<'a> {
-        self.sess.span_diagnostic.struct_span_fatal(sp, m)
-    }
-
-    pub fn span_fatal_err<S: Into<MultiSpan>>(&self, sp: S, err: Error) -> DiagnosticBuilder<'a> {
-        err.span_err(sp, self.diagnostic())
-    }
-
-    pub fn bug(&self, m: &str) -> ! {
-        self.sess.span_diagnostic.span_bug(self.token.span, m)
-    }
-
-    pub fn span_err<S: Into<MultiSpan>>(&self, sp: S, m: &str) {
-        self.sess.span_diagnostic.span_err(sp, m)
-    }
-
-    crate fn struct_span_err<S: Into<MultiSpan>>(&self, sp: S, m: &str) -> DiagnosticBuilder<'a> {
-        self.sess.span_diagnostic.struct_span_err(sp, m)
-    }
-
-    crate fn span_bug<S: Into<MultiSpan>>(&self, sp: S, m: &str) -> ! {
-        self.sess.span_diagnostic.span_bug(sp, m)
-    }
-
-    crate fn diagnostic(&self) -> &'a errors::Handler {
-        &self.sess.span_diagnostic
-    }
-
-    crate fn span_to_snippet(&self, span: Span) -> Result<String, SpanSnippetError> {
-        self.sess.source_map().span_to_snippet(span)
-    }
-
-    crate fn expected_ident_found(&self) -> DiagnosticBuilder<'a> {
-        let mut err = self.struct_span_err(
-            self.token.span,
-            &format!("expected identifier, found {}", self.this_token_descr()),
-        );
-        if let token::Ident(name, false) = self.token.kind {
-            if Ident::new(name, self.token.span).is_raw_guess() {
-                err.span_suggestion(
-                    self.token.span,
-                    "you can escape reserved keywords to use them as identifiers",
-                    format!("r#{}", name),
-                    Applicability::MaybeIncorrect,
-                );
-            }
-        }
-        if let Some(token_descr) = self.token_descr() {
-            err.span_label(self.token.span, format!("expected identifier, found {}", token_descr));
-        } else {
-            err.span_label(self.token.span, "expected identifier");
-            if self.token == token::Comma && self.look_ahead(1, |t| t.is_ident()) {
-                err.span_suggestion(
-                    self.token.span,
-                    "remove this comma",
-                    String::new(),
-                    Applicability::MachineApplicable,
-                );
-            }
-        }
-        err
-    }
-
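The `r#{}` suggestion above relies on raw identifiers. A small runnable illustration of the escaped form it produces (the identifier chosen here is arbitrary):

    fn main() {
        // `let type = ...;` fails with "expected identifier, found keyword";
        // the suggested raw-identifier escape is accepted:
        let r#type = "text/plain";
        println!("{}", r#type);
    }
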
-    pub fn expected_one_of_not_found(
-        &mut self,
-        edible: &[TokenKind],
-        inedible: &[TokenKind],
-    ) -> PResult<'a, bool /* recovered */> {
-        fn tokens_to_string(tokens: &[TokenType]) -> String {
-            let mut i = tokens.iter();
-            // This might be a sign we need a connect method on `Iterator`.
-            let b = i.next()
-                     .map_or(String::new(), |t| t.to_string());
-            i.enumerate().fold(b, |mut b, (i, a)| {
-                if tokens.len() > 2 && i == tokens.len() - 2 {
-                    b.push_str(", or ");
-                } else if tokens.len() == 2 && i == tokens.len() - 2 {
-                    b.push_str(" or ");
-                } else {
-                    b.push_str(", ");
-                }
-                b.push_str(&a.to_string());
-                b
-            })
-        }
-
-        let mut expected = edible.iter()
-            .map(|x| TokenType::Token(x.clone()))
-            .chain(inedible.iter().map(|x| TokenType::Token(x.clone())))
-            .chain(self.expected_tokens.iter().cloned())
-            .collect::<Vec<_>>();
-        expected.sort_by_cached_key(|x| x.to_string());
-        expected.dedup();
-        let expect = tokens_to_string(&expected[..]);
-        let actual = self.this_token_to_string();
-        let (msg_exp, (label_sp, label_exp)) = if expected.len() > 1 {
-            let short_expect = if expected.len() > 6 {
-                format!("{} possible tokens", expected.len())
-            } else {
-                expect.clone()
-            };
-            (format!("expected one of {}, found `{}`", expect, actual),
-                (self.sess.source_map().next_point(self.prev_span),
-                format!("expected one of {} here", short_expect)))
-        } else if expected.is_empty() {
-            (format!("unexpected token: `{}`", actual),
-                (self.prev_span, "unexpected token after this".to_string()))
-        } else {
-            (format!("expected {}, found `{}`", expect, actual),
-                (self.sess.source_map().next_point(self.prev_span),
-                format!("expected {} here", expect)))
-        };
-        self.last_unexpected_token_span = Some(self.token.span);
-        let mut err = self.fatal(&msg_exp);
-        if self.token.is_ident_named(sym::and) {
-            err.span_suggestion_short(
-                self.token.span,
-                "use `&&` instead of `and` for the boolean operator",
-                "&&".to_string(),
-                Applicability::MaybeIncorrect,
-            );
-        }
-        if self.token.is_ident_named(sym::or) {
-            err.span_suggestion_short(
-                self.token.span,
-                "use `||` instead of `or` for the boolean operator",
-                "||".to_string(),
-                Applicability::MaybeIncorrect,
-            );
-        }
-        let sp = if self.token == token::Eof {
-            // This is EOF; don't want to point at the following char, but rather the last token.
-            self.prev_span
-        } else {
-            label_sp
-        };
-        match self.recover_closing_delimiter(&expected.iter().filter_map(|tt| match tt {
-            TokenType::Token(t) => Some(t.clone()),
-            _ => None,
-        }).collect::<Vec<_>>(), err) {
-            Err(e) => err = e,
-            Ok(recovered) => {
-                return Ok(recovered);
-            }
-        }
-
-        let is_semi_suggestable = expected.iter().any(|t| match t {
-            TokenType::Token(token::Semi) => true, // We expect a `;` here.
-            _ => false,
-        }) && ( // A `;` would be expected before the current keyword.
-            self.token.is_keyword(kw::Break) ||
-            self.token.is_keyword(kw::Continue) ||
-            self.token.is_keyword(kw::For) ||
-            self.token.is_keyword(kw::If) ||
-            self.token.is_keyword(kw::Let) ||
-            self.token.is_keyword(kw::Loop) ||
-            self.token.is_keyword(kw::Match) ||
-            self.token.is_keyword(kw::Return) ||
-            self.token.is_keyword(kw::While)
-        );
-        let sm = self.sess.source_map();
-        match (sm.lookup_line(self.token.span.lo()), sm.lookup_line(sp.lo())) {
-            (Ok(ref a), Ok(ref b)) if a.line != b.line && is_semi_suggestable => {
-                // The spans are in different lines, expected `;` and found `let` or `return`.
-                // High likelihood that it is only a missing `;`.
-                err.span_suggestion_short(
-                    label_sp,
-                    "a semicolon may be missing here",
-                    ";".to_string(),
-                    Applicability::MaybeIncorrect,
-                );
-                err.emit();
-                return Ok(true);
-            }
-            (Ok(ref a), Ok(ref b)) if a.line == b.line => {
-                // When the spans are in the same line, it means that the only content between
-                // them is whitespace, point at the found token in that case:
-                //
-                // X |     () => { syntax error };
-                //   |                    ^^^^^ expected one of 8 possible tokens here
-                //
-                // instead of having:
-                //
-                // X |     () => { syntax error };
-                //   |                   -^^^^^ unexpected token
-                //   |                   |
-                //   |                   expected one of 8 possible tokens here
-                err.span_label(self.token.span, label_exp);
-            }
-            _ if self.prev_span == syntax_pos::DUMMY_SP => {
-                // Account for macro context where the previous span might not be
-                // available to avoid incorrect output (#54841).
-                err.span_label(self.token.span, "unexpected token");
-            }
-            _ => {
-                err.span_label(sp, label_exp);
-                err.span_label(self.token.span, "unexpected token");
-            }
-        }
-        self.maybe_annotate_with_ascription(&mut err, false);
-        Err(err)
-    }
-
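The `tokens_to_string` helper above builds the "expected one of ..." list with a serial comma. A standalone sketch of the same joining rule over plain strings; `join_expected` is a hypothetical name used only for illustration:

    fn join_expected(tokens: &[&str]) -> String {
        let mut iter = tokens.iter();
        // Start from the first alternative, then append the rest with the
        // separator chosen by position: " or " for a pair, ", or " before the
        // last item of a longer list, ", " otherwise.
        let first = iter.next().map_or(String::new(), |t| t.to_string());
        iter.enumerate().fold(first, |mut out, (i, t)| {
            if tokens.len() > 2 && i == tokens.len() - 2 {
                out.push_str(", or ");
            } else if tokens.len() == 2 && i == tokens.len() - 2 {
                out.push_str(" or ");
            } else {
                out.push_str(", ");
            }
            out.push_str(t);
            out
        })
    }

    fn main() {
        assert_eq!(join_expected(&["`;`"]), "`;`");
        assert_eq!(join_expected(&["`;`", "`}`"]), "`;` or `}`");
        assert_eq!(join_expected(&["`,`", "`;`", "`}`"]), "`,`, `;`, or `}`");
    }
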
-    pub fn maybe_annotate_with_ascription(
-        &self,
-        err: &mut DiagnosticBuilder<'_>,
-        maybe_expected_semicolon: bool,
-    ) {
-        if let Some((sp, likely_path)) = self.last_type_ascription {
-            let sm = self.sess.source_map();
-            let next_pos = sm.lookup_char_pos(self.token.span.lo());
-            let op_pos = sm.lookup_char_pos(sp.hi());
-
-            if likely_path {
-                err.span_suggestion(
-                    sp,
-                    "maybe write a path separator here",
-                    "::".to_string(),
-                    match self.sess.unstable_features {
-                        UnstableFeatures::Disallow => Applicability::MachineApplicable,
-                        _ => Applicability::MaybeIncorrect,
-                    },
-                );
-            } else if op_pos.line != next_pos.line && maybe_expected_semicolon {
-                err.span_suggestion(
-                    sp,
-                    "try using a semicolon",
-                    ";".to_string(),
-                    Applicability::MaybeIncorrect,
-                );
-            } else if let UnstableFeatures::Disallow = self.sess.unstable_features {
-                err.span_label(sp, "tried to parse a type due to this");
-            } else {
-                err.span_label(sp, "tried to parse a type due to this type ascription");
-            }
-            if let UnstableFeatures::Disallow = self.sess.unstable_features {
-                // Give extra information about type ascription only if it's a nightly compiler.
-            } else {
-                err.note("`#![feature(type_ascription)]` lets you annotate an expression with a \
-                          type: `<expr>: <type>`");
-                err.note("for more information, see \
-                          https://github.com/rust-lang/rust/issues/23416");
-            }
-        }
-    }
-
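A rough illustration of the most common case the ascription annotation above catches: a single `:` typed where a path separator was meant. The corrected form that the `::` suggestion produces:

    fn main() {
        // `let v = Vec:new();` parses as a stray type ascription and triggers
        // the "maybe write a path separator here" suggestion; this is the fix:
        let v: Vec<i32> = Vec::new();
        assert!(v.is_empty());
    }
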
-    /// Eats and discards tokens until one of `kets` is encountered. Respects token trees,
-    /// passes through any errors encountered. Used for error recovery.
-    crate fn eat_to_tokens(&mut self, kets: &[&TokenKind]) {
-        if let Err(ref mut err) = self.parse_seq_to_before_tokens(
-            kets,
-            SeqSep::none(),
-            TokenExpectType::Expect,
-            |p| Ok(p.parse_token_tree()),
-        ) {
-            err.cancel();
-        }
-    }
-
-    /// This function checks if there are trailing angle brackets and produces
-    /// a diagnostic to suggest removing them.
-    ///
-    /// ```ignore (diagnostic)
-    /// let _ = vec![1, 2, 3].into_iter().collect::<Vec<usize>>>>();
-    ///                                                        ^^ help: remove extra angle brackets
-    /// ```
-    crate fn check_trailing_angle_brackets(&mut self, segment: &PathSegment, end: TokenKind) {
-        // This function is intended to be invoked after parsing a path segment where there are two
-        // cases:
-        //
-        // 1. A specific token is expected after the path segment.
-        //    eg. `x.foo(`, `x.foo::<u32>(` (parenthesis - method call),
-        //        `Foo::`, or `Foo::<Bar>::` (mod sep - continued path).
-        // 2. No specific token is expected after the path segment.
-        //    eg. `x.foo` (field access)
-        //
-        // This function is called after parsing `.foo` and before parsing the token `end` (if
-        // present). This includes any angle bracket arguments, such as `.foo::<u32>` or
-        // `Foo::<Bar>`.
-
-        // We only care about trailing angle brackets if we previously parsed angle bracket
-        // arguments. This helps stop us incorrectly suggesting that extra angle brackets be
-        // removed in this case:
-        //
-        // `x.foo >> (3)` (where `x.foo` is a `u32` for example)
-        //
-        // This case is particularly tricky as we won't notice it just looking at the tokens -
-        // it will appear the same (in terms of upcoming tokens) as below (since the `::<u32>` will
-        // have already been parsed):
-        //
-        // `x.foo::<u32>>>(3)`
-        let parsed_angle_bracket_args = segment.args
-            .as_ref()
-            .map(|args| args.is_angle_bracketed())
-            .unwrap_or(false);
-
-        debug!(
-            "check_trailing_angle_brackets: parsed_angle_bracket_args={:?}",
-            parsed_angle_bracket_args,
-        );
-        if !parsed_angle_bracket_args {
-            return;
-        }
-
-        // Keep the span at the start so we can highlight the sequence of `>` characters to be
-        // removed.
-        let lo = self.token.span;
-
-        // We need to look-ahead to see if we have `>` characters without moving the cursor forward
-        // (since we might have the field access case and the characters we're eating are
-        // actual operators and not trailing characters - ie `x.foo >> 3`).
-        let mut position = 0;
-
-        // We can encounter `>` or `>>` tokens in any order, so we need to keep track of how
-        // many of each (so we can correctly pluralize our error messages) and continue to
-        // advance.
-        let mut number_of_shr = 0;
-        let mut number_of_gt = 0;
-        while self.look_ahead(position, |t| {
-            trace!("check_trailing_angle_brackets: t={:?}", t);
-            if *t == token::BinOp(token::BinOpToken::Shr) {
-                number_of_shr += 1;
-                true
-            } else if *t == token::Gt {
-                number_of_gt += 1;
-                true
-            } else {
-                false
-            }
-        }) {
-            position += 1;
-        }
-
-        // If we didn't find any trailing `>` characters, then we have nothing to error about.
-        debug!(
-            "check_trailing_angle_brackets: number_of_gt={:?} number_of_shr={:?}",
-            number_of_gt, number_of_shr,
-        );
-        if number_of_gt < 1 && number_of_shr < 1 {
-            return;
-        }
-
-        // Finally, double check that we have our end token as otherwise this is the
-        // second case.
-        if self.look_ahead(position, |t| {
-            trace!("check_trailing_angle_brackets: t={:?}", t);
-            *t == end
-        }) {
-            // Eat from where we started until the end token so that parsing can continue
-            // as if we didn't have those extra angle brackets.
-            self.eat_to_tokens(&[&end]);
-            let span = lo.until(self.token.span);
-
-            let total_num_of_gt = number_of_gt + number_of_shr * 2;
-            self.diagnostic()
-                .struct_span_err(
-                    span,
-                    &format!("unmatched angle bracket{}", pluralise!(total_num_of_gt)),
-                )
-                .span_suggestion(
-                    span,
-                    &format!("remove extra angle bracket{}", pluralise!(total_num_of_gt)),
-                    String::new(),
-                    Applicability::MachineApplicable,
-                )
-                .emit();
-        }
-    }
-
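The doc example above, with the extra `>>` removed as the machine-applicable suggestion would do, compiles as expected:

    fn main() {
        let v = vec![1, 2, 3].into_iter().collect::<Vec<usize>>();
        assert_eq!(v, vec![1, 2, 3]);
    }
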
-    /// Produces an error if comparison operators are chained (RFC #558).
-    /// We only need to check the LHS, not the RHS, because all comparison ops
-    /// have same precedence and are left-associative.
-    crate fn check_no_chained_comparison(&self, lhs: &Expr, outer_op: &AssocOp) -> PResult<'a, ()> {
-        debug_assert!(outer_op.is_comparison(),
-                      "check_no_chained_comparison: {:?} is not comparison",
-                      outer_op);
-        match lhs.kind {
-            ExprKind::Binary(op, _, _) if op.node.is_comparison() => {
-                // Respan to include both operators.
-                let op_span = op.span.to(self.token.span);
-                let mut err = self.struct_span_err(
-                    op_span,
-                    "chained comparison operators require parentheses",
-                );
-                if op.node == BinOpKind::Lt &&
-                    *outer_op == AssocOp::Less ||  // Include `<` to provide this recommendation
-                    *outer_op == AssocOp::Greater  // even in a case like the following:
-                {                                  //     Foo<Bar<Baz<Qux, ()>>>
-                    err.help(
-                        "use `::<...>` instead of `<...>` if you meant to specify type arguments");
-                    err.help("or use `(...)` if you meant to specify fn arguments");
-                    // These cases cause too many knock-down errors, bail out (#61329).
-                    return Err(err);
-                }
-                err.emit();
-            }
-            _ => {}
-        }
-        Ok(())
-    }
-
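A small runnable contrast for the two hints emitted above: the explicit boolean rewrite of a chained comparison, and the turbofish spelling that the `::<...>` help points towards:

    fn main() {
        let (a, b, c) = (1, 2, 3);
        // `a < b < c` is the chained form rejected above; spell the intent out:
        assert!(a < b && b < c);
        // Generic arguments written without `::` hit the same check; the
        // turbofish form is the accepted spelling:
        let n = "42".parse::<i32>().unwrap();
        assert_eq!(n, 42);
    }
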
-    crate fn maybe_report_ambiguous_plus(
-        &mut self,
-        allow_plus: bool,
-        impl_dyn_multi: bool,
-        ty: &Ty,
-    ) {
-        if !allow_plus && impl_dyn_multi {
-            let sum_with_parens = format!("({})", pprust::ty_to_string(&ty));
-            self.struct_span_err(ty.span, "ambiguous `+` in a type")
-                .span_suggestion(
-                    ty.span,
-                    "use parentheses to disambiguate",
-                    sum_with_parens,
-                    Applicability::MachineApplicable,
-                )
-                .emit();
-        }
-    }
-
-    crate fn maybe_report_invalid_custom_discriminants(
-        sess: &ParseSess,
-        variants: &[ast::Variant],
-    ) {
-        let has_fields = variants.iter().any(|variant| match variant.data {
-            VariantData::Tuple(..) | VariantData::Struct(..) => true,
-            VariantData::Unit(..) => false,
-        });
-
-        let discriminant_spans = variants.iter().filter(|variant| match variant.data {
-            VariantData::Tuple(..) | VariantData::Struct(..) => false,
-            VariantData::Unit(..) => true,
-        })
-        .filter_map(|variant| variant.disr_expr.as_ref().map(|c| c.value.span))
-        .collect::<Vec<_>>();
-
-        if !discriminant_spans.is_empty() && has_fields {
-            let mut err = feature_err(
-                sess,
-                sym::arbitrary_enum_discriminant,
-                discriminant_spans.clone(),
-                crate::feature_gate::GateIssue::Language,
-                "custom discriminant values are not allowed in enums with tuple or struct variants",
-            );
-            for sp in discriminant_spans {
-                err.span_label(sp, "disallowed custom discriminant");
-            }
-            for variant in variants.iter() {
-                match &variant.data {
-                    VariantData::Struct(..) => {
-                        err.span_label(
-                            variant.span,
-                            "struct variant defined here",
-                        );
-                    }
-                    VariantData::Tuple(..) => {
-                        err.span_label(
-                            variant.span,
-                            "tuple variant defined here",
-                        );
-                    }
-                    VariantData::Unit(..) => {}
-                }
-            }
-            err.emit();
-        }
-    }
-
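For contrast with the gate above: explicit discriminants on an enum whose variants are all unit variants are accepted on stable, which is why the check only fires when tuple or struct variants are also present. The enum here is purely illustrative:

    enum Status {
        Ok = 200,
        NotFound = 404,
    }

    fn main() {
        assert_eq!(Status::Ok as u16, 200);
        assert_eq!(Status::NotFound as u16, 404);
    }
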
-    crate fn maybe_recover_from_bad_type_plus(
-        &mut self,
-        allow_plus: bool,
-        ty: &Ty,
-    ) -> PResult<'a, ()> {
-        // Do not add `+` to expected tokens.
-        if !allow_plus || !self.token.is_like_plus() {
-            return Ok(());
-        }
-
-        self.bump(); // `+`
-        let bounds = self.parse_generic_bounds(None)?;
-        let sum_span = ty.span.to(self.prev_span);
-
-        let mut err = struct_span_err!(
-            self.sess.span_diagnostic,
-            sum_span,
-            E0178,
-            "expected a path on the left-hand side of `+`, not `{}`",
-            pprust::ty_to_string(ty)
-        );
-
-        match ty.kind {
-            TyKind::Rptr(ref lifetime, ref mut_ty) => {
-                let sum_with_parens = pprust::to_string(|s| {
-                    s.s.word("&");
-                    s.print_opt_lifetime(lifetime);
-                    s.print_mutability(mut_ty.mutbl);
-                    s.popen();
-                    s.print_type(&mut_ty.ty);
-                    s.print_type_bounds(" +", &bounds);
-                    s.pclose()
-                });
-                err.span_suggestion(
-                    sum_span,
-                    "try adding parentheses",
-                    sum_with_parens,
-                    Applicability::MachineApplicable,
-                );
-            }
-            TyKind::Ptr(..) | TyKind::BareFn(..) => {
-                err.span_label(sum_span, "perhaps you forgot parentheses?");
-            }
-            _ => {
-                err.span_label(sum_span, "expected a path");
-            }
-        }
-        err.emit();
-        Ok(())
-    }
-
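The E0178 recovery above suggests parenthesizing the bound list. A minimal example of the reference case after that suggestion is applied; the trait choice is arbitrary:

    use std::fmt::Debug;

    // `&dyn Debug + Send` is the ambiguous form; the parenthesized version is valid:
    fn show(value: &(dyn Debug + Send)) {
        println!("{:?}", value);
    }

    fn main() {
        show(&7_i32);
    }
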
-    /// Tries to recover from associated item paths like `[T]::AssocItem` / `(T, U)::AssocItem`.
-    /// Attempts to convert the base expression/pattern/type into a type, parses the `::AssocItem`
-    /// tail, and combines them into a `<Ty>::AssocItem` expression/pattern/type.
-    crate fn maybe_recover_from_bad_qpath<T: RecoverQPath>(
-        &mut self,
-        base: P<T>,
-        allow_recovery: bool,
-    ) -> PResult<'a, P<T>> {
-        // Do not add `::` to expected tokens.
-        if allow_recovery && self.token == token::ModSep {
-            if let Some(ty) = base.to_ty() {
-                return self.maybe_recover_from_bad_qpath_stage_2(ty.span, ty);
-            }
-        }
-        Ok(base)
-    }
-
-    /// Given an already parsed `Ty`, parses the `::AssocItem` tail and
-    /// combines them into a `<Ty>::AssocItem` expression/pattern/type.
-    crate fn maybe_recover_from_bad_qpath_stage_2<T: RecoverQPath>(
-        &mut self,
-        ty_span: Span,
-        ty: P<Ty>,
-    ) -> PResult<'a, P<T>> {
-        self.expect(&token::ModSep)?;
-
-        let mut path = ast::Path {
-            segments: Vec::new(),
-            span: DUMMY_SP,
-        };
-        self.parse_path_segments(&mut path.segments, T::PATH_STYLE)?;
-        path.span = ty_span.to(self.prev_span);
-
-        let ty_str = self
-            .span_to_snippet(ty_span)
-            .unwrap_or_else(|_| pprust::ty_to_string(&ty));
-        self.diagnostic()
-            .struct_span_err(path.span, "missing angle brackets in associated item path")
-            .span_suggestion(
-                // This is a best-effort recovery.
-                path.span,
-                "try",
-                format!("<{}>::{}", ty_str, path),
-                Applicability::MaybeIncorrect,
-            )
-            .emit();
-
-        let path_span = ty_span.shrink_to_hi(); // Use an empty path since `position == 0`.
-        Ok(P(T::recovered(
-            Some(QSelf {
-                ty,
-                path_span,
-                position: 0,
-            }),
-            path,
-        )))
-    }
-
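The `<Ty>::AssocItem` form produced by the suggestion above is ordinary qualified-path syntax. A runnable example on a slice type:

    fn main() {
        // `[i32]::len(...)` is the malformed spelling this recovery rewrites;
        // the angle-bracketed qualified path is the valid one:
        let n = <[i32]>::len(&[1, 2, 3]);
        assert_eq!(n, 3);
    }
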
-    crate fn maybe_consume_incorrect_semicolon(&mut self, items: &[P<Item>]) -> bool {
-        if self.eat(&token::Semi) {
-            let mut err = self.struct_span_err(self.prev_span, "expected item, found `;`");
-            err.span_suggestion_short(
-                self.prev_span,
-                "remove this semicolon",
-                String::new(),
-                Applicability::MachineApplicable,
-            );
-            if !items.is_empty() {
-                let previous_item = &items[items.len() - 1];
-                let previous_item_kind_name = match previous_item.kind {
-                    // Say "braced struct" because tuple-structs and
-                    // braceless-empty-struct declarations do take a semicolon.
-                    ItemKind::Struct(..) => Some("braced struct"),
-                    ItemKind::Enum(..) => Some("enum"),
-                    ItemKind::Trait(..) => Some("trait"),
-                    ItemKind::Union(..) => Some("union"),
-                    _ => None,
-                };
-                if let Some(name) = previous_item_kind_name {
-                    err.help(&format!(
-                        "{} declarations are not followed by a semicolon",
-                        name
-                    ));
-                }
-            }
-            err.emit();
-            true
-        } else {
-            false
-        }
-    }
-
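A short illustration of the distinction drawn by the help text above: braced declarations take no trailing semicolon, while tuple structs require one:

    struct Point { x: i32, y: i32 }  // braced struct: no `;` after the closing brace
    struct Pair(i32, i32);           // tuple struct: the `;` is required

    fn main() {
        let p = Point { x: 1, y: 2 };
        let q = Pair(3, 4);
        println!("{} {} {} {}", p.x, p.y, q.0, q.1);
    }
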
-    /// Creates a `DiagnosticBuilder` for an unexpected token `t` and tries to recover if it is a
-    /// closing delimiter.
-    pub fn unexpected_try_recover(
-        &mut self,
-        t: &TokenKind,
-    ) -> PResult<'a, bool /* recovered */> {
-        let token_str = pprust::token_kind_to_string(t);
-        let this_token_str = self.this_token_descr();
-        let (prev_sp, sp) = match (&self.token.kind, self.subparser_name) {
-            // Point at the end of the macro call when reaching end of macro arguments.
-            (token::Eof, Some(_)) => {
-                let sp = self.sess.source_map().next_point(self.token.span);
-                (sp, sp)
-            }
-            // We don't want to point at the following span after DUMMY_SP.
-            // This happens when the parser finds an empty TokenStream.
-            _ if self.prev_span == DUMMY_SP => (self.token.span, self.token.span),
-            // EOF, don't want to point at the following char, but rather the last token.
-            (token::Eof, None) => (self.prev_span, self.token.span),
-            _ => (self.sess.source_map().next_point(self.prev_span), self.token.span),
-        };
-        let msg = format!(
-            "expected `{}`, found {}",
-            token_str,
-            match (&self.token.kind, self.subparser_name) {
-                (token::Eof, Some(origin)) => format!("end of {}", origin),
-                _ => this_token_str,
-            },
-        );
-        let mut err = self.struct_span_err(sp, &msg);
-        let label_exp = format!("expected `{}`", token_str);
-        match self.recover_closing_delimiter(&[t.clone()], err) {
-            Err(e) => err = e,
-            Ok(recovered) => {
-                return Ok(recovered);
-            }
-        }
-        let sm = self.sess.source_map();
-        match (sm.lookup_line(prev_sp.lo()), sm.lookup_line(sp.lo())) {
-            (Ok(ref a), Ok(ref b)) if a.line == b.line => {
-                // When the spans are in the same line, it means that the only content
-                // between them is whitespace, point only at the found token.
-                err.span_label(sp, label_exp);
-            }
-            _ => {
-                err.span_label(prev_sp, label_exp);
-                err.span_label(sp, "unexpected token");
-            }
-        }
-        Err(err)
-    }
-
-    crate fn parse_semi_or_incorrect_foreign_fn_body(
-        &mut self,
-        ident: &Ident,
-        extern_sp: Span,
-    ) -> PResult<'a, ()> {
-        if self.token != token::Semi {
-            // This might be an incorrect fn definition (#62109).
-            let parser_snapshot = self.clone();
-            match self.parse_inner_attrs_and_block() {
-                Ok((_, body)) => {
-                    self.struct_span_err(ident.span, "incorrect `fn` inside `extern` block")
-                        .span_label(ident.span, "can't have a body")
-                        .span_label(body.span, "this body is invalid here")
-                        .span_label(
-                            extern_sp,
-                            "`extern` blocks define existing foreign functions and `fn`s \
-                             inside of them cannot have a body")
-                        .help("you might have meant to write a function accessible through ffi, \
-                               which can be done by writing `extern fn` outside of the \
-                               `extern` block")
-                        .note("for more information, visit \
-                               https://doc.rust-lang.org/std/keyword.extern.html")
-                        .emit();
-                }
-                Err(mut err) => {
-                    err.cancel();
-                    mem::replace(self, parser_snapshot);
-                    self.expect(&token::Semi)?;
-                }
-            }
-        } else {
-            self.bump();
-        }
-        Ok(())
-    }
-
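For reference, the shape the help message above points towards: declarations only inside the `extern` block, and any body on an `extern fn` defined outside it. The foreign symbol name below is hypothetical and never called:

    #[allow(dead_code)]
    extern "C" {
        fn c_side_init(); // hypothetical foreign declaration: no body allowed here
    }

    extern "C" fn exported(x: i32) -> i32 { // a body is fine outside the block
        x * 2
    }

    fn main() {
        assert_eq!(exported(21), 42);
    }
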
-    /// Consumes alternative await syntaxes like `await!(<expr>)`, `await <expr>`,
-    /// `await? <expr>`, `await(<expr>)`, and `await { <expr> }`.
-    crate fn parse_incorrect_await_syntax(
-        &mut self,
-        lo: Span,
-        await_sp: Span,
-    ) -> PResult<'a, (Span, ExprKind)> {
-        if self.token == token::Not {
-            // Handle `await!(<expr>)`.
-            self.expect(&token::Not)?;
-            self.expect(&token::OpenDelim(token::Paren))?;
-            let expr = self.parse_expr()?;
-            self.expect(&token::CloseDelim(token::Paren))?;
-            let sp = self.error_on_incorrect_await(lo, self.prev_span, &expr, false);
-            return Ok((sp, ExprKind::Await(expr)))
-        }
-
-        let is_question = self.eat(&token::Question); // Handle `await? <expr>`.
-        let expr = if self.token == token::OpenDelim(token::Brace) {
-            // Handle `await { <expr> }`.
-            // This needs to be handled separately from the next arm to avoid
-            // interpreting `await { <expr> }?` as `<expr>?.await`.
-            self.parse_block_expr(
-                None,
-                self.token.span,
-                BlockCheckMode::Default,
-                ThinVec::new(),
-            )
-        } else {
-            self.parse_expr()
-        }.map_err(|mut err| {
-            err.span_label(await_sp, "while parsing this incorrect await expression");
-            err
-        })?;
-        let sp = self.error_on_incorrect_await(lo, expr.span, &expr, is_question);
-        Ok((sp, ExprKind::Await(expr)))
-    }
-
-    fn error_on_incorrect_await(&self, lo: Span, hi: Span, expr: &Expr, is_question: bool) -> Span {
-        let expr_str = self.span_to_snippet(expr.span)
-            .unwrap_or_else(|_| pprust::expr_to_string(&expr));
-        let suggestion = format!("{}.await{}", expr_str, if is_question { "?" } else { "" });
-        let sp = lo.to(hi);
-        let app = match expr.kind {
-            ExprKind::Try(_) => Applicability::MaybeIncorrect, // `await <expr>?`
-            _ => Applicability::MachineApplicable,
-        };
-        self.struct_span_err(sp, "incorrect use of `await`")
-            .span_suggestion(sp, "`await` is a postfix operation", suggestion, app)
-            .emit();
-        sp
-    }
-
-    /// If encountering `future.await()`, consumes and emits an error.
-    crate fn recover_from_await_method_call(&mut self) {
-        if self.token == token::OpenDelim(token::Paren) &&
-            self.look_ahead(1, |t| t == &token::CloseDelim(token::Paren))
-        {
-            // future.await()
-            let lo = self.token.span;
-            self.bump(); // (
-            let sp = lo.to(self.token.span);
-            self.bump(); // )
-            self.struct_span_err(sp, "incorrect use of `await`")
-                .span_suggestion(
-                    sp,
-                    "`await` is not a method call, remove the parentheses",
-                    String::new(),
-                    Applicability::MachineApplicable,
-                ).emit()
-        }
-    }
-
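Both recoveries above funnel users towards postfix `.await`. A minimal sketch of that surface syntax on a toolchain where `async`/`await` is available; the future is only constructed, since driving it would need an executor:

    async fn fetch() -> u32 {
        7
    }

    async fn run() -> u32 {
        // `await fetch()`, `await!(fetch())` and `fetch().await()` are all
        // steered by the recoveries above to this postfix form:
        fetch().await + 1
    }

    fn main() {
        let _future = run(); // constructing the future is enough to type-check the syntax
    }
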
-    /// Recovers a situation like `for ( $pat in $expr )`
-    /// and suggest writing `for $pat in $expr` instead.
-    ///
-    /// This should be called before parsing the `$block`.
-    crate fn recover_parens_around_for_head(
-        &mut self,
-        pat: P<Pat>,
-        expr: &Expr,
-        begin_paren: Option<Span>,
-    ) -> P<Pat> {
-        match (&self.token.kind, begin_paren) {
-            (token::CloseDelim(token::Paren), Some(begin_par_sp)) => {
-                self.bump();
-
-                let pat_str = self
-                    // Remove the `(` from the span of the pattern:
-                    .span_to_snippet(pat.span.trim_start(begin_par_sp).unwrap())
-                    .unwrap_or_else(|_| pprust::pat_to_string(&pat));
-
-                self.struct_span_err(self.prev_span, "unexpected closing `)`")
-                    .span_label(begin_par_sp, "opening `(`")
-                    .span_suggestion(
-                        begin_par_sp.to(self.prev_span),
-                        "remove parenthesis in `for` loop",
-                        format!("{} in {}", pat_str, pprust::expr_to_string(&expr)),
-                        // With e.g. `for (x) in y)` this would replace `(x) in y)`
-                        // with `x) in y)` which is syntactically invalid.
-                        // However, this is prevented before we get here.
-                        Applicability::MachineApplicable,
-                    )
-                    .emit();
-
-                // Unwrap `(pat)` into `pat` to avoid the `unused_parens` lint.
-                pat.and_then(|pat| match pat.kind {
-                    PatKind::Paren(pat) => pat,
-                    _ => P(pat),
-                })
-            }
-            _ => pat,
-        }
-    }
-
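The suggested rewrite above drops the parentheses around the loop head. A runnable before/after, with the rejected form kept in a comment:

    fn main() {
        let ys = vec![1, 2, 3];
        // `for (x in ys) { ... }` triggers the "unexpected closing `)`" recovery;
        // the suggested form simply removes the parentheses:
        for x in ys {
            println!("{}", x);
        }
    }
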
-    crate fn could_ascription_be_path(&self, node: &ast::ExprKind) -> bool {
-        self.token.is_ident() &&
-            if let ast::ExprKind::Path(..) = node { true } else { false } &&
-            !self.token.is_reserved_ident() &&           // v `foo:bar(baz)`
-            self.look_ahead(1, |t| t == &token::OpenDelim(token::Paren)) ||
-            self.look_ahead(1, |t| t == &token::Lt) &&     // `foo:bar<baz`
-            self.look_ahead(2, |t| t.is_ident()) ||
-            self.look_ahead(1, |t| t == &token::Colon) &&  // `foo:bar:baz`
-            self.look_ahead(2, |t| t.is_ident()) ||
-            self.look_ahead(1, |t| t == &token::ModSep) &&
-            (self.look_ahead(2, |t| t.is_ident()) ||   // `foo:bar::baz`
-             self.look_ahead(2, |t| t == &token::Lt))  // `foo:bar::<baz>`
-    }
-
-    crate fn recover_seq_parse_error(
-        &mut self,
-        delim: token::DelimToken,
-        lo: Span,
-        result: PResult<'a, P<Expr>>,
-    ) -> P<Expr> {
-        match result {
-            Ok(x) => x,
-            Err(mut err) => {
-                err.emit();
-                // Recover from parse error.
-                self.consume_block(delim);
-                self.mk_expr(lo.to(self.prev_span), ExprKind::Err, ThinVec::new())
-            }
-        }
-    }
-
-    crate fn recover_closing_delimiter(
-        &mut self,
-        tokens: &[TokenKind],
-        mut err: DiagnosticBuilder<'a>,
-    ) -> PResult<'a, bool> {
-        let mut pos = None;
-        // We want to use the last closing delim that would apply.
-        for (i, unmatched) in self.unclosed_delims.iter().enumerate().rev() {
-            if tokens.contains(&token::CloseDelim(unmatched.expected_delim))
-                && Some(self.token.span) > unmatched.unclosed_span
-            {
-                pos = Some(i);
-            }
-        }
-        match pos {
-            Some(pos) => {
-                // Recover and assume that the detected unclosed delimiter was meant for
-                // this location. Emit the diagnostic and act as if the delimiter was
-                // present for the parser's sake.
-
-                 // Don't attempt to recover from this unclosed delimiter more than once.
-                let unmatched = self.unclosed_delims.remove(pos);
-                let delim = TokenType::Token(token::CloseDelim(unmatched.expected_delim));
-
-                // We want to suggest the inclusion of the closing delimiter where it makes
-                // the most sense, which is immediately after the last token:
-                //
-                //  {foo(bar {}}
-                //      -      ^
-                //      |      |
-                //      |      help: `)` may belong here
-                //      |
-                //      unclosed delimiter
-                if let Some(sp) = unmatched.unclosed_span {
-                    err.span_label(sp, "unclosed delimiter");
-                }
-                err.span_suggestion_short(
-                    self.sess.source_map().next_point(self.prev_span),
-                    &format!("{} may belong here", delim.to_string()),
-                    delim.to_string(),
-                    Applicability::MaybeIncorrect,
-                );
-                err.emit();
-                self.expected_tokens.clear();  // reduce errors
-                Ok(true)
-            }
-            _ => Err(err),
-        }
-    }
-
-    /// Recovers from `pub` keyword in places where it seems _reasonable_ but isn't valid.
-    crate fn eat_bad_pub(&mut self) {
-        if self.token.is_keyword(kw::Pub) {
-            match self.parse_visibility(false) {
-                Ok(vis) => {
-                    self.diagnostic()
-                        .struct_span_err(vis.span, "unnecessary visibility qualifier")
-                        .span_label(vis.span, "`pub` not permitted here")
-                        .emit();
-                }
-                Err(mut err) => err.emit(),
-            }
-        }
-    }
-
-    /// Eats tokens until we can be relatively sure we reached the end of the
-    /// statement. This is something of a best-effort heuristic.
-    ///
-    /// We terminate when we find an unmatched `}` (without consuming it).
-    crate fn recover_stmt(&mut self) {
-        self.recover_stmt_(SemiColonMode::Ignore, BlockMode::Ignore)
-    }
-
-    /// If `break_on_semi` is `Break`, then we will stop consuming tokens after
-    /// finding (and consuming) a `;` outside of `{}` or `[]` (note that this is
-    /// approximate -- it can mean we break too early due to macros, but that
-    /// should only lead to sub-optimal recovery, not inaccurate parsing).
-    ///
-    /// If `break_on_block` is `Break`, then we will stop consuming tokens
-    /// after finding (and consuming) a brace-delimited block.
-    crate fn recover_stmt_(&mut self, break_on_semi: SemiColonMode, break_on_block: BlockMode) {
-        let mut brace_depth = 0;
-        let mut bracket_depth = 0;
-        let mut in_block = false;
-        debug!("recover_stmt_ enter loop (semi={:?}, block={:?})",
-               break_on_semi, break_on_block);
-        loop {
-            debug!("recover_stmt_ loop {:?}", self.token);
-            match self.token.kind {
-                token::OpenDelim(token::DelimToken::Brace) => {
-                    brace_depth += 1;
-                    self.bump();
-                    if break_on_block == BlockMode::Break &&
-                       brace_depth == 1 &&
-                       bracket_depth == 0 {
-                        in_block = true;
-                    }
-                }
-                token::OpenDelim(token::DelimToken::Bracket) => {
-                    bracket_depth += 1;
-                    self.bump();
-                }
-                token::CloseDelim(token::DelimToken::Brace) => {
-                    if brace_depth == 0 {
-                        debug!("recover_stmt_ return - close delim {:?}", self.token);
-                        break;
-                    }
-                    brace_depth -= 1;
-                    self.bump();
-                    if in_block && bracket_depth == 0 && brace_depth == 0 {
-                        debug!("recover_stmt_ return - block end {:?}", self.token);
-                        break;
-                    }
-                }
-                token::CloseDelim(token::DelimToken::Bracket) => {
-                    bracket_depth -= 1;
-                    if bracket_depth < 0 {
-                        bracket_depth = 0;
-                    }
-                    self.bump();
-                }
-                token::Eof => {
-                    debug!("recover_stmt_ return - Eof");
-                    break;
-                }
-                token::Semi => {
-                    self.bump();
-                    if break_on_semi == SemiColonMode::Break &&
-                       brace_depth == 0 &&
-                       bracket_depth == 0 {
-                        debug!("recover_stmt_ return - Semi");
-                        break;
-                    }
-                }
-                token::Comma if break_on_semi == SemiColonMode::Comma &&
-                       brace_depth == 0 &&
-                       bracket_depth == 0 =>
-                {
-                    debug!("recover_stmt_ return - Semi");
-                    break;
-                }
-                _ => {
-                    self.bump()
-                }
-            }
-        }
-    }
-
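A compressed standalone sketch of the depth-tracking idea in `recover_stmt_`: scan forward, keep brace and bracket depths, stop at an unmatched `}` without consuming it, and stop after a top-level `;`. It operates on characters rather than real tokens, and `skip_to_stmt_end` is a hypothetical name used only for illustration:

    fn skip_to_stmt_end(src: &str) -> usize {
        let (mut brace, mut bracket) = (0i32, 0i32);
        for (i, c) in src.char_indices() {
            match c {
                '{' => brace += 1,
                '[' => bracket += 1,
                ']' => bracket = (bracket - 1).max(0), // clamp, as above
                '}' => {
                    if brace == 0 {
                        return i; // unmatched `}`: stop here, don't consume it
                    }
                    brace -= 1;
                }
                ';' if brace == 0 && bracket == 0 => return i + 1, // consume the `;`
                _ => {}
            }
        }
        src.len()
    }

    fn main() {
        assert_eq!(skip_to_stmt_end("let x = [1, 2];"), 15);
        assert_eq!(skip_to_stmt_end("garbage }"), 8);
    }
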
-    crate fn check_for_for_in_in_typo(&mut self, in_span: Span) {
-        if self.eat_keyword(kw::In) {
-            // a common typo: `for _ in in bar {}`
-            self.struct_span_err(self.prev_span, "expected iterable, found keyword `in`")
-                .span_suggestion_short(
-                    in_span.until(self.prev_span),
-                    "remove the duplicated `in`",
-                    String::new(),
-                    Applicability::MachineApplicable,
-                )
-                .emit();
-        }
-    }
-
-    crate fn expected_semi_or_open_brace<T>(&mut self) -> PResult<'a, T> {
-        let token_str = self.this_token_descr();
-        let mut err = self.fatal(&format!("expected `;` or `{{`, found {}", token_str));
-        err.span_label(self.token.span, "expected `;` or `{`");
-        Err(err)
-    }
-
-    crate fn eat_incorrect_doc_comment_for_param_type(&mut self) {
-        if let token::DocComment(_) = self.token.kind {
-            self.struct_span_err(
-                self.token.span,
-                "documentation comments cannot be applied to a function parameter's type",
-            )
-            .span_label(self.token.span, "doc comments are not allowed here")
-            .emit();
-            self.bump();
-        } else if self.token == token::Pound && self.look_ahead(1, |t| {
-            *t == token::OpenDelim(token::Bracket)
-        }) {
-            let lo = self.token.span;
-            // Skip every token until next possible arg.
-            while self.token != token::CloseDelim(token::Bracket) {
-                self.bump();
-            }
-            let sp = lo.to(self.token.span);
-            self.bump();
-            self.struct_span_err(
-                sp,
-                "attributes cannot be applied to a function parameter's type",
-            )
-            .span_label(sp, "attributes are not allowed here")
-            .emit();
-        }
-    }
-
-    crate fn parameter_without_type(
-        &mut self,
-        err: &mut DiagnosticBuilder<'_>,
-        pat: P<ast::Pat>,
-        require_name: bool,
-        is_self_allowed: bool,
-        is_trait_item: bool,
-    ) -> Option<Ident> {
-        // If we find a pattern followed by an identifier, it could be an (incorrect)
-        // C-style parameter declaration.
-        if self.check_ident() && self.look_ahead(1, |t| {
-            *t == token::Comma || *t == token::CloseDelim(token::Paren)
-        }) { // `fn foo(String s) {}`
-            let ident = self.parse_ident().unwrap();
-            let span = pat.span.with_hi(ident.span.hi());
-
-            err.span_suggestion(
-                span,
-                "declare the type after the parameter binding",
-                String::from("<identifier>: <type>"),
-                Applicability::HasPlaceholders,
-            );
-            return Some(ident);
-        } else if let PatKind::Ident(_, ident, _) = pat.kind {
-            if require_name && (
-                is_trait_item ||
-                self.token == token::Comma ||
-                self.token == token::Lt ||
-                self.token == token::CloseDelim(token::Paren)
-            ) { // `fn foo(a, b) {}`, `fn foo(a<x>, b<y>) {}` or `fn foo(usize, usize) {}`
-                if is_self_allowed {
-                    err.span_suggestion(
-                        pat.span,
-                        "if this is a `self` type, give it a parameter name",
-                        format!("self: {}", ident),
-                        Applicability::MaybeIncorrect,
-                    );
-                }
-                // Avoid suggesting that `fn foo(HashMap<u32>)` is fixed with a change to
-                // `fn foo(HashMap: TypeName<u32>)`.
-                if self.token != token::Lt {
-                    err.span_suggestion(
-                        pat.span,
-                        "if this was a parameter name, give it a type",
-                        format!("{}: TypeName", ident),
-                        Applicability::HasPlaceholders,
-                    );
-                }
-                err.span_suggestion(
-                    pat.span,
-                    "if this is a type, explicitly ignore the parameter name",
-                    format!("_: {}", ident),
-                    Applicability::MachineApplicable,
-                );
-                err.note("anonymous parameters are removed in the 2018 edition (see RFC 1685)");
-
-                // Don't attempt to recover by using the `X` in `X<Y>` as the parameter name.
-                return if self.token == token::Lt { None } else { Some(ident) };
-            }
-        }
-        None
-    }
-
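Two of the accepted spellings the suggestions above point towards, with the 2015-style anonymous parameter `fn log(String)` being the rejected input; the function names are arbitrary:

    fn log_named(msg: String) {   // "if this was a parameter name, give it a type"
        println!("{}", msg);
    }

    fn log_ignored(_: String) {}  // "if this is a type, explicitly ignore the parameter name"

    fn main() {
        log_named(String::from("hello"));
        log_ignored(String::from("dropped"));
    }
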
-    crate fn recover_arg_parse(&mut self) -> PResult<'a, (P<ast::Pat>, P<ast::Ty>)> {
-        let pat = self.parse_pat(Some("argument name"))?;
-        self.expect(&token::Colon)?;
-        let ty = self.parse_ty()?;
-
-        self.diagnostic()
-            .struct_span_err_with_code(
-                pat.span,
-                "patterns aren't allowed in methods without bodies",
-                DiagnosticId::Error("E0642".into()),
-            )
-            .span_suggestion_short(
-                pat.span,
-                "give this argument a name or use an underscore to ignore it",
-                "_".to_owned(),
-                Applicability::MachineApplicable,
-            )
-            .emit();
-
-        // Pretend the pattern is `_`, to avoid duplicate errors from AST validation.
-        let pat = P(Pat {
-            kind: PatKind::Wild,
-            span: pat.span,
-            id: ast::DUMMY_NODE_ID
-        });
-        Ok((pat, ty))
-    }
-
-    crate fn recover_bad_self_param(
-        &mut self,
-        mut param: ast::Param,
-        is_trait_item: bool,
-    ) -> PResult<'a, ast::Param> {
-        let sp = param.pat.span;
-        param.ty.kind = TyKind::Err;
-        let mut err = self.struct_span_err(sp, "unexpected `self` parameter in function");
-        if is_trait_item {
-            err.span_label(sp, "must be the first associated function parameter");
-        } else {
-            err.span_label(sp, "not valid as function parameter");
-            err.note("`self` is only valid as the first parameter of an associated function");
-        }
-        err.emit();
-        Ok(param)
-    }
-
-    crate fn consume_block(&mut self, delim: token::DelimToken) {
-        let mut brace_depth = 0;
-        loop {
-            if self.eat(&token::OpenDelim(delim)) {
-                brace_depth += 1;
-            } else if self.eat(&token::CloseDelim(delim)) {
-                if brace_depth == 0 {
-                    return;
-                } else {
-                    brace_depth -= 1;
-                    continue;
-                }
-            } else if self.token == token::Eof || self.eat(&token::CloseDelim(token::NoDelim)) {
-                return;
-            } else {
-                self.bump();
-            }
-        }
-    }
-
-    crate fn expected_expression_found(&self) -> DiagnosticBuilder<'a> {
-        let (span, msg) = match (&self.token.kind, self.subparser_name) {
-            (&token::Eof, Some(origin)) => {
-                let sp = self.sess.source_map().next_point(self.token.span);
-                (sp, format!("expected expression, found end of {}", origin))
-            }
-            _ => (self.token.span, format!(
-                "expected expression, found {}",
-                self.this_token_descr(),
-            )),
-        };
-        let mut err = self.struct_span_err(span, &msg);
-        let sp = self.sess.source_map().start_point(self.token.span);
-        if let Some(sp) = self.sess.ambiguous_block_expr_parse.borrow().get(&sp) {
-            self.sess.expr_parentheses_needed(&mut err, *sp, None);
-        }
-        err.span_label(span, "expected expression");
-        err
-    }
-
-    /// Replace duplicated recovered parameters with `_` pattern to avoid unnecessary errors.
-    ///
-    /// This is necessary because at this point we don't know whether we parsed a function with
-    /// anonymous parameters or a function with names but no types. In order to minimize
-    /// unnecessary errors, we assume the parameters are in the shape of `fn foo(a, b, c)` where
-    /// the parameters are *names* (so we don't emit errors about not being able to find `b` in
-    /// the local scope), but if we find the same name multiple times, like in `fn foo(i8, i8)`,
-    /// we deduplicate them to not complain about duplicated parameter names.
-    crate fn deduplicate_recovered_params_names(&self, fn_inputs: &mut Vec<Param>) {
-        let mut seen_inputs = FxHashSet::default();
-        for input in fn_inputs.iter_mut() {
-            let opt_ident = if let (PatKind::Ident(_, ident, _), TyKind::Err) = (
-                &input.pat.kind, &input.ty.kind,
-            ) {
-                Some(*ident)
-            } else {
-                None
-            };
-            if let Some(ident) = opt_ident {
-                if seen_inputs.contains(&ident) {
-                    input.pat.kind = PatKind::Wild;
-                }
-                seen_inputs.insert(ident);
-            }
-        }
-    }
-}
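A standalone sketch of the deduplication step above, using `std::collections::HashSet` in place of `FxHashSet` and plain strings in place of patterns; `dedup_names` is a hypothetical name used only for illustration:

    use std::collections::HashSet;

    // Keep the first occurrence of each recovered name and blank out repeats,
    // so later passes don't complain about the same name twice.
    fn dedup_names(names: &mut Vec<String>) {
        let mut seen = HashSet::new();
        for name in names.iter_mut() {
            if !seen.insert(name.clone()) {
                *name = "_".to_string(); // repeated name: treat it like a wildcard
            }
        }
    }

    fn main() {
        let mut names = vec!["i8".to_string(), "i8".to_string(), "x".to_string()];
        dedup_names(&mut names);
        assert_eq!(names, vec!["i8", "_", "x"]);
    }
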
index ac3feadce3ae60f06e971ff5c8b13ddefddb6079..e6dc9a4c134174bd10158c2efc99848905bed71c 100644 (file)
@@ -1,5 +1,5 @@
-use crate::parse::ParseSess;
 use crate::parse::token::{self, Token, TokenKind};
+use crate::sess::ParseSess;
 use crate::symbol::{sym, Symbol};
 use crate::parse::unescape_error_reporting::{emit_unescape_error, push_escaped_char};
 
index e5ba7e45309dda964ca6e42699720070e00a49b6..b4dd23c9f9b0f90b338589b8ffba372e4321ea2f 100644 (file)
@@ -1,3 +1,4 @@
+use rustc_data_structures::fx::FxHashMap;
 use syntax_pos::Span;
 
 use crate::print::pprust::token_to_string;
@@ -16,6 +17,7 @@ impl<'a> StringReader<'a> {
             unmatched_braces: Vec::new(),
             matching_delim_spans: Vec::new(),
             last_unclosed_found_span: None,
+            last_delim_empty_block_spans: FxHashMap::default()
         };
         let res = tt_reader.parse_all_token_trees();
         (res, tt_reader.unmatched_braces)
@@ -34,6 +36,7 @@ struct TokenTreesReader<'a> {
     /// Used only for error recovery when arriving to EOF with mismatched braces.
     matching_delim_spans: Vec<(token::DelimToken, Span, Span)>,
     last_unclosed_found_span: Option<Span>,
+    last_delim_empty_block_spans: FxHashMap<token::DelimToken, Span>
 }
 
 impl<'a> TokenTreesReader<'a> {
@@ -121,13 +124,20 @@ fn parse_token_tree(&mut self) -> PResult<'a, TreeAndJoint> {
                     // Correct delimiter.
                     token::CloseDelim(d) if d == delim => {
                         let (open_brace, open_brace_span) = self.open_braces.pop().unwrap();
+                        let close_brace_span = self.token.span;
+
+                        if tts.is_empty() {
+                            let empty_block_span = open_brace_span.to(close_brace_span);
+                            self.last_delim_empty_block_spans.insert(delim, empty_block_span);
+                        }
+
                         if self.open_braces.len() == 0 {
                             // Clear up these spans to avoid suggesting them as we've found
                             // properly matched delimiters so far for an entire block.
                             self.matching_delim_spans.clear();
                         } else {
                             self.matching_delim_spans.push(
-                                (open_brace, open_brace_span, self.token.span),
+                                (open_brace, open_brace_span, close_brace_span),
                             );
                         }
                         // Parse the close delimiter.
@@ -193,13 +203,20 @@ fn parse_token_tree(&mut self) -> PResult<'a, TreeAndJoint> {
                     tts.into()
                 ).into())
             },
-            token::CloseDelim(_) => {
+            token::CloseDelim(delim) => {
                 // An unexpected closing delimiter (i.e., there is no
                 // matching opening delimiter).
                 let token_str = token_to_string(&self.token);
                 let msg = format!("unexpected close delimiter: `{}`", token_str);
                 let mut err = self.string_reader.sess.span_diagnostic
                     .struct_span_err(self.token.span, &msg);
+
+                if let Some(span) = self.last_delim_empty_block_spans.remove(&delim) {
+                    err.span_label(
+                        span,
+                        "this block is empty, you might have not meant to close it"
+                    );
+                }
                 err.span_label(self.token.span, "unexpected close delimiter");
                 Err(err)
             },
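The new `last_delim_empty_block_spans` map lets the "unexpected close delimiter" error point back at a preceding empty block. Illustrative only, with the rejected input kept in a comment and a well-formed contrast below:

    // Rejected input the new label targets:
    //     fn main() {}
    //     }
    // The stray `}` now also gets a label on the empty `{}` block.
    fn main() {
        println!("balanced delimiters");
    }
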
index fcd5b2782fd61e14ca85f03f697e3a9c17af66ee..7952e293a532d72eef0914ddc03908ac19f328bf 100644 (file)
@@ -1,14 +1,10 @@
 //! Code related to parsing literals.
 
 use crate::ast::{self, Lit, LitKind};
-use crate::parse::parser::Parser;
-use crate::parse::PResult;
-use crate::parse::token::{self, Token, TokenKind};
-use crate::print::pprust;
+use crate::parse::token::{self, Token};
 use crate::symbol::{kw, sym, Symbol};
-use crate::tokenstream::{TokenStream, TokenTree};
+use crate::tokenstream::TokenTree;
 
-use errors::{Applicability, Handler};
 use log::debug;
 use rustc_data_structures::sync::Lrc;
 use syntax_pos::Span;
     IntTooLarge,
 }
 
-impl LitError {
-    fn report(&self, diag: &Handler, lit: token::Lit, span: Span) {
-        let token::Lit { kind, suffix, .. } = lit;
-        match *self {
-            // `NotLiteral` is not an error by itself, so we don't report
-            // it and give the parser opportunity to try something else.
-            LitError::NotLiteral => {}
-            // `LexerError` *is* an error, but it was already reported
-            // by lexer, so here we don't report it the second time.
-            LitError::LexerError => {}
-            LitError::InvalidSuffix => {
-                expect_no_suffix(
-                    diag, span, &format!("{} {} literal", kind.article(), kind.descr()), suffix
-                );
-            }
-            LitError::InvalidIntSuffix => {
-                let suf = suffix.expect("suffix error with no suffix").as_str();
-                if looks_like_width_suffix(&['i', 'u'], &suf) {
-                    // If it looks like a width, try to be helpful.
-                    let msg = format!("invalid width `{}` for integer literal", &suf[1..]);
-                    diag.struct_span_err(span, &msg)
-                        .help("valid widths are 8, 16, 32, 64 and 128")
-                        .emit();
-                } else {
-                    let msg = format!("invalid suffix `{}` for integer literal", suf);
-                    diag.struct_span_err(span, &msg)
-                        .span_label(span, format!("invalid suffix `{}`", suf))
-                        .help("the suffix must be one of the integral types (`u32`, `isize`, etc)")
-                        .emit();
-                }
-            }
-            LitError::InvalidFloatSuffix => {
-                let suf = suffix.expect("suffix error with no suffix").as_str();
-                if looks_like_width_suffix(&['f'], &suf) {
-                    // If it looks like a width, try to be helpful.
-                    let msg = format!("invalid width `{}` for float literal", &suf[1..]);
-                    diag.struct_span_err(span, &msg)
-                        .help("valid widths are 32 and 64")
-                        .emit();
-                } else {
-                    let msg = format!("invalid suffix `{}` for float literal", suf);
-                    diag.struct_span_err(span, &msg)
-                        .span_label(span, format!("invalid suffix `{}`", suf))
-                        .help("valid suffixes are `f32` and `f64`")
-                        .emit();
-                }
-            }
-            LitError::NonDecimalFloat(base) => {
-                let descr = match base {
-                    16 => "hexadecimal",
-                    8 => "octal",
-                    2 => "binary",
-                    _ => unreachable!(),
-                };
-                diag.struct_span_err(span, &format!("{} float literal is not supported", descr))
-                    .span_label(span, "not supported")
-                    .emit();
-            }
-            LitError::IntTooLarge => {
-                diag.struct_span_err(span, "integer literal is too large")
-                    .emit();
-            }
-        }
-    }
-}
-
 impl LitKind {
     /// Converts literal token into a semantic literal.
     fn from_lit_token(lit: token::Lit) -> Result<LitKind, LitError> {
@@ -204,7 +134,7 @@ pub fn to_lit_token(&self) -> token::Lit {
         let (kind, symbol, suffix) = match *self {
             LitKind::Str(symbol, ast::StrStyle::Cooked) => {
                 // Don't re-intern unless the escaped string is different.
-                let s = &symbol.as_str();
+                let s: &str = &symbol.as_str();
                 let escaped = s.escape_default().to_string();
                 let symbol = if escaped == *s { symbol } else { Symbol::intern(&escaped) };
                 (token::Str, symbol, None)
@@ -254,7 +184,7 @@ pub fn to_lit_token(&self) -> token::Lit {
 
 impl Lit {
     /// Converts literal token into an AST literal.
-    fn from_lit_token(token: token::Lit, span: Span) -> Result<Lit, LitError> {
+    crate fn from_lit_token(token: token::Lit, span: Span) -> Result<Lit, LitError> {
         Ok(Lit { token, kind: LitKind::from_lit_token(token)?, span })
     }
 
@@ -286,109 +216,16 @@ pub fn from_lit_kind(kind: LitKind, span: Span) -> Lit {
         Lit { token: kind.to_lit_token(), kind, span }
     }
 
-    /// Losslessly convert an AST literal into a token stream.
-    crate fn tokens(&self) -> TokenStream {
+    /// Losslessly convert an AST literal into a token tree.
+    crate fn token_tree(&self) -> TokenTree {
         let token = match self.token.kind {
             token::Bool => token::Ident(self.token.symbol, false),
             _ => token::Literal(self.token),
         };
-        TokenTree::token(token, self.span).into()
+        TokenTree::token(token, self.span)
     }
 }
 
-impl<'a> Parser<'a> {
-    /// Matches `lit = true | false | token_lit`.
-    crate fn parse_lit(&mut self) -> PResult<'a, Lit> {
-        let mut recovered = None;
-        if self.token == token::Dot {
-            // Attempt to recover `.4` as `0.4`.
-            recovered = self.look_ahead(1, |next_token| {
-                if let token::Literal(token::Lit { kind: token::Integer, symbol, suffix })
-                        = next_token.kind {
-                    if self.token.span.hi() == next_token.span.lo() {
-                        let s = String::from("0.") + &symbol.as_str();
-                        let kind = TokenKind::lit(token::Float, Symbol::intern(&s), suffix);
-                        return Some(Token::new(kind, self.token.span.to(next_token.span)));
-                    }
-                }
-                None
-            });
-            if let Some(token) = &recovered {
-                self.bump();
-                self.diagnostic()
-                    .struct_span_err(token.span, "float literals must have an integer part")
-                    .span_suggestion(
-                        token.span,
-                        "must have an integer part",
-                        pprust::token_to_string(token),
-                        Applicability::MachineApplicable,
-                    )
-                    .emit();
-            }
-        }
-
-        let token = recovered.as_ref().unwrap_or(&self.token);
-        match Lit::from_token(token) {
-            Ok(lit) => {
-                self.bump();
-                Ok(lit)
-            }
-            Err(LitError::NotLiteral) => {
-                let msg = format!("unexpected token: {}", self.this_token_descr());
-                Err(self.span_fatal(token.span, &msg))
-            }
-            Err(err) => {
-                let (lit, span) = (token.expect_lit(), token.span);
-                self.bump();
-                err.report(&self.sess.span_diagnostic, lit, span);
-                // Pack possible quotes and prefixes from the original literal into
-                // the error literal's symbol so they can be pretty-printed faithfully.
-                let suffixless_lit = token::Lit::new(lit.kind, lit.symbol, None);
-                let symbol = Symbol::intern(&suffixless_lit.to_string());
-                let lit = token::Lit::new(token::Err, symbol, lit.suffix);
-                Lit::from_lit_token(lit, span).map_err(|_| unreachable!())
-            }
-        }
-    }
-}
-
-crate fn expect_no_suffix(diag: &Handler, sp: Span, kind: &str, suffix: Option<Symbol>) {
-    if let Some(suf) = suffix {
-        let mut err = if kind == "a tuple index" &&
-                         [sym::i32, sym::u32, sym::isize, sym::usize].contains(&suf) {
-            // #59553: warn instead of reject out of hand to allow the fix to percolate
-            // through the ecosystem when people fix their macros
-            let mut err = diag.struct_span_warn(
-                sp,
-                &format!("suffixes on {} are invalid", kind),
-            );
-            err.note(&format!(
-                "`{}` is *temporarily* accepted on tuple index fields as it was \
-                    incorrectly accepted on stable for a few releases",
-                suf,
-            ));
-            err.help(
-                "on proc macros, you'll want to use `syn::Index::from` or \
-                    `proc_macro::Literal::*_unsuffixed` for code that will desugar \
-                    to tuple field access",
-            );
-            err.note(
-                "for more context, see https://github.com/rust-lang/rust/issues/60210",
-            );
-            err
-        } else {
-            diag.struct_span_err(sp, &format!("suffixes on {} are invalid", kind))
-        };
-        err.span_label(sp, format!("invalid suffix `{}`", suf));
-        err.emit();
-    }
-}
-
-// Checks if `s` looks like i32 or u1234 etc.
-fn looks_like_width_suffix(first_chars: &[char], s: &str) -> bool {
-    s.len() > 1 && s.starts_with(first_chars) && s[1..].chars().all(|c| c.is_ascii_digit())
-}
-
 fn strip_underscores(symbol: Symbol) -> Symbol {
     // Do not allocate a new string unless necessary.
     let s = symbol.as_str();
@@ -426,15 +263,12 @@ fn integer_lit(symbol: Symbol, suffix: Option<Symbol>) -> Result<LitKind, LitErr
     let symbol = strip_underscores(symbol);
     let s = symbol.as_str();
 
-    let mut base = 10;
-    if s.len() > 1 && s.as_bytes()[0] == b'0' {
-        match s.as_bytes()[1] {
-            b'x' => base = 16,
-            b'o' => base = 8,
-            b'b' => base = 2,
-            _ => {}
-        }
-    }
+    let base = match s.as_bytes() {
+        [b'0', b'x', ..] => 16,
+        [b'0', b'o', ..] => 8,
+        [b'0', b'b', ..] => 2,
+        _ => 10,
+    };
 
     let ty = match suffix {
         Some(suf) => match suf {
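For reference, the slice-pattern radix detection introduced above can be exercised on its own; the following self-contained sketch (the function name `detect_base` is invented for the example) mirrors the match on the underscore-stripped literal text.

// Pick the numeric base from the first two bytes of the literal, defaulting to 10.
fn detect_base(s: &str) -> u32 {
    match s.as_bytes() {
        [b'0', b'x', ..] => 16,
        [b'0', b'o', ..] => 8,
        [b'0', b'b', ..] => 2,
        _ => 10,
    }
}

fn main() {
    assert_eq!(detect_base("0xff"), 16);
    assert_eq!(detect_base("0o77"), 8);
    assert_eq!(detect_base("0b1010"), 2);
    assert_eq!(detect_base("1234"), 10);
    // A bare prefix such as "0x" still selects base 16 here; the real parser
    // rejects the missing digits later, when the digits are actually parsed.
    assert_eq!(detect_base("0x"), 16);
}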
index fa4c10431228ade0ce645b5c270031a585f64592..e6b794a6a990e89bb2c52437ac2a650fa046c717 100644 (file)
@@ -1,40 +1,33 @@
 //! The main parser interface.
 
-use crate::ast::{self, CrateConfig, NodeId};
-use crate::early_buffered_lints::{BufferedEarlyLint, BufferedEarlyLintId};
-use crate::source_map::{SourceMap, FilePathMapping};
-use crate::feature_gate::UnstableFeatures;
-use crate::parse::parser::Parser;
-use crate::parse::parser::emit_unclosed_delims;
-use crate::parse::token::TokenKind;
-use crate::tokenstream::{TokenStream, TokenTree};
+use crate::ast;
+use crate::parse::parser::{Parser, emit_unclosed_delims};
+use crate::parse::token::Nonterminal;
+use crate::tokenstream::{self, TokenStream, TokenTree};
 use crate::print::pprust;
-use crate::symbol::Symbol;
+use crate::sess::ParseSess;
 
-use errors::{Applicability, FatalError, Level, Handler, ColorConfig, Diagnostic, DiagnosticBuilder};
-use rustc_data_structures::fx::{FxHashSet, FxHashMap};
+use errors::{FatalError, Level, Diagnostic, DiagnosticBuilder};
 #[cfg(target_arch = "x86_64")]
 use rustc_data_structures::static_assert_size;
-use rustc_data_structures::sync::{Lrc, Lock, Once};
-use syntax_pos::{Span, SourceFile, FileName, MultiSpan};
-use syntax_pos::edition::Edition;
-use syntax_pos::hygiene::ExpnId;
+use rustc_data_structures::sync::Lrc;
+use syntax_pos::{Span, SourceFile, FileName};
 
 use std::borrow::Cow;
-use std::path::{Path, PathBuf};
+use std::path::Path;
 use std::str;
 
+use log::info;
+
 #[cfg(test)]
 mod tests;
 
 #[macro_use]
 pub mod parser;
-pub mod attr;
 pub mod lexer;
 pub mod token;
 
 crate mod classify;
-crate mod diagnostics;
 crate mod literal;
 crate mod unescape_error_reporting;
 
 #[cfg(target_arch = "x86_64")]
 static_assert_size!(PResult<'_, bool>, 16);
 
-/// Collected spans during parsing for places where a certain feature was
-/// used and should be feature gated accordingly in `check_crate`.
-#[derive(Default)]
-pub struct GatedSpans {
-    /// Spans collected for gating `let_chains`, e.g. `if a && let b = c {}`.
-    pub let_chains: Lock<Vec<Span>>,
-    /// Spans collected for gating `async_closure`, e.g. `async || ..`.
-    pub async_closure: Lock<Vec<Span>>,
-    /// Spans collected for gating `yield e?` expressions (`generators` gate).
-    pub yields: Lock<Vec<Span>>,
-    /// Spans collected for gating `or_patterns`, e.g. `Some(Foo | Bar)`.
-    pub or_patterns: Lock<Vec<Span>>,
-}
-
-/// Info about a parsing session.
-pub struct ParseSess {
-    pub span_diagnostic: Handler,
-    pub unstable_features: UnstableFeatures,
-    pub config: CrateConfig,
-    pub edition: Edition,
-    pub missing_fragment_specifiers: Lock<FxHashSet<Span>>,
-    /// Places where raw identifiers were used. This is used for feature-gating raw identifiers.
-    pub raw_identifier_spans: Lock<Vec<Span>>,
-    /// Used to determine and report recursive module inclusions.
-    included_mod_stack: Lock<Vec<PathBuf>>,
-    source_map: Lrc<SourceMap>,
-    pub buffered_lints: Lock<Vec<BufferedEarlyLint>>,
-    /// Contains the spans of block expressions that could have been incomplete based on the
-    /// operation token that followed it, but that the parser cannot identify without further
-    /// analysis.
-    pub ambiguous_block_expr_parse: Lock<FxHashMap<Span, Span>>,
-    pub injected_crate_name: Once<Symbol>,
-    pub gated_spans: GatedSpans,
-}
-
-impl ParseSess {
-    pub fn new(file_path_mapping: FilePathMapping) -> Self {
-        let cm = Lrc::new(SourceMap::new(file_path_mapping));
-        let handler = Handler::with_tty_emitter(
-            ColorConfig::Auto,
-            true,
-            None,
-            Some(cm.clone()),
-        );
-        ParseSess::with_span_handler(handler, cm)
-    }
-
-    pub fn with_span_handler(handler: Handler, source_map: Lrc<SourceMap>) -> Self {
-        Self {
-            span_diagnostic: handler,
-            unstable_features: UnstableFeatures::from_environment(),
-            config: FxHashSet::default(),
-            edition: ExpnId::root().expn_data().edition,
-            missing_fragment_specifiers: Lock::new(FxHashSet::default()),
-            raw_identifier_spans: Lock::new(Vec::new()),
-            included_mod_stack: Lock::new(vec![]),
-            source_map,
-            buffered_lints: Lock::new(vec![]),
-            ambiguous_block_expr_parse: Lock::new(FxHashMap::default()),
-            injected_crate_name: Once::new(),
-            gated_spans: GatedSpans::default(),
-        }
-    }
-
-    #[inline]
-    pub fn source_map(&self) -> &SourceMap {
-        &self.source_map
-    }
-
-    pub fn buffer_lint<S: Into<MultiSpan>>(&self,
-        lint_id: BufferedEarlyLintId,
-        span: S,
-        id: NodeId,
-        msg: &str,
-    ) {
-        self.buffered_lints.with_lock(|buffered_lints| {
-            buffered_lints.push(BufferedEarlyLint{
-                span: span.into(),
-                id,
-                msg: msg.into(),
-                lint_id,
-            });
-        });
-    }
-
-    /// Extend an error with a suggestion to wrap an expression with parentheses to allow the
-    /// parser to continue parsing the following operation as part of the same expression.
-    pub fn expr_parentheses_needed(
-        &self,
-        err: &mut DiagnosticBuilder<'_>,
-        span: Span,
-        alt_snippet: Option<String>,
-    ) {
-        if let Some(snippet) = self.source_map().span_to_snippet(span).ok().or(alt_snippet) {
-            err.span_suggestion(
-                span,
-                "parentheses are required to parse this as an expression",
-                format!("({})", snippet),
-                Applicability::MachineApplicable,
-            );
-        }
-    }
-}
-
 #[derive(Clone)]
 pub struct Directory<'a> {
     pub path: Cow<'a, Path>,
@@ -382,26 +271,131 @@ pub fn stream_to_parser_with_base_dir<'a>(
     Parser::new(sess, stream, Some(base_dir), true, false, None)
 }
 
-/// A sequence separator.
-pub struct SeqSep {
-    /// The separator token.
-    pub sep: Option<TokenKind>,
-    /// `true` if a trailing separator is allowed.
-    pub trailing_sep_allowed: bool,
+// NOTE(Centril): The following probably shouldn't be here, but it acknowledges the
+// fact that, architecturally, we are relying on parsing here (read on below to understand why).
+
+pub fn nt_to_tokenstream(nt: &Nonterminal, sess: &ParseSess, span: Span) -> TokenStream {
+    // A `Nonterminal` is often a parsed AST item. At this point we now
+    // need to convert the parsed AST to an actual token stream, e.g.
+    // un-parse it basically.
+    //
+    // Unfortunately there's not really a great way to do that in a
+    // guaranteed lossless fashion right now. The fallback here is to just
+    // stringify the AST node and reparse it, but this loses all span
+    // information.
+    //
+    // As a result, some AST nodes are annotated with the token stream they
+    // came from. Here we attempt to extract these lossless token streams
+    // before we fall back to the stringification.
+    let tokens = match *nt {
+        Nonterminal::NtItem(ref item) => {
+            prepend_attrs(sess, &item.attrs, item.tokens.as_ref(), span)
+        }
+        Nonterminal::NtTraitItem(ref item) => {
+            prepend_attrs(sess, &item.attrs, item.tokens.as_ref(), span)
+        }
+        Nonterminal::NtImplItem(ref item) => {
+            prepend_attrs(sess, &item.attrs, item.tokens.as_ref(), span)
+        }
+        Nonterminal::NtIdent(ident, is_raw) => {
+            Some(tokenstream::TokenTree::token(token::Ident(ident.name, is_raw), ident.span).into())
+        }
+        Nonterminal::NtLifetime(ident) => {
+            Some(tokenstream::TokenTree::token(token::Lifetime(ident.name), ident.span).into())
+        }
+        Nonterminal::NtTT(ref tt) => {
+            Some(tt.clone().into())
+        }
+        _ => None,
+    };
+
+    // FIXME(#43081): Avoid this pretty-print + reparse hack
+    let source = pprust::nonterminal_to_string(nt);
+    let filename = FileName::macro_expansion_source_code(&source);
+    let tokens_for_real = parse_stream_from_source_str(filename, source, sess, Some(span));
+
+    // During early phases of the compiler the AST could get modified
+    // directly (e.g., attributes added or removed) and the internal cache
+    // of tokens may not be invalidated or updated. Consequently, if the
+    // "lossless" token stream disagrees with our actual stringification
+    // (which has historically been much more battle-tested) then we go
+    // with the lossy stream anyway (losing span information).
+    //
+    // Note that the comparison isn't `==` here to avoid comparing spans,
+    // but it *also* is a "probable" equality which is a pretty weird
+    // definition. We mostly want to catch actual changes to the AST
+    // like a `#[cfg]` being processed or some weird `macro_rules!`
+    // expansion.
+    //
+    // What we *don't* want to catch is the fact that a user-defined
+    // literal like `0xf` is stringified as `15`, causing the cached token
+    // stream to not be literal `==` token-wise (ignoring spans) to the
+    // token stream we got from stringification.
+    //
+    // Instead the "probably equal" check here is "does each token
+    // recursively have the same discriminant?" We basically don't look at
+    // the token values here and assume that such fine grained token stream
+    // modifications, including adding/removing typically non-semantic
+    // tokens such as extra braces and commas, don't happen.
+    if let Some(tokens) = tokens {
+        if tokens.probably_equal_for_proc_macro(&tokens_for_real) {
+            return tokens
+        }
+        info!("cached tokens found, but they're not \"probably equal\", \
+                going with stringified version");
+    }
+    return tokens_for_real
 }
 
-impl SeqSep {
-    pub fn trailing_allowed(t: TokenKind) -> SeqSep {
-        SeqSep {
-            sep: Some(t),
-            trailing_sep_allowed: true,
-        }
+fn prepend_attrs(
+    sess: &ParseSess,
+    attrs: &[ast::Attribute],
+    tokens: Option<&tokenstream::TokenStream>,
+    span: syntax_pos::Span
+) -> Option<tokenstream::TokenStream> {
+    let tokens = tokens?;
+    if attrs.len() == 0 {
+        return Some(tokens.clone())
     }
+    let mut builder = tokenstream::TokenStreamBuilder::new();
+    for attr in attrs {
+        assert_eq!(attr.style, ast::AttrStyle::Outer,
+                   "inner attributes should prevent cached tokens from existing");
+
+        let source = pprust::attribute_to_string(attr);
+        let macro_filename = FileName::macro_expansion_source_code(&source);
+        if attr.is_sugared_doc {
+            let stream = parse_stream_from_source_str(macro_filename, source, sess, Some(span));
+            builder.push(stream);
+            continue
+        }
 
-    pub fn none() -> SeqSep {
-        SeqSep {
-            sep: None,
-            trailing_sep_allowed: false,
+        // synthesize # [ $path $tokens ] manually here
+        let mut brackets = tokenstream::TokenStreamBuilder::new();
+
+        // For simple paths, push the identifier directly
+        if attr.path.segments.len() == 1 && attr.path.segments[0].args.is_none() {
+            let ident = attr.path.segments[0].ident;
+            let token = token::Ident(ident.name, ident.as_str().starts_with("r#"));
+            brackets.push(tokenstream::TokenTree::token(token, ident.span));
+
+        // ... and for more complicated paths, fall back to a reparse hack that
+        // should eventually be removed.
+        } else {
+            let stream = parse_stream_from_source_str(macro_filename, source, sess, Some(span));
+            brackets.push(stream);
         }
+
+        brackets.push(attr.tokens.clone());
+
+        // The spans we list here for `#` and for `[ ... ]` are both wrong in
+        // that they encompass more than each token, but they are hopefully
+        // "good enough" for now at least.
+        builder.push(tokenstream::TokenTree::token(token::Pound, attr.span));
+        let delim_span = tokenstream::DelimSpan::from_single(attr.span);
+        builder.push(tokenstream::TokenTree::Delimited(
+            delim_span, token::DelimToken::Bracket, brackets.build().into()));
     }
+    builder.push(tokens.clone());
+    Some(builder.build())
 }
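To make the "probably equal" comparison described in the `nt_to_tokenstream` comments above concrete, here is a minimal standalone sketch; the `Tok` enum and `probably_equal` function are invented for the example, and the real check walks nested token trees and ignores spans as well. Tokens are compared by discriminant only, so a cached `0xf` versus a reparsed `15` still counts as equal, while a structural change (a token added or removed) does not.

use std::mem::discriminant;

// A toy token type; only the *kind* of each token matters for the comparison.
#[allow(dead_code)]
enum Tok { Ident(String), Literal(String), Pound }

fn probably_equal(a: &[Tok], b: &[Tok]) -> bool {
    a.len() == b.len()
        && a.iter().zip(b).all(|(x, y)| discriminant(x) == discriminant(y))
}

fn main() {
    let cached = vec![Tok::Ident("x".into()), Tok::Literal("0xf".into())];
    let reparsed = vec![Tok::Ident("x".into()), Tok::Literal("15".into())];
    // Differing literal text is fine: same discriminants, so keep the cached stream.
    assert!(probably_equal(&cached, &reparsed));

    let changed = vec![Tok::Pound, Tok::Ident("x".into()), Tok::Literal("15".into())];
    // A structural change is caught: fall back to the stringified version.
    assert!(!probably_equal(&cached, &changed));
}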
index d4a6e9f6c6bc8f4fa1d9ccd10a024a4151c5d9df..6bbd8be0cb9827d4c15c88cc77727d465ece5afa 100644 (file)
@@ -1,28 +1,28 @@
+pub mod attr;
 mod expr;
 mod pat;
 mod item;
-pub use item::AliasKind;
 mod module;
-pub use module::{ModulePath, ModulePathSuccess};
 mod ty;
 mod path;
 pub use path::PathStyle;
 mod stmt;
 mod generics;
+mod diagnostics;
+use diagnostics::Error;
 
 use crate::ast::{
-    self, DUMMY_NODE_ID, AttrStyle, Attribute, BindingMode, CrateSugar, FnDecl, Ident,
-    IsAsync, MacDelimiter, Mutability, Param, StrStyle, SelfKind, TyKind, Visibility,
-    VisibilityKind, Unsafety,
+    self, DUMMY_NODE_ID, AttrStyle, Attribute, CrateSugar, Ident,
+    IsAsync, MacDelimiter, Mutability, StrStyle, Visibility, VisibilityKind, Unsafety,
 };
-use crate::parse::{ParseSess, PResult, Directory, DirectoryOwnership, SeqSep, literal, token};
-use crate::parse::diagnostics::{Error, dummy_arg};
+use crate::parse::{PResult, Directory, DirectoryOwnership};
 use crate::parse::lexer::UnmatchedBrace;
 use crate::parse::lexer::comments::{doc_comment_style, strip_doc_comment_decoration};
-use crate::parse::token::{Token, TokenKind, DelimToken};
+use crate::parse::token::{self, Token, TokenKind, DelimToken};
 use crate::print::pprust;
 use crate::ptr::P;
-use crate::source_map::{self, respan};
+use crate::sess::ParseSess;
+use crate::source_map::respan;
 use crate::symbol::{kw, sym, Symbol};
 use crate::tokenstream::{self, DelimSpan, TokenTree, TokenStream, TreeAndJoint};
 use crate::ThinVec;
@@ -44,14 +44,14 @@ struct Restrictions: u8 {
 }
 
 #[derive(Clone, Copy, PartialEq, Debug)]
-crate enum SemiColonMode {
+enum SemiColonMode {
     Break,
     Ignore,
     Comma,
 }
 
 #[derive(Clone, Copy, PartialEq, Debug)]
-crate enum BlockMode {
+enum BlockMode {
     Break,
     Ignore,
 }
@@ -124,33 +124,33 @@ pub struct Parser<'a> {
     prev_token_kind: PrevTokenKind,
     restrictions: Restrictions,
     /// Used to determine the path to externally loaded source files.
-    crate directory: Directory<'a>,
+    pub(super) directory: Directory<'a>,
     /// `true` to parse sub-modules in other files.
-    pub recurse_into_file_modules: bool,
+    pub(super) recurse_into_file_modules: bool,
     /// Name of the root module this parser originated from. If `None`, then the
     /// name is not known. This does not change while the parser is descending
     /// into modules, and sub-parsers have new values for this name.
     pub root_module_name: Option<String>,
-    crate expected_tokens: Vec<TokenType>,
+    expected_tokens: Vec<TokenType>,
     token_cursor: TokenCursor,
     desugar_doc_comments: bool,
     /// `true` if we should configure out-of-line modules as we parse.
-    pub cfg_mods: bool,
+    cfg_mods: bool,
     /// This field is used to keep track of how many left angle brackets we have seen. This is
     /// required in order to detect extra leading left angle brackets (`<` characters) and error
     /// appropriately.
     ///
     /// See the comments in the `parse_path_segment` function for more details.
-    crate unmatched_angle_bracket_count: u32,
-    crate max_angle_bracket_count: u32,
+    unmatched_angle_bracket_count: u32,
+    max_angle_bracket_count: u32,
     /// A list of all unclosed delimiters found by the lexer. If an entry is used for error recovery
     /// it gets removed from here. Every entry left at the end gets emitted as an independent
     /// error.
-    crate unclosed_delims: Vec<UnmatchedBrace>,
-    crate last_unexpected_token_span: Option<Span>,
-    crate last_type_ascription: Option<(Span, bool /* likely path typo */)>,
+    pub(super) unclosed_delims: Vec<UnmatchedBrace>,
+    last_unexpected_token_span: Option<Span>,
+    pub last_type_ascription: Option<(Span, bool /* likely path typo */)>,
     /// If present, this `Parser` is not parsing Rust code but rather a macro call.
-    crate subparser_name: Option<&'static str>,
+    subparser_name: Option<&'static str>,
 }
 
 impl<'a> Drop for Parser<'a> {
@@ -194,7 +194,7 @@ struct TokenCursorFrame {
 /// You can find some more example usage of this in the `collect_tokens` method
 /// on the parser.
 #[derive(Clone)]
-crate enum LastToken {
+enum LastToken {
     Collecting(Vec<TreeAndJoint>),
     Was(Option<TreeAndJoint>),
 }
@@ -285,10 +285,10 @@ fn next_desugared(&mut self) -> Token {
             token::NoDelim,
             &if doc_comment_style(&name.as_str()) == AttrStyle::Inner {
                 [TokenTree::token(token::Pound, sp), TokenTree::token(token::Not, sp), body]
-                    .iter().cloned().collect::<TokenStream>().into()
+                    .iter().cloned().collect::<TokenStream>()
             } else {
                 [TokenTree::token(token::Pound, sp), body]
-                    .iter().cloned().collect::<TokenStream>().into()
+                    .iter().cloned().collect::<TokenStream>()
             },
         )));
 
@@ -297,7 +297,7 @@ fn next_desugared(&mut self) -> Token {
 }
 
 #[derive(Clone, PartialEq)]
-crate enum TokenType {
+enum TokenType {
     Token(TokenKind),
     Keyword(Symbol),
     Operator,
@@ -309,7 +309,7 @@ fn next_desugared(&mut self) -> Token {
 }
 
 impl TokenType {
-    crate fn to_string(&self) -> String {
+    fn to_string(&self) -> String {
         match *self {
             TokenType::Token(ref t) => format!("`{}`", pprust::token_kind_to_string(t)),
             TokenType::Keyword(kw) => format!("`{}`", kw),
@@ -324,11 +324,35 @@ impl TokenType {
 }
 
 #[derive(Copy, Clone, Debug)]
-crate enum TokenExpectType {
+enum TokenExpectType {
     Expect,
     NoExpect,
 }
 
+/// A sequence separator.
+struct SeqSep {
+    /// The separator token.
+    sep: Option<TokenKind>,
+    /// `true` if a trailing separator is allowed.
+    trailing_sep_allowed: bool,
+}
+
+impl SeqSep {
+    fn trailing_allowed(t: TokenKind) -> SeqSep {
+        SeqSep {
+            sep: Some(t),
+            trailing_sep_allowed: true,
+        }
+    }
+
+    fn none() -> SeqSep {
+        SeqSep {
+            sep: None,
+            trailing_sep_allowed: false,
+        }
+    }
+}
+
 impl<'a> Parser<'a> {
     pub fn new(
         sess: &'a ParseSess,
@@ -405,7 +429,7 @@ pub fn this_token_to_string(&self) -> String {
         pprust::token_to_string(&self.token)
     }
 
-    crate fn token_descr(&self) -> Option<&'static str> {
+    fn token_descr(&self) -> Option<&'static str> {
         Some(match &self.token.kind {
             _ if self.token.is_special_ident() => "reserved identifier",
             _ if self.token.is_used_keyword() => "keyword",
@@ -415,7 +439,7 @@ pub fn this_token_to_string(&self) -> String {
         })
     }
 
-    crate fn this_token_descr(&self) -> String {
+    pub(super) fn this_token_descr(&self) -> String {
         if let Some(prefix) = self.token_descr() {
             format!("{} `{}`", prefix, self.this_token_to_string())
         } else {
@@ -465,7 +489,7 @@ pub fn expect_one_of(
         }
     }
 
-    pub fn parse_ident(&mut self) -> PResult<'a, ast::Ident> {
+    fn parse_ident(&mut self) -> PResult<'a, ast::Ident> {
         self.parse_ident_common(true)
     }
 
@@ -498,7 +522,7 @@ fn parse_ident_common(&mut self, recover: bool) -> PResult<'a, ast::Ident> {
     ///
     /// This method will automatically add `tok` to `expected_tokens` if `tok` is not
     /// encountered.
-    crate fn check(&mut self, tok: &TokenKind) -> bool {
+    fn check(&mut self, tok: &TokenKind) -> bool {
         let is_present = self.token == *tok;
         if !is_present { self.expected_tokens.push(TokenType::Token(tok.clone())); }
         is_present
@@ -520,7 +544,7 @@ fn check_keyword(&mut self, kw: Symbol) -> bool {
 
     /// If the next token is the given keyword, eats it and returns `true`.
     /// Otherwise, returns `false`. An expectation is also added for diagnostics purposes.
-    pub fn eat_keyword(&mut self, kw: Symbol) -> bool {
+    fn eat_keyword(&mut self, kw: Symbol) -> bool {
         if self.check_keyword(kw) {
             self.bump();
             true
@@ -558,7 +582,7 @@ fn check_or_expected(&mut self, ok: bool, typ: TokenType) -> bool {
         }
     }
 
-    crate fn check_ident(&mut self) -> bool {
+    fn check_ident(&mut self) -> bool {
         self.check_or_expected(self.token.is_ident(), TokenType::Ident)
     }
 
@@ -638,10 +662,6 @@ fn expect_or(&mut self) -> PResult<'a, ()> {
         }
     }
 
-    fn expect_no_suffix(&self, sp: Span, kind: &str, suffix: Option<ast::Name>) {
-        literal::expect_no_suffix(&self.sess.span_diagnostic, sp, kind, suffix)
-    }
-
     /// Attempts to consume a `<`. If `<<` is seen, replaces it with a single
     /// `<` and continue. If `<-` is seen, replaces it with a single `<`
     /// and continue. If a `<` is not seen, returns false.
@@ -727,7 +747,7 @@ fn expect_gt(&mut self) -> PResult<'a, ()> {
     /// Parses a sequence, including the closing delimiter. The function
     /// `f` must consume tokens until reaching the next separator or
     /// closing bracket.
-    pub fn parse_seq_to_end<T>(
+    fn parse_seq_to_end<T>(
         &mut self,
         ket: &TokenKind,
         sep: SeqSep,
@@ -743,7 +763,7 @@ pub fn parse_seq_to_end<T>(
     /// Parses a sequence, not including the closing delimiter. The function
     /// `f` must consume tokens until reaching the next separator or
     /// closing bracket.
-    pub fn parse_seq_to_before_end<T>(
+    fn parse_seq_to_before_end<T>(
         &mut self,
         ket: &TokenKind,
         sep: SeqSep,
@@ -761,7 +781,7 @@ fn expect_any_with_type(&mut self, kets: &[&TokenKind], expect: TokenExpectType)
         })
     }
 
-    crate fn parse_seq_to_before_tokens<T>(
+    fn parse_seq_to_before_tokens<T>(
         &mut self,
         kets: &[&TokenKind],
         sep: SeqSep,
@@ -1007,7 +1027,7 @@ fn parse_or_use_outer_attributes(
         }
     }
 
-    crate fn process_potential_macro_variable(&mut self) {
+    pub fn process_potential_macro_variable(&mut self) {
         self.token = match self.token.kind {
             token::Dollar if self.token.span.from_expansion() &&
                              self.look_ahead(1, |t| t.is_ident()) => {
@@ -1041,7 +1061,7 @@ fn parse_or_use_outer_attributes(
     }
 
     /// Parses a single token tree from the input.
-    crate fn parse_token_tree(&mut self) -> TokenTree {
+    pub fn parse_token_tree(&mut self) -> TokenTree {
         match self.token.kind {
             token::OpenDelim(..) => {
                 let frame = mem::replace(&mut self.token_cursor.frame,
@@ -1094,302 +1114,6 @@ fn with_res<T>(&mut self, res: Restrictions, f: impl FnOnce(&mut Self) -> T) ->
         res
     }
 
-    fn parse_fn_params(
-        &mut self,
-        named_params: bool,
-        allow_c_variadic: bool,
-    ) -> PResult<'a, Vec<Param>> {
-        let sp = self.token.span;
-        let do_not_enforce_named_params_for_c_variadic = |token: &token::Token| {
-            match token.kind {
-                token::DotDotDot => false,
-                _ => named_params,
-            }
-        };
-        let mut c_variadic = false;
-        let (params, _) = self.parse_paren_comma_seq(|p| {
-            match p.parse_param_general(
-                false,
-                false,
-                allow_c_variadic,
-                do_not_enforce_named_params_for_c_variadic,
-            ) {
-                Ok(param) => Ok(
-                    if let TyKind::CVarArgs = param.ty.kind {
-                        c_variadic = true;
-                        if p.token != token::CloseDelim(token::Paren) {
-                            p.span_err(
-                                p.token.span,
-                                "`...` must be the last argument of a C-variadic function",
-                            );
-                            // FIXME(eddyb) this should probably still push `CVarArgs`.
-                            // Maybe AST validation/HIR lowering should emit the above error?
-                            None
-                        } else {
-                            Some(param)
-                        }
-                    } else {
-                        Some(param)
-                    }
-                ),
-                Err(mut e) => {
-                    e.emit();
-                    let lo = p.prev_span;
-                    // Skip every token until next possible arg or end.
-                    p.eat_to_tokens(&[&token::Comma, &token::CloseDelim(token::Paren)]);
-                    // Create a placeholder argument for proper arg count (issue #34264).
-                    let span = lo.to(p.prev_span);
-                    Ok(Some(dummy_arg(Ident::new(kw::Invalid, span))))
-                }
-            }
-        })?;
-
-        let params: Vec<_> = params.into_iter().filter_map(|x| x).collect();
-
-        if c_variadic && params.len() <= 1 {
-            self.span_err(
-                sp,
-                "C-variadic function must be declared with at least one named argument",
-            );
-        }
-
-        Ok(params)
-    }
-
-    /// Parses the parameter list and result type of a function that may have a `self` parameter.
-    fn parse_fn_decl_with_self(
-        &mut self,
-        is_name_required: impl Copy + Fn(&token::Token) -> bool,
-    ) -> PResult<'a, P<FnDecl>> {
-        // Parse the arguments, starting out with `self` being allowed...
-        let mut is_self_allowed = true;
-        let (mut inputs, _): (Vec<_>, _) = self.parse_paren_comma_seq(|p| {
-            let res = p.parse_param_general(is_self_allowed, true, false, is_name_required);
-            // ...but now that we've parsed the first argument, `self` is no longer allowed.
-            is_self_allowed = false;
-            res
-        })?;
-
-        // Replace duplicated recovered params with `_` pattern to avoid unecessary errors.
-        self.deduplicate_recovered_params_names(&mut inputs);
-
-        Ok(P(FnDecl {
-            inputs,
-            output: self.parse_ret_ty(true)?,
-        }))
-    }
-
-    /// Skips unexpected attributes and doc comments in this position and emits an appropriate
-    /// error.
-    /// This version of parse param doesn't necessarily require identifier names.
-    fn parse_param_general(
-        &mut self,
-        is_self_allowed: bool,
-        is_trait_item: bool,
-        allow_c_variadic: bool,
-        is_name_required: impl Fn(&token::Token) -> bool,
-    ) -> PResult<'a, Param> {
-        let lo = self.token.span;
-        let attrs = self.parse_outer_attributes()?;
-
-        // Possibly parse `self`. Recover if we parsed it and it wasn't allowed here.
-        if let Some(mut param) = self.parse_self_param()? {
-            param.attrs = attrs.into();
-            return if is_self_allowed {
-                Ok(param)
-            } else {
-                self.recover_bad_self_param(param, is_trait_item)
-            };
-        }
-
-        let is_name_required = is_name_required(&self.token);
-        let (pat, ty) = if is_name_required || self.is_named_param() {
-            debug!("parse_param_general parse_pat (is_name_required:{})", is_name_required);
-
-            let pat = self.parse_fn_param_pat()?;
-            if let Err(mut err) = self.expect(&token::Colon) {
-                if let Some(ident) = self.parameter_without_type(
-                    &mut err,
-                    pat,
-                    is_name_required,
-                    is_self_allowed,
-                    is_trait_item,
-                ) {
-                    err.emit();
-                    return Ok(dummy_arg(ident));
-                } else {
-                    return Err(err);
-                }
-            }
-
-            self.eat_incorrect_doc_comment_for_param_type();
-            (pat, self.parse_ty_common(true, true, allow_c_variadic)?)
-        } else {
-            debug!("parse_param_general ident_to_pat");
-            let parser_snapshot_before_ty = self.clone();
-            self.eat_incorrect_doc_comment_for_param_type();
-            let mut ty = self.parse_ty_common(true, true, allow_c_variadic);
-            if ty.is_ok() && self.token != token::Comma &&
-               self.token != token::CloseDelim(token::Paren) {
-                // This wasn't actually a type, but a pattern looking like a type,
-                // so we are going to rollback and re-parse for recovery.
-                ty = self.unexpected();
-            }
-            match ty {
-                Ok(ty) => {
-                    let ident = Ident::new(kw::Invalid, self.prev_span);
-                    let bm = BindingMode::ByValue(Mutability::Immutable);
-                    let pat = self.mk_pat_ident(ty.span, bm, ident);
-                    (pat, ty)
-                }
-                // If this is a C-variadic argument and we hit an error, return the error.
-                Err(err) if self.token == token::DotDotDot => return Err(err),
-                // Recover from attempting to parse the argument as a type without pattern.
-                Err(mut err) => {
-                    err.cancel();
-                    mem::replace(self, parser_snapshot_before_ty);
-                    self.recover_arg_parse()?
-                }
-            }
-        };
-
-        let span = lo.to(self.token.span);
-
-        Ok(Param {
-            attrs: attrs.into(),
-            id: ast::DUMMY_NODE_ID,
-            is_placeholder: false,
-            pat,
-            span,
-            ty,
-        })
-    }
-
-    /// Returns the parsed optional self parameter and whether a self shortcut was used.
-    ///
-    /// See `parse_self_param_with_attrs` to collect attributes.
-    fn parse_self_param(&mut self) -> PResult<'a, Option<Param>> {
-        // Extract an identifier *after* having confirmed that the token is one.
-        let expect_self_ident = |this: &mut Self| {
-            match this.token.kind {
-                // Preserve hygienic context.
-                token::Ident(name, _) => {
-                    let span = this.token.span;
-                    this.bump();
-                    Ident::new(name, span)
-                }
-                _ => unreachable!(),
-            }
-        };
-        // Is `self` `n` tokens ahead?
-        let is_isolated_self = |this: &Self, n| {
-            this.is_keyword_ahead(n, &[kw::SelfLower])
-            && this.look_ahead(n + 1, |t| t != &token::ModSep)
-        };
-        // Is `mut self` `n` tokens ahead?
-        let is_isolated_mut_self = |this: &Self, n| {
-            this.is_keyword_ahead(n, &[kw::Mut])
-            && is_isolated_self(this, n + 1)
-        };
-        // Parse `self` or `self: TYPE`. We already know the current token is `self`.
-        let parse_self_possibly_typed = |this: &mut Self, m| {
-            let eself_ident = expect_self_ident(this);
-            let eself_hi = this.prev_span;
-            let eself = if this.eat(&token::Colon) {
-                SelfKind::Explicit(this.parse_ty()?, m)
-            } else {
-                SelfKind::Value(m)
-            };
-            Ok((eself, eself_ident, eself_hi))
-        };
-        // Recover for the grammar `*self`, `*const self`, and `*mut self`.
-        let recover_self_ptr = |this: &mut Self| {
-            let msg = "cannot pass `self` by raw pointer";
-            let span = this.token.span;
-            this.struct_span_err(span, msg)
-                .span_label(span, msg)
-                .emit();
-
-            Ok((SelfKind::Value(Mutability::Immutable), expect_self_ident(this), this.prev_span))
-        };
-
-        // Parse optional `self` parameter of a method.
-        // Only a limited set of initial token sequences is considered `self` parameters; anything
-        // else is parsed as a normal function parameter list, so some lookahead is required.
-        let eself_lo = self.token.span;
-        let (eself, eself_ident, eself_hi) = match self.token.kind {
-            token::BinOp(token::And) => {
-                let eself = if is_isolated_self(self, 1) {
-                    // `&self`
-                    self.bump();
-                    SelfKind::Region(None, Mutability::Immutable)
-                } else if is_isolated_mut_self(self, 1) {
-                    // `&mut self`
-                    self.bump();
-                    self.bump();
-                    SelfKind::Region(None, Mutability::Mutable)
-                } else if self.look_ahead(1, |t| t.is_lifetime()) && is_isolated_self(self, 2) {
-                    // `&'lt self`
-                    self.bump();
-                    let lt = self.expect_lifetime();
-                    SelfKind::Region(Some(lt), Mutability::Immutable)
-                } else if self.look_ahead(1, |t| t.is_lifetime()) && is_isolated_mut_self(self, 2) {
-                    // `&'lt mut self`
-                    self.bump();
-                    let lt = self.expect_lifetime();
-                    self.bump();
-                    SelfKind::Region(Some(lt), Mutability::Mutable)
-                } else {
-                    // `&not_self`
-                    return Ok(None);
-                };
-                (eself, expect_self_ident(self), self.prev_span)
-            }
-            // `*self`
-            token::BinOp(token::Star) if is_isolated_self(self, 1) => {
-                self.bump();
-                recover_self_ptr(self)?
-            }
-            // `*mut self` and `*const self`
-            token::BinOp(token::Star) if
-                self.look_ahead(1, |t| t.is_mutability())
-                && is_isolated_self(self, 2) =>
-            {
-                self.bump();
-                self.bump();
-                recover_self_ptr(self)?
-            }
-            // `self` and `self: TYPE`
-            token::Ident(..) if is_isolated_self(self, 0) => {
-                parse_self_possibly_typed(self, Mutability::Immutable)?
-            }
-            // `mut self` and `mut self: TYPE`
-            token::Ident(..) if is_isolated_mut_self(self, 0) => {
-                self.bump();
-                parse_self_possibly_typed(self, Mutability::Mutable)?
-            }
-            _ => return Ok(None),
-        };
-
-        let eself = source_map::respan(eself_lo.to(eself_hi), eself);
-        Ok(Some(Param::from_self(ThinVec::default(), eself, eself_ident)))
-    }
-
-    fn is_named_param(&self) -> bool {
-        let offset = match self.token.kind {
-            token::Interpolated(ref nt) => match **nt {
-                token::NtPat(..) => return self.look_ahead(1, |t| t == &token::Colon),
-                _ => 0,
-            }
-            token::BinOp(token::And) | token::AndAnd => 1,
-            _ if self.token.is_keyword(kw::Mut) => 1,
-            _ => 0,
-        };
-
-        self.look_ahead(offset, |t| t.is_ident()) &&
-        self.look_ahead(offset + 1, |t| t == &token::Colon)
-    }
-
     fn is_crate_vis(&self) -> bool {
         self.token.is_keyword(kw::Crate) && self.look_ahead(1, |t| t != &token::ModSep)
     }
@@ -1474,12 +1198,14 @@ fn recover_incorrect_vis_restriction(&mut self) -> PResult<'a, ()> {
 `pub(super)`: visible only in the current module's parent
 `pub(in path::to::module)`: visible only on the specified path"##;
 
+        let path_str = pprust::path_to_string(&path);
+
         struct_span_err!(self.sess.span_diagnostic, path.span, E0704, "{}", msg)
             .help(suggestion)
             .span_suggestion(
                 path.span,
-                &format!("make this visible only to module `{}` with `in`", path),
-                format!("in {}", path),
+                &format!("make this visible only to module `{}` with `in`", path_str),
+                format!("in {}", path_str),
                 Applicability::MachineApplicable,
             )
             .emit();
@@ -1487,6 +1213,15 @@ fn recover_incorrect_vis_restriction(&mut self) -> PResult<'a, ()> {
         Ok(())
     }
 
+    /// Parses `extern` followed by an optional ABI string, or nothing.
+    fn parse_extern_abi(&mut self) -> PResult<'a, Abi> {
+        if self.eat_keyword(kw::Extern) {
+            Ok(self.parse_opt_abi()?.unwrap_or(Abi::C))
+        } else {
+            Ok(Abi::Rust)
+        }
+    }
+
     /// Parses a string as an ABI spec on an extern type or module. Consumes
     /// the `extern` keyword, if one is found.
     fn parse_opt_abi(&mut self) -> PResult<'a, Option<Abi>> {
@@ -1559,7 +1294,7 @@ fn collect_tokens<R>(
             // This can happen due to a bad interaction of two unrelated recovery mechanisms with
             // mismatched delimiters *and* recovery lookahead on the likely typo `pub ident(`
             // (#62881).
-            return Ok((ret?, TokenStream::new(vec![])));
+            return Ok((ret?, TokenStream::default()));
         } else {
             &mut self.token_cursor.stack[prev].last_token
         };
@@ -1574,7 +1309,7 @@ fn collect_tokens<R>(
                 // This can happen due to a bad interaction of two unrelated recovery mechanisms
                 // with mismatched delimiters *and* recovery lookahead on the likely typo
                 // `pub ident(` (#62895, different but similar to the case above).
-                return Ok((ret?, TokenStream::new(vec![])));
+                return Ok((ret?, TokenStream::default()));
             }
         };
 
@@ -1612,7 +1347,7 @@ fn is_import_coupler(&mut self) -> bool {
                                    *t == token::BinOp(token::Star))
     }
 
-    pub fn parse_optional_str(&mut self) -> Option<(Symbol, ast::StrStyle, Option<ast::Name>)> {
+    fn parse_optional_str(&mut self) -> Option<(Symbol, ast::StrStyle, Option<ast::Name>)> {
         let ret = match self.token.kind {
             token::Literal(token::Lit { kind: token::Str, symbol, suffix }) =>
                 (symbol, ast::StrStyle::Cooked, suffix),
@@ -1652,7 +1387,7 @@ fn report_invalid_macro_expansion_item(&self) {
             ],
             Applicability::MaybeIncorrect,
         ).span_suggestion(
-            self.sess.source_map.next_point(self.prev_span),
+            self.sess.source_map().next_point(self.prev_span),
             "add a semicolon",
             ';'.to_string(),
             Applicability::MaybeIncorrect,
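The `parse_extern_abi` helper added earlier in this file's diff encodes the surface-language defaults; the following small snippet (item names invented) shows them from the user's side and compiles on its own: a bare `extern` is equivalent to `extern "C"`, and omitting `extern` leaves the default Rust ABI.

// `extern` with no ABI string defaults to the "C" ABI.
extern "C" fn with_explicit_abi() {}
extern fn with_default_abi() {}
// No `extern` at all: the default Rust ABI.
fn plain_rust_abi() {}

fn main() {
    with_explicit_abi();
    with_default_abi();
    plain_rust_abi();
}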
diff --git a/src/libsyntax/parse/parser/attr.rs b/src/libsyntax/parse/parser/attr.rs
new file mode 100644 (file)
index 0000000..188a144
--- /dev/null
@@ -0,0 +1,350 @@
+use super::{SeqSep, PResult, Parser, TokenType, PathStyle};
+use crate::attr;
+use crate::ast;
+use crate::parse::token::{self, Nonterminal, DelimToken};
+use crate::tokenstream::{TokenStream, TokenTree};
+use crate::source_map::Span;
+
+use log::debug;
+
+#[derive(Debug)]
+enum InnerAttributeParsePolicy<'a> {
+    Permitted,
+    NotPermitted { reason: &'a str, saw_doc_comment: bool, prev_attr_sp: Option<Span> },
+}
+
+const DEFAULT_UNEXPECTED_INNER_ATTR_ERR_MSG: &str = "an inner attribute is not \
+                                                     permitted in this context";
+
+impl<'a> Parser<'a> {
+    /// Parses attributes that appear before an item.
+    pub(super) fn parse_outer_attributes(&mut self) -> PResult<'a, Vec<ast::Attribute>> {
+        let mut attrs: Vec<ast::Attribute> = Vec::new();
+        let mut just_parsed_doc_comment = false;
+        loop {
+            debug!("parse_outer_attributes: self.token={:?}", self.token);
+            match self.token.kind {
+                token::Pound => {
+                    let inner_error_reason = if just_parsed_doc_comment {
+                        "an inner attribute is not permitted following an outer doc comment"
+                    } else if !attrs.is_empty() {
+                        "an inner attribute is not permitted following an outer attribute"
+                    } else {
+                        DEFAULT_UNEXPECTED_INNER_ATTR_ERR_MSG
+                    };
+                    let inner_parse_policy =
+                        InnerAttributeParsePolicy::NotPermitted {
+                            reason: inner_error_reason,
+                            saw_doc_comment: just_parsed_doc_comment,
+                            prev_attr_sp: attrs.last().map(|a| a.span)
+                        };
+                    let attr = self.parse_attribute_with_inner_parse_policy(inner_parse_policy)?;
+                    attrs.push(attr);
+                    just_parsed_doc_comment = false;
+                }
+                token::DocComment(s) => {
+                    let attr = attr::mk_sugared_doc_attr(s, self.token.span);
+                    if attr.style != ast::AttrStyle::Outer {
+                        let mut err = self.fatal("expected outer doc comment");
+                        err.note("inner doc comments like this (starting with \
+                                  `//!` or `/*!`) can only appear before items");
+                        return Err(err);
+                    }
+                    attrs.push(attr);
+                    self.bump();
+                    just_parsed_doc_comment = true;
+                }
+                _ => break,
+            }
+        }
+        Ok(attrs)
+    }
+
+    /// Matches `attribute = # ! [ meta_item ]`.
+    ///
+    /// If `permit_inner` is `true`, then a leading `!` indicates an inner
+    /// attribute.
+    pub fn parse_attribute(&mut self, permit_inner: bool) -> PResult<'a, ast::Attribute> {
+        debug!("parse_attribute: permit_inner={:?} self.token={:?}",
+               permit_inner,
+               self.token);
+        let inner_parse_policy = if permit_inner {
+            InnerAttributeParsePolicy::Permitted
+        } else {
+            InnerAttributeParsePolicy::NotPermitted {
+                reason: DEFAULT_UNEXPECTED_INNER_ATTR_ERR_MSG,
+                saw_doc_comment: false,
+                prev_attr_sp: None
+            }
+        };
+        self.parse_attribute_with_inner_parse_policy(inner_parse_policy)
+    }
+
+    /// The same as `parse_attribute`, except it takes in an `InnerAttributeParsePolicy`
+    /// that prescribes how to handle inner attributes.
+    fn parse_attribute_with_inner_parse_policy(
+        &mut self,
+        inner_parse_policy: InnerAttributeParsePolicy<'_>
+    ) -> PResult<'a, ast::Attribute> {
+        debug!("parse_attribute_with_inner_parse_policy: inner_parse_policy={:?} self.token={:?}",
+               inner_parse_policy,
+               self.token);
+        let (span, item, style) = match self.token.kind {
+            token::Pound => {
+                let lo = self.token.span;
+                self.bump();
+
+                if let InnerAttributeParsePolicy::Permitted = inner_parse_policy {
+                    self.expected_tokens.push(TokenType::Token(token::Not));
+                }
+
+                let style = if self.token == token::Not {
+                    self.bump();
+                    ast::AttrStyle::Inner
+                } else {
+                    ast::AttrStyle::Outer
+                };
+
+                self.expect(&token::OpenDelim(token::Bracket))?;
+                let item = self.parse_attr_item()?;
+                self.expect(&token::CloseDelim(token::Bracket))?;
+                let hi = self.prev_span;
+
+                let attr_sp = lo.to(hi);
+
+                // Emit error if inner attribute is encountered and not permitted
+                if style == ast::AttrStyle::Inner {
+                    if let InnerAttributeParsePolicy::NotPermitted { reason,
+                        saw_doc_comment, prev_attr_sp } = inner_parse_policy {
+                        let prev_attr_note = if saw_doc_comment {
+                            "previous doc comment"
+                        } else {
+                            "previous outer attribute"
+                        };
+
+                        let mut diagnostic = self
+                            .diagnostic()
+                            .struct_span_err(attr_sp, reason);
+
+                        if let Some(prev_attr_sp) = prev_attr_sp {
+                            diagnostic
+                                .span_label(attr_sp, "not permitted following an outer attibute")
+                                .span_label(prev_attr_sp, prev_attr_note);
+                        }
+
+                        diagnostic
+                            .note("inner attributes, like `#![no_std]`, annotate the item \
+                                   enclosing them, and are usually found at the beginning of \
+                                   source files. Outer attributes, like `#[test]`, annotate the \
+                                   item following them.")
+                            .emit()
+                    }
+                }
+
+                (attr_sp, item, style)
+            }
+            _ => {
+                let token_str = self.this_token_to_string();
+                return Err(self.fatal(&format!("expected `#`, found `{}`", token_str)));
+            }
+        };
+
+        Ok(ast::Attribute {
+            item,
+            id: attr::mk_attr_id(),
+            style,
+            is_sugared_doc: false,
+            span,
+        })
+    }
+
+    /// Parses an inner part of an attribute (the path and following tokens).
+    /// The tokens must be either a delimited token stream, or empty token stream,
+    /// or the "legacy" key-value form.
+    ///     PATH `(` TOKEN_STREAM `)`
+    ///     PATH `[` TOKEN_STREAM `]`
+    ///     PATH `{` TOKEN_STREAM `}`
+    ///     PATH
+    ///     PATH `=` UNSUFFIXED_LIT
+    /// The delimiters or `=` are still put into the resulting token stream.
+    pub fn parse_attr_item(&mut self) -> PResult<'a, ast::AttrItem> {
+        let item = match self.token.kind {
+            token::Interpolated(ref nt) => match **nt {
+                Nonterminal::NtMeta(ref item) => Some(item.clone()),
+                _ => None,
+            },
+            _ => None,
+        };
+        Ok(if let Some(item) = item {
+            self.bump();
+            item
+        } else {
+            let path = self.parse_path(PathStyle::Mod)?;
+            let tokens = if self.check(&token::OpenDelim(DelimToken::Paren)) ||
+               self.check(&token::OpenDelim(DelimToken::Bracket)) ||
+               self.check(&token::OpenDelim(DelimToken::Brace)) {
+                   self.parse_token_tree().into()
+            } else if self.eat(&token::Eq) {
+                let eq = TokenTree::token(token::Eq, self.prev_span);
+                let mut is_interpolated_expr = false;
+                if let token::Interpolated(nt) = &self.token.kind {
+                    if let token::NtExpr(..) = **nt {
+                        is_interpolated_expr = true;
+                    }
+                }
+                let token_tree = if is_interpolated_expr {
+                    // We need to accept arbitrary interpolated expressions to continue
+                    // supporting things like `doc = $expr` that work on stable.
+                    // Non-literal interpolated expressions are rejected after expansion.
+                    self.parse_token_tree()
+                } else {
+                    self.parse_unsuffixed_lit()?.token_tree()
+                };
+                TokenStream::new(vec![eq.into(), token_tree.into()])
+            } else {
+                TokenStream::default()
+            };
+            ast::AttrItem { path, tokens }
+        })
+    }
+
+    /// Parses attributes that appear after the opening of an item. These should
+    /// be preceded by an exclamation mark, but we accept and warn about one
+    /// terminated by a semicolon.
+    ///
+    /// Matches `inner_attrs*`.
+    crate fn parse_inner_attributes(&mut self) -> PResult<'a, Vec<ast::Attribute>> {
+        let mut attrs: Vec<ast::Attribute> = vec![];
+        loop {
+            match self.token.kind {
+                token::Pound => {
+                    // Don't even try to parse if it's not an inner attribute.
+                    if !self.look_ahead(1, |t| t == &token::Not) {
+                        break;
+                    }
+
+                    let attr = self.parse_attribute(true)?;
+                    assert_eq!(attr.style, ast::AttrStyle::Inner);
+                    attrs.push(attr);
+                }
+                token::DocComment(s) => {
+                    // We need to get the position of this token before we bump.
+                    let attr = attr::mk_sugared_doc_attr(s, self.token.span);
+                    if attr.style == ast::AttrStyle::Inner {
+                        attrs.push(attr);
+                        self.bump();
+                    } else {
+                        break;
+                    }
+                }
+                _ => break,
+            }
+        }
+        Ok(attrs)
+    }
+
+    fn parse_unsuffixed_lit(&mut self) -> PResult<'a, ast::Lit> {
+        let lit = self.parse_lit()?;
+        debug!("checking if {:?} is unsuffixed", lit);
+
+        if !lit.kind.is_unsuffixed() {
+            let msg = "suffixed literals are not allowed in attributes";
+            self.diagnostic().struct_span_err(lit.span, msg)
+                             .help("instead of using a suffixed literal \
+                                    (1u8, 1.0f32, etc.), use an unsuffixed version \
+                                    (1, 1.0, etc.).")
+                             .emit()
+        }
+
+        Ok(lit)
+    }
+
+    /// Parses `cfg_attr(pred, attr_item_list)` where `attr_item_list` is comma-delimited.
+    crate fn parse_cfg_attr(&mut self) -> PResult<'a, (ast::MetaItem, Vec<(ast::AttrItem, Span)>)> {
+        self.expect(&token::OpenDelim(token::Paren))?;
+
+        let cfg_predicate = self.parse_meta_item()?;
+        self.expect(&token::Comma)?;
+
+        // Presumably, the majority of the time there will only be one attr.
+        let mut expanded_attrs = Vec::with_capacity(1);
+
+        while !self.check(&token::CloseDelim(token::Paren)) {
+            let lo = self.token.span.lo();
+            let item = self.parse_attr_item()?;
+            expanded_attrs.push((item, self.prev_span.with_lo(lo)));
+            self.expect_one_of(&[token::Comma], &[token::CloseDelim(token::Paren)])?;
+        }
+
+        self.expect(&token::CloseDelim(token::Paren))?;
+        Ok((cfg_predicate, expanded_attrs))
+    }
+
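For example, the comma-delimited list accepted here allows several attributes behind one predicate; a small sketch with arbitrary attribute choices:

#[cfg_attr(test, allow(dead_code), allow(unused_variables))]
fn helper() {}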
+    /// Matches the following grammar (per RFC 1559).
+    ///
+    ///     meta_item : PATH ( '=' UNSUFFIXED_LIT | '(' meta_item_inner? ')' )? ;
+    ///     meta_item_inner : (meta_item | UNSUFFIXED_LIT) (',' meta_item_inner)? ;
+    pub fn parse_meta_item(&mut self) -> PResult<'a, ast::MetaItem> {
+        let nt_meta = match self.token.kind {
+            token::Interpolated(ref nt) => match **nt {
+                token::NtMeta(ref e) => Some(e.clone()),
+                _ => None,
+            },
+            _ => None,
+        };
+
+        if let Some(item) = nt_meta {
+            return match item.meta(item.path.span) {
+                Some(meta) => {
+                    self.bump();
+                    Ok(meta)
+                }
+                None => self.unexpected(),
+            }
+        }
+
+        let lo = self.token.span;
+        let path = self.parse_path(PathStyle::Mod)?;
+        let kind = self.parse_meta_item_kind()?;
+        let span = lo.to(self.prev_span);
+        Ok(ast::MetaItem { path, kind, span })
+    }
+
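A brief illustration of the three `MetaItemKind` shapes this produces (word, name-value, and list), using commonly available attributes:

#[non_exhaustive]                                // Word
#[deprecated(note = "name-value inside a list")] // List containing a NameValue item
#[cfg(all(unix, target_pointer_width = "64"))]   // List of nested meta items
pub struct Example;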
+    crate fn parse_meta_item_kind(&mut self) -> PResult<'a, ast::MetaItemKind> {
+        Ok(if self.eat(&token::Eq) {
+            ast::MetaItemKind::NameValue(self.parse_unsuffixed_lit()?)
+        } else if self.eat(&token::OpenDelim(token::Paren)) {
+            ast::MetaItemKind::List(self.parse_meta_seq()?)
+        } else {
+            ast::MetaItemKind::Word
+        })
+    }
+
+    /// Matches `meta_item_inner : (meta_item | UNSUFFIXED_LIT) ;`.
+    fn parse_meta_item_inner(&mut self) -> PResult<'a, ast::NestedMetaItem> {
+        match self.parse_unsuffixed_lit() {
+            Ok(lit) => {
+                return Ok(ast::NestedMetaItem::Literal(lit))
+            }
+            Err(ref mut err) => err.cancel(),
+        }
+
+        match self.parse_meta_item() {
+            Ok(mi) => {
+                return Ok(ast::NestedMetaItem::MetaItem(mi))
+            }
+            Err(ref mut err) => err.cancel(),
+        }
+
+        let found = self.this_token_to_string();
+        let msg = format!("expected unsuffixed literal or identifier, found `{}`", found);
+        Err(self.diagnostic().struct_span_err(self.token.span, &msg))
+    }
+
+    /// Matches `meta_seq = ( COMMASEP(meta_item_inner) )`.
+    fn parse_meta_seq(&mut self) -> PResult<'a, Vec<ast::NestedMetaItem>> {
+        self.parse_seq_to_end(&token::CloseDelim(token::Paren),
+                              SeqSep::trailing_allowed(token::Comma),
+                              |p: &mut Parser<'a>| p.parse_meta_item_inner())
+    }
+}
diff --git a/src/libsyntax/parse/parser/diagnostics.rs b/src/libsyntax/parse/parser/diagnostics.rs
new file mode 100644 (file)
index 0000000..06982c7
--- /dev/null
@@ -0,0 +1,1494 @@
+use super::{
+    BlockMode, PathStyle, SemiColonMode, TokenType, TokenExpectType,
+    SeqSep, PResult, Parser
+};
+use crate::ast::{
+    self, Param, BinOpKind, BindingMode, BlockCheckMode, Expr, ExprKind, Ident, Item, ItemKind,
+    Mutability, Pat, PatKind, PathSegment, QSelf, Ty, TyKind,
+};
+use crate::parse::token::{self, TokenKind};
+use crate::print::pprust;
+use crate::ptr::P;
+use crate::symbol::{kw, sym};
+use crate::ThinVec;
+use crate::util::parser::AssocOp;
+use errors::{Applicability, DiagnosticBuilder, DiagnosticId, pluralise};
+use rustc_data_structures::fx::FxHashSet;
+use syntax_pos::{Span, DUMMY_SP, MultiSpan, SpanSnippetError};
+use log::{debug, trace};
+use std::mem;
+
+const TURBOFISH: &str = "use `::<...>` instead of `<...>` to specify type arguments";
+
+/// Creates a placeholder argument.
+pub(super) fn dummy_arg(ident: Ident) -> Param {
+    let pat = P(Pat {
+        id: ast::DUMMY_NODE_ID,
+        kind: PatKind::Ident(BindingMode::ByValue(Mutability::Immutable), ident, None),
+        span: ident.span,
+    });
+    let ty = Ty {
+        kind: TyKind::Err,
+        span: ident.span,
+        id: ast::DUMMY_NODE_ID
+    };
+    Param {
+        attrs: ThinVec::default(),
+        id: ast::DUMMY_NODE_ID,
+        pat,
+        span: ident.span,
+        ty: P(ty),
+        is_placeholder: false,
+    }
+}
+
+pub enum Error {
+    FileNotFoundForModule {
+        mod_name: String,
+        default_path: String,
+        secondary_path: String,
+        dir_path: String,
+    },
+    DuplicatePaths {
+        mod_name: String,
+        default_path: String,
+        secondary_path: String,
+    },
+    UselessDocComment,
+    InclusiveRangeWithNoEnd,
+}
+
+impl Error {
+    fn span_err<S: Into<MultiSpan>>(
+        self,
+        sp: S,
+        handler: &errors::Handler,
+    ) -> DiagnosticBuilder<'_> {
+        match self {
+            Error::FileNotFoundForModule {
+                ref mod_name,
+                ref default_path,
+                ref secondary_path,
+                ref dir_path,
+            } => {
+                let mut err = struct_span_err!(
+                    handler,
+                    sp,
+                    E0583,
+                    "file not found for module `{}`",
+                    mod_name,
+                );
+                err.help(&format!(
+                    "name the file either {} or {} inside the directory \"{}\"",
+                    default_path,
+                    secondary_path,
+                    dir_path,
+                ));
+                err
+            }
+            Error::DuplicatePaths { ref mod_name, ref default_path, ref secondary_path } => {
+                let mut err = struct_span_err!(
+                    handler,
+                    sp,
+                    E0584,
+                    "file for module `{}` found at both {} and {}",
+                    mod_name,
+                    default_path,
+                    secondary_path,
+                );
+                err.help("delete or rename one of them to remove the ambiguity");
+                err
+            }
+            Error::UselessDocComment => {
+                let mut err = struct_span_err!(
+                    handler,
+                    sp,
+                    E0585,
+                    "found a documentation comment that doesn't document anything",
+                );
+                err.help("doc comments must come before what they document, maybe a comment was \
+                          intended with `//`?");
+                err
+            }
+            Error::InclusiveRangeWithNoEnd => {
+                let mut err = struct_span_err!(
+                    handler,
+                    sp,
+                    E0586,
+                    "inclusive range with no end",
+                );
+                err.help("inclusive ranges must be bounded at the end (`..=b` or `a..=b`)");
+                err
+            }
+        }
+    }
+}
+
+pub(super) trait RecoverQPath: Sized + 'static {
+    const PATH_STYLE: PathStyle = PathStyle::Expr;
+    fn to_ty(&self) -> Option<P<Ty>>;
+    fn recovered(qself: Option<QSelf>, path: ast::Path) -> Self;
+}
+
+impl RecoverQPath for Ty {
+    const PATH_STYLE: PathStyle = PathStyle::Type;
+    fn to_ty(&self) -> Option<P<Ty>> {
+        Some(P(self.clone()))
+    }
+    fn recovered(qself: Option<QSelf>, path: ast::Path) -> Self {
+        Self {
+            span: path.span,
+            kind: TyKind::Path(qself, path),
+            id: ast::DUMMY_NODE_ID,
+        }
+    }
+}
+
+impl RecoverQPath for Pat {
+    fn to_ty(&self) -> Option<P<Ty>> {
+        self.to_ty()
+    }
+    fn recovered(qself: Option<QSelf>, path: ast::Path) -> Self {
+        Self {
+            span: path.span,
+            kind: PatKind::Path(qself, path),
+            id: ast::DUMMY_NODE_ID,
+        }
+    }
+}
+
+impl RecoverQPath for Expr {
+    fn to_ty(&self) -> Option<P<Ty>> {
+        self.to_ty()
+    }
+    fn recovered(qself: Option<QSelf>, path: ast::Path) -> Self {
+        Self {
+            span: path.span,
+            kind: ExprKind::Path(qself, path),
+            attrs: ThinVec::new(),
+            id: ast::DUMMY_NODE_ID,
+        }
+    }
+}
+
+impl<'a> Parser<'a> {
+    pub fn fatal(&self, m: &str) -> DiagnosticBuilder<'a> {
+        self.span_fatal(self.token.span, m)
+    }
+
+    crate fn span_fatal<S: Into<MultiSpan>>(&self, sp: S, m: &str) -> DiagnosticBuilder<'a> {
+        self.sess.span_diagnostic.struct_span_fatal(sp, m)
+    }
+
+    pub(super) fn span_fatal_err<S: Into<MultiSpan>>(
+        &self,
+        sp: S,
+        err: Error,
+    ) -> DiagnosticBuilder<'a> {
+        err.span_err(sp, self.diagnostic())
+    }
+
+    pub(super) fn bug(&self, m: &str) -> ! {
+        self.sess.span_diagnostic.span_bug(self.token.span, m)
+    }
+
+    pub(super) fn span_err<S: Into<MultiSpan>>(&self, sp: S, m: &str) {
+        self.sess.span_diagnostic.span_err(sp, m)
+    }
+
+    pub fn struct_span_err<S: Into<MultiSpan>>(&self, sp: S, m: &str) -> DiagnosticBuilder<'a> {
+        self.sess.span_diagnostic.struct_span_err(sp, m)
+    }
+
+    pub fn span_bug<S: Into<MultiSpan>>(&self, sp: S, m: &str) -> ! {
+        self.sess.span_diagnostic.span_bug(sp, m)
+    }
+
+    pub(super) fn diagnostic(&self) -> &'a errors::Handler {
+        &self.sess.span_diagnostic
+    }
+
+    pub(super) fn span_to_snippet(&self, span: Span) -> Result<String, SpanSnippetError> {
+        self.sess.source_map().span_to_snippet(span)
+    }
+
+    pub(super) fn expected_ident_found(&self) -> DiagnosticBuilder<'a> {
+        let mut err = self.struct_span_err(
+            self.token.span,
+            &format!("expected identifier, found {}", self.this_token_descr()),
+        );
+        if let token::Ident(name, false) = self.token.kind {
+            if Ident::new(name, self.token.span).is_raw_guess() {
+                err.span_suggestion(
+                    self.token.span,
+                    "you can escape reserved keywords to use them as identifiers",
+                    format!("r#{}", name),
+                    Applicability::MaybeIncorrect,
+                );
+            }
+        }
+        if let Some(token_descr) = self.token_descr() {
+            err.span_label(self.token.span, format!("expected identifier, found {}", token_descr));
+        } else {
+            err.span_label(self.token.span, "expected identifier");
+            if self.token == token::Comma && self.look_ahead(1, |t| t.is_ident()) {
+                err.span_suggestion(
+                    self.token.span,
+                    "remove this comma",
+                    String::new(),
+                    Applicability::MachineApplicable,
+                );
+            }
+        }
+        err
+    }
+
+    pub(super) fn expected_one_of_not_found(
+        &mut self,
+        edible: &[TokenKind],
+        inedible: &[TokenKind],
+    ) -> PResult<'a, bool /* recovered */> {
+        fn tokens_to_string(tokens: &[TokenType]) -> String {
+            let mut i = tokens.iter();
+            // This might be a sign we need a connect method on `Iterator`.
+            let b = i.next()
+                     .map_or(String::new(), |t| t.to_string());
+            i.enumerate().fold(b, |mut b, (i, a)| {
+                if tokens.len() > 2 && i == tokens.len() - 2 {
+                    b.push_str(", or ");
+                } else if tokens.len() == 2 && i == tokens.len() - 2 {
+                    b.push_str(" or ");
+                } else {
+                    b.push_str(", ");
+                }
+                b.push_str(&a.to_string());
+                b
+            })
+        }
+
+        let mut expected = edible.iter()
+            .map(|x| TokenType::Token(x.clone()))
+            .chain(inedible.iter().map(|x| TokenType::Token(x.clone())))
+            .chain(self.expected_tokens.iter().cloned())
+            .collect::<Vec<_>>();
+        expected.sort_by_cached_key(|x| x.to_string());
+        expected.dedup();
+        let expect = tokens_to_string(&expected[..]);
+        let actual = self.this_token_to_string();
+        let (msg_exp, (label_sp, label_exp)) = if expected.len() > 1 {
+            let short_expect = if expected.len() > 6 {
+                format!("{} possible tokens", expected.len())
+            } else {
+                expect.clone()
+            };
+            (format!("expected one of {}, found `{}`", expect, actual),
+                (self.sess.source_map().next_point(self.prev_span),
+                format!("expected one of {} here", short_expect)))
+        } else if expected.is_empty() {
+            (format!("unexpected token: `{}`", actual),
+                (self.prev_span, "unexpected token after this".to_string()))
+        } else {
+            (format!("expected {}, found `{}`", expect, actual),
+                (self.sess.source_map().next_point(self.prev_span),
+                format!("expected {} here", expect)))
+        };
+        self.last_unexpected_token_span = Some(self.token.span);
+        let mut err = self.fatal(&msg_exp);
+        if self.token.is_ident_named(sym::and) {
+            err.span_suggestion_short(
+                self.token.span,
+                "use `&&` instead of `and` for the boolean operator",
+                "&&".to_string(),
+                Applicability::MaybeIncorrect,
+            );
+        }
+        if self.token.is_ident_named(sym::or) {
+            err.span_suggestion_short(
+                self.token.span,
+                "use `||` instead of `or` for the boolean operator",
+                "||".to_string(),
+                Applicability::MaybeIncorrect,
+            );
+        }
+        let sp = if self.token == token::Eof {
+            // This is EOF; don't want to point at the following char, but rather the last token.
+            self.prev_span
+        } else {
+            label_sp
+        };
+        match self.recover_closing_delimiter(&expected.iter().filter_map(|tt| match tt {
+            TokenType::Token(t) => Some(t.clone()),
+            _ => None,
+        }).collect::<Vec<_>>(), err) {
+            Err(e) => err = e,
+            Ok(recovered) => {
+                return Ok(recovered);
+            }
+        }
+
+        let is_semi_suggestable = expected.iter().any(|t| match t {
+            TokenType::Token(token::Semi) => true, // We expect a `;` here.
+            _ => false,
+        }) && ( // A `;` would be expected before the current keyword.
+            self.token.is_keyword(kw::Break) ||
+            self.token.is_keyword(kw::Continue) ||
+            self.token.is_keyword(kw::For) ||
+            self.token.is_keyword(kw::If) ||
+            self.token.is_keyword(kw::Let) ||
+            self.token.is_keyword(kw::Loop) ||
+            self.token.is_keyword(kw::Match) ||
+            self.token.is_keyword(kw::Return) ||
+            self.token.is_keyword(kw::While)
+        );
+        let sm = self.sess.source_map();
+        match (sm.lookup_line(self.token.span.lo()), sm.lookup_line(sp.lo())) {
+            (Ok(ref a), Ok(ref b)) if a.line != b.line && is_semi_suggestable => {
+                // The spans are in different lines, expected `;` and found `let` or `return`.
+                // High likelihood that it is only a missing `;`.
+                err.span_suggestion_short(
+                    label_sp,
+                    "a semicolon may be missing here",
+                    ";".to_string(),
+                    Applicability::MaybeIncorrect,
+                );
+                err.emit();
+                return Ok(true);
+            }
+            (Ok(ref a), Ok(ref b)) if a.line == b.line => {
+                // When the spans are in the same line, it means that the only content between
+                // them is whitespace, point at the found token in that case:
+                //
+                // X |     () => { syntax error };
+                //   |                    ^^^^^ expected one of 8 possible tokens here
+                //
+                // instead of having:
+                //
+                // X |     () => { syntax error };
+                //   |                   -^^^^^ unexpected token
+                //   |                   |
+                //   |                   expected one of 8 possible tokens here
+                err.span_label(self.token.span, label_exp);
+            }
+            _ if self.prev_span == syntax_pos::DUMMY_SP => {
+                // Account for macro context where the previous span might not be
+                // available to avoid incorrect output (#54841).
+                err.span_label(self.token.span, "unexpected token");
+            }
+            _ => {
+                err.span_label(sp, label_exp);
+                err.span_label(self.token.span, "unexpected token");
+            }
+        }
+        self.maybe_annotate_with_ascription(&mut err, false);
+        Err(err)
+    }
+
+    pub fn maybe_annotate_with_ascription(
+        &self,
+        err: &mut DiagnosticBuilder<'_>,
+        maybe_expected_semicolon: bool,
+    ) {
+        if let Some((sp, likely_path)) = self.last_type_ascription {
+            let sm = self.sess.source_map();
+            let next_pos = sm.lookup_char_pos(self.token.span.lo());
+            let op_pos = sm.lookup_char_pos(sp.hi());
+
+            let allow_unstable = self.sess.unstable_features.is_nightly_build();
+
+            if likely_path {
+                err.span_suggestion(
+                    sp,
+                    "maybe write a path separator here",
+                    "::".to_string(),
+                    if allow_unstable {
+                        Applicability::MaybeIncorrect
+                    } else {
+                        Applicability::MachineApplicable
+                    },
+                );
+            } else if op_pos.line != next_pos.line && maybe_expected_semicolon {
+                err.span_suggestion(
+                    sp,
+                    "try using a semicolon",
+                    ";".to_string(),
+                    Applicability::MaybeIncorrect,
+                );
+            } else if allow_unstable {
+                err.span_label(sp, "tried to parse a type due to this type ascription");
+            } else {
+                err.span_label(sp, "tried to parse a type due to this");
+            }
+            if allow_unstable {
+                // Give extra information about type ascription only if it's a nightly compiler.
+                err.note("`#![feature(type_ascription)]` lets you annotate an expression with a \
+                          type: `<expr>: <type>`");
+                err.note("for more information, see \
+                          https://github.com/rust-lang/rust/issues/23416");
+            }
+        }
+    }
+
+    /// Eats and discards tokens until one of `kets` is encountered. Respects token trees,
+    /// passes through any errors encountered. Used for error recovery.
+    pub(super) fn eat_to_tokens(&mut self, kets: &[&TokenKind]) {
+        if let Err(ref mut err) = self.parse_seq_to_before_tokens(
+            kets,
+            SeqSep::none(),
+            TokenExpectType::Expect,
+            |p| Ok(p.parse_token_tree()),
+        ) {
+            err.cancel();
+        }
+    }
+
+    /// This function checks if there are trailing angle brackets and produces
+    /// a diagnostic to suggest removing them.
+    ///
+    /// ```ignore (diagnostic)
+    /// let _ = vec![1, 2, 3].into_iter().collect::<Vec<usize>>>>();
+    ///                                                        ^^ help: remove extra angle brackets
+    /// ```
+    pub(super) fn check_trailing_angle_brackets(&mut self, segment: &PathSegment, end: TokenKind) {
+        // This function is intended to be invoked after parsing a path segment where there are two
+        // cases:
+        //
+        // 1. A specific token is expected after the path segment.
+        //    e.g. `x.foo(`, `x.foo::<u32>(` (parenthesis - method call),
+        //        `Foo::`, or `Foo::<Bar>::` (mod sep - continued path).
+        // 2. No specific token is expected after the path segment.
+        //    e.g. `x.foo` (field access)
+        //
+        // This function is called after parsing `.foo` and before parsing the token `end` (if
+        // present). This includes any angle bracket arguments, such as `.foo::<u32>` or
+        // `Foo::<Bar>`.
+
+        // We only care about trailing angle brackets if we previously parsed angle bracket
+        // arguments. This helps stop us incorrectly suggesting that extra angle brackets be
+        // removed in this case:
+        //
+        // `x.foo >> (3)` (where `x.foo` is a `u32` for example)
+        //
+        // This case is particularly tricky as we won't notice it just looking at the tokens -
+        // it will appear the same (in terms of upcoming tokens) as below (since the `::<u32>` will
+        // have already been parsed):
+        //
+        // `x.foo::<u32>>>(3)`
+        let parsed_angle_bracket_args = segment.args
+            .as_ref()
+            .map(|args| args.is_angle_bracketed())
+            .unwrap_or(false);
+
+        debug!(
+            "check_trailing_angle_brackets: parsed_angle_bracket_args={:?}",
+            parsed_angle_bracket_args,
+        );
+        if !parsed_angle_bracket_args {
+            return;
+        }
+
+        // Keep the span at the start so we can highlight the sequence of `>` characters to be
+        // removed.
+        let lo = self.token.span;
+
+        // We need to look-ahead to see if we have `>` characters without moving the cursor forward
+        // (since we might have the field access case and the characters we're eating are
+        // actual operators and not trailing characters - i.e. `x.foo >> 3`).
+        let mut position = 0;
+
+        // We can encounter `>` or `>>` tokens in any order, so we need to keep track of how
+        // many of each (so we can correctly pluralize our error messages) and continue to
+        // advance.
+        let mut number_of_shr = 0;
+        let mut number_of_gt = 0;
+        while self.look_ahead(position, |t| {
+            trace!("check_trailing_angle_brackets: t={:?}", t);
+            if *t == token::BinOp(token::BinOpToken::Shr) {
+                number_of_shr += 1;
+                true
+            } else if *t == token::Gt {
+                number_of_gt += 1;
+                true
+            } else {
+                false
+            }
+        }) {
+            position += 1;
+        }
+
+        // If we didn't find any trailing `>` characters, then we have nothing to error about.
+        debug!(
+            "check_trailing_angle_brackets: number_of_gt={:?} number_of_shr={:?}",
+            number_of_gt, number_of_shr,
+        );
+        if number_of_gt < 1 && number_of_shr < 1 {
+            return;
+        }
+
+        // Finally, double check that we have our end token as otherwise this is the
+        // second case.
+        if self.look_ahead(position, |t| {
+            trace!("check_trailing_angle_brackets: t={:?}", t);
+            *t == end
+        }) {
+            // Eat from where we started until the end token so that parsing can continue
+            // as if we didn't have those extra angle brackets.
+            self.eat_to_tokens(&[&end]);
+            let span = lo.until(self.token.span);
+
+            let total_num_of_gt = number_of_gt + number_of_shr * 2;
+            self.diagnostic()
+                .struct_span_err(
+                    span,
+                    &format!("unmatched angle bracket{}", pluralise!(total_num_of_gt)),
+                )
+                .span_suggestion(
+                    span,
+                    &format!("remove extra angle bracket{}", pluralise!(total_num_of_gt)),
+                    String::new(),
+                    Applicability::MachineApplicable,
+                )
+                .emit();
+        }
+    }
+
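Concretely, this recovery targets calls like the hypothetical snippet below; the machine-applicable suggestion simply removes the stray `>`:

fn main() {
    // `(0..3).collect::<Vec<usize>>>()` would report "unmatched angle bracket";
    // the corrected call is:
    let v = (0..3).collect::<Vec<usize>>();
    assert_eq!(v, vec![0, 1, 2]);
}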
+    /// Produces an error if comparison operators are chained (RFC #558).
+    /// We only need to check the LHS, not the RHS, because all comparison ops have the same
+    /// precedence (see `fn precedence`) and are left-associative (see `fn fixity`).
+    ///
+    /// This can also be hit if someone incorrectly writes `foo<bar>()` when they should have used
+    /// the turbofish (`foo::<bar>()`) syntax. We attempt some heuristic recovery if that is the
+    /// case.
+    ///
+    /// Keep in mind that given that `outer_op.is_comparison()` holds and comparison ops are left
+    /// associative we can infer that we have:
+    ///
+    ///           outer_op
+    ///           /   \
+    ///     inner_op   r2
+    ///        /  \
+    ///     l1    r1
+    pub(super) fn check_no_chained_comparison(
+        &mut self,
+        lhs: &Expr,
+        outer_op: &AssocOp,
+    ) -> PResult<'a, Option<P<Expr>>> {
+        debug_assert!(
+            outer_op.is_comparison(),
+            "check_no_chained_comparison: {:?} is not comparison",
+            outer_op,
+        );
+
+        let mk_err_expr = |this: &Self, span| {
+            Ok(Some(this.mk_expr(span, ExprKind::Err, ThinVec::new())))
+        };
+
+        match lhs.kind {
+            ExprKind::Binary(op, _, _) if op.node.is_comparison() => {
+                // Respan to include both operators.
+                let op_span = op.span.to(self.prev_span);
+                let mut err = self.struct_span_err(
+                    op_span,
+                    "chained comparison operators require parentheses",
+                );
+
+                let suggest = |err: &mut DiagnosticBuilder<'_>| {
+                    err.span_suggestion_verbose(
+                        op_span.shrink_to_lo(),
+                        TURBOFISH,
+                        "::".to_string(),
+                        Applicability::MaybeIncorrect,
+                    );
+                };
+
+                if op.node == BinOpKind::Lt &&
+                    *outer_op == AssocOp::Less ||  // Include `<` to provide this recommendation
+                    *outer_op == AssocOp::Greater  // even in a case like the following:
+                {                                  //     Foo<Bar<Baz<Qux, ()>>>
+                    if *outer_op == AssocOp::Less {
+                        let snapshot = self.clone();
+                        self.bump();
+                        // So far we have parsed `foo<bar<`, consume the rest of the type args.
+                        let modifiers = [
+                            (token::Lt, 1),
+                            (token::Gt, -1),
+                            (token::BinOp(token::Shr), -2),
+                        ];
+                        self.consume_tts(1, &modifiers[..]);
+
+                        if !&[
+                            token::OpenDelim(token::Paren),
+                            token::ModSep,
+                        ].contains(&self.token.kind) {
+                            // We don't have `foo< bar >(` or `foo< bar >::`, so we rewind the
+                            // parser and bail out.
+                            mem::replace(self, snapshot.clone());
+                        }
+                    }
+                    return if token::ModSep == self.token.kind {
+                        // We have some certainty that this was a bad turbofish at this point.
+                        // `foo< bar >::`
+                        suggest(&mut err);
+
+                        let snapshot = self.clone();
+                        self.bump(); // `::`
+
+                        // Consume the rest of the likely `foo<bar>::new()` or return at `foo<bar>`.
+                        match self.parse_expr() {
+                            Ok(_) => {
+                                // 99% certain that the suggestion is correct, continue parsing.
+                                err.emit();
+                                // FIXME: actually check that the two expressions in the binop are
+                                // paths and resynthesize new fn call expression instead of using
+                                // `ExprKind::Err` placeholder.
+                                mk_err_expr(self, lhs.span.to(self.prev_span))
+                            }
+                            Err(mut expr_err) => {
+                                expr_err.cancel();
+                                // Not entirely sure now, but we bubble the error up with the
+                                // suggestion.
+                                mem::replace(self, snapshot);
+                                Err(err)
+                            }
+                        }
+                    } else if token::OpenDelim(token::Paren) == self.token.kind {
+                        // We have high certainty that this was a bad turbofish at this point.
+                        // `foo< bar >(`
+                        suggest(&mut err);
+                        // Consume the fn call arguments.
+                        match self.consume_fn_args() {
+                            Err(()) => Err(err),
+                            Ok(()) => {
+                                err.emit();
+                                // FIXME: actually check that the two expressions in the binop are
+                                // paths and resynthesize new fn call expression instead of using
+                                // `ExprKind::Err` placeholder.
+                                mk_err_expr(self, lhs.span.to(self.prev_span))
+                            }
+                        }
+                    } else {
+                        // All we know is that this is `foo < bar >` and *nothing* else. Try to
+                        // be helpful, but don't attempt to recover.
+                        err.help(TURBOFISH);
+                        err.help("or use `(...)` if you meant to specify fn arguments");
+                        // These cases cause too many knock-on errors, bail out (#61329).
+                        Err(err)
+                    };
+                }
+                err.emit();
+            }
+            _ => {}
+        }
+        Ok(None)
+    }
+
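A short sketch of the two situations this check distinguishes, with placeholder values:

fn main() {
    let (a, b, c) = (1, 2, 3);
    // Chained comparison: `a < b < c` is rejected, so state the intent explicitly.
    let ordered = a < b && b < c;
    // Missing turbofish: `identity<u32>(7)` would parse as comparisons;
    // the recovery suggests the `::<...>` form instead.
    let n = std::convert::identity::<u32>(7);
    assert!(ordered);
    assert_eq!(n, 7);
}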
+    fn consume_fn_args(&mut self) -> Result<(), ()> {
+        let snapshot = self.clone();
+        self.bump(); // `(`
+
+        // Consume the fn call arguments.
+        let modifiers = [
+            (token::OpenDelim(token::Paren), 1),
+            (token::CloseDelim(token::Paren), -1),
+        ];
+        self.consume_tts(1, &modifiers[..]);
+
+        if self.token.kind == token::Eof {
+            // Not entirely sure that what we consumed were fn arguments, rollback.
+            mem::replace(self, snapshot);
+            Err(())
+        } else {
+            // 99% certain that the suggestion is correct, continue parsing.
+            Ok(())
+        }
+    }
+
+    pub(super) fn maybe_report_ambiguous_plus(
+        &mut self,
+        allow_plus: bool,
+        impl_dyn_multi: bool,
+        ty: &Ty,
+    ) {
+        if !allow_plus && impl_dyn_multi {
+            let sum_with_parens = format!("({})", pprust::ty_to_string(&ty));
+            self.struct_span_err(ty.span, "ambiguous `+` in a type")
+                .span_suggestion(
+                    ty.span,
+                    "use parentheses to disambiguate",
+                    sum_with_parens,
+                    Applicability::MachineApplicable,
+                )
+                .emit();
+        }
+    }
+
+    pub(super) fn maybe_recover_from_bad_type_plus(
+        &mut self,
+        allow_plus: bool,
+        ty: &Ty,
+    ) -> PResult<'a, ()> {
+        // Do not add `+` to expected tokens.
+        if !allow_plus || !self.token.is_like_plus() {
+            return Ok(());
+        }
+
+        self.bump(); // `+`
+        let bounds = self.parse_generic_bounds(None)?;
+        let sum_span = ty.span.to(self.prev_span);
+
+        let mut err = struct_span_err!(
+            self.sess.span_diagnostic,
+            sum_span,
+            E0178,
+            "expected a path on the left-hand side of `+`, not `{}`",
+            pprust::ty_to_string(ty)
+        );
+
+        match ty.kind {
+            TyKind::Rptr(ref lifetime, ref mut_ty) => {
+                let sum_with_parens = pprust::to_string(|s| {
+                    s.s.word("&");
+                    s.print_opt_lifetime(lifetime);
+                    s.print_mutability(mut_ty.mutbl);
+                    s.popen();
+                    s.print_type(&mut_ty.ty);
+                    s.print_type_bounds(" +", &bounds);
+                    s.pclose()
+                });
+                err.span_suggestion(
+                    sum_span,
+                    "try adding parentheses",
+                    sum_with_parens,
+                    Applicability::MachineApplicable,
+                );
+            }
+            TyKind::Ptr(..) | TyKind::BareFn(..) => {
+                err.span_label(sum_span, "perhaps you forgot parentheses?");
+            }
+            _ => {
+                err.span_label(sum_span, "expected a path");
+            }
+        }
+        err.emit();
+        Ok(())
+    }
+
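For reference, the E0178 case handled above arises from types such as `&dyn Debug + Send`; a compiling sketch with the suggested parentheses added:

use std::fmt::Debug;

fn takes(v: &(dyn Debug + Send)) {
    let _ = v;
}

fn main() {
    takes(&1u8);
}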
+    /// Tries to recover from associated item paths like `[T]::AssocItem` / `(T, U)::AssocItem`.
+    /// Attempts to convert the base expression/pattern/type into a type, parses the `::AssocItem`
+    /// tail, and combines them into a `<Ty>::AssocItem` expression/pattern/type.
+    pub(super) fn maybe_recover_from_bad_qpath<T: RecoverQPath>(
+        &mut self,
+        base: P<T>,
+        allow_recovery: bool,
+    ) -> PResult<'a, P<T>> {
+        // Do not add `::` to expected tokens.
+        if allow_recovery && self.token == token::ModSep {
+            if let Some(ty) = base.to_ty() {
+                return self.maybe_recover_from_bad_qpath_stage_2(ty.span, ty);
+            }
+        }
+        Ok(base)
+    }
+
+    /// Given an already parsed `Ty`, parses the `::AssocItem` tail and
+    /// combines them into a `<Ty>::AssocItem` expression/pattern/type.
+    pub(super) fn maybe_recover_from_bad_qpath_stage_2<T: RecoverQPath>(
+        &mut self,
+        ty_span: Span,
+        ty: P<Ty>,
+    ) -> PResult<'a, P<T>> {
+        self.expect(&token::ModSep)?;
+
+        let mut path = ast::Path {
+            segments: Vec::new(),
+            span: DUMMY_SP,
+        };
+        self.parse_path_segments(&mut path.segments, T::PATH_STYLE)?;
+        path.span = ty_span.to(self.prev_span);
+
+        let ty_str = self
+            .span_to_snippet(ty_span)
+            .unwrap_or_else(|_| pprust::ty_to_string(&ty));
+        self.diagnostic()
+            .struct_span_err(path.span, "missing angle brackets in associated item path")
+            .span_suggestion(
+                // This is a best-effort recovery.
+                path.span,
+                "try",
+                format!("<{}>::{}", ty_str, pprust::path_to_string(&path)),
+                Applicability::MaybeIncorrect,
+            )
+            .emit();
+
+        let path_span = ty_span.shrink_to_hi(); // Use an empty path since `position == 0`.
+        Ok(P(T::recovered(
+            Some(QSelf {
+                ty,
+                path_span,
+                position: 0,
+            }),
+            path,
+        )))
+    }
+
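As a concrete example of this recovery, `[u8]::len(&buf)` is missing the angle brackets; the suggested qualified path compiles:

fn main() {
    let buf = [1u8, 2, 3];
    let n = <[u8]>::len(&buf);
    assert_eq!(n, 3);
}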
+    pub(super) fn maybe_consume_incorrect_semicolon(&mut self, items: &[P<Item>]) -> bool {
+        if self.eat(&token::Semi) {
+            let mut err = self.struct_span_err(self.prev_span, "expected item, found `;`");
+            err.span_suggestion_short(
+                self.prev_span,
+                "remove this semicolon",
+                String::new(),
+                Applicability::MachineApplicable,
+            );
+            if !items.is_empty() {
+                let previous_item = &items[items.len() - 1];
+                let previous_item_kind_name = match previous_item.kind {
+                    // Say "braced struct" because tuple-structs and
+                    // braceless-empty-struct declarations do take a semicolon.
+                    ItemKind::Struct(..) => Some("braced struct"),
+                    ItemKind::Enum(..) => Some("enum"),
+                    ItemKind::Trait(..) => Some("trait"),
+                    ItemKind::Union(..) => Some("union"),
+                    _ => None,
+                };
+                if let Some(name) = previous_item_kind_name {
+                    err.help(&format!(
+                        "{} declarations are not followed by a semicolon",
+                        name
+                    ));
+                }
+            }
+            err.emit();
+            true
+        } else {
+            false
+        }
+    }
+
+    /// Creates a `DiagnosticBuilder` for an unexpected token `t` and tries to recover if it is a
+    /// closing delimiter.
+    pub(super) fn unexpected_try_recover(
+        &mut self,
+        t: &TokenKind,
+    ) -> PResult<'a, bool /* recovered */> {
+        let token_str = pprust::token_kind_to_string(t);
+        let this_token_str = self.this_token_descr();
+        let (prev_sp, sp) = match (&self.token.kind, self.subparser_name) {
+            // Point at the end of the macro call when reaching end of macro arguments.
+            (token::Eof, Some(_)) => {
+                let sp = self.sess.source_map().next_point(self.token.span);
+                (sp, sp)
+            }
+            // We don't want to point at the following span after DUMMY_SP.
+            // This happens when the parser finds an empty TokenStream.
+            _ if self.prev_span == DUMMY_SP => (self.token.span, self.token.span),
+            // EOF, don't want to point at the following char, but rather the last token.
+            (token::Eof, None) => (self.prev_span, self.token.span),
+            _ => (self.sess.source_map().next_point(self.prev_span), self.token.span),
+        };
+        let msg = format!(
+            "expected `{}`, found {}",
+            token_str,
+            match (&self.token.kind, self.subparser_name) {
+                (token::Eof, Some(origin)) => format!("end of {}", origin),
+                _ => this_token_str,
+            },
+        );
+        let mut err = self.struct_span_err(sp, &msg);
+        let label_exp = format!("expected `{}`", token_str);
+        match self.recover_closing_delimiter(&[t.clone()], err) {
+            Err(e) => err = e,
+            Ok(recovered) => {
+                return Ok(recovered);
+            }
+        }
+        let sm = self.sess.source_map();
+        match (sm.lookup_line(prev_sp.lo()), sm.lookup_line(sp.lo())) {
+            (Ok(ref a), Ok(ref b)) if a.line == b.line => {
+                // When the spans are in the same line, it means that the only content
+                // between them is whitespace, point only at the found token.
+                err.span_label(sp, label_exp);
+            }
+            _ => {
+                err.span_label(prev_sp, label_exp);
+                err.span_label(sp, "unexpected token");
+            }
+        }
+        Err(err)
+    }
+
+    pub(super) fn parse_semi_or_incorrect_foreign_fn_body(
+        &mut self,
+        ident: &Ident,
+        extern_sp: Span,
+    ) -> PResult<'a, ()> {
+        if self.token != token::Semi {
+            // This might be an incorrect fn definition (#62109).
+            let parser_snapshot = self.clone();
+            match self.parse_inner_attrs_and_block() {
+                Ok((_, body)) => {
+                    self.struct_span_err(ident.span, "incorrect `fn` inside `extern` block")
+                        .span_label(ident.span, "can't have a body")
+                        .span_label(body.span, "this body is invalid here")
+                        .span_label(
+                            extern_sp,
+                            "`extern` blocks define existing foreign functions and `fn`s \
+                             inside of them cannot have a body")
+                        .help("you might have meant to write a function accessible through FFI, \
+                               which can be done by writing `extern fn` outside of the \
+                               `extern` block")
+                        .note("for more information, visit \
+                               https://doc.rust-lang.org/std/keyword.extern.html")
+                        .emit();
+                }
+                Err(mut err) => {
+                    err.cancel();
+                    mem::replace(self, parser_snapshot);
+                    self.expect(&token::Semi)?;
+                }
+            }
+        } else {
+            self.bump();
+        }
+        Ok(())
+    }
+
+    /// Consumes alternative await syntaxes like `await!(<expr>)`, `await <expr>`,
+    /// `await? <expr>`, `await(<expr>)`, and `await { <expr> }`.
+    pub(super) fn parse_incorrect_await_syntax(
+        &mut self,
+        lo: Span,
+        await_sp: Span,
+    ) -> PResult<'a, (Span, ExprKind)> {
+        if self.token == token::Not {
+            // Handle `await!(<expr>)`.
+            self.expect(&token::Not)?;
+            self.expect(&token::OpenDelim(token::Paren))?;
+            let expr = self.parse_expr()?;
+            self.expect(&token::CloseDelim(token::Paren))?;
+            let sp = self.error_on_incorrect_await(lo, self.prev_span, &expr, false);
+            return Ok((sp, ExprKind::Await(expr)))
+        }
+
+        let is_question = self.eat(&token::Question); // Handle `await? <expr>`.
+        let expr = if self.token == token::OpenDelim(token::Brace) {
+            // Handle `await { <expr> }`.
+            // This needs to be handled separately from the next arm to avoid
+            // interpreting `await { <expr> }?` as `<expr>?.await`.
+            self.parse_block_expr(
+                None,
+                self.token.span,
+                BlockCheckMode::Default,
+                ThinVec::new(),
+            )
+        } else {
+            self.parse_expr()
+        }.map_err(|mut err| {
+            err.span_label(await_sp, "while parsing this incorrect await expression");
+            err
+        })?;
+        let sp = self.error_on_incorrect_await(lo, expr.span, &expr, is_question);
+        Ok((sp, ExprKind::Await(expr)))
+    }
+
+    fn error_on_incorrect_await(&self, lo: Span, hi: Span, expr: &Expr, is_question: bool) -> Span {
+        let expr_str = self.span_to_snippet(expr.span)
+            .unwrap_or_else(|_| pprust::expr_to_string(&expr));
+        let suggestion = format!("{}.await{}", expr_str, if is_question { "?" } else { "" });
+        let sp = lo.to(hi);
+        let app = match expr.kind {
+            ExprKind::Try(_) => Applicability::MaybeIncorrect, // `await <expr>?`
+            _ => Applicability::MachineApplicable,
+        };
+        self.struct_span_err(sp, "incorrect use of `await`")
+            .span_suggestion(sp, "`await` is a postfix operation", suggestion, app)
+            .emit();
+        sp
+    }
+
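The forms recovered above (`await!(fut)`, `await fut`, `await? fut`, `await { fut }`) are all rewritten to the postfix form; a minimal sketch:

async fn read() -> u8 { 7 }

async fn caller() -> u8 {
    read().await // the suggested postfix syntax
}

fn main() {
    // Constructing the future is enough for illustration; driving it needs an executor.
    let _fut = caller();
}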
+    /// If encountering `future.await()`, consumes and emits an error.
+    pub(super) fn recover_from_await_method_call(&mut self) {
+        if self.token == token::OpenDelim(token::Paren) &&
+            self.look_ahead(1, |t| t == &token::CloseDelim(token::Paren))
+        {
+            // future.await()
+            let lo = self.token.span;
+            self.bump(); // (
+            let sp = lo.to(self.token.span);
+            self.bump(); // )
+            self.struct_span_err(sp, "incorrect use of `await`")
+                .span_suggestion(
+                    sp,
+                    "`await` is not a method call, remove the parentheses",
+                    String::new(),
+                    Applicability::MachineApplicable,
+                ).emit()
+        }
+    }
+
+    /// Recovers a situation like `for ( $pat in $expr )`
+    /// and suggest writing `for $pat in $expr` instead.
+    ///
+    /// This should be called before parsing the `$block`.
+    pub(super) fn recover_parens_around_for_head(
+        &mut self,
+        pat: P<Pat>,
+        expr: &Expr,
+        begin_paren: Option<Span>,
+    ) -> P<Pat> {
+        match (&self.token.kind, begin_paren) {
+            (token::CloseDelim(token::Paren), Some(begin_par_sp)) => {
+                self.bump();
+
+                let pat_str = self
+                    // Remove the `(` from the span of the pattern:
+                    .span_to_snippet(pat.span.trim_start(begin_par_sp).unwrap())
+                    .unwrap_or_else(|_| pprust::pat_to_string(&pat));
+
+                self.struct_span_err(self.prev_span, "unexpected closing `)`")
+                    .span_label(begin_par_sp, "opening `(`")
+                    .span_suggestion(
+                        begin_par_sp.to(self.prev_span),
+                        "remove parentheses in `for` loop",
+                        format!("{} in {}", pat_str, pprust::expr_to_string(&expr)),
+                        // With e.g. `for (x) in y)` this would replace `(x) in y)`
+                        // with `x) in y)` which is syntactically invalid.
+                        // However, this is prevented before we get here.
+                        Applicability::MachineApplicable,
+                    )
+                    .emit();
+
+                // Unwrap `(pat)` into `pat` to avoid the `unused_parens` lint.
+                pat.and_then(|pat| match pat.kind {
+                    PatKind::Paren(pat) => pat,
+                    _ => P(pat),
+                })
+            }
+            _ => pat,
+        }
+    }
+
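A tiny illustration of the shape handled above, with arbitrary names: `for (x in xs.iter()) { ... }` is rewritten so the parentheses around the loop head are dropped:

fn main() {
    let xs = [1, 2, 3];
    for x in xs.iter() {
        let _ = x;
    }
}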
+    pub(super) fn could_ascription_be_path(&self, node: &ast::ExprKind) -> bool {
+        self.token.is_ident() &&
+            if let ast::ExprKind::Path(..) = node { true } else { false } &&
+            !self.token.is_reserved_ident() &&           // v `foo:bar(baz)`
+            self.look_ahead(1, |t| t == &token::OpenDelim(token::Paren)) ||
+            self.look_ahead(1, |t| t == &token::Lt) &&     // `foo:bar<baz`
+            self.look_ahead(2, |t| t.is_ident()) ||
+            self.look_ahead(1, |t| t == &token::Colon) &&  // `foo:bar:baz`
+            self.look_ahead(2, |t| t.is_ident()) ||
+            self.look_ahead(1, |t| t == &token::ModSep) &&
+            (self.look_ahead(2, |t| t.is_ident()) ||   // `foo:bar::baz`
+             self.look_ahead(2, |t| t == &token::Lt))  // `foo:bar::<baz>`
+    }
+
+    pub(super) fn recover_seq_parse_error(
+        &mut self,
+        delim: token::DelimToken,
+        lo: Span,
+        result: PResult<'a, P<Expr>>,
+    ) -> P<Expr> {
+        match result {
+            Ok(x) => x,
+            Err(mut err) => {
+                err.emit();
+                // Recover from parse error.
+                self.consume_block(delim);
+                self.mk_expr(lo.to(self.prev_span), ExprKind::Err, ThinVec::new())
+            }
+        }
+    }
+
+    pub(super) fn recover_closing_delimiter(
+        &mut self,
+        tokens: &[TokenKind],
+        mut err: DiagnosticBuilder<'a>,
+    ) -> PResult<'a, bool> {
+        let mut pos = None;
+        // We want to use the last closing delim that would apply.
+        for (i, unmatched) in self.unclosed_delims.iter().enumerate().rev() {
+            if tokens.contains(&token::CloseDelim(unmatched.expected_delim))
+                && Some(self.token.span) > unmatched.unclosed_span
+            {
+                pos = Some(i);
+            }
+        }
+        match pos {
+            Some(pos) => {
+                // Recover and assume that the detected unclosed delimiter was meant for
+                // this location. Emit the diagnostic and act as if the delimiter was
+                // present for the parser's sake.
+
+                // Don't attempt to recover from this unclosed delimiter more than once.
+                let unmatched = self.unclosed_delims.remove(pos);
+                let delim = TokenType::Token(token::CloseDelim(unmatched.expected_delim));
+
+                // We want to suggest the inclusion of the closing delimiter where it makes
+                // the most sense, which is immediately after the last token:
+                //
+                //  {foo(bar {}}
+                //      -      ^
+                //      |      |
+                //      |      help: `)` may belong here
+                //      |
+                //      unclosed delimiter
+                if let Some(sp) = unmatched.unclosed_span {
+                    err.span_label(sp, "unclosed delimiter");
+                }
+                err.span_suggestion_short(
+                    self.sess.source_map().next_point(self.prev_span),
+                    &format!("{} may belong here", delim.to_string()),
+                    delim.to_string(),
+                    Applicability::MaybeIncorrect,
+                );
+                err.emit();
+                self.expected_tokens.clear();  // reduce errors
+                Ok(true)
+            }
+            _ => Err(err),
+        }
+    }
+
+    /// Recovers from `pub` keyword in places where it seems _reasonable_ but isn't valid.
+    pub(super) fn eat_bad_pub(&mut self) {
+        if self.token.is_keyword(kw::Pub) {
+            match self.parse_visibility(false) {
+                Ok(vis) => {
+                    self.diagnostic()
+                        .struct_span_err(vis.span, "unnecessary visibility qualifier")
+                        .span_label(vis.span, "`pub` not permitted here")
+                        .emit();
+                }
+                Err(mut err) => err.emit(),
+            }
+        }
+    }
+
+    /// Eats tokens until we can be relatively sure we reached the end of the
+    /// statement. This is something of a best-effort heuristic.
+    ///
+    /// We terminate when we find an unmatched `}` (without consuming it).
+    pub(super) fn recover_stmt(&mut self) {
+        self.recover_stmt_(SemiColonMode::Ignore, BlockMode::Ignore)
+    }
+
+    /// If `break_on_semi` is `Break`, then we will stop consuming tokens after
+    /// finding (and consuming) a `;` outside of `{}` or `[]` (note that this is
+    /// approximate -- it can mean we break too early due to macros, but that
+    /// should only lead to sub-optimal recovery, not inaccurate parsing).
+    ///
+    /// If `break_on_block` is `Break`, then we will stop consuming tokens
+    /// after finding (and consuming) a brace-delimited block.
+    pub(super) fn recover_stmt_(
+        &mut self,
+        break_on_semi: SemiColonMode,
+        break_on_block: BlockMode,
+    ) {
+        let mut brace_depth = 0;
+        let mut bracket_depth = 0;
+        let mut in_block = false;
+        debug!("recover_stmt_ enter loop (semi={:?}, block={:?})",
+               break_on_semi, break_on_block);
+        loop {
+            debug!("recover_stmt_ loop {:?}", self.token);
+            match self.token.kind {
+                token::OpenDelim(token::DelimToken::Brace) => {
+                    brace_depth += 1;
+                    self.bump();
+                    if break_on_block == BlockMode::Break &&
+                       brace_depth == 1 &&
+                       bracket_depth == 0 {
+                        in_block = true;
+                    }
+                }
+                token::OpenDelim(token::DelimToken::Bracket) => {
+                    bracket_depth += 1;
+                    self.bump();
+                }
+                token::CloseDelim(token::DelimToken::Brace) => {
+                    if brace_depth == 0 {
+                        debug!("recover_stmt_ return - close delim {:?}", self.token);
+                        break;
+                    }
+                    brace_depth -= 1;
+                    self.bump();
+                    if in_block && bracket_depth == 0 && brace_depth == 0 {
+                        debug!("recover_stmt_ return - block end {:?}", self.token);
+                        break;
+                    }
+                }
+                token::CloseDelim(token::DelimToken::Bracket) => {
+                    bracket_depth -= 1;
+                    if bracket_depth < 0 {
+                        bracket_depth = 0;
+                    }
+                    self.bump();
+                }
+                token::Eof => {
+                    debug!("recover_stmt_ return - Eof");
+                    break;
+                }
+                token::Semi => {
+                    self.bump();
+                    if break_on_semi == SemiColonMode::Break &&
+                       brace_depth == 0 &&
+                       bracket_depth == 0 {
+                        debug!("recover_stmt_ return - Semi");
+                        break;
+                    }
+                }
+                token::Comma if break_on_semi == SemiColonMode::Comma &&
+                       brace_depth == 0 &&
+                       bracket_depth == 0 =>
+                {
+                    debug!("recover_stmt_ return - Comma");
+                    break;
+                }
+                _ => {
+                    self.bump()
+                }
+            }
+        }
+    }
+
+    pub(super) fn check_for_for_in_in_typo(&mut self, in_span: Span) {
+        if self.eat_keyword(kw::In) {
+            // a common typo: `for _ in in bar {}`
+            self.struct_span_err(self.prev_span, "expected iterable, found keyword `in`")
+                .span_suggestion_short(
+                    in_span.until(self.prev_span),
+                    "remove the duplicated `in`",
+                    String::new(),
+                    Applicability::MachineApplicable,
+                )
+                .emit();
+        }
+    }
+
+    pub(super) fn expected_semi_or_open_brace<T>(&mut self) -> PResult<'a, T> {
+        let token_str = self.this_token_descr();
+        let mut err = self.fatal(&format!("expected `;` or `{{`, found {}", token_str));
+        err.span_label(self.token.span, "expected `;` or `{`");
+        Err(err)
+    }
+
+    pub(super) fn eat_incorrect_doc_comment_for_param_type(&mut self) {
+        if let token::DocComment(_) = self.token.kind {
+            self.struct_span_err(
+                self.token.span,
+                "documentation comments cannot be applied to a function parameter's type",
+            )
+            .span_label(self.token.span, "doc comments are not allowed here")
+            .emit();
+            self.bump();
+        } else if self.token == token::Pound && self.look_ahead(1, |t| {
+            *t == token::OpenDelim(token::Bracket)
+        }) {
+            let lo = self.token.span;
+            // Skip every token until next possible arg.
+            while self.token != token::CloseDelim(token::Bracket) {
+                self.bump();
+            }
+            let sp = lo.to(self.token.span);
+            self.bump();
+            self.struct_span_err(
+                sp,
+                "attributes cannot be applied to a function parameter's type",
+            )
+            .span_label(sp, "attributes are not allowed here")
+            .emit();
+        }
+    }
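Both branches above can be reached by hypothetical inputs along these lines, where a doc comment or an attribute appears where a parameter's type is expected:

    fn f(x: /// doc comments cannot be applied to a function parameter's type
            u32) {}
    fn g(y: #[some_attr] u32) {}   // attributes cannot be applied to a function parameter's type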
+
+    pub(super) fn parameter_without_type(
+        &mut self,
+        err: &mut DiagnosticBuilder<'_>,
+        pat: P<ast::Pat>,
+        require_name: bool,
+        is_self_allowed: bool,
+        is_trait_item: bool,
+    ) -> Option<Ident> {
+        // If we find a pattern followed by an identifier, it could be an (incorrect)
+        // C-style parameter declaration.
+        if self.check_ident() && self.look_ahead(1, |t| {
+            *t == token::Comma || *t == token::CloseDelim(token::Paren)
+        }) { // `fn foo(String s) {}`
+            let ident = self.parse_ident().unwrap();
+            let span = pat.span.with_hi(ident.span.hi());
+
+            err.span_suggestion(
+                span,
+                "declare the type after the parameter binding",
+                String::from("<identifier>: <type>"),
+                Applicability::HasPlaceholders,
+            );
+            return Some(ident);
+        } else if let PatKind::Ident(_, ident, _) = pat.kind {
+            if require_name && (
+                is_trait_item ||
+                self.token == token::Comma ||
+                self.token == token::Lt ||
+                self.token == token::CloseDelim(token::Paren)
+            ) { // `fn foo(a, b) {}`, `fn foo(a<x>, b<y>) {}` or `fn foo(usize, usize) {}`
+                if is_self_allowed {
+                    err.span_suggestion(
+                        pat.span,
+                        "if this is a `self` type, give it a parameter name",
+                        format!("self: {}", ident),
+                        Applicability::MaybeIncorrect,
+                    );
+                }
+                // Avoid suggesting that `fn foo(HashMap<u32>)` is fixed with a change to
+                // `fn foo(HashMap: TypeName<u32>)`.
+                if self.token != token::Lt {
+                    err.span_suggestion(
+                        pat.span,
+                        "if this was a parameter name, give it a type",
+                        format!("{}: TypeName", ident),
+                        Applicability::HasPlaceholders,
+                    );
+                }
+                err.span_suggestion(
+                    pat.span,
+                    "if this is a type, explicitly ignore the parameter name",
+                    format!("_: {}", ident),
+                    Applicability::MachineApplicable,
+                );
+                err.note("anonymous parameters are removed in the 2018 edition (see RFC 1685)");
+
+                // Don't attempt to recover by using the `X` in `X<Y>` as the parameter name.
+                return if self.token == token::Lt { None } else { Some(ident) };
+            }
+        }
+        None
+    }
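Concretely, the shapes mentioned in the comments above look like this (illustrative; `TypeName` is the placeholder used by the suggestion itself):

    fn foo(String s) {}          // C-style declaration; suggestion uses `<identifier>: <type>`
    fn bar(a, b) {}              // names without types; suggestion: `a: TypeName` or `_: a`
    trait T { fn baz(usize); }   // anonymous parameter (2018 edition); note points at RFC 1685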
+
+    pub(super) fn recover_arg_parse(&mut self) -> PResult<'a, (P<ast::Pat>, P<ast::Ty>)> {
+        let pat = self.parse_pat(Some("argument name"))?;
+        self.expect(&token::Colon)?;
+        let ty = self.parse_ty()?;
+
+        self.diagnostic()
+            .struct_span_err_with_code(
+                pat.span,
+                "patterns aren't allowed in methods without bodies",
+                DiagnosticId::Error("E0642".into()),
+            )
+            .span_suggestion_short(
+                pat.span,
+                "give this argument a name or use an underscore to ignore it",
+                "_".to_owned(),
+                Applicability::MachineApplicable,
+            )
+            .emit();
+
+        // Pretend the pattern is `_`, to avoid duplicate errors from AST validation.
+        let pat = P(Pat {
+            kind: PatKind::Wild,
+            span: pat.span,
+            id: ast::DUMMY_NODE_ID
+        });
+        Ok((pat, ty))
+    }
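E0642 ("patterns aren't allowed in methods without bodies") is what `recover_arg_parse` reports; a minimal trigger and the suggested fix might look like this (illustrative):

    trait T {
        fn f((a, b): (u32, u32));   // error[E0642]; suggestion: replace the pattern with `_`
        fn g(_: (u32, u32));        // accepted
    }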
+
+    pub(super) fn recover_bad_self_param(
+        &mut self,
+        mut param: ast::Param,
+        is_trait_item: bool,
+    ) -> PResult<'a, ast::Param> {
+        let sp = param.pat.span;
+        param.ty.kind = TyKind::Err;
+        let mut err = self.struct_span_err(sp, "unexpected `self` parameter in function");
+        if is_trait_item {
+            err.span_label(sp, "must be the first associated function parameter");
+        } else {
+            err.span_label(sp, "not valid as function parameter");
+            err.note("`self` is only valid as the first parameter of an associated function");
+        }
+        err.emit();
+        Ok(param)
+    }
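A hypothetical input reaching `recover_bad_self_param`, where `self` appears somewhere other than the first parameter position:

    fn f(x: u32, self) {}   // error: unexpected `self` parameter in function;
                            // note: `self` is only valid as the first parameter of an associated function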
+
+    pub(super) fn consume_block(&mut self, delim: token::DelimToken) {
+        let mut brace_depth = 0;
+        loop {
+            if self.eat(&token::OpenDelim(delim)) {
+                brace_depth += 1;
+            } else if self.eat(&token::CloseDelim(delim)) {
+                if brace_depth == 0 {
+                    return;
+                } else {
+                    brace_depth -= 1;
+                    continue;
+                }
+            } else if self.token == token::Eof || self.eat(&token::CloseDelim(token::NoDelim)) {
+                return;
+            } else {
+                self.bump();
+            }
+        }
+    }
+
+    pub(super) fn expected_expression_found(&self) -> DiagnosticBuilder<'a> {
+        let (span, msg) = match (&self.token.kind, self.subparser_name) {
+            (&token::Eof, Some(origin)) => {
+                let sp = self.sess.source_map().next_point(self.token.span);
+                (sp, format!("expected expression, found end of {}", origin))
+            }
+            _ => (self.token.span, format!(
+                "expected expression, found {}",
+                self.this_token_descr(),
+            )),
+        };
+        let mut err = self.struct_span_err(span, &msg);
+        let sp = self.sess.source_map().start_point(self.token.span);
+        if let Some(sp) = self.sess.ambiguous_block_expr_parse.borrow().get(&sp) {
+            self.sess.expr_parentheses_needed(&mut err, *sp, None);
+        }
+        err.span_label(span, "expected expression");
+        err
+    }
+
+    fn consume_tts(
+        &mut self,
+        mut acc: i64, // `i64` because malformed code can have more closing delims than opening.
+        // Not using `FxHashMap` due to `token::TokenKind: !Eq + !Hash`.
+        modifier: &[(token::TokenKind, i64)],
+    ) {
+        while acc > 0 {
+            if let Some((_, val)) = modifier.iter().find(|(t, _)| *t == self.token.kind) {
+                acc += *val;
+            }
+            if self.token.kind == token::Eof {
+                break;
+            }
+            self.bump();
+        }
+    }
+
+    /// Replace duplicated recovered parameters with the `_` pattern to avoid unnecessary errors.
+    ///
+    /// This is necessary because at this point we don't know whether we parsed a function with
+    /// anonymous parameters or a function with names but no types. In order to minimize
+    /// unnecessary errors, we assume the parameters are in the shape of `fn foo(a, b, c)` where
+    /// the parameters are *names* (so we don't emit errors about not being able to find `b` in
+    /// the local scope), but if we find the same name multiple times, like in `fn foo(i8, i8)`,
+    /// we deduplicate them to not complain about duplicated parameter names.
+    pub(super) fn deduplicate_recovered_params_names(&self, fn_inputs: &mut Vec<Param>) {
+        let mut seen_inputs = FxHashSet::default();
+        for input in fn_inputs.iter_mut() {
+            let opt_ident = if let (PatKind::Ident(_, ident, _), TyKind::Err) = (
+                &input.pat.kind, &input.ty.kind,
+            ) {
+                Some(*ident)
+            } else {
+                None
+            };
+            if let Some(ident) = opt_ident {
+                if seen_inputs.contains(&ident) {
+                    input.pat.kind = PatKind::Wild;
+                }
+                seen_inputs.insert(ident);
+            }
+        }
+    }
+}
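The deduplication above matters for recovered headers such as the `fn foo(i8, i8) {}` case mentioned in the doc comment: both parameters are recovered with the name `i8`, so the duplicate is rewritten to the `_` pattern instead of producing a duplicate-name error.

    fn foo(i8, i8) {}   // recovered as two params named `i8`; the second becomes `_`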
index 23674ad589dc570716e65b9225355c265b36672b..67a530ec6831bf764efb1866088c022414b2a5f8 100644 (file)
@@ -1,18 +1,18 @@
-use super::{
-    Parser, PResult, Restrictions, PrevTokenKind, TokenType, PathStyle, BlockMode, SemiColonMode,
-    SeqSep, TokenExpectType,
-};
+use super::{Parser, PResult, Restrictions, PrevTokenKind, TokenType, PathStyle, BlockMode};
+use super::{SemiColonMode, SeqSep, TokenExpectType};
 use super::pat::{GateOr, PARAM_EXPECTED};
+use super::diagnostics::Error;
+
+use crate::parse::literal::LitError;
 
 use crate::ast::{
     self, DUMMY_NODE_ID, Attribute, AttrStyle, Ident, CaptureBy, BlockCheckMode,
     Expr, ExprKind, RangeLimits, Label, Movability, IsAsync, Arm, Ty, TyKind,
-    FunctionRetTy, Param, FnDecl, BinOpKind, BinOp, UnOp, Mac, AnonConst, Field,
+    FunctionRetTy, Param, FnDecl, BinOpKind, BinOp, UnOp, Mac, AnonConst, Field, Lit,
 };
 use crate::maybe_recover_from_interpolated_ty_qpath;
 use crate::parse::classify;
-use crate::parse::token::{self, Token};
-use crate::parse::diagnostics::Error;
+use crate::parse::token::{self, Token, TokenKind};
 use crate::print::pprust;
 use crate::ptr::P;
 use crate::source_map::{self, Span};
@@ -20,6 +20,7 @@
 use crate::util::parser::{AssocOp, Fixity, prec_let_scrutinee_needs_par};
 
 use errors::Applicability;
+use syntax_pos::Symbol;
 use std::mem;
 use rustc_data_structures::thin_vec::ThinVec;
 
@@ -238,7 +239,9 @@ pub(super) fn parse_assoc_expr_with(
 
             self.bump();
             if op.is_comparison() {
-                self.check_no_chained_comparison(&lhs, &op)?;
+                if let Some(expr) = self.check_no_chained_comparison(&lhs, &op)? {
+                    return Ok(expr);
+                }
             }
             // Special cases:
             if op == AssocOp::As {
@@ -420,7 +423,7 @@ fn parse_prefix_expr(
                 self.struct_span_err(span_of_tilde, "`~` cannot be used as a unary operator")
                     .span_suggestion_short(
                         span_of_tilde,
-                        "use `!` to perform bitwise negation",
+                        "use `!` to perform bitwise not",
                         "!".to_owned(),
                         Applicability::MachineApplicable
                     )
@@ -550,8 +553,11 @@ fn parse_assoc_op_cast(&mut self, lhs: P<Expr>, lhs_span: Span,
 
                         // Report non-fatal diagnostics, keep `x as usize` as an expression
                         // in AST and continue parsing.
-                        let msg = format!("`<` is interpreted as a start of generic \
-                                           arguments for `{}`, not a {}", path, op_noun);
+                        let msg = format!(
+                            "`<` is interpreted as a start of generic arguments for `{}`, not a {}",
+                            pprust::path_to_string(&path),
+                            op_noun,
+                        );
                         let span_after_type = parser_snapshot_after_type.token.span;
                         let expr = mk_expr(self, P(Ty {
                             span: path.span,
@@ -1067,8 +1073,167 @@ macro_rules! parse_lit {
         self.maybe_recover_from_bad_qpath(expr, true)
     }
 
+    /// Matches `lit = true | false | token_lit`.
+    pub(super) fn parse_lit(&mut self) -> PResult<'a, Lit> {
+        let mut recovered = None;
+        if self.token == token::Dot {
+            // Attempt to recover `.4` as `0.4`.
+            recovered = self.look_ahead(1, |next_token| {
+                if let token::Literal(token::Lit { kind: token::Integer, symbol, suffix })
+                        = next_token.kind {
+                    if self.token.span.hi() == next_token.span.lo() {
+                        let s = String::from("0.") + &symbol.as_str();
+                        let kind = TokenKind::lit(token::Float, Symbol::intern(&s), suffix);
+                        return Some(Token::new(kind, self.token.span.to(next_token.span)));
+                    }
+                }
+                None
+            });
+            if let Some(token) = &recovered {
+                self.bump();
+                self.struct_span_err(token.span, "float literals must have an integer part")
+                    .span_suggestion(
+                        token.span,
+                        "must have an integer part",
+                        pprust::token_to_string(token),
+                        Applicability::MachineApplicable,
+                    )
+                    .emit();
+            }
+        }
+
+        let token = recovered.as_ref().unwrap_or(&self.token);
+        match Lit::from_token(token) {
+            Ok(lit) => {
+                self.bump();
+                Ok(lit)
+            }
+            Err(LitError::NotLiteral) => {
+                let msg = format!("unexpected token: {}", self.this_token_descr());
+                Err(self.span_fatal(token.span, &msg))
+            }
+            Err(err) => {
+                let (lit, span) = (token.expect_lit(), token.span);
+                self.bump();
+                self.error_literal_from_token(err, lit, span);
+                // Pack possible quotes and prefixes from the original literal into
+                // the error literal's symbol so they can be pretty-printed faithfully.
+                let suffixless_lit = token::Lit::new(lit.kind, lit.symbol, None);
+                let symbol = Symbol::intern(&suffixless_lit.to_string());
+                let lit = token::Lit::new(token::Err, symbol, lit.suffix);
+                Lit::from_lit_token(lit, span).map_err(|_| unreachable!())
+            }
+        }
+    }
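The `.4` recovery in `parse_lit` handles inputs like this (illustrative):

    let x = .4;    // error: float literals must have an integer part; suggestion: `0.4`
    let y = 0.4;   // accepted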
+
+    fn error_literal_from_token(&self, err: LitError, lit: token::Lit, span: Span) {
+        // Checks if `s` looks like i32 or u1234 etc.
+        fn looks_like_width_suffix(first_chars: &[char], s: &str) -> bool {
+            s.len() > 1
+            && s.starts_with(first_chars)
+            && s[1..].chars().all(|c| c.is_ascii_digit())
+        }
+
+        let token::Lit { kind, suffix, .. } = lit;
+        match err {
+            // `NotLiteral` is not an error by itself, so we don't report
+            // it and give the parser opportunity to try something else.
+            LitError::NotLiteral => {}
+            // `LexerError` *is* an error, but it was already reported
+            // by lexer, so here we don't report it the second time.
+            LitError::LexerError => {}
+            LitError::InvalidSuffix => {
+                self.expect_no_suffix(
+                    span,
+                    &format!("{} {} literal", kind.article(), kind.descr()),
+                    suffix,
+                );
+            }
+            LitError::InvalidIntSuffix => {
+                let suf = suffix.expect("suffix error with no suffix").as_str();
+                if looks_like_width_suffix(&['i', 'u'], &suf) {
+                    // If it looks like a width, try to be helpful.
+                    let msg = format!("invalid width `{}` for integer literal", &suf[1..]);
+                    self.struct_span_err(span, &msg)
+                        .help("valid widths are 8, 16, 32, 64 and 128")
+                        .emit();
+                } else {
+                    let msg = format!("invalid suffix `{}` for integer literal", suf);
+                    self.struct_span_err(span, &msg)
+                        .span_label(span, format!("invalid suffix `{}`", suf))
+                        .help("the suffix must be one of the integral types (`u32`, `isize`, etc)")
+                        .emit();
+                }
+            }
+            LitError::InvalidFloatSuffix => {
+                let suf = suffix.expect("suffix error with no suffix").as_str();
+                if looks_like_width_suffix(&['f'], &suf) {
+                    // If it looks like a width, try to be helpful.
+                    let msg = format!("invalid width `{}` for float literal", &suf[1..]);
+                    self.struct_span_err(span, &msg)
+                        .help("valid widths are 32 and 64")
+                        .emit();
+                } else {
+                    let msg = format!("invalid suffix `{}` for float literal", suf);
+                    self.struct_span_err(span, &msg)
+                        .span_label(span, format!("invalid suffix `{}`", suf))
+                        .help("valid suffixes are `f32` and `f64`")
+                        .emit();
+                }
+            }
+            LitError::NonDecimalFloat(base) => {
+                let descr = match base {
+                    16 => "hexadecimal",
+                    8 => "octal",
+                    2 => "binary",
+                    _ => unreachable!(),
+                };
+                self.struct_span_err(span, &format!("{} float literal is not supported", descr))
+                    .span_label(span, "not supported")
+                    .emit();
+            }
+            LitError::IntTooLarge => {
+                self.struct_span_err(span, "integer literal is too large")
+                    .emit();
+            }
+        }
+    }
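A few hypothetical literals that map onto the `LitError` arms above:

    let a = 1u7;      // invalid width `7` for integer literal (valid widths are 8, 16, 32, 64 and 128)
    let b = 1px;      // invalid suffix `px` for integer literal
    let c = 2.0f16;   // invalid width `16` for float literal (valid widths are 32 and 64)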
+
+    pub(super) fn expect_no_suffix(&self, sp: Span, kind: &str, suffix: Option<Symbol>) {
+        if let Some(suf) = suffix {
+            let mut err = if kind == "a tuple index"
+                && [sym::i32, sym::u32, sym::isize, sym::usize].contains(&suf)
+            {
+                // #59553: warn instead of reject out of hand to allow the fix to percolate
+                // through the ecosystem when people fix their macros
+                let mut err = self.sess.span_diagnostic.struct_span_warn(
+                    sp,
+                    &format!("suffixes on {} are invalid", kind),
+                );
+                err.note(&format!(
+                    "`{}` is *temporarily* accepted on tuple index fields as it was \
+                        incorrectly accepted on stable for a few releases",
+                    suf,
+                ));
+                err.help(
+                    "on proc macros, you'll want to use `syn::Index::from` or \
+                        `proc_macro::Literal::*_unsuffixed` for code that will desugar \
+                        to tuple field access",
+                );
+                err.note(
+                    "for more context, see https://github.com/rust-lang/rust/issues/60210",
+                );
+                err
+            } else {
+                self.struct_span_err(sp, &format!("suffixes on {} are invalid", kind))
+            };
+            err.span_label(sp, format!("invalid suffix `{}`", suf));
+            err.emit();
+        }
+    }
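The tuple-index carve-out above means a suffixed index is warned about rather than rejected outright, e.g. (illustrative):

    let t = (42,);
    let v = t.0u32;   // warning: suffixes on a tuple index are invalid (temporarily accepted, see issue #60210)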
+
     /// Matches `'-' lit | lit` (cf. `ast_validation::AstValidator::check_expr_within_pat`).
-    crate fn parse_literal_maybe_minus(&mut self) -> PResult<'a, P<Expr>> {
+    pub fn parse_literal_maybe_minus(&mut self) -> PResult<'a, P<Expr>> {
         maybe_whole_expr!(self);
 
         let minus_lo = self.token.span;
@@ -1088,7 +1253,7 @@ macro_rules! parse_lit {
     }
 
     /// Parses a block or unsafe block.
-    crate fn parse_block_expr(
+    pub(super) fn parse_block_expr(
         &mut self,
         opt_label: Option<Label>,
         lo: Span,
@@ -1393,7 +1558,7 @@ fn parse_match_expr(&mut self, mut attrs: ThinVec<Attribute>) -> PResult<'a, P<E
         return Ok(self.mk_expr(lo.to(hi), ExprKind::Match(discriminant, arms), attrs));
     }
 
-    crate fn parse_arm(&mut self) -> PResult<'a, Arm> {
+    pub(super) fn parse_arm(&mut self) -> PResult<'a, Arm> {
         let attrs = self.parse_outer_attributes()?;
         let lo = self.token.span;
         let pat = self.parse_top_pat(GateOr::No)?;
@@ -1501,7 +1666,7 @@ fn is_try_block(&self) -> bool {
     }
 
     /// Parses an `async move? {...}` expression.
-    pub fn parse_async_block(&mut self, mut attrs: ThinVec<Attribute>) -> PResult<'a, P<Expr>> {
+    fn parse_async_block(&mut self, mut attrs: ThinVec<Attribute>) -> PResult<'a, P<Expr>> {
         let span_lo = self.token.span;
         self.expect_keyword(kw::Async)?;
         let capture_clause = self.parse_capture_clause();
@@ -1781,4 +1946,8 @@ fn mk_await_expr(&mut self, self_arg: P<Expr>, lo: Span) -> PResult<'a, P<Expr>>
     crate fn mk_expr(&self, span: Span, kind: ExprKind, attrs: ThinVec<Attribute>) -> P<Expr> {
         P(Expr { kind, span, attrs, id: DUMMY_NODE_ID })
     }
+
+    pub(super) fn mk_expr_err(&self, span: Span) -> P<Expr> {
+        self.mk_expr(span, ExprKind::Err, ThinVec::new())
+    }
 }
index 2ecd9cca3c64b532553645e7ca5407652e463f33..bfcb0042a75a351dadf91eeb1757861efd7c24a4 100644 (file)
@@ -74,7 +74,7 @@ fn parse_const_param(&mut self, preceding_attrs: Vec<Attribute>) -> PResult<'a,
 
     /// Parses a (possibly empty) list of lifetime and type parameters, possibly including
     /// a trailing comma and erroneous trailing attributes.
-    crate fn parse_generic_params(&mut self) -> PResult<'a, Vec<ast::GenericParam>> {
+    pub(super) fn parse_generic_params(&mut self) -> PResult<'a, Vec<ast::GenericParam>> {
         let mut params = Vec::new();
         loop {
             let attrs = self.parse_outer_attributes()?;
index c00a5807d52c5d56a125715f45eace6b4adc33b8..0acfd1450d819d11ed208867fa76d4d319c4741a 100644 (file)
@@ -1,34 +1,29 @@
 use super::{Parser, PResult, PathStyle, SemiColonMode, BlockMode};
+use super::diagnostics::{Error, dummy_arg};
 
 use crate::maybe_whole;
 use crate::ptr::P;
-use crate::ast::{
-    self, DUMMY_NODE_ID, Ident, Attribute, AttrStyle,
-    Item, ItemKind, ImplItem, TraitItem, TraitItemKind,
-    UseTree, UseTreeKind, PathSegment,
-    IsAuto, Constness, IsAsync, Unsafety, Defaultness,
-    Visibility, VisibilityKind, Mutability, FnDecl, FnHeader, MethodSig, Block,
-    ForeignItem, ForeignItemKind,
-    Ty, TyKind, Generics, GenericBounds, TraitRef,
-    EnumDef, VariantData, StructField, AnonConst,
-    Mac, MacDelimiter,
-};
-use crate::ext::base::DummyResult;
+use crate::ast::{self, DUMMY_NODE_ID, Ident, Attribute, AttrStyle, AnonConst, Item, ItemKind};
+use crate::ast::{ImplItem, ImplItemKind, TraitItem, TraitItemKind, UseTree, UseTreeKind};
+use crate::ast::{PathSegment, IsAuto, Constness, IsAsync, Unsafety, Defaultness};
+use crate::ast::{Visibility, VisibilityKind, Mutability, FnHeader, ForeignItem, ForeignItemKind};
+use crate::ast::{Ty, TyKind, Generics, GenericBounds, TraitRef, EnumDef, VariantData, StructField};
+use crate::ast::{Mac, MacDelimiter, Block, BindingMode, FnDecl, MethodSig, SelfKind, Param};
 use crate::parse::token;
 use crate::parse::parser::maybe_append;
-use crate::parse::diagnostics::Error;
 use crate::tokenstream::{TokenTree, TokenStream};
-use crate::source_map::{respan, Span};
 use crate::symbol::{kw, sym};
+use crate::source_map::{self, respan, Span};
+use crate::ThinVec;
 
-use std::mem;
 use log::debug;
+use std::mem;
 use rustc_target::spec::abi::Abi;
 use errors::{Applicability, DiagnosticBuilder, DiagnosticId, StashKey};
 
 /// Whether the type alias or associated type is a concrete type or an opaque type.
 #[derive(Debug)]
-pub enum AliasKind {
+pub(super) enum AliasKind {
     /// Just a new name for the same type.
     Weak(P<Ty>),
     /// Only trait impls of the type will be usable, not the actual type itself.
@@ -98,7 +93,7 @@ fn parse_item_implementation(
 
         let lo = self.token.span;
 
-        let visibility = self.parse_visibility(false)?;
+        let vis = self.parse_visibility(false)?;
 
         if self.eat_keyword(kw::Use) {
             // USE ITEM
@@ -106,15 +101,14 @@ fn parse_item_implementation(
             self.expect(&token::Semi)?;
 
             let span = lo.to(self.prev_span);
-            let item =
-                self.mk_item(span, Ident::invalid(), item_, visibility, attrs);
+            let item = self.mk_item(span, Ident::invalid(), item_, vis, attrs);
             return Ok(Some(item));
         }
 
         if self.eat_keyword(kw::Extern) {
             let extern_sp = self.prev_span;
             if self.eat_keyword(kw::Crate) {
-                return Ok(Some(self.parse_item_extern_crate(lo, visibility, attrs)?));
+                return Ok(Some(self.parse_item_extern_crate(lo, vis, attrs)?));
             }
 
             let opt_abi = self.parse_opt_abi()?;
@@ -128,10 +122,10 @@ fn parse_item_implementation(
                     constness: respan(fn_span, Constness::NotConst),
                     abi: opt_abi.unwrap_or(Abi::C),
                 };
-                return self.parse_item_fn(lo, visibility, attrs, header);
+                return self.parse_item_fn(lo, vis, attrs, header);
             } else if self.check(&token::OpenDelim(token::Brace)) {
                 return Ok(Some(
-                    self.parse_item_foreign_mod(lo, opt_abi, visibility, attrs, extern_sp)?,
+                    self.parse_item_foreign_mod(lo, opt_abi, vis, attrs, extern_sp)?,
                 ));
             }
 
@@ -142,26 +136,31 @@ fn parse_item_implementation(
             self.bump();
             // STATIC ITEM
             let m = self.parse_mutability();
-            let (ident, item_, extra_attrs) = self.parse_item_const(Some(m))?;
-            let span = lo.to(self.prev_span);
-            let attrs = maybe_append(attrs, extra_attrs);
-            return Ok(Some(self.mk_item(span, ident, item_, visibility, attrs)));
+            let info = self.parse_item_const(Some(m))?;
+            return self.mk_item_with_info(attrs, lo, vis, info);
         }
+
         if self.eat_keyword(kw::Const) {
             let const_span = self.prev_span;
-            if self.check_keyword(kw::Fn)
-                || (self.check_keyword(kw::Unsafe)
-                    && self.is_keyword_ahead(1, &[kw::Fn])) {
+            if [kw::Fn, kw::Unsafe, kw::Extern].iter().any(|k| self.check_keyword(*k)) {
                 // CONST FUNCTION ITEM
                 let unsafety = self.parse_unsafety();
-                self.bump();
+
+                if self.check_keyword(kw::Extern) {
+                    self.sess.gated_spans.const_extern_fn.borrow_mut().push(
+                        lo.to(self.token.span)
+                    );
+                }
+                let abi = self.parse_extern_abi()?;
+                self.bump(); // `fn`
+
                 let header = FnHeader {
                     unsafety,
                     asyncness: respan(const_span, IsAsync::NotAsync),
                     constness: respan(const_span, Constness::Const),
-                    abi: Abi::Rust,
+                    abi,
                 };
-                return self.parse_item_fn(lo, visibility, attrs, header);
+                return self.parse_item_fn(lo, vis, attrs, header);
             }
 
             // CONST ITEM
@@ -177,10 +176,9 @@ fn parse_item_implementation(
                     )
                     .emit();
             }
-            let (ident, item_, extra_attrs) = self.parse_item_const(None)?;
-            let span = lo.to(self.prev_span);
-            let attrs = maybe_append(attrs, extra_attrs);
-            return Ok(Some(self.mk_item(span, ident, item_, visibility, attrs)));
+
+            let info = self.parse_item_const(None)?;
+            return self.mk_item_with_info(attrs, lo, vis, info);
         }
 
         // Parses `async unsafe? fn`.
@@ -205,40 +203,33 @@ fn parse_item_implementation(
                     constness: respan(fn_span, Constness::NotConst),
                     abi: Abi::Rust,
                 };
-                return self.parse_item_fn(lo, visibility, attrs, header);
+                return self.parse_item_fn(lo, vis, attrs, header);
             }
         }
+
         if self.check_keyword(kw::Unsafe) &&
             self.is_keyword_ahead(1, &[kw::Trait, kw::Auto])
         {
             // UNSAFE TRAIT ITEM
             self.bump(); // `unsafe`
-            let is_auto = if self.eat_keyword(kw::Trait) {
-                IsAuto::No
-            } else {
-                self.expect_keyword(kw::Auto)?;
-                self.expect_keyword(kw::Trait)?;
-                IsAuto::Yes
-            };
-            let (ident, item_, extra_attrs) = self.parse_item_trait(is_auto, Unsafety::Unsafe)?;
-            let span = lo.to(self.prev_span);
-            let attrs = maybe_append(attrs, extra_attrs);
-            return Ok(Some(self.mk_item(span, ident, item_, visibility, attrs)));
+            let info = self.parse_item_trait(Unsafety::Unsafe)?;
+            return self.mk_item_with_info(attrs, lo, vis, info);
         }
+
         if self.check_keyword(kw::Impl) ||
            self.check_keyword(kw::Unsafe) &&
                 self.is_keyword_ahead(1, &[kw::Impl]) ||
            self.check_keyword(kw::Default) &&
-                self.is_keyword_ahead(1, &[kw::Impl, kw::Unsafe]) {
+                self.is_keyword_ahead(1, &[kw::Impl, kw::Unsafe])
+        {
             // IMPL ITEM
             let defaultness = self.parse_defaultness();
             let unsafety = self.parse_unsafety();
             self.expect_keyword(kw::Impl)?;
-            let (ident, item_, extra_attrs) = self.parse_item_impl(unsafety, defaultness)?;
-            let span = lo.to(self.prev_span);
-            let attrs = maybe_append(attrs, extra_attrs);
-            return Ok(Some(self.mk_item(span, ident, item_, visibility, attrs)));
+            let info = self.parse_item_impl(unsafety, defaultness)?;
+            return self.mk_item_with_info(attrs, lo, vis, info);
         }
+
         if self.check_keyword(kw::Fn) {
             // FUNCTION ITEM
             self.bump();
@@ -249,19 +240,17 @@ fn parse_item_implementation(
                 constness: respan(fn_span, Constness::NotConst),
                 abi: Abi::Rust,
             };
-            return self.parse_item_fn(lo, visibility, attrs, header);
+            return self.parse_item_fn(lo, vis, attrs, header);
         }
+
         if self.check_keyword(kw::Unsafe)
-            && self.look_ahead(1, |t| *t != token::OpenDelim(token::Brace)) {
+            && self.look_ahead(1, |t| *t != token::OpenDelim(token::Brace))
+        {
             // UNSAFE FUNCTION ITEM
             self.bump(); // `unsafe`
             // `{` is also expected after `unsafe`; in case of error, include it in the diagnostic.
             self.check(&token::OpenDelim(token::Brace));
-            let abi = if self.eat_keyword(kw::Extern) {
-                self.parse_opt_abi()?.unwrap_or(Abi::C)
-            } else {
-                Abi::Rust
-            };
+            let abi = self.parse_extern_abi()?;
             self.expect_keyword(kw::Fn)?;
             let fn_span = self.prev_span;
             let header = FnHeader {
@@ -270,15 +259,15 @@ fn parse_item_implementation(
                 constness: respan(fn_span, Constness::NotConst),
                 abi,
             };
-            return self.parse_item_fn(lo, visibility, attrs, header);
+            return self.parse_item_fn(lo, vis, attrs, header);
         }
+
         if self.eat_keyword(kw::Mod) {
             // MODULE ITEM
-            let (ident, item_, extra_attrs) = self.parse_item_mod(&attrs[..])?;
-            let span = lo.to(self.prev_span);
-            let attrs = maybe_append(attrs, extra_attrs);
-            return Ok(Some(self.mk_item(span, ident, item_, visibility, attrs)));
+            let info = self.parse_item_mod(&attrs[..])?;
+            return self.mk_item_with_info(attrs, lo, vis, info);
         }
+
         if let Some(type_) = self.eat_type() {
             let (ident, alias, generics) = type_?;
             // TYPE ITEM
@@ -287,54 +276,44 @@ fn parse_item_implementation(
                 AliasKind::OpaqueTy(bounds) => ItemKind::OpaqueTy(bounds, generics),
             };
             let span = lo.to(self.prev_span);
-            return Ok(Some(self.mk_item(span, ident, item_, visibility, attrs)));
+            return Ok(Some(self.mk_item(span, ident, item_, vis, attrs)));
         }
+
         if self.eat_keyword(kw::Enum) {
             // ENUM ITEM
-            let (ident, item_, extra_attrs) = self.parse_item_enum()?;
-            let span = lo.to(self.prev_span);
-            let attrs = maybe_append(attrs, extra_attrs);
-            return Ok(Some(self.mk_item(span, ident, item_, visibility, attrs)));
+            let info = self.parse_item_enum()?;
+            return self.mk_item_with_info(attrs, lo, vis, info);
         }
+
         if self.check_keyword(kw::Trait)
             || (self.check_keyword(kw::Auto)
                 && self.is_keyword_ahead(1, &[kw::Trait]))
         {
-            let is_auto = if self.eat_keyword(kw::Trait) {
-                IsAuto::No
-            } else {
-                self.expect_keyword(kw::Auto)?;
-                self.expect_keyword(kw::Trait)?;
-                IsAuto::Yes
-            };
             // TRAIT ITEM
-            let (ident, item_, extra_attrs) = self.parse_item_trait(is_auto, Unsafety::Normal)?;
-            let span = lo.to(self.prev_span);
-            let attrs = maybe_append(attrs, extra_attrs);
-            return Ok(Some(self.mk_item(span, ident, item_, visibility, attrs)));
+            let info = self.parse_item_trait(Unsafety::Normal)?;
+            return self.mk_item_with_info(attrs, lo, vis, info);
         }
+
         if self.eat_keyword(kw::Struct) {
             // STRUCT ITEM
-            let (ident, item_, extra_attrs) = self.parse_item_struct()?;
-            let span = lo.to(self.prev_span);
-            let attrs = maybe_append(attrs, extra_attrs);
-            return Ok(Some(self.mk_item(span, ident, item_, visibility, attrs)));
+            let info = self.parse_item_struct()?;
+            return self.mk_item_with_info(attrs, lo, vis, info);
         }
+
         if self.is_union_item() {
             // UNION ITEM
             self.bump();
-            let (ident, item_, extra_attrs) = self.parse_item_union()?;
-            let span = lo.to(self.prev_span);
-            let attrs = maybe_append(attrs, extra_attrs);
-            return Ok(Some(self.mk_item(span, ident, item_, visibility, attrs)));
+            let info = self.parse_item_union()?;
+            return self.mk_item_with_info(attrs, lo, vis, info);
         }
-        if let Some(macro_def) = self.eat_macro_def(&attrs, &visibility, lo)? {
+
+        if let Some(macro_def) = self.eat_macro_def(&attrs, &vis, lo)? {
             return Ok(Some(macro_def));
         }
 
         // Verify whether we have encountered a struct or method definition where the user forgot to
         // add the `struct` or `fn` keyword after writing `pub`: `pub S {}`
-        if visibility.node.is_pub() &&
+        if vis.node.is_pub() &&
             self.check_ident() &&
             self.look_ahead(1, |t| *t != token::Not)
         {
@@ -425,17 +404,20 @@ fn parse_item_implementation(
                 return Err(err);
             }
         }
-        self.parse_macro_use_or_failure(attrs, macros_allowed, attributes_allowed, lo, visibility)
+        self.parse_macro_use_or_failure(attrs, macros_allowed, attributes_allowed, lo, vis)
     }
 
-    fn recover_first_param(&mut self) -> &'static str {
-        match self.parse_outer_attributes()
-            .and_then(|_| self.parse_self_param())
-            .map_err(|mut e| e.cancel())
-        {
-            Ok(Some(_)) => "method",
-            _ => "function",
-        }
+    pub(super) fn mk_item_with_info(
+        &self,
+        attrs: Vec<Attribute>,
+        lo: Span,
+        vis: Visibility,
+        info: ItemInfo,
+    ) -> PResult<'a, Option<P<Item>>> {
+        let (ident, item, extra_attrs) = info;
+        let span = lo.to(self.prev_span);
+        let attrs = maybe_append(attrs, extra_attrs);
+        Ok(Some(self.mk_item(span, ident, item, vis, attrs)))
     }
 
     /// This is the fall-through for parsing items.
@@ -623,7 +605,7 @@ fn parse_item_impl(&mut self, unsafety: Unsafety, defaultness: Defaultness)
         let ty_second = if self.token == token::DotDot {
             // We need to report this error after `cfg` expansion for compatibility reasons
             self.bump(); // `..`, do not add it to expected tokens
-            Some(DummyResult::raw_ty(self.prev_span, true))
+            Some(self.mk_ty(self.prev_span, TyKind::Err))
         } else if has_for || self.token.can_begin_type() {
             Some(self.parse_ty()?)
         } else {
@@ -710,9 +692,11 @@ pub fn parse_impl_item(&mut self, at_end: &mut bool) -> PResult<'a, ImplItem> {
         Ok(item)
     }
 
-    fn parse_impl_item_(&mut self,
-                        at_end: &mut bool,
-                        mut attrs: Vec<Attribute>) -> PResult<'a, ImplItem> {
+    fn parse_impl_item_(
+        &mut self,
+        at_end: &mut bool,
+        mut attrs: Vec<Attribute>,
+    ) -> PResult<'a, ImplItem> {
         let lo = self.token.span;
         let vis = self.parse_visibility(false)?;
         let defaultness = self.parse_defaultness();
@@ -724,18 +708,12 @@ fn parse_impl_item_(&mut self,
             };
             (name, kind, generics)
         } else if self.is_const_item() {
-            // This parses the grammar:
-            //     ImplItemConst = "const" Ident ":" Ty "=" Expr ";"
-            self.expect_keyword(kw::Const)?;
-            let name = self.parse_ident()?;
-            self.expect(&token::Colon)?;
-            let typ = self.parse_ty()?;
-            self.expect(&token::Eq)?;
-            let expr = self.parse_expr()?;
-            self.expect(&token::Semi)?;
-            (name, ast::ImplItemKind::Const(typ, expr), Generics::default())
+            self.parse_impl_const()?
+        } else if let Some(mac) = self.parse_assoc_macro_invoc("impl", Some(&vis), at_end)? {
+            // FIXME: code copied from `parse_macro_use_or_failure` -- use abstraction!
+            (Ident::invalid(), ast::ImplItemKind::Macro(mac), Generics::default())
         } else {
-            let (name, inner_attrs, generics, kind) = self.parse_impl_method(&vis, at_end)?;
+            let (name, inner_attrs, generics, kind) = self.parse_impl_method(at_end)?;
             attrs.extend(inner_attrs);
             (name, kind, generics)
         };
@@ -782,76 +760,29 @@ fn is_const_item(&self) -> bool {
             !self.is_keyword_ahead(1, &[kw::Fn, kw::Unsafe])
     }
 
-    /// Parses a method or a macro invocation in a trait impl.
-    fn parse_impl_method(
-        &mut self,
-        vis: &Visibility,
-        at_end: &mut bool
-    ) -> PResult<'a, (Ident, Vec<Attribute>, Generics, ast::ImplItemKind)> {
-        // FIXME: code copied from `parse_macro_use_or_failure` -- use abstraction!
-        if let Some(mac) = self.parse_assoc_macro_invoc("impl", Some(vis), at_end)? {
-            // method macro
-            Ok((Ident::invalid(), vec![], Generics::default(), ast::ImplItemKind::Macro(mac)))
-        } else {
-            let (ident, sig, generics) = self.parse_method_sig(|_| true)?;
-            *at_end = true;
-            let (inner_attrs, body) = self.parse_inner_attrs_and_block()?;
-            Ok((ident, inner_attrs, generics, ast::ImplItemKind::Method(sig, body)))
-        }
-    }
-
-    /// Parse the "signature", including the identifier, parameters, and generics
-    /// of a method. The body is not parsed as that differs between `trait`s and `impl`s.
-    fn parse_method_sig(
-        &mut self,
-        is_name_required: impl Copy + Fn(&token::Token) -> bool,
-    ) -> PResult<'a, (Ident, MethodSig, Generics)> {
-        let header = self.parse_fn_front_matter()?;
-        let (ident, mut generics) = self.parse_fn_header()?;
-        let decl = self.parse_fn_decl_with_self(is_name_required)?;
-        let sig = MethodSig { header, decl };
-        generics.where_clause = self.parse_where_clause()?;
-        Ok((ident, sig, generics))
+    /// This parses the grammar:
+    ///     ImplItemConst = "const" Ident ":" Ty "=" Expr ";"
+    fn parse_impl_const(&mut self) -> PResult<'a, (Ident, ImplItemKind, Generics)> {
+        self.expect_keyword(kw::Const)?;
+        let name = self.parse_ident()?;
+        self.expect(&token::Colon)?;
+        let typ = self.parse_ty()?;
+        self.expect(&token::Eq)?;
+        let expr = self.parse_expr()?;
+        self.expect(&token::Semi)?;
+        Ok((name, ImplItemKind::Const(typ, expr), Generics::default()))
     }
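The `ImplItemConst` grammar above corresponds to associated constants such as (illustrative):

    struct Foo;
    impl Foo {
        const N: u32 = 3;
    }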
 
-    /// Parses all the "front matter" for a `fn` declaration, up to
-    /// and including the `fn` keyword:
-    ///
-    /// - `const fn`
-    /// - `unsafe fn`
-    /// - `const unsafe fn`
-    /// - `extern fn`
-    /// - etc.
-    fn parse_fn_front_matter(&mut self) -> PResult<'a, FnHeader> {
-        let is_const_fn = self.eat_keyword(kw::Const);
-        let const_span = self.prev_span;
-        let asyncness = self.parse_asyncness();
-        if let IsAsync::Async { .. } = asyncness {
-            self.ban_async_in_2015(self.prev_span);
-        }
-        let asyncness = respan(self.prev_span, asyncness);
-        let unsafety = self.parse_unsafety();
-        let (constness, unsafety, abi) = if is_const_fn {
-            (respan(const_span, Constness::Const), unsafety, Abi::Rust)
+    /// Parses `auto? trait Foo { ... }` or `trait Foo = Bar;`.
+    fn parse_item_trait(&mut self, unsafety: Unsafety) -> PResult<'a, ItemInfo> {
+        // Parse optional `auto` prefix.
+        let is_auto = if self.eat_keyword(kw::Auto) {
+            IsAuto::Yes
         } else {
-            let abi = if self.eat_keyword(kw::Extern) {
-                self.parse_opt_abi()?.unwrap_or(Abi::C)
-            } else {
-                Abi::Rust
-            };
-            (respan(self.prev_span, Constness::NotConst), unsafety, abi)
+            IsAuto::No
         };
-        if !self.eat_keyword(kw::Fn) {
-            // It is possible for `expect_one_of` to recover given the contents of
-            // `self.expected_tokens`, therefore, do not use `self.unexpected()` which doesn't
-            // account for this.
-            if !self.expect_one_of(&[], &[])? { unreachable!() }
-        }
-        Ok(FnHeader { constness, unsafety, asyncness, abi })
-    }
 
-    /// Parses `trait Foo { ... }` or `trait Foo = Bar;`.
-    fn parse_item_trait(&mut self, is_auto: IsAuto, unsafety: Unsafety) -> PResult<'a, ItemInfo> {
+        self.expect_keyword(kw::Trait)?;
         let ident = self.parse_ident()?;
         let mut tps = self.parse_generics()?;
 
@@ -936,38 +867,22 @@ pub fn parse_trait_item(&mut self, at_end: &mut bool) -> PResult<'a, TraitItem>
         Ok(item)
     }
 
-    fn parse_trait_item_(&mut self,
-                         at_end: &mut bool,
-                         mut attrs: Vec<Attribute>) -> PResult<'a, TraitItem> {
+    fn parse_trait_item_(
+        &mut self,
+        at_end: &mut bool,
+        mut attrs: Vec<Attribute>,
+    ) -> PResult<'a, TraitItem> {
         let lo = self.token.span;
         self.eat_bad_pub();
         let (name, kind, generics) = if self.eat_keyword(kw::Type) {
             self.parse_trait_item_assoc_ty()?
         } else if self.is_const_item() {
-            self.expect_keyword(kw::Const)?;
-            let ident = self.parse_ident()?;
-            self.expect(&token::Colon)?;
-            let ty = self.parse_ty()?;
-            let default = if self.eat(&token::Eq) {
-                let expr = self.parse_expr()?;
-                self.expect(&token::Semi)?;
-                Some(expr)
-            } else {
-                self.expect(&token::Semi)?;
-                None
-            };
-            (ident, TraitItemKind::Const(ty, default), Generics::default())
+            self.parse_trait_item_const()?
         } else if let Some(mac) = self.parse_assoc_macro_invoc("trait", None, &mut false)? {
             // trait item macro.
-            (Ident::invalid(), ast::TraitItemKind::Macro(mac), Generics::default())
+            (Ident::invalid(), TraitItemKind::Macro(mac), Generics::default())
         } else {
-            // This is somewhat dubious; We don't want to allow
-            // argument names to be left off if there is a definition...
-            //
-            // We don't allow argument names to be left off in edition 2018.
-            let (ident, sig, generics) = self.parse_method_sig(|t| t.span.rust_2018())?;
-            let body = self.parse_trait_method_body(at_end, &mut attrs)?;
-            (ident, ast::TraitItemKind::Method(sig, body), generics)
+            self.parse_trait_item_method(at_end, &mut attrs)?
         };
 
         Ok(TraitItem {
@@ -981,48 +896,24 @@ fn parse_trait_item_(&mut self,
         })
     }
 
-    /// Parse the "body" of a method in a trait item definition.
-    /// This can either be `;` when there's no body,
-    /// or e.g. a block when the method is a provided one.
-    fn parse_trait_method_body(
-        &mut self,
-        at_end: &mut bool,
-        attrs: &mut Vec<Attribute>,
-    ) -> PResult<'a, Option<P<Block>>> {
-        Ok(match self.token.kind {
-            token::Semi => {
-                debug!("parse_trait_method_body(): parsing required method");
-                self.bump();
-                *at_end = true;
-                None
-            }
-            token::OpenDelim(token::Brace) => {
-                debug!("parse_trait_method_body(): parsing provided method");
-                *at_end = true;
-                let (inner_attrs, body) = self.parse_inner_attrs_and_block()?;
-                attrs.extend(inner_attrs.iter().cloned());
-                Some(body)
-            }
-            token::Interpolated(ref nt) => {
-                match **nt {
-                    token::NtBlock(..) => {
-                        *at_end = true;
-                        let (inner_attrs, body) = self.parse_inner_attrs_and_block()?;
-                        attrs.extend(inner_attrs.iter().cloned());
-                        Some(body)
-                    }
-                    _ => return self.expected_semi_or_open_brace(),
-                }
-            }
-            _ => return self.expected_semi_or_open_brace(),
-        })
+    fn parse_trait_item_const(&mut self) -> PResult<'a, (Ident, TraitItemKind, Generics)> {
+        self.expect_keyword(kw::Const)?;
+        let ident = self.parse_ident()?;
+        self.expect(&token::Colon)?;
+        let ty = self.parse_ty()?;
+        let default = if self.eat(&token::Eq) {
+            Some(self.parse_expr()?)
+        } else {
+            None
+        };
+        self.expect(&token::Semi)?;
+        Ok((ident, TraitItemKind::Const(ty, default), Generics::default()))
     }
 
     /// Parses the following grammar:
     ///
     ///     TraitItemAssocTy = Ident ["<"...">"] [":" [GenericBounds]] ["where" ...] ["=" Ty]
-    fn parse_trait_item_assoc_ty(&mut self)
-        -> PResult<'a, (Ident, TraitItemKind, Generics)> {
+    fn parse_trait_item_assoc_ty(&mut self) -> PResult<'a, (Ident, TraitItemKind, Generics)> {
         let ident = self.parse_ident()?;
         let mut generics = self.parse_generics()?;
 
@@ -1068,21 +959,13 @@ fn parse_use_tree(&mut self) -> PResult<'a, UseTree> {
                 );
             }
 
-            if self.eat(&token::BinOp(token::Star)) {
-                UseTreeKind::Glob
-            } else {
-                UseTreeKind::Nested(self.parse_use_tree_list()?)
-            }
+            self.parse_use_tree_glob_or_nested()?
         } else {
             // `use path::*;` or `use path::{...};` or `use path;` or `use path as bar;`
             prefix = self.parse_path(PathStyle::Mod)?;
 
             if self.eat(&token::ModSep) {
-                if self.eat(&token::BinOp(token::Star)) {
-                    UseTreeKind::Glob
-                } else {
-                    UseTreeKind::Nested(self.parse_use_tree_list()?)
-                }
+                self.parse_use_tree_glob_or_nested()?
             } else {
                 UseTreeKind::Simple(self.parse_rename()?, DUMMY_NODE_ID, DUMMY_NODE_ID)
             }
@@ -1091,6 +974,15 @@ fn parse_use_tree(&mut self) -> PResult<'a, UseTree> {
         Ok(UseTree { prefix, kind, span: lo.to(self.prev_span) })
     }
 
+    /// Parses `*` or `{...}`.
+    fn parse_use_tree_glob_or_nested(&mut self) -> PResult<'a, UseTreeKind> {
+        Ok(if self.eat(&token::BinOp(token::Star)) {
+            UseTreeKind::Glob
+        } else {
+            UseTreeKind::Nested(self.parse_use_tree_list()?)
+        })
+    }
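The two forms handled by `parse_use_tree_glob_or_nested` (illustrative paths):

    use foo::*;           // UseTreeKind::Glob
    use foo::{bar, baz};  // UseTreeKind::Nested(..)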
+
     /// Parses a `UseTreeKind::Nested(list)`.
     ///
     /// ```
@@ -1184,49 +1076,6 @@ fn parse_crate_name_with_dashes(&mut self) -> PResult<'a, ast::Ident> {
         Ok(ident)
     }
 
-    /// Parses an item-position function declaration.
-    fn parse_item_fn(
-        &mut self,
-        lo: Span,
-        vis: Visibility,
-        attrs: Vec<Attribute>,
-        header: FnHeader,
-    ) -> PResult<'a, Option<P<Item>>> {
-        let allow_c_variadic = header.abi == Abi::C && header.unsafety == Unsafety::Unsafe;
-        let (ident, decl, generics) = self.parse_fn_sig(allow_c_variadic)?;
-        let (inner_attrs, body) = self.parse_inner_attrs_and_block()?;
-        let span = lo.to(self.prev_span);
-        let kind = ItemKind::Fn(decl, header, generics, body);
-        let attrs = maybe_append(attrs, Some(inner_attrs));
-        Ok(Some(self.mk_item(span, ident, kind, vis, attrs)))
-    }
-
-    /// Parse the "signature", including the identifier, parameters, and generics of a function.
-    fn parse_fn_sig(
-        &mut self,
-        allow_c_variadic: bool,
-    ) -> PResult<'a, (Ident, P<FnDecl>, Generics)> {
-        let (ident, mut generics) = self.parse_fn_header()?;
-        let decl = self.parse_fn_decl(allow_c_variadic)?;
-        generics.where_clause = self.parse_where_clause()?;
-        Ok((ident, decl, generics))
-    }
-
-    /// Parses the name and optional generic types of a function header.
-    fn parse_fn_header(&mut self) -> PResult<'a, (Ident, Generics)> {
-        let id = self.parse_ident()?;
-        let generics = self.parse_generics()?;
-        Ok((id, generics))
-    }
-
-    /// Parses the parameter list and result type of a function declaration.
-    fn parse_fn_decl(&mut self, allow_c_variadic: bool) -> PResult<'a, P<FnDecl>> {
-        Ok(P(FnDecl {
-            inputs: self.parse_fn_params(true, allow_c_variadic)?,
-            output: self.parse_ret_ty(true)?,
-        }))
-    }
-
     /// Parses `extern` for foreign ABIs modules.
     ///
     /// `extern` is expected to have been
@@ -1267,7 +1116,7 @@ fn parse_item_foreign_mod(
     }
 
     /// Parses a foreign item.
-    crate fn parse_foreign_item(&mut self, extern_sp: Span) -> PResult<'a, ForeignItem> {
+    pub fn parse_foreign_item(&mut self, extern_sp: Span) -> PResult<'a, ForeignItem> {
         maybe_whole!(self, NtForeignItem, |ni| ni);
 
         let attrs = self.parse_outer_attributes()?;
@@ -1278,14 +1127,30 @@ fn parse_item_foreign_mod(
         // Treat `const` as `static` for error recovery, but don't add it to expected tokens.
         if self.check_keyword(kw::Static) || self.token.is_keyword(kw::Const) {
             if self.token.is_keyword(kw::Const) {
-                self.diagnostic()
-                    .struct_span_err(self.token.span, "extern items cannot be `const`")
-                    .span_suggestion(
+                let mut err = self
+                    .struct_span_err(self.token.span, "extern items cannot be `const`");
+
+
+                // The user wrote `const fn`.
+                if self.is_keyword_ahead(1, &[kw::Fn, kw::Unsafe]) {
+                    err.emit();
+                    // Consume `const`
+                    self.bump();
+                    // Consume `unsafe` if present, since `extern` blocks
+                    // don't allow it. This will leave behind a plain 'fn'
+                    self.eat_keyword(kw::Unsafe);
+                    // Treat `const fn` as a plain `fn` for error recovery purposes.
+                    // We've already emitted an error, so compilation is guaranteed
+                    // to fail.
+                    return Ok(self.parse_item_foreign_fn(visibility, lo, attrs, extern_sp)?);
+                }
+                err.span_suggestion(
                         self.token.span,
                         "try using a static value",
                         "static".to_owned(),
                         Applicability::MachineApplicable
-                    ).emit();
+                );
+                err.emit();
             }
             self.bump(); // `static` or `const`
             return Ok(self.parse_item_foreign_static(visibility, lo, attrs)?);
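With the recovery added above, an extern block containing `const` items degrades gracefully; hypothetical input:

    extern "C" {
        const X: u32;   // error: extern items cannot be `const`; suggestion: use `static`
        const fn f();   // same error, then re-parsed as a plain `fn` so parsing can continue
    }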
@@ -1322,28 +1187,6 @@ fn parse_item_foreign_mod(
         }
     }
 
-    /// Parses a function declaration from a foreign module.
-    fn parse_item_foreign_fn(
-        &mut self,
-        vis: ast::Visibility,
-        lo: Span,
-        attrs: Vec<Attribute>,
-        extern_sp: Span,
-    ) -> PResult<'a, ForeignItem> {
-        self.expect_keyword(kw::Fn)?;
-        let (ident, decl, generics) = self.parse_fn_sig(true)?;
-        let span = lo.to(self.token.span);
-        self.parse_semi_or_incorrect_foreign_fn_body(&ident, extern_sp)?;
-        Ok(ast::ForeignItem {
-            ident,
-            attrs,
-            kind: ForeignItemKind::Fn(decl, generics),
-            id: DUMMY_NODE_ID,
-            span,
-            vis,
-        })
-    }
-
     /// Parses a static item from a foreign module.
     /// Assumes that the `static` keyword is already parsed.
     fn parse_item_foreign_static(&mut self, vis: ast::Visibility, lo: Span, attrs: Vec<Attribute>)
@@ -1884,3 +1727,466 @@ fn mk_item(&self, span: Span, ident: Ident, kind: ItemKind, vis: Visibility,
         })
     }
 }
+
+/// The parsing configuration used to parse a parameter list (see `parse_fn_params`).
+pub(super) struct ParamCfg {
+    /// Is `self` allowed as the first parameter?
+    pub is_self_allowed: bool,
+    /// Is `...` allowed as the tail of the parameter list?
+    pub allow_c_variadic: bool,
+    /// `is_name_required` decides, per parameter,
+    /// whether it must have a pattern or just a type.
+    pub is_name_required: fn(&token::Token) -> bool,
+}
+
+/// Parsing of functions and methods.
+impl<'a> Parser<'a> {
+    /// Parses an item-position function declaration.
+    fn parse_item_fn(
+        &mut self,
+        lo: Span,
+        vis: Visibility,
+        attrs: Vec<Attribute>,
+        header: FnHeader,
+    ) -> PResult<'a, Option<P<Item>>> {
+        let (ident, decl, generics) = self.parse_fn_sig(ParamCfg {
+            is_self_allowed: false,
+            allow_c_variadic: header.abi == Abi::C && header.unsafety == Unsafety::Unsafe,
+            is_name_required: |_| true,
+        })?;
+        let (inner_attrs, body) = self.parse_inner_attrs_and_block()?;
+        let kind = ItemKind::Fn(decl, header, generics, body);
+        self.mk_item_with_info(attrs, lo, vis, (ident, kind, Some(inner_attrs)))
+    }
+
+    /// Parses a function declaration from a foreign module.
+    fn parse_item_foreign_fn(
+        &mut self,
+        vis: ast::Visibility,
+        lo: Span,
+        attrs: Vec<Attribute>,
+        extern_sp: Span,
+    ) -> PResult<'a, ForeignItem> {
+        self.expect_keyword(kw::Fn)?;
+        let (ident, decl, generics) = self.parse_fn_sig(ParamCfg {
+            is_self_allowed: false,
+            allow_c_variadic: true,
+            is_name_required: |_| true,
+        })?;
+        let span = lo.to(self.token.span);
+        self.parse_semi_or_incorrect_foreign_fn_body(&ident, extern_sp)?;
+        Ok(ast::ForeignItem {
+            ident,
+            attrs,
+            kind: ForeignItemKind::Fn(decl, generics),
+            id: DUMMY_NODE_ID,
+            span,
+            vis,
+        })
+    }
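
A minimal standalone example of the surface syntax this path accepts (named parameters, with `...` allowed in tail position); it assumes the platform links the C runtime's `printf`:

extern "C" {
    // Each parameter before `...` is named, as required by this parser path.
    fn printf(format: *const u8, ...) -> i32;
}

fn main() {
    unsafe {
        printf(b"one plus one is %d\n\0".as_ptr(), 1 + 1);
    }
}
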
+
+    /// Parses a method or a macro invocation in a trait impl.
+    fn parse_impl_method(
+        &mut self,
+        at_end: &mut bool,
+    ) -> PResult<'a, (Ident, Vec<Attribute>, Generics, ImplItemKind)> {
+        let (ident, sig, generics) = self.parse_method_sig(|_| true)?;
+        *at_end = true;
+        let (inner_attrs, body) = self.parse_inner_attrs_and_block()?;
+        Ok((ident, inner_attrs, generics, ast::ImplItemKind::Method(sig, body)))
+    }
+
+    fn parse_trait_item_method(
+        &mut self,
+        at_end: &mut bool,
+        attrs: &mut Vec<Attribute>,
+    ) -> PResult<'a, (Ident, TraitItemKind, Generics)> {
+        // This is somewhat dubious; we don't want to allow
+        // argument names to be left off if there is a definition...
+        //
+        // We don't allow argument names to be left off in edition 2018.
+        let (ident, sig, generics) = self.parse_method_sig(|t| t.span.rust_2018())?;
+        let body = self.parse_trait_method_body(at_end, attrs)?;
+        Ok((ident, TraitItemKind::Method(sig, body), generics))
+    }
+
+    /// Parse the "body" of a method in a trait item definition.
+    /// This can either be `;` when there's no body,
+    /// or e.g. a block when the method is a provided one.
+    fn parse_trait_method_body(
+        &mut self,
+        at_end: &mut bool,
+        attrs: &mut Vec<Attribute>,
+    ) -> PResult<'a, Option<P<Block>>> {
+        Ok(match self.token.kind {
+            token::Semi => {
+                debug!("parse_trait_method_body(): parsing required method");
+                self.bump();
+                *at_end = true;
+                None
+            }
+            token::OpenDelim(token::Brace) => {
+                debug!("parse_trait_method_body(): parsing provided method");
+                *at_end = true;
+                let (inner_attrs, body) = self.parse_inner_attrs_and_block()?;
+                attrs.extend(inner_attrs.iter().cloned());
+                Some(body)
+            }
+            token::Interpolated(ref nt) => {
+                match **nt {
+                    token::NtBlock(..) => {
+                        *at_end = true;
+                        let (inner_attrs, body) = self.parse_inner_attrs_and_block()?;
+                        attrs.extend(inner_attrs.iter().cloned());
+                        Some(body)
+                    }
+                    _ => return self.expected_semi_or_open_brace(),
+                }
+            }
+            _ => return self.expected_semi_or_open_brace(),
+        })
+    }
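
A minimal standalone sketch of the two body shapes distinguished here: a required method ends in `;`, a provided method has a block:

trait Greet {
    fn name(&self) -> String;          // required: the "body" is just `;`
    fn greet(&self) -> String {        // provided: the body is a block
        format!("hello, {}", self.name())
    }
}

struct World;

impl Greet for World {
    fn name(&self) -> String {
        "world".to_string()
    }
}

fn main() {
    println!("{}", World.greet());
}
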
+
+    /// Parse the "signature", including the identifier, parameters, and generics
+    /// of a method. The body is not parsed as that differs between `trait`s and `impl`s.
+    fn parse_method_sig(
+        &mut self,
+        is_name_required: fn(&token::Token) -> bool,
+    ) -> PResult<'a, (Ident, MethodSig, Generics)> {
+        let header = self.parse_fn_front_matter()?;
+        let (ident, decl, generics) = self.parse_fn_sig(ParamCfg {
+            is_self_allowed: true,
+            allow_c_variadic: false,
+            is_name_required,
+        })?;
+        Ok((ident, MethodSig { header, decl }, generics))
+    }
+
+    /// Parses all the "front matter" for a `fn` declaration, up to
+    /// and including the `fn` keyword:
+    ///
+    /// - `const fn`
+    /// - `unsafe fn`
+    /// - `const unsafe fn`
+    /// - `extern fn`
+    /// - etc.
+    fn parse_fn_front_matter(&mut self) -> PResult<'a, FnHeader> {
+        let is_const_fn = self.eat_keyword(kw::Const);
+        let const_span = self.prev_span;
+        let asyncness = self.parse_asyncness();
+        if let IsAsync::Async { .. } = asyncness {
+            self.ban_async_in_2015(self.prev_span);
+        }
+        let asyncness = respan(self.prev_span, asyncness);
+        let unsafety = self.parse_unsafety();
+        let (constness, unsafety, abi) = if is_const_fn {
+            (respan(const_span, Constness::Const), unsafety, Abi::Rust)
+        } else {
+            let abi = self.parse_extern_abi()?;
+            (respan(self.prev_span, Constness::NotConst), unsafety, abi)
+        };
+        if !self.eat_keyword(kw::Fn) {
+            // It is possible for `expect_one_of` to recover given the contents of
+            // `self.expected_tokens`; therefore, do not use `self.unexpected()`, which doesn't
+            // account for this.
+            if !self.expect_one_of(&[], &[])? { unreachable!() }
+        }
+        Ok(FnHeader { constness, unsafety, asyncness, abi })
+    }
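
A minimal standalone illustration of front-matter qualifier combinations (the function bodies are arbitrary):

const fn square(x: u32) -> u32 { x * x }          // `const fn`

unsafe fn read_raw(p: *const u32) -> u32 { *p }   // `unsafe fn`

extern "C" fn callback(x: i32) -> i32 { x + 1 }   // `extern fn`

fn main() {
    let v = 7u32;
    println!("{} {} {}", square(3), unsafe { read_raw(&v) }, callback(41));
}
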
+
+    /// Parse the "signature", including the identifier, parameters, and generics of a function.
+    fn parse_fn_sig(&mut self, cfg: ParamCfg) -> PResult<'a, (Ident, P<FnDecl>, Generics)> {
+        let ident = self.parse_ident()?;
+        let mut generics = self.parse_generics()?;
+        let decl = self.parse_fn_decl(cfg, true)?;
+        generics.where_clause = self.parse_where_clause()?;
+        Ok((ident, decl, generics))
+    }
+
+    /// Parses the parameter list and result type of a function declaration.
+    pub(super) fn parse_fn_decl(
+        &mut self,
+        cfg: ParamCfg,
+        ret_allow_plus: bool,
+    ) -> PResult<'a, P<FnDecl>> {
+        Ok(P(FnDecl {
+            inputs: self.parse_fn_params(cfg)?,
+            output: self.parse_ret_ty(ret_allow_plus)?,
+        }))
+    }
+
+    /// Parses the parameter list of a function, including the `(` and `)` delimiters.
+    fn parse_fn_params(&mut self, mut cfg: ParamCfg) -> PResult<'a, Vec<Param>> {
+        let sp = self.token.span;
+        let is_trait_item = cfg.is_self_allowed;
+        let mut c_variadic = false;
+        // Parse the arguments, starting out with `self` being possibly allowed...
+        let (params, _) = self.parse_paren_comma_seq(|p| {
+            let param = p.parse_param_general(&cfg, is_trait_item);
+            // ...now that we've parsed the first argument, `self` is no longer allowed.
+            cfg.is_self_allowed = false;
+
+            match param {
+                Ok(param) => Ok(
+                    if let TyKind::CVarArgs = param.ty.kind {
+                        c_variadic = true;
+                        if p.token != token::CloseDelim(token::Paren) {
+                            p.span_err(
+                                p.token.span,
+                                "`...` must be the last argument of a C-variadic function",
+                            );
+                            // FIXME(eddyb) this should probably still push `CVarArgs`.
+                            // Maybe AST validation/HIR lowering should emit the above error?
+                            None
+                        } else {
+                            Some(param)
+                        }
+                    } else {
+                        Some(param)
+                    }
+                ),
+                Err(mut e) => {
+                    e.emit();
+                    let lo = p.prev_span;
+                    // Skip every token until next possible arg or end.
+                    p.eat_to_tokens(&[&token::Comma, &token::CloseDelim(token::Paren)]);
+                    // Create a placeholder argument for proper arg count (issue #34264).
+                    let span = lo.to(p.prev_span);
+                    Ok(Some(dummy_arg(Ident::new(kw::Invalid, span))))
+                }
+            }
+        })?;
+
+        let mut params: Vec<_> = params.into_iter().filter_map(|x| x).collect();
+
+        // Replace duplicated recovered params with `_` pattern to avoid unnecessary errors.
+        self.deduplicate_recovered_params_names(&mut params);
+
+        if c_variadic && params.len() <= 1 {
+            self.span_err(
+                sp,
+                "C-variadic function must be declared with at least one named argument",
+            );
+        }
+
+        Ok(params)
+    }
+
+    /// Skips unexpected attributes and doc comments in this position and emits an appropriate
+    /// error.
+    /// This version of parameter parsing doesn't necessarily require identifier names.
+    fn parse_param_general(&mut self, cfg: &ParamCfg, is_trait_item: bool) -> PResult<'a, Param> {
+        let lo = self.token.span;
+        let attrs = self.parse_outer_attributes()?;
+
+        // Possibly parse `self`. Recover if we parsed it and it wasn't allowed here.
+        if let Some(mut param) = self.parse_self_param()? {
+            param.attrs = attrs.into();
+            return if cfg.is_self_allowed {
+                Ok(param)
+            } else {
+                self.recover_bad_self_param(param, is_trait_item)
+            };
+        }
+
+        let is_name_required = match self.token.kind {
+            token::DotDotDot => false,
+            _ => (cfg.is_name_required)(&self.token),
+        };
+        let (pat, ty) = if is_name_required || self.is_named_param() {
+            debug!("parse_param_general parse_pat (is_name_required:{})", is_name_required);
+
+            let pat = self.parse_fn_param_pat()?;
+            if let Err(mut err) = self.expect(&token::Colon) {
+                return if let Some(ident) = self.parameter_without_type(
+                    &mut err,
+                    pat,
+                    is_name_required,
+                    cfg.is_self_allowed,
+                    is_trait_item,
+                ) {
+                    err.emit();
+                    Ok(dummy_arg(ident))
+                } else {
+                    Err(err)
+                };
+            }
+
+            self.eat_incorrect_doc_comment_for_param_type();
+            (pat, self.parse_ty_common(true, true, cfg.allow_c_variadic)?)
+        } else {
+            debug!("parse_param_general ident_to_pat");
+            let parser_snapshot_before_ty = self.clone();
+            self.eat_incorrect_doc_comment_for_param_type();
+            let mut ty = self.parse_ty_common(true, true, cfg.allow_c_variadic);
+            if ty.is_ok() && self.token != token::Comma &&
+               self.token != token::CloseDelim(token::Paren) {
+                // This wasn't actually a type, but a pattern looking like a type,
+                // so we are going to rollback and re-parse for recovery.
+                ty = self.unexpected();
+            }
+            match ty {
+                Ok(ty) => {
+                    let ident = Ident::new(kw::Invalid, self.prev_span);
+                    let bm = BindingMode::ByValue(Mutability::Immutable);
+                    let pat = self.mk_pat_ident(ty.span, bm, ident);
+                    (pat, ty)
+                }
+                // If this is a C-variadic argument and we hit an error, return the error.
+                Err(err) if self.token == token::DotDotDot => return Err(err),
+                // Recover from attempting to parse the argument as a type without pattern.
+                Err(mut err) => {
+                    err.cancel();
+                    mem::replace(self, parser_snapshot_before_ty);
+                    self.recover_arg_parse()?
+                }
+            }
+        };
+
+        let span = lo.to(self.token.span);
+
+        Ok(Param {
+            attrs: attrs.into(),
+            id: ast::DUMMY_NODE_ID,
+            is_placeholder: false,
+            pat,
+            span,
+            ty,
+        })
+    }
+
+    /// Returns the parsed optional `self` parameter, if any.
+    ///
+    /// See `parse_self_param_with_attrs` to collect attributes.
+    fn parse_self_param(&mut self) -> PResult<'a, Option<Param>> {
+        // Extract an identifier *after* having confirmed that the token is one.
+        let expect_self_ident = |this: &mut Self| {
+            match this.token.kind {
+                // Preserve hygienic context.
+                token::Ident(name, _) => {
+                    let span = this.token.span;
+                    this.bump();
+                    Ident::new(name, span)
+                }
+                _ => unreachable!(),
+            }
+        };
+        // Is `self` `n` tokens ahead?
+        let is_isolated_self = |this: &Self, n| {
+            this.is_keyword_ahead(n, &[kw::SelfLower])
+            && this.look_ahead(n + 1, |t| t != &token::ModSep)
+        };
+        // Is `mut self` `n` tokens ahead?
+        let is_isolated_mut_self = |this: &Self, n| {
+            this.is_keyword_ahead(n, &[kw::Mut])
+            && is_isolated_self(this, n + 1)
+        };
+        // Parse `self` or `self: TYPE`. We already know the current token is `self`.
+        let parse_self_possibly_typed = |this: &mut Self, m| {
+            let eself_ident = expect_self_ident(this);
+            let eself_hi = this.prev_span;
+            let eself = if this.eat(&token::Colon) {
+                SelfKind::Explicit(this.parse_ty()?, m)
+            } else {
+                SelfKind::Value(m)
+            };
+            Ok((eself, eself_ident, eself_hi))
+        };
+        // Recover for the grammar `*self`, `*const self`, and `*mut self`.
+        let recover_self_ptr = |this: &mut Self| {
+            let msg = "cannot pass `self` by raw pointer";
+            let span = this.token.span;
+            this.struct_span_err(span, msg)
+                .span_label(span, msg)
+                .emit();
+
+            Ok((SelfKind::Value(Mutability::Immutable), expect_self_ident(this), this.prev_span))
+        };
+
+        // Parse optional `self` parameter of a method.
+        // Only a limited set of initial token sequences is considered `self` parameters; anything
+        // else is parsed as a normal function parameter list, so some lookahead is required.
+        let eself_lo = self.token.span;
+        let (eself, eself_ident, eself_hi) = match self.token.kind {
+            token::BinOp(token::And) => {
+                let eself = if is_isolated_self(self, 1) {
+                    // `&self`
+                    self.bump();
+                    SelfKind::Region(None, Mutability::Immutable)
+                } else if is_isolated_mut_self(self, 1) {
+                    // `&mut self`
+                    self.bump();
+                    self.bump();
+                    SelfKind::Region(None, Mutability::Mutable)
+                } else if self.look_ahead(1, |t| t.is_lifetime()) && is_isolated_self(self, 2) {
+                    // `&'lt self`
+                    self.bump();
+                    let lt = self.expect_lifetime();
+                    SelfKind::Region(Some(lt), Mutability::Immutable)
+                } else if self.look_ahead(1, |t| t.is_lifetime()) && is_isolated_mut_self(self, 2) {
+                    // `&'lt mut self`
+                    self.bump();
+                    let lt = self.expect_lifetime();
+                    self.bump();
+                    SelfKind::Region(Some(lt), Mutability::Mutable)
+                } else {
+                    // `&not_self`
+                    return Ok(None);
+                };
+                (eself, expect_self_ident(self), self.prev_span)
+            }
+            // `*self`
+            token::BinOp(token::Star) if is_isolated_self(self, 1) => {
+                self.bump();
+                recover_self_ptr(self)?
+            }
+            // `*mut self` and `*const self`
+            token::BinOp(token::Star) if
+                self.look_ahead(1, |t| t.is_mutability())
+                && is_isolated_self(self, 2) =>
+            {
+                self.bump();
+                self.bump();
+                recover_self_ptr(self)?
+            }
+            // `self` and `self: TYPE`
+            token::Ident(..) if is_isolated_self(self, 0) => {
+                parse_self_possibly_typed(self, Mutability::Immutable)?
+            }
+            // `mut self` and `mut self: TYPE`
+            token::Ident(..) if is_isolated_mut_self(self, 0) => {
+                self.bump();
+                parse_self_possibly_typed(self, Mutability::Mutable)?
+            }
+            _ => return Ok(None),
+        };
+
+        let eself = source_map::respan(eself_lo.to(eself_hi), eself);
+        Ok(Some(Param::from_self(ThinVec::default(), eself, eself_ident)))
+    }
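
A minimal standalone sketch of the receiver shapes recognized above, written as ordinary methods (the `&'lt self` forms are omitted for brevity):

struct Counter { n: u32 }

impl Counter {
    fn get(&self) -> u32 { self.n }                  // `&self`
    fn bump(&mut self) { self.n += 1; }              // `&mut self`
    fn reset(mut self) -> Self { self.n = 0; self }  // `mut self`
    fn into_inner(self) -> u32 { self.n }            // `self` by value
    fn boxed_get(self: Box<Self>) -> u32 { self.n }  // `self: TYPE`
}

fn main() {
    let mut c = Counter { n: 1 };
    c.bump();
    println!("{}", c.get());
    println!("{}", Counter { n: 3 }.reset().into_inner());
    println!("{}", Box::new(Counter { n: 5 }).boxed_get());
}
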
+
+    fn is_named_param(&self) -> bool {
+        let offset = match self.token.kind {
+            token::Interpolated(ref nt) => match **nt {
+                token::NtPat(..) => return self.look_ahead(1, |t| t == &token::Colon),
+                _ => 0,
+            }
+            token::BinOp(token::And) | token::AndAnd => 1,
+            _ if self.token.is_keyword(kw::Mut) => 1,
+            _ => 0,
+        };
+
+        self.look_ahead(offset, |t| t.is_ident()) &&
+        self.look_ahead(offset + 1, |t| t == &token::Colon)
+    }
+
+    fn recover_first_param(&mut self) -> &'static str {
+        match self.parse_outer_attributes()
+            .and_then(|_| self.parse_self_param())
+            .map_err(|mut e| e.cancel())
+        {
+            Ok(Some(_)) => "method",
+            _ => "function",
+        }
+    }
+}
index 2d2fb487d7df21c2b787ce7237fc0e76d2bc0929..a0e4d2bbb7ac6efd6f3197d6bb84e67608aabc88 100644 (file)
@@ -1,24 +1,24 @@
 use super::{Parser, PResult};
 use super::item::ItemInfo;
+use super::diagnostics::Error;
 
 use crate::attr;
 use crate::ast::{self, Ident, Attribute, ItemKind, Mod, Crate};
 use crate::parse::{new_sub_parser_from_file, DirectoryOwnership};
 use crate::parse::token::{self, TokenKind};
-use crate::parse::diagnostics::{Error};
 use crate::source_map::{SourceMap, Span, DUMMY_SP, FileName};
 use crate::symbol::sym;
 
 use std::path::{self, Path, PathBuf};
 
 /// Information about the path to a module.
-pub struct ModulePath {
+pub(super) struct ModulePath {
     name: String,
     path_exists: bool,
     pub result: Result<ModulePathSuccess, Error>,
 }
 
-pub struct ModulePathSuccess {
+pub(super) struct ModulePathSuccess {
     pub path: PathBuf,
     pub directory_ownership: DirectoryOwnership,
     warn: bool,
@@ -39,6 +39,8 @@ pub fn parse_crate_mod(&mut self) -> PResult<'a, Crate> {
     /// Parses a `mod <foo> { ... }` or `mod <foo>;` item.
     pub(super) fn parse_item_mod(&mut self, outer_attrs: &[Attribute]) -> PResult<'a, ItemInfo> {
         let (in_cfg, outer_attrs) = {
+            // FIXME(Centril): This results in a cycle between config and parsing.
+            // Consider using dynamic dispatch via `self.sess` to disentangle the knot.
             let mut strip_unconfigured = crate::config::StripUnconfigured {
                 sess: self.sess,
                 features: None, // Don't perform gated feature checking.
@@ -198,7 +200,7 @@ fn submod_path(
         }
     }
 
-    pub fn submod_path_from_attr(attrs: &[Attribute], dir_path: &Path) -> Option<PathBuf> {
+    pub(super) fn submod_path_from_attr(attrs: &[Attribute], dir_path: &Path) -> Option<PathBuf> {
         if let Some(s) = attr::first_attr_value_str_by_name(attrs, sym::path) {
             let s = s.as_str();
 
@@ -215,7 +217,7 @@ pub fn submod_path_from_attr(attrs: &[Attribute], dir_path: &Path) -> Option<Pat
     }
 
     /// Returns a path to a module.
-    pub fn default_submod_path(
+    pub(super) fn default_submod_path(
         id: ast::Ident,
         relative: Option<ast::Ident>,
         dir_path: &Path,
index 7eb2a73a11a82c28664129ec9e465ef1a5cb13ee..af795e51792ff26f4ffc6fd41ee837c3e2210d94 100644 (file)
@@ -4,7 +4,7 @@
 use crate::ptr::P;
 use crate::ast::{self, Attribute, Pat, PatKind, FieldPat, RangeEnd, RangeSyntax, Mac};
 use crate::ast::{BindingMode, Ident, Mutability, Path, QSelf, Expr, ExprKind};
-use crate::mut_visit::{noop_visit_pat, MutVisitor};
+use crate::mut_visit::{noop_visit_pat, noop_visit_mac, MutVisitor};
 use crate::parse::token::{self};
 use crate::print::pprust;
 use crate::source_map::{respan, Span, Spanned};
@@ -22,7 +22,7 @@
 
 /// Whether or not an or-pattern should be gated when occurring in the current context.
 #[derive(PartialEq)]
-pub enum GateOr { Yes, No }
+pub(super) enum GateOr { Yes, No }
 
 /// Whether or not to recover a `,` when parsing or-patterns.
 #[derive(PartialEq, Copy, Clone)]
@@ -367,6 +367,7 @@ fn parse_pat_with_range_pat(
 
         let pat = self.mk_pat(lo.to(self.prev_span), pat);
         let pat = self.maybe_recover_from_bad_qpath(pat, true)?;
+        let pat = self.recover_intersection_pat(pat)?;
 
         if !allow_range_pat {
             self.ban_pat_range_if_ambiguous(&pat)?
@@ -375,6 +376,65 @@ fn parse_pat_with_range_pat(
         Ok(pat)
     }
 
+    /// Try to recover the more general form `intersect ::= $pat_lhs @ $pat_rhs`.
+    ///
+    /// Allowed binding patterns generated by `binding ::= ref? mut? $ident @ $pat_rhs`
+    /// should already have been parsed by now; if the next token is `@`,
+    /// then we can try to parse the more general form.
+    ///
+    /// Consult `parse_pat_ident` for the `binding` grammar.
+    ///
+    /// The notion of intersection patterns is found in
+    /// e.g. [F#][and] where they are called AND-patterns.
+    ///
+    /// [and]: https://docs.microsoft.com/en-us/dotnet/fsharp/language-reference/pattern-matching
+    fn recover_intersection_pat(&mut self, lhs: P<Pat>) -> PResult<'a, P<Pat>> {
+        if self.token.kind != token::At {
+            // Next token is not `@` so it's not going to be an intersection pattern.
+            return Ok(lhs);
+        }
+
+        // At this point we attempt to parse `@ $pat_rhs` and emit an error.
+        self.bump(); // `@`
+        let mut rhs = self.parse_pat(None)?;
+        let sp = lhs.span.to(rhs.span);
+
+        if let PatKind::Ident(_, _, ref mut sub @ None) = rhs.kind {
+            // The user inverted the order, so help them fix that.
+            let mut applicability = Applicability::MachineApplicable;
+            lhs.walk(&mut |p| match p.kind {
+                // `check_match` is unhappy if the subpattern has a binding anywhere.
+                PatKind::Ident(..) => {
+                    applicability = Applicability::MaybeIncorrect;
+                    false // Short-circuit.
+                },
+                _ => true,
+            });
+
+            let lhs_span = lhs.span;
+            // Move the LHS into the RHS as a subpattern.
+            // The RHS is now the full pattern.
+            *sub = Some(lhs);
+
+            self.struct_span_err(sp, "pattern on wrong side of `@`")
+                .span_label(lhs_span, "pattern on the left, should be on the right")
+                .span_label(rhs.span, "binding on the right, should be on the left")
+                .span_suggestion(sp, "switch the order", pprust::pat_to_string(&rhs), applicability)
+                .emit();
+        } else {
+            // The special case above doesn't apply so we may have e.g. `A(x) @ B(y)`.
+            rhs.kind = PatKind::Wild;
+            self.struct_span_err(sp, "left-hand side of `@` must be a binding")
+                .span_label(lhs.span, "interpreted as a pattern, not a binding")
+                .span_label(rhs.span, "also a pattern")
+                .note("bindings are `x`, `mut x`, `ref x`, and `ref mut x`")
+                .emit();
+        }
+
+        rhs.span = sp;
+        Ok(rhs)
+    }
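
For contrast, a minimal standalone example of the accepted `binding @ pattern` order that this recovery steers toward:

fn classify(n: i32) -> String {
    match n {
        x @ 0..=9 => format!("small: {}", x), // binding on the left, pattern on the right
        other => format!("large: {}", other),
    }
}

fn main() {
    println!("{}", classify(4));
    println!("{}", classify(40));
}
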
+
     /// Ban a range pattern if it has an ambiguous interpretation.
     fn ban_pat_range_if_ambiguous(&self, pat: &Pat) -> PResult<'a, ()> {
         match pat.kind {
@@ -481,6 +541,10 @@ fn recover_mut_ref_ident(&mut self, lo: Span) -> PResult<'a, PatKind> {
     fn make_all_value_bindings_mutable(pat: &mut P<Pat>) -> bool {
         struct AddMut(bool);
         impl MutVisitor for AddMut {
+            fn visit_mac(&mut self, mac: &mut Mac) {
+                noop_visit_mac(mac, self);
+            }
+
             fn visit_pat(&mut self, pat: &mut P<Pat>) {
                 if let PatKind::Ident(BindingMode::ByValue(ref mut m @ Mutability::Immutable), ..)
                     = pat.kind
index ca823991a2e5f25d6b4291b7d857e10e6dfa0c5c..639d61a2b5cd4a90b27eb5092af4521b2b92c1cf 100644 (file)
@@ -111,7 +111,7 @@ pub fn parse_path(&mut self, style: PathStyle) -> PResult<'a, Path> {
     /// Like `parse_path`, but also supports parsing `Word` meta items into paths for
     /// backwards-compatibility. This is used when parsing derive macro paths in `#[derive]`
     /// attributes.
-    pub fn parse_path_allowing_meta(&mut self, style: PathStyle) -> PResult<'a, Path> {
+    fn parse_path_allowing_meta(&mut self, style: PathStyle) -> PResult<'a, Path> {
         let meta_ident = match self.token.kind {
             token::Interpolated(ref nt) => match **nt {
                 token::NtMeta(ref item) => match item.tokens.is_empty() {
@@ -129,7 +129,22 @@ pub fn parse_path_allowing_meta(&mut self, style: PathStyle) -> PResult<'a, Path
         self.parse_path(style)
     }
 
-    crate fn parse_path_segments(
+    /// Parse a list of paths inside `#[derive(path_0, ..., path_n)]`.
+    crate fn parse_derive_paths(&mut self) -> PResult<'a, Vec<Path>> {
+        self.expect(&token::OpenDelim(token::Paren))?;
+        let mut list = Vec::new();
+        while !self.eat(&token::CloseDelim(token::Paren)) {
+            let path = self.parse_path_allowing_meta(PathStyle::Mod)?;
+            list.push(path);
+            if !self.eat(&token::Comma) {
+                self.expect(&token::CloseDelim(token::Paren))?;
+                break
+            }
+        }
+        Ok(list)
+    }
+
+    pub(super) fn parse_path_segments(
         &mut self,
         segments: &mut Vec<PathSegment>,
         style: PathStyle,
index 855b03ddd6f6b999580de98ce561dd6b8178c770..d54d9c4b8e9faaff1e534e2d69720dd347404f23 100644 (file)
@@ -2,14 +2,13 @@
 use super::expr::LhsExpr;
 use super::path::PathStyle;
 use super::pat::GateOr;
+use super::diagnostics::Error;
 
 use crate::ptr::P;
 use crate::{maybe_whole, ThinVec};
 use crate::ast::{self, DUMMY_NODE_ID, Stmt, StmtKind, Local, Block, BlockCheckMode, Expr, ExprKind};
 use crate::ast::{Attribute, AttrStyle, VisibilityKind, MacStmtStyle, Mac, MacDelimiter};
-use crate::ext::base::DummyResult;
 use crate::parse::{classify, DirectoryOwnership};
-use crate::parse::diagnostics::Error;
 use crate::parse::token;
 use crate::source_map::{respan, Span};
 use crate::symbol::{kw, sym};
@@ -373,7 +372,9 @@ pub fn parse_block(&mut self) -> PResult<'a, P<Block>> {
     }
 
     /// Parses a block. Inner attributes are allowed.
-    crate fn parse_inner_attrs_and_block(&mut self) -> PResult<'a, (Vec<Attribute>, P<Block>)> {
+    pub(super) fn parse_inner_attrs_and_block(
+        &mut self
+    ) -> PResult<'a, (Vec<Attribute>, P<Block>)> {
         maybe_whole!(self, NtBlock, |x| (Vec::new(), x));
 
         let lo = self.token.span;
@@ -400,7 +401,7 @@ pub(super) fn parse_block_tail(
                     self.recover_stmt_(SemiColonMode::Ignore, BlockMode::Ignore);
                     Some(Stmt {
                         id: DUMMY_NODE_ID,
-                        kind: StmtKind::Expr(DummyResult::raw_expr(self.token.span, true)),
+                        kind: StmtKind::Expr(self.mk_expr_err(self.token.span)),
                         span: self.token.span,
                     })
                 }
@@ -422,7 +423,7 @@ pub(super) fn parse_block_tail(
     }
 
     /// Parses a statement, including the trailing semicolon.
-    crate fn parse_full_stmt(&mut self, macro_legacy_warnings: bool) -> PResult<'a, Option<Stmt>> {
+    pub fn parse_full_stmt(&mut self, macro_legacy_warnings: bool) -> PResult<'a, Option<Stmt>> {
         // Skip looking for a trailing semicolon when we have an interpolated statement.
         maybe_whole!(self, NtStmt, |x| Some(x));
 
@@ -443,7 +444,7 @@ pub(super) fn parse_block_tail(
                         self.recover_stmt();
                         // Don't complain about type errors in body tail after parse error (#57383).
                         let sp = expr.span.to(self.prev_span);
-                        stmt.kind = StmtKind::Expr(DummyResult::raw_expr(sp, true));
+                        stmt.kind = StmtKind::Expr(self.mk_expr_err(sp));
                     }
                 }
             }
index 41ee2a1599d74028ab9a9d092792e591fb56eb7f..86c94b680b2c9da4a0b61fb2fcadc3d2ecee9976 100644 (file)
@@ -1,16 +1,15 @@
 use super::{Parser, PResult, PathStyle, PrevTokenKind, TokenType};
+use super::item::ParamCfg;
 
 use crate::{maybe_whole, maybe_recover_from_interpolated_ty_qpath};
 use crate::ptr::P;
 use crate::ast::{self, Ty, TyKind, MutTy, BareFnTy, FunctionRetTy, GenericParam, Lifetime, Ident};
 use crate::ast::{TraitBoundModifier, TraitObjectSyntax, GenericBound, GenericBounds, PolyTraitRef};
-use crate::ast::{Mutability, AnonConst, FnDecl, Mac};
+use crate::ast::{Mutability, AnonConst, Mac};
 use crate::parse::token::{self, Token};
 use crate::source_map::Span;
 use crate::symbol::{kw};
 
-use rustc_target::spec::abi::Abi;
-
 use errors::{Applicability, pluralise};
 
 /// Returns `true` if `IDENT t` can start a type -- `IDENT::a::b`, `IDENT<u8, u8>`,
@@ -211,7 +210,7 @@ pub(super) fn parse_ty_common(&mut self, allow_plus: bool, allow_qpath_recovery:
         };
 
         let span = lo.to(self.prev_span);
-        let ty = P(Ty { kind, span, id: ast::DUMMY_NODE_ID });
+        let ty = self.mk_ty(span, kind);
 
         // Try to recover from use of `+` with incorrect priority.
         self.maybe_report_ambiguous_plus(allow_plus, impl_dyn_multi, &ty);
@@ -281,19 +280,14 @@ fn parse_ty_bare_fn(&mut self, generic_params: Vec<GenericParam>) -> PResult<'a,
         */
 
         let unsafety = self.parse_unsafety();
-        let abi = if self.eat_keyword(kw::Extern) {
-            self.parse_opt_abi()?.unwrap_or(Abi::C)
-        } else {
-            Abi::Rust
-        };
-
+        let abi = self.parse_extern_abi()?;
         self.expect_keyword(kw::Fn)?;
-        let inputs = self.parse_fn_params(false, true)?;
-        let ret_ty = self.parse_ret_ty(false)?;
-        let decl = P(FnDecl {
-            inputs,
-            output: ret_ty,
-        });
+        let cfg = ParamCfg {
+            is_self_allowed: false,
+            allow_c_variadic: true,
+            is_name_required: |_| false,
+        };
+        let decl = self.parse_fn_decl(cfg, false)?;
         Ok(TyKind::BareFn(P(BareFnTy {
             abi,
             unsafety,
@@ -302,7 +296,7 @@ fn parse_ty_bare_fn(&mut self, generic_params: Vec<GenericParam>) -> PResult<'a,
         })))
     }
 
-    crate fn parse_generic_bounds(&mut self,
+    pub(super) fn parse_generic_bounds(&mut self,
                                   colon_span: Option<Span>) -> PResult<'a, GenericBounds> {
         self.parse_generic_bounds_common(true, colon_span)
     }
@@ -439,13 +433,13 @@ pub(super) fn parse_late_bound_lifetime_defs(&mut self) -> PResult<'a, Vec<Gener
         }
     }
 
-    crate fn check_lifetime(&mut self) -> bool {
+    pub fn check_lifetime(&mut self) -> bool {
         self.expected_tokens.push(TokenType::Lifetime);
         self.token.is_lifetime()
     }
 
     /// Parses a single lifetime `'a` or panics.
-    crate fn expect_lifetime(&mut self) -> Lifetime {
+    pub fn expect_lifetime(&mut self) -> Lifetime {
         if let Some(ident) = self.token.lifetime() {
             let span = self.token.span;
             self.bump();
@@ -454,4 +448,8 @@ pub(super) fn parse_late_bound_lifetime_defs(&mut self) -> PResult<'a, Vec<Gener
             self.span_bug(self.token.span, "not a lifetime")
         }
     }
+
+    pub(super) fn mk_ty(&self, span: Span, kind: TyKind) -> P<Ty> {
+        P(Ty { kind, span, id: ast::DUMMY_NODE_ID })
+    }
 }
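
A minimal standalone illustration of the bare function pointer types that `parse_ty_bare_fn` handles with this configuration (no `self`, parameter names optional):

fn apply(f: fn(i32) -> i32, x: i32) -> i32 {
    f(x)
}

fn main() {
    let double: fn(i32) -> i32 = |x| x * 2;        // unnamed parameter in the type
    let offset: fn(value: i32) -> i32 = |x| x + 1; // a name is allowed but not required
    println!("{} {}", apply(double, 10), apply(offset, 10));
}
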
index fd78a2bd53442dfe6fa3192191994db394cfb149..4a8b25c61079b6ad3bf8384d2c306b726b70078d 100644 (file)
@@ -4,16 +4,13 @@
 pub use LitKind::*;
 pub use TokenKind::*;
 
-use crate::ast::{self};
-use crate::parse::{parse_stream_from_source_str, ParseSess};
-use crate::print::pprust;
+use crate::ast;
 use crate::ptr::P;
 use crate::symbol::kw;
-use crate::tokenstream::{self, DelimSpan, TokenStream, TokenTree};
+use crate::tokenstream::TokenTree;
 
 use syntax_pos::symbol::Symbol;
-use syntax_pos::{self, Span, FileName, DUMMY_SP};
-use log::info;
+use syntax_pos::{self, Span, DUMMY_SP};
 
 use std::fmt;
 use std::mem;
@@ -36,7 +33,7 @@ pub enum BinOpToken {
 }
 
 /// A delimiter token.
-#[derive(Clone, PartialEq, RustcEncodable, RustcDecodable, Hash, Debug, Copy)]
+#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug, Copy)]
 pub enum DelimToken {
     /// A round parenthesis (i.e., `(` or `)`).
     Paren,
@@ -288,7 +285,7 @@ pub fn lit(kind: LitKind, symbol: Symbol, suffix: Option<Symbol>) -> TokenKind {
 }
 
 impl Token {
-    crate fn new(kind: TokenKind, span: Span) -> Self {
+    pub fn new(kind: TokenKind, span: Span) -> Self {
         Token { kind, span }
     }
 
@@ -298,12 +295,12 @@ impl Token {
     }
 
     /// Recovers a `Token` from an `ast::Ident`. This creates a raw identifier if necessary.
-    crate fn from_ast_ident(ident: ast::Ident) -> Self {
+    pub fn from_ast_ident(ident: ast::Ident) -> Self {
         Token::new(Ident(ident.name, ident.is_raw_guess()), ident.span)
     }
 
     /// Return this token by value and leave a dummy token in its place.
-    crate fn take(&mut self) -> Self {
+    pub fn take(&mut self) -> Self {
         mem::replace(self, Token::dummy())
     }
 
@@ -324,7 +321,7 @@ impl Token {
     }
 
     /// Returns `true` if the token can appear at the start of an expression.
-    crate fn can_begin_expr(&self) -> bool {
+    pub fn can_begin_expr(&self) -> bool {
         match self.kind {
             Ident(name, is_raw)              =>
                 ident_can_begin_expr(name, self.span, is_raw), // value name or keyword
@@ -356,7 +353,7 @@ impl Token {
     }
 
     /// Returns `true` if the token can appear at the start of a type.
-    crate fn can_begin_type(&self) -> bool {
+    pub fn can_begin_type(&self) -> bool {
         match self.kind {
             Ident(name, is_raw)        =>
                 ident_can_begin_type(name, self.span, is_raw), // type name or keyword
@@ -399,7 +396,7 @@ impl Token {
     }
 
     /// Returns `true` if the token is any literal
-    crate fn is_lit(&self) -> bool {
+    pub fn is_lit(&self) -> bool {
         match self.kind {
             Literal(..) => true,
             _           => false,
@@ -415,7 +412,7 @@ impl Token {
 
     /// Returns `true` if the token is any literal, a minus (which can prefix a literal,
     /// for example a '-42', or one of the boolean idents).
-    crate fn can_begin_literal_or_bool(&self) -> bool {
+    pub fn can_begin_literal_or_bool(&self) -> bool {
         match self.kind {
             Literal(..) | BinOp(Minus) => true,
             Ident(name, false) if name.is_bool_lit() => true,
@@ -737,131 +734,3 @@ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
         }
     }
 }
-
-impl Nonterminal {
-    pub fn to_tokenstream(&self, sess: &ParseSess, span: Span) -> TokenStream {
-        // A `Nonterminal` is often a parsed AST item. At this point we now
-        // need to convert the parsed AST to an actual token stream, e.g.
-        // un-parse it basically.
-        //
-        // Unfortunately there's not really a great way to do that in a
-        // guaranteed lossless fashion right now. The fallback here is to just
-        // stringify the AST node and reparse it, but this loses all span
-        // information.
-        //
-        // As a result, some AST nodes are annotated with the token stream they
-        // came from. Here we attempt to extract these lossless token streams
-        // before we fall back to the stringification.
-        let tokens = match *self {
-            Nonterminal::NtItem(ref item) => {
-                prepend_attrs(sess, &item.attrs, item.tokens.as_ref(), span)
-            }
-            Nonterminal::NtTraitItem(ref item) => {
-                prepend_attrs(sess, &item.attrs, item.tokens.as_ref(), span)
-            }
-            Nonterminal::NtImplItem(ref item) => {
-                prepend_attrs(sess, &item.attrs, item.tokens.as_ref(), span)
-            }
-            Nonterminal::NtIdent(ident, is_raw) => {
-                Some(TokenTree::token(Ident(ident.name, is_raw), ident.span).into())
-            }
-            Nonterminal::NtLifetime(ident) => {
-                Some(TokenTree::token(Lifetime(ident.name), ident.span).into())
-            }
-            Nonterminal::NtTT(ref tt) => {
-                Some(tt.clone().into())
-            }
-            _ => None,
-        };
-
-        // FIXME(#43081): Avoid this pretty-print + reparse hack
-        let source = pprust::nonterminal_to_string(self);
-        let filename = FileName::macro_expansion_source_code(&source);
-        let tokens_for_real = parse_stream_from_source_str(filename, source, sess, Some(span));
-
-        // During early phases of the compiler the AST could get modified
-        // directly (e.g., attributes added or removed) and the internal cache
-        // of tokens may not be invalidated or updated. Consequently if the
-        // "lossless" token stream disagrees with our actual stringification
-        // (which has historically been much more battle-tested) then we go
-        // with the lossy stream anyway (losing span information).
-        //
-        // Note that the comparison isn't `==` here to avoid comparing spans,
-        // but it *also* is a "probable" equality which is a pretty weird
-        // definition. We mostly want to catch actual changes to the AST
-        // like a `#[cfg]` being processed or some weird `macro_rules!`
-        // expansion.
-        //
-        // What we *don't* want to catch is the fact that a user-defined
-        // literal like `0xf` is stringified as `15`, causing the cached token
-        // stream to not be literal `==` token-wise (ignoring spans) to the
-        // token stream we got from stringification.
-        //
-        // Instead the "probably equal" check here is "does each token
-        // recursively have the same discriminant?" We basically don't look at
-        // the token values here and assume that such fine grained token stream
-        // modifications, including adding/removing typically non-semantic
-        // tokens such as extra braces and commas, don't happen.
-        if let Some(tokens) = tokens {
-            if tokens.probably_equal_for_proc_macro(&tokens_for_real) {
-                return tokens
-            }
-            info!("cached tokens found, but they're not \"probably equal\", \
-                   going with stringified version");
-        }
-        return tokens_for_real
-    }
-}
-
-fn prepend_attrs(sess: &ParseSess,
-                 attrs: &[ast::Attribute],
-                 tokens: Option<&tokenstream::TokenStream>,
-                 span: syntax_pos::Span)
-    -> Option<tokenstream::TokenStream>
-{
-    let tokens = tokens?;
-    if attrs.len() == 0 {
-        return Some(tokens.clone())
-    }
-    let mut builder = tokenstream::TokenStreamBuilder::new();
-    for attr in attrs {
-        assert_eq!(attr.style, ast::AttrStyle::Outer,
-                   "inner attributes should prevent cached tokens from existing");
-
-        let source = pprust::attribute_to_string(attr);
-        let macro_filename = FileName::macro_expansion_source_code(&source);
-        if attr.is_sugared_doc {
-            let stream = parse_stream_from_source_str(macro_filename, source, sess, Some(span));
-            builder.push(stream);
-            continue
-        }
-
-        // synthesize # [ $path $tokens ] manually here
-        let mut brackets = tokenstream::TokenStreamBuilder::new();
-
-        // For simple paths, push the identifier directly
-        if attr.path.segments.len() == 1 && attr.path.segments[0].args.is_none() {
-            let ident = attr.path.segments[0].ident;
-            let token = Ident(ident.name, ident.as_str().starts_with("r#"));
-            brackets.push(tokenstream::TokenTree::token(token, ident.span));
-
-        // ... and for more complicated paths, fall back to a reparse hack that
-        // should eventually be removed.
-        } else {
-            let stream = parse_stream_from_source_str(macro_filename, source, sess, Some(span));
-            brackets.push(stream);
-        }
-
-        brackets.push(attr.tokens.clone());
-
-        // The span we list here for `#` and for `[ ... ]` are both wrong in
-        // that it encompasses more than each token, but it hopefully is "good
-        // enough" for now at least.
-        builder.push(tokenstream::TokenTree::token(Pound, attr.span));
-        let delim_span = DelimSpan::from_single(attr.span);
-        builder.push(tokenstream::TokenTree::Delimited(
-            delim_span, DelimToken::Bracket, brackets.build().into()));
-    }
-    builder.push(tokens.clone());
-    Some(builder.build())
-}
index 7d4ffe493d709460cf3363e3d43ab19e6925f470..136fc355f89d499f29f6843977d8c51977d5f326 100644 (file)
@@ -6,10 +6,11 @@
 use crate::source_map::{self, SourceMap, Spanned};
 use crate::parse::token::{self, BinOpToken, DelimToken, Nonterminal, Token, TokenKind};
 use crate::parse::lexer::comments;
-use crate::parse::{self, ParseSess};
+use crate::parse;
 use crate::print::pp::{self, Breaks};
 use crate::print::pp::Breaks::{Consistent, Inconsistent};
 use crate::ptr::P;
+use crate::sess::ParseSess;
 use crate::symbol::{kw, sym};
 use crate::tokenstream::{self, TokenStream, TokenTree};
 
@@ -2381,7 +2382,8 @@ fn print_qpath(&mut self,
                 }
                 self.print_ident(ident);
                 if let Some(ref p) = *sub {
-                    self.s.word("@");
+                    self.s.space();
+                    self.s.word_space("@");
                     self.print_pat(p);
                 }
             }
diff --git a/src/libsyntax/sess.rs b/src/libsyntax/sess.rs
new file mode 100644 (file)
index 0000000..e49d395
--- /dev/null
@@ -0,0 +1,124 @@
+//! Contains `ParseSess`, which holds state living beyond what one `Parser` might.
+//! It also serves as an input to the parser itself.
+
+use crate::ast::{CrateConfig, NodeId};
+use crate::early_buffered_lints::{BufferedEarlyLint, BufferedEarlyLintId};
+use crate::source_map::{SourceMap, FilePathMapping};
+use crate::feature_gate::UnstableFeatures;
+
+use errors::{Applicability, Handler, ColorConfig, DiagnosticBuilder};
+use rustc_data_structures::fx::{FxHashSet, FxHashMap};
+use rustc_data_structures::sync::{Lrc, Lock, Once};
+use syntax_pos::{Symbol, Span, MultiSpan};
+use syntax_pos::edition::Edition;
+use syntax_pos::hygiene::ExpnId;
+
+use std::path::PathBuf;
+use std::str;
+
+/// Spans collected during parsing for places where a certain feature was
+/// used and should be feature-gated accordingly in `check_crate`.
+#[derive(Default)]
+crate struct GatedSpans {
+    /// Spans collected for gating `let_chains`, e.g. `if a && let b = c {}`.
+    crate let_chains: Lock<Vec<Span>>,
+    /// Spans collected for gating `async_closure`, e.g. `async || ..`.
+    crate async_closure: Lock<Vec<Span>>,
+    /// Spans collected for gating `yield e?` expressions (`generators` gate).
+    crate yields: Lock<Vec<Span>>,
+    /// Spans collected for gating `or_patterns`, e.g. `Some(Foo | Bar)`.
+    crate or_patterns: Lock<Vec<Span>>,
+    /// Spans collected for gating `const_extern_fn`, e.g. `const extern fn foo`.
+    crate const_extern_fn: Lock<Vec<Span>>,
+}
+
+/// Info about a parsing session.
+pub struct ParseSess {
+    pub span_diagnostic: Handler,
+    crate unstable_features: UnstableFeatures,
+    pub config: CrateConfig,
+    pub edition: Edition,
+    pub missing_fragment_specifiers: Lock<FxHashSet<Span>>,
+    /// Places where raw identifiers were used. This is used for feature-gating raw identifiers.
+    pub raw_identifier_spans: Lock<Vec<Span>>,
+    /// Used to determine and report recursive module inclusions.
+    pub(super) included_mod_stack: Lock<Vec<PathBuf>>,
+    source_map: Lrc<SourceMap>,
+    pub buffered_lints: Lock<Vec<BufferedEarlyLint>>,
+    /// Contains the spans of block expressions that could have been incomplete based on the
+    /// operation token that followed them, but that the parser cannot identify without further
+    /// analysis.
+    pub ambiguous_block_expr_parse: Lock<FxHashMap<Span, Span>>,
+    pub injected_crate_name: Once<Symbol>,
+    crate gated_spans: GatedSpans,
+}
+
+impl ParseSess {
+    pub fn new(file_path_mapping: FilePathMapping) -> Self {
+        let cm = Lrc::new(SourceMap::new(file_path_mapping));
+        let handler = Handler::with_tty_emitter(
+            ColorConfig::Auto,
+            true,
+            None,
+            Some(cm.clone()),
+        );
+        ParseSess::with_span_handler(handler, cm)
+    }
+
+    pub fn with_span_handler(handler: Handler, source_map: Lrc<SourceMap>) -> Self {
+        Self {
+            span_diagnostic: handler,
+            unstable_features: UnstableFeatures::from_environment(),
+            config: FxHashSet::default(),
+            edition: ExpnId::root().expn_data().edition,
+            missing_fragment_specifiers: Lock::new(FxHashSet::default()),
+            raw_identifier_spans: Lock::new(Vec::new()),
+            included_mod_stack: Lock::new(vec![]),
+            source_map,
+            buffered_lints: Lock::new(vec![]),
+            ambiguous_block_expr_parse: Lock::new(FxHashMap::default()),
+            injected_crate_name: Once::new(),
+            gated_spans: GatedSpans::default(),
+        }
+    }
+
+    #[inline]
+    pub fn source_map(&self) -> &SourceMap {
+        &self.source_map
+    }
+
+    pub fn buffer_lint(
+        &self,
+        lint_id: BufferedEarlyLintId,
+        span: impl Into<MultiSpan>,
+        id: NodeId,
+        msg: &str,
+    ) {
+        self.buffered_lints.with_lock(|buffered_lints| {
+            buffered_lints.push(BufferedEarlyLint{
+                span: span.into(),
+                id,
+                msg: msg.into(),
+                lint_id,
+            });
+        });
+    }
+
+    /// Extend an error with a suggestion to wrap an expression with parentheses to allow the
+    /// parser to continue parsing the following operation as part of the same expression.
+    pub fn expr_parentheses_needed(
+        &self,
+        err: &mut DiagnosticBuilder<'_>,
+        span: Span,
+        alt_snippet: Option<String>,
+    ) {
+        if let Some(snippet) = self.source_map().span_to_snippet(span).ok().or(alt_snippet) {
+            err.span_suggestion(
+                span,
+                "parentheses are required to parse this as an expression",
+                format!("({})", snippet),
+                Applicability::MachineApplicable,
+            );
+        }
+    }
+}
index 7d0d2392945e5e6700e91e7141c9acd1bdcffa01..1501adc597104bb5c0441b6269c9f85921100ebb 100644 (file)
@@ -878,25 +878,8 @@ pub fn bytepos_to_file_charpos(&self, bpos: BytePos) -> CharPos {
 
     // Returns the index of the `SourceFile` (in `self.files`) that contains `pos`.
     pub fn lookup_source_file_idx(&self, pos: BytePos) -> usize {
-        let files = self.files.borrow();
-        let files = &files.source_files;
-        let count = files.len();
-
-        // Binary search for the `SourceFile`.
-        let mut a = 0;
-        let mut b = count;
-        while b - a > 1 {
-            let m = (a + b) / 2;
-            if files[m].start_pos > pos {
-                b = m;
-            } else {
-                a = m;
-            }
-        }
-
-        assert!(a < count, "position {} does not resolve to a source location", pos.to_usize());
-
-        return a;
+        self.files.borrow().source_files.binary_search_by_key(&pos, |key| key.start_pos)
+            .unwrap_or_else(|p| p - 1)
     }
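
A minimal standalone sketch of the lookup logic: `binary_search_by_key` yields `Err(p)` with the insertion point when `pos` is not exactly a start offset, so `p - 1` selects the file whose start precedes `pos` (assuming `pos` is at or after the first start, which is 0 in this sketch):

fn lookup_idx(starts: &[u32], pos: u32) -> usize {
    starts
        .binary_search_by_key(&pos, |&start| start)
        .unwrap_or_else(|p| p - 1)
}

fn main() {
    // Hypothetical start offsets of three source files.
    let starts = [0u32, 100, 250];
    assert_eq!(lookup_idx(&starts, 0), 0);   // exact hit on a start position
    assert_eq!(lookup_idx(&starts, 99), 0);  // inside the first file
    assert_eq!(lookup_idx(&starts, 100), 1); // start of the second file
    assert_eq!(lookup_idx(&starts, 300), 2); // past the last start
    println!("all lookups ok");
}
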
 
     pub fn count_lines(&self) -> usize {
@@ -987,6 +970,9 @@ fn span_to_lines(&self, sp: Span) -> FileLinesResult {
     fn span_to_string(&self, sp: Span) -> String {
         self.span_to_string(sp)
     }
+    fn span_to_snippet(&self, sp: Span) -> Result<String, SpanSnippetError> {
+        self.span_to_snippet(sp)
+    }
     fn span_to_filename(&self, sp: Span) -> FileName {
         self.span_to_filename(sp)
     }
index f510ac9273d097ab45ff6f6220b61ce899c781fb..881bdaa84d099cd9d3dc568e25d0b133643582a7 100644 (file)
@@ -1,7 +1,8 @@
-use crate::{ast, panictry};
-use crate::parse::{ParseSess, PResult, source_file_to_stream};
+use crate::ast;
+use crate::parse::{PResult, source_file_to_stream};
 use crate::parse::new_parser_from_source_str;
 use crate::parse::parser::Parser;
+use crate::sess::ParseSess;
 use crate::source_map::{SourceMap, FilePathMapping};
 use crate::tokenstream::TokenStream;
 use crate::with_default_globals;
@@ -27,7 +28,7 @@ fn string_to_parser(ps: &ParseSess, source_str: String) -> Parser<'_> {
     F: FnOnce(&mut Parser<'a>) -> PResult<'a, T>,
 {
     let mut p = string_to_parser(&ps, s);
-    let x = panictry!(f(&mut p));
+    let x = f(&mut p).unwrap();
     p.sess.span_diagnostic.abort_if_errors();
     x
 }
index 26cae2a8e7c42fb2d598bf5e6e03b8d1b1d7d740..ac155556cdae25d541ca79c82b947409d0c3b58f 100644 (file)
@@ -14,7 +14,6 @@
 //! ownership of the original.
 
 use crate::parse::token::{self, DelimToken, Token, TokenKind};
-use crate::print::pprust;
 
 use syntax_pos::{BytePos, Span, DUMMY_SP};
 #[cfg(target_arch = "x86_64")]
@@ -23,7 +22,7 @@
 use rustc_serialize::{Decoder, Decodable, Encoder, Encodable};
 use smallvec::{SmallVec, smallvec};
 
-use std::{fmt, iter, mem};
+use std::{iter, mem};
 
 #[cfg(test)]
 mod tests;
@@ -137,13 +136,8 @@ pub fn close_tt(span: Span, delim: DelimToken) -> TokenTree {
 /// The goal is for procedural macros to work with `TokenStream`s and `TokenTree`s
 /// instead of a representation of the abstract syntax tree.
 /// Today's `TokenTree`s can still contain AST via `token::Interpolated` for back-compat.
-///
-/// The use of `Option` is an optimization that avoids the need for an
-/// allocation when the stream is empty. However, it is not guaranteed that an
-/// empty stream is represented with `None`; it may be represented as a `Some`
-/// around an empty `Vec`.
-#[derive(Clone, Debug)]
-pub struct TokenStream(pub Option<Lrc<Vec<TreeAndJoint>>>);
+#[derive(Clone, Debug, Default)]
+pub struct TokenStream(pub Lrc<Vec<TreeAndJoint>>);
 
 pub type TreeAndJoint = (TokenTree, IsJoint);
 
@@ -162,38 +156,36 @@ pub enum IsJoint {
 impl TokenStream {
     /// Given a `TokenStream` with a `Stream` of only two arguments, return a new `TokenStream`
     /// separating the two arguments with a comma for diagnostic suggestions.
-    pub(crate) fn add_comma(&self) -> Option<(TokenStream, Span)> {
+    pub fn add_comma(&self) -> Option<(TokenStream, Span)> {
         // Used to suggest if a user writes `foo!(a b);`
-        if let Some(ref stream) = self.0 {
-            let mut suggestion = None;
-            let mut iter = stream.iter().enumerate().peekable();
-            while let Some((pos, ts)) = iter.next() {
-                if let Some((_, next)) = iter.peek() {
-                    let sp = match (&ts, &next) {
-                        (_, (TokenTree::Token(Token { kind: token::Comma, .. }), _)) => continue,
-                        ((TokenTree::Token(token_left), NonJoint),
-                         (TokenTree::Token(token_right), _))
-                        if ((token_left.is_ident() && !token_left.is_reserved_ident())
-                            || token_left.is_lit()) &&
-                            ((token_right.is_ident() && !token_right.is_reserved_ident())
-                            || token_right.is_lit()) => token_left.span,
-                        ((TokenTree::Delimited(sp, ..), NonJoint), _) => sp.entire(),
-                        _ => continue,
-                    };
-                    let sp = sp.shrink_to_hi();
-                    let comma = (TokenTree::token(token::Comma, sp), NonJoint);
-                    suggestion = Some((pos, comma, sp));
-                }
-            }
-            if let Some((pos, comma, sp)) = suggestion {
-                let mut new_stream = vec![];
-                let parts = stream.split_at(pos + 1);
-                new_stream.extend_from_slice(parts.0);
-                new_stream.push(comma);
-                new_stream.extend_from_slice(parts.1);
-                return Some((TokenStream::new(new_stream), sp));
+        let mut suggestion = None;
+        let mut iter = self.0.iter().enumerate().peekable();
+        while let Some((pos, ts)) = iter.next() {
+            if let Some((_, next)) = iter.peek() {
+                let sp = match (&ts, &next) {
+                    (_, (TokenTree::Token(Token { kind: token::Comma, .. }), _)) => continue,
+                    ((TokenTree::Token(token_left), NonJoint),
+                     (TokenTree::Token(token_right), _))
+                    if ((token_left.is_ident() && !token_left.is_reserved_ident())
+                        || token_left.is_lit()) &&
+                        ((token_right.is_ident() && !token_right.is_reserved_ident())
+                        || token_right.is_lit()) => token_left.span,
+                    ((TokenTree::Delimited(sp, ..), NonJoint), _) => sp.entire(),
+                    _ => continue,
+                };
+                let sp = sp.shrink_to_hi();
+                let comma = (TokenTree::token(token::Comma, sp), NonJoint);
+                suggestion = Some((pos, comma, sp));
             }
         }
+        if let Some((pos, comma, sp)) = suggestion {
+            let mut new_stream = vec![];
+            let parts = self.0.split_at(pos + 1);
+            new_stream.extend_from_slice(parts.0);
+            new_stream.push(comma);
+            new_stream.extend_from_slice(parts.1);
+            return Some((TokenStream::new(new_stream), sp));
+        }
         None
     }
 }
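
A simplified standalone sketch of the scanning idea behind the suggestion (e.g. `foo!(a b)` becoming `foo!(a, b)`); plain strings stand in for real token trees, and the real check restricting this to identifiers and literals is omitted:

fn comma_positions(tokens: &[&str]) -> Vec<usize> {
    let mut positions = Vec::new();
    let mut iter = tokens.iter().enumerate().peekable();
    while let Some((pos, tok)) = iter.next() {
        if let Some((_, next)) = iter.peek() {
            // Only suggest a comma between two tokens when neither is already a comma.
            if *tok != "," && **next != "," {
                positions.push(pos + 1);
            }
        }
    }
    positions
}

fn main() {
    assert_eq!(comma_positions(&["a", "b"]), vec![1]);
    assert_eq!(comma_positions(&["a", ",", "b"]), Vec::<usize>::new());
    println!("suggestion positions computed");
}
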
@@ -210,9 +202,9 @@ fn from(tree: TokenTree) -> TreeAndJoint {
     }
 }
 
-impl<T: Into<TokenStream>> iter::FromIterator<T> for TokenStream {
-    fn from_iter<I: IntoIterator<Item = T>>(iter: I) -> Self {
-        TokenStream::from_streams(iter.into_iter().map(Into::into).collect::<SmallVec<_>>())
+impl iter::FromIterator<TokenTree> for TokenStream {
+    fn from_iter<I: IntoIterator<Item = TokenTree>>(iter: I) -> Self {
+        TokenStream::new(iter.into_iter().map(Into::into).collect::<Vec<TreeAndJoint>>())
     }
 }
 
@@ -225,58 +217,57 @@ fn eq(&self, other: &TokenStream) -> bool {
 }
 
 impl TokenStream {
-    pub fn len(&self) -> usize {
-        if let Some(ref slice) = self.0 {
-            slice.len()
-        } else {
-            0
-        }
+    pub fn new(streams: Vec<TreeAndJoint>) -> TokenStream {
+        TokenStream(Lrc::new(streams))
     }
 
-    pub fn empty() -> TokenStream {
-        TokenStream(None)
+    pub fn is_empty(&self) -> bool {
+        self.0.is_empty()
     }
 
-    pub fn is_empty(&self) -> bool {
-        match self.0 {
-            None => true,
-            Some(ref stream) => stream.is_empty(),
-        }
+    pub fn len(&self) -> usize {
+        self.0.len()
     }
 
     pub(crate) fn from_streams(mut streams: SmallVec<[TokenStream; 2]>) -> TokenStream {
         match streams.len() {
-            0 => TokenStream::empty(),
+            0 => TokenStream::default(),
             1 => streams.pop().unwrap(),
             _ => {
-                // rust-lang/rust#57735: pre-allocate vector to avoid
-                // quadratic blow-up due to on-the-fly reallocations.
-                let tree_count = streams.iter()
-                    .map(|ts| match &ts.0 { None => 0, Some(s) => s.len() })
+                // We are going to extend the first stream in `streams` with
+                // the elements from the subsequent streams. This requires
+                // using `make_mut()` on the first stream, and in practice this
+                // doesn't cause cloning 99.9% of the time.
+                //
+                // One very common use case is when `streams` has two elements,
+                // where the first stream has any number of elements within
+                // (often 1, but sometimes many more) and the second stream has
+                // a single element within.
+
+                // Determine how much the first stream will be extended.
+                // Needed to avoid quadratic blow up from on-the-fly
+                // reallocations (#57735).
+                let num_appends = streams.iter()
+                    .skip(1)
+                    .map(|ts| ts.len())
                     .sum();
-                let mut vec = Vec::with_capacity(tree_count);
 
-                for stream in streams {
-                    match stream.0 {
-                        None => {},
-                        Some(stream2) => vec.extend(stream2.iter().cloned()),
-                    }
+                // Get the first stream, which we will extend in place.
+                let mut iter = streams.drain();
+                let mut first_stream_lrc = iter.next().unwrap().0;
+
+                // Append the elements to the first stream, after reserving
+                // space for them.
+                let first_vec_mut = Lrc::make_mut(&mut first_stream_lrc);
+                first_vec_mut.reserve(num_appends);
+                for stream in iter {
+                    first_vec_mut.extend(stream.0.iter().cloned());
                 }
-                TokenStream::new(vec)
-            }
-        }
-    }
-
-    pub fn new(streams: Vec<TreeAndJoint>) -> TokenStream {
-        match streams.len() {
-            0 => TokenStream(None),
-            _ => TokenStream(Some(Lrc::new(streams))),
-        }
-    }
 
-    pub fn append_to_tree_and_joint_vec(self, vec: &mut Vec<TreeAndJoint>) {
-        if let Some(stream) = self.0 {
-            vec.extend(stream.iter().cloned());
+                // Create the final `TokenStream`.
+                TokenStream(first_stream_lrc)
+            }
         }
     }
 
@@ -344,43 +335,22 @@ fn semantic_tree(tree: &TokenTree) -> bool {
     }
 
     pub fn map_enumerated<F: FnMut(usize, TokenTree) -> TokenTree>(self, mut f: F) -> TokenStream {
-        TokenStream(self.0.map(|stream| {
-            Lrc::new(
-                stream
-                    .iter()
-                    .enumerate()
-                    .map(|(i, (tree, is_joint))| (f(i, tree.clone()), *is_joint))
-                    .collect())
-        }))
+        TokenStream(Lrc::new(
+            self.0
+                .iter()
+                .enumerate()
+                .map(|(i, (tree, is_joint))| (f(i, tree.clone()), *is_joint))
+                .collect()
+        ))
     }
 
     pub fn map<F: FnMut(TokenTree) -> TokenTree>(self, mut f: F) -> TokenStream {
-        TokenStream(self.0.map(|stream| {
-            Lrc::new(
-                stream
-                    .iter()
-                    .map(|(tree, is_joint)| (f(tree.clone()), *is_joint))
-                    .collect())
-        }))
-    }
-
-    fn first_tree_and_joint(&self) -> Option<TreeAndJoint> {
-        self.0.as_ref().map(|stream| {
-            stream.first().unwrap().clone()
-        })
-    }
-
-    fn last_tree_if_joint(&self) -> Option<TokenTree> {
-        match self.0 {
-            None => None,
-            Some(ref stream) => {
-                if let (tree, Joint) = stream.last().unwrap() {
-                    Some(tree.clone())
-                } else {
-                    None
-                }
-            }
-        }
+        TokenStream(Lrc::new(
+            self.0
+                .iter()
+                .map(|(tree, is_joint)| (f(tree.clone()), *is_joint))
+                .collect()
+        ))
     }
 }
 
@@ -394,18 +364,48 @@ pub fn new() -> TokenStreamBuilder {
     }
 
     pub fn push<T: Into<TokenStream>>(&mut self, stream: T) {
-        let stream = stream.into();
-        let last_tree_if_joint = self.0.last().and_then(TokenStream::last_tree_if_joint);
-        if let Some(TokenTree::Token(last_token)) = last_tree_if_joint {
-            if let Some((TokenTree::Token(token), is_joint)) = stream.first_tree_and_joint() {
-                if let Some(glued_tok) = last_token.glue(&token) {
-                    let last_stream = self.0.pop().unwrap();
-                    self.push_all_but_last_tree(&last_stream);
-                    let glued_tt = TokenTree::Token(glued_tok);
-                    let glued_tokenstream = TokenStream::new(vec![(glued_tt, is_joint)]);
-                    self.0.push(glued_tokenstream);
-                    self.push_all_but_first_tree(&stream);
-                    return
+        let mut stream = stream.into();
+
+        // If `self` is not empty and the last tree within the last stream is a
+        // token tree marked with `Joint`...
+        if let Some(TokenStream(ref mut last_stream_lrc)) = self.0.last_mut() {
+            if let Some((TokenTree::Token(last_token), Joint)) = last_stream_lrc.last() {
+
+                // ...and `stream` is not empty and the first tree within it is
+                // a token tree...
+                let TokenStream(ref mut stream_lrc) = stream;
+                if let Some((TokenTree::Token(token), is_joint)) = stream_lrc.first() {
+
+                    // ...and the two tokens can be glued together...
+                    if let Some(glued_tok) = last_token.glue(&token) {
+
+                        // ...then do so, by overwriting the last token
+                        // tree in `self` and removing the first token tree
+                        // from `stream`. This requires using `make_mut()`
+                        // on the last stream in `self` and on `stream`,
+                        // and in practice this doesn't cause cloning 99.9%
+                        // of the time.
+
+                        // Overwrite the last token tree with the merged
+                        // token.
+                        let last_vec_mut = Lrc::make_mut(last_stream_lrc);
+                        *last_vec_mut.last_mut().unwrap() =
+                            (TokenTree::Token(glued_tok), *is_joint);
+
+                        // Remove the first token tree from `stream`. (This
+                        // is almost always the only tree in `stream`.)
+                        let stream_vec_mut = Lrc::make_mut(stream_lrc);
+                        stream_vec_mut.remove(0);
+
+                        // Don't push `stream` if it's empty -- that could
+                        // block subsequent token gluing, by getting
+                        // between two token trees that should be glued
+                        // together.
+                        if !stream.is_empty() {
+                            self.0.push(stream);
+                        }
+                        return;
+                    }
                 }
             }
         }
@@ -415,26 +415,6 @@ pub fn push<T: Into<TokenStream>>(&mut self, stream: T) {
     pub fn build(self) -> TokenStream {
         TokenStream::from_streams(self.0)
     }
-
-    fn push_all_but_last_tree(&mut self, stream: &TokenStream) {
-        if let Some(ref streams) = stream.0 {
-            let len = streams.len();
-            match len {
-                1 => {}
-                _ => self.0.push(TokenStream(Some(Lrc::new(streams[0 .. len - 1].to_vec())))),
-            }
-        }
-    }
-
-    fn push_all_but_first_tree(&mut self, stream: &TokenStream) {
-        if let Some(ref streams) = stream.0 {
-            let len = streams.len();
-            match len {
-                1 => {}
-                _ => self.0.push(TokenStream(Some(Lrc::new(streams[1 .. len].to_vec())))),
-            }
-        }
-    }
 }
 
 #[derive(Clone)]
@@ -457,16 +437,11 @@ fn new(stream: TokenStream) -> Self {
     }
 
     pub fn next_with_joint(&mut self) -> Option<TreeAndJoint> {
-        match self.stream.0 {
-            None => None,
-            Some(ref stream) => {
-                if self.index < stream.len() {
-                    self.index += 1;
-                    Some(stream[self.index - 1].clone())
-                } else {
-                    None
-                }
-            }
+        if self.index < self.stream.len() {
+            self.index += 1;
+            Some(self.stream.0[self.index - 1].clone())
+        } else {
+            None
         }
     }
 
@@ -475,22 +450,13 @@ pub fn append(&mut self, new_stream: TokenStream) {
             return;
         }
         let index = self.index;
-        let stream = mem::replace(&mut self.stream, TokenStream(None));
+        let stream = mem::take(&mut self.stream);
         *self = TokenStream::from_streams(smallvec![stream, new_stream]).into_trees();
         self.index = index;
     }
 
     pub fn look_ahead(&self, n: usize) -> Option<TokenTree> {
-        match self.stream.0 {
-            None => None,
-            Some(ref stream) => stream[self.index ..].get(n).map(|(tree, _)| tree.clone()),
-        }
-    }
-}
-
-impl fmt::Display for TokenStream {
-    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
-        f.write_str(&pprust::tts_to_string(self.clone()))
+        self.stream.0[self.index ..].get(n).map(|(tree, _)| tree.clone())
     }
 }
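
The rewritten `from_streams` and `TokenStreamBuilder::push` above both lean on `Lrc::make_mut` for copy-on-write mutation: the backing vector is cloned only if another handle to it is still alive, which the comments note is rare in practice (`Lrc` is an alias for `Rc`/`Arc` depending on the parallel-compiler setting; both provide `make_mut` with the same semantics). A minimal standalone sketch of that behaviour, using `std::sync::Arc` and plain `String`s standing in for `TreeAndJoint` entries:

    use std::sync::Arc;

    fn main() {
        // Sole owner: `make_mut` hands back the existing vector, no clone.
        let mut first: Arc<Vec<String>> = Arc::new(vec!["a".to_string()]);
        Arc::make_mut(&mut first).push("b".to_string());
        assert_eq!(first.len(), 2);

        // A second owner exists: `make_mut` clones before mutating, so the
        // other handle still sees the old contents.
        let other = Arc::clone(&first);
        Arc::make_mut(&mut first).push("c".to_string());
        assert_eq!(other.len(), 2);
        assert_eq!(first.len(), 3);
    }

Reserving capacity up front (the `num_appends` computation) is what keeps concatenation linear: the single surviving vector is extended in place rather than reallocated stream by stream, avoiding the quadratic blow-up tracked in #57735.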
 
diff --git a/src/libsyntax_expand/Cargo.toml b/src/libsyntax_expand/Cargo.toml
new file mode 100644 (file)
index 0000000..f063753
--- /dev/null
@@ -0,0 +1,26 @@
+[package]
+authors = ["The Rust Project Developers"]
+name = "syntax_expand"
+version = "0.0.0"
+edition = "2018"
+build = false
+
+[lib]
+name = "syntax_expand"
+path = "lib.rs"
+doctest = false
+
+[dependencies]
+bitflags = "1.0"
+rustc_serialize = { path = "../libserialize", package = "serialize" }
+log = "0.4"
+scoped-tls = "1.0"
+lazy_static = "1.0.0"
+syntax_pos = { path = "../libsyntax_pos" }
+errors = { path = "../librustc_errors", package = "rustc_errors" }
+rustc_data_structures = { path = "../librustc_data_structures" }
+rustc_index = { path = "../librustc_index" }
+rustc_lexer = { path = "../librustc_lexer" }
+rustc_target = { path = "../librustc_target" }
+smallvec = { version = "0.6.7", features = ["union", "may_dangle"] }
+syntax = { path = "../libsyntax" }
diff --git a/src/libsyntax_expand/allocator.rs b/src/libsyntax_expand/allocator.rs
new file mode 100644 (file)
index 0000000..3526be1
--- /dev/null
@@ -0,0 +1,75 @@
+use syntax::{ast, attr, visit};
+use syntax::symbol::{sym, Symbol};
+use syntax_pos::Span;
+
+#[derive(Clone, Copy)]
+pub enum AllocatorKind {
+    Global,
+    DefaultLib,
+    DefaultExe,
+}
+
+impl AllocatorKind {
+    pub fn fn_name(&self, base: &str) -> String {
+        match *self {
+            AllocatorKind::Global => format!("__rg_{}", base),
+            AllocatorKind::DefaultLib => format!("__rdl_{}", base),
+            AllocatorKind::DefaultExe => format!("__rde_{}", base),
+        }
+    }
+}
+
+pub enum AllocatorTy {
+    Layout,
+    Ptr,
+    ResultPtr,
+    Unit,
+    Usize,
+}
+
+pub struct AllocatorMethod {
+    pub name: &'static str,
+    pub inputs: &'static [AllocatorTy],
+    pub output: AllocatorTy,
+}
+
+pub static ALLOCATOR_METHODS: &[AllocatorMethod] = &[
+    AllocatorMethod {
+        name: "alloc",
+        inputs: &[AllocatorTy::Layout],
+        output: AllocatorTy::ResultPtr,
+    },
+    AllocatorMethod {
+        name: "dealloc",
+        inputs: &[AllocatorTy::Ptr, AllocatorTy::Layout],
+        output: AllocatorTy::Unit,
+    },
+    AllocatorMethod {
+        name: "realloc",
+        inputs: &[AllocatorTy::Ptr, AllocatorTy::Layout, AllocatorTy::Usize],
+        output: AllocatorTy::ResultPtr,
+    },
+    AllocatorMethod {
+        name: "alloc_zeroed",
+        inputs: &[AllocatorTy::Layout],
+        output: AllocatorTy::ResultPtr,
+    },
+];
+
+pub fn global_allocator_spans(krate: &ast::Crate) -> Vec<Span> {
+    struct Finder { name: Symbol, spans: Vec<Span> }
+    impl<'ast> visit::Visitor<'ast> for Finder {
+        fn visit_item(&mut self, item: &'ast ast::Item) {
+            if item.ident.name == self.name &&
+               attr::contains_name(&item.attrs, sym::rustc_std_internal_symbol) {
+                self.spans.push(item.span);
+            }
+            visit::walk_item(self, item)
+        }
+    }
+
+    let name = Symbol::intern(&AllocatorKind::Global.fn_name("alloc"));
+    let mut f = Finder { name, spans: Vec::new() };
+    visit::walk_crate(&mut f, krate);
+    f.spans
+}
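
As a quick illustration of the `fn_name` mapping defined above (these assertions are not part of the commit, just what the format strings produce):

    assert_eq!(AllocatorKind::Global.fn_name("alloc"), "__rg_alloc");
    assert_eq!(AllocatorKind::DefaultLib.fn_name("dealloc"), "__rdl_dealloc");
    assert_eq!(AllocatorKind::DefaultExe.fn_name("realloc"), "__rde_realloc");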
diff --git a/src/libsyntax_expand/base.rs b/src/libsyntax_expand/base.rs
new file mode 100644 (file)
index 0000000..c222e73
--- /dev/null
@@ -0,0 +1,1188 @@
+use crate::expand::{self, AstFragment, Invocation};
+use crate::hygiene::ExpnId;
+
+use syntax::ast::{self, NodeId, Attribute, Name, PatKind};
+use syntax::attr::{self, HasAttrs, Stability, Deprecation};
+use syntax::source_map::SourceMap;
+use syntax::edition::Edition;
+use syntax::mut_visit::{self, MutVisitor};
+use syntax::parse::{self, parser, DirectoryOwnership};
+use syntax::parse::token;
+use syntax::ptr::P;
+use syntax::sess::ParseSess;
+use syntax::symbol::{kw, sym, Ident, Symbol};
+use syntax::{ThinVec, MACRO_ARGUMENTS};
+use syntax::tokenstream::{self, TokenStream};
+use syntax::visit::Visitor;
+
+use errors::{DiagnosticBuilder, DiagnosticId};
+use smallvec::{smallvec, SmallVec};
+use syntax_pos::{FileName, Span, MultiSpan, DUMMY_SP};
+use syntax_pos::hygiene::{AstPass, ExpnData, ExpnKind};
+
+use rustc_data_structures::fx::FxHashMap;
+use rustc_data_structures::sync::{self, Lrc};
+use std::iter;
+use std::path::PathBuf;
+use std::rc::Rc;
+use std::default::Default;
+
+pub use syntax_pos::hygiene::MacroKind;
+
+#[derive(Debug, Clone)]
+pub enum Annotatable {
+    Item(P<ast::Item>),
+    TraitItem(P<ast::TraitItem>),
+    ImplItem(P<ast::ImplItem>),
+    ForeignItem(P<ast::ForeignItem>),
+    Stmt(P<ast::Stmt>),
+    Expr(P<ast::Expr>),
+    Arm(ast::Arm),
+    Field(ast::Field),
+    FieldPat(ast::FieldPat),
+    GenericParam(ast::GenericParam),
+    Param(ast::Param),
+    StructField(ast::StructField),
+    Variant(ast::Variant),
+}
+
+impl HasAttrs for Annotatable {
+    fn attrs(&self) -> &[Attribute] {
+        match *self {
+            Annotatable::Item(ref item) => &item.attrs,
+            Annotatable::TraitItem(ref trait_item) => &trait_item.attrs,
+            Annotatable::ImplItem(ref impl_item) => &impl_item.attrs,
+            Annotatable::ForeignItem(ref foreign_item) => &foreign_item.attrs,
+            Annotatable::Stmt(ref stmt) => stmt.attrs(),
+            Annotatable::Expr(ref expr) => &expr.attrs,
+            Annotatable::Arm(ref arm) => &arm.attrs,
+            Annotatable::Field(ref field) => &field.attrs,
+            Annotatable::FieldPat(ref fp) => &fp.attrs,
+            Annotatable::GenericParam(ref gp) => &gp.attrs,
+            Annotatable::Param(ref p) => &p.attrs,
+            Annotatable::StructField(ref sf) => &sf.attrs,
+            Annotatable::Variant(ref v) => &v.attrs(),
+        }
+    }
+
+    fn visit_attrs<F: FnOnce(&mut Vec<Attribute>)>(&mut self, f: F) {
+        match self {
+            Annotatable::Item(item) => item.visit_attrs(f),
+            Annotatable::TraitItem(trait_item) => trait_item.visit_attrs(f),
+            Annotatable::ImplItem(impl_item) => impl_item.visit_attrs(f),
+            Annotatable::ForeignItem(foreign_item) => foreign_item.visit_attrs(f),
+            Annotatable::Stmt(stmt) => stmt.visit_attrs(f),
+            Annotatable::Expr(expr) => expr.visit_attrs(f),
+            Annotatable::Arm(arm) => arm.visit_attrs(f),
+            Annotatable::Field(field) => field.visit_attrs(f),
+            Annotatable::FieldPat(fp) => fp.visit_attrs(f),
+            Annotatable::GenericParam(gp) => gp.visit_attrs(f),
+            Annotatable::Param(p) => p.visit_attrs(f),
+            Annotatable::StructField(sf) => sf.visit_attrs(f),
+            Annotatable::Variant(v) => v.visit_attrs(f),
+        }
+    }
+}
+
+impl Annotatable {
+    pub fn span(&self) -> Span {
+        match *self {
+            Annotatable::Item(ref item) => item.span,
+            Annotatable::TraitItem(ref trait_item) => trait_item.span,
+            Annotatable::ImplItem(ref impl_item) => impl_item.span,
+            Annotatable::ForeignItem(ref foreign_item) => foreign_item.span,
+            Annotatable::Stmt(ref stmt) => stmt.span,
+            Annotatable::Expr(ref expr) => expr.span,
+            Annotatable::Arm(ref arm) => arm.span,
+            Annotatable::Field(ref field) => field.span,
+            Annotatable::FieldPat(ref fp) => fp.pat.span,
+            Annotatable::GenericParam(ref gp) => gp.ident.span,
+            Annotatable::Param(ref p) => p.span,
+            Annotatable::StructField(ref sf) => sf.span,
+            Annotatable::Variant(ref v) => v.span,
+        }
+    }
+
+    pub fn visit_with<'a, V: Visitor<'a>>(&'a self, visitor: &mut V) {
+        match self {
+            Annotatable::Item(item) => visitor.visit_item(item),
+            Annotatable::TraitItem(trait_item) => visitor.visit_trait_item(trait_item),
+            Annotatable::ImplItem(impl_item) => visitor.visit_impl_item(impl_item),
+            Annotatable::ForeignItem(foreign_item) => visitor.visit_foreign_item(foreign_item),
+            Annotatable::Stmt(stmt) => visitor.visit_stmt(stmt),
+            Annotatable::Expr(expr) => visitor.visit_expr(expr),
+            Annotatable::Arm(arm) => visitor.visit_arm(arm),
+            Annotatable::Field(field) => visitor.visit_field(field),
+            Annotatable::FieldPat(fp) => visitor.visit_field_pattern(fp),
+            Annotatable::GenericParam(gp) => visitor.visit_generic_param(gp),
+            Annotatable::Param(p) => visitor.visit_param(p),
+            Annotatable::StructField(sf) => visitor.visit_struct_field(sf),
+            Annotatable::Variant(v) => visitor.visit_variant(v),
+        }
+    }
+
+    pub fn expect_item(self) -> P<ast::Item> {
+        match self {
+            Annotatable::Item(i) => i,
+            _ => panic!("expected Item")
+        }
+    }
+
+    pub fn map_item_or<F, G>(self, mut f: F, mut or: G) -> Annotatable
+        where F: FnMut(P<ast::Item>) -> P<ast::Item>,
+              G: FnMut(Annotatable) -> Annotatable
+    {
+        match self {
+            Annotatable::Item(i) => Annotatable::Item(f(i)),
+            _ => or(self)
+        }
+    }
+
+    pub fn expect_trait_item(self) -> ast::TraitItem {
+        match self {
+            Annotatable::TraitItem(i) => i.into_inner(),
+            _ => panic!("expected Item")
+        }
+    }
+
+    pub fn expect_impl_item(self) -> ast::ImplItem {
+        match self {
+            Annotatable::ImplItem(i) => i.into_inner(),
+            _ => panic!("expected Item")
+        }
+    }
+
+    pub fn expect_foreign_item(self) -> ast::ForeignItem {
+        match self {
+            Annotatable::ForeignItem(i) => i.into_inner(),
+            _ => panic!("expected foreign item")
+        }
+    }
+
+    pub fn expect_stmt(self) -> ast::Stmt {
+        match self {
+            Annotatable::Stmt(stmt) => stmt.into_inner(),
+            _ => panic!("expected statement"),
+        }
+    }
+
+    pub fn expect_expr(self) -> P<ast::Expr> {
+        match self {
+            Annotatable::Expr(expr) => expr,
+            _ => panic!("expected expression"),
+        }
+    }
+
+    pub fn expect_arm(self) -> ast::Arm {
+        match self {
+            Annotatable::Arm(arm) => arm,
+            _ => panic!("expected match arm")
+        }
+    }
+
+    pub fn expect_field(self) -> ast::Field {
+        match self {
+            Annotatable::Field(field) => field,
+            _ => panic!("expected field")
+        }
+    }
+
+    pub fn expect_field_pattern(self) -> ast::FieldPat {
+        match self {
+            Annotatable::FieldPat(fp) => fp,
+            _ => panic!("expected field pattern")
+        }
+    }
+
+    pub fn expect_generic_param(self) -> ast::GenericParam {
+        match self {
+            Annotatable::GenericParam(gp) => gp,
+            _ => panic!("expected generic parameter")
+        }
+    }
+
+    pub fn expect_param(self) -> ast::Param {
+        match self {
+            Annotatable::Param(param) => param,
+            _ => panic!("expected parameter")
+        }
+    }
+
+    pub fn expect_struct_field(self) -> ast::StructField {
+        match self {
+            Annotatable::StructField(sf) => sf,
+            _ => panic!("expected struct field")
+        }
+    }
+
+    pub fn expect_variant(self) -> ast::Variant {
+        match self {
+            Annotatable::Variant(v) => v,
+            _ => panic!("expected variant")
+        }
+    }
+
+    pub fn derive_allowed(&self) -> bool {
+        match *self {
+            Annotatable::Item(ref item) => match item.kind {
+                ast::ItemKind::Struct(..) |
+                ast::ItemKind::Enum(..) |
+                ast::ItemKind::Union(..) => true,
+                _ => false,
+            },
+            _ => false,
+        }
+    }
+}
+
+// `meta_item` is the annotation, and `item` is the item being modified.
+// FIXME Decorators should follow the same pattern too.
+pub trait MultiItemModifier {
+    fn expand(&self,
+              ecx: &mut ExtCtxt<'_>,
+              span: Span,
+              meta_item: &ast::MetaItem,
+              item: Annotatable)
+              -> Vec<Annotatable>;
+}
+
+impl<F, T> MultiItemModifier for F
+    where F: Fn(&mut ExtCtxt<'_>, Span, &ast::MetaItem, Annotatable) -> T,
+          T: Into<Vec<Annotatable>>,
+{
+    fn expand(&self,
+              ecx: &mut ExtCtxt<'_>,
+              span: Span,
+              meta_item: &ast::MetaItem,
+              item: Annotatable)
+              -> Vec<Annotatable> {
+        (*self)(ecx, span, meta_item, item).into()
+    }
+}
+
+impl Into<Vec<Annotatable>> for Annotatable {
+    fn into(self) -> Vec<Annotatable> {
+        vec![self]
+    }
+}
+
+pub trait ProcMacro {
+    fn expand<'cx>(&self,
+                   ecx: &'cx mut ExtCtxt<'_>,
+                   span: Span,
+                   ts: TokenStream)
+                   -> TokenStream;
+}
+
+impl<F> ProcMacro for F
+    where F: Fn(TokenStream) -> TokenStream
+{
+    fn expand<'cx>(&self,
+                   _ecx: &'cx mut ExtCtxt<'_>,
+                   _span: Span,
+                   ts: TokenStream)
+                   -> TokenStream {
+        // FIXME setup implicit context in TLS before calling self.
+        (*self)(ts)
+    }
+}
+
+pub trait AttrProcMacro {
+    fn expand<'cx>(&self,
+                   ecx: &'cx mut ExtCtxt<'_>,
+                   span: Span,
+                   annotation: TokenStream,
+                   annotated: TokenStream)
+                   -> TokenStream;
+}
+
+impl<F> AttrProcMacro for F
+    where F: Fn(TokenStream, TokenStream) -> TokenStream
+{
+    fn expand<'cx>(&self,
+                   _ecx: &'cx mut ExtCtxt<'_>,
+                   _span: Span,
+                   annotation: TokenStream,
+                   annotated: TokenStream)
+                   -> TokenStream {
+        // FIXME setup implicit context in TLS before calling self.
+        (*self)(annotation, annotated)
+    }
+}
+
+/// An expander that maps a token stream to a macro result.
+pub trait TTMacroExpander {
+    fn expand<'cx>(
+        &self,
+        ecx: &'cx mut ExtCtxt<'_>,
+        span: Span,
+        input: TokenStream,
+    ) -> Box<dyn MacResult+'cx>;
+}
+
+pub type MacroExpanderFn =
+    for<'cx> fn(&'cx mut ExtCtxt<'_>, Span, TokenStream)
+                -> Box<dyn MacResult+'cx>;
+
+impl<F> TTMacroExpander for F
+    where F: for<'cx> Fn(&'cx mut ExtCtxt<'_>, Span, TokenStream)
+    -> Box<dyn MacResult+'cx>
+{
+    fn expand<'cx>(
+        &self,
+        ecx: &'cx mut ExtCtxt<'_>,
+        span: Span,
+        mut input: TokenStream,
+    ) -> Box<dyn MacResult+'cx> {
+        struct AvoidInterpolatedIdents;
+
+        impl MutVisitor for AvoidInterpolatedIdents {
+            fn visit_tt(&mut self, tt: &mut tokenstream::TokenTree) {
+                if let tokenstream::TokenTree::Token(token) = tt {
+                    if let token::Interpolated(nt) = &token.kind {
+                        if let token::NtIdent(ident, is_raw) = **nt {
+                            *tt = tokenstream::TokenTree::token(
+                                token::Ident(ident.name, is_raw), ident.span
+                            );
+                        }
+                    }
+                }
+                mut_visit::noop_visit_tt(tt, self)
+            }
+
+            fn visit_mac(&mut self, mac: &mut ast::Mac) {
+                mut_visit::noop_visit_mac(mac, self)
+            }
+        }
+        AvoidInterpolatedIdents.visit_tts(&mut input);
+        (*self)(ecx, span, input)
+    }
+}
+
+// Use a macro because forwarding to a simple function has type system issues
+macro_rules! make_stmts_default {
+    ($me:expr) => {
+        $me.make_expr().map(|e| smallvec![ast::Stmt {
+            id: ast::DUMMY_NODE_ID,
+            span: e.span,
+            kind: ast::StmtKind::Expr(e),
+        }])
+    }
+}
+
+/// The result of a macro expansion. The return values of the various
+/// methods are spliced into the AST at the callsite of the macro.
+pub trait MacResult {
+    /// Creates an expression.
+    fn make_expr(self: Box<Self>) -> Option<P<ast::Expr>> {
+        None
+    }
+    /// Creates zero or more items.
+    fn make_items(self: Box<Self>) -> Option<SmallVec<[P<ast::Item>; 1]>> {
+        None
+    }
+
+    /// Creates zero or more impl items.
+    fn make_impl_items(self: Box<Self>) -> Option<SmallVec<[ast::ImplItem; 1]>> {
+        None
+    }
+
+    /// Creates zero or more trait items.
+    fn make_trait_items(self: Box<Self>) -> Option<SmallVec<[ast::TraitItem; 1]>> {
+        None
+    }
+
+    /// Creates zero or more items in an `extern {}` block
+    fn make_foreign_items(self: Box<Self>) -> Option<SmallVec<[ast::ForeignItem; 1]>> { None }
+
+    /// Creates a pattern.
+    fn make_pat(self: Box<Self>) -> Option<P<ast::Pat>> {
+        None
+    }
+
+    /// Creates zero or more statements.
+    ///
+    /// By default this attempts to create an expression statement,
+    /// returning None if that fails.
+    fn make_stmts(self: Box<Self>) -> Option<SmallVec<[ast::Stmt; 1]>> {
+        make_stmts_default!(self)
+    }
+
+    fn make_ty(self: Box<Self>) -> Option<P<ast::Ty>> {
+        None
+    }
+
+    fn make_arms(self: Box<Self>) -> Option<SmallVec<[ast::Arm; 1]>> {
+        None
+    }
+
+    fn make_fields(self: Box<Self>) -> Option<SmallVec<[ast::Field; 1]>> {
+        None
+    }
+
+    fn make_field_patterns(self: Box<Self>) -> Option<SmallVec<[ast::FieldPat; 1]>> {
+        None
+    }
+
+    fn make_generic_params(self: Box<Self>) -> Option<SmallVec<[ast::GenericParam; 1]>> {
+        None
+    }
+
+    fn make_params(self: Box<Self>) -> Option<SmallVec<[ast::Param; 1]>> {
+        None
+    }
+
+    fn make_struct_fields(self: Box<Self>) -> Option<SmallVec<[ast::StructField; 1]>> {
+        None
+    }
+
+    fn make_variants(self: Box<Self>) -> Option<SmallVec<[ast::Variant; 1]>> {
+        None
+    }
+}
+
+macro_rules! make_MacEager {
+    ( $( $fld:ident: $t:ty, )* ) => {
+        /// `MacResult` implementation for the common case where you've already
+        /// built each form of AST that you might return.
+        #[derive(Default)]
+        pub struct MacEager {
+            $(
+                pub $fld: Option<$t>,
+            )*
+        }
+
+        impl MacEager {
+            $(
+                pub fn $fld(v: $t) -> Box<dyn MacResult> {
+                    Box::new(MacEager {
+                        $fld: Some(v),
+                        ..Default::default()
+                    })
+                }
+            )*
+        }
+    }
+}
+
+make_MacEager! {
+    expr: P<ast::Expr>,
+    pat: P<ast::Pat>,
+    items: SmallVec<[P<ast::Item>; 1]>,
+    impl_items: SmallVec<[ast::ImplItem; 1]>,
+    trait_items: SmallVec<[ast::TraitItem; 1]>,
+    foreign_items: SmallVec<[ast::ForeignItem; 1]>,
+    stmts: SmallVec<[ast::Stmt; 1]>,
+    ty: P<ast::Ty>,
+}
+
+impl MacResult for MacEager {
+    fn make_expr(self: Box<Self>) -> Option<P<ast::Expr>> {
+        self.expr
+    }
+
+    fn make_items(self: Box<Self>) -> Option<SmallVec<[P<ast::Item>; 1]>> {
+        self.items
+    }
+
+    fn make_impl_items(self: Box<Self>) -> Option<SmallVec<[ast::ImplItem; 1]>> {
+        self.impl_items
+    }
+
+    fn make_trait_items(self: Box<Self>) -> Option<SmallVec<[ast::TraitItem; 1]>> {
+        self.trait_items
+    }
+
+    fn make_foreign_items(self: Box<Self>) -> Option<SmallVec<[ast::ForeignItem; 1]>> {
+        self.foreign_items
+    }
+
+    fn make_stmts(self: Box<Self>) -> Option<SmallVec<[ast::Stmt; 1]>> {
+        match self.stmts.as_ref().map_or(0, |s| s.len()) {
+            0 => make_stmts_default!(self),
+            _ => self.stmts,
+        }
+    }
+
+    fn make_pat(self: Box<Self>) -> Option<P<ast::Pat>> {
+        if let Some(p) = self.pat {
+            return Some(p);
+        }
+        if let Some(e) = self.expr {
+            if let ast::ExprKind::Lit(_) = e.kind {
+                return Some(P(ast::Pat {
+                    id: ast::DUMMY_NODE_ID,
+                    span: e.span,
+                    kind: PatKind::Lit(e),
+                }));
+            }
+        }
+        None
+    }
+
+    fn make_ty(self: Box<Self>) -> Option<P<ast::Ty>> {
+        self.ty
+    }
+}
+
+/// Fill-in macro expansion result, to allow compilation to continue
+/// after hitting errors.
+#[derive(Copy, Clone)]
+pub struct DummyResult {
+    is_error: bool,
+    span: Span,
+}
+
+impl DummyResult {
+    /// Creates a default MacResult that can be anything.
+    ///
+    /// Use this as a return value after hitting any errors and
+    /// calling `span_err`.
+    pub fn any(span: Span) -> Box<dyn MacResult+'static> {
+        Box::new(DummyResult { is_error: true, span })
+    }
+
+    /// Same as `any`, but the result must be a valid fragment rather than an error.
+    pub fn any_valid(span: Span) -> Box<dyn MacResult+'static> {
+        Box::new(DummyResult { is_error: false, span })
+    }
+
+    /// A plain dummy expression.
+    pub fn raw_expr(sp: Span, is_error: bool) -> P<ast::Expr> {
+        P(ast::Expr {
+            id: ast::DUMMY_NODE_ID,
+            kind: if is_error { ast::ExprKind::Err } else { ast::ExprKind::Tup(Vec::new()) },
+            span: sp,
+            attrs: ThinVec::new(),
+        })
+    }
+
+    /// A plain dummy pattern.
+    pub fn raw_pat(sp: Span) -> ast::Pat {
+        ast::Pat {
+            id: ast::DUMMY_NODE_ID,
+            kind: PatKind::Wild,
+            span: sp,
+        }
+    }
+
+    /// A plain dummy type.
+    pub fn raw_ty(sp: Span, is_error: bool) -> P<ast::Ty> {
+        P(ast::Ty {
+            id: ast::DUMMY_NODE_ID,
+            kind: if is_error { ast::TyKind::Err } else { ast::TyKind::Tup(Vec::new()) },
+            span: sp
+        })
+    }
+}
+
+impl MacResult for DummyResult {
+    fn make_expr(self: Box<DummyResult>) -> Option<P<ast::Expr>> {
+        Some(DummyResult::raw_expr(self.span, self.is_error))
+    }
+
+    fn make_pat(self: Box<DummyResult>) -> Option<P<ast::Pat>> {
+        Some(P(DummyResult::raw_pat(self.span)))
+    }
+
+    fn make_items(self: Box<DummyResult>) -> Option<SmallVec<[P<ast::Item>; 1]>> {
+        Some(SmallVec::new())
+    }
+
+    fn make_impl_items(self: Box<DummyResult>) -> Option<SmallVec<[ast::ImplItem; 1]>> {
+        Some(SmallVec::new())
+    }
+
+    fn make_trait_items(self: Box<DummyResult>) -> Option<SmallVec<[ast::TraitItem; 1]>> {
+        Some(SmallVec::new())
+    }
+
+    fn make_foreign_items(self: Box<Self>) -> Option<SmallVec<[ast::ForeignItem; 1]>> {
+        Some(SmallVec::new())
+    }
+
+    fn make_stmts(self: Box<DummyResult>) -> Option<SmallVec<[ast::Stmt; 1]>> {
+        Some(smallvec![ast::Stmt {
+            id: ast::DUMMY_NODE_ID,
+            kind: ast::StmtKind::Expr(DummyResult::raw_expr(self.span, self.is_error)),
+            span: self.span,
+        }])
+    }
+
+    fn make_ty(self: Box<DummyResult>) -> Option<P<ast::Ty>> {
+        Some(DummyResult::raw_ty(self.span, self.is_error))
+    }
+
+    fn make_arms(self: Box<DummyResult>) -> Option<SmallVec<[ast::Arm; 1]>> {
+       Some(SmallVec::new())
+    }
+
+    fn make_fields(self: Box<DummyResult>) -> Option<SmallVec<[ast::Field; 1]>> {
+        Some(SmallVec::new())
+    }
+
+    fn make_field_patterns(self: Box<DummyResult>) -> Option<SmallVec<[ast::FieldPat; 1]>> {
+        Some(SmallVec::new())
+    }
+
+    fn make_generic_params(self: Box<DummyResult>) -> Option<SmallVec<[ast::GenericParam; 1]>> {
+        Some(SmallVec::new())
+    }
+
+    fn make_params(self: Box<DummyResult>) -> Option<SmallVec<[ast::Param; 1]>> {
+        Some(SmallVec::new())
+    }
+
+    fn make_struct_fields(self: Box<DummyResult>) -> Option<SmallVec<[ast::StructField; 1]>> {
+        Some(SmallVec::new())
+    }
+
+    fn make_variants(self: Box<DummyResult>) -> Option<SmallVec<[ast::Variant; 1]>> {
+        Some(SmallVec::new())
+    }
+}
+
+/// A syntax extension kind.
+pub enum SyntaxExtensionKind {
+    /// A token-based function-like macro.
+    Bang(
+        /// An expander with signature TokenStream -> TokenStream.
+        Box<dyn ProcMacro + sync::Sync + sync::Send>,
+    ),
+
+    /// An AST-based function-like macro.
+    LegacyBang(
+        /// An expander with signature TokenStream -> AST.
+        Box<dyn TTMacroExpander + sync::Sync + sync::Send>,
+    ),
+
+    /// A token-based attribute macro.
+    Attr(
+        /// An expander with signature (TokenStream, TokenStream) -> TokenStream.
+        /// The first TokenStream is the attribute itself, the second is the annotated item.
+        /// The produced TokenStream replaces the input TokenStream.
+        Box<dyn AttrProcMacro + sync::Sync + sync::Send>,
+    ),
+
+    /// An AST-based attribute macro.
+    LegacyAttr(
+        /// An expander with signature (AST, AST) -> AST.
+        /// The first AST fragment is the attribute itself, the second is the annotated item.
+        /// The produced AST fragment replaces the input AST fragment.
+        Box<dyn MultiItemModifier + sync::Sync + sync::Send>,
+    ),
+
+    /// A trivial attribute "macro" that does nothing,
+    /// only keeps the attribute and marks it as inert,
+    /// thus making it ineligible for further expansion.
+    NonMacroAttr {
+        /// Suppresses the `unused_attributes` lint for this attribute.
+        mark_used: bool,
+    },
+
+    /// A token-based derive macro.
+    Derive(
+        /// An expander with signature TokenStream -> TokenStream (not yet).
+        /// The produced TokenStream is appended to the input TokenStream.
+        Box<dyn MultiItemModifier + sync::Sync + sync::Send>,
+    ),
+
+    /// An AST-based derive macro.
+    LegacyDerive(
+        /// An expander with signature AST -> AST.
+        /// The produced AST fragment is appended to the input AST fragment.
+        Box<dyn MultiItemModifier + sync::Sync + sync::Send>,
+    ),
+}
+
+/// A struct representing a macro definition in "lowered" form ready for expansion.
+pub struct SyntaxExtension {
+    /// A syntax extension kind.
+    pub kind: SyntaxExtensionKind,
+    /// Span of the macro definition.
+    pub span: Span,
+    /// Whitelist of unstable features that are treated as stable inside this macro.
+    pub allow_internal_unstable: Option<Lrc<[Symbol]>>,
+    /// Suppresses the `unsafe_code` lint for code produced by this macro.
+    pub allow_internal_unsafe: bool,
+    /// Enables the macro helper hack (`ident!(...)` -> `$crate::ident!(...)`) for this macro.
+    pub local_inner_macros: bool,
+    /// The macro's stability info.
+    pub stability: Option<Stability>,
+    /// The macro's deprecation info.
+    pub deprecation: Option<Deprecation>,
+    /// Names of helper attributes registered by this macro.
+    pub helper_attrs: Vec<Symbol>,
+    /// Edition of the crate in which this macro is defined.
+    pub edition: Edition,
+    /// Built-in macros have a couple of special properties like availability
+    /// in `#[no_implicit_prelude]` modules, so we have to keep this flag.
+    pub is_builtin: bool,
+    /// We have to identify macros providing a `Copy` impl early for compatibility reasons.
+    pub is_derive_copy: bool,
+}
+
+impl SyntaxExtension {
+    /// Returns which kind of macro calls this syntax extension.
+    pub fn macro_kind(&self) -> MacroKind {
+        match self.kind {
+            SyntaxExtensionKind::Bang(..) |
+            SyntaxExtensionKind::LegacyBang(..) => MacroKind::Bang,
+            SyntaxExtensionKind::Attr(..) |
+            SyntaxExtensionKind::LegacyAttr(..) |
+            SyntaxExtensionKind::NonMacroAttr { .. } => MacroKind::Attr,
+            SyntaxExtensionKind::Derive(..) |
+            SyntaxExtensionKind::LegacyDerive(..) => MacroKind::Derive,
+        }
+    }
+
+    /// Constructs a syntax extension with default properties.
+    pub fn default(kind: SyntaxExtensionKind, edition: Edition) -> SyntaxExtension {
+        SyntaxExtension {
+            span: DUMMY_SP,
+            allow_internal_unstable: None,
+            allow_internal_unsafe: false,
+            local_inner_macros: false,
+            stability: None,
+            deprecation: None,
+            helper_attrs: Vec::new(),
+            edition,
+            is_builtin: false,
+            is_derive_copy: false,
+            kind,
+        }
+    }
+
+    /// Constructs a syntax extension with the given properties
+    /// and other properties converted from attributes.
+    pub fn new(
+        sess: &ParseSess,
+        kind: SyntaxExtensionKind,
+        span: Span,
+        helper_attrs: Vec<Symbol>,
+        edition: Edition,
+        name: Name,
+        attrs: &[ast::Attribute],
+    ) -> SyntaxExtension {
+        let allow_internal_unstable = attr::allow_internal_unstable(
+            &attrs, &sess.span_diagnostic,
+        ).map(|features| features.collect::<Vec<Symbol>>().into());
+
+        let mut local_inner_macros = false;
+        if let Some(macro_export) = attr::find_by_name(attrs, sym::macro_export) {
+            if let Some(l) = macro_export.meta_item_list() {
+                local_inner_macros = attr::list_contains_name(&l, sym::local_inner_macros);
+            }
+        }
+
+        let is_builtin = attr::contains_name(attrs, sym::rustc_builtin_macro);
+
+        SyntaxExtension {
+            kind,
+            span,
+            allow_internal_unstable,
+            allow_internal_unsafe: attr::contains_name(attrs, sym::allow_internal_unsafe),
+            local_inner_macros,
+            stability: attr::find_stability(&sess, attrs, span),
+            deprecation: attr::find_deprecation(&sess, attrs, span),
+            helper_attrs,
+            edition,
+            is_builtin,
+            is_derive_copy: is_builtin && name == sym::Copy,
+        }
+    }
+
+    pub fn dummy_bang(edition: Edition) -> SyntaxExtension {
+        fn expander<'cx>(_: &'cx mut ExtCtxt<'_>, span: Span, _: TokenStream)
+                         -> Box<dyn MacResult + 'cx> {
+            DummyResult::any(span)
+        }
+        SyntaxExtension::default(SyntaxExtensionKind::LegacyBang(Box::new(expander)), edition)
+    }
+
+    pub fn dummy_derive(edition: Edition) -> SyntaxExtension {
+        fn expander(_: &mut ExtCtxt<'_>, _: Span, _: &ast::MetaItem, _: Annotatable)
+                    -> Vec<Annotatable> {
+            Vec::new()
+        }
+        SyntaxExtension::default(SyntaxExtensionKind::Derive(Box::new(expander)), edition)
+    }
+
+    pub fn non_macro_attr(mark_used: bool, edition: Edition) -> SyntaxExtension {
+        SyntaxExtension::default(SyntaxExtensionKind::NonMacroAttr { mark_used }, edition)
+    }
+
+    pub fn expn_data(&self, parent: ExpnId, call_site: Span, descr: Symbol) -> ExpnData {
+        ExpnData {
+            kind: ExpnKind::Macro(self.macro_kind(), descr),
+            parent,
+            call_site,
+            def_site: self.span,
+            allow_internal_unstable: self.allow_internal_unstable.clone(),
+            allow_internal_unsafe: self.allow_internal_unsafe,
+            local_inner_macros: self.local_inner_macros,
+            edition: self.edition,
+        }
+    }
+}
+
+pub type NamedSyntaxExtension = (Name, SyntaxExtension);
+
+/// Result of resolving a macro invocation.
+pub enum InvocationRes {
+    Single(Lrc<SyntaxExtension>),
+    DeriveContainer(Vec<Lrc<SyntaxExtension>>),
+}
+
+/// Error type that denotes indeterminacy.
+pub struct Indeterminate;
+
+bitflags::bitflags! {
+    /// Built-in derives that need some extra tracking beyond the usual macro functionality.
+    #[derive(Default)]
+    pub struct SpecialDerives: u8 {
+        const PARTIAL_EQ = 1 << 0;
+        const EQ         = 1 << 1;
+        const COPY       = 1 << 2;
+    }
+}
+
+pub trait Resolver {
+    fn next_node_id(&mut self) -> NodeId;
+
+    fn resolve_dollar_crates(&mut self);
+    fn visit_ast_fragment_with_placeholders(&mut self, expn_id: ExpnId, fragment: &AstFragment);
+    fn register_builtin_macro(&mut self, ident: ast::Ident, ext: SyntaxExtension);
+
+    fn expansion_for_ast_pass(
+        &mut self,
+        call_site: Span,
+        pass: AstPass,
+        features: &[Symbol],
+        parent_module_id: Option<NodeId>,
+    ) -> ExpnId;
+
+    fn resolve_imports(&mut self);
+
+    fn resolve_macro_invocation(
+        &mut self, invoc: &Invocation, eager_expansion_root: ExpnId, force: bool
+    ) -> Result<InvocationRes, Indeterminate>;
+
+    fn check_unused_macros(&self);
+
+    fn has_derives(&self, expn_id: ExpnId, derives: SpecialDerives) -> bool;
+    fn add_derives(&mut self, expn_id: ExpnId, derives: SpecialDerives);
+}
+
+#[derive(Clone)]
+pub struct ModuleData {
+    pub mod_path: Vec<ast::Ident>,
+    pub directory: PathBuf,
+}
+
+#[derive(Clone)]
+pub struct ExpansionData {
+    pub id: ExpnId,
+    pub depth: usize,
+    pub module: Rc<ModuleData>,
+    pub directory_ownership: DirectoryOwnership,
+    pub prior_type_ascription: Option<(Span, bool)>,
+}
+
+/// One of these is made during expansion and incrementally updated as we go;
+/// when a macro expansion occurs, the resulting nodes have the `backtrace()
+/// -> expn_data` of their expansion context stored into their span.
+pub struct ExtCtxt<'a> {
+    pub parse_sess: &'a ParseSess,
+    pub ecfg: expand::ExpansionConfig<'a>,
+    pub root_path: PathBuf,
+    pub resolver: &'a mut dyn Resolver,
+    pub current_expansion: ExpansionData,
+    pub expansions: FxHashMap<Span, Vec<String>>,
+}
+
+impl<'a> ExtCtxt<'a> {
+    pub fn new(parse_sess: &'a ParseSess,
+               ecfg: expand::ExpansionConfig<'a>,
+               resolver: &'a mut dyn Resolver)
+               -> ExtCtxt<'a> {
+        ExtCtxt {
+            parse_sess,
+            ecfg,
+            root_path: PathBuf::new(),
+            resolver,
+            current_expansion: ExpansionData {
+                id: ExpnId::root(),
+                depth: 0,
+                module: Rc::new(ModuleData { mod_path: Vec::new(), directory: PathBuf::new() }),
+                directory_ownership: DirectoryOwnership::Owned { relative: None },
+                prior_type_ascription: None,
+            },
+            expansions: FxHashMap::default(),
+        }
+    }
+
+    /// Returns a `Folder` for deeply expanding all macros in an AST node.
+    pub fn expander<'b>(&'b mut self) -> expand::MacroExpander<'b, 'a> {
+        expand::MacroExpander::new(self, false)
+    }
+
+    /// Returns a `Folder` that deeply expands all macros and assigns all `NodeId`s in an AST node.
+    /// Once `NodeId`s are assigned, the node may not be expanded, removed, or otherwise modified.
+    pub fn monotonic_expander<'b>(&'b mut self) -> expand::MacroExpander<'b, 'a> {
+        expand::MacroExpander::new(self, true)
+    }
+    pub fn new_parser_from_tts(&self, stream: TokenStream) -> parser::Parser<'a> {
+        parse::stream_to_parser(self.parse_sess, stream, MACRO_ARGUMENTS)
+    }
+    pub fn source_map(&self) -> &'a SourceMap { self.parse_sess.source_map() }
+    pub fn parse_sess(&self) -> &'a ParseSess { self.parse_sess }
+    pub fn cfg(&self) -> &ast::CrateConfig { &self.parse_sess.config }
+    pub fn call_site(&self) -> Span {
+        self.current_expansion.id.expn_data().call_site
+    }
+
+    /// Equivalent of `Span::def_site` from the proc macro API,
+    /// except that the location is taken from the span passed as an argument.
+    pub fn with_def_site_ctxt(&self, span: Span) -> Span {
+        span.with_def_site_ctxt(self.current_expansion.id)
+    }
+
+    /// Equivalent of `Span::call_site` from the proc macro API,
+    /// except that the location is taken from the span passed as an argument.
+    pub fn with_call_site_ctxt(&self, span: Span) -> Span {
+        span.with_call_site_ctxt(self.current_expansion.id)
+    }
+
+    /// Equivalent of `Span::mixed_site` from the proc macro API,
+    /// except that the location is taken from the span passed as an argument.
+    pub fn with_mixed_site_ctxt(&self, span: Span) -> Span {
+        span.with_mixed_site_ctxt(self.current_expansion.id)
+    }
+
+    /// Returns span for the macro which originally caused the current expansion to happen.
+    ///
+    /// Stops backtracing at include! boundary.
+    pub fn expansion_cause(&self) -> Option<Span> {
+        let mut expn_id = self.current_expansion.id;
+        let mut last_macro = None;
+        loop {
+            let expn_data = expn_id.expn_data();
+            // Stop going up the backtrace once include! is encountered
+            if expn_data.is_root() || expn_data.kind.descr() == sym::include {
+                break;
+            }
+            expn_id = expn_data.call_site.ctxt().outer_expn();
+            last_macro = Some(expn_data.call_site);
+        }
+        last_macro
+    }
+
+    pub fn struct_span_warn<S: Into<MultiSpan>>(&self,
+                                                sp: S,
+                                                msg: &str)
+                                                -> DiagnosticBuilder<'a> {
+        self.parse_sess.span_diagnostic.struct_span_warn(sp, msg)
+    }
+    pub fn struct_span_err<S: Into<MultiSpan>>(&self,
+                                               sp: S,
+                                               msg: &str)
+                                               -> DiagnosticBuilder<'a> {
+        self.parse_sess.span_diagnostic.struct_span_err(sp, msg)
+    }
+    pub fn struct_span_fatal<S: Into<MultiSpan>>(&self,
+                                                 sp: S,
+                                                 msg: &str)
+                                                 -> DiagnosticBuilder<'a> {
+        self.parse_sess.span_diagnostic.struct_span_fatal(sp, msg)
+    }
+
+    /// Emit `msg` attached to `sp`, and stop compilation immediately.
+    ///
+    /// `span_err` should be strongly preferred where-ever possible:
+    /// this should *only* be used when:
+    ///
+    /// - continuing has a high risk of flow-on errors (e.g., errors in
+    ///   declaring a macro would cause all uses of that macro to
+    ///   complain about "undefined macro"), or
+    /// - there is literally nothing else that can be done (however,
+    ///   in most cases one can construct a dummy expression/item to
+    ///   substitute; we never hit resolve/type-checking so the dummy
+    ///   value doesn't have to match anything)
+    pub fn span_fatal<S: Into<MultiSpan>>(&self, sp: S, msg: &str) -> ! {
+        self.parse_sess.span_diagnostic.span_fatal(sp, msg).raise();
+    }
+
+    /// Emit `msg` attached to `sp`, without immediately stopping
+    /// compilation.
+    ///
+    /// Compilation will be stopped in the near future (at the end of
+    /// the macro expansion phase).
+    pub fn span_err<S: Into<MultiSpan>>(&self, sp: S, msg: &str) {
+        self.parse_sess.span_diagnostic.span_err(sp, msg);
+    }
+    pub fn span_err_with_code<S: Into<MultiSpan>>(&self, sp: S, msg: &str, code: DiagnosticId) {
+        self.parse_sess.span_diagnostic.span_err_with_code(sp, msg, code);
+    }
+    pub fn span_warn<S: Into<MultiSpan>>(&self, sp: S, msg: &str) {
+        self.parse_sess.span_diagnostic.span_warn(sp, msg);
+    }
+    pub fn span_bug<S: Into<MultiSpan>>(&self, sp: S, msg: &str) -> ! {
+        self.parse_sess.span_diagnostic.span_bug(sp, msg);
+    }
+    pub fn trace_macros_diag(&mut self) {
+        for (sp, notes) in self.expansions.iter() {
+            let mut db = self.parse_sess.span_diagnostic.span_note_diag(*sp, "trace_macro");
+            for note in notes {
+                db.note(note);
+            }
+            db.emit();
+        }
+        // Fixme: does this result in errors?
+        self.expansions.clear();
+    }
+    pub fn bug(&self, msg: &str) -> ! {
+        self.parse_sess.span_diagnostic.bug(msg);
+    }
+    pub fn trace_macros(&self) -> bool {
+        self.ecfg.trace_mac
+    }
+    pub fn set_trace_macros(&mut self, x: bool) {
+        self.ecfg.trace_mac = x
+    }
+    pub fn ident_of(&self, st: &str, sp: Span) -> ast::Ident {
+        ast::Ident::from_str_and_span(st, sp)
+    }
+    pub fn std_path(&self, components: &[Symbol]) -> Vec<ast::Ident> {
+        let def_site = self.with_def_site_ctxt(DUMMY_SP);
+        iter::once(Ident::new(kw::DollarCrate, def_site))
+            .chain(components.iter().map(|&s| Ident::with_dummy_span(s)))
+            .collect()
+    }
+    pub fn name_of(&self, st: &str) -> ast::Name {
+        Symbol::intern(st)
+    }
+
+    pub fn check_unused_macros(&self) {
+        self.resolver.check_unused_macros();
+    }
+
+    /// Resolves a path mentioned inside Rust code.
+    ///
+    /// This unifies the logic used for resolving `include_X!`, and `#[doc(include)]` file paths.
+    ///
+    /// Returns an absolute path to the file that `path` refers to.
+    pub fn resolve_path(&self, path: impl Into<PathBuf>, span: Span) -> PathBuf {
+        let path = path.into();
+
+        // Relative paths are resolved relative to the file in which they are found
+        // after macro expansion (that is, they are unhygienic).
+        if !path.is_absolute() {
+            let callsite = span.source_callsite();
+            let mut result = match self.source_map().span_to_unmapped_path(callsite) {
+                FileName::Real(path) => path,
+                FileName::DocTest(path, _) => path,
+                other => panic!("cannot resolve relative path in non-file source `{}`", other),
+            };
+            result.pop();
+            result.push(path);
+            result
+        } else {
+            path
+        }
+    }
+}
+
+/// Extracts a string literal from the macro expanded version of `expr`,
+/// emitting `err_msg` if `expr` is not a string literal. This does not stop
+/// compilation on error, merely emits a non-fatal error and returns `None`.
+pub fn expr_to_spanned_string<'a>(
+    cx: &'a mut ExtCtxt<'_>,
+    expr: P<ast::Expr>,
+    err_msg: &str,
+) -> Result<(Symbol, ast::StrStyle, Span), Option<DiagnosticBuilder<'a>>> {
+    // Perform eager expansion on the expression.
+    // We want to be able to handle e.g., `concat!("foo", "bar")`.
+    let expr = cx.expander().fully_expand_fragment(AstFragment::Expr(expr)).make_expr();
+
+    Err(match expr.kind {
+        ast::ExprKind::Lit(ref l) => match l.kind {
+            ast::LitKind::Str(s, style) => return Ok((s, style, expr.span)),
+            ast::LitKind::Err(_) => None,
+            _ => Some(cx.struct_span_err(l.span, err_msg))
+        },
+        ast::ExprKind::Err => None,
+        _ => Some(cx.struct_span_err(expr.span, err_msg))
+    })
+}
+
+pub fn expr_to_string(cx: &mut ExtCtxt<'_>, expr: P<ast::Expr>, err_msg: &str)
+                      -> Option<(Symbol, ast::StrStyle)> {
+    expr_to_spanned_string(cx, expr, err_msg)
+        .map_err(|err| err.map(|mut err| err.emit()))
+        .ok()
+        .map(|(symbol, style, _)| (symbol, style))
+}
+
+/// Non-fatally assert that `tts` is empty. Note that this function
+/// returns even when `tts` is non-empty, macros that *need* to stop
+/// compilation should call
+/// `cx.parse_sess.span_diagnostic.abort_if_errors()` (this should be
+/// done as rarely as possible).
+pub fn check_zero_tts(cx: &ExtCtxt<'_>,
+                      sp: Span,
+                      tts: TokenStream,
+                      name: &str) {
+    if !tts.is_empty() {
+        cx.span_err(sp, &format!("{} takes no arguments", name));
+    }
+}
+
+/// Interpreting `tts` as a comma-separated sequence of expressions,
+/// expect exactly one string literal, or emit an error and return `None`.
+pub fn get_single_str_from_tts(cx: &mut ExtCtxt<'_>,
+                               sp: Span,
+                               tts: TokenStream,
+                               name: &str)
+                               -> Option<String> {
+    let mut p = cx.new_parser_from_tts(tts);
+    if p.token == token::Eof {
+        cx.span_err(sp, &format!("{} takes 1 argument", name));
+        return None
+    }
+    let ret = panictry!(p.parse_expr());
+    let _ = p.eat(&token::Comma);
+
+    if p.token != token::Eof {
+        cx.span_err(sp, &format!("{} takes 1 argument", name));
+    }
+    expr_to_string(cx, ret, "argument must be a string literal").map(|(s, _)| {
+        s.to_string()
+    })
+}
+
+/// Extracts comma-separated expressions from `tts`. If there is a
+/// parsing error, emit a non-fatal error and return `None`.
+pub fn get_exprs_from_tts(cx: &mut ExtCtxt<'_>,
+                          sp: Span,
+                          tts: TokenStream) -> Option<Vec<P<ast::Expr>>> {
+    let mut p = cx.new_parser_from_tts(tts);
+    let mut es = Vec::new();
+    while p.token != token::Eof {
+        let expr = panictry!(p.parse_expr());
+
+        // Perform eager expansion on the expression.
+        // We want to be able to handle e.g., `concat!("foo", "bar")`.
+        let expr = cx.expander().fully_expand_fragment(AstFragment::Expr(expr)).make_expr();
+
+        es.push(expr);
+        if p.eat(&token::Comma) {
+            continue;
+        }
+        if p.token != token::Eof {
+            cx.span_err(sp, "expected token: `,`");
+            return None;
+        }
+    }
+    Some(es)
+}
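
To see how the expander traits above fit together: a plain function with the `MacroExpanderFn` signature can back a `SyntaxExtensionKind::LegacyBang`, returning any `MacResult` (here a `DummyResult`, mirroring `SyntaxExtension::dummy_bang`; the function name below is illustrative only, not part of the commit):

    // Ignores its input and expands to a dummy (non-error) expression.
    fn expand_noop<'cx>(
        _ecx: &'cx mut ExtCtxt<'_>,
        span: Span,
        _input: TokenStream,
    ) -> Box<dyn MacResult + 'cx> {
        DummyResult::any_valid(span)
    }

    // Wrapped up as an extension with default properties:
    // SyntaxExtension::default(SyntaxExtensionKind::LegacyBang(Box::new(expand_noop)), edition)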
diff --git a/src/libsyntax_expand/build.rs b/src/libsyntax_expand/build.rs
new file mode 100644 (file)
index 0000000..105ffe3
--- /dev/null
@@ -0,0 +1,640 @@
+use crate::base::ExtCtxt;
+
+use syntax::ast::{self, Ident, Expr, BlockCheckMode, UnOp, PatKind};
+use syntax::attr;
+use syntax::source_map::{respan, Spanned};
+use syntax::ptr::P;
+use syntax::symbol::{kw, sym, Symbol};
+use syntax::ThinVec;
+
+use syntax_pos::{Pos, Span};
+
+impl<'a> ExtCtxt<'a> {
+    pub fn path(&self, span: Span, strs: Vec<ast::Ident> ) -> ast::Path {
+        self.path_all(span, false, strs, vec![])
+    }
+    pub fn path_ident(&self, span: Span, id: ast::Ident) -> ast::Path {
+        self.path(span, vec![id])
+    }
+    pub fn path_global(&self, span: Span, strs: Vec<ast::Ident> ) -> ast::Path {
+        self.path_all(span, true, strs, vec![])
+    }
+    pub fn path_all(&self,
+                span: Span,
+                global: bool,
+                mut idents: Vec<ast::Ident> ,
+                args: Vec<ast::GenericArg>)
+                -> ast::Path {
+        assert!(!idents.is_empty());
+        let add_root = global && !idents[0].is_path_segment_keyword();
+        let mut segments = Vec::with_capacity(idents.len() + add_root as usize);
+        if add_root {
+            segments.push(ast::PathSegment::path_root(span));
+        }
+        let last_ident = idents.pop().unwrap();
+        segments.extend(idents.into_iter().map(|ident| {
+            ast::PathSegment::from_ident(ident.with_span_pos(span))
+        }));
+        let args = if !args.is_empty() {
+            ast::AngleBracketedArgs { args, constraints: Vec::new(), span }.into()
+        } else {
+            None
+        };
+        segments.push(ast::PathSegment {
+            ident: last_ident.with_span_pos(span),
+            id: ast::DUMMY_NODE_ID,
+            args,
+        });
+        ast::Path { span, segments }
+    }
+
+    pub fn ty_mt(&self, ty: P<ast::Ty>, mutbl: ast::Mutability) -> ast::MutTy {
+        ast::MutTy {
+            ty,
+            mutbl,
+        }
+    }
+
+    pub fn ty(&self, span: Span, kind: ast::TyKind) -> P<ast::Ty> {
+        P(ast::Ty {
+            id: ast::DUMMY_NODE_ID,
+            span,
+            kind,
+        })
+    }
+
+    pub fn ty_path(&self, path: ast::Path) -> P<ast::Ty> {
+        self.ty(path.span, ast::TyKind::Path(None, path))
+    }
+
+    // Might need to take bounds as an argument in the future, if you ever want
+    // to generate a bounded existential trait type.
+    pub fn ty_ident(&self, span: Span, ident: ast::Ident)
+        -> P<ast::Ty> {
+        self.ty_path(self.path_ident(span, ident))
+    }
+
+    pub fn anon_const(&self, span: Span, kind: ast::ExprKind) -> ast::AnonConst {
+        ast::AnonConst {
+            id: ast::DUMMY_NODE_ID,
+            value: P(ast::Expr {
+                id: ast::DUMMY_NODE_ID,
+                kind,
+                span,
+                attrs: ThinVec::new(),
+            })
+        }
+    }
+
+    pub fn const_ident(&self, span: Span, ident: ast::Ident) -> ast::AnonConst {
+        self.anon_const(span, ast::ExprKind::Path(None, self.path_ident(span, ident)))
+    }
+
+    pub fn ty_rptr(&self,
+               span: Span,
+               ty: P<ast::Ty>,
+               lifetime: Option<ast::Lifetime>,
+               mutbl: ast::Mutability)
+        -> P<ast::Ty> {
+        self.ty(span,
+                ast::TyKind::Rptr(lifetime, self.ty_mt(ty, mutbl)))
+    }
+
+    pub fn ty_ptr(&self,
+              span: Span,
+              ty: P<ast::Ty>,
+              mutbl: ast::Mutability)
+        -> P<ast::Ty> {
+        self.ty(span,
+                ast::TyKind::Ptr(self.ty_mt(ty, mutbl)))
+    }
+
+    pub fn typaram(&self,
+               span: Span,
+               ident: ast::Ident,
+               attrs: Vec<ast::Attribute>,
+               bounds: ast::GenericBounds,
+               default: Option<P<ast::Ty>>) -> ast::GenericParam {
+        ast::GenericParam {
+            ident: ident.with_span_pos(span),
+            id: ast::DUMMY_NODE_ID,
+            attrs: attrs.into(),
+            bounds,
+            kind: ast::GenericParamKind::Type {
+                default,
+            },
+            is_placeholder: false
+        }
+    }
+
+    pub fn trait_ref(&self, path: ast::Path) -> ast::TraitRef {
+        ast::TraitRef {
+            path,
+            ref_id: ast::DUMMY_NODE_ID,
+        }
+    }
+
+    pub fn poly_trait_ref(&self, span: Span, path: ast::Path) -> ast::PolyTraitRef {
+        ast::PolyTraitRef {
+            bound_generic_params: Vec::new(),
+            trait_ref: self.trait_ref(path),
+            span,
+        }
+    }
+
+    pub fn trait_bound(&self, path: ast::Path) -> ast::GenericBound {
+        ast::GenericBound::Trait(self.poly_trait_ref(path.span, path),
+                                 ast::TraitBoundModifier::None)
+    }
+
+    pub fn lifetime(&self, span: Span, ident: ast::Ident) -> ast::Lifetime {
+        ast::Lifetime { id: ast::DUMMY_NODE_ID, ident: ident.with_span_pos(span) }
+    }
+
+    pub fn lifetime_def(&self,
+                    span: Span,
+                    ident: ast::Ident,
+                    attrs: Vec<ast::Attribute>,
+                    bounds: ast::GenericBounds)
+                    -> ast::GenericParam {
+        let lifetime = self.lifetime(span, ident);
+        ast::GenericParam {
+            ident: lifetime.ident,
+            id: lifetime.id,
+            attrs: attrs.into(),
+            bounds,
+            kind: ast::GenericParamKind::Lifetime,
+            is_placeholder: false
+        }
+    }
+
+    pub fn stmt_expr(&self, expr: P<ast::Expr>) -> ast::Stmt {
+        ast::Stmt {
+            id: ast::DUMMY_NODE_ID,
+            span: expr.span,
+            kind: ast::StmtKind::Expr(expr),
+        }
+    }
+
+    pub fn stmt_let(&self, sp: Span, mutbl: bool, ident: ast::Ident,
+                ex: P<ast::Expr>) -> ast::Stmt {
+        let pat = if mutbl {
+            let binding_mode = ast::BindingMode::ByValue(ast::Mutability::Mutable);
+            self.pat_ident_binding_mode(sp, ident, binding_mode)
+        } else {
+            self.pat_ident(sp, ident)
+        };
+        let local = P(ast::Local {
+            pat,
+            ty: None,
+            init: Some(ex),
+            id: ast::DUMMY_NODE_ID,
+            span: sp,
+            attrs: ThinVec::new(),
+        });
+        ast::Stmt {
+            id: ast::DUMMY_NODE_ID,
+            kind: ast::StmtKind::Local(local),
+            span: sp,
+        }
+    }
+
+    // Generates `let _: Type;`, which is usually used for type assertions.
+    pub fn stmt_let_type_only(&self, span: Span, ty: P<ast::Ty>) -> ast::Stmt {
+        let local = P(ast::Local {
+            pat: self.pat_wild(span),
+            ty: Some(ty),
+            init: None,
+            id: ast::DUMMY_NODE_ID,
+            span,
+            attrs: ThinVec::new(),
+        });
+        ast::Stmt {
+            id: ast::DUMMY_NODE_ID,
+            kind: ast::StmtKind::Local(local),
+            span,
+        }
+    }
+
+    pub fn stmt_item(&self, sp: Span, item: P<ast::Item>) -> ast::Stmt {
+        ast::Stmt {
+            id: ast::DUMMY_NODE_ID,
+            kind: ast::StmtKind::Item(item),
+            span: sp,
+        }
+    }
+
+    pub fn block_expr(&self, expr: P<ast::Expr>) -> P<ast::Block> {
+        self.block(expr.span, vec![ast::Stmt {
+            id: ast::DUMMY_NODE_ID,
+            span: expr.span,
+            kind: ast::StmtKind::Expr(expr),
+        }])
+    }
+    pub fn block(&self, span: Span, stmts: Vec<ast::Stmt>) -> P<ast::Block> {
+        P(ast::Block {
+           stmts,
+           id: ast::DUMMY_NODE_ID,
+           rules: BlockCheckMode::Default,
+           span,
+        })
+    }
+
+    pub fn expr(&self, span: Span, kind: ast::ExprKind) -> P<ast::Expr> {
+        P(ast::Expr {
+            id: ast::DUMMY_NODE_ID,
+            kind,
+            span,
+            attrs: ThinVec::new(),
+        })
+    }
+
+    pub fn expr_path(&self, path: ast::Path) -> P<ast::Expr> {
+        self.expr(path.span, ast::ExprKind::Path(None, path))
+    }
+
+    pub fn expr_ident(&self, span: Span, id: ast::Ident) -> P<ast::Expr> {
+        self.expr_path(self.path_ident(span, id))
+    }
+    pub fn expr_self(&self, span: Span) -> P<ast::Expr> {
+        self.expr_ident(span, Ident::with_dummy_span(kw::SelfLower))
+    }
+
+    pub fn expr_binary(&self, sp: Span, op: ast::BinOpKind,
+                   lhs: P<ast::Expr>, rhs: P<ast::Expr>) -> P<ast::Expr> {
+        self.expr(sp, ast::ExprKind::Binary(Spanned { node: op, span: sp }, lhs, rhs))
+    }
+
+    pub fn expr_deref(&self, sp: Span, e: P<ast::Expr>) -> P<ast::Expr> {
+        self.expr(sp, ast::ExprKind::Unary(UnOp::Deref, e))
+    }
+
+    pub fn expr_addr_of(&self, sp: Span, e: P<ast::Expr>) -> P<ast::Expr> {
+        self.expr(sp, ast::ExprKind::AddrOf(ast::Mutability::Immutable, e))
+    }
+
+    pub fn expr_call(
+        &self, span: Span, expr: P<ast::Expr>, args: Vec<P<ast::Expr>>,
+    ) -> P<ast::Expr> {
+        self.expr(span, ast::ExprKind::Call(expr, args))
+    }
+    pub fn expr_call_ident(&self, span: Span, id: ast::Ident,
+                       args: Vec<P<ast::Expr>>) -> P<ast::Expr> {
+        self.expr(span, ast::ExprKind::Call(self.expr_ident(span, id), args))
+    }
+    pub fn expr_call_global(&self, sp: Span, fn_path: Vec<ast::Ident> ,
+                      args: Vec<P<ast::Expr>> ) -> P<ast::Expr> {
+        let pathexpr = self.expr_path(self.path_global(sp, fn_path));
+        self.expr_call(sp, pathexpr, args)
+    }
+    pub fn expr_method_call(&self, span: Span,
+                        expr: P<ast::Expr>,
+                        ident: ast::Ident,
+                        mut args: Vec<P<ast::Expr>> ) -> P<ast::Expr> {
+        args.insert(0, expr);
+        let segment = ast::PathSegment::from_ident(ident.with_span_pos(span));
+        self.expr(span, ast::ExprKind::MethodCall(segment, args))
+    }
+    pub fn expr_block(&self, b: P<ast::Block>) -> P<ast::Expr> {
+        self.expr(b.span, ast::ExprKind::Block(b, None))
+    }
+    pub fn field_imm(&self, span: Span, ident: Ident, e: P<ast::Expr>) -> ast::Field {
+        ast::Field {
+            ident: ident.with_span_pos(span),
+            expr: e,
+            span,
+            is_shorthand: false,
+            attrs: ThinVec::new(),
+            id: ast::DUMMY_NODE_ID,
+            is_placeholder: false,
+        }
+    }
+    pub fn expr_struct(
+        &self, span: Span, path: ast::Path, fields: Vec<ast::Field>
+    ) -> P<ast::Expr> {
+        self.expr(span, ast::ExprKind::Struct(path, fields, None))
+    }
+    pub fn expr_struct_ident(&self, span: Span,
+                         id: ast::Ident, fields: Vec<ast::Field>) -> P<ast::Expr> {
+        self.expr_struct(span, self.path_ident(span, id), fields)
+    }
+
+    pub fn expr_lit(&self, span: Span, lit_kind: ast::LitKind) -> P<ast::Expr> {
+        let lit = ast::Lit::from_lit_kind(lit_kind, span);
+        self.expr(span, ast::ExprKind::Lit(lit))
+    }
+    pub fn expr_usize(&self, span: Span, i: usize) -> P<ast::Expr> {
+        self.expr_lit(span, ast::LitKind::Int(i as u128,
+                                              ast::LitIntType::Unsigned(ast::UintTy::Usize)))
+    }
+    pub fn expr_u32(&self, sp: Span, u: u32) -> P<ast::Expr> {
+        self.expr_lit(sp, ast::LitKind::Int(u as u128,
+                                            ast::LitIntType::Unsigned(ast::UintTy::U32)))
+    }
+    pub fn expr_bool(&self, sp: Span, value: bool) -> P<ast::Expr> {
+        self.expr_lit(sp, ast::LitKind::Bool(value))
+    }
+
+    pub fn expr_vec(&self, sp: Span, exprs: Vec<P<ast::Expr>>) -> P<ast::Expr> {
+        self.expr(sp, ast::ExprKind::Array(exprs))
+    }
+    pub fn expr_vec_slice(&self, sp: Span, exprs: Vec<P<ast::Expr>>) -> P<ast::Expr> {
+        self.expr_addr_of(sp, self.expr_vec(sp, exprs))
+    }
+    pub fn expr_str(&self, sp: Span, s: Symbol) -> P<ast::Expr> {
+        self.expr_lit(sp, ast::LitKind::Str(s, ast::StrStyle::Cooked))
+    }
+
+    pub fn expr_cast(&self, sp: Span, expr: P<ast::Expr>, ty: P<ast::Ty>) -> P<ast::Expr> {
+        self.expr(sp, ast::ExprKind::Cast(expr, ty))
+    }
+
+    pub fn expr_some(&self, sp: Span, expr: P<ast::Expr>) -> P<ast::Expr> {
+        let some = self.std_path(&[sym::option, sym::Option, sym::Some]);
+        self.expr_call_global(sp, some, vec![expr])
+    }
+
+    pub fn expr_tuple(&self, sp: Span, exprs: Vec<P<ast::Expr>>) -> P<ast::Expr> {
+        self.expr(sp, ast::ExprKind::Tup(exprs))
+    }
+
+    pub fn expr_fail(&self, span: Span, msg: Symbol) -> P<ast::Expr> {
+        let loc = self.source_map().lookup_char_pos(span.lo());
+        let expr_file = self.expr_str(span, Symbol::intern(&loc.file.name.to_string()));
+        let expr_line = self.expr_u32(span, loc.line as u32);
+        let expr_col = self.expr_u32(span, loc.col.to_usize() as u32 + 1);
+        let expr_loc_tuple = self.expr_tuple(span, vec![expr_file, expr_line, expr_col]);
+        let expr_loc_ptr = self.expr_addr_of(span, expr_loc_tuple);
+        self.expr_call_global(
+            span,
+            [sym::std, sym::rt, sym::begin_panic].iter().map(|s| Ident::new(*s, span)).collect(),
+            vec![
+                self.expr_str(span, msg),
+                expr_loc_ptr])
+    }
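
The expression assembled by `expr_fail` corresponds, in surface syntax, to a call of roughly this shape (illustrative only; the file, line, and column literals come from looking up `span` in the source map):

    ::std::rt::begin_panic("message", &("file.rs", line, col + 1))
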
+
+    pub fn expr_unreachable(&self, span: Span) -> P<ast::Expr> {
+        self.expr_fail(span, Symbol::intern("internal error: entered unreachable code"))
+    }
+
+    pub fn expr_ok(&self, sp: Span, expr: P<ast::Expr>) -> P<ast::Expr> {
+        let ok = self.std_path(&[sym::result, sym::Result, sym::Ok]);
+        self.expr_call_global(sp, ok, vec![expr])
+    }
+
+    pub fn expr_try(&self, sp: Span, head: P<ast::Expr>) -> P<ast::Expr> {
+        let ok = self.std_path(&[sym::result, sym::Result, sym::Ok]);
+        let ok_path = self.path_global(sp, ok);
+        let err = self.std_path(&[sym::result, sym::Result, sym::Err]);
+        let err_path = self.path_global(sp, err);
+
+        let binding_variable = self.ident_of("__try_var", sp);
+        let binding_pat = self.pat_ident(sp, binding_variable);
+        let binding_expr = self.expr_ident(sp, binding_variable);
+
+        // `Ok(__try_var)` pattern
+        let ok_pat = self.pat_tuple_struct(sp, ok_path, vec![binding_pat.clone()]);
+
+        // `Err(__try_var)` (pattern and expression respectively)
+        let err_pat = self.pat_tuple_struct(sp, err_path.clone(), vec![binding_pat]);
+        let err_inner_expr = self.expr_call(sp, self.expr_path(err_path),
+                                            vec![binding_expr.clone()]);
+        // `return Err(__try_var)`
+        let err_expr = self.expr(sp, ast::ExprKind::Ret(Some(err_inner_expr)));
+
+        // `Ok(__try_var) => __try_var`
+        let ok_arm = self.arm(sp, ok_pat, binding_expr);
+        // `Err(__try_var) => return Err(__try_var)`
+        let err_arm = self.arm(sp, err_pat, err_expr);
+
+        // `match head { Ok() => ..., Err() => ... }`
+        self.expr_match(sp, head, vec![ok_arm, err_arm])
+    }
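
Putting the pieces of `expr_try` together, the generated match has this surface shape (as the inline comments above indicate; `Ok` and `Err` are the global `Result` paths):

    match head {
        Ok(__try_var) => __try_var,
        Err(__try_var) => return Err(__try_var),
    }
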
+
+
+    pub fn pat(&self, span: Span, kind: PatKind) -> P<ast::Pat> {
+        P(ast::Pat { id: ast::DUMMY_NODE_ID, kind, span })
+    }
+    pub fn pat_wild(&self, span: Span) -> P<ast::Pat> {
+        self.pat(span, PatKind::Wild)
+    }
+    pub fn pat_lit(&self, span: Span, expr: P<ast::Expr>) -> P<ast::Pat> {
+        self.pat(span, PatKind::Lit(expr))
+    }
+    pub fn pat_ident(&self, span: Span, ident: ast::Ident) -> P<ast::Pat> {
+        let binding_mode = ast::BindingMode::ByValue(ast::Mutability::Immutable);
+        self.pat_ident_binding_mode(span, ident, binding_mode)
+    }
+
+    pub fn pat_ident_binding_mode(&self,
+                              span: Span,
+                              ident: ast::Ident,
+                              bm: ast::BindingMode) -> P<ast::Pat> {
+        let pat = PatKind::Ident(bm, ident.with_span_pos(span), None);
+        self.pat(span, pat)
+    }
+    pub fn pat_path(&self, span: Span, path: ast::Path) -> P<ast::Pat> {
+        self.pat(span, PatKind::Path(None, path))
+    }
+    pub fn pat_tuple_struct(&self, span: Span, path: ast::Path,
+                        subpats: Vec<P<ast::Pat>>) -> P<ast::Pat> {
+        self.pat(span, PatKind::TupleStruct(path, subpats))
+    }
+    pub fn pat_struct(&self, span: Span, path: ast::Path,
+                      field_pats: Vec<ast::FieldPat>) -> P<ast::Pat> {
+        self.pat(span, PatKind::Struct(path, field_pats, false))
+    }
+    pub fn pat_tuple(&self, span: Span, pats: Vec<P<ast::Pat>>) -> P<ast::Pat> {
+        self.pat(span, PatKind::Tuple(pats))
+    }
+
+    pub fn pat_some(&self, span: Span, pat: P<ast::Pat>) -> P<ast::Pat> {
+        let some = self.std_path(&[sym::option, sym::Option, sym::Some]);
+        let path = self.path_global(span, some);
+        self.pat_tuple_struct(span, path, vec![pat])
+    }
+
+    pub fn pat_none(&self, span: Span) -> P<ast::Pat> {
+        let some = self.std_path(&[sym::option, sym::Option, sym::None]);
+        let path = self.path_global(span, some);
+        self.pat_path(span, path)
+    }
+
+    pub fn pat_ok(&self, span: Span, pat: P<ast::Pat>) -> P<ast::Pat> {
+        let some = self.std_path(&[sym::result, sym::Result, sym::Ok]);
+        let path = self.path_global(span, some);
+        self.pat_tuple_struct(span, path, vec![pat])
+    }
+
+    pub fn pat_err(&self, span: Span, pat: P<ast::Pat>) -> P<ast::Pat> {
+        let some = self.std_path(&[sym::result, sym::Result, sym::Err]);
+        let path = self.path_global(span, some);
+        self.pat_tuple_struct(span, path, vec![pat])
+    }
+
+    pub fn arm(&self, span: Span, pat: P<ast::Pat>, expr: P<ast::Expr>) -> ast::Arm {
+        ast::Arm {
+            attrs: vec![],
+            pat,
+            guard: None,
+            body: expr,
+            span,
+            id: ast::DUMMY_NODE_ID,
+            is_placeholder: false,
+        }
+    }
+
+    pub fn arm_unreachable(&self, span: Span) -> ast::Arm {
+        self.arm(span, self.pat_wild(span), self.expr_unreachable(span))
+    }
+
+    pub fn expr_match(&self, span: Span, arg: P<ast::Expr>, arms: Vec<ast::Arm>) -> P<Expr> {
+        self.expr(span, ast::ExprKind::Match(arg, arms))
+    }
+
+    pub fn expr_if(&self, span: Span, cond: P<ast::Expr>,
+               then: P<ast::Expr>, els: Option<P<ast::Expr>>) -> P<ast::Expr> {
+        let els = els.map(|x| self.expr_block(self.block_expr(x)));
+        self.expr(span, ast::ExprKind::If(cond, self.block_expr(then), els))
+    }
+
+    pub fn lambda_fn_decl(&self,
+                      span: Span,
+                      fn_decl: P<ast::FnDecl>,
+                      body: P<ast::Expr>,
+                      fn_decl_span: Span) // span of the `|...|` part
+                      -> P<ast::Expr> {
+        self.expr(span, ast::ExprKind::Closure(ast::CaptureBy::Ref,
+                                               ast::IsAsync::NotAsync,
+                                               ast::Movability::Movable,
+                                               fn_decl,
+                                               body,
+                                               fn_decl_span))
+    }
+
+    pub fn lambda(&self,
+              span: Span,
+              ids: Vec<ast::Ident>,
+              body: P<ast::Expr>)
+              -> P<ast::Expr> {
+        let fn_decl = self.fn_decl(
+            ids.iter().map(|id| self.param(span, *id, self.ty(span, ast::TyKind::Infer))).collect(),
+            ast::FunctionRetTy::Default(span));
+
+        // FIXME -- We are using `span` as the span of the `|...|`
+        // part of the lambda, but it probably (maybe?) corresponds to
+        // the entire lambda body. Probably we should extend the API
+        // here, but that's not entirely clear.
+        self.expr(span, ast::ExprKind::Closure(ast::CaptureBy::Ref,
+                                               ast::IsAsync::NotAsync,
+                                               ast::Movability::Movable,
+                                               fn_decl,
+                                               body,
+                                               span))
+    }
+
+    pub fn lambda0(&self, span: Span, body: P<ast::Expr>) -> P<ast::Expr> {
+        self.lambda(span, Vec::new(), body)
+    }
+
+    pub fn lambda1(&self, span: Span, body: P<ast::Expr>, ident: ast::Ident) -> P<ast::Expr> {
+        self.lambda(span, vec![ident], body)
+    }
+
+    pub fn lambda_stmts_1(&self, span: Span, stmts: Vec<ast::Stmt>,
+                      ident: ast::Ident) -> P<ast::Expr> {
+        self.lambda1(span, self.expr_block(self.block(span, stmts)), ident)
+    }
+
+    pub fn param(&self, span: Span, ident: ast::Ident, ty: P<ast::Ty>) -> ast::Param {
+        let arg_pat = self.pat_ident(span, ident);
+        ast::Param {
+            attrs: ThinVec::default(),
+            id: ast::DUMMY_NODE_ID,
+            pat: arg_pat,
+            span,
+            ty,
+            is_placeholder: false,
+        }
+    }
+
+    // FIXME: unused `self`
+    pub fn fn_decl(&self, inputs: Vec<ast::Param>, output: ast::FunctionRetTy) -> P<ast::FnDecl> {
+        P(ast::FnDecl {
+            inputs,
+            output,
+        })
+    }
+
+    pub fn item(&self, span: Span, name: Ident,
+            attrs: Vec<ast::Attribute>, kind: ast::ItemKind) -> P<ast::Item> {
+        // FIXME: Would be nice if our generated code didn't violate
+        // Rust coding conventions
+        P(ast::Item {
+            ident: name,
+            attrs,
+            id: ast::DUMMY_NODE_ID,
+            kind,
+            vis: respan(span.shrink_to_lo(), ast::VisibilityKind::Inherited),
+            span,
+            tokens: None,
+        })
+    }
+
+    pub fn variant(&self, span: Span, ident: Ident, tys: Vec<P<ast::Ty>> ) -> ast::Variant {
+        let fields: Vec<_> = tys.into_iter().map(|ty| {
+            ast::StructField {
+                span: ty.span,
+                ty,
+                ident: None,
+                vis: respan(span.shrink_to_lo(), ast::VisibilityKind::Inherited),
+                attrs: Vec::new(),
+                id: ast::DUMMY_NODE_ID,
+                is_placeholder: false,
+            }
+        }).collect();
+
+        let vdata = if fields.is_empty() {
+            ast::VariantData::Unit(ast::DUMMY_NODE_ID)
+        } else {
+            ast::VariantData::Tuple(fields, ast::DUMMY_NODE_ID)
+        };
+
+        ast::Variant {
+            attrs: Vec::new(),
+            data: vdata,
+            disr_expr: None,
+            id: ast::DUMMY_NODE_ID,
+            ident,
+            span,
+            is_placeholder: false,
+        }
+    }
+
+    pub fn item_static(&self,
+                   span: Span,
+                   name: Ident,
+                   ty: P<ast::Ty>,
+                   mutbl: ast::Mutability,
+                   expr: P<ast::Expr>)
+                   -> P<ast::Item> {
+        self.item(span, name, Vec::new(), ast::ItemKind::Static(ty, mutbl, expr))
+    }
+
+    pub fn item_const(&self,
+                  span: Span,
+                  name: Ident,
+                  ty: P<ast::Ty>,
+                  expr: P<ast::Expr>)
+                  -> P<ast::Item> {
+        self.item(span, name, Vec::new(), ast::ItemKind::Const(ty, expr))
+    }
+
+    pub fn attribute(&self, mi: ast::MetaItem) -> ast::Attribute {
+        attr::mk_attr_outer(mi)
+    }
+
+    pub fn meta_word(&self, sp: Span, w: ast::Name) -> ast::MetaItem {
+        attr::mk_word_item(Ident::new(w, sp))
+    }
+}
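
As a usage sketch of the builder methods above, building the statement `let mut total = 1 + 2;` looks like the following. This assumes a live `cx: &ExtCtxt<'_>` and a `sp: Span` are already in scope inside the compiler, so it is not standalone-compilable; `total` is just an example identifier:

    // `1 + 2` as an AST expression (both literals are usize here).
    let sum = cx.expr_binary(
        sp,
        ast::BinOpKind::Add,
        cx.expr_usize(sp, 1),
        cx.expr_usize(sp, 2),
    );
    // `let mut total = 1 + 2;` as an AST statement.
    let stmt = cx.stmt_let(sp, /* mutbl */ true, ast::Ident::from_str("total"), sum);
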
diff --git a/src/libsyntax_expand/expand.rs b/src/libsyntax_expand/expand.rs
new file mode 100644 (file)
index 0000000..f03d464
--- /dev/null
@@ -0,0 +1,1562 @@
+use crate::base::*;
+use crate::proc_macro::{collect_derives, MarkAttrs};
+use crate::hygiene::{ExpnId, SyntaxContext, ExpnData, ExpnKind};
+use crate::mbe::macro_rules::annotate_err_with_kind;
+use crate::placeholders::{placeholder, PlaceholderExpander};
+
+use syntax::ast::{self, AttrItem, Block, Ident, LitKind, NodeId, PatKind, Path};
+use syntax::ast::{MacStmtStyle, StmtKind, ItemKind};
+use syntax::attr::{self, HasAttrs};
+use syntax::source_map::respan;
+use syntax::configure;
+use syntax::config::StripUnconfigured;
+use syntax::feature_gate::{self, Features, GateIssue, is_builtin_attr, emit_feature_err};
+use syntax::mut_visit::*;
+use syntax::parse::{DirectoryOwnership, PResult};
+use syntax::parse::token;
+use syntax::parse::parser::Parser;
+use syntax::print::pprust;
+use syntax::ptr::P;
+use syntax::symbol::{sym, Symbol};
+use syntax::tokenstream::{TokenStream, TokenTree};
+use syntax::visit::Visitor;
+use syntax::util::map_in_place::MapInPlace;
+
+use errors::{Applicability, FatalError};
+use smallvec::{smallvec, SmallVec};
+use syntax_pos::{Span, DUMMY_SP, FileName};
+
+use rustc_data_structures::sync::Lrc;
+use std::io::ErrorKind;
+use std::{iter, mem, slice};
+use std::ops::DerefMut;
+use std::rc::Rc;
+use std::path::PathBuf;
+
+macro_rules! ast_fragments {
+    (
+        $($Kind:ident($AstTy:ty) {
+            $kind_name:expr;
+            $(one fn $mut_visit_ast:ident; fn $visit_ast:ident;)?
+            $(many fn $flat_map_ast_elt:ident; fn $visit_ast_elt:ident;)?
+            fn $make_ast:ident;
+        })*
+    ) => {
+        /// A fragment of AST that can be produced by a single macro expansion.
+        /// Can also serve as an input and intermediate result for macro expansion operations.
+        pub enum AstFragment {
+            OptExpr(Option<P<ast::Expr>>),
+            $($Kind($AstTy),)*
+        }
+
+        /// "Discriminant" of an AST fragment.
+        #[derive(Copy, Clone, PartialEq, Eq)]
+        pub enum AstFragmentKind {
+            OptExpr,
+            $($Kind,)*
+        }
+
+        impl AstFragmentKind {
+            pub fn name(self) -> &'static str {
+                match self {
+                    AstFragmentKind::OptExpr => "expression",
+                    $(AstFragmentKind::$Kind => $kind_name,)*
+                }
+            }
+
+            fn make_from<'a>(self, result: Box<dyn MacResult + 'a>) -> Option<AstFragment> {
+                match self {
+                    AstFragmentKind::OptExpr =>
+                        result.make_expr().map(Some).map(AstFragment::OptExpr),
+                    $(AstFragmentKind::$Kind => result.$make_ast().map(AstFragment::$Kind),)*
+                }
+            }
+        }
+
+        impl AstFragment {
+            pub fn add_placeholders(&mut self, placeholders: &[NodeId]) {
+                if placeholders.is_empty() {
+                    return;
+                }
+                match self {
+                    $($(AstFragment::$Kind(ast) => ast.extend(placeholders.iter().flat_map(|id| {
+                        // We are repeating through arguments with `many`, to do that we have to
+                        // mention some macro variable from those arguments even if it's not used.
+                        #[cfg_attr(bootstrap, allow(unused_macros))]
+                        macro _repeating($flat_map_ast_elt) {}
+                        placeholder(AstFragmentKind::$Kind, *id).$make_ast()
+                    })),)?)*
+                    _ => panic!("unexpected AST fragment kind")
+                }
+            }
+
+            pub fn make_opt_expr(self) -> Option<P<ast::Expr>> {
+                match self {
+                    AstFragment::OptExpr(expr) => expr,
+                    _ => panic!("AstFragment::make_* called on the wrong kind of fragment"),
+                }
+            }
+
+            $(pub fn $make_ast(self) -> $AstTy {
+                match self {
+                    AstFragment::$Kind(ast) => ast,
+                    _ => panic!("AstFragment::make_* called on the wrong kind of fragment"),
+                }
+            })*
+
+            pub fn mut_visit_with<F: MutVisitor>(&mut self, vis: &mut F) {
+                match self {
+                    AstFragment::OptExpr(opt_expr) => {
+                        visit_clobber(opt_expr, |opt_expr| {
+                            if let Some(expr) = opt_expr {
+                                vis.filter_map_expr(expr)
+                            } else {
+                                None
+                            }
+                        });
+                    }
+                    $($(AstFragment::$Kind(ast) => vis.$mut_visit_ast(ast),)?)*
+                    $($(AstFragment::$Kind(ast) =>
+                        ast.flat_map_in_place(|ast| vis.$flat_map_ast_elt(ast)),)?)*
+                }
+            }
+
+            pub fn visit_with<'a, V: Visitor<'a>>(&'a self, visitor: &mut V) {
+                match *self {
+                    AstFragment::OptExpr(Some(ref expr)) => visitor.visit_expr(expr),
+                    AstFragment::OptExpr(None) => {}
+                    $($(AstFragment::$Kind(ref ast) => visitor.$visit_ast(ast),)?)*
+                    $($(AstFragment::$Kind(ref ast) => for ast_elt in &ast[..] {
+                        visitor.$visit_ast_elt(ast_elt);
+                    })?)*
+                }
+            }
+        }
+
+        impl<'a> MacResult for crate::mbe::macro_rules::ParserAnyMacro<'a> {
+            $(fn $make_ast(self: Box<crate::mbe::macro_rules::ParserAnyMacro<'a>>)
+                           -> Option<$AstTy> {
+                Some(self.make(AstFragmentKind::$Kind).$make_ast())
+            })*
+        }
+    }
+}
+
+ast_fragments! {
+    Expr(P<ast::Expr>) { "expression"; one fn visit_expr; fn visit_expr; fn make_expr; }
+    Pat(P<ast::Pat>) { "pattern"; one fn visit_pat; fn visit_pat; fn make_pat; }
+    Ty(P<ast::Ty>) { "type"; one fn visit_ty; fn visit_ty; fn make_ty; }
+    Stmts(SmallVec<[ast::Stmt; 1]>) {
+        "statement"; many fn flat_map_stmt; fn visit_stmt; fn make_stmts;
+    }
+    Items(SmallVec<[P<ast::Item>; 1]>) {
+        "item"; many fn flat_map_item; fn visit_item; fn make_items;
+    }
+    TraitItems(SmallVec<[ast::TraitItem; 1]>) {
+        "trait item"; many fn flat_map_trait_item; fn visit_trait_item; fn make_trait_items;
+    }
+    ImplItems(SmallVec<[ast::ImplItem; 1]>) {
+        "impl item"; many fn flat_map_impl_item; fn visit_impl_item; fn make_impl_items;
+    }
+    ForeignItems(SmallVec<[ast::ForeignItem; 1]>) {
+        "foreign item";
+        many fn flat_map_foreign_item;
+        fn visit_foreign_item;
+        fn make_foreign_items;
+    }
+    Arms(SmallVec<[ast::Arm; 1]>) {
+        "match arm"; many fn flat_map_arm; fn visit_arm; fn make_arms;
+    }
+    Fields(SmallVec<[ast::Field; 1]>) {
+        "field expression"; many fn flat_map_field; fn visit_field; fn make_fields;
+    }
+    FieldPats(SmallVec<[ast::FieldPat; 1]>) {
+        "field pattern";
+        many fn flat_map_field_pattern;
+        fn visit_field_pattern;
+        fn make_field_patterns;
+    }
+    GenericParams(SmallVec<[ast::GenericParam; 1]>) {
+        "generic parameter";
+        many fn flat_map_generic_param;
+        fn visit_generic_param;
+        fn make_generic_params;
+    }
+    Params(SmallVec<[ast::Param; 1]>) {
+        "function parameter"; many fn flat_map_param; fn visit_param; fn make_params;
+    }
+    StructFields(SmallVec<[ast::StructField; 1]>) {
+        "field";
+        many fn flat_map_struct_field;
+        fn visit_struct_field;
+        fn make_struct_fields;
+    }
+    Variants(SmallVec<[ast::Variant; 1]>) {
+        "variant"; many fn flat_map_variant; fn visit_variant; fn make_variants;
+    }
+}
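
To make the `ast_fragments!` definition concrete, its expansion for the `Expr` kind looks roughly like this (a simplified sketch; every other kind listed in the invocation gets the analogous variant and `make_*` accessor):

    pub enum AstFragment {
        OptExpr(Option<P<ast::Expr>>),
        Expr(P<ast::Expr>),
        // ... one variant per kind listed in the invocation above
    }

    impl AstFragment {
        pub fn make_expr(self) -> P<ast::Expr> {
            match self {
                AstFragment::Expr(ast) => ast,
                _ => panic!("AstFragment::make_* called on the wrong kind of fragment"),
            }
        }
    }
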
+
+impl AstFragmentKind {
+    fn dummy(self, span: Span) -> AstFragment {
+        self.make_from(DummyResult::any(span)).expect("couldn't create a dummy AST fragment")
+    }
+
+    fn expect_from_annotatables<I: IntoIterator<Item = Annotatable>>(self, items: I)
+                                                                     -> AstFragment {
+        let mut items = items.into_iter();
+        match self {
+            AstFragmentKind::Arms =>
+                AstFragment::Arms(items.map(Annotatable::expect_arm).collect()),
+            AstFragmentKind::Fields =>
+                AstFragment::Fields(items.map(Annotatable::expect_field).collect()),
+            AstFragmentKind::FieldPats =>
+                AstFragment::FieldPats(items.map(Annotatable::expect_field_pattern).collect()),
+            AstFragmentKind::GenericParams =>
+                AstFragment::GenericParams(items.map(Annotatable::expect_generic_param).collect()),
+            AstFragmentKind::Params =>
+                AstFragment::Params(items.map(Annotatable::expect_param).collect()),
+            AstFragmentKind::StructFields => AstFragment::StructFields(
+                items.map(Annotatable::expect_struct_field).collect()
+            ),
+            AstFragmentKind::Variants =>
+                AstFragment::Variants(items.map(Annotatable::expect_variant).collect()),
+            AstFragmentKind::Items =>
+                AstFragment::Items(items.map(Annotatable::expect_item).collect()),
+            AstFragmentKind::ImplItems =>
+                AstFragment::ImplItems(items.map(Annotatable::expect_impl_item).collect()),
+            AstFragmentKind::TraitItems =>
+                AstFragment::TraitItems(items.map(Annotatable::expect_trait_item).collect()),
+            AstFragmentKind::ForeignItems =>
+                AstFragment::ForeignItems(items.map(Annotatable::expect_foreign_item).collect()),
+            AstFragmentKind::Stmts =>
+                AstFragment::Stmts(items.map(Annotatable::expect_stmt).collect()),
+            AstFragmentKind::Expr => AstFragment::Expr(
+                items.next().expect("expected exactly one expression").expect_expr()
+            ),
+            AstFragmentKind::OptExpr =>
+                AstFragment::OptExpr(items.next().map(Annotatable::expect_expr)),
+            AstFragmentKind::Pat | AstFragmentKind::Ty =>
+                panic!("patterns and types aren't annotatable"),
+        }
+    }
+}
+
+pub struct Invocation {
+    pub kind: InvocationKind,
+    pub fragment_kind: AstFragmentKind,
+    pub expansion_data: ExpansionData,
+}
+
+pub enum InvocationKind {
+    Bang {
+        mac: ast::Mac,
+        span: Span,
+    },
+    Attr {
+        attr: ast::Attribute,
+        item: Annotatable,
+        // Required for resolving derive helper attributes.
+        derives: Vec<Path>,
+        // We temporarily report errors for attribute macros placed after derives
+        after_derive: bool,
+    },
+    Derive {
+        path: Path,
+        item: Annotatable,
+    },
+    /// "Invocation" that contains all derives from an item,
+    /// broken into multiple `Derive` invocations when expanded.
+    /// FIXME: Find a way to remove it.
+    DeriveContainer {
+        derives: Vec<Path>,
+        item: Annotatable,
+    },
+}
+
+impl Invocation {
+    pub fn span(&self) -> Span {
+        match &self.kind {
+            InvocationKind::Bang { span, .. } => *span,
+            InvocationKind::Attr { attr, .. } => attr.span,
+            InvocationKind::Derive { path, .. } => path.span,
+            InvocationKind::DeriveContainer { item, .. } => item.span(),
+        }
+    }
+}
+
+pub struct MacroExpander<'a, 'b> {
+    pub cx: &'a mut ExtCtxt<'b>,
+    monotonic: bool, // cf. `cx.monotonic_expander()`
+}
+
+impl<'a, 'b> MacroExpander<'a, 'b> {
+    pub fn new(cx: &'a mut ExtCtxt<'b>, monotonic: bool) -> Self {
+        MacroExpander { cx, monotonic }
+    }
+
+    pub fn expand_crate(&mut self, mut krate: ast::Crate) -> ast::Crate {
+        let mut module = ModuleData {
+            mod_path: vec![Ident::from_str(&self.cx.ecfg.crate_name)],
+            directory: match self.cx.source_map().span_to_unmapped_path(krate.span) {
+                FileName::Real(path) => path,
+                other => PathBuf::from(other.to_string()),
+            },
+        };
+        module.directory.pop();
+        self.cx.root_path = module.directory.clone();
+        self.cx.current_expansion.module = Rc::new(module);
+
+        let orig_mod_span = krate.module.inner;
+
+        let krate_item = AstFragment::Items(smallvec![P(ast::Item {
+            attrs: krate.attrs,
+            span: krate.span,
+            kind: ast::ItemKind::Mod(krate.module),
+            ident: Ident::invalid(),
+            id: ast::DUMMY_NODE_ID,
+            vis: respan(krate.span.shrink_to_lo(), ast::VisibilityKind::Public),
+            tokens: None,
+        })]);
+
+        match self.fully_expand_fragment(krate_item).make_items().pop().map(P::into_inner) {
+            Some(ast::Item { attrs, kind: ast::ItemKind::Mod(module), .. }) => {
+                krate.attrs = attrs;
+                krate.module = module;
+            },
+            None => {
+                // Resolution failed so we return an empty expansion
+                krate.attrs = vec![];
+                krate.module = ast::Mod {
+                    inner: orig_mod_span,
+                    items: vec![],
+                    inline: true,
+                };
+            },
+            _ => unreachable!(),
+        };
+        self.cx.trace_macros_diag();
+        krate
+    }
+
+    // Recursively expand all macro invocations in this AST fragment.
+    pub fn fully_expand_fragment(&mut self, input_fragment: AstFragment) -> AstFragment {
+        let orig_expansion_data = self.cx.current_expansion.clone();
+        self.cx.current_expansion.depth = 0;
+
+        // Collect all macro invocations and replace them with placeholders.
+        let (mut fragment_with_placeholders, mut invocations)
+            = self.collect_invocations(input_fragment, &[]);
+
+        // Optimization: if we resolve all imports now,
+        // we'll be able to immediately resolve most of the imported macros.
+        self.resolve_imports();
+
+        // Resolve paths in all invocations and produce expanded output fragments for them, but
+        // do not insert them into our input AST fragment yet; only store them in `expanded_fragments`.
+        // The output fragments also go through expansion recursively until no invocations are left.
+        // Unresolved macros produce dummy outputs as a recovery measure.
+        invocations.reverse();
+        let mut expanded_fragments = Vec::new();
+        let mut undetermined_invocations = Vec::new();
+        let (mut progress, mut force) = (false, !self.monotonic);
+        loop {
+            let invoc = if let Some(invoc) = invocations.pop() {
+                invoc
+            } else {
+                self.resolve_imports();
+                if undetermined_invocations.is_empty() { break }
+                invocations = mem::take(&mut undetermined_invocations);
+                force = !mem::replace(&mut progress, false);
+                continue
+            };
+
+            let eager_expansion_root =
+                if self.monotonic { invoc.expansion_data.id } else { orig_expansion_data.id };
+            let res = match self.cx.resolver.resolve_macro_invocation(
+                &invoc, eager_expansion_root, force
+            ) {
+                Ok(res) => res,
+                Err(Indeterminate) => {
+                    undetermined_invocations.push(invoc);
+                    continue
+                }
+            };
+
+            progress = true;
+            let ExpansionData { depth, id: expn_id, .. } = invoc.expansion_data;
+            self.cx.current_expansion = invoc.expansion_data.clone();
+
+            // FIXME(jseyfried): Refactor out the following logic
+            let (expanded_fragment, new_invocations) = match res {
+                InvocationRes::Single(ext) => {
+                    let fragment = self.expand_invoc(invoc, &ext.kind);
+                    self.collect_invocations(fragment, &[])
+                }
+                InvocationRes::DeriveContainer(exts) => {
+                    let (derives, item) = match invoc.kind {
+                        InvocationKind::DeriveContainer { derives, item } => (derives, item),
+                        _ => unreachable!(),
+                    };
+                    if !item.derive_allowed() {
+                        let attr = attr::find_by_name(item.attrs(), sym::derive)
+                            .expect("`derive` attribute should exist");
+                        let span = attr.span;
+                        let mut err = self.cx.struct_span_err(span,
+                            "`derive` may only be applied to structs, enums and unions");
+                        if let ast::AttrStyle::Inner = attr.style {
+                            let trait_list = derives.iter()
+                                .map(|t| pprust::path_to_string(t))
+                                .collect::<Vec<_>>();
+                            let suggestion = format!("#[derive({})]", trait_list.join(", "));
+                            err.span_suggestion(
+                                span, "try an outer attribute", suggestion,
+                                // We don't 𝑘𝑛𝑜𝑤 that the following item is an ADT
+                                Applicability::MaybeIncorrect
+                            );
+                        }
+                        err.emit();
+                    }
+
+                    let mut item = self.fully_configure(item);
+                    item.visit_attrs(|attrs| attrs.retain(|a| a.path != sym::derive));
+                    let mut helper_attrs = Vec::new();
+                    let mut has_copy = false;
+                    for ext in exts {
+                        helper_attrs.extend(&ext.helper_attrs);
+                        has_copy |= ext.is_derive_copy;
+                    }
+                    // Mark derive helpers inside this item as known and used.
+                    // FIXME: This is a hack, derive helpers should be integrated with regular name
+                    // resolution instead. For example, helpers introduced by a derive container
+                    // can be in scope for all code produced by that container's expansion.
+                    item.visit_with(&mut MarkAttrs(&helper_attrs));
+                    if has_copy {
+                        self.cx.resolver.add_derives(invoc.expansion_data.id, SpecialDerives::COPY);
+                    }
+
+                    let mut derive_placeholders = Vec::with_capacity(derives.len());
+                    invocations.reserve(derives.len());
+                    for path in derives {
+                        let expn_id = ExpnId::fresh(None);
+                        derive_placeholders.push(NodeId::placeholder_from_expn_id(expn_id));
+                        invocations.push(Invocation {
+                            kind: InvocationKind::Derive { path, item: item.clone() },
+                            fragment_kind: invoc.fragment_kind,
+                            expansion_data: ExpansionData {
+                                id: expn_id,
+                                ..invoc.expansion_data.clone()
+                            },
+                        });
+                    }
+                    let fragment = invoc.fragment_kind
+                        .expect_from_annotatables(::std::iter::once(item));
+                    self.collect_invocations(fragment, &derive_placeholders)
+                }
+            };
+
+            if expanded_fragments.len() < depth {
+                expanded_fragments.push(Vec::new());
+            }
+            expanded_fragments[depth - 1].push((expn_id, expanded_fragment));
+            if !self.cx.ecfg.single_step {
+                invocations.extend(new_invocations.into_iter().rev());
+            }
+        }
+
+        self.cx.current_expansion = orig_expansion_data;
+
+        // Finally incorporate all the expanded macros into the input AST fragment.
+        let mut placeholder_expander = PlaceholderExpander::new(self.cx, self.monotonic);
+        while let Some(expanded_fragments) = expanded_fragments.pop() {
+            for (expn_id, expanded_fragment) in expanded_fragments.into_iter().rev() {
+                placeholder_expander.add(NodeId::placeholder_from_expn_id(expn_id),
+                                         expanded_fragment);
+            }
+        }
+        fragment_with_placeholders.mut_visit_with(&mut placeholder_expander);
+        fragment_with_placeholders
+    }
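
The worklist loop above alternates between draining resolvable invocations and retrying the undetermined ones, forcing resolution only once a full pass makes no progress. A minimal standalone sketch of that control flow, with a hypothetical `resolve` callback standing in for `resolve_macro_invocation`:

    use std::mem;

    // Try each item; defer the ones that are still undetermined, and force
    // resolution once a whole pass over the deferred items made no progress.
    fn drain_worklist<T>(mut work: Vec<T>, mut resolve: impl FnMut(&T, bool) -> bool) {
        let mut deferred = Vec::new();
        let (mut progress, mut force) = (false, false);
        loop {
            let item = match work.pop() {
                Some(item) => item,
                None => {
                    if deferred.is_empty() { break; }
                    work = mem::take(&mut deferred);
                    force = !mem::replace(&mut progress, false);
                    continue;
                }
            };
            if resolve(&item, force) {
                progress = true; // expansion output would be collected here
            } else {
                deferred.push(item);
            }
        }
    }
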
+
+    fn resolve_imports(&mut self) {
+        if self.monotonic {
+            self.cx.resolver.resolve_imports();
+        }
+    }
+
+    /// Collects all macro invocations reachable at this time in this AST fragment and replaces
+    /// them with "placeholders" - dummy macro invocations with specially crafted `NodeId`s.
+    /// Then calls into the resolver, which builds a skeleton ("reduced graph") of the fragment and
+    /// prepares data for resolving paths of macro invocations.
+    fn collect_invocations(&mut self, mut fragment: AstFragment, extra_placeholders: &[NodeId])
+                           -> (AstFragment, Vec<Invocation>) {
+        // Resolve `$crate`s in the fragment for pretty-printing.
+        self.cx.resolver.resolve_dollar_crates();
+
+        let invocations = {
+            let mut collector = InvocationCollector {
+                cfg: StripUnconfigured {
+                    sess: self.cx.parse_sess,
+                    features: self.cx.ecfg.features,
+                },
+                cx: self.cx,
+                invocations: Vec::new(),
+                monotonic: self.monotonic,
+            };
+            fragment.mut_visit_with(&mut collector);
+            fragment.add_placeholders(extra_placeholders);
+            collector.invocations
+        };
+
+        if self.monotonic {
+            self.cx.resolver.visit_ast_fragment_with_placeholders(
+                self.cx.current_expansion.id, &fragment
+            );
+        }
+
+        (fragment, invocations)
+    }
+
+    fn fully_configure(&mut self, item: Annotatable) -> Annotatable {
+        let mut cfg = StripUnconfigured {
+            sess: self.cx.parse_sess,
+            features: self.cx.ecfg.features,
+        };
+        // Since the item itself has already been configured by the InvocationCollector,
+        // we know that the fold result vector will contain exactly one element.
+        match item {
+            Annotatable::Item(item) => {
+                Annotatable::Item(cfg.flat_map_item(item).pop().unwrap())
+            }
+            Annotatable::TraitItem(item) => {
+                Annotatable::TraitItem(
+                    item.map(|item| cfg.flat_map_trait_item(item).pop().unwrap()))
+            }
+            Annotatable::ImplItem(item) => {
+                Annotatable::ImplItem(item.map(|item| cfg.flat_map_impl_item(item).pop().unwrap()))
+            }
+            Annotatable::ForeignItem(item) => {
+                Annotatable::ForeignItem(
+                    item.map(|item| cfg.flat_map_foreign_item(item).pop().unwrap())
+                )
+            }
+            Annotatable::Stmt(stmt) => {
+                Annotatable::Stmt(stmt.map(|stmt| cfg.flat_map_stmt(stmt).pop().unwrap()))
+            }
+            Annotatable::Expr(mut expr) => {
+                Annotatable::Expr({ cfg.visit_expr(&mut expr); expr })
+            }
+            Annotatable::Arm(arm) => {
+                Annotatable::Arm(cfg.flat_map_arm(arm).pop().unwrap())
+            }
+            Annotatable::Field(field) => {
+                Annotatable::Field(cfg.flat_map_field(field).pop().unwrap())
+            }
+            Annotatable::FieldPat(fp) => {
+                Annotatable::FieldPat(cfg.flat_map_field_pattern(fp).pop().unwrap())
+            }
+            Annotatable::GenericParam(param) => {
+                Annotatable::GenericParam(cfg.flat_map_generic_param(param).pop().unwrap())
+            }
+            Annotatable::Param(param) => {
+                Annotatable::Param(cfg.flat_map_param(param).pop().unwrap())
+            }
+            Annotatable::StructField(sf) => {
+                Annotatable::StructField(cfg.flat_map_struct_field(sf).pop().unwrap())
+            }
+            Annotatable::Variant(v) => {
+                Annotatable::Variant(cfg.flat_map_variant(v).pop().unwrap())
+            }
+        }
+    }
+
+    fn expand_invoc(&mut self, invoc: Invocation, ext: &SyntaxExtensionKind) -> AstFragment {
+        if self.cx.current_expansion.depth > self.cx.ecfg.recursion_limit {
+            let expn_data = self.cx.current_expansion.id.expn_data();
+            let suggested_limit = self.cx.ecfg.recursion_limit * 2;
+            let mut err = self.cx.struct_span_err(expn_data.call_site,
+                &format!("recursion limit reached while expanding the macro `{}`",
+                         expn_data.kind.descr()));
+            err.help(&format!(
+                "consider adding a `#![recursion_limit=\"{}\"]` attribute to your crate",
+                suggested_limit));
+            err.emit();
+            self.cx.trace_macros_diag();
+            FatalError.raise();
+        }
+
+        let (fragment_kind, span) = (invoc.fragment_kind, invoc.span());
+        match invoc.kind {
+            InvocationKind::Bang { mac, .. } => match ext {
+                SyntaxExtensionKind::Bang(expander) => {
+                    self.gate_proc_macro_expansion_kind(span, fragment_kind);
+                    let tok_result = expander.expand(self.cx, span, mac.stream());
+                    self.parse_ast_fragment(tok_result, fragment_kind, &mac.path, span)
+                }
+                SyntaxExtensionKind::LegacyBang(expander) => {
+                    let prev = self.cx.current_expansion.prior_type_ascription;
+                    self.cx.current_expansion.prior_type_ascription = mac.prior_type_ascription;
+                    let tok_result = expander.expand(self.cx, span, mac.stream());
+                    let result = if let Some(result) = fragment_kind.make_from(tok_result) {
+                        result
+                    } else {
+                        let msg = format!(
+                            "non-{kind} macro in {kind} position: {path}",
+                            kind = fragment_kind.name(),
+                            path = pprust::path_to_string(&mac.path),
+                        );
+                        self.cx.span_err(span, &msg);
+                        self.cx.trace_macros_diag();
+                        fragment_kind.dummy(span)
+                    };
+                    self.cx.current_expansion.prior_type_ascription = prev;
+                    result
+                }
+                _ => unreachable!()
+            }
+            InvocationKind::Attr { attr, mut item, .. } => match ext {
+                SyntaxExtensionKind::Attr(expander) => {
+                    self.gate_proc_macro_attr_item(span, &item);
+                    let item_tok = TokenTree::token(token::Interpolated(Lrc::new(match item {
+                        Annotatable::Item(item) => token::NtItem(item),
+                        Annotatable::TraitItem(item) => token::NtTraitItem(item.into_inner()),
+                        Annotatable::ImplItem(item) => token::NtImplItem(item.into_inner()),
+                        Annotatable::ForeignItem(item) => token::NtForeignItem(item.into_inner()),
+                        Annotatable::Stmt(stmt) => token::NtStmt(stmt.into_inner()),
+                        Annotatable::Expr(expr) => token::NtExpr(expr),
+                        Annotatable::Arm(..)
+                        | Annotatable::Field(..)
+                        | Annotatable::FieldPat(..)
+                        | Annotatable::GenericParam(..)
+                        | Annotatable::Param(..)
+                        | Annotatable::StructField(..)
+                        | Annotatable::Variant(..)
+                            => panic!("unexpected annotatable"),
+                    })), DUMMY_SP).into();
+                    let input = self.extract_proc_macro_attr_input(attr.item.tokens, span);
+                    let tok_result = expander.expand(self.cx, span, input, item_tok);
+                    self.parse_ast_fragment(tok_result, fragment_kind, &attr.item.path, span)
+                }
+                SyntaxExtensionKind::LegacyAttr(expander) => {
+                    match attr.parse_meta(self.cx.parse_sess) {
+                        Ok(meta) => {
+                            let item = expander.expand(self.cx, span, &meta, item);
+                            fragment_kind.expect_from_annotatables(item)
+                        }
+                        Err(mut err) => {
+                            err.emit();
+                            fragment_kind.dummy(span)
+                        }
+                    }
+                }
+                SyntaxExtensionKind::NonMacroAttr { mark_used } => {
+                    attr::mark_known(&attr);
+                    if *mark_used {
+                        attr::mark_used(&attr);
+                    }
+                    item.visit_attrs(|attrs| attrs.push(attr));
+                    fragment_kind.expect_from_annotatables(iter::once(item))
+                }
+                _ => unreachable!()
+            }
+            InvocationKind::Derive { path, item } => match ext {
+                SyntaxExtensionKind::Derive(expander) |
+                SyntaxExtensionKind::LegacyDerive(expander) => {
+                    if !item.derive_allowed() {
+                        return fragment_kind.dummy(span);
+                    }
+                    let meta = ast::MetaItem { kind: ast::MetaItemKind::Word, span, path };
+                    let items = expander.expand(self.cx, span, &meta, item);
+                    fragment_kind.expect_from_annotatables(items)
+                }
+                _ => unreachable!()
+            }
+            InvocationKind::DeriveContainer { .. } => unreachable!()
+        }
+    }
+
+    fn extract_proc_macro_attr_input(&self, tokens: TokenStream, span: Span) -> TokenStream {
+        let mut trees = tokens.trees();
+        match trees.next() {
+            Some(TokenTree::Delimited(_, _, tts)) => {
+                if trees.next().is_none() {
+                    return tts.into()
+                }
+            }
+            Some(TokenTree::Token(..)) => {}
+            None => return TokenStream::default(),
+        }
+        self.cx.span_err(span, "custom attribute invocations must be \
+            of the form `#[foo]` or `#[foo(..)]`, the macro name must only be \
+            followed by a delimiter token");
+        TokenStream::default()
+    }
+
+    fn gate_proc_macro_attr_item(&self, span: Span, item: &Annotatable) {
+        let (kind, gate) = match *item {
+            Annotatable::Item(ref item) => {
+                match item.kind {
+                    ItemKind::Mod(_) if self.cx.ecfg.proc_macro_hygiene() => return,
+                    ItemKind::Mod(_) => ("modules", sym::proc_macro_hygiene),
+                    _ => return,
+                }
+            }
+            Annotatable::TraitItem(_) => return,
+            Annotatable::ImplItem(_) => return,
+            Annotatable::ForeignItem(_) => return,
+            Annotatable::Stmt(_) |
+            Annotatable::Expr(_) if self.cx.ecfg.proc_macro_hygiene() => return,
+            Annotatable::Stmt(_) => ("statements", sym::proc_macro_hygiene),
+            Annotatable::Expr(_) => ("expressions", sym::proc_macro_hygiene),
+            Annotatable::Arm(..)
+            | Annotatable::Field(..)
+            | Annotatable::FieldPat(..)
+            | Annotatable::GenericParam(..)
+            | Annotatable::Param(..)
+            | Annotatable::StructField(..)
+            | Annotatable::Variant(..)
+            => panic!("unexpected annotatable"),
+        };
+        emit_feature_err(
+            self.cx.parse_sess,
+            gate,
+            span,
+            GateIssue::Language,
+            &format!("custom attributes cannot be applied to {}", kind),
+        );
+    }
+
+    fn gate_proc_macro_expansion_kind(&self, span: Span, kind: AstFragmentKind) {
+        let kind = match kind {
+            AstFragmentKind::Expr |
+            AstFragmentKind::OptExpr => "expressions",
+            AstFragmentKind::Pat => "patterns",
+            AstFragmentKind::Stmts => "statements",
+            AstFragmentKind::Ty |
+            AstFragmentKind::Items |
+            AstFragmentKind::TraitItems |
+            AstFragmentKind::ImplItems |
+            AstFragmentKind::ForeignItems => return,
+            AstFragmentKind::Arms
+            | AstFragmentKind::Fields
+            | AstFragmentKind::FieldPats
+            | AstFragmentKind::GenericParams
+            | AstFragmentKind::Params
+            | AstFragmentKind::StructFields
+            | AstFragmentKind::Variants
+                => panic!("unexpected AST fragment kind"),
+        };
+        if self.cx.ecfg.proc_macro_hygiene() {
+            return
+        }
+        emit_feature_err(
+            self.cx.parse_sess,
+            sym::proc_macro_hygiene,
+            span,
+            GateIssue::Language,
+            &format!("procedural macros cannot be expanded to {}", kind),
+        );
+    }
+
+    fn parse_ast_fragment(
+        &mut self,
+        toks: TokenStream,
+        kind: AstFragmentKind,
+        path: &Path,
+        span: Span,
+    ) -> AstFragment {
+        let mut parser = self.cx.new_parser_from_tts(toks);
+        match parse_ast_fragment(&mut parser, kind, false) {
+            Ok(fragment) => {
+                ensure_complete_parse(&mut parser, path, kind.name(), span);
+                fragment
+            }
+            Err(mut err) => {
+                err.set_span(span);
+                annotate_err_with_kind(&mut err, kind, span);
+                err.emit();
+                self.cx.trace_macros_diag();
+                kind.dummy(span)
+            }
+        }
+    }
+}
+
+pub fn parse_ast_fragment<'a>(
+    this: &mut Parser<'a>,
+    kind: AstFragmentKind,
+    macro_legacy_warnings: bool,
+) -> PResult<'a, AstFragment> {
+    Ok(match kind {
+        AstFragmentKind::Items => {
+            let mut items = SmallVec::new();
+            while let Some(item) = this.parse_item()? {
+                items.push(item);
+            }
+            AstFragment::Items(items)
+        }
+        AstFragmentKind::TraitItems => {
+            let mut items = SmallVec::new();
+            while this.token != token::Eof {
+                items.push(this.parse_trait_item(&mut false)?);
+            }
+            AstFragment::TraitItems(items)
+        }
+        AstFragmentKind::ImplItems => {
+            let mut items = SmallVec::new();
+            while this.token != token::Eof {
+                items.push(this.parse_impl_item(&mut false)?);
+            }
+            AstFragment::ImplItems(items)
+        }
+        AstFragmentKind::ForeignItems => {
+            let mut items = SmallVec::new();
+            while this.token != token::Eof {
+                items.push(this.parse_foreign_item(DUMMY_SP)?);
+            }
+            AstFragment::ForeignItems(items)
+        }
+        AstFragmentKind::Stmts => {
+            let mut stmts = SmallVec::new();
+            while this.token != token::Eof &&
+                    // won't make progress on a `}`
+                    this.token != token::CloseDelim(token::Brace) {
+                if let Some(stmt) = this.parse_full_stmt(macro_legacy_warnings)? {
+                    stmts.push(stmt);
+                }
+            }
+            AstFragment::Stmts(stmts)
+        }
+        AstFragmentKind::Expr => AstFragment::Expr(this.parse_expr()?),
+        AstFragmentKind::OptExpr => {
+            if this.token != token::Eof {
+                AstFragment::OptExpr(Some(this.parse_expr()?))
+            } else {
+                AstFragment::OptExpr(None)
+            }
+        },
+        AstFragmentKind::Ty => AstFragment::Ty(this.parse_ty()?),
+        AstFragmentKind::Pat => AstFragment::Pat(this.parse_pat(None)?),
+        AstFragmentKind::Arms
+        | AstFragmentKind::Fields
+        | AstFragmentKind::FieldPats
+        | AstFragmentKind::GenericParams
+        | AstFragmentKind::Params
+        | AstFragmentKind::StructFields
+        | AstFragmentKind::Variants
+            => panic!("unexpected AST fragment kind"),
+    })
+}
+
+pub fn ensure_complete_parse<'a>(
+    this: &mut Parser<'a>,
+    macro_path: &Path,
+    kind_name: &str,
+    span: Span,
+) {
+    if this.token != token::Eof {
+        let msg = format!("macro expansion ignores token `{}` and any following",
+                            this.this_token_to_string());
+        // Avoid emitting backtrace info twice.
+        let def_site_span = this.token.span.with_ctxt(SyntaxContext::root());
+        let mut err = this.struct_span_err(def_site_span, &msg);
+        err.span_label(span, "caused by the macro expansion here");
+        let msg = format!(
+            "the usage of `{}!` is likely invalid in {} context",
+            pprust::path_to_string(macro_path),
+            kind_name,
+        );
+        err.note(&msg);
+        let semi_span = this.sess.source_map().next_point(span);
+
+        let semi_full_span = semi_span.to(this.sess.source_map().next_point(semi_span));
+        match this.sess.source_map().span_to_snippet(semi_full_span) {
+            Ok(ref snippet) if &snippet[..] != ";" && kind_name == "expression" => {
+                err.span_suggestion(
+                    semi_span,
+                    "you might be missing a semicolon here",
+                    ";".to_owned(),
+                    Applicability::MaybeIncorrect,
+                );
+            }
+            _ => {}
+        }
+        err.emit();
+    }
+}
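+// For example, if a macro `m` (hypothetical) expands to `1; 2` and the expansion is parsed
+// as an expression, parsing stops after `1`; the leftover `;` triggers the error above, and
+// in expression context a trailing semicolon after the `m!()` call may be suggested.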
+
+struct InvocationCollector<'a, 'b> {
+    cx: &'a mut ExtCtxt<'b>,
+    cfg: StripUnconfigured<'a>,
+    invocations: Vec<Invocation>,
+    monotonic: bool,
+}
+
+impl<'a, 'b> InvocationCollector<'a, 'b> {
+    fn collect(&mut self, fragment_kind: AstFragmentKind, kind: InvocationKind) -> AstFragment {
+        // Expansion data for all the collected invocations is set upon their resolution,
+        // with exception of the derive container case which is not resolved and can get
+        // its expansion data immediately.
+        let expn_data = match &kind {
+            InvocationKind::DeriveContainer { item, .. } => Some(ExpnData {
+                parent: self.cx.current_expansion.id,
+                ..ExpnData::default(
+                    ExpnKind::Macro(MacroKind::Attr, sym::derive),
+                    item.span(), self.cx.parse_sess.edition,
+                )
+            }),
+            _ => None,
+        };
+        let expn_id = ExpnId::fresh(expn_data);
+        self.invocations.push(Invocation {
+            kind,
+            fragment_kind,
+            expansion_data: ExpansionData {
+                id: expn_id,
+                depth: self.cx.current_expansion.depth + 1,
+                ..self.cx.current_expansion.clone()
+            },
+        });
+        placeholder(fragment_kind, NodeId::placeholder_from_expn_id(expn_id))
+    }
+
+    fn collect_bang(&mut self, mac: ast::Mac, span: Span, kind: AstFragmentKind) -> AstFragment {
+        self.collect(kind, InvocationKind::Bang { mac, span })
+    }
+
+    fn collect_attr(&mut self,
+                    attr: Option<ast::Attribute>,
+                    derives: Vec<Path>,
+                    item: Annotatable,
+                    kind: AstFragmentKind,
+                    after_derive: bool)
+                    -> AstFragment {
+        self.collect(kind, match attr {
+            Some(attr) => InvocationKind::Attr { attr, item, derives, after_derive },
+            None => InvocationKind::DeriveContainer { derives, item },
+        })
+    }
+
+    fn find_attr_invoc(&self, attrs: &mut Vec<ast::Attribute>, after_derive: &mut bool)
+                       -> Option<ast::Attribute> {
+        let attr = attrs.iter()
+                        .position(|a| {
+                            if a.path == sym::derive {
+                                *after_derive = true;
+                            }
+                            !attr::is_known(a) && !is_builtin_attr(a)
+                        })
+                        .map(|i| attrs.remove(i));
+        if let Some(attr) = &attr {
+            if !self.cx.ecfg.custom_inner_attributes() &&
+               attr.style == ast::AttrStyle::Inner && attr.path != sym::test {
+                emit_feature_err(&self.cx.parse_sess, sym::custom_inner_attributes,
+                                 attr.span, GateIssue::Language,
+                                 "non-builtin inner attributes are unstable");
+            }
+        }
+        attr
+    }
+
+    /// If `item` is an attr invocation, remove and return the macro attribute and derive traits.
+    fn classify_item<T>(&mut self, item: &mut T)
+                        -> (Option<ast::Attribute>, Vec<Path>, /* after_derive */ bool)
+        where T: HasAttrs,
+    {
+        let (mut attr, mut traits, mut after_derive) = (None, Vec::new(), false);
+
+        item.visit_attrs(|mut attrs| {
+            attr = self.find_attr_invoc(&mut attrs, &mut after_derive);
+            traits = collect_derives(&mut self.cx, &mut attrs);
+        });
+
+        (attr, traits, after_derive)
+    }
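+    // For example (a sketch; `my_attr` is hypothetical), on
+    // `#[my_attr] #[derive(Clone)] struct S;` this returns the `#[my_attr]` invocation, the
+    // `Clone` path collected from the derive, and `after_derive == false`, since the custom
+    // attribute is found before the scan reaches `#[derive]`.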
+
+    /// Alternative to `classify_item()` that ignores `#[derive]` so invocations fall through
+    /// to the unused-attributes lint (making it an error on statements and expressions
+    /// would be a breaking change).
+    fn classify_nonitem<T: HasAttrs>(&mut self, nonitem: &mut T)
+                                     -> (Option<ast::Attribute>, /* after_derive */ bool) {
+        let (mut attr, mut after_derive) = (None, false);
+
+        nonitem.visit_attrs(|mut attrs| {
+            attr = self.find_attr_invoc(&mut attrs, &mut after_derive);
+        });
+
+        (attr, after_derive)
+    }
+
+    fn configure<T: HasAttrs>(&mut self, node: T) -> Option<T> {
+        self.cfg.configure(node)
+    }
+
+    // Detect use of feature-gated or invalid attributes on macro invocations
+    // since they will not be detected after macro expansion.
+    fn check_attributes(&mut self, attrs: &[ast::Attribute]) {
+        let features = self.cx.ecfg.features.unwrap();
+        for attr in attrs.iter() {
+            feature_gate::check_attribute(attr, self.cx.parse_sess, features);
+
+            // macros are expanded before any lint passes so this warning has to be hardcoded
+            if attr.path == sym::derive {
+                self.cx.struct_span_warn(attr.span, "`#[derive]` does nothing on macro invocations")
+                    .note("this may become a hard error in a future release")
+                    .emit();
+            }
+        }
+    }
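+
+    // For orientation (`some_macro` is hypothetical): `#[derive(Debug)] some_macro!();`
+    // reaches the warning above, because the derive is attached to the invocation itself
+    // rather than to anything the macro expands to.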
+}
+
+impl<'a, 'b> MutVisitor for InvocationCollector<'a, 'b> {
+    fn visit_expr(&mut self, expr: &mut P<ast::Expr>) {
+        self.cfg.configure_expr(expr);
+        visit_clobber(expr.deref_mut(), |mut expr| {
+            self.cfg.configure_expr_kind(&mut expr.kind);
+
+            // ignore derives so they remain unused
+            let (attr, after_derive) = self.classify_nonitem(&mut expr);
+
+            if attr.is_some() {
+                // Collect the invoc regardless of whether or not attributes are permitted here;
+                // expansion will eat the attribute so it won't error later.
+                attr.as_ref().map(|a| self.cfg.maybe_emit_expr_attr_err(a));
+
+                // AstFragmentKind::Expr requires the macro to emit an expression.
+                return self.collect_attr(attr, vec![], Annotatable::Expr(P(expr)),
+                                          AstFragmentKind::Expr, after_derive)
+                    .make_expr()
+                    .into_inner()
+            }
+
+            if let ast::ExprKind::Mac(mac) = expr.kind {
+                self.check_attributes(&expr.attrs);
+                self.collect_bang(mac, expr.span, AstFragmentKind::Expr)
+                    .make_expr()
+                    .into_inner()
+            } else {
+                noop_visit_expr(&mut expr, self);
+                expr
+            }
+        });
+    }
+
+    fn flat_map_arm(&mut self, arm: ast::Arm) -> SmallVec<[ast::Arm; 1]> {
+        let mut arm = configure!(self, arm);
+
+        let (attr, traits, after_derive) = self.classify_item(&mut arm);
+        if attr.is_some() || !traits.is_empty() {
+            return self.collect_attr(attr, traits, Annotatable::Arm(arm),
+                                     AstFragmentKind::Arms, after_derive)
+                                     .make_arms();
+        }
+
+        noop_flat_map_arm(arm, self)
+    }
+
+    fn flat_map_field(&mut self, field: ast::Field) -> SmallVec<[ast::Field; 1]> {
+        let mut field = configure!(self, field);
+
+        let (attr, traits, after_derive) = self.classify_item(&mut field);
+        if attr.is_some() || !traits.is_empty() {
+            return self.collect_attr(attr, traits, Annotatable::Field(field),
+                                     AstFragmentKind::Fields, after_derive)
+                                     .make_fields();
+        }
+
+        noop_flat_map_field(field, self)
+    }
+
+    fn flat_map_field_pattern(&mut self, fp: ast::FieldPat) -> SmallVec<[ast::FieldPat; 1]> {
+        let mut fp = configure!(self, fp);
+
+        let (attr, traits, after_derive) = self.classify_item(&mut fp);
+        if attr.is_some() || !traits.is_empty() {
+            return self.collect_attr(attr, traits, Annotatable::FieldPat(fp),
+                                     AstFragmentKind::FieldPats, after_derive)
+                                     .make_field_patterns();
+        }
+
+        noop_flat_map_field_pattern(fp, self)
+    }
+
+    fn flat_map_param(&mut self, p: ast::Param) -> SmallVec<[ast::Param; 1]> {
+        let mut p = configure!(self, p);
+
+        let (attr, traits, after_derive) = self.classify_item(&mut p);
+        if attr.is_some() || !traits.is_empty() {
+            return self.collect_attr(attr, traits, Annotatable::Param(p),
+                                     AstFragmentKind::Params, after_derive)
+                                     .make_params();
+        }
+
+        noop_flat_map_param(p, self)
+    }
+
+    fn flat_map_struct_field(&mut self, sf: ast::StructField) -> SmallVec<[ast::StructField; 1]> {
+        let mut sf = configure!(self, sf);
+
+        let (attr, traits, after_derive) = self.classify_item(&mut sf);
+        if attr.is_some() || !traits.is_empty() {
+            return self.collect_attr(attr, traits, Annotatable::StructField(sf),
+                                     AstFragmentKind::StructFields, after_derive)
+                                     .make_struct_fields();
+        }
+
+        noop_flat_map_struct_field(sf, self)
+    }
+
+    fn flat_map_variant(&mut self, variant: ast::Variant) -> SmallVec<[ast::Variant; 1]> {
+        let mut variant = configure!(self, variant);
+
+        let (attr, traits, after_derive) = self.classify_item(&mut variant);
+        if attr.is_some() || !traits.is_empty() {
+            return self.collect_attr(attr, traits, Annotatable::Variant(variant),
+                                     AstFragmentKind::Variants, after_derive)
+                                     .make_variants();
+        }
+
+        noop_flat_map_variant(variant, self)
+    }
+
+    fn filter_map_expr(&mut self, expr: P<ast::Expr>) -> Option<P<ast::Expr>> {
+        let expr = configure!(self, expr);
+        expr.filter_map(|mut expr| {
+            self.cfg.configure_expr_kind(&mut expr.kind);
+
+            // Ignore derives so they remain unused.
+            let (attr, after_derive) = self.classify_nonitem(&mut expr);
+
+            if attr.is_some() {
+                attr.as_ref().map(|a| self.cfg.maybe_emit_expr_attr_err(a));
+
+                return self.collect_attr(attr, vec![], Annotatable::Expr(P(expr)),
+                                         AstFragmentKind::OptExpr, after_derive)
+                    .make_opt_expr()
+                    .map(|expr| expr.into_inner())
+            }
+
+            if let ast::ExprKind::Mac(mac) = expr.kind {
+                self.check_attributes(&expr.attrs);
+                self.collect_bang(mac, expr.span, AstFragmentKind::OptExpr)
+                    .make_opt_expr()
+                    .map(|expr| expr.into_inner())
+            } else {
+                Some({ noop_visit_expr(&mut expr, self); expr })
+            }
+        })
+    }
+
+    fn visit_pat(&mut self, pat: &mut P<ast::Pat>) {
+        self.cfg.configure_pat(pat);
+        match pat.kind {
+            PatKind::Mac(_) => {}
+            _ => return noop_visit_pat(pat, self),
+        }
+
+        visit_clobber(pat, |mut pat| {
+            match mem::replace(&mut pat.kind, PatKind::Wild) {
+                PatKind::Mac(mac) =>
+                    self.collect_bang(mac, pat.span, AstFragmentKind::Pat).make_pat(),
+                _ => unreachable!(),
+            }
+        });
+    }
+
+    fn flat_map_stmt(&mut self, stmt: ast::Stmt) -> SmallVec<[ast::Stmt; 1]> {
+        let mut stmt = configure!(self, stmt);
+
+        // we'll expand attributes on expressions separately
+        if !stmt.is_expr() {
+            let (attr, derives, after_derive) = if stmt.is_item() {
+                self.classify_item(&mut stmt)
+            } else {
+                // ignore derives on non-item statements so it falls through
+                // to the unused-attributes lint
+                let (attr, after_derive) = self.classify_nonitem(&mut stmt);
+                (attr, vec![], after_derive)
+            };
+
+            if attr.is_some() || !derives.is_empty() {
+                return self.collect_attr(attr, derives, Annotatable::Stmt(P(stmt)),
+                                         AstFragmentKind::Stmts, after_derive).make_stmts();
+            }
+        }
+
+        if let StmtKind::Mac(mac) = stmt.kind {
+            let (mac, style, attrs) = mac.into_inner();
+            self.check_attributes(&attrs);
+            let mut placeholder = self.collect_bang(mac, stmt.span, AstFragmentKind::Stmts)
+                                        .make_stmts();
+
+            // If this is a macro invocation with a semicolon, then apply that
+            // semicolon to the final statement produced by expansion.
+            if style == MacStmtStyle::Semicolon {
+                if let Some(stmt) = placeholder.pop() {
+                    placeholder.push(stmt.add_trailing_semicolon());
+                }
+            }
+
+            return placeholder;
+        }
+
+        // The placeholder expander gives ids to statements, so we avoid folding the id here.
+        let ast::Stmt { id, kind, span } = stmt;
+        noop_flat_map_stmt_kind(kind, self).into_iter().map(|kind| {
+            ast::Stmt { id, kind, span }
+        }).collect()
+
+    }
+
+    fn visit_block(&mut self, block: &mut P<Block>) {
+        let old_directory_ownership = self.cx.current_expansion.directory_ownership;
+        self.cx.current_expansion.directory_ownership = DirectoryOwnership::UnownedViaBlock;
+        noop_visit_block(block, self);
+        self.cx.current_expansion.directory_ownership = old_directory_ownership;
+    }
+
+    fn flat_map_item(&mut self, item: P<ast::Item>) -> SmallVec<[P<ast::Item>; 1]> {
+        let mut item = configure!(self, item);
+
+        let (attr, traits, after_derive) = self.classify_item(&mut item);
+        if attr.is_some() || !traits.is_empty() {
+            return self.collect_attr(attr, traits, Annotatable::Item(item),
+                                     AstFragmentKind::Items, after_derive).make_items();
+        }
+
+        match item.kind {
+            ast::ItemKind::Mac(..) => {
+                self.check_attributes(&item.attrs);
+                item.and_then(|item| match item.kind {
+                    ItemKind::Mac(mac) => self.collect(
+                        AstFragmentKind::Items, InvocationKind::Bang { mac, span: item.span }
+                    ).make_items(),
+                    _ => unreachable!(),
+                })
+            }
+            ast::ItemKind::Mod(ast::Mod { inner, .. }) => {
+                if item.ident == Ident::invalid() {
+                    return noop_flat_map_item(item, self);
+                }
+
+                let orig_directory_ownership = self.cx.current_expansion.directory_ownership;
+                let mut module = (*self.cx.current_expansion.module).clone();
+                module.mod_path.push(item.ident);
+
+                // Detect if this is an inline module (`mod m { ... }` as opposed to `mod m;`).
+                // In the non-inline case, `inner` is never the dummy span (cf. `parse_item_mod`).
+                // Thus, if `inner` is the dummy span, we know the module is inline.
+                let inline_module = item.span.contains(inner) || inner.is_dummy();
+
+                if inline_module {
+                    if let Some(path) = attr::first_attr_value_str_by_name(&item.attrs, sym::path) {
+                        self.cx.current_expansion.directory_ownership =
+                            DirectoryOwnership::Owned { relative: None };
+                        module.directory.push(&*path.as_str());
+                    } else {
+                        module.directory.push(&*item.ident.as_str());
+                    }
+                } else {
+                    let path = self.cx.parse_sess.source_map().span_to_unmapped_path(inner);
+                    let mut path = match path {
+                        FileName::Real(path) => path,
+                        other => PathBuf::from(other.to_string()),
+                    };
+                    let directory_ownership = match path.file_name().unwrap().to_str() {
+                        Some("mod.rs") => DirectoryOwnership::Owned { relative: None },
+                        Some(_) => DirectoryOwnership::Owned {
+                            relative: Some(item.ident),
+                        },
+                        None => DirectoryOwnership::UnownedViaMod(false),
+                    };
+                    path.pop();
+                    module.directory = path;
+                    self.cx.current_expansion.directory_ownership = directory_ownership;
+                }
+
+                let orig_module =
+                    mem::replace(&mut self.cx.current_expansion.module, Rc::new(module));
+                let result = noop_flat_map_item(item, self);
+                self.cx.current_expansion.module = orig_module;
+                self.cx.current_expansion.directory_ownership = orig_directory_ownership;
+                result
+            }
+
+            _ => noop_flat_map_item(item, self),
+        }
+    }
+
+    fn flat_map_trait_item(&mut self, item: ast::TraitItem) -> SmallVec<[ast::TraitItem; 1]> {
+        let mut item = configure!(self, item);
+
+        let (attr, traits, after_derive) = self.classify_item(&mut item);
+        if attr.is_some() || !traits.is_empty() {
+            return self.collect_attr(attr, traits, Annotatable::TraitItem(P(item)),
+                                     AstFragmentKind::TraitItems, after_derive).make_trait_items()
+        }
+
+        match item.kind {
+            ast::TraitItemKind::Macro(mac) => {
+                let ast::TraitItem { attrs, span, .. } = item;
+                self.check_attributes(&attrs);
+                self.collect_bang(mac, span, AstFragmentKind::TraitItems).make_trait_items()
+            }
+            _ => noop_flat_map_trait_item(item, self),
+        }
+    }
+
+    fn flat_map_impl_item(&mut self, item: ast::ImplItem) -> SmallVec<[ast::ImplItem; 1]> {
+        let mut item = configure!(self, item);
+
+        let (attr, traits, after_derive) = self.classify_item(&mut item);
+        if attr.is_some() || !traits.is_empty() {
+            return self.collect_attr(attr, traits, Annotatable::ImplItem(P(item)),
+                                     AstFragmentKind::ImplItems, after_derive).make_impl_items();
+        }
+
+        match item.kind {
+            ast::ImplItemKind::Macro(mac) => {
+                let ast::ImplItem { attrs, span, .. } = item;
+                self.check_attributes(&attrs);
+                self.collect_bang(mac, span, AstFragmentKind::ImplItems).make_impl_items()
+            }
+            _ => noop_flat_map_impl_item(item, self),
+        }
+    }
+
+    fn visit_ty(&mut self, ty: &mut P<ast::Ty>) {
+        match ty.kind {
+            ast::TyKind::Mac(_) => {}
+            _ => return noop_visit_ty(ty, self),
+        };
+
+        visit_clobber(ty, |mut ty| {
+            match mem::replace(&mut ty.kind, ast::TyKind::Err) {
+                ast::TyKind::Mac(mac) =>
+                    self.collect_bang(mac, ty.span, AstFragmentKind::Ty).make_ty(),
+                _ => unreachable!(),
+            }
+        });
+    }
+
+    fn visit_foreign_mod(&mut self, foreign_mod: &mut ast::ForeignMod) {
+        self.cfg.configure_foreign_mod(foreign_mod);
+        noop_visit_foreign_mod(foreign_mod, self);
+    }
+
+    fn flat_map_foreign_item(&mut self, mut foreign_item: ast::ForeignItem)
+        -> SmallVec<[ast::ForeignItem; 1]>
+    {
+        let (attr, traits, after_derive) = self.classify_item(&mut foreign_item);
+
+        if attr.is_some() || !traits.is_empty() {
+            return self.collect_attr(attr, traits, Annotatable::ForeignItem(P(foreign_item)),
+                                     AstFragmentKind::ForeignItems, after_derive)
+                                     .make_foreign_items();
+        }
+
+        if let ast::ForeignItemKind::Macro(mac) = foreign_item.kind {
+            self.check_attributes(&foreign_item.attrs);
+            return self.collect_bang(mac, foreign_item.span, AstFragmentKind::ForeignItems)
+                .make_foreign_items();
+        }
+
+        noop_flat_map_foreign_item(foreign_item, self)
+    }
+
+    fn visit_item_kind(&mut self, item: &mut ast::ItemKind) {
+        match item {
+            ast::ItemKind::MacroDef(..) => {}
+            _ => {
+                self.cfg.configure_item_kind(item);
+                noop_visit_item_kind(item, self);
+            }
+        }
+    }
+
+    fn flat_map_generic_param(
+        &mut self,
+        param: ast::GenericParam
+    ) -> SmallVec<[ast::GenericParam; 1]>
+    {
+        let mut param = configure!(self, param);
+
+        let (attr, traits, after_derive) = self.classify_item(&mut param);
+        if attr.is_some() || !traits.is_empty() {
+            return self.collect_attr(attr, traits, Annotatable::GenericParam(param),
+                                     AstFragmentKind::GenericParams, after_derive)
+                                     .make_generic_params();
+        }
+
+        noop_flat_map_generic_param(param, self)
+    }
+
+    fn visit_attribute(&mut self, at: &mut ast::Attribute) {
+        // Turn `#[doc(include="filename")]` attributes into `#[doc(include(file="filename",
+        // contents="file contents"))]` attributes.
+        if !at.check_name(sym::doc) {
+            return noop_visit_attribute(at, self);
+        }
+
+        if let Some(list) = at.meta_item_list() {
+            if !list.iter().any(|it| it.check_name(sym::include)) {
+                return noop_visit_attribute(at, self);
+            }
+
+            let mut items = vec![];
+
+            for mut it in list {
+                if !it.check_name(sym::include) {
+                    items.push({ noop_visit_meta_list_item(&mut it, self); it });
+                    continue;
+                }
+
+                if let Some(file) = it.value_str() {
+                    let err_count = self.cx.parse_sess.span_diagnostic.err_count();
+                    self.check_attributes(slice::from_ref(at));
+                    if self.cx.parse_sess.span_diagnostic.err_count() > err_count {
+                        // avoid loading the file if they haven't enabled the feature
+                        return noop_visit_attribute(at, self);
+                    }
+
+                    let filename = self.cx.resolve_path(&*file.as_str(), it.span());
+                    match self.cx.source_map().load_file(&filename) {
+                        Ok(source_file) => {
+                            let src = source_file.src.as_ref()
+                                .expect("freshly loaded file should have a source");
+                            let src_interned = Symbol::intern(src.as_str());
+
+                            let include_info = vec![
+                                ast::NestedMetaItem::MetaItem(
+                                    attr::mk_name_value_item_str(
+                                        Ident::with_dummy_span(sym::file),
+                                        file,
+                                        DUMMY_SP,
+                                    ),
+                                ),
+                                ast::NestedMetaItem::MetaItem(
+                                    attr::mk_name_value_item_str(
+                                        Ident::with_dummy_span(sym::contents),
+                                        src_interned,
+                                        DUMMY_SP,
+                                    ),
+                                ),
+                            ];
+
+                            let include_ident = Ident::with_dummy_span(sym::include);
+                            let item = attr::mk_list_item(include_ident, include_info);
+                            items.push(ast::NestedMetaItem::MetaItem(item));
+                        }
+                        Err(e) => {
+                            let lit = it
+                                .meta_item()
+                                .and_then(|item| item.name_value_literal())
+                                .unwrap();
+
+                            if e.kind() == ErrorKind::InvalidData {
+                                self.cx
+                                    .struct_span_err(
+                                        lit.span,
+                                        &format!("{} wasn't a utf-8 file", filename.display()),
+                                    )
+                                    .span_label(lit.span, "contains invalid utf-8")
+                                    .emit();
+                            } else {
+                                let mut err = self.cx.struct_span_err(
+                                    lit.span,
+                                    &format!("couldn't read {}: {}", filename.display(), e),
+                                );
+                                err.span_label(lit.span, "couldn't read file");
+
+                                err.emit();
+                            }
+                        }
+                    }
+                } else {
+                    let mut err = self.cx.struct_span_err(
+                        it.span(),
+                        &format!("expected path to external documentation"),
+                    );
+
+                    // Check if the user erroneously used `doc(include(...))` syntax.
+                    let literal = it.meta_item_list().and_then(|list| {
+                        if list.len() == 1 {
+                            list[0].literal().map(|literal| &literal.kind)
+                        } else {
+                            None
+                        }
+                    });
+
+                    let (path, applicability) = match &literal {
+                        Some(LitKind::Str(path, ..)) => {
+                            (path.to_string(), Applicability::MachineApplicable)
+                        }
+                        _ => (String::from("<path>"), Applicability::HasPlaceholders),
+                    };
+
+                    err.span_suggestion(
+                        it.span(),
+                        "provide a file path with `=`",
+                        format!("include = \"{}\"", path),
+                        applicability,
+                    );
+
+                    err.emit();
+                }
+            }
+
+            let meta = attr::mk_list_item(Ident::with_dummy_span(sym::doc), items);
+            *at = attr::Attribute {
+                item: AttrItem { path: meta.path, tokens: meta.kind.tokens(meta.span) },
+                span: at.span,
+                id: at.id,
+                style: at.style,
+                is_sugared_doc: false,
+            };
+        } else {
+            noop_visit_attribute(at, self)
+        }
+    }
+
+    fn visit_id(&mut self, id: &mut ast::NodeId) {
+        if self.monotonic {
+            debug_assert_eq!(*id, ast::DUMMY_NODE_ID);
+            *id = self.cx.resolver.next_node_id()
+        }
+    }
+
+    fn visit_fn_decl(&mut self, mut fn_decl: &mut P<ast::FnDecl>) {
+        self.cfg.configure_fn_decl(&mut fn_decl);
+        noop_visit_fn_decl(fn_decl, self);
+    }
+}
+
+pub struct ExpansionConfig<'feat> {
+    pub crate_name: String,
+    pub features: Option<&'feat Features>,
+    pub recursion_limit: usize,
+    pub trace_mac: bool,
+    pub should_test: bool, // If false, strip `#[test]` nodes
+    pub single_step: bool,
+    pub keep_macs: bool,
+}
+
+impl<'feat> ExpansionConfig<'feat> {
+    pub fn default(crate_name: String) -> ExpansionConfig<'static> {
+        ExpansionConfig {
+            crate_name,
+            features: None,
+            recursion_limit: 1024,
+            trace_mac: false,
+            should_test: false,
+            single_step: false,
+            keep_macs: false,
+        }
+    }
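+    // A typical caller starts from the default and overrides individual fields, e.g.
+    // (a sketch, not a specific call site):
+    //
+    //     let mut cfg = ExpansionConfig::default("my_crate".to_string());
+    //     cfg.features = Some(features);
+    //     cfg.should_test = true;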
+
+    fn proc_macro_hygiene(&self) -> bool {
+        self.features.map_or(false, |features| features.proc_macro_hygiene)
+    }
+    fn custom_inner_attributes(&self) -> bool {
+        self.features.map_or(false, |features| features.custom_inner_attributes)
+    }
+}
diff --git a/src/libsyntax_expand/lib.rs b/src/libsyntax_expand/lib.rs
new file mode 100644 (file)
index 0000000..db292b6
--- /dev/null
@@ -0,0 +1,39 @@
+#![feature(crate_visibility_modifier)]
+#![feature(decl_macro)]
+#![feature(proc_macro_diagnostic)]
+#![feature(proc_macro_internals)]
+#![feature(proc_macro_span)]
+
+extern crate proc_macro as pm;
+
+// A variant of 'try!' that panics on an Err. This is used as a crutch on the
+// way towards a non-panic!-prone parser. It should be used for fatal parsing
+// errors; eventually we plan to convert all code using panictry to just use
+// normal try.
+#[macro_export]
+macro_rules! panictry {
+    ($e:expr) => ({
+        use std::result::Result::{Ok, Err};
+        use errors::FatalError;
+        match $e {
+            Ok(e) => e,
+            Err(mut e) => {
+                e.emit();
+                FatalError.raise()
+            }
+        }
+    })
+}
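+// For example, `let item = panictry!(parser.parse_item());` emits the parse error and then
+// raises a fatal error instead of propagating a `Result` (a usage sketch; `parser` is
+// whatever `Parser` instance is in scope).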
+
+mod placeholders;
+mod proc_macro_server;
+
+pub use syntax_pos::hygiene;
+pub use mbe::macro_rules::compile_declarative_macro;
+pub mod allocator;
+pub mod base;
+pub mod build;
+pub mod expand;
+pub mod proc_macro;
+
+crate mod mbe;
diff --git a/src/libsyntax_expand/mbe.rs b/src/libsyntax_expand/mbe.rs
new file mode 100644 (file)
index 0000000..453fe94
--- /dev/null
@@ -0,0 +1,166 @@
+//! This module implements declarative macros: old `macro_rules` and the newer
+//! `macro`. Declarative macros are also known as "macro by example", and that's
+//! why we call this module `mbe`. For external documentation, prefer the
+//! official terminology: "declarative macros".
+
+crate mod transcribe;
+crate mod macro_check;
+crate mod macro_parser;
+crate mod macro_rules;
+crate mod quoted;
+
+use syntax::ast;
+use syntax::parse::token::{self, Token, TokenKind};
+use syntax::tokenstream::{DelimSpan};
+
+use syntax_pos::{BytePos, Span};
+
+use rustc_data_structures::sync::Lrc;
+
+/// Contains the sub-token-trees of a "delimited" token tree, such as the contents of a
+/// parenthesized group. Note that the delimiter itself might be `NoDelim`.
+#[derive(Clone, PartialEq, RustcEncodable, RustcDecodable, Debug)]
+struct Delimited {
+    delim: token::DelimToken,
+    tts: Vec<TokenTree>,
+}
+
+impl Delimited {
+    /// Returns a `self::TokenTree` with a `Span` corresponding to the opening delimiter.
+    fn open_tt(&self, span: Span) -> TokenTree {
+        let open_span = if span.is_dummy() {
+            span
+        } else {
+            span.with_hi(span.lo() + BytePos(self.delim.len() as u32))
+        };
+        TokenTree::token(token::OpenDelim(self.delim), open_span)
+    }
+
+    /// Returns a `self::TokenTree` with a `Span` corresponding to the closing delimiter.
+    fn close_tt(&self, span: Span) -> TokenTree {
+        let close_span = if span.is_dummy() {
+            span
+        } else {
+            span.with_lo(span.hi() - BytePos(self.delim.len() as u32))
+        };
+        TokenTree::token(token::CloseDelim(self.delim), close_span)
+    }
+}
+
+#[derive(Clone, PartialEq, RustcEncodable, RustcDecodable, Debug)]
+struct SequenceRepetition {
+    /// The sequence of token trees
+    tts: Vec<TokenTree>,
+    /// The optional separator
+    separator: Option<Token>,
+    /// Whether the sequence can be repeated zero (*), or one or more times (+)
+    kleene: KleeneToken,
+    /// The number of `Match`s that appear in the sequence (and subsequences)
+    num_captures: usize,
+}
+
+#[derive(Clone, PartialEq, RustcEncodable, RustcDecodable, Debug, Copy)]
+struct KleeneToken {
+    span: Span,
+    op: KleeneOp,
+}
+
+impl KleeneToken {
+    fn new(op: KleeneOp, span: Span) -> KleeneToken {
+        KleeneToken { span, op }
+    }
+}
+
+/// A Kleene-style [repetition operator](http://en.wikipedia.org/wiki/Kleene_star)
+/// for token sequences.
+#[derive(Clone, PartialEq, RustcEncodable, RustcDecodable, Hash, Debug, Copy)]
+enum KleeneOp {
+    /// Kleene star (`*`) for zero or more repetitions
+    ZeroOrMore,
+    /// Kleene plus (`+`) for one or more repetitions
+    OneOrMore,
+    /// Kleene optional (`?`) for zero or one repetition
+    ZeroOrOne,
+}
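+// In `macro_rules` syntax these correspond to `$($x:tt)*`, `$($x:tt)+`, and `$($x:tt)?`
+// respectively (listed here just for orientation; the rule that `?` takes no separator is
+// enforced elsewhere, not by this enum).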
+
+/// Similar to `tokenstream::TokenTree`, except that `$i`, `$i:ident`, and `$(...)`
+/// are "first-class" token trees. Useful for parsing macros.
+#[derive(Debug, Clone, PartialEq, RustcEncodable, RustcDecodable)]
+enum TokenTree {
+    Token(Token),
+    Delimited(DelimSpan, Lrc<Delimited>),
+    /// A Kleene-style repetition sequence
+    Sequence(DelimSpan, Lrc<SequenceRepetition>),
+    /// e.g., `$var`
+    MetaVar(Span, ast::Ident),
+    /// e.g., `$var:expr`. This is only used in the left hand side of MBE macros.
+    MetaVarDecl(
+        Span,
+        ast::Ident, /* name to bind */
+        ast::Ident, /* kind of nonterminal */
+    ),
+}
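+// For example, in the matcher `($e:expr, $($rest:tt),*)` the `$e:expr` and `$rest:tt` pieces
+// become `MetaVarDecl`s, the `$(...),*` group becomes a `Sequence`, and the surrounding
+// parentheses a `Delimited`; in a transcriber a bare `$e` would instead be a `MetaVar`
+// (an illustrative sketch, not an exhaustive mapping).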
+
+impl TokenTree {
+    /// Return the number of tokens in the tree.
+    fn len(&self) -> usize {
+        match *self {
+            TokenTree::Delimited(_, ref delimed) => match delimed.delim {
+                token::NoDelim => delimed.tts.len(),
+                _ => delimed.tts.len() + 2,
+            },
+            TokenTree::Sequence(_, ref seq) => seq.tts.len(),
+            _ => 0,
+        }
+    }
+
+    /// Returns `true` if the given token tree is delimited.
+    fn is_delimited(&self) -> bool {
+        match *self {
+            TokenTree::Delimited(..) => true,
+            _ => false,
+        }
+    }
+
+    /// Returns `true` if the given token tree is a token of the given kind.
+    fn is_token(&self, expected_kind: &TokenKind) -> bool {
+        match self {
+            TokenTree::Token(Token { kind: actual_kind, .. }) => actual_kind == expected_kind,
+            _ => false,
+        }
+    }
+
+    /// Gets the `index`-th sub-token-tree. This only makes sense for delimited trees and sequences.
+    fn get_tt(&self, index: usize) -> TokenTree {
+        match (self, index) {
+            (&TokenTree::Delimited(_, ref delimed), _) if delimed.delim == token::NoDelim => {
+                delimed.tts[index].clone()
+            }
+            (&TokenTree::Delimited(span, ref delimed), _) => {
+                if index == 0 {
+                    return delimed.open_tt(span.open);
+                }
+                if index == delimed.tts.len() + 1 {
+                    return delimed.close_tt(span.close);
+                }
+                delimed.tts[index - 1].clone()
+            }
+            (&TokenTree::Sequence(_, ref seq), _) => seq.tts[index].clone(),
+            _ => panic!("Cannot expand a token tree"),
+        }
+    }
+
+    /// Retrieves the `TokenTree`'s span.
+    fn span(&self) -> Span {
+        match *self {
+            TokenTree::Token(Token { span, .. })
+            | TokenTree::MetaVar(span, _)
+            | TokenTree::MetaVarDecl(span, _, _) => span,
+            TokenTree::Delimited(span, _) | TokenTree::Sequence(span, _) => span.entire(),
+        }
+    }
+
+    fn token(kind: TokenKind, span: Span) -> TokenTree {
+        TokenTree::Token(Token::new(kind, span))
+    }
+}
diff --git a/src/libsyntax_expand/mbe/macro_check.rs b/src/libsyntax_expand/mbe/macro_check.rs
new file mode 100644 (file)
index 0000000..50abda8
--- /dev/null
@@ -0,0 +1,626 @@
+//! Checks that meta-variables in macro definition are correctly declared and used.
+//!
+//! # What is checked
+//!
+//! ## Meta-variables must not be bound twice
+//!
+//! ```
+//! macro_rules! foo { ($x:tt $x:tt) => { $x }; }
+//! ```
+//!
+//! This check is sound (no false-negative) and complete (no false-positive).
+//!
+//! ## Meta-variables must not be free
+//!
+//! ```
+//! macro_rules! foo { () => { $x }; }
+//! ```
+//!
+//! This check is also done at macro instantiation but only if the branch is taken.
+//!
+//! ## Meta-variables must repeat at least as many times as their binder
+//!
+//! ```
+//! macro_rules! foo { ($($x:tt)*) => { $x }; }
+//! ```
+//!
+//! This check is also done at macro instantiation but only if the branch is taken.
+//!
+//! ## Meta-variables must repeat with the same Kleene operators as their binder
+//!
+//! ```
+//! macro_rules! foo { ($($x:tt)+) => { $($x)* }; }
+//! ```
+//!
+//! This check is not done at macro instantiation.
+//!
+//! # Disclaimer
+//!
+//! In the presence of nested macros (a macro defined in a macro), those checks may have false
+//! positives and false negatives. We try to detect those cases by recognizing potential macro
+//! definitions in RHSes, but nested macros may be hidden through the use of particular values of
+//! meta-variables.
+//!
+//! ## Examples of false positive
+//!
+//! False positives can come from cases where we don't recognize a nested macro, because it depends
+//! on particular values of meta-variables. In the following example, we think both instances of
+//! `$x` are free, which is a correct statement if `$name` is anything but `macro_rules`. But when
+//! `$name` is `macro_rules`, like in the instantiation below, then `$x:tt` is actually a binder of
+//! the nested macro and `$x` is bound to it.
+//!
+//! ```
+//! macro_rules! foo { ($name:ident) => { $name! bar { ($x:tt) => { $x }; } }; }
+//! foo!(macro_rules);
+//! ```
+//!
+//! False positives can also come from cases where we think there is a nested macro while there
+//! isn't. In the following example, we think `$x` is free, which is incorrect because `bar` is not
+//! a nested macro since it is not evaluated as code by `stringify!`.
+//!
+//! ```
+//! macro_rules! foo { () => { stringify!(macro_rules! bar { () => { $x }; }) }; }
+//! ```
+//!
+//! ## Examples of false negative
+//!
+//! False negatives can come from cases where we don't recognize a meta-variable, because it depends
+//! on particular values of meta-variables. In the following examples, we don't see that if `$d` is
+//! instantiated with `$` then `$d z` becomes `$z` in the nested macro definition and is thus a free
+//! meta-variable. Note however, that if `foo` is instantiated, then we would check the definition
+//! of `bar` and would see the issue.
+//!
+//! ```
+//! macro_rules! foo { ($d:tt) => { macro_rules! bar { ($y:tt) => { $d z }; } }; }
+//! ```
+//!
+//! # How it is checked
+//!
+//! There are 3 main functions: `check_binders`, `check_occurrences`, and `check_nested_macro`. They
+//! all need some kind of environment.
+//!
+//! ## Environments
+//!
+//! Environments are used to pass information.
+//!
+//! ### From LHS to RHS
+//!
+//! When checking a LHS with `check_binders`, we produce (and use) an environment for binders,
+//! namely `Binders`. This is a mapping from binder name to information about that binder: the span
+//! of the binder for error messages and the stack of Kleene operators under which it was bound in
+//! the LHS.
+//!
+//! This environment is used by both the LHS and RHS. The LHS uses it to detect duplicate binders.
+//! The RHS uses it to detect the other errors.
+//!
+//! ### From outer macro to inner macro
+//!
+//! When checking the RHS of an outer macro and we detect a nested macro definition, we push the
+//! current state, namely `MacroState`, to an environment of nested macro definitions. Each state
+//! stores the LHS binders when entering the macro definition as well as the stack of Kleene
+//! operators under which the inner macro is defined in the RHS.
+//!
+//! This environment is a stack representing the nesting of macro definitions. As such, the stack of
+//! Kleene operators under which a meta-variable is repeating is the concatenation of the stacks
+//! stored when entering a macro definition starting from the state in which the meta-variable is
+//! bound.
+use crate::mbe::{KleeneToken, TokenTree};
+
+use syntax::ast::NodeId;
+use syntax::early_buffered_lints::BufferedEarlyLintId;
+use syntax::parse::token::{DelimToken, Token, TokenKind};
+use syntax::sess::ParseSess;
+use syntax::symbol::{kw, sym};
+
+use rustc_data_structures::fx::FxHashMap;
+use smallvec::SmallVec;
+use syntax_pos::{symbol::Ident, MultiSpan, Span};
+
+/// A stack represented as a linked list.
+///
+/// These are used for environments because they grow incrementally and are not mutable.
+enum Stack<'a, T> {
+    /// Empty stack.
+    Empty,
+    /// A non-empty stack.
+    Push {
+        /// The top element.
+        top: T,
+        /// The previous elements.
+        prev: &'a Stack<'a, T>,
+    },
+}
+
+impl<'a, T> Stack<'a, T> {
+    /// Returns whether a stack is empty.
+    fn is_empty(&self) -> bool {
+        match *self {
+            Stack::Empty => true,
+            _ => false,
+        }
+    }
+
+    /// Returns a new stack with an element on top.
+    fn push(&'a self, top: T) -> Stack<'a, T> {
+        Stack::Push { top, prev: self }
+    }
+}
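+// Usage sketch: callers thread a borrowed stack through recursion and push one element per
+// level, e.g. `let ops = ops.push(seq.kleene);` before descending into a sequence, so the
+// parent frame's stack is never mutated and nothing is heap-allocated.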
+
+impl<'a, T> Iterator for &'a Stack<'a, T> {
+    type Item = &'a T;
+
+    // Iterates from top to bottom of the stack.
+    fn next(&mut self) -> Option<&'a T> {
+        match *self {
+            Stack::Empty => None,
+            Stack::Push { ref top, ref prev } => {
+                *self = prev;
+                Some(top)
+            }
+        }
+    }
+}
+
+impl From<&Stack<'_, KleeneToken>> for SmallVec<[KleeneToken; 1]> {
+    fn from(ops: &Stack<'_, KleeneToken>) -> SmallVec<[KleeneToken; 1]> {
+        let mut ops: SmallVec<[KleeneToken; 1]> = ops.cloned().collect();
+        // The stack is innermost on top. We want outermost first.
+        ops.reverse();
+        ops
+    }
+}
+
+/// Information attached to a meta-variable binder in LHS.
+struct BinderInfo {
+    /// The span of the meta-variable in LHS.
+    span: Span,
+    /// The stack of Kleene operators (outermost first).
+    ops: SmallVec<[KleeneToken; 1]>,
+}
+
+/// An environment of meta-variables to their binder information.
+type Binders = FxHashMap<Ident, BinderInfo>;
+
+/// The state at which we entered a macro definition in the RHS of another macro definition.
+struct MacroState<'a> {
+    /// The binders of the branch where we entered the macro definition.
+    binders: &'a Binders,
+    /// The stack of Kleene operators (outermost first) where we entered the macro definition.
+    ops: SmallVec<[KleeneToken; 1]>,
+}
+
+/// Checks that meta-variables are used correctly in a macro definition.
+///
+/// Arguments:
+/// - `sess` is used to emit diagnostics and lints
+/// - `node_id` is used to emit lints
+/// - `span` is used when no spans are available
+/// - `lhses` and `rhses` should have the same length and represent the macro definition
+pub(super) fn check_meta_variables(
+    sess: &ParseSess,
+    node_id: NodeId,
+    span: Span,
+    lhses: &[TokenTree],
+    rhses: &[TokenTree],
+) -> bool {
+    if lhses.len() != rhses.len() {
+        sess.span_diagnostic.span_bug(span, "length mismatch between LHSes and RHSes")
+    }
+    let mut valid = true;
+    for (lhs, rhs) in lhses.iter().zip(rhses.iter()) {
+        let mut binders = Binders::default();
+        check_binders(sess, node_id, lhs, &Stack::Empty, &mut binders, &Stack::Empty, &mut valid);
+        check_occurrences(sess, node_id, rhs, &Stack::Empty, &binders, &Stack::Empty, &mut valid);
+    }
+    valid
+}
+
+/// Checks `lhs` as part of the LHS of a macro definition, extends `binders` with new binders, and
+/// sets `valid` to false in case of errors.
+///
+/// Arguments:
+/// - `sess` is used to emit diagnostics and lints
+/// - `node_id` is used to emit lints
+/// - `lhs` is checked as part of a LHS
+/// - `macros` is the stack of possible outer macros
+/// - `binders` contains the binders of the LHS
+/// - `ops` is the stack of Kleene operators from the LHS
+/// - `valid` is set in case of errors
+fn check_binders(
+    sess: &ParseSess,
+    node_id: NodeId,
+    lhs: &TokenTree,
+    macros: &Stack<'_, MacroState<'_>>,
+    binders: &mut Binders,
+    ops: &Stack<'_, KleeneToken>,
+    valid: &mut bool,
+) {
+    match *lhs {
+        TokenTree::Token(..) => {}
+        // This can only happen when checking a nested macro because this LHS is then in the RHS of
+        // the outer macro. See ui/macros/macro-of-higher-order.rs where $y:$fragment in the
+        // LHS of the nested macro (and RHS of the outer macro) is parsed as MetaVar(y) Colon
+        // MetaVar(fragment) and not as MetaVarDecl(y, fragment).
+        TokenTree::MetaVar(span, name) => {
+            if macros.is_empty() {
+                sess.span_diagnostic.span_bug(span, "unexpected MetaVar in lhs");
+            }
+            // There are 3 possibilities:
+            if let Some(prev_info) = binders.get(&name) {
+                // 1. The meta-variable is already bound in the current LHS: This is an error.
+                let mut span = MultiSpan::from_span(span);
+                span.push_span_label(prev_info.span, "previous declaration".into());
+                buffer_lint(sess, span, node_id, "duplicate matcher binding");
+            } else if get_binder_info(macros, binders, name).is_none() {
+                // 2. The meta-variable is free: This is a binder.
+                binders.insert(name, BinderInfo { span, ops: ops.into() });
+            } else {
+                // 3. The meta-variable is bound: This is an occurrence.
+                check_occurrences(sess, node_id, lhs, macros, binders, ops, valid);
+            }
+        }
+        // Similarly, this can only happen when checking a toplevel macro.
+        TokenTree::MetaVarDecl(span, name, _kind) => {
+            if !macros.is_empty() {
+                sess.span_diagnostic.span_bug(span, "unexpected MetaVarDecl in nested lhs");
+            }
+            if let Some(prev_info) = get_binder_info(macros, binders, name) {
+                // Duplicate binders at the top-level macro definition are errors. The lint is only
+                // for nested macro definitions.
+                sess.span_diagnostic
+                    .struct_span_err(span, "duplicate matcher binding")
+                    .span_note(prev_info.span, "previous declaration was here")
+                    .emit();
+                *valid = false;
+            } else {
+                binders.insert(name, BinderInfo { span, ops: ops.into() });
+            }
+        }
+        TokenTree::Delimited(_, ref del) => {
+            for tt in &del.tts {
+                check_binders(sess, node_id, tt, macros, binders, ops, valid);
+            }
+        }
+        TokenTree::Sequence(_, ref seq) => {
+            let ops = ops.push(seq.kleene);
+            for tt in &seq.tts {
+                check_binders(sess, node_id, tt, macros, binders, &ops, valid);
+            }
+        }
+    }
+}
+
+/// Returns the binder information of a meta-variable.
+///
+/// Arguments:
+/// - `macros` is the stack of possible outer macros
+/// - `binders` contains the current binders
+/// - `name` is the name of the meta-variable we are looking for
+fn get_binder_info<'a>(
+    mut macros: &'a Stack<'a, MacroState<'a>>,
+    binders: &'a Binders,
+    name: Ident,
+) -> Option<&'a BinderInfo> {
+    binders.get(&name).or_else(|| macros.find_map(|state| state.binders.get(&name)))
+}
+
+/// Checks `rhs` as part of the RHS of a macro definition and sets `valid` to false in case of
+/// errors.
+///
+/// Arguments:
+/// - `sess` is used to emit diagnostics and lints
+/// - `node_id` is used to emit lints
+/// - `rhs` is checked as part of a RHS
+/// - `macros` is the stack of possible outer macros
+/// - `binders` contains the binders of the associated LHS
+/// - `ops` is the stack of Kleene operators from the RHS
+/// - `valid` is set in case of errors
+fn check_occurrences(
+    sess: &ParseSess,
+    node_id: NodeId,
+    rhs: &TokenTree,
+    macros: &Stack<'_, MacroState<'_>>,
+    binders: &Binders,
+    ops: &Stack<'_, KleeneToken>,
+    valid: &mut bool,
+) {
+    match *rhs {
+        TokenTree::Token(..) => {}
+        TokenTree::MetaVarDecl(span, _name, _kind) => {
+            sess.span_diagnostic.span_bug(span, "unexpected MetaVarDecl in rhs")
+        }
+        TokenTree::MetaVar(span, name) => {
+            check_ops_is_prefix(sess, node_id, macros, binders, ops, span, name);
+        }
+        TokenTree::Delimited(_, ref del) => {
+            check_nested_occurrences(sess, node_id, &del.tts, macros, binders, ops, valid);
+        }
+        TokenTree::Sequence(_, ref seq) => {
+            let ops = ops.push(seq.kleene);
+            check_nested_occurrences(sess, node_id, &seq.tts, macros, binders, &ops, valid);
+        }
+    }
+}
+
+/// Represents the processed prefix of a nested macro.
+#[derive(Clone, Copy, PartialEq, Eq)]
+enum NestedMacroState {
+    /// Nothing that matches a nested macro definition was processed yet.
+    Empty,
+    /// The token `macro_rules` was processed.
+    MacroRules,
+    /// The tokens `macro_rules!` were processed.
+    MacroRulesNot,
+    /// The tokens `macro_rules!` followed by a name were processed. The name may be either directly
+    /// an identifier or a meta-variable (that hopefully would be instantiated by an identifier).
+    MacroRulesNotName,
+    /// The keyword `macro` was processed.
+    Macro,
+    /// The keyword `macro` followed by a name was processed.
+    MacroName,
+    /// The keyword `macro` followed by a name and a token delimited by parentheses was processed.
+    MacroNameParen,
+}
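+// For example, while scanning RHS tokens like `macro_rules ! bar { ... }` the state advances
+// Empty -> MacroRules -> MacroRulesNot -> MacroRulesNotName, and the brace-delimited body is
+// then handed to `check_nested_macro`; any token that does not fit the expected shape resets
+// the state to Empty (see `check_nested_occurrences` below).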
+
+/// Checks `tts` as part of the RHS of a macro definition, tries to recognize nested macro
+/// definitions, and sets `valid` to false in case of errors.
+///
+/// Arguments:
+/// - `sess` is used to emit diagnostics and lints
+/// - `node_id` is used to emit lints
+/// - `tts` is checked as part of a RHS and may contain macro definitions
+/// - `macros` is the stack of possible outer macros
+/// - `binders` contains the binders of the associated LHS
+/// - `ops` is the stack of Kleene operators from the RHS
+/// - `valid` is set in case of errors
+fn check_nested_occurrences(
+    sess: &ParseSess,
+    node_id: NodeId,
+    tts: &[TokenTree],
+    macros: &Stack<'_, MacroState<'_>>,
+    binders: &Binders,
+    ops: &Stack<'_, KleeneToken>,
+    valid: &mut bool,
+) {
+    let mut state = NestedMacroState::Empty;
+    let nested_macros = macros.push(MacroState { binders, ops: ops.into() });
+    let mut nested_binders = Binders::default();
+    for tt in tts {
+        match (state, tt) {
+            (
+                NestedMacroState::Empty,
+                &TokenTree::Token(Token { kind: TokenKind::Ident(name, false), .. }),
+            ) => {
+                if name == sym::macro_rules {
+                    state = NestedMacroState::MacroRules;
+                } else if name == kw::Macro {
+                    state = NestedMacroState::Macro;
+                }
+            }
+            (
+                NestedMacroState::MacroRules,
+                &TokenTree::Token(Token { kind: TokenKind::Not, .. }),
+            ) => {
+                state = NestedMacroState::MacroRulesNot;
+            }
+            (
+                NestedMacroState::MacroRulesNot,
+                &TokenTree::Token(Token { kind: TokenKind::Ident(..), .. }),
+            ) => {
+                state = NestedMacroState::MacroRulesNotName;
+            }
+            (NestedMacroState::MacroRulesNot, &TokenTree::MetaVar(..)) => {
+                state = NestedMacroState::MacroRulesNotName;
+                // We check that the meta-variable is correctly used.
+                check_occurrences(sess, node_id, tt, macros, binders, ops, valid);
+            }
+            (NestedMacroState::MacroRulesNotName, &TokenTree::Delimited(_, ref del))
+            | (NestedMacroState::MacroName, &TokenTree::Delimited(_, ref del))
+                if del.delim == DelimToken::Brace =>
+            {
+                let legacy = state == NestedMacroState::MacroRulesNotName;
+                state = NestedMacroState::Empty;
+                let rest =
+                    check_nested_macro(sess, node_id, legacy, &del.tts, &nested_macros, valid);
+                // If we did not check the whole macro definition, then check the rest as if outside
+                // the macro definition.
+                check_nested_occurrences(
+                    sess,
+                    node_id,
+                    &del.tts[rest..],
+                    macros,
+                    binders,
+                    ops,
+                    valid,
+                );
+            }
+            (
+                NestedMacroState::Macro,
+                &TokenTree::Token(Token { kind: TokenKind::Ident(..), .. }),
+            ) => {
+                state = NestedMacroState::MacroName;
+            }
+            (NestedMacroState::Macro, &TokenTree::MetaVar(..)) => {
+                state = NestedMacroState::MacroName;
+                // We check that the meta-variable is correctly used.
+                check_occurrences(sess, node_id, tt, macros, binders, ops, valid);
+            }
+            (NestedMacroState::MacroName, &TokenTree::Delimited(_, ref del))
+                if del.delim == DelimToken::Paren =>
+            {
+                state = NestedMacroState::MacroNameParen;
+                nested_binders = Binders::default();
+                check_binders(
+                    sess,
+                    node_id,
+                    tt,
+                    &nested_macros,
+                    &mut nested_binders,
+                    &Stack::Empty,
+                    valid,
+                );
+            }
+            (NestedMacroState::MacroNameParen, &TokenTree::Delimited(_, ref del))
+                if del.delim == DelimToken::Brace =>
+            {
+                state = NestedMacroState::Empty;
+                check_occurrences(
+                    sess,
+                    node_id,
+                    tt,
+                    &nested_macros,
+                    &nested_binders,
+                    &Stack::Empty,
+                    valid,
+                );
+            }
+            (_, ref tt) => {
+                state = NestedMacroState::Empty;
+                check_occurrences(sess, node_id, tt, macros, binders, ops, valid);
+            }
+        }
+    }
+}
+
+/// Checks the body of a nested macro, returns where the check stopped, and sets `valid` to false in
+/// case of errors.
+///
+/// The token trees are checked as long as they look like a list of (LHS) => {RHS} token trees.
+/// This check is a best-effort attempt to detect a macro definition. It returns the position in
+/// `tts` where we stopped checking because we detected we were not in a macro definition anymore.
+///
+/// Arguments:
+/// - `sess` is used to emit diagnostics and lints
+/// - `node_id` is used to emit lints
+/// - `legacy` specifies whether the macro is legacy
+/// - `tts` is checked as a list of (LHS) => {RHS}
+/// - `macros` is the stack of outer macros
+/// - `valid` is set in case of errors
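+///
+/// For instance, for a nested `macro_rules! foo { (a) => {1}; (b) => {2} }`, the seven token
+/// trees `(a) => {1} ; (b) => {2}` all look like macro rules, so the whole slice is checked and
+/// `7` is returned, leaving nothing for the caller to re-check.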
+fn check_nested_macro(
+    sess: &ParseSess,
+    node_id: NodeId,
+    legacy: bool,
+    tts: &[TokenTree],
+    macros: &Stack<'_, MacroState<'_>>,
+    valid: &mut bool,
+) -> usize {
+    let n = tts.len();
+    let mut i = 0;
+    let separator = if legacy { TokenKind::Semi } else { TokenKind::Comma };
+    loop {
+        // We expect 3 token trees: `(LHS) => {RHS}`. The separator is checked after.
+        if i + 2 >= n
+            || !tts[i].is_delimited()
+            || !tts[i + 1].is_token(&TokenKind::FatArrow)
+            || !tts[i + 2].is_delimited()
+        {
+            break;
+        }
+        let lhs = &tts[i];
+        let rhs = &tts[i + 2];
+        let mut binders = Binders::default();
+        check_binders(sess, node_id, lhs, macros, &mut binders, &Stack::Empty, valid);
+        check_occurrences(sess, node_id, rhs, macros, &binders, &Stack::Empty, valid);
+        // Since the last semicolon is optional for legacy macros and `decl_macro`s are not
+        // terminated, we increment our checked position by how many token trees we already checked
+        // (the 3 above) before checking for the separator.
+        i += 3;
+        if i == n || !tts[i].is_token(&separator) {
+            break;
+        }
+        // We increment our checked position for the semicolon.
+        i += 1;
+    }
+    i
+}
+
+/// Checks that a meta-variable occurrence is valid.
+///
+/// Arguments:
+/// - `sess` is used to emit diagnostics and lints
+/// - `node_id` is used to emit lints
+/// - `macros` is the stack of possible outer macros
+/// - `binders` contains the binders of the associated LHS
+/// - `ops` is the stack of Kleene operators from the RHS
+/// - `span` is the span of the meta-variable to check
+/// - `name` is the name of the meta-variable to check
+fn check_ops_is_prefix(
+    sess: &ParseSess,
+    node_id: NodeId,
+    macros: &Stack<'_, MacroState<'_>>,
+    binders: &Binders,
+    ops: &Stack<'_, KleeneToken>,
+    span: Span,
+    name: Ident,
+) {
+    let macros = macros.push(MacroState { binders, ops: ops.into() });
+    // Accumulates the stacks of operators of each state until (and including when) the
+    // meta-variable is found. The innermost stack is first.
+    let mut acc: SmallVec<[&SmallVec<[KleeneToken; 1]>; 1]> = SmallVec::new();
+    for state in &macros {
+        acc.push(&state.ops);
+        if let Some(binder) = state.binders.get(&name) {
+            // This variable concatenates the stacks of operators from the RHS of the rule whose
+            // LHS bound the meta-variable down to where it is used (in possibly nested macros).
+            // The outermost operator is first.
+            let mut occurrence_ops: SmallVec<[KleeneToken; 2]> = SmallVec::new();
+            // We need to iterate from the end so that we start with the outermost stack.
+            for ops in acc.iter().rev() {
+                occurrence_ops.extend_from_slice(ops);
+            }
+            ops_is_prefix(sess, node_id, span, name, &binder.ops, &occurrence_ops);
+            return;
+        }
+    }
+    buffer_lint(sess, span.into(), node_id, &format!("unknown macro variable `{}`", name));
+}
+
+/// Returns whether `binder_ops` is a prefix of `occurrence_ops`.
+///
+/// The stack of Kleene operators of a meta-variable occurrence just needs to have the stack of
+/// Kleene operators of its binder as a prefix.
+///
+/// Consider $i in the following example:
+///
+///     ( $( $i:ident = $($j:ident),+ );* ) => { $($( $i += $j; )+)* }
+///
+/// It occurs under the Kleene stack ["*", "+"] and is bound under ["*"] only.
+///
+/// Arguments:
+/// - `sess` is used to emit diagnostics and lints
+/// - `node_id` is used to emit lints
+/// - `span` is the span of the meta-variable being checked
+/// - `name` is the name of the meta-variable being checked
+/// - `binder_ops` is the stack of Kleene operators for the binder
+/// - `occurrence_ops` is the stack of Kleene operators for the occurrence
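+///
+/// Two misuses this rejects: in `( $( $i:ident ),* ) => { $i }` the occurrence of `$i` has an
+/// empty Kleene stack while its binder has `["*"]`, so it is still repeating at that depth; in
+/// `( $( $i:ident ),* ) => { $( $i )+ }` the binder uses `*` but the occurrence uses `+`, so the
+/// Kleene operators conflict.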
+fn ops_is_prefix(
+    sess: &ParseSess,
+    node_id: NodeId,
+    span: Span,
+    name: Ident,
+    binder_ops: &[KleeneToken],
+    occurrence_ops: &[KleeneToken],
+) {
+    for (i, binder) in binder_ops.iter().enumerate() {
+        if i >= occurrence_ops.len() {
+            let mut span = MultiSpan::from_span(span);
+            span.push_span_label(binder.span, "expected repetition".into());
+            let message = &format!("variable '{}' is still repeating at this depth", name);
+            buffer_lint(sess, span, node_id, message);
+            return;
+        }
+        let occurrence = &occurrence_ops[i];
+        if occurrence.op != binder.op {
+            let mut span = MultiSpan::from_span(span);
+            span.push_span_label(binder.span, "expected repetition".into());
+            span.push_span_label(occurrence.span, "conflicting repetition".into());
+            let message = "meta-variable repeats with different Kleene operator";
+            buffer_lint(sess, span, node_id, message);
+            return;
+        }
+    }
+}
+
+fn buffer_lint(sess: &ParseSess, span: MultiSpan, node_id: NodeId, message: &str) {
+    sess.buffer_lint(BufferedEarlyLintId::MetaVariableMisuse, span, node_id, message);
+}
diff --git a/src/libsyntax_expand/mbe/macro_parser.rs b/src/libsyntax_expand/mbe/macro_parser.rs
new file mode 100644 (file)
index 0000000..3efe226
--- /dev/null
@@ -0,0 +1,947 @@
+//! This is an NFA-based parser, which calls out to the main rust parser for named non-terminals
+//! (which it commits to fully when it hits one in a grammar). There's a set of current NFA threads
+//! and a set of next ones. Instead of NTs, we have a special case for Kleene star. The big-O, in
+//! pathological cases, is worse than traditional use of NFA or Earley parsing, but it's an easier
+//! fit for Macro-by-Example-style rules.
+//!
+//! (In order to prevent the pathological case, we'd need to lazily construct the resulting
+//! `NamedMatch`es at the very end. It'd be a pain, and require more memory to keep around old
+//! items, but it would also save overhead)
+//!
+//! We don't say this parser uses the Earley algorithm, because that description would be
+//! unnecessarily inaccurate.
+//! The macro parser restricts itself to the features of finite state automata. Earley parsers
+//! can be described as an extension of NFAs with completion rules, prediction rules, and recursion.
+//!
+//! Quick intro to how the parser works:
+//!
+//! A 'position' is a spot in the middle of a matcher, represented here by a `·`.
+//! For example `· a $( a )* a b` is a position, as is `a $( · a )* a b`.
+//!
+//! The parser walks through the input a character at a time, maintaining a list
+//! of threads consistent with the current position in the input string: `cur_items`.
+//!
+//! As it processes them, it fills up `eof_items` with threads that would be valid if
+//! the macro invocation is now over, `bb_items` with threads that are waiting on
+//! a Rust non-terminal like `$e:expr`, and `next_items` with threads that are waiting
+//! on a particular token. Most of the logic concerns moving the · through the
+//! repetitions indicated by Kleene stars. The rules for moving the · without
+//! consuming any input are called epsilon transitions. It only advances or calls
+//! out to the real Rust parser when no `cur_items` threads remain.
+//!
+//! Example:
+//!
+//! ```text, ignore
+//! Start parsing a a a a b against [· a $( a )* a b].
+//!
+//! Remaining input: a a a a b
+//! next: [· a $( a )* a b]
+//!
+//! - - - Advance over an a. - - -
+//!
+//! Remaining input: a a a b
+//! cur: [a · $( a )* a b]
+//! Descend/Skip (first item).
+//! next: [a $( · a )* a b]  [a $( a )* · a b].
+//!
+//! - - - Advance over an a. - - -
+//!
+//! Remaining input: a a b
+//! cur: [a $( a · )* a b]  [a $( a )* a · b]
+//! Follow epsilon transition: Finish/Repeat (first item)
+//! next: [a $( a )* · a b]  [a $( · a )* a b]  [a $( a )* a · b]
+//!
+//! - - - Advance over an a. - - - (this looks exactly like the last step)
+//!
+//! Remaining input: a b
+//! cur: [a $( a · )* a b]  [a $( a )* a · b]
+//! Follow epsilon transition: Finish/Repeat (first item)
+//! next: [a $( a )* · a b]  [a $( · a )* a b]  [a $( a )* a · b]
+//!
+//! - - - Advance over an a. - - - (this looks exactly like the last step)
+//!
+//! Remaining input: b
+//! cur: [a $( a · )* a b]  [a $( a )* a · b]
+//! Follow epsilon transition: Finish/Repeat (first item)
+//! next: [a $( a )* · a b]  [a $( · a )* a b]  [a $( a )* a · b]
+//!
+//! - - - Advance over a b. - - -
+//!
+//! Remaining input: ''
+//! eof: [a $( a )* a b ·]
+//! ```
+
+crate use NamedMatch::*;
+crate use ParseResult::*;
+use TokenTreeOrTokenTreeSlice::*;
+
+use crate::mbe::{self, TokenTree};
+
+use syntax::ast::{Ident, Name};
+use syntax::parse::{Directory, PResult};
+use syntax::parse::parser::{Parser, PathStyle};
+use syntax::parse::token::{self, DocComment, Nonterminal, Token};
+use syntax::print::pprust;
+use syntax::sess::ParseSess;
+use syntax::symbol::{kw, sym, Symbol};
+use syntax::tokenstream::{DelimSpan, TokenStream};
+
+use errors::FatalError;
+use smallvec::{smallvec, SmallVec};
+use syntax_pos::Span;
+
+use rustc_data_structures::fx::FxHashMap;
+use rustc_data_structures::sync::Lrc;
+use std::collections::hash_map::Entry::{Occupied, Vacant};
+use std::mem;
+use std::ops::{Deref, DerefMut};
+
+// To avoid costly uniqueness checks, we require that `MatchSeq` always has a nonempty body.
+
+/// Either a sequence of token trees or a single one. This is used as the representation of the
+/// sequence of tokens that make up a matcher.
+#[derive(Clone)]
+enum TokenTreeOrTokenTreeSlice<'tt> {
+    Tt(TokenTree),
+    TtSeq(&'tt [TokenTree]),
+}
+
+impl<'tt> TokenTreeOrTokenTreeSlice<'tt> {
+    /// Returns the number of constituent top-level token trees of `self` (top-level in that it
+    /// will not recursively descend into subtrees).
+    fn len(&self) -> usize {
+        match *self {
+            TtSeq(ref v) => v.len(),
+            Tt(ref tt) => tt.len(),
+        }
+    }
+
+    /// The `index`-th token tree of `self`.
+    fn get_tt(&self, index: usize) -> TokenTree {
+        match *self {
+            TtSeq(ref v) => v[index].clone(),
+            Tt(ref tt) => tt.get_tt(index),
+        }
+    }
+}
+
+/// An unzipping of `TokenTree`s... see the `stack` field of `MatcherPos`.
+///
+/// This is used by `inner_parse_loop` to keep track of delimited submatchers that we have
+/// descended into.
+#[derive(Clone)]
+struct MatcherTtFrame<'tt> {
+    /// The "parent" matcher that we are descending into.
+    elts: TokenTreeOrTokenTreeSlice<'tt>,
+    /// The position of the "dot" in `elts` at the time we descended.
+    idx: usize,
+}
+
+type NamedMatchVec = SmallVec<[NamedMatch; 4]>;
+
+/// Represents a single "position" (aka "matcher position", aka "item"), as
+/// described in the module documentation.
+///
+/// Here:
+///
+/// - `'root` represents the lifetime of the stack slot that holds the root
+///   `MatcherPos`. As described in `MatcherPosHandle`, the root `MatcherPos`
+///   structure is stored on the stack, but subsequent instances are put into
+///   the heap.
+/// - `'tt` represents the lifetime of the token trees that this matcher
+///   position refers to.
+///
+/// It is important to distinguish these two lifetimes because we have a
+/// `SmallVec<TokenTreeOrTokenTreeSlice<'tt>>` below, and the destructor of
+/// that is considered to possibly access the data from its elements (it lacks
+/// a `#[may_dangle]` attribute). As a result, the compiler needs to know that
+/// all the elements in that `SmallVec` strictly outlive the root stack slot
+/// lifetime. By separating `'tt` from `'root`, we can show that.
+#[derive(Clone)]
+struct MatcherPos<'root, 'tt> {
+    /// The token or sequence of tokens that make up the matcher
+    top_elts: TokenTreeOrTokenTreeSlice<'tt>,
+
+    /// The position of the "dot" in this matcher
+    idx: usize,
+
+    /// The first span of source that the beginning of this matcher corresponds to. In other
+    /// words, the token in the source whose span is `sp_open` is matched against the first token of
+    /// the matcher.
+    sp_open: Span,
+
+    /// For each named metavar in the matcher, we keep track of token trees matched against the
+    /// metavar by the black box parser. In particular, there may be more than one match per
+    /// metavar if we are in a repetition (each repetition matches each of the variables).
+    /// Moreover, matchers and repetitions can be nested; the `matches` field is shared (hence the
+    /// `Rc`) among all "nested" matchers. `match_lo`, `match_cur`, and `match_hi` keep track of
+    /// the current position of the `self` matcher position in the shared `matches` list.
+    ///
+    /// Also, note that while we are descending into a sequence, matchers are given their own
+    /// `matches` vector. Only once we reach the end of a full repetition of the sequence do we add
+    /// all bound matches from the submatcher into the shared top-level `matches` vector. If `sep`
+    /// and `up` are `Some`, then `matches` is _not_ the shared top-level list. Instead, if one
+    /// wants the shared `matches`, one should use `up.matches`.
+    matches: Box<[Lrc<NamedMatchVec>]>,
+    /// The position in `matches` corresponding to the first metavar in this matcher's sequence of
+    /// token trees. In other words, the first metavar in the first token of `top_elts` corresponds
+    /// to `matches[match_lo]`.
+    match_lo: usize,
+    /// The position in `matches` corresponding to the metavar we are currently trying to match
+    /// against the source token stream. `match_lo <= match_cur <= match_hi`.
+    match_cur: usize,
+    /// Similar to `match_lo` except `match_hi` is the position in `matches` of the _last_ metavar
+    /// in this matcher.
+    match_hi: usize,
+
+    // The following fields are used if we are matching a repetition. If we aren't, they should be
+    // `None`.
+
+    /// The KleeneOp of this sequence if we are in a repetition.
+    seq_op: Option<mbe::KleeneOp>,
+
+    /// The separator if we are in a repetition.
+    sep: Option<Token>,
+
+    /// The "parent" matcher position if we are in a repetition. That is, the matcher position just
+    /// before we enter the sequence.
+    up: Option<MatcherPosHandle<'root, 'tt>>,
+
+    /// Specifically used to "unzip" token trees. By "unzip", we mean to unwrap the delimiters from
+    /// a delimited token tree (e.g., something wrapped in `(` `)`) or to get the contents of a doc
+    /// comment...
+    ///
+    /// When matching against matchers with nested delimited submatchers (e.g., `pat ( pat ( .. )
+    /// pat ) pat`), we need to keep track of the matchers we are descending into. This stack does
+    /// that where the bottom of the stack is the outermost matcher.
+    /// Also, throughout the comments, this "descent" is often referred to as "unzipping"...
+    stack: SmallVec<[MatcherTtFrame<'tt>; 1]>,
+}
+
+impl<'root, 'tt> MatcherPos<'root, 'tt> {
+    /// Adds `m` as a named match for the `idx`-th metavar.
+    fn push_match(&mut self, idx: usize, m: NamedMatch) {
+        let matches = Lrc::make_mut(&mut self.matches[idx]);
+        matches.push(m);
+    }
+}
+
+// Lots of MatcherPos instances are created at runtime. Allocating them on the
+// heap is slow. Furthermore, using SmallVec<MatcherPos> to allocate them all
+// on the stack is also slow, because MatcherPos is quite a large type and
+// instances get moved around a lot between vectors, which requires lots of
+// slow memcpy calls.
+//
+// Therefore, the initial MatcherPos is always allocated on the stack,
+// subsequent ones (of which there aren't that many) are allocated on the heap,
+// and this type is used to encapsulate both cases.
+enum MatcherPosHandle<'root, 'tt> {
+    Ref(&'root mut MatcherPos<'root, 'tt>),
+    Box(Box<MatcherPos<'root, 'tt>>),
+}
+
+impl<'root, 'tt> Clone for MatcherPosHandle<'root, 'tt> {
+    // This always produces a new Box.
+    fn clone(&self) -> Self {
+        MatcherPosHandle::Box(match *self {
+            MatcherPosHandle::Ref(ref r) => Box::new((**r).clone()),
+            MatcherPosHandle::Box(ref b) => b.clone(),
+        })
+    }
+}
+
+impl<'root, 'tt> Deref for MatcherPosHandle<'root, 'tt> {
+    type Target = MatcherPos<'root, 'tt>;
+    fn deref(&self) -> &Self::Target {
+        match *self {
+            MatcherPosHandle::Ref(ref r) => r,
+            MatcherPosHandle::Box(ref b) => b,
+        }
+    }
+}
+
+impl<'root, 'tt> DerefMut for MatcherPosHandle<'root, 'tt> {
+    fn deref_mut(&mut self) -> &mut MatcherPos<'root, 'tt> {
+        match *self {
+            MatcherPosHandle::Ref(ref mut r) => r,
+            MatcherPosHandle::Box(ref mut b) => b,
+        }
+    }
+}
+
+/// Represents the possible results of an attempted parse.
+crate enum ParseResult<T> {
+    /// Parsed successfully.
+    Success(T),
+    /// Arm failed to match. If the second parameter is `token::Eof`, it indicates an unexpected
+    /// end of macro invocation. Otherwise, it indicates that no rules expected the given token.
+    Failure(Token, &'static str),
+    /// Fatal error (malformed macro?). Abort compilation.
+    Error(syntax_pos::Span, String),
+}
+
+/// A `ParseResult` where the `Success` variant contains a mapping of `Ident`s to `NamedMatch`es.
+/// This represents the mapping of metavars to the token trees they bind to.
+crate type NamedParseResult = ParseResult<FxHashMap<Ident, NamedMatch>>;
+
+/// Count how many metavars are named in the given matcher `ms`.
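+///
+/// For example, a matcher like `$( $i:ident ),* $j:expr` counts two names: the single capture
+/// inside the sequence plus `$j` itself.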
+pub(super) fn count_names(ms: &[TokenTree]) -> usize {
+    ms.iter().fold(0, |count, elt| {
+        count + match *elt {
+            TokenTree::Sequence(_, ref seq) => seq.num_captures,
+            TokenTree::Delimited(_, ref delim) => count_names(&delim.tts),
+            TokenTree::MetaVar(..) => 0,
+            TokenTree::MetaVarDecl(..) => 1,
+            TokenTree::Token(..) => 0,
+        }
+    })
+}
+
+/// `len` `Vec`s (initially shared and empty) that will store matches of metavars.
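+///
+/// All slots initially point to the same shared empty vector; `push_match` goes through
+/// `Lrc::make_mut`, so a slot is only cloned once something is actually written to it.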
+fn create_matches(len: usize) -> Box<[Lrc<NamedMatchVec>]> {
+    if len == 0 {
+        vec![]
+    } else {
+        let empty_matches = Lrc::new(SmallVec::new());
+        vec![empty_matches; len]
+    }.into_boxed_slice()
+}
+
+/// Generates the top-level matcher position in which the "dot" is before the first token of the
+/// matcher `ms` and we are going to start matching at the span `open` in the source.
+fn initial_matcher_pos<'root, 'tt>(ms: &'tt [TokenTree], open: Span) -> MatcherPos<'root, 'tt> {
+    let match_idx_hi = count_names(ms);
+    let matches = create_matches(match_idx_hi);
+    MatcherPos {
+        // Start with the top level matcher given to us
+        top_elts: TtSeq(ms), // "elts" is an abbr. for "elements"
+        // The "dot" is before the first token of the matcher
+        idx: 0,
+        // We start matching at the span `open` in the source code
+        sp_open: open,
+
+        // Initialize `matches` to a bunch of empty `Vec`s -- one for each metavar in `top_elts`.
+        // `match_lo` for `top_elts` is 0 and `match_hi` is `matches.len()`. `match_cur` is 0 since
+        // we haven't actually matched anything yet.
+        matches,
+        match_lo: 0,
+        match_cur: 0,
+        match_hi: match_idx_hi,
+
+        // Haven't descended into any delimiters, so empty stack
+        stack: smallvec![],
+
+        // Haven't descended into any sequences, so both of these are `None`.
+        seq_op: None,
+        sep: None,
+        up: None,
+    }
+}
+
+/// `NamedMatch` is a pattern-match result for a single `token::MATCH_NONTERMINAL`:
+/// so it is associated with a single ident in a parse, and all
+/// `MatchedNonterminal`s in the `NamedMatch` have the same non-terminal type
+/// (expr, item, etc). Each leaf in a single `NamedMatch` corresponds to a
+/// single `token::MATCH_NONTERMINAL` in the `TokenTree` that produced it.
+///
+/// The in-memory structure of a particular `NamedMatch` represents the match
+/// that occurred when a particular subset of a matcher was applied to a
+/// particular token tree.
+///
+/// The width of each `MatchedSeq` in the `NamedMatch`, and the identity of
+/// the `MatchedNonterminal`s, will depend on the token tree it was applied
+/// to: each `MatchedSeq` corresponds to a single `TTSeq` in the originating
+/// token tree. The depth of the `NamedMatch` structure will therefore depend
+/// only on the nesting depth of `ast::TTSeq`s in the originating
+/// token tree it was derived from.
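+///
+/// For example, matching the input `a, b` against the matcher `$( $x:ident ),*` binds `x` to,
+/// roughly, `MatchedSeq([MatchedNonterminal(a), MatchedNonterminal(b)], ..)`.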
+#[derive(Debug, Clone)]
+crate enum NamedMatch {
+    MatchedSeq(Lrc<NamedMatchVec>, DelimSpan),
+    MatchedNonterminal(Lrc<Nonterminal>),
+}
+
+/// Takes a sequence of token trees `ms` representing a matcher which successfully matched input
+/// and an iterator of items that matched input and produces a `NamedParseResult`.
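+///
+/// Each metavar must bind exactly once; e.g., a matcher like `$x:ident $x:expr` is rejected here
+/// with a "duplicated bind name" error.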
+fn nameize<I: Iterator<Item = NamedMatch>>(
+    sess: &ParseSess,
+    ms: &[TokenTree],
+    mut res: I,
+) -> NamedParseResult {
+    // Recursively descend into each type of matcher (e.g., sequences, delimited, metavars) and make
+    // sure that each metavar has _exactly one_ binding. If a metavar does not have exactly one
+    // binding, then there is an error. If it does, then we insert the binding into the
+    // `NamedParseResult`.
+    fn n_rec<I: Iterator<Item = NamedMatch>>(
+        sess: &ParseSess,
+        m: &TokenTree,
+        res: &mut I,
+        ret_val: &mut FxHashMap<Ident, NamedMatch>,
+    ) -> Result<(), (syntax_pos::Span, String)> {
+        match *m {
+            TokenTree::Sequence(_, ref seq) => for next_m in &seq.tts {
+                n_rec(sess, next_m, res.by_ref(), ret_val)?
+            },
+            TokenTree::Delimited(_, ref delim) => for next_m in &delim.tts {
+                n_rec(sess, next_m, res.by_ref(), ret_val)?;
+            },
+            TokenTree::MetaVarDecl(span, _, id) if id.name == kw::Invalid => {
+                if sess.missing_fragment_specifiers.borrow_mut().remove(&span) {
+                    return Err((span, "missing fragment specifier".to_string()));
+                }
+            }
+            TokenTree::MetaVarDecl(sp, bind_name, _) => {
+                match ret_val.entry(bind_name) {
+                    Vacant(spot) => {
+                        spot.insert(res.next().unwrap());
+                    }
+                    Occupied(..) => {
+                        return Err((sp, format!("duplicated bind name: {}", bind_name)))
+                    }
+                }
+            }
+            TokenTree::MetaVar(..) | TokenTree::Token(..) => (),
+        }
+
+        Ok(())
+    }
+
+    let mut ret_val = FxHashMap::default();
+    for m in ms {
+        match n_rec(sess, m, res.by_ref(), &mut ret_val) {
+            Ok(_) => {}
+            Err((sp, msg)) => return Error(sp, msg),
+        }
+    }
+
+    Success(ret_val)
+}
+
+/// Performs a token equality check, ignoring syntax context (that is, an unhygienic comparison)
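+///
+/// For example, two `foo` identifiers produced by different macro expansions compare equal here,
+/// while `foo` and `r#foo` do not, since their `is_raw` flags differ.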
+fn token_name_eq(t1: &Token, t2: &Token) -> bool {
+    if let (Some((ident1, is_raw1)), Some((ident2, is_raw2))) = (t1.ident(), t2.ident()) {
+        ident1.name == ident2.name && is_raw1 == is_raw2
+    } else if let (Some(ident1), Some(ident2)) = (t1.lifetime(), t2.lifetime()) {
+        ident1.name == ident2.name
+    } else {
+        t1.kind == t2.kind
+    }
+}
+
+/// Process the matcher positions of `cur_items` until it is empty. In the process, this will
+/// produce more items in `next_items`, `eof_items`, and `bb_items`.
+///
+/// For more info about how this happens, see the module-level doc comments and the inline
+/// comments of this function.
+///
+/// # Parameters
+///
+/// - `sess`: the parsing session into which errors are emitted.
+/// - `cur_items`: the set of current items to be processed. This should be empty by the end of a
+///   successful execution of this function.
+/// - `next_items`: the set of newly generated items. These are used to replenish `cur_items` in
+///   the function `parse`.
+/// - `eof_items`: the set of items that would be valid if this was the EOF.
+/// - `bb_items`: the set of items that are waiting for the black-box parser.
+/// - `token`: the current token of the parser.
+/// - `span`: the `Span` in the source code corresponding to the token trees we are trying to match
+///   against the matcher positions in `cur_items`.
+///
+/// # Returns
+///
+/// A `ParseResult`. Note that matches are kept track of through the items generated.
+fn inner_parse_loop<'root, 'tt>(
+    sess: &ParseSess,
+    cur_items: &mut SmallVec<[MatcherPosHandle<'root, 'tt>; 1]>,
+    next_items: &mut Vec<MatcherPosHandle<'root, 'tt>>,
+    eof_items: &mut SmallVec<[MatcherPosHandle<'root, 'tt>; 1]>,
+    bb_items: &mut SmallVec<[MatcherPosHandle<'root, 'tt>; 1]>,
+    token: &Token,
+) -> ParseResult<()> {
+    // Pop items from `cur_items` until it is empty.
+    while let Some(mut item) = cur_items.pop() {
+        // When unzipped trees end, remove them. This corresponds to backtracking out of a
+        // delimited submatcher into which we already descended. In backtracking out again, we need
+        // to advance the "dot" past the delimiters in the outer matcher.
+        while item.idx >= item.top_elts.len() {
+            match item.stack.pop() {
+                Some(MatcherTtFrame { elts, idx }) => {
+                    item.top_elts = elts;
+                    item.idx = idx + 1;
+                }
+                None => break,
+            }
+        }
+
+        // Get the current position of the "dot" (`idx`) in `item` and the number of token trees in
+        // the matcher (`len`).
+        let idx = item.idx;
+        let len = item.top_elts.len();
+
+        // If `idx >= len`, then we are at or past the end of the matcher of `item`.
+        if idx >= len {
+            // We are repeating iff there is a parent. If the matcher is inside of a repetition,
+            // then we could be at the end of a sequence or at the beginning of the next
+            // repetition.
+            if item.up.is_some() {
+                // At this point, regardless of whether there is a separator, we should add all
+                // matches from the complete repetition of the sequence to the shared, top-level
+                // `matches` list (actually, `up.matches`, which could itself not be the top-level,
+                // but anyway...). Moreover, we add another item to `cur_items` in which the "dot"
+                // is at the end of the `up` matcher. This ensures that the "dot" in the `up`
+                // matcher is also advanced sufficiently.
+                //
+                // NOTE: removing the condition `idx == len` allows trailing separators.
+                if idx == len {
+                    // Get the `up` matcher
+                    let mut new_pos = item.up.clone().unwrap();
+
+                    // Add matches from this repetition to the `matches` of `up`
+                    for idx in item.match_lo..item.match_hi {
+                        let sub = item.matches[idx].clone();
+                        let span = DelimSpan::from_pair(item.sp_open, token.span);
+                        new_pos.push_match(idx, MatchedSeq(sub, span));
+                    }
+
+                    // Move the "dot" past the repetition in `up`
+                    new_pos.match_cur = item.match_hi;
+                    new_pos.idx += 1;
+                    cur_items.push(new_pos);
+                }
+
+                // Check if we need a separator.
+                if idx == len && item.sep.is_some() {
+                    // We have a separator, and it is the current token. We can advance past the
+                    // separator token.
+                    if item.sep
+                        .as_ref()
+                        .map(|sep| token_name_eq(token, sep))
+                        .unwrap_or(false)
+                    {
+                        item.idx += 1;
+                        next_items.push(item);
+                    }
+                }
+                // We don't need a separator. Move the "dot" back to the beginning of the matcher
+                // and try to match again UNLESS we are only allowed to have _one_ repetition.
+                else if item.seq_op != Some(mbe::KleeneOp::ZeroOrOne) {
+                    item.match_cur = item.match_lo;
+                    item.idx = 0;
+                    cur_items.push(item);
+                }
+            }
+            // If we are not in a repetition, then being at the end of a matcher means that we have
+            // reached the potential end of the input.
+            else {
+                eof_items.push(item);
+            }
+        }
+        // We are in the middle of a matcher.
+        else {
+            // Look at what token in the matcher we are trying to match the current token (`token`)
+            // against. Depending on that, we may generate new items.
+            match item.top_elts.get_tt(idx) {
+                // Need to descend into a sequence
+                TokenTree::Sequence(sp, seq) => {
+                    // Examine the case where there are 0 matches of this sequence. We are
+                    // implicitly disallowing OneOrMore from having 0 matches here. Thus, that will
+                    // result in a "no rules expected token" error by virtue of this matcher not
+                    // working.
+                    if seq.kleene.op == mbe::KleeneOp::ZeroOrMore
+                        || seq.kleene.op == mbe::KleeneOp::ZeroOrOne
+                    {
+                        let mut new_item = item.clone();
+                        new_item.match_cur += seq.num_captures;
+                        new_item.idx += 1;
+                        for idx in item.match_cur..item.match_cur + seq.num_captures {
+                            new_item.push_match(idx, MatchedSeq(Lrc::new(smallvec![]), sp));
+                        }
+                        cur_items.push(new_item);
+                    }
+
+                    let matches = create_matches(item.matches.len());
+                    cur_items.push(MatcherPosHandle::Box(Box::new(MatcherPos {
+                        stack: smallvec![],
+                        sep: seq.separator.clone(),
+                        seq_op: Some(seq.kleene.op),
+                        idx: 0,
+                        matches,
+                        match_lo: item.match_cur,
+                        match_cur: item.match_cur,
+                        match_hi: item.match_cur + seq.num_captures,
+                        up: Some(item),
+                        sp_open: sp.open,
+                        top_elts: Tt(TokenTree::Sequence(sp, seq)),
+                    })));
+                }
+
+                // We need to match a metavar (but the identifier is invalid)... this is an error
+                TokenTree::MetaVarDecl(span, _, id) if id.name == kw::Invalid => {
+                    if sess.missing_fragment_specifiers.borrow_mut().remove(&span) {
+                        return Error(span, "missing fragment specifier".to_string());
+                    }
+                }
+
+                // We need to match a metavar with a valid ident... call out to the black-box
+                // parser by adding an item to `bb_items`.
+                TokenTree::MetaVarDecl(_, _, id) => {
+                    // Built-in nonterminals never start with these tokens,
+                    // so we can eliminate them from consideration.
+                    if may_begin_with(token, id.name) {
+                        bb_items.push(item);
+                    }
+                }
+
+                // We need to descend into a delimited submatcher or a doc comment. To do this, we
+                // push the current matcher onto a stack and push a new item containing the
+                // submatcher onto `cur_items`.
+                //
+                // At the beginning of the loop, if we reach the end of the delimited submatcher,
+                // we pop the stack to backtrack out of the descent.
+                seq @ TokenTree::Delimited(..) |
+                seq @ TokenTree::Token(Token { kind: DocComment(..), .. }) => {
+                    let lower_elts = mem::replace(&mut item.top_elts, Tt(seq));
+                    let idx = item.idx;
+                    item.stack.push(MatcherTtFrame {
+                        elts: lower_elts,
+                        idx,
+                    });
+                    item.idx = 0;
+                    cur_items.push(item);
+                }
+
+                // We just matched a normal token. We can just advance the parser.
+                TokenTree::Token(t) if token_name_eq(&t, token) => {
+                    item.idx += 1;
+                    next_items.push(item);
+                }
+
+                // There was another token that was not `token`... This means we can't add any
+                // rules. NOTE that this is not necessarily an error unless _all_ items in
+                // `cur_items` end up doing this. There may still be some other matchers that do
+                // end up working out.
+                TokenTree::Token(..) | TokenTree::MetaVar(..) => {}
+            }
+        }
+    }
+
+    // Yay a successful parse (so far)!
+    Success(())
+}
+
+/// Use the given sequence of token trees (`ms`) as a matcher. Match the given token stream `tts`
+/// against it and return the match.
+///
+/// # Parameters
+///
+/// - `sess`: The session into which errors are emitted
+/// - `tts`: The tokenstream we are matching against the pattern `ms`
+/// - `ms`: A sequence of token trees representing a pattern against which we are matching
+/// - `directory`: Information about the file locations (needed for the black-box parser)
+/// - `recurse_into_modules`: Whether or not to recurse into modules (needed for the black-box
+///   parser)
+pub(super) fn parse(
+    sess: &ParseSess,
+    tts: TokenStream,
+    ms: &[TokenTree],
+    directory: Option<Directory<'_>>,
+    recurse_into_modules: bool,
+) -> NamedParseResult {
+    // Create a parser that can be used for the "black box" parts.
+    let mut parser = Parser::new(
+        sess,
+        tts,
+        directory,
+        recurse_into_modules,
+        true,
+        syntax::MACRO_ARGUMENTS,
+    );
+
+    // A queue of possible matcher positions. We initialize it with the matcher position in which
+    // the "dot" is before the first token of the first token tree in `ms`. `inner_parse_loop` then
+    // processes all of these possible matcher positions and produces possible next positions into
+    // `next_items`. After some post-processing, the contents of `next_items` replenish `cur_items`
+    // and we start over again.
+    //
+    // This MatcherPos instance is allocated on the stack. All others -- and
+    // there are frequently *no* others! -- are allocated on the heap.
+    let mut initial = initial_matcher_pos(ms, parser.token.span);
+    let mut cur_items = smallvec![MatcherPosHandle::Ref(&mut initial)];
+    let mut next_items = Vec::new();
+
+    loop {
+        // Matcher positions black-box parsed by parser.rs (`parser`)
+        let mut bb_items = SmallVec::new();
+
+        // Matcher positions that would be valid if the macro invocation was over now
+        let mut eof_items = SmallVec::new();
+        assert!(next_items.is_empty());
+
+        // Process `cur_items` until either we have finished the input or we need to get some
+        // parsing from the black-box parser done. The result is that `next_items` will contain a
+        // bunch of possible next matcher positions.
+        match inner_parse_loop(
+            sess,
+            &mut cur_items,
+            &mut next_items,
+            &mut eof_items,
+            &mut bb_items,
+            &parser.token,
+        ) {
+            Success(_) => {}
+            Failure(token, msg) => return Failure(token, msg),
+            Error(sp, msg) => return Error(sp, msg),
+        }
+
+        // inner parse loop handled all cur_items, so it's empty
+        assert!(cur_items.is_empty());
+
+        // We need to do some post-processing after the `inner_parse_loop`.
+        //
+        // Error messages here could be improved with links to original rules.
+
+        // If we reached the EOF, check that there is EXACTLY ONE possible matcher. Otherwise,
+        // either the parse is ambiguous (which should never happen) or there is a syntax error.
+        if parser.token == token::Eof {
+            if eof_items.len() == 1 {
+                let matches = eof_items[0]
+                    .matches
+                    .iter_mut()
+                    .map(|dv| Lrc::make_mut(dv).pop().unwrap());
+                return nameize(sess, ms, matches);
+            } else if eof_items.len() > 1 {
+                return Error(
+                    parser.token.span,
+                    "ambiguity: multiple successful parses".to_string(),
+                );
+            } else {
+                return Failure(
+                    Token::new(token::Eof, if parser.token.span.is_dummy() {
+                        parser.token.span
+                    } else {
+                        sess.source_map().next_point(parser.token.span)
+                    }),
+                    "missing tokens in macro arguments",
+                );
+            }
+        }
+        // Performance hack: eof_items may share matchers via Rc with other things that we want
+        // to modify. Dropping eof_items now may drop these refcounts to 1, preventing an
+        // unnecessary implicit clone later in Rc::make_mut.
+        drop(eof_items);
+
+        // Another possibility is that we need to call out to parse some rust nonterminal
+        // (black-box) parser. However, if there is not EXACTLY ONE of these, something is wrong.
+        if (!bb_items.is_empty() && !next_items.is_empty()) || bb_items.len() > 1 {
+            let nts = bb_items
+                .iter()
+                .map(|item| match item.top_elts.get_tt(item.idx) {
+                    TokenTree::MetaVarDecl(_, bind, name) => format!("{} ('{}')", name, bind),
+                    _ => panic!(),
+                })
+                .collect::<Vec<String>>()
+                .join(" or ");
+
+            return Error(
+                parser.token.span,
+                format!(
+                    "local ambiguity: multiple parsing options: {}",
+                    match next_items.len() {
+                        0 => format!("built-in NTs {}.", nts),
+                        1 => format!("built-in NTs {} or 1 other option.", nts),
+                        n => format!("built-in NTs {} or {} other options.", nts, n),
+                    }
+                ),
+            );
+        }
+        // If there are no possible next positions AND we aren't waiting for the black-box parser,
+        // then there is a syntax error.
+        else if bb_items.is_empty() && next_items.is_empty() {
+            return Failure(
+                parser.token.take(),
+                "no rules expected this token in macro call",
+            );
+        }
+        // Dump all possible `next_items` into `cur_items` for the next iteration.
+        else if !next_items.is_empty() {
+            // Now process the next token
+            cur_items.extend(next_items.drain(..));
+            parser.bump();
+        }
+        // Finally, we have the case where we need to call the black-box parser to get some
+        // nonterminal.
+        else {
+            assert_eq!(bb_items.len(), 1);
+
+            let mut item = bb_items.pop().unwrap();
+            if let TokenTree::MetaVarDecl(span, _, ident) = item.top_elts.get_tt(item.idx) {
+                let match_cur = item.match_cur;
+                item.push_match(
+                    match_cur,
+                    MatchedNonterminal(Lrc::new(parse_nt(&mut parser, span, ident.name))),
+                );
+                item.idx += 1;
+                item.match_cur += 1;
+            } else {
+                unreachable!()
+            }
+            cur_items.push(item);
+        }
+
+        assert!(!cur_items.is_empty());
+    }
+}
+
+/// The token is an identifier, but not `_`.
+/// We prohibit passing `_` to macros expecting `ident` for now.
+fn get_macro_name(token: &Token) -> Option<(Name, bool)> {
+    match token.kind {
+        token::Ident(name, is_raw) if name != kw::Underscore => Some((name, is_raw)),
+        _ => None,
+    }
+}
+
+/// Checks whether a non-terminal may begin with a particular token.
+///
+/// Returning `false` is a *stability guarantee* that such a matcher will *never* begin with that
+/// token. Be conservative (return true) if not sure.
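+///
+/// For example, a `,` may begin a `vis` matcher (the empty visibility), but it can never begin an
+/// `ident` matcher, so an `$x:ident` position is not handed to the black-box parser when the next
+/// token is a comma.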
+fn may_begin_with(token: &Token, name: Name) -> bool {
+    /// Checks whether the non-terminal may contain a single (non-keyword) identifier.
+    fn may_be_ident(nt: &token::Nonterminal) -> bool {
+        match *nt {
+            token::NtItem(_) | token::NtBlock(_) | token::NtVis(_) => false,
+            _ => true,
+        }
+    }
+
+    match name {
+        sym::expr => token.can_begin_expr()
+            // This exception is here for backwards compatibility.
+            && !token.is_keyword(kw::Let),
+        sym::ty => token.can_begin_type(),
+        sym::ident => get_macro_name(token).is_some(),
+        sym::literal => token.can_begin_literal_or_bool(),
+        sym::vis => match token.kind {
+            // The follow-set of :vis + "priv" keyword + interpolated
+            token::Comma | token::Ident(..) | token::Interpolated(_) => true,
+            _ => token.can_begin_type(),
+        },
+        sym::block => match token.kind {
+            token::OpenDelim(token::Brace) => true,
+            token::Interpolated(ref nt) => match **nt {
+                token::NtItem(_)
+                | token::NtPat(_)
+                | token::NtTy(_)
+                | token::NtIdent(..)
+                | token::NtMeta(_)
+                | token::NtPath(_)
+                | token::NtVis(_) => false, // none of these may start with '{'.
+                _ => true,
+            },
+            _ => false,
+        },
+        sym::path | sym::meta => match token.kind {
+            token::ModSep | token::Ident(..) => true,
+            token::Interpolated(ref nt) => match **nt {
+                token::NtPath(_) | token::NtMeta(_) => true,
+                _ => may_be_ident(&nt),
+            },
+            _ => false,
+        },
+        sym::pat => match token.kind {
+            token::Ident(..) |               // box, ref, mut, and other identifiers (could be made stricter)
+            token::OpenDelim(token::Paren) |    // tuple pattern
+            token::OpenDelim(token::Bracket) |  // slice pattern
+            token::BinOp(token::And) |          // reference
+            token::BinOp(token::Minus) |        // negative literal
+            token::AndAnd |                     // double reference
+            token::Literal(..) |                // literal
+            token::DotDot |                     // range pattern (future compat)
+            token::DotDotDot |                  // range pattern (future compat)
+            token::ModSep |                     // path
+            token::Lt |                         // path (UFCS constant)
+            token::BinOp(token::Shl) => true,   // path (double UFCS)
+            token::Interpolated(ref nt) => may_be_ident(nt),
+            _ => false,
+        },
+        sym::lifetime => match token.kind {
+            token::Lifetime(_) => true,
+            token::Interpolated(ref nt) => match **nt {
+                token::NtLifetime(_) | token::NtTT(_) => true,
+                _ => false,
+            },
+            _ => false,
+        },
+        _ => match token.kind {
+            token::CloseDelim(_) => false,
+            _ => true,
+        },
+    }
+}
+
+/// A call to the "black-box" parser to parse some Rust non-terminal.
+///
+/// # Parameters
+///
+/// - `p`: the "black-box" parser to use
+/// - `sp`: the `Span` we want to parse
+/// - `name`: the name of the metavar _matcher_ we want to match (e.g., `tt`, `ident`, `block`,
+///   etc...)
+///
+/// # Returns
+///
+/// The parsed non-terminal.
+fn parse_nt(p: &mut Parser<'_>, sp: Span, name: Symbol) -> Nonterminal {
+    // FIXME(Centril): Consider moving this to `parser.rs` to make
+    // the visibilities of the methods used below `pub(super)` at most.
+
+    if name == sym::tt {
+        return token::NtTT(p.parse_token_tree());
+    }
+    // check at the beginning and the parser checks after each bump
+    p.process_potential_macro_variable();
+    match parse_nt_inner(p, sp, name) {
+        Ok(nt) => nt,
+        Err(mut err) => {
+            err.emit();
+            FatalError.raise();
+        }
+    }
+}
+
+fn parse_nt_inner<'a>(p: &mut Parser<'a>, sp: Span, name: Symbol) -> PResult<'a, Nonterminal> {
+    Ok(match name {
+        sym::item => match p.parse_item()? {
+            Some(i) => token::NtItem(i),
+            None => return Err(p.fatal("expected an item keyword")),
+        },
+        sym::block => token::NtBlock(p.parse_block()?),
+        sym::stmt => match p.parse_stmt()? {
+            Some(s) => token::NtStmt(s),
+            None => return Err(p.fatal("expected a statement")),
+        },
+        sym::pat => token::NtPat(p.parse_pat(None)?),
+        sym::expr => token::NtExpr(p.parse_expr()?),
+        sym::literal => token::NtLiteral(p.parse_literal_maybe_minus()?),
+        sym::ty => token::NtTy(p.parse_ty()?),
+        // this could be handled like a token, since it is one
+        sym::ident => if let Some((name, is_raw)) = get_macro_name(&p.token) {
+            let span = p.token.span;
+            p.bump();
+            token::NtIdent(Ident::new(name, span), is_raw)
+        } else {
+            let token_str = pprust::token_to_string(&p.token);
+            return Err(p.fatal(&format!("expected ident, found {}", &token_str)));
+        }
+        sym::path => token::NtPath(p.parse_path(PathStyle::Type)?),
+        sym::meta => token::NtMeta(p.parse_attr_item()?),
+        sym::vis => token::NtVis(p.parse_visibility(true)?),
+        sym::lifetime => if p.check_lifetime() {
+            token::NtLifetime(p.expect_lifetime().ident)
+        } else {
+            let token_str = pprust::token_to_string(&p.token);
+            return Err(p.fatal(&format!("expected a lifetime, found `{}`", &token_str)));
+        }
+        // this is not supposed to happen, since it has been checked
+        // when compiling the macro.
+        _ => p.span_bug(sp, "invalid fragment specifier"),
+    })
+}
diff --git a/src/libsyntax_expand/mbe/macro_rules.rs b/src/libsyntax_expand/mbe/macro_rules.rs
new file mode 100644 (file)
index 0000000..9a4130b
--- /dev/null
@@ -0,0 +1,1192 @@
+use crate::base::{DummyResult, ExtCtxt, MacResult, TTMacroExpander};
+use crate::base::{SyntaxExtension, SyntaxExtensionKind};
+use crate::expand::{AstFragment, AstFragmentKind, ensure_complete_parse, parse_ast_fragment};
+use crate::mbe;
+use crate::mbe::macro_check;
+use crate::mbe::macro_parser::parse;
+use crate::mbe::macro_parser::{Error, Failure, Success};
+use crate::mbe::macro_parser::{MatchedNonterminal, MatchedSeq, NamedParseResult};
+use crate::mbe::transcribe::transcribe;
+
+use syntax::ast;
+use syntax::attr::{self, TransparencyError};
+use syntax::edition::Edition;
+use syntax::feature_gate::Features;
+use syntax::parse::parser::Parser;
+use syntax::parse::token::TokenKind::*;
+use syntax::parse::token::{self, NtTT, Token};
+use syntax::parse::Directory;
+use syntax::print::pprust;
+use syntax::sess::ParseSess;
+use syntax::symbol::{kw, sym, Symbol};
+use syntax::tokenstream::{DelimSpan, TokenStream};
+
+use errors::{DiagnosticBuilder, FatalError};
+use log::debug;
+use syntax_pos::hygiene::Transparency;
+use syntax_pos::Span;
+
+use rustc_data_structures::fx::FxHashMap;
+use std::borrow::Cow;
+use std::collections::hash_map::Entry;
+use std::slice;
+
+use errors::Applicability;
+use rustc_data_structures::sync::Lrc;
+
+const VALID_FRAGMENT_NAMES_MSG: &str = "valid fragment specifiers are \
+                                        `ident`, `block`, `stmt`, `expr`, `pat`, `ty`, `lifetime`, \
+                                        `literal`, `path`, `meta`, `tt`, `item` and `vis`";
+
+crate struct ParserAnyMacro<'a> {
+    parser: Parser<'a>,
+
+    /// Span of the expansion site of the macro this parser is for
+    site_span: Span,
+    /// The ident of the macro we're parsing
+    macro_ident: ast::Ident,
+    arm_span: Span,
+}
+
+crate fn annotate_err_with_kind(
+    err: &mut DiagnosticBuilder<'_>,
+    kind: AstFragmentKind,
+    span: Span,
+) {
+    match kind {
+        AstFragmentKind::Ty => {
+            err.span_label(span, "this macro call doesn't expand to a type");
+        }
+        AstFragmentKind::Pat => {
+            err.span_label(span, "this macro call doesn't expand to a pattern");
+        }
+        _ => {}
+    };
+}
+
+impl<'a> ParserAnyMacro<'a> {
+    crate fn make(mut self: Box<ParserAnyMacro<'a>>, kind: AstFragmentKind) -> AstFragment {
+        let ParserAnyMacro { site_span, macro_ident, ref mut parser, arm_span } = *self;
+        let fragment = panictry!(parse_ast_fragment(parser, kind, true).map_err(|mut e| {
+            if parser.token == token::Eof && e.message().ends_with(", found `<eof>`") {
+                if !e.span.is_dummy() {
+                    // early end of macro arm (#52866)
+                    e.replace_span_with(parser.sess.source_map().next_point(parser.token.span));
+                }
+                let msg = &e.message[0];
+                e.message[0] = (
+                    format!(
+                        "macro expansion ends with an incomplete expression: {}",
+                        msg.0.replace(", found `<eof>`", ""),
+                    ),
+                    msg.1,
+                );
+            }
+            if e.span.is_dummy() {
+                // Get around lack of span in error (#30128)
+                e.replace_span_with(site_span);
+                if parser.sess.source_map().span_to_filename(arm_span).is_real() {
+                    e.span_label(arm_span, "in this macro arm");
+                }
+            } else if !parser.sess.source_map().span_to_filename(parser.token.span).is_real() {
+                e.span_label(site_span, "in this macro invocation");
+            }
+            match kind {
+                AstFragmentKind::Pat if macro_ident.name == sym::vec => {
+                    let mut suggestion = None;
+                    if let Ok(code) = parser.sess.source_map().span_to_snippet(site_span) {
+                        if let Some(bang) = code.find('!') {
+                            suggestion = Some(code[bang + 1..].to_string());
+                        }
+                    }
+                    if let Some(suggestion) = suggestion {
+                        e.span_suggestion(
+                            site_span,
+                            "use a slice pattern here instead",
+                            suggestion,
+                            Applicability::MachineApplicable,
+                        );
+                    } else {
+                        e.span_label(
+                            site_span,
+                            "use a slice pattern here instead",
+                        );
+                    }
+                    e.help("for more information, see https://doc.rust-lang.org/edition-guide/\
+                            rust-2018/slice-patterns.html");
+                }
+                _ => annotate_err_with_kind(&mut e, kind, site_span),
+            };
+            e
+        }));
+
+        // We allow semicolons at the end of expressions -- e.g., the semicolon in
+        // `macro_rules! m { () => { panic!(); } }` isn't parsed by `.parse_expr()`,
+        // but `m!()` is allowed in expression positions (cf. issue #34706).
+        if kind == AstFragmentKind::Expr && parser.token == token::Semi {
+            parser.bump();
+        }
+
+        // Make sure we don't have any tokens left to parse so we don't silently drop anything.
+        let path = ast::Path::from_ident(macro_ident.with_span_pos(site_span));
+        ensure_complete_parse(parser, &path, kind.name(), site_span);
+        fragment
+    }
+}
+
+struct MacroRulesMacroExpander {
+    name: ast::Ident,
+    span: Span,
+    transparency: Transparency,
+    lhses: Vec<mbe::TokenTree>,
+    rhses: Vec<mbe::TokenTree>,
+    valid: bool,
+}
+
+impl TTMacroExpander for MacroRulesMacroExpander {
+    fn expand<'cx>(
+        &self,
+        cx: &'cx mut ExtCtxt<'_>,
+        sp: Span,
+        input: TokenStream,
+    ) -> Box<dyn MacResult + 'cx> {
+        if !self.valid {
+            return DummyResult::any(sp);
+        }
+        generic_extension(
+            cx, sp, self.span, self.name, self.transparency, input, &self.lhses, &self.rhses
+        )
+    }
+}
+
+fn trace_macros_note(cx: &mut ExtCtxt<'_>, sp: Span, message: String) {
+    let sp = sp.macro_backtrace().last().map(|trace| trace.call_site).unwrap_or(sp);
+    cx.expansions.entry(sp).or_default().push(message);
+}
+
+/// Given `lhses` and `rhses`, this is the new macro we create
+fn generic_extension<'cx>(
+    cx: &'cx mut ExtCtxt<'_>,
+    sp: Span,
+    def_span: Span,
+    name: ast::Ident,
+    transparency: Transparency,
+    arg: TokenStream,
+    lhses: &[mbe::TokenTree],
+    rhses: &[mbe::TokenTree],
+) -> Box<dyn MacResult + 'cx> {
+    if cx.trace_macros() {
+        let msg = format!("expanding `{}! {{ {} }}`", name, pprust::tts_to_string(arg.clone()));
+        trace_macros_note(cx, sp, msg);
+    }
+
+    // Which arm's failure should we report? (the one furthest along)
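+    // For example, for `macro_rules! m { (a b) => {}; (c) => {} }` invoked as `m!(a c)`, the
+    // first arm fails at `c` and the second at `a`; the first failure is further along, so it is
+    // the one reported below.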
+    let mut best_failure: Option<(Token, &str)> = None;
+
+    for (i, lhs) in lhses.iter().enumerate() {
+        // try each arm's matchers
+        let lhs_tt = match *lhs {
+            mbe::TokenTree::Delimited(_, ref delim) => &delim.tts[..],
+            _ => cx.span_bug(sp, "malformed macro lhs"),
+        };
+
+        match parse_tt(cx, lhs_tt, arg.clone()) {
+            Success(named_matches) => {
+                let rhs = match rhses[i] {
+                    // ignore delimiters
+                    mbe::TokenTree::Delimited(_, ref delimed) => delimed.tts.clone(),
+                    _ => cx.span_bug(sp, "malformed macro rhs"),
+                };
+                let arm_span = rhses[i].span();
+
+                let rhs_spans = rhs.iter().map(|t| t.span()).collect::<Vec<_>>();
+                // rhs has holes (`$id` and `$(...)`) that need to be filled
+                let mut tts = transcribe(cx, &named_matches, rhs, transparency);
+
+                // Replace all the tokens for the corresponding positions in the macro, to maintain
+                // proper positions in error reporting, while preserving the macro_backtrace.
+                if rhs_spans.len() == tts.len() {
+                    tts = tts.map_enumerated(|i, mut tt| {
+                        let mut sp = rhs_spans[i];
+                        sp = sp.with_ctxt(tt.span().ctxt());
+                        tt.set_span(sp);
+                        tt
+                    });
+                }
+
+                if cx.trace_macros() {
+                    let msg = format!("to `{}`", pprust::tts_to_string(tts.clone()));
+                    trace_macros_note(cx, sp, msg);
+                }
+
+                let directory = Directory {
+                    path: Cow::from(cx.current_expansion.module.directory.as_path()),
+                    ownership: cx.current_expansion.directory_ownership,
+                };
+                let mut p = Parser::new(cx.parse_sess(), tts, Some(directory), true, false, None);
+                p.root_module_name =
+                    cx.current_expansion.module.mod_path.last().map(|id| id.as_str().to_string());
+                p.last_type_ascription = cx.current_expansion.prior_type_ascription;
+
+                p.process_potential_macro_variable();
+                // Let the context choose how to interpret the result.
+                // Weird, but useful for X-macros.
+                return Box::new(ParserAnyMacro {
+                    parser: p,
+
+                    // Pass along the original expansion site and the name of the macro
+                    // so we can print a useful error message if the parse of the expanded
+                    // macro leaves unparsed tokens.
+                    site_span: sp,
+                    macro_ident: name,
+                    arm_span,
+                });
+            }
+            Failure(token, msg) => match best_failure {
+                Some((ref best_token, _)) if best_token.span.lo() >= token.span.lo() => {}
+                _ => best_failure = Some((token, msg)),
+            },
+            Error(err_sp, ref msg) => cx.span_fatal(err_sp.substitute_dummy(sp), &msg[..]),
+        }
+    }
+
+    let (token, label) = best_failure.expect("ran no matchers");
+    let span = token.span.substitute_dummy(sp);
+    let mut err = cx.struct_span_err(span, &parse_failure_msg(&token));
+    err.span_label(span, label);
+    if !def_span.is_dummy() && cx.source_map().span_to_filename(def_span).is_real() {
+        err.span_label(cx.source_map().def_span(def_span), "when calling this macro");
+    }
+
+    // Check whether there's a missing comma in this macro call, like `println!("{}" a);`
+    if let Some((arg, comma_span)) = arg.add_comma() {
+        for lhs in lhses {
+            // try each arm's matchers
+            let lhs_tt = match *lhs {
+                mbe::TokenTree::Delimited(_, ref delim) => &delim.tts[..],
+                _ => continue,
+            };
+            match parse_tt(cx, lhs_tt, arg.clone()) {
+                Success(_) => {
+                    if comma_span.is_dummy() {
+                        err.note("you might be missing a comma");
+                    } else {
+                        err.span_suggestion_short(
+                            comma_span,
+                            "missing comma here",
+                            ", ".to_string(),
+                            Applicability::MachineApplicable,
+                        );
+                    }
+                }
+                _ => {}
+            }
+        }
+    }
+    err.emit();
+    cx.trace_macros_diag();
+    DummyResult::any(sp)
+}
+
+// Note that macro-by-example's input is also matched against a token tree:
+//                   $( $lhs:tt => $rhs:tt );+
+//
+// Holy self-referential!
+
+/// Converts a macro item into a syntax extension.
+pub fn compile_declarative_macro(
+    sess: &ParseSess,
+    features: &Features,
+    def: &ast::Item,
+    edition: Edition,
+) -> SyntaxExtension {
+    let diag = &sess.span_diagnostic;
+    let lhs_nm = ast::Ident::new(sym::lhs, def.span);
+    let rhs_nm = ast::Ident::new(sym::rhs, def.span);
+    let tt_spec = ast::Ident::new(sym::tt, def.span);
+
+    // Parse the macro_rules! invocation
+    let body = match def.kind {
+        ast::ItemKind::MacroDef(ref body) => body,
+        _ => unreachable!(),
+    };
+
+    // The pattern that macro_rules matches.
+    // The grammar for macro_rules! is:
+    // $( $lhs:tt => $rhs:tt );+
+    // ...quasiquoting this would be nice.
+    // These spans won't matter anyway.
+    let argument_gram = vec![
+        mbe::TokenTree::Sequence(
+            DelimSpan::dummy(),
+            Lrc::new(mbe::SequenceRepetition {
+                tts: vec![
+                    mbe::TokenTree::MetaVarDecl(def.span, lhs_nm, tt_spec),
+                    mbe::TokenTree::token(token::FatArrow, def.span),
+                    mbe::TokenTree::MetaVarDecl(def.span, rhs_nm, tt_spec),
+                ],
+                separator: Some(Token::new(
+                    if body.legacy { token::Semi } else { token::Comma },
+                    def.span,
+                )),
+                kleene: mbe::KleeneToken::new(mbe::KleeneOp::OneOrMore, def.span),
+                num_captures: 2,
+            }),
+        ),
+        // to phase into semicolon-termination instead of semicolon-separation
+        mbe::TokenTree::Sequence(
+            DelimSpan::dummy(),
+            Lrc::new(mbe::SequenceRepetition {
+                tts: vec![mbe::TokenTree::token(
+                    if body.legacy { token::Semi } else { token::Comma },
+                    def.span,
+                )],
+                separator: None,
+                kleene: mbe::KleeneToken::new(mbe::KleeneOp::ZeroOrMore, def.span),
+                num_captures: 0,
+            }),
+        ),
+    ];
+
+    let argument_map = match parse(sess, body.stream(), &argument_gram, None, true) {
+        Success(m) => m,
+        Failure(token, msg) => {
+            let s = parse_failure_msg(&token);
+            let sp = token.span.substitute_dummy(def.span);
+            let mut err = sess.span_diagnostic.struct_span_fatal(sp, &s);
+            err.span_label(sp, msg);
+            err.emit();
+            FatalError.raise();
+        }
+        Error(sp, s) => {
+            sess.span_diagnostic.span_fatal(sp.substitute_dummy(def.span), &s).raise();
+        }
+    };
+
+    let mut valid = true;
+
+    // Extract the arguments:
+    let lhses = match argument_map[&lhs_nm] {
+        MatchedSeq(ref s, _) => s
+            .iter()
+            .map(|m| {
+                if let MatchedNonterminal(ref nt) = *m {
+                    if let NtTT(ref tt) = **nt {
+                        let tt = mbe::quoted::parse(
+                            tt.clone().into(),
+                            true,
+                            sess,
+                        )
+                        .pop()
+                        .unwrap();
+                        valid &= check_lhs_nt_follows(sess, features, &def.attrs, &tt);
+                        return tt;
+                    }
+                }
+                sess.span_diagnostic.span_bug(def.span, "wrong-structured lhs")
+            })
+            .collect::<Vec<mbe::TokenTree>>(),
+        _ => sess.span_diagnostic.span_bug(def.span, "wrong-structured lhs"),
+    };
+
+    let rhses = match argument_map[&rhs_nm] {
+        MatchedSeq(ref s, _) => s
+            .iter()
+            .map(|m| {
+                if let MatchedNonterminal(ref nt) = *m {
+                    if let NtTT(ref tt) = **nt {
+                        return mbe::quoted::parse(
+                            tt.clone().into(),
+                            false,
+                            sess,
+                        )
+                        .pop()
+                        .unwrap();
+                    }
+                }
+                sess.span_diagnostic.span_bug(def.span, "wrong-structured lhs")
+            })
+            .collect::<Vec<mbe::TokenTree>>(),
+        _ => sess.span_diagnostic.span_bug(def.span, "wrong-structured rhs"),
+    };
+
+    for rhs in &rhses {
+        valid &= check_rhs(sess, rhs);
+    }
+
+    // don't abort iteration early, so that errors for multiple lhses can be reported
+    for lhs in &lhses {
+        valid &= check_lhs_no_empty_seq(sess, slice::from_ref(lhs));
+    }
+
+    // We use CRATE_NODE_ID instead of `def.id` otherwise we may emit buffered lints for a node id
+    // that is not lint-checked and trigger the "failed to process buffered lint here" bug.
+    valid &= macro_check::check_meta_variables(sess, ast::CRATE_NODE_ID, def.span, &lhses, &rhses);
+
+    let (transparency, transparency_error) = attr::find_transparency(&def.attrs, body.legacy);
+    match transparency_error {
+        Some(TransparencyError::UnknownTransparency(value, span)) =>
+            diag.span_err(span, &format!("unknown macro transparency: `{}`", value)),
+        Some(TransparencyError::MultipleTransparencyAttrs(old_span, new_span)) =>
+            diag.span_err(vec![old_span, new_span], "multiple macro transparency attributes"),
+        None => {}
+    }
+
+    let expander: Box<_> = Box::new(MacroRulesMacroExpander {
+        name: def.ident, span: def.span, transparency, lhses, rhses, valid
+    });
+
+    SyntaxExtension::new(
+        sess,
+        SyntaxExtensionKind::LegacyBang(expander),
+        def.span,
+        Vec::new(),
+        edition,
+        def.ident.name,
+        &def.attrs,
+    )
+}
+
+fn check_lhs_nt_follows(
+    sess: &ParseSess,
+    features: &Features,
+    attrs: &[ast::Attribute],
+    lhs: &mbe::TokenTree,
+) -> bool {
+    // lhs is going to be like TokenTree::Delimited(...), where the
+    // entire lhs is those tts. Or, it can be a "bare sequence", not wrapped in parens.
+    if let mbe::TokenTree::Delimited(_, ref tts) = *lhs {
+        check_matcher(sess, features, attrs, &tts.tts)
+    } else {
+        let msg = "invalid macro matcher; matchers must be contained in balanced delimiters";
+        sess.span_diagnostic.span_err(lhs.span(), msg);
+        false
+    }
+    // We don't abort when a matcher is rejected; the driver will do that for us after
+    // parsing/expansion, so every error in every macro gets reported this way.
+}
+
+/// Checks that the lhs contains no repetition which could match an empty token
+/// tree, because then the matcher would hang indefinitely.
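+///
+/// For example (illustration): `macro_rules! m { ( $( $v:vis )* ) => {}; }` is rejected here,
+/// because `$v:vis` can match zero tokens, so the outer repetition could match an empty token
+/// tree and the matcher would never make progress.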
+fn check_lhs_no_empty_seq(sess: &ParseSess, tts: &[mbe::TokenTree]) -> bool {
+    use mbe::TokenTree;
+    for tt in tts {
+        match *tt {
+            TokenTree::Token(..) | TokenTree::MetaVar(..) | TokenTree::MetaVarDecl(..) => (),
+            TokenTree::Delimited(_, ref del) => {
+                if !check_lhs_no_empty_seq(sess, &del.tts) {
+                    return false;
+                }
+            }
+            TokenTree::Sequence(span, ref seq) => {
+                if seq.separator.is_none()
+                    && seq.tts.iter().all(|seq_tt| match *seq_tt {
+                        TokenTree::MetaVarDecl(_, _, id) => id.name == sym::vis,
+                        TokenTree::Sequence(_, ref sub_seq) => {
+                            sub_seq.kleene.op == mbe::KleeneOp::ZeroOrMore
+                                || sub_seq.kleene.op == mbe::KleeneOp::ZeroOrOne
+                        }
+                        _ => false,
+                    })
+                {
+                    let sp = span.entire();
+                    sess.span_diagnostic.span_err(sp, "repetition matches empty token tree");
+                    return false;
+                }
+                if !check_lhs_no_empty_seq(sess, &seq.tts) {
+                    return false;
+                }
+            }
+        }
+    }
+
+    true
+}
+
+fn check_rhs(sess: &ParseSess, rhs: &mbe::TokenTree) -> bool {
+    match *rhs {
+        mbe::TokenTree::Delimited(..) => return true,
+        _ => sess.span_diagnostic.span_err(rhs.span(), "macro rhs must be delimited"),
+    }
+    false
+}
+
+fn check_matcher(
+    sess: &ParseSess,
+    features: &Features,
+    attrs: &[ast::Attribute],
+    matcher: &[mbe::TokenTree],
+) -> bool {
+    let first_sets = FirstSets::new(matcher);
+    let empty_suffix = TokenSet::empty();
+    let err = sess.span_diagnostic.err_count();
+    check_matcher_core(sess, features, attrs, &first_sets, matcher, &empty_suffix);
+    err == sess.span_diagnostic.err_count()
+}
+
+// The `FirstSets` for a matcher is a mapping from subsequences in the
+// matcher to the FIRST set for that subsequence.
+//
+// This mapping is partially precomputed via a backwards scan over the
+// token trees of the matcher, which provides a mapping from each
+// repetition sequence to its *first* set.
+//
+// (Hypothetically, sequences should be uniquely identifiable via their
+// spans, though perhaps that is false, e.g., for macro-generated macros
+// that do not try to inject artificial span information. My plan is
+// to try to catch such cases ahead of time and not include them in
+// the precomputed mapping.)
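+//
+// For example (illustration): for the matcher `$( $i:ident ),* ; $e:expr`, the precomputed
+// FIRST set of the sequence `$( $i:ident ),*` is { $i:ident }, and since that sequence may
+// match zero repetitions, the FIRST set of the whole matcher is { $i:ident, `;` }.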
+struct FirstSets {
+    // this maps each TokenTree::Sequence `$(tt ...) SEP OP` that is uniquely identified by its
+    // span in the original matcher to the First set for the inner sequence `tt ...`.
+    //
+    // If two sequences have the same span in a matcher, then map that
+    // span to None (invalidating the mapping here and forcing the code to
+    // use a slow path).
+    first: FxHashMap<Span, Option<TokenSet>>,
+}
+
+impl FirstSets {
+    fn new(tts: &[mbe::TokenTree]) -> FirstSets {
+        use mbe::TokenTree;
+
+        let mut sets = FirstSets { first: FxHashMap::default() };
+        build_recur(&mut sets, tts);
+        return sets;
+
+        // walks backward over `tts`, returning the FIRST for `tts`
+        // and updating `sets` at the same time for all sequence
+        // substructure we find within `tts`.
+        fn build_recur(sets: &mut FirstSets, tts: &[TokenTree]) -> TokenSet {
+            let mut first = TokenSet::empty();
+            for tt in tts.iter().rev() {
+                match *tt {
+                    TokenTree::Token(..) | TokenTree::MetaVar(..) | TokenTree::MetaVarDecl(..) => {
+                        first.replace_with(tt.clone());
+                    }
+                    TokenTree::Delimited(span, ref delimited) => {
+                        build_recur(sets, &delimited.tts[..]);
+                        first.replace_with(delimited.open_tt(span.open));
+                    }
+                    TokenTree::Sequence(sp, ref seq_rep) => {
+                        let subfirst = build_recur(sets, &seq_rep.tts[..]);
+
+                        match sets.first.entry(sp.entire()) {
+                            Entry::Vacant(vac) => {
+                                vac.insert(Some(subfirst.clone()));
+                            }
+                            Entry::Occupied(mut occ) => {
+                                // if there is already an entry, then a span must have collided.
+                                // This should not happen with typical macro_rules macros,
+                                // but syntax extensions need not maintain distinct spans,
+                                // so distinct syntax trees can be assigned the same span.
+                                // In such a case, the map cannot be trusted; so mark this
+                                // entry as unusable.
+                                occ.insert(None);
+                            }
+                        }
+
+                        // If the sequence contents can be empty, then the first
+                        // token could be the separator token itself.
+
+                        if let (Some(sep), true) = (&seq_rep.separator, subfirst.maybe_empty) {
+                            first.add_one_maybe(TokenTree::Token(sep.clone()));
+                        }
+
+                        // Reverse scan: Sequence comes before `first`.
+                        if subfirst.maybe_empty
+                            || seq_rep.kleene.op == mbe::KleeneOp::ZeroOrMore
+                            || seq_rep.kleene.op == mbe::KleeneOp::ZeroOrOne
+                        {
+                            // If sequence is potentially empty, then
+                            // union them (preserving first emptiness).
+                            first.add_all(&TokenSet { maybe_empty: true, ..subfirst });
+                        } else {
+                            // Otherwise, sequence guaranteed
+                            // non-empty; replace first.
+                            first = subfirst;
+                        }
+                    }
+                }
+            }
+
+            first
+        }
+    }
+
+    // walks forward over `tts` until all potential FIRST tokens are
+    // identified.
+    fn first(&self, tts: &[mbe::TokenTree]) -> TokenSet {
+        use mbe::TokenTree;
+
+        let mut first = TokenSet::empty();
+        for tt in tts.iter() {
+            assert!(first.maybe_empty);
+            match *tt {
+                TokenTree::Token(..) | TokenTree::MetaVar(..) | TokenTree::MetaVarDecl(..) => {
+                    first.add_one(tt.clone());
+                    return first;
+                }
+                TokenTree::Delimited(span, ref delimited) => {
+                    first.add_one(delimited.open_tt(span.open));
+                    return first;
+                }
+                TokenTree::Sequence(sp, ref seq_rep) => {
+                    let subfirst_owned;
+                    let subfirst = match self.first.get(&sp.entire()) {
+                        Some(&Some(ref subfirst)) => subfirst,
+                        Some(&None) => {
+                            subfirst_owned = self.first(&seq_rep.tts[..]);
+                            &subfirst_owned
+                        }
+                        None => {
+                            panic!("We missed a sequence during FirstSets construction");
+                        }
+                    };
+
+                    // If the sequence contents can be empty, then the first
+                    // token could be the separator token itself.
+                    if let (Some(sep), true) = (&seq_rep.separator, subfirst.maybe_empty) {
+                        first.add_one_maybe(TokenTree::Token(sep.clone()));
+                    }
+
+                    assert!(first.maybe_empty);
+                    first.add_all(subfirst);
+                    if subfirst.maybe_empty
+                        || seq_rep.kleene.op == mbe::KleeneOp::ZeroOrMore
+                        || seq_rep.kleene.op == mbe::KleeneOp::ZeroOrOne
+                    {
+                        // Continue scanning for more first
+                        // tokens, but also make sure we
+                        // restore empty-tracking state.
+                        first.maybe_empty = true;
+                        continue;
+                    } else {
+                        return first;
+                    }
+                }
+            }
+        }
+
+        // we only exit the loop if `tts` was empty or if every
+        // element of `tts` matches the empty sequence.
+        assert!(first.maybe_empty);
+        first
+    }
+}
+
+// A set of `mbe::TokenTree`s, which may include `TokenTree::MetaVarDecl`s
+// (for macro-by-example syntactic variables). It also carries the
+// `maybe_empty` flag; that is true if and only if the matcher can
+// match an empty token sequence.
+//
+// The First set is computed on submatchers like `$($a:expr b),* $(c)* d`,
+// which has corresponding FIRST = {$a:expr, c, d}.
+// Likewise, `$($a:expr b),* $(c)+ d` has FIRST = {$a:expr, c}.
+//
+// (Notably, we must allow for *-op to occur zero times.)
+#[derive(Clone, Debug)]
+struct TokenSet {
+    tokens: Vec<mbe::TokenTree>,
+    maybe_empty: bool,
+}
+
+impl TokenSet {
+    // Returns a set for the empty sequence.
+    fn empty() -> Self {
+        TokenSet { tokens: Vec::new(), maybe_empty: true }
+    }
+
+    // Returns the set `{ tok }` for the single-token (and thus
+    // non-empty) sequence [tok].
+    fn singleton(tok: mbe::TokenTree) -> Self {
+        TokenSet { tokens: vec![tok], maybe_empty: false }
+    }
+
+    // Changes self to be the set `{ tok }`.
+    // Since `tok` is always present, marks self as non-empty.
+    fn replace_with(&mut self, tok: mbe::TokenTree) {
+        self.tokens.clear();
+        self.tokens.push(tok);
+        self.maybe_empty = false;
+    }
+
+    // Changes self to be the empty set `{}`; meant for use when
+    // the particular token does not matter, but we want to
+    // record that it occurs.
+    fn replace_with_irrelevant(&mut self) {
+        self.tokens.clear();
+        self.maybe_empty = false;
+    }
+
+    // Adds `tok` to the set for `self`, marking the sequence as non-empty.
+    fn add_one(&mut self, tok: mbe::TokenTree) {
+        if !self.tokens.contains(&tok) {
+            self.tokens.push(tok);
+        }
+        self.maybe_empty = false;
+    }
+
+    // Adds `tok` to the set for `self`. (Leaves `maybe_empty` flag alone.)
+    fn add_one_maybe(&mut self, tok: mbe::TokenTree) {
+        if !self.tokens.contains(&tok) {
+            self.tokens.push(tok);
+        }
+    }
+
+    // Adds all elements of `other` to this.
+    //
+    // (Since this is a set, we filter out duplicates.)
+    //
+    // If `other` is potentially empty, then preserves the previous
+    // setting of the empty flag of `self`. If `other` is guaranteed
+    // non-empty, then `self` is marked non-empty.
+    fn add_all(&mut self, other: &Self) {
+        for tok in &other.tokens {
+            if !self.tokens.contains(tok) {
+                self.tokens.push(tok.clone());
+            }
+        }
+        if !other.maybe_empty {
+            self.maybe_empty = false;
+        }
+    }
+}
+
+// Checks that `matcher` is internally consistent and that it
+// can legally be followed by a token `N`, for all `N` in `follow`.
+// (If `follow` is empty, then it imposes no constraint on
+// the `matcher`.)
+//
+// Returns the set of NT tokens that could possibly come last in
+// `matcher`. (If `matcher` matches the empty sequence, then
+// `maybe_empty` will be set to true.)
+//
+// Requires that `first_sets` is pre-computed for `matcher`;
+// see `FirstSets::new`.
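+//
+// For example (illustration): for the matcher `$e:expr , $i:ident` with an empty FOLLOW set,
+// the check accepts `,` after `$e:expr` (it is in the follow set of `expr`) and returns
+// { $i:ident } as the set of NT tokens that can end the matcher.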
+fn check_matcher_core(
+    sess: &ParseSess,
+    features: &Features,
+    attrs: &[ast::Attribute],
+    first_sets: &FirstSets,
+    matcher: &[mbe::TokenTree],
+    follow: &TokenSet,
+) -> TokenSet {
+    use mbe::TokenTree;
+
+    let mut last = TokenSet::empty();
+
+    // 2. For each token and suffix  [T, SUFFIX] in M:
+    // ensure that T can be followed by SUFFIX, and if SUFFIX may be empty,
+    // then ensure T can also be followed by any element of FOLLOW.
+    'each_token: for i in 0..matcher.len() {
+        let token = &matcher[i];
+        let suffix = &matcher[i + 1..];
+
+        let build_suffix_first = || {
+            let mut s = first_sets.first(suffix);
+            if s.maybe_empty {
+                s.add_all(follow);
+            }
+            s
+        };
+
+        // (we build `suffix_first` on demand below; you can tell
+        // which cases are supposed to fall through by looking for the
+        // initialization of this variable.)
+        let suffix_first;
+
+        // First, update `last` so that it corresponds to the set
+        // of NT tokens that might end the sequence `... token`.
+        match *token {
+            TokenTree::Token(..) | TokenTree::MetaVar(..) | TokenTree::MetaVarDecl(..) => {
+                let can_be_followed_by_any;
+                if let Err(bad_frag) = has_legal_fragment_specifier(sess, features, attrs, token) {
+                    let msg = format!("invalid fragment specifier `{}`", bad_frag);
+                    sess.span_diagnostic
+                        .struct_span_err(token.span(), &msg)
+                        .help(VALID_FRAGMENT_NAMES_MSG)
+                        .emit();
+                    // (This eliminates false positives and duplicates
+                    // from error messages.)
+                    can_be_followed_by_any = true;
+                } else {
+                    can_be_followed_by_any = token_can_be_followed_by_any(token);
+                }
+
+                if can_be_followed_by_any {
+                    // don't need to track tokens that work with any,
+                    last.replace_with_irrelevant();
+                    // ... and don't need to check tokens that can be
+                    // followed by anything against SUFFIX.
+                    continue 'each_token;
+                } else {
+                    last.replace_with(token.clone());
+                    suffix_first = build_suffix_first();
+                }
+            }
+            TokenTree::Delimited(span, ref d) => {
+                let my_suffix = TokenSet::singleton(d.close_tt(span.close));
+                check_matcher_core(sess, features, attrs, first_sets, &d.tts, &my_suffix);
+                // don't track non NT tokens
+                last.replace_with_irrelevant();
+
+                // also, we don't need to check delimited sequences
+                // against SUFFIX
+                continue 'each_token;
+            }
+            TokenTree::Sequence(_, ref seq_rep) => {
+                suffix_first = build_suffix_first();
+                // The trick here: when we check the interior, we want
+                // to include the separator (if any) as a potential
+                // (but not guaranteed) element of FOLLOW. So in that
+                // case, we make a temp copy of suffix and stuff
+                // delimiter in there.
+                //
+                // FIXME: Should I first scan suffix_first to see if
+                // delimiter is already in it before I go through the
+                // work of cloning it? But then again, this way I may
+                // get a "tighter" span?
+                let mut new;
+                let my_suffix = if let Some(sep) = &seq_rep.separator {
+                    new = suffix_first.clone();
+                    new.add_one_maybe(TokenTree::Token(sep.clone()));
+                    &new
+                } else {
+                    &suffix_first
+                };
+
+                // At this point, `suffix_first` is built, and
+                // `my_suffix` is some TokenSet that we can use
+                // for checking the interior of `seq_rep`.
+                let next =
+                    check_matcher_core(sess, features, attrs, first_sets, &seq_rep.tts, my_suffix);
+                if next.maybe_empty {
+                    last.add_all(&next);
+                } else {
+                    last = next;
+                }
+
+                // the recursive call to check_matcher_core already ran the 'each_last
+                // check below, so we can just keep going forward here.
+                continue 'each_token;
+            }
+        }
+
+        // (`suffix_first` guaranteed initialized once reaching here.)
+
+        // Now `last` holds the complete set of NT tokens that could
+        // end the sequence before SUFFIX. Check that every one works with `suffix`.
+        'each_last: for token in &last.tokens {
+            if let TokenTree::MetaVarDecl(_, name, frag_spec) = *token {
+                for next_token in &suffix_first.tokens {
+                    match is_in_follow(next_token, frag_spec.name) {
+                        IsInFollow::Invalid(msg, help) => {
+                            sess.span_diagnostic
+                                .struct_span_err(next_token.span(), &msg)
+                                .help(help)
+                                .emit();
+                            // don't bother reporting every source of
+                            // conflict for a particular element of `last`.
+                            continue 'each_last;
+                        }
+                        IsInFollow::Yes => {}
+                        IsInFollow::No(possible) => {
+                            let may_be = if last.tokens.len() == 1 && suffix_first.tokens.len() == 1
+                            {
+                                "is"
+                            } else {
+                                "may be"
+                            };
+
+                            let sp = next_token.span();
+                            let mut err = sess.span_diagnostic.struct_span_err(
+                                sp,
+                                &format!(
+                                    "`${name}:{frag}` {may_be} followed by `{next}`, which \
+                                     is not allowed for `{frag}` fragments",
+                                    name = name,
+                                    frag = frag_spec,
+                                    next = quoted_tt_to_string(next_token),
+                                    may_be = may_be
+                                ),
+                            );
+                            err.span_label(
+                                sp,
+                                format!("not allowed after `{}` fragments", frag_spec),
+                            );
+                            let msg = "allowed there are: ";
+                            match possible {
+                                &[] => {}
+                                &[t] => {
+                                    err.note(&format!(
+                                        "only {} is allowed after `{}` fragments",
+                                        t, frag_spec,
+                                    ));
+                                }
+                                ts => {
+                                    err.note(&format!(
+                                        "{}{} or {}",
+                                        msg,
+                                        ts[..ts.len() - 1]
+                                            .iter()
+                                            .map(|s| *s)
+                                            .collect::<Vec<_>>()
+                                            .join(", "),
+                                        ts[ts.len() - 1],
+                                    ));
+                                }
+                            }
+                            err.emit();
+                        }
+                    }
+                }
+            }
+        }
+    }
+    last
+}
+
+fn token_can_be_followed_by_any(tok: &mbe::TokenTree) -> bool {
+    if let mbe::TokenTree::MetaVarDecl(_, _, frag_spec) = *tok {
+        frag_can_be_followed_by_any(frag_spec.name)
+    } else {
+        // (Non-NTs can always be followed by anything in matchers.)
+        true
+    }
+}
+
+/// Returns `true` if a fragment of type `frag` can be followed by any sort of
+/// token. We use this (among other things) as a useful approximation
+/// for when `frag` can be followed by a repetition like `$(...)*` or
+/// `$(...)+`. In general, these can be a bit tricky to reason about,
+/// so we adopt a conservative position that says that any fragment
+/// specifier which consumes at most one token tree can be followed by
+/// a fragment specifier (indeed, these fragments can be followed by
+/// ANYTHING without fear of future compatibility hazards).
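+///
+/// For example (illustration), `$t:tt $( $rest:tt )*` is accepted: `tt` consumes exactly one
+/// token tree, so no future change to the grammar can affect where the repetition begins.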
+fn frag_can_be_followed_by_any(frag: Symbol) -> bool {
+    match frag {
+        sym::item     | // always terminated by `}` or `;`
+        sym::block    | // exactly one token tree
+        sym::ident    | // exactly one token tree
+        sym::literal  | // exactly one token tree
+        sym::meta     | // exactly one token tree
+        sym::lifetime | // exactly one token tree
+        sym::tt =>   // exactly one token tree
+            true,
+
+        _ =>
+            false,
+    }
+}
+
+enum IsInFollow {
+    Yes,
+    No(&'static [&'static str]),
+    Invalid(String, &'static str),
+}
+
+/// Determines whether `frag` can legally be followed by the token `tok`. For
+/// fragments that can consume an unbounded number of tokens, `tok`
+/// must be within a well-defined follow set. This is intended to
+/// guarantee future compatibility: for example, without this rule, if
+/// we expanded `expr` to include a new binary operator, we might
+/// break macros that were relying on that binary operator as a
+/// separator.
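+///
+/// For example (illustration): `macro_rules! m { ($e:expr + $t:tt) => {}; }` is rejected,
+/// because `+` is not in the follow set of `expr` (only `=>`, `,`, and `;` are); accepting it
+/// could let a future extension of expression syntax change what this matcher matches.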
+// when changing this do not forget to update doc/book/macros.md!
+fn is_in_follow(tok: &mbe::TokenTree, frag: Symbol) -> IsInFollow {
+    use mbe::TokenTree;
+
+    if let TokenTree::Token(Token { kind: token::CloseDelim(_), .. }) = *tok {
+        // closing a token tree can never be matched by any fragment;
+        // iow, we always require that `(` and `)` match, etc.
+        IsInFollow::Yes
+    } else {
+        match frag {
+            sym::item => {
+                // since items *must* be followed by either a `;` or a `}`, we can
+                // accept anything after them
+                IsInFollow::Yes
+            }
+            sym::block => {
+                // anything can follow block, the braces provide an easy boundary to
+                // maintain
+                IsInFollow::Yes
+            }
+            sym::stmt | sym::expr => {
+                const TOKENS: &[&str] = &["`=>`", "`,`", "`;`"];
+                match tok {
+                    TokenTree::Token(token) => match token.kind {
+                        FatArrow | Comma | Semi => IsInFollow::Yes,
+                        _ => IsInFollow::No(TOKENS),
+                    },
+                    _ => IsInFollow::No(TOKENS),
+                }
+            }
+            sym::pat => {
+                const TOKENS: &[&str] = &["`=>`", "`,`", "`=`", "`|`", "`if`", "`in`"];
+                match tok {
+                    TokenTree::Token(token) => match token.kind {
+                        FatArrow | Comma | Eq | BinOp(token::Or) => IsInFollow::Yes,
+                        Ident(name, false) if name == kw::If || name == kw::In => IsInFollow::Yes,
+                        _ => IsInFollow::No(TOKENS),
+                    },
+                    _ => IsInFollow::No(TOKENS),
+                }
+            }
+            sym::path | sym::ty => {
+                const TOKENS: &[&str] = &[
+                    "`{`", "`[`", "`=>`", "`,`", "`>`", "`=`", "`:`", "`;`", "`|`", "`as`",
+                    "`where`",
+                ];
+                match tok {
+                    TokenTree::Token(token) => match token.kind {
+                        OpenDelim(token::DelimToken::Brace)
+                        | OpenDelim(token::DelimToken::Bracket)
+                        | Comma
+                        | FatArrow
+                        | Colon
+                        | Eq
+                        | Gt
+                        | BinOp(token::Shr)
+                        | Semi
+                        | BinOp(token::Or) => IsInFollow::Yes,
+                        Ident(name, false) if name == kw::As || name == kw::Where => {
+                            IsInFollow::Yes
+                        }
+                        _ => IsInFollow::No(TOKENS),
+                    },
+                    TokenTree::MetaVarDecl(_, _, frag) if frag.name == sym::block => {
+                        IsInFollow::Yes
+                    }
+                    _ => IsInFollow::No(TOKENS),
+                }
+            }
+            sym::ident | sym::lifetime => {
+                // being a single token, idents and lifetimes are harmless
+                IsInFollow::Yes
+            }
+            sym::literal => {
+                // literals may be of a single token, or two tokens (negative numbers)
+                IsInFollow::Yes
+            }
+            sym::meta | sym::tt => {
+                // being either a single token or a delimited sequence, tt is
+                // harmless
+                IsInFollow::Yes
+            }
+            sym::vis => {
+                // Explicitly disallow `priv`, on the off chance it comes back.
+                const TOKENS: &[&str] = &["`,`", "an ident", "a type"];
+                match tok {
+                    TokenTree::Token(token) => match token.kind {
+                        Comma => IsInFollow::Yes,
+                        Ident(name, is_raw) if is_raw || name != kw::Priv => IsInFollow::Yes,
+                        _ => {
+                            if token.can_begin_type() {
+                                IsInFollow::Yes
+                            } else {
+                                IsInFollow::No(TOKENS)
+                            }
+                        }
+                    },
+                    TokenTree::MetaVarDecl(_, _, frag)
+                        if frag.name == sym::ident
+                            || frag.name == sym::ty
+                            || frag.name == sym::path =>
+                    {
+                        IsInFollow::Yes
+                    }
+                    _ => IsInFollow::No(TOKENS),
+                }
+            }
+            kw::Invalid => IsInFollow::Yes,
+            _ => IsInFollow::Invalid(
+                format!("invalid fragment specifier `{}`", frag),
+                VALID_FRAGMENT_NAMES_MSG,
+            ),
+        }
+    }
+}
+
+fn has_legal_fragment_specifier(
+    sess: &ParseSess,
+    features: &Features,
+    attrs: &[ast::Attribute],
+    tok: &mbe::TokenTree,
+) -> Result<(), String> {
+    debug!("has_legal_fragment_specifier({:?})", tok);
+    if let mbe::TokenTree::MetaVarDecl(_, _, ref frag_spec) = *tok {
+        let frag_span = tok.span();
+        if !is_legal_fragment_specifier(sess, features, attrs, frag_spec.name, frag_span) {
+            return Err(frag_spec.to_string());
+        }
+    }
+    Ok(())
+}
+
+fn is_legal_fragment_specifier(
+    _sess: &ParseSess,
+    _features: &Features,
+    _attrs: &[ast::Attribute],
+    frag_name: Symbol,
+    _frag_span: Span,
+) -> bool {
+    /*
+     * If new fragment specifiers are invented in nightly, `_sess`,
+     * `_features`, `_attrs`, and `_frag_span` will be useful here
+     * for checking against feature gates. See past versions of
+     * this function.
+     */
+    match frag_name {
+        sym::item
+        | sym::block
+        | sym::stmt
+        | sym::expr
+        | sym::pat
+        | sym::lifetime
+        | sym::path
+        | sym::ty
+        | sym::ident
+        | sym::meta
+        | sym::tt
+        | sym::vis
+        | sym::literal
+        | kw::Invalid => true,
+        _ => false,
+    }
+}
+
+fn quoted_tt_to_string(tt: &mbe::TokenTree) -> String {
+    match *tt {
+        mbe::TokenTree::Token(ref token) => pprust::token_to_string(&token),
+        mbe::TokenTree::MetaVar(_, name) => format!("${}", name),
+        mbe::TokenTree::MetaVarDecl(_, name, kind) => format!("${}:{}", name, kind),
+        _ => panic!(
+            "unexpected mbe::TokenTree::{{Sequence or Delimited}} \
+             in follow set checker"
+        ),
+    }
+}
+
+/// Use this token tree as a matcher to parse given tts.
+fn parse_tt(cx: &ExtCtxt<'_>, mtch: &[mbe::TokenTree], tts: TokenStream) -> NamedParseResult {
+    // `None` is because we're not interpolating
+    let directory = Directory {
+        path: Cow::from(cx.current_expansion.module.directory.as_path()),
+        ownership: cx.current_expansion.directory_ownership,
+    };
+    parse(cx.parse_sess(), tts, mtch, Some(directory), true)
+}
+
+/// Generates an appropriate parsing failure message. For EOF, this is "unexpected end...". For
+/// other tokens, this is "no rules expected the token...".
+fn parse_failure_msg(tok: &Token) -> String {
+    match tok.kind {
+        token::Eof => "unexpected end of macro invocation".to_string(),
+        _ => format!(
+            "no rules expected the token `{}`",
+            pprust::token_to_string(tok),
+        ),
+    }
+}
diff --git a/src/libsyntax_expand/mbe/quoted.rs b/src/libsyntax_expand/mbe/quoted.rs
new file mode 100644 (file)
index 0000000..cedd592
--- /dev/null
@@ -0,0 +1,264 @@
+use crate::mbe::macro_parser;
+use crate::mbe::{TokenTree, KleeneOp, KleeneToken, SequenceRepetition, Delimited};
+
+use syntax::ast;
+use syntax::parse::token::{self, Token};
+use syntax::print::pprust;
+use syntax::sess::ParseSess;
+use syntax::symbol::kw;
+use syntax::tokenstream;
+
+use syntax_pos::Span;
+
+use rustc_data_structures::sync::Lrc;
+
+/// Takes a `tokenstream::TokenStream` and returns a `Vec<self::TokenTree>`. Specifically, this
+/// takes a generic `TokenStream`, such as is used in the rest of the compiler, and returns a
+/// collection of `TokenTree` for use in parsing a macro.
+///
+/// # Parameters
+///
+/// - `input`: a token stream to read from, the contents of which we are parsing.
+/// - `expect_matchers`: `parse` can be used to parse either the "patterns" or the "body" of a
+///   macro. Both take roughly the same form _except_ that in a pattern, metavars are declared with
+///   their "matcher" type. For example `$var:expr` or `$id:ident`. In this example, `expr` and
+///   `ident` are "matchers". They are not present in the body of a macro rule -- just in the
+///   pattern, so we pass a parameter to indicate whether to expect them or not.
+/// - `sess`: the parsing session. Any errors will be emitted to this session.
+///
+/// # Returns
+///
+/// A collection of `self::TokenTree`. There may also be some errors emitted to `sess`.
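+///
+/// For example (illustration): with `expect_matchers == true`, the stream `$x:ident , $e:expr`
+/// parses into roughly `[MetaVarDecl(x, ident), Token(','), MetaVarDecl(e, expr)]` (spans
+/// elided).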
+pub(super) fn parse(
+    input: tokenstream::TokenStream,
+    expect_matchers: bool,
+    sess: &ParseSess,
+) -> Vec<TokenTree> {
+    // Will contain the final collection of `self::TokenTree`
+    let mut result = Vec::new();
+
+    // For each token tree in `input`, parse the token into a `self::TokenTree`, consuming
+    // additional trees if need be.
+    let mut trees = input.trees();
+    while let Some(tree) = trees.next() {
+        // Given the parsed tree, if there is a metavar and we are expecting matchers, actually
+        // parse out the matcher (i.e., in `$id:ident` this would parse the `:` and `ident`).
+        let tree = parse_tree(
+            tree,
+            &mut trees,
+            expect_matchers,
+            sess,
+        );
+        match tree {
+            TokenTree::MetaVar(start_sp, ident) if expect_matchers => {
+                let span = match trees.next() {
+                    Some(tokenstream::TokenTree::Token(Token { kind: token::Colon, span })) => {
+                        match trees.next() {
+                            Some(tokenstream::TokenTree::Token(token)) => match token.ident() {
+                                Some((kind, _)) => {
+                                    let span = token.span.with_lo(start_sp.lo());
+                                    result.push(TokenTree::MetaVarDecl(span, ident, kind));
+                                    continue;
+                                }
+                                _ => token.span,
+                            },
+                            tree => tree.as_ref().map(tokenstream::TokenTree::span).unwrap_or(span),
+                        }
+                    }
+                    tree => tree.as_ref().map(tokenstream::TokenTree::span).unwrap_or(start_sp),
+                };
+                sess.missing_fragment_specifiers.borrow_mut().insert(span);
+                result.push(TokenTree::MetaVarDecl(span, ident, ast::Ident::invalid()));
+            }
+
+            // Not a metavar or no matchers allowed, so just return the tree
+            _ => result.push(tree),
+        }
+    }
+    result
+}
+
+/// Takes a `tokenstream::TokenTree` and returns a `self::TokenTree`. Specifically, this takes a
+/// generic `TokenTree`, such as is used in the rest of the compiler, and returns a `TokenTree`
+/// for use in parsing a macro.
+///
+/// Converting the given tree may involve reading more tokens.
+///
+/// # Parameters
+///
+/// - `tree`: the tree we wish to convert.
+/// - `trees`: an iterator over trees. We may need to read more tokens from it in order to finish
+///   converting `tree`
+/// - `expect_matchers`: same as for `parse` (see above).
+/// - `sess`: the parsing session. Any errors will be emitted to this session.
+fn parse_tree(
+    tree: tokenstream::TokenTree,
+    trees: &mut impl Iterator<Item = tokenstream::TokenTree>,
+    expect_matchers: bool,
+    sess: &ParseSess,
+) -> TokenTree {
+    // Depending on what `tree` is, we could be parsing different parts of a macro
+    match tree {
+        // `tree` is a `$` token. Look at the next token in `trees`
+        tokenstream::TokenTree::Token(Token { kind: token::Dollar, span }) => match trees.next() {
+            // `tree` is followed by a delimited set of token trees. This indicates the beginning
+            // of a repetition sequence in the macro (e.g. `$(pat)*`).
+            Some(tokenstream::TokenTree::Delimited(span, delim, tts)) => {
+                // Must have `(` not `{` or `[`
+                if delim != token::Paren {
+                    let tok = pprust::token_kind_to_string(&token::OpenDelim(delim));
+                    let msg = format!("expected `(`, found `{}`", tok);
+                    sess.span_diagnostic.span_err(span.entire(), &msg);
+                }
+                // Parse the contents of the sequence itself
+                let sequence = parse(
+                    tts.into(),
+                    expect_matchers,
+                    sess,
+                );
+                // Get the Kleene operator and optional separator
+                let (separator, kleene) = parse_sep_and_kleene_op(trees, span.entire(), sess);
+                // Count the number of captured "names" (i.e., named metavars)
+                let name_captures = macro_parser::count_names(&sequence);
+                TokenTree::Sequence(
+                    span,
+                    Lrc::new(SequenceRepetition {
+                        tts: sequence,
+                        separator,
+                        kleene,
+                        num_captures: name_captures,
+                    }),
+                )
+            }
+
+            // `tree` is followed by an `ident`. This could be `$meta_var` or the `$crate` special
+            // metavariable that names the crate of the invocation.
+            Some(tokenstream::TokenTree::Token(token)) if token.is_ident() => {
+                let (ident, is_raw) = token.ident().unwrap();
+                let span = ident.span.with_lo(span.lo());
+                if ident.name == kw::Crate && !is_raw {
+                    TokenTree::token(token::Ident(kw::DollarCrate, is_raw), span)
+                } else {
+                    TokenTree::MetaVar(span, ident)
+                }
+            }
+
+            // `tree` is followed by a random token. This is an error.
+            Some(tokenstream::TokenTree::Token(token)) => {
+                let msg =
+                    format!("expected identifier, found `{}`", pprust::token_to_string(&token),);
+                sess.span_diagnostic.span_err(token.span, &msg);
+                TokenTree::MetaVar(token.span, ast::Ident::invalid())
+            }
+
+            // There are no more tokens. Just return the `$` we already have.
+            None => TokenTree::token(token::Dollar, span),
+        },
+
+        // `tree` is an arbitrary token. Keep it.
+        tokenstream::TokenTree::Token(token) => TokenTree::Token(token),
+
+        // `tree` is the beginning of a delimited set of tokens (e.g., `(` or `{`). We need to
+        // descend into the delimited set and further parse it.
+        tokenstream::TokenTree::Delimited(span, delim, tts) => TokenTree::Delimited(
+            span,
+            Lrc::new(Delimited {
+                delim,
+                tts: parse(
+                    tts.into(),
+                    expect_matchers,
+                    sess,
+                ),
+            }),
+        ),
+    }
+}
+
+/// Takes a token and returns `Some(KleeneOp)` if the token is `+`, `*`, or `?`. Otherwise,
+/// returns `None`.
+fn kleene_op(token: &Token) -> Option<KleeneOp> {
+    match token.kind {
+        token::BinOp(token::Star) => Some(KleeneOp::ZeroOrMore),
+        token::BinOp(token::Plus) => Some(KleeneOp::OneOrMore),
+        token::Question => Some(KleeneOp::ZeroOrOne),
+        _ => None,
+    }
+}
+
+/// Parse the next token tree of the input looking for a KleeneOp. Returns
+///
+/// - Ok(Ok((op, span))) if the next token tree is a KleeneOp
+/// - Ok(Err(tok)) if the next token tree is a token but not a KleeneOp
+/// - Err(span) if the next token tree is not a token
+fn parse_kleene_op(
+    input: &mut impl Iterator<Item = tokenstream::TokenTree>,
+    span: Span,
+) -> Result<Result<(KleeneOp, Span), Token>, Span> {
+    match input.next() {
+        Some(tokenstream::TokenTree::Token(token)) => match kleene_op(&token) {
+            Some(op) => Ok(Ok((op, token.span))),
+            None => Ok(Err(token)),
+        },
+        tree => Err(tree.as_ref().map(tokenstream::TokenTree::span).unwrap_or(span)),
+    }
+}
+
+/// Attempts to parse a single Kleene operator, possibly with a separator.
+///
+/// For example, in a pattern such as `$(a),*`, `a` is the pattern to be repeated, `,` is the
+/// separator, and `*` is the Kleene operator. This function is specifically concerned with parsing
+/// the last two tokens of such a pattern: namely, the optional separator and the Kleene operator
+/// itself. Note that here we are parsing the _macro_ itself, rather than trying to match some
+/// stream of tokens in an invocation of a macro.
+///
+/// This function will take some input iterator `input` corresponding to `span` and a parsing
+/// session `sess`. If the next one (or possibly two) tokens in `input` correspond to a Kleene
+/// operator and separator, then a tuple with `(separator, KleeneOp)` is returned. Otherwise, an
+/// error with the appropriate span is emitted to `sess` and a dummy value is returned.
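+///
+/// For example (illustration): for the tail of `$(a),*` this returns the `,` separator together
+/// with `KleeneOp::ZeroOrMore`, while for `$(a),?` an error is emitted because the `?` operator
+/// does not take a separator.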
+fn parse_sep_and_kleene_op(
+    input: &mut impl Iterator<Item = tokenstream::TokenTree>,
+    span: Span,
+    sess: &ParseSess,
+) -> (Option<Token>, KleeneToken) {
+    // We basically look at two token trees here, denoted as #1 and #2 below
+    let span = match parse_kleene_op(input, span) {
+        // #1 is a `?`, `+`, or `*` KleeneOp
+        Ok(Ok((op, span))) => return (None, KleeneToken::new(op, span)),
+
+        // #1 is a separator followed by #2, a KleeneOp
+        Ok(Err(token)) => match parse_kleene_op(input, token.span) {
+            // #2 is the `?` Kleene op, which does not take a separator (error)
+            Ok(Ok((KleeneOp::ZeroOrOne, span))) => {
+                // Error!
+                sess.span_diagnostic.span_err(
+                    token.span,
+                    "the `?` macro repetition operator does not take a separator",
+                );
+
+                // Return a dummy
+                return (None, KleeneToken::new(KleeneOp::ZeroOrMore, span));
+            }
+
+            // #2 is a KleeneOp :D
+            Ok(Ok((op, span))) => return (Some(token), KleeneToken::new(op, span)),
+
+            // #2 is a random token or not a token at all :(
+            Ok(Err(Token { span, .. })) | Err(span) => span,
+        },
+
+        // #1 is not a token
+        Err(span) => span,
+    };
+
+    // If we ever get to this point, we have experienced an "unexpected token" error
+    sess.span_diagnostic.span_err(span, "expected one of: `*`, `+`, or `?`");
+
+    // Return a dummy
+    (None, KleeneToken::new(KleeneOp::ZeroOrMore, span))
+}
diff --git a/src/libsyntax_expand/mbe/transcribe.rs b/src/libsyntax_expand/mbe/transcribe.rs
new file mode 100644 (file)
index 0000000..94523bb
--- /dev/null
@@ -0,0 +1,399 @@
+use crate::base::ExtCtxt;
+use crate::mbe;
+use crate::mbe::macro_parser::{MatchedNonterminal, MatchedSeq, NamedMatch};
+
+use syntax::ast::{Ident, Mac};
+use syntax::mut_visit::{self, MutVisitor};
+use syntax::parse::token::{self, NtTT, Token};
+use syntax::tokenstream::{DelimSpan, TokenStream, TokenTree, TreeAndJoint};
+
+use smallvec::{smallvec, SmallVec};
+
+use errors::pluralise;
+use rustc_data_structures::fx::FxHashMap;
+use rustc_data_structures::sync::Lrc;
+use syntax_pos::hygiene::{ExpnId, Transparency};
+use syntax_pos::Span;
+
+use std::mem;
+
+// A Marker adds the given mark to the syntax context.
+struct Marker(ExpnId, Transparency);
+
+impl MutVisitor for Marker {
+    fn visit_span(&mut self, span: &mut Span) {
+        *span = span.apply_mark(self.0, self.1)
+    }
+
+    fn visit_mac(&mut self, mac: &mut Mac) {
+        mut_visit::noop_visit_mac(mac, self)
+    }
+}
+
+impl Marker {
+    fn visit_delim_span(&mut self, dspan: &mut DelimSpan) {
+        self.visit_span(&mut dspan.open);
+        self.visit_span(&mut dspan.close);
+    }
+}
+
+/// An iterator over the token trees in a delimited token tree (`{ ... }`) or a sequence (`$(...)`).
+enum Frame {
+    Delimited { forest: Lrc<mbe::Delimited>, idx: usize, span: DelimSpan },
+    Sequence { forest: Lrc<mbe::SequenceRepetition>, idx: usize, sep: Option<Token> },
+}
+
+impl Frame {
+    /// Construct a new frame around the delimited set of tokens.
+    fn new(tts: Vec<mbe::TokenTree>) -> Frame {
+        let forest = Lrc::new(mbe::Delimited { delim: token::NoDelim, tts });
+        Frame::Delimited { forest, idx: 0, span: DelimSpan::dummy() }
+    }
+}
+
+impl Iterator for Frame {
+    type Item = mbe::TokenTree;
+
+    fn next(&mut self) -> Option<mbe::TokenTree> {
+        match *self {
+            Frame::Delimited { ref forest, ref mut idx, .. } => {
+                *idx += 1;
+                forest.tts.get(*idx - 1).cloned()
+            }
+            Frame::Sequence { ref forest, ref mut idx, .. } => {
+                *idx += 1;
+                forest.tts.get(*idx - 1).cloned()
+            }
+        }
+    }
+}
+
+/// This can do Macro-By-Example transcription.
+/// - `interp` is a map of meta-variables to the tokens (non-terminals) they matched in the
+///   invocation. We are assuming we already know there is a match.
+/// - `src` is the RHS of the MBE, that is, the "example" we are filling in.
+///
+/// For example,
+///
+/// ```rust
+/// macro_rules! foo {
+///     ($id:ident) => { println!("{}", stringify!($id)); }
+/// }
+///
+/// foo!(bar);
+/// ```
+///
+/// `interp` would contain `$id => bar` and `src` would contain `println!("{}", stringify!($id));`.
+///
+/// `transcribe` would return a `TokenStream` containing `println!("{}", stringify!(bar));`.
+///
+/// Along the way, we do some additional error checking.
+pub(super) fn transcribe(
+    cx: &ExtCtxt<'_>,
+    interp: &FxHashMap<Ident, NamedMatch>,
+    src: Vec<mbe::TokenTree>,
+    transparency: Transparency,
+) -> TokenStream {
+    // Nothing for us to transcribe...
+    if src.is_empty() {
+        return TokenStream::default();
+    }
+
+    // We descend into the RHS (`src`), expanding things as we go. This stack contains the things
+    // we have yet to expand/are still expanding. We start the stack off with the whole RHS.
+    let mut stack: SmallVec<[Frame; 1]> = smallvec![Frame::new(src)];
+
+    // As we descend in the RHS, we will need to be able to match nested sequences of matchers.
+    // `repeats` keeps track of where we are in matching at each level, with the last element being
+    // the most deeply nested sequence. This is used as a stack.
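+    // For example (illustration): while transcribing the innermost `$x` of `$( $( $x )* )*`,
+    // `repeats` holds two `(index, length)` pairs, one for the outer and one for the inner
+    // sequence.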
+    let mut repeats = Vec::new();
+
+    // `result` contains the resulting token stream from the TokenTree we just finished
+    // processing. At the end, this will contain the full result of transcription, but at
+    // arbitrary points during `transcribe`, `result` will contain subsets of the final result.
+    //
+    // Specifically, as we descend into each TokenTree, we will push the existing results onto the
+    // `result_stack` and clear `result`. We will then produce the results of transcribing the
+    // TokenTree into `result`. Then, as we unwind back out of the `TokenTree`, we will pop the
+    // `result_stack` and append `result` to it to produce the new `result` up to that point.
+    //
+    // Thus, if we try to pop the `result_stack` and it is empty, we have reached the top-level
+    // again, and we are done transcribing.
+    let mut result: Vec<TreeAndJoint> = Vec::new();
+    let mut result_stack = Vec::new();
+    let mut marker = Marker(cx.current_expansion.id, transparency);
+
+    loop {
+        // Look at the last frame on the stack.
+        let tree = if let Some(tree) = stack.last_mut().unwrap().next() {
+            // If it still has a TokenTree we have not looked at yet, use that tree.
+            tree
+        }
+        // The else-case never produces a value for `tree` (it `continue`s or `return`s).
+        else {
+            // Otherwise, if we have just reached the end of a sequence and we can keep repeating,
+            // go back to the beginning of the sequence.
+            if let Frame::Sequence { idx, sep, .. } = stack.last_mut().unwrap() {
+                let (repeat_idx, repeat_len) = repeats.last_mut().unwrap();
+                *repeat_idx += 1;
+                if repeat_idx < repeat_len {
+                    *idx = 0;
+                    if let Some(sep) = sep {
+                        result.push(TokenTree::Token(sep.clone()).into());
+                    }
+                    continue;
+                }
+            }
+
+            // We are done with the top of the stack. Pop it. Depending on what it was, we do
+            // different things. Note that the outermost item must be the delimited, wrapped RHS
+            // that was passed in originally to `transcribe`.
+            match stack.pop().unwrap() {
+                // Done with a sequence. Pop from repeats.
+                Frame::Sequence { .. } => {
+                    repeats.pop();
+                }
+
+                // We are done processing a Delimited. If this is the top-level delimited, we are
+                // done. Otherwise, we unwind the result_stack to append what we have produced to
+                // any previous results.
+                Frame::Delimited { forest, span, .. } => {
+                    if result_stack.is_empty() {
+                        // No results left to compute! We are back at the top-level.
+                        return TokenStream::new(result);
+                    }
+
+                    // Step back into the parent Delimited.
+                    let tree =
+                        TokenTree::Delimited(span, forest.delim, TokenStream::new(result).into());
+                    result = result_stack.pop().unwrap();
+                    result.push(tree.into());
+                }
+            }
+            continue;
+        };
+
+        // At this point, we know we are in the middle of a TokenTree (the last one on `stack`).
+        // `tree` contains the next `TokenTree` to be processed.
+        match tree {
+            // We are descending into a sequence. We first make sure that the matchers in the RHS
+            // and the matches in `interp` have the same shape. Otherwise, either the caller or the
+            // macro writer has made a mistake.
+            seq @ mbe::TokenTree::Sequence(..) => {
+                match lockstep_iter_size(&seq, interp, &repeats) {
+                    LockstepIterSize::Unconstrained => {
+                        cx.span_fatal(
+                            seq.span(), /* blame macro writer */
+                            "attempted to repeat an expression containing no syntax variables \
+                             matched as repeating at this depth",
+                        );
+                    }
+
+                    LockstepIterSize::Contradiction(ref msg) => {
+                        // FIXME: this really ought to be caught at macro definition time... It
+                        // happens when two meta-variables are used in the same repetition in a
+                        // sequence, but they come from different sequence matchers and repeat
+                        // different amounts.
+                        cx.span_fatal(seq.span(), &msg[..]);
+                    }
+
+                    LockstepIterSize::Constraint(len, _) => {
+                        // We do this to avoid an extra clone above. We know that this is a
+                        // sequence already.
+                        let (sp, seq) = if let mbe::TokenTree::Sequence(sp, seq) = seq {
+                            (sp, seq)
+                        } else {
+                            unreachable!()
+                        };
+
+                        // Is the repetition empty?
+                        if len == 0 {
+                            if seq.kleene.op == mbe::KleeneOp::OneOrMore {
+                                // FIXME: this really ought to be caught at macro definition
+                                // time... It happens when the Kleene operator in the matcher and
+                                // the body for the same meta-variable do not match.
+                                cx.span_fatal(sp.entire(), "this must repeat at least once");
+                            }
+                        } else {
+                            // 0 is the initial counter (we have done 0 repetitions so far). `len`
+                            // is the total number of repetitions we should generate.
+                            repeats.push((0, len));
+
+                            // The first time we encounter the sequence we push it to the stack. It
+                            // then gets reused (see the beginning of the loop) until we are done
+                            // repeating.
+                            stack.push(Frame::Sequence {
+                                idx: 0,
+                                sep: seq.separator.clone(),
+                                forest: seq,
+                            });
+                        }
+                    }
+                }
+            }
+
+            // Replace the meta-var with the matched token tree from the invocation.
+            mbe::TokenTree::MetaVar(mut sp, mut ident) => {
+                // Find the matched nonterminal from the macro invocation, and use it to replace
+                // the meta-var.
+                if let Some(cur_matched) = lookup_cur_matched(ident, interp, &repeats) {
+                    if let MatchedNonterminal(ref nt) = cur_matched {
+                        // FIXME #2887: why do we apply a mark when matching a token tree meta-var
+                        // (e.g. `$x:tt`), but not when we are matching any other type of token
+                        // tree?
+                        if let NtTT(ref tt) = **nt {
+                            result.push(tt.clone().into());
+                        } else {
+                            marker.visit_span(&mut sp);
+                            let token = TokenTree::token(token::Interpolated(nt.clone()), sp);
+                            result.push(token.into());
+                        }
+                    } else {
+                        // We were unable to descend far enough. This is an error.
+                        cx.span_fatal(
+                            sp, /* blame the macro writer */
+                            &format!("variable '{}' is still repeating at this depth", ident),
+                        );
+                    }
+                } else {
+                    // If we aren't able to match the meta-var, we push it back into the result but
+                    // with modified syntax context. (I believe this supports nested macros).
+                    marker.visit_span(&mut sp);
+                    marker.visit_ident(&mut ident);
+                    result.push(TokenTree::token(token::Dollar, sp).into());
+                    result.push(TokenTree::Token(Token::from_ast_ident(ident)).into());
+                }
+            }
+
+            // If we are entering a new delimiter, we push its contents to the `stack` to be
+            // processed, and we push all of the currently produced results to the `result_stack`.
+            // We will produce all of the results of the inside of the `Delimited` and then we will
+            // jump back out of the Delimited, pop the result_stack and add the new results back to
+            // the previous results (from outside the Delimited).
+            mbe::TokenTree::Delimited(mut span, delimited) => {
+                marker.visit_delim_span(&mut span);
+                stack.push(Frame::Delimited { forest: delimited, idx: 0, span });
+                result_stack.push(mem::take(&mut result));
+            }
+
+            // Nothing much to do here. Just push the token to the result, being careful to
+            // preserve syntax context.
+            mbe::TokenTree::Token(token) => {
+                let mut tt = TokenTree::Token(token);
+                marker.visit_tt(&mut tt);
+                result.push(tt.into());
+            }
+
+            // There should be no meta-var declarations in the invocation of a macro.
+            mbe::TokenTree::MetaVarDecl(..) => panic!("unexpected `TokenTree::MetaVarDecl`"),
+        }
+    }
+}
+
+/// Look up the meta-var named `ident` and return the matched token tree from the invocation using
+/// the set of matches `interpolations`.
+///
+/// See the definition of `repeats` in the `transcribe` function. `repeats` is used to descend
+/// into the right place in nested matchers. If we attempt to descend too far, the macro writer has
+/// made a mistake, and we return `None`.
+fn lookup_cur_matched<'a>(
+    ident: Ident,
+    interpolations: &'a FxHashMap<Ident, NamedMatch>,
+    repeats: &[(usize, usize)],
+) -> Option<&'a NamedMatch> {
+    interpolations.get(&ident).map(|matched| {
+        let mut matched = matched;
+        for &(idx, _) in repeats {
+            match matched {
+                MatchedNonterminal(_) => break,
+                MatchedSeq(ref ads, _) => matched = ads.get(idx).unwrap(),
+            }
+        }
+
+        matched
+    })
+}
+
+/// An accumulator over a TokenTree to be used with `fold`. During transcription, we need to make
+/// sure that the sizes of each sequence and all of its nested sequences are the same as the sizes
+/// of all the matched (nested) sequences in the macro invocation. If they don't match, somebody
+/// has made a mistake (either the macro writer or caller).
+#[derive(Clone)]
+enum LockstepIterSize {
+    /// No constraints on length of matcher. This is true for any TokenTree variants except a
+    /// `MetaVar` with an actual `MatchedSeq` (as opposed to a `MatchedNonterminal`).
+    Unconstrained,
+
+    /// A `MetaVar` with an actual `MatchedSeq`. The length of the match and the name of the
+    /// meta-var are returned.
+    Constraint(usize, Ident),
+
+    /// Two `Constraint`s on the same sequence had different lengths. This is an error.
+    Contradiction(String),
+}
+
+impl LockstepIterSize {
+    /// Find incompatibilities in matcher/invocation sizes.
+    /// - `Unconstrained` is compatible with everything.
+    /// - `Contradiction` is incompatible with everything.
+    /// - `Constraint(len)` is only compatible with other constraints of the same length.
+    fn with(self, other: LockstepIterSize) -> LockstepIterSize {
+        match self {
+            LockstepIterSize::Unconstrained => other,
+            LockstepIterSize::Contradiction(_) => self,
+            LockstepIterSize::Constraint(l_len, ref l_id) => match other {
+                LockstepIterSize::Unconstrained => self,
+                LockstepIterSize::Contradiction(_) => other,
+                LockstepIterSize::Constraint(r_len, _) if l_len == r_len => self,
+                LockstepIterSize::Constraint(r_len, r_id) => {
+                    let msg = format!(
+                        "meta-variable `{}` repeats {} time{}, but `{}` repeats {} time{}",
+                        l_id,
+                        l_len,
+                        pluralise!(l_len),
+                        r_id,
+                        r_len,
+                        pluralise!(r_len),
+                    );
+                    LockstepIterSize::Contradiction(msg)
+                }
+            },
+        }
+    }
+}
+
+/// Given a `tree`, make sure that all sequences have the same length as the matches for the
+/// appropriate meta-vars in `interpolations`.
+///
+/// Note that if `repeats` does not match the exact correct depth of a meta-var,
+/// `lookup_cur_matched` will return `None`, which is why this still works even in the presence of
+/// multiple nested matcher sequences.
+fn lockstep_iter_size(
+    tree: &mbe::TokenTree,
+    interpolations: &FxHashMap<Ident, NamedMatch>,
+    repeats: &[(usize, usize)],
+) -> LockstepIterSize {
+    use mbe::TokenTree;
+    match *tree {
+        TokenTree::Delimited(_, ref delimed) => {
+            delimed.tts.iter().fold(LockstepIterSize::Unconstrained, |size, tt| {
+                size.with(lockstep_iter_size(tt, interpolations, repeats))
+            })
+        }
+        TokenTree::Sequence(_, ref seq) => {
+            seq.tts.iter().fold(LockstepIterSize::Unconstrained, |size, tt| {
+                size.with(lockstep_iter_size(tt, interpolations, repeats))
+            })
+        }
+        TokenTree::MetaVar(_, name) | TokenTree::MetaVarDecl(_, name, _) => {
+            match lookup_cur_matched(name, interpolations, repeats) {
+                Some(matched) => match matched {
+                    MatchedNonterminal(_) => LockstepIterSize::Unconstrained,
+                    MatchedSeq(ref ads, _) => LockstepIterSize::Constraint(ads.len(), name),
+                },
+                _ => LockstepIterSize::Unconstrained,
+            }
+        }
+        TokenTree::Token(..) => LockstepIterSize::Unconstrained,
+    }
+}
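
For illustration (a hypothetical invocation, not part of this patch), the `LockstepIterSize::Contradiction` path above is what rejects a transcription in which two meta-variables bound by separate repetitions are used in the same `$(...)*` group:

macro_rules! zip {
    // `$a` and `$b` come from separate matchers, so they may repeat different amounts.
    ($($a:ident),* ; $($b:ident),*) => {
        $( fn $a() {} fn $b() {} )*
    };
}

// Two names on the left, three on the right: transcribing `$( ... )*` fails with
// "meta-variable `a` repeats 2 times, but `b` repeats 3 times".
zip!(x, y; p, q, r);
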
diff --git a/src/libsyntax_expand/placeholders.rs b/src/libsyntax_expand/placeholders.rs
new file mode 100644 (file)
index 0000000..e595888
--- /dev/null
@@ -0,0 +1,341 @@
+use crate::base::ExtCtxt;
+use crate::expand::{AstFragment, AstFragmentKind};
+
+use syntax::ast;
+use syntax::source_map::{DUMMY_SP, dummy_spanned};
+use syntax::tokenstream::TokenStream;
+use syntax::mut_visit::*;
+use syntax::ptr::P;
+use syntax::ThinVec;
+
+use smallvec::{smallvec, SmallVec};
+
+use rustc_data_structures::fx::FxHashMap;
+
+pub fn placeholder(kind: AstFragmentKind, id: ast::NodeId) -> AstFragment {
+    fn mac_placeholder() -> ast::Mac {
+        ast::Mac {
+            path: ast::Path { span: DUMMY_SP, segments: Vec::new() },
+            tts: TokenStream::default().into(),
+            delim: ast::MacDelimiter::Brace,
+            span: DUMMY_SP,
+            prior_type_ascription: None,
+        }
+    }
+
+    let ident = ast::Ident::invalid();
+    let attrs = Vec::new();
+    let generics = ast::Generics::default();
+    let vis = dummy_spanned(ast::VisibilityKind::Inherited);
+    let span = DUMMY_SP;
+    let expr_placeholder = || P(ast::Expr {
+        id, span,
+        attrs: ThinVec::new(),
+        kind: ast::ExprKind::Mac(mac_placeholder()),
+    });
+    let ty = || P(ast::Ty {
+        id,
+        kind: ast::TyKind::Mac(mac_placeholder()),
+        span,
+    });
+    let pat = || P(ast::Pat {
+        id,
+        kind: ast::PatKind::Mac(mac_placeholder()),
+        span,
+    });
+
+    match kind {
+        AstFragmentKind::Expr => AstFragment::Expr(expr_placeholder()),
+        AstFragmentKind::OptExpr => AstFragment::OptExpr(Some(expr_placeholder())),
+        AstFragmentKind::Items => AstFragment::Items(smallvec![P(ast::Item {
+            id, span, ident, vis, attrs,
+            kind: ast::ItemKind::Mac(mac_placeholder()),
+            tokens: None,
+        })]),
+        AstFragmentKind::TraitItems => AstFragment::TraitItems(smallvec![ast::TraitItem {
+            id, span, ident, attrs, generics,
+            kind: ast::TraitItemKind::Macro(mac_placeholder()),
+            tokens: None,
+        }]),
+        AstFragmentKind::ImplItems => AstFragment::ImplItems(smallvec![ast::ImplItem {
+            id, span, ident, vis, attrs, generics,
+            kind: ast::ImplItemKind::Macro(mac_placeholder()),
+            defaultness: ast::Defaultness::Final,
+            tokens: None,
+        }]),
+        AstFragmentKind::ForeignItems =>
+            AstFragment::ForeignItems(smallvec![ast::ForeignItem {
+                id, span, ident, vis, attrs,
+                kind: ast::ForeignItemKind::Macro(mac_placeholder()),
+            }]),
+        AstFragmentKind::Pat => AstFragment::Pat(P(ast::Pat {
+            id, span, kind: ast::PatKind::Mac(mac_placeholder()),
+        })),
+        AstFragmentKind::Ty => AstFragment::Ty(P(ast::Ty {
+            id, span, kind: ast::TyKind::Mac(mac_placeholder()),
+        })),
+        AstFragmentKind::Stmts => AstFragment::Stmts(smallvec![{
+            let mac = P((mac_placeholder(), ast::MacStmtStyle::Braces, ThinVec::new()));
+            ast::Stmt { id, span, kind: ast::StmtKind::Mac(mac) }
+        }]),
+        AstFragmentKind::Arms => AstFragment::Arms(smallvec![
+            ast::Arm {
+                attrs: Default::default(),
+                body: expr_placeholder(),
+                guard: None,
+                id,
+                pat: pat(),
+                span,
+                is_placeholder: true,
+            }
+        ]),
+        AstFragmentKind::Fields => AstFragment::Fields(smallvec![
+            ast::Field {
+                attrs: Default::default(),
+                expr: expr_placeholder(),
+                id,
+                ident,
+                is_shorthand: false,
+                span,
+                is_placeholder: true,
+            }
+        ]),
+        AstFragmentKind::FieldPats => AstFragment::FieldPats(smallvec![
+            ast::FieldPat {
+                attrs: Default::default(),
+                id,
+                ident,
+                is_shorthand: false,
+                pat: pat(),
+                span,
+                is_placeholder: true,
+            }
+        ]),
+        AstFragmentKind::GenericParams => AstFragment::GenericParams(smallvec![{
+            ast::GenericParam {
+                attrs: Default::default(),
+                bounds: Default::default(),
+                id,
+                ident,
+                is_placeholder: true,
+                kind: ast::GenericParamKind::Lifetime,
+            }
+        }]),
+        AstFragmentKind::Params => AstFragment::Params(smallvec![
+            ast::Param {
+                attrs: Default::default(),
+                id,
+                pat: pat(),
+                span,
+                ty: ty(),
+                is_placeholder: true,
+            }
+        ]),
+        AstFragmentKind::StructFields => AstFragment::StructFields(smallvec![
+            ast::StructField {
+                attrs: Default::default(),
+                id,
+                ident: None,
+                span,
+                ty: ty(),
+                vis,
+                is_placeholder: true,
+            }
+        ]),
+        AstFragmentKind::Variants => AstFragment::Variants(smallvec![
+            ast::Variant {
+                attrs: Default::default(),
+                data: ast::VariantData::Struct(Default::default(), false),
+                disr_expr: None,
+                id,
+                ident,
+                span,
+                is_placeholder: true,
+            }
+        ])
+    }
+}
+
+pub struct PlaceholderExpander<'a, 'b> {
+    expanded_fragments: FxHashMap<ast::NodeId, AstFragment>,
+    cx: &'a mut ExtCtxt<'b>,
+    monotonic: bool,
+}
+
+impl<'a, 'b> PlaceholderExpander<'a, 'b> {
+    pub fn new(cx: &'a mut ExtCtxt<'b>, monotonic: bool) -> Self {
+        PlaceholderExpander {
+            cx,
+            expanded_fragments: FxHashMap::default(),
+            monotonic,
+        }
+    }
+
+    pub fn add(&mut self, id: ast::NodeId, mut fragment: AstFragment) {
+        fragment.mut_visit_with(self);
+        self.expanded_fragments.insert(id, fragment);
+    }
+
+    fn remove(&mut self, id: ast::NodeId) -> AstFragment {
+        self.expanded_fragments.remove(&id).unwrap()
+    }
+}
+
+impl<'a, 'b> MutVisitor for PlaceholderExpander<'a, 'b> {
+    fn flat_map_arm(&mut self, arm: ast::Arm) -> SmallVec<[ast::Arm; 1]> {
+        if arm.is_placeholder {
+            self.remove(arm.id).make_arms()
+        } else {
+            noop_flat_map_arm(arm, self)
+        }
+    }
+
+    fn flat_map_field(&mut self, field: ast::Field) -> SmallVec<[ast::Field; 1]> {
+        if field.is_placeholder {
+            self.remove(field.id).make_fields()
+        } else {
+            noop_flat_map_field(field, self)
+        }
+    }
+
+    fn flat_map_field_pattern(&mut self, fp: ast::FieldPat) -> SmallVec<[ast::FieldPat; 1]> {
+        if fp.is_placeholder {
+            self.remove(fp.id).make_field_patterns()
+        } else {
+            noop_flat_map_field_pattern(fp, self)
+        }
+    }
+
+    fn flat_map_generic_param(
+        &mut self,
+        param: ast::GenericParam
+    ) -> SmallVec<[ast::GenericParam; 1]>
+    {
+        if param.is_placeholder {
+            self.remove(param.id).make_generic_params()
+        } else {
+            noop_flat_map_generic_param(param, self)
+        }
+    }
+
+    fn flat_map_param(&mut self, p: ast::Param) -> SmallVec<[ast::Param; 1]> {
+        if p.is_placeholder {
+            self.remove(p.id).make_params()
+        } else {
+            noop_flat_map_param(p, self)
+        }
+    }
+
+    fn flat_map_struct_field(&mut self, sf: ast::StructField) -> SmallVec<[ast::StructField; 1]> {
+        if sf.is_placeholder {
+            self.remove(sf.id).make_struct_fields()
+        } else {
+            noop_flat_map_struct_field(sf, self)
+        }
+    }
+
+    fn flat_map_variant(&mut self, variant: ast::Variant) -> SmallVec<[ast::Variant; 1]> {
+        if variant.is_placeholder {
+            self.remove(variant.id).make_variants()
+        } else {
+            noop_flat_map_variant(variant, self)
+        }
+    }
+
+    fn flat_map_item(&mut self, item: P<ast::Item>) -> SmallVec<[P<ast::Item>; 1]> {
+        match item.kind {
+            ast::ItemKind::Mac(_) => return self.remove(item.id).make_items(),
+            ast::ItemKind::MacroDef(_) => return smallvec![item],
+            _ => {}
+        }
+
+        noop_flat_map_item(item, self)
+    }
+
+    fn flat_map_trait_item(&mut self, item: ast::TraitItem) -> SmallVec<[ast::TraitItem; 1]> {
+        match item.kind {
+            ast::TraitItemKind::Macro(_) => self.remove(item.id).make_trait_items(),
+            _ => noop_flat_map_trait_item(item, self),
+        }
+    }
+
+    fn flat_map_impl_item(&mut self, item: ast::ImplItem) -> SmallVec<[ast::ImplItem; 1]> {
+        match item.kind {
+            ast::ImplItemKind::Macro(_) => self.remove(item.id).make_impl_items(),
+            _ => noop_flat_map_impl_item(item, self),
+        }
+    }
+
+    fn flat_map_foreign_item(&mut self, item: ast::ForeignItem) -> SmallVec<[ast::ForeignItem; 1]> {
+        match item.kind {
+            ast::ForeignItemKind::Macro(_) => self.remove(item.id).make_foreign_items(),
+            _ => noop_flat_map_foreign_item(item, self),
+        }
+    }
+
+    fn visit_expr(&mut self, expr: &mut P<ast::Expr>) {
+        match expr.kind {
+            ast::ExprKind::Mac(_) => *expr = self.remove(expr.id).make_expr(),
+            _ => noop_visit_expr(expr, self),
+        }
+    }
+
+    fn filter_map_expr(&mut self, expr: P<ast::Expr>) -> Option<P<ast::Expr>> {
+        match expr.kind {
+            ast::ExprKind::Mac(_) => self.remove(expr.id).make_opt_expr(),
+            _ => noop_filter_map_expr(expr, self),
+        }
+    }
+
+    fn flat_map_stmt(&mut self, stmt: ast::Stmt) -> SmallVec<[ast::Stmt; 1]> {
+        let (style, mut stmts) = match stmt.kind {
+            ast::StmtKind::Mac(mac) => (mac.1, self.remove(stmt.id).make_stmts()),
+            _ => return noop_flat_map_stmt(stmt, self),
+        };
+
+        if style == ast::MacStmtStyle::Semicolon {
+            if let Some(stmt) = stmts.pop() {
+                stmts.push(stmt.add_trailing_semicolon());
+            }
+        }
+
+        stmts
+    }
+
+    fn visit_pat(&mut self, pat: &mut P<ast::Pat>) {
+        match pat.kind {
+            ast::PatKind::Mac(_) => *pat = self.remove(pat.id).make_pat(),
+            _ => noop_visit_pat(pat, self),
+        }
+    }
+
+    fn visit_ty(&mut self, ty: &mut P<ast::Ty>) {
+        match ty.kind {
+            ast::TyKind::Mac(_) => *ty = self.remove(ty.id).make_ty(),
+            _ => noop_visit_ty(ty, self),
+        }
+    }
+
+    fn visit_block(&mut self, block: &mut P<ast::Block>) {
+        noop_visit_block(block, self);
+
+        for stmt in block.stmts.iter_mut() {
+            if self.monotonic {
+                assert_eq!(stmt.id, ast::DUMMY_NODE_ID);
+                stmt.id = self.cx.resolver.next_node_id();
+            }
+        }
+    }
+
+    fn visit_mod(&mut self, module: &mut ast::Mod) {
+        noop_visit_mod(module, self);
+        module.items.retain(|item| match item.kind {
+            ast::ItemKind::Mac(_) if !self.cx.ecfg.keep_macs => false, // remove macro definitions
+            _ => true,
+        });
+    }
+
+    fn visit_mac(&mut self, _mac: &mut ast::Mac) {
+        // Do nothing.
+    }
+}
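
As a rough, self-contained sketch of the idea behind `PlaceholderExpander` (illustrative only; the real type visits AST nodes with `MutVisitor` and stores `AstFragment`s keyed by `NodeId`), each placeholder carries only an id, the finished expansion is registered with `add`, and the visitor later swaps it in by id:

use std::collections::HashMap;

// Simplified stand-in for the expander: expansions keyed by node id.
struct Expander {
    expanded: HashMap<u32, String>,
}

impl Expander {
    fn add(&mut self, id: u32, fragment: String) {
        self.expanded.insert(id, fragment);
    }
    fn remove(&mut self, id: u32) -> String {
        self.expanded.remove(&id).unwrap()
    }
}

fn main() {
    let mut expander = Expander { expanded: HashMap::new() };
    // The macro's output is recorded against the placeholder's id...
    expander.add(7, "fn generated() {}".to_string());
    // ...and the placeholder node is later replaced by that output.
    let placeholder_id = 7;
    assert_eq!(expander.remove(placeholder_id), "fn generated() {}");
}
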
diff --git a/src/libsyntax_expand/proc_macro.rs b/src/libsyntax_expand/proc_macro.rs
new file mode 100644 (file)
index 0000000..07b618c
--- /dev/null
@@ -0,0 +1,215 @@
+use crate::base::{self, *};
+use crate::proc_macro_server;
+
+use syntax::ast::{self, ItemKind, Attribute, Mac};
+use syntax::attr::{mark_used, mark_known};
+use syntax::errors::{Applicability, FatalError};
+use syntax::parse::{self, token};
+use syntax::symbol::sym;
+use syntax::tokenstream::{self, TokenStream};
+use syntax::visit::Visitor;
+
+use rustc_data_structures::sync::Lrc;
+use syntax_pos::{Span, DUMMY_SP};
+
+const EXEC_STRATEGY: pm::bridge::server::SameThread = pm::bridge::server::SameThread;
+
+pub struct BangProcMacro {
+    pub client: pm::bridge::client::Client<
+        fn(pm::TokenStream) -> pm::TokenStream,
+    >,
+}
+
+impl base::ProcMacro for BangProcMacro {
+    fn expand<'cx>(&self,
+                   ecx: &'cx mut ExtCtxt<'_>,
+                   span: Span,
+                   input: TokenStream)
+                   -> TokenStream {
+        let server = proc_macro_server::Rustc::new(ecx);
+        match self.client.run(&EXEC_STRATEGY, server, input) {
+            Ok(stream) => stream,
+            Err(e) => {
+                let msg = "proc macro panicked";
+                let mut err = ecx.struct_span_fatal(span, msg);
+                if let Some(s) = e.as_str() {
+                    err.help(&format!("message: {}", s));
+                }
+
+                err.emit();
+                FatalError.raise();
+            }
+        }
+    }
+}
+
+pub struct AttrProcMacro {
+    pub client: pm::bridge::client::Client<fn(pm::TokenStream, pm::TokenStream) -> pm::TokenStream>,
+}
+
+impl base::AttrProcMacro for AttrProcMacro {
+    fn expand<'cx>(&self,
+                   ecx: &'cx mut ExtCtxt<'_>,
+                   span: Span,
+                   annotation: TokenStream,
+                   annotated: TokenStream)
+                   -> TokenStream {
+        let server = proc_macro_server::Rustc::new(ecx);
+        match self.client.run(&EXEC_STRATEGY, server, annotation, annotated) {
+            Ok(stream) => stream,
+            Err(e) => {
+                let msg = "custom attribute panicked";
+                let mut err = ecx.struct_span_fatal(span, msg);
+                if let Some(s) = e.as_str() {
+                    err.help(&format!("message: {}", s));
+                }
+
+                err.emit();
+                FatalError.raise();
+            }
+        }
+    }
+}
+
+pub struct ProcMacroDerive {
+    pub client: pm::bridge::client::Client<fn(pm::TokenStream) -> pm::TokenStream>,
+}
+
+impl MultiItemModifier for ProcMacroDerive {
+    fn expand(&self,
+              ecx: &mut ExtCtxt<'_>,
+              span: Span,
+              _meta_item: &ast::MetaItem,
+              item: Annotatable)
+              -> Vec<Annotatable> {
+        let item = match item {
+            Annotatable::Arm(..) |
+            Annotatable::Field(..) |
+            Annotatable::FieldPat(..) |
+            Annotatable::GenericParam(..) |
+            Annotatable::Param(..) |
+            Annotatable::StructField(..) |
+            Annotatable::Variant(..)
+                => panic!("unexpected annotatable"),
+            Annotatable::Item(item) => item,
+            Annotatable::ImplItem(_) |
+            Annotatable::TraitItem(_) |
+            Annotatable::ForeignItem(_) |
+            Annotatable::Stmt(_) |
+            Annotatable::Expr(_) => {
+                ecx.span_err(span, "proc-macro derives may only be \
+                                    applied to a struct, enum, or union");
+                return Vec::new()
+            }
+        };
+        match item.kind {
+            ItemKind::Struct(..) |
+            ItemKind::Enum(..) |
+            ItemKind::Union(..) => {},
+            _ => {
+                ecx.span_err(span, "proc-macro derives may only be \
+                                    applied to a struct, enum, or union");
+                return Vec::new()
+            }
+        }
+
+        let token = token::Interpolated(Lrc::new(token::NtItem(item)));
+        let input = tokenstream::TokenTree::token(token, DUMMY_SP).into();
+
+        let server = proc_macro_server::Rustc::new(ecx);
+        let stream = match self.client.run(&EXEC_STRATEGY, server, input) {
+            Ok(stream) => stream,
+            Err(e) => {
+                let msg = "proc-macro derive panicked";
+                let mut err = ecx.struct_span_fatal(span, msg);
+                if let Some(s) = e.as_str() {
+                    err.help(&format!("message: {}", s));
+                }
+
+                err.emit();
+                FatalError.raise();
+            }
+        };
+
+        let error_count_before = ecx.parse_sess.span_diagnostic.err_count();
+        let msg = "proc-macro derive produced unparseable tokens";
+
+        let mut parser = parse::stream_to_parser(ecx.parse_sess, stream, Some("proc-macro derive"));
+        let mut items = vec![];
+
+        loop {
+            match parser.parse_item() {
+                Ok(None) => break,
+                Ok(Some(item)) => {
+                    items.push(Annotatable::Item(item))
+                }
+                Err(mut err) => {
+                    // FIXME: handle this better
+                    err.cancel();
+                    ecx.struct_span_fatal(span, msg).emit();
+                    FatalError.raise();
+                }
+            }
+        }
+
+
+        // Fail if any errors have been emitted.
+        if ecx.parse_sess.span_diagnostic.err_count() > error_count_before {
+            ecx.struct_span_fatal(span, msg).emit();
+            FatalError.raise();
+        }
+
+        items
+    }
+}
+
+crate struct MarkAttrs<'a>(crate &'a [ast::Name]);
+
+impl<'a> Visitor<'a> for MarkAttrs<'a> {
+    fn visit_attribute(&mut self, attr: &Attribute) {
+        if let Some(ident) = attr.ident() {
+            if self.0.contains(&ident.name) {
+                mark_used(attr);
+                mark_known(attr);
+            }
+        }
+    }
+
+    fn visit_mac(&mut self, _mac: &Mac) {}
+}
+
+pub fn is_proc_macro_attr(attr: &Attribute) -> bool {
+    [sym::proc_macro, sym::proc_macro_attribute, sym::proc_macro_derive]
+        .iter().any(|kind| attr.check_name(*kind))
+}
+
+crate fn collect_derives(cx: &mut ExtCtxt<'_>, attrs: &mut Vec<ast::Attribute>) -> Vec<ast::Path> {
+    let mut result = Vec::new();
+    attrs.retain(|attr| {
+        if attr.path != sym::derive {
+            return true;
+        }
+        if !attr.is_meta_item_list() {
+            cx.struct_span_err(attr.span, "malformed `derive` attribute input")
+                .span_suggestion(
+                    attr.span,
+                    "missing traits to be derived",
+                    "#[derive(Trait1, Trait2, ...)]".to_owned(),
+                    Applicability::HasPlaceholders,
+                ).emit();
+            return false;
+        }
+
+        match attr.parse_derive_paths(cx.parse_sess) {
+            Ok(traits) => {
+                result.extend(traits);
+                true
+            }
+            Err(mut e) => {
+                e.emit();
+                false
+            }
+        }
+    });
+    result
+}
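
For example (hypothetical source, not from this patch), `collect_derives` keeps well-formed `derive` attributes and records their trait paths, while an attribute without a parenthesized list is reported as malformed:

// Accepted: a meta-item list; the paths `Clone` and `Debug` are collected and the
// attribute is removed from the item so the derives can be expanded separately.
#[derive(Clone, Debug)]
struct Point { x: i32, y: i32 }

// Rejected: no meta-item list, so "malformed `derive` attribute input" is emitted
// together with the `#[derive(Trait1, Trait2, ...)]` suggestion.
// #[derive]
// struct Broken;
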
diff --git a/src/libsyntax_expand/proc_macro_server.rs b/src/libsyntax_expand/proc_macro_server.rs
new file mode 100644 (file)
index 0000000..4ce99cf
--- /dev/null
@@ -0,0 +1,713 @@
+use crate::base::ExtCtxt;
+
+use syntax::ast;
+use syntax::parse::{self, token};
+use syntax::parse::lexer::comments;
+use syntax::print::pprust;
+use syntax::sess::ParseSess;
+use syntax::tokenstream::{self, DelimSpan, IsJoint::*, TokenStream, TreeAndJoint};
+
+use errors::Diagnostic;
+use rustc_data_structures::sync::Lrc;
+use syntax_pos::{BytePos, FileName, MultiSpan, Pos, SourceFile, Span};
+use syntax_pos::symbol::{kw, sym, Symbol};
+
+use pm::{Delimiter, Level, LineColumn, Spacing};
+use pm::bridge::{server, TokenTree};
+use std::{ascii, panic};
+use std::ops::Bound;
+
+trait FromInternal<T> {
+    fn from_internal(x: T) -> Self;
+}
+
+trait ToInternal<T> {
+    fn to_internal(self) -> T;
+}
+
+impl FromInternal<token::DelimToken> for Delimiter {
+    fn from_internal(delim: token::DelimToken) -> Delimiter {
+        match delim {
+            token::Paren => Delimiter::Parenthesis,
+            token::Brace => Delimiter::Brace,
+            token::Bracket => Delimiter::Bracket,
+            token::NoDelim => Delimiter::None,
+        }
+    }
+}
+
+impl ToInternal<token::DelimToken> for Delimiter {
+    fn to_internal(self) -> token::DelimToken {
+        match self {
+            Delimiter::Parenthesis => token::Paren,
+            Delimiter::Brace => token::Brace,
+            Delimiter::Bracket => token::Bracket,
+            Delimiter::None => token::NoDelim,
+        }
+    }
+}
+
+impl FromInternal<(TreeAndJoint, &'_ ParseSess, &'_ mut Vec<Self>)>
+    for TokenTree<Group, Punct, Ident, Literal>
+{
+    fn from_internal(((tree, is_joint), sess, stack): (TreeAndJoint, &ParseSess, &mut Vec<Self>))
+                    -> Self {
+        use syntax::parse::token::*;
+
+        let joint = is_joint == Joint;
+        let Token { kind, span } = match tree {
+            tokenstream::TokenTree::Delimited(span, delim, tts) => {
+                let delimiter = Delimiter::from_internal(delim);
+                return TokenTree::Group(Group {
+                    delimiter,
+                    stream: tts.into(),
+                    span,
+                });
+            }
+            tokenstream::TokenTree::Token(token) => token,
+        };
+
+        macro_rules! tt {
+            ($ty:ident { $($field:ident $(: $value:expr)*),+ $(,)? }) => (
+                TokenTree::$ty(self::$ty {
+                    $($field $(: $value)*,)+
+                    span,
+                })
+            );
+            ($ty:ident::$method:ident($($value:expr),*)) => (
+                TokenTree::$ty(self::$ty::$method($($value,)* span))
+            );
+        }
+        macro_rules! op {
+            ($a:expr) => {
+                tt!(Punct::new($a, joint))
+            };
+            ($a:expr, $b:expr) => {{
+                stack.push(tt!(Punct::new($b, joint)));
+                tt!(Punct::new($a, true))
+            }};
+            ($a:expr, $b:expr, $c:expr) => {{
+                stack.push(tt!(Punct::new($c, joint)));
+                stack.push(tt!(Punct::new($b, true)));
+                tt!(Punct::new($a, true))
+            }};
+        }
+
+        match kind {
+            Eq => op!('='),
+            Lt => op!('<'),
+            Le => op!('<', '='),
+            EqEq => op!('=', '='),
+            Ne => op!('!', '='),
+            Ge => op!('>', '='),
+            Gt => op!('>'),
+            AndAnd => op!('&', '&'),
+            OrOr => op!('|', '|'),
+            Not => op!('!'),
+            Tilde => op!('~'),
+            BinOp(Plus) => op!('+'),
+            BinOp(Minus) => op!('-'),
+            BinOp(Star) => op!('*'),
+            BinOp(Slash) => op!('/'),
+            BinOp(Percent) => op!('%'),
+            BinOp(Caret) => op!('^'),
+            BinOp(And) => op!('&'),
+            BinOp(Or) => op!('|'),
+            BinOp(Shl) => op!('<', '<'),
+            BinOp(Shr) => op!('>', '>'),
+            BinOpEq(Plus) => op!('+', '='),
+            BinOpEq(Minus) => op!('-', '='),
+            BinOpEq(Star) => op!('*', '='),
+            BinOpEq(Slash) => op!('/', '='),
+            BinOpEq(Percent) => op!('%', '='),
+            BinOpEq(Caret) => op!('^', '='),
+            BinOpEq(And) => op!('&', '='),
+            BinOpEq(Or) => op!('|', '='),
+            BinOpEq(Shl) => op!('<', '<', '='),
+            BinOpEq(Shr) => op!('>', '>', '='),
+            At => op!('@'),
+            Dot => op!('.'),
+            DotDot => op!('.', '.'),
+            DotDotDot => op!('.', '.', '.'),
+            DotDotEq => op!('.', '.', '='),
+            Comma => op!(','),
+            Semi => op!(';'),
+            Colon => op!(':'),
+            ModSep => op!(':', ':'),
+            RArrow => op!('-', '>'),
+            LArrow => op!('<', '-'),
+            FatArrow => op!('=', '>'),
+            Pound => op!('#'),
+            Dollar => op!('$'),
+            Question => op!('?'),
+            SingleQuote => op!('\''),
+
+            Ident(name, false) if name == kw::DollarCrate => tt!(Ident::dollar_crate()),
+            Ident(name, is_raw) => tt!(Ident::new(name, is_raw)),
+            Lifetime(name) => {
+                let ident = ast::Ident::new(name, span).without_first_quote();
+                stack.push(tt!(Ident::new(ident.name, false)));
+                tt!(Punct::new('\'', true))
+            }
+            Literal(lit) => tt!(Literal { lit }),
+            DocComment(c) => {
+                let style = comments::doc_comment_style(&c.as_str());
+                let stripped = comments::strip_doc_comment_decoration(&c.as_str());
+                let mut escaped = String::new();
+                for ch in stripped.chars() {
+                    escaped.extend(ch.escape_debug());
+                }
+                let stream = vec![
+                    Ident(sym::doc, false),
+                    Eq,
+                    TokenKind::lit(token::Str, Symbol::intern(&escaped), None),
+                ]
+                .into_iter()
+                .map(|kind| tokenstream::TokenTree::token(kind, span))
+                .collect();
+                stack.push(TokenTree::Group(Group {
+                    delimiter: Delimiter::Bracket,
+                    stream,
+                    span: DelimSpan::from_single(span),
+                }));
+                if style == ast::AttrStyle::Inner {
+                    stack.push(tt!(Punct::new('!', false)));
+                }
+                tt!(Punct::new('#', false))
+            }
+
+            Interpolated(nt) => {
+                let stream = parse::nt_to_tokenstream(&nt, sess, span);
+                TokenTree::Group(Group {
+                    delimiter: Delimiter::None,
+                    stream,
+                    span: DelimSpan::from_single(span),
+                })
+            }
+
+            OpenDelim(..) | CloseDelim(..) => unreachable!(),
+            Whitespace | Comment | Shebang(..) | Unknown(..) | Eof => unreachable!(),
+        }
+    }
+}
+
+impl ToInternal<TokenStream> for TokenTree<Group, Punct, Ident, Literal> {
+    fn to_internal(self) -> TokenStream {
+        use syntax::parse::token::*;
+
+        let (ch, joint, span) = match self {
+            TokenTree::Punct(Punct { ch, joint, span }) => (ch, joint, span),
+            TokenTree::Group(Group {
+                delimiter,
+                stream,
+                span,
+            }) => {
+                return tokenstream::TokenTree::Delimited(
+                    span,
+                    delimiter.to_internal(),
+                    stream.into(),
+                )
+                .into();
+            }
+            TokenTree::Ident(self::Ident { sym, is_raw, span }) => {
+                return tokenstream::TokenTree::token(Ident(sym, is_raw), span).into();
+            }
+            TokenTree::Literal(self::Literal {
+                lit: token::Lit { kind: token::Integer, symbol, suffix },
+                span,
+            }) if symbol.as_str().starts_with("-") => {
+                let minus = BinOp(BinOpToken::Minus);
+                let symbol = Symbol::intern(&symbol.as_str()[1..]);
+                let integer = TokenKind::lit(token::Integer, symbol, suffix);
+                let a = tokenstream::TokenTree::token(minus, span);
+                let b = tokenstream::TokenTree::token(integer, span);
+                return vec![a, b].into_iter().collect();
+            }
+            TokenTree::Literal(self::Literal {
+                lit: token::Lit { kind: token::Float, symbol, suffix },
+                span,
+            }) if symbol.as_str().starts_with("-") => {
+                let minus = BinOp(BinOpToken::Minus);
+                let symbol = Symbol::intern(&symbol.as_str()[1..]);
+                let float = TokenKind::lit(token::Float, symbol, suffix);
+                let a = tokenstream::TokenTree::token(minus, span);
+                let b = tokenstream::TokenTree::token(float, span);
+                return vec![a, b].into_iter().collect();
+            }
+            TokenTree::Literal(self::Literal { lit, span }) => {
+                return tokenstream::TokenTree::token(Literal(lit), span).into()
+            }
+        };
+
+        let kind = match ch {
+            '=' => Eq,
+            '<' => Lt,
+            '>' => Gt,
+            '!' => Not,
+            '~' => Tilde,
+            '+' => BinOp(Plus),
+            '-' => BinOp(Minus),
+            '*' => BinOp(Star),
+            '/' => BinOp(Slash),
+            '%' => BinOp(Percent),
+            '^' => BinOp(Caret),
+            '&' => BinOp(And),
+            '|' => BinOp(Or),
+            '@' => At,
+            '.' => Dot,
+            ',' => Comma,
+            ';' => Semi,
+            ':' => Colon,
+            '#' => Pound,
+            '$' => Dollar,
+            '?' => Question,
+            '\'' => SingleQuote,
+            _ => unreachable!(),
+        };
+
+        let tree = tokenstream::TokenTree::token(kind, span);
+        TokenStream::new(vec![(tree, if joint { Joint } else { NonJoint })])
+    }
+}
+
+impl ToInternal<errors::Level> for Level {
+    fn to_internal(self) -> errors::Level {
+        match self {
+            Level::Error => errors::Level::Error,
+            Level::Warning => errors::Level::Warning,
+            Level::Note => errors::Level::Note,
+            Level::Help => errors::Level::Help,
+            _ => unreachable!("unknown proc_macro::Level variant: {:?}", self),
+        }
+    }
+}
+
+#[derive(Clone)]
+pub struct TokenStreamIter {
+    cursor: tokenstream::Cursor,
+    stack: Vec<TokenTree<Group, Punct, Ident, Literal>>,
+}
+
+#[derive(Clone)]
+pub struct Group {
+    delimiter: Delimiter,
+    stream: TokenStream,
+    span: DelimSpan,
+}
+
+#[derive(Copy, Clone, PartialEq, Eq, Hash)]
+pub struct Punct {
+    ch: char,
+    // NB. not using `Spacing` here because it doesn't implement `Hash`.
+    joint: bool,
+    span: Span,
+}
+
+impl Punct {
+    fn new(ch: char, joint: bool, span: Span) -> Punct {
+        const LEGAL_CHARS: &[char] = &['=', '<', '>', '!', '~', '+', '-', '*', '/', '%', '^',
+                                       '&', '|', '@', '.', ',', ';', ':', '#', '$', '?', '\''];
+        if !LEGAL_CHARS.contains(&ch) {
+            panic!("unsupported character `{:?}`", ch)
+        }
+        Punct { ch, joint, span }
+    }
+}
+
+#[derive(Copy, Clone, PartialEq, Eq, Hash)]
+pub struct Ident {
+    sym: Symbol,
+    is_raw: bool,
+    span: Span,
+}
+
+impl Ident {
+    fn is_valid(string: &str) -> bool {
+        let mut chars = string.chars();
+        if let Some(start) = chars.next() {
+            rustc_lexer::is_id_start(start) && chars.all(rustc_lexer::is_id_continue)
+        } else {
+            false
+        }
+    }
+    fn new(sym: Symbol, is_raw: bool, span: Span) -> Ident {
+        let string = sym.as_str();
+        if !Self::is_valid(&string) {
+            panic!("`{:?}` is not a valid identifier", string)
+        }
+        if is_raw && !sym.can_be_raw() {
+            panic!("`{}` cannot be a raw identifier", string);
+        }
+        Ident { sym, is_raw, span }
+    }
+    fn dollar_crate(span: Span) -> Ident {
+        // `$crate` is accepted as an ident only if it comes from the compiler.
+        Ident { sym: kw::DollarCrate, is_raw: false, span }
+    }
+}
+
+// FIXME(eddyb) `Literal` should not expose internal `Debug` impls.
+#[derive(Clone, Debug)]
+pub struct Literal {
+    lit: token::Lit,
+    span: Span,
+}
+
+pub(crate) struct Rustc<'a> {
+    sess: &'a ParseSess,
+    def_site: Span,
+    call_site: Span,
+    mixed_site: Span,
+}
+
+impl<'a> Rustc<'a> {
+    pub fn new(cx: &'a ExtCtxt<'_>) -> Self {
+        let expn_data = cx.current_expansion.id.expn_data();
+        Rustc {
+            sess: cx.parse_sess,
+            def_site: cx.with_def_site_ctxt(expn_data.def_site),
+            call_site: cx.with_call_site_ctxt(expn_data.call_site),
+            mixed_site: cx.with_mixed_site_ctxt(expn_data.call_site),
+        }
+    }
+
+    fn lit(&mut self, kind: token::LitKind, symbol: Symbol, suffix: Option<Symbol>) -> Literal {
+        Literal {
+            lit: token::Lit::new(kind, symbol, suffix),
+            span: server::Span::call_site(self),
+        }
+    }
+}
+
+impl server::Types for Rustc<'_> {
+    type TokenStream = TokenStream;
+    type TokenStreamBuilder = tokenstream::TokenStreamBuilder;
+    type TokenStreamIter = TokenStreamIter;
+    type Group = Group;
+    type Punct = Punct;
+    type Ident = Ident;
+    type Literal = Literal;
+    type SourceFile = Lrc<SourceFile>;
+    type MultiSpan = Vec<Span>;
+    type Diagnostic = Diagnostic;
+    type Span = Span;
+}
+
+impl server::TokenStream for Rustc<'_> {
+    fn new(&mut self) -> Self::TokenStream {
+        TokenStream::default()
+    }
+    fn is_empty(&mut self, stream: &Self::TokenStream) -> bool {
+        stream.is_empty()
+    }
+    fn from_str(&mut self, src: &str) -> Self::TokenStream {
+        parse::parse_stream_from_source_str(
+            FileName::proc_macro_source_code(src),
+            src.to_string(),
+            self.sess,
+            Some(self.call_site),
+        )
+    }
+    fn to_string(&mut self, stream: &Self::TokenStream) -> String {
+        pprust::tts_to_string(stream.clone())
+    }
+    fn from_token_tree(
+        &mut self,
+        tree: TokenTree<Self::Group, Self::Punct, Self::Ident, Self::Literal>,
+    ) -> Self::TokenStream {
+        tree.to_internal()
+    }
+    fn into_iter(&mut self, stream: Self::TokenStream) -> Self::TokenStreamIter {
+        TokenStreamIter {
+            cursor: stream.trees(),
+            stack: vec![],
+        }
+    }
+}
+
+impl server::TokenStreamBuilder for Rustc<'_> {
+    fn new(&mut self) -> Self::TokenStreamBuilder {
+        tokenstream::TokenStreamBuilder::new()
+    }
+    fn push(&mut self, builder: &mut Self::TokenStreamBuilder, stream: Self::TokenStream) {
+        builder.push(stream);
+    }
+    fn build(&mut self, builder: Self::TokenStreamBuilder) -> Self::TokenStream {
+        builder.build()
+    }
+}
+
+impl server::TokenStreamIter for Rustc<'_> {
+    fn next(
+        &mut self,
+        iter: &mut Self::TokenStreamIter,
+    ) -> Option<TokenTree<Self::Group, Self::Punct, Self::Ident, Self::Literal>> {
+        loop {
+            let tree = iter.stack.pop().or_else(|| {
+                let next = iter.cursor.next_with_joint()?;
+                Some(TokenTree::from_internal((next, self.sess, &mut iter.stack)))
+            })?;
+            // HACK: The condition "dummy span + group with empty delimiter" represents an AST
+            // fragment approximately converted into a token stream. This may happen, for
+            // example, with inputs to proc macro attributes, including derives. Such "groups"
+            // need to be flattened during iteration over the stream's token trees.
+            // Eventually this needs to be removed in favor of keeping original token trees
+            // and not doing the roundtrip through AST.
+            if let TokenTree::Group(ref group) = tree {
+                if group.delimiter == Delimiter::None && group.span.entire().is_dummy() {
+                    iter.cursor.append(group.stream.clone());
+                    continue;
+                }
+            }
+            return Some(tree);
+        }
+    }
+}
+
+impl server::Group for Rustc<'_> {
+    fn new(&mut self, delimiter: Delimiter, stream: Self::TokenStream) -> Self::Group {
+        Group {
+            delimiter,
+            stream,
+            span: DelimSpan::from_single(server::Span::call_site(self)),
+        }
+    }
+    fn delimiter(&mut self, group: &Self::Group) -> Delimiter {
+        group.delimiter
+    }
+    fn stream(&mut self, group: &Self::Group) -> Self::TokenStream {
+        group.stream.clone()
+    }
+    fn span(&mut self, group: &Self::Group) -> Self::Span {
+        group.span.entire()
+    }
+    fn span_open(&mut self, group: &Self::Group) -> Self::Span {
+        group.span.open
+    }
+    fn span_close(&mut self, group: &Self::Group) -> Self::Span {
+        group.span.close
+    }
+    fn set_span(&mut self, group: &mut Self::Group, span: Self::Span) {
+        group.span = DelimSpan::from_single(span);
+    }
+}
+
+impl server::Punct for Rustc<'_> {
+    fn new(&mut self, ch: char, spacing: Spacing) -> Self::Punct {
+        Punct::new(ch, spacing == Spacing::Joint, server::Span::call_site(self))
+    }
+    fn as_char(&mut self, punct: Self::Punct) -> char {
+        punct.ch
+    }
+    fn spacing(&mut self, punct: Self::Punct) -> Spacing {
+        if punct.joint {
+            Spacing::Joint
+        } else {
+            Spacing::Alone
+        }
+    }
+    fn span(&mut self, punct: Self::Punct) -> Self::Span {
+        punct.span
+    }
+    fn with_span(&mut self, punct: Self::Punct, span: Self::Span) -> Self::Punct {
+        Punct { span, ..punct }
+    }
+}
+
+impl server::Ident for Rustc<'_> {
+    fn new(&mut self, string: &str, span: Self::Span, is_raw: bool) -> Self::Ident {
+        Ident::new(Symbol::intern(string), is_raw, span)
+    }
+    fn span(&mut self, ident: Self::Ident) -> Self::Span {
+        ident.span
+    }
+    fn with_span(&mut self, ident: Self::Ident, span: Self::Span) -> Self::Ident {
+        Ident { span, ..ident }
+    }
+}
+
+impl server::Literal for Rustc<'_> {
+    // FIXME(eddyb) `Literal` should not expose internal `Debug` impls.
+    fn debug(&mut self, literal: &Self::Literal) -> String {
+        format!("{:?}", literal)
+    }
+    fn integer(&mut self, n: &str) -> Self::Literal {
+        self.lit(token::Integer, Symbol::intern(n), None)
+    }
+    fn typed_integer(&mut self, n: &str, kind: &str) -> Self::Literal {
+        self.lit(token::Integer, Symbol::intern(n), Some(Symbol::intern(kind)))
+    }
+    fn float(&mut self, n: &str) -> Self::Literal {
+        self.lit(token::Float, Symbol::intern(n), None)
+    }
+    fn f32(&mut self, n: &str) -> Self::Literal {
+        self.lit(token::Float, Symbol::intern(n), Some(sym::f32))
+    }
+    fn f64(&mut self, n: &str) -> Self::Literal {
+        self.lit(token::Float, Symbol::intern(n), Some(sym::f64))
+    }
+    fn string(&mut self, string: &str) -> Self::Literal {
+        let mut escaped = String::new();
+        for ch in string.chars() {
+            escaped.extend(ch.escape_debug());
+        }
+        self.lit(token::Str, Symbol::intern(&escaped), None)
+    }
+    fn character(&mut self, ch: char) -> Self::Literal {
+        let mut escaped = String::new();
+        escaped.extend(ch.escape_unicode());
+        self.lit(token::Char, Symbol::intern(&escaped), None)
+    }
+    fn byte_string(&mut self, bytes: &[u8]) -> Self::Literal {
+        let string = bytes
+            .iter()
+            .cloned()
+            .flat_map(ascii::escape_default)
+            .map(Into::<char>::into)
+            .collect::<String>();
+        self.lit(token::ByteStr, Symbol::intern(&string), None)
+    }
+    fn span(&mut self, literal: &Self::Literal) -> Self::Span {
+        literal.span
+    }
+    fn set_span(&mut self, literal: &mut Self::Literal, span: Self::Span) {
+        literal.span = span;
+    }
+    fn subspan(
+        &mut self,
+        literal: &Self::Literal,
+        start: Bound<usize>,
+        end: Bound<usize>,
+    ) -> Option<Self::Span> {
+        let span = literal.span;
+        let length = span.hi().to_usize() - span.lo().to_usize();
+
+        let start = match start {
+            Bound::Included(lo) => lo,
+            Bound::Excluded(lo) => lo + 1,
+            Bound::Unbounded => 0,
+        };
+
+        let end = match end {
+            Bound::Included(hi) => hi + 1,
+            Bound::Excluded(hi) => hi,
+            Bound::Unbounded => length,
+        };
+
+        // Bounds check the values, preventing addition overflow and OOB spans.
+        if start > u32::max_value() as usize
+            || end > u32::max_value() as usize
+            || (u32::max_value() - start as u32) < span.lo().to_u32()
+            || (u32::max_value() - end as u32) < span.lo().to_u32()
+            || start >= end
+            || end > length
+        {
+            return None;
+        }
+
+        let new_lo = span.lo() + BytePos::from_usize(start);
+        let new_hi = span.lo() + BytePos::from_usize(end);
+        Some(span.with_lo(new_lo).with_hi(new_hi))
+    }
+}
+
+impl server::SourceFile for Rustc<'_> {
+    fn eq(&mut self, file1: &Self::SourceFile, file2: &Self::SourceFile) -> bool {
+        Lrc::ptr_eq(file1, file2)
+    }
+    fn path(&mut self, file: &Self::SourceFile) -> String {
+        match file.name {
+            FileName::Real(ref path) => path
+                .to_str()
+                .expect("non-UTF8 file path in `proc_macro::SourceFile::path`")
+                .to_string(),
+            _ => file.name.to_string(),
+        }
+    }
+    fn is_real(&mut self, file: &Self::SourceFile) -> bool {
+        file.is_real_file()
+    }
+}
+
+impl server::MultiSpan for Rustc<'_> {
+    fn new(&mut self) -> Self::MultiSpan {
+        vec![]
+    }
+    fn push(&mut self, spans: &mut Self::MultiSpan, span: Self::Span) {
+        spans.push(span)
+    }
+}
+
+impl server::Diagnostic for Rustc<'_> {
+    fn new(&mut self, level: Level, msg: &str, spans: Self::MultiSpan) -> Self::Diagnostic {
+        let mut diag = Diagnostic::new(level.to_internal(), msg);
+        diag.set_span(MultiSpan::from_spans(spans));
+        diag
+    }
+    fn sub(
+        &mut self,
+        diag: &mut Self::Diagnostic,
+        level: Level,
+        msg: &str,
+        spans: Self::MultiSpan,
+    ) {
+        diag.sub(level.to_internal(), msg, MultiSpan::from_spans(spans), None);
+    }
+    fn emit(&mut self, diag: Self::Diagnostic) {
+        self.sess.span_diagnostic.emit_diagnostic(&diag);
+    }
+}
+
+impl server::Span for Rustc<'_> {
+    fn debug(&mut self, span: Self::Span) -> String {
+        format!("{:?} bytes({}..{})", span.ctxt(), span.lo().0, span.hi().0)
+    }
+    fn def_site(&mut self) -> Self::Span {
+        self.def_site
+    }
+    fn call_site(&mut self) -> Self::Span {
+        self.call_site
+    }
+    fn mixed_site(&mut self) -> Self::Span {
+        self.mixed_site
+    }
+    fn source_file(&mut self, span: Self::Span) -> Self::SourceFile {
+        self.sess.source_map().lookup_char_pos(span.lo()).file
+    }
+    fn parent(&mut self, span: Self::Span) -> Option<Self::Span> {
+        span.parent()
+    }
+    fn source(&mut self, span: Self::Span) -> Self::Span {
+        span.source_callsite()
+    }
+    fn start(&mut self, span: Self::Span) -> LineColumn {
+        let loc = self.sess.source_map().lookup_char_pos(span.lo());
+        LineColumn {
+            line: loc.line,
+            column: loc.col.to_usize(),
+        }
+    }
+    fn end(&mut self, span: Self::Span) -> LineColumn {
+        let loc = self.sess.source_map().lookup_char_pos(span.hi());
+        LineColumn {
+            line: loc.line,
+            column: loc.col.to_usize(),
+        }
+    }
+    fn join(&mut self, first: Self::Span, second: Self::Span) -> Option<Self::Span> {
+        let self_loc = self.sess.source_map().lookup_char_pos(first.lo());
+        let other_loc = self.sess.source_map().lookup_char_pos(second.lo());
+
+        if self_loc.file.name != other_loc.file.name {
+            return None;
+        }
+
+        Some(first.to(second))
+    }
+    fn resolved_at(&mut self, span: Self::Span, at: Self::Span) -> Self::Span {
+        span.with_ctxt(at.ctxt())
+    }
+    fn source_text(&mut self, span: Self::Span) -> Option<String> {
+        self.sess.source_map().span_to_snippet(span).ok()
+    }
+}
index 73310df305b32cb9892b5e5ce2e1327f6ac794c7..440873f3c2b2d7702ecde2e2f4d855fc0f357dc3 100644 (file)
@@ -17,4 +17,5 @@ rustc_data_structures = { path = "../librustc_data_structures" }
 rustc_target = { path = "../librustc_target" }
 smallvec = { version = "0.6.7", features = ["union", "may_dangle"] }
 syntax = { path = "../libsyntax" }
+syntax_expand = { path = "../libsyntax_expand" }
 syntax_pos = { path = "../libsyntax_pos" }
index becbf6d60a07012b3e1282e38c44eb9a88816310..8c9a34713ea604f5d2146520c1b50530eae7fbc6 100644 (file)
@@ -7,7 +7,7 @@
 use errors::DiagnosticBuilder;
 
 use syntax::ast;
-use syntax::ext::base::{self, *};
+use syntax_expand::base::{self, *};
 use syntax::parse::token::{self, Token};
 use syntax::ptr::P;
 use syntax::symbol::{kw, sym, Symbol};
index cbfe14fa439be85152b65d5885ed3131e004dc2c..f4d1f7fb09cdf1d1cef8430668c3f10c16cce528 100644 (file)
@@ -1,7 +1,7 @@
 use errors::{Applicability, DiagnosticBuilder};
 
 use syntax::ast::{self, *};
-use syntax::ext::base::*;
+use syntax_expand::base::*;
 use syntax::parse::token::{self, TokenKind};
 use syntax::parse::parser::Parser;
 use syntax::print::pprust;
index 3c33baf95a597705c44abc8f1baf21d82e746b31..9e693f29c5a3c80b9183364847a0e2c7e85ba90c 100644 (file)
@@ -5,7 +5,7 @@
 use errors::DiagnosticBuilder;
 
 use syntax::ast;
-use syntax::ext::base::{self, *};
+use syntax_expand::base::{self, *};
 use syntax::attr;
 use syntax::tokenstream::TokenStream;
 use syntax::parse::token;
index 203c4a834899b96aaa0c0c265f2a2689c9aa1fc1..2d981526a39837374c4a9b917574f64ee3b93a0c 100644 (file)
@@ -2,8 +2,9 @@
 
 use syntax::ast::{self, AttrItem, AttrStyle};
 use syntax::attr::mk_attr;
-use syntax::panictry;
-use syntax::parse::{self, token, ParseSess};
+use syntax::parse::{self, token};
+use syntax::sess::ParseSess;
+use syntax_expand::panictry;
 use syntax_pos::FileName;
 
 pub fn inject(mut krate: ast::Crate, parse_sess: &ParseSess, attrs: &[String]) -> ast::Crate {
index 24f3a66d4ae19efeb8347196a42943b0ab13f2c8..cd7f78e9e342d0b329debf93d4f445ed3695b447 100644 (file)
@@ -1,6 +1,6 @@
 // The compiler code necessary to support the compile_error! extension.
 
-use syntax::ext::base::{self, *};
+use syntax_expand::base::{self, *};
 use syntax_pos::Span;
 use syntax::tokenstream::TokenStream;
 
index 790fdad5b3f5820879aecc88a946be77f0825aeb..47bade698a80a0ef207e1ede42d8d93a5be09e58 100644 (file)
@@ -1,5 +1,5 @@
 use syntax::ast;
-use syntax::ext::base::{self, DummyResult};
+use syntax_expand::base::{self, DummyResult};
 use syntax::symbol::Symbol;
 use syntax::tokenstream::TokenStream;
 
index f6747658c070e3af4825f4500ede0811cf942b4e..a132a4136ea01b090b83743c637ad12f82d1945f 100644 (file)
@@ -1,7 +1,7 @@
 use rustc_data_structures::thin_vec::ThinVec;
 
 use syntax::ast;
-use syntax::ext::base::{self, *};
+use syntax_expand::base::{self, *};
 use syntax::parse::token::{self, Token};
 use syntax::ptr::P;
 use syntax_pos::Span;
index d5b8a00c75b834ad8862ba66c0d4a2adc99eca66..6a9b70920242f05150085c3e57d9354b0dfb6ac8 100644 (file)
@@ -3,7 +3,7 @@
 use crate::deriving::generic::ty::*;
 
 use syntax::ast::MetaItem;
-use syntax::ext::base::{Annotatable, ExtCtxt};
+use syntax_expand::base::{Annotatable, ExtCtxt};
 use syntax_pos::Span;
 
 pub fn expand_deriving_copy(cx: &mut ExtCtxt<'_>,
index 9ef2c033b0784a43baaf766d7e478684a42b115c..67ef69babdc0ac3ad99f155e9e14c3ce1c40acb9 100644 (file)
@@ -3,7 +3,7 @@
 use crate::deriving::generic::ty::*;
 
 use syntax::ast::{self, Expr, GenericArg, Generics, ItemKind, MetaItem, VariantData};
-use syntax::ext::base::{Annotatable, ExtCtxt, SpecialDerives};
+use syntax_expand::base::{Annotatable, ExtCtxt, SpecialDerives};
 use syntax::ptr::P;
 use syntax::symbol::{kw, sym, Symbol};
 use syntax_pos::Span;
@@ -174,14 +174,12 @@ fn cs_clone(name: &str,
             all_fields = af;
             vdata = &variant.data;
         }
-        EnumNonMatchingCollapsed(..) => {
-            cx.span_bug(trait_span,
-                        &format!("non-matching enum variants in \
-                                 `derive({})`",
-                                 name))
-        }
+        EnumNonMatchingCollapsed(..) => cx.span_bug(trait_span, &format!(
+            "non-matching enum variants in `derive({})`",
+            name,
+        )),
         StaticEnum(..) | StaticStruct(..) => {
-            cx.span_bug(trait_span, &format!("static method in `derive({})`", name))
+            cx.span_bug(trait_span, &format!("associated function in `derive({})`", name))
         }
     }
 
@@ -191,12 +189,10 @@ fn cs_clone(name: &str,
                 .map(|field| {
                     let ident = match field.name {
                         Some(i) => i,
-                        None => {
-                            cx.span_bug(trait_span,
-                                        &format!("unnamed field in normal struct in \
-                                                `derive({})`",
-                                                    name))
-                        }
+                        None => cx.span_bug(trait_span, &format!(
+                            "unnamed field in normal struct in `derive({})`",
+                            name,
+                        )),
                     };
                     let call = subcall(cx, field);
                     cx.field_imm(field.span, ident, call)
index c92339dd2fbd5c2141900258d1340a013a063ff4..92721dab8782a46b2dd7db4fdbdd82a9ae534966 100644 (file)
@@ -3,7 +3,7 @@
 use crate::deriving::generic::ty::*;
 
 use syntax::ast::{self, Ident, Expr, MetaItem, GenericArg};
-use syntax::ext::base::{Annotatable, ExtCtxt, SpecialDerives};
+use syntax_expand::base::{Annotatable, ExtCtxt, SpecialDerives};
 use syntax::ptr::P;
 use syntax::symbol::{sym, Symbol};
 use syntax_pos::Span;
index 1f4f5aa37099fb364ee7b694b180490590765b0d..3eeed95aff75d3839ae47b19c8509c3e8f6f37bd 100644 (file)
@@ -3,7 +3,7 @@
 use crate::deriving::generic::ty::*;
 
 use syntax::ast::{self, Expr, MetaItem};
-use syntax::ext::base::{Annotatable, ExtCtxt};
+use syntax_expand::base::{Annotatable, ExtCtxt};
 use syntax::ptr::P;
 use syntax::symbol::sym;
 use syntax_pos::Span;
index 91e1e80e4fbfa1347da4ee6a009eefbd76f66f18..1615d991792810a45bab862b4b8cc6a6bf3b2988 100644 (file)
@@ -3,7 +3,7 @@
 use crate::deriving::generic::ty::*;
 
 use syntax::ast::{BinOpKind, Expr, MetaItem};
-use syntax::ext::base::{Annotatable, ExtCtxt, SpecialDerives};
+use syntax_expand::base::{Annotatable, ExtCtxt, SpecialDerives};
 use syntax::ptr::P;
 use syntax::symbol::sym;
 use syntax_pos::Span;
index 13d63aaf2a80c987d50cd7397af34e18dfb2bb18..af8aacc6eb9bd9a1971a6b106e3ee0390133296d 100644 (file)
@@ -5,7 +5,7 @@
 use crate::deriving::generic::ty::*;
 
 use syntax::ast::{self, BinOpKind, Expr, MetaItem};
-use syntax::ext::base::{Annotatable, ExtCtxt};
+use syntax_expand::base::{Annotatable, ExtCtxt};
 use syntax::ptr::P;
 use syntax::symbol::{sym, Symbol};
 use syntax_pos::Span;
index 003c2423576eb5f092dc50a07847c2a37b2f5487..35298211e4d31ce456941654415d49a63a225085 100644 (file)
@@ -6,7 +6,7 @@
 
 use syntax::ast::{self, Ident};
 use syntax::ast::{Expr, MetaItem};
-use syntax::ext::base::{Annotatable, ExtCtxt};
+use syntax_expand::base::{Annotatable, ExtCtxt};
 use syntax::ptr::P;
 use syntax::symbol::sym;
 use syntax_pos::{DUMMY_SP, Span};
index cde72abbdef6a7060c11e9f008452b2a81fb7710..3a0379a0eb02bf35ae8fb4a5e09ff9cb62e59952 100644 (file)
@@ -6,7 +6,7 @@
 
 use syntax::ast;
 use syntax::ast::{Expr, MetaItem, Mutability};
-use syntax::ext::base::{Annotatable, ExtCtxt};
+use syntax_expand::base::{Annotatable, ExtCtxt};
 use syntax::ptr::P;
 use syntax::symbol::Symbol;
 use syntax_pos::Span;
index 2fdea10b76f5170a4c3ea84b17acd93f9c3b664f..cfc0f3cd6cbf9afdfaccee628f9a0661b4e0bdf4 100644 (file)
@@ -3,7 +3,7 @@
 use crate::deriving::generic::ty::*;
 
 use syntax::ast::{Expr, MetaItem};
-use syntax::ext::base::{Annotatable, DummyResult, ExtCtxt};
+use syntax_expand::base::{Annotatable, DummyResult, ExtCtxt};
 use syntax::ptr::P;
 use syntax::symbol::{kw, sym};
 use syntax::span_err;
@@ -75,6 +75,6 @@ fn default_substructure(cx: &mut ExtCtxt<'_>,
             // let compilation continue
             DummyResult::raw_expr(trait_span, true)
         }
-        _ => cx.span_bug(trait_span, "Non-static method in `derive(Default)`"),
+        _ => cx.span_bug(trait_span, "method in `derive(Default)`"),
     };
 }
index 655d3bb7c4ab8e3866b694aa7f9153d4e43cc941..2105946b666b231ce2928c1b5b3525507d11155e 100644 (file)
@@ -90,7 +90,7 @@
 use crate::deriving::generic::ty::*;
 
 use syntax::ast::{Expr, ExprKind, MetaItem, Mutability};
-use syntax::ext::base::{Annotatable, ExtCtxt};
+use syntax_expand::base::{Annotatable, ExtCtxt};
 use syntax::ptr::P;
 use syntax::symbol::Symbol;
 use syntax_pos::Span;
index 9f75f72e820f25c8b84d58b634ea3245a3117a66..216338c1a8861576790caf024eae8d72886ff7e8 100644 (file)
 use syntax::ast::{self, BinOpKind, EnumDef, Expr, Generics, Ident, PatKind};
 use syntax::ast::{VariantData, GenericParamKind, GenericArg};
 use syntax::attr;
-use syntax::ext::base::{Annotatable, ExtCtxt, SpecialDerives};
 use syntax::source_map::respan;
 use syntax::util::map_in_place::MapInPlace;
 use syntax::ptr::P;
+use syntax::sess::ParseSess;
 use syntax::symbol::{Symbol, kw, sym};
-use syntax::parse::ParseSess;
-use syntax_pos::{DUMMY_SP, Span};
+use syntax_expand::base::{Annotatable, ExtCtxt, SpecialDerives};
+use syntax_pos::{Span};
 
 use ty::{LifetimeBounds, Path, Ptr, PtrTy, Self_, Ty};
 
@@ -1022,7 +1022,7 @@ fn expand_struct_method_body<'b>(&self,
                                  // [fields of next Self arg], [etc]>
         let mut patterns = Vec::new();
         for i in 0..self_args.len() {
-            let struct_path = cx.path(DUMMY_SP, vec![type_ident]);
+            let struct_path = cx.path(trait_.span, vec![type_ident]);
             let (pat, ident_expr) = trait_.create_struct_pattern(cx,
                                                                  struct_path,
                                                                  struct_def,
@@ -1055,9 +1055,7 @@ fn expand_struct_method_body<'b>(&self,
                 })
                 .collect()
         } else {
-            cx.span_bug(trait_.span,
-                        "no self arguments to non-static method in generic \
-                         `derive`")
+            cx.span_bug(trait_.span, "no `self` parameter for method in generic `derive`")
         };
 
         // body of the inner most destructuring match
index 6ae02a5cab199e8cc857370c6d5d4dd8a527d292..607746597a592abcb79ba1a0d9bad64f6d0c1123 100644 (file)
@@ -5,7 +5,7 @@
 pub use Ty::*;
 
 use syntax::ast::{self, Expr, GenericParamKind, Generics, Ident, SelfKind, GenericArg};
-use syntax::ext::base::ExtCtxt;
+use syntax_expand::base::ExtCtxt;
 use syntax::source_map::{respan, DUMMY_SP};
 use syntax::ptr::P;
 use syntax_pos::Span;
index 2fc594abd705e38290571dbc5a4a3b1fb6e27b21..fe9ef78bb1b004adca41c03329c153f18671f07b 100644 (file)
@@ -3,7 +3,7 @@
 use crate::deriving::generic::ty::*;
 
 use syntax::ast::{Expr, MetaItem, Mutability};
-use syntax::ext::base::{Annotatable, ExtCtxt};
+use syntax_expand::base::{Annotatable, ExtCtxt};
 use syntax::ptr::P;
 use syntax::symbol::sym;
 use syntax_pos::Span;
index 60b6eba7a4b53d54baa4a6505c600cddf69674e7..f0471a857dc93cacecfcd7a96928ad6580277609 100644 (file)
@@ -1,7 +1,7 @@
 //! The compiler code necessary to implement the `#[derive]` extensions.
 
 use syntax::ast::{self, MetaItem};
-use syntax::ext::base::{Annotatable, ExtCtxt, MultiItemModifier};
+use syntax_expand::base::{Annotatable, ExtCtxt, MultiItemModifier};
 use syntax::ptr::P;
 use syntax::symbol::{Symbol, sym};
 use syntax_pos::Span;
index 02757bf6b1689f730aa63aaea8a747f2870093a0..58fe56bd2353a19419acffcb210b940b8400094c 100644 (file)
@@ -4,7 +4,7 @@
 //
 
 use syntax::ast::{self, Ident, GenericArg};
-use syntax::ext::base::{self, *};
+use syntax_expand::base::{self, *};
 use syntax::symbol::{kw, sym, Symbol};
 use syntax_pos::Span;
 use syntax::tokenstream::TokenStream;
index 8fc64021b51fcc50af8f0cfda4a62bcd9768ede5..45d9f79c28fc32d2aa704e101a7f75f78dc47a42 100644 (file)
@@ -8,7 +8,7 @@
 use errors::pluralise;
 
 use syntax::ast;
-use syntax::ext::base::{self, *};
+use syntax_expand::base::{self, *};
 use syntax::parse::token;
 use syntax::ptr::P;
 use syntax::symbol::{Symbol, sym};
index cd2a9b61a76df395505dc7004f95e12cd08aac0b..75dda9535b333c509e3808c7c8eed18e3482f745 100644 (file)
@@ -1,8 +1,9 @@
+use crate::util::check_builtin_macro_attribute;
+
 use syntax::ast::{ItemKind, Mutability, Stmt, Ty, TyKind, Unsafety};
 use syntax::ast::{self, Param, Attribute, Expr, FnHeader, Generics, Ident};
-use syntax::attr::check_builtin_macro_attribute;
-use syntax::ext::allocator::{AllocatorKind, AllocatorMethod, AllocatorTy, ALLOCATOR_METHODS};
-use syntax::ext::base::{Annotatable, ExtCtxt};
+use syntax_expand::allocator::{AllocatorKind, AllocatorMethod, AllocatorTy, ALLOCATOR_METHODS};
+use syntax_expand::base::{Annotatable, ExtCtxt};
 use syntax::ptr::P;
 use syntax::symbol::{kw, sym, Symbol};
 use syntax_pos::Span;
index 72fb5b47c215401af843ceed2bbe527698082e39..879ae1e42153da497bd250c099ca2b8b1bfc642d 100644 (file)
@@ -12,7 +12,7 @@
 
 use syntax::ast;
 use syntax::source_map::respan;
-use syntax::ext::base::{self, *};
+use syntax_expand::base::{self, *};
 use syntax::parse::token;
 use syntax::ptr::P;
 use syntax_pos::Span;
index 631ab7a33106f2aa63ab765d78ab286ecab7545f..5516f276422e9144bd335b69b0c53a36ebb920f5 100644 (file)
@@ -5,7 +5,6 @@
 
 #![feature(crate_visibility_modifier)]
 #![feature(decl_macro)]
-#![feature(mem_take)]
 #![feature(nll)]
 #![feature(proc_macro_internals)]
 #![feature(proc_macro_quote)]
@@ -16,9 +15,9 @@
 
 use syntax::ast::Ident;
 use syntax::edition::Edition;
-use syntax::ext::base::{SyntaxExtension, SyntaxExtensionKind, MacroExpanderFn};
-use syntax::ext::proc_macro::BangProcMacro;
 use syntax::symbol::sym;
+use syntax_expand::base::{Resolver, SyntaxExtension, SyntaxExtensionKind, MacroExpanderFn};
+use syntax_expand::proc_macro::BangProcMacro;
 
 mod error_codes;
 
@@ -38,6 +37,7 @@
 mod source_util;
 mod test;
 mod trace_macros;
+mod util;
 
 pub mod cmdline_attrs;
 pub mod plugin_macro_defs;
@@ -45,7 +45,7 @@
 pub mod standard_library_imports;
 pub mod test_harness;
 
-pub fn register_builtin_macros(resolver: &mut dyn syntax::ext::base::Resolver, edition: Edition) {
+pub fn register_builtin_macros(resolver: &mut dyn Resolver, edition: Edition) {
     let mut register = |name, kind| resolver.register_builtin_macro(
         Ident::with_dummy_span(name), SyntaxExtension {
             is_builtin: true, ..SyntaxExtension::default(kind, edition)
index 92130bfaf68e35f6ae359e26e0f4635af3ec2767..2202375e5e780436d1697aea50284086dc6dcebb 100644 (file)
@@ -1,4 +1,4 @@
-use syntax::ext::base;
+use syntax_expand::base;
 use syntax::print;
 use syntax::tokenstream::TokenStream;
 use syntax_pos;
index 315babceae32ce5a9844bac4183738d09f48cbb9..1ca9422eb9dcc364f47d5cd605be55b5ca8787a1 100644 (file)
@@ -4,7 +4,7 @@
 use syntax::ast::*;
 use syntax::attr;
 use syntax::edition::Edition;
-use syntax::ext::base::{Resolver, NamedSyntaxExtension};
+use syntax_expand::base::{Resolver, NamedSyntaxExtension};
 use syntax::parse::token;
 use syntax::ptr::P;
 use syntax::source_map::respan;
@@ -20,7 +20,7 @@ fn plugin_macro_def(name: Name, span: Span) -> P<Item> {
         attr::mk_word_item(Ident::new(sym::rustc_builtin_macro, span)));
 
     let parens: TreeAndJoint = TokenTree::Delimited(
-        DelimSpan::from_single(span), token::Paren, TokenStream::empty()
+        DelimSpan::from_single(span), token::Paren, TokenStream::default()
     ).into();
     let trees = vec![parens.clone(), TokenTree::token(token::FatArrow, span).into(), parens];
 
index 9b53bcb841c67893b384dab42a85cc6b5ebd2252..96d0c3fcab1c5cfd0ce6d89113fbd05f2938b43e 100644 (file)
@@ -3,13 +3,14 @@
 use smallvec::smallvec;
 use syntax::ast::{self, Ident};
 use syntax::attr;
-use syntax::ext::base::ExtCtxt;
-use syntax::ext::expand::{AstFragment, ExpansionConfig};
-use syntax::ext::proc_macro::is_proc_macro_attr;
-use syntax::parse::ParseSess;
+use syntax::print::pprust;
 use syntax::ptr::P;
+use syntax::sess::ParseSess;
 use syntax::symbol::{kw, sym};
 use syntax::visit::{self, Visitor};
+use syntax_expand::base::{ExtCtxt, Resolver};
+use syntax_expand::expand::{AstFragment, ExpansionConfig};
+use syntax_expand::proc_macro::is_proc_macro_attr;
 use syntax_pos::{Span, DUMMY_SP};
 use syntax_pos::hygiene::AstPass;
 
@@ -45,7 +46,7 @@ struct CollectProcMacros<'a> {
 }
 
 pub fn inject(sess: &ParseSess,
-              resolver: &mut dyn (::syntax::ext::base::Resolver),
+              resolver: &mut dyn Resolver,
               mut krate: ast::Crate,
               is_proc_macro_crate: bool,
               has_proc_macro_decls: bool,
@@ -248,13 +249,20 @@ fn visit_item(&mut self, item: &'a ast::Item) {
         for attr in &item.attrs {
             if is_proc_macro_attr(&attr) {
                 if let Some(prev_attr) = found_attr {
+                    let path_str = pprust::path_to_string(&attr.path);
                     let msg = if attr.path.segments[0].ident.name ==
                                  prev_attr.path.segments[0].ident.name {
-                        format!("only one `#[{}]` attribute is allowed on any given function",
-                                attr.path)
+                        format!(
+                            "only one `#[{}]` attribute is allowed on any given function",
+                            path_str,
+                        )
                     } else {
-                        format!("`#[{}]` and `#[{}]` attributes cannot both be applied \
-                                to the same function", attr.path, prev_attr.path)
+                        format!(
+                            "`#[{}]` and `#[{}]` attributes cannot both be applied \
+                            to the same function",
+                            path_str,
+                            pprust::path_to_string(&prev_attr.path),
+                        )
                     };
 
                     self.handler.struct_span_err(attr.span, &msg)
@@ -280,8 +288,10 @@ fn visit_item(&mut self, item: &'a ast::Item) {
         };
 
         if !is_fn {
-            let msg = format!("the `#[{}]` attribute may only be used on bare functions",
-                              attr.path);
+            let msg = format!(
+                "the `#[{}]` attribute may only be used on bare functions",
+                pprust::path_to_string(&attr.path),
+            );
 
             self.handler.span_err(attr.span, &msg);
             return;
@@ -292,8 +302,10 @@ fn visit_item(&mut self, item: &'a ast::Item) {
         }
 
         if !self.is_proc_macro_crate {
-            let msg = format!("the `#[{}]` attribute is only usable with crates of the \
-                              `proc-macro` crate type", attr.path);
+            let msg = format!(
+                "the `#[{}]` attribute is only usable with crates of the `proc-macro` crate type",
+                pprust::path_to_string(&attr.path),
+            );
 
             self.handler.span_err(attr.span, &msg);
             return;
index 9dc9d66b86f1d31f8f11e429f404d6da397b2b35..438e199ebdb8f6574cf67d76918601a3ad4a2174 100644 (file)
@@ -1,10 +1,12 @@
-use syntax::{ast, panictry};
-use syntax::ext::base::{self, *};
+use syntax_expand::panictry;
+use syntax_expand::base::{self, *};
+use syntax::ast;
 use syntax::parse::{self, token, DirectoryOwnership};
 use syntax::print::pprust;
 use syntax::ptr::P;
 use syntax::symbol::Symbol;
 use syntax::tokenstream::TokenStream;
+use syntax::early_buffered_lints::BufferedEarlyLintId;
 
 use smallvec::SmallVec;
 use syntax_pos::{self, Pos, Span};
@@ -83,7 +85,16 @@ struct ExpandResult<'a> {
     }
     impl<'a> base::MacResult for ExpandResult<'a> {
         fn make_expr(mut self: Box<ExpandResult<'a>>) -> Option<P<ast::Expr>> {
-            Some(panictry!(self.p.parse_expr()))
+            let r = panictry!(self.p.parse_expr());
+            if self.p.token != token::Eof {
+                self.p.sess.buffer_lint(
+                    BufferedEarlyLintId::IncompleteInclude,
+                    self.p.token.span,
+                    ast::CRATE_NODE_ID,
+                    "include macro expected single expression in source",
+                );
+            }
+            Some(r)
         }
 
         fn make_items(mut self: Box<ExpandResult<'a>>) -> Option<SmallVec<[P<ast::Item>; 1]>> {
index c577b1e33cfebab0f18d8fc90d33d569bcaa051f..fd27a218906989e0fba5ef20eadab939696a803b 100644 (file)
@@ -1,11 +1,11 @@
 use syntax::{ast, attr};
 use syntax::edition::Edition;
-use syntax::ext::expand::ExpansionConfig;
-use syntax::ext::hygiene::AstPass;
-use syntax::ext::base::{ExtCtxt, Resolver};
-use syntax::parse::ParseSess;
 use syntax::ptr::P;
+use syntax::sess::ParseSess;
 use syntax::symbol::{Ident, Symbol, kw, sym};
+use syntax_expand::expand::ExpansionConfig;
+use syntax_expand::hygiene::AstPass;
+use syntax_expand::base::{ExtCtxt, Resolver};
 use syntax_pos::DUMMY_SP;
 
 pub fn inject(
index 5d68a92579f96cf7c0af3074ad682955a32943a0..b0da413d63a049f73f833f749f9357b7c516585f 100644 (file)
@@ -1,9 +1,11 @@
 /// The expansion from a test function to the appropriate test struct for libtest
 /// Ideally, this code would be in libtest but for efficiency and error messages it lives here.
 
+use crate::util::check_builtin_macro_attribute;
+
 use syntax::ast;
-use syntax::attr::{self, check_builtin_macro_attribute};
-use syntax::ext::base::*;
+use syntax::attr;
+use syntax_expand::base::*;
 use syntax::print::pprust;
 use syntax::source_map::respan;
 use syntax::symbol::{Symbol, sym};
@@ -106,6 +108,11 @@ pub fn expand_test_or_bench(
         cx.path(sp, vec![test_id, cx.ident_of("ShouldPanic", sp), cx.ident_of(name, sp)])
     };
 
+    // creates test::TestType::$name
+    let test_type_path = |name| {
+        cx.path(sp, vec![test_id, cx.ident_of("TestType", sp), cx.ident_of(name, sp)])
+    };
+
     // creates $name: $expr
     let field = |name, expr| cx.field_imm(sp, cx.ident_of(name, sp), expr);
 
@@ -181,6 +188,17 @@ pub fn expand_test_or_bench(
                             cx.expr_path(should_panic_path("YesWithMessage")),
                             vec![cx.expr_str(sp, sym)]),
                     }),
+                    // test_type: ...
+                    field("test_type", match test_type(cx) {
+                        // test::TestType::UnitTest
+                        TestType::UnitTest => cx.expr_path(test_type_path("UnitTest")),
+                        // test::TestType::IntegrationTest
+                        TestType::IntegrationTest => cx.expr_path(
+                            test_type_path("IntegrationTest")
+                        ),
+                        // test::TestType::Unknown
+                        TestType::Unknown => cx.expr_path(test_type_path("Unknown")),
+                    }),
                 // },
                 ])),
                 // testfn: test::StaticTestFn(...) | test::StaticBenchFn(...)
@@ -261,6 +279,34 @@ fn should_panic(cx: &ExtCtxt<'_>, i: &ast::Item) -> ShouldPanic {
     }
 }
 
+enum TestType {
+    UnitTest,
+    IntegrationTest,
+    Unknown,
+}
+
+/// Attempts to determine the type of test.
+/// Since doctests are created without macro expansion, the only possible variants here
+/// are `UnitTest`, `IntegrationTest` or `Unknown`.
+fn test_type(cx: &ExtCtxt<'_>) -> TestType {
+    // The root path from the context is the topmost source directory of the crate.
+    // E.g., for a `project` crate with sources in `src` and tests in `tests`
+    // (no matter how deeply nested the files are),
+    // there are two different root paths: `/project/src` and `/project/tests`.
+    let crate_path = cx.root_path.as_path();
+
+    if crate_path.ends_with("src") {
+        // `/src` folder contains unit-tests.
+        TestType::UnitTest
+    } else if crate_path.ends_with("tests") {
+        // `/tests` folder contains integration tests.
+        TestType::IntegrationTest
+    } else {
+        // The crate layout doesn't match the expected one, so the test type is unknown.
+        TestType::Unknown
+    }
+}
+
 fn has_test_signature(cx: &ExtCtxt<'_>, i: &ast::Item) -> bool {
     let has_should_panic_attr = attr::contains_name(&i.attrs, sym::should_panic);
     let ref sd = cx.parse_sess.span_diagnostic;
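The `test_type` heuristic added above classifies a test by the crate's root source directory taken from the expansion context. It relies on `Path::ends_with`, which matches whole trailing path components rather than raw string suffixes; a small sketch of the behavior the heuristic depends on (paths are illustrative):

use std::path::Path;

fn main() {
    // Matches whole components, so "/project/src" ends with "src"...
    assert!(Path::new("/project/src").ends_with("src"));
    assert!(Path::new("/project/tests").ends_with("tests"));
    // ...but "/project/mysrc" does not, so such layouts fall back to Unknown.
    assert!(!Path::new("/project/mysrc").ends_with("src"));
}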
index f79ad1419e0b1079764c89053ef2c563000563d7..33d41a7f53e5e25fbb5f152b691887246c012fc8 100644 (file)
@@ -6,12 +6,12 @@
 use syntax::ast::{self, Ident};
 use syntax::attr;
 use syntax::entry::{self, EntryPointType};
-use syntax::ext::base::{ExtCtxt, Resolver};
-use syntax::ext::expand::{AstFragment, ExpansionConfig};
+use syntax_expand::base::{ExtCtxt, Resolver};
+use syntax_expand::expand::{AstFragment, ExpansionConfig};
 use syntax::feature_gate::Features;
 use syntax::mut_visit::{*, ExpectOne};
-use syntax::parse::ParseSess;
 use syntax::ptr::P;
+use syntax::sess::ParseSess;
 use syntax::source_map::respan;
 use syntax::symbol::{sym, Symbol};
 use syntax_pos::{Span, DUMMY_SP};
index d83c24046d9e5f08b0480df8c6d8cc8f1f0d75e3..dbf96d3b561a95ee6493a2d4b7eb468c95e90ef3 100644 (file)
@@ -1,4 +1,4 @@
-use syntax::ext::base::{self, ExtCtxt};
+use syntax_expand::base::{self, ExtCtxt};
 use syntax::symbol::kw;
 use syntax_pos::Span;
 use syntax::tokenstream::{TokenTree, TokenStream};
diff --git a/src/libsyntax_ext/util.rs b/src/libsyntax_ext/util.rs
new file mode 100644 (file)
index 0000000..d84fe19
--- /dev/null
@@ -0,0 +1,11 @@
+use syntax_pos::Symbol;
+use syntax::ast::MetaItem;
+use syntax::attr::{check_builtin_attribute, AttributeTemplate};
+use syntax_expand::base::ExtCtxt;
+
+pub fn check_builtin_macro_attribute(ecx: &ExtCtxt<'_>, meta_item: &MetaItem, name: Symbol) {
+    // All the built-in macro attributes are "words" at the moment.
+    let template = AttributeTemplate::only_word();
+    let attr = ecx.attribute(meta_item.clone());
+    check_builtin_attribute(ecx.parse_sess, &attr, name, template);
+}
index 30ee9b90515ee4dd7e075255b8f7a54450886999..7f7c5cb2e454865aa188616c11958410f98fe3b9 100644 (file)
@@ -12,7 +12,7 @@
 #![feature(non_exhaustive)]
 #![feature(optin_builtin_traits)]
 #![feature(rustc_attrs)]
-#![feature(proc_macro_hygiene)]
+#![cfg_attr(bootstrap, feature(proc_macro_hygiene))]
 #![feature(specialization)]
 #![feature(step_trait)]
 
index 82c47e6dbb75879ec01f1537ccc26dcefcc994c8..fa9567fb62c0f8487f426e5a1a693eb41706f317 100644 (file)
         console,
         const_compare_raw_pointers,
         const_constructor,
+        const_extern_fn,
         const_fn,
         const_fn_union,
         const_generics,
         link_cfg,
         link_llvm_intrinsics,
         link_name,
+        link_ordinal,
         link_section,
         LintPass,
         lint_reasons,
         RangeInclusive,
         RangeTo,
         RangeToInclusive,
+        raw_dylib,
         raw_identifiers,
         Ready,
         reason,
         suggestion,
         target_feature,
         target_has_atomic,
+        target_has_atomic_load_store,
         target_thread_local,
         task,
         tbm_target_feature,
         tool_attributes,
         tool_lints,
         trace_macros,
+        track_caller,
         trait_alias,
         transmute,
         transparent,
@@ -803,25 +808,13 @@ pub fn modern_and_legacy(self) -> Ident {
         Ident::new(self.name, self.span.modern_and_legacy())
     }
 
-    /// Transforms an underscore identifier into one with the same name, but
-    /// gensymed. Leaves non-underscore identifiers unchanged.
-    pub fn gensym_if_underscore(self) -> Ident {
-        if self.name == kw::Underscore {
-            let name = with_interner(|interner| interner.gensymed(self.name));
-            Ident::new(name, self.span)
-        } else {
-            self
-        }
-    }
-
     /// Convert the name to a `LocalInternedString`. This is a slowish
     /// operation because it requires locking the symbol interner.
     pub fn as_str(self) -> LocalInternedString {
         self.name.as_str()
     }
 
-    /// Convert the name to an `InternedString`. This is a slowish operation
-    /// because it requires locking the symbol interner.
+    /// Convert the name to an `InternedString`.
     pub fn as_interned_str(self) -> InternedString {
         self.name.as_interned_str()
     }
@@ -876,26 +869,9 @@ fn default_decode<D: Decoder>(d: &mut D) -> Result<Self, D::Error> {
     }
 }
 
-/// A symbol is an interned or gensymed string. A gensym is a symbol that is
-/// never equal to any other symbol.
+/// An interned string.
 ///
-/// Conceptually, a gensym can be thought of as a normal symbol with an
-/// invisible unique suffix. Gensyms are useful when creating new identifiers
-/// that must not match any existing identifiers, e.g. during macro expansion
-/// and syntax desugaring. Because gensyms should always be identifiers, all
-/// gensym operations are on `Ident` rather than `Symbol`. (Indeed, in the
-/// future the gensym-ness may be moved from `Symbol` to hygiene data.)
-///
-/// Examples:
-/// ```
-/// assert_eq!(Ident::from_str("_"), Ident::from_str("_"))
-/// assert_ne!(Ident::from_str("_").gensym_if_underscore(), Ident::from_str("_"))
-/// assert_ne!(
-///     Ident::from_str("_").gensym_if_underscore(),
-///     Ident::from_str("_").gensym_if_underscore(),
-/// )
-/// ```
-/// Internally, a symbol is implemented as an index, and all operations
+/// Internally, a `Symbol` is implemented as an index, and all operations
 /// (including hashing, equality, and ordering) operate on that index. The use
 /// of `rustc_index::newtype_index!` means that `Option<Symbol>` only takes up 4 bytes,
 /// because `rustc_index::newtype_index!` reserves the last 256 values for tagging purposes.
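The doc comment above relies on a niche optimization: because the index newtype never uses some of its bit patterns, `Option<Symbol>` needs no separate discriminant. A standalone sketch of the same effect using `NonZeroU32` (an illustrative stand-in; the compiler uses `rustc_index::newtype_index!`):

use std::mem::size_of;
use std::num::NonZeroU32;

// A symbol-like newtype whose index is never zero, leaving a niche that
// `Option` can use for its `None` tag instead of an extra discriminant.
#[allow(dead_code)]
struct Sym(NonZeroU32);

fn main() {
    assert_eq!(size_of::<Sym>(), 4);
    assert_eq!(size_of::<Option<Sym>>(), 4); // same size, thanks to the niche
}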
@@ -946,12 +922,9 @@ pub fn as_str(self) -> LocalInternedString {
         })
     }
 
-    /// Convert to an `InternedString`. This is a slowish operation because it
-    /// requires locking the symbol interner.
+    /// Convert to an `InternedString`.
     pub fn as_interned_str(self) -> InternedString {
-        with_interner(|interner| InternedString {
-            symbol: interner.interned(self)
-        })
+        InternedString { symbol: self }
     }
 
     pub fn as_u32(self) -> u32 {
@@ -961,24 +934,19 @@ pub fn as_u32(self) -> u32 {
 
 impl fmt::Debug for Symbol {
     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
-        let is_gensymed = with_interner(|interner| interner.is_gensymed(*self));
-        if is_gensymed {
-            write!(f, "{}({:?})", self, self.0)
-        } else {
-            write!(f, "{}", self)
-        }
+        self.with(|str| fmt::Debug::fmt(&str, f))
     }
 }
 
 impl fmt::Display for Symbol {
     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
-        fmt::Display::fmt(&self.as_str(), f)
+        self.with(|str| fmt::Display::fmt(&str, f))
     }
 }
 
 impl Encodable for Symbol {
     fn encode<S: Encoder>(&self, s: &mut S) -> Result<(), S::Error> {
-        s.emit_str(&self.as_str())
+        self.with(|string| s.emit_str(string))
     }
 }
 
@@ -989,15 +957,11 @@ fn decode<D: Decoder>(d: &mut D) -> Result<Symbol, D::Error> {
 }
 
 // The `&'static str`s in this type actually point into the arena.
-//
-// Note that normal symbols are indexed upward from 0, and gensyms are indexed
-// downward from SymbolIndex::MAX_AS_U32.
 #[derive(Default)]
 pub struct Interner {
     arena: DroplessArena,
     names: FxHashMap<&'static str, Symbol>,
     strings: Vec<&'static str>,
-    gensyms: Vec<Symbol>,
 }
 
 impl Interner {
@@ -1030,34 +994,10 @@ pub fn intern(&mut self, string: &str) -> Symbol {
         self.names.insert(string, name);
         name
     }
-
-    fn interned(&self, symbol: Symbol) -> Symbol {
-        if (symbol.0.as_usize()) < self.strings.len() {
-            symbol
-        } else {
-            self.gensyms[(SymbolIndex::MAX_AS_U32 - symbol.0.as_u32()) as usize]
-        }
-    }
-
-    fn gensymed(&mut self, symbol: Symbol) -> Symbol {
-        self.gensyms.push(symbol);
-        Symbol::new(SymbolIndex::MAX_AS_U32 - self.gensyms.len() as u32 + 1)
-    }
-
-    fn is_gensymed(&mut self, symbol: Symbol) -> bool {
-        symbol.0.as_usize() >= self.strings.len()
-    }
-
     // Get the symbol as a string. `Symbol::as_str()` should be used in
     // preference to this function.
     pub fn get(&self, symbol: Symbol) -> &str {
-        match self.strings.get(symbol.0.as_usize()) {
-            Some(string) => string,
-            None => {
-                let symbol = self.gensyms[(SymbolIndex::MAX_AS_U32 - symbol.0.as_u32()) as usize];
-                self.strings[symbol.0.as_usize()]
-            }
-        }
+        self.strings[symbol.0.as_usize()]
     }
 }
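With gensyms removed by this commit, the interner does plain string interning: a symbol is just an index into a table of strings, so comparison and hashing stay index-based and cheap. A minimal sketch of that idea (illustrative only, not the compiler's arena-backed implementation):

use std::collections::HashMap;

#[derive(Clone, Copy, PartialEq, Eq, Debug)]
struct Sym(u32);

#[derive(Default)]
struct Interner {
    names: HashMap<String, Sym>,
    strings: Vec<String>,
}

impl Interner {
    fn intern(&mut self, s: &str) -> Sym {
        if let Some(&sym) = self.names.get(s) {
            return sym;
        }
        let sym = Sym(self.strings.len() as u32);
        self.strings.push(s.to_string());
        self.names.insert(s.to_string(), sym);
        sym
    }

    fn get(&self, sym: Sym) -> &str {
        &self.strings[sym.0 as usize]
    }
}

fn main() {
    let mut i = Interner::default();
    let dog = i.intern("dog");
    assert_eq!(i.intern("dog"), dog); // same string, same symbol
    assert_eq!(i.get(dog), "dog");
}

Interning the same string twice returns the same symbol, which is exactly the property the interner tests further down continue to check.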
 
@@ -1166,8 +1106,8 @@ fn with_interner<T, F: FnOnce(&mut Interner) -> T>(f: F) -> T {
 }
 
 /// An alternative to `Symbol` and `InternedString`, useful when the chars
-/// within the symbol need to be accessed. It is best used for temporary
-/// values.
+/// within the symbol need to be accessed. It deliberately has limited
+/// functionality and should only be used for temporary values.
 ///
 /// Because the interner outlives any thread which uses this type, we can
 /// safely treat `string` which points to interner data, as an immortal string,
@@ -1176,7 +1116,7 @@ fn with_interner<T, F: FnOnce(&mut Interner) -> T>(f: F) -> T {
 // FIXME: ensure that the interner outlives any thread which uses
 // `LocalInternedString`, by creating a new thread right after constructing the
 // interner.
-#[derive(Clone, Copy, Eq, PartialOrd, Ord)]
+#[derive(Eq, PartialOrd, Ord)]
 pub struct LocalInternedString {
     string: &'static str,
 }
@@ -1197,30 +1137,6 @@ fn eq(&self, other: &T) -> bool {
     }
 }
 
-impl std::cmp::PartialEq<LocalInternedString> for str {
-    fn eq(&self, other: &LocalInternedString) -> bool {
-        self == other.string
-    }
-}
-
-impl<'a> std::cmp::PartialEq<LocalInternedString> for &'a str {
-    fn eq(&self, other: &LocalInternedString) -> bool {
-        *self == other.string
-    }
-}
-
-impl std::cmp::PartialEq<LocalInternedString> for String {
-    fn eq(&self, other: &LocalInternedString) -> bool {
-        self == other.string
-    }
-}
-
-impl<'a> std::cmp::PartialEq<LocalInternedString> for &'a String {
-    fn eq(&self, other: &LocalInternedString) -> bool {
-        *self == other.string
-    }
-}
-
 impl !Send for LocalInternedString {}
 impl !Sync for LocalInternedString {}
 
@@ -1242,19 +1158,12 @@ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
     }
 }
 
-/// An alternative to `Symbol` that is focused on string contents. It has two
-/// main differences to `Symbol`.
+/// An alternative to `Symbol` that is focused on string contents.
 ///
-/// First, its implementations of `Hash`, `PartialOrd` and `Ord` work with the
+/// Its implementations of `Hash`, `PartialOrd` and `Ord` work with the
 /// string chars rather than the symbol integer. This is useful when hash
 /// stability is required across compile sessions, or a guaranteed sort
 /// ordering is required.
-///
-/// Second, gensym-ness is irrelevant. E.g.:
-/// ```
-/// assert_ne!(Symbol::gensym("x"), Symbol::gensym("x"))
-/// assert_eq!(Symbol::gensym("x").as_interned_str(), Symbol::gensym("x").as_interned_str())
-/// ```
 #[derive(Clone, Copy, PartialEq, Eq)]
 pub struct InternedString {
     symbol: Symbol,
index 1b91c9bb845a4a660e081dd977fe5730057b7ed0..f74b9a0cd1d1df0213739c04d1d389c9825ebe71 100644 (file)
@@ -14,13 +14,6 @@ fn interner_tests() {
     assert_eq!(i.intern("cat"), Symbol::new(1));
     // dog is still at zero
     assert_eq!(i.intern("dog"), Symbol::new(0));
-    let z = i.intern("zebra");
-    assert_eq!(i.gensymed(z), Symbol::new(SymbolIndex::MAX_AS_U32));
-    // gensym of same string gets new number:
-    assert_eq!(i.gensymed(z), Symbol::new(SymbolIndex::MAX_AS_U32 - 1));
-    // gensym of *existing* string gets new number:
-    let d = i.intern("dog");
-    assert_eq!(i.gensymed(d), Symbol::new(SymbolIndex::MAX_AS_U32 - 2));
 }
 
 #[test]
index d9e4abf61c3dc2f7aa0c65db1d020fe609341395..dcd733620bf90282cc50c5c835359450d1997bf8 100644 (file)
@@ -94,6 +94,15 @@ fn write_result(
                 self.write_event("test", desc.name.as_slice(), "failed", exec_time, stdout, None)
             }
 
+            TrTimedFail => self.write_event(
+                "test",
+                desc.name.as_slice(),
+                "failed",
+                exec_time,
+                stdout,
+                Some(r#""reason": "time limit exceeded""#),
+            ),
+
             TrFailedMsg(ref m) => self.write_event(
                 "test",
                 desc.name.as_slice(),
index 184726c67d3fb1a6bd87cbc96d9963413c856b53..2935b4c99cec42c68f76e5c1c0acc1cd6a1f3d58 100644 (file)
@@ -3,6 +3,7 @@
 pub(crate) struct PrettyFormatter<T> {
     out: OutputLocation<T>,
     use_color: bool,
+    time_options: Option<TestTimeOptions>,
 
     /// Number of columns to fill when aligning names
     max_name_len: usize,
@@ -16,12 +17,14 @@ pub fn new(
         use_color: bool,
         max_name_len: usize,
         is_multithreaded: bool,
+        time_options: Option<TestTimeOptions>,
     ) -> Self {
         PrettyFormatter {
             out,
             use_color,
             max_name_len,
             is_multithreaded,
+            time_options
         }
     }
 
@@ -30,20 +33,24 @@ pub fn output_location(&self) -> &OutputLocation<T> {
         &self.out
     }
 
-    pub fn write_ok(&mut self, exec_time: Option<&TestExecTime>) -> io::Result<()> {
-        self.write_short_result("ok", term::color::GREEN, exec_time)
+    pub fn write_ok(&mut self) -> io::Result<()> {
+        self.write_short_result("ok", term::color::GREEN)
     }
 
-    pub fn write_failed(&mut self, exec_time: Option<&TestExecTime>) -> io::Result<()> {
-        self.write_short_result("FAILED", term::color::RED, exec_time)
+    pub fn write_failed(&mut self) -> io::Result<()> {
+        self.write_short_result("FAILED", term::color::RED)
     }
 
-    pub fn write_ignored(&mut self, exec_time: Option<&TestExecTime>) -> io::Result<()> {
-        self.write_short_result("ignored", term::color::YELLOW, exec_time)
+    pub fn write_ignored(&mut self) -> io::Result<()> {
+        self.write_short_result("ignored", term::color::YELLOW)
     }
 
-    pub fn write_allowed_fail(&mut self, exec_time: Option<&TestExecTime>) -> io::Result<()> {
-        self.write_short_result("FAILED (allowed)", term::color::YELLOW, exec_time)
+    pub fn write_allowed_fail(&mut self) -> io::Result<()> {
+        self.write_short_result("FAILED (allowed)", term::color::YELLOW)
+    }
+
+    pub fn write_time_failed(&mut self) -> io::Result<()> {
+        self.write_short_result("FAILED (time limit exceeded)", term::color::RED)
     }
 
     pub fn write_bench(&mut self) -> io::Result<()> {
@@ -54,13 +61,8 @@ pub fn write_short_result(
         &mut self,
         result: &str,
         color: term::color::Color,
-        exec_time: Option<&TestExecTime>,
     ) -> io::Result<()> {
-        self.write_pretty(result, color)?;
-        if let Some(exec_time) = exec_time {
-            self.write_plain(format!(" {}", exec_time))?;
-        }
-        self.write_plain("\n")
+        self.write_pretty(result, color)
     }
 
     pub fn write_pretty(&mut self, word: &str, color: term::color::Color) -> io::Result<()> {
@@ -88,12 +90,48 @@ pub fn write_plain<S: AsRef<str>>(&mut self, s: S) -> io::Result<()> {
         self.out.flush()
     }
 
-    pub fn write_successes(&mut self, state: &ConsoleTestState) -> io::Result<()> {
-        self.write_plain("\nsuccesses:\n")?;
-        let mut successes = Vec::new();
+    fn write_time(
+        &mut self,
+        desc: &TestDesc,
+        exec_time: Option<&TestExecTime>
+    ) -> io::Result<()> {
+        if let (Some(opts), Some(time)) = (self.time_options, exec_time) {
+            let time_str = format!(" <{}>", time);
+
+            let color = if opts.colored {
+                if opts.is_critical(desc, time) {
+                    Some(term::color::RED)
+                } else if opts.is_warn(desc, time) {
+                    Some(term::color::YELLOW)
+                } else {
+                    None
+                }
+            } else {
+                None
+            };
+
+            match color {
+                Some(color) => self.write_pretty(&time_str, color)?,
+                None => self.write_plain(&time_str)?
+            }
+        }
+
+        Ok(())
+    }
+
+    fn write_results(
+        &mut self,
+        inputs: &Vec<(TestDesc, Vec<u8>)>,
+        results_type: &str
+    ) -> io::Result<()> {
+        let results_out_str = format!("\n{}:\n", results_type);
+
+        self.write_plain(&results_out_str)?;
+
+        let mut results = Vec::new();
         let mut stdouts = String::new();
-        for &(ref f, ref stdout) in &state.not_failures {
-            successes.push(f.name.to_string());
+        for &(ref f, ref stdout) in inputs {
+            results.push(f.name.to_string());
             if !stdout.is_empty() {
                 stdouts.push_str(&format!("---- {} stdout ----\n", f.name));
                 let output = String::from_utf8_lossy(stdout);
@@ -106,38 +144,24 @@ pub fn write_successes(&mut self, state: &ConsoleTestState) -> io::Result<()> {
             self.write_plain(&stdouts)?;
         }
 
-        self.write_plain("\nsuccesses:\n")?;
-        successes.sort();
-        for name in &successes {
+        self.write_plain(&results_out_str)?;
+        results.sort();
+        for name in &results {
             self.write_plain(&format!("    {}\n", name))?;
         }
         Ok(())
     }
 
+    pub fn write_successes(&mut self, state: &ConsoleTestState) -> io::Result<()> {
+        self.write_results(&state.not_failures, "successes")
+    }
+
     pub fn write_failures(&mut self, state: &ConsoleTestState) -> io::Result<()> {
-        self.write_plain("\nfailures:\n")?;
-        let mut failures = Vec::new();
-        let mut fail_out = String::new();
-        for &(ref f, ref stdout) in &state.failures {
-            failures.push(f.name.to_string());
-            if !stdout.is_empty() {
-                fail_out.push_str(&format!("---- {} stdout ----\n", f.name));
-                let output = String::from_utf8_lossy(stdout);
-                fail_out.push_str(&output);
-                fail_out.push_str("\n");
-            }
-        }
-        if !fail_out.is_empty() {
-            self.write_plain("\n")?;
-            self.write_plain(&fail_out)?;
-        }
+        self.write_results(&state.failures, "failures")
+    }
 
-        self.write_plain("\nfailures:\n")?;
-        failures.sort();
-        for name in &failures {
-            self.write_plain(&format!("    {}\n", name))?;
-        }
-        Ok(())
+    pub fn write_time_failures(&mut self, state: &ConsoleTestState) -> io::Result<()> {
+        self.write_results(&state.time_failures, "failures (time limit exceeded)")
     }
 
     fn write_test_name(&mut self, desc: &TestDesc) -> io::Result<()> {
@@ -179,15 +203,19 @@ fn write_result(
         }
 
         match *result {
-            TrOk => self.write_ok(exec_time),
-            TrFailed | TrFailedMsg(_) => self.write_failed(exec_time),
-            TrIgnored => self.write_ignored(exec_time),
-            TrAllowedFail => self.write_allowed_fail(exec_time),
+            TrOk => self.write_ok()?,
+            TrFailed | TrFailedMsg(_) => self.write_failed()?,
+            TrIgnored => self.write_ignored()?,
+            TrAllowedFail => self.write_allowed_fail()?,
             TrBench(ref bs) => {
                 self.write_bench()?;
-                self.write_plain(&format!(": {}\n", fmt_bench_samples(bs)))
+                self.write_plain(&format!(": {}", fmt_bench_samples(bs)))?;
             }
+            TrTimedFail => self.write_time_failed()?,
         }
+
+        self.write_time(desc, exec_time)?;
+        self.write_plain("\n")
     }
 
     fn write_timeout(&mut self, desc: &TestDesc) -> io::Result<()> {
@@ -207,7 +235,13 @@ fn write_run_finish(&mut self, state: &ConsoleTestState) -> io::Result<bool> {
         }
         let success = state.failed == 0;
         if !success {
-            self.write_failures(state)?;
+            if !state.failures.is_empty() {
+                self.write_failures(state)?;
+            }
+
+            if !state.time_failures.is_empty() {
+                self.write_time_failures(state)?;
+            }
         }
 
         self.write_plain("\ntest result: ")?;
index 1812c20904c87f591feb2d498b03c46b618fe89c..8914e7b6b5685e32fc4fd77a736b88f82a75f59c 100644 (file)
@@ -180,7 +180,7 @@ fn write_result(
     ) -> io::Result<()> {
         match *result {
             TrOk => self.write_ok(),
-            TrFailed | TrFailedMsg(_) => self.write_failed(),
+            TrFailed | TrFailedMsg(_) | TrTimedFail => self.write_failed(),
             TrIgnored => self.write_ignored(),
             TrAllowedFail => self.write_allowed_fail(),
             TrBench(ref bs) => {
index 4a28872decab2641210f5929e4ef58fa1eecab88..5dd495e3fa9844bd5be43f39abdf6d36eac7d33c 100644 (file)
@@ -56,6 +56,7 @@
 use std::path::PathBuf;
 use std::process;
 use std::process::{ExitStatus, Command, Termination};
+use std::str::FromStr;
 use std::sync::mpsc::{channel, Sender};
 use std::sync::{Arc, Mutex};
 use std::thread;
 const TR_OK: i32 = 50;
 const TR_FAILED: i32 = 51;
 
+/// This small module contains constants used by the `report-time` option.
+/// These constant values are used if the corresponding environment variables are not set.
+///
+/// To override the values for unit tests, set the `RUST_TEST_TIME_UNIT` environment variable;
+/// to override the values for integration tests, set `RUST_TEST_TIME_INTEGRATION`;
+/// to override the values for doctests, set `RUST_TEST_TIME_DOCTEST`.
+///
+/// The expected format is `RUST_TEST_TIME_xxx=100,200`, where 100 is the
+/// warn time in milliseconds and 200 is the critical time.
+pub mod time_constants {
+    use std::time::Duration;
+    use super::TEST_WARN_TIMEOUT_S;
+
+    /// Environment variable for overriding the default threshold for unit tests.
+    pub const UNIT_ENV_NAME: &str = "RUST_TEST_TIME_UNIT";
+
+    // Unit tests are supposed to be really quick.
+    pub const UNIT_WARN: Duration = Duration::from_millis(50);
+    pub const UNIT_CRITICAL: Duration = Duration::from_millis(100);
+
+    /// Environment variable for overriding the default threshold for integration tests.
+    pub const INTEGRATION_ENV_NAME: &str = "RUST_TEST_TIME_INTEGRATION";
+
+    // Integration tests may have a lot of work, so they can take longer to execute.
+    pub const INTEGRATION_WARN: Duration = Duration::from_millis(500);
+    pub const INTEGRATION_CRITICAL: Duration = Duration::from_millis(1000);
+
+    /// Environment variable for overriding the default threshold for doctests.
+    pub const DOCTEST_ENV_NAME: &str = "RUST_TEST_TIME_DOCTEST";
+
+    // Doctests are similar to integration tests, because they can include a lot of
+    // initialization code.
+    pub const DOCTEST_WARN: Duration = INTEGRATION_WARN;
+    pub const DOCTEST_CRITICAL: Duration = INTEGRATION_CRITICAL;
+
+    // Do not assume anything about unknown tests; base the limits on the
+    // `TEST_WARN_TIMEOUT_S` constant.
+    pub const UNKNOWN_WARN: Duration = Duration::from_secs(TEST_WARN_TIMEOUT_S);
+    pub const UNKNOWN_CRITICAL: Duration = Duration::from_secs(TEST_WARN_TIMEOUT_S * 2);
+}
+
 // to be used by rustc to compile tests in libtest
 pub mod test {
     pub use crate::{
         assert_test_result, filter_tests, parse_opts, run_test, test_main, test_main_static,
         Bencher, DynTestFn, DynTestName, Metric, MetricMap, Options, RunIgnored, RunStrategy,
         ShouldPanic, StaticBenchFn, StaticTestFn, StaticTestName, TestDesc, TestDescAndFn, TestName,
-        TestOpts, TestResult, TrFailed, TrFailedMsg, TrIgnored, TrOk,
+        TestOpts, TestTimeOptions, TestType, TestResult, TrFailed, TrFailedMsg, TrIgnored, TrOk,
     };
 }
 
@@ -97,6 +139,21 @@ pub enum Concurrent {
     No,
 }
 
+/// Type of the test according to the [rust book](https://doc.rust-lang.org/cargo/guide/tests.html)
+/// conventions.
+#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
+pub enum TestType {
+    /// Unit-tests are expected to be in the `src` folder of the crate.
+    UnitTest,
+    /// Integration-style tests are expected to be in the `tests` folder of the crate.
+    IntegrationTest,
+    /// Doctests are created manually by `librustdoc`, so they are a different type of test.
+    DocTest,
+    /// Tests for sources that don't follow the project layout convention
+    /// (e.g. tests in raw `main.rs` compiled by calling `rustc --test` directly).
+    Unknown,
+}
+
 // The name of a test. By convention this follows the rules for rust
 // paths; i.e., it should be a series of identifiers separated by double
 // colons. This way if some test runner wants to arrange the tests
@@ -232,6 +289,7 @@ pub struct TestDesc {
     pub ignore: bool,
     pub should_panic: ShouldPanic,
     pub allow_fail: bool,
+    pub test_type: TestType,
 }
 
 #[derive(Debug)]
@@ -403,6 +461,141 @@ pub enum RunIgnored {
     Only,
 }
 
+/// Structure denoting time limits for test execution.
+#[derive(Copy, Clone, Debug, Default, PartialEq, Eq)]
+pub struct TimeThreshold {
+    pub warn: Duration,
+    pub critical: Duration,
+}
+
+impl TimeThreshold {
+    /// Creates a new `TimeThreshold` instance with provided durations.
+    pub fn new(warn: Duration, critical: Duration) -> Self {
+        Self {
+            warn,
+            critical,
+        }
+    }
+
+    /// Attempts to create a `TimeThreshold` instance with values obtained
+    /// from the environment variable, and returns `None` if the variable
+    /// is not set.
+    /// Environment variable format is expected to match `\d+,\d+`.
+    ///
+    /// # Panics
+    ///
+    /// Panics if variable with provided name is set but contains inappropriate
+    /// value.
+    pub fn from_env_var(env_var_name: &str) -> Option<Self> {
+        let durations_str = env::var(env_var_name).ok()?;
+
+        // Split string into 2 substrings by comma and try to parse numbers.
+        let mut durations = durations_str
+            .splitn(2, ',')
+            .map(|v| {
+                u64::from_str(v).unwrap_or_else(|_| {
+                    panic!(
+                        "Duration value in variable {} is expected to be a number, but got {}",
+                        env_var_name, v
+                    )
+                })
+            });
+
+        // Callback to be called if the environment variable has unexpected structure.
+        let panic_on_incorrect_value = || {
+            panic!(
+                "Duration variable {} is expected to have 2 numbers separated by a comma, but got {}",
+                env_var_name, durations_str
+            );
+        };
+
+        let (warn, critical) = (
+            durations.next().unwrap_or_else(panic_on_incorrect_value),
+            durations.next().unwrap_or_else(panic_on_incorrect_value)
+        );
+
+        if warn > critical {
+            panic!("Test execution warn time should be less than or equal to the critical time");
+        }
+
+        Some(Self::new(Duration::from_millis(warn), Duration::from_millis(critical)))
+    }
+}
+
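`TimeThreshold::from_env_var` above reads a `WARN_TIME,CRITICAL_TIME` pair of millisecond values from an environment variable and panics on malformed input. A non-panicking sketch of the same parsing (hypothetical helper name, not part of this commit):

use std::time::Duration;

// Parse "50,100" into (warn, critical) durations; reject malformed or
// inverted pairs instead of panicking.
fn parse_threshold(s: &str) -> Option<(Duration, Duration)> {
    let mut parts = s.splitn(2, ',').map(|v| v.trim().parse::<u64>().ok());
    let warn = parts.next()??;
    let critical = parts.next()??;
    if warn > critical {
        return None;
    }
    Some((Duration::from_millis(warn), Duration::from_millis(critical)))
}

fn main() {
    // e.g. RUST_TEST_TIME_UNIT=50,100: warn at 50ms, critical at 100ms.
    assert_eq!(
        parse_threshold("50,100"),
        Some((Duration::from_millis(50), Duration::from_millis(100)))
    );
    assert_eq!(parse_threshold("200,100"), None); // warn must not exceed critical
    assert_eq!(parse_threshold("garbage"), None);
}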
+/// Structure with parameters for calculating test execution time.
+#[derive(Copy, Clone, Debug, Default, PartialEq, Eq)]
+pub struct TestTimeOptions {
+    /// Denotes whether exceeding the critical execution time limit should be
+    /// considered a test failure.
+    pub error_on_excess: bool,
+    pub colored: bool,
+    pub unit_threshold: TimeThreshold,
+    pub integration_threshold: TimeThreshold,
+    pub doctest_threshold: TimeThreshold,
+}
+
+impl TestTimeOptions {
+    pub fn new_from_env(error_on_excess: bool, colored: bool) -> Self {
+        let unit_threshold =
+            TimeThreshold::from_env_var(time_constants::UNIT_ENV_NAME)
+                .unwrap_or_else(Self::default_unit);
+
+        let integration_threshold =
+            TimeThreshold::from_env_var(time_constants::INTEGRATION_ENV_NAME)
+                .unwrap_or_else(Self::default_integration);
+
+        let doctest_threshold =
+            TimeThreshold::from_env_var(time_constants::DOCTEST_ENV_NAME)
+                .unwrap_or_else(Self::default_doctest);
+
+        Self {
+            error_on_excess,
+            colored,
+            unit_threshold,
+            integration_threshold,
+            doctest_threshold,
+        }
+    }
+
+    pub fn is_warn(&self, test: &TestDesc, exec_time: &TestExecTime) -> bool {
+        exec_time.0 >= self.warn_time(test)
+    }
+
+    pub fn is_critical(&self, test: &TestDesc, exec_time: &TestExecTime) -> bool {
+        exec_time.0 >= self.critical_time(test)
+    }
+
+    fn warn_time(&self, test: &TestDesc) -> Duration {
+        match test.test_type {
+            TestType::UnitTest => self.unit_threshold.warn,
+            TestType::IntegrationTest => self.integration_threshold.warn,
+            TestType::DocTest => self.doctest_threshold.warn,
+            TestType::Unknown => time_constants::UNKNOWN_WARN,
+        }
+    }
+
+    fn critical_time(&self, test: &TestDesc) -> Duration {
+        match test.test_type {
+            TestType::UnitTest => self.unit_threshold.critical,
+            TestType::IntegrationTest => self.integration_threshold.critical,
+            TestType::DocTest => self.doctest_threshold.critical,
+            TestType::Unknown => time_constants::UNKNOWN_CRITICAL,
+        }
+    }
+
+    fn default_unit() -> TimeThreshold {
+        TimeThreshold::new(time_constants::UNIT_WARN, time_constants::UNIT_CRITICAL)
+    }
+
+    fn default_integration() -> TimeThreshold {
+        TimeThreshold::new(time_constants::INTEGRATION_WARN, time_constants::INTEGRATION_CRITICAL)
+    }
+
+    fn default_doctest() -> TimeThreshold {
+        TimeThreshold::new(time_constants::DOCTEST_WARN, time_constants::DOCTEST_CRITICAL)
+    }
+}
+
 #[derive(Debug)]
 pub struct TestOpts {
     pub list: bool,
@@ -418,12 +611,14 @@ pub struct TestOpts {
     pub format: OutputFormat,
     pub test_threads: Option<usize>,
     pub skip: Vec<String>,
-    pub report_time: bool,
+    pub time_options: Option<TestTimeOptions>,
     pub options: Options,
 }
 
 /// Result of parsing the options.
 pub type OptRes = Result<TestOpts, String>;
+/// Result of parsing one part of the options.
+type OptPartRes<T> = Result<Option<T>, String>;
 
 fn optgroups() -> getopts::Options {
     let mut opts = getopts::Options::new();
@@ -502,10 +697,35 @@ fn optgroups() -> getopts::Options {
             unstable-options = Allow use of experimental features",
             "unstable-options",
         )
-        .optflag(
+        .optflagopt(
             "",
             "report-time",
-            "Show execution time of each test. Not available for --format=terse"
+            "Show execution time of each test. Available values:
+            plain   = do not colorize the execution time (default);
+            colored = colorize output according to the `color` parameter value;
+
+            Threshold values for colorized output can be configured via
+            `RUST_TEST_TIME_UNIT`, `RUST_TEST_TIME_INTEGRATION` and
+            `RUST_TEST_TIME_DOCTEST` environment variables.
+
+            Expected format of environment variable is `VARIABLE=WARN_TIME,CRITICAL_TIME`.
+
+            Not available for --format=terse",
+            "plain|colored"
+        )
+        .optflag(
+            "",
+            "ensure-time",
+            "Treat exceeding the test execution time limit as an error.
+
+            Threshold values for this option can be configured via
+            `RUST_TEST_TIME_UNIT`, `RUST_TEST_TIME_INTEGRATION` and
+            `RUST_TEST_TIME_DOCTEST` environment variables.
+
+            The expected format of each environment variable is `VARIABLE=WARN_TIME,CRITICAL_TIME`.
+
+            `CRITICAL_TIME` here means the limit that should not be exceeded by the test.
+            "
         );
     return opts;
 }
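Both of the new flags above read their thresholds from the `RUST_TEST_TIME_UNIT`, `RUST_TEST_TIME_INTEGRATION` and `RUST_TEST_TIME_DOCTEST` variables in the `VARIABLE=WARN_TIME,CRITICAL_TIME` format. As a rough illustration of what consuming such a pair involves (the `parse_threshold` helper is a hypothetical sketch, not the libtest implementation, and it assumes the two values are whole milliseconds, matching the units the tests further down use):

    use std::time::Duration;

    // Hypothetical sketch: parse a `WARN_TIME,CRITICAL_TIME` pair, e.g. the value
    // of `RUST_TEST_TIME_UNIT=50,100`, assuming whole-millisecond values.
    fn parse_threshold(raw: &str) -> Result<(Duration, Duration), String> {
        let mut parts = raw.splitn(2, ',');
        let mut next = |name: &str| -> Result<Duration, String> {
            parts
                .next()
                .ok_or_else(|| format!("missing {} value", name))?
                .trim()
                .parse::<u64>()
                .map(Duration::from_millis)
                .map_err(|err| format!("invalid {} value: {}", name, err))
        };
        let warn = next("WARN_TIME")?;
        let critical = next("CRITICAL_TIME")?;
        if warn > critical {
            return Err("WARN_TIME must not exceed CRITICAL_TIME".into());
        }
        Ok((warn, critical))
    }

    fn main() {
        let (warn, critical) = parse_threshold("50,100").unwrap();
        assert_eq!(warn, Duration::from_millis(50));
        assert_eq!(critical, Duration::from_millis(100));
    }

Under that reading, `RUST_TEST_TIME_UNIT=50,100` would start warning at 50 ms and, with `--ensure-time`, fail the test once it reaches 100 ms (the comparisons use `>=`, as `is_warn` and `is_critical` above show).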
@@ -554,6 +774,45 @@ fn is_nightly() -> bool {
     bootstrap || !disable_unstable_features
 }
 
+// Gets the option value and checks if unstable features are enabled.
+macro_rules! unstable_optflag {
+    ($matches:ident, $allow_unstable:ident, $option_name:literal) => {{
+        let opt = $matches.opt_present($option_name);
+        if !$allow_unstable && opt {
+            return Some(Err(format!(
+                "The \"{}\" flag is only accepted on the nightly compiler",
+                $option_name
+            )));
+        }
+
+        opt
+    }};
+}
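The early `return` inside `unstable_optflag!` leaves the function that invokes the macro, so the macro is only usable in functions returning `Option<Result<_, String>>`, such as `get_time_options` and `parse_opts` below. A self-contained sketch of the same pattern (the `opt_present` function here is a stand-in for `getopts::Matches::opt_present`, introduced purely for illustration):

    // Stand-in for `getopts::Matches::opt_present`, only to keep the sketch runnable.
    fn opt_present(flags: &[&str], name: &str) -> bool {
        flags.contains(&name)
    }

    // Roughly what `unstable_optflag!(matches, allow_unstable, "report-time")`
    // expands to: the `return` exits the enclosing function, not the macro.
    fn check_report_time(flags: &[&str], allow_unstable: bool) -> Option<Result<bool, String>> {
        let report_time = {
            let opt = opt_present(flags, "report-time");
            if !allow_unstable && opt {
                return Some(Err(format!(
                    "The \"{}\" flag is only accepted on the nightly compiler",
                    "report-time"
                )));
            }
            opt
        };
        Some(Ok(report_time))
    }

    fn main() {
        assert_eq!(check_report_time(&["report-time"], true), Some(Ok(true)));
        assert!(check_report_time(&["report-time"], false).unwrap().is_err());
        assert_eq!(check_report_time(&[], false), Some(Ok(false)));
    }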
+
+// Gets the CLI options associated with the `report-time` feature.
+fn get_time_options(
+    matches: &getopts::Matches,
+    allow_unstable: bool,
+) -> Option<OptPartRes<TestTimeOptions>> {
+    let report_time = unstable_optflag!(matches, allow_unstable, "report-time");
+    let colored_opt_str = matches.opt_str("report-time");
+    let mut report_time_colored = report_time && colored_opt_str == Some("colored".into());
+    let ensure_test_time = unstable_optflag!(matches, allow_unstable, "ensure-time");
+
+    // If the `ensure-time` option is provided, time output is enforced,
+    // so the user won't be confused if any of the tests silently fail.
+    let options = if report_time || ensure_test_time {
+        if ensure_test_time && !report_time {
+            report_time_colored = true;
+        }
+        Some(TestTimeOptions::new_from_env(ensure_test_time, report_time_colored))
+    } else {
+        None
+    };
+
+    Some(Ok(options))
+}
+
 // Parses command line arguments into test options
 pub fn parse_opts(args: &[String]) -> Option<OptRes> {
     let mut allow_unstable = false;
@@ -592,26 +851,9 @@ pub fn parse_opts(args: &[String]) -> Option<OptRes> {
         None
     };
 
-    let exclude_should_panic = matches.opt_present("exclude-should-panic");
-    if !allow_unstable && exclude_should_panic {
-        return Some(Err(
-            "The \"exclude-should-panic\" flag is only accepted on the nightly compiler".into(),
-        ));
-    }
+    let exclude_should_panic = unstable_optflag!(matches, allow_unstable, "exclude-should-panic");
 
-    let include_ignored = matches.opt_present("include-ignored");
-    if !allow_unstable && include_ignored {
-        return Some(Err(
-            "The \"include-ignored\" flag is only accepted on the nightly compiler".into(),
-        ));
-    }
-
-    let report_time = matches.opt_present("report-time");
-    if !allow_unstable && report_time {
-        return Some(Err(
-            "The \"report-time\" flag is only accepted on the nightly compiler".into(),
-        ));
-    }
+    let include_ignored = unstable_optflag!(matches, allow_unstable, "include-ignored");
 
     let run_ignored = match (include_ignored, matches.opt_present("ignored")) {
         (true, true) => {
@@ -641,6 +883,12 @@ pub fn parse_opts(args: &[String]) -> Option<OptRes> {
         };
     }
 
+    let time_options = match get_time_options(&matches, allow_unstable) {
+        Some(Ok(val)) => val,
+        Some(Err(e)) => return Some(Err(e)),
+        None => panic!("Unexpected output from `get_time_options`"),
+    };
+
     let test_threads = match matches.opt_str("test-threads") {
         Some(n_str) => match n_str.parse::<usize>() {
             Ok(0) => return Some(Err("argument for --test-threads must not be 0".to_string())),
@@ -706,20 +954,20 @@ pub fn parse_opts(args: &[String]) -> Option<OptRes> {
         format,
         test_threads,
         skip: matches.opt_strs("skip"),
-        report_time,
+        time_options,
         options: Options::new().display_output(matches.opt_present("show-output")),
     };
 
     Some(Ok(test_opts))
 }
 
-#[derive(Clone, PartialEq)]
+#[derive(Debug, Clone, PartialEq)]
 pub struct BenchSamples {
     ns_iter_summ: stats::Summary,
     mb_s: usize,
 }
 
-#[derive(Clone, PartialEq)]
+#[derive(Debug, Clone, PartialEq)]
 pub enum TestResult {
     TrOk,
     TrFailed,
@@ -727,6 +975,7 @@ pub enum TestResult {
     TrIgnored,
     TrAllowedFail,
     TrBench(BenchSamples),
+    TrTimedFail,
 }
 
 unsafe impl Send for TestResult {}
@@ -774,6 +1023,7 @@ struct ConsoleTestState {
     metrics: MetricMap,
     failures: Vec<(TestDesc, Vec<u8>)>,
     not_failures: Vec<(TestDesc, Vec<u8>)>,
+    time_failures: Vec<(TestDesc, Vec<u8>)>,
     options: Options,
 }
 
@@ -796,6 +1046,7 @@ pub fn new(opts: &TestOpts) -> io::Result<ConsoleTestState> {
             metrics: MetricMap::new(),
             failures: Vec::new(),
             not_failures: Vec::new(),
+            time_failures: Vec::new(),
             options: opts.options,
         })
     }
@@ -831,11 +1082,12 @@ pub fn write_log_result(&mut self,test: &TestDesc,
                 TrIgnored => "ignored".to_owned(),
                 TrAllowedFail => "failed (allowed)".to_owned(),
                 TrBench(ref bs) => fmt_bench_samples(bs),
+                TrTimedFail => "failed (time limit exceeded)".to_owned(),
             },
             test.name,
         ))?;
         if let Some(exec_time) = exec_time {
-            self.write_log(|| format!(" {}", exec_time))?;
+            self.write_log(|| format!(" <{}>", exec_time))?;
         }
         self.write_log(|| "\n")
     }
@@ -993,6 +1245,10 @@ fn callback(
                         stdout.extend_from_slice(format!("note: {}", msg).as_bytes());
                         st.failures.push((test, stdout));
                     }
+                    TrTimedFail => {
+                        st.failed += 1;
+                        st.time_failures.push((test, stdout));
+                    }
                 }
                 Ok(())
             }
@@ -1018,6 +1274,7 @@ fn callback(
             use_color(opts),
             max_name_len,
             is_multithreaded,
+            opts.time_options,
         )),
         OutputFormat::Terse => Box::new(TerseFormatter::new(
             output,
@@ -1494,22 +1751,35 @@ pub fn run_test(
         return;
     }
 
+    struct TestRunOpts {
+        pub strategy: RunStrategy,
+        pub nocapture: bool,
+        pub concurrency: Concurrent,
+        pub time: Option<TestTimeOptions>,
+    }
+
     fn run_test_inner(
         desc: TestDesc,
-        nocapture: bool,
-        report_time: bool,
-        strategy: RunStrategy,
         monitor_ch: Sender<MonitorMsg>,
         testfn: Box<dyn FnOnce() + Send>,
-        concurrency: Concurrent,
+        opts: TestRunOpts,
     ) {
+        let concurrency = opts.concurrency;
         let name = desc.name.clone();
 
         let runtest = move || {
-            match strategy {
+            match opts.strategy {
                 RunStrategy::InProcess =>
-                    run_test_in_process(desc, nocapture, report_time, testfn, monitor_ch),
-                RunStrategy::SpawnPrimary => spawn_test_subprocess(desc, report_time, monitor_ch),
+                    run_test_in_process(
+                        desc,
+                        opts.nocapture,
+                        opts.time.is_some(),
+                        testfn,
+                        monitor_ch,
+                        opts.time
+                    ),
+                RunStrategy::SpawnPrimary =>
+                    spawn_test_subprocess(desc, opts.time.is_some(), monitor_ch, opts.time),
             }
         };
 
@@ -1525,6 +1795,13 @@ fn run_test_inner(
         }
     }
 
+    let test_run_opts = TestRunOpts {
+        strategy,
+        nocapture: opts.nocapture,
+        concurrency,
+        time: opts.time_options
+    };
+
     match testfn {
         DynBenchFn(bencher) => {
             // Benchmarks aren't expected to panic, so we run them all in-process.
@@ -1545,22 +1822,16 @@ fn run_test_inner(
             };
             run_test_inner(
                 desc,
-                opts.nocapture,
-                opts.report_time,
-                strategy,
                 monitor_ch,
                 Box::new(move || __rust_begin_short_backtrace(f)),
-                concurrency
+                test_run_opts,
             );
         }
         StaticTestFn(f) => run_test_inner(
             desc,
-            opts.nocapture,
-            opts.report_time,
-            strategy,
             monitor_ch,
             Box::new(move || __rust_begin_short_backtrace(f)),
-            concurrency,
+            test_run_opts,
         ),
     }
 }
@@ -1571,10 +1842,13 @@ fn __rust_begin_short_backtrace<F: FnOnce()>(f: F) {
     f()
 }
 
-fn calc_result<'a>(desc: &TestDesc,
-                   task_result: Result<(), &'a (dyn Any + 'static + Send)>)
--> TestResult {
-    match (&desc.should_panic, task_result) {
+fn calc_result<'a>(
+    desc: &TestDesc,
+    task_result: Result<(), &'a (dyn Any + 'static + Send)>,
+    time_opts: &Option<TestTimeOptions>,
+    exec_time: &Option<TestExecTime>
+) -> TestResult {
+    let result = match (&desc.should_panic, task_result) {
         (&ShouldPanic::No, Ok(())) | (&ShouldPanic::Yes, Err(_)) => TrOk,
         (&ShouldPanic::YesWithMessage(msg), Err(ref err)) => {
             if err
@@ -1596,23 +1870,59 @@ fn calc_result<'a>(desc: &TestDesc,
         (&ShouldPanic::Yes, Ok(())) => TrFailedMsg("test did not panic as expected".to_string()),
         _ if desc.allow_fail => TrAllowedFail,
         _ => TrFailed,
+    };
+
+    // If the test already failed (or is allowed to fail), do not change the result.
+    if result != TrOk {
+        return result;
     }
+
+    // Check whether the test failed due to a timeout.
+    if let (Some(opts), Some(time)) = (time_opts, exec_time) {
+        if opts.error_on_excess && opts.is_critical(desc, time) {
+            return TrTimedFail;
+        }
+    }
+
+    result
 }
 
-fn get_result_from_exit_code(desc: &TestDesc, code: i32) -> TestResult {
-    match (desc.allow_fail, code) {
+fn get_result_from_exit_code(
+    desc: &TestDesc,
+    code: i32,
+    time_opts: &Option<TestTimeOptions>,
+    exec_time: &Option<TestExecTime>,
+) -> TestResult {
+    let result = match (desc.allow_fail, code) {
         (_, TR_OK) => TrOk,
         (true, TR_FAILED) => TrAllowedFail,
         (false, TR_FAILED) => TrFailed,
         (_, _) => TrFailedMsg(format!("got unexpected return code {}", code)),
+    };
+
+    // If the test already failed (or is allowed to fail), do not change the result.
+    if result != TrOk {
+        return result;
     }
+
+    // Check whether the test failed due to a timeout.
+    if let (Some(opts), Some(time)) = (time_opts, exec_time) {
+        if opts.error_on_excess && opts.is_critical(desc, time) {
+            return TrTimedFail;
+        }
+    }
+
+    result
 }
 
-fn run_test_in_process(desc: TestDesc,
-                       nocapture: bool,
-                       report_time: bool,
-                       testfn: Box<dyn FnOnce() + Send>,
-                       monitor_ch: Sender<MonitorMsg>) {
+fn run_test_in_process(
+    desc: TestDesc,
+    nocapture: bool,
+    report_time: bool,
+    testfn: Box<dyn FnOnce() + Send>,
+    monitor_ch: Sender<MonitorMsg>,
+    time_opts: Option<TestTimeOptions>,
+) {
     // Buffer for capturing standard I/O
     let data = Arc::new(Mutex::new(Vec::new()));
 
@@ -1642,14 +1952,19 @@ fn run_test_in_process(desc: TestDesc,
     }
 
     let test_result = match result {
-        Ok(()) => calc_result(&desc, Ok(())),
-        Err(e) => calc_result(&desc, Err(e.as_ref())),
+        Ok(()) => calc_result(&desc, Ok(()), &time_opts, &exec_time),
+        Err(e) => calc_result(&desc, Err(e.as_ref()), &time_opts, &exec_time),
     };
     let stdout = data.lock().unwrap().to_vec();
     monitor_ch.send((desc.clone(), test_result, exec_time, stdout)).unwrap();
 }
 
-fn spawn_test_subprocess(desc: TestDesc, report_time: bool, monitor_ch: Sender<MonitorMsg>) {
+fn spawn_test_subprocess(
+    desc: TestDesc,
+    report_time: bool,
+    monitor_ch: Sender<MonitorMsg>,
+    time_opts: Option<TestTimeOptions>,
+) {
     let (result, test_output, exec_time) = (|| {
         let args = env::args().collect::<Vec<_>>();
         let current_exe = &args[0];
@@ -1680,7 +1995,7 @@ fn spawn_test_subprocess(desc: TestDesc, report_time: bool, monitor_ch: Sender<M
 
         let result = match (|| -> Result<TestResult, String> {
             let exit_code = get_exit_code(status)?;
-            Ok(get_result_from_exit_code(&desc, exit_code))
+            Ok(get_result_from_exit_code(&desc, exit_code, &time_opts, &exec_time))
         })() {
             Ok(r) => r,
             Err(e) => {
@@ -1695,12 +2010,15 @@ fn spawn_test_subprocess(desc: TestDesc, report_time: bool, monitor_ch: Sender<M
     monitor_ch.send((desc.clone(), result, exec_time, test_output)).unwrap();
 }
 
-fn run_test_in_spawned_subprocess(desc: TestDesc, testfn: Box<dyn FnOnce() + Send>) -> ! {
+fn run_test_in_spawned_subprocess(
+    desc: TestDesc,
+    testfn: Box<dyn FnOnce() + Send>,
+) -> ! {
     let builtin_panic_hook = panic::take_hook();
     let record_result = Arc::new(move |panic_info: Option<&'_ PanicInfo<'_>>| {
         let test_result = match panic_info {
-            Some(info) => calc_result(&desc, Err(info.payload())),
-            None => calc_result(&desc, Ok(())),
+            Some(info) => calc_result(&desc, Err(info.payload()), &None, &None),
+            None => calc_result(&desc, Ok(()), &None, &None),
         };
 
         // We don't support serializing TrFailedMsg, so just
index 6577ec8ad2391923dd918e45a294cedfa1346735..aab8d012fdf687f584f586cf0ff2d833c921393e 100644 (file)
@@ -115,7 +115,7 @@ pub trait Stats {
 }
 
 /// Extracted collection of all the summary statistics of a sample set.
-#[derive(Clone, PartialEq, Copy)]
+#[derive(Debug, Clone, PartialEq, Copy)]
 #[allow(missing_docs)]
 pub struct Summary {
     pub sum: f64,
index c3727b3013fc8921e01d05ec837b33412d047214..5f7150a8eeba24e6d6af9b21ea7d23a38821a80a 100644 (file)
@@ -2,10 +2,11 @@
 
 use crate::test::{
     filter_tests, parse_opts, run_test, DynTestFn, DynTestName, MetricMap, RunIgnored, RunStrategy,
-    ShouldPanic, StaticTestName, TestDesc, TestDescAndFn, TestOpts,
-    TrIgnored, TrOk,
+    ShouldPanic, StaticTestName, TestDesc, TestDescAndFn, TestOpts, TestTimeOptions,
+    TestType, TrFailedMsg, TrIgnored, TrOk,
 };
 use std::sync::mpsc::channel;
+use std::time::Duration;
 
 impl TestOpts {
     fn new() -> TestOpts {
@@ -23,7 +26,7 @@ fn new() -> TestOpts {
             format: OutputFormat::Pretty,
             test_threads: None,
             skip: vec![],
-            report_time: false,
+            time_options: None,
             options: Options::new(),
         }
     }
@@ -37,6 +40,7 @@ fn one_ignored_one_unignored_test() -> Vec<TestDescAndFn> {
                 ignore: true,
                 should_panic: ShouldPanic::No,
                 allow_fail: false,
+                test_type: TestType::Unknown,
             },
             testfn: DynTestFn(Box::new(move || {})),
         },
@@ -46,6 +50,7 @@ fn one_ignored_one_unignored_test() -> Vec<TestDescAndFn> {
                 ignore: false,
                 should_panic: ShouldPanic::No,
                 allow_fail: false,
+                test_type: TestType::Unknown,
             },
             testfn: DynTestFn(Box::new(move || {})),
         },
@@ -63,6 +68,7 @@ fn f() {
             ignore: true,
             should_panic: ShouldPanic::No,
             allow_fail: false,
+            test_type: TestType::Unknown,
         },
         testfn: DynTestFn(Box::new(f)),
     };
@@ -81,6 +87,7 @@ fn f() {}
             ignore: true,
             should_panic: ShouldPanic::No,
             allow_fail: false,
+            test_type: TestType::Unknown,
         },
         testfn: DynTestFn(Box::new(f)),
     };
@@ -103,6 +110,7 @@ fn f() {
             ignore: false,
             should_panic: ShouldPanic::Yes,
             allow_fail: false,
+            test_type: TestType::Unknown,
         },
         testfn: DynTestFn(Box::new(f)),
     };
@@ -125,6 +133,7 @@ fn f() {
             ignore: false,
             should_panic: ShouldPanic::YesWithMessage("error message"),
             allow_fail: false,
+            test_type: TestType::Unknown,
         },
         testfn: DynTestFn(Box::new(f)),
     };
@@ -150,6 +159,7 @@ fn f() {
             ignore: false,
             should_panic: ShouldPanic::YesWithMessage(expected),
             allow_fail: false,
+            test_type: TestType::Unknown,
         },
         testfn: DynTestFn(Box::new(f)),
     };
@@ -170,6 +180,7 @@ fn f() {}
             ignore: false,
             should_panic: ShouldPanic::Yes,
             allow_fail: false,
+            test_type: TestType::Unknown,
         },
         testfn: DynTestFn(Box::new(f)),
     };
@@ -187,11 +198,18 @@ fn f() {}
             ignore: false,
             should_panic: ShouldPanic::No,
             allow_fail: false,
+            test_type: TestType::Unknown,
         },
         testfn: DynTestFn(Box::new(f)),
     };
+    let time_options = if report_time {
+        Some(TestTimeOptions::default())
+    } else {
+        None
+    };
+
     let test_opts = TestOpts {
-        report_time,
+        time_options,
         ..TestOpts::new()
     };
     let (tx, rx) = channel();
@@ -212,6 +230,97 @@ fn test_should_report_time() {
     assert!(exec_time.is_some());
 }
 
+fn time_test_failure_template(test_type: TestType) -> TestResult {
+    fn f() {}
+    let desc = TestDescAndFn {
+        desc: TestDesc {
+            name: StaticTestName("whatever"),
+            ignore: false,
+            should_panic: ShouldPanic::No,
+            allow_fail: false,
+            test_type
+        },
+        testfn: DynTestFn(Box::new(f)),
+    };
+    // `Default` will initialize all the thresholds to 0 milliseconds.
+    let mut time_options = TestTimeOptions::default();
+    time_options.error_on_excess = true;
+
+    let test_opts = TestOpts {
+        time_options: Some(time_options),
+        ..TestOpts::new()
+    };
+    let (tx, rx) = channel();
+    run_test(&test_opts, false, desc, RunStrategy::InProcess, tx, Concurrent::No);
+    let (_, result, _, _) = rx.recv().unwrap();
+
+    result
+}
+
+#[test]
+fn test_error_on_exceed() {
+    let types = [TestType::UnitTest, TestType::IntegrationTest, TestType::DocTest];
+
+    for test_type in types.into_iter() {
+        let result = time_test_failure_template(*test_type);
+
+        assert_eq!(result, TestResult::TrTimedFail);
+    }
+
+    // Check that for unknown tests thresholds aren't applied.
+    let result = time_test_failure_template(TestType::Unknown);
+    assert_eq!(result, TestResult::TrOk);
+}
+
+fn typed_test_desc(test_type: TestType) -> TestDesc {
+    TestDesc {
+        name: StaticTestName("whatever"),
+        ignore: false,
+        should_panic: ShouldPanic::No,
+        allow_fail: false,
+        test_type
+    }
+}
+
+fn test_exec_time(millis: u64) -> TestExecTime {
+    TestExecTime(Duration::from_millis(millis))
+}
+
+#[test]
+fn test_time_options_threshold() {
+    let unit = TimeThreshold::new(Duration::from_millis(50), Duration::from_millis(100));
+    let integration = TimeThreshold::new(Duration::from_millis(500), Duration::from_millis(1000));
+    let doc = TimeThreshold::new(Duration::from_millis(5000), Duration::from_millis(10000));
+
+    let options = TestTimeOptions {
+        error_on_excess: false,
+        colored: false,
+        unit_threshold: unit.clone(),
+        integration_threshold: integration.clone(),
+        doctest_threshold: doc.clone(),
+    };
+
+    let test_vector = [
+        (TestType::UnitTest, unit.warn.as_millis() - 1, false, false),
+        (TestType::UnitTest, unit.warn.as_millis(), true, false),
+        (TestType::UnitTest, unit.critical.as_millis(), true, true),
+        (TestType::IntegrationTest, integration.warn.as_millis() - 1, false, false),
+        (TestType::IntegrationTest, integration.warn.as_millis(), true, false),
+        (TestType::IntegrationTest, integration.critical.as_millis(), true, true),
+        (TestType::DocTest, doc.warn.as_millis() - 1, false, false),
+        (TestType::DocTest, doc.warn.as_millis(), true, false),
+        (TestType::DocTest, doc.critical.as_millis(), true, true),
+    ];
+
+    for (test_type, time, expected_warn, expected_critical) in test_vector.into_iter() {
+        let test_desc = typed_test_desc(*test_type);
+        let exec_time = test_exec_time(*time as u64);
+
+        assert_eq!(options.is_warn(&test_desc, &exec_time), *expected_warn);
+        assert_eq!(options.is_critical(&test_desc, &exec_time), *expected_critical);
+    }
+}
+
 #[test]
 fn parse_ignored_flag() {
     let args = vec![
@@ -293,6 +402,7 @@ pub fn exclude_should_panic_option() {
             ignore: false,
             should_panic: ShouldPanic::Yes,
             allow_fail: false,
+            test_type: TestType::Unknown,
         },
         testfn: DynTestFn(Box::new(move || {})),
     });
@@ -314,6 +424,7 @@ fn tests() -> Vec<TestDescAndFn> {
                     ignore: false,
                     should_panic: ShouldPanic::No,
                     allow_fail: false,
+                    test_type: TestType::Unknown,
                 },
                 testfn: DynTestFn(Box::new(move || {})),
             })
@@ -425,6 +536,7 @@ fn testfn() {}
                     ignore: false,
                     should_panic: ShouldPanic::No,
                     allow_fail: false,
+                    test_type: TestType::Unknown,
                 },
                 testfn: DynTestFn(Box::new(testfn)),
             };
@@ -501,6 +613,7 @@ fn f(_: &mut Bencher) {}
         ignore: false,
         should_panic: ShouldPanic::No,
         allow_fail: false,
+        test_type: TestType::Unknown,
     };
 
     crate::bench::benchmark(desc, tx, true, f);
@@ -520,6 +633,7 @@ fn f(b: &mut Bencher) {
         ignore: false,
         should_panic: ShouldPanic::No,
         allow_fail: false,
+        test_type: TestType::Unknown,
     };
 
     crate::bench::benchmark(desc, tx, true, f);
@@ -533,6 +647,7 @@ fn should_sort_failures_before_printing_them() {
         ignore: false,
         should_panic: ShouldPanic::No,
         allow_fail: false,
+        test_type: TestType::Unknown,
     };
 
     let test_b = TestDesc {
@@ -540,9 +655,10 @@ fn should_sort_failures_before_printing_them() {
         ignore: false,
         should_panic: ShouldPanic::No,
         allow_fail: false,
+        test_type: TestType::Unknown,
     };
 
-    let mut out = PrettyFormatter::new(Raw(Vec::new()), false, 10, false);
+    let mut out = PrettyFormatter::new(Raw(Vec::new()), false, 10, false, None);
 
     let st = ConsoleTestState {
         log_out: None,
@@ -557,6 +673,7 @@ fn should_sort_failures_before_printing_them() {
         failures: vec![(test_b, Vec::new()), (test_a, Vec::new())],
         options: Options::new(),
         not_failures: Vec::new(),
+        time_failures: Vec::new(),
     };
 
     out.write_failures(&st).unwrap();
index 8473db5f2af9dc36aaf6f9b053fcc2e0e6ac8026..14a3b123074e066d64a99886941473058e52197d 160000 (submodule)
@@ -1 +1 @@
-Subproject commit 8473db5f2af9dc36aaf6f9b053fcc2e0e6ac8026
+Subproject commit 14a3b123074e066d64a99886941473058e52197d
diff --git a/src/test/codegen/extern-functions.rs b/src/test/codegen/extern-functions.rs
deleted file mode 100644 (file)
index 786f2c8..0000000
+++ /dev/null
@@ -1,20 +0,0 @@
-// ignore-emscripten compiled with panic=abort by default
-// compile-flags: -C no-prepopulate-passes
-
-#![crate_type = "lib"]
-#![feature(unwind_attributes)]
-
-extern {
-// CHECK: Function Attrs: nounwind
-// CHECK-NEXT: declare void @extern_fn
-    fn extern_fn();
-// CHECK-NOT: Function Attrs: nounwind
-// CHECK: declare void @unwinding_extern_fn
-    #[unwind(allowed)]
-    fn unwinding_extern_fn();
-}
-
-pub unsafe fn force_declare() {
-    extern_fn();
-    unwinding_extern_fn();
-}
diff --git a/src/test/codegen/non-terminate/infinite-loop-1.rs b/src/test/codegen/non-terminate/infinite-loop-1.rs
new file mode 100644 (file)
index 0000000..56b360e
--- /dev/null
@@ -0,0 +1,17 @@
+// compile-flags: -C opt-level=3 -Z insert-sideeffect
+
+#![crate_type = "lib"]
+
+fn infinite_loop() -> u8 {
+    loop {}
+}
+
+// CHECK-LABEL: @test
+#[no_mangle]
+fn test() -> u8 {
+    // CHECK-NOT: unreachable
+    // CHECK: br label %{{.+}}
+    // CHECK-NOT: unreachable
+    let x = infinite_loop();
+    x
+}
diff --git a/src/test/codegen/non-terminate/infinite-loop-2.rs b/src/test/codegen/non-terminate/infinite-loop-2.rs
new file mode 100644 (file)
index 0000000..2921ab6
--- /dev/null
@@ -0,0 +1,19 @@
+// compile-flags: -C opt-level=3 -Z insert-sideeffect
+
+#![crate_type = "lib"]
+
+fn infinite_loop() -> u8 {
+    let i = 2;
+    while i > 1 {}
+    1
+}
+
+// CHECK-LABEL: @test
+#[no_mangle]
+fn test() -> u8 {
+    // CHECK-NOT: unreachable
+    // CHECK: br label %{{.+}}
+    // CHECK-NOT: unreachable
+    let x = infinite_loop();
+    x
+}
diff --git a/src/test/codegen/non-terminate/infinite-recursion.rs b/src/test/codegen/non-terminate/infinite-recursion.rs
new file mode 100644 (file)
index 0000000..1f292ce
--- /dev/null
@@ -0,0 +1,14 @@
+// compile-flags: -C opt-level=3 -Z insert-sideeffect
+
+#![crate_type = "lib"]
+
+#![allow(unconditional_recursion)]
+
+// CHECK-LABEL: @infinite_recursion
+#[no_mangle]
+fn infinite_recursion() -> u8 {
+    // CHECK-NOT: ret i8 undef
+    // CHECK: br label %{{.+}}
+    // CHECK-NOT: ret i8 undef
+    infinite_recursion()
+}
diff --git a/src/test/codegen/nounwind-extern.rs b/src/test/codegen/nounwind-extern.rs
deleted file mode 100644 (file)
index 54d6a8d..0000000
+++ /dev/null
@@ -1,6 +0,0 @@
-// compile-flags: -O
-
-#![crate_type = "lib"]
-
-// CHECK: Function Attrs: norecurse nounwind
-pub extern fn foo() {}
diff --git a/src/test/codegen/unwind-extern-exports.rs b/src/test/codegen/unwind-extern-exports.rs
new file mode 100644 (file)
index 0000000..d924a3b
--- /dev/null
@@ -0,0 +1,20 @@
+// compile-flags: -C opt-level=0
+// ignore-emscripten compiled with panic=abort by default
+
+#![crate_type = "lib"]
+#![feature(unwind_attributes)]
+
+// Make sure these all do *not* get the attribute.
+// We disable optimizations to prevent LLVM from inferring the attribute.
+// CHECK-NOT: nounwind
+
+// "C" ABI
+// pub extern fn foo() {} // FIXME right now we don't abort-on-panic but add `nounwind` nevertheless
+#[unwind(allowed)]
+pub extern fn foo_allowed() {}
+
+// "Rust"
+// (`extern "Rust"` could be removed as all `fn` get it implicitly; we leave it in for clarity.)
+pub extern "Rust" fn bar() {}
+#[unwind(allowed)]
+pub extern "Rust" fn bar_allowed() {}
diff --git a/src/test/codegen/unwind-extern-imports.rs b/src/test/codegen/unwind-extern-imports.rs
new file mode 100644 (file)
index 0000000..d88a498
--- /dev/null
@@ -0,0 +1,42 @@
+// compile-flags: -C no-prepopulate-passes
+// ignore-emscripten compiled with panic=abort by default
+
+#![crate_type = "lib"]
+#![feature(unwind_attributes)]
+
+extern {
+// CHECK: Function Attrs:{{.*}}nounwind
+// CHECK-NEXT: declare void @extern_fn
+    fn extern_fn();
+// CHECK-NOT: Function Attrs:{{.*}}nounwind
+// CHECK: declare void @unwinding_extern_fn
+    #[unwind(allowed)]
+    fn unwinding_extern_fn();
+// CHECK-NOT: nounwind
+// CHECK: declare void @aborting_extern_fn
+    #[unwind(aborts)]
+    fn aborting_extern_fn(); // FIXME: we want to have the attribute here
+}
+
+extern "Rust" {
+// CHECK-NOT: nounwind
+// CHECK: declare void @rust_extern_fn
+    fn rust_extern_fn();
+// CHECK-NOT: nounwind
+// CHECK: declare void @rust_unwinding_extern_fn
+    #[unwind(allowed)]
+    fn rust_unwinding_extern_fn();
+// CHECK-NOT: nounwind
+// CHECK: declare void @rust_aborting_extern_fn
+    #[unwind(aborts)]
+    fn rust_aborting_extern_fn(); // FIXME: we want to have the attribute here
+}
+
+pub unsafe fn force_declare() {
+    extern_fn();
+    unwinding_extern_fn();
+    aborting_extern_fn();
+    rust_extern_fn();
+    rust_unwinding_extern_fn();
+    rust_aborting_extern_fn();
+}
index fc10824f0c03cf3e85384b3a09d61e8667592d9b..add4eef13c78477d73172764b8fc3365d671d624 100644 (file)
@@ -14,6 +14,7 @@ fn main() {
     //~^ ERROR const_err
     let _e = [5u8][1];
     //~^ ERROR const_err
+    //~| ERROR this expression will panic at runtime
     black_box(b);
     black_box(c);
     black_box(d);
diff --git a/src/test/mir-opt/const_prop/aggregate.rs b/src/test/mir-opt/const_prop/aggregate.rs
new file mode 100644 (file)
index 0000000..0937d37
--- /dev/null
@@ -0,0 +1,25 @@
+// compile-flags: -O
+
+fn main() {
+    let x = (0, 1, 2).1 + 0;
+}
+
+// END RUST SOURCE
+// START rustc.main.ConstProp.before.mir
+//  bb0: {
+//      ...
+//      _3 = (const 0i32, const 1i32, const 2i32);
+//      _2 = (_3.1: i32);
+//      _1 = Add(move _2, const 0i32);
+//      ...
+//  }
+// END rustc.main.ConstProp.before.mir
+// START rustc.main.ConstProp.after.mir
+//  bb0: {
+//      ...
+//      _3 = (const 0i32, const 1i32, const 2i32);
+//      _2 = const 1i32;
+//      _1 = Add(move _2, const 0i32);
+//      ...
+//  }
+// END rustc.main.ConstProp.after.mir
diff --git a/src/test/mir-opt/const_prop/boxes.rs b/src/test/mir-opt/const_prop/boxes.rs
new file mode 100644 (file)
index 0000000..cf134da
--- /dev/null
@@ -0,0 +1,56 @@
+// compile-flags: -O
+// ignore-emscripten compiled with panic=abort by default
+// ignore-wasm32
+// ignore-wasm64
+
+#![feature(box_syntax)]
+
+// Note: this test verifies that we, in fact, do not const prop `box`
+
+fn main() {
+    let x = *(box 42) + 0;
+}
+
+// END RUST SOURCE
+// START rustc.main.ConstProp.before.mir
+//  bb0: {
+//      ...
+//      _4 = Box(i32);
+//      (*_4) = const 42i32;
+//      _3 = move _4;
+//      ...
+//      _2 = (*_3);
+//      _1 = Add(move _2, const 0i32);
+//      ...
+//      drop(_3) -> [return: bb2, unwind: bb1];
+//  }
+//  bb1 (cleanup): {
+//      resume;
+//  }
+//  bb2: {
+//      ...
+//      _0 = ();
+//      ...
+//  }
+// END rustc.main.ConstProp.before.mir
+// START rustc.main.ConstProp.after.mir
+//  bb0: {
+//      ...
+//      _4 = Box(i32);
+//      (*_4) = const 42i32;
+//      _3 = move _4;
+//      ...
+//      _2 = (*_3);
+//      _1 = Add(move _2, const 0i32);
+//      ...
+//      drop(_3) -> [return: bb2, unwind: bb1];
+//  }
+//  bb1 (cleanup): {
+//      resume;
+//  }
+//  bb2: {
+//      ...
+//      _0 = ();
+//      ...
+//  }
+// END rustc.main.ConstProp.after.mir
diff --git a/src/test/mir-opt/const_prop/discriminant.rs b/src/test/mir-opt/const_prop/discriminant.rs
new file mode 100644 (file)
index 0000000..07bbd92
--- /dev/null
@@ -0,0 +1,53 @@
+// compile-flags: -O
+
+fn main() {
+    let x = (if let Some(true) = Some(true) { 42 } else { 10 }) + 0;
+}
+
+// END RUST SOURCE
+// START rustc.main.ConstProp.before.mir
+//  bb0: {
+//      ...
+//      _3 = std::option::Option::<bool>::Some(const true,);
+//      _4 = discriminant(_3);
+//      switchInt(move _4) -> [1isize: bb3, otherwise: bb2];
+//  }
+//  bb1: {
+//      _2 = const 42i32;
+//      goto -> bb4;
+//  }
+//  bb2: {
+//      _2 = const 10i32;
+//      goto -> bb4;
+//  }
+//  bb3: {
+//      switchInt(((_3 as Some).0: bool)) -> [false: bb2, otherwise: bb1];
+//  }
+//  bb4: {
+//      _1 = Add(move _2, const 0i32);
+//      ...
+//  }
+// END rustc.main.ConstProp.before.mir
+// START rustc.main.ConstProp.after.mir
+//  bb0: {
+//      ...
+//      _3 = const Scalar(0x01) : std::option::Option<bool>;
+//      _4 = const 1isize;
+//      switchInt(const 1isize) -> [1isize: bb3, otherwise: bb2];
+//  }
+//  bb1: {
+//      _2 = const 42i32;
+//      goto -> bb4;
+//  }
+//  bb2: {
+//      _2 = const 10i32;
+//      goto -> bb4;
+//  }
+//  bb3: {
+//      switchInt(const true) -> [false: bb2, otherwise: bb1];
+//  }
+//  bb4: {
+//      _1 = Add(move _2, const 0i32);
+//      ...
+//  }
+// END rustc.main.ConstProp.after.mir
index e9b61690cf89e43b40eda855ca94340cb1a21c93..ad7f195676a68b194707837b52d1cef11c607725 100644 (file)
@@ -16,7 +16,7 @@ fn main() {
 // START rustc.main.ConstProp.after.mir
 //  bb0: {
 //      ...
-//      _3 = const Scalar(AllocId(0).0x0) : fn();
+//      _3 = const main;
 //      _2 = move _3 as usize (Misc);
 //      ...
 //      _1 = move _2 as *const fn() (Misc);
diff --git a/src/test/mir-opt/const_prop/repeat.rs b/src/test/mir-opt/const_prop/repeat.rs
new file mode 100644 (file)
index 0000000..fb091ad
--- /dev/null
@@ -0,0 +1,37 @@
+// compile-flags: -O
+
+fn main() {
+    let x: u32 = [42; 8][2] + 0;
+}
+
+// END RUST SOURCE
+// START rustc.main.ConstProp.before.mir
+//  bb0: {
+//      ...
+//      _3 = [const 42u32; 8];
+//      ...
+//      _4 = const 2usize;
+//      _5 = const 8usize;
+//      _6 = Lt(_4, _5);
+//      assert(move _6, "index out of bounds: the len is move _5 but the index is _4") -> bb1;
+//  }
+//  bb1: {
+//      _2 = _3[_4];
+//      _1 = Add(move _2, const 0u32);
+//      ...
+//      return;
+//  }
+// END rustc.main.ConstProp.before.mir
+// START rustc.main.ConstProp.after.mir
+//  bb0: {
+//      ...
+//      _6 = const true;
+//      assert(const true, "index out of bounds: the len is move _5 but the index is _4") -> bb1;
+//  }
+//  bb1: {
+//      _2 = const 42u32;
+//      _1 = Add(move _2, const 0u32);
+//      ...
+//      return;
+//  }
+// END rustc.main.ConstProp.after.mir
diff --git a/src/test/run-fail/adjust_never.rs b/src/test/run-fail/adjust_never.rs
deleted file mode 100644 (file)
index 8661a2f..0000000
+++ /dev/null
@@ -1,10 +0,0 @@
-// Test that a variable of type ! can coerce to another type.
-
-// error-pattern:explicit
-
-#![feature(never_type)]
-
-fn main() {
-    let x: ! = panic!();
-    let y: u32 = x;
-}
diff --git a/src/test/run-fail/call-fn-never-arg.rs b/src/test/run-fail/call-fn-never-arg.rs
deleted file mode 100644 (file)
index f5b2cfa..0000000
+++ /dev/null
@@ -1,14 +0,0 @@
-// Test that we can use a ! for an argument of type !
-
-// error-pattern:wowzers!
-
-#![feature(never_type)]
-#![allow(unreachable_code)]
-
-fn foo(x: !) -> ! {
-    x
-}
-
-fn main() {
-    foo(panic!("wowzers!"))
-}
diff --git a/src/test/run-fail/cast-never.rs b/src/test/run-fail/cast-never.rs
deleted file mode 100644 (file)
index 0b05a4b..0000000
+++ /dev/null
@@ -1,10 +0,0 @@
-// Test that we can explicitly cast ! to another type
-
-// error-pattern:explicit
-
-#![feature(never_type)]
-
-fn main() {
-    let x: ! = panic!();
-    let y: u32 = x as u32;
-}
diff --git a/src/test/run-fail/never-associated-type.rs b/src/test/run-fail/never-associated-type.rs
deleted file mode 100644 (file)
index 587f0f7..0000000
+++ /dev/null
@@ -1,23 +0,0 @@
-// Test that we can use ! as an associated type.
-
-// error-pattern:kapow!
-
-#![feature(never_type)]
-
-trait Foo {
-    type Wow;
-
-    fn smeg(&self) -> Self::Wow;
-}
-
-struct Blah;
-impl Foo for Blah {
-    type Wow = !;
-    fn smeg(&self) -> ! {
-        panic!("kapow!");
-    }
-}
-
-fn main() {
-    Blah.smeg();
-}
diff --git a/src/test/run-fail/never-type-arg.rs b/src/test/run-fail/never-type-arg.rs
deleted file mode 100644 (file)
index 1747e96..0000000
+++ /dev/null
@@ -1,17 +0,0 @@
-// Test that we can use ! as an argument to a trait impl.
-
-// error-pattern:oh no!
-
-#![feature(never_type)]
-
-struct Wub;
-
-impl PartialEq<!> for Wub {
-    fn eq(&self, other: &!) -> bool {
-        *other
-    }
-}
-
-fn main() {
-    let _ = Wub == panic!("oh no!");
-}
index 793f495240d578728ab6a1d8e12aaa83115460f3..58dfc5710ae4e3d7a619f6c1ffe2598a2f79ec5f 100644 (file)
@@ -2,6 +2,7 @@
 // compile-flags: -C debug-assertions
 
 #![warn(exceeding_bitshifts)]
+#![warn(const_err)]
 
 fn main() {
     let _n = 1i64 >> [64][0];
index d6b2f8dc9f9af3d1672d5f384733ee8e5b253f6a..c2fec5e4860af772e9449b5e997c563c54027f8c 100644 (file)
@@ -2,6 +2,7 @@
 // compile-flags: -C debug-assertions
 
 #![warn(exceeding_bitshifts)]
+#![warn(const_err)]
 #![feature(const_indexing)]
 
 fn main() {
diff --git a/src/test/run-make-fulldeps/linker-output-non-utf8/Makefile b/src/test/run-make-fulldeps/linker-output-non-utf8/Makefile
deleted file mode 100644 (file)
index b47ce17..0000000
+++ /dev/null
@@ -1,23 +0,0 @@
--include ../tools.mk
-
-# Make sure we don't ICE if the linker prints a non-UTF-8 error message.
-
-# ignore-windows
-#
-# This does not work in its current form on windows, possibly due to
-# gcc bugs or something about valid Windows paths.  See issue #29151
-# for more information.
-
-# ignore-macos
-#
-# This also does not work on Apple APFS due to the filesystem requiring
-# valid UTF-8 paths.
-
-# The zzz it to allow humans to tab complete or glob this thing.
-bad_dir := $(TMPDIR)/zzz$$'\xff'
-
-all:
-       $(RUSTC) library.rs
-       mkdir $(bad_dir)
-       mv $(TMPDIR)/liblibrary.a $(bad_dir)
-       $(RUSTC) -L $(bad_dir) exec.rs 2>&1 | $(CGREP) this_symbol_not_defined
diff --git a/src/test/run-make-fulldeps/linker-output-non-utf8/exec.rs b/src/test/run-make-fulldeps/linker-output-non-utf8/exec.rs
deleted file mode 100644 (file)
index 6864018..0000000
+++ /dev/null
@@ -1,6 +0,0 @@
-#[link(name="library")]
-extern "C" {
-    fn foo();
-}
-
-fn main() { unsafe { foo(); } }
diff --git a/src/test/run-make-fulldeps/linker-output-non-utf8/library.rs b/src/test/run-make-fulldeps/linker-output-non-utf8/library.rs
deleted file mode 100644 (file)
index 6689a82..0000000
+++ /dev/null
@@ -1,10 +0,0 @@
-#![crate_type = "staticlib"]
-
-extern "C" {
-    fn this_symbol_not_defined();
-}
-
-#[no_mangle]
-pub extern "C" fn foo() {
-    unsafe { this_symbol_not_defined(); }
-}
index 51d8a4a947adc553d035d7c4164a0be2a12105c2..3a377c32993d5497ef982964a1198944b3c9df4d 100644 (file)
@@ -24,4 +24,7 @@ endif
 
 all:
        $(RUSTC) -g -Z sanitizer=address -Z print-link-args $(EXTRA_RUSTFLAG) overflow.rs | $(CGREP) librustc_asan
+       # Verify that stack buffer overflow is detected:
        $(TMPDIR)/overflow 2>&1 | $(CGREP) stack-buffer-overflow
+       # Verify that variable name is included in address sanitizer report:
+       $(TMPDIR)/overflow 2>&1 | $(CGREP) "'xs'"
index c2eb4caea26358b4a66e03fac7653e2ae3eab9b8..9868fc1d41700ab5b1f66d2b80a5edf2614f4b9c 100644 (file)
@@ -2,4 +2,4 @@
 
 # The target used below doesn't support atomic CAS operations. Verify that's the case
 all:
-       $(RUSTC) --print cfg --target thumbv6m-none-eabi | $(CGREP) -v 'target_has_atomic="cas"'
+       $(RUSTC) --print cfg --target thumbv6m-none-eabi | $(CGREP) -v 'target_has_atomic="ptr"'
diff --git a/src/test/rustdoc-ui/doc-test-doctest-feature.rs b/src/test/rustdoc-ui/doc-test-doctest-feature.rs
new file mode 100644 (file)
index 0000000..984d49b
--- /dev/null
@@ -0,0 +1,15 @@
+// build-pass
+// compile-flags:--test
+// normalize-stdout-test: "src/test/rustdoc-ui" -> "$$DIR"
+
+#![feature(cfg_doctest)]
+
+// Make sure `cfg(doctest)` is set when finding doctests but not inside
+// the doctests.
+
+/// ```
+/// #![feature(cfg_doctest)]
+/// assert!(!cfg!(doctest));
+/// ```
+#[cfg(doctest)]
+pub struct Foo;
diff --git a/src/test/rustdoc-ui/doc-test-doctest-feature.stdout b/src/test/rustdoc-ui/doc-test-doctest-feature.stdout
new file mode 100644 (file)
index 0000000..75d29fa
--- /dev/null
@@ -0,0 +1,6 @@
+
+running 1 test
+test $DIR/doc-test-doctest-feature.rs - Foo (line 10) ... ok
+
+test result: ok. 1 passed; 0 failed; 0 ignored; 0 measured; 0 filtered out
+
diff --git a/src/test/rustdoc/macro-in-closure.rs b/src/test/rustdoc/macro-in-closure.rs
new file mode 100644 (file)
index 0000000..298ff60
--- /dev/null
@@ -0,0 +1,9 @@
+// Regression test for a rustdoc ICE encountered in PR #65252.
+
+#![feature(decl_macro)]
+
+fn main() {
+    || {
+        macro m() {}
+    };
+}
index 6c5f539b87185e5b897100e737e5a72dc2d4b969..927e2c0820eb2bf35118ae5b2a1c2ad62b5eb2b1 100644 (file)
 use syntax::ast::*;
 use syntax::attr::*;
 use syntax::ast;
+use syntax::sess::ParseSess;
 use syntax::source_map::{FilePathMapping, FileName};
 use syntax::parse;
-use syntax::parse::{ParseSess, PResult};
+use syntax::parse::PResult;
 use syntax::parse::new_parser_from_source_str;
 use syntax::parse::parser::Parser;
 use syntax::parse::token;
 use syntax::ptr::P;
-use syntax::parse::attr::*;
+use syntax::parse::parser::attr::*;
 use syntax::print::pprust;
 use std::fmt;
 
index c053c715248769649c14dffdf83342b47ae3d22e..3d08c1c9eeefa2e0f52c6d45883614510e8598ae 100644 (file)
@@ -5,9 +5,10 @@
 
 extern crate rustc_driver;
 extern crate syntax;
+extern crate syntax_expand;
 
 use rustc_driver::plugin::Registry;
-use syntax::ext::base::SyntaxExtension;
+use syntax_expand::base::SyntaxExtension;
 use syntax::feature_gate::AttributeType;
 use syntax::symbol::Symbol;
 
index 6fb99b2c98361dec7155763b48e04fefb5cda351..bb0ebf693d0bec1113ebea9f468d134a4db724ca 100644 (file)
@@ -5,10 +5,11 @@
 extern crate rustc;
 extern crate rustc_driver;
 extern crate syntax;
+extern crate syntax_expand;
 
 use rustc_driver::plugin::Registry;
 use syntax::attr;
-use syntax::ext::base::*;
+use syntax_expand::base::*;
 use syntax::feature_gate::AttributeType::Whitelisted;
 use syntax::symbol::Symbol;
 
index 5ff24cff23c558aae7bee040c8c298b32e9dcd93..cccdfea208327ff66b9f19676d0652aa9d11f6af 100644 (file)
@@ -4,14 +4,15 @@
 #![feature(box_syntax, rustc_private)]
 
 extern crate syntax;
+extern crate syntax_expand;
 extern crate syntax_pos;
 extern crate rustc;
 extern crate rustc_driver;
 
 use std::borrow::ToOwned;
 use syntax::ast;
-use syntax::ext::base::{SyntaxExtension, SyntaxExtensionKind};
-use syntax::ext::base::{TTMacroExpander, ExtCtxt, MacResult, MacEager};
+use syntax_expand::base::{SyntaxExtension, SyntaxExtensionKind};
+use syntax_expand::base::{TTMacroExpander, ExtCtxt, MacResult, MacEager};
 use syntax::print::pprust;
 use syntax::symbol::Symbol;
 use syntax_pos::Span;
index 2b57e9289b5255324b90632f9953d8dc7a11f471..3524f449c745221636ddd10bd58c740f9a72b3e9 100644 (file)
 #![feature(plugin_registrar, rustc_private)]
 
 extern crate syntax;
+extern crate syntax_expand;
 extern crate syntax_pos;
 extern crate rustc;
 extern crate rustc_driver;
 
 use syntax::parse::token::{self, Token};
 use syntax::tokenstream::{TokenTree, TokenStream};
-use syntax::ext::base::{ExtCtxt, MacResult, DummyResult, MacEager};
+use syntax_expand::base::{ExtCtxt, MacResult, DummyResult, MacEager};
 use syntax_pos::Span;
 use rustc_driver::plugin::Registry;
 
index aa031fb7a63d2c75bae1c48291a2ca986c5efecf..aec1325844f2a837a044985d486b654bc68a5c36 100644 (file)
@@ -7,11 +7,11 @@ LL | #![plugin(attr_plugin_test)]
    = note: for more information, see https://github.com/rust-lang/rust/issues/29597
    = help: add `#![feature(plugin)]` to the crate attributes to enable
 
-warning: use of deprecated attribute `plugin`: compiler plugins are deprecated. See https://github.com/rust-lang/rust/issues/29597
+warning: use of deprecated attribute `plugin`: compiler plugins are deprecated. See https://github.com/rust-lang/rust/pull/64675
   --> $DIR/gated-plugin.rs:3:1
    |
 LL | #![plugin(attr_plugin_test)]
-   | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: remove this attribute
+   | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: may be removed in a future compiler version
    |
    = note: `#[warn(deprecated)]` on by default
 
index 3afdb1fbf807db6fe70ad7b447b9129c987bfe9f..e76044c56ef94132a4550ddf17037132161e6a7b 100644 (file)
@@ -1,8 +1,8 @@
-warning: use of deprecated attribute `plugin`: compiler plugins are deprecated. See https://github.com/rust-lang/rust/issues/29597
+warning: use of deprecated attribute `plugin`: compiler plugins are deprecated. See https://github.com/rust-lang/rust/pull/64675
   --> $DIR/issue-15778-fail.rs:6:1
    |
 LL | #![plugin(lint_for_crate)]
-   | ^^^^^^^^^^^^^^^^^^^^^^^^^^ help: remove this attribute
+   | ^^^^^^^^^^^^^^^^^^^^^^^^^^ help: may be removed in a future compiler version
    |
    = note: `#[warn(deprecated)]` on by default
 
index f81c314c23a2daf95eb0e124f0c8f85b220079f8..0c30d2cdcbf1c6fd9c409bf475fd2fd0c4a049b3 100644 (file)
@@ -1,8 +1,8 @@
-warning: use of deprecated attribute `plugin`: compiler plugins are deprecated. See https://github.com/rust-lang/rust/issues/29597
+warning: use of deprecated attribute `plugin`: compiler plugins are deprecated. See https://github.com/rust-lang/rust/pull/64675
   --> $DIR/issue-15778-pass.rs:8:1
    |
 LL | #![plugin(lint_for_crate_rpass)]
-   | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: remove this attribute
+   | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: may be removed in a future compiler version
    |
    = note: `#[warn(deprecated)]` on by default
 
index 186721e2bb9e60aa158195d087ebfada49bd42c1..d0ad0275ed1584ec5307b58cd77496bccb77ef19 100644 (file)
@@ -1,8 +1,8 @@
-warning: use of deprecated attribute `plugin`: compiler plugins are deprecated. See https://github.com/rust-lang/rust/issues/29597
+warning: use of deprecated attribute `plugin`: compiler plugins are deprecated. See https://github.com/rust-lang/rust/pull/64675
   --> $DIR/issue-40001.rs:6:1
    |
 LL | #![plugin(issue_40001_plugin)]
-   | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: remove this attribute
+   | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: may be removed in a future compiler version
    |
    = note: `#[warn(deprecated)]` on by default
 
index 28065bf3946c05b0a9659355b20828bdba5542ea..f8a4f271da5aa7ef1589b20ebe0f682123bf11d5 100644 (file)
@@ -1,8 +1,8 @@
-warning: use of deprecated attribute `plugin`: compiler plugins are deprecated. See https://github.com/rust-lang/rust/issues/29597
+warning: use of deprecated attribute `plugin`: compiler plugins are deprecated. See https://github.com/rust-lang/rust/pull/64675
   --> $DIR/lint-group-plugin-deny-cmdline.rs:7:1
    |
 LL | #![plugin(lint_group_plugin_test)]
-   | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: remove this attribute
+   | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: may be removed in a future compiler version
    |
    = note: `#[warn(deprecated)]` on by default
 
index a93cae1a2b1ec6675346af1791f97d50355dd4ac..58dc78b06d3f3b1721435c566a8df15fc5008066 100644 (file)
@@ -1,8 +1,8 @@
-warning: use of deprecated attribute `plugin`: compiler plugins are deprecated. See https://github.com/rust-lang/rust/issues/29597
+warning: use of deprecated attribute `plugin`: compiler plugins are deprecated. See https://github.com/rust-lang/rust/pull/64675
   --> $DIR/lint-group-plugin.rs:6:1
    |
 LL | #![plugin(lint_group_plugin_test)]
-   | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: remove this attribute
+   | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: may be removed in a future compiler version
    |
    = note: `#[warn(deprecated)]` on by default
 
index 2185929e893b7f161db054e1566ac285b07fd5f8..c6d198dc458a60941586316c3f738c72ba54380b 100644 (file)
@@ -1,8 +1,8 @@
-warning: use of deprecated attribute `plugin`: compiler plugins are deprecated. See https://github.com/rust-lang/rust/issues/29597
+warning: use of deprecated attribute `plugin`: compiler plugins are deprecated. See https://github.com/rust-lang/rust/pull/64675
   --> $DIR/lint-plugin-cmdline-allow.rs:8:1
    |
 LL | #![plugin(lint_plugin_test)]
-   | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: remove this attribute
+   | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: may be removed in a future compiler version
    |
    = note: `#[warn(deprecated)]` on by default
 
index a0cd9687f5bc01733922cabb3939f24add6514eb..c611023e5490c2a414bbe4a95edb6a522d9c3fbe 100644 (file)
@@ -1,8 +1,8 @@
-warning: use of deprecated attribute `plugin`: compiler plugins are deprecated. See https://github.com/rust-lang/rust/issues/29597
+warning: use of deprecated attribute `plugin`: compiler plugins are deprecated. See https://github.com/rust-lang/rust/pull/64675
   --> $DIR/lint-plugin-deny-attr.rs:5:1
    |
 LL | #![plugin(lint_plugin_test)]
-   | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: remove this attribute
+   | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: may be removed in a future compiler version
    |
    = note: `#[warn(deprecated)]` on by default
 
index 3c64025e5eb2307647be10aa185f0969214626be..03668fbfe664f6125eba53d33e58b6eeec35b8b2 100644 (file)
@@ -1,8 +1,8 @@
-warning: use of deprecated attribute `plugin`: compiler plugins are deprecated. See https://github.com/rust-lang/rust/issues/29597
+warning: use of deprecated attribute `plugin`: compiler plugins are deprecated. See https://github.com/rust-lang/rust/pull/64675
   --> $DIR/lint-plugin-deny-cmdline.rs:6:1
    |
 LL | #![plugin(lint_plugin_test)]
-   | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: remove this attribute
+   | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: may be removed in a future compiler version
    |
    = note: `#[warn(deprecated)]` on by default
 
index c0c43855c92a181100a9601b23448ae25e052411..c0de1feee7d46cf92d2ec5441ff7f68a2cce56a4 100644 (file)
@@ -7,11 +7,11 @@ LL | #![forbid(test_lint)]
 LL | #[allow(test_lint)]
    |         ^^^^^^^^^ overruled by previous forbid
 
-warning: use of deprecated attribute `plugin`: compiler plugins are deprecated. See https://github.com/rust-lang/rust/issues/29597
+warning: use of deprecated attribute `plugin`: compiler plugins are deprecated. See https://github.com/rust-lang/rust/pull/64675
   --> $DIR/lint-plugin-forbid-attrs.rs:5:1
    |
 LL | #![plugin(lint_plugin_test)]
-   | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: remove this attribute
+   | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: may be removed in a future compiler version
    |
    = note: `#[warn(deprecated)]` on by default
 
index 99d013921911b06d1dba87591ca463d18cb501e4..f189efbf61d859b96ebb2443e5fe736c15568260 100644 (file)
@@ -6,11 +6,11 @@ LL | #[allow(test_lint)]
    |
    = note: `forbid` lint level was set on command line
 
-warning: use of deprecated attribute `plugin`: compiler plugins are deprecated. See https://github.com/rust-lang/rust/issues/29597
+warning: use of deprecated attribute `plugin`: compiler plugins are deprecated. See https://github.com/rust-lang/rust/pull/64675
   --> $DIR/lint-plugin-forbid-cmdline.rs:6:1
    |
 LL | #![plugin(lint_plugin_test)]
-   | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: remove this attribute
+   | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: may be removed in a future compiler version
    |
    = note: `#[warn(deprecated)]` on by default
 
index 2ca5eefe4376c380e8acae03a33fd823b8189d1c..e95650090dde3400ea6974dd1546f181879a265c 100644 (file)
@@ -1,8 +1,8 @@
-warning: use of deprecated attribute `plugin`: compiler plugins are deprecated. See https://github.com/rust-lang/rust/issues/29597
+warning: use of deprecated attribute `plugin`: compiler plugins are deprecated. See https://github.com/rust-lang/rust/pull/64675
   --> $DIR/lint-plugin.rs:5:1
    |
 LL | #![plugin(lint_plugin_test)]
-   | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: remove this attribute
+   | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: may be removed in a future compiler version
    |
    = note: `#[warn(deprecated)]` on by default
 
index 71c3dc929b2e5a16085965ee4126b89aff5c8a05..239732521d59e64d5c78b7ffa799cdca0c0a97b5 100644 (file)
@@ -2,11 +2,11 @@ warning: lint name `test_lint` is deprecated and does not have an effect anymore
    |
    = note: requested on the command line with `-A test_lint`
 
-warning: use of deprecated attribute `plugin`: compiler plugins are deprecated. See https://github.com/rust-lang/rust/issues/29597
+warning: use of deprecated attribute `plugin`: compiler plugins are deprecated. See https://github.com/rust-lang/rust/pull/64675
   --> $DIR/lint-tool-cmdline-allow.rs:8:1
    |
 LL | #![plugin(lint_tool_test)]
-   | ^^^^^^^^^^^^^^^^^^^^^^^^^^ help: remove this attribute
+   | ^^^^^^^^^^^^^^^^^^^^^^^^^^ help: may be removed in a future compiler version
    |
    = note: `#[warn(deprecated)]` on by default
 
index c727cfc7015414640318faec1babf46b7d46993c..d4031a780c3d47e0d3e8ed0adf42eafeecada0b2 100644 (file)
@@ -32,11 +32,11 @@ warning: lint name `test_lint` is deprecated and may not have an effect in the f
 LL | #![cfg_attr(foo, warn(test_lint))]
    |                       ^^^^^^^^^ help: change it to: `clippy::test_lint`
 
-warning: use of deprecated attribute `plugin`: compiler plugins are deprecated. See https://github.com/rust-lang/rust/issues/29597
+warning: use of deprecated attribute `plugin`: compiler plugins are deprecated. See https://github.com/rust-lang/rust/pull/64675
   --> $DIR/lint-tool-test.rs:6:1
    |
 LL | #![plugin(lint_tool_test)]
-   | ^^^^^^^^^^^^^^^^^^^^^^^^^^ help: remove this attribute
+   | ^^^^^^^^^^^^^^^^^^^^^^^^^^ help: may be removed in a future compiler version
    |
    = note: `#[warn(deprecated)]` on by default
 
index ebc092671a77c14e417509015cf22447760f79a7..61b53bb2b7cdfc0645349f17b1094510a4c6db2d 100644 (file)
@@ -1,8 +1,8 @@
-warning: use of deprecated attribute `plugin`: compiler plugins are deprecated. See https://github.com/rust-lang/rust/issues/29597
+warning: use of deprecated attribute `plugin`: compiler plugins are deprecated. See https://github.com/rust-lang/rust/pull/64675
   --> $DIR/llvm-pass-plugin.rs:6:1
    |
 LL | #![plugin(llvm_pass_plugin)]
-   | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: remove this attribute
+   | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: may be removed in a future compiler version
    |
    = note: `#[warn(deprecated)]` on by default
 
index 509331ceb218f729613d39a88b33e4f7a2666bda..529da32e10eeb1321a8516a0bd661313a82c0946 100644 (file)
@@ -1,8 +1,8 @@
-warning: use of deprecated attribute `plugin`: compiler plugins are deprecated. See https://github.com/rust-lang/rust/issues/29597
+warning: use of deprecated attribute `plugin`: compiler plugins are deprecated. See https://github.com/rust-lang/rust/pull/64675
   --> $DIR/lto-syntax-extension.rs:9:1
    |
 LL | #![plugin(lto_syntax_extension_plugin)]
-   | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: remove this attribute
+   | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: may be removed in a future compiler version
    |
    = note: `#[warn(deprecated)]` on by default
 
index 47d5ecb3742a878205487cf7acf3d2720482b898..b5bd761f1b580bedd4881995ff57f3a49cda3416 100644 (file)
@@ -4,11 +4,11 @@ error[E0457]: plugin `rlib_crate_test` only found in rlib format, but must be av
 LL | #![plugin(rlib_crate_test)]
    |           ^^^^^^^^^^^^^^^
 
-warning: use of deprecated attribute `plugin`: compiler plugins are deprecated. See https://github.com/rust-lang/rust/issues/29597
+warning: use of deprecated attribute `plugin`: compiler plugins are deprecated. See https://github.com/rust-lang/rust/pull/64675
   --> $DIR/macro-crate-rlib.rs:6:1
    |
 LL | #![plugin(rlib_crate_test)]
-   | ^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: remove this attribute
+   | ^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: may be removed in a future compiler version
    |
    = note: `#[warn(deprecated)]` on by default
 
index 8631bcca6d27eae581ec9d3f5ae0a4b256310ff8..ac97ec70be207c6fca136bd0687cc8108cc8b93a 100644 (file)
@@ -7,8 +7,9 @@
 extern crate syntax;
 
 use std::path::Path;
+use syntax::sess::ParseSess;
 use syntax::source_map::FilePathMapping;
-use syntax::parse::{self, ParseSess};
+use syntax::parse;
 
 #[path = "mod_dir_simple/test.rs"]
 mod gravy;
index 68e143d86eeb2ec800956811ea79f1a6bc4d953c..d06fc480fb52255e8700c3023e0268965b5e9ed1 100644 (file)
@@ -1,8 +1,8 @@
-warning: use of deprecated attribute `plugin`: compiler plugins are deprecated. See https://github.com/rust-lang/rust/issues/29597
+warning: use of deprecated attribute `plugin`: compiler plugins are deprecated. See https://github.com/rust-lang/rust/pull/64675
   --> $DIR/outlive-expansion-phase.rs:6:1
    |
 LL | #![plugin(outlive_expansion_phase)]
-   | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: remove this attribute
+   | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: may be removed in a future compiler version
    |
    = note: `#[warn(deprecated)]` on by default
 
index 0d01a859df8ef2d84202333957075181ed9c83ed..ca3e27069ed2679fb3c0506b7d788085bc8837af 100644 (file)
@@ -1,8 +1,8 @@
-warning: use of deprecated attribute `plugin`: compiler plugins are deprecated. See https://github.com/rust-lang/rust/issues/29597
+warning: use of deprecated attribute `plugin`: compiler plugins are deprecated. See https://github.com/rust-lang/rust/pull/64675
   --> $DIR/plugin-args-1.rs:6:1
    |
 LL | #![plugin(plugin_args)]
-   | ^^^^^^^^^^^^^^^^^^^^^^^ help: remove this attribute
+   | ^^^^^^^^^^^^^^^^^^^^^^^ help: may be removed in a future compiler version
    |
    = note: `#[warn(deprecated)]` on by default
 
index 2bbabd2013817a6176eb477e299a0865ef4875c3..57c06513d5c06c7cbb77a862f2ad684e0f9ce669 100644 (file)
@@ -1,8 +1,8 @@
-warning: use of deprecated attribute `plugin`: compiler plugins are deprecated. See https://github.com/rust-lang/rust/issues/29597
+warning: use of deprecated attribute `plugin`: compiler plugins are deprecated. See https://github.com/rust-lang/rust/pull/64675
   --> $DIR/plugin-args-2.rs:6:1
    |
 LL | #![plugin(plugin_args())]
-   | ^^^^^^^^^^^^^^^^^^^^^^^^^ help: remove this attribute
+   | ^^^^^^^^^^^^^^^^^^^^^^^^^ help: may be removed in a future compiler version
    |
    = note: `#[warn(deprecated)]` on by default
 
index bf4108bd7f8df4b91c4aa264fa2e4b304bf8628e..179f1abc8c49c3814ddc00a02618e5b3bf062548 100644 (file)
@@ -1,8 +1,8 @@
-warning: use of deprecated attribute `plugin`: compiler plugins are deprecated. See https://github.com/rust-lang/rust/issues/29597
+warning: use of deprecated attribute `plugin`: compiler plugins are deprecated. See https://github.com/rust-lang/rust/pull/64675
   --> $DIR/plugin-args-3.rs:6:1
    |
 LL | #![plugin(plugin_args(hello(there), how(are="you")))]
-   | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: remove this attribute
+   | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: may be removed in a future compiler version
    |
    = note: `#[warn(deprecated)]` on by default
 
index a045782a95f778b09b5bc018787f80310dec4c78..8d95d6ff2d89f9e67bc8de31e191ccd048663935 100644 (file)
@@ -1,8 +1,8 @@
-warning: use of deprecated attribute `plugin`: compiler plugins are deprecated. See https://github.com/rust-lang/rust/issues/29597
+warning: use of deprecated attribute `plugin`: compiler plugins are deprecated. See https://github.com/rust-lang/rust/pull/64675
   --> $DIR/plugin-attr-register-deny.rs:5:1
    |
 LL | #![plugin(attr_plugin_test)]
-   | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: remove this attribute
+   | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: may be removed in a future compiler version
    |
    = note: `#[warn(deprecated)]` on by default
 
index 52d27c32897a5d8e613c43812dd100778a348916..365b26d131e79bff90ffc83b087023d0f690b3bf 100644 (file)
@@ -10,11 +10,11 @@ note: consider marking `mac` as `pub` in the imported module
 LL | pub use mac as reexport;
    |         ^^^^^^^^^^^^^^^
 
-warning: use of deprecated attribute `plugin`: compiler plugins are deprecated. See https://github.com/rust-lang/rust/issues/29597
+warning: use of deprecated attribute `plugin`: compiler plugins are deprecated. See https://github.com/rust-lang/rust/pull/64675
   --> $DIR/plugin-reexport.rs:6:1
    |
 LL | #![plugin(attr_plugin_test)]
-   | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: remove this attribute
+   | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: may be removed in a future compiler version
    |
    = note: `#[warn(deprecated)]` on by default
 
index d4aff73590734dfdbae933e01698eebfb4a034fd..932a173bc67a865e5723a34a97e54b0e3e81cae2 100644 (file)
 
 use rustc_data_structures::thin_vec::ThinVec;
 use syntax::ast::*;
+use syntax::sess::ParseSess;
 use syntax::source_map::{Spanned, DUMMY_SP, FileName};
 use syntax::source_map::FilePathMapping;
 use syntax::mut_visit::{self, MutVisitor, visit_clobber};
-use syntax::parse::{self, ParseSess};
+use syntax::parse;
 use syntax::print::pprust;
 use syntax::ptr::P;
 
-
 fn parse_expr(ps: &ParseSess, src: &str) -> Option<P<Expr>> {
     let src_as_string = src.to_string();
 
index 7ac619185a1e07809ab7a13a042e3eee4af53d66..8f3f558e91df55b37d10d7e6bbc8f5ea1309af3c 100644 (file)
@@ -1,8 +1,8 @@
-warning: use of deprecated attribute `plugin`: compiler plugins are deprecated. See https://github.com/rust-lang/rust/issues/29597
+warning: use of deprecated attribute `plugin`: compiler plugins are deprecated. See https://github.com/rust-lang/rust/pull/64675
   --> $DIR/roman-numerals-macro.rs:6:1
    |
 LL | #![plugin(roman_numerals)]
-   | ^^^^^^^^^^^^^^^^^^^^^^^^^^ help: remove this attribute
+   | ^^^^^^^^^^^^^^^^^^^^^^^^^^ help: may be removed in a future compiler version
    |
    = note: `#[warn(deprecated)]` on by default
 
diff --git a/src/test/ui/abi/abort-on-c-abi.rs b/src/test/ui/abi/abort-on-c-abi.rs
deleted file mode 100644 (file)
index 2f08730..0000000
+++ /dev/null
@@ -1,40 +0,0 @@
-// run-pass
-
-#![allow(unused_must_use)]
-#![feature(unwind_attributes)]
-// Since we mark some ABIs as "nounwind" to LLVM, we must make sure that
-// we never unwind through them.
-
-// ignore-cloudabi no env and process
-// ignore-emscripten no processes
-// ignore-sgx no processes
-
-use std::{env, panic};
-use std::io::prelude::*;
-use std::io;
-use std::process::{Command, Stdio};
-
-#[unwind(aborts)] // FIXME(#58794)
-extern "C" fn panic_in_ffi() {
-    panic!("Test");
-}
-
-fn test() {
-    let _ = panic::catch_unwind(|| { panic_in_ffi(); });
-    // The process should have aborted by now.
-    io::stdout().write(b"This should never be printed.\n");
-    let _ = io::stdout().flush();
-}
-
-fn main() {
-    let args: Vec<String> = env::args().collect();
-    if args.len() > 1 && args[1] == "test" {
-        return test();
-    }
-
-    let mut p = Command::new(&args[0])
-                        .stdout(Stdio::piped())
-                        .stdin(Stdio::piped())
-                        .arg("test").spawn().unwrap();
-    assert!(!p.wait().unwrap().success());
-}
diff --git a/src/test/ui/always-inhabited-union-ref.rs b/src/test/ui/always-inhabited-union-ref.rs
deleted file mode 100644 (file)
index 11eae2a..0000000
+++ /dev/null
@@ -1,32 +0,0 @@
-// The precise semantics of inhabitedness with respect to unions and references is currently
-// undecided. This test file currently checks a conservative choice.
-
-#![feature(exhaustive_patterns)]
-#![feature(never_type)]
-
-#![allow(dead_code)]
-#![allow(unreachable_code)]
-
-pub union Foo {
-    foo: !,
-}
-
-fn uninhab_ref() -> &'static ! {
-    unimplemented!()
-}
-
-fn uninhab_union() -> Foo {
-    unimplemented!()
-}
-
-fn match_on_uninhab() {
-    match uninhab_ref() {
-        //~^ ERROR non-exhaustive patterns: type `&'static !` is non-empty
-    }
-
-    match uninhab_union() {
-        //~^ ERROR non-exhaustive patterns: type `Foo` is non-empty
-    }
-}
-
-fn main() {}
diff --git a/src/test/ui/always-inhabited-union-ref.stderr b/src/test/ui/always-inhabited-union-ref.stderr
deleted file mode 100644 (file)
index 792ab6f..0000000
+++ /dev/null
@@ -1,19 +0,0 @@
-error[E0004]: non-exhaustive patterns: type `&'static !` is non-empty
-  --> $DIR/always-inhabited-union-ref.rs:23:11
-   |
-LL |     match uninhab_ref() {
-   |           ^^^^^^^^^^^^^
-   |
-   = help: ensure that all possible cases are being handled, possibly by adding wildcards or more match arms
-
-error[E0004]: non-exhaustive patterns: type `Foo` is non-empty
-  --> $DIR/always-inhabited-union-ref.rs:27:11
-   |
-LL |     match uninhab_union() {
-   |           ^^^^^^^^^^^^^^^
-   |
-   = help: ensure that all possible cases are being handled, possibly by adding wildcards or more match arms
-
-error: aborting due to 2 previous errors
-
-For more information about this error, try `rustc --explain E0004`.
index c213cfeeafa1f31feab92f84035907772aa4ebe0..2ceab394e9558c3e42a54711f4f42e72cc6f2fd3 100644 (file)
@@ -6,7 +6,7 @@ LL |     const NAME: &'a str = "unit";
    |
    = note: expected type `&'static str`
               found type `&'a str`
-note: the lifetime 'a as defined on the impl at 6:6...
+note: the lifetime `'a` as defined on the impl at 6:6...
   --> $DIR/associated-const-impl-wrong-lifetime.rs:6:6
    |
 LL | impl<'a> Foo for &'a () {
index 30b6b4f3909b20456a41cf3ad2e94aa34087c90d..c258892057bf26be1598d0445d5f34feedc209cc 100644 (file)
@@ -4,10 +4,10 @@ error[E0277]: the trait bound `A: Foo` is not satisfied
 LL |     const Y: usize;
    |     --------------- required by `Foo::Y`
 ...
+LL | pub fn test<A: Foo, B: Foo>() {
+   |             -- help: consider further restricting this bound: `A: Foo +`
 LL |     let _array = [4; <A as Foo>::Y];
    |                      ^^^^^^^^^^^^^ the trait `Foo` is not implemented for `A`
-   |
-   = help: consider adding a `where A: Foo` bound
 
 error: aborting due to previous error
 
index 30fa9891a13e10427341e210f2d5496f3637ef7b..f6c8e99e27a8158cfd0276f0d789350a498a9a50 100644 (file)
@@ -4,10 +4,10 @@ error[E0277]: the trait bound `A: Foo` is not satisfied
 LL |     const Y: usize;
    |     --------------- required by `Foo::Y`
 ...
+LL | pub fn test<A: Foo, B: Foo>() {
+   |             -- help: consider further restricting this bound: `A: Foo +`
 LL |     let _array: [u32; <A as Foo>::Y];
    |                       ^^^^^^^^^^^^^ the trait `Foo` is not implemented for `A`
-   |
-   = help: consider adding a `where A: Foo` bound
 
 error: aborting due to previous error
 
diff --git a/src/test/ui/associated-item/issue-48027.rs b/src/test/ui/associated-item/issue-48027.rs
new file mode 100644 (file)
index 0000000..c9b4ccd
--- /dev/null
@@ -0,0 +1,8 @@
+trait Bar {
+    const X: usize;
+    fn return_n(&self) -> [u8; Bar::X]; //~ ERROR: type annotations needed
+}
+
+impl dyn Bar {} //~ ERROR: the trait `Bar` cannot be made into an object
+
+fn main() {}
diff --git a/src/test/ui/associated-item/issue-48027.stderr b/src/test/ui/associated-item/issue-48027.stderr
new file mode 100644 (file)
index 0000000..562146a
--- /dev/null
@@ -0,0 +1,21 @@
+error[E0038]: the trait `Bar` cannot be made into an object
+  --> $DIR/issue-48027.rs:6:6
+   |
+LL |     const X: usize;
+   |           - the trait cannot contain associated consts like `X`
+...
+LL | impl dyn Bar {}
+   |      ^^^^^^^ the trait `Bar` cannot be made into an object
+
+error[E0283]: type annotations needed: cannot resolve `_: Bar`
+  --> $DIR/issue-48027.rs:3:32
+   |
+LL |     const X: usize;
+   |     --------------- required by `Bar::X`
+LL |     fn return_n(&self) -> [u8; Bar::X];
+   |                                ^^^^^^
+
+error: aborting due to 2 previous errors
+
+Some errors have detailed explanations: E0038, E0283.
+For more information about an error, try `rustc --explain E0038`.
index 06e8230aa158933b75ea72080e7d72b54bb205e4..9f6a73cfe39104f15fe8a5564f00e2747f0dc566 100644 (file)
@@ -9,7 +9,10 @@ LL | impl Case1 for S1 {
 error[E0277]: `<<T as Case1>::C as std::iter::Iterator>::Item` is not an iterator
   --> $DIR/bad-bounds-on-assoc-in-trait.rs:37:1
    |
-LL | / fn assume_case1<T: Case1>() {
+LL |   fn assume_case1<T: Case1>() {
+   |   ^                          - help: consider further restricting the associated type: `where <<T as Case1>::C as std::iter::Iterator>::Item: std::iter::Iterator`
+   |  _|
+   | |
 LL | |
 LL | |
 LL | |
@@ -19,7 +22,6 @@ LL | | }
    | |_^ `<<T as Case1>::C as std::iter::Iterator>::Item` is not an iterator
    |
    = help: the trait `std::iter::Iterator` is not implemented for `<<T as Case1>::C as std::iter::Iterator>::Item`
-   = help: consider adding a `where <<T as Case1>::C as std::iter::Iterator>::Item: std::iter::Iterator` bound
 
 error[E0277]: `<<T as Case1>::C as std::iter::Iterator>::Item` cannot be sent between threads safely
   --> $DIR/bad-bounds-on-assoc-in-trait.rs:37:1
@@ -27,7 +29,10 @@ error[E0277]: `<<T as Case1>::C as std::iter::Iterator>::Item` cannot be sent be
 LL |   trait Case1 {
    |   ----------- required by `Case1`
 ...
-LL | / fn assume_case1<T: Case1>() {
+LL |   fn assume_case1<T: Case1>() {
+   |   ^                          - help: consider further restricting the associated type: `where <<T as Case1>::C as std::iter::Iterator>::Item: std::marker::Send`
+   |  _|
+   | |
 LL | |
 LL | |
 LL | |
@@ -37,7 +42,6 @@ LL | | }
    | |_^ `<<T as Case1>::C as std::iter::Iterator>::Item` cannot be sent between threads safely
    |
    = help: the trait `std::marker::Send` is not implemented for `<<T as Case1>::C as std::iter::Iterator>::Item`
-   = help: consider adding a `where <<T as Case1>::C as std::iter::Iterator>::Item: std::marker::Send` bound
 
 error[E0277]: `<<T as Case1>::C as std::iter::Iterator>::Item` cannot be shared between threads safely
   --> $DIR/bad-bounds-on-assoc-in-trait.rs:37:1
@@ -45,7 +49,10 @@ error[E0277]: `<<T as Case1>::C as std::iter::Iterator>::Item` cannot be shared
 LL |   trait Case1 {
    |   ----------- required by `Case1`
 ...
-LL | / fn assume_case1<T: Case1>() {
+LL |   fn assume_case1<T: Case1>() {
+   |   ^                          - help: consider further restricting the associated type: `where <<T as Case1>::C as std::iter::Iterator>::Item: std::marker::Sync`
+   |  _|
+   | |
 LL | |
 LL | |
 LL | |
@@ -55,7 +62,6 @@ LL | | }
    | |_^ `<<T as Case1>::C as std::iter::Iterator>::Item` cannot be shared between threads safely
    |
    = help: the trait `std::marker::Sync` is not implemented for `<<T as Case1>::C as std::iter::Iterator>::Item`
-   = help: consider adding a `where <<T as Case1>::C as std::iter::Iterator>::Item: std::marker::Sync` bound
 
 error[E0277]: `<_ as Lam<&'a u8>>::App` doesn't implement `std::fmt::Debug`
   --> $DIR/bad-bounds-on-assoc-in-trait.rs:37:1
diff --git a/src/test/ui/associated-types/associated-types-bound-failure.fixed b/src/test/ui/associated-types/associated-types-bound-failure.fixed
new file mode 100644 (file)
index 0000000..cc47f31
--- /dev/null
@@ -0,0 +1,29 @@
+// run-rustfix
+// Test equality constraints on associated types in a where clause.
+#![allow(dead_code)]
+
+pub trait ToInt {
+    fn to_int(&self) -> isize;
+}
+
+pub trait GetToInt
+{
+    type R;
+
+    fn get(&self) -> <Self as GetToInt>::R;
+}
+
+fn foo<G>(g: G) -> isize
+    where G : GetToInt, <G as GetToInt>::R: ToInt 
+{
+    ToInt::to_int(&g.get()) //~ ERROR E0277
+}
+
+fn bar<G : GetToInt>(g: G) -> isize
+    where G::R : ToInt
+{
+    ToInt::to_int(&g.get()) // OK
+}
+
+pub fn main() {
+}
index 883ac363b44e5cb8e0b2e2468c206611306156ba..31e073cc7a8bde695fe07ad887dae1c4677ea596 100644 (file)
@@ -1,4 +1,6 @@
+// run-rustfix
 // Test equality constraints on associated types in a where clause.
+#![allow(dead_code)]
 
 pub trait ToInt {
     fn to_int(&self) -> isize;
index 85acf134d51d53ba60f9929214685b131836893b..c420c86a2758f1b696d9df295e80dee25902750e 100644 (file)
@@ -1,13 +1,14 @@
 error[E0277]: the trait bound `<G as GetToInt>::R: ToInt` is not satisfied
-  --> $DIR/associated-types-bound-failure.rs:17:19
+  --> $DIR/associated-types-bound-failure.rs:19:19
    |
 LL |     fn to_int(&self) -> isize;
    |     -------------------------- required by `ToInt::to_int`
 ...
+LL |     where G : GetToInt
+   |                       - help: consider further restricting the associated type: `, <G as GetToInt>::R: ToInt`
+LL | {
 LL |     ToInt::to_int(&g.get())
    |                   ^^^^^^^^ the trait `ToInt` is not implemented for `<G as GetToInt>::R`
-   |
-   = help: consider adding a `where <G as GetToInt>::R: ToInt` bound
 
 error: aborting due to previous error
 
diff --git a/src/test/ui/associated-types/associated-types-for-unimpl-trait.fixed b/src/test/ui/associated-types/associated-types-for-unimpl-trait.fixed
new file mode 100644 (file)
index 0000000..aa23326
--- /dev/null
@@ -0,0 +1,15 @@
+// run-rustfix
+#![allow(unused_variables)]
+
+trait Get {
+    type Value;
+    fn get(&self) -> <Self as Get>::Value;
+}
+
+trait Other {
+    fn uhoh<U:Get>(&self, foo: U, bar: <Self as Get>::Value) where Self: Get  {}
+    //~^ ERROR the trait bound `Self: Get` is not satisfied
+}
+
+fn main() {
+}
index 5b10d1dc2fdb50293c312a4a54de95a01e788cb3..0f6cea8e69fcf29ef4e43816581e868427216875 100644 (file)
@@ -1,3 +1,6 @@
+// run-rustfix
+#![allow(unused_variables)]
+
 trait Get {
     type Value;
     fn get(&self) -> <Self as Get>::Value;
index 9f033687a00721bb68e7420f854019bb27f04789..83d5390417e77c771b336a27c610e6ffa69d0ff4 100644 (file)
@@ -1,10 +1,11 @@
 error[E0277]: the trait bound `Self: Get` is not satisfied
-  --> $DIR/associated-types-for-unimpl-trait.rs:7:5
+  --> $DIR/associated-types-for-unimpl-trait.rs:10:5
    |
 LL |     fn uhoh<U:Get>(&self, foo: U, bar: <Self as Get>::Value) {}
-   |     ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ the trait `Get` is not implemented for `Self`
-   |
-   = help: consider adding a `where Self: Get` bound
+   |     ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^-^^
+   |     |                                                       |
+   |     |                                                       help: consider further restricting `Self`: `where Self: Get`
+   |     the trait `Get` is not implemented for `Self`
 
 error: aborting due to previous error
 
index 01f66a18d25bd07bef57489162b8e5031508cbaf..0b8b7fab1359ffcaafd27b7d0c530f364fb98ab4 100644 (file)
@@ -1,10 +1,10 @@
 error[E0277]: the trait bound `T: Foo<usize>` is not satisfied
   --> $DIR/associated-types-invalid-trait-ref-issue-18865.rs:10:12
    |
+LL | fn f<T:Foo<isize>>(t: &T) {
+   |      -- help: consider further restricting this bound: `T: Foo<usize> +`
 LL |     let u: <T as Foo<usize>>::Bar = t.get_bar();
    |            ^^^^^^^^^^^^^^^^^^^^^^ the trait `Foo<usize>` is not implemented for `T`
-   |
-   = help: consider adding a `where T: Foo<usize>` bound
 
 error: aborting due to previous error
 
index ada9cacbee5232e9b61bc4e4571229147e1e4b23..78198322913c7579fd92590525fb33c46843e0d6 100644 (file)
@@ -2,9 +2,10 @@ error[E0277]: the trait bound `T: Get` is not satisfied
   --> $DIR/associated-types-no-suitable-bound.rs:11:5
    |
 LL |     fn uhoh<T>(foo: <T as Get>::Value) {}
-   |     ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ the trait `Get` is not implemented for `T`
-   |
-   = help: consider adding a `where T: Get` bound
+   |     ^^^^^^^^-^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+   |     |       |
+   |     |       help: consider restricting this bound: `T: Get`
+   |     the trait `Get` is not implemented for `T`
 
 error: aborting due to previous error
 
index 56cd6d09caddc4fa400e3fa199c1e9aa7766d14d..6aa0403088d3c1586facd10853ced9358efacd3e 100644 (file)
@@ -2,9 +2,10 @@ error[E0277]: the trait bound `Self: Get` is not satisfied
   --> $DIR/associated-types-no-suitable-supertrait-2.rs:17:5
    |
 LL |     fn uhoh<U:Get>(&self, foo: U, bar: <Self as Get>::Value) {}
-   |     ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ the trait `Get` is not implemented for `Self`
-   |
-   = help: consider adding a `where Self: Get` bound
+   |     ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^-^^
+   |     |                                                       |
+   |     |                                                       help: consider further restricting `Self`: `where Self: Get`
+   |     the trait `Get` is not implemented for `Self`
 
 error: aborting due to previous error
 
index 71175d36f645f482d6d8b970491f2ca14a008831..8c242be979611c32df6c6e44b85592177909c89b 100644 (file)
@@ -2,9 +2,10 @@ error[E0277]: the trait bound `Self: Get` is not satisfied
   --> $DIR/associated-types-no-suitable-supertrait.rs:17:5
    |
 LL |     fn uhoh<U:Get>(&self, foo: U, bar: <Self as Get>::Value) {}
-   |     ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ the trait `Get` is not implemented for `Self`
-   |
-   = help: consider adding a `where Self: Get` bound
+   |     ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^-^^
+   |     |                                                       |
+   |     |                                                       help: consider further restricting `Self`: `where Self: Get`
+   |     the trait `Get` is not implemented for `Self`
 
 error[E0277]: the trait bound `(T, U): Get` is not satisfied
   --> $DIR/associated-types-no-suitable-supertrait.rs:22:5
diff --git a/src/test/ui/associated-types/associated-types-projection-to-unrelated-trait-in-method-without-default.fixed b/src/test/ui/associated-types/associated-types-projection-to-unrelated-trait-in-method-without-default.fixed
new file mode 100644 (file)
index 0000000..f357045
--- /dev/null
@@ -0,0 +1,30 @@
+// run-rustfix
+// Check that we get an error when you use `<Self as Get>::Value` in
+// the trait definition even if there is no default method.
+
+trait Get {
+    type Value;
+}
+
+trait Other {
+    fn okay<U:Get>(&self, foo: U, bar: <Self as Get>::Value) where Self: Get ;
+    //~^ ERROR E0277
+}
+
+impl Get for () {
+    type Value = f32;
+}
+
+impl Get for f64 {
+    type Value = u32;
+}
+
+impl Other for () {
+    fn okay<U:Get>(&self, _foo: U, _bar: <Self as Get>::Value) { }
+}
+
+impl Other for f64 {
+    fn okay<U:Get>(&self, _foo: U, _bar: <Self as Get>::Value) { }
+}
+
+fn main() { }
index fc38b26f50b624155ef1f5e2e4f7003dcdc6a7bf..549fc8fc618e03d7b2155a2c294ac01f9ddd935b 100644 (file)
@@ -1,3 +1,4 @@
+// run-rustfix
 // Check that we get an error when you use `<Self as Get>::Value` in
 // the trait definition even if there is no default method.
 
index a260e379182546dd0da70adcb8310fd57b9099e0..cb01488fa34d46d417c1c6269199dbdea2ceaee6 100644 (file)
@@ -1,10 +1,11 @@
 error[E0277]: the trait bound `Self: Get` is not satisfied
-  --> $DIR/associated-types-projection-to-unrelated-trait-in-method-without-default.rs:9:5
+  --> $DIR/associated-types-projection-to-unrelated-trait-in-method-without-default.rs:10:5
    |
 LL |     fn okay<U:Get>(&self, foo: U, bar: <Self as Get>::Value);
-   |     ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ the trait `Get` is not implemented for `Self`
-   |
-   = help: consider adding a `where Self: Get` bound
+   |     ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^-
+   |     |                                                       |
+   |     |                                                       help: consider further restricting `Self`: `where Self: Get`
+   |     the trait `Get` is not implemented for `Self`
 
 error: aborting due to previous error
 
diff --git a/src/test/ui/associated-types/associated-types-unsized.fixed b/src/test/ui/associated-types/associated-types-unsized.fixed
new file mode 100644 (file)
index 0000000..f780d17
--- /dev/null
@@ -0,0 +1,14 @@
+// run-rustfix
+#![allow(dead_code, unused_variables)]
+
+trait Get {
+    type Value: ?Sized;
+    fn get(&self) -> <Self as Get>::Value;
+}
+
+fn foo<T:Get>(t: T) where <T as Get>::Value: std::marker::Sized  {
+    let x = t.get(); //~ ERROR the size for values of type
+}
+
+fn main() {
+}
index a9bc24e44d165d75e238ca2e35af421f162d49f4..bdba4c7ff16a10344f5465b87147da0cb55de27b 100644 (file)
@@ -1,3 +1,6 @@
+// run-rustfix
+#![allow(dead_code, unused_variables)]
+
 trait Get {
     type Value: ?Sized;
     fn get(&self) -> <Self as Get>::Value;
index b5db9743932e93cb11967f2d28324af8e5a2c142..2352ac4ad3822cdd0770c326b4d4ecf76dfb3815 100644 (file)
@@ -1,12 +1,13 @@
 error[E0277]: the size for values of type `<T as Get>::Value` cannot be known at compilation time
-  --> $DIR/associated-types-unsized.rs:7:9
+  --> $DIR/associated-types-unsized.rs:10:9
    |
+LL | fn foo<T:Get>(t: T) {
+   |                    - help: consider further restricting the associated type: `where <T as Get>::Value: std::marker::Sized`
 LL |     let x = t.get();
    |         ^ doesn't have a size known at compile-time
    |
    = help: the trait `std::marker::Sized` is not implemented for `<T as Get>::Value`
    = note: to learn more, visit <https://doc.rust-lang.org/book/ch19-04-advanced-types.html#dynamically-sized-types-and-the-sized-trait>
-   = help: consider adding a `where <T as Get>::Value: std::marker::Sized` bound
    = note: all local variables must have a statically known size
    = help: unsized locals are gated as an unstable feature
 
index 15bebce47dd6a145c7d62f35334eb583e90354c3..5ea98dcd4a972a194e603ec6067d98dfb659b873 100644 (file)
@@ -4,7 +4,7 @@ error[E0495]: cannot infer an appropriate lifetime due to conflicting requiremen
 LL |    bar(foo, x)
    |        ^^^
    |
-note: first, the lifetime cannot outlive the lifetime 'a as defined on the function body at 37:8...
+note: first, the lifetime cannot outlive the lifetime `'a` as defined on the function body at 37:8...
   --> $DIR/project-fn-ret-contravariant.rs:37:8
    |
 LL | fn baz<'a,'b>(x: &'a u32) -> &'static u32 {
@@ -23,3 +23,4 @@ LL |    bar(foo, x)
 
 error: aborting due to previous error
 
+For more information about this error, try `rustc --explain E0495`.
index 62b4cb10911fbe6721c0ec4d30fb9326f3c6b751..627609c4a9c004320be4cfc131fef7e65e2475c1 100644 (file)
@@ -4,7 +4,7 @@ error[E0495]: cannot infer an appropriate lifetime due to conflicting requiremen
 LL |    bar(foo, x)
    |        ^^^
    |
-note: first, the lifetime cannot outlive the lifetime 'a as defined on the function body at 44:8...
+note: first, the lifetime cannot outlive the lifetime `'a` as defined on the function body at 44:8...
   --> $DIR/project-fn-ret-invariant.rs:44:8
    |
 LL | fn baz<'a,'b>(x: Type<'a>) -> Type<'static> {
@@ -19,3 +19,4 @@ LL | fn baz<'a,'b>(x: Type<'a>) -> Type<'static> {
 
 error: aborting due to previous error
 
+For more information about this error, try `rustc --explain E0495`.
diff --git a/src/test/ui/associated-types/issue-44153.rs b/src/test/ui/associated-types/issue-44153.rs
new file mode 100644 (file)
index 0000000..2101cb6
--- /dev/null
@@ -0,0 +1,19 @@
+pub trait Array {
+    type Element;
+}
+
+pub trait Visit {
+    fn visit() {}
+}
+
+impl Array for () {
+    type Element = ();
+}
+
+impl<'a> Visit for () where
+    (): Array<Element=&'a ()>,
+{}
+
+fn main() {
+    <() as Visit>::visit(); //~ ERROR: type mismatch resolving
+}
diff --git a/src/test/ui/associated-types/issue-44153.stderr b/src/test/ui/associated-types/issue-44153.stderr
new file mode 100644 (file)
index 0000000..b62a866
--- /dev/null
@@ -0,0 +1,16 @@
+error[E0271]: type mismatch resolving `<() as Array>::Element == &()`
+  --> $DIR/issue-44153.rs:18:5
+   |
+LL |     fn visit() {}
+   |     ---------- required by `Visit::visit`
+...
+LL |     <() as Visit>::visit();
+   |     ^^^^^^^^^^^^^^^^^^^^ expected (), found &()
+   |
+   = note: expected type `()`
+              found type `&()`
+   = note: required because of the requirements on the impl of `Visit` for `()`
+
+error: aborting due to previous error
+
+For more information about this error, try `rustc --explain E0271`.
diff --git a/src/test/ui/associated-types/issue-48010.rs b/src/test/ui/associated-types/issue-48010.rs
new file mode 100644 (file)
index 0000000..70e30c1
--- /dev/null
@@ -0,0 +1,23 @@
+// check-pass
+
+#![crate_type = "lib"]
+
+pub struct Foo;
+
+pub struct Path<T: Bar> {
+    _inner: T::Slice,
+}
+
+pub trait Bar: Sized {
+    type Slice: ?Sized;
+
+    fn open(_: &Path<Self>);
+}
+
+impl Bar for Foo {
+    type Slice = [u8];
+
+    fn open(_: &Path<Self>) {
+        unimplemented!()
+    }
+}
diff --git a/src/test/ui/async-await/async-assoc-fn-anon-lifetimes.rs b/src/test/ui/async-await/async-assoc-fn-anon-lifetimes.rs
new file mode 100644 (file)
index 0000000..8e08b82
--- /dev/null
@@ -0,0 +1,23 @@
+// check-pass
+// Check that the anonymous lifetimes used here aren't considered to shadow one
+// another. Note that `async fn` is different to `fn` here because the lifetimes
+// are numbered by HIR lowering, rather than lifetime resolution.
+
+// edition:2018
+
+struct A<'a, 'b>(&'a &'b i32);
+struct B<'a>(&'a i32);
+
+impl A<'_, '_> {
+    async fn assoc(x: &u32, y: B<'_>) {
+        async fn nested(x: &u32, y: A<'_, '_>) {}
+    }
+
+    async fn assoc2(x: &u32, y: A<'_, '_>) {
+        impl A<'_, '_> {
+            async fn nested_assoc(x: &u32, y: B<'_>) {}
+        }
+    }
+}
+
+fn main() {}
diff --git a/src/test/ui/async-await/async-borrowck-escaping-block-error.fixed b/src/test/ui/async-await/async-borrowck-escaping-block-error.fixed
new file mode 100644 (file)
index 0000000..f004b41
--- /dev/null
@@ -0,0 +1,12 @@
+// edition:2018
+// run-rustfix
+
+fn foo() -> Box<impl std::future::Future<Output = u32>> {
+    let x = 0u32;
+    Box::new(async move { x } )
+    //~^ ERROR E0373
+}
+
+fn main() {
+    let _foo = foo();
+}
diff --git a/src/test/ui/async-await/async-borrowck-escaping-block-error.rs b/src/test/ui/async-await/async-borrowck-escaping-block-error.rs
new file mode 100644 (file)
index 0000000..4f35fd5
--- /dev/null
@@ -0,0 +1,12 @@
+// edition:2018
+// run-rustfix
+
+fn foo() -> Box<impl std::future::Future<Output = u32>> {
+    let x = 0u32;
+    Box::new(async { x } )
+    //~^ ERROR E0373
+}
+
+fn main() {
+    let _foo = foo();
+}
diff --git a/src/test/ui/async-await/async-borrowck-escaping-block-error.stderr b/src/test/ui/async-await/async-borrowck-escaping-block-error.stderr
new file mode 100644 (file)
index 0000000..0eb3971
--- /dev/null
@@ -0,0 +1,22 @@
+error[E0373]: closure may outlive the current function, but it borrows `x`, which is owned by the current function
+  --> $DIR/async-borrowck-escaping-block-error.rs:6:20
+   |
+LL |     Box::new(async { x } )
+   |                    ^^-^^
+   |                    | |
+   |                    | `x` is borrowed here
+   |                    may outlive borrowed value `x`
+   |
+note: generator is returned here
+  --> $DIR/async-borrowck-escaping-block-error.rs:4:13
+   |
+LL | fn foo() -> Box<impl std::future::Future<Output = u32>> {
+   |             ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+help: to force the async block to take ownership of `x` (and any other referenced variables), use the `move` keyword
+   |
+LL |     Box::new(async move { x } )
+   |                    ^^^^^^^^^^
+
+error: aborting due to previous error
+
+For more information about this error, try `rustc --explain E0373`.
index b63d5408a7147f9d66bcdd72b684db25d6003fb1..4a413381aa30098e0caf68e2720abafacb0da53f 100644 (file)
@@ -22,7 +22,8 @@
 impl BigFut {
     fn new() -> Self {
         BigFut([0; BIG_FUT_SIZE])
-    } }
+    }
+}
 
 impl Drop for BigFut {
     fn drop(&mut self) {}
diff --git a/src/test/ui/async-await/async-fn-size-uninit-locals.rs b/src/test/ui/async-await/async-fn-size-uninit-locals.rs
new file mode 100644 (file)
index 0000000..0558084
--- /dev/null
@@ -0,0 +1,103 @@
+// Test that we don't store uninitialized locals in futures from `async fn`.
+//
+// The exact sizes can change by a few bytes (we'd like to know when they do).
+// What we don't want to see is the wrong multiple of 1024 (the size of `Big`)
+// being reflected in the size.
+
+// ignore-emscripten (sizes don't match)
+// run-pass
+
+// edition:2018
+
+#![allow(unused_variables, unused_assignments)]
+
+use std::future::Future;
+use std::pin::Pin;
+use std::task::{Context, Poll};
+
+const BIG_FUT_SIZE: usize = 1024;
+struct Big([u8; BIG_FUT_SIZE]);
+
+impl Big {
+    fn new() -> Self {
+        Big([0; BIG_FUT_SIZE])
+    }
+}
+
+impl Drop for Big {
+    fn drop(&mut self) {}
+}
+
+#[allow(dead_code)]
+struct Joiner {
+    a: Option<Big>,
+    b: Option<Big>,
+    c: Option<Big>,
+}
+
+impl Future for Joiner {
+    type Output = ();
+
+    fn poll(self: Pin<&mut Self>, _ctx: &mut Context<'_>) -> Poll<Self::Output> {
+        Poll::Ready(())
+    }
+}
+
+fn noop() {}
+async fn fut() {}
+
+async fn single() {
+    let x;
+    fut().await;
+    x = Big::new();
+}
+
+async fn single_with_noop() {
+    let x;
+    fut().await;
+    noop();
+    x = Big::new();
+    noop();
+}
+
+async fn joined() {
+    let joiner;
+    let a = Big::new();
+    let b = Big::new();
+    let c = Big::new();
+
+    fut().await;
+    noop();
+    joiner = Joiner { a: Some(a), b: Some(b), c: Some(c) };
+    noop();
+}
+
+async fn joined_with_noop() {
+    let joiner;
+    let a = Big::new();
+    let b = Big::new();
+    let c = Big::new();
+
+    fut().await;
+    noop();
+    joiner = Joiner { a: Some(a), b: Some(b), c: Some(c) };
+    noop();
+}
+
+async fn join_retval() -> Joiner {
+    let a = Big::new();
+    let b = Big::new();
+    let c = Big::new();
+
+    fut().await;
+    noop();
+    Joiner { a: Some(a), b: Some(b), c: Some(c) }
+}
+
+fn main() {
+    assert_eq!(8, std::mem::size_of_val(&single()));
+    assert_eq!(12, std::mem::size_of_val(&single_with_noop()));
+    assert_eq!(3084, std::mem::size_of_val(&joined()));
+    assert_eq!(3084, std::mem::size_of_val(&joined_with_noop()));
+    assert_eq!(3080, std::mem::size_of_val(&join_retval()));
+}
index b5c94ecb716902bc14b2bb4efc956ecc0fff5e21..b313992db4ecbc38883d389a3d365735ac0c3dce 100644 (file)
@@ -89,10 +89,10 @@ fn main() {
     assert_eq!(8, std::mem::size_of_val(&await1_level1()));
     assert_eq!(12, std::mem::size_of_val(&await2_level1()));
     assert_eq!(12, std::mem::size_of_val(&await3_level1()));
-    assert_eq!(20, std::mem::size_of_val(&await3_level2()));
-    assert_eq!(28, std::mem::size_of_val(&await3_level3()));
-    assert_eq!(36, std::mem::size_of_val(&await3_level4()));
-    assert_eq!(44, std::mem::size_of_val(&await3_level5()));
+    assert_eq!(24, std::mem::size_of_val(&await3_level2()));
+    assert_eq!(36, std::mem::size_of_val(&await3_level3()));
+    assert_eq!(48, std::mem::size_of_val(&await3_level4()));
+    assert_eq!(60, std::mem::size_of_val(&await3_level5()));
 
     assert_eq!(1,   wait(base()));
     assert_eq!(1,   wait(await1_level1()));
index 5099297fbeb19ec34c626cb5160cd18fa35a63d9..efec160588fc411ec4e649a95160b8529049e9be 100644 (file)
@@ -15,12 +15,12 @@ LL |         foo: &dyn Foo, bar: &'a dyn Foo
 LL |         foo
    |         --- this return type evaluates to the `'static` lifetime...
    |
-note: ...can't outlive the lifetime '_ as defined on the method body at 11:14
+note: ...can't outlive the lifetime `'_` as defined on the method body at 11:14
   --> $DIR/issue-63388-2.rs:11:14
    |
 LL |         foo: &dyn Foo, bar: &'a dyn Foo
    |              ^
-help: you can add a constraint to the return type to make it last less than `'static` and match the lifetime '_ as defined on the method body at 11:14
+help: you can add a constraint to the return type to make it last less than `'static` and match the lifetime `'_` as defined on the method body at 11:14
    |
 LL |         foo + '_
    |
diff --git a/src/test/ui/async-await/issues/issue-65159.rs b/src/test/ui/async-await/issues/issue-65159.rs
new file mode 100644 (file)
index 0000000..b5fee06
--- /dev/null
@@ -0,0 +1,10 @@
+// Regression test for #65159. We used to ICE.
+//
+// edition:2018
+
+async fn copy() -> Result<()> //~ ERROR wrong number of type arguments
+{
+    Ok(())
+}
+
+fn main() { }
diff --git a/src/test/ui/async-await/issues/issue-65159.stderr b/src/test/ui/async-await/issues/issue-65159.stderr
new file mode 100644 (file)
index 0000000..56d2c38
--- /dev/null
@@ -0,0 +1,9 @@
+error[E0107]: wrong number of type arguments: expected 2, found 1
+  --> $DIR/issue-65159.rs:5:20
+   |
+LL | async fn copy() -> Result<()>
+   |                    ^^^^^^^^^^ expected 2 type arguments
+
+error: aborting due to previous error
+
+For more information about this error, try `rustc --explain E0107`.
diff --git a/src/test/ui/async-await/unused-lifetime.rs b/src/test/ui/async-await/unused-lifetime.rs
new file mode 100644 (file)
index 0000000..1cf546b
--- /dev/null
@@ -0,0 +1,42 @@
+// edition:2018
+
+// Avoid spurious warnings of unused lifetime. The below async functions
+// are desugared to have an unused lifetime
+// but we don't want to warn about that as there's nothing they can do about it.
+
+#![deny(unused_lifetimes)]
+#![allow(dead_code)]
+
+pub async fn october(s: &str) {
+    println!("{}", s);
+}
+
+pub async fn async_fn(&mut ref s: &mut[i32]) {
+    println!("{:?}", s);
+}
+
+macro_rules! foo_macro {
+    () => {
+        pub async fn async_fn_in_macro(&mut ref _s: &mut[i32]) {}
+    };
+}
+
+foo_macro!();
+
+pub async fn func_with_unused_lifetime<'a>(s: &'a str) {
+    //~^ ERROR lifetime parameter `'a` never used
+    println!("{}", s);
+}
+
+pub async fn func_with_two_unused_lifetime<'a, 'b>(s: &'a str, t: &'b str) {
+    //~^ ERROR lifetime parameter `'a` never used
+    //~^^ ERROR lifetime parameter `'b` never used
+    println!("{}", s);
+}
+
+pub async fn func_with_unused_lifetime_in_two_params<'c>(s: &'c str, t: &'c str) {
+    //~^ ERROR lifetime parameter `'c` never used
+    println!("{}", s);
+}
+
+fn main() {}
diff --git a/src/test/ui/async-await/unused-lifetime.stderr b/src/test/ui/async-await/unused-lifetime.stderr
new file mode 100644 (file)
index 0000000..885cdc0
--- /dev/null
@@ -0,0 +1,32 @@
+error: lifetime parameter `'a` never used
+  --> $DIR/unused-lifetime.rs:26:40
+   |
+LL | pub async fn func_with_unused_lifetime<'a>(s: &'a str) {
+   |                                        ^^
+   |
+note: lint level defined here
+  --> $DIR/unused-lifetime.rs:7:9
+   |
+LL | #![deny(unused_lifetimes)]
+   |         ^^^^^^^^^^^^^^^^
+
+error: lifetime parameter `'a` never used
+  --> $DIR/unused-lifetime.rs:31:44
+   |
+LL | pub async fn func_with_two_unused_lifetime<'a, 'b>(s: &'a str, t: &'b str) {
+   |                                            ^^
+
+error: lifetime parameter `'b` never used
+  --> $DIR/unused-lifetime.rs:31:48
+   |
+LL | pub async fn func_with_two_unused_lifetime<'a, 'b>(s: &'a str, t: &'b str) {
+   |                                                ^^
+
+error: lifetime parameter `'c` never used
+  --> $DIR/unused-lifetime.rs:37:54
+   |
+LL | pub async fn func_with_unused_lifetime_in_two_params<'c>(s: &'c str, t: &'c str) {
+   |                                                      ^^
+
+error: aborting due to 4 previous errors
+
index ae21984c06d723c36ab4e8902f657565f5784ea6..51422fab81fdaae931ba321bfa5313b415d362ed 100644 (file)
@@ -18,4 +18,5 @@ LL | auto trait MyTrait { fn foo() {} }
 
 error: aborting due to 3 previous errors
 
-For more information about this error, try `rustc --explain E0380`.
+Some errors have detailed explanations: E0380, E0567, E0568.
+For more information about an error, try `rustc --explain E0380`.
index c72b965236006ba8cac9334808f2e63841c7dddf..740667f1466801c4605a3ddf6ce2967021c9de91 100644 (file)
@@ -1,11 +1,12 @@
 error[E0277]: `T` cannot be sent between threads safely
   --> $DIR/bad-method-typaram-kind.rs:2:7
    |
+LL | fn foo<T:'static>() {
+   |        -- help: consider further restricting this bound: `T: std::marker::Send +`
 LL |     1.bar::<T>();
    |       ^^^ `T` cannot be sent between threads safely
    |
    = help: the trait `std::marker::Send` is not implemented for `T`
-   = help: consider adding a `where T: std::marker::Send` bound
 
 error: aborting due to previous error
 
index 4c7c0d1a0dfa514f5beea8594406e3ed5e0d70b3..52d43eae658aeb08e06b7ac2ab0983a043a68f94 100644 (file)
@@ -24,12 +24,12 @@ LL |     fn wrong_bound1<'b,'c,'d:'a+'c>(self, b: Inv<'b>, c: Inv<'c>, d: Inv<'d
    |
    = note: expected type `fn(&'a isize, Inv<'c>, Inv<'c>, Inv<'d>)`
               found type `fn(&'a isize, Inv<'_>, Inv<'c>, Inv<'d>)`
-note: the lifetime 'c as defined on the method body at 27:24...
+note: the lifetime `'c` as defined on the method body at 27:24...
   --> $DIR/regions-bound-missing-bound-in-impl.rs:27:24
    |
 LL |     fn wrong_bound1<'b,'c,'d:'a+'c>(self, b: Inv<'b>, c: Inv<'c>, d: Inv<'d>) {
    |                        ^^
-note: ...does not necessarily outlive the lifetime 'c as defined on the method body at 27:24
+note: ...does not necessarily outlive the lifetime `'c` as defined on the method body at 27:24
   --> $DIR/regions-bound-missing-bound-in-impl.rs:27:24
    |
 LL |     fn wrong_bound1<'b,'c,'d:'a+'c>(self, b: Inv<'b>, c: Inv<'c>, d: Inv<'d>) {
index 9771436d167df0065fdc1e198ee08965852b9c4c..5be6ab05d6607d014eadda611c8bc6f5494cd116 100644 (file)
@@ -2,20 +2,22 @@ error[E0277]: `T` cannot be sent between threads safely
   --> $DIR/builtin-superkinds-double-superkind.rs:6:24
    |
 LL | impl <T: Sync+'static> Foo for (T,) { }
-   |                        ^^^ `T` cannot be sent between threads safely
+   |       --               ^^^ `T` cannot be sent between threads safely
+   |       |
+   |       help: consider further restricting this bound: `T: std::marker::Send +`
    |
    = help: within `(T,)`, the trait `std::marker::Send` is not implemented for `T`
-   = help: consider adding a `where T: std::marker::Send` bound
    = note: required because it appears within the type `(T,)`
 
 error[E0277]: `T` cannot be shared between threads safely
   --> $DIR/builtin-superkinds-double-superkind.rs:9:16
    |
 LL | impl <T: Send> Foo for (T,T) { }
-   |                ^^^ `T` cannot be shared between threads safely
+   |       --       ^^^ `T` cannot be shared between threads safely
+   |       |
+   |       help: consider further restricting this bound: `T: std::marker::Sync +`
    |
    = help: within `(T, T)`, the trait `std::marker::Sync` is not implemented for `T`
-   = help: consider adding a `where T: std::marker::Sync` bound
    = note: required because it appears within the type `(T, T)`
 
 error: aborting due to 2 previous errors
index 61c18a24fb0c9a1d55afadb697f5dec85df0c95e..8cce9bfdf52a82915d1ff85c07e1f0eb9f557fb7 100644 (file)
@@ -2,10 +2,11 @@ error[E0277]: `T` cannot be sent between threads safely
   --> $DIR/builtin-superkinds-in-metadata.rs:13:23
    |
 LL | impl <T:Sync+'static> RequiresRequiresShareAndSend for X<T> { }
-   |                       ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ `T` cannot be sent between threads safely
+   |       --              ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ `T` cannot be sent between threads safely
+   |       |
+   |       help: consider further restricting this bound: `T: std::marker::Send +`
    |
    = help: within `X<T>`, the trait `std::marker::Send` is not implemented for `T`
-   = help: consider adding a `where T: std::marker::Send` bound
    = note: required because it appears within the type `X<T>`
 
 error: aborting due to previous error
index dc5479e5e2da6204b0b4a0adab647ae3b021333c..4381a5b8682628a4274b9dd262db6326180e9ec2 100644 (file)
@@ -2,10 +2,11 @@ error[E0277]: `T` cannot be sent between threads safely
   --> $DIR/builtin-superkinds-typaram-not-send.rs:5:24
    |
 LL | impl <T: Sync+'static> Foo for T { }
-   |                        ^^^ `T` cannot be sent between threads safely
+   |       --               ^^^ `T` cannot be sent between threads safely
+   |       |
+   |       help: consider further restricting this bound: `T: std::marker::Send +`
    |
    = help: the trait `std::marker::Send` is not implemented for `T`
-   = help: consider adding a `where T: std::marker::Send` bound
 
 error: aborting due to previous error
 
index b986d0c24350615783699b8fce02bc5dd18bf820..05535659161b82e7a960dbc740a55a877590b37c 100644 (file)
@@ -14,7 +14,7 @@ LL |   pub unsafe extern "C" fn no_escape0<'f>(_: usize, ap: ...) -> VaListImpl<
 LL | |     ap
 LL | | }
    | |_^
-note: ...does not necessarily outlive the lifetime 'f as defined on the function body at 7:37
+note: ...does not necessarily outlive the lifetime `'f` as defined on the function body at 7:37
   --> $DIR/variadic-ffi-4.rs:7:37
    |
 LL | pub unsafe extern "C" fn no_escape0<'f>(_: usize, ap: ...) -> VaListImpl<'f> {
@@ -209,4 +209,5 @@ LL | | }
 
 error: aborting due to 8 previous errors
 
-For more information about this error, try `rustc --explain E0308`.
+Some errors have detailed explanations: E0308, E0495.
+For more information about an error, try `rustc --explain E0308`.
diff --git a/src/test/ui/call-fn-never-arg-wrong-type.rs b/src/test/ui/call-fn-never-arg-wrong-type.rs
deleted file mode 100644 (file)
index d06637e..0000000
+++ /dev/null
@@ -1,11 +0,0 @@
-// Test that we can't pass other types for !
-
-#![feature(never_type)]
-
-fn foo(x: !) -> ! {
-    x
-}
-
-fn main() {
-    foo("wow"); //~ ERROR mismatched types
-}
diff --git a/src/test/ui/call-fn-never-arg-wrong-type.stderr b/src/test/ui/call-fn-never-arg-wrong-type.stderr
deleted file mode 100644 (file)
index 7a50fd3..0000000
+++ /dev/null
@@ -1,12 +0,0 @@
-error[E0308]: mismatched types
-  --> $DIR/call-fn-never-arg-wrong-type.rs:10:9
-   |
-LL |     foo("wow");
-   |         ^^^^^ expected !, found reference
-   |
-   = note: expected type `!`
-              found type `&'static str`
-
-error: aborting due to previous error
-
-For more information about this error, try `rustc --explain E0308`.
index a4d6e9b777f0262b5f528a91ed696a2a9f82dcf3..5c6834459f0d4021538bb4f5f6ca5d4b5f9fe80f 100644 (file)
@@ -1,7 +1,7 @@
-// build-pass (FIXME(62277): could be check-pass?)
-
+// check-pass
 #![feature(exclusive_range_pattern)]
 #![warn(unreachable_patterns)]
+#![warn(overlapping_patterns)]
 
 fn main() {
     // These cases should generate no warning.
@@ -13,7 +13,7 @@ fn main() {
 
     match 10 {
         1..10 => {},
-        9..=10 => {},
+        9..=10 => {}, //~ WARNING multiple patterns covering the same range
         _ => {},
     }
 
@@ -23,22 +23,25 @@ fn main() {
         _ => {},
     }
 
-    // These cases should generate an "unreachable pattern" warning.
+    // These cases should generate "unreachable pattern" warnings.
     match 10 {
         1..10 => {},
-        9 => {},
+        9 => {}, //~ WARNING unreachable pattern
         _ => {},
     }
 
     match 10 {
         1..10 => {},
-        8..=9 => {},
+        8..=9 => {}, //~ WARNING multiple patterns covering the same range
         _ => {},
     }
 
     match 10 {
-        1..10 => {},
-        9..=9 => {},
+        5..7 => {},
+        6 => {}, //~ WARNING unreachable pattern
+        1..10 => {}, //~ WARNING multiple patterns covering the same range
+        9..=9 => {}, //~ WARNING unreachable pattern
+        6 => {}, //~ WARNING unreachable pattern
         _ => {},
     }
 }
index d961f623e1fa73ec0edb273901cc4b3943eb3001..cb4a0486eef9ab244f6884659a7dc1a2982a649d 100644 (file)
@@ -1,3 +1,17 @@
+warning: multiple patterns covering the same range
+  --> $DIR/issue-43253.rs:16:9
+   |
+LL |         1..10 => {},
+   |         ----- this range overlaps on `9i32`
+LL |         9..=10 => {},
+   |         ^^^^^^ overlapping patterns
+   |
+note: lint level defined here
+  --> $DIR/issue-43253.rs:4:9
+   |
+LL | #![warn(overlapping_patterns)]
+   |         ^^^^^^^^^^^^^^^^^^^^
+
 warning: unreachable pattern
   --> $DIR/issue-43253.rs:29:9
    |
@@ -5,7 +19,7 @@ LL |         9 => {},
    |         ^
    |
 note: lint level defined here
-  --> $DIR/issue-43253.rs:4:9
+  --> $DIR/issue-43253.rs:3:9
    |
 LL | #![warn(unreachable_patterns)]
    |         ^^^^^^^^^^^^^^^^^^^^
@@ -19,6 +33,18 @@ LL |         8..=9 => {},
 warning: unreachable pattern
   --> $DIR/issue-43253.rs:41:9
    |
+LL |         6 => {},
+   |         ^
+
+warning: unreachable pattern
+  --> $DIR/issue-43253.rs:43:9
+   |
 LL |         9..=9 => {},
    |         ^^^^^
 
+warning: unreachable pattern
+  --> $DIR/issue-43253.rs:44:9
+   |
+LL |         6 => {},
+   |         ^
+
index ac4666fe36de6c92e2266e08fc327de84a39d06a..8af7f882cc29925c7b434c3abba32ce731fe27d4 100644 (file)
@@ -11,7 +11,7 @@ note: the anonymous lifetime #2 defined on the body at 14:48...
    |
 LL |     with_closure_expecting_fn_with_free_region(|x: fn(&'x u32), y| {});
    |                                                ^^^^^^^^^^^^^^^^^^^^^^
-note: ...does not necessarily outlive the lifetime 'x as defined on the function body at 11:36
+note: ...does not necessarily outlive the lifetime `'x` as defined on the function body at 11:36
   --> $DIR/expect-fn-supply-fn.rs:11:36
    |
 LL | fn expect_free_supply_free_from_fn<'x>(x: &'x u32) {
@@ -25,7 +25,7 @@ LL |     with_closure_expecting_fn_with_free_region(|x: fn(&'x u32), y| {});
    |
    = note: expected type `fn(&u32)`
               found type `fn(&'x u32)`
-note: the lifetime 'x as defined on the function body at 11:36...
+note: the lifetime `'x` as defined on the function body at 11:36...
   --> $DIR/expect-fn-supply-fn.rs:11:36
    |
 LL | fn expect_free_supply_free_from_fn<'x>(x: &'x u32) {
index 51077b1b2922e681dc2bd2673f8159ef2757711a..3c8f637e13369722bcb1157496ac9e677851272c 100644 (file)
@@ -4,14 +4,16 @@ error[E0277]: `F` cannot be sent between threads safely
 LL |   struct X<F> where F: FnOnce() + 'static + Send {
    |   ---------------------------------------------- required by `X`
 ...
-LL | / fn foo<F>(blk: F) -> X<F> where F: FnOnce() + 'static {
+LL |   fn foo<F>(blk: F) -> X<F> where F: FnOnce() + 'static {
+   |   ^                                                    - help: consider further restricting type parameter `F`: `, F: std::marker::Send`
+   |  _|
+   | |
 LL | |
 LL | |     return X { field: blk };
 LL | | }
    | |_^ `F` cannot be sent between threads safely
    |
    = help: the trait `std::marker::Send` is not implemented for `F`
-   = help: consider adding a `where F: std::marker::Send` bound
 
 error: aborting due to previous error
 
index 4b703eded69c361b1b81b5a80f6625c0cff11c90..05d5bb1e8d5a8d35fdada3b1859c57d02022f3cb 100644 (file)
@@ -4,11 +4,13 @@ error[E0277]: `F` cannot be shared between threads safely
 LL | fn take_const_owned<F>(_: F) where F: FnOnce() + Sync + Send {
    |    ----------------                              ---- required by this bound in `take_const_owned`
 ...
+LL | fn give_owned<F>(f: F) where F: FnOnce() + Send {
+   |                                                - help: consider further restricting type parameter `F`: `, F: std::marker::Sync`
+LL |     take_any(f);
 LL |     take_const_owned(f);
    |                      ^ `F` cannot be shared between threads safely
    |
    = help: the trait `std::marker::Sync` is not implemented for `F`
-   = help: consider adding a `where F: std::marker::Sync` bound
 
 error: aborting due to previous error
 
index e3b623d55248ed3b6c6051103bafff9b016d664d..9f74738315a0adbec20c0da3718730b27d04771f 100644 (file)
@@ -38,7 +38,7 @@ LL | |
 LL | |
 LL | |     });
    | |_____^
-note: ...does not necessarily outlive the lifetime 'x as defined on the function body at 32:30
+note: ...does not necessarily outlive the lifetime `'x` as defined on the function body at 32:30
   --> $DIR/expect-region-supply-region.rs:32:30
    |
 LL | fn expect_bound_supply_named<'x>() {
@@ -52,7 +52,7 @@ LL |     closure_expecting_bound(|x: &'x u32| {
    |
    = note: expected type `&u32`
               found type `&'x u32`
-note: the lifetime 'x as defined on the function body at 32:30...
+note: the lifetime `'x` as defined on the function body at 32:30...
   --> $DIR/expect-region-supply-region.rs:32:30
    |
 LL | fn expect_bound_supply_named<'x>() {
index f005245e6dcb9141f6d0d4228159ccef678e5e62..5038eb3ebf4583db98b83239ee4860f7a1a66581 100644 (file)
@@ -1,4 +1,4 @@
-// build-pass (FIXME(62277): could be check-pass?)
+// check-pass
 
 #![feature(never_type)]
 #![allow(unreachable_code)]
index 6b15b7ebbe9ee6e6924c5a8d110e22a132d2af00..832f5c3ac2bb7ec3f57cba28eec4b48fdfb67bc8 100644 (file)
@@ -66,3 +66,4 @@ LL | | }
 
 error: aborting due to 8 previous errors
 
+For more information about this error, try `rustc --explain E0566`.
diff --git a/src/test/ui/const-generics/auxiliary/const_generic_lib.rs b/src/test/ui/const-generics/auxiliary/const_generic_lib.rs
new file mode 100644 (file)
index 0000000..901fb5d
--- /dev/null
@@ -0,0 +1,9 @@
+#![feature(const_generics)]
+
+pub struct Struct<const N: usize>(pub [u8; N]);
+
+pub type Alias = Struct<2>;
+
+pub fn function(value: Struct<3>) -> u8 {
+    value.0[0]
+}
diff --git a/src/test/ui/const-generics/const-argument-cross-crate-mismatch.rs b/src/test/ui/const-generics/const-argument-cross-crate-mismatch.rs
new file mode 100644 (file)
index 0000000..d863d09
--- /dev/null
@@ -0,0 +1,10 @@
+// aux-build:const_generic_lib.rs
+
+extern crate const_generic_lib;
+
+fn main() {
+    let _ = const_generic_lib::function(const_generic_lib::Struct([0u8, 1u8]));
+    //~^ ERROR mismatched types
+    let _: const_generic_lib::Alias = const_generic_lib::Struct([0u8, 1u8, 2u8]);
+    //~^ ERROR mismatched types
+}
diff --git a/src/test/ui/const-generics/const-argument-cross-crate-mismatch.stderr b/src/test/ui/const-generics/const-argument-cross-crate-mismatch.stderr
new file mode 100644 (file)
index 0000000..b7fd29c
--- /dev/null
@@ -0,0 +1,21 @@
+error[E0308]: mismatched types
+  --> $DIR/const-argument-cross-crate-mismatch.rs:6:41
+   |
+LL |     let _ = const_generic_lib::function(const_generic_lib::Struct([0u8, 1u8]));
+   |                                         ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ expected `3usize`, found `2usize`
+   |
+   = note: expected type `const_generic_lib::Struct<3usize>`
+              found type `const_generic_lib::Struct<_: usize>`
+
+error[E0308]: mismatched types
+  --> $DIR/const-argument-cross-crate-mismatch.rs:8:39
+   |
+LL |     let _: const_generic_lib::Alias = const_generic_lib::Struct([0u8, 1u8, 2u8]);
+   |                                       ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ expected `2usize`, found `3usize`
+   |
+   = note: expected type `const_generic_lib::Struct<2usize>`
+              found type `const_generic_lib::Struct<_: usize>`
+
+error: aborting due to 2 previous errors
+
+For more information about this error, try `rustc --explain E0308`.
diff --git a/src/test/ui/const-generics/const-argument-cross-crate.rs b/src/test/ui/const-generics/const-argument-cross-crate.rs
new file mode 100644 (file)
index 0000000..98cf39a
--- /dev/null
@@ -0,0 +1,12 @@
+// run-pass
+// aux-build:const_generic_lib.rs
+
+extern crate const_generic_lib;
+
+struct Container(const_generic_lib::Alias);
+
+fn main() {
+    let res = const_generic_lib::function(const_generic_lib::Struct([14u8, 1u8, 2u8]));
+    assert_eq!(res, 14u8);
+    let _ = Container(const_generic_lib::Struct([0u8, 1u8]));
+}
index fddb06981bc734754c1093921f252ec01acba8c6..32cf8d8a01a783bbea196604263e4c24169de95e 100644 (file)
@@ -10,7 +10,7 @@ error: const parameter `x` should have an upper case name
   --> $DIR/const-parameter-uppercase-lint.rs:6:15
    |
 LL | fn noop<const x: u32>() {
-   |               ^ help: convert the identifier to upper case: `X`
+   |               ^ help: convert the identifier to upper case (notice the capitalization): `X`
    |
 note: lint level defined here
   --> $DIR/const-parameter-uppercase-lint.rs:4:9
diff --git a/src/test/ui/const-generics/fn-const-param-call.rs b/src/test/ui/const-generics/fn-const-param-call.rs
new file mode 100644 (file)
index 0000000..8461538
--- /dev/null
@@ -0,0 +1,20 @@
+// run-pass
+
+#![feature(const_generics, const_compare_raw_pointers)]
+//~^ WARN the feature `const_generics` is incomplete and may cause the compiler to crash
+
+fn function() -> u32 {
+    17
+}
+
+struct Wrapper<const F: fn() -> u32>;
+
+impl<const F: fn() -> u32> Wrapper<{F}> {
+    fn call() -> u32 {
+        F()
+    }
+}
+
+fn main() {
+    assert_eq!(Wrapper::<{function}>::call(), 17);
+}
diff --git a/src/test/ui/const-generics/fn-const-param-call.stderr b/src/test/ui/const-generics/fn-const-param-call.stderr
new file mode 100644 (file)
index 0000000..c677d70
--- /dev/null
@@ -0,0 +1,8 @@
+warning: the feature `const_generics` is incomplete and may cause the compiler to crash
+  --> $DIR/fn-const-param-call.rs:3:12
+   |
+LL | #![feature(const_generics, const_compare_raw_pointers)]
+   |            ^^^^^^^^^^^^^^
+   |
+   = note: `#[warn(incomplete_features)]` on by default
+
diff --git a/src/test/ui/const-generics/fn-const-param-infer.rs b/src/test/ui/const-generics/fn-const-param-infer.rs
new file mode 100644 (file)
index 0000000..78fb10e
--- /dev/null
@@ -0,0 +1,26 @@
+#![feature(const_generics, const_compare_raw_pointers)]
+//~^ WARN the feature `const_generics` is incomplete and may cause the compiler to crash
+
+struct Checked<const F: fn(usize) -> bool>;
+
+fn not_one(val: usize) -> bool { val != 1 }
+fn not_two(val: usize) -> bool { val != 2 }
+
+fn generic_arg<T>(val: T) -> bool { true }
+
+fn generic<T>(val: usize) -> bool { val != 1 }
+
+fn main() {
+    let _: Option<Checked<{not_one}>> = None;
+    let _: Checked<{not_one}> = Checked::<{not_one}>;
+    let _: Checked<{not_one}> = Checked::<{not_two}>; //~ mismatched types
+
+    let _ = Checked::<{generic_arg}>;
+    let _ = Checked::<{generic_arg::<usize>}>;
+    let _ = Checked::<{generic_arg::<u32>}>;  //~ mismatched types
+
+    let _ = Checked::<{generic}>; //~ type annotations needed
+    let _ = Checked::<{generic::<u16>}>;
+    let _: Checked<{generic::<u16>}> = Checked::<{generic::<u16>}>;
+    let _: Checked<{generic::<u32>}> = Checked::<{generic::<u16>}>; //~ mismatched types
+}
diff --git a/src/test/ui/const-generics/fn-const-param-infer.stderr b/src/test/ui/const-generics/fn-const-param-infer.stderr
new file mode 100644 (file)
index 0000000..de0916b
--- /dev/null
@@ -0,0 +1,45 @@
+warning: the feature `const_generics` is incomplete and may cause the compiler to crash
+  --> $DIR/fn-const-param-infer.rs:1:12
+   |
+LL | #![feature(const_generics, const_compare_raw_pointers)]
+   |            ^^^^^^^^^^^^^^
+   |
+   = note: `#[warn(incomplete_features)]` on by default
+
+error[E0308]: mismatched types
+  --> $DIR/fn-const-param-infer.rs:16:33
+   |
+LL |     let _: Checked<{not_one}> = Checked::<{not_two}>;
+   |                                 ^^^^^^^^^^^^^^^^^^^^ expected `not_one`, found `not_two`
+   |
+   = note: expected type `Checked<not_one>`
+              found type `Checked<not_two>`
+
+error[E0308]: mismatched types
+  --> $DIR/fn-const-param-infer.rs:20:24
+   |
+LL |     let _ = Checked::<{generic_arg::<u32>}>;
+   |                        ^^^^^^^^^^^^^^^^^^ expected usize, found u32
+   |
+   = note: expected type `fn(usize) -> bool`
+              found type `fn(u32) -> bool {generic_arg::<u32>}`
+
+error[E0282]: type annotations needed
+  --> $DIR/fn-const-param-infer.rs:22:24
+   |
+LL |     let _ = Checked::<{generic}>;
+   |                        ^^^^^^^ cannot infer type for `T`
+
+error[E0308]: mismatched types
+  --> $DIR/fn-const-param-infer.rs:25:40
+   |
+LL |     let _: Checked<{generic::<u32>}> = Checked::<{generic::<u16>}>;
+   |                                        ^^^^^^^^^^^^^^^^^^^^^^^^^^^ expected `generic::<u32>`, found `generic::<u16>`
+   |
+   = note: expected type `Checked<generic::<u32>>`
+              found type `Checked<generic::<u16>>`
+
+error: aborting due to 4 previous errors
+
+Some errors have detailed explanations: E0282, E0308.
+For more information about an error, try `rustc --explain E0282`.
diff --git a/src/test/ui/const-generics/issue-60263.rs b/src/test/ui/const-generics/issue-60263.rs
deleted file mode 100644 (file)
index 70cbc24..0000000
+++ /dev/null
@@ -1,9 +0,0 @@
-struct B<const I: u8>; //~ ERROR const generics are unstable
-
-impl B<0> {
-    fn bug() -> Self {
-        panic!()
-    }
-}
-
-fn main() {}
diff --git a/src/test/ui/const-generics/issue-60263.stderr b/src/test/ui/const-generics/issue-60263.stderr
deleted file mode 100644 (file)
index fe7b6fd..0000000
+++ /dev/null
@@ -1,12 +0,0 @@
-error[E0658]: const generics are unstable
-  --> $DIR/issue-60263.rs:1:16
-   |
-LL | struct B<const I: u8>;
-   |                ^
-   |
-   = note: for more information, see https://github.com/rust-lang/rust/issues/44580
-   = help: add `#![feature(const_generics)]` to the crate attributes to enable
-
-error: aborting due to previous error
-
-For more information about this error, try `rustc --explain E0658`.
diff --git a/src/test/ui/const-generics/issue-60818-struct-constructors.rs b/src/test/ui/const-generics/issue-60818-struct-constructors.rs
deleted file mode 100644 (file)
index b810efe..0000000
+++ /dev/null
@@ -1,10 +0,0 @@
-// build-pass (FIXME(62277): could be check-pass?)
-
-#![feature(const_generics)]
-//~^ WARN the feature `const_generics` is incomplete and may cause the compiler to crash
-
-struct Generic<const V: usize>;
-
-fn main() {
-    let _ = Generic::<0>;
-}
diff --git a/src/test/ui/const-generics/issue-60818-struct-constructors.stderr b/src/test/ui/const-generics/issue-60818-struct-constructors.stderr
deleted file mode 100644 (file)
index 3e0cd81..0000000
+++ /dev/null
@@ -1,8 +0,0 @@
-warning: the feature `const_generics` is incomplete and may cause the compiler to crash
-  --> $DIR/issue-60818-struct-constructors.rs:3:12
-   |
-LL | #![feature(const_generics)]
-   |            ^^^^^^^^^^^^^^
-   |
-   = note: `#[warn(incomplete_features)]` on by default
-
diff --git a/src/test/ui/const-generics/issue-61336-1.rs b/src/test/ui/const-generics/issue-61336-1.rs
deleted file mode 100644 (file)
index 5b5e431..0000000
+++ /dev/null
@@ -1,12 +0,0 @@
-#![feature(const_generics)]
-//~^ WARN the feature `const_generics` is incomplete and may cause the compiler to crash
-
-fn f<T: Copy, const N: usize>(x: T) -> [T; N] {
-    [x; N]
-    //~^ ERROR array lengths can't depend on generic parameters
-}
-
-fn main() {
-    let x: [u32; 5] = f::<u32, 5>(3);
-    assert_eq!(x, [3u32; 5]);
-}
diff --git a/src/test/ui/const-generics/issue-61336-1.stderr b/src/test/ui/const-generics/issue-61336-1.stderr
deleted file mode 100644 (file)
index 949fa89..0000000
+++ /dev/null
@@ -1,16 +0,0 @@
-warning: the feature `const_generics` is incomplete and may cause the compiler to crash
-  --> $DIR/issue-61336-1.rs:1:12
-   |
-LL | #![feature(const_generics)]
-   |            ^^^^^^^^^^^^^^
-   |
-   = note: `#[warn(incomplete_features)]` on by default
-
-error: array lengths can't depend on generic parameters
-  --> $DIR/issue-61336-1.rs:5:9
-   |
-LL |     [x; N]
-   |         ^
-
-error: aborting due to previous error
-
diff --git a/src/test/ui/const-generics/issue-61336-2.rs b/src/test/ui/const-generics/issue-61336-2.rs
deleted file mode 100644 (file)
index 7bb36f4..0000000
+++ /dev/null
@@ -1,17 +0,0 @@
-#![feature(const_generics)]
-//~^ WARN the feature `const_generics` is incomplete and may cause the compiler to crash
-
-fn f<T: Copy, const N: usize>(x: T) -> [T; N] {
-    [x; {N}]
-    //~^ ERROR array lengths can't depend on generic parameters
-}
-
-fn g<T, const N: usize>(x: T) -> [T; N] {
-    [x; {N}]
-    //~^ ERROR array lengths can't depend on generic parameters
-}
-
-fn main() {
-    let x: [u32; 5] = f::<u32, 5>(3);
-    assert_eq!(x, [3u32; 5]);
-}
diff --git a/src/test/ui/const-generics/issue-61336-2.stderr b/src/test/ui/const-generics/issue-61336-2.stderr
deleted file mode 100644 (file)
index 63f86c8..0000000
+++ /dev/null
@@ -1,22 +0,0 @@
-warning: the feature `const_generics` is incomplete and may cause the compiler to crash
-  --> $DIR/issue-61336-2.rs:1:12
-   |
-LL | #![feature(const_generics)]
-   |            ^^^^^^^^^^^^^^
-   |
-   = note: `#[warn(incomplete_features)]` on by default
-
-error: array lengths can't depend on generic parameters
-  --> $DIR/issue-61336-2.rs:5:9
-   |
-LL |     [x; {N}]
-   |         ^^^
-
-error: array lengths can't depend on generic parameters
-  --> $DIR/issue-61336-2.rs:10:9
-   |
-LL |     [x; {N}]
-   |         ^^^
-
-error: aborting due to 2 previous errors
-
diff --git a/src/test/ui/const-generics/issue-61336.rs b/src/test/ui/const-generics/issue-61336.rs
deleted file mode 100644 (file)
index edc012c..0000000
+++ /dev/null
@@ -1,17 +0,0 @@
-#![feature(const_generics)]
-//~^ WARN the feature `const_generics` is incomplete and may cause the compiler to crash
-
-fn f<T: Copy, const N: usize>(x: T) -> [T; N] {
-    [x; N]
-    //~^ ERROR array lengths can't depend on generic parameters
-}
-
-fn g<T, const N: usize>(x: T) -> [T; N] {
-    [x; N]
-    //~^ ERROR array lengths can't depend on generic parameters
-}
-
-fn main() {
-    let x: [u32; 5] = f::<u32, 5>(3);
-    assert_eq!(x, [3u32; 5]);
-}
diff --git a/src/test/ui/const-generics/issue-61336.stderr b/src/test/ui/const-generics/issue-61336.stderr
deleted file mode 100644 (file)
index f96e8e0..0000000
+++ /dev/null
@@ -1,22 +0,0 @@
-warning: the feature `const_generics` is incomplete and may cause the compiler to crash
-  --> $DIR/issue-61336.rs:1:12
-   |
-LL | #![feature(const_generics)]
-   |            ^^^^^^^^^^^^^^
-   |
-   = note: `#[warn(incomplete_features)]` on by default
-
-error: array lengths can't depend on generic parameters
-  --> $DIR/issue-61336.rs:5:9
-   |
-LL |     [x; N]
-   |         ^
-
-error: array lengths can't depend on generic parameters
-  --> $DIR/issue-61336.rs:10:9
-   |
-LL |     [x; N]
-   |         ^
-
-error: aborting due to 2 previous errors
-
diff --git a/src/test/ui/const-generics/issue-61422.rs b/src/test/ui/const-generics/issue-61422.rs
deleted file mode 100644 (file)
index 45d37b6..0000000
+++ /dev/null
@@ -1,16 +0,0 @@
-// check-pass
-
-#![feature(const_generics)]
-//~^ WARN the feature `const_generics` is incomplete and may cause the compiler to crash
-
-use std::mem;
-
-fn foo<const SIZE: usize>() {
-    let arr: [u8; SIZE] = unsafe {
-        #[allow(deprecated)]
-        let array: [u8; SIZE] = mem::uninitialized();
-        array
-    };
-}
-
-fn main() {}
diff --git a/src/test/ui/const-generics/issue-61422.stderr b/src/test/ui/const-generics/issue-61422.stderr
deleted file mode 100644 (file)
index 166bd3c..0000000
+++ /dev/null
@@ -1,8 +0,0 @@
-warning: the feature `const_generics` is incomplete and may cause the compiler to crash
-  --> $DIR/issue-61422.rs:3:12
-   |
-LL | #![feature(const_generics)]
-   |            ^^^^^^^^^^^^^^
-   |
-   = note: `#[warn(incomplete_features)]` on by default
-
diff --git a/src/test/ui/const-generics/issue-61432.rs b/src/test/ui/const-generics/issue-61432.rs
deleted file mode 100644 (file)
index 832095c..0000000
+++ /dev/null
@@ -1,17 +0,0 @@
-// run-pass
-
-#![feature(const_generics)]
-//~^ WARN the feature `const_generics` is incomplete and may cause the compiler to crash
-
-fn promote<const N: i32>() {
-    // works:
-    //
-    // let n = N;
-    // &n;
-
-    &N;
-}
-
-fn main() {
-    promote::<0>();
-}
diff --git a/src/test/ui/const-generics/issue-61432.stderr b/src/test/ui/const-generics/issue-61432.stderr
deleted file mode 100644 (file)
index 33f77b0..0000000
+++ /dev/null
@@ -1,8 +0,0 @@
-warning: the feature `const_generics` is incomplete and may cause the compiler to crash
-  --> $DIR/issue-61432.rs:3:12
-   |
-LL | #![feature(const_generics)]
-   |            ^^^^^^^^^^^^^^
-   |
-   = note: `#[warn(incomplete_features)]` on by default
-
diff --git a/src/test/ui/const-generics/issue-64519.rs b/src/test/ui/const-generics/issue-64519.rs
deleted file mode 100644 (file)
index 72cce9b..0000000
+++ /dev/null
@@ -1,21 +0,0 @@
-// check-pass
-
-#![feature(const_generics)]
-//~^ WARN the feature `const_generics` is incomplete and may cause the compiler to crash
-
-struct Foo<const D: usize> {
-    state: Option<[u8; D]>,
-}
-
-impl<const D: usize> Iterator for Foo<{D}> {
-    type Item = [u8; D];
-    fn next(&mut self) -> Option<Self::Item> {
-        if true {
-            return Some(self.state.unwrap().clone());
-        } else {
-            return Some(self.state.unwrap().clone());
-        }
-    }
-}
-
-fn main() {}
diff --git a/src/test/ui/const-generics/issue-64519.stderr b/src/test/ui/const-generics/issue-64519.stderr
deleted file mode 100644 (file)
index d368f39..0000000
+++ /dev/null
@@ -1,8 +0,0 @@
-warning: the feature `const_generics` is incomplete and may cause the compiler to crash
-  --> $DIR/issue-64519.rs:3:12
-   |
-LL | #![feature(const_generics)]
-   |            ^^^^^^^^^^^^^^
-   |
-   = note: `#[warn(incomplete_features)]` on by default
-
diff --git a/src/test/ui/const-generics/issues/issue-60263.rs b/src/test/ui/const-generics/issues/issue-60263.rs
new file mode 100644 (file)
index 0000000..70cbc24
--- /dev/null
@@ -0,0 +1,9 @@
+struct B<const I: u8>; //~ ERROR const generics are unstable
+
+impl B<0> {
+    fn bug() -> Self {
+        panic!()
+    }
+}
+
+fn main() {}
diff --git a/src/test/ui/const-generics/issues/issue-60263.stderr b/src/test/ui/const-generics/issues/issue-60263.stderr
new file mode 100644 (file)
index 0000000..fe7b6fd
--- /dev/null
@@ -0,0 +1,12 @@
+error[E0658]: const generics are unstable
+  --> $DIR/issue-60263.rs:1:16
+   |
+LL | struct B<const I: u8>;
+   |                ^
+   |
+   = note: for more information, see https://github.com/rust-lang/rust/issues/44580
+   = help: add `#![feature(const_generics)]` to the crate attributes to enable
+
+error: aborting due to previous error
+
+For more information about this error, try `rustc --explain E0658`.
diff --git a/src/test/ui/const-generics/issues/issue-60818-struct-constructors.rs b/src/test/ui/const-generics/issues/issue-60818-struct-constructors.rs
new file mode 100644 (file)
index 0000000..b810efe
--- /dev/null
@@ -0,0 +1,10 @@
+// build-pass (FIXME(62277): could be check-pass?)
+
+#![feature(const_generics)]
+//~^ WARN the feature `const_generics` is incomplete and may cause the compiler to crash
+
+struct Generic<const V: usize>;
+
+fn main() {
+    let _ = Generic::<0>;
+}
diff --git a/src/test/ui/const-generics/issues/issue-60818-struct-constructors.stderr b/src/test/ui/const-generics/issues/issue-60818-struct-constructors.stderr
new file mode 100644 (file)
index 0000000..3e0cd81
--- /dev/null
@@ -0,0 +1,8 @@
+warning: the feature `const_generics` is incomplete and may cause the compiler to crash
+  --> $DIR/issue-60818-struct-constructors.rs:3:12
+   |
+LL | #![feature(const_generics)]
+   |            ^^^^^^^^^^^^^^
+   |
+   = note: `#[warn(incomplete_features)]` on by default
+
diff --git a/src/test/ui/const-generics/issues/issue-61336-1.rs b/src/test/ui/const-generics/issues/issue-61336-1.rs
new file mode 100644 (file)
index 0000000..5b5e431
--- /dev/null
@@ -0,0 +1,12 @@
+#![feature(const_generics)]
+//~^ WARN the feature `const_generics` is incomplete and may cause the compiler to crash
+
+fn f<T: Copy, const N: usize>(x: T) -> [T; N] {
+    [x; N]
+    //~^ ERROR array lengths can't depend on generic parameters
+}
+
+fn main() {
+    let x: [u32; 5] = f::<u32, 5>(3);
+    assert_eq!(x, [3u32; 5]);
+}
diff --git a/src/test/ui/const-generics/issues/issue-61336-1.stderr b/src/test/ui/const-generics/issues/issue-61336-1.stderr
new file mode 100644 (file)
index 0000000..949fa89
--- /dev/null
@@ -0,0 +1,16 @@
+warning: the feature `const_generics` is incomplete and may cause the compiler to crash
+  --> $DIR/issue-61336-1.rs:1:12
+   |
+LL | #![feature(const_generics)]
+   |            ^^^^^^^^^^^^^^
+   |
+   = note: `#[warn(incomplete_features)]` on by default
+
+error: array lengths can't depend on generic parameters
+  --> $DIR/issue-61336-1.rs:5:9
+   |
+LL |     [x; N]
+   |         ^
+
+error: aborting due to previous error
+
diff --git a/src/test/ui/const-generics/issues/issue-61336-2.rs b/src/test/ui/const-generics/issues/issue-61336-2.rs
new file mode 100644 (file)
index 0000000..7bb36f4
--- /dev/null
@@ -0,0 +1,17 @@
+#![feature(const_generics)]
+//~^ WARN the feature `const_generics` is incomplete and may cause the compiler to crash
+
+fn f<T: Copy, const N: usize>(x: T) -> [T; N] {
+    [x; {N}]
+    //~^ ERROR array lengths can't depend on generic parameters
+}
+
+fn g<T, const N: usize>(x: T) -> [T; N] {
+    [x; {N}]
+    //~^ ERROR array lengths can't depend on generic parameters
+}
+
+fn main() {
+    let x: [u32; 5] = f::<u32, 5>(3);
+    assert_eq!(x, [3u32; 5]);
+}
diff --git a/src/test/ui/const-generics/issues/issue-61336-2.stderr b/src/test/ui/const-generics/issues/issue-61336-2.stderr
new file mode 100644 (file)
index 0000000..63f86c8
--- /dev/null
@@ -0,0 +1,22 @@
+warning: the feature `const_generics` is incomplete and may cause the compiler to crash
+  --> $DIR/issue-61336-2.rs:1:12
+   |
+LL | #![feature(const_generics)]
+   |            ^^^^^^^^^^^^^^
+   |
+   = note: `#[warn(incomplete_features)]` on by default
+
+error: array lengths can't depend on generic parameters
+  --> $DIR/issue-61336-2.rs:5:9
+   |
+LL |     [x; {N}]
+   |         ^^^
+
+error: array lengths can't depend on generic parameters
+  --> $DIR/issue-61336-2.rs:10:9
+   |
+LL |     [x; {N}]
+   |         ^^^
+
+error: aborting due to 2 previous errors
+
diff --git a/src/test/ui/const-generics/issues/issue-61336.rs b/src/test/ui/const-generics/issues/issue-61336.rs
new file mode 100644 (file)
index 0000000..edc012c
--- /dev/null
@@ -0,0 +1,17 @@
+#![feature(const_generics)]
+//~^ WARN the feature `const_generics` is incomplete and may cause the compiler to crash
+
+fn f<T: Copy, const N: usize>(x: T) -> [T; N] {
+    [x; N]
+    //~^ ERROR array lengths can't depend on generic parameters
+}
+
+fn g<T, const N: usize>(x: T) -> [T; N] {
+    [x; N]
+    //~^ ERROR array lengths can't depend on generic parameters
+}
+
+fn main() {
+    let x: [u32; 5] = f::<u32, 5>(3);
+    assert_eq!(x, [3u32; 5]);
+}
diff --git a/src/test/ui/const-generics/issues/issue-61336.stderr b/src/test/ui/const-generics/issues/issue-61336.stderr
new file mode 100644 (file)
index 0000000..f96e8e0
--- /dev/null
@@ -0,0 +1,22 @@
+warning: the feature `const_generics` is incomplete and may cause the compiler to crash
+  --> $DIR/issue-61336.rs:1:12
+   |
+LL | #![feature(const_generics)]
+   |            ^^^^^^^^^^^^^^
+   |
+   = note: `#[warn(incomplete_features)]` on by default
+
+error: array lengths can't depend on generic parameters
+  --> $DIR/issue-61336.rs:5:9
+   |
+LL |     [x; N]
+   |         ^
+
+error: array lengths can't depend on generic parameters
+  --> $DIR/issue-61336.rs:10:9
+   |
+LL |     [x; N]
+   |         ^
+
+error: aborting due to 2 previous errors
+
diff --git a/src/test/ui/const-generics/issues/issue-61422.rs b/src/test/ui/const-generics/issues/issue-61422.rs
new file mode 100644 (file)
index 0000000..45d37b6
--- /dev/null
@@ -0,0 +1,16 @@
+// check-pass
+
+#![feature(const_generics)]
+//~^ WARN the feature `const_generics` is incomplete and may cause the compiler to crash
+
+use std::mem;
+
+fn foo<const SIZE: usize>() {
+    let arr: [u8; SIZE] = unsafe {
+        #[allow(deprecated)]
+        let array: [u8; SIZE] = mem::uninitialized();
+        array
+    };
+}
+
+fn main() {}
diff --git a/src/test/ui/const-generics/issues/issue-61422.stderr b/src/test/ui/const-generics/issues/issue-61422.stderr
new file mode 100644 (file)
index 0000000..166bd3c
--- /dev/null
@@ -0,0 +1,8 @@
+warning: the feature `const_generics` is incomplete and may cause the compiler to crash
+  --> $DIR/issue-61422.rs:3:12
+   |
+LL | #![feature(const_generics)]
+   |            ^^^^^^^^^^^^^^
+   |
+   = note: `#[warn(incomplete_features)]` on by default
+
diff --git a/src/test/ui/const-generics/issues/issue-61432.rs b/src/test/ui/const-generics/issues/issue-61432.rs
new file mode 100644 (file)
index 0000000..832095c
--- /dev/null
@@ -0,0 +1,17 @@
+// run-pass
+
+#![feature(const_generics)]
+//~^ WARN the feature `const_generics` is incomplete and may cause the compiler to crash
+
+fn promote<const N: i32>() {
+    // works:
+    //
+    // let n = N;
+    // &n;
+
+    &N;
+}
+
+fn main() {
+    promote::<0>();
+}
diff --git a/src/test/ui/const-generics/issues/issue-61432.stderr b/src/test/ui/const-generics/issues/issue-61432.stderr
new file mode 100644 (file)
index 0000000..33f77b0
--- /dev/null
@@ -0,0 +1,8 @@
+warning: the feature `const_generics` is incomplete and may cause the compiler to crash
+  --> $DIR/issue-61432.rs:3:12
+   |
+LL | #![feature(const_generics)]
+   |            ^^^^^^^^^^^^^^
+   |
+   = note: `#[warn(incomplete_features)]` on by default
+
diff --git a/src/test/ui/const-generics/issues/issue-62187-encountered-polymorphic-const.rs b/src/test/ui/const-generics/issues/issue-62187-encountered-polymorphic-const.rs
new file mode 100644 (file)
index 0000000..4dc46eb
--- /dev/null
@@ -0,0 +1,16 @@
+// run-pass
+
+#![feature(const_generics)]
+//~^ WARN the feature `const_generics` is incomplete and may cause the compiler to crash
+
+pub trait BitLen: Sized {
+    const BIT_LEN: usize;
+}
+
+impl<const L: usize> BitLen for [u8; L] {
+    const BIT_LEN: usize = 8 * L;
+}
+
+fn main() {
+    let foo = <[u8; 2]>::BIT_LEN;
+}
diff --git a/src/test/ui/const-generics/issues/issue-62187-encountered-polymorphic-const.stderr b/src/test/ui/const-generics/issues/issue-62187-encountered-polymorphic-const.stderr
new file mode 100644 (file)
index 0000000..20347ac
--- /dev/null
@@ -0,0 +1,16 @@
+warning: the feature `const_generics` is incomplete and may cause the compiler to crash
+  --> $DIR/issue-62187-encountered-polymorphic-const.rs:3:12
+   |
+LL | #![feature(const_generics)]
+   |            ^^^^^^^^^^^^^^
+   |
+   = note: `#[warn(incomplete_features)]` on by default
+
+warning: unused variable: `foo`
+  --> $DIR/issue-62187-encountered-polymorphic-const.rs:15:9
+   |
+LL |     let foo = <[u8; 2]>::BIT_LEN;
+   |         ^^^ help: consider prefixing with an underscore: `_foo`
+   |
+   = note: `#[warn(unused_variables)]` on by default
+
diff --git a/src/test/ui/const-generics/issues/issue-64519.rs b/src/test/ui/const-generics/issues/issue-64519.rs
new file mode 100644 (file)
index 0000000..72cce9b
--- /dev/null
@@ -0,0 +1,21 @@
+// check-pass
+
+#![feature(const_generics)]
+//~^ WARN the feature `const_generics` is incomplete and may cause the compiler to crash
+
+struct Foo<const D: usize> {
+    state: Option<[u8; D]>,
+}
+
+impl<const D: usize> Iterator for Foo<{D}> {
+    type Item = [u8; D];
+    fn next(&mut self) -> Option<Self::Item> {
+        if true {
+            return Some(self.state.unwrap().clone());
+        } else {
+            return Some(self.state.unwrap().clone());
+        }
+    }
+}
+
+fn main() {}
diff --git a/src/test/ui/const-generics/issues/issue-64519.stderr b/src/test/ui/const-generics/issues/issue-64519.stderr
new file mode 100644 (file)
index 0000000..d368f39
--- /dev/null
@@ -0,0 +1,8 @@
+warning: the feature `const_generics` is incomplete and may cause the compiler to crash
+  --> $DIR/issue-64519.rs:3:12
+   |
+LL | #![feature(const_generics)]
+   |            ^^^^^^^^^^^^^^
+   |
+   = note: `#[warn(incomplete_features)]` on by default
+
diff --git a/src/test/ui/const-generics/raw-ptr-const-param-deref.rs b/src/test/ui/const-generics/raw-ptr-const-param-deref.rs
new file mode 100644 (file)
index 0000000..d26ab8b
--- /dev/null
@@ -0,0 +1,19 @@
+// run-pass
+#![feature(const_generics, const_compare_raw_pointers)]
+//~^ WARN the feature `const_generics` is incomplete and may cause the compiler to crash
+
+const A: u32 = 3;
+
+struct Const<const P: *const u32>;
+
+impl<const P: *const u32> Const<{P}> {
+    fn get() -> u32 {
+        unsafe {
+            *P
+        }
+    }
+}
+
+fn main() {
+    assert_eq!(Const::<{&A as *const _}>::get(), 3)
+}
diff --git a/src/test/ui/const-generics/raw-ptr-const-param-deref.stderr b/src/test/ui/const-generics/raw-ptr-const-param-deref.stderr
new file mode 100644 (file)
index 0000000..7322159
--- /dev/null
@@ -0,0 +1,8 @@
+warning: the feature `const_generics` is incomplete and may cause the compiler to crash
+  --> $DIR/raw-ptr-const-param-deref.rs:2:12
+   |
+LL | #![feature(const_generics, const_compare_raw_pointers)]
+   |            ^^^^^^^^^^^^^^
+   |
+   = note: `#[warn(incomplete_features)]` on by default
+
diff --git a/src/test/ui/const-generics/raw-ptr-const-param.rs b/src/test/ui/const-generics/raw-ptr-const-param.rs
new file mode 100644 (file)
index 0000000..f69c37f
--- /dev/null
@@ -0,0 +1,9 @@
+#![feature(const_generics, const_compare_raw_pointers)]
+//~^ WARN the feature `const_generics` is incomplete and may cause the compiler to crash
+
+struct Const<const P: *const u32>;
+
+fn main() {
+    let _: Const<{15 as *const _}> = Const::<{10 as *const _}>; //~ mismatched types
+    let _: Const<{10 as *const _}> = Const::<{10 as *const _}>;
+}
diff --git a/src/test/ui/const-generics/raw-ptr-const-param.stderr b/src/test/ui/const-generics/raw-ptr-const-param.stderr
new file mode 100644 (file)
index 0000000..75b4c0a
--- /dev/null
@@ -0,0 +1,20 @@
+warning: the feature `const_generics` is incomplete and may cause the compiler to crash
+  --> $DIR/raw-ptr-const-param.rs:1:12
+   |
+LL | #![feature(const_generics, const_compare_raw_pointers)]
+   |            ^^^^^^^^^^^^^^
+   |
+   = note: `#[warn(incomplete_features)]` on by default
+
+error[E0308]: mismatched types
+  --> $DIR/raw-ptr-const-param.rs:7:38
+   |
+LL |     let _: Const<{15 as *const _}> = Const::<{10 as *const _}>;
+   |                                      ^^^^^^^^^^^^^^^^^^^^^^^^^ expected `{pointer}`, found `{pointer}`
+   |
+   = note: expected type `Const<{pointer}>`
+              found type `Const<{pointer}>`
+
+error: aborting due to previous error
+
+For more information about this error, try `rustc --explain E0308`.
index 72369ab24ebfc5ddfe1ebca3d874ae0c30418c60..380a70d664e05939a032bf5041b79ffd158a82f5 100644 (file)
@@ -12,8 +12,8 @@ error[E0308]: mismatched types
 LL |     let _: ConstString<"Hello"> = ConstString::<"World">;
    |                                   ^^^^^^^^^^^^^^^^^^^^^^ expected `"Hello"`, found `"World"`
    |
-   = note: expected type `ConstString<>`
-              found type `ConstString<>`
+   = note: expected type `ConstString<"Hello">`
+              found type `ConstString<"World">`
 
 error[E0308]: mismatched types
   --> $DIR/slice-const-param-mismatch.rs:11:33
@@ -21,8 +21,8 @@ error[E0308]: mismatched types
 LL |     let _: ConstString<"ℇ㇈↦"> = ConstString::<"ℇ㇈↥">;
    |                                  ^^^^^^^^^^^^^^^^^^^^^ expected `"ℇ㇈↦"`, found `"ℇ㇈↥"`
    |
-   = note: expected type `ConstString<>`
-              found type `ConstString<>`
+   = note: expected type `ConstString<"ℇ㇈↦">`
+              found type `ConstString<"ℇ㇈↥">`
 
 error[E0308]: mismatched types
   --> $DIR/slice-const-param-mismatch.rs:13:33
@@ -30,8 +30,8 @@ error[E0308]: mismatched types
 LL |     let _: ConstBytes<b"AAA"> = ConstBytes::<b"BBB">;
    |                                 ^^^^^^^^^^^^^^^^^^^^ expected `b"AAA"`, found `b"BBB"`
    |
-   = note: expected type `ConstBytes<>`
-              found type `ConstBytes<>`
+   = note: expected type `ConstBytes<b"AAA">`
+              found type `ConstBytes<b"BBB">`
 
 error: aborting due to 3 previous errors
 
index dfa2557e9f6f8479da5fc852a40ac943b169fe2c..b3aa35e079aec96f369836e6eb10753848e04a4f 100644 (file)
@@ -14,3 +14,4 @@ LL | #![feature(const_generics)]
 
 error: aborting due to previous error
 
+For more information about this error, try `rustc --explain E0573`.
diff --git a/src/test/ui/const-generics/types-mismatch-const-args.rs b/src/test/ui/const-generics/types-mismatch-const-args.rs
new file mode 100644 (file)
index 0000000..b25b733
--- /dev/null
@@ -0,0 +1,19 @@
+#![feature(const_generics)]
+//~^ WARN the feature `const_generics` is incomplete and may cause the compiler to crash
+
+// tests the diagnostic output of type mismatches for types that have const generics arguments.
+
+use std::marker::PhantomData;
+
+struct A<'a, T, const X: u32, const Y: u32> {
+    data: PhantomData<&'a T>
+}
+
+fn a<'a, 'b>() {
+    let _: A<'a, u32, {2u32}, {3u32}> = A::<'a, u32, {4u32}, {3u32}> { data: PhantomData };
+    //~^ ERROR mismatched types
+    let _: A<'a, u16, {2u32}, {3u32}> = A::<'b, u32, {2u32}, {3u32}> { data: PhantomData };
+    //~^ ERROR mismatched types
+}
+
+pub fn main() {}
diff --git a/src/test/ui/const-generics/types-mismatch-const-args.stderr b/src/test/ui/const-generics/types-mismatch-const-args.stderr
new file mode 100644 (file)
index 0000000..805a306
--- /dev/null
@@ -0,0 +1,29 @@
+warning: the feature `const_generics` is incomplete and may cause the compiler to crash
+  --> $DIR/types-mismatch-const-args.rs:1:12
+   |
+LL | #![feature(const_generics)]
+   |            ^^^^^^^^^^^^^^
+   |
+   = note: `#[warn(incomplete_features)]` on by default
+
+error[E0308]: mismatched types
+  --> $DIR/types-mismatch-const-args.rs:13:41
+   |
+LL |     let _: A<'a, u32, {2u32}, {3u32}> = A::<'a, u32, {4u32}, {3u32}> { data: PhantomData };
+   |                                         ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ expected `2u32`, found `4u32`
+   |
+   = note: expected type `A<'_, _, 2u32, _>`
+              found type `A<'_, _, 4u32, _>`
+
+error[E0308]: mismatched types
+  --> $DIR/types-mismatch-const-args.rs:15:41
+   |
+LL |     let _: A<'a, u16, {2u32}, {3u32}> = A::<'b, u32, {2u32}, {3u32}> { data: PhantomData };
+   |                                         ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ expected u16, found u32
+   |
+   = note: expected type `A<'a, u16, _, _>`
+              found type `A<'b, u32, _, _>`
+
+error: aborting due to 2 previous errors
+
+For more information about this error, try `rustc --explain E0308`.
diff --git a/src/test/ui/consts/auxiliary/external_macro.rs b/src/test/ui/consts/auxiliary/external_macro.rs
new file mode 100644 (file)
index 0000000..d260634
--- /dev/null
@@ -0,0 +1,14 @@
+#![feature(allow_internal_unstable)]
+
+// Macro to help ensure CONST_ERR lint errors
+// are not silenced in external macros.
+// https://github.com/rust-lang/rust/issues/65300
+
+#[macro_export]
+#[allow_internal_unstable(type_ascription)]
+macro_rules! static_assert {
+    ($test:expr) => {
+        #[allow(dead_code)]
+        const _: () = [()][!($test: bool) as usize];
+    }
+}
index ecbcc2a4b496f882e1414066a7e75dff2884adab..e5ee90fc9f11f00b84eea165de12dadab70be07f 100644 (file)
@@ -23,6 +23,7 @@ fn main() {
     //~^ ERROR const_err
     let _e = [5u8][1];
     //~^ ERROR index out of bounds
+    //~| ERROR this expression will panic at runtime
     black_box(a);
     black_box(b);
     black_box(c);
index 1d84d44dc27b3c834400ad29840aae4a0e9d54d3..0a09a7213dabc8f6596c2351ddaacf9521d69570 100644 (file)
@@ -34,5 +34,11 @@ error: index out of bounds: the len is 1 but the index is 1
 LL |     let _e = [5u8][1];
    |              ^^^^^^^^
 
-error: aborting due to 5 previous errors
+error: this expression will panic at runtime
+  --> $DIR/const-err2.rs:24:14
+   |
+LL |     let _e = [5u8][1];
+   |              ^^^^^^^^ index out of bounds: the len is 1 but the index is 1
+
+error: aborting due to 6 previous errors
 
index a9cf04cda7a5aed0be90fdfdd085e39b30b7d1f6..89373f99f75c235ac7017ade4ce3679565d2a0c6 100644 (file)
@@ -23,6 +23,7 @@ fn main() {
     //~^ ERROR const_err
     let _e = [5u8][1];
     //~^ ERROR const_err
+    //~| ERROR this expression will panic at runtime
     black_box(a);
     black_box(b);
     black_box(c);
index 0602707be70408458efe8ba4b1f7f4882e7cab05..42de247c8f7e074d6a9e139cae02d89e99d5104f 100644 (file)
@@ -34,5 +34,11 @@ error: index out of bounds: the len is 1 but the index is 1
 LL |     let _e = [5u8][1];
    |              ^^^^^^^^
 
-error: aborting due to 5 previous errors
+error: this expression will panic at runtime
+  --> $DIR/const-err3.rs:24:14
+   |
+LL |     let _e = [5u8][1];
+   |              ^^^^^^^^ index out of bounds: the len is 1 but the index is 1
+
+error: aborting due to 6 previous errors
 
index 274967ef60de54cb477ee0948057e7133d342a0c..b85cecda16e9550fb45726a14b0e55c367ff4cfb 100644 (file)
@@ -11,7 +11,6 @@ impl Unsigned for U8 {
 
 impl<A: Unsigned, B: Unsigned> Unsigned for Sum<A,B> {
     const MAX: u8 = A::MAX + B::MAX; //~ ERROR any use of this value will cause an error
-    //~| ERROR any use of this value will cause an error
 }
 
 fn foo<T>(_: T) -> &'static u8 {
index de3459c72dd2b0397ab80996ed1dae5c62814d9f..707dfee7cd5b89c96b2128a858a80a5ebf91ef64 100644 (file)
@@ -9,21 +9,13 @@ LL |     const MAX: u8 = A::MAX + B::MAX;
    = note: `#[deny(const_err)]` on by default
 
 error[E0080]: evaluation of constant expression failed
-  --> $DIR/issue-50814.rs:18:5
+  --> $DIR/issue-50814.rs:17:5
    |
 LL |     &Sum::<U8,U8>::MAX
    |     ^-----------------
    |      |
    |      referenced constant has errors
 
-error: any use of this value will cause an error
-  --> $DIR/issue-50814.rs:13:21
-   |
-LL |     const MAX: u8 = A::MAX + B::MAX;
-   |     ----------------^^^^^^^^^^^^^^^-
-   |                     |
-   |                     attempt to add with overflow
-
-error: aborting due to 3 previous errors
+error: aborting due to 2 previous errors
 
 For more information about this error, try `rustc --explain E0080`.
diff --git a/src/test/ui/consts/const-eval/issue-64908.rs b/src/test/ui/consts/const-eval/issue-64908.rs
new file mode 100644 (file)
index 0000000..d2e0950
--- /dev/null
@@ -0,0 +1,20 @@
+// run-pass
+
+// This test verifies that the `ConstProp` pass doesn't cause an ICE when evaluating polymorphic
+// promoted MIR.
+
+pub trait ArrowPrimitiveType {
+    type Native;
+}
+
+pub fn new<T: ArrowPrimitiveType>() {
+    assert_eq!(0, std::mem::size_of::<T::Native>());
+}
+
+impl ArrowPrimitiveType for () {
+    type Native = ();
+}
+
+fn main() {
+    new::<()>();
+}
diff --git a/src/test/ui/consts/const-eval/issue-65394.rs b/src/test/ui/consts/const-eval/issue-65394.rs
new file mode 100644 (file)
index 0000000..978e227
--- /dev/null
@@ -0,0 +1,13 @@
+// Test for absence of validation mismatch ICE in #65394
+
+#![feature(rustc_attrs)]
+
+#[rustc_mir(borrowck_graphviz_postflow="hello.dot")]
+const _: Vec<i32> = {
+    let mut x = Vec::<i32>::new();
+    let r = &mut x; //~ ERROR references in constants may only refer to immutable values
+    let y = x;
+    y
+};
+
+fn main() {}
diff --git a/src/test/ui/consts/const-eval/issue-65394.stderr b/src/test/ui/consts/const-eval/issue-65394.stderr
new file mode 100644 (file)
index 0000000..f48c551
--- /dev/null
@@ -0,0 +1,11 @@
+error[E0017]: references in constants may only refer to immutable values
+  --> $DIR/issue-65394.rs:8:13
+   |
+LL |     let r = &mut x;
+   |             ^^^^^^ constants require immutable values
+
+[ERROR rustc_mir::transform::qualify_consts] old validator: [($DIR/issue-65394.rs:8:13: 8:19, "MutBorrow(Mut { allow_two_phase_borrow: false })")]
+[ERROR rustc_mir::transform::qualify_consts] new validator: [($DIR/issue-65394.rs:8:13: 8:19, "MutBorrow(Mut { allow_two_phase_borrow: false })"), ($DIR/issue-65394.rs:7:9: 7:14, "LiveDrop")]
+error: aborting due to previous error
+
+For more information about this error, try `rustc --explain E0017`.
diff --git a/src/test/ui/consts/const-eval/write-to-uninhabited-enum-variant.rs b/src/test/ui/consts/const-eval/write-to-uninhabited-enum-variant.rs
new file mode 100644 (file)
index 0000000..cccb787
--- /dev/null
@@ -0,0 +1,28 @@
+// run-pass
+
+#![allow(dead_code)]
+
+enum Empty { }
+enum Test1 {
+    A(u8),
+    B(Empty),
+}
+enum Test2 {
+    A(u8),
+    B(Empty),
+    C,
+}
+
+fn bar() -> Option<Empty> {
+    None
+}
+
+fn main() {
+    if let Some(x) = bar() {
+        Test1::B(x);
+    }
+
+    if let Some(x) = bar() {
+        Test2::B(x);
+    }
+}
diff --git a/src/test/ui/consts/const-extern-fn/const-extern-fn-call-extern-fn.rs b/src/test/ui/consts/const-extern-fn/const-extern-fn-call-extern-fn.rs
new file mode 100644 (file)
index 0000000..7c6a574
--- /dev/null
@@ -0,0 +1,23 @@
+#![feature(const_extern_fn)]
+
+extern "C" {
+    fn regular_in_block();
+}
+
+const extern fn bar() {
+    unsafe {
+        regular_in_block();
+        //~^ ERROR: cannot call functions with `"C"` abi in `min_const_fn`
+    }
+}
+
+extern fn regular() {}
+
+const extern fn foo() {
+    unsafe {
+        regular();
+        //~^ ERROR: cannot call functions with `"C"` abi in `min_const_fn`
+    }
+}
+
+fn main() {}
diff --git a/src/test/ui/consts/const-extern-fn/const-extern-fn-call-extern-fn.stderr b/src/test/ui/consts/const-extern-fn/const-extern-fn-call-extern-fn.stderr
new file mode 100644 (file)
index 0000000..d8bdf0a
--- /dev/null
@@ -0,0 +1,21 @@
+error[E0723]: cannot call functions with `"C"` abi in `min_const_fn`
+  --> $DIR/const-extern-fn-call-extern-fn.rs:9:9
+   |
+LL |         regular_in_block();
+   |         ^^^^^^^^^^^^^^^^^^
+   |
+   = note: for more information, see issue https://github.com/rust-lang/rust/issues/57563
+   = help: add `#![feature(const_fn)]` to the crate attributes to enable
+
+error[E0723]: cannot call functions with `"C"` abi in `min_const_fn`
+  --> $DIR/const-extern-fn-call-extern-fn.rs:18:9
+   |
+LL |         regular();
+   |         ^^^^^^^^^
+   |
+   = note: for more information, see issue https://github.com/rust-lang/rust/issues/57563
+   = help: add `#![feature(const_fn)]` to the crate attributes to enable
+
+error: aborting due to 2 previous errors
+
+For more information about this error, try `rustc --explain E0723`.
diff --git a/src/test/ui/consts/const-extern-fn/const-extern-fn-min-const-fn.rs b/src/test/ui/consts/const-extern-fn/const-extern-fn-min-const-fn.rs
new file mode 100644 (file)
index 0000000..5619811
--- /dev/null
@@ -0,0 +1,13 @@
+#![feature(const_extern_fn)]
+
+const extern fn unsize(x: &[u8; 3]) -> &[u8] { x }
+//~^ ERROR unsizing casts are not allowed in const fn
+const unsafe extern "C" fn closure() -> fn() { || {} }
+//~^ ERROR function pointers in const fn are unstable
+const unsafe extern fn use_float() { 1.0 + 1.0; }
+//~^ ERROR only int, `bool` and `char` operations are stable in const fn
+const extern "C" fn ptr_cast(val: *const u8) { val as usize; }
+//~^ ERROR casting pointers to ints is unstable in const fn
+
+
+fn main() {}
diff --git a/src/test/ui/consts/const-extern-fn/const-extern-fn-min-const-fn.stderr b/src/test/ui/consts/const-extern-fn/const-extern-fn-min-const-fn.stderr
new file mode 100644 (file)
index 0000000..0ab1ddd
--- /dev/null
@@ -0,0 +1,39 @@
+error[E0723]: unsizing casts are not allowed in const fn
+  --> $DIR/const-extern-fn-min-const-fn.rs:3:48
+   |
+LL | const extern fn unsize(x: &[u8; 3]) -> &[u8] { x }
+   |                                                ^
+   |
+   = note: for more information, see issue https://github.com/rust-lang/rust/issues/57563
+   = help: add `#![feature(const_fn)]` to the crate attributes to enable
+
+error[E0723]: function pointers in const fn are unstable
+  --> $DIR/const-extern-fn-min-const-fn.rs:5:41
+   |
+LL | const unsafe extern "C" fn closure() -> fn() { || {} }
+   |                                         ^^^^
+   |
+   = note: for more information, see issue https://github.com/rust-lang/rust/issues/57563
+   = help: add `#![feature(const_fn)]` to the crate attributes to enable
+
+error[E0723]: only int, `bool` and `char` operations are stable in const fn
+  --> $DIR/const-extern-fn-min-const-fn.rs:7:38
+   |
+LL | const unsafe extern fn use_float() { 1.0 + 1.0; }
+   |                                      ^^^^^^^^^
+   |
+   = note: for more information, see issue https://github.com/rust-lang/rust/issues/57563
+   = help: add `#![feature(const_fn)]` to the crate attributes to enable
+
+error[E0723]: casting pointers to ints is unstable in const fn
+  --> $DIR/const-extern-fn-min-const-fn.rs:9:48
+   |
+LL | const extern "C" fn ptr_cast(val: *const u8) { val as usize; }
+   |                                                ^^^^^^^^^^^^
+   |
+   = note: for more information, see issue https://github.com/rust-lang/rust/issues/57563
+   = help: add `#![feature(const_fn)]` to the crate attributes to enable
+
+error: aborting due to 4 previous errors
+
+For more information about this error, try `rustc --explain E0723`.
diff --git a/src/test/ui/consts/const-extern-fn/const-extern-fn-requires-unsafe.rs b/src/test/ui/consts/const-extern-fn/const-extern-fn-requires-unsafe.rs
new file mode 100644 (file)
index 0000000..cab175b
--- /dev/null
@@ -0,0 +1,10 @@
+#![feature(const_extern_fn)]
+
+const unsafe extern fn foo() -> usize { 5 }
+
+fn main() {
+    let a: [u8; foo()];
+    //~^ ERROR call to unsafe function is unsafe and requires unsafe function or block
+    foo();
+    //~^ ERROR call to unsafe function is unsafe and requires unsafe function or block
+}
diff --git a/src/test/ui/consts/const-extern-fn/const-extern-fn-requires-unsafe.stderr b/src/test/ui/consts/const-extern-fn/const-extern-fn-requires-unsafe.stderr
new file mode 100644 (file)
index 0000000..5196b8e
--- /dev/null
@@ -0,0 +1,19 @@
+error[E0133]: call to unsafe function is unsafe and requires unsafe function or block
+  --> $DIR/const-extern-fn-requires-unsafe.rs:8:5
+   |
+LL |     foo();
+   |     ^^^^^ call to unsafe function
+   |
+   = note: consult the function's documentation for information on how to avoid undefined behavior
+
+error[E0133]: call to unsafe function is unsafe and requires unsafe function or block
+  --> $DIR/const-extern-fn-requires-unsafe.rs:6:17
+   |
+LL |     let a: [u8; foo()];
+   |                 ^^^^^ call to unsafe function
+   |
+   = note: consult the function's documentation for information on how to avoid undefined behavior
+
+error: aborting due to 2 previous errors
+
+For more information about this error, try `rustc --explain E0133`.
diff --git a/src/test/ui/consts/const-extern-fn/const-extern-fn.rs b/src/test/ui/consts/const-extern-fn/const-extern-fn.rs
new file mode 100644 (file)
index 0000000..1dc0f83
--- /dev/null
@@ -0,0 +1,35 @@
+// run-pass
+#![feature(const_extern_fn)]
+
+const extern fn foo1(val: u8) -> u8 {
+    val + 1
+}
+
+const extern "C" fn foo2(val: u8) -> u8 {
+    val + 1
+}
+
+const unsafe extern fn bar1(val: bool) -> bool {
+    !val
+}
+
+const unsafe extern "C" fn bar2(val: bool) -> bool {
+    !val
+}
+
+
+fn main() {
+    let a: [u8; foo1(25) as usize] = [0; 26];
+    let b: [u8; foo2(25) as usize] = [0; 26];
+    assert_eq!(a, b);
+
+    let bar1_res = unsafe { bar1(false) };
+    let bar2_res = unsafe { bar2(false) };
+    assert!(bar1_res);
+    assert_eq!(bar1_res, bar2_res);
+
+    let _foo1_cast: extern fn(u8) -> u8 = foo1;
+    let _foo2_cast: extern fn(u8) -> u8 = foo2;
+    let _bar1_cast: unsafe extern fn(bool) -> bool = bar1;
+    let _bar2_cast: unsafe extern fn(bool) -> bool = bar2;
+}
diff --git a/src/test/ui/consts/const-extern-fn/feature-gate-const_extern_fn.rs b/src/test/ui/consts/const-extern-fn/feature-gate-const_extern_fn.rs
new file mode 100644 (file)
index 0000000..d39f2c1
--- /dev/null
@@ -0,0 +1,12 @@
+// Check that `const extern fn` and `const unsafe extern fn` are feature-gated.
+
+#[cfg(FALSE)] const extern fn foo1() {} //~ ERROR `const extern fn` definitions are unstable
+#[cfg(FALSE)] const extern "C" fn foo2() {} //~ ERROR `const extern fn` definitions are unstable
+#[cfg(FALSE)] const extern "Rust" fn foo3() {} //~ ERROR `const extern fn` definitions are unstable
+#[cfg(FALSE)] const unsafe extern fn bar1() {} //~ ERROR `const extern fn` definitions are unstable
+#[cfg(FALSE)] const unsafe extern "C" fn bar2() {}
+//~^ ERROR `const extern fn` definitions are unstable
+#[cfg(FALSE)] const unsafe extern "Rust" fn bar3() {}
+//~^ ERROR `const extern fn` definitions are unstable
+
+fn main() {}
diff --git a/src/test/ui/consts/const-extern-fn/feature-gate-const_extern_fn.stderr b/src/test/ui/consts/const-extern-fn/feature-gate-const_extern_fn.stderr
new file mode 100644 (file)
index 0000000..f138620
--- /dev/null
@@ -0,0 +1,57 @@
+error[E0658]: `const extern fn` definitions are unstable
+  --> $DIR/feature-gate-const_extern_fn.rs:3:15
+   |
+LL | #[cfg(FALSE)] const extern fn foo1() {}
+   |               ^^^^^^^^^^^^
+   |
+   = note: for more information, see https://github.com/rust-lang/rust/issues/64926
+   = help: add `#![feature(const_extern_fn)]` to the crate attributes to enable
+
+error[E0658]: `const extern fn` definitions are unstable
+  --> $DIR/feature-gate-const_extern_fn.rs:4:15
+   |
+LL | #[cfg(FALSE)] const extern "C" fn foo2() {}
+   |               ^^^^^^^^^^^^
+   |
+   = note: for more information, see https://github.com/rust-lang/rust/issues/64926
+   = help: add `#![feature(const_extern_fn)]` to the crate attributes to enable
+
+error[E0658]: `const extern fn` definitions are unstable
+  --> $DIR/feature-gate-const_extern_fn.rs:5:15
+   |
+LL | #[cfg(FALSE)] const extern "Rust" fn foo3() {}
+   |               ^^^^^^^^^^^^
+   |
+   = note: for more information, see https://github.com/rust-lang/rust/issues/64926
+   = help: add `#![feature(const_extern_fn)]` to the crate attributes to enable
+
+error[E0658]: `const extern fn` definitions are unstable
+  --> $DIR/feature-gate-const_extern_fn.rs:6:15
+   |
+LL | #[cfg(FALSE)] const unsafe extern fn bar1() {}
+   |               ^^^^^^^^^^^^^^^^^^^
+   |
+   = note: for more information, see https://github.com/rust-lang/rust/issues/64926
+   = help: add `#![feature(const_extern_fn)]` to the crate attributes to enable
+
+error[E0658]: `const extern fn` definitions are unstable
+  --> $DIR/feature-gate-const_extern_fn.rs:7:15
+   |
+LL | #[cfg(FALSE)] const unsafe extern "C" fn bar2() {}
+   |               ^^^^^^^^^^^^^^^^^^^
+   |
+   = note: for more information, see https://github.com/rust-lang/rust/issues/64926
+   = help: add `#![feature(const_extern_fn)]` to the crate attributes to enable
+
+error[E0658]: `const extern fn` definitions are unstable
+  --> $DIR/feature-gate-const_extern_fn.rs:9:15
+   |
+LL | #[cfg(FALSE)] const unsafe extern "Rust" fn bar3() {}
+   |               ^^^^^^^^^^^^^^^^^^^
+   |
+   = note: for more information, see https://github.com/rust-lang/rust/issues/64926
+   = help: add `#![feature(const_extern_fn)]` to the crate attributes to enable
+
+error: aborting due to 6 previous errors
+
+For more information about this error, try `rustc --explain E0658`.
diff --git a/src/test/ui/consts/const-external-macro-const-err.rs b/src/test/ui/consts/const-external-macro-const-err.rs
new file mode 100644 (file)
index 0000000..616d24f
--- /dev/null
@@ -0,0 +1,13 @@
+// edition:2018
+// aux-build:external_macro.rs
+
+// Ensure that CONST_ERR lint errors
+// are not silenced in external macros.
+// https://github.com/rust-lang/rust/issues/65300
+
+extern crate external_macro;
+use external_macro::static_assert;
+
+fn main() {
+    static_assert!(2 + 2 == 5); //~ ERROR
+}
diff --git a/src/test/ui/consts/const-external-macro-const-err.stderr b/src/test/ui/consts/const-external-macro-const-err.stderr
new file mode 100644 (file)
index 0000000..237c4d7
--- /dev/null
@@ -0,0 +1,11 @@
+error: any use of this value will cause an error
+  --> $DIR/const-external-macro-const-err.rs:12:5
+   |
+LL |     static_assert!(2 + 2 == 5);
+   |     ^^^^^^^^^^^^^^^^^^^^^^^^^^^ index out of bounds: the len is 1 but the index is 1
+   |
+   = note: `#[deny(const_err)]` on by default
+   = note: this error originates in a macro outside of the current crate (in Nightly builds, run with -Z external-macro-backtrace for more info)
+
+error: aborting due to previous error
+
index 24d2e3ce53937775ce8b336457a0e111f2d0a2b6..087cc3c86a68d9da9657af1e97c7c47b50dd4dad 100644 (file)
@@ -3,6 +3,13 @@ error[E0005]: refutable pattern in local binding: `std::i32::MIN..=-1i32` and `1
    |
 LL |     A = { let 0 = 0; 0 },
    |               ^ patterns `std::i32::MIN..=-1i32` and `1i32..=std::i32::MAX` not covered
+   |
+   = note: `let` bindings require an "irrefutable pattern", like a `struct` or an `enum` with only one variant
+   = note: for more information, visit https://doc.rust-lang.org/book/ch18-02-refutability.html
+help: you might want to use `if let` to ignore the variant that isn't matched
+   |
+LL |     A = { if let 0 = 0 { /* */ } 0 },
+   |           ^^^^^^^^^^^^^^^^^^^^^^
 
 error: aborting due to previous error
 
index 5d59d06f7982ae204da75e934cdc8b9249081084..80d9f794bc1d5ae35165abf4b5e935102e16ec3b 100644 (file)
@@ -3,6 +3,13 @@ error[E0005]: refutable pattern in local binding: `std::i32::MIN..=-1i32` and `1
    |
 LL |     let x: [i32; { let 0 = 0; 0 }] = [];
    |                        ^ patterns `std::i32::MIN..=-1i32` and `1i32..=std::i32::MAX` not covered
+   |
+   = note: `let` bindings require an "irrefutable pattern", like a `struct` or an `enum` with only one variant
+   = note: for more information, visit https://doc.rust-lang.org/book/ch18-02-refutability.html
+help: you might want to use `if let` to ignore the variant that isn't matched
+   |
+LL |     let x: [i32; { if let 0 = 0 { /* */ } 0 }] = [];
+   |                    ^^^^^^^^^^^^^^^^^^^^^^
 
 error: aborting due to previous error
 
index 6d74c26f9f7a54eba330d7050bcca3ed70df8f00..e6b2f212bb43057873daa4c5e8b54467dee0f46d 100644 (file)
@@ -3,24 +3,52 @@ error[E0005]: refutable pattern in local binding: `std::i32::MIN..=-1i32` and `1
    |
 LL | const X: i32 = { let 0 = 0; 0 };
    |                      ^ patterns `std::i32::MIN..=-1i32` and `1i32..=std::i32::MAX` not covered
+   |
+   = note: `let` bindings require an "irrefutable pattern", like a `struct` or an `enum` with only one variant
+   = note: for more information, visit https://doc.rust-lang.org/book/ch18-02-refutability.html
+help: you might want to use `if let` to ignore the variant that isn't matched
+   |
+LL | const X: i32 = { if let 0 = 0 { /* */ } 0 };
+   |                  ^^^^^^^^^^^^^^^^^^^^^^
 
 error[E0005]: refutable pattern in local binding: `std::i32::MIN..=-1i32` and `1i32..=std::i32::MAX` not covered
   --> $DIR/const-match-check.rs:8:23
    |
 LL | static Y: i32 = { let 0 = 0; 0 };
    |                       ^ patterns `std::i32::MIN..=-1i32` and `1i32..=std::i32::MAX` not covered
+   |
+   = note: `let` bindings require an "irrefutable pattern", like a `struct` or an `enum` with only one variant
+   = note: for more information, visit https://doc.rust-lang.org/book/ch18-02-refutability.html
+help: you might want to use `if let` to ignore the variant that isn't matched
+   |
+LL | static Y: i32 = { if let 0 = 0 { /* */ } 0 };
+   |                   ^^^^^^^^^^^^^^^^^^^^^^
 
 error[E0005]: refutable pattern in local binding: `std::i32::MIN..=-1i32` and `1i32..=std::i32::MAX` not covered
   --> $DIR/const-match-check.rs:13:26
    |
 LL |     const X: i32 = { let 0 = 0; 0 };
    |                          ^ patterns `std::i32::MIN..=-1i32` and `1i32..=std::i32::MAX` not covered
+   |
+   = note: `let` bindings require an "irrefutable pattern", like a `struct` or an `enum` with only one variant
+   = note: for more information, visit https://doc.rust-lang.org/book/ch18-02-refutability.html
+help: you might want to use `if let` to ignore the variant that isn't matched
+   |
+LL |     const X: i32 = { if let 0 = 0 { /* */ } 0 };
+   |                      ^^^^^^^^^^^^^^^^^^^^^^
 
 error[E0005]: refutable pattern in local binding: `std::i32::MIN..=-1i32` and `1i32..=std::i32::MAX` not covered
   --> $DIR/const-match-check.rs:19:26
    |
 LL |     const X: i32 = { let 0 = 0; 0 };
    |                          ^ patterns `std::i32::MIN..=-1i32` and `1i32..=std::i32::MAX` not covered
+   |
+   = note: `let` bindings require an "irrefutable pattern", like a `struct` or an `enum` with only one variant
+   = note: for more information, visit https://doc.rust-lang.org/book/ch18-02-refutability.html
+help: you might want to use `if let` to ignore the variant that isn't matched
+   |
+LL |     const X: i32 = { if let 0 = 0 { /* */ } 0 };
+   |                      ^^^^^^^^^^^^^^^^^^^^^^
 
 error: aborting due to 4 previous errors
 
index 13309f978b6729733584a2ea1dd170e77a8a2338..48c4b7da942e4a8b33e35e0e8aac36d9094271dd 100644 (file)
@@ -1,3 +1,4 @@
 fn main() {
     [0; 3][3u64 as usize]; //~ ERROR the len is 3 but the index is 3
+    //~| ERROR this expression will panic at runtime
 }
index 4b3880198bf2ddcc938fd39146a06571fdb72170..8ecc6f4bc6b120c977e50b97ac3c2b30d3978b3f 100644 (file)
@@ -6,5 +6,11 @@ LL |     [0; 3][3u64 as usize];
    |
    = note: `#[deny(const_err)]` on by default
 
-error: aborting due to previous error
+error: this expression will panic at runtime
+  --> $DIR/const-prop-ice.rs:2:5
+   |
+LL |     [0; 3][3u64 as usize];
+   |     ^^^^^^^^^^^^^^^^^^^^^ index out of bounds: the len is 3 but the index is 3
+
+error: aborting due to 2 previous errors
 
diff --git a/src/test/ui/consts/issue-64506.rs b/src/test/ui/consts/issue-64506.rs
new file mode 100644 (file)
index 0000000..db3e85a
--- /dev/null
@@ -0,0 +1,20 @@
+// check-pass
+
+#[derive(Copy, Clone)]
+pub struct ChildStdin {
+    inner: AnonPipe,
+}
+
+#[derive(Copy, Clone)]
+enum AnonPipe {}
+
+const FOO: () = {
+    union Foo {
+        a: ChildStdin,
+        b: (),
+    }
+    let x = unsafe { Foo { b: () }.a };
+    let x = &x.inner;
+};
+
+fn main() {}
diff --git a/src/test/ui/consts/issue-65348.rs b/src/test/ui/consts/issue-65348.rs
new file mode 100644 (file)
index 0000000..5eafa83
--- /dev/null
@@ -0,0 +1,23 @@
+// check-pass
+
+struct Generic<T>(T);
+
+impl<T> Generic<T> {
+    const ARRAY: [T; 0] = [];
+    const NEWTYPE_ARRAY: Generic<[T; 0]> = Generic([]);
+    const ARRAY_FIELD: Generic<(i32, [T; 0])> = Generic((0, []));
+}
+
+pub const fn array<T>() ->  &'static T {
+    &Generic::<T>::ARRAY[0]
+}
+
+pub const fn newtype_array<T>() ->  &'static T {
+    &Generic::<T>::NEWTYPE_ARRAY.0[0]
+}
+
+pub const fn array_field<T>() ->  &'static T {
+    &(Generic::<T>::ARRAY_FIELD.0).1[0]
+}
+
+fn main() {}
index 0733a51233e339004872930b8c747093e96fefa0..2fb9977f4d700a87d0fed2afab0fbc2ec5fc5f06 100644 (file)
@@ -16,26 +16,30 @@ error[E0277]: the size for values of type `A` cannot be known at compilation tim
    |
 LL | pub struct Foo<A, B>(A, B);
    | --------------------------- required by `Foo`
+LL | 
+LL | impl<A, B> Foo<A, B> {
+   |      - help: consider restricting this bound: `A: std::marker::Sized`
 ...
 LL |         [5; Self::HOST_SIZE] == [6; 0]
    |             ^^^^^^^^^^^^^^^ doesn't have a size known at compile-time
    |
    = help: the trait `std::marker::Sized` is not implemented for `A`
    = note: to learn more, visit <https://doc.rust-lang.org/book/ch19-04-advanced-types.html#dynamically-sized-types-and-the-sized-trait>
-   = help: consider adding a `where A: std::marker::Sized` bound
 
 error[E0277]: the size for values of type `B` cannot be known at compilation time
   --> $DIR/too_generic_eval_ice.rs:7:13
    |
 LL | pub struct Foo<A, B>(A, B);
    | --------------------------- required by `Foo`
+LL | 
+LL | impl<A, B> Foo<A, B> {
+   |         - help: consider restricting this bound: `B: std::marker::Sized`
 ...
 LL |         [5; Self::HOST_SIZE] == [6; 0]
    |             ^^^^^^^^^^^^^^^ doesn't have a size known at compile-time
    |
    = help: the trait `std::marker::Sized` is not implemented for `B`
    = note: to learn more, visit <https://doc.rust-lang.org/book/ch19-04-advanced-types.html#dynamically-sized-types-and-the-sized-trait>
-   = help: consider adding a `where B: std::marker::Sized` bound
 
 error: aborting due to 3 previous errors
 
diff --git a/src/test/ui/defaulted-never-note.rs b/src/test/ui/defaulted-never-note.rs
deleted file mode 100644 (file)
index d3fb8a0..0000000
+++ /dev/null
@@ -1,35 +0,0 @@
-// We need to opt into the `!` feature in order to trigger the
-// requirement that this is testing.
-#![feature(never_type)]
-
-#![allow(unused)]
-
-trait Deserialize: Sized {
-    fn deserialize() -> Result<Self, String>;
-}
-
-impl Deserialize for () {
-    fn deserialize() -> Result<(), String> {
-        Ok(())
-    }
-}
-
-trait ImplementedForUnitButNotNever {}
-
-impl ImplementedForUnitButNotNever for () {}
-
-fn foo<T: ImplementedForUnitButNotNever>(_t: T) {}
-//~^ NOTE required by this bound in `foo`
-//~| NOTE
-
-fn smeg() {
-    let _x = return;
-    foo(_x);
-    //~^ ERROR the trait bound
-    //~| NOTE the trait `ImplementedForUnitButNotNever` is not implemented
-    //~| NOTE the trait is implemented for `()`
-}
-
-fn main() {
-    smeg();
-}
diff --git a/src/test/ui/defaulted-never-note.stderr b/src/test/ui/defaulted-never-note.stderr
deleted file mode 100644 (file)
index 28c9da0..0000000
+++ /dev/null
@@ -1,14 +0,0 @@
-error[E0277]: the trait bound `!: ImplementedForUnitButNotNever` is not satisfied
-  --> $DIR/defaulted-never-note.rs:27:5
-   |
-LL | fn foo<T: ImplementedForUnitButNotNever>(_t: T) {}
-   |    ---    ----------------------------- required by this bound in `foo`
-...
-LL |     foo(_x);
-   |     ^^^ the trait `ImplementedForUnitButNotNever` is not implemented for `!`
-   |
-   = note: the trait is implemented for `()`. Possibly this error has been caused by changes to Rust's type-inference algorithm (see: https://github.com/rust-lang/rust/issues/48950 for more info). Consider whether you meant to use the type `()` here instead.
-
-error: aborting due to previous error
-
-For more information about this error, try `rustc --explain E0277`.
index 15afa78b140d5eb2aa09e7f660fe2fd3304392a3..57143d68105540f9edf7576c568e963589ec6c2e 100644 (file)
@@ -54,5 +54,5 @@ LL | #[deprecated(since = "a", since = "b", note = "c")]
 
 error: aborting due to 9 previous errors
 
-Some errors have detailed explanations: E0538, E0541, E0550, E0565.
+Some errors have detailed explanations: E0538, E0541, E0550, E0551, E0565.
 For more information about an error, try `rustc --explain E0538`.
index ed4055ecdd38cfd3e59decebaf5b655ea2fa675c..ac771ac81d118097440239f29c261dcad60561f0 100644 (file)
@@ -6,4 +6,10 @@
 #[derive(Default)]
 struct X;
 
+#[deprecated(note="Do not use this")]
+#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Default, Hash)]
+pub struct Step<I> {
+    _skip: Option<I>,
+}
+
 fn main() {}
index 6902779f33d23c7a3497e96c1a5ba43d5ee7aa30..189335520543331b5aea5c52951a28cd83ed0e37 100644 (file)
@@ -1,16 +1,8 @@
-fn foo() {
+fn main() {
     (0..13).collect<Vec<i32>>();
     //~^ ERROR chained comparison
-}
-
-fn bar() {
     Vec<i32>::new();
     //~^ ERROR chained comparison
-}
-
-fn qux() {
     (0..13).collect<Vec<i32>();
     //~^ ERROR chained comparison
 }
-
-fn main() {}
index 7a08fda27e355c3d6816cfafe251d57443fcc19c..7fc7c2628c472ddbc46f76f1d1d7e28c9a5986e4 100644 (file)
@@ -2,28 +2,31 @@ error: chained comparison operators require parentheses
   --> $DIR/issue-40396.rs:2:20
    |
 LL |     (0..13).collect<Vec<i32>>();
-   |                    ^^^^^^^^
+   |                    ^^^^^
+help: use `::<...>` instead of `<...>` to specify type arguments
    |
-   = help: use `::<...>` instead of `<...>` if you meant to specify type arguments
-   = help: or use `(...)` if you meant to specify fn arguments
+LL |     (0..13).collect::<Vec<i32>>();
+   |                    ^^
 
 error: chained comparison operators require parentheses
-  --> $DIR/issue-40396.rs:7:8
+  --> $DIR/issue-40396.rs:4:8
    |
 LL |     Vec<i32>::new();
-   |        ^^^^^^^
+   |        ^^^^^
+help: use `::<...>` instead of `<...>` to specify type arguments
    |
-   = help: use `::<...>` instead of `<...>` if you meant to specify type arguments
-   = help: or use `(...)` if you meant to specify fn arguments
+LL |     Vec::<i32>::new();
+   |        ^^
 
 error: chained comparison operators require parentheses
-  --> $DIR/issue-40396.rs:12:20
+  --> $DIR/issue-40396.rs:6:20
    |
 LL |     (0..13).collect<Vec<i32>();
-   |                    ^^^^^^^^
+   |                    ^^^^^
+help: use `::<...>` instead of `<...>` to specify type arguments
    |
-   = help: use `::<...>` instead of `<...>` if you meant to specify type arguments
-   = help: or use `(...)` if you meant to specify fn arguments
+LL |     (0..13).collect::<Vec<i32>();
+   |                    ^^
 
 error: aborting due to 3 previous errors
 
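Note: the updated diagnostic above drops the two free-form help notes in favor of a structured suggestion that inserts `::` before the type arguments. A minimal standalone sketch of the code that suggestion produces (not part of this commit):

fn main() {
    // `collect<Vec<i32>>()` parses as chained comparisons; the turbofish
    // form `::<...>` is how type arguments are actually supplied:
    let v = (0..13).collect::<Vec<i32>>();
    assert_eq!(v.len(), 13);

    // The same applies to paths such as `Vec<i32>::new()`:
    let empty = Vec::<i32>::new();
    assert!(empty.is_empty());
}
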
index 84235ca4d637281b9d189b4dc794f129a1f4a27d..79bc7d2565be619f284419e92b3809b57cb7996a 100644 (file)
@@ -2,7 +2,7 @@ error: `~` cannot be used as a unary operator
   --> $DIR/issue-41679-tilde-bitwise-negation-attempt.rs:2:13
    |
 LL |     let x = ~1;
-   |             ^ help: use `!` to perform bitwise negation
+   |             ^ help: use `!` to perform bitwise not
 
 error: aborting due to previous error
 
index d02f30152d687720dcc3b776175306c546a16a8c..ef68bf52cf3bd4c7862994428a5119dd5fff78c8 100644 (file)
@@ -5,10 +5,10 @@ LL |     let x = Option(1);
    |             ^^^^^^
 help: try using one of the enum's variants
    |
-LL |     let x = std::prelude::v1::Option::None(1);
-   |             ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-LL |     let x = std::prelude::v1::Option::Some(1);
-   |             ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+LL |     let x = std::option::Option::None(1);
+   |             ^^^^^^^^^^^^^^^^^^^^^^^^^
+LL |     let x = std::option::Option::Some(1);
+   |             ^^^^^^^^^^^^^^^^^^^^^^^^^
 
 error[E0532]: expected tuple struct/variant, found enum `Option`
   --> $DIR/issue-43871-enum-instead-of-variant.rs:21:12
@@ -17,10 +17,10 @@ LL |     if let Option(_) = x {
    |            ^^^^^^
 help: try using one of the enum's variants
    |
-LL |     if let std::prelude::v1::Option::None(_) = x {
-   |            ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-LL |     if let std::prelude::v1::Option::Some(_) = x {
-   |            ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+LL |     if let std::option::Option::None(_) = x {
+   |            ^^^^^^^^^^^^^^^^^^^^^^^^^
+LL |     if let std::option::Option::Some(_) = x {
+   |            ^^^^^^^^^^^^^^^^^^^^^^^^^
 
 error[E0532]: expected tuple struct/variant, found enum `Example`
   --> $DIR/issue-43871-enum-instead-of-variant.rs:27:12
@@ -47,15 +47,15 @@ LL |     let z = ManyVariants();
    |             ^^^^^^^^^^^^
 help: try using one of the enum's variants
    |
-LL |     let z = ManyVariants::Eight();
+LL |     let z = ManyVariants::One();
+   |             ^^^^^^^^^^^^^^^^^
+LL |     let z = ManyVariants::Two();
+   |             ^^^^^^^^^^^^^^^^^
+LL |     let z = ManyVariants::Three();
    |             ^^^^^^^^^^^^^^^^^^^
-LL |     let z = ManyVariants::Five();
-   |             ^^^^^^^^^^^^^^^^^^
 LL |     let z = ManyVariants::Four();
    |             ^^^^^^^^^^^^^^^^^^
-LL |     let z = ManyVariants::Nine();
-   |             ^^^^^^^^^^^^^^^^^^
-and 6 other candidates
+     and 6 other candidates
 
 error: aborting due to 5 previous errors
 
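Note: the shortened paths above come from the suggestion machinery now printing `std::option::Option` rather than the prelude re-export. A minimal sketch of what the suggested fix looks like once applied (illustrative only):

fn main() {
    // `Option` is an enum, not a tuple struct, so it cannot be called
    // directly; one of its variants is constructed instead:
    let x = std::option::Option::Some(1);

    if let std::option::Option::Some(n) = x {
        println!("{}", n);
    }
}
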
index 8d3a86df0233d656ca7347360b69c445582ba5b2..cb350a1faeed2a6af11b41d18e95c0c660ce9d85 100644 (file)
@@ -13,7 +13,7 @@ LL | fn setup() -> Determine { Set }
    |               ^^^^^^^^^
 LL | fn setup() -> PutDown { Set }
    |               ^^^^^^^
-and 3 other candidates
+     and 3 other candidates
 
 error[E0425]: cannot find value `Set` in this scope
   --> $DIR/issue-56028-there-is-an-enum-variant.rs:9:21
@@ -30,7 +30,7 @@ LL | use Determine::Set;
    |
 LL | use PutDown::Set;
    |
-and 3 other candidates
+     and 3 other candidates
 
 error: aborting due to 2 previous errors
 
diff --git a/src/test/ui/dispatch_from_dyn_zst.rs b/src/test/ui/dispatch_from_dyn_zst.rs
deleted file mode 100644 (file)
index 764f58c..0000000
+++ /dev/null
@@ -1,51 +0,0 @@
-// run-pass
-
-#![feature(unsize, dispatch_from_dyn, never_type)]
-
-#![allow(dead_code)]
-
-use std::{
-    ops::DispatchFromDyn,
-    marker::{Unsize, PhantomData},
-};
-
-struct Zst;
-struct NestedZst(PhantomData<()>, Zst);
-
-
-struct WithUnit<T: ?Sized>(Box<T>, ());
-impl<T: ?Sized, U: ?Sized> DispatchFromDyn<WithUnit<U>> for WithUnit<T>
-    where T: Unsize<U> {}
-
-struct WithPhantom<T: ?Sized>(Box<T>, PhantomData<()>);
-impl<T: ?Sized, U: ?Sized> DispatchFromDyn<WithPhantom<U>> for WithPhantom<T>
-    where T: Unsize<U> {}
-
-struct WithNever<T: ?Sized>(Box<T>, !);
-impl<T: ?Sized, U: ?Sized> DispatchFromDyn<WithNever<U>> for WithNever<T>
-    where T: Unsize<U> {}
-
-struct WithZst<T: ?Sized>(Box<T>, Zst);
-impl<T: ?Sized, U: ?Sized> DispatchFromDyn<WithZst<U>> for WithZst<T>
-    where T: Unsize<U> {}
-
-struct WithNestedZst<T: ?Sized>(Box<T>, NestedZst);
-impl<T: ?Sized, U: ?Sized> DispatchFromDyn<WithNestedZst<U>> for WithNestedZst<T>
-    where T: Unsize<U> {}
-
-
-struct Generic<T: ?Sized, A>(Box<T>, A);
-impl<T: ?Sized, U: ?Sized> DispatchFromDyn<Generic<U, ()>> for Generic<T, ()>
-    where T: Unsize<U> {}
-impl<T: ?Sized, U: ?Sized> DispatchFromDyn<Generic<U, PhantomData<()>>>
-    for Generic<T, PhantomData<()>>
-    where T: Unsize<U> {}
-impl<T: ?Sized, U: ?Sized> DispatchFromDyn<Generic<U, !>> for Generic<T, !>
-    where T: Unsize<U> {}
-impl<T: ?Sized, U: ?Sized> DispatchFromDyn<Generic<U, Zst>> for Generic<T, Zst>
-    where T: Unsize<U> {}
-impl<T: ?Sized, U: ?Sized> DispatchFromDyn<Generic<U, NestedZst>> for Generic<T, NestedZst>
-    where T: Unsize<U> {}
-
-
-fn main() {}
diff --git a/src/test/ui/diverging-fallback-control-flow.rs b/src/test/ui/diverging-fallback-control-flow.rs
deleted file mode 100644 (file)
index 0f0f787..0000000
+++ /dev/null
@@ -1,101 +0,0 @@
-// run-pass
-
-#![allow(dead_code)]
-#![allow(unused_assignments)]
-#![allow(unused_variables)]
-#![allow(unreachable_code)]
-// Test various cases where we permit an unconstrained variable
-// to fallback based on control-flow.
-//
-// These represent current behavior, but are pretty dubious.  I would
-// like to revisit these and potentially change them. --nmatsakis
-
-#![feature(never_type)]
-
-trait BadDefault {
-    fn default() -> Self;
-}
-
-impl BadDefault for u32 {
-    fn default() -> Self {
-        0
-    }
-}
-
-impl BadDefault for ! {
-    fn default() -> ! {
-        panic!()
-    }
-}
-
-fn assignment() {
-    let x;
-
-    if true {
-        x = BadDefault::default();
-    } else {
-        x = return;
-    }
-}
-
-fn assignment_rev() {
-    let x;
-
-    if true {
-        x = return;
-    } else {
-        x = BadDefault::default();
-    }
-}
-
-fn if_then_else() {
-    let _x = if true {
-        BadDefault::default()
-    } else {
-        return;
-    };
-}
-
-fn if_then_else_rev() {
-    let _x = if true {
-        return;
-    } else {
-        BadDefault::default()
-    };
-}
-
-fn match_arm() {
-    let _x = match Ok(BadDefault::default()) {
-        Ok(v) => v,
-        Err(()) => return,
-    };
-}
-
-fn match_arm_rev() {
-    let _x = match Ok(BadDefault::default()) {
-        Err(()) => return,
-        Ok(v) => v,
-    };
-}
-
-fn loop_break() {
-    let _x = loop {
-        if false {
-            break return;
-        } else {
-            break BadDefault::default();
-        }
-    };
-}
-
-fn loop_break_rev() {
-    let _x = loop {
-        if false {
-            break return;
-        } else {
-            break BadDefault::default();
-        }
-    };
-}
-
-fn main() { }
index 55ac625fc985b723c516371ed2aff701db759fc4..40db575eabd38c5c278385ddb7877e446521e0c9 100644 (file)
@@ -1,23 +1,25 @@
 error[E0277]: the size for values of type `T` cannot be known at compilation time
   --> $DIR/dst-object-from-unsized-type.rs:8:23
    |
+LL | fn test1<T: ?Sized + Foo>(t: &T) {
+   |          -- help: consider further restricting this bound: `T: std::marker::Sized +`
 LL |     let u: &dyn Foo = t;
    |                       ^ doesn't have a size known at compile-time
    |
    = help: the trait `std::marker::Sized` is not implemented for `T`
    = note: to learn more, visit <https://doc.rust-lang.org/book/ch19-04-advanced-types.html#dynamically-sized-types-and-the-sized-trait>
-   = help: consider adding a `where T: std::marker::Sized` bound
    = note: required for the cast to the object type `dyn Foo`
 
 error[E0277]: the size for values of type `T` cannot be known at compilation time
   --> $DIR/dst-object-from-unsized-type.rs:13:23
    |
+LL | fn test2<T: ?Sized + Foo>(t: &T) {
+   |          -- help: consider further restricting this bound: `T: std::marker::Sized +`
 LL |     let v: &dyn Foo = t as &dyn Foo;
    |                       ^ doesn't have a size known at compile-time
    |
    = help: the trait `std::marker::Sized` is not implemented for `T`
    = note: to learn more, visit <https://doc.rust-lang.org/book/ch19-04-advanced-types.html#dynamically-sized-types-and-the-sized-trait>
-   = help: consider adding a `where T: std::marker::Sized` bound
    = note: required for the cast to the object type `dyn Foo`
 
 error[E0277]: the size for values of type `str` cannot be known at compilation time
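Note: the new label points at the `?Sized` bound itself instead of proposing a separate `where` clause. A minimal sketch of a signature that satisfies the suggestion, reusing the `Foo`/`test1` names visible in the diff; the trait definition is assumed since it is not shown in these hunks:

trait Foo {}

// Restricting `T` to be `Sized` (the default, once `?Sized` is dropped)
// makes the unsizing cast to `&dyn Foo` legal:
fn test1<T: Foo>(t: &T) {
    let _u: &dyn Foo = t;
}

fn main() {}
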
index 7d59d553d88fd98d2463a1ab2375d7200bc0c549..d865b59f0b9459a5c9ce3f164df3b8f5fcd9d671 100644 (file)
@@ -11,6 +11,13 @@ LL | | }
 ...
 LL |       let Helper::U(u) = Helper::T(t, []);
    |           ^^^^^^^^^^^^ pattern `T(_, _)` not covered
+   |
+   = note: `let` bindings require an "irrefutable pattern", like a `struct` or an `enum` with only one variant
+   = note: for more information, visit https://doc.rust-lang.org/book/ch18-02-refutability.html
+help: you might want to use `if let` to ignore the variant that isn't matched
+   |
+LL |     if let Helper::U(u) = Helper::T(t, []) { /* */ }
+   |
 
 error[E0381]: use of possibly-uninitialized variable: `u`
   --> $DIR/empty-never-array.rs:12:5
index 65c45d9bad0dcee9b0052ab31b724e53fddb3df5..7e8453c61f62b65858bd4f7f80008c969dbb6d74 100644 (file)
@@ -9,3 +9,4 @@ LL | fn foo(x: Foo::Bar) {}
 
 error: aborting due to previous error
 
+For more information about this error, try `rustc --explain E0573`.
index 56a4bcffc81a2e6dedfefb7309c13289fe0074f0..577c6e886d52331e5c8b3975b22006d0c67fbe2e 100644 (file)
@@ -3,6 +3,13 @@ error[E0005]: refutable pattern in local binding: `None` not covered
    |
 LL |     let Some(y) = x;
    |         ^^^^^^^ pattern `None` not covered
+   |
+   = note: `let` bindings require an "irrefutable pattern", like a `struct` or an `enum` with only one variant
+   = note: for more information, visit https://doc.rust-lang.org/book/ch18-02-refutability.html
+help: you might want to use `if let` to ignore the variant that isn't matched
+   |
+LL |     if let Some(y) = x { /* */ }
+   |     ^^^^^^^^^^^^^^^^^^^^^^^^^^^^
 
 error: aborting due to previous error
 
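Note: the added notes and the `if let` help describe the standard way around a refutable `let` binding. A minimal sketch of the suggested rewrite (the binding is illustrative):

fn main() {
    let x: Option<i32> = Some(5);

    // A plain `let Some(y) = x;` is refutable and rejected; `if let`
    // simply skips the body when the pattern does not match:
    if let Some(y) = x {
        println!("{}", y);
    }
}
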
index ce631ca4bf786ec3e2ca69a41ff1b0a5c047d5e4..754006bc217ee6f8f659680d817e2b3e5c0e9a87 100644 (file)
@@ -34,7 +34,7 @@ LL |     let f = Foo();
    |             ^^^
    |             |
    |             did you mean `Foo { /* fields */ }`?
-   |             help: a function with a similar name exists: `foo`
+   |             help: a function with a similar name exists (notice the capitalization): `foo`
 
 error[E0423]: expected value, found struct `T`
   --> $DIR/E0423.rs:14:8
index d67a2660dac38ccbadf43a8bd5437fc85c753755..567d1b3cc75f4c44447645e60e8c5fb3ee59240d 100644 (file)
@@ -1,14 +1,20 @@
 error[E0424]: expected value, found module `self`
   --> $DIR/E0424.rs:7:9
    |
-LL |         self.bar();
-   |         ^^^^ `self` value is a keyword only available in methods with `self` parameter
+LL | /     fn foo() {
+LL | |         self.bar();
+   | |         ^^^^ `self` value is a keyword only available in methods with a `self` parameter
+LL | |     }
+   | |_____- this function doesn't have a `self` parameter
 
 error[E0424]: expected unit struct/variant or constant, found module `self`
   --> $DIR/E0424.rs:12:9
    |
-LL |     let self = "self";
-   |         ^^^^ `self` value is a keyword and may not be bound to variables or shadowed
+LL | / fn main () {
+LL | |     let self = "self";
+   | |         ^^^^ `self` value is a keyword and may not be bound to variables or shadowed
+LL | | }
+   | |_- this function doesn't have a `self` parameter
 
 error: aborting due to 2 previous errors
 
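Note: the expanded rendering outlines the enclosing function and labels it as lacking a `self` parameter. A minimal sketch of the fix that note points toward, reusing `foo`/`bar` from the test; the surrounding `struct Foo` is assumed:

struct Foo;

impl Foo {
    fn bar(&self) {}

    // `self.bar()` is only meaningful once the function takes `self`:
    fn foo(&self) {
        self.bar();
    }
}

fn main() {
    Foo.foo();
}
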
index 587125fdc336e4110ec335bbff433b43aa88890a..1380840e0db2d7da12c2d32565823c10118acd79 100644 (file)
@@ -4,12 +4,12 @@ error[E0478]: lifetime bound not satisfied
 LL |     child: Box<dyn Wedding<'kiss> + 'SnowWhite>,
    |     ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
    |
-note: lifetime parameter instantiated with the lifetime 'SnowWhite as defined on the struct at 3:22
+note: lifetime parameter instantiated with the lifetime `'SnowWhite` as defined on the struct at 3:22
   --> $DIR/E0478.rs:3:22
    |
 LL | struct Prince<'kiss, 'SnowWhite> {
    |                      ^^^^^^^^^^
-note: but lifetime parameter must outlive the lifetime 'kiss as defined on the struct at 3:15
+note: but lifetime parameter must outlive the lifetime `'kiss` as defined on the struct at 3:15
   --> $DIR/E0478.rs:3:15
    |
 LL | struct Prince<'kiss, 'SnowWhite> {
index f50c64780118b4235438b15e1863dd87caa6b441..feca7f10b706b22890178427631abd62ee72cbff 100644 (file)
@@ -27,3 +27,4 @@ LL |     invoke(&x, |a, b| if a > b { a } else { b });
 
 error: aborting due to previous error
 
+For more information about this error, try `rustc --explain E0495`.
index 2570bc8a56078be796593a113e2d11ef9123ba81..59f749198971b5a1931817cdc9a91e48380d1476 100644 (file)
@@ -1,7 +1,7 @@
 #![feature(precise_pointer_size_matching)]
 #![feature(exclusive_range_pattern)]
-
 #![deny(unreachable_patterns)]
+#![deny(overlapping_patterns)]
 
 use std::{char, u8, u16, u32, u64, u128, i8, i16, i32, i64, i128};
 
@@ -41,7 +41,8 @@ fn main() {
     match x { //~ ERROR non-exhaustive patterns
         -7 => {}
         -5..=120 => {}
-        -2..=20 => {} //~ ERROR unreachable pattern
+        -2..=20 => {}
+        //~^ ERROR unreachable pattern
         125 => {}
     }
 
@@ -135,9 +136,9 @@ fn main() {
         (125 .. 128, false) => {}
     }
 
-    match 0u8 { // ok
+    match 0u8 {
         0 .. 2 => {}
-        1 ..= 2 => {}
+        1 ..= 2 => {} //~ ERROR multiple patterns covering the same range
         _ => {}
     }
 
index 6c4b7b0cc03529b4764e6514629d8016c875d490..7a3a36a820c65cfc1540aae208b095030f99d11c 100644 (file)
@@ -5,7 +5,7 @@ LL |         200 => {}
    |         ^^^
    |
 note: lint level defined here
-  --> $DIR/exhaustive_integer_patterns.rs:4:9
+  --> $DIR/exhaustive_integer_patterns.rs:3:9
    |
 LL | #![deny(unreachable_patterns)]
    |         ^^^^^^^^^^^^^^^^^^^^
@@ -41,7 +41,7 @@ LL |     match x {
    = help: ensure that all possible cases are being handled, possibly by adding wildcards or more match arms
 
 error[E0004]: non-exhaustive patterns: `std::i8::MIN` not covered
-  --> $DIR/exhaustive_integer_patterns.rs:82:11
+  --> $DIR/exhaustive_integer_patterns.rs:83:11
    |
 LL |     match 0i8 {
    |           ^^^ pattern `std::i8::MIN` not covered
@@ -49,7 +49,7 @@ LL |     match 0i8 {
    = help: ensure that all possible cases are being handled, possibly by adding wildcards or more match arms
 
 error[E0004]: non-exhaustive patterns: `0i16` not covered
-  --> $DIR/exhaustive_integer_patterns.rs:90:11
+  --> $DIR/exhaustive_integer_patterns.rs:91:11
    |
 LL |     match 0i16 {
    |           ^^^^ pattern `0i16` not covered
@@ -57,7 +57,7 @@ LL |     match 0i16 {
    = help: ensure that all possible cases are being handled, possibly by adding wildcards or more match arms
 
 error[E0004]: non-exhaustive patterns: `128u8..=std::u8::MAX` not covered
-  --> $DIR/exhaustive_integer_patterns.rs:108:11
+  --> $DIR/exhaustive_integer_patterns.rs:109:11
    |
 LL |     match 0u8 {
    |           ^^^ pattern `128u8..=std::u8::MAX` not covered
@@ -65,7 +65,7 @@ LL |     match 0u8 {
    = help: ensure that all possible cases are being handled, possibly by adding wildcards or more match arms
 
 error[E0004]: non-exhaustive patterns: `(0u8, Some(_))` and `(2u8..=std::u8::MAX, Some(_))` not covered
-  --> $DIR/exhaustive_integer_patterns.rs:120:11
+  --> $DIR/exhaustive_integer_patterns.rs:121:11
    |
 LL |     match (0u8, Some(())) {
    |           ^^^^^^^^^^^^^^^ patterns `(0u8, Some(_))` and `(2u8..=std::u8::MAX, Some(_))` not covered
@@ -73,15 +73,29 @@ LL |     match (0u8, Some(())) {
    = help: ensure that all possible cases are being handled, possibly by adding wildcards or more match arms
 
 error[E0004]: non-exhaustive patterns: `(126u8..=127u8, false)` not covered
-  --> $DIR/exhaustive_integer_patterns.rs:125:11
+  --> $DIR/exhaustive_integer_patterns.rs:126:11
    |
 LL |     match (0u8, true) {
    |           ^^^^^^^^^^^ pattern `(126u8..=127u8, false)` not covered
    |
    = help: ensure that all possible cases are being handled, possibly by adding wildcards or more match arms
 
+error: multiple patterns covering the same range
+  --> $DIR/exhaustive_integer_patterns.rs:141:9
+   |
+LL |         0 .. 2 => {}
+   |         ------ this range overlaps on `1u8`
+LL |         1 ..= 2 => {}
+   |         ^^^^^^^ overlapping patterns
+   |
+note: lint level defined here
+  --> $DIR/exhaustive_integer_patterns.rs:4:9
+   |
+LL | #![deny(overlapping_patterns)]
+   |         ^^^^^^^^^^^^^^^^^^^^
+
 error[E0004]: non-exhaustive patterns: `std::u128::MAX` not covered
-  --> $DIR/exhaustive_integer_patterns.rs:145:11
+  --> $DIR/exhaustive_integer_patterns.rs:146:11
    |
 LL |     match 0u128 {
    |           ^^^^^ pattern `std::u128::MAX` not covered
@@ -89,7 +103,7 @@ LL |     match 0u128 {
    = help: ensure that all possible cases are being handled, possibly by adding wildcards or more match arms
 
 error[E0004]: non-exhaustive patterns: `5u128..=std::u128::MAX` not covered
-  --> $DIR/exhaustive_integer_patterns.rs:149:11
+  --> $DIR/exhaustive_integer_patterns.rs:150:11
    |
 LL |     match 0u128 {
    |           ^^^^^ pattern `5u128..=std::u128::MAX` not covered
@@ -97,13 +111,13 @@ LL |     match 0u128 {
    = help: ensure that all possible cases are being handled, possibly by adding wildcards or more match arms
 
 error[E0004]: non-exhaustive patterns: `0u128..=3u128` not covered
-  --> $DIR/exhaustive_integer_patterns.rs:153:11
+  --> $DIR/exhaustive_integer_patterns.rs:154:11
    |
 LL |     match 0u128 {
    |           ^^^^^ pattern `0u128..=3u128` not covered
    |
    = help: ensure that all possible cases are being handled, possibly by adding wildcards or more match arms
 
-error: aborting due to 13 previous errors
+error: aborting due to 14 previous errors
 
 For more information about this error, try `rustc --explain E0004`.
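Note: the new `overlapping_patterns` lint (denied in the updated test) fires because `0 .. 2` and `1 ..= 2` both cover `1u8`. A minimal sketch of ranges that keep the lint quiet, using only stable inclusive range patterns:

fn main() {
    let x = 1u8;
    // Adjacent but non-overlapping ranges cover the space without
    // triggering the overlap lint:
    match x {
        0..=1 => println!("low"),
        2..=9 => println!("mid"),
        _ => println!("high"),
    }
}
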
index 4bf2d573d4f967580eb8aa07333eb077e963b8ff..cbd6422e5df7653801413bd5708f9538c122468e 100644 (file)
@@ -6,12 +6,12 @@ LL |            Foo<'b,'a>
    |
    = note: expected type `Foo<'a, 'b>`
               found type `Foo<'b, 'a>`
-note: the lifetime 'b as defined on the impl at 6:9...
+note: the lifetime `'b` as defined on the impl at 6:9...
   --> $DIR/explicit-self-lifetime-mismatch.rs:6:9
    |
 LL | impl<'a,'b> Foo<'a,'b> {
    |         ^^
-note: ...does not necessarily outlive the lifetime 'a as defined on the impl at 6:6
+note: ...does not necessarily outlive the lifetime `'a` as defined on the impl at 6:6
   --> $DIR/explicit-self-lifetime-mismatch.rs:6:6
    |
 LL | impl<'a,'b> Foo<'a,'b> {
@@ -25,12 +25,12 @@ LL |            Foo<'b,'a>
    |
    = note: expected type `Foo<'a, 'b>`
               found type `Foo<'b, 'a>`
-note: the lifetime 'a as defined on the impl at 6:6...
+note: the lifetime `'a` as defined on the impl at 6:6...
   --> $DIR/explicit-self-lifetime-mismatch.rs:6:6
    |
 LL | impl<'a,'b> Foo<'a,'b> {
    |      ^^
-note: ...does not necessarily outlive the lifetime 'b as defined on the impl at 6:9
+note: ...does not necessarily outlive the lifetime `'b` as defined on the impl at 6:9
   --> $DIR/explicit-self-lifetime-mismatch.rs:6:9
    |
 LL | impl<'a,'b> Foo<'a,'b> {
index 89f615f53ddd333fce96278bb553fcf2f2a75d62..57a9350c089a8e498bc63a3c766c9877cd9df978 100644 (file)
@@ -2,7 +2,7 @@ error: variable `X` should have a snake case name
   --> $DIR/expr_attr_paren_order.rs:19:17
    |
 LL |             let X = 0;
-   |                 ^ help: convert the identifier to snake case: `x`
+   |                 ^ help: convert the identifier to snake case (notice the capitalization): `x`
    |
 note: lint level defined here
   --> $DIR/expr_attr_paren_order.rs:17:17
diff --git a/src/test/ui/extern/issue-64655-allow-unwind-when-calling-panic-directly.rs b/src/test/ui/extern/issue-64655-allow-unwind-when-calling-panic-directly.rs
new file mode 100644 (file)
index 0000000..ff10d41
--- /dev/null
@@ -0,0 +1,65 @@
+// run-pass
+// ignore-wasm32-bare compiled with panic=abort by default
+// ignore-emscripten no threads support
+
+// rust-lang/rust#64655: with panic=unwind, a panic from a subroutine
+// should still run destructors as it unwinds the stack. However,
+// bugs with how the nounwind LLVM attribute was applied led to this
+// simple case being mishandled *if* you had fat LTO turned on.
+
+// Unlike issue-64655-extern-rust-must-allow-unwind.rs, the issue
+// embodied in this test cropped up regardless of optimization level.
+// Therefore it seemed worthy of being enshrined as a dedicated unit
+// test.
+
+// LTO settings cannot be combined with -C prefer-dynamic
+// no-prefer-dynamic
+
+// The revisions just enumerate lto settings (the opt-level appeared irrelevant in practice)
+
+// revisions: no thin fat
+//[no]compile-flags: -C lto=no
+//[thin]compile-flags: -C lto=thin
+//[fat]compile-flags: -C lto=fat
+
+#![feature(core_panic)]
+
+// (For some reason, reproducing the LTO issue requires pulling in std
+// explicitly this way.)
+#![no_std]
+extern crate std;
+
+fn main() {
+    use std::sync::atomic::{AtomicUsize, Ordering};
+    use std::boxed::Box;
+
+    static SHARED: AtomicUsize = AtomicUsize::new(0);
+
+    assert_eq!(SHARED.fetch_add(0, Ordering::SeqCst), 0);
+
+    let old_hook = std::panic::take_hook();
+
+    std::panic::set_hook(Box::new(|_| { } )); // no-op on panic.
+
+    let handle = std::thread::spawn(|| {
+        struct Droppable;
+        impl Drop for Droppable {
+            fn drop(&mut self) {
+                SHARED.fetch_add(1, Ordering::SeqCst);
+            }
+        }
+
+        let _guard = Droppable;
+        let s = "issue-64655-allow-unwind-when-calling-panic-directly.rs";
+        core::panicking::panic(&("???", s, 17, 4));
+    });
+
+    let wait = handle.join();
+
+    // Reinstate handler to ease observation of assertion failures.
+    std::panic::set_hook(old_hook);
+
+    assert!(wait.is_err());
+
+    assert_eq!(SHARED.fetch_add(0, Ordering::SeqCst), 1);
+}
diff --git a/src/test/ui/extern/issue-64655-extern-rust-must-allow-unwind.rs b/src/test/ui/extern/issue-64655-extern-rust-must-allow-unwind.rs
new file mode 100644 (file)
index 0000000..bc15fcb
--- /dev/null
@@ -0,0 +1,83 @@
+// run-pass
+// ignore-wasm32-bare compiled with panic=abort by default
+// ignore-emscripten no threads support
+
+// rust-lang/rust#64655: with panic=unwind, a panic from a subroutine
+// should still run destructors as it unwinds the stack. However,
+// bugs with how the nounwind LLVM attribute was applied led to this
+// simple case being mishandled *if* you had optimization *and* fat
+// LTO turned on.
+
+// This test is the closest thing to a "regression test" we can do
+// without actually spawning subprocesses and comparing stderr
+// results.
+//
+// This test takes the code from the above issue and adapts it to
+// better fit our test infrastructure:
+//
+// * Instead of relying on `println!` to observe whether the destructor
+//   is run, we instead run the code in a spawned thread and
+//   communicate the destructor's operation via a synchronous atomic
+//   in static memory.
+//
+// * To keep the output from confusing a casual user, we override the
+//   panic hook to be a no-op (rather than printing a message to
+//   stderr).
+//
+// (pnkfelix has confirmed by hand that these additions do not mask
+// the underlying bug.)
+
+// LTO settings cannot be combined with -C prefer-dynamic
+// no-prefer-dynamic
+
+// The revisions combine each lto setting with each optimization
+// setting; pnkfelix observed three differing behaviors at opt-levels
+// 0/1/2+3 for this test, so it seems prudent to be thorough.
+
+// revisions: no0 no1 no2 no3 thin0 thin1 thin2 thin3 fat0 fat1 fat2  fat3
+
+//[no0]compile-flags: -C opt-level=0 -C lto=no
+//[no1]compile-flags: -C opt-level=1 -C lto=no
+//[no2]compile-flags: -C opt-level=2 -C lto=no
+//[no3]compile-flags: -C opt-level=3 -C lto=no
+//[thin0]compile-flags: -C opt-level=0 -C lto=thin
+//[thin1]compile-flags: -C opt-level=1 -C lto=thin
+//[thin2]compile-flags: -C opt-level=2 -C lto=thin
+//[thin3]compile-flags: -C opt-level=3 -C lto=thin
+//[fat0]compile-flags: -C opt-level=0 -C lto=fat
+//[fat1]compile-flags: -C opt-level=1 -C lto=fat
+//[fat2]compile-flags: -C opt-level=2 -C lto=fat
+//[fat3]compile-flags: -C opt-level=3 -C lto=fat
+
+fn main() {
+    use std::sync::atomic::{AtomicUsize, Ordering};
+
+    static SHARED: AtomicUsize = AtomicUsize::new(0);
+
+    assert_eq!(SHARED.fetch_add(0, Ordering::SeqCst), 0);
+
+    let old_hook = std::panic::take_hook();
+
+    std::panic::set_hook(Box::new(|_| { } )); // no-op on panic.
+
+    let handle = std::thread::spawn(|| {
+        struct Droppable;
+        impl Drop for Droppable {
+            fn drop(&mut self) {
+                SHARED.fetch_add(1, Ordering::SeqCst);
+            }
+        }
+
+        let _guard = Droppable;
+        None::<()>.expect("???");
+    });
+
+    let wait = handle.join();
+
+    // reinstate handler to ease observation of assertion failures.
+    std::panic::set_hook(old_hook);
+
+    assert!(wait.is_err());
+
+    assert_eq!(SHARED.fetch_add(0, Ordering::SeqCst), 1);
+}
index 62a6d97dfe83b04b74e8d7def179ead5698ee881..e78d9840abf7d659f7c7289c0b8f3826bd749650 100644 (file)
@@ -186,43 +186,43 @@ LL |     mod inner { #![macro_escape] }
    |
    = help: consider an outer attribute, `#[macro_use]` mod ...
 
-warning: use of deprecated attribute `plugin_registrar`: compiler plugins are deprecated. See https://github.com/rust-lang/rust/issues/29597
+warning: use of deprecated attribute `plugin_registrar`: compiler plugins are deprecated. See https://github.com/rust-lang/rust/pull/64675
   --> $DIR/issue-43106-gating-of-builtin-attrs.rs:219:17
    |
 LL |     mod inner { #![plugin_registrar] }
-   |                 ^^^^^^^^^^^^^^^^^^^^ help: remove this attribute
+   |                 ^^^^^^^^^^^^^^^^^^^^ help: may be removed in a future compiler version
    |
    = note: `#[warn(deprecated)]` on by default
 
-warning: use of deprecated attribute `plugin_registrar`: compiler plugins are deprecated. See https://github.com/rust-lang/rust/issues/29597
+warning: use of deprecated attribute `plugin_registrar`: compiler plugins are deprecated. See https://github.com/rust-lang/rust/pull/64675
   --> $DIR/issue-43106-gating-of-builtin-attrs.rs:224:5
    |
 LL |     #[plugin_registrar] struct S;
-   |     ^^^^^^^^^^^^^^^^^^^ help: remove this attribute
+   |     ^^^^^^^^^^^^^^^^^^^ help: may be removed in a future compiler version
 
-warning: use of deprecated attribute `plugin_registrar`: compiler plugins are deprecated. See https://github.com/rust-lang/rust/issues/29597
+warning: use of deprecated attribute `plugin_registrar`: compiler plugins are deprecated. See https://github.com/rust-lang/rust/pull/64675
   --> $DIR/issue-43106-gating-of-builtin-attrs.rs:227:5
    |
 LL |     #[plugin_registrar] type T = S;
-   |     ^^^^^^^^^^^^^^^^^^^ help: remove this attribute
+   |     ^^^^^^^^^^^^^^^^^^^ help: may be removed in a future compiler version
 
-warning: use of deprecated attribute `plugin_registrar`: compiler plugins are deprecated. See https://github.com/rust-lang/rust/issues/29597
+warning: use of deprecated attribute `plugin_registrar`: compiler plugins are deprecated. See https://github.com/rust-lang/rust/pull/64675
   --> $DIR/issue-43106-gating-of-builtin-attrs.rs:230:5
    |
 LL |     #[plugin_registrar] impl S { }
-   |     ^^^^^^^^^^^^^^^^^^^ help: remove this attribute
+   |     ^^^^^^^^^^^^^^^^^^^ help: may be removed in a future compiler version
 
-warning: use of deprecated attribute `plugin_registrar`: compiler plugins are deprecated. See https://github.com/rust-lang/rust/issues/29597
+warning: use of deprecated attribute `plugin_registrar`: compiler plugins are deprecated. See https://github.com/rust-lang/rust/pull/64675
   --> $DIR/issue-43106-gating-of-builtin-attrs.rs:216:1
    |
 LL | #[plugin_registrar]
-   | ^^^^^^^^^^^^^^^^^^^ help: remove this attribute
+   | ^^^^^^^^^^^^^^^^^^^ help: may be removed in a future compiler version
 
-warning: use of deprecated attribute `plugin_registrar`: compiler plugins are deprecated. See https://github.com/rust-lang/rust/issues/29597
+warning: use of deprecated attribute `plugin_registrar`: compiler plugins are deprecated. See https://github.com/rust-lang/rust/pull/64675
   --> $DIR/issue-43106-gating-of-builtin-attrs.rs:40:1
    |
 LL | #![plugin_registrar]
-   | ^^^^^^^^^^^^^^^^^^^^ help: remove this attribute
+   | ^^^^^^^^^^^^^^^^^^^^ help: may be removed in a future compiler version
 
 warning: use of deprecated attribute `crate_id`: no longer used.
   --> $DIR/issue-43106-gating-of-builtin-attrs.rs:90:1
index afe4dc7d54c9b03339dcb818d3f898bdc34d2328..8de390becbe7d7d87d81adf498ab3d9ad6dd5c9a 100644 (file)
@@ -1,5 +1,9 @@
+// edition:2018
+
 #[bench] //~ ERROR use of unstable library feature 'test'
          //~| WARN this was previously accepted
 fn bench() {}
 
+use bench as _; //~ ERROR use of unstable library feature 'test'
+                //~| WARN this was previously accepted
 fn main() {}
index b9e24e931d42b9e00a29129098ee445ba19df5ee..168ac925724376e042251592733718bf7158161f 100644 (file)
@@ -1,5 +1,5 @@
 error: use of unstable library feature 'test': `bench` is a part of custom test frameworks which are unstable
-  --> $DIR/bench.rs:1:3
+  --> $DIR/bench.rs:3:3
    |
 LL | #[bench]
    |   ^^^^^
@@ -8,5 +8,14 @@ LL | #[bench]
    = warning: this was previously accepted by the compiler but is being phased out; it will become a hard error in a future release!
    = note: for more information, see issue #64266 <https://github.com/rust-lang/rust/issues/64266>
 
-error: aborting due to previous error
+error: use of unstable library feature 'test': `bench` is a part of custom test frameworks which are unstable
+  --> $DIR/bench.rs:7:5
+   |
+LL | use bench as _;
+   |     ^^^^^
+   |
+   = warning: this was previously accepted by the compiler but is being phased out; it will become a hard error in a future release!
+   = note: for more information, see issue #64266 <https://github.com/rust-lang/rust/issues/64266>
+
+error: aborting due to 2 previous errors
 
diff --git a/src/test/ui/feature-gates/feature-gate-const_generics-ptr.rs b/src/test/ui/feature-gates/feature-gate-const_generics-ptr.rs
new file mode 100644 (file)
index 0000000..1ab11ce
--- /dev/null
@@ -0,0 +1,9 @@
+struct ConstFn<const F: fn()>;
+//~^ ERROR const generics are unstable
+//~^^ ERROR using function pointers as const generic parameters is unstable
+
+struct ConstPtr<const P: *const u32>;
+//~^ ERROR const generics are unstable
+//~^^ ERROR using raw pointers as const generic parameters is unstable
+
+fn main() {}
diff --git a/src/test/ui/feature-gates/feature-gate-const_generics-ptr.stderr b/src/test/ui/feature-gates/feature-gate-const_generics-ptr.stderr
new file mode 100644 (file)
index 0000000..935f84b
--- /dev/null
@@ -0,0 +1,39 @@
+error[E0658]: const generics are unstable
+  --> $DIR/feature-gate-const_generics-ptr.rs:1:22
+   |
+LL | struct ConstFn<const F: fn()>;
+   |                      ^
+   |
+   = note: for more information, see https://github.com/rust-lang/rust/issues/44580
+   = help: add `#![feature(const_generics)]` to the crate attributes to enable
+
+error[E0658]: const generics are unstable
+  --> $DIR/feature-gate-const_generics-ptr.rs:5:23
+   |
+LL | struct ConstPtr<const P: *const u32>;
+   |                       ^
+   |
+   = note: for more information, see https://github.com/rust-lang/rust/issues/44580
+   = help: add `#![feature(const_generics)]` to the crate attributes to enable
+
+error[E0658]: using function pointers as const generic parameters is unstable
+  --> $DIR/feature-gate-const_generics-ptr.rs:1:25
+   |
+LL | struct ConstFn<const F: fn()>;
+   |                         ^^^^
+   |
+   = note: for more information, see https://github.com/rust-lang/rust/issues/53020
+   = help: add `#![feature(const_compare_raw_pointers)]` to the crate attributes to enable
+
+error[E0658]: using raw pointers as const generic parameters is unstable
+  --> $DIR/feature-gate-const_generics-ptr.rs:5:26
+   |
+LL | struct ConstPtr<const P: *const u32>;
+   |                          ^^^^^^^^^^
+   |
+   = note: for more information, see https://github.com/rust-lang/rust/issues/53020
+   = help: add `#![feature(const_compare_raw_pointers)]` to the crate attributes to enable
+
+error: aborting due to 4 previous errors
+
+For more information about this error, try `rustc --explain E0658`.
index 27ff5ace25ddc2754e312b7de0c71a80e51c7b86..f0cc9ea70550e6a7cf8483d35a0869a48ff4282b 100644 (file)
@@ -1,4 +1,5 @@
 #![feature(never_type)]
+
 fn foo() -> Result<u32, !> {
     Ok(123)
 }
index dd4ca1f67e330e49b830f77ad2c5cb09095b1ece..08c36cece4cf9b42ebed048ddf39aaacc3e1c856 100644 (file)
@@ -1,8 +1,15 @@
 error[E0005]: refutable pattern in local binding: `Err(_)` not covered
-  --> $DIR/feature-gate-exhaustive-patterns.rs:7:9
+  --> $DIR/feature-gate-exhaustive-patterns.rs:8:9
    |
 LL |     let Ok(_x) = foo();
    |         ^^^^^^ pattern `Err(_)` not covered
+   |
+   = note: `let` bindings require an "irrefutable pattern", like a `struct` or an `enum` with only one variant
+   = note: for more information, visit https://doc.rust-lang.org/book/ch18-02-refutability.html
+help: you might want to use `if let` to ignore the variant that isn't matched
+   |
+LL |     if let Ok(_x) = foo() { /* */ }
+   |     ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
 
 error: aborting due to previous error
 
index d1eee8cc58895fdfb784dce24e844bde870d90f0..f89ddf995c49eb32dd0524d7f61343f6a21ea8da 100644 (file)
@@ -7,11 +7,11 @@ LL | #![plugin(foo)]
    = note: for more information, see https://github.com/rust-lang/rust/issues/29597
    = help: add `#![feature(plugin)]` to the crate attributes to enable
 
-warning: use of deprecated attribute `plugin`: compiler plugins are deprecated. See https://github.com/rust-lang/rust/issues/29597
+warning: use of deprecated attribute `plugin`: compiler plugins are deprecated. See https://github.com/rust-lang/rust/pull/64675
   --> $DIR/feature-gate-plugin.rs:3:1
    |
 LL | #![plugin(foo)]
-   | ^^^^^^^^^^^^^^^ help: remove this attribute
+   | ^^^^^^^^^^^^^^^ help: may be removed in a future compiler version
    |
    = note: `#[warn(deprecated)]` on by default
 
index 1c4ccac1dcffb07ef531207a88a81db049bfd64e..4856cf7c3f7d9546b61332d8872667e564b8e20c 100644 (file)
@@ -16,11 +16,11 @@ LL | #[plugin_registrar]
    = note: for more information, see https://github.com/rust-lang/rust/issues/29597
    = help: add `#![feature(plugin_registrar)]` to the crate attributes to enable
 
-warning: use of deprecated attribute `plugin_registrar`: compiler plugins are deprecated. See https://github.com/rust-lang/rust/issues/29597
+warning: use of deprecated attribute `plugin_registrar`: compiler plugins are deprecated. See https://github.com/rust-lang/rust/pull/64675
   --> $DIR/feature-gate-plugin_registrar.rs:5:1
    |
 LL | #[plugin_registrar]
-   | ^^^^^^^^^^^^^^^^^^^ help: remove this attribute
+   | ^^^^^^^^^^^^^^^^^^^ help: may be removed in a future compiler version
    |
    = note: `#[warn(deprecated)]` on by default
 
index dfaa85bc5f014efeb96c63c05fd6f3f1d805e786..02c8400e03e8202258163d9d2cbfbd1fcd176394 100644 (file)
@@ -26,4 +26,5 @@ LL | #[repr(simd)]
 
 error: aborting due to 2 previous errors
 
-For more information about this error, try `rustc --explain E0658`.
+Some errors have detailed explanations: E0566, E0658.
+For more information about an error, try `rustc --explain E0566`.
diff --git a/src/test/ui/feature-gates/feature-gate-track_caller.rs b/src/test/ui/feature-gates/feature-gate-track_caller.rs
new file mode 100644 (file)
index 0000000..5865cf0
--- /dev/null
@@ -0,0 +1,5 @@
+#[track_caller]
+fn f() {}
+//~^^ ERROR the `#[track_caller]` attribute is an experimental feature
+
+fn main() {}
diff --git a/src/test/ui/feature-gates/feature-gate-track_caller.stderr b/src/test/ui/feature-gates/feature-gate-track_caller.stderr
new file mode 100644 (file)
index 0000000..b890019
--- /dev/null
@@ -0,0 +1,12 @@
+error[E0658]: the `#[track_caller]` attribute is an experimental feature
+  --> $DIR/feature-gate-track_caller.rs:1:1
+   |
+LL | #[track_caller]
+   | ^^^^^^^^^^^^^^^
+   |
+   = note: for more information, see https://github.com/rust-lang/rust/issues/47809
+   = help: add `#![feature(track_caller)]` to the crate attributes to enable
+
+error: aborting due to previous error
+
+For more information about this error, try `rustc --explain E0658`.
index 6d8ac7e8f2911f3e88ce41d64292cc82ac7a4869..759fb170f90ddf1d4d925c1b6172a36b07c9d8e0 100644 (file)
@@ -1,3 +1,4 @@
+// ignore-emscripten compiled with panic=abort by default
 // compile-flags: -C no-prepopulate-passes -Cpasses=name-anon-globals
 
 #![crate_type = "lib"]
index 10cc494213507fc27e9b487994264663fe361e6f..97365c34d0148b150628c424683e293d982e9d6b 100644 (file)
@@ -1,5 +1,5 @@
 error[E0658]: the `#[unwind]` attribute is an experimental feature
-  --> $DIR/feature-gate-unwind-attributes.rs:11:5
+  --> $DIR/feature-gate-unwind-attributes.rs:12:5
    |
 LL |     #[unwind(allowed)]
    |     ^^^^^^^^^^^^^^^^^^
index e1edbbb929e6a4236f99a1e8bc81711161379d0a..d7209fc4de867f168c5db50c403efc4ba33abf49 100644 (file)
@@ -1,4 +1,5 @@
 // run-pass
+
 #![allow(unreachable_code)]
 #![feature(never_type)]
 
diff --git a/src/test/ui/generics/issue-65285-incorrect-explicit-lifetime-name-needed.rs b/src/test/ui/generics/issue-65285-incorrect-explicit-lifetime-name-needed.rs
new file mode 100644 (file)
index 0000000..54b483f
--- /dev/null
@@ -0,0 +1,14 @@
+#![crate_type="lib"]
+
+struct Nested<K>(K);
+
+fn should_error<T>() where T : Into<&u32> {}
+//~^ ERROR `&` without an explicit lifetime name cannot be used here [E0637]
+
+trait X<'a, K: 'a> {
+    fn foo<'b, L: X<&'b Nested<K>>>();
+    //~^ ERROR missing lifetime specifier [E0106]
+}
+
+fn bar<'b, L: X<&'b Nested<i32>>>(){}
+//~^ ERROR missing lifetime specifier [E0106]
diff --git a/src/test/ui/generics/issue-65285-incorrect-explicit-lifetime-name-needed.stderr b/src/test/ui/generics/issue-65285-incorrect-explicit-lifetime-name-needed.stderr
new file mode 100644 (file)
index 0000000..8720288
--- /dev/null
@@ -0,0 +1,21 @@
+error[E0637]: `&` without an explicit lifetime name cannot be used here
+  --> $DIR/issue-65285-incorrect-explicit-lifetime-name-needed.rs:5:37
+   |
+LL | fn should_error<T>() where T : Into<&u32> {}
+   |                                     ^ explicit lifetime name needed here
+
+error[E0106]: missing lifetime specifier
+  --> $DIR/issue-65285-incorrect-explicit-lifetime-name-needed.rs:9:19
+   |
+LL |     fn foo<'b, L: X<&'b Nested<K>>>();
+   |                   ^^^^^^^^^^^^^^^^ expected lifetime parameter
+
+error[E0106]: missing lifetime specifier
+  --> $DIR/issue-65285-incorrect-explicit-lifetime-name-needed.rs:13:15
+   |
+LL | fn bar<'b, L: X<&'b Nested<i32>>>(){}
+   |               ^^^^^^^^^^^^^^^^^^ expected lifetime parameter
+
+error: aborting due to 3 previous errors
+
+For more information about this error, try `rustc --explain E0106`.
index 979f64fcd90a51b8f8ee0801ce6404f4dcb0056e..76d97dd2f585d0367e8e40f6a7007614c3fbe7d9 100644 (file)
@@ -10,7 +10,7 @@ LL | |                                     fn(Inv<'y>)) }
    |
    = note: expected type `std::option::Option<fn(Inv<'y>)>`
               found type `std::option::Option<fn(Inv<'x>)>`
-note: the lifetime 'x as defined on the function body at 32:20...
+note: the lifetime `'x` as defined on the function body at 32:20...
   --> $DIR/hr-subtype.rs:32:20
    |
 LL |           fn subtype<'x,'y:'x,'z:'y>() {
@@ -19,7 +19,7 @@ LL |           fn subtype<'x,'y:'x,'z:'y>() {
 LL | / check! { free_inv_x_vs_free_inv_y: (fn(Inv<'x>),
 LL | |                                     fn(Inv<'y>)) }
    | |__________________________________________________- in this macro invocation
-note: ...does not necessarily outlive the lifetime 'y as defined on the function body at 32:23
+note: ...does not necessarily outlive the lifetime `'y` as defined on the function body at 32:23
   --> $DIR/hr-subtype.rs:32:23
    |
 LL |           fn subtype<'x,'y:'x,'z:'y>() {
@@ -41,7 +41,7 @@ LL | |                                     fn(Inv<'y>)) }
    |
    = note: expected type `std::option::Option<fn(Inv<'x>)>`
               found type `std::option::Option<fn(Inv<'y>)>`
-note: the lifetime 'x as defined on the function body at 38:22...
+note: the lifetime `'x` as defined on the function body at 38:22...
   --> $DIR/hr-subtype.rs:38:22
    |
 LL |           fn supertype<'x,'y:'x,'z:'y>() {
@@ -50,7 +50,7 @@ LL |           fn supertype<'x,'y:'x,'z:'y>() {
 LL | / check! { free_inv_x_vs_free_inv_y: (fn(Inv<'x>),
 LL | |                                     fn(Inv<'y>)) }
    | |__________________________________________________- in this macro invocation
-note: ...does not necessarily outlive the lifetime 'y as defined on the function body at 38:25
+note: ...does not necessarily outlive the lifetime `'y` as defined on the function body at 38:25
   --> $DIR/hr-subtype.rs:38:25
    |
 LL |           fn supertype<'x,'y:'x,'z:'y>() {
index 3b6aff521698911aac704975200d4801b6ac97ec..74f4212b2468b410d9b0a7c51cbc428eb8e217a4 100644 (file)
@@ -10,7 +10,7 @@ LL | |                             fn(&'y u32)) }
    |
    = note: expected type `std::option::Option<fn(&'x u32)>`
               found type `std::option::Option<fn(&'y u32)>`
-note: the lifetime 'x as defined on the function body at 38:22...
+note: the lifetime `'x` as defined on the function body at 38:22...
   --> $DIR/hr-subtype.rs:38:22
    |
 LL |           fn supertype<'x,'y:'x,'z:'y>() {
@@ -19,7 +19,7 @@ LL |           fn supertype<'x,'y:'x,'z:'y>() {
 LL | / check! { free_x_vs_free_y: (fn(&'x u32),
 LL | |                             fn(&'y u32)) }
    | |__________________________________________- in this macro invocation
-note: ...does not necessarily outlive the lifetime 'y as defined on the function body at 38:25
+note: ...does not necessarily outlive the lifetime `'y` as defined on the function body at 38:25
   --> $DIR/hr-subtype.rs:38:25
    |
 LL |           fn supertype<'x,'y:'x,'z:'y>() {
index c2cc8ebad272658a2f8f2bbb3ad8397f052467be..afcb467ad471170a2e9503b2edf7404dc1218182 100644 (file)
@@ -6,10 +6,11 @@ LL | fn want_bar_for_any_ccx<B>(b: &B)
 LL |     where B : for<'ccx> Bar<'ccx>
    |               ------------------- required by this bound in `want_bar_for_any_ccx`
 ...
+LL |     where B : Qux
+   |                  - help: consider further restricting type parameter `B`: `, for<'ccx> B: Bar<'ccx>`
+...
 LL |     want_bar_for_any_ccx(b);
    |                          ^ the trait `for<'ccx> Bar<'ccx>` is not implemented for `B`
-   |
-   = help: consider adding a `where for<'ccx> B: Bar<'ccx>` bound
 
 error: aborting due to previous error
 
index a93814ad4c25e8f8912c492a4c49154e5b9e9476..20913b4f28c8e7a3da9aa9ed4d874e126a1f5ff1 100644 (file)
@@ -1,6 +1,9 @@
 error[E0277]: the trait bound `for<'tcx> F: Foo<'tcx>` is not satisfied
   --> $DIR/hrtb-higher-ranker-supertraits.rs:18:26
    |
+LL |     where F : Foo<'x>
+   |                      - help: consider further restricting type parameter `F`: `, for<'tcx> F: Foo<'tcx>`
+...
 LL |     want_foo_for_any_tcx(f);
    |                          ^ the trait `for<'tcx> Foo<'tcx>` is not implemented for `F`
 ...
@@ -8,12 +11,13 @@ LL | fn want_foo_for_any_tcx<F>(f: &F)
    |    --------------------
 LL |     where F : for<'tcx> Foo<'tcx>
    |               ------------------- required by this bound in `want_foo_for_any_tcx`
-   |
-   = help: consider adding a `where for<'tcx> F: Foo<'tcx>` bound
 
 error[E0277]: the trait bound `for<'ccx> B: Bar<'ccx>` is not satisfied
   --> $DIR/hrtb-higher-ranker-supertraits.rs:35:26
    |
+LL |     where B : Bar<'x>
+   |                      - help: consider further restricting type parameter `B`: `, for<'ccx> B: Bar<'ccx>`
+...
 LL |     want_bar_for_any_ccx(b);
    |                          ^ the trait `for<'ccx> Bar<'ccx>` is not implemented for `B`
 ...
@@ -21,8 +25,6 @@ LL | fn want_bar_for_any_ccx<B>(b: &B)
    |    --------------------
 LL |     where B : for<'ccx> Bar<'ccx>
    |               ------------------- required by this bound in `want_bar_for_any_ccx`
-   |
-   = help: consider adding a `where for<'ccx> B: Bar<'ccx>` bound
 
 error: aborting due to 2 previous errors
 
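Note: both errors now carry a structured help that appends a higher-ranked bound to the existing `where` clause rather than proposing a brand-new one. A minimal sketch of a caller already carrying the suggested `for<'ccx>` bound; the trait is simplified and the names are borrowed from the test:

trait Bar<'ccx> {}

struct S;
impl<'ccx> Bar<'ccx> for S {}

fn want_bar_for_any_ccx<B>(_b: &B)
where
    B: for<'ccx> Bar<'ccx>,
{
}

fn want_bar_for_some_ccx<'x, B>(b: &B)
where
    B: Bar<'x>,
    // the appended higher-ranked bound the suggestion asks for:
    B: for<'ccx> Bar<'ccx>,
{
    want_bar_for_any_ccx(b);
}

fn main() {
    want_bar_for_some_ccx(&S);
}
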
index 7e0f4e4e0b8f3261a6e59bf839d5cf6adf88165e..7acb266f49c0b6f22f85ef74fc13c5e699c6b50e 100644 (file)
@@ -34,7 +34,7 @@ LL | use foo::test2::test::g;
    |
 LL | use foo::test::g;
    |
-and 2 other candidates
+     and 2 other candidates
 
 error[E0425]: cannot find function `f` in this scope
   --> $DIR/globs.rs:61:12
index 5eacfdf8dee139b380227cfcf3d9004154c0bf4b..45a2efebbb8dad8afeef17abdaf941adc6044983 100644 (file)
@@ -2,7 +2,7 @@ error[E0425]: cannot find value `Opaque` in this scope
   --> $DIR/rustc-macro-transparency.rs:26:5
    |
 LL |     Opaque;
-   |     ^^^^^^ help: a local variable with a similar name exists: `opaque`
+   |     ^^^^^^ help: a local variable with a similar name exists (notice the capitalization): `opaque`
 
 error[E0423]: expected value, found macro `semitransparent`
   --> $DIR/rustc-macro-transparency.rs:29:5
index 6971873ba601e814670599a3c1a234b798d6e211..acd852103cae3d0b5219b7f1c50c5a60cd31778a 100644 (file)
@@ -17,7 +17,7 @@ fn y /* 0#0 */() { }
 /*
 Expansions:
 0: parent: ExpnId(0), call_site_ctxt: #0, kind: Root
-1: parent: ExpnId(0), call_site_ctxt: #0, kind: Macro(Bang, foo)
+1: parent: ExpnId(0), call_site_ctxt: #0, kind: Macro(Bang, "foo")
 
 SyntaxContexts:
 #0: parent: #0, outer_mark: (ExpnId(0), Opaque)
index cbf52476ae37f5dffe46478494984365de7212ee..0936f3b9e38e856c313cb33272fcaffa658de498 100644 (file)
@@ -2,7 +2,10 @@ error[E0308]: mismatched types
   --> $DIR/if-no-match-bindings.rs:18:8
    |
 LL |     if b_ref() {}
-   |        ^^^^^^^ expected bool, found &bool
+   |        ^^^^^^^
+   |        |
+   |        expected bool, found &bool
+   |        help: consider dereferencing the borrow: `*b_ref()`
    |
    = note: expected type `bool`
               found type `&bool`
@@ -11,7 +14,10 @@ error[E0308]: mismatched types
   --> $DIR/if-no-match-bindings.rs:19:8
    |
 LL |     if b_mut_ref() {}
-   |        ^^^^^^^^^^^ expected bool, found &mut bool
+   |        ^^^^^^^^^^^
+   |        |
+   |        expected bool, found &mut bool
+   |        help: consider dereferencing the borrow: `*b_mut_ref()`
    |
    = note: expected type `bool`
               found type `&mut bool`
@@ -20,7 +26,10 @@ error[E0308]: mismatched types
   --> $DIR/if-no-match-bindings.rs:20:8
    |
 LL |     if &true {}
-   |        ^^^^^ expected bool, found &bool
+   |        ^^^^^
+   |        |
+   |        expected bool, found &bool
+   |        help: consider removing the borrow: `true`
    |
    = note: expected type `bool`
               found type `&bool`
@@ -29,7 +38,10 @@ error[E0308]: mismatched types
   --> $DIR/if-no-match-bindings.rs:21:8
    |
 LL |     if &mut true {}
-   |        ^^^^^^^^^ expected bool, found &mut bool
+   |        ^^^^^^^^^
+   |        |
+   |        expected bool, found &mut bool
+   |        help: consider removing the borrow: `true`
    |
    = note: expected type `bool`
               found type `&mut bool`
@@ -38,7 +50,10 @@ error[E0308]: mismatched types
   --> $DIR/if-no-match-bindings.rs:24:11
    |
 LL |     while b_ref() {}
-   |           ^^^^^^^ expected bool, found &bool
+   |           ^^^^^^^
+   |           |
+   |           expected bool, found &bool
+   |           help: consider dereferencing the borrow: `*b_ref()`
    |
    = note: expected type `bool`
               found type `&bool`
@@ -47,7 +62,10 @@ error[E0308]: mismatched types
   --> $DIR/if-no-match-bindings.rs:25:11
    |
 LL |     while b_mut_ref() {}
-   |           ^^^^^^^^^^^ expected bool, found &mut bool
+   |           ^^^^^^^^^^^
+   |           |
+   |           expected bool, found &mut bool
+   |           help: consider dereferencing the borrow: `*b_mut_ref()`
    |
    = note: expected type `bool`
               found type `&mut bool`
@@ -56,7 +74,10 @@ error[E0308]: mismatched types
   --> $DIR/if-no-match-bindings.rs:26:11
    |
 LL |     while &true {}
-   |           ^^^^^ expected bool, found &bool
+   |           ^^^^^
+   |           |
+   |           expected bool, found &bool
+   |           help: consider removing the borrow: `true`
    |
    = note: expected type `bool`
               found type `&bool`
@@ -65,7 +86,10 @@ error[E0308]: mismatched types
   --> $DIR/if-no-match-bindings.rs:27:11
    |
 LL |     while &mut true {}
-   |           ^^^^^^^^^ expected bool, found &mut bool
+   |           ^^^^^^^^^
+   |           |
+   |           expected bool, found &mut bool
+   |           help: consider removing the borrow: `true`
    |
    = note: expected type `bool`
               found type `&mut bool`
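Note: each of these diagnostics now carries an explicit help, either dereferencing the borrowed expression or dropping the borrow entirely. A minimal sketch of conditions the suggestions produce (hypothetical `b_ref` binding, not the test's function):

fn main() {
    let b = true;
    let b_ref = &b;

    // `if`/`while` conditions must be `bool`, not `&bool`:
    if *b_ref {
        println!("dereferenced the borrow");
    }

    // ...and a literal `&true` simply loses its borrow:
    if true {
        println!("removed the borrow");
    }
}
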
diff --git a/src/test/ui/impl-for-never.rs b/src/test/ui/impl-for-never.rs
deleted file mode 100644 (file)
index c5f1298..0000000
+++ /dev/null
@@ -1,26 +0,0 @@
-// run-pass
-// Test that we can call static methods on ! both directly and when it appears in a generic
-
-#![feature(never_type)]
-
-trait StringifyType {
-    fn stringify_type() -> &'static str;
-}
-
-impl StringifyType for ! {
-    fn stringify_type() -> &'static str {
-        "!"
-    }
-}
-
-fn maybe_stringify<T: StringifyType>(opt: Option<T>) -> &'static str {
-    match opt {
-        Some(_) => T::stringify_type(),
-        None => "none",
-    }
-}
-
-fn main() {
-    println!("! is {}", <!>::stringify_type());
-    println!("None is {}", maybe_stringify(None::<!>));
-}
index eb824def24687e19502ab4d94c0368de4a0956e5..5e80c673258b8484432a74442ec7a58020d05ac9 100644 (file)
@@ -4,7 +4,7 @@ error[E0495]: cannot infer an appropriate lifetime due to conflicting requiremen
 LL |     static_val(x);
    |                ^
    |
-note: first, the lifetime cannot outlive the lifetime 'a as defined on the function body at 19:26...
+note: first, the lifetime cannot outlive the lifetime `'a` as defined on the function body at 19:26...
   --> $DIR/dyn-trait.rs:19:26
    |
 LL | fn with_dyn_debug_static<'a>(x: Box<dyn Debug + 'a>) {
@@ -19,3 +19,4 @@ LL | fn with_dyn_debug_static<'a>(x: Box<dyn Debug + 'a>) {
 
 error: aborting due to previous error
 
+For more information about this error, try `rustc --explain E0495`.
index 650161753d1e5171193fd5b7a2b2ad0bbbb98fc8..956ac1f1a11672789e026218f07513b4d6f6fd1f 100644 (file)
@@ -4,7 +4,7 @@ error[E0700]: hidden type for `impl Trait` captures lifetime that does not appea
 LL | fn hide_ref<'a, 'b, T: 'static>(x: &'a mut &'b T) -> impl Swap + 'a {
    |                                                      ^^^^^^^^^^^^^^
    |
-note: hidden type `&'a mut &'b T` captures the lifetime 'b as defined on the function body at 28:17
+note: hidden type `&'a mut &'b T` captures the lifetime `'b` as defined on the function body at 28:17
   --> $DIR/hidden-lifetimes.rs:28:17
    |
 LL | fn hide_ref<'a, 'b, T: 'static>(x: &'a mut &'b T) -> impl Swap + 'a {
@@ -16,7 +16,7 @@ error[E0700]: hidden type for `impl Trait` captures lifetime that does not appea
 LL | fn hide_rc_refcell<'a, 'b: 'a, T: 'static>(x: Rc<RefCell<&'b T>>) -> impl Swap + 'a {
    |                                                                      ^^^^^^^^^^^^^^
    |
-note: hidden type `std::rc::Rc<std::cell::RefCell<&'b T>>` captures the lifetime 'b as defined on the function body at 45:24
+note: hidden type `std::rc::Rc<std::cell::RefCell<&'b T>>` captures the lifetime `'b` as defined on the function body at 45:24
   --> $DIR/hidden-lifetimes.rs:45:24
    |
 LL | fn hide_rc_refcell<'a, 'b: 'a, T: 'static>(x: Rc<RefCell<&'b T>>) -> impl Swap + 'a {
index d5756c015596e5aa0f07283c1ed8adb580f5399c..0d8ee61b5ba13926040563ae61e362b76e227449 100644 (file)
@@ -1,10 +1,11 @@
 error[E0277]: the trait bound `S: std::marker::Copy` is not satisfied in `(S, T)`
   --> $DIR/issue-55872-1.rs:12:5
    |
+LL | impl<S: Default> Bar for S {
+   |      -- help: consider further restricting this bound: `S: std::marker::Copy +`
 LL |     type E = impl Copy;
    |     ^^^^^^^^^^^^^^^^^^^ within `(S, T)`, the trait `std::marker::Copy` is not implemented for `S`
    |
-   = help: consider adding a `where S: std::marker::Copy` bound
    = note: required because it appears within the type `(S, T)`
    = note: the return type of a function must have a statically known size
 
@@ -13,8 +14,10 @@ error[E0277]: the trait bound `T: std::marker::Copy` is not satisfied in `(S, T)
    |
 LL |     type E = impl Copy;
    |     ^^^^^^^^^^^^^^^^^^^ within `(S, T)`, the trait `std::marker::Copy` is not implemented for `T`
+...
+LL |     fn foo<T: Default>() -> Self::E {
+   |            -- help: consider further restricting this bound: `T: std::marker::Copy +`
    |
-   = help: consider adding a `where T: std::marker::Copy` bound
    = note: required because it appears within the type `(S, T)`
    = note: the return type of a function must have a statically known size
 
index a6ea7837678ed5ddeb5998b64d6f6ec51c714b50..2ffb94348616bca0f1bddc12af34b0e3fd8eb8f5 100644 (file)
@@ -24,12 +24,12 @@ LL | fn explicit<'a>(x: &'a i32) -> impl Copy { x }
    |                                |
    |                                this return type evaluates to the `'static` lifetime...
    |
-note: ...can't outlive the lifetime 'a as defined on the function body at 6:13
+note: ...can't outlive the lifetime `'a` as defined on the function body at 6:13
   --> $DIR/must_outlive_least_region_or_bound.rs:6:13
    |
 LL | fn explicit<'a>(x: &'a i32) -> impl Copy { x }
    |             ^^
-help: you can add a constraint to the return type to make it last less than `'static` and match the lifetime 'a as defined on the function body at 6:13
+help: you can add a constraint to the return type to make it last less than `'static` and match the lifetime `'a` as defined on the function body at 6:13
    |
 LL | fn explicit<'a>(x: &'a i32) -> impl Copy + 'a { x }
    |                                ^^^^^^^^^^^^^^
@@ -42,12 +42,12 @@ LL | fn with_bound<'a>(x: &'a i32) -> impl LifetimeTrait<'a> + 'static { x }
    |                                  |
    |                                  this return type evaluates to the `'static` lifetime...
    |
-note: ...can't outlive the lifetime 'a as defined on the function body at 12:15
+note: ...can't outlive the lifetime `'a` as defined on the function body at 12:15
   --> $DIR/must_outlive_least_region_or_bound.rs:12:15
    |
 LL | fn with_bound<'a>(x: &'a i32) -> impl LifetimeTrait<'a> + 'static { x }
    |               ^^
-help: you can add a constraint to the return type to make it last less than `'static` and match the lifetime 'a as defined on the function body at 12:15
+help: you can add a constraint to the return type to make it last less than `'static` and match the lifetime `'a` as defined on the function body at 12:15
    |
 LL | fn with_bound<'a>(x: &'a i32) -> impl LifetimeTrait<'a> + 'static + 'a { x }
    |                                  ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
index 5c8e322f712d167f370846766c2b63649cfa8908..894a65ff389953460abc66b10c58962ea5330b35 100644 (file)
@@ -4,7 +4,7 @@ error[E0700]: hidden type for `impl Trait` captures lifetime that does not appea
 LL | fn foo(x: Cell<&'x u32>) -> impl Trait<'y>
    |                             ^^^^^^^^^^^^^^
    |
-note: hidden type `std::cell::Cell<&'x u32>` captures the lifetime 'x as defined on the function body at 17:7
+note: hidden type `std::cell::Cell<&'x u32>` captures the lifetime `'x` as defined on the function body at 17:7
   --> $DIR/region-escape-via-bound.rs:17:7
    |
 LL | where 'x: 'y
index 0bc0180e78a42fcc44ea2318d921138cc7dab715..1d6b5f56aa0cfae2d671e0f45b1fce008af40225 100644 (file)
@@ -30,12 +30,12 @@ LL |         self.x.iter().map(|a| a.0)
    |         |
    |         ...but this borrow...
    |
-note: ...can't outlive the lifetime 'a as defined on the method body at 10:20
+note: ...can't outlive the lifetime `'a` as defined on the method body at 10:20
   --> $DIR/static-return-lifetime-infered.rs:10:20
    |
 LL |     fn iter_values<'a>(&'a self) -> impl Iterator<Item=u32> {
    |                    ^^
-help: you can add a constraint to the return type to make it last less than `'static` and match the lifetime 'a as defined on the method body at 10:20
+help: you can add a constraint to the return type to make it last less than `'static` and match the lifetime `'a` as defined on the method body at 10:20
    |
 LL |     fn iter_values<'a>(&'a self) -> impl Iterator<Item=u32> + 'a {
    |                                     ^^^^^^^^^^^^^^^^^^^^^^^^^^^^
index b50a926c63795aa4a6e08a759097535a730247a0..c1ec536ef43620da3c5a26a89b8caec29d222714 100644 (file)
@@ -18,3 +18,4 @@ LL | |     }
 
 error: aborting due to previous error
 
+For more information about this error, try `rustc --explain E0495`.
index cd65bab2d4668688d6c83df001b8203c83bc4ee1..b5287f32a50451b57473e6823c2252c3f68e1e1f 100644 (file)
@@ -11,7 +11,7 @@ LL | /     fn foo(&self, x: &u32, y: &'a u32) -> &'a u32 {
 LL | |         x
 LL | |     }
    | |_____^
-note: ...but the lifetime must also be valid for the lifetime 'a as defined on the method body at 9:32...
+note: ...but the lifetime must also be valid for the lifetime `'a` as defined on the method body at 9:32...
   --> $DIR/mismatched_trait_impl.rs:9:32
    |
 LL |     fn foo(&self, x: &u32, y: &'a u32) -> &'a u32 {
@@ -22,3 +22,4 @@ LL |     fn foo(&self, x: &u32, y: &'a u32) -> &'a u32 {
 
 error: aborting due to previous error
 
+For more information about this error, try `rustc --explain E0495`.
index 80f15b7c5847f4cbc82b051dd67e2e28c5d3d308..734ca0819e4166251a23eb6db5abcfff8bde2afd 100644 (file)
@@ -11,7 +11,7 @@ LL | /     fn foo(&self, x: &u32, y: &'a u32) -> &'a u32 {
 LL | |         x
 LL | |     }
    | |_____^
-note: ...but the lifetime must also be valid for the lifetime 'a as defined on the method body at 9:32...
+note: ...but the lifetime must also be valid for the lifetime `'a` as defined on the method body at 9:32...
   --> $DIR/mismatched_trait_impl.rs:9:32
    |
 LL |     fn foo(&self, x: &u32, y: &'a u32) -> &'a u32 {
@@ -32,3 +32,4 @@ LL |         x
 
 error: aborting due to 2 previous errors
 
+For more information about this error, try `rustc --explain E0495`.
diff --git a/src/test/ui/include-single-expr-helper-1.rs b/src/test/ui/include-single-expr-helper-1.rs
new file mode 100644 (file)
index 0000000..aa6380b
--- /dev/null
@@ -0,0 +1,5 @@
+// ignore-test auxiliary file for include-single-expr.rs
+
+0
+
+// trailing comment permitted
diff --git a/src/test/ui/include-single-expr-helper.rs b/src/test/ui/include-single-expr-helper.rs
new file mode 100644 (file)
index 0000000..84d8b69
--- /dev/null
@@ -0,0 +1,5 @@
+// ignore-test auxiliary file for include-single-expr.rs
+
+0
+10
+100
diff --git a/src/test/ui/include-single-expr.rs b/src/test/ui/include-single-expr.rs
new file mode 100644 (file)
index 0000000..0f4c29e
--- /dev/null
@@ -0,0 +1,6 @@
+// error-pattern include macro expected single expression
+
+fn main() {
+    include!("include-single-expr-helper.rs");
+    include!("include-single-expr-helper-1.rs");
+}
diff --git a/src/test/ui/include-single-expr.stderr b/src/test/ui/include-single-expr.stderr
new file mode 100644 (file)
index 0000000..80eecf8
--- /dev/null
@@ -0,0 +1,10 @@
+error: include macro expected single expression in source
+  --> $DIR/include-single-expr-helper.rs:4:1
+   |
+LL | 10
+   | ^^
+   |
+   = note: `#[deny(incomplete_include)]` on by default
+
+error: aborting due to previous error
+
index dfc663e4a79f84b88916c81b63d08dd643077aa3..1e74445af55cb0327bb4facb4604e54d0212e01b 100644 (file)
@@ -2,9 +2,7 @@ error[E0401]: can't use generic parameters from outer function
   --> $DIR/inner-static-type-parameter.rs:6:19
    |
 LL | fn foo<T>() {
-   |    --- - type parameter from outer function
-   |    |
-   |    try adding a local generic parameter in this method instead
+   |        - type parameter from outer function
 LL |     static a: Bar<T> = Bar::What;
    |                   ^ use of generic parameter from outer function
 
index d14a7524bf2d9d8a981c167e1f3165ef4c82999f..0d7315dd887ca12f8e78406ba95642ad8b44dc7e 100644 (file)
@@ -1,8 +1,8 @@
-warning: use of deprecated attribute `plugin`: compiler plugins are deprecated. See https://github.com/rust-lang/rust/issues/29597
+warning: use of deprecated attribute `plugin`: compiler plugins are deprecated. See https://github.com/rust-lang/rust/pull/64675
   --> $DIR/invalid-plugin-attr.rs:4:1
    |
 LL | #[plugin(bla)]
-   | ^^^^^^^^^^^^^^ help: remove this attribute
+   | ^^^^^^^^^^^^^^ help: may be removed in a future compiler version
    |
    = note: `#[warn(deprecated)]` on by default
 
diff --git a/src/test/ui/issue-53912.rs b/src/test/ui/issue-53912.rs
deleted file mode 100644 (file)
index 4718aea..0000000
+++ /dev/null
@@ -1,37 +0,0 @@
-// build-pass (FIXME(62277): could be check-pass?)
-
-// This test is the same code as in ui/symbol-names/issue-60925.rs but this checks that the
-// reproduction compiles successfully and doesn't segfault, whereas that test just checks that the
-// symbol mangling fix produces the correct result.
-
-fn dummy() {}
-
-mod llvm {
-    pub(crate) struct Foo;
-}
-mod foo {
-    pub(crate) struct Foo<T>(T);
-
-    impl Foo<::llvm::Foo> {
-        pub(crate) fn foo() {
-            for _ in 0..0 {
-                for _ in &[::dummy()] {
-                    ::dummy();
-                    ::dummy();
-                    ::dummy();
-                }
-            }
-        }
-    }
-
-    pub(crate) fn foo() {
-        Foo::foo();
-        Foo::foo();
-    }
-}
-
-pub fn foo() {
-    foo::foo();
-}
-
-fn main() {}
diff --git a/src/test/ui/issue-59020.rs b/src/test/ui/issue-59020.rs
deleted file mode 100644 (file)
index e754493..0000000
+++ /dev/null
@@ -1,28 +0,0 @@
-// edition:2018
-// run-pass
-// ignore-emscripten no threads support
-// ignore-sgx no thread sleep support
-
-use std::thread;
-use std::time::Duration;
-
-fn main() {
-    let t1 = thread::spawn(|| {
-        let sleep = Duration::new(0,100_000);
-        for _ in 0..100 {
-            println!("Parking1");
-            thread::park_timeout(sleep);
-        }
-    });
-
-    let t2 = thread::spawn(|| {
-        let sleep = Duration::new(0,100_000);
-        for _ in 0..100 {
-            println!("Parking2");
-            thread::park_timeout(sleep);
-        }
-    });
-
-    t1.join().expect("Couldn't join thread 1");
-    t2.join().expect("Couldn't join thread 2");
-}
diff --git a/src/test/ui/issues/auxiliary/issue-57271-lib.rs b/src/test/ui/issues/auxiliary/issue-57271-lib.rs
new file mode 100644 (file)
index 0000000..ff62566
--- /dev/null
@@ -0,0 +1,11 @@
+#[derive(Copy, Clone, Debug, Eq, PartialEq, Hash)]
+pub enum BaseType {
+    Byte,
+    Char,
+    Double,
+    Float,
+    Int,
+    Long,
+    Short,
+    Boolean,
+}
index 544716e89b39d7249c6aac3562507de00f8302fd..b1057d45869a8ae9394752dbc0d70383401c663b 100644 (file)
@@ -2,7 +2,7 @@ error[E0532]: expected tuple struct/variant, found function `foo`
   --> $DIR/issue-10200.rs:6:9
    |
 LL |         foo(x)
-   |         ^^^ help: a tuple struct with a similar name exists: `Foo`
+   |         ^^^ help: a tuple struct with a similar name exists (notice the capitalization): `Foo`
 
 error: aborting due to previous error
 
index a836593e0da108defd40295c7c1fe6efabcb5433..4fff4ee866c395898e460447eb4bdd833207f738 100644 (file)
@@ -12,7 +12,7 @@ LL |       drop::<Box<dyn for<'z> FnMut(&'z isize) -> &'z isize>>(Box::new(|z| {
 LL | |         x
 LL | |     }));
    | |_____^
-note: ...but the borrowed content is only valid for the lifetime 'x as defined on the function body at 1:9
+note: ...but the borrowed content is only valid for the lifetime `'x` as defined on the function body at 1:9
   --> $DIR/issue-10291.rs:1:9
    |
 LL | fn test<'x>(x: &'x isize) {
diff --git a/src/test/ui/issues/issue-13352.rs b/src/test/ui/issues/issue-13352.rs
deleted file mode 100644 (file)
index e6995be..0000000
+++ /dev/null
@@ -1,11 +0,0 @@
-// ignore-cloudabi no std::process
-
-fn foo(_: Box<dyn FnMut()>) {}
-
-fn main() {
-    foo(loop {
-        std::process::exit(0);
-    });
-    2_usize + (loop {});
-    //~^ ERROR E0277
-}
diff --git a/src/test/ui/issues/issue-13352.stderr b/src/test/ui/issues/issue-13352.stderr
deleted file mode 100644 (file)
index 58ac74b..0000000
+++ /dev/null
@@ -1,11 +0,0 @@
-error[E0277]: cannot add `()` to `usize`
-  --> $DIR/issue-13352.rs:9:13
-   |
-LL |     2_usize + (loop {});
-   |             ^ no implementation for `usize + ()`
-   |
-   = help: the trait `std::ops::Add<()>` is not implemented for `usize`
-
-error: aborting due to previous error
-
-For more information about this error, try `rustc --explain E0277`.
index e66368f9ba8fab56b2a88994a15d0cb944c848ae..9510aae77534158245da2265c6bf2d22fae18340 100644 (file)
@@ -2,7 +2,6 @@
 // Test that codegen works correctly when there are multiple refutable
 // patterns in match expression.
 
-
 enum Foo {
     FooUint(usize),
     FooNullary,
index 771a2ddf240f59ae643ccdb037138e28ef83a5f6..b663e213ed05e357f8d8d48749573938828dd128 100644 (file)
@@ -16,7 +16,7 @@ note: ...so that reference does not outlive borrowed content
    |
 LL |         self.a();
    |         ^^^^
-note: but, the lifetime must be valid for the lifetime 'a as defined on the trait at 1:9...
+note: but, the lifetime must be valid for the lifetime `'a` as defined on the trait at 1:9...
   --> $DIR/issue-16683.rs:1:9
    |
 LL | trait T<'a> {
@@ -27,3 +27,4 @@ LL | trait T<'a> {
 
 error: aborting due to previous error
 
+For more information about this error, try `rustc --explain E0495`.
index 2374e829556ec2276cc95b44cd215c1886d54cc1..d7e6069977b4e1e0ea13f38fe0c4dda50d208166 100644 (file)
@@ -6,3 +6,4 @@ LL |     let p = foo { x: () };
 
 error: aborting due to previous error
 
+For more information about this error, try `rustc --explain E0574`.
index 4b5678a88773b8aa8bd79f6bb054bfa325f7a151..37274e239ba0b1a1e03b56d22c8fd9dd380a8230 100644 (file)
@@ -6,3 +6,4 @@ LL |         Foo { i } => ()
 
 error: aborting due to previous error
 
+For more information about this error, try `rustc --explain E0574`.
index e27f49b4a3f0a61d4335f406602951404c0fb207..4a0fb186e49f6c91fec5cc95ebb356216c16e1e1 100644 (file)
@@ -27,7 +27,7 @@ LL |     use std::prelude::v1::Result;
    |
 LL |     use std::result::Result;
    |
-and 1 other candidates
+     and 1 other candidates
 
 error[E0573]: expected type, found variant `Result`
   --> $DIR/issue-17546.rs:28:13
@@ -44,7 +44,7 @@ LL | use std::prelude::v1::Result;
    |
 LL | use std::result::Result;
    |
-and 1 other candidates
+     and 1 other candidates
 
 error[E0573]: expected type, found variant `NoResult`
   --> $DIR/issue-17546.rs:33:15
@@ -62,3 +62,4 @@ LL | fn newer() -> Result<foo::MyEnum, String> {
 
 error: aborting due to 4 previous errors
 
+For more information about this error, try `rustc --explain E0573`.
index 1fe1821292c08a18ca97fba4f2fb31e82ef1adee..e320c436f5b68595f9a1c73de780a77fc6296f94 100644 (file)
@@ -15,7 +15,7 @@ error: constant `foo` should have an upper case name
   --> $DIR/issue-17718-const-naming.rs:4:7
    |
 LL | const foo: isize = 3;
-   |       ^^^ help: convert the identifier to upper case: `FOO`
+   |       ^^^ help: convert the identifier to upper case (notice the capitalization): `FOO`
    |
 note: lint level defined here
   --> $DIR/issue-17718-const-naming.rs:2:9
index b8a0a0676319a292aa5ac9888fc9f3b7336ca303..d392ea3c1b8617bae73b0833c85822c532db0e4e 100644 (file)
@@ -17,7 +17,7 @@ LL | |
 LL | |
 LL | |     }
    | |_____^
-note: ...does not necessarily outlive the lifetime 'a as defined on the impl at 5:7
+note: ...does not necessarily outlive the lifetime `'a` as defined on the impl at 5:7
   --> $DIR/issue-17740.rs:5:7
    |
 LL | impl <'a> Foo<'a>{
@@ -31,7 +31,7 @@ LL |     fn bar(self: &mut Foo) {
    |
    = note: expected type `Foo<'a>`
               found type `Foo<'_>`
-note: the lifetime 'a as defined on the impl at 5:7...
+note: the lifetime `'a` as defined on the impl at 5:7...
   --> $DIR/issue-17740.rs:5:7
    |
 LL | impl <'a> Foo<'a>{
index 0ef3b98719d341c949d0ce4830dd65f869a5aeb0..adcbb62e3d5bda4ee80c04f2e5e0fbb9494c1511 100644 (file)
@@ -17,7 +17,7 @@ note: ...so that reference does not outlive borrowed content
    |
 LL |         self.foo();
    |         ^^^^
-note: but, the lifetime must be valid for the lifetime 'a as defined on the trait at 4:11...
+note: but, the lifetime must be valid for the lifetime `'a` as defined on the trait at 4:11...
   --> $DIR/issue-17758.rs:4:11
    |
 LL | trait Foo<'a> {
@@ -28,3 +28,4 @@ LL | trait Foo<'a> {
 
 error: aborting due to previous error
 
+For more information about this error, try `rustc --explain E0495`.
index 585bc9c14883bfb867afe9f9e22eed6a44ab15d3..04be62dc661bfe3c18c02462846d2927a33e3c8c 100644 (file)
@@ -15,7 +15,7 @@ LL | |
 LL | |         println!("{:?}", self);
 LL | |     }
    | |_____^
-note: ...does not necessarily outlive the lifetime '_ as defined on the impl at 5:5
+note: ...does not necessarily outlive the lifetime `'_` as defined on the impl at 5:5
   --> $DIR/issue-17905-2.rs:5:5
    |
 LL |     &str,
@@ -29,7 +29,7 @@ LL |     fn say(self: &Pair<&str, isize>) {
    |
    = note: expected type `Pair<&str, _>`
               found type `Pair<&str, _>`
-note: the lifetime '_ as defined on the impl at 5:5...
+note: the lifetime `'_` as defined on the impl at 5:5...
   --> $DIR/issue-17905-2.rs:5:5
    |
 LL |     &str,
index 4c5b940190ee671b54f7349786a5e27b2e46943b..ddee5a9da7a42acd060a119171aedde15482e71a 100644 (file)
@@ -18,3 +18,4 @@ LL | impl foo {}
 
 error: aborting due to 3 previous errors
 
+For more information about this error, try `rustc --explain E0573`.
index 2754d6bdd83066a60778aefd9efa8edb6556fb44..31376f2d1be0fa557a43dfdacb6521eaecbf69e5 100644 (file)
@@ -7,13 +7,13 @@ LL |   trait From<Src> {
 LL | /     fn to<Dst>(
 LL | |         self
 LL | |     ) -> <Dst as From<Self>>::Result where Dst: From<Self> {
+   | |                                                           - help: consider further restricting `Self`: `, Self: std::marker::Sized`
 LL | |         From::from(self)
 LL | |     }
    | |_____^ doesn't have a size known at compile-time
    |
    = help: the trait `std::marker::Sized` is not implemented for `Self`
    = note: to learn more, visit <https://doc.rust-lang.org/book/ch19-04-advanced-types.html#dynamically-sized-types-and-the-sized-trait>
-   = help: consider adding a `where Self: std::marker::Sized` bound
 
 error: aborting due to previous error
 
index 64e3cdc64c1127d3522a1283fbd3ac3c6e069d55..13c9c09461eae290e88fa5f3d56138e92346560b 100644 (file)
@@ -23,7 +23,7 @@ LL | |
 LL | |         self.sub = t;
 LL | |     }
    | |_____^
-note: ...does not necessarily outlive the lifetime 'a as defined on the impl at 26:6
+note: ...does not necessarily outlive the lifetime `'a` as defined on the impl at 26:6
   --> $DIR/issue-20831-debruijn.rs:26:6
    |
 LL | impl<'a> Publisher<'a> for MyStruct<'a> {
@@ -43,7 +43,7 @@ LL | |     }
    |
    = note: expected type `'a`
               found type `'_`
-note: the lifetime 'a as defined on the impl at 26:6...
+note: the lifetime `'a` as defined on the impl at 26:6...
   --> $DIR/issue-20831-debruijn.rs:26:6
    |
 LL | impl<'a> Publisher<'a> for MyStruct<'a> {
@@ -83,7 +83,7 @@ LL | |
 LL | |         self.sub = t;
 LL | |     }
    | |_____^
-note: ...but the lifetime must also be valid for the lifetime 'a as defined on the impl at 26:6...
+note: ...but the lifetime must also be valid for the lifetime `'a` as defined on the impl at 26:6...
   --> $DIR/issue-20831-debruijn.rs:26:6
    |
 LL | impl<'a> Publisher<'a> for MyStruct<'a> {
@@ -94,4 +94,5 @@ LL | impl<'a> Publisher<'a> for MyStruct<'a> {
 
 error: aborting due to 3 previous errors
 
-For more information about this error, try `rustc --explain E0308`.
+Some errors have detailed explanations: E0308, E0495.
+For more information about an error, try `rustc --explain E0308`.
index 21de1ea0915684a33e2a1773edd21e72dfb8c809..ecaf6faba429e160fa7f951863beb6eed6a1e775 100644 (file)
@@ -6,3 +6,4 @@ LL |     let myVar = MyMod { T: 0 };
 
 error: aborting due to previous error
 
+For more information about this error, try `rustc --explain E0574`.
index 16d003aba7cabe832f00afebd5e69975a80c54d9..ab0a18869632a986e9253cba7c7e506b9163506a 100644 (file)
@@ -1,5 +1,5 @@
 // run-pass
-#![allow(unused_imports)]
+#![allow(unused_imports, overlapping_patterns)]
 // pretty-expanded FIXME #23616
 
 use m::{START, END};
diff --git a/src/test/ui/issues/issue-2149.rs b/src/test/ui/issues/issue-2149.rs
deleted file mode 100644 (file)
index d46f0e6..0000000
+++ /dev/null
@@ -1,15 +0,0 @@
-trait VecMonad<A> {
-    fn bind<B, F>(&self, f: F) where F: FnMut(A) -> Vec<B>;
-}
-
-impl<A> VecMonad<A> for Vec<A> {
-    fn bind<B, F>(&self, mut f: F) where F: FnMut(A) -> Vec<B> {
-        let mut r = panic!();
-        for elt in self { r = r + f(*elt); }
-        //~^ ERROR E0277
-   }
-}
-fn main() {
-    ["hi"].bind(|x| [x] );
-    //~^ ERROR no method named `bind` found for type `[&str; 1]` in the current scope
-}
diff --git a/src/test/ui/issues/issue-2149.stderr b/src/test/ui/issues/issue-2149.stderr
deleted file mode 100644 (file)
index 8ce2ba0..0000000
+++ /dev/null
@@ -1,22 +0,0 @@
-error[E0277]: cannot add `std::vec::Vec<B>` to `()`
-  --> $DIR/issue-2149.rs:8:33
-   |
-LL |         for elt in self { r = r + f(*elt); }
-   |                                 ^ no implementation for `() + std::vec::Vec<B>`
-   |
-   = help: the trait `std::ops::Add<std::vec::Vec<B>>` is not implemented for `()`
-
-error[E0599]: no method named `bind` found for type `[&str; 1]` in the current scope
-  --> $DIR/issue-2149.rs:13:12
-   |
-LL |     ["hi"].bind(|x| [x] );
-   |            ^^^^ method not found in `[&str; 1]`
-   |
-   = help: items from traits can only be used if the trait is implemented and in scope
-   = note: the following trait defines an item `bind`, perhaps you need to implement it:
-           candidate #1: `VecMonad`
-
-error: aborting due to 2 previous errors
-
-Some errors have detailed explanations: E0277, E0599.
-For more information about an error, try `rustc --explain E0277`.
index 20d02a90315d0a356d99c89ad93aaf9f08227450..50fdf2d6185be93f30e276adb11f6f8ee27a01d6 100644 (file)
@@ -5,9 +5,9 @@ LL | pub struct Foo<T: Bound>(T);
    | ---------------------------- required by `Foo`
 ...
 LL | impl<T> Trait2 for Foo<T> {}
-   |         ^^^^^^ the trait `Bound` is not implemented for `T`
-   |
-   = help: consider adding a `where T: Bound` bound
+   |      -  ^^^^^^ the trait `Bound` is not implemented for `T`
+   |      |
+   |      help: consider restricting this bound: `T: Bound`
 
 error: aborting due to previous error
 
index fc5de23752b3e7e85ab8bdfe69690dee8b20c948..283a5e04a8b6f8d19199e5cb28e65719999c9731 100644 (file)
@@ -1,11 +1,12 @@
 error[E0277]: `<P as Process<'_>>::Item` is not an iterator
   --> $DIR/issue-22872.rs:20:40
    |
+LL | fn push_process<P>(process: P) where P: Process<'static> {
+   |                                                         - help: consider further restricting the associated type: `, <P as Process<'_>>::Item: std::iter::Iterator`
 LL |     let _: Box<dyn for<'b> Wrap<'b>> = Box::new(Wrapper(process));
    |                                        ^^^^^^^^^^^^^^^^^^^^^^^^^^ `<P as Process<'_>>::Item` is not an iterator
    |
    = help: the trait `std::iter::Iterator` is not implemented for `<P as Process<'_>>::Item`
-   = help: consider adding a `where <P as Process<'_>>::Item: std::iter::Iterator` bound
    = note: required because of the requirements on the impl of `for<'b> Wrap<'b>` for `Wrapper<P>`
    = note: required for the cast to the object type `dyn for<'b> Wrap<'b>`
 
index 50c09f17486ded2107765996dd4aaf853236c10f..ed065212c560ba2808c31fda35b3e690b7aebde5 100644 (file)
@@ -6,3 +6,4 @@ LL |     let _ = module { x: 0 };
 
 error: aborting due to previous error
 
+For more information about this error, try `rustc --explain E0574`.
index 0434ef9e5a958cb9158b47680f36efccd00b6823..edb06fea8ad5333db9322141654ee1cbd24ca6e5 100644 (file)
@@ -1,4 +1,6 @@
 // run-pass
+#![allow(overlapping_patterns)]
+
 fn main() {
     let x = 'a';
 
index c7909a142bec4a9ce85b3cfad3b1069bec776bff..187369263a44617563257fae3968c4b14cf173f6 100644 (file)
@@ -6,3 +6,4 @@ LL |         char{ch} => true
 
 error: aborting due to previous error
 
+For more information about this error, try `rustc --explain E0574`.
index f7227c341014621ca689d11d3f57a2bcac985f64..553041c5106c56c3102398c83d54e46b7976376b 100644 (file)
@@ -1,12 +1,13 @@
 error[E0277]: the size for values of type `T` cannot be known at compilation time
   --> $DIR/issue-27060-2.rs:3:5
    |
+LL | pub struct Bad<T: ?Sized> {
+   |                -- help: consider further restricting this bound: `T: std::marker::Sized +`
 LL |     data: T,
    |     ^^^^^^^ doesn't have a size known at compile-time
    |
    = help: the trait `std::marker::Sized` is not implemented for `T`
    = note: to learn more, visit <https://doc.rust-lang.org/book/ch19-04-advanced-types.html#dynamically-sized-types-and-the-sized-trait>
-   = help: consider adding a `where T: std::marker::Sized` bound
    = note: the last field of a packed struct may only have a dynamically sized type if it does not need drop to be run
 
 error: aborting due to previous error
index 76cc3e7b0a36e156427f65b96ef3e0b561341776..fbc72d063f37cc687a1862ecaf9446f38193852b 100644 (file)
@@ -2,11 +2,12 @@ error[E0277]: the size for values of type `Self` cannot be known at compilation
   --> $DIR/issue-27078.rs:5:12
    |
 LL |     fn foo(self) -> &'static i32 {
-   |            ^^^^ doesn't have a size known at compile-time
+   |            ^^^^                 - help: consider further restricting `Self`: `where Self: std::marker::Sized`
+   |            |
+   |            doesn't have a size known at compile-time
    |
    = help: the trait `std::marker::Sized` is not implemented for `Self`
    = note: to learn more, visit <https://doc.rust-lang.org/book/ch19-04-advanced-types.html#dynamically-sized-types-and-the-sized-trait>
-   = help: consider adding a `where Self: std::marker::Sized` bound
    = note: all local variables must have a statically known size
    = help: unsized locals are gated as an unstable feature
 
index 1d68e3bf558f3547b64bccd108d569dd96fed5bc..43f78ccf6395aa882649074f3247c5748f53a720 100644 (file)
@@ -24,3 +24,4 @@ LL |         u32 { x: 1 } => {}
 
 error: aborting due to 4 previous errors
 
+For more information about this error, try `rustc --explain E0574`.
index e03959598b8b81c8ab92059b57ad0006919a73a3..d290b176161be5637aa813c8bdf6205c84042256 100644 (file)
@@ -11,7 +11,7 @@ note: the anonymous lifetime #1 defined on the method body at 5:5...
    |
 LL |     fn select(&self) -> BufferViewHandle<R>;
    |     ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-note: ...does not necessarily outlive the lifetime 'a as defined on the trait at 3:18
+note: ...does not necessarily outlive the lifetime `'a` as defined on the trait at 3:18
   --> $DIR/issue-27942.rs:3:18
    |
 LL | pub trait Buffer<'a, R: Resources<'a>> {
@@ -25,7 +25,7 @@ LL |     fn select(&self) -> BufferViewHandle<R>;
    |
    = note: expected type `Resources<'_>`
               found type `Resources<'a>`
-note: the lifetime 'a as defined on the trait at 3:18...
+note: the lifetime `'a` as defined on the trait at 3:18...
   --> $DIR/issue-27942.rs:3:18
    |
 LL | pub trait Buffer<'a, R: Resources<'a>> {
index 5f0f202c0b27ba62305b7d1b51933f7fda3ddb7e..726844a31841f7089c36faa311042420d046f62f 100644 (file)
@@ -4,12 +4,12 @@ error[E0478]: lifetime bound not satisfied
 LL |     Foo::<'a, 'b>::xmute(u)
    |     ^^^^^^^^^^^^^^^^^^^^
    |
-note: lifetime parameter instantiated with the lifetime 'b as defined on the function body at 9:16
+note: lifetime parameter instantiated with the lifetime `'b` as defined on the function body at 9:16
   --> $DIR/issue-28848.rs:9:16
    |
 LL | pub fn foo<'a, 'b>(u: &'b ()) -> &'a () {
    |                ^^
-note: but lifetime parameter must outlive the lifetime 'a as defined on the function body at 9:12
+note: but lifetime parameter must outlive the lifetime `'a` as defined on the function body at 9:12
   --> $DIR/issue-28848.rs:9:12
    |
 LL | pub fn foo<'a, 'b>(u: &'b ()) -> &'a () {
index 5faf0374210d878e3c3c18e8ae7c4e9e43296bd0..e3692934b62adafc2169692c18788cd63d303c65 100644 (file)
@@ -9,3 +9,4 @@ LL |     _: foo::Foo::FooV
 
 error: aborting due to previous error
 
+For more information about this error, try `rustc --explain E0573`.
index 9ec26b024bce24f9cd8e3c2c4abfcc4c810441c6..d3c8e876b8a8816b778272b0d8f21e6a168eac3f 100644 (file)
@@ -12,6 +12,13 @@ LL | | }
 ...
 LL |       let Thing::Foo(y) = Thing::Foo(1);
    |           ^^^^^^^^^^^^^ patterns `Bar` and `Baz` not covered
+   |
+   = note: `let` bindings require an "irrefutable pattern", like a `struct` or an `enum` with only one variant
+   = note: for more information, visit https://doc.rust-lang.org/book/ch18-02-refutability.html
+help: you might want to use `if let` to ignore the variant that isn't matched
+   |
+LL |     if let Thing::Foo(y) = Thing::Foo(1) { /* */ }
+   |
 
 error: aborting due to previous error
 
index 28555a15afae1f487f699310f1eed9bc769e3ebd..91814d94963765690bbb19c96918c1bb3caca716 100644 (file)
@@ -63,9 +63,9 @@ LL | fn qux() -> Some {
    |             ^^^^
    |             |
    |             not a type
-   |             help: try using the variant's enum: `Option`
+   |             help: try using the variant's enum: `std::option::Option`
 
 error: aborting due to 7 previous errors
 
-Some errors have detailed explanations: E0412, E0425.
+Some errors have detailed explanations: E0412, E0425, E0573.
 For more information about an error, try `rustc --explain E0412`.
index 05c8ce4c3f11e5d7908cebe237d007b9ff3f2143..b98bc572a397c6c87cbf8b6ed148beb919fe5b1f 100644 (file)
@@ -5,7 +5,7 @@ LL |         self.boom();
    |         -----^^^^
    |         |    |
    |         |    this is an associated function, not a method
-   |         help: use associated function syntax instead: `&Obj::boom`
+   |         help: use associated function syntax instead: `Obj::boom`
    |
    = note: found the following associated functions; to be used as methods, functions must have a `self` parameter
 note: the candidate is defined in an impl for the type `Obj`
index 9a5f659da160495b61097ea3b69767e5e9ad2811..8e75d7be066a3f2c2ccac3c2194a4d823f9e8549 100644 (file)
@@ -21,7 +21,7 @@ LL | |     {
 LL | |         Some(&mut self.0)
 LL | |     }
    | |_____^
-note: ...does not necessarily outlive the lifetime 'a as defined on the impl at 3:6
+note: ...does not necessarily outlive the lifetime `'a` as defined on the impl at 3:6
   --> $DIR/issue-37884.rs:3:6
    |
 LL | impl<'a, T: 'a> Iterator for RepeatMut<'a, T> {
index dbd204ec299d7f41584bd3fc829c32e139656f9c..0687fc940dec1ce9b5642ae9aa055a212d92f613 100644 (file)
@@ -4,7 +4,6 @@ error[E0277]: the trait bound `<Col as Expression>::SqlType: NotNull` is not sat
 LL | #[derive(Debug, Copy, Clone)]
    |                 ^^^^ the trait `NotNull` is not implemented for `<Col as Expression>::SqlType`
    |
-   = help: consider adding a `where <Col as Expression>::SqlType: NotNull` bound
    = note: required because of the requirements on the impl of `IntoNullable` for `<Col as Expression>::SqlType`
 
 error: aborting due to previous error
index bfdc4272fb3089d1550ca3459f5b16bf524a72f2..6688203147eaf2ea38323f9dd07304e66f7119d2 100644 (file)
@@ -2,11 +2,12 @@ error[E0277]: the size for values of type `<Self as std::ops::Deref>::Target` ca
   --> $DIR/issue-42312.rs:4:29
    |
 LL |     fn baz(_: Self::Target) where Self: Deref {}
-   |                             ^ doesn't have a size known at compile-time
+   |                             ^                - help: consider further restricting the associated type: `, <Self as std::ops::Deref>::Target: std::marker::Sized`
+   |                             |
+   |                             doesn't have a size known at compile-time
    |
    = help: the trait `std::marker::Sized` is not implemented for `<Self as std::ops::Deref>::Target`
    = note: to learn more, visit <https://doc.rust-lang.org/book/ch19-04-advanced-types.html#dynamically-sized-types-and-the-sized-trait>
-   = help: consider adding a `where <Self as std::ops::Deref>::Target: std::marker::Sized` bound
    = note: all function arguments must have a statically known size
    = help: unsized locals are gated as an unstable feature
 
index fc05d280693b85b2c1591d9a1f4abb3ed78edfc7..e91e53499ce6c1f83218c9a96e24deb4449c5a05 100644 (file)
@@ -2,9 +2,9 @@ error[E0277]: the trait bound `T: std::marker::Copy` is not satisfied
   --> $DIR/issue-43784-associated-type.rs:13:9
    |
 LL | impl<T> Complete for T {
-   |         ^^^^^^^^ the trait `std::marker::Copy` is not implemented for `T`
-   |
-   = help: consider adding a `where T: std::marker::Copy` bound
+   |      -  ^^^^^^^^ the trait `std::marker::Copy` is not implemented for `T`
+   |      |
+   |      help: consider restricting this bound: `T: std::marker::Copy`
 
 error: aborting due to previous error
 
index 4c423f2e77fe8e2c8a0b4b1cccd6baf7515dfd0f..5ac32041bce481291119238c5dc3e2c6421c4d26 100644 (file)
@@ -2,9 +2,9 @@ error[E0277]: the trait bound `T: std::marker::Copy` is not satisfied
   --> $DIR/issue-43784-supertrait.rs:8:9
    |
 LL | impl<T> Complete for T {}
-   |         ^^^^^^^^ the trait `std::marker::Copy` is not implemented for `T`
-   |
-   = help: consider adding a `where T: std::marker::Copy` bound
+   |      -  ^^^^^^^^ the trait `std::marker::Copy` is not implemented for `T`
+   |      |
+   |      help: consider restricting this bound: `T: std::marker::Copy`
 
 error: aborting due to previous error
 
diff --git a/src/test/ui/issues/issue-44402.rs b/src/test/ui/issues/issue-44402.rs
deleted file mode 100644 (file)
index 29b7eb5..0000000
+++ /dev/null
@@ -1,32 +0,0 @@
-// build-pass (FIXME(62277): could be check-pass?)
-#![allow(dead_code)]
-#![feature(never_type)]
-#![feature(exhaustive_patterns)]
-
-// Regression test for inhabitedness check. The old
-// cache used to cause us to incorrectly decide
-// that `test_b` was invalid.
-
-struct Foo {
-    field1: !,
-    field2: Option<&'static Bar>,
-}
-
-struct Bar {
-    field1: &'static Foo
-}
-
-fn test_a() {
-    let x: Option<Foo> = None;
-    match x { None => () }
-}
-
-fn test_b() {
-    let x: Option<Bar> = None;
-    match x {
-        Some(_) => (),
-        None => ()
-    }
-}
-
-fn main() { }
index 812a50000d1c293453b4ea239fb8b6764dbaee93..c7e9d71700e6da595d6c7ed5e1a58e143a432dff 100644 (file)
@@ -2,7 +2,7 @@ error[E0422]: cannot find struct, variant or union type `TyUInt` in this scope
   --> $DIR/issue-46332.rs:9:5
    |
 LL |     TyUInt {};
-   |     ^^^^^^ help: a struct with a similar name exists: `TyUint`
+   |     ^^^^^^ help: a struct with a similar name exists (notice the capitalization): `TyUint`
 
 error: aborting due to previous error
 
diff --git a/src/test/ui/issues/issue-47486.rs b/src/test/ui/issues/issue-47486.rs
new file mode 100644 (file)
index 0000000..d686f02
--- /dev/null
@@ -0,0 +1,4 @@
+fn main() {
+    () < std::mem::size_of::<_>(); //~ ERROR: mismatched types
+    [0u8; std::mem::size_of::<_>()]; //~ ERROR: type annotations needed
+}
diff --git a/src/test/ui/issues/issue-47486.stderr b/src/test/ui/issues/issue-47486.stderr
new file mode 100644 (file)
index 0000000..af6e301
--- /dev/null
@@ -0,0 +1,19 @@
+error[E0308]: mismatched types
+  --> $DIR/issue-47486.rs:2:10
+   |
+LL |     () < std::mem::size_of::<_>();
+   |          ^^^^^^^^^^^^^^^^^^^^^^^^ expected (), found usize
+   |
+   = note: expected type `()`
+              found type `usize`
+
+error[E0282]: type annotations needed
+  --> $DIR/issue-47486.rs:3:11
+   |
+LL |     [0u8; std::mem::size_of::<_>()];
+   |           ^^^^^^^^^^^^^^^^^^^^^^ cannot infer type
+
+error: aborting due to 2 previous errors
+
+Some errors have detailed explanations: E0282, E0308.
+For more information about an error, try `rustc --explain E0282`.
index 67feb3ff6aec3a98457c1b87efba2fdfb27e706b..24c61425b8e29e2b7cce1e00efff8fd10eb616b1 100644 (file)
@@ -1,5 +1,3 @@
-#![feature(inner_deref)]
-
 fn main() {
     let _result = &Some(42).as_deref();
 //~^ ERROR no method named `as_deref` found for type `std::option::Option<{integer}>`
index 345f91437b82725ea92e7b25d0ada275d9938db7..0eb7bf0247565d5a4bfd61ff34811520d40e904e 100644 (file)
@@ -1,5 +1,5 @@
 error[E0599]: no method named `as_deref` found for type `std::option::Option<{integer}>` in the current scope
-  --> $DIR/option-as_deref.rs:4:29
+  --> $DIR/option-as_deref.rs:2:29
    |
 LL |     let _result = &Some(42).as_deref();
    |                             ^^^^^^^^ help: there is a method with a similar name: `as_ref`
index 56aead8d0e00d2bcda6c445dbad592e37645b6d2..67ad73f58477396e6801f47bf01f1860c6fa3e8f 100644 (file)
@@ -1,5 +1,3 @@
-#![feature(inner_deref)]
-
 fn main() {
     let _result = &mut Some(42).as_deref_mut();
 //~^ ERROR no method named `as_deref_mut` found for type `std::option::Option<{integer}>`
index 29fd15fb396e99143e6340240ca81669035de956..845ddb52319c722a043faa2f54e6e862ca0b3e15 100644 (file)
@@ -1,5 +1,5 @@
 error[E0599]: no method named `as_deref_mut` found for type `std::option::Option<{integer}>` in the current scope
-  --> $DIR/option-as_deref_mut.rs:4:33
+  --> $DIR/option-as_deref_mut.rs:2:33
    |
 LL |     let _result = &mut Some(42).as_deref_mut();
    |                                 ^^^^^^^^^^^^ method not found in `std::option::Option<{integer}>`
index b4df10efc5d8da322c755f7b5107d8bac7ea8b3c..b79a5ddf3e1bf601ea4c96976270225b56aa8e78 100644 (file)
@@ -4,7 +4,7 @@ error[E0495]: cannot infer an appropriate lifetime due to conflicting requiremen
 LL |     match (&t,) {
    |           ^^^^^
    |
-note: first, the lifetime cannot outlive the lifetime 'a as defined on the function body at 1:23...
+note: first, the lifetime cannot outlive the lifetime `'a` as defined on the function body at 1:23...
   --> $DIR/issue-52213.rs:1:23
    |
 LL | fn transmute_lifetime<'a, 'b, T>(t: &'a (T,)) -> &'b T {
@@ -12,7 +12,7 @@ LL | fn transmute_lifetime<'a, 'b, T>(t: &'a (T,)) -> &'b T {
    = note: ...so that the types are compatible:
            expected (&&(T,),)
               found (&&'a (T,),)
-note: but, the lifetime must be valid for the lifetime 'b as defined on the function body at 1:27...
+note: but, the lifetime must be valid for the lifetime `'b` as defined on the function body at 1:27...
   --> $DIR/issue-52213.rs:1:27
    |
 LL | fn transmute_lifetime<'a, 'b, T>(t: &'a (T,)) -> &'b T {
@@ -25,3 +25,4 @@ LL |         ((u,),) => u,
 
 error: aborting due to previous error
 
+For more information about this error, try `rustc --explain E0495`.
diff --git a/src/test/ui/issues/issue-52262.rs b/src/test/ui/issues/issue-52262.rs
new file mode 100644 (file)
index 0000000..2195b89
--- /dev/null
@@ -0,0 +1,25 @@
+// compile-flags:-Ztreat-err-as-bug=5
+#[derive(Debug)]
+enum MyError {
+    NotFound { key: Vec<u8> },
+    Err41,
+}
+
+impl std::error::Error for MyError {}
+
+impl std::fmt::Display for MyError {
+    fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
+        match self {
+            MyError::NotFound { key } => write!(
+                f,
+                "unknown error with code {}.",
+                String::from_utf8(*key).unwrap()
+                //~^ ERROR cannot move out of `*key` which is behind a shared reference
+            ),
+            MyError::Err41 => write!(f, "Sit by a lake"),
+        }
+    }
+}
+fn main() {
+    println!("Hello, world!");
+}
diff --git a/src/test/ui/issues/issue-52262.stderr b/src/test/ui/issues/issue-52262.stderr
new file mode 100644 (file)
index 0000000..7312976
--- /dev/null
@@ -0,0 +1,9 @@
+error[E0507]: cannot move out of `*key` which is behind a shared reference
+  --> $DIR/issue-52262.rs:16:35
+   |
+LL |                 String::from_utf8(*key).unwrap()
+   |                                   ^^^^ move occurs because `*key` has type `std::vec::Vec<u8>`, which does not implement the `Copy` trait
+
+error: aborting due to previous error
+
+For more information about this error, try `rustc --explain E0507`.
diff --git a/src/test/ui/issues/issue-53912.rs b/src/test/ui/issues/issue-53912.rs
new file mode 100644 (file)
index 0000000..4718aea
--- /dev/null
@@ -0,0 +1,37 @@
+// build-pass (FIXME(62277): could be check-pass?)
+
+// This test is the same code as in ui/symbol-names/issue-60925.rs but this checks that the
+// reproduction compiles successfully and doesn't segfault, whereas that test just checks that the
+// symbol mangling fix produces the correct result.
+
+fn dummy() {}
+
+mod llvm {
+    pub(crate) struct Foo;
+}
+mod foo {
+    pub(crate) struct Foo<T>(T);
+
+    impl Foo<::llvm::Foo> {
+        pub(crate) fn foo() {
+            for _ in 0..0 {
+                for _ in &[::dummy()] {
+                    ::dummy();
+                    ::dummy();
+                    ::dummy();
+                }
+            }
+        }
+    }
+
+    pub(crate) fn foo() {
+        Foo::foo();
+        Foo::foo();
+    }
+}
+
+pub fn foo() {
+    foo::foo();
+}
+
+fn main() {}
index 68d838054776e7675e9aa0fd91e2bea74b9ec09f..e7221e2cbb1e134463c78a65c114b36d227bfb43 100644 (file)
@@ -1,5 +1,7 @@
 fn main() {
     [1][0u64 as usize];
     [1][1.5 as usize]; //~ ERROR index out of bounds
+    //~| ERROR this expression will panic at runtime
     [1][1u64 as usize]; //~ ERROR index out of bounds
+    //~| ERROR this expression will panic at runtime
 }
index fa77bd6fd77973777265f9315d26ac86907eb4fc..79320ef4f31c788bdf58fc00895b38811fea695c 100644 (file)
@@ -6,11 +6,23 @@ LL |     [1][1.5 as usize];
    |
    = note: `#[deny(const_err)]` on by default
 
+error: this expression will panic at runtime
+  --> $DIR/issue-54348.rs:3:5
+   |
+LL |     [1][1.5 as usize];
+   |     ^^^^^^^^^^^^^^^^^ index out of bounds: the len is 1 but the index is 1
+
 error: index out of bounds: the len is 1 but the index is 1
-  --> $DIR/issue-54348.rs:4:5
+  --> $DIR/issue-54348.rs:5:5
    |
 LL |     [1][1u64 as usize];
    |     ^^^^^^^^^^^^^^^^^^
 
-error: aborting due to 2 previous errors
+error: this expression will panic at runtime
+  --> $DIR/issue-54348.rs:5:5
+   |
+LL |     [1][1u64 as usize];
+   |     ^^^^^^^^^^^^^^^^^^ index out of bounds: the len is 1 but the index is 1
+
+error: aborting due to 4 previous errors
 
index 9e67e5e125f625567e0b11e06cc310852b8afe59..7b910f5e3e5a654cc88ea4fa129185cfaa66cf4e 100644 (file)
@@ -4,7 +4,7 @@ error[E0495]: cannot infer an appropriate lifetime due to conflicting requiremen
 LL |         Box::new(self.out_edges(u).map(|e| e.target()))
    |         ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
    |
-note: first, the lifetime cannot outlive the lifetime 'a as defined on the trait at 5:17...
+note: first, the lifetime cannot outlive the lifetime `'a` as defined on the trait at 5:17...
   --> $DIR/issue-55796.rs:5:17
    |
 LL | pub trait Graph<'a> {
@@ -25,7 +25,7 @@ error[E0495]: cannot infer an appropriate lifetime due to conflicting requiremen
 LL |         Box::new(self.in_edges(u).map(|e| e.target()))
    |         ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
    |
-note: first, the lifetime cannot outlive the lifetime 'a as defined on the trait at 5:17...
+note: first, the lifetime cannot outlive the lifetime `'a` as defined on the trait at 5:17...
   --> $DIR/issue-55796.rs:5:17
    |
 LL | pub trait Graph<'a> {
@@ -42,3 +42,4 @@ LL |         Box::new(self.in_edges(u).map(|e| e.target()))
 
 error: aborting due to 2 previous errors
 
+For more information about this error, try `rustc --explain E0495`.
diff --git a/src/test/ui/issues/issue-57271.rs b/src/test/ui/issues/issue-57271.rs
new file mode 100644 (file)
index 0000000..9940fec
--- /dev/null
@@ -0,0 +1,24 @@
+// aux-build:issue-57271-lib.rs
+
+extern crate issue_57271_lib;
+
+use issue_57271_lib::BaseType;
+
+pub enum ObjectType { //~ ERROR recursive type `ObjectType` has infinite size
+    Class(ClassTypeSignature),
+    Array(TypeSignature),
+    TypeVariable(()),
+}
+
+pub struct ClassTypeSignature {
+    pub package: (),
+    pub class: (),
+    pub inner: (),
+}
+
+pub enum TypeSignature { //~ ERROR recursive type `TypeSignature` has infinite size
+    Base(BaseType),
+    Object(ObjectType),
+}
+
+fn main() {}
diff --git a/src/test/ui/issues/issue-57271.stderr b/src/test/ui/issues/issue-57271.stderr
new file mode 100644 (file)
index 0000000..4f16462
--- /dev/null
@@ -0,0 +1,25 @@
+error[E0072]: recursive type `ObjectType` has infinite size
+  --> $DIR/issue-57271.rs:7:1
+   |
+LL | pub enum ObjectType {
+   | ^^^^^^^^^^^^^^^^^^^ recursive type has infinite size
+LL |     Class(ClassTypeSignature),
+LL |     Array(TypeSignature),
+   |           ------------- recursive without indirection
+   |
+   = help: insert indirection (e.g., a `Box`, `Rc`, or `&`) at some point to make `ObjectType` representable
+
+error[E0072]: recursive type `TypeSignature` has infinite size
+  --> $DIR/issue-57271.rs:19:1
+   |
+LL | pub enum TypeSignature {
+   | ^^^^^^^^^^^^^^^^^^^^^^ recursive type has infinite size
+LL |     Base(BaseType),
+LL |     Object(ObjectType),
+   |            ---------- recursive without indirection
+   |
+   = help: insert indirection (e.g., a `Box`, `Rc`, or `&`) at some point to make `TypeSignature` representable
+
+error: aborting due to 2 previous errors
+
+For more information about this error, try `rustc --explain E0072`.
diff --git a/src/test/ui/issues/issue-57399-self-return-impl-trait.rs b/src/test/ui/issues/issue-57399-self-return-impl-trait.rs
new file mode 100644 (file)
index 0000000..23d6826
--- /dev/null
@@ -0,0 +1,22 @@
+// run-pass
+
+trait T {
+    type T;
+}
+
+impl T for i32 {
+    type T = u32;
+}
+
+struct S<A> {
+    a: A,
+}
+
+
+impl From<u32> for S<<i32 as T>::T> {
+    fn from(a: u32) -> Self {
+        Self { a }
+    }
+}
+
+fn main() {}
diff --git a/src/test/ui/issues/issue-57399-self-return-impl-trait.stderr b/src/test/ui/issues/issue-57399-self-return-impl-trait.stderr
new file mode 100644 (file)
index 0000000..5c71410
--- /dev/null
@@ -0,0 +1,8 @@
+warning: field is never used: `a`
+  --> $DIR/issue-57399-self-return-impl-trait.rs:12:5
+   |
+LL |     a: A,
+   |     ^^^^
+   |
+   = note: `#[warn(dead_code)]` on by default
+
diff --git a/src/test/ui/issues/issue-59020.rs b/src/test/ui/issues/issue-59020.rs
new file mode 100644 (file)
index 0000000..e754493
--- /dev/null
@@ -0,0 +1,28 @@
+// edition:2018
+// run-pass
+// ignore-emscripten no threads support
+// ignore-sgx no thread sleep support
+
+use std::thread;
+use std::time::Duration;
+
+fn main() {
+    let t1 = thread::spawn(|| {
+        let sleep = Duration::new(0,100_000);
+        for _ in 0..100 {
+            println!("Parking1");
+            thread::park_timeout(sleep);
+        }
+    });
+
+    let t2 = thread::spawn(|| {
+        let sleep = Duration::new(0,100_000);
+        for _ in 0..100 {
+            println!("Parking2");
+            thread::park_timeout(sleep);
+        }
+    });
+
+    t1.join().expect("Couldn't join thread 1");
+    t2.join().expect("Couldn't join thread 2");
+}
diff --git a/src/test/ui/issues/issue-64792-bad-unicode-ctor.rs b/src/test/ui/issues/issue-64792-bad-unicode-ctor.rs
new file mode 100644 (file)
index 0000000..7bce579
--- /dev/null
@@ -0,0 +1,5 @@
+struct X {}
+
+const Y: X = X("ö"); //~ ERROR expected function, found struct `X`
+
+fn main() {}
diff --git a/src/test/ui/issues/issue-64792-bad-unicode-ctor.stderr b/src/test/ui/issues/issue-64792-bad-unicode-ctor.stderr
new file mode 100644 (file)
index 0000000..ae9025b
--- /dev/null
@@ -0,0 +1,15 @@
+error[E0423]: expected function, found struct `X`
+  --> $DIR/issue-64792-bad-unicode-ctor.rs:3:14
+   |
+LL | struct X {}
+   | ----------- `X` defined here
+LL | 
+LL | const Y: X = X("ö");
+   |              ^
+   |              |
+   |              did you mean `X { /* fields */ }`?
+   |              help: a constant with a similar name exists: `Y`
+
+error: aborting due to previous error
+
+For more information about this error, try `rustc --explain E0423`.
diff --git a/src/test/ui/issues/issue-65284-suggest-generic-trait-bound.rs b/src/test/ui/issues/issue-65284-suggest-generic-trait-bound.rs
new file mode 100644 (file)
index 0000000..e0eaafd
--- /dev/null
@@ -0,0 +1,11 @@
+trait Foo {
+    fn foo(&self);
+}
+
+trait Bar {}
+
+fn do_stuff<T : Bar>(t : T) {
+    t.foo() //~ ERROR no method named `foo` found for type `T` in the current scope
+}
+
+fn main() {}
diff --git a/src/test/ui/issues/issue-65284-suggest-generic-trait-bound.stderr b/src/test/ui/issues/issue-65284-suggest-generic-trait-bound.stderr
new file mode 100644 (file)
index 0000000..24bf60a
--- /dev/null
@@ -0,0 +1,15 @@
+error[E0599]: no method named `foo` found for type `T` in the current scope
+  --> $DIR/issue-65284-suggest-generic-trait-bound.rs:8:7
+   |
+LL |     t.foo()
+   |       ^^^ method not found in `T`
+   |
+   = help: items from traits can only be used if the type parameter is bounded by the trait
+help: the following trait defines an item `foo`, perhaps you need to restrict type parameter `T` with it:
+   |
+LL | fn do_stuff<T: Foo + Bar>(t : T) {
+   |             ^^^^^^^^
+
+error: aborting due to previous error
+
+For more information about this error, try `rustc --explain E0599`.
index 25d0e74187f75f68e2e25bef6b7582259685daca..82efa83990553e16cc68326b257343b6710af87d 100644 (file)
@@ -1,42 +1,50 @@
 error[E0277]: `T` cannot be sent between threads safely
   --> $DIR/kindck-impl-type-params.rs:18:13
    |
+LL | fn f<T>(val: T) {
+   |      - help: consider restricting this bound: `T: std::marker::Send`
+LL |     let t: S<T> = S(marker::PhantomData);
 LL |     let a = &t as &dyn Gettable<T>;
    |             ^^ `T` cannot be sent between threads safely
    |
    = help: the trait `std::marker::Send` is not implemented for `T`
-   = help: consider adding a `where T: std::marker::Send` bound
    = note: required because of the requirements on the impl of `Gettable<T>` for `S<T>`
    = note: required for the cast to the object type `dyn Gettable<T>`
 
 error[E0277]: the trait bound `T: std::marker::Copy` is not satisfied
   --> $DIR/kindck-impl-type-params.rs:18:13
    |
+LL | fn f<T>(val: T) {
+   |      - help: consider restricting this bound: `T: std::marker::Copy`
+LL |     let t: S<T> = S(marker::PhantomData);
 LL |     let a = &t as &dyn Gettable<T>;
    |             ^^ the trait `std::marker::Copy` is not implemented for `T`
    |
-   = help: consider adding a `where T: std::marker::Copy` bound
    = note: required because of the requirements on the impl of `Gettable<T>` for `S<T>`
    = note: required for the cast to the object type `dyn Gettable<T>`
 
 error[E0277]: `T` cannot be sent between threads safely
   --> $DIR/kindck-impl-type-params.rs:25:31
    |
+LL | fn g<T>(val: T) {
+   |      - help: consider restricting this bound: `T: std::marker::Send`
+LL |     let t: S<T> = S(marker::PhantomData);
 LL |     let a: &dyn Gettable<T> = &t;
    |                               ^^ `T` cannot be sent between threads safely
    |
    = help: the trait `std::marker::Send` is not implemented for `T`
-   = help: consider adding a `where T: std::marker::Send` bound
    = note: required because of the requirements on the impl of `Gettable<T>` for `S<T>`
    = note: required for the cast to the object type `dyn Gettable<T>`
 
 error[E0277]: the trait bound `T: std::marker::Copy` is not satisfied
   --> $DIR/kindck-impl-type-params.rs:25:31
    |
+LL | fn g<T>(val: T) {
+   |      - help: consider restricting this bound: `T: std::marker::Copy`
+LL |     let t: S<T> = S(marker::PhantomData);
 LL |     let a: &dyn Gettable<T> = &t;
    |                               ^^ the trait `std::marker::Copy` is not implemented for `T`
    |
-   = help: consider adding a `where T: std::marker::Copy` bound
    = note: required because of the requirements on the impl of `Gettable<T>` for `S<T>`
    = note: required for the cast to the object type `dyn Gettable<T>`
 
index e6f7088bd46355b0b17fc73fb9e657a159de59ad..777a553c2a58a7900cdaf40da1ec43fafc7e7b4c 100644 (file)
@@ -1,42 +1,50 @@
 error[E0277]: `T` cannot be sent between threads safely
   --> $DIR/kindck-impl-type-params.rs:18:13
    |
+LL | fn f<T>(val: T) {
+   |      - help: consider restricting this bound: `T: std::marker::Send`
+LL |     let t: S<T> = S(marker::PhantomData);
 LL |     let a = &t as &dyn Gettable<T>;
    |             ^^ `T` cannot be sent between threads safely
    |
    = help: the trait `std::marker::Send` is not implemented for `T`
-   = help: consider adding a `where T: std::marker::Send` bound
    = note: required because of the requirements on the impl of `Gettable<T>` for `S<T>`
    = note: required for the cast to the object type `dyn Gettable<T>`
 
 error[E0277]: the trait bound `T: std::marker::Copy` is not satisfied
   --> $DIR/kindck-impl-type-params.rs:18:13
    |
+LL | fn f<T>(val: T) {
+   |      - help: consider restricting this bound: `T: std::marker::Copy`
+LL |     let t: S<T> = S(marker::PhantomData);
 LL |     let a = &t as &dyn Gettable<T>;
    |             ^^ the trait `std::marker::Copy` is not implemented for `T`
    |
-   = help: consider adding a `where T: std::marker::Copy` bound
    = note: required because of the requirements on the impl of `Gettable<T>` for `S<T>`
    = note: required for the cast to the object type `dyn Gettable<T>`
 
 error[E0277]: `T` cannot be sent between threads safely
   --> $DIR/kindck-impl-type-params.rs:25:31
    |
+LL | fn g<T>(val: T) {
+   |      - help: consider restricting this bound: `T: std::marker::Send`
+LL |     let t: S<T> = S(marker::PhantomData);
 LL |     let a: &dyn Gettable<T> = &t;
    |                               ^^ `T` cannot be sent between threads safely
    |
    = help: the trait `std::marker::Send` is not implemented for `T`
-   = help: consider adding a `where T: std::marker::Send` bound
    = note: required because of the requirements on the impl of `Gettable<T>` for `S<T>`
    = note: required for the cast to the object type `dyn Gettable<T>`
 
 error[E0277]: the trait bound `T: std::marker::Copy` is not satisfied
   --> $DIR/kindck-impl-type-params.rs:25:31
    |
+LL | fn g<T>(val: T) {
+   |      - help: consider restricting this bound: `T: std::marker::Copy`
+LL |     let t: S<T> = S(marker::PhantomData);
 LL |     let a: &dyn Gettable<T> = &t;
    |                               ^^ the trait `std::marker::Copy` is not implemented for `T`
    |
-   = help: consider adding a `where T: std::marker::Copy` bound
    = note: required because of the requirements on the impl of `Gettable<T>` for `S<T>`
    = note: required for the cast to the object type `dyn Gettable<T>`
 
index e990f705af3f92688457a3e24e950f83c911b291..38c7393e136a604985c3d661da89df17129918fc 100644 (file)
@@ -16,4 +16,5 @@ LL |     Foo::f();
 
 error: aborting due to 2 previous errors
 
-For more information about this error, try `rustc --explain E0599`.
+Some errors have detailed explanations: E0574, E0599.
+For more information about an error, try `rustc --explain E0574`.
index 35d63c172765112e11dc5578d9e559bc4d45269b..b4011990b68e6343661906ba011b390e4dd30036 100644 (file)
@@ -6,7 +6,7 @@ LL |     ref_obj(x)
    |
    = note: expected type `&std::boxed::Box<(dyn std::ops::Fn() + 'static)>`
               found type `&std::boxed::Box<(dyn std::ops::Fn() + 'a)>`
-note: the lifetime 'a as defined on the function body at 32:10...
+note: the lifetime `'a` as defined on the function body at 32:10...
   --> $DIR/lifetime-bound-will-change-warning.rs:32:10
    |
 LL | fn test2<'a>(x: &'a Box<dyn Fn() + 'a>) {
@@ -21,7 +21,7 @@ LL |     lib::ref_obj(x)
    |
    = note: expected type `&std::boxed::Box<(dyn std::ops::Fn() + 'static)>`
               found type `&std::boxed::Box<(dyn std::ops::Fn() + 'a)>`
-note: the lifetime 'a as defined on the function body at 37:12...
+note: the lifetime `'a` as defined on the function body at 37:12...
   --> $DIR/lifetime-bound-will-change-warning.rs:37:12
    |
 LL | fn test2cc<'a>(x: &'a Box<dyn Fn() + 'a>) {
index 69b627355b8019198c33399011e4eb87dcc5454f..2c213daddd7527a74470d02b5a2531fad6f34241 100644 (file)
@@ -8,7 +8,7 @@ fn main() {
       let n = 1u8 << (4+3);
       let n = 1u8 << (4+4); //~ ERROR: attempt to shift left with overflow
       let n = 1i64 >> [63][0];
-      let n = 1i64 >> [64][0]; // should be linting, needs to wait for const propagation
+      let n = 1i64 >> [64][0]; //~ ERROR: attempt to shift right with overflow
 
       #[cfg(target_pointer_width = "32")]
       const BITS: usize = 32;
index cb96982a78930c01cd6084ffd4129ad78d5e3388..d9c76d233d03eb01186e402865639ccd49d85107 100644 (file)
@@ -10,6 +10,12 @@ note: lint level defined here
 LL | #![deny(exceeding_bitshifts, const_err)]
    |         ^^^^^^^^^^^^^^^^^^^
 
+error: attempt to shift right with overflow
+  --> $DIR/lint-exceeding-bitshifts2.rs:11:15
+   |
+LL |       let n = 1i64 >> [64][0];
+   |               ^^^^^^^^^^^^^^^
+
 error: attempt to shift left with overflow
   --> $DIR/lint-exceeding-bitshifts2.rs:17:15
    |
@@ -22,5 +28,5 @@ error: attempt to shift left with overflow
 LL |       let n = 1_usize << BITS;
    |               ^^^^^^^^^^^^^^^
 
-error: aborting due to 3 previous errors
+error: aborting due to 4 previous errors
 
index 432a16debc698f3ac4d8e945dfca62dcff0b76b1..177f8c8fe9b63d9fc0d1ebef802e65e6962722c6 100644 (file)
@@ -14,25 +14,25 @@ error: type `foo` should have an upper camel case name
   --> $DIR/lint-non-camel-case-types.rs:7:8
    |
 LL | struct foo {
-   |        ^^^ help: convert the identifier to upper camel case: `Foo`
+   |        ^^^ help: convert the identifier to upper camel case (notice the capitalization): `Foo`
 
 error: type `foo2` should have an upper camel case name
   --> $DIR/lint-non-camel-case-types.rs:11:6
    |
 LL | enum foo2 {
-   |      ^^^^ help: convert the identifier to upper camel case: `Foo2`
+   |      ^^^^ help: convert the identifier to upper camel case (notice the capitalization): `Foo2`
 
 error: type `foo3` should have an upper camel case name
   --> $DIR/lint-non-camel-case-types.rs:15:8
    |
 LL | struct foo3 {
-   |        ^^^^ help: convert the identifier to upper camel case: `Foo3`
+   |        ^^^^ help: convert the identifier to upper camel case (notice the capitalization): `Foo3`
 
 error: type `foo4` should have an upper camel case name
   --> $DIR/lint-non-camel-case-types.rs:19:6
    |
 LL | type foo4 = isize;
-   |      ^^^^ help: convert the identifier to upper camel case: `Foo4`
+   |      ^^^^ help: convert the identifier to upper camel case (notice the capitalization): `Foo4`
 
 error: variant `bar` should have an upper camel case name
   --> $DIR/lint-non-camel-case-types.rs:22:5
@@ -44,7 +44,7 @@ error: trait `foo6` should have an upper camel case name
   --> $DIR/lint-non-camel-case-types.rs:25:7
    |
 LL | trait foo6 {
-   |       ^^^^ help: convert the identifier to upper camel case: `Foo6`
+   |       ^^^^ help: convert the identifier to upper camel case (notice the capitalization): `Foo6`
 
 error: type parameter `ty` should have an upper camel case name
   --> $DIR/lint-non-camel-case-types.rs:29:6
index 49cbfa941261055d30a2509083bdd64801d606f1..c5eca89debb82c3b49ad7f26adbbe7722e81f162 100644 (file)
@@ -38,7 +38,7 @@ error: trait method `a_b_C` should have a snake case name
   --> $DIR/lint-non-snake-case-functions.rs:25:8
    |
 LL |     fn a_b_C(&self) {}
-   |        ^^^^^ help: convert the identifier to snake case: `a_b_c`
+   |        ^^^^^ help: convert the identifier to snake case (notice the capitalization): `a_b_c`
 
 error: trait method `something__else` should have a snake case name
   --> $DIR/lint-non-snake-case-functions.rs:28:8
@@ -50,13 +50,13 @@ error: function `Cookie` should have a snake case name
   --> $DIR/lint-non-snake-case-functions.rs:38:4
    |
 LL | fn Cookie() {}
-   |    ^^^^^^ help: convert the identifier to snake case: `cookie`
+   |    ^^^^^^ help: convert the identifier to snake case (notice the capitalization): `cookie`
 
 error: function `bi_S_Cuit` should have a snake case name
   --> $DIR/lint-non-snake-case-functions.rs:41:8
    |
 LL | pub fn bi_S_Cuit() {}
-   |        ^^^^^^^^^ help: convert the identifier to snake case: `bi_s_cuit`
+   |        ^^^^^^^^^ help: convert the identifier to snake case (notice the capitalization): `bi_s_cuit`
 
 error: aborting due to 9 previous errors
 
index 8b477276efc9315fa63e4407419c027df08bab79..ceb83d08f27779d61a971ec786b74c31516a62c6 100644 (file)
@@ -2,7 +2,7 @@ error: static variable `foo` should have an upper case name
   --> $DIR/lint-non-uppercase-statics.rs:4:8
    |
 LL | static foo: isize = 1;
-   |        ^^^ help: convert the identifier to upper case: `FOO`
+   |        ^^^ help: convert the identifier to upper case (notice the capitalization): `FOO`
    |
 note: lint level defined here
   --> $DIR/lint-non-uppercase-statics.rs:1:11
index 9ea3795f89e6ca3b0e93ff07880fe1e9f5e416cc..f614d5d71f88cdc0afcfa02e7082e02cf10873b6 100644 (file)
@@ -21,7 +21,7 @@ error: structure field `X` should have a snake case name
   --> $DIR/lint-uppercase-variables.rs:10:5
    |
 LL |     X: usize
-   |     ^ help: convert the identifier to snake case: `x`
+   |     ^ help: convert the identifier to snake case (notice the capitalization): `x`
    |
 note: lint level defined here
   --> $DIR/lint-uppercase-variables.rs:3:9
@@ -33,7 +33,7 @@ error: variable `Xx` should have a snake case name
   --> $DIR/lint-uppercase-variables.rs:13:9
    |
 LL | fn test(Xx: usize) {
-   |         ^^ help: convert the identifier to snake case: `xx`
+   |         ^^ help: convert the identifier to snake case (notice the capitalization): `xx`
 
 error: variable `Test` should have a snake case name
   --> $DIR/lint-uppercase-variables.rs:18:9
@@ -45,7 +45,7 @@ error: variable `Foo` should have a snake case name
   --> $DIR/lint-uppercase-variables.rs:22:9
    |
 LL |         Foo => {}
-   |         ^^^ help: convert the identifier to snake case: `foo`
+   |         ^^^ help: convert the identifier to snake case (notice the capitalization): `foo`
 
 error: aborting due to 4 previous errors
 
index 92568252164f683d0b7e57ee78c564143553dffd..4dd4798abb7ce890e9cca2a72188bf028fb9d6ef 100644 (file)
@@ -1,5 +1,4 @@
 #![feature(never_type)]
-
 #![deny(unused_must_use)]
 
 #[must_use]
index f6229c0442f999f5157432e0acee4867f47fe708..0a9939b2015b7f14cc859b7c88238c27bc2745a7 100644 (file)
@@ -1,17 +1,17 @@
 error: unused return value of `foo` that must be used
-  --> $DIR/must_use-unit.rs:14:5
+  --> $DIR/must_use-unit.rs:13:5
    |
 LL |     foo();
    |     ^^^^^^
    |
 note: lint level defined here
-  --> $DIR/must_use-unit.rs:3:9
+  --> $DIR/must_use-unit.rs:2:9
    |
 LL | #![deny(unused_must_use)]
    |         ^^^^^^^^^^^^^^^
 
 error: unused return value of `bar` that must be used
-  --> $DIR/must_use-unit.rs:16:5
+  --> $DIR/must_use-unit.rs:15:5
    |
 LL |     bar();
    |     ^^^^^^
index 70d49a4e69ca0c4c5ce2e3845d22f720d180904e..5016d9b97d69e661c04f47bf62ac8da54e148715 100644 (file)
@@ -16,5 +16,5 @@ warning: unknown lint: `Warnings`
   --> $DIR/not_found.rs:10:8
    |
 LL | #[deny(Warnings)]
-   |        ^^^^^^^^ help: did you mean: `warnings`
+   |        ^^^^^^^^ help: did you mean (notice the capitalization): `warnings`
 
index cb5f4ddf47b6318e75f0459ef8179384d7e6e693..139b3f13fd6b2beccdc4e6328426ee0d828eab44 100644 (file)
@@ -15,7 +15,7 @@ warning: variable `Social_exchange_psychology` should have a snake case name
   --> $DIR/reasons.rs:30:9
    |
 LL |     let Social_exchange_psychology = CheaterDetectionMechanism {};
-   |         ^^^^^^^^^^^^^^^^^^^^^^^^^^ help: convert the identifier to snake case: `social_exchange_psychology`
+   |         ^^^^^^^^^^^^^^^^^^^^^^^^^^ help: convert the identifier to snake case (notice the capitalization): `social_exchange_psychology`
    |
    = note: people shouldn't have to change their usual style habits
            to contribute to our project
index 5f289c0914d6e9873d4cf3756c16a3f0c0c7af12..2160df51a8375c92e8082ab2a60953b4f3a4137e 100644 (file)
@@ -1,4 +1,4 @@
-TokenStream [Ident { ident: "fn", span: #0 bytes(197..199) }, Ident { ident: "span_preservation", span: #0 bytes(200..217) }, Group { delimiter: Parenthesis, stream: TokenStream [], span: #0 bytes(217..219) }, Group { delimiter: Brace, stream: TokenStream [Ident { ident: "let", span: #0 bytes(227..230) }, Ident { ident: "tst", span: #0 bytes(231..234) }, Punct { ch: '=', spacing: Alone, span: #0 bytes(235..236) }, Literal { lit: Lit { kind: Integer, symbol: 123, suffix: None }, span: Span { lo: BytePos(237), hi: BytePos(240), ctxt: #0 } }, Punct { ch: ';', spacing: Joint, span: #0 bytes(240..241) }, Punct { ch: ';', spacing: Alone, span: #0 bytes(241..242) }, Ident { ident: "match", span: #0 bytes(288..293) }, Ident { ident: "tst", span: #0 bytes(294..297) }, Group { delimiter: Brace, stream: TokenStream [Literal { lit: Lit { kind: Integer, symbol: 123, suffix: None }, span: Span { lo: BytePos(482), hi: BytePos(485), ctxt: #0 } }, Punct { ch: '=', spacing: Joint, span: #0 bytes(486..488) }, Punct { ch: '>', spacing: Alone, span: #0 bytes(486..488) }, Group { delimiter: Parenthesis, stream: TokenStream [], span: #0 bytes(489..491) }, Punct { ch: ',', spacing: Alone, span: #0 bytes(491..492) }, Ident { ident: "_", span: #0 bytes(501..502) }, Punct { ch: '=', spacing: Joint, span: #0 bytes(503..505) }, Punct { ch: '>', spacing: Alone, span: #0 bytes(503..505) }, Group { delimiter: Parenthesis, stream: TokenStream [], span: #0 bytes(506..508) }], span: #0 bytes(298..514) }, Punct { ch: ';', spacing: Joint, span: #0 bytes(514..515) }, Punct { ch: ';', spacing: Joint, span: #0 bytes(515..516) }, Punct { ch: ';', spacing: Alone, span: #0 bytes(516..517) }], span: #0 bytes(221..561) }]
+TokenStream [Ident { ident: "fn", span: #0 bytes(197..199) }, Ident { ident: "span_preservation", span: #0 bytes(200..217) }, Group { delimiter: Parenthesis, stream: TokenStream [], span: #0 bytes(217..219) }, Group { delimiter: Brace, stream: TokenStream [Ident { ident: "let", span: #0 bytes(227..230) }, Ident { ident: "tst", span: #0 bytes(231..234) }, Punct { ch: '=', spacing: Alone, span: #0 bytes(235..236) }, Literal { lit: Lit { kind: Integer, symbol: "123", suffix: None }, span: Span { lo: BytePos(237), hi: BytePos(240), ctxt: #0 } }, Punct { ch: ';', spacing: Joint, span: #0 bytes(240..241) }, Punct { ch: ';', spacing: Alone, span: #0 bytes(241..242) }, Ident { ident: "match", span: #0 bytes(288..293) }, Ident { ident: "tst", span: #0 bytes(294..297) }, Group { delimiter: Brace, stream: TokenStream [Literal { lit: Lit { kind: Integer, symbol: "123", suffix: None }, span: Span { lo: BytePos(482), hi: BytePos(485), ctxt: #0 } }, Punct { ch: '=', spacing: Joint, span: #0 bytes(486..488) }, Punct { ch: '>', spacing: Alone, span: #0 bytes(486..488) }, Group { delimiter: Parenthesis, stream: TokenStream [], span: #0 bytes(489..491) }, Punct { ch: ',', spacing: Alone, span: #0 bytes(491..492) }, Ident { ident: "_", span: #0 bytes(501..502) }, Punct { ch: '=', spacing: Joint, span: #0 bytes(503..505) }, Punct { ch: '>', spacing: Alone, span: #0 bytes(503..505) }, Group { delimiter: Parenthesis, stream: TokenStream [], span: #0 bytes(506..508) }], span: #0 bytes(298..514) }, Punct { ch: ';', spacing: Joint, span: #0 bytes(514..515) }, Punct { ch: ';', spacing: Joint, span: #0 bytes(515..516) }, Punct { ch: ';', spacing: Alone, span: #0 bytes(516..517) }], span: #0 bytes(221..561) }]
 error: unnecessary trailing semicolon
   --> $DIR/redundant-semi-proc-macro.rs:9:19
    |
index 678c88849b561d6a112804264055d1134cb2cd68..1da5acc966163ed004237388b6492da475369cce 100644 (file)
@@ -395,7 +395,7 @@ mod foo {
 \u001b[0m   \u001b[0m\u001b[0m\u001b[1m\u001b[38;5;12m|\u001b[0m
 \u001b[0m\u001b[1m\u001b[38;5;12mLL\u001b[0m\u001b[0m \u001b[0m\u001b[0m\u001b[1m\u001b[38;5;12m| \u001b[0m\u001b[0muse std::collections::hash_map::Iter;\u001b[0m
 \u001b[0m   \u001b[0m\u001b[0m\u001b[1m\u001b[38;5;12m|\u001b[0m
-\u001b[0mand 8 other candidates\u001b[0m
+\u001b[0m     and 8 other candidates\u001b[0m
 
 "
 }
index 26f756c91833bf7ba4e46f5bf60a12fb8d94a36d..0a4744013a62ca9c0876e98638726369c90927a5 100644 (file)
@@ -5,7 +5,7 @@ LL |         s
    |         ^
    |
    = note: ...the reference is valid for the static lifetime...
-note: ...but the borrowed content is only valid for the lifetime 'a as defined on the function body at 23:17
+note: ...but the borrowed content is only valid for the lifetime `'a` as defined on the function body at 23:17
   --> $DIR/lub-if.rs:23:17
    |
 LL | pub fn opt_str2<'a>(maybestr: &'a Option<String>) -> &'static str {
@@ -18,7 +18,7 @@ LL |         s
    |         ^
    |
    = note: ...the reference is valid for the static lifetime...
-note: ...but the borrowed content is only valid for the lifetime 'a as defined on the function body at 32:17
+note: ...but the borrowed content is only valid for the lifetime `'a` as defined on the function body at 32:17
   --> $DIR/lub-if.rs:32:17
    |
 LL | pub fn opt_str3<'a>(maybestr: &'a Option<String>) -> &'static str {
index 0cb0a23c6f2df30339960f73eed818cd76a247cf..168a389446921dc1bd9b09b9f141fea2cc2aed0f 100644 (file)
@@ -5,7 +5,7 @@ LL |             s
    |             ^
    |
    = note: ...the reference is valid for the static lifetime...
-note: ...but the borrowed content is only valid for the lifetime 'a as defined on the function body at 25:17
+note: ...but the borrowed content is only valid for the lifetime `'a` as defined on the function body at 25:17
   --> $DIR/lub-match.rs:25:17
    |
 LL | pub fn opt_str2<'a>(maybestr: &'a Option<String>) -> &'static str {
@@ -18,7 +18,7 @@ LL |             s
    |             ^
    |
    = note: ...the reference is valid for the static lifetime...
-note: ...but the borrowed content is only valid for the lifetime 'a as defined on the function body at 35:17
+note: ...but the borrowed content is only valid for the lifetime `'a` as defined on the function body at 35:17
   --> $DIR/lub-match.rs:35:17
    |
 LL | pub fn opt_str3<'a>(maybestr: &'a Option<String>) -> &'static str {
index a4f70b6b68d1ee29d8886ec34a7bcb92ae5264e7..e0bb4d98525b9c4d5d61fc1fc200407fcc03e4c4 100644 (file)
@@ -4,7 +4,6 @@
 // left-hand side of a macro definition behave as if they had unique spans, and in particular that
 // they don't crash the compiler.
 
-#![feature(proc_macro_hygiene)]
 #![allow(unused_macros)]
 
 extern crate proc_macro_sequence;
index 0eef4a2a678b6b7bb51d8f3932e9e3d03e8bfab5..896f579765f957c58ba6df3efa3e72055aeaa762 100644 (file)
@@ -1,5 +1,5 @@
 error: `$x:expr` may be followed by `$y:tt`, which is not allowed for `expr` fragments
-  --> $DIR/same-sequence-span.rs:15:18
+  --> $DIR/same-sequence-span.rs:14:18
    |
 LL |     (1 $x:expr $($y:tt,)*
    |                  ^^^^^ not allowed after `expr` fragments
@@ -7,7 +7,7 @@ LL |     (1 $x:expr $($y:tt,)*
    = note: allowed there are: `=>`, `,` or `;`
 
 error: `$x:expr` may be followed by `=`, which is not allowed for `expr` fragments
-  --> $DIR/same-sequence-span.rs:16:18
+  --> $DIR/same-sequence-span.rs:15:18
    |
 LL |                $(= $z:tt)*
    |                  ^ not allowed after `expr` fragments
@@ -15,7 +15,7 @@ LL |                $(= $z:tt)*
    = note: allowed there are: `=>`, `,` or `;`
 
 error: `$x:expr` may be followed by `$y:tt`, which is not allowed for `expr` fragments
-  --> $DIR/same-sequence-span.rs:20:1
+  --> $DIR/same-sequence-span.rs:19:1
    |
 LL |   proc_macro_sequence::make_foo!();
    |   ^--------------------------------
@@ -30,7 +30,7 @@ LL | | fn main() {}
    = note: allowed there are: `=>`, `,` or `;`
 
 error: `$x:expr` may be followed by `=`, which is not allowed for `expr` fragments
-  --> $DIR/same-sequence-span.rs:20:1
+  --> $DIR/same-sequence-span.rs:19:1
    |
 LL | proc_macro_sequence::make_foo!();
    | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
index d8416c409542b86f995dc1ec7e0070820b52cc09..3860864bd13366a2534c62faaf5f098367c13eb6 100644 (file)
@@ -4,11 +4,11 @@ error: malformed `plugin` attribute input
 LL | #![plugin]
    | ^^^^^^^^^^ help: must be of the form: `#[plugin(name|name(args))]`
 
-warning: use of deprecated attribute `plugin`: compiler plugins are deprecated. See https://github.com/rust-lang/rust/issues/29597
+warning: use of deprecated attribute `plugin`: compiler plugins are deprecated. See https://github.com/rust-lang/rust/pull/64675
   --> $DIR/malformed-plugin-1.rs:2:1
    |
 LL | #![plugin]
-   | ^^^^^^^^^^ help: remove this attribute
+   | ^^^^^^^^^^ help: may be removed in a future compiler version
    |
    = note: `#[warn(deprecated)]` on by default
 
index 34383ba082812d0a92e6281cf528031fc992443a..e4bca93f13b356b4473e3161d48dd3dbec30f713 100644 (file)
@@ -4,11 +4,11 @@ error: malformed `plugin` attribute input
 LL | #![plugin="bleh"]
    | ^^^^^^^^^^^^^^^^^ help: must be of the form: `#[plugin(name|name(args))]`
 
-warning: use of deprecated attribute `plugin`: compiler plugins are deprecated. See https://github.com/rust-lang/rust/issues/29597
+warning: use of deprecated attribute `plugin`: compiler plugins are deprecated. See https://github.com/rust-lang/rust/pull/64675
   --> $DIR/malformed-plugin-2.rs:2:1
    |
 LL | #![plugin="bleh"]
-   | ^^^^^^^^^^^^^^^^^ help: remove this attribute
+   | ^^^^^^^^^^^^^^^^^ help: may be removed in a future compiler version
    |
    = note: `#[warn(deprecated)]` on by default
 
index 71f607d68a4afa68e7f157d30fe4ae1ba12eb5b0..7393072cb1ca3e45d204af14b90b86365aac5cf6 100644 (file)
@@ -4,11 +4,11 @@ error[E0498]: malformed `plugin` attribute
 LL | #![plugin(foo="bleh")]
    | ^^^^^^^^^^^^^^^^^^^^^^ malformed attribute
 
-warning: use of deprecated attribute `plugin`: compiler plugins are deprecated. See https://github.com/rust-lang/rust/issues/29597
+warning: use of deprecated attribute `plugin`: compiler plugins are deprecated. See https://github.com/rust-lang/rust/pull/64675
   --> $DIR/malformed-plugin-3.rs:2:1
    |
 LL | #![plugin(foo="bleh")]
-   | ^^^^^^^^^^^^^^^^^^^^^^ help: remove this attribute
+   | ^^^^^^^^^^^^^^^^^^^^^^ help: may be removed in a future compiler version
    |
    = note: `#[warn(deprecated)]` on by default
 
index a0cc773d20edd3cd184a5598f3b616b92454b25b..7de7b7e79be44faf85c4b75fe98ff73dd672e40b 100644 (file)
@@ -1,39 +1,45 @@
-//error-pattern: unreachable
-//error-pattern: unreachable
-//error-pattern: unreachable
-//error-pattern: unreachable
-//error-pattern: unreachable
-
-#![deny(unreachable_patterns)]
+#![deny(unreachable_patterns, overlapping_patterns)]
 
 fn main() {
     match 5 {
       1 ..= 10 => { }
       5 ..= 6 => { }
+      //~^ ERROR unreachable pattern
       _ => {}
     };
 
     match 5 {
       3 ..= 6 => { }
       4 ..= 6 => { }
+      //~^ ERROR unreachable pattern
       _ => {}
     };
 
     match 5 {
       4 ..= 6 => { }
       4 ..= 6 => { }
+      //~^ ERROR unreachable pattern
       _ => {}
     };
 
     match 'c' {
       'A' ..= 'z' => {}
       'a' ..= 'z' => {}
+      //~^ ERROR unreachable pattern
       _ => {}
     };
 
     match 1.0f64 {
       0.01f64 ..= 6.5f64 => {}
-      0.02f64 => {}
+      //~^ WARNING floating-point types cannot be used in patterns
+      //~| WARNING floating-point types cannot be used in patterns
+      //~| WARNING floating-point types cannot be used in patterns
+      //~| WARNING this was previously accepted by the compiler
+      //~| WARNING this was previously accepted by the compiler
+      //~| WARNING this was previously accepted by the compiler
+      0.02f64 => {} //~ ERROR unreachable pattern
+      //~^ WARNING floating-point types cannot be used in patterns
+      //~| WARNING this was previously accepted by the compiler
       _ => {}
     };
 }
index d0ff4930a45195bfe84a189f78248e5195bc4e2c..c15186d2558f24402c0746132a7f3fbf4c3b1783 100644 (file)
@@ -1,35 +1,35 @@
 error: unreachable pattern
-  --> $DIR/match-range-fail-dominate.rs:12:7
+  --> $DIR/match-range-fail-dominate.rs:6:7
    |
 LL |       5 ..= 6 => { }
    |       ^^^^^^^
    |
 note: lint level defined here
-  --> $DIR/match-range-fail-dominate.rs:7:9
+  --> $DIR/match-range-fail-dominate.rs:1:9
    |
-LL | #![deny(unreachable_patterns)]
+LL | #![deny(unreachable_patterns, overlapping_patterns)]
    |         ^^^^^^^^^^^^^^^^^^^^
 
 error: unreachable pattern
-  --> $DIR/match-range-fail-dominate.rs:18:7
+  --> $DIR/match-range-fail-dominate.rs:13:7
    |
 LL |       4 ..= 6 => { }
    |       ^^^^^^^
 
 error: unreachable pattern
-  --> $DIR/match-range-fail-dominate.rs:24:7
+  --> $DIR/match-range-fail-dominate.rs:20:7
    |
 LL |       4 ..= 6 => { }
    |       ^^^^^^^
 
 error: unreachable pattern
-  --> $DIR/match-range-fail-dominate.rs:30:7
+  --> $DIR/match-range-fail-dominate.rs:27:7
    |
 LL |       'a' ..= 'z' => {}
    |       ^^^^^^^^^^^
 
 warning: floating-point types cannot be used in patterns
-  --> $DIR/match-range-fail-dominate.rs:35:7
+  --> $DIR/match-range-fail-dominate.rs:33:7
    |
 LL |       0.01f64 ..= 6.5f64 => {}
    |       ^^^^^^^
@@ -39,7 +39,7 @@ LL |       0.01f64 ..= 6.5f64 => {}
    = note: for more information, see issue #41620 <https://github.com/rust-lang/rust/issues/41620>
 
 warning: floating-point types cannot be used in patterns
-  --> $DIR/match-range-fail-dominate.rs:35:19
+  --> $DIR/match-range-fail-dominate.rs:33:19
    |
 LL |       0.01f64 ..= 6.5f64 => {}
    |                   ^^^^^^
@@ -48,7 +48,7 @@ LL |       0.01f64 ..= 6.5f64 => {}
    = note: for more information, see issue #41620 <https://github.com/rust-lang/rust/issues/41620>
 
 warning: floating-point types cannot be used in patterns
-  --> $DIR/match-range-fail-dominate.rs:36:7
+  --> $DIR/match-range-fail-dominate.rs:40:7
    |
 LL |       0.02f64 => {}
    |       ^^^^^^^
@@ -57,13 +57,13 @@ LL |       0.02f64 => {}
    = note: for more information, see issue #41620 <https://github.com/rust-lang/rust/issues/41620>
 
 error: unreachable pattern
-  --> $DIR/match-range-fail-dominate.rs:36:7
+  --> $DIR/match-range-fail-dominate.rs:40:7
    |
 LL |       0.02f64 => {}
    |       ^^^^^^^
 
 warning: floating-point types cannot be used in patterns
-  --> $DIR/match-range-fail-dominate.rs:35:7
+  --> $DIR/match-range-fail-dominate.rs:33:7
    |
 LL |       0.01f64 ..= 6.5f64 => {}
    |       ^^^^^^^
index 30bbb8d7800f9b086f5b602c2433af9db70c0d1b..0a020989d6f35d7821607e207fcb87e6aa8e0930 100644 (file)
@@ -6,12 +6,12 @@ LL |         match self.0 { ref mut x => x }
    |
    = note: expected type `&'a mut &'a i32`
               found type `&'a mut &'b i32`
-note: the lifetime 'a as defined on the method body at 9:12...
+note: the lifetime `'a` as defined on the method body at 9:12...
   --> $DIR/match-ref-mut-invariance.rs:9:12
    |
 LL |     fn bar<'a>(&'a mut self) -> &'a mut &'a i32 {
    |            ^^
-note: ...does not necessarily outlive the lifetime 'b as defined on the impl at 8:6
+note: ...does not necessarily outlive the lifetime `'b` as defined on the impl at 8:6
   --> $DIR/match-ref-mut-invariance.rs:8:6
    |
 LL | impl<'b> S<'b> {
index 6ca222d9c2ffc55a3859c2f717d01fb7131fca17..1bea9bce11e47df90641ca478bfd6bd21c9603a7 100644 (file)
@@ -6,12 +6,12 @@ LL |         x
    |
    = note: expected type `&'a mut &'a i32`
               found type `&'a mut &'b i32`
-note: the lifetime 'a as defined on the method body at 9:12...
+note: the lifetime `'a` as defined on the method body at 9:12...
   --> $DIR/match-ref-mut-let-invariance.rs:9:12
    |
 LL |     fn bar<'a>(&'a mut self) -> &'a mut &'a i32 {
    |            ^^
-note: ...does not necessarily outlive the lifetime 'b as defined on the impl at 8:6
+note: ...does not necessarily outlive the lifetime `'b` as defined on the impl at 8:6
   --> $DIR/match-ref-mut-let-invariance.rs:8:6
    |
 LL | impl<'b> S<'b> {
index 25b8bbdab2d8e7813d8c7f480aead2dce4f913f2..e5f01174ac1bf7539ebbb0d6cab2a6f0acf806a0 100644 (file)
@@ -41,6 +41,13 @@ LL | | }
 ...
 LL |       let E::A = e;
    |           ^^^^ patterns `B` and `C` not covered
+   |
+   = note: `let` bindings require an "irrefutable pattern", like a `struct` or an `enum` with only one variant
+   = note: for more information, visit https://doc.rust-lang.org/book/ch18-02-refutability.html
+help: you might want to use `if let` to ignore the variant that isn't matched
+   |
+LL |     if let E::A = e { /* */ }
+   |     ^^^^^^^^^^^^^^^^^^^^^^^^^
 
 error[E0004]: non-exhaustive patterns: `&B` and `&C` not covered
   --> $DIR/non-exhaustive-defined-here.rs:40:11
@@ -85,6 +92,13 @@ LL | | }
 ...
 LL |       let E::A = e;
    |           ^^^^ patterns `&B` and `&C` not covered
+   |
+   = note: `let` bindings require an "irrefutable pattern", like a `struct` or an `enum` with only one variant
+   = note: for more information, visit https://doc.rust-lang.org/book/ch18-02-refutability.html
+help: you might want to use `if let` to ignore the variant that isn't matched
+   |
+LL |     if let E::A = e { /* */ }
+   |     ^^^^^^^^^^^^^^^^^^^^^^^^^
 
 error[E0004]: non-exhaustive patterns: `&&mut &B` and `&&mut &C` not covered
   --> $DIR/non-exhaustive-defined-here.rs:48:11
@@ -129,6 +143,13 @@ LL | | }
 ...
 LL |       let E::A = e;
    |           ^^^^ patterns `&&mut &B` and `&&mut &C` not covered
+   |
+   = note: `let` bindings require an "irrefutable pattern", like a `struct` or an `enum` with only one variant
+   = note: for more information, visit https://doc.rust-lang.org/book/ch18-02-refutability.html
+help: you might want to use `if let` to ignore the variant that isn't matched
+   |
+LL |     if let E::A = e { /* */ }
+   |
 
 error[E0004]: non-exhaustive patterns: `None` not covered
   --> $DIR/non-exhaustive-defined-here.rs:65:11
@@ -163,6 +184,13 @@ LL | | }
 ...
 LL |       let Opt::Some(ref _x) = e;
    |           ^^^^^^^^^^^^^^^^^ pattern `None` not covered
+   |
+   = note: `let` bindings require an "irrefutable pattern", like a `struct` or an `enum` with only one variant
+   = note: for more information, visit https://doc.rust-lang.org/book/ch18-02-refutability.html
+help: you might want to use `if let` to ignore the variant that isn't matched
+   |
+LL |     if let Opt::Some(ref _x) = e { /* */ }
+   |     ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
 
 error: aborting due to 8 previous errors
 
index 27df0241aa21f0e0bf9a61c40cd23e94cb2613be..2b77d866fb30b3e011e096b3e2d9317c0c18aa24 100644 (file)
@@ -8,3 +8,4 @@ LL | mod mod_file_disambig_aux;
 
 error: aborting due to previous error
 
+For more information about this error, try `rustc --explain E0584`.
index 3a7895a08c6ef76fd03b0f65809f5d0613444e77..dad8172e0c59e68d35bd91c5c8756a4bdbce15f0 100644 (file)
@@ -1,16 +1,16 @@
-warning: use of deprecated attribute `plugin_registrar`: compiler plugins are deprecated. See https://github.com/rust-lang/rust/issues/29597
+warning: use of deprecated attribute `plugin_registrar`: compiler plugins are deprecated. See https://github.com/rust-lang/rust/pull/64675
   --> $DIR/multiple-plugin-registrars.rs:6:1
    |
 LL | #[plugin_registrar]
-   | ^^^^^^^^^^^^^^^^^^^ help: remove this attribute
+   | ^^^^^^^^^^^^^^^^^^^ help: may be removed in a future compiler version
    |
    = note: `#[warn(deprecated)]` on by default
 
-warning: use of deprecated attribute `plugin_registrar`: compiler plugins are deprecated. See https://github.com/rust-lang/rust/issues/29597
+warning: use of deprecated attribute `plugin_registrar`: compiler plugins are deprecated. See https://github.com/rust-lang/rust/pull/64675
   --> $DIR/multiple-plugin-registrars.rs:9:1
    |
 LL | #[plugin_registrar]
-   | ^^^^^^^^^^^^^^^^^^^ help: remove this attribute
+   | ^^^^^^^^^^^^^^^^^^^ help: may be removed in a future compiler version
 
 error: multiple plugin registration functions found
    |
diff --git a/src/test/ui/never-assign-dead-code.rs b/src/test/ui/never-assign-dead-code.rs
deleted file mode 100644 (file)
index fd5fbc3..0000000
--- a/src/test/ui/never-assign-dead-code.rs
+++ /dev/null
@@ -1,12 +0,0 @@
-// Test that an assignment of type ! makes the rest of the block dead code.
-
-#![feature(never_type)]
-// build-pass (FIXME(62277): could be check-pass?)
-#![warn(unused)]
-
-
-fn main() {
-    let x: ! = panic!("aah"); //~ WARN unused
-    drop(x); //~ WARN unreachable
-    //~^ WARN unreachable
-}
diff --git a/src/test/ui/never-assign-dead-code.stderr b/src/test/ui/never-assign-dead-code.stderr
deleted file mode 100644 (file)
index b887d58..0000000
--- a/src/test/ui/never-assign-dead-code.stderr
+++ /dev/null
@@ -1,37 +0,0 @@
-warning: unreachable statement
-  --> $DIR/never-assign-dead-code.rs:10:5
-   |
-LL |     let x: ! = panic!("aah");
-   |                ------------- any code following this expression is unreachable
-LL |     drop(x);
-   |     ^^^^^^^^ unreachable statement
-   |
-note: lint level defined here
-  --> $DIR/never-assign-dead-code.rs:5:9
-   |
-LL | #![warn(unused)]
-   |         ^^^^^^
-   = note: `#[warn(unreachable_code)]` implied by `#[warn(unused)]`
-   = note: this warning originates in a macro outside of the current crate (in Nightly builds, run with -Z external-macro-backtrace for more info)
-
-warning: unreachable call
-  --> $DIR/never-assign-dead-code.rs:10:5
-   |
-LL |     drop(x);
-   |     ^^^^ - any code following this expression is unreachable
-   |     |
-   |     unreachable call
-
-warning: unused variable: `x`
-  --> $DIR/never-assign-dead-code.rs:9:9
-   |
-LL |     let x: ! = panic!("aah");
-   |         ^ help: consider prefixing with an underscore: `_x`
-   |
-note: lint level defined here
-  --> $DIR/never-assign-dead-code.rs:5:9
-   |
-LL | #![warn(unused)]
-   |         ^^^^^^
-   = note: `#[warn(unused_variables)]` implied by `#[warn(unused)]`
-
diff --git a/src/test/ui/never-assign-wrong-type.rs b/src/test/ui/never-assign-wrong-type.rs
deleted file mode 100644 (file)
index 67e26f5..0000000
--- a/src/test/ui/never-assign-wrong-type.rs
+++ /dev/null
@@ -1,8 +0,0 @@
-// Test that we can't use another type in place of !
-
-#![feature(never_type)]
-#![deny(warnings)]
-
-fn main() {
-    let x: ! = "hello"; //~ ERROR mismatched types
-}
diff --git a/src/test/ui/never-assign-wrong-type.stderr b/src/test/ui/never-assign-wrong-type.stderr
deleted file mode 100644 (file)
index da2e77d..0000000
--- a/src/test/ui/never-assign-wrong-type.stderr
+++ /dev/null
@@ -1,12 +0,0 @@
-error[E0308]: mismatched types
-  --> $DIR/never-assign-wrong-type.rs:7:16
-   |
-LL |     let x: ! = "hello";
-   |                ^^^^^^^ expected !, found reference
-   |
-   = note: expected type `!`
-              found type `&'static str`
-
-error: aborting due to previous error
-
-For more information about this error, try `rustc --explain E0308`.
diff --git a/src/test/ui/never-from-impl-is-reserved.rs b/src/test/ui/never-from-impl-is-reserved.rs
deleted file mode 100644 (file)
index 9d16015..0000000
--- a/src/test/ui/never-from-impl-is-reserved.rs
+++ /dev/null
@@ -1,12 +0,0 @@
-// check that the `for<T> T: From<!>` impl is reserved
-
-#![feature(never_type)]
-
-pub struct MyFoo;
-pub trait MyTrait {}
-
-impl MyTrait for MyFoo {}
-// This will conflict with the first impl if we impl `for<T> T: From<!>`.
-impl<T> MyTrait for T where T: From<!> {} //~ ERROR conflicting implementation
-
-fn main() {}
diff --git a/src/test/ui/never-from-impl-is-reserved.stderr b/src/test/ui/never-from-impl-is-reserved.stderr
deleted file mode 100644 (file)
index 8b8d0f4..0000000
--- a/src/test/ui/never-from-impl-is-reserved.stderr
+++ /dev/null
@@ -1,14 +0,0 @@
-error[E0119]: conflicting implementations of trait `MyTrait` for type `MyFoo`:
-  --> $DIR/never-from-impl-is-reserved.rs:10:1
-   |
-LL | impl MyTrait for MyFoo {}
-   | ---------------------- first implementation here
-LL | // This will conflict with the first impl if we impl `for<T> T: From<!>`.
-LL | impl<T> MyTrait for T where T: From<!> {}
-   | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ conflicting implementation for `MyFoo`
-   |
-   = note: permitting this impl would forbid us from adding `impl<T> From<!> for T` later; see rust-lang/rust#64715 for details
-
-error: aborting due to previous error
-
-For more information about this error, try `rustc --explain E0119`.
diff --git a/src/test/ui/never-result.rs b/src/test/ui/never-result.rs
deleted file mode 100644 (file)
index 98ce326..0000000
--- a/src/test/ui/never-result.rs
+++ /dev/null
@@ -1,20 +0,0 @@
-// run-pass
-
-#![allow(unused_variables)]
-#![allow(unreachable_code)]
-// Test that we can extract a ! through pattern matching then use it as several different types.
-
-#![feature(never_type)]
-
-fn main() {
-    let x: Result<u32, !> = Ok(123);
-    match x {
-        Ok(z) => (),
-        Err(y) => {
-            let q: u32 = y;
-            let w: i32 = y;
-            let e: String = y;
-            y
-        },
-    }
-}
diff --git a/src/test/ui/never-type-rvalues.rs b/src/test/ui/never-type-rvalues.rs
deleted file mode 100644 (file)
index 9ccc73d..0000000
--- a/src/test/ui/never-type-rvalues.rs
+++ /dev/null
@@ -1,38 +0,0 @@
-// run-pass
-
-#![feature(never_type)]
-#![allow(dead_code)]
-#![allow(path_statements)]
-#![allow(unreachable_patterns)]
-
-fn never_direct(x: !) {
-    x;
-}
-
-fn never_ref_pat(ref x: !) {
-    *x;
-}
-
-fn never_ref(x: &!) {
-    let &y = x;
-    y;
-}
-
-fn never_pointer(x: *const !) {
-    unsafe {
-        *x;
-    }
-}
-
-fn never_slice(x: &[!]) {
-    x[0];
-}
-
-fn never_match(x: Result<(), !>) {
-    match x {
-        Ok(_) => {},
-        Err(_) => {},
-    }
-}
-
-pub fn main() { }
diff --git a/src/test/ui/never_coercions.rs b/src/test/ui/never_coercions.rs
deleted file mode 100644 (file)
index 105c386..0000000
--- a/src/test/ui/never_coercions.rs
+++ /dev/null
@@ -1,12 +0,0 @@
-// run-pass
-// Test that having something of type ! doesn't screw up type-checking and that it coerces to the
-// LUB type of the other match arms.
-
-fn main() {
-    let v: Vec<u32> = Vec::new();
-    match 0u32 {
-        0 => &v,
-        1 => return,
-        _ => &v[..],
-    };
-}
diff --git a/src/test/ui/never_transmute_never.rs b/src/test/ui/never_transmute_never.rs
deleted file mode 100644 (file)
index 5bad756..0000000
--- a/src/test/ui/never_transmute_never.rs
+++ /dev/null
@@ -1,23 +0,0 @@
-// build-pass (FIXME(62277): could be check-pass?)
-
-#![crate_type="lib"]
-
-#![feature(never_type)]
-#![allow(dead_code)]
-#![allow(unreachable_code)]
-#![allow(unused_variables)]
-
-struct Foo;
-
-pub fn f(x: !) -> ! {
-    x
-}
-
-pub fn ub() {
-    // This is completely undefined behaviour,
-    // but we still want to make sure it compiles.
-    let x: ! = unsafe {
-        std::mem::transmute::<Foo, !>(Foo)
-    };
-    f(x)
-}
diff --git a/src/test/ui/never_type/adjust_never.rs b/src/test/ui/never_type/adjust_never.rs
new file mode 100644 (file)
index 0000000..3aa5866
--- /dev/null
+++ b/src/test/ui/never_type/adjust_never.rs
@@ -0,0 +1,11 @@
+// Test that a variable of type ! can coerce to another type.
+
+// run-fail
+// error-pattern:explicit
+
+#![feature(never_type)]
+
+fn main() {
+    let x: ! = panic!();
+    let y: u32 = x;
+}
diff --git a/src/test/ui/never_type/call-fn-never-arg-wrong-type.rs b/src/test/ui/never_type/call-fn-never-arg-wrong-type.rs
new file mode 100644 (file)
index 0000000..d06637e
--- /dev/null
+++ b/src/test/ui/never_type/call-fn-never-arg-wrong-type.rs
@@ -0,0 +1,11 @@
+// Test that we can't pass other types for !
+
+#![feature(never_type)]
+
+fn foo(x: !) -> ! {
+    x
+}
+
+fn main() {
+    foo("wow"); //~ ERROR mismatched types
+}
diff --git a/src/test/ui/never_type/call-fn-never-arg-wrong-type.stderr b/src/test/ui/never_type/call-fn-never-arg-wrong-type.stderr
new file mode 100644 (file)
index 0000000..7a50fd3
--- /dev/null
+++ b/src/test/ui/never_type/call-fn-never-arg-wrong-type.stderr
@@ -0,0 +1,12 @@
+error[E0308]: mismatched types
+  --> $DIR/call-fn-never-arg-wrong-type.rs:10:9
+   |
+LL |     foo("wow");
+   |         ^^^^^ expected !, found reference
+   |
+   = note: expected type `!`
+              found type `&'static str`
+
+error: aborting due to previous error
+
+For more information about this error, try `rustc --explain E0308`.
diff --git a/src/test/ui/never_type/call-fn-never-arg.rs b/src/test/ui/never_type/call-fn-never-arg.rs
new file mode 100644 (file)
index 0000000..6218572
--- /dev/null
+++ b/src/test/ui/never_type/call-fn-never-arg.rs
@@ -0,0 +1,15 @@
+// Test that we can use a ! for an argument of type !
+
+// run-fail
+// error-pattern:wowzers!
+
+#![feature(never_type)]
+#![allow(unreachable_code)]
+
+fn foo(x: !) -> ! {
+    x
+}
+
+fn main() {
+    foo(panic!("wowzers!"))
+}
diff --git a/src/test/ui/never_type/cast-never.rs b/src/test/ui/never_type/cast-never.rs
new file mode 100644 (file)
index 0000000..46072e1
--- /dev/null
+++ b/src/test/ui/never_type/cast-never.rs
@@ -0,0 +1,11 @@
+// Test that we can explicitly cast ! to another type
+
+// run-fail
+// error-pattern:explicit
+
+#![feature(never_type)]
+
+fn main() {
+    let x: ! = panic!();
+    let y: u32 = x as u32;
+}
diff --git a/src/test/ui/never_type/defaulted-never-note.rs b/src/test/ui/never_type/defaulted-never-note.rs
new file mode 100644 (file)
index 0000000..d3fb8a0
--- /dev/null
+++ b/src/test/ui/never_type/defaulted-never-note.rs
@@ -0,0 +1,35 @@
+// We need to opt into the `!` feature in order to trigger the
+// requirement that this is testing.
+#![feature(never_type)]
+
+#![allow(unused)]
+
+trait Deserialize: Sized {
+    fn deserialize() -> Result<Self, String>;
+}
+
+impl Deserialize for () {
+    fn deserialize() -> Result<(), String> {
+        Ok(())
+    }
+}
+
+trait ImplementedForUnitButNotNever {}
+
+impl ImplementedForUnitButNotNever for () {}
+
+fn foo<T: ImplementedForUnitButNotNever>(_t: T) {}
+//~^ NOTE required by this bound in `foo`
+//~| NOTE
+
+fn smeg() {
+    let _x = return;
+    foo(_x);
+    //~^ ERROR the trait bound
+    //~| NOTE the trait `ImplementedForUnitButNotNever` is not implemented
+    //~| NOTE the trait is implemented for `()`
+}
+
+fn main() {
+    smeg();
+}
diff --git a/src/test/ui/never_type/defaulted-never-note.stderr b/src/test/ui/never_type/defaulted-never-note.stderr
new file mode 100644 (file)
index 0000000..28c9da0
--- /dev/null
+++ b/src/test/ui/never_type/defaulted-never-note.stderr
@@ -0,0 +1,14 @@
+error[E0277]: the trait bound `!: ImplementedForUnitButNotNever` is not satisfied
+  --> $DIR/defaulted-never-note.rs:27:5
+   |
+LL | fn foo<T: ImplementedForUnitButNotNever>(_t: T) {}
+   |    ---    ----------------------------- required by this bound in `foo`
+...
+LL |     foo(_x);
+   |     ^^^ the trait `ImplementedForUnitButNotNever` is not implemented for `!`
+   |
+   = note: the trait is implemented for `()`. Possibly this error has been caused by changes to Rust's type-inference algorithm (see: https://github.com/rust-lang/rust/issues/48950 for more info). Consider whether you meant to use the type `()` here instead.
+
+error: aborting due to previous error
+
+For more information about this error, try `rustc --explain E0277`.
diff --git a/src/test/ui/never_type/dispatch_from_dyn_zst.rs b/src/test/ui/never_type/dispatch_from_dyn_zst.rs
new file mode 100644 (file)
index 0000000..764f58c
--- /dev/null
+++ b/src/test/ui/never_type/dispatch_from_dyn_zst.rs
@@ -0,0 +1,51 @@
+// run-pass
+
+#![feature(unsize, dispatch_from_dyn, never_type)]
+
+#![allow(dead_code)]
+
+use std::{
+    ops::DispatchFromDyn,
+    marker::{Unsize, PhantomData},
+};
+
+struct Zst;
+struct NestedZst(PhantomData<()>, Zst);
+
+
+struct WithUnit<T: ?Sized>(Box<T>, ());
+impl<T: ?Sized, U: ?Sized> DispatchFromDyn<WithUnit<U>> for WithUnit<T>
+    where T: Unsize<U> {}
+
+struct WithPhantom<T: ?Sized>(Box<T>, PhantomData<()>);
+impl<T: ?Sized, U: ?Sized> DispatchFromDyn<WithPhantom<U>> for WithPhantom<T>
+    where T: Unsize<U> {}
+
+struct WithNever<T: ?Sized>(Box<T>, !);
+impl<T: ?Sized, U: ?Sized> DispatchFromDyn<WithNever<U>> for WithNever<T>
+    where T: Unsize<U> {}
+
+struct WithZst<T: ?Sized>(Box<T>, Zst);
+impl<T: ?Sized, U: ?Sized> DispatchFromDyn<WithZst<U>> for WithZst<T>
+    where T: Unsize<U> {}
+
+struct WithNestedZst<T: ?Sized>(Box<T>, NestedZst);
+impl<T: ?Sized, U: ?Sized> DispatchFromDyn<WithNestedZst<U>> for WithNestedZst<T>
+    where T: Unsize<U> {}
+
+
+struct Generic<T: ?Sized, A>(Box<T>, A);
+impl<T: ?Sized, U: ?Sized> DispatchFromDyn<Generic<U, ()>> for Generic<T, ()>
+    where T: Unsize<U> {}
+impl<T: ?Sized, U: ?Sized> DispatchFromDyn<Generic<U, PhantomData<()>>>
+    for Generic<T, PhantomData<()>>
+    where T: Unsize<U> {}
+impl<T: ?Sized, U: ?Sized> DispatchFromDyn<Generic<U, !>> for Generic<T, !>
+    where T: Unsize<U> {}
+impl<T: ?Sized, U: ?Sized> DispatchFromDyn<Generic<U, Zst>> for Generic<T, Zst>
+    where T: Unsize<U> {}
+impl<T: ?Sized, U: ?Sized> DispatchFromDyn<Generic<U, NestedZst>> for Generic<T, NestedZst>
+    where T: Unsize<U> {}
+
+
+fn main() {}
diff --git a/src/test/ui/never_type/diverging-fallback-control-flow.rs b/src/test/ui/never_type/diverging-fallback-control-flow.rs
new file mode 100644 (file)
index 0000000..c68e636
--- /dev/null
+++ b/src/test/ui/never_type/diverging-fallback-control-flow.rs
@@ -0,0 +1,102 @@
+// run-pass
+
+#![allow(dead_code)]
+#![allow(unused_assignments)]
+#![allow(unused_variables)]
+#![allow(unreachable_code)]
+
+// Test various cases where we permit an unconstrained variable
+// to fallback based on control-flow.
+//
+// These represent current behavior, but are pretty dubious.  I would
+// like to revisit these and potentially change them. --nmatsakis
+
+#![feature(never_type)]
+
+trait BadDefault {
+    fn default() -> Self;
+}
+
+impl BadDefault for u32 {
+    fn default() -> Self {
+        0
+    }
+}
+
+impl BadDefault for ! {
+    fn default() -> ! {
+        panic!()
+    }
+}
+
+fn assignment() {
+    let x;
+
+    if true {
+        x = BadDefault::default();
+    } else {
+        x = return;
+    }
+}
+
+fn assignment_rev() {
+    let x;
+
+    if true {
+        x = return;
+    } else {
+        x = BadDefault::default();
+    }
+}
+
+fn if_then_else() {
+    let _x = if true {
+        BadDefault::default()
+    } else {
+        return;
+    };
+}
+
+fn if_then_else_rev() {
+    let _x = if true {
+        return;
+    } else {
+        BadDefault::default()
+    };
+}
+
+fn match_arm() {
+    let _x = match Ok(BadDefault::default()) {
+        Ok(v) => v,
+        Err(()) => return,
+    };
+}
+
+fn match_arm_rev() {
+    let _x = match Ok(BadDefault::default()) {
+        Err(()) => return,
+        Ok(v) => v,
+    };
+}
+
+fn loop_break() {
+    let _x = loop {
+        if false {
+            break return;
+        } else {
+            break BadDefault::default();
+        }
+    };
+}
+
+fn loop_break_rev() {
+    let _x = loop {
+        if false {
+            break return;
+        } else {
+            break BadDefault::default();
+        }
+    };
+}
+
+fn main() { }
diff --git a/src/test/ui/never_type/impl-for-never.rs b/src/test/ui/never_type/impl-for-never.rs
new file mode 100644 (file)
index 0000000..9423f08
--- /dev/null
+++ b/src/test/ui/never_type/impl-for-never.rs
@@ -0,0 +1,27 @@
+// run-pass
+
+#![feature(never_type)]
+
+// Test that we can call static methods on ! both directly and when it appears in a generic
+
+trait StringifyType {
+    fn stringify_type() -> &'static str;
+}
+
+impl StringifyType for ! {
+    fn stringify_type() -> &'static str {
+        "!"
+    }
+}
+
+fn maybe_stringify<T: StringifyType>(opt: Option<T>) -> &'static str {
+    match opt {
+        Some(_) => T::stringify_type(),
+        None => "none",
+    }
+}
+
+fn main() {
+    println!("! is {}", <!>::stringify_type());
+    println!("None is {}", maybe_stringify(None::<!>));
+}
diff --git a/src/test/ui/never_type/issue-13352.rs b/src/test/ui/never_type/issue-13352.rs
new file mode 100644 (file)
index 0000000..e6995be
--- /dev/null
+++ b/src/test/ui/never_type/issue-13352.rs
@@ -0,0 +1,11 @@
+// ignore-cloudabi no std::process
+
+fn foo(_: Box<dyn FnMut()>) {}
+
+fn main() {
+    foo(loop {
+        std::process::exit(0);
+    });
+    2_usize + (loop {});
+    //~^ ERROR E0277
+}
diff --git a/src/test/ui/never_type/issue-13352.stderr b/src/test/ui/never_type/issue-13352.stderr
new file mode 100644 (file)
index 0000000..58ac74b
--- /dev/null
+++ b/src/test/ui/never_type/issue-13352.stderr
@@ -0,0 +1,11 @@
+error[E0277]: cannot add `()` to `usize`
+  --> $DIR/issue-13352.rs:9:13
+   |
+LL |     2_usize + (loop {});
+   |             ^ no implementation for `usize + ()`
+   |
+   = help: the trait `std::ops::Add<()>` is not implemented for `usize`
+
+error: aborting due to previous error
+
+For more information about this error, try `rustc --explain E0277`.
diff --git a/src/test/ui/never_type/issue-2149.rs b/src/test/ui/never_type/issue-2149.rs
new file mode 100644 (file)
index 0000000..d46f0e6
--- /dev/null
+++ b/src/test/ui/never_type/issue-2149.rs
@@ -0,0 +1,15 @@
+trait VecMonad<A> {
+    fn bind<B, F>(&self, f: F) where F: FnMut(A) -> Vec<B>;
+}
+
+impl<A> VecMonad<A> for Vec<A> {
+    fn bind<B, F>(&self, mut f: F) where F: FnMut(A) -> Vec<B> {
+        let mut r = panic!();
+        for elt in self { r = r + f(*elt); }
+        //~^ ERROR E0277
+   }
+}
+fn main() {
+    ["hi"].bind(|x| [x] );
+    //~^ ERROR no method named `bind` found for type `[&str; 1]` in the current scope
+}
diff --git a/src/test/ui/never_type/issue-2149.stderr b/src/test/ui/never_type/issue-2149.stderr
new file mode 100644 (file)
index 0000000..8ce2ba0
--- /dev/null
+++ b/src/test/ui/never_type/issue-2149.stderr
@@ -0,0 +1,22 @@
+error[E0277]: cannot add `std::vec::Vec<B>` to `()`
+  --> $DIR/issue-2149.rs:8:33
+   |
+LL |         for elt in self { r = r + f(*elt); }
+   |                                 ^ no implementation for `() + std::vec::Vec<B>`
+   |
+   = help: the trait `std::ops::Add<std::vec::Vec<B>>` is not implemented for `()`
+
+error[E0599]: no method named `bind` found for type `[&str; 1]` in the current scope
+  --> $DIR/issue-2149.rs:13:12
+   |
+LL |     ["hi"].bind(|x| [x] );
+   |            ^^^^ method not found in `[&str; 1]`
+   |
+   = help: items from traits can only be used if the trait is implemented and in scope
+   = note: the following trait defines an item `bind`, perhaps you need to implement it:
+           candidate #1: `VecMonad`
+
+error: aborting due to 2 previous errors
+
+Some errors have detailed explanations: E0277, E0599.
+For more information about an error, try `rustc --explain E0277`.
diff --git a/src/test/ui/never_type/issue-44402.rs b/src/test/ui/never_type/issue-44402.rs
new file mode 100644 (file)
index 0000000..699e480
--- /dev/null
+++ b/src/test/ui/never_type/issue-44402.rs
@@ -0,0 +1,33 @@
+// check-pass
+
+#![allow(dead_code)]
+#![feature(never_type)]
+#![feature(exhaustive_patterns)]
+
+// Regression test for inhabitedness check. The old
+// cache used to cause us to incorrectly decide
+// that `test_b` was invalid.
+
+struct Foo {
+    field1: !,
+    field2: Option<&'static Bar>,
+}
+
+struct Bar {
+    field1: &'static Foo
+}
+
+fn test_a() {
+    let x: Option<Foo> = None;
+    match x { None => () }
+}
+
+fn test_b() {
+    let x: Option<Bar> = None;
+    match x {
+        Some(_) => (),
+        None => ()
+    }
+}
+
+fn main() { }
diff --git a/src/test/ui/never_type/never-assign-dead-code.rs b/src/test/ui/never_type/never-assign-dead-code.rs
new file mode 100644 (file)
index 0000000..7bb7c87
--- /dev/null
+++ b/src/test/ui/never_type/never-assign-dead-code.rs
@@ -0,0 +1,12 @@
+// Test that an assignment of type ! makes the rest of the block dead code.
+
+// check-pass
+
+#![feature(never_type)]
+#![warn(unused)]
+
+fn main() {
+    let x: ! = panic!("aah"); //~ WARN unused
+    drop(x); //~ WARN unreachable
+    //~^ WARN unreachable
+}
diff --git a/src/test/ui/never_type/never-assign-dead-code.stderr b/src/test/ui/never_type/never-assign-dead-code.stderr
new file mode 100644 (file)
index 0000000..1860150
--- /dev/null
+++ b/src/test/ui/never_type/never-assign-dead-code.stderr
@@ -0,0 +1,37 @@
+warning: unreachable statement
+  --> $DIR/never-assign-dead-code.rs:10:5
+   |
+LL |     let x: ! = panic!("aah");
+   |                ------------- any code following this expression is unreachable
+LL |     drop(x);
+   |     ^^^^^^^^ unreachable statement
+   |
+note: lint level defined here
+  --> $DIR/never-assign-dead-code.rs:6:9
+   |
+LL | #![warn(unused)]
+   |         ^^^^^^
+   = note: `#[warn(unreachable_code)]` implied by `#[warn(unused)]`
+   = note: this warning originates in a macro outside of the current crate (in Nightly builds, run with -Z external-macro-backtrace for more info)
+
+warning: unreachable call
+  --> $DIR/never-assign-dead-code.rs:10:5
+   |
+LL |     drop(x);
+   |     ^^^^ - any code following this expression is unreachable
+   |     |
+   |     unreachable call
+
+warning: unused variable: `x`
+  --> $DIR/never-assign-dead-code.rs:9:9
+   |
+LL |     let x: ! = panic!("aah");
+   |         ^ help: consider prefixing with an underscore: `_x`
+   |
+note: lint level defined here
+  --> $DIR/never-assign-dead-code.rs:6:9
+   |
+LL | #![warn(unused)]
+   |         ^^^^^^
+   = note: `#[warn(unused_variables)]` implied by `#[warn(unused)]`
+
diff --git a/src/test/ui/never_type/never-assign-wrong-type.rs b/src/test/ui/never_type/never-assign-wrong-type.rs
new file mode 100644 (file)
index 0000000..67e26f5
--- /dev/null
+++ b/src/test/ui/never_type/never-assign-wrong-type.rs
@@ -0,0 +1,8 @@
+// Test that we can't use another type in place of !
+
+#![feature(never_type)]
+#![deny(warnings)]
+
+fn main() {
+    let x: ! = "hello"; //~ ERROR mismatched types
+}
diff --git a/src/test/ui/never_type/never-assign-wrong-type.stderr b/src/test/ui/never_type/never-assign-wrong-type.stderr
new file mode 100644 (file)
index 0000000..da2e77d
--- /dev/null
+++ b/src/test/ui/never_type/never-assign-wrong-type.stderr
@@ -0,0 +1,12 @@
+error[E0308]: mismatched types
+  --> $DIR/never-assign-wrong-type.rs:7:16
+   |
+LL |     let x: ! = "hello";
+   |                ^^^^^^^ expected !, found reference
+   |
+   = note: expected type `!`
+              found type `&'static str`
+
+error: aborting due to previous error
+
+For more information about this error, try `rustc --explain E0308`.
diff --git a/src/test/ui/never_type/never-associated-type.rs b/src/test/ui/never_type/never-associated-type.rs
new file mode 100644 (file)
index 0000000..7f0a3fe
--- /dev/null
+++ b/src/test/ui/never_type/never-associated-type.rs
@@ -0,0 +1,24 @@
+// Test that we can use ! as an associated type.
+
+// run-fail
+// error-pattern:kapow!
+
+#![feature(never_type)]
+
+trait Foo {
+    type Wow;
+
+    fn smeg(&self) -> Self::Wow;
+}
+
+struct Blah;
+impl Foo for Blah {
+    type Wow = !;
+    fn smeg(&self) -> ! {
+        panic!("kapow!");
+    }
+}
+
+fn main() {
+    Blah.smeg();
+}
diff --git a/src/test/ui/never_type/never-from-impl-is-reserved.rs b/src/test/ui/never_type/never-from-impl-is-reserved.rs
new file mode 100644 (file)
index 0000000..9d16015
--- /dev/null
+++ b/src/test/ui/never_type/never-from-impl-is-reserved.rs
@@ -0,0 +1,12 @@
+// check that the `for<T> T: From<!>` impl is reserved
+
+#![feature(never_type)]
+
+pub struct MyFoo;
+pub trait MyTrait {}
+
+impl MyTrait for MyFoo {}
+// This will conflict with the first impl if we impl `for<T> T: From<!>`.
+impl<T> MyTrait for T where T: From<!> {} //~ ERROR conflicting implementation
+
+fn main() {}
diff --git a/src/test/ui/never_type/never-from-impl-is-reserved.stderr b/src/test/ui/never_type/never-from-impl-is-reserved.stderr
new file mode 100644 (file)
index 0000000..8b8d0f4
--- /dev/null
+++ b/src/test/ui/never_type/never-from-impl-is-reserved.stderr
@@ -0,0 +1,14 @@
+error[E0119]: conflicting implementations of trait `MyTrait` for type `MyFoo`:
+  --> $DIR/never-from-impl-is-reserved.rs:10:1
+   |
+LL | impl MyTrait for MyFoo {}
+   | ---------------------- first implementation here
+LL | // This will conflict with the first impl if we impl `for<T> T: From<!>`.
+LL | impl<T> MyTrait for T where T: From<!> {}
+   | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ conflicting implementation for `MyFoo`
+   |
+   = note: permitting this impl would forbid us from adding `impl<T> From<!> for T` later; see rust-lang/rust#64715 for details
+
+error: aborting due to previous error
+
+For more information about this error, try `rustc --explain E0119`.
diff --git a/src/test/ui/never_type/never-result.rs b/src/test/ui/never_type/never-result.rs
new file mode 100644 (file)
index 0000000..35af379
--- /dev/null
+++ b/src/test/ui/never_type/never-result.rs
@@ -0,0 +1,21 @@
+// run-pass
+
+#![allow(unused_variables)]
+#![allow(unreachable_code)]
+
+// Test that we can extract a ! through pattern matching then use it as several different types.
+
+#![feature(never_type)]
+
+fn main() {
+    let x: Result<u32, !> = Ok(123);
+    match x {
+        Ok(z) => (),
+        Err(y) => {
+            let q: u32 = y;
+            let w: i32 = y;
+            let e: String = y;
+            y
+        },
+    }
+}
diff --git a/src/test/ui/never_type/never-type-arg.rs b/src/test/ui/never_type/never-type-arg.rs
new file mode 100644 (file)
index 0000000..a82d351
--- /dev/null
+++ b/src/test/ui/never_type/never-type-arg.rs
@@ -0,0 +1,18 @@
+// Test that we can use ! as an argument to a trait impl.
+
+// run-fail
+// error-pattern:oh no!
+
+#![feature(never_type)]
+
+struct Wub;
+
+impl PartialEq<!> for Wub {
+    fn eq(&self, other: &!) -> bool {
+        *other
+    }
+}
+
+fn main() {
+    let _ = Wub == panic!("oh no!");
+}
diff --git a/src/test/ui/never_type/never-type-rvalues.rs b/src/test/ui/never_type/never-type-rvalues.rs
new file mode 100644 (file)
index 0000000..9ccc73d
--- /dev/null
+++ b/src/test/ui/never_type/never-type-rvalues.rs
@@ -0,0 +1,38 @@
+// run-pass
+
+#![feature(never_type)]
+#![allow(dead_code)]
+#![allow(path_statements)]
+#![allow(unreachable_patterns)]
+
+fn never_direct(x: !) {
+    x;
+}
+
+fn never_ref_pat(ref x: !) {
+    *x;
+}
+
+fn never_ref(x: &!) {
+    let &y = x;
+    y;
+}
+
+fn never_pointer(x: *const !) {
+    unsafe {
+        *x;
+    }
+}
+
+fn never_slice(x: &[!]) {
+    x[0];
+}
+
+fn never_match(x: Result<(), !>) {
+    match x {
+        Ok(_) => {},
+        Err(_) => {},
+    }
+}
+
+pub fn main() { }
diff --git a/src/test/ui/never_type/never_coercions.rs b/src/test/ui/never_type/never_coercions.rs
new file mode 100644 (file)
index 0000000..105c386
--- /dev/null
+++ b/src/test/ui/never_type/never_coercions.rs
@@ -0,0 +1,12 @@
+// run-pass
+// Test that having something of type ! doesn't screw up type-checking and that it coerces to the
+// LUB type of the other match arms.
+
+fn main() {
+    let v: Vec<u32> = Vec::new();
+    match 0u32 {
+        0 => &v,
+        1 => return,
+        _ => &v[..],
+    };
+}
diff --git a/src/test/ui/never_type/never_transmute_never.rs b/src/test/ui/never_type/never_transmute_never.rs
new file mode 100644 (file)
index 0000000..fce3ced
--- /dev/null
+++ b/src/test/ui/never_type/never_transmute_never.rs
@@ -0,0 +1,23 @@
+// check-pass
+
+#![crate_type="lib"]
+
+#![feature(never_type)]
+#![allow(dead_code)]
+#![allow(unreachable_code)]
+#![allow(unused_variables)]
+
+struct Foo;
+
+pub fn f(x: !) -> ! {
+    x
+}
+
+pub fn ub() {
+    // This is completely undefined behaviour,
+    // but we still want to make sure it compiles.
+    let x: ! = unsafe {
+        std::mem::transmute::<Foo, !>(Foo)
+    };
+    f(x)
+}
diff --git a/src/test/ui/never_type/panic-uninitialized-zeroed.rs b/src/test/ui/never_type/panic-uninitialized-zeroed.rs
new file mode 100644 (file)
index 0000000..72b844d
--- /dev/null
+++ b/src/test/ui/never_type/panic-uninitialized-zeroed.rs
@@ -0,0 +1,102 @@
+// run-pass
+// ignore-emscripten compiled with panic=abort by default
+// This test checks that instantiating an uninhabited type via `mem::{uninitialized,zeroed}` results
+// in a runtime panic.
+
+#![feature(never_type)]
+#![allow(deprecated, invalid_value)]
+
+use std::{mem, panic};
+
+#[allow(dead_code)]
+struct Foo {
+    x: u8,
+    y: !,
+}
+
+enum Bar {}
+
+fn main() {
+    unsafe {
+        assert_eq!(
+            panic::catch_unwind(|| {
+                mem::uninitialized::<!>()
+            }).err().and_then(|a| a.downcast_ref::<String>().map(|s| {
+                s == "Attempted to instantiate uninhabited type !"
+            })),
+            Some(true)
+        );
+
+        assert_eq!(
+            panic::catch_unwind(|| {
+                mem::zeroed::<!>()
+            }).err().and_then(|a| a.downcast_ref::<String>().map(|s| {
+                s == "Attempted to instantiate uninhabited type !"
+            })),
+            Some(true)
+        );
+
+        assert_eq!(
+            panic::catch_unwind(|| {
+                mem::MaybeUninit::<!>::uninit().assume_init()
+            }).err().and_then(|a| a.downcast_ref::<String>().map(|s| {
+                s == "Attempted to instantiate uninhabited type !"
+            })),
+            Some(true)
+        );
+
+        assert_eq!(
+            panic::catch_unwind(|| {
+                mem::uninitialized::<Foo>()
+            }).err().and_then(|a| a.downcast_ref::<String>().map(|s| {
+                s == "Attempted to instantiate uninhabited type Foo"
+            })),
+            Some(true)
+        );
+
+        assert_eq!(
+            panic::catch_unwind(|| {
+                mem::zeroed::<Foo>()
+            }).err().and_then(|a| a.downcast_ref::<String>().map(|s| {
+                s == "Attempted to instantiate uninhabited type Foo"
+            })),
+            Some(true)
+        );
+
+        assert_eq!(
+            panic::catch_unwind(|| {
+                mem::MaybeUninit::<Foo>::uninit().assume_init()
+            }).err().and_then(|a| a.downcast_ref::<String>().map(|s| {
+                s == "Attempted to instantiate uninhabited type Foo"
+            })),
+            Some(true)
+        );
+
+        assert_eq!(
+            panic::catch_unwind(|| {
+                mem::uninitialized::<Bar>()
+            }).err().and_then(|a| a.downcast_ref::<String>().map(|s| {
+                s == "Attempted to instantiate uninhabited type Bar"
+            })),
+            Some(true)
+        );
+
+        assert_eq!(
+            panic::catch_unwind(|| {
+                mem::zeroed::<Bar>()
+            }).err().and_then(|a| a.downcast_ref::<String>().map(|s| {
+                s == "Attempted to instantiate uninhabited type Bar"
+            })),
+            Some(true)
+        );
+
+        assert_eq!(
+            panic::catch_unwind(|| {
+                mem::MaybeUninit::<Bar>::uninit().assume_init()
+            }).err().and_then(|a| a.downcast_ref::<String>().map(|s| {
+                s == "Attempted to instantiate uninhabited type Bar"
+            })),
+            Some(true)
+        );
+    }
+}
diff --git a/src/test/ui/never_type/try_from.rs b/src/test/ui/never_type/try_from.rs
new file mode 100644 (file)
index 0000000..5045157
--- /dev/null
+++ b/src/test/ui/never_type/try_from.rs
@@ -0,0 +1,37 @@
+// run-pass
+// This test relies on `TryFrom` being blanket impl for all `T: Into`
+// and `TryInto` being blanket impl for all `U: TryFrom`
+
+// This test was added to show the motivation for doing this
+// over `TryFrom` being blanket impl for all `T: From`
+
+#![feature(never_type)]
+
+use std::convert::{TryInto, Infallible};
+
+struct Foo<T> {
+    t: T,
+}
+
+// This fails to compile due to coherence restrictions
+// as of Rust version 1.32.x, therefore it could not be used
+// instead of the `Into` version of the impl, and serves as
+// motivation for a blanket impl for all `T: Into`, instead
+// of a blanket impl for all `T: From`
+/*
+impl<T> From<Foo<T>> for Box<T> {
+    fn from(foo: Foo<T>) -> Box<T> {
+        Box::new(foo.t)
+    }
+}
+*/
+
+impl<T> Into<Vec<T>> for Foo<T> {
+    fn into(self) -> Vec<T> {
+        vec![self.t]
+    }
+}
+
+pub fn main() {
+    let _: Result<Vec<i32>, Infallible> = Foo { t: 10 }.try_into();
+}
index b19e3a9dfb345604f9a107b444d2d4b10c3432d0..74c33df37a09edcdba9311ed0eab9814fca04300 100644 (file)
@@ -6,7 +6,7 @@ LL |     let _x = *s;
    |
    = note: expected type `std::marker::Sized`
               found type `std::marker::Sized`
-note: the lifetime 'a as defined on the function body at 9:8...
+note: the lifetime `'a` as defined on the function body at 9:8...
   --> $DIR/issue-50716.rs:9:8
    |
 LL | fn foo<'a, T: 'static>(s: Box<<&'a T as A>::X>)
index 90a35177f4c3bae75e75119c11f75ec8eb77430e..0cdc2d9443926f55b0011a9e2d3e93280bac7368 100644 (file)
@@ -4,7 +4,7 @@ error[E0312]: lifetime of reference outlives lifetime of borrowed content...
 LL |         self.y = b.z
    |                  ^^^
    |
-note: ...the reference is valid for the lifetime '_ as defined on the impl at 12:10...
+note: ...the reference is valid for the lifetime `'_` as defined on the impl at 12:10...
   --> $DIR/issue-52742.rs:12:10
    |
 LL | impl Foo<'_, '_> {
index ffb94ed7dd7c0eab3ee8a3bbe82708c546aacdaa..714a63b670c66a96688a0cbeb14c71d670aa181d 100644 (file)
@@ -16,7 +16,7 @@ note: ...so that reference does not outlive borrowed content
    |
 LL |         Foo { bar }
    |               ^^^
-note: but, the lifetime must be valid for the lifetime '_ as defined on the impl at 7:10...
+note: but, the lifetime must be valid for the lifetime `'_` as defined on the impl at 7:10...
   --> $DIR/issue-55394.rs:7:10
    |
 LL | impl Foo<'_> {
@@ -27,3 +27,4 @@ LL | impl Foo<'_> {
 
 error: aborting due to previous error
 
+For more information about this error, try `rustc --explain E0495`.
index 4ec16ba055a4c48a4edd34379ce63a8cb173b391..2dc7236cbc2749201a9f8d291d391ef80ae5b928 100644 (file)
@@ -5,7 +5,7 @@ LL |     *y
    |     ^^
    |
    = note: ...the reference is valid for the static lifetime...
-note: ...but the borrowed content is only valid for the lifetime 'a as defined on the function body at 1:47
+note: ...but the borrowed content is only valid for the lifetime `'a` as defined on the function body at 1:47
   --> $DIR/issue-55401.rs:1:47
    |
 LL | fn static_to_a_to_static_through_ref_in_tuple<'a>(x: &'a u32) -> &'static u32 {
diff --git a/src/test/ui/nll/issue-63154-normalize.rs b/src/test/ui/nll/issue-63154-normalize.rs
new file mode 100644 (file)
index 0000000..484c128
--- /dev/null
+++ b/src/test/ui/nll/issue-63154-normalize.rs
@@ -0,0 +1,34 @@
+// Regression test for rust-lang/rust#63154
+//
+// Before, we would ICE after failing to normalize the destination type
+// when checking call destinations and also when checking MIR
+// assignment statements.
+
+// check-pass
+
+trait HasAssocType {
+    type Inner;
+}
+
+impl HasAssocType for () {
+    type Inner = ();
+}
+
+trait Tr<I, T>: Fn(I) -> Option<T> {}
+impl<I, T, Q: Fn(I) -> Option<T>> Tr<I, T> for Q {}
+
+fn f<T: HasAssocType>() -> impl Tr<T, T::Inner> {
+    |_| None
+}
+
+fn g<T, Y>(f: impl Tr<T, Y>) -> impl Tr<T, Y> {
+    f
+}
+
+fn h() {
+    g(f())(());
+}
+
+fn main() {
+    h();
+}
index 951e73e7fd76555659eb5261b618e7493a626f2a..3a152fbc6fce8e5898ac1261ca1080b80497e3cd 100644 (file)
@@ -4,12 +4,12 @@ error[E0495]: cannot infer an appropriate lifetime for lifetime parameter `'d` d
 LL | fn visit_seq<'d, 'a: 'd>() -> <&'a () as Visitor<'d>>::Value {}
    | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
    |
-note: first, the lifetime cannot outlive the lifetime 'd as defined on the function body at 12:14...
+note: first, the lifetime cannot outlive the lifetime `'d` as defined on the function body at 12:14...
   --> $DIR/normalization-bounds-error.rs:12:14
    |
 LL | fn visit_seq<'d, 'a: 'd>() -> <&'a () as Visitor<'d>>::Value {}
    |              ^^
-note: ...but the lifetime must also be valid for the lifetime 'a as defined on the function body at 12:18...
+note: ...but the lifetime must also be valid for the lifetime `'a` as defined on the function body at 12:18...
   --> $DIR/normalization-bounds-error.rs:12:18
    |
 LL | fn visit_seq<'d, 'a: 'd>() -> <&'a () as Visitor<'d>>::Value {}
@@ -20,3 +20,4 @@ LL | fn visit_seq<'d, 'a: 'd>() -> <&'a () as Visitor<'d>>::Value {}
 
 error: aborting due to previous error
 
+For more information about this error, try `rustc --explain E0495`.
index f39f668e2329a5c61ad69b7eb5008321335e29a9..ecf9748af9ea30f7a707915f00e64e976d36315d 100644 (file)
@@ -6,12 +6,12 @@ LL |     const AC: Option<&'c str> = None;
    |
    = note: expected type `std::option::Option<&'b str>`
               found type `std::option::Option<&'c str>`
-note: the lifetime 'c as defined on the impl at 20:18...
+note: the lifetime `'c` as defined on the impl at 20:18...
   --> $DIR/trait-associated-constant.rs:20:18
    |
 LL | impl<'a: 'b, 'b, 'c> Anything<'a, 'b> for FailStruct {
    |                  ^^
-note: ...does not necessarily outlive the lifetime 'b as defined on the impl at 20:14
+note: ...does not necessarily outlive the lifetime `'b` as defined on the impl at 20:14
   --> $DIR/trait-associated-constant.rs:20:14
    |
 LL | impl<'a: 'b, 'b, 'c> Anything<'a, 'b> for FailStruct {
index 00d58d34362e644c78a445592e7a1e2f9fb8ffdc..6986389af88149ee503d183507fb1ee1fa807319 100644 (file)
@@ -14,7 +14,7 @@ LL | |     }
    = note: ...so that the expression is assignable:
            expected std::boxed::Box<std::boxed::Box<&isize>>
               found std::boxed::Box<std::boxed::Box<&isize>>
-note: but, the lifetime must be valid for the lifetime 'a as defined on the impl at 15:6...
+note: but, the lifetime must be valid for the lifetime `'a` as defined on the impl at 15:6...
   --> $DIR/type-alias-free-regions.rs:15:6
    |
 LL | impl<'a> FromBox<'a> for C<'a> {
@@ -39,7 +39,7 @@ LL | |     }
    = note: ...so that the expression is assignable:
            expected std::boxed::Box<&isize>
               found std::boxed::Box<&isize>
-note: but, the lifetime must be valid for the lifetime 'a as defined on the impl at 25:6...
+note: but, the lifetime must be valid for the lifetime `'a` as defined on the impl at 25:6...
   --> $DIR/type-alias-free-regions.rs:25:6
    |
 LL | impl<'a> FromTuple<'a> for C<'a> {
@@ -50,3 +50,4 @@ LL | impl<'a> FromTuple<'a> for C<'a> {
 
 error: aborting due to 2 previous errors
 
+For more information about this error, try `rustc --explain E0495`.
index 77e1339dc161d7ed43667e6ff2f4e4fdc8f7728e..4ebd991078864f6cb986739c477de2d5dc6744e5 100644 (file)
@@ -4,7 +4,7 @@ error[E0495]: cannot infer an appropriate lifetime for lifetime parameter `'a` d
 LL |     <Foo<'a>>::C
    |     ^^^^^^^^^^^^
    |
-note: first, the lifetime cannot outlive the lifetime 'a as defined on the function body at 7:8...
+note: first, the lifetime cannot outlive the lifetime `'a` as defined on the function body at 7:8...
   --> $DIR/constant-in-expr-inherent-1.rs:7:8
    |
 LL | fn foo<'a>(_: &'a u32) -> &'static u32 {
@@ -21,3 +21,4 @@ LL |     <Foo<'a>>::C
 
 error: aborting due to previous error
 
+For more information about this error, try `rustc --explain E0495`.
index 0a8ad4221c986ed509e1018b9a779cb5176e20cc..4c7adf75d2fd7a8a55687fa7046f9915df6bf217 100644 (file)
@@ -5,7 +5,7 @@ LL |     <() as Foo<'a>>::C
    |     ^^^^^^^^^^^^^^^^^^
    |
    = note: ...the reference is valid for the static lifetime...
-note: ...but the borrowed content is only valid for the lifetime 'a as defined on the function body at 17:8
+note: ...but the borrowed content is only valid for the lifetime `'a` as defined on the function body at 17:8
   --> $DIR/constant-in-expr-normalize.rs:17:8
    |
 LL | fn foo<'a>(_: &'a u32) -> &'static u32 {
index d596aaf098f7709db1a58a8268d1c5e2191413f3..d01d022cba7967945fd5f226398fec1d0fbf85d1 100644 (file)
@@ -5,7 +5,7 @@ LL |     <() as Foo<'a>>::C
    |     ^^^^^^^^^^^^^^^^^^
    |
    = note: ...the reference is valid for the static lifetime...
-note: ...but the borrowed content is only valid for the lifetime 'a as defined on the function body at 9:8
+note: ...but the borrowed content is only valid for the lifetime `'a` as defined on the function body at 9:8
   --> $DIR/constant-in-expr-trait-item-1.rs:9:8
    |
 LL | fn foo<'a>(_: &'a u32) -> &'static u32 {
index 80ff9a043d4d5004fe06283c1ec474fd3545796f..dd294280b903aee8a75c7cea25fe0ba011a777b5 100644 (file)
@@ -5,7 +5,7 @@ LL |     <T as Foo<'a>>::C
    |     ^^^^^^^^^^^^^^^^^
    |
    = note: ...the reference is valid for the static lifetime...
-note: ...but the borrowed content is only valid for the lifetime 'a as defined on the function body at 9:8
+note: ...but the borrowed content is only valid for the lifetime `'a` as defined on the function body at 9:8
   --> $DIR/constant-in-expr-trait-item-2.rs:9:8
    |
 LL | fn foo<'a, T: Foo<'a>>() -> &'static u32 {
index 77655fe091b6275fedbd0721f22cff7b94929a31..d61659e7e9afcd3c046fb42cad71504156ef8d2e 100644 (file)
@@ -4,7 +4,7 @@ error[E0495]: cannot infer an appropriate lifetime for lifetime parameter `'a` d
 LL |     T::C
    |     ^^^^
    |
-note: first, the lifetime cannot outlive the lifetime 'a as defined on the function body at 9:8...
+note: first, the lifetime cannot outlive the lifetime `'a` as defined on the function body at 9:8...
   --> $DIR/constant-in-expr-trait-item-3.rs:9:8
    |
 LL | fn foo<'a, T: Foo<'a>>() -> &'static u32 {
@@ -21,3 +21,4 @@ LL |     T::C
 
 error: aborting due to previous error
 
+For more information about this error, try `rustc --explain E0495`.
index 680430a05ee8ad98e66783bd1090e5ae1ebc4ad5..f21df68d5a2cc52e5fca6f26879ed9b86ac0da08 100644 (file)
@@ -18,5 +18,5 @@ LL |     m!((bad, pat));
 
 error: aborting due to 3 previous errors
 
-Some errors have detailed explanations: E0130, E0642.
+Some errors have detailed explanations: E0130, E0561, E0642.
 For more information about an error, try `rustc --explain E0130`.
index b65e0ecd253e65ae6fa5195316b0d43af06ec57c..1c2ce86646787e65eecc1462a06d16b20f35f52c 100644 (file)
@@ -30,4 +30,5 @@ LL | type A2 = fn(&arg: u8);
 
 error: aborting due to 5 previous errors
 
-For more information about this error, try `rustc --explain E0130`.
+Some errors have detailed explanations: E0130, E0561.
+For more information about an error, try `rustc --explain E0130`.
index aa18b923044c6a52d8db6ad3d1b082eb9755b28b..2362ccd32de994d0239ba0ce9ac3a336273d7a09 100644 (file)
@@ -8,6 +8,7 @@ LL |     assert::<&mut i32>();
    |     ^^^^^^^^^^^^^^^^^^ `&mut i32` may not be safely transferred across an unwind boundary
    |
    = help: the trait `std::panic::UnwindSafe` is not implemented for `&mut i32`
+   = note: `std::panic::UnwindSafe` is implemented for `&i32`, but not for `&mut i32`
 
 error: aborting due to previous error
 
index 2cdd6c5d890f2721ffa94bd6f70e351867e80a8c..d66322c48ec98c3f304e7a31574df4e90c858589 100644 (file)
@@ -4,7 +4,7 @@ error[E0495]: cannot infer an appropriate lifetime for automatic coercion due to
 LL |     ss
    |     ^^
    |
-note: first, the lifetime cannot outlive the lifetime 'a as defined on the function body at 54:10...
+note: first, the lifetime cannot outlive the lifetime `'a` as defined on the function body at 54:10...
   --> $DIR/object-lifetime-default-elision.rs:54:10
    |
 LL | fn load3<'a,'b>(ss: &'a dyn SomeTrait) -> &'b dyn SomeTrait {
@@ -14,7 +14,7 @@ note: ...so that reference does not outlive borrowed content
    |
 LL |     ss
    |     ^^
-note: but, the lifetime must be valid for the lifetime 'b as defined on the function body at 54:13...
+note: but, the lifetime must be valid for the lifetime `'b` as defined on the function body at 54:13...
   --> $DIR/object-lifetime-default-elision.rs:54:13
    |
 LL | fn load3<'a,'b>(ss: &'a dyn SomeTrait) -> &'b dyn SomeTrait {
@@ -29,7 +29,7 @@ error[E0495]: cannot infer an appropriate lifetime due to conflicting requiremen
 LL |     ss
    |     ^^
    |
-note: first, the lifetime cannot outlive the lifetime 'a as defined on the function body at 54:10...
+note: first, the lifetime cannot outlive the lifetime `'a` as defined on the function body at 54:10...
   --> $DIR/object-lifetime-default-elision.rs:54:10
    |
 LL | fn load3<'a,'b>(ss: &'a dyn SomeTrait) -> &'b dyn SomeTrait {
@@ -39,7 +39,7 @@ note: ...so that the declared lifetime parameter bounds are satisfied
    |
 LL |     ss
    |     ^^
-note: but, the lifetime must be valid for the lifetime 'b as defined on the function body at 54:13...
+note: but, the lifetime must be valid for the lifetime `'b` as defined on the function body at 54:13...
   --> $DIR/object-lifetime-default-elision.rs:54:13
    |
 LL | fn load3<'a,'b>(ss: &'a dyn SomeTrait) -> &'b dyn SomeTrait {
@@ -50,3 +50,4 @@ LL | fn load3<'a,'b>(ss: &'a dyn SomeTrait) -> &'b dyn SomeTrait {
 
 error: aborting due to 2 previous errors
 
+For more information about this error, try `rustc --explain E0495`.
index 4f9cef12c5ef29ae0e29a944b06a91df91a9ef10..99f0ce0602b11c9725017d30565e5f2b006108c0 100644 (file)
@@ -6,7 +6,7 @@ LL |     ss.t = t;
    |
    = note: expected type `&'a std::boxed::Box<(dyn Test + 'static)>`
               found type `&'a std::boxed::Box<(dyn Test + 'a)>`
-note: the lifetime 'a as defined on the function body at 14:6...
+note: the lifetime `'a` as defined on the function body at 14:6...
   --> $DIR/object-lifetime-default-from-rptr-box-error.rs:14:6
    |
 LL | fn c<'a>(t: &'a Box<dyn Test+'a>, mut ss: SomeStruct<'a>) {
index 3b7faee68aaba5a1fae9373e5014e5c26f8d6e1c..07d4d8c8ed40b0a94caaab4f4972c5879b965697 100644 (file)
@@ -6,7 +6,7 @@ LL |     ss.t = t;
    |
    = note: expected type `&'a MyBox<(dyn Test + 'static)>`
               found type `&'a MyBox<(dyn Test + 'a)>`
-note: the lifetime 'a as defined on the function body at 20:6...
+note: the lifetime `'a` as defined on the function body at 20:6...
   --> $DIR/object-lifetime-default-from-rptr-struct-error.rs:20:6
    |
 LL | fn c<'a>(t: &'a MyBox<dyn Test+'a>, mut ss: SomeStruct<'a>) {
index 928b9201982325d40f99f17496fe02f24dba9805..f825475b96b03b336b7fa4e46a0fb70b0fc57bcd 100644 (file)
@@ -18,7 +18,7 @@ LL |     load0(ss)
    |
    = note: expected type `&MyBox<(dyn SomeTrait + 'static)>`
               found type `&MyBox<(dyn SomeTrait + 'a)>`
-note: the lifetime 'a as defined on the function body at 30:10...
+note: the lifetime `'a` as defined on the function body at 30:10...
   --> $DIR/object-lifetime-default-mybox.rs:30:10
    |
 LL | fn load2<'a>(ss: &MyBox<dyn SomeTrait + 'a>) -> MyBox<dyn SomeTrait + 'a> {
diff --git a/src/test/ui/panic-uninitialized-zeroed.rs b/src/test/ui/panic-uninitialized-zeroed.rs
deleted file mode 100644 (file)
index 72b844d..0000000
+++ /dev/null
@@ -1,102 +0,0 @@
-// run-pass
-// ignore-emscripten compiled with panic=abort by default
-// This test checks that instantiating an uninhabited type via `mem::{uninitialized,zeroed}` results
-// in a runtime panic.
-
-#![feature(never_type)]
-#![allow(deprecated, invalid_value)]
-
-use std::{mem, panic};
-
-#[allow(dead_code)]
-struct Foo {
-    x: u8,
-    y: !,
-}
-
-enum Bar {}
-
-fn main() {
-    unsafe {
-        assert_eq!(
-            panic::catch_unwind(|| {
-                mem::uninitialized::<!>()
-            }).err().and_then(|a| a.downcast_ref::<String>().map(|s| {
-                s == "Attempted to instantiate uninhabited type !"
-            })),
-            Some(true)
-        );
-
-        assert_eq!(
-            panic::catch_unwind(|| {
-                mem::zeroed::<!>()
-            }).err().and_then(|a| a.downcast_ref::<String>().map(|s| {
-                s == "Attempted to instantiate uninhabited type !"
-            })),
-            Some(true)
-        );
-
-        assert_eq!(
-            panic::catch_unwind(|| {
-                mem::MaybeUninit::<!>::uninit().assume_init()
-            }).err().and_then(|a| a.downcast_ref::<String>().map(|s| {
-                s == "Attempted to instantiate uninhabited type !"
-            })),
-            Some(true)
-        );
-
-        assert_eq!(
-            panic::catch_unwind(|| {
-                mem::uninitialized::<Foo>()
-            }).err().and_then(|a| a.downcast_ref::<String>().map(|s| {
-                s == "Attempted to instantiate uninhabited type Foo"
-            })),
-            Some(true)
-        );
-
-        assert_eq!(
-            panic::catch_unwind(|| {
-                mem::zeroed::<Foo>()
-            }).err().and_then(|a| a.downcast_ref::<String>().map(|s| {
-                s == "Attempted to instantiate uninhabited type Foo"
-            })),
-            Some(true)
-        );
-
-        assert_eq!(
-            panic::catch_unwind(|| {
-                mem::MaybeUninit::<Foo>::uninit().assume_init()
-            }).err().and_then(|a| a.downcast_ref::<String>().map(|s| {
-                s == "Attempted to instantiate uninhabited type Foo"
-            })),
-            Some(true)
-        );
-
-        assert_eq!(
-            panic::catch_unwind(|| {
-                mem::uninitialized::<Bar>()
-            }).err().and_then(|a| a.downcast_ref::<String>().map(|s| {
-                s == "Attempted to instantiate uninhabited type Bar"
-            })),
-            Some(true)
-        );
-
-        assert_eq!(
-            panic::catch_unwind(|| {
-                mem::zeroed::<Bar>()
-            }).err().and_then(|a| a.downcast_ref::<String>().map(|s| {
-                s == "Attempted to instantiate uninhabited type Bar"
-            })),
-            Some(true)
-        );
-
-        assert_eq!(
-            panic::catch_unwind(|| {
-                mem::MaybeUninit::<Bar>::uninit().assume_init()
-            }).err().and_then(|a| a.downcast_ref::<String>().map(|s| {
-                s == "Attempted to instantiate uninhabited type Bar"
-            })),
-            Some(true)
-        );
-    }
-}
diff --git a/src/test/ui/panics/abort-on-panic.rs b/src/test/ui/panics/abort-on-panic.rs
new file mode 100644 (file)
index 0000000..c6e8dbf
--- /dev/null
@@ -0,0 +1,64 @@
+// run-pass
+
+#![allow(unused_must_use)]
+#![feature(unwind_attributes)]
+// Since we mark some ABIs as "nounwind" to LLVM, we must make sure that
+// we never unwind through them.
+
+// ignore-cloudabi no env and process
+// ignore-emscripten no processes
+// ignore-sgx no processes
+
+use std::{env, panic};
+use std::io::prelude::*;
+use std::io;
+use std::process::{Command, Stdio};
+
+#[unwind(aborts)] // FIXME(#58794) should work even without the attribute
+extern "C" fn panic_in_ffi() {
+    panic!("Test");
+}
+
+#[unwind(aborts)]
+extern "Rust" fn panic_in_rust_abi() {
+    panic!("TestRust");
+}
+
+fn test() {
+    let _ = panic::catch_unwind(|| { panic_in_ffi(); });
+    // The process should have aborted by now.
+    io::stdout().write(b"This should never be printed.\n");
+    let _ = io::stdout().flush();
+}
+
+fn testrust() {
+    let _ = panic::catch_unwind(|| { panic_in_rust_abi(); });
+    // The process should have aborted by now.
+    io::stdout().write(b"This should never be printed.\n");
+    let _ = io::stdout().flush();
+}
+
+fn main() {
+    let args: Vec<String> = env::args().collect();
+    if args.len() > 1 {
+        // This is inside the self-executed command.
+        match &*args[1] {
+            "test" => return test(),
+            "testrust" => return testrust(),
+            _ => panic!("bad test"),
+        }
+    }
+
+    // These end up calling the self-execution branches above.
+    let mut p = Command::new(&args[0])
+                        .stdout(Stdio::piped())
+                        .stdin(Stdio::piped())
+                        .arg("test").spawn().unwrap();
+    assert!(!p.wait().unwrap().success());
+
+    let mut p = Command::new(&args[0])
+                        .stdout(Stdio::piped())
+                        .stdin(Stdio::piped())
+                        .arg("testrust").spawn().unwrap();
+    assert!(!p.wait().unwrap().success());
+}
index 3287ece9ae644d672713ad4b651aa21db119a3c4..261e27b6e0d18fc808f52fe1874efca7e89ea8f9 100644 (file)
@@ -8,3 +8,4 @@ LL |     /// empty doc
 
 error: aborting due to previous error
 
+For more information about this error, try `rustc --explain E0584`.
diff --git a/src/test/ui/parser/intersection-patterns.rs b/src/test/ui/parser/intersection-patterns.rs
new file mode 100644 (file)
index 0000000..adb607c
--- /dev/null
@@ -0,0 +1,40 @@
+// This tests the parser recovery in `recover_intersection_pat`
+// and serves as a regression test for the diagnostics issue #65400.
+//
+// The general idea is that for `$pat_lhs @ $pat_rhs`, where
+// `$pat_lhs` is not generated by `ref? mut? $ident`, we want
+// either to suggest switching the order or to note that
+// intersection patterns are not allowed.
+
+fn main() {
+    let s: Option<u8> = None;
+
+    match s {
+        Some(x) @ y => {}
+        //~^ ERROR pattern on wrong side of `@`
+        //~| pattern on the left, should be on the right
+        //~| binding on the right, should be on the left
+        //~| HELP switch the order
+        //~| SUGGESTION y @ Some(x)
+        _ => {}
+    }
+
+    match s {
+        Some(x) @ Some(y) => {}
+        //~^ ERROR left-hand side of `@` must be a binding
+        //~| interpreted as a pattern, not a binding
+        //~| also a pattern
+        //~| NOTE bindings are `x`, `mut x`, `ref x`, and `ref mut x`
+        _ => {}
+    }
+
+    match 2 {
+        1 ..= 5 @ e => {}
+        //~^ ERROR pattern on wrong side of `@`
+        //~| pattern on the left, should be on the right
+        //~| binding on the right, should be on the left
+        //~| HELP switch the order
+        //~| SUGGESTION e @ 1 ..=5
+        _ => {}
+    }
+}
diff --git a/src/test/ui/parser/intersection-patterns.stderr b/src/test/ui/parser/intersection-patterns.stderr
new file mode 100644 (file)
index 0000000..f5bfee5
--- /dev/null
@@ -0,0 +1,33 @@
+error: pattern on wrong side of `@`
+  --> $DIR/intersection-patterns.rs:13:9
+   |
+LL |         Some(x) @ y => {}
+   |         -------^^^-
+   |         |         |
+   |         |         binding on the right, should be on the left
+   |         pattern on the left, should be on the right
+   |         help: switch the order: `y @ Some(x)`
+
+error: left-hand side of `@` must be a binding
+  --> $DIR/intersection-patterns.rs:23:9
+   |
+LL |         Some(x) @ Some(y) => {}
+   |         -------^^^-------
+   |         |         |
+   |         |         also a pattern
+   |         interpreted as a pattern, not a binding
+   |
+   = note: bindings are `x`, `mut x`, `ref x`, and `ref mut x`
+
+error: pattern on wrong side of `@`
+  --> $DIR/intersection-patterns.rs:32:9
+   |
+LL |         1 ..= 5 @ e => {}
+   |         -------^^^-
+   |         |         |
+   |         |         binding on the right, should be on the left
+   |         pattern on the left, should be on the right
+   |         help: switch the order: `e @ 1 ..=5`
+
+error: aborting due to 3 previous errors
+
index 22f80a8aae866f254a8623a196f7c08ab0a85851..7291732cebe4a3751062346c6920758f349d38c5 100644 (file)
@@ -3,6 +3,7 @@
 impl S {
     fn f(*, a: u8) -> u8 {}
     //~^ ERROR expected parameter name, found `*`
+    //~| ERROR mismatched types
 }
 
 fn main() {}
index 9e1178e8ac1f352d3f4fc53d8dc1f401f7cfbeeb..7e5c348e36ceac5073e18730a04b066186b5f148 100644 (file)
@@ -4,5 +4,17 @@ error: expected parameter name, found `*`
 LL |     fn f(*, a: u8) -> u8 {}
    |          ^ expected parameter name
 
-error: aborting due to previous error
+error[E0308]: mismatched types
+  --> $DIR/issue-33413.rs:4:23
+   |
+LL |     fn f(*, a: u8) -> u8 {}
+   |        -              ^^ expected u8, found ()
+   |        |
+   |        implicitly returns `()` as its body has no tail or `return` expression
+   |
+   = note: expected type `u8`
+              found type `()`
+
+error: aborting due to 2 previous errors
 
+For more information about this error, try `rustc --explain E0308`.
diff --git a/src/test/ui/parser/issue-65122-mac-invoc-in-mut-patterns.rs b/src/test/ui/parser/issue-65122-mac-invoc-in-mut-patterns.rs
new file mode 100644 (file)
index 0000000..97a405b
--- /dev/null
@@ -0,0 +1,26 @@
+// Regression test; used to ICE with 'visit_mac disabled by default' due to a
+// `MutVisitor` in `fn make_all_value_bindings_mutable` (`parse/parser/pat.rs`).
+
+macro_rules! mac1 {
+    ($eval:expr) => {
+        let mut $eval = ();
+        //~^ ERROR `mut` must be followed by a named binding
+    };
+}
+
+macro_rules! mac2 {
+    ($eval:pat) => {
+        let mut $eval = ();
+        //~^ ERROR `mut` must be followed by a named binding
+        //~| ERROR expected identifier, found `does_not_exist!()`
+    };
+}
+
+fn foo() {
+    mac1! { does_not_exist!() }
+    //~^ ERROR cannot find macro `does_not_exist` in this scope
+    mac2! { does_not_exist!() }
+    //~^ ERROR cannot find macro `does_not_exist` in this scope
+}
+
+fn main() {}
diff --git a/src/test/ui/parser/issue-65122-mac-invoc-in-mut-patterns.stderr b/src/test/ui/parser/issue-65122-mac-invoc-in-mut-patterns.stderr
new file mode 100644 (file)
index 0000000..dd193d6
--- /dev/null
@@ -0,0 +1,45 @@
+error: `mut` must be followed by a named binding
+  --> $DIR/issue-65122-mac-invoc-in-mut-patterns.rs:6:13
+   |
+LL |         let mut $eval = ();
+   |             ^^^^^^^^^ help: remove the `mut` prefix: `does_not_exist!()`
+...
+LL |     mac1! { does_not_exist!() }
+   |     --------------------------- in this macro invocation
+   |
+   = note: `mut` may be followed by `variable` and `variable @ pattern`
+
+error: expected identifier, found `does_not_exist!()`
+  --> $DIR/issue-65122-mac-invoc-in-mut-patterns.rs:13:17
+   |
+LL |         let mut $eval = ();
+   |                 ^^^^^ expected identifier
+...
+LL |     mac2! { does_not_exist!() }
+   |     --------------------------- in this macro invocation
+
+error: `mut` must be followed by a named binding
+  --> $DIR/issue-65122-mac-invoc-in-mut-patterns.rs:13:13
+   |
+LL |         let mut $eval = ();
+   |             ^^^ help: remove the `mut` prefix: `does_not_exist!()`
+...
+LL |     mac2! { does_not_exist!() }
+   |     --------------------------- in this macro invocation
+   |
+   = note: `mut` may be followed by `variable` and `variable @ pattern`
+
+error: cannot find macro `does_not_exist` in this scope
+  --> $DIR/issue-65122-mac-invoc-in-mut-patterns.rs:20:13
+   |
+LL |     mac1! { does_not_exist!() }
+   |             ^^^^^^^^^^^^^^
+
+error: cannot find macro `does_not_exist` in this scope
+  --> $DIR/issue-65122-mac-invoc-in-mut-patterns.rs:22:13
+   |
+LL |     mac2! { does_not_exist!() }
+   |             ^^^^^^^^^^^^^^
+
+error: aborting due to 5 previous errors
+
diff --git a/src/test/ui/parser/mismatched-delim-brace-empty-block.rs b/src/test/ui/parser/mismatched-delim-brace-empty-block.rs
new file mode 100644 (file)
index 0000000..0f5a2cb
--- /dev/null
@@ -0,0 +1,5 @@
+fn main() {
+
+}
+    let _ = ();
+} //~ ERROR unexpected close delimiter
diff --git a/src/test/ui/parser/mismatched-delim-brace-empty-block.stderr b/src/test/ui/parser/mismatched-delim-brace-empty-block.stderr
new file mode 100644 (file)
index 0000000..5ae5fc9
--- /dev/null
@@ -0,0 +1,14 @@
+error: unexpected close delimiter: `}`
+  --> $DIR/mismatched-delim-brace-empty-block.rs:5:1
+   |
+LL |   fn main() {
+   |  ___________-
+LL | |
+LL | | }
+   | |_- this block is empty, you might have not meant to close it
+LL |       let _ = ();
+LL |   }
+   |   ^ unexpected close delimiter
+
+error: aborting due to previous error
+
diff --git a/src/test/ui/parser/no-const-fn-in-extern-block.rs b/src/test/ui/parser/no-const-fn-in-extern-block.rs
new file mode 100644 (file)
index 0000000..29f2638
--- /dev/null
@@ -0,0 +1,8 @@
+extern {
+    const fn foo();
+    //~^ ERROR extern items cannot be `const`
+    const unsafe fn bar();
+    //~^ ERROR extern items cannot be `const`
+}
+
+fn main() {}
diff --git a/src/test/ui/parser/no-const-fn-in-extern-block.stderr b/src/test/ui/parser/no-const-fn-in-extern-block.stderr
new file mode 100644 (file)
index 0000000..5b4663a
--- /dev/null
@@ -0,0 +1,14 @@
+error: extern items cannot be `const`
+  --> $DIR/no-const-fn-in-extern-block.rs:2:5
+   |
+LL |     const fn foo();
+   |     ^^^^^
+
+error: extern items cannot be `const`
+  --> $DIR/no-const-fn-in-extern-block.rs:4:5
+   |
+LL |     const unsafe fn bar();
+   |     ^^^^^
+
+error: aborting due to 2 previous errors
+
index 3dcc0c8f3d496eb03e6cd0a641c264b868014117..9c7a25d589a1f9aafa988098042db58979b5dc5a 100644 (file)
@@ -3,15 +3,24 @@ fn f<T>() {}
 
 fn main() {
     false == false == false;
-    //~^ ERROR: chained comparison operators require parentheses
+    //~^ ERROR chained comparison operators require parentheses
 
     false == 0 < 2;
-    //~^ ERROR: chained comparison operators require parentheses
-    //~| ERROR: mismatched types
-    //~| ERROR: mismatched types
+    //~^ ERROR chained comparison operators require parentheses
+    //~| ERROR mismatched types
+    //~| ERROR mismatched types
 
     f<X>();
     //~^ ERROR chained comparison operators require parentheses
-    //~| HELP: use `::<...>` instead of `<...>`
-    //~| HELP: or use `(...)`
+    //~| HELP use `::<...>` instead of `<...>` to specify type arguments
+
+    f<Result<Option<X>, Option<Option<X>>>(1, 2);
+    //~^ ERROR chained comparison operators require parentheses
+    //~| HELP use `::<...>` instead of `<...>` to specify type arguments
+
+    use std::convert::identity;
+    let _ = identity<u8>;
+    //~^ ERROR chained comparison operators require parentheses
+    //~| HELP use `::<...>` instead of `<...>` to specify type arguments
+    //~| HELP or use `(...)` if you meant to specify fn arguments
 }
index e927f4c32484ededce5d662205d609a516e8fb77..5aa37a40cbd3dc23d1383eae5f2325096400bb16 100644 (file)
@@ -2,21 +2,41 @@ error: chained comparison operators require parentheses
   --> $DIR/require-parens-for-chained-comparison.rs:5:11
    |
 LL |     false == false == false;
-   |           ^^^^^^^^^^^^^^^^^
+   |           ^^^^^^^^^^^
 
 error: chained comparison operators require parentheses
   --> $DIR/require-parens-for-chained-comparison.rs:8:11
    |
 LL |     false == 0 < 2;
-   |           ^^^^^^^^
+   |           ^^^^^^
 
 error: chained comparison operators require parentheses
   --> $DIR/require-parens-for-chained-comparison.rs:13:6
    |
 LL |     f<X>();
-   |      ^^^^
+   |      ^^^
+help: use `::<...>` instead of `<...>` to specify type arguments
    |
-   = help: use `::<...>` instead of `<...>` if you meant to specify type arguments
+LL |     f::<X>();
+   |      ^^
+
+error: chained comparison operators require parentheses
+  --> $DIR/require-parens-for-chained-comparison.rs:17:6
+   |
+LL |     f<Result<Option<X>, Option<Option<X>>>(1, 2);
+   |      ^^^^^^^^
+help: use `::<...>` instead of `<...>` to specify type arguments
+   |
+LL |     f::<Result<Option<X>, Option<Option<X>>>(1, 2);
+   |      ^^
+
+error: chained comparison operators require parentheses
+  --> $DIR/require-parens-for-chained-comparison.rs:22:21
+   |
+LL |     let _ = identity<u8>;
+   |                     ^^^^
+   |
+   = help: use `::<...>` instead of `<...>` to specify type arguments
    = help: or use `(...)` if you meant to specify fn arguments
 
 error[E0308]: mismatched types
@@ -37,6 +57,6 @@ LL |     false == 0 < 2;
    = note: expected type `bool`
               found type `{integer}`
 
-error: aborting due to 5 previous errors
+error: aborting due to 7 previous errors
 
 For more information about this error, try `rustc --explain E0308`.
index 9021bd30a7781e6fb97a55721fb176089ee89655..6acc09b62c811cf97a2b3ab4a34e7134cdc9f8db 100644 (file)
@@ -5,7 +5,6 @@ LL |     a == b;
    |       ^^ no implementation for `&T == T`
    |
    = help: the trait `std::cmp::PartialEq<T>` is not implemented for `&T`
-   = help: consider adding a `where &T: std::cmp::PartialEq<T>` bound
 
 error: aborting due to previous error
 
index ac48ee0cb0fe37696a571ef6db264762a7ec11e0..4d9d06b8986d5c6b15c94cb52cd12a184b149776 100644 (file)
@@ -3,12 +3,13 @@ error[E0277]: `T` cannot be shared between threads safely
    |
 LL | fn is_zen<T: Zen>(_: T) {}
    |    ------    --- required by this bound in `is_zen`
-...
+LL | 
+LL | fn not_sync<T>(x: Guard<T>) {
+   |             - help: consider restricting this bound: `T: std::marker::Sync`
 LL |     is_zen(x)
    |            ^ `T` cannot be shared between threads safely
    |
    = help: the trait `std::marker::Sync` is not implemented for `T`
-   = help: consider adding a `where T: std::marker::Sync` bound
    = note: required because of the requirements on the impl of `Zen` for `&T`
    = note: required because it appears within the type `std::marker::PhantomData<&T>`
    = note: required because it appears within the type `Guard<'_, T>`
@@ -19,11 +20,12 @@ error[E0277]: `T` cannot be shared between threads safely
 LL | fn is_zen<T: Zen>(_: T) {}
    |    ------    --- required by this bound in `is_zen`
 ...
+LL | fn nested_not_sync<T>(x: Nested<Guard<T>>) {
+   |                    - help: consider restricting this bound: `T: std::marker::Sync`
 LL |     is_zen(x)
    |            ^ `T` cannot be shared between threads safely
    |
    = help: the trait `std::marker::Sync` is not implemented for `T`
-   = help: consider adding a `where T: std::marker::Sync` bound
    = note: required because of the requirements on the impl of `Zen` for `&T`
    = note: required because it appears within the type `std::marker::PhantomData<&T>`
    = note: required because it appears within the type `Guard<'_, T>`
index 759b63b188b35d379dcbfb392de88da61a12fae5..54aeb8616d959f4bb02f601e13627cab2e808304 100644 (file)
@@ -8,7 +8,7 @@
 #![feature(precise_pointer_size_matching)]
 #![feature(exclusive_range_pattern)]
 
-#![deny(unreachable_patterns)]
+#![deny(unreachable_patterns, overlapping_patterns)]
 
 use std::{usize, isize};
 
index 09148f9d0e6e1abd7d6d17ad1a28f63725afba19..8ffc12c31cb66326632b099c4aec8b77bf3dbdd0 100644 (file)
@@ -72,5 +72,5 @@ LL | use foo3::Bar;
 
 error: aborting due to 4 previous errors
 
-Some errors have detailed explanations: E0412, E0423, E0425.
+Some errors have detailed explanations: E0412, E0423, E0425, E0573.
 For more information about an error, try `rustc --explain E0412`.
index 8ea32f36f9e7a9fb9442b1a87448d99e45bc8984..13057a899f3c9f2d0992e9eb24080c9f9bd469bf 100644 (file)
@@ -82,5 +82,5 @@ LL |     use foo3::{Bar,Baz};
 
 error: aborting due to 7 previous errors
 
-Some errors have detailed explanations: E0423, E0603.
+Some errors have detailed explanations: E0423, E0573, E0603.
 For more information about an error, try `rustc --explain E0423`.
index 741ba0be2c2b6edaa686b91d21d85fdb69d1a2cc..3dc26b1955cd7fbea53ce1efe05319920bfea83f 100644 (file)
@@ -48,31 +48,31 @@ fn test() {
 }
 
 fn this_crate() {
-    let a = a::A(()); //~ ERROR tuple struct `A` is private
-    let b = a::B(2); //~ ERROR tuple struct `B` is private
-    let c = a::C(2, 3); //~ ERROR tuple struct `C` is private
+    let a = a::A(()); //~ ERROR tuple struct constructor `A` is private
+    let b = a::B(2); //~ ERROR tuple struct constructor `B` is private
+    let c = a::C(2, 3); //~ ERROR tuple struct constructor `C` is private
     let d = a::D(4);
 
-    let a::A(()) = a; //~ ERROR tuple struct `A` is private
-    let a::A(_) = a; //~ ERROR tuple struct `A` is private
-    match a { a::A(()) => {} } //~ ERROR tuple struct `A` is private
-    match a { a::A(_) => {} } //~ ERROR tuple struct `A` is private
-
-    let a::B(_) = b; //~ ERROR tuple struct `B` is private
-    let a::B(_b) = b; //~ ERROR tuple struct `B` is private
-    match b { a::B(_) => {} } //~ ERROR tuple struct `B` is private
-    match b { a::B(_b) => {} } //~ ERROR tuple struct `B` is private
-    match b { a::B(1) => {} a::B(_) => {} } //~ ERROR tuple struct `B` is private
-                                            //~^ ERROR tuple struct `B` is private
-
-    let a::C(_, _) = c; //~ ERROR tuple struct `C` is private
-    let a::C(_a, _) = c; //~ ERROR tuple struct `C` is private
-    let a::C(_, _b) = c; //~ ERROR tuple struct `C` is private
-    let a::C(_a, _b) = c; //~ ERROR tuple struct `C` is private
-    match c { a::C(_, _) => {} } //~ ERROR tuple struct `C` is private
-    match c { a::C(_a, _) => {} } //~ ERROR tuple struct `C` is private
-    match c { a::C(_, _b) => {} } //~ ERROR tuple struct `C` is private
-    match c { a::C(_a, _b) => {} } //~ ERROR tuple struct `C` is private
+    let a::A(()) = a; //~ ERROR tuple struct constructor `A` is private
+    let a::A(_) = a; //~ ERROR tuple struct constructor `A` is private
+    match a { a::A(()) => {} } //~ ERROR tuple struct constructor `A` is private
+    match a { a::A(_) => {} } //~ ERROR tuple struct constructor `A` is private
+
+    let a::B(_) = b; //~ ERROR tuple struct constructor `B` is private
+    let a::B(_b) = b; //~ ERROR tuple struct constructor `B` is private
+    match b { a::B(_) => {} } //~ ERROR tuple struct constructor `B` is private
+    match b { a::B(_b) => {} } //~ ERROR tuple struct constructor `B` is private
+    match b { a::B(1) => {} a::B(_) => {} } //~ ERROR tuple struct constructor `B` is private
+                                            //~^ ERROR tuple struct constructor `B` is private
+
+    let a::C(_, _) = c; //~ ERROR tuple struct constructor `C` is private
+    let a::C(_a, _) = c; //~ ERROR tuple struct constructor `C` is private
+    let a::C(_, _b) = c; //~ ERROR tuple struct constructor `C` is private
+    let a::C(_a, _b) = c; //~ ERROR tuple struct constructor `C` is private
+    match c { a::C(_, _) => {} } //~ ERROR tuple struct constructor `C` is private
+    match c { a::C(_a, _) => {} } //~ ERROR tuple struct constructor `C` is private
+    match c { a::C(_, _b) => {} } //~ ERROR tuple struct constructor `C` is private
+    match c { a::C(_a, _b) => {} } //~ ERROR tuple struct constructor `C` is private
 
     let a::D(_) = d;
     let a::D(_d) = d;
@@ -80,38 +80,38 @@ fn this_crate() {
     match d { a::D(_d) => {} }
     match d { a::D(1) => {} a::D(_) => {} }
 
-    let a2 = a::A; //~ ERROR tuple struct `A` is private
-    let b2 = a::B; //~ ERROR tuple struct `B` is private
-    let c2 = a::C; //~ ERROR tuple struct `C` is private
+    let a2 = a::A; //~ ERROR tuple struct constructor `A` is private
+    let b2 = a::B; //~ ERROR tuple struct constructor `B` is private
+    let c2 = a::C; //~ ERROR tuple struct constructor `C` is private
     let d2 = a::D;
 }
 
 fn xcrate() {
-    let a = other::A(()); //~ ERROR tuple struct `A` is private
-    let b = other::B(2); //~ ERROR tuple struct `B` is private
-    let c = other::C(2, 3); //~ ERROR tuple struct `C` is private
+    let a = other::A(()); //~ ERROR tuple struct constructor `A` is private
+    let b = other::B(2); //~ ERROR tuple struct constructor `B` is private
+    let c = other::C(2, 3); //~ ERROR tuple struct constructor `C` is private
     let d = other::D(4);
 
-    let other::A(()) = a; //~ ERROR tuple struct `A` is private
-    let other::A(_) = a; //~ ERROR tuple struct `A` is private
-    match a { other::A(()) => {} } //~ ERROR tuple struct `A` is private
-    match a { other::A(_) => {} } //~ ERROR tuple struct `A` is private
-
-    let other::B(_) = b; //~ ERROR tuple struct `B` is private
-    let other::B(_b) = b; //~ ERROR tuple struct `B` is private
-    match b { other::B(_) => {} } //~ ERROR tuple struct `B` is private
-    match b { other::B(_b) => {} } //~ ERROR tuple struct `B` is private
-    match b { other::B(1) => {} other::B(_) => {} } //~ ERROR tuple struct `B` is private
-                                                    //~^ ERROR tuple struct `B` is private
-
-    let other::C(_, _) = c; //~ ERROR tuple struct `C` is private
-    let other::C(_a, _) = c; //~ ERROR tuple struct `C` is private
-    let other::C(_, _b) = c; //~ ERROR tuple struct `C` is private
-    let other::C(_a, _b) = c; //~ ERROR tuple struct `C` is private
-    match c { other::C(_, _) => {} } //~ ERROR tuple struct `C` is private
-    match c { other::C(_a, _) => {} } //~ ERROR tuple struct `C` is private
-    match c { other::C(_, _b) => {} } //~ ERROR tuple struct `C` is private
-    match c { other::C(_a, _b) => {} } //~ ERROR tuple struct `C` is private
+    let other::A(()) = a; //~ ERROR tuple struct constructor `A` is private
+    let other::A(_) = a; //~ ERROR tuple struct constructor `A` is private
+    match a { other::A(()) => {} } //~ ERROR tuple struct constructor `A` is private
+    match a { other::A(_) => {} } //~ ERROR tuple struct constructor `A` is private
+
+    let other::B(_) = b; //~ ERROR tuple struct constructor `B` is private
+    let other::B(_b) = b; //~ ERROR tuple struct constructor `B` is private
+    match b { other::B(_) => {} } //~ ERROR tuple struct constructor `B` is private
+    match b { other::B(_b) => {} } //~ ERROR tuple struct constructor `B` is private
+    match b { other::B(1) => {}//~ ERROR tuple struct constructor `B` is private
+        other::B(_) => {} }    //~ ERROR tuple struct constructor `B` is private
+
+    let other::C(_, _) = c; //~ ERROR tuple struct constructor `C` is private
+    let other::C(_a, _) = c; //~ ERROR tuple struct constructor `C` is private
+    let other::C(_, _b) = c; //~ ERROR tuple struct constructor `C` is private
+    let other::C(_a, _b) = c; //~ ERROR tuple struct constructor `C` is private
+    match c { other::C(_, _) => {} } //~ ERROR tuple struct constructor `C` is private
+    match c { other::C(_a, _) => {} } //~ ERROR tuple struct constructor `C` is private
+    match c { other::C(_, _b) => {} } //~ ERROR tuple struct constructor `C` is private
+    match c { other::C(_a, _b) => {} } //~ ERROR tuple struct constructor `C` is private
 
     let other::D(_) = d;
     let other::D(_d) = d;
@@ -119,9 +119,9 @@ fn xcrate() {
     match d { other::D(_d) => {} }
     match d { other::D(1) => {} other::D(_) => {} }
 
-    let a2 = other::A; //~ ERROR tuple struct `A` is private
-    let b2 = other::B; //~ ERROR tuple struct `B` is private
-    let c2 = other::C; //~ ERROR tuple struct `C` is private
+    let a2 = other::A; //~ ERROR tuple struct constructor `A` is private
+    let b2 = other::B; //~ ERROR tuple struct constructor `B` is private
+    let c2 = other::C; //~ ERROR tuple struct constructor `C` is private
     let d2 = other::D;
 }
 
index 532d1ac1e2fb8552c9a0ad1bc00eb625549af4e5..2ee83149b695f12c8234d9c47b66fdf90fbff4be 100644 (file)
-error[E0603]: tuple struct `A` is private
+error[E0603]: tuple struct constructor `A` is private
   --> $DIR/privacy5.rs:51:16
    |
+LL |     pub struct A(());
+   |                  -- a constructor is private if any of the fields is private
+...
 LL |     let a = a::A(());
    |                ^
-   |
-   = note: a tuple struct constructor is private if any of its fields is private
 
-error[E0603]: tuple struct `B` is private
+error[E0603]: tuple struct constructor `B` is private
   --> $DIR/privacy5.rs:52:16
    |
+LL |     pub struct B(isize);
+   |                  ----- a constructor is private if any of the fields is private
+...
 LL |     let b = a::B(2);
    |                ^
-   |
-   = note: a tuple struct constructor is private if any of its fields is private
 
-error[E0603]: tuple struct `C` is private
+error[E0603]: tuple struct constructor `C` is private
   --> $DIR/privacy5.rs:53:16
    |
+LL |     pub struct C(pub isize, isize);
+   |                  ---------------- a constructor is private if any of the fields is private
+...
 LL |     let c = a::C(2, 3);
    |                ^
-   |
-   = note: a tuple struct constructor is private if any of its fields is private
 
-error[E0603]: tuple struct `A` is private
+error[E0603]: tuple struct constructor `A` is private
   --> $DIR/privacy5.rs:56:12
    |
+LL |     pub struct A(());
+   |                  -- a constructor is private if any of the fields is private
+...
 LL |     let a::A(()) = a;
    |            ^
-   |
-   = note: a tuple struct constructor is private if any of its fields is private
 
-error[E0603]: tuple struct `A` is private
+error[E0603]: tuple struct constructor `A` is private
   --> $DIR/privacy5.rs:57:12
    |
+LL |     pub struct A(());
+   |                  -- a constructor is private if any of the fields is private
+...
 LL |     let a::A(_) = a;
    |            ^
-   |
-   = note: a tuple struct constructor is private if any of its fields is private
 
-error[E0603]: tuple struct `A` is private
+error[E0603]: tuple struct constructor `A` is private
   --> $DIR/privacy5.rs:58:18
    |
+LL |     pub struct A(());
+   |                  -- a constructor is private if any of the fields is private
+...
 LL |     match a { a::A(()) => {} }
    |                  ^
-   |
-   = note: a tuple struct constructor is private if any of its fields is private
 
-error[E0603]: tuple struct `A` is private
+error[E0603]: tuple struct constructor `A` is private
   --> $DIR/privacy5.rs:59:18
    |
+LL |     pub struct A(());
+   |                  -- a constructor is private if any of the fields is private
+...
 LL |     match a { a::A(_) => {} }
    |                  ^
-   |
-   = note: a tuple struct constructor is private if any of its fields is private
 
-error[E0603]: tuple struct `B` is private
+error[E0603]: tuple struct constructor `B` is private
   --> $DIR/privacy5.rs:61:12
    |
+LL |     pub struct B(isize);
+   |                  ----- a constructor is private if any of the fields is private
+...
 LL |     let a::B(_) = b;
    |            ^
-   |
-   = note: a tuple struct constructor is private if any of its fields is private
 
-error[E0603]: tuple struct `B` is private
+error[E0603]: tuple struct constructor `B` is private
   --> $DIR/privacy5.rs:62:12
    |
+LL |     pub struct B(isize);
+   |                  ----- a constructor is private if any of the fields is private
+...
 LL |     let a::B(_b) = b;
    |            ^
-   |
-   = note: a tuple struct constructor is private if any of its fields is private
 
-error[E0603]: tuple struct `B` is private
+error[E0603]: tuple struct constructor `B` is private
   --> $DIR/privacy5.rs:63:18
    |
+LL |     pub struct B(isize);
+   |                  ----- a constructor is private if any of the fields is private
+...
 LL |     match b { a::B(_) => {} }
    |                  ^
-   |
-   = note: a tuple struct constructor is private if any of its fields is private
 
-error[E0603]: tuple struct `B` is private
+error[E0603]: tuple struct constructor `B` is private
   --> $DIR/privacy5.rs:64:18
    |
+LL |     pub struct B(isize);
+   |                  ----- a constructor is private if any of the fields is private
+...
 LL |     match b { a::B(_b) => {} }
    |                  ^
-   |
-   = note: a tuple struct constructor is private if any of its fields is private
 
-error[E0603]: tuple struct `B` is private
+error[E0603]: tuple struct constructor `B` is private
   --> $DIR/privacy5.rs:65:18
    |
+LL |     pub struct B(isize);
+   |                  ----- a constructor is private if any of the fields is private
+...
 LL |     match b { a::B(1) => {} a::B(_) => {} }
    |                  ^
-   |
-   = note: a tuple struct constructor is private if any of its fields is private
 
-error[E0603]: tuple struct `B` is private
+error[E0603]: tuple struct constructor `B` is private
   --> $DIR/privacy5.rs:65:32
    |
+LL |     pub struct B(isize);
+   |                  ----- a constructor is private if any of the fields is private
+...
 LL |     match b { a::B(1) => {} a::B(_) => {} }
    |                                ^
-   |
-   = note: a tuple struct constructor is private if any of its fields is private
 
-error[E0603]: tuple struct `C` is private
+error[E0603]: tuple struct constructor `C` is private
   --> $DIR/privacy5.rs:68:12
    |
+LL |     pub struct C(pub isize, isize);
+   |                  ---------------- a constructor is private if any of the fields is private
+...
 LL |     let a::C(_, _) = c;
    |            ^
-   |
-   = note: a tuple struct constructor is private if any of its fields is private
 
-error[E0603]: tuple struct `C` is private
+error[E0603]: tuple struct constructor `C` is private
   --> $DIR/privacy5.rs:69:12
    |
+LL |     pub struct C(pub isize, isize);
+   |                  ---------------- a constructor is private if any of the fields is private
+...
 LL |     let a::C(_a, _) = c;
    |            ^
-   |
-   = note: a tuple struct constructor is private if any of its fields is private
 
-error[E0603]: tuple struct `C` is private
+error[E0603]: tuple struct constructor `C` is private
   --> $DIR/privacy5.rs:70:12
    |
+LL |     pub struct C(pub isize, isize);
+   |                  ---------------- a constructor is private if any of the fields is private
+...
 LL |     let a::C(_, _b) = c;
    |            ^
-   |
-   = note: a tuple struct constructor is private if any of its fields is private
 
-error[E0603]: tuple struct `C` is private
+error[E0603]: tuple struct constructor `C` is private
   --> $DIR/privacy5.rs:71:12
    |
+LL |     pub struct C(pub isize, isize);
+   |                  ---------------- a constructor is private if any of the fields is private
+...
 LL |     let a::C(_a, _b) = c;
    |            ^
-   |
-   = note: a tuple struct constructor is private if any of its fields is private
 
-error[E0603]: tuple struct `C` is private
+error[E0603]: tuple struct constructor `C` is private
   --> $DIR/privacy5.rs:72:18
    |
+LL |     pub struct C(pub isize, isize);
+   |                  ---------------- a constructor is private if any of the fields is private
+...
 LL |     match c { a::C(_, _) => {} }
    |                  ^
-   |
-   = note: a tuple struct constructor is private if any of its fields is private
 
-error[E0603]: tuple struct `C` is private
+error[E0603]: tuple struct constructor `C` is private
   --> $DIR/privacy5.rs:73:18
    |
+LL |     pub struct C(pub isize, isize);
+   |                  ---------------- a constructor is private if any of the fields is private
+...
 LL |     match c { a::C(_a, _) => {} }
    |                  ^
-   |
-   = note: a tuple struct constructor is private if any of its fields is private
 
-error[E0603]: tuple struct `C` is private
+error[E0603]: tuple struct constructor `C` is private
   --> $DIR/privacy5.rs:74:18
    |
+LL |     pub struct C(pub isize, isize);
+   |                  ---------------- a constructor is private if any of the fields is private
+...
 LL |     match c { a::C(_, _b) => {} }
    |                  ^
-   |
-   = note: a tuple struct constructor is private if any of its fields is private
 
-error[E0603]: tuple struct `C` is private
+error[E0603]: tuple struct constructor `C` is private
   --> $DIR/privacy5.rs:75:18
    |
+LL |     pub struct C(pub isize, isize);
+   |                  ---------------- a constructor is private if any of the fields is private
+...
 LL |     match c { a::C(_a, _b) => {} }
    |                  ^
-   |
-   = note: a tuple struct constructor is private if any of its fields is private
 
-error[E0603]: tuple struct `A` is private
+error[E0603]: tuple struct constructor `A` is private
   --> $DIR/privacy5.rs:83:17
    |
+LL |     pub struct A(());
+   |                  -- a constructor is private if any of the fields is private
+...
 LL |     let a2 = a::A;
    |                 ^
-   |
-   = note: a tuple struct constructor is private if any of its fields is private
 
-error[E0603]: tuple struct `B` is private
+error[E0603]: tuple struct constructor `B` is private
   --> $DIR/privacy5.rs:84:17
    |
+LL |     pub struct B(isize);
+   |                  ----- a constructor is private if any of the fields is private
+...
 LL |     let b2 = a::B;
    |                 ^
-   |
-   = note: a tuple struct constructor is private if any of its fields is private
 
-error[E0603]: tuple struct `C` is private
+error[E0603]: tuple struct constructor `C` is private
   --> $DIR/privacy5.rs:85:17
    |
+LL |     pub struct C(pub isize, isize);
+   |                  ---------------- a constructor is private if any of the fields is private
+...
 LL |     let c2 = a::C;
    |                 ^
-   |
-   = note: a tuple struct constructor is private if any of its fields is private
 
-error[E0603]: tuple struct `A` is private
+error[E0603]: tuple struct constructor `A` is private
   --> $DIR/privacy5.rs:90:20
    |
 LL |     let a = other::A(());
    |                    ^
+   | 
+  ::: $DIR/auxiliary/privacy_tuple_struct.rs:1:14
    |
-   = note: a tuple struct constructor is private if any of its fields is private
+LL | pub struct A(());
+   |              -- a constructor is private if any of the fields is private
 
-error[E0603]: tuple struct `B` is private
+error[E0603]: tuple struct constructor `B` is private
   --> $DIR/privacy5.rs:91:20
    |
 LL |     let b = other::B(2);
    |                    ^
+   | 
+  ::: $DIR/auxiliary/privacy_tuple_struct.rs:2:14
    |
-   = note: a tuple struct constructor is private if any of its fields is private
+LL | pub struct B(isize);
+   |              ----- a constructor is private if any of the fields is private
 
-error[E0603]: tuple struct `C` is private
+error[E0603]: tuple struct constructor `C` is private
   --> $DIR/privacy5.rs:92:20
    |
 LL |     let c = other::C(2, 3);
    |                    ^
+   | 
+  ::: $DIR/auxiliary/privacy_tuple_struct.rs:3:14
    |
-   = note: a tuple struct constructor is private if any of its fields is private
+LL | pub struct C(pub isize, isize);
+   |              ---------------- a constructor is private if any of the fields is private
 
-error[E0603]: tuple struct `A` is private
+error[E0603]: tuple struct constructor `A` is private
   --> $DIR/privacy5.rs:95:16
    |
 LL |     let other::A(()) = a;
    |                ^
+   | 
+  ::: $DIR/auxiliary/privacy_tuple_struct.rs:1:14
    |
-   = note: a tuple struct constructor is private if any of its fields is private
+LL | pub struct A(());
+   |              -- a constructor is private if any of the fields is private
 
-error[E0603]: tuple struct `A` is private
+error[E0603]: tuple struct constructor `A` is private
   --> $DIR/privacy5.rs:96:16
    |
 LL |     let other::A(_) = a;
    |                ^
+   | 
+  ::: $DIR/auxiliary/privacy_tuple_struct.rs:1:14
    |
-   = note: a tuple struct constructor is private if any of its fields is private
+LL | pub struct A(());
+   |              -- a constructor is private if any of the fields is private
 
-error[E0603]: tuple struct `A` is private
+error[E0603]: tuple struct constructor `A` is private
   --> $DIR/privacy5.rs:97:22
    |
 LL |     match a { other::A(()) => {} }
    |                      ^
+   | 
+  ::: $DIR/auxiliary/privacy_tuple_struct.rs:1:14
    |
-   = note: a tuple struct constructor is private if any of its fields is private
+LL | pub struct A(());
+   |              -- a constructor is private if any of the fields is private
 
-error[E0603]: tuple struct `A` is private
+error[E0603]: tuple struct constructor `A` is private
   --> $DIR/privacy5.rs:98:22
    |
 LL |     match a { other::A(_) => {} }
    |                      ^
+   | 
+  ::: $DIR/auxiliary/privacy_tuple_struct.rs:1:14
    |
-   = note: a tuple struct constructor is private if any of its fields is private
+LL | pub struct A(());
+   |              -- a constructor is private if any of the fields is private
 
-error[E0603]: tuple struct `B` is private
+error[E0603]: tuple struct constructor `B` is private
   --> $DIR/privacy5.rs:100:16
    |
 LL |     let other::B(_) = b;
    |                ^
+   | 
+  ::: $DIR/auxiliary/privacy_tuple_struct.rs:2:14
    |
-   = note: a tuple struct constructor is private if any of its fields is private
+LL | pub struct B(isize);
+   |              ----- a constructor is private if any of the fields is private
 
-error[E0603]: tuple struct `B` is private
+error[E0603]: tuple struct constructor `B` is private
   --> $DIR/privacy5.rs:101:16
    |
 LL |     let other::B(_b) = b;
    |                ^
+   | 
+  ::: $DIR/auxiliary/privacy_tuple_struct.rs:2:14
    |
-   = note: a tuple struct constructor is private if any of its fields is private
+LL | pub struct B(isize);
+   |              ----- a constructor is private if any of the fields is private
 
-error[E0603]: tuple struct `B` is private
+error[E0603]: tuple struct constructor `B` is private
   --> $DIR/privacy5.rs:102:22
    |
 LL |     match b { other::B(_) => {} }
    |                      ^
+   | 
+  ::: $DIR/auxiliary/privacy_tuple_struct.rs:2:14
    |
-   = note: a tuple struct constructor is private if any of its fields is private
+LL | pub struct B(isize);
+   |              ----- a constructor is private if any of the fields is private
 
-error[E0603]: tuple struct `B` is private
+error[E0603]: tuple struct constructor `B` is private
   --> $DIR/privacy5.rs:103:22
    |
 LL |     match b { other::B(_b) => {} }
    |                      ^
+   | 
+  ::: $DIR/auxiliary/privacy_tuple_struct.rs:2:14
    |
-   = note: a tuple struct constructor is private if any of its fields is private
+LL | pub struct B(isize);
+   |              ----- a constructor is private if any of the fields is private
 
-error[E0603]: tuple struct `B` is private
+error[E0603]: tuple struct constructor `B` is private
   --> $DIR/privacy5.rs:104:22
    |
-LL |     match b { other::B(1) => {} other::B(_) => {} }
+LL |     match b { other::B(1) => {}
    |                      ^
+   | 
+  ::: $DIR/auxiliary/privacy_tuple_struct.rs:2:14
    |
-   = note: a tuple struct constructor is private if any of its fields is private
+LL | pub struct B(isize);
+   |              ----- a constructor is private if any of the fields is private
 
-error[E0603]: tuple struct `B` is private
-  --> $DIR/privacy5.rs:104:40
+error[E0603]: tuple struct constructor `B` is private
+  --> $DIR/privacy5.rs:105:16
    |
-LL |     match b { other::B(1) => {} other::B(_) => {} }
-   |                                        ^
+LL |         other::B(_) => {} }
+   |                ^
+   | 
+  ::: $DIR/auxiliary/privacy_tuple_struct.rs:2:14
    |
-   = note: a tuple struct constructor is private if any of its fields is private
+LL | pub struct B(isize);
+   |              ----- a constructor is private if any of the fields is private
 
-error[E0603]: tuple struct `C` is private
+error[E0603]: tuple struct constructor `C` is private
   --> $DIR/privacy5.rs:107:16
    |
 LL |     let other::C(_, _) = c;
    |                ^
+   | 
+  ::: $DIR/auxiliary/privacy_tuple_struct.rs:3:14
    |
-   = note: a tuple struct constructor is private if any of its fields is private
+LL | pub struct C(pub isize, isize);
+   |              ---------------- a constructor is private if any of the fields is private
 
-error[E0603]: tuple struct `C` is private
+error[E0603]: tuple struct constructor `C` is private
   --> $DIR/privacy5.rs:108:16
    |
 LL |     let other::C(_a, _) = c;
    |                ^
+   | 
+  ::: $DIR/auxiliary/privacy_tuple_struct.rs:3:14
    |
-   = note: a tuple struct constructor is private if any of its fields is private
+LL | pub struct C(pub isize, isize);
+   |              ---------------- a constructor is private if any of the fields is private
 
-error[E0603]: tuple struct `C` is private
+error[E0603]: tuple struct constructor `C` is private
   --> $DIR/privacy5.rs:109:16
    |
 LL |     let other::C(_, _b) = c;
    |                ^
+   | 
+  ::: $DIR/auxiliary/privacy_tuple_struct.rs:3:14
    |
-   = note: a tuple struct constructor is private if any of its fields is private
+LL | pub struct C(pub isize, isize);
+   |              ---------------- a constructor is private if any of the fields is private
 
-error[E0603]: tuple struct `C` is private
+error[E0603]: tuple struct constructor `C` is private
   --> $DIR/privacy5.rs:110:16
    |
 LL |     let other::C(_a, _b) = c;
    |                ^
+   | 
+  ::: $DIR/auxiliary/privacy_tuple_struct.rs:3:14
    |
-   = note: a tuple struct constructor is private if any of its fields is private
+LL | pub struct C(pub isize, isize);
+   |              ---------------- a constructor is private if any of the fields is private
 
-error[E0603]: tuple struct `C` is private
+error[E0603]: tuple struct constructor `C` is private
   --> $DIR/privacy5.rs:111:22
    |
 LL |     match c { other::C(_, _) => {} }
    |                      ^
+   | 
+  ::: $DIR/auxiliary/privacy_tuple_struct.rs:3:14
    |
-   = note: a tuple struct constructor is private if any of its fields is private
+LL | pub struct C(pub isize, isize);
+   |              ---------------- a constructor is private if any of the fields is private
 
-error[E0603]: tuple struct `C` is private
+error[E0603]: tuple struct constructor `C` is private
   --> $DIR/privacy5.rs:112:22
    |
 LL |     match c { other::C(_a, _) => {} }
    |                      ^
+   | 
+  ::: $DIR/auxiliary/privacy_tuple_struct.rs:3:14
    |
-   = note: a tuple struct constructor is private if any of its fields is private
+LL | pub struct C(pub isize, isize);
+   |              ---------------- a constructor is private if any of the fields is private
 
-error[E0603]: tuple struct `C` is private
+error[E0603]: tuple struct constructor `C` is private
   --> $DIR/privacy5.rs:113:22
    |
 LL |     match c { other::C(_, _b) => {} }
    |                      ^
+   | 
+  ::: $DIR/auxiliary/privacy_tuple_struct.rs:3:14
    |
-   = note: a tuple struct constructor is private if any of its fields is private
+LL | pub struct C(pub isize, isize);
+   |              ---------------- a constructor is private if any of the fields is private
 
-error[E0603]: tuple struct `C` is private
+error[E0603]: tuple struct constructor `C` is private
   --> $DIR/privacy5.rs:114:22
    |
 LL |     match c { other::C(_a, _b) => {} }
    |                      ^
+   | 
+  ::: $DIR/auxiliary/privacy_tuple_struct.rs:3:14
    |
-   = note: a tuple struct constructor is private if any of its fields is private
+LL | pub struct C(pub isize, isize);
+   |              ---------------- a constructor is private if any of the fields is private
 
-error[E0603]: tuple struct `A` is private
+error[E0603]: tuple struct constructor `A` is private
   --> $DIR/privacy5.rs:122:21
    |
 LL |     let a2 = other::A;
    |                     ^
+   | 
+  ::: $DIR/auxiliary/privacy_tuple_struct.rs:1:14
    |
-   = note: a tuple struct constructor is private if any of its fields is private
+LL | pub struct A(());
+   |              -- a constructor is private if any of the fields is private
 
-error[E0603]: tuple struct `B` is private
+error[E0603]: tuple struct constructor `B` is private
   --> $DIR/privacy5.rs:123:21
    |
 LL |     let b2 = other::B;
    |                     ^
+   | 
+  ::: $DIR/auxiliary/privacy_tuple_struct.rs:2:14
    |
-   = note: a tuple struct constructor is private if any of its fields is private
+LL | pub struct B(isize);
+   |              ----- a constructor is private if any of the fields is private
 
-error[E0603]: tuple struct `C` is private
+error[E0603]: tuple struct constructor `C` is private
   --> $DIR/privacy5.rs:124:21
    |
 LL |     let c2 = other::C;
    |                     ^
+   | 
+  ::: $DIR/auxiliary/privacy_tuple_struct.rs:3:14
    |
-   = note: a tuple struct constructor is private if any of its fields is private
+LL | pub struct C(pub isize, isize);
+   |              ---------------- a constructor is private if any of the fields is private
 
 error: aborting due to 48 previous errors
 
diff --git a/src/test/ui/proc-macro/auxiliary/gen-macro-rules-hygiene.rs b/src/test/ui/proc-macro/auxiliary/gen-macro-rules-hygiene.rs
new file mode 100644 (file)
index 0000000..548fefe
--- /dev/null
@@ -0,0 +1,23 @@
+// force-host
+// no-prefer-dynamic
+
+#![crate_type = "proc-macro"]
+
+extern crate proc_macro;
+use proc_macro::*;
+
+#[proc_macro]
+pub fn gen_macro_rules(_: TokenStream) -> TokenStream {
+    "
+    macro_rules! generated {() => {
+        struct ItemDef;
+        let local_def = 0;
+
+        ItemUse; // OK
+        local_use; // ERROR
+        break 'label_use; // ERROR
+
+        type DollarCrate = $crate::ItemUse; // OK
+    }}
+    ".parse().unwrap()
+}
diff --git a/src/test/ui/proc-macro/auxiliary/more-gates.rs b/src/test/ui/proc-macro/auxiliary/more-gates.rs
deleted file mode 100644 (file)
index 6b609ea..0000000
+++ /dev/null
@@ -1,35 +0,0 @@
-// force-host
-// no-prefer-dynamic
-
-#![crate_type = "proc-macro"]
-
-extern crate proc_macro;
-
-use proc_macro::*;
-
-#[proc_macro_attribute]
-pub fn attr2mac1(_: TokenStream, _: TokenStream) -> TokenStream {
-    "macro_rules! foo1 { (a) => (a) }".parse().unwrap()
-}
-
-#[proc_macro_attribute]
-pub fn attr2mac2(_: TokenStream, _: TokenStream) -> TokenStream {
-    "macro foo2(a) { a }".parse().unwrap()
-}
-
-#[proc_macro]
-pub fn mac2mac1(_: TokenStream) -> TokenStream {
-    "macro_rules! foo3 { (a) => (a) }".parse().unwrap()
-}
-
-#[proc_macro]
-pub fn mac2mac2(_: TokenStream) -> TokenStream {
-    "macro foo4(a) { a }".parse().unwrap()
-}
-
-#[proc_macro]
-pub fn tricky(_: TokenStream) -> TokenStream {
-    "fn foo() {
-        macro_rules! foo { (a) => (a) }
-    }".parse().unwrap()
-}
diff --git a/src/test/ui/proc-macro/disappearing-resolution.rs b/src/test/ui/proc-macro/disappearing-resolution.rs
new file mode 100644 (file)
index 0000000..a01b8f3
--- /dev/null
@@ -0,0 +1,22 @@
+// Regression test for issue #64803 (initial attribute resolution can disappear later).
+
+// aux-build:test-macros.rs
+
+#[macro_use]
+extern crate test_macros;
+
+mod m {
+    use test_macros::Empty;
+}
+use m::Empty; //~ ERROR derive macro `Empty` is private
+
+// To resolve `empty_helper` we need to resolve `Empty`.
+// During initial resolution `use m::Empty` introduces no entries, so we proceed to `macro_use`,
+// successfully resolve `Empty` from there, and then resolve `empty_helper` as its helper.
+// During validation `use m::Empty` introduces a `Res::Err` stub, so `Empty` resolves to it,
+// and `empty_helper` can no longer be resolved.
+#[empty_helper] //~ ERROR cannot find attribute `empty_helper` in this scope
+#[derive(Empty)]
+struct S;
+
+fn main() {}
diff --git a/src/test/ui/proc-macro/disappearing-resolution.stderr b/src/test/ui/proc-macro/disappearing-resolution.stderr
new file mode 100644 (file)
index 0000000..a3377ef
--- /dev/null
@@ -0,0 +1,15 @@
+error: cannot find attribute `empty_helper` in this scope
+  --> $DIR/disappearing-resolution.rs:18:3
+   |
+LL | #[empty_helper]
+   |   ^^^^^^^^^^^^
+
+error[E0603]: derive macro `Empty` is private
+  --> $DIR/disappearing-resolution.rs:11:8
+   |
+LL | use m::Empty;
+   |        ^^^^^
+
+error: aborting due to 2 previous errors
+
+For more information about this error, try `rustc --explain E0603`.
diff --git a/src/test/ui/proc-macro/gen-macro-rules-hygiene.rs b/src/test/ui/proc-macro/gen-macro-rules-hygiene.rs
new file mode 100644 (file)
index 0000000..195bda8
--- /dev/null
@@ -0,0 +1,23 @@
+// `macro_rules` items produced by transparent macros have correct hygiene in basic cases.
+// Local variables and labels are hygienic, items are not hygienic.
+// `$crate` refers to the crate that defines `macro_rules` and not the outer transparent macro.
+
+// aux-build:gen-macro-rules-hygiene.rs
+
+#[macro_use]
+extern crate gen_macro_rules_hygiene;
+
+struct ItemUse;
+
+gen_macro_rules!();
+//~^ ERROR use of undeclared label `'label_use`
+//~| ERROR cannot find value `local_use` in this scope
+
+fn main() {
+    'label_use: loop {
+        let local_use = 1;
+        generated!();
+        ItemDef; // OK
+        local_def; //~ ERROR cannot find value `local_def` in this scope
+    }
+}
diff --git a/src/test/ui/proc-macro/gen-macro-rules-hygiene.stderr b/src/test/ui/proc-macro/gen-macro-rules-hygiene.stderr
new file mode 100644 (file)
index 0000000..ecebdfa
--- /dev/null
@@ -0,0 +1,28 @@
+error[E0426]: use of undeclared label `'label_use`
+  --> $DIR/gen-macro-rules-hygiene.rs:12:1
+   |
+LL | gen_macro_rules!();
+   | ^^^^^^^^^^^^^^^^^^^ undeclared label `'label_use`
+...
+LL |         generated!();
+   |         ------------- in this macro invocation
+
+error[E0425]: cannot find value `local_use` in this scope
+  --> $DIR/gen-macro-rules-hygiene.rs:12:1
+   |
+LL | gen_macro_rules!();
+   | ^^^^^^^^^^^^^^^^^^^ not found in this scope
+...
+LL |         generated!();
+   |         ------------- in this macro invocation
+
+error[E0425]: cannot find value `local_def` in this scope
+  --> $DIR/gen-macro-rules-hygiene.rs:21:9
+   |
+LL |         local_def;
+   |         ^^^^^^^^^ not found in this scope
+
+error: aborting due to 3 previous errors
+
+Some errors have detailed explanations: E0425, E0426.
+For more information about an error, try `rustc --explain E0425`.
diff --git a/src/test/ui/proc-macro/more-gates.rs b/src/test/ui/proc-macro/more-gates.rs
deleted file mode 100644 (file)
index b870b43..0000000
+++ /dev/null
@@ -1,22 +0,0 @@
-// aux-build:more-gates.rs
-
-#![feature(decl_macro)]
-
-extern crate more_gates as foo;
-
-use foo::*;
-
-#[attr2mac1]
-//~^ ERROR: cannot expand to macro definitions
-pub fn a() {}
-#[attr2mac2]
-//~^ ERROR: cannot expand to macro definitions
-pub fn a() {}
-
-mac2mac1!(); //~ ERROR: cannot expand to macro definitions
-mac2mac2!(); //~ ERROR: cannot expand to macro definitions
-
-tricky!();
-//~^ ERROR: cannot expand to macro definitions
-
-fn main() {}
diff --git a/src/test/ui/proc-macro/more-gates.stderr b/src/test/ui/proc-macro/more-gates.stderr
deleted file mode 100644 (file)
index ad96f78..0000000
+++ /dev/null
@@ -1,48 +0,0 @@
-error[E0658]: procedural macros cannot expand to macro definitions
-  --> $DIR/more-gates.rs:9:1
-   |
-LL | #[attr2mac1]
-   | ^^^^^^^^^^^^
-   |
-   = note: for more information, see https://github.com/rust-lang/rust/issues/54727
-   = help: add `#![feature(proc_macro_hygiene)]` to the crate attributes to enable
-
-error[E0658]: procedural macros cannot expand to macro definitions
-  --> $DIR/more-gates.rs:12:1
-   |
-LL | #[attr2mac2]
-   | ^^^^^^^^^^^^
-   |
-   = note: for more information, see https://github.com/rust-lang/rust/issues/54727
-   = help: add `#![feature(proc_macro_hygiene)]` to the crate attributes to enable
-
-error[E0658]: procedural macros cannot expand to macro definitions
-  --> $DIR/more-gates.rs:16:1
-   |
-LL | mac2mac1!();
-   | ^^^^^^^^^^^^
-   |
-   = note: for more information, see https://github.com/rust-lang/rust/issues/54727
-   = help: add `#![feature(proc_macro_hygiene)]` to the crate attributes to enable
-
-error[E0658]: procedural macros cannot expand to macro definitions
-  --> $DIR/more-gates.rs:17:1
-   |
-LL | mac2mac2!();
-   | ^^^^^^^^^^^^
-   |
-   = note: for more information, see https://github.com/rust-lang/rust/issues/54727
-   = help: add `#![feature(proc_macro_hygiene)]` to the crate attributes to enable
-
-error[E0658]: procedural macros cannot expand to macro definitions
-  --> $DIR/more-gates.rs:19:1
-   |
-LL | tricky!();
-   | ^^^^^^^^^^
-   |
-   = note: for more information, see https://github.com/rust-lang/rust/issues/54727
-   = help: add `#![feature(proc_macro_hygiene)]` to the crate attributes to enable
-
-error: aborting due to 5 previous errors
-
-For more information about this error, try `rustc --explain E0658`.
diff --git a/src/test/ui/reachable/auxiliary/unreachable_variant.rs b/src/test/ui/reachable/auxiliary/unreachable_variant.rs
new file mode 100644 (file)
index 0000000..4e94a4b
--- /dev/null
@@ -0,0 +1,5 @@
+mod super_sekrit {
+    pub enum sooper_sekrit {
+        quux, baz
+    }
+}
diff --git a/src/test/ui/reachable/unreachable-arm.rs b/src/test/ui/reachable/unreachable-arm.rs
new file mode 100644 (file)
index 0000000..64c3896
--- /dev/null
@@ -0,0 +1,14 @@
+#![feature(box_patterns)]
+#![feature(box_syntax)]
+#![allow(dead_code)]
+#![deny(unreachable_patterns)]
+
+enum Foo { A(Box<Foo>, isize), B(usize), }
+
+fn main() {
+    match Foo::B(1) {
+        Foo::B(_) | Foo::A(box _, 1) => { }
+        Foo::A(_, 1) => { } //~ ERROR unreachable pattern
+        _ => { }
+    }
+}
diff --git a/src/test/ui/reachable/unreachable-arm.stderr b/src/test/ui/reachable/unreachable-arm.stderr
new file mode 100644 (file)
index 0000000..8e65745
--- /dev/null
@@ -0,0 +1,14 @@
+error: unreachable pattern
+  --> $DIR/unreachable-arm.rs:11:9
+   |
+LL |         Foo::A(_, 1) => { }
+   |         ^^^^^^^^^^^^
+   |
+note: lint level defined here
+  --> $DIR/unreachable-arm.rs:4:9
+   |
+LL | #![deny(unreachable_patterns)]
+   |         ^^^^^^^^^^^^^^^^^^^^
+
+error: aborting due to previous error
+
diff --git a/src/test/ui/reachable/unreachable-code.rs b/src/test/ui/reachable/unreachable-code.rs
new file mode 100644 (file)
index 0000000..ad0dc8a
--- /dev/null
@@ -0,0 +1,8 @@
+#![deny(unreachable_code)]
+#![allow(unused_variables)]
+
+fn main() {
+  loop{}
+
+  let a = 3; //~ ERROR: unreachable statement
+}
diff --git a/src/test/ui/reachable/unreachable-code.stderr b/src/test/ui/reachable/unreachable-code.stderr
new file mode 100644 (file)
index 0000000..184440d
--- /dev/null
@@ -0,0 +1,17 @@
+error: unreachable statement
+  --> $DIR/unreachable-code.rs:7:3
+   |
+LL |   loop{}
+   |   ------ any code following this expression is unreachable
+LL | 
+LL |   let a = 3;
+   |   ^^^^^^^^^^ unreachable statement
+   |
+note: lint level defined here
+  --> $DIR/unreachable-code.rs:1:9
+   |
+LL | #![deny(unreachable_code)]
+   |         ^^^^^^^^^^^^^^^^
+
+error: aborting due to previous error
+
diff --git a/src/test/ui/reachable/unreachable-in-call.rs b/src/test/ui/reachable/unreachable-in-call.rs
new file mode 100644 (file)
index 0000000..dd94e79
--- /dev/null
@@ -0,0 +1,22 @@
+#![allow(dead_code)]
+#![deny(unreachable_code)]
+
+fn diverge() -> ! { panic!() }
+
+fn get_u8() -> u8 {
+    1
+}
+fn call(_: u8, _: u8) {
+
+}
+fn diverge_first() {
+    call(diverge(),
+         get_u8()); //~ ERROR unreachable expression
+}
+fn diverge_second() {
+    call( //~ ERROR unreachable call
+        get_u8(),
+        diverge());
+}
+
+fn main() {}
diff --git a/src/test/ui/reachable/unreachable-in-call.stderr b/src/test/ui/reachable/unreachable-in-call.stderr
new file mode 100644 (file)
index 0000000..1d081d1
--- /dev/null
@@ -0,0 +1,25 @@
+error: unreachable expression
+  --> $DIR/unreachable-in-call.rs:14:10
+   |
+LL |     call(diverge(),
+   |          --------- any code following this expression is unreachable
+LL |          get_u8());
+   |          ^^^^^^^^ unreachable expression
+   |
+note: lint level defined here
+  --> $DIR/unreachable-in-call.rs:2:9
+   |
+LL | #![deny(unreachable_code)]
+   |         ^^^^^^^^^^^^^^^^
+
+error: unreachable call
+  --> $DIR/unreachable-in-call.rs:17:5
+   |
+LL |     call(
+   |     ^^^^ unreachable call
+LL |         get_u8(),
+LL |         diverge());
+   |         --------- any code following this expression is unreachable
+
+error: aborting due to 2 previous errors
+
diff --git a/src/test/ui/reachable/unreachable-loop-patterns.rs b/src/test/ui/reachable/unreachable-loop-patterns.rs
new file mode 100644 (file)
index 0000000..6f1d2ef
--- /dev/null
@@ -0,0 +1,21 @@
+#![feature(never_type)]
+#![feature(exhaustive_patterns)]
+
+#![allow(unreachable_code)]
+#![deny(unreachable_patterns)]
+
+enum Void {}
+
+impl Iterator for Void {
+    type Item = Void;
+
+    fn next(&mut self) -> Option<Void> {
+        None
+    }
+}
+
+fn main() {
+    for _ in unimplemented!() as Void {}
+    //~^ ERROR unreachable pattern
+    //~^^ ERROR unreachable pattern
+}
diff --git a/src/test/ui/reachable/unreachable-loop-patterns.stderr b/src/test/ui/reachable/unreachable-loop-patterns.stderr
new file mode 100644 (file)
index 0000000..bb51033
--- /dev/null
@@ -0,0 +1,20 @@
+error: unreachable pattern
+  --> $DIR/unreachable-loop-patterns.rs:18:9
+   |
+LL |     for _ in unimplemented!() as Void {}
+   |         ^
+   |
+note: lint level defined here
+  --> $DIR/unreachable-loop-patterns.rs:5:9
+   |
+LL | #![deny(unreachable_patterns)]
+   |         ^^^^^^^^^^^^^^^^^^^^
+
+error: unreachable pattern
+  --> $DIR/unreachable-loop-patterns.rs:18:14
+   |
+LL |     for _ in unimplemented!() as Void {}
+   |              ^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: aborting due to 2 previous errors
+
diff --git a/src/test/ui/reachable/unreachable-try-pattern.rs b/src/test/ui/reachable/unreachable-try-pattern.rs
new file mode 100644 (file)
index 0000000..23360e7
--- /dev/null
@@ -0,0 +1,41 @@
+// check-pass
+#![feature(never_type, exhaustive_patterns)]
+#![warn(unreachable_code)]
+#![warn(unreachable_patterns)]
+
+enum Void {}
+
+impl From<Void> for i32 {
+    fn from(v: Void) -> i32 {
+        match v {}
+    }
+}
+
+fn bar(x: Result<!, i32>) -> Result<u32, i32> {
+    x?
+}
+
+fn foo(x: Result<!, i32>) -> Result<u32, i32> {
+    let y = (match x { Ok(n) => Ok(n as u32), Err(e) => Err(e) })?;
+    //~^ WARN unreachable pattern
+    //~| WARN unreachable expression
+    Ok(y)
+}
+
+fn qux(x: Result<u32, Void>) -> Result<u32, i32> {
+    Ok(x?)
+}
+
+fn vom(x: Result<u32, Void>) -> Result<u32, i32> {
+    let y = (match x { Ok(n) => Ok(n), Err(e) => Err(e) })?;
+    //~^ WARN unreachable pattern
+    Ok(y)
+}
+
+
+fn main() {
+    let _ = bar(Err(123));
+    let _ = foo(Err(123));
+    let _ = qux(Ok(123));
+    let _ = vom(Ok(123));
+}
diff --git a/src/test/ui/reachable/unreachable-try-pattern.stderr b/src/test/ui/reachable/unreachable-try-pattern.stderr
new file mode 100644 (file)
index 0000000..7070384
--- /dev/null
@@ -0,0 +1,33 @@
+warning: unreachable expression
+  --> $DIR/unreachable-try-pattern.rs:19:36
+   |
+LL |     let y = (match x { Ok(n) => Ok(n as u32), Err(e) => Err(e) })?;
+   |                                    -^^^^^^^
+   |                                    |
+   |                                    unreachable expression
+   |                                    any code following this expression is unreachable
+   |
+note: lint level defined here
+  --> $DIR/unreachable-try-pattern.rs:3:9
+   |
+LL | #![warn(unreachable_code)]
+   |         ^^^^^^^^^^^^^^^^
+
+warning: unreachable pattern
+  --> $DIR/unreachable-try-pattern.rs:19:24
+   |
+LL |     let y = (match x { Ok(n) => Ok(n as u32), Err(e) => Err(e) })?;
+   |                        ^^^^^
+   |
+note: lint level defined here
+  --> $DIR/unreachable-try-pattern.rs:4:9
+   |
+LL | #![warn(unreachable_patterns)]
+   |         ^^^^^^^^^^^^^^^^^^^^
+
+warning: unreachable pattern
+  --> $DIR/unreachable-try-pattern.rs:30:40
+   |
+LL |     let y = (match x { Ok(n) => Ok(n), Err(e) => Err(e) })?;
+   |                                        ^^^^^^
+
diff --git a/src/test/ui/reachable/unreachable-variant.rs b/src/test/ui/reachable/unreachable-variant.rs
new file mode 100644 (file)
index 0000000..008c2d4
--- /dev/null
@@ -0,0 +1,7 @@
+// aux-build:unreachable_variant.rs
+
+extern crate unreachable_variant as other;
+
+fn main() {
+    let _x = other::super_sekrit::sooper_sekrit::baz; //~ ERROR is private
+}
diff --git a/src/test/ui/reachable/unreachable-variant.stderr b/src/test/ui/reachable/unreachable-variant.stderr
new file mode 100644 (file)
index 0000000..276c77f
--- /dev/null
@@ -0,0 +1,9 @@
+error[E0603]: module `super_sekrit` is private
+  --> $DIR/unreachable-variant.rs:6:21
+   |
+LL |     let _x = other::super_sekrit::sooper_sekrit::baz;
+   |                     ^^^^^^^^^^^^
+
+error: aborting due to previous error
+
+For more information about this error, try `rustc --explain E0603`.
diff --git a/src/test/ui/reachable/unwarned-match-on-never.rs b/src/test/ui/reachable/unwarned-match-on-never.rs
new file mode 100644 (file)
index 0000000..71f8fe3
--- /dev/null
@@ -0,0 +1,24 @@
+#![deny(unreachable_code)]
+#![allow(dead_code)]
+
+#![feature(never_type)]
+
+fn foo(x: !) -> bool {
+    // Explicit matches on the never type are unwarned.
+    match x {}
+    // But matches in unreachable code are warned.
+    match x {} //~ ERROR unreachable expression
+}
+
+fn bar() {
+    match (return) {
+        () => () //~ ERROR unreachable arm
+    }
+}
+
+fn main() {
+    return;
+    match () { //~ ERROR unreachable expression
+        () => (),
+    }
+}
diff --git a/src/test/ui/reachable/unwarned-match-on-never.stderr b/src/test/ui/reachable/unwarned-match-on-never.stderr
new file mode 100644 (file)
index 0000000..6b2fb4a
--- /dev/null
@@ -0,0 +1,35 @@
+error: unreachable expression
+  --> $DIR/unwarned-match-on-never.rs:10:5
+   |
+LL |     match x {}
+   |           - any code following this expression is unreachable
+LL |     // But matches in unreachable code are warned.
+LL |     match x {}
+   |     ^^^^^^^^^^ unreachable expression
+   |
+note: lint level defined here
+  --> $DIR/unwarned-match-on-never.rs:1:9
+   |
+LL | #![deny(unreachable_code)]
+   |         ^^^^^^^^^^^^^^^^
+
+error: unreachable arm
+  --> $DIR/unwarned-match-on-never.rs:15:15
+   |
+LL |     match (return) {
+   |           -------- any code following this expression is unreachable
+LL |         () => ()
+   |               ^^ unreachable arm
+
+error: unreachable expression
+  --> $DIR/unwarned-match-on-never.rs:21:5
+   |
+LL |       return;
+   |       ------ any code following this expression is unreachable
+LL | /     match () {
+LL | |         () => (),
+LL | |     }
+   | |_____^ unreachable expression
+
+error: aborting due to 3 previous errors
+
index b9385952fafa36a942d2b13e3b83f1b71228cfa9..f9ae75b18317deeccae691d293ebebf51ebfcb4d 100644 (file)
@@ -3,6 +3,13 @@ error[E0005]: refutable pattern in local binding: `Err(_)` not covered
    |
 LL |     let Ok(x) = res;
    |         ^^^^^ pattern `Err(_)` not covered
+   |
+   = note: `let` bindings require an "irrefutable pattern", like a `struct` or an `enum` with only one variant
+   = note: for more information, visit https://doc.rust-lang.org/book/ch18-02-refutability.html
+help: you might want to use `if let` to ignore the variant that isn't matched
+   |
+LL |     if let Ok(x) = res { /* */ }
+   |
 
 error[E0381]: use of possibly-uninitialized variable: `x`
   --> $DIR/recursive-types-are-not-uninhabited.rs:8:5
index 3b13e25293d58578f440e5e49323ee427c05e497..0cf5d9cd5f12a6e6a223a61aae2ff7fa475d002b 100644 (file)
@@ -9,6 +9,13 @@ error[E0005]: refutable pattern in local binding: `(std::i32::MIN..=0i32, _)` an
    |
 LL |     let (1, (Some(1), 2..=3)) = (1, (None, 2));
    |         ^^^^^^^^^^^^^^^^^^^^^ patterns `(std::i32::MIN..=0i32, _)` and `(2i32..=std::i32::MAX, _)` not covered
+   |
+   = note: `let` bindings require an "irrefutable pattern", like a `struct` or an `enum` with only one variant
+   = note: for more information, visit https://doc.rust-lang.org/book/ch18-02-refutability.html
+help: you might want to use `if let` to ignore the variant that isn't matched
+   |
+LL |     if let (1, (Some(1), 2..=3)) = (1, (None, 2)) { /* */ }
+   |
 
 error: aborting due to 2 previous errors
 
index 751a4c1021ebb85ddb2def05bc6fa79ebaaa20d3..184cead21231f114d0da42fadaa113cf8ad1d333 100644 (file)
@@ -10,12 +10,12 @@ error[E0478]: lifetime bound not satisfied
 LL |     z: Box<dyn Is<'a>+'b+'c>,
    |     ^^^^^^^^^^^^^^^^^^^^^^^^
    |
-note: lifetime parameter instantiated with the lifetime 'b as defined on the struct at 11:15
+note: lifetime parameter instantiated with the lifetime `'b` as defined on the struct at 11:15
   --> $DIR/region-bounds-on-objects-and-type-parameters.rs:11:15
    |
 LL | struct Foo<'a,'b,'c> {
    |               ^^
-note: but lifetime parameter must outlive the lifetime 'a as defined on the struct at 11:12
+note: but lifetime parameter must outlive the lifetime `'a` as defined on the struct at 11:12
   --> $DIR/region-bounds-on-objects-and-type-parameters.rs:11:12
    |
 LL | struct Foo<'a,'b,'c> {
index df92ed51e9a9168c30d51689ad045e3f80e61017..911904813d0eba241b597141531fcf035a6168c4 100644 (file)
@@ -3,7 +3,7 @@
 // over time, but this test used to exhibit some pretty bogus messages
 // that were not remotely helpful.
 
-// error-pattern:the lifetime 'a
+// error-pattern:the lifetime `'a`
 // error-pattern:the static lifetime
 
 struct Invariant<'a>(Option<&'a mut &'a mut ()>);
index 60e70ddcd97019d20a1aa0c2cab400f6da47db52..8358a7988c8088866efece25a6fd969973a7a126 100644 (file)
@@ -13,7 +13,7 @@ LL | |     };
    |
    = note: expected type `Invariant<'a>`
               found type `Invariant<'static>`
-note: the lifetime 'a as defined on the function body at 13:10...
+note: the lifetime `'a` as defined on the function body at 13:10...
   --> $DIR/region-invariant-static-error-reporting.rs:13:10
    |
 LL | fn unify<'a>(x: Option<Invariant<'a>>, f: fn(Invariant<'a>)) {
index 0c5e22ebae2835d3726b914f482d49b3a6d73600..74ea1b731e9ac59c6bcb91ddb283a12306e79ee6 100644 (file)
@@ -4,7 +4,7 @@ error[E0495]: cannot infer an appropriate lifetime for autoref due to conflictin
 LL |     x.borrowed()
    |       ^^^^^^^^
    |
-note: first, the lifetime cannot outlive the lifetime 'a as defined on the function body at 9:42...
+note: first, the lifetime cannot outlive the lifetime `'a` as defined on the function body at 9:42...
   --> $DIR/region-object-lifetime-2.rs:9:42
    |
 LL | fn borrowed_receiver_different_lifetimes<'a,'b>(x: &'a dyn Foo) -> &'b () {
@@ -14,7 +14,7 @@ note: ...so that reference does not outlive borrowed content
    |
 LL |     x.borrowed()
    |     ^
-note: but, the lifetime must be valid for the lifetime 'b as defined on the function body at 9:45...
+note: but, the lifetime must be valid for the lifetime `'b` as defined on the function body at 9:45...
   --> $DIR/region-object-lifetime-2.rs:9:45
    |
 LL | fn borrowed_receiver_different_lifetimes<'a,'b>(x: &'a dyn Foo) -> &'b () {
@@ -27,3 +27,4 @@ LL |     x.borrowed()
 
 error: aborting due to previous error
 
+For more information about this error, try `rustc --explain E0495`.
index e737d27d5606f67f8c769899328334bae85f41bb..105321829058838ed399cf939033a0bc3092da2a 100644 (file)
@@ -4,7 +4,7 @@ error[E0495]: cannot infer an appropriate lifetime for autoref due to conflictin
 LL |     x.borrowed()
    |       ^^^^^^^^
    |
-note: first, the lifetime cannot outlive the lifetime 'a as defined on the function body at 11:41...
+note: first, the lifetime cannot outlive the lifetime `'a` as defined on the function body at 11:41...
   --> $DIR/region-object-lifetime-4.rs:11:41
    |
 LL | fn borrowed_receiver_related_lifetimes2<'a,'b>(x: &'a (dyn Foo + 'b)) -> &'b () {
@@ -14,7 +14,7 @@ note: ...so that reference does not outlive borrowed content
    |
 LL |     x.borrowed()
    |     ^
-note: but, the lifetime must be valid for the lifetime 'b as defined on the function body at 11:44...
+note: but, the lifetime must be valid for the lifetime `'b` as defined on the function body at 11:44...
   --> $DIR/region-object-lifetime-4.rs:11:44
    |
 LL | fn borrowed_receiver_related_lifetimes2<'a,'b>(x: &'a (dyn Foo + 'b)) -> &'b () {
@@ -27,3 +27,4 @@ LL |     x.borrowed()
 
 error: aborting due to previous error
 
+For more information about this error, try `rustc --explain E0495`.
index 8209fa1840d05d12611aacf30e80edc54c27d419..14934d6fa4899c9490be3bc91282204fe33222d2 100644 (file)
@@ -29,7 +29,7 @@ error[E0495]: cannot infer an appropriate lifetime due to conflicting requiremen
 LL |     Box::new(v)
    |              ^
    |
-note: first, the lifetime cannot outlive the lifetime 'a as defined on the function body at 25:6...
+note: first, the lifetime cannot outlive the lifetime `'a` as defined on the function body at 25:6...
   --> $DIR/region-object-lifetime-in-coercion.rs:25:6
    |
 LL | fn d<'a,'b>(v: &'a [u8]) -> Box<dyn Foo+'b> {
@@ -37,7 +37,7 @@ LL | fn d<'a,'b>(v: &'a [u8]) -> Box<dyn Foo+'b> {
    = note: ...so that the expression is assignable:
            expected &[u8]
               found &'a [u8]
-note: but, the lifetime must be valid for the lifetime 'b as defined on the function body at 25:9...
+note: but, the lifetime must be valid for the lifetime `'b` as defined on the function body at 25:9...
   --> $DIR/region-object-lifetime-in-coercion.rs:25:9
    |
 LL | fn d<'a,'b>(v: &'a [u8]) -> Box<dyn Foo+'b> {
@@ -48,4 +48,5 @@ LL | fn d<'a,'b>(v: &'a [u8]) -> Box<dyn Foo+'b> {
 
 error: aborting due to 4 previous errors
 
-For more information about this error, try `rustc --explain E0621`.
+Some errors have detailed explanations: E0495, E0621.
+For more information about an error, try `rustc --explain E0495`.
index 2274e9341dbc974f475beec9ae91ff6279368edc..a0b8b6b51e5a1b36572ad8567ad4cbfd94ce81b7 100644 (file)
@@ -26,3 +26,4 @@ LL |         let p: &'static mut usize = &mut self.cats_chased;
 
 error: aborting due to previous error
 
+For more information about this error, try `rustc --explain E0495`.
index d02caeb44f1a82380acbdb042adb28dab39485c6..7a051b8ac835d0a2d3f0b8ca8c53e4d4a5a75e8e 100644 (file)
@@ -4,7 +4,7 @@ error[E0495]: cannot infer an appropriate lifetime for borrow expression due to
 LL |             let p: &'static mut usize = &mut self.food;
    |                                         ^^^^^^^^^^^^^^
    |
-note: first, the lifetime cannot outlive the lifetime '_ as defined on the body at 9:18...
+note: first, the lifetime cannot outlive the lifetime `'_` as defined on the body at 9:18...
   --> $DIR/regions-addr-of-upvar-self.rs:9:18
    |
 LL |         let _f = || {
@@ -23,3 +23,4 @@ LL |             let p: &'static mut usize = &mut self.food;
 
 error: aborting due to previous error
 
+For more information about this error, try `rustc --explain E0495`.
index 61be0778c99c52d9f5de557343a08321b48a1813..c0401780b8f5b2794bd65266755485c7b450c2d2 100644 (file)
@@ -4,12 +4,12 @@ error[E0491]: in type `&'a WithAssoc<TheType<'b>>`, reference has a longer lifet
 LL |     let _: &'a WithAssoc<TheType<'b>> = loop { };
    |            ^^^^^^^^^^^^^^^^^^^^^^^^^^
    |
-note: the pointer is valid for the lifetime 'a as defined on the function body at 33:15
+note: the pointer is valid for the lifetime `'a` as defined on the function body at 33:15
   --> $DIR/regions-assoc-type-in-supertrait-outlives-container.rs:33:15
    |
 LL | fn with_assoc<'a,'b>() {
    |               ^^
-note: but the referenced data is only valid for the lifetime 'b as defined on the function body at 33:18
+note: but the referenced data is only valid for the lifetime `'b` as defined on the function body at 33:18
   --> $DIR/regions-assoc-type-in-supertrait-outlives-container.rs:33:18
    |
 LL | fn with_assoc<'a,'b>() {
index 9732cd12ce15fbd76f20147165e4cee2ca71fc56..a636c9ef22c83ead07a67223d423f9f6e9347ddb 100644 (file)
@@ -4,7 +4,7 @@ error[E0495]: cannot infer an appropriate lifetime for lifetime parameter `'a` d
 LL | impl<'a> Foo<'static> for &'a i32 {
    |          ^^^^^^^^^^^^
    |
-note: first, the lifetime cannot outlive the lifetime 'a as defined on the impl at 14:6...
+note: first, the lifetime cannot outlive the lifetime `'a` as defined on the impl at 14:6...
   --> $DIR/regions-assoc-type-region-bound-in-trait-not-met.rs:14:6
    |
 LL | impl<'a> Foo<'static> for &'a i32 {
@@ -25,7 +25,7 @@ error[E0495]: cannot infer an appropriate lifetime for lifetime parameter `'a` d
 LL | impl<'a,'b> Foo<'b> for &'a i64 {
    |             ^^^^^^^
    |
-note: first, the lifetime cannot outlive the lifetime 'a as defined on the impl at 19:6...
+note: first, the lifetime cannot outlive the lifetime `'a` as defined on the impl at 19:6...
   --> $DIR/regions-assoc-type-region-bound-in-trait-not-met.rs:19:6
    |
 LL | impl<'a,'b> Foo<'b> for &'a i64 {
@@ -33,7 +33,7 @@ LL | impl<'a,'b> Foo<'b> for &'a i64 {
    = note: ...so that the types are compatible:
            expected Foo<'b>
               found Foo<'_>
-note: but, the lifetime must be valid for the lifetime 'b as defined on the impl at 19:9...
+note: but, the lifetime must be valid for the lifetime `'b` as defined on the impl at 19:9...
   --> $DIR/regions-assoc-type-region-bound-in-trait-not-met.rs:19:9
    |
 LL | impl<'a,'b> Foo<'b> for &'a i64 {
@@ -46,3 +46,4 @@ LL | impl<'a,'b> Foo<'b> for &'a i64 {
 
 error: aborting due to 2 previous errors
 
+For more information about this error, try `rustc --explain E0495`.
index 2067bc3946c92f81717eaf661404208a5bc8d14e..81256e3b46cbb94a399d27175f88cd9614994b28 100644 (file)
@@ -4,7 +4,7 @@ error[E0495]: cannot infer an appropriate lifetime for lifetime parameter `'a` d
 LL | impl<'a> Foo for &'a i32 {
    |          ^^^
    |
-note: first, the lifetime cannot outlive the lifetime 'a as defined on the impl at 9:6...
+note: first, the lifetime cannot outlive the lifetime `'a` as defined on the impl at 9:6...
   --> $DIR/regions-assoc-type-static-bound-in-trait-not-met.rs:9:6
    |
 LL | impl<'a> Foo for &'a i32 {
@@ -21,3 +21,4 @@ LL | impl<'a> Foo for &'a i32 {
 
 error: aborting due to previous error
 
+For more information about this error, try `rustc --explain E0495`.
index 27eb8891c6c06ff53061df1f8b2f2711ff829945..a15710b86c06e8360e0d03b899f21a3f25342d5b 100644 (file)
@@ -6,12 +6,12 @@ LL |     return e;
    |
    = note: expected type `TupleStruct<'b>`
               found type `TupleStruct<'a>`
-note: the lifetime 'a as defined on the function body at 8:10...
+note: the lifetime `'a` as defined on the function body at 8:10...
   --> $DIR/regions-bounds.rs:8:10
    |
 LL | fn a_fn1<'a,'b>(e: TupleStruct<'a>) -> TupleStruct<'b> {
    |          ^^
-note: ...does not necessarily outlive the lifetime 'b as defined on the function body at 8:13
+note: ...does not necessarily outlive the lifetime `'b` as defined on the function body at 8:13
   --> $DIR/regions-bounds.rs:8:13
    |
 LL | fn a_fn1<'a,'b>(e: TupleStruct<'a>) -> TupleStruct<'b> {
@@ -25,12 +25,12 @@ LL |     return e;
    |
    = note: expected type `Struct<'b>`
               found type `Struct<'a>`
-note: the lifetime 'a as defined on the function body at 12:10...
+note: the lifetime `'a` as defined on the function body at 12:10...
   --> $DIR/regions-bounds.rs:12:10
    |
 LL | fn a_fn3<'a,'b>(e: Struct<'a>) -> Struct<'b> {
    |          ^^
-note: ...does not necessarily outlive the lifetime 'b as defined on the function body at 12:13
+note: ...does not necessarily outlive the lifetime `'b` as defined on the function body at 12:13
   --> $DIR/regions-bounds.rs:12:13
    |
 LL | fn a_fn3<'a,'b>(e: Struct<'a>) -> Struct<'b> {
index fa203debb3a1b4705923fb25f1ef6ba0005c369d..8e473dad69341e3375c6018311982e5f700032f6 100644 (file)
@@ -4,7 +4,7 @@ error[E0495]: cannot infer an appropriate lifetime for borrow expression due to
 LL |     box B(&*v) as Box<dyn X>
    |           ^^^
    |
-note: first, the lifetime cannot outlive the lifetime 'a as defined on the function body at 9:6...
+note: first, the lifetime cannot outlive the lifetime `'a` as defined on the function body at 9:6...
   --> $DIR/regions-close-object-into-object-2.rs:9:6
    |
 LL | fn g<'a, T: 'static>(v: Box<dyn A<T> + 'a>) -> Box<dyn X + 'static> {
@@ -21,3 +21,4 @@ LL |     box B(&*v) as Box<dyn X>
 
 error: aborting due to previous error
 
+For more information about this error, try `rustc --explain E0495`.
index f5e66f84a9ee7657edad02d8a7cd890a840a3170..c80d13e15b14726c16bc6e38b120e550220aea4b 100644 (file)
@@ -4,7 +4,7 @@ error[E0495]: cannot infer an appropriate lifetime for borrow expression due to
 LL |     box B(&*v) as Box<dyn X>
    |           ^^^
    |
-note: first, the lifetime cannot outlive the lifetime 'a as defined on the function body at 9:6...
+note: first, the lifetime cannot outlive the lifetime `'a` as defined on the function body at 9:6...
   --> $DIR/regions-close-object-into-object-4.rs:9:6
    |
 LL | fn i<'a, T, U>(v: Box<dyn A<U>+'a>) -> Box<dyn X + 'static> {
@@ -21,3 +21,4 @@ LL |     box B(&*v) as Box<dyn X>
 
 error: aborting due to previous error
 
+For more information about this error, try `rustc --explain E0495`.
index 8b3dbc8b64902c6f1f1d8c9a6ddf678dbe115e08..ef21316ea83aea639866521ca102079714d32282 100644 (file)
@@ -4,7 +4,7 @@ error[E0495]: cannot infer an appropriate lifetime due to conflicting requiremen
 LL |     box v as Box<dyn SomeTrait + 'a>
    |     ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
    |
-note: first, the lifetime cannot outlive the lifetime 'a as defined on the function body at 18:20...
+note: first, the lifetime cannot outlive the lifetime `'a` as defined on the function body at 18:20...
   --> $DIR/regions-close-over-type-parameter-multiple.rs:18:20
    |
 LL | fn make_object_bad<'a,'b,'c,A:SomeTrait+'a+'b>(v: A) -> Box<dyn SomeTrait + 'c> {
@@ -14,7 +14,7 @@ note: ...so that the declared lifetime parameter bounds are satisfied
    |
 LL |     box v as Box<dyn SomeTrait + 'a>
    |     ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-note: but, the lifetime must be valid for the lifetime 'c as defined on the function body at 18:26...
+note: but, the lifetime must be valid for the lifetime `'c` as defined on the function body at 18:26...
   --> $DIR/regions-close-over-type-parameter-multiple.rs:18:26
    |
 LL | fn make_object_bad<'a,'b,'c,A:SomeTrait+'a+'b>(v: A) -> Box<dyn SomeTrait + 'c> {
@@ -25,3 +25,4 @@ LL | fn make_object_bad<'a,'b,'c,A:SomeTrait+'a+'b>(v: A) -> Box<dyn SomeTrait +
 
 error: aborting due to previous error
 
+For more information about this error, try `rustc --explain E0495`.
index e13cbe9960ab8fc7ac754b3da8ca4e2f4f36cf76..12b89787d5f18ff5cfbf5aa83c3c4d2a8004ec9b 100644 (file)
@@ -4,7 +4,7 @@ error[E0495]: cannot infer an appropriate lifetime for lifetime parameter `'a` d
 LL |     Ast::Add(x, y)
    |     ^^^^^^^^
    |
-note: first, the lifetime cannot outlive the lifetime 'a as defined on the function body at 6:16...
+note: first, the lifetime cannot outlive the lifetime `'a` as defined on the function body at 6:16...
   --> $DIR/regions-creating-enums4.rs:6:16
    |
 LL | fn mk_add_bad2<'a,'b>(x: &'a Ast<'a>, y: &'a Ast<'a>, z: &Ast) -> Ast<'b> {
@@ -12,7 +12,7 @@ LL | fn mk_add_bad2<'a,'b>(x: &'a Ast<'a>, y: &'a Ast<'a>, z: &Ast) -> Ast<'b> {
    = note: ...so that the expression is assignable:
            expected &Ast<'_>
               found &Ast<'a>
-note: but, the lifetime must be valid for the lifetime 'b as defined on the function body at 6:19...
+note: but, the lifetime must be valid for the lifetime `'b` as defined on the function body at 6:19...
   --> $DIR/regions-creating-enums4.rs:6:19
    |
 LL | fn mk_add_bad2<'a,'b>(x: &'a Ast<'a>, y: &'a Ast<'a>, z: &Ast) -> Ast<'b> {
@@ -23,3 +23,4 @@ LL | fn mk_add_bad2<'a,'b>(x: &'a Ast<'a>, y: &'a Ast<'a>, z: &Ast) -> Ast<'b> {
 
 error: aborting due to previous error
 
+For more information about this error, try `rustc --explain E0495`.
index 7b9f2c9503b2fdbfa1e604f56ef99952ebf1541f..9095451da0522bfae8e078ac54311fb44d0aef95 100644 (file)
@@ -4,12 +4,12 @@ error[E0312]: lifetime of reference outlives lifetime of borrowed content...
 LL |         g2.get()
    |         ^^^^^^^^
    |
-note: ...the reference is valid for the lifetime 'a as defined on the impl at 18:6...
+note: ...the reference is valid for the lifetime `'a` as defined on the impl at 18:6...
   --> $DIR/regions-early-bound-error-method.rs:18:6
    |
 LL | impl<'a> Box<'a> {
    |      ^^
-note: ...but the borrowed content is only valid for the lifetime 'b as defined on the method body at 19:11
+note: ...but the borrowed content is only valid for the lifetime `'b` as defined on the method body at 19:11
   --> $DIR/regions-early-bound-error-method.rs:19:11
    |
 LL |     fn or<'b,G:GetRef<'b>>(&self, g2: G) -> &'a isize {
index a68355b78f54ca2a82f8bf8b1f443451a74c8080..162d573362d45d1f3617a7b4464deb12ee71fc70 100644 (file)
@@ -4,12 +4,12 @@ error[E0312]: lifetime of reference outlives lifetime of borrowed content...
 LL |     g1.get()
    |     ^^^^^^^^
    |
-note: ...the reference is valid for the lifetime 'b as defined on the function body at 18:11...
+note: ...the reference is valid for the lifetime `'b` as defined on the function body at 18:11...
   --> $DIR/regions-early-bound-error.rs:18:11
    |
 LL | fn get<'a,'b,G:GetRef<'a, isize>>(g1: G, b: &'b isize) -> &'b isize {
    |           ^^
-note: ...but the borrowed content is only valid for the lifetime 'a as defined on the function body at 18:8
+note: ...but the borrowed content is only valid for the lifetime `'a` as defined on the function body at 18:8
   --> $DIR/regions-early-bound-error.rs:18:8
    |
 LL | fn get<'a,'b,G:GetRef<'a, isize>>(g1: G, b: &'b isize) -> &'b isize {
index d867448e1372abbc6b54383ebab539aaaa6e41db..b93dd0d4c57c9e06785409bd9e8e4adf359ca715 100644 (file)
@@ -25,3 +25,4 @@ LL |     s.f(|p| p)
 
 error: aborting due to previous error
 
+For more information about this error, try `rustc --explain E0495`.
index c8a02683d1000bad004b4d43c9f98452cca385bd..a6b165e2d444474e563ff45d1f7b5a95e3320295 100644 (file)
@@ -25,3 +25,4 @@ LL |     with(|o| o)
 
 error: aborting due to previous error
 
+For more information about this error, try `rustc --explain E0495`.
index 3b8f09f1ad80a7244be01ccf8d87ecb5d14d5cec..ad555efadf7ec98d24cd342c3948d254661eb85d 100644 (file)
@@ -8,12 +8,12 @@ LL | |     let z: Option<&'a &'b usize> = None;
 LL | | }
    | |_^
    |
-note: the pointer is valid for the lifetime 'a as defined on the function body at 5:14
+note: the pointer is valid for the lifetime `'a` as defined on the function body at 5:14
   --> $DIR/regions-free-region-ordering-callee-4.rs:5:14
    |
 LL | fn ordering4<'a, 'b, F>(a: &'a usize, b: &'b usize, x: F) where F: FnOnce(&'a &'b usize) {
    |              ^^
-note: but the referenced data is only valid for the lifetime 'b as defined on the function body at 5:18
+note: but the referenced data is only valid for the lifetime `'b` as defined on the function body at 5:18
   --> $DIR/regions-free-region-ordering-callee-4.rs:5:18
    |
 LL | fn ordering4<'a, 'b, F>(a: &'a usize, b: &'b usize, x: F) where F: FnOnce(&'a &'b usize) {
index 5fad6de2a62af661b0654e3539d4170e643e2b63..10644174b9bc43bc7c28d3dbadb666bc6ae17914 100644 (file)
@@ -4,7 +4,7 @@ error[E0495]: cannot infer an appropriate lifetime for borrow expression due to
 LL |             None => &self.val
    |                     ^^^^^^^^^
    |
-note: first, the lifetime cannot outlive the lifetime 'a as defined on the method body at 14:12...
+note: first, the lifetime cannot outlive the lifetime `'a` as defined on the method body at 14:12...
   --> $DIR/regions-free-region-ordering-incorrect.rs:14:12
    |
 LL |     fn get<'a>(&'a self) -> &'b T {
@@ -14,7 +14,7 @@ note: ...so that reference does not outlive borrowed content
    |
 LL |             None => &self.val
    |                     ^^^^^^^^^
-note: but, the lifetime must be valid for the lifetime 'b as defined on the impl at 13:6...
+note: but, the lifetime must be valid for the lifetime `'b` as defined on the impl at 13:6...
   --> $DIR/regions-free-region-ordering-incorrect.rs:13:6
    |
 LL | impl<'b, T> Node<'b, T> {
@@ -30,3 +30,4 @@ LL | |         }
 
 error: aborting due to previous error
 
+For more information about this error, try `rustc --explain E0495`.
index b3390bcc4d50b3ebb30fdd25b0ff927e569869d1..c4ca7e970749c4fb30a1919e137afb3f23c48c36 100644 (file)
@@ -7,12 +7,12 @@ LL | | {
 LL | | }
    | |_^
    |
-note: the pointer is valid for the lifetime 'x as defined on the function body at 21:11
+note: the pointer is valid for the lifetime `'x` as defined on the function body at 21:11
   --> $DIR/regions-implied-bounds-projection-gap-hr-1.rs:21:11
    |
 LL | fn callee<'x, 'y, T>(t: &'x dyn for<'z> Trait1< <T as Trait2<'y, 'z>>::Foo >)
    |           ^^
-note: but the referenced data is only valid for the lifetime 'y as defined on the function body at 21:15
+note: but the referenced data is only valid for the lifetime `'y` as defined on the function body at 21:15
   --> $DIR/regions-implied-bounds-projection-gap-hr-1.rs:21:15
    |
 LL | fn callee<'x, 'y, T>(t: &'x dyn for<'z> Trait1< <T as Trait2<'y, 'z>>::Foo >)
index 151c8307a14f605e224806c75a69d537c8b9fa72..1d6dbdb2c7b57f3de79825376cb789249ffde81f 100644 (file)
@@ -27,3 +27,4 @@ LL |     let z = with(|y| { select(x, y) });
 
 error: aborting due to previous error
 
+For more information about this error, try `rustc --explain E0495`.
index d31ed3ede36fa1557e398e6577f11e24f5ace02e..f4e223bbf6f9bd5c93c3aab2cbb68784ce943fe9 100644 (file)
@@ -6,7 +6,7 @@ LL |     b_isize
    |
    = note: expected type `Invariant<'static>`
               found type `Invariant<'r>`
-note: the lifetime 'r as defined on the function body at 11:23...
+note: the lifetime `'r` as defined on the function body at 11:23...
   --> $DIR/regions-infer-invariance-due-to-decl.rs:11:23
    |
 LL | fn to_longer_lifetime<'r>(b_isize: Invariant<'r>) -> Invariant<'static> {
index f8bdd014db7c6dd8820be8ce427127d3fa685985..6322244fcf937e8019faed300d5fc008bd19d6cf 100644 (file)
@@ -6,7 +6,7 @@ LL |     b_isize
    |
    = note: expected type `Invariant<'static>`
               found type `Invariant<'r>`
-note: the lifetime 'r as defined on the function body at 9:23...
+note: the lifetime `'r` as defined on the function body at 9:23...
   --> $DIR/regions-infer-invariance-due-to-mutability-3.rs:9:23
    |
 LL | fn to_longer_lifetime<'r>(b_isize: Invariant<'r>) -> Invariant<'static> {
index 1de6f22f08e5098c187864d68f325598a6c85cc9..7baae69945f9cda1a16a7a8bfcbc29b9ad31d95b 100644 (file)
@@ -6,7 +6,7 @@ LL |     b_isize
    |
    = note: expected type `Invariant<'static>`
               found type `Invariant<'r>`
-note: the lifetime 'r as defined on the function body at 9:23...
+note: the lifetime `'r` as defined on the function body at 9:23...
   --> $DIR/regions-infer-invariance-due-to-mutability-4.rs:9:23
    |
 LL | fn to_longer_lifetime<'r>(b_isize: Invariant<'r>) -> Invariant<'static> {
index f43ab8291218787a2140e9ec3e4cda468b3e7bab..6365769430f362656755958be01fdd5116a6f51b 100644 (file)
@@ -6,12 +6,12 @@ LL | fn take_direct<'a,'b>(p: Direct<'a>) -> Direct<'b> { p }
    |
    = note: expected type `Direct<'b>`
               found type `Direct<'a>`
-note: the lifetime 'a as defined on the function body at 15:16...
+note: the lifetime `'a` as defined on the function body at 15:16...
   --> $DIR/regions-infer-not-param.rs:15:16
    |
 LL | fn take_direct<'a,'b>(p: Direct<'a>) -> Direct<'b> { p }
    |                ^^
-note: ...does not necessarily outlive the lifetime 'b as defined on the function body at 15:19
+note: ...does not necessarily outlive the lifetime `'b` as defined on the function body at 15:19
   --> $DIR/regions-infer-not-param.rs:15:19
    |
 LL | fn take_direct<'a,'b>(p: Direct<'a>) -> Direct<'b> { p }
@@ -25,12 +25,12 @@ LL | fn take_indirect2<'a,'b>(p: Indirect2<'a>) -> Indirect2<'b> { p }
    |
    = note: expected type `Indirect2<'b>`
               found type `Indirect2<'a>`
-note: the lifetime 'a as defined on the function body at 19:19...
+note: the lifetime `'a` as defined on the function body at 19:19...
   --> $DIR/regions-infer-not-param.rs:19:19
    |
 LL | fn take_indirect2<'a,'b>(p: Indirect2<'a>) -> Indirect2<'b> { p }
    |                   ^^
-note: ...does not necessarily outlive the lifetime 'b as defined on the function body at 19:22
+note: ...does not necessarily outlive the lifetime `'b` as defined on the function body at 19:22
   --> $DIR/regions-infer-not-param.rs:19:22
    |
 LL | fn take_indirect2<'a,'b>(p: Indirect2<'a>) -> Indirect2<'b> { p }
@@ -44,12 +44,12 @@ LL | fn take_indirect2<'a,'b>(p: Indirect2<'a>) -> Indirect2<'b> { p }
    |
    = note: expected type `Indirect2<'b>`
               found type `Indirect2<'a>`
-note: the lifetime 'b as defined on the function body at 19:22...
+note: the lifetime `'b` as defined on the function body at 19:22...
   --> $DIR/regions-infer-not-param.rs:19:22
    |
 LL | fn take_indirect2<'a,'b>(p: Indirect2<'a>) -> Indirect2<'b> { p }
    |                      ^^
-note: ...does not necessarily outlive the lifetime 'a as defined on the function body at 19:19
+note: ...does not necessarily outlive the lifetime `'a` as defined on the function body at 19:19
   --> $DIR/regions-infer-not-param.rs:19:19
    |
 LL | fn take_indirect2<'a,'b>(p: Indirect2<'a>) -> Indirect2<'b> { p }
index 1b999ed059c4010a20aae5e27e3113ac88c7feef..b1fd337b8d04b4d262640978b68dc72666d72687 100644 (file)
@@ -17,7 +17,7 @@ LL | |
 LL | |
 LL | |     }
    | |_____^
-note: ...does not necessarily outlive the lifetime 'a as defined on the impl at 16:6
+note: ...does not necessarily outlive the lifetime `'a` as defined on the impl at 16:6
   --> $DIR/regions-infer-paramd-indirect.rs:16:6
    |
 LL | impl<'a> SetF<'a> for C<'a> {
index 904dee6998c9bbe7c720c044edaa3c63aff4aa48..f4eb5c8644f034872c80dba20474d346bbfbc90e 100644 (file)
@@ -49,7 +49,7 @@ LL | |         if false { return ay; }
 LL | |         return z;
 LL | |     }));
    | |_____^
-note: ...but the borrowed content is only valid for the lifetime 'x as defined on the function body at 3:11
+note: ...but the borrowed content is only valid for the lifetime `'x` as defined on the function body at 3:11
   --> $DIR/regions-nested-fns.rs:3:11
    |
 LL | fn nested<'x>(x: &'x isize) {
@@ -57,4 +57,5 @@ LL | fn nested<'x>(x: &'x isize) {
 
 error: aborting due to 2 previous errors
 
-For more information about this error, try `rustc --explain E0312`.
+Some errors have detailed explanations: E0312, E0495.
+For more information about an error, try `rustc --explain E0312`.
index 912e118316271a80e8ee02ed62efddf58776bf4d..d29fd80943f73a2030b1695284f100a793568f36 100644 (file)
@@ -7,12 +7,12 @@ LL | | {
 LL | | }
    | |_^
    |
-note: first, the lifetime cannot outlive the lifetime 'a as defined on the function body at 22:8...
+note: first, the lifetime cannot outlive the lifetime `'a` as defined on the function body at 22:8...
   --> $DIR/regions-normalize-in-where-clause-list.rs:22:8
    |
 LL | fn bar<'a, 'b>()
    |        ^^
-note: ...but the lifetime must also be valid for the lifetime 'b as defined on the function body at 22:12...
+note: ...but the lifetime must also be valid for the lifetime `'b` as defined on the function body at 22:12...
   --> $DIR/regions-normalize-in-where-clause-list.rs:22:12
    |
 LL | fn bar<'a, 'b>()
@@ -23,3 +23,4 @@ LL | fn bar<'a, 'b>()
 
 error: aborting due to previous error
 
+For more information about this error, try `rustc --explain E0495`.
index ed5800940ee3192597abbceea1d05fdba9d45078..0992d9bf295c1c5408d5ccd91bac913a14a68e77 100644 (file)
@@ -4,12 +4,12 @@ error[E0491]: in type `&'a WithHrAssoc<TheType<'b>>`, reference has a longer lif
 LL |     let _: &'a WithHrAssoc<TheType<'b>> = loop { };
    |            ^^^^^^^^^^^^^^^^^^^^^^^^^^^^
    |
-note: the pointer is valid for the lifetime 'a as defined on the function body at 27:15
+note: the pointer is valid for the lifetime `'a` as defined on the function body at 27:15
   --> $DIR/regions-outlives-projection-container-hrtb.rs:27:15
    |
 LL | fn with_assoc<'a,'b>() {
    |               ^^
-note: but the referenced data is only valid for the lifetime 'b as defined on the function body at 27:18
+note: but the referenced data is only valid for the lifetime `'b` as defined on the function body at 27:18
   --> $DIR/regions-outlives-projection-container-hrtb.rs:27:18
    |
 LL | fn with_assoc<'a,'b>() {
@@ -21,12 +21,12 @@ error[E0491]: in type `&'a WithHrAssocSub<TheType<'b>>`, reference has a longer
 LL |     let _: &'a WithHrAssocSub<TheType<'b>> = loop { };
    |            ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
    |
-note: the pointer is valid for the lifetime 'a as defined on the function body at 46:19
+note: the pointer is valid for the lifetime `'a` as defined on the function body at 46:19
   --> $DIR/regions-outlives-projection-container-hrtb.rs:46:19
    |
 LL | fn with_assoc_sub<'a,'b>() {
    |                   ^^
-note: but the referenced data is only valid for the lifetime 'b as defined on the function body at 46:22
+note: but the referenced data is only valid for the lifetime `'b` as defined on the function body at 46:22
   --> $DIR/regions-outlives-projection-container-hrtb.rs:46:22
    |
 LL | fn with_assoc_sub<'a,'b>() {
index 152e6c5600c4e63e7ab775abe989d05cd30613f0..49e28a14d8a4ec219d21b9bdbcef7c6feb5fce29 100644 (file)
@@ -4,12 +4,12 @@ error[E0491]: in type `&'a WithAssoc<TheType<'b>>`, reference has a longer lifet
 LL |     let _: &'a WithAssoc<TheType<'b>> = loop { };
    |            ^^^^^^^^^^^^^^^^^^^^^^^^^^
    |
-note: the pointer is valid for the lifetime 'a as defined on the function body at 27:15
+note: the pointer is valid for the lifetime `'a` as defined on the function body at 27:15
   --> $DIR/regions-outlives-projection-container-wc.rs:27:15
    |
 LL | fn with_assoc<'a,'b>() {
    |               ^^
-note: but the referenced data is only valid for the lifetime 'b as defined on the function body at 27:18
+note: but the referenced data is only valid for the lifetime `'b` as defined on the function body at 27:18
   --> $DIR/regions-outlives-projection-container-wc.rs:27:18
    |
 LL | fn with_assoc<'a,'b>() {
index 3c1a98a3c018fa589a358ef0dd2ee58d46bbdbb6..dba15fb0576eea6bcb10ff50e2a6d9e8cda3f87b 100644 (file)
@@ -4,12 +4,12 @@ error[E0491]: in type `&'a WithAssoc<TheType<'b>>`, reference has a longer lifet
 LL |     let _x: &'a WithAssoc<TheType<'b>> = loop { };
    |             ^^^^^^^^^^^^^^^^^^^^^^^^^^
    |
-note: the pointer is valid for the lifetime 'a as defined on the function body at 28:15
+note: the pointer is valid for the lifetime `'a` as defined on the function body at 28:15
   --> $DIR/regions-outlives-projection-container.rs:28:15
    |
 LL | fn with_assoc<'a,'b>() {
    |               ^^
-note: but the referenced data is only valid for the lifetime 'b as defined on the function body at 28:18
+note: but the referenced data is only valid for the lifetime `'b` as defined on the function body at 28:18
   --> $DIR/regions-outlives-projection-container.rs:28:18
    |
 LL | fn with_assoc<'a,'b>() {
@@ -21,12 +21,12 @@ error[E0491]: in type `&'a WithoutAssoc<TheType<'b>>`, reference has a longer li
 LL |     let _x: &'a WithoutAssoc<TheType<'b>> = loop { };
    |             ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
    |
-note: the pointer is valid for the lifetime 'a as defined on the function body at 50:18
+note: the pointer is valid for the lifetime `'a` as defined on the function body at 50:18
   --> $DIR/regions-outlives-projection-container.rs:50:18
    |
 LL | fn without_assoc<'a,'b>() {
    |                  ^^
-note: but the referenced data is only valid for the lifetime 'b as defined on the function body at 50:21
+note: but the referenced data is only valid for the lifetime `'b` as defined on the function body at 50:21
   --> $DIR/regions-outlives-projection-container.rs:50:21
    |
 LL | fn without_assoc<'a,'b>() {
@@ -38,12 +38,12 @@ error[E0491]: in type `&'a WithAssoc<TheType<'b>>`, reference has a longer lifet
 LL |     call::<&'a WithAssoc<TheType<'b>>>();
    |            ^^^^^^^^^^^^^^^^^^^^^^^^^^
    |
-note: the pointer is valid for the lifetime 'a as defined on the function body at 58:20
+note: the pointer is valid for the lifetime `'a` as defined on the function body at 58:20
   --> $DIR/regions-outlives-projection-container.rs:58:20
    |
 LL | fn call_with_assoc<'a,'b>() {
    |                    ^^
-note: but the referenced data is only valid for the lifetime 'b as defined on the function body at 58:23
+note: but the referenced data is only valid for the lifetime `'b` as defined on the function body at 58:23
   --> $DIR/regions-outlives-projection-container.rs:58:23
    |
 LL | fn call_with_assoc<'a,'b>() {
@@ -55,12 +55,12 @@ error[E0491]: in type `&'a WithoutAssoc<TheType<'b>>`, reference has a longer li
 LL |     call::<&'a WithoutAssoc<TheType<'b>>>();
    |            ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
    |
-note: the pointer is valid for the lifetime 'a as defined on the function body at 67:23
+note: the pointer is valid for the lifetime `'a` as defined on the function body at 67:23
   --> $DIR/regions-outlives-projection-container.rs:67:23
    |
 LL | fn call_without_assoc<'a,'b>() {
    |                       ^^
-note: but the referenced data is only valid for the lifetime 'b as defined on the function body at 67:26
+note: but the referenced data is only valid for the lifetime `'b` as defined on the function body at 67:26
   --> $DIR/regions-outlives-projection-container.rs:67:26
    |
 LL | fn call_without_assoc<'a,'b>() {
index 403af2a9e6a44c55a3607daf7041f3606ffcc38f..49076673ad3989dd1f58ccbe183dcdf592d1c20a 100644 (file)
@@ -12,7 +12,7 @@ LL |     with(|o| o)
    = note: ...so that the expression is assignable:
            expected &isize
               found &isize
-note: but, the lifetime must be valid for the lifetime 'a as defined on the function body at 9:14...
+note: but, the lifetime must be valid for the lifetime `'a` as defined on the function body at 9:14...
   --> $DIR/regions-ret-borrowed-1.rs:9:14
    |
 LL | fn return_it<'a>() -> &'a isize {
@@ -25,3 +25,4 @@ LL |     with(|o| o)
 
 error: aborting due to previous error
 
+For more information about this error, try `rustc --explain E0495`.
index 5d1f26da6c78371daef835c2e2ef29fa323733ca..eb1ade27acea7a3f8c51e31959a43ceca7da3fce 100644 (file)
@@ -12,7 +12,7 @@ LL |     with(|o| o)
    = note: ...so that the expression is assignable:
            expected &isize
               found &isize
-note: but, the lifetime must be valid for the lifetime 'a as defined on the function body at 12:14...
+note: but, the lifetime must be valid for the lifetime `'a` as defined on the function body at 12:14...
   --> $DIR/regions-ret-borrowed.rs:12:14
    |
 LL | fn return_it<'a>() -> &'a isize {
@@ -25,3 +25,4 @@ LL |     with(|o| o)
 
 error: aborting due to previous error
 
+For more information about this error, try `rustc --explain E0495`.
index 291b8367f7b75e2c3582154c98d49271fd2fb689..946465bcb5f261a4437b307d33c4e954907d5187 100644 (file)
@@ -4,7 +4,7 @@ error[E0495]: cannot infer an appropriate lifetime for borrow expression due to
 LL |         let mut f = || &mut x;
    |                        ^^^^^^
    |
-note: first, the lifetime cannot outlive the lifetime '_ as defined on the body at 7:21...
+note: first, the lifetime cannot outlive the lifetime `'_` as defined on the body at 7:21...
   --> $DIR/regions-return-ref-to-upvar-issue-17403.rs:7:21
    |
 LL |         let mut f = || &mut x;
@@ -27,3 +27,4 @@ LL |         let y = f();
 
 error: aborting due to previous error
 
+For more information about this error, try `rustc --explain E0495`.
index 21ead8b768f475db682079524370a9a75460cfb0..6e631d40d45008119732c8a0fd60be27ce050666 100644 (file)
@@ -5,7 +5,7 @@ LL |     t
    |     ^
    |
    = note: ...the reference is valid for the static lifetime...
-note: ...but the borrowed content is only valid for the lifetime 'a as defined on the function body at 8:24
+note: ...but the borrowed content is only valid for the lifetime `'a` as defined on the function body at 8:24
   --> $DIR/regions-static-bound.rs:8:24
    |
 LL | fn static_id_wrong_way<'a>(t: &'a ()) -> &'static () where 'static: 'a {
index 421f826ccc54e0327885bdb7bc4d752b93b00f55..f835c005ff969f3df94adbba84d84e028fd91236 100644 (file)
@@ -6,7 +6,7 @@ LL |     fn get_ctxt(&self) -> &'a Ctxt {
    |
    = note: expected type `fn(&HasCtxt<'a>) -> &Ctxt`
               found type `fn(&HasCtxt<'a>) -> &'a Ctxt`
-note: the lifetime 'a as defined on the impl at 12:6...
+note: the lifetime `'a` as defined on the impl at 12:6...
   --> $DIR/regions-trait-1.rs:12:6
    |
 LL | impl<'a> GetCtxt for HasCtxt<'a> {
index 6de92f13840b3310b3a931a63a05e6702736315f..b7c7f93149dcf031cc44e7f12769657193958804 100644 (file)
@@ -4,12 +4,12 @@ error[E0478]: lifetime bound not satisfied
 LL |     x
    |     ^
    |
-note: lifetime parameter instantiated with the lifetime 'a as defined on the function body at 13:9
+note: lifetime parameter instantiated with the lifetime `'a` as defined on the function body at 13:9
   --> $DIR/regions-trait-object-subtyping.rs:13:9
    |
 LL | fn foo3<'a,'b>(x: &'a mut dyn Dummy) -> &'b mut dyn Dummy {
    |         ^^
-note: but lifetime parameter must outlive the lifetime 'b as defined on the function body at 13:12
+note: but lifetime parameter must outlive the lifetime `'b` as defined on the function body at 13:12
   --> $DIR/regions-trait-object-subtyping.rs:13:12
    |
 LL | fn foo3<'a,'b>(x: &'a mut dyn Dummy) -> &'b mut dyn Dummy {
@@ -21,7 +21,7 @@ error[E0495]: cannot infer an appropriate lifetime for automatic coercion due to
 LL |     x
    |     ^
    |
-note: first, the lifetime cannot outlive the lifetime 'a as defined on the function body at 13:9...
+note: first, the lifetime cannot outlive the lifetime `'a` as defined on the function body at 13:9...
   --> $DIR/regions-trait-object-subtyping.rs:13:9
    |
 LL | fn foo3<'a,'b>(x: &'a mut dyn Dummy) -> &'b mut dyn Dummy {
@@ -31,7 +31,7 @@ note: ...so that reference does not outlive borrowed content
    |
 LL |     x
    |     ^
-note: but, the lifetime must be valid for the lifetime 'b as defined on the function body at 13:12...
+note: but, the lifetime must be valid for the lifetime `'b` as defined on the function body at 13:12...
   --> $DIR/regions-trait-object-subtyping.rs:13:12
    |
 LL | fn foo3<'a,'b>(x: &'a mut dyn Dummy) -> &'b mut dyn Dummy {
@@ -48,12 +48,12 @@ LL |     x
    |
    = note: expected type `Wrapper<&'b mut (dyn Dummy + 'b)>`
               found type `Wrapper<&'a mut (dyn Dummy + 'a)>`
-note: the lifetime 'b as defined on the function body at 20:15...
+note: the lifetime `'b` as defined on the function body at 20:15...
   --> $DIR/regions-trait-object-subtyping.rs:20:15
    |
 LL | fn foo4<'a:'b,'b>(x: Wrapper<&'a mut dyn Dummy>) -> Wrapper<&'b mut dyn Dummy> {
    |               ^^
-note: ...does not necessarily outlive the lifetime 'a as defined on the function body at 20:9
+note: ...does not necessarily outlive the lifetime `'a` as defined on the function body at 20:9
   --> $DIR/regions-trait-object-subtyping.rs:20:9
    |
 LL | fn foo4<'a:'b,'b>(x: Wrapper<&'a mut dyn Dummy>) -> Wrapper<&'b mut dyn Dummy> {
@@ -61,5 +61,5 @@ LL | fn foo4<'a:'b,'b>(x: Wrapper<&'a mut dyn Dummy>) -> Wrapper<&'b mut dyn Dum
 
 error: aborting due to 3 previous errors
 
-Some errors have detailed explanations: E0308, E0478.
+Some errors have detailed explanations: E0308, E0478, E0495.
 For more information about an error, try `rustc --explain E0308`.
index 90b37ce935a6d8b71c67b46133b215cbb5fd50c9..aae519c5df2e9901057e23d14e4168399d24d591 100644 (file)
@@ -6,7 +6,7 @@ LL |     let _: Invariant<'static> = c;
    |
    = note: expected type `Invariant<'static>`
               found type `Invariant<'b>`
-note: the lifetime 'b as defined on the function body at 11:9...
+note: the lifetime `'b` as defined on the function body at 11:9...
   --> $DIR/regions-variance-invariant-use-covariant.rs:11:9
    |
 LL | fn use_<'b>(c: Invariant<'b>) {
index 4e12478c36da3d890ba6760b943c6aeaac52749c..9f39508604110df3c48abb2becb2a3a1208a8828 100644 (file)
@@ -4,12 +4,12 @@ error[E0478]: lifetime bound not satisfied
 LL |     x: Box<dyn TheTrait<'a>+'b>
    |     ^^^^^^^^^^^^^^^^^^^^^^^^^^^
    |
-note: lifetime parameter instantiated with the lifetime 'b as defined on the struct at 6:15
+note: lifetime parameter instantiated with the lifetime `'b` as defined on the struct at 6:15
   --> $DIR/regions-wf-trait-object.rs:6:15
    |
 LL | struct Foo<'a,'b> {
    |               ^^
-note: but lifetime parameter must outlive the lifetime 'a as defined on the struct at 6:12
+note: but lifetime parameter must outlive the lifetime `'a` as defined on the struct at 6:12
   --> $DIR/regions-wf-trait-object.rs:6:12
    |
 LL | struct Foo<'a,'b> {
index 08aca3bb14c26cbf521881068a69369f030f54f4..609a40163a30c38fcb3e8ccc7efabe9ab064f115 100644 (file)
@@ -34,7 +34,7 @@ LL | impl                    Drop for N<'static>     { fn drop(&mut self) { } }
    |
    = note: expected type `N<'n>`
               found type `N<'static>`
-note: the lifetime 'n as defined on the struct at 8:10...
+note: the lifetime `'n` as defined on the struct at 8:10...
   --> $DIR/reject-specialized-drops-8142.rs:8:10
    |
 LL | struct N<'n> { x: &'n i8 }
@@ -95,12 +95,12 @@ error[E0495]: cannot infer an appropriate lifetime for lifetime parameter `'lw`
 LL | impl<'lw>         Drop for W<'lw,'lw>     { fn drop(&mut self) { } } // REJECT
    | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
    |
-note: first, the lifetime cannot outlive the lifetime 'l1 as defined on the struct at 17:10...
+note: first, the lifetime cannot outlive the lifetime `'l1` as defined on the struct at 17:10...
   --> $DIR/reject-specialized-drops-8142.rs:17:10
    |
 LL | struct W<'l1, 'l2> { x: &'l1 i8, y: &'l2 u8 }
    |          ^^^
-note: ...but the lifetime must also be valid for the lifetime 'l2 as defined on the struct at 17:15...
+note: ...but the lifetime must also be valid for the lifetime `'l2` as defined on the struct at 17:15...
   --> $DIR/reject-specialized-drops-8142.rs:17:15
    |
 LL | struct W<'l1, 'l2> { x: &'l1 i8, y: &'l2 u8 }
@@ -111,5 +111,5 @@ LL | struct W<'l1, 'l2> { x: &'l1 i8, y: &'l2 u8 }
 
 error: aborting due to 8 previous errors
 
-Some errors have detailed explanations: E0308, E0366, E0367.
+Some errors have detailed explanations: E0308, E0366, E0367, E0495.
 For more information about an error, try `rustc --explain E0308`.
index 64177ac2a83104129eafb22a27f0e88bccf08515..9766f8f1412b6c4341018afbbb68f1d60c9364e2 100644 (file)
@@ -14,3 +14,4 @@ LL | use std::thread::Result;
 
 error: aborting due to previous error
 
+For more information about this error, try `rustc --explain E0574`.
index d00d87393aab67cd6338d4ce1e555e4396b35bad..513e02f74e3b265cf88afc639d6cc7d91c9076ca 100644 (file)
@@ -27,7 +27,7 @@ LL | use mul3::Mul;
    |
 LL | use mul4::Mul;
    |
-and 2 other candidates
+     and 2 other candidates
 
 error[E0405]: cannot find trait `ThisTraitReallyDoesntExistInAnyModuleReally` in this scope
   --> $DIR/issue-21221-1.rs:63:6
index 7790383843e17743a1c0cc99f619c4b783484e2c..329543114a610af024f2e64af610188782df3b2b 100644 (file)
@@ -61,8 +61,14 @@ LL |         purr();
 error[E0424]: expected value, found module `self`
   --> $DIR/issue-2356.rs:65:8
    |
-LL |     if self.whiskers > 3 {
-   |        ^^^^ `self` value is a keyword only available in methods with `self` parameter
+LL | /   fn meow() {
+LL | |     if self.whiskers > 3 {
+   | |        ^^^^ `self` value is a keyword only available in methods with a `self` parameter
+LL | |
+LL | |         println!("MEOW");
+LL | |     }
+LL | |   }
+   | |___- this function doesn't have a `self` parameter
 
 error[E0425]: cannot find function `grow_older` in this scope
   --> $DIR/issue-2356.rs:72:5
@@ -97,8 +103,12 @@ LL |     purr_louder();
 error[E0424]: expected value, found module `self`
   --> $DIR/issue-2356.rs:92:5
    |
-LL |     self += 1;
-   |     ^^^^ `self` value is a keyword only available in methods with `self` parameter
+LL | / fn main() {
+LL | |     self += 1;
+   | |     ^^^^ `self` value is a keyword only available in methods with a `self` parameter
+LL | |
+LL | | }
+   | |_- this function doesn't have a `self` parameter
 
 error: aborting due to 17 previous errors
 
diff --git a/src/test/ui/resolve/issue-65025-extern-static-parent-generics.rs b/src/test/ui/resolve/issue-65025-extern-static-parent-generics.rs
new file mode 100644 (file)
index 0000000..ce45f63
--- /dev/null
@@ -0,0 +1,10 @@
+unsafe fn foo<A>() {
+    extern "C" {
+        static baz: *const A;
+        //~^ ERROR can't use generic parameters from outer function
+    }
+
+    let bar: *const u64 = core::mem::transmute(&baz);
+}
+
+fn main() { }
diff --git a/src/test/ui/resolve/issue-65025-extern-static-parent-generics.stderr b/src/test/ui/resolve/issue-65025-extern-static-parent-generics.stderr
new file mode 100644 (file)
index 0000000..6bbf76d
--- /dev/null
@@ -0,0 +1,12 @@
+error[E0401]: can't use generic parameters from outer function
+  --> $DIR/issue-65025-extern-static-parent-generics.rs:3:28
+   |
+LL | unsafe fn foo<A>() {
+   |               - type parameter from outer function
+LL |     extern "C" {
+LL |         static baz: *const A;
+   |                            ^ use of generic parameter from outer function
+
+error: aborting due to previous error
+
+For more information about this error, try `rustc --explain E0401`.
diff --git a/src/test/ui/resolve/issue-65035-static-with-parent-generics.rs b/src/test/ui/resolve/issue-65035-static-with-parent-generics.rs
new file mode 100644 (file)
index 0000000..63d3431
--- /dev/null
@@ -0,0 +1,29 @@
+#![feature(const_generics)]
+//~^ WARN the feature `const_generics` is incomplete and may cause the compiler to crash
+
+fn f<T>() {
+    extern "C" {
+        static a: *const T;
+        //~^ ERROR can't use generic parameters from outer function
+    }
+}
+
+fn g<T: Default>() {
+    static a: *const T = Default::default();
+    //~^ ERROR can't use generic parameters from outer function
+}
+
+fn h<const N: usize>() {
+    extern "C" {
+        static a: [u8; N];
+        //~^ ERROR can't use generic parameters from outer function
+    }
+}
+
+fn i<const N: usize>() {
+    static a: [u8; N] = [0; N];
+    //~^ ERROR can't use generic parameters from outer function
+    //~^^ ERROR can't use generic parameters from outer function
+}
+
+fn main() {}
diff --git a/src/test/ui/resolve/issue-65035-static-with-parent-generics.stderr b/src/test/ui/resolve/issue-65035-static-with-parent-generics.stderr
new file mode 100644 (file)
index 0000000..82e2aa2
--- /dev/null
@@ -0,0 +1,53 @@
+error[E0401]: can't use generic parameters from outer function
+  --> $DIR/issue-65035-static-with-parent-generics.rs:6:26
+   |
+LL | fn f<T>() {
+   |      - type parameter from outer function
+LL |     extern "C" {
+LL |         static a: *const T;
+   |                          ^ use of generic parameter from outer function
+
+error[E0401]: can't use generic parameters from outer function
+  --> $DIR/issue-65035-static-with-parent-generics.rs:12:22
+   |
+LL | fn g<T: Default>() {
+   |      - type parameter from outer function
+LL |     static a: *const T = Default::default();
+   |                      ^ use of generic parameter from outer function
+
+error[E0401]: can't use generic parameters from outer function
+  --> $DIR/issue-65035-static-with-parent-generics.rs:18:24
+   |
+LL | fn h<const N: usize>() {
+   |            - const parameter from outer function
+LL |     extern "C" {
+LL |         static a: [u8; N];
+   |                        ^ use of generic parameter from outer function
+
+error[E0401]: can't use generic parameters from outer function
+  --> $DIR/issue-65035-static-with-parent-generics.rs:24:20
+   |
+LL | fn i<const N: usize>() {
+   |            - const parameter from outer function
+LL |     static a: [u8; N] = [0; N];
+   |                    ^ use of generic parameter from outer function
+
+error[E0401]: can't use generic parameters from outer function
+  --> $DIR/issue-65035-static-with-parent-generics.rs:24:29
+   |
+LL | fn i<const N: usize>() {
+   |            - const parameter from outer function
+LL |     static a: [u8; N] = [0; N];
+   |                             ^ use of generic parameter from outer function
+
+warning: the feature `const_generics` is incomplete and may cause the compiler to crash
+  --> $DIR/issue-65035-static-with-parent-generics.rs:1:12
+   |
+LL | #![feature(const_generics)]
+   |            ^^^^^^^^^^^^^^
+   |
+   = note: `#[warn(incomplete_features)]` on by default
+
+error: aborting due to 5 previous errors
+
+For more information about this error, try `rustc --explain E0401`.
index 2e3c0f5448e865e770f7618db2bc4bbf1e36ceae..e693a0ef91fc2be9414d376efcfbdaf9e75cd3a6 100644 (file)
@@ -38,13 +38,13 @@ error[E0412]: cannot find type `first` in module `m`
   --> $DIR/levenshtein.rs:28:15
    |
 LL |     let b: m::first = m::second; // Misspelled item in module.
-   |               ^^^^^ help: a struct with a similar name exists: `First`
+   |               ^^^^^ help: a struct with a similar name exists (notice the capitalization): `First`
 
 error[E0425]: cannot find value `second` in module `m`
   --> $DIR/levenshtein.rs:28:26
    |
 LL |     let b: m::first = m::second; // Misspelled item in module.
-   |                          ^^^^^^ help: a unit struct with a similar name exists: `Second`
+   |                          ^^^^^^ help: a unit struct with a similar name exists (notice the capitalization): `Second`
 
 error: aborting due to 8 previous errors
 
index 0b389acf75d8b88b8dc86d8d2c23dd6901829291..0eecc7f8cc5db8d669d179b26b28601f58fe914a 100644 (file)
@@ -16,7 +16,7 @@ pub mod n {
 
     fn f() {
         n::Z;
-        //~^ ERROR tuple struct `Z` is private
+        //~^ ERROR tuple struct constructor `Z` is private
         Z;
         //~^ ERROR expected value, found struct `Z`
     }
@@ -27,21 +27,21 @@ fn f() {
 
 fn main() {
     m::S;
-    //~^ ERROR tuple struct `S` is private
+    //~^ ERROR tuple struct constructor `S` is private
     let _: S = m::S(2);
-    //~^ ERROR tuple struct `S` is private
+    //~^ ERROR tuple struct constructor `S` is private
     S;
     //~^ ERROR expected value, found struct `S`
     m::n::Z;
-    //~^ ERROR tuple struct `Z` is private
+    //~^ ERROR tuple struct constructor `Z` is private
 
     S2;
     //~^ ERROR expected value, found struct `S2`
 
     xcrate::m::S;
-    //~^ ERROR tuple struct `S` is private
+    //~^ ERROR tuple struct constructor `S` is private
     xcrate::S;
     //~^ ERROR expected value, found struct `xcrate::S`
     xcrate::m::n::Z;
-    //~^ ERROR tuple struct `Z` is private
+    //~^ ERROR tuple struct constructor `Z` is private
 }
index 979367bc623fd4ec5985136b5c417ab302b6ffdc..7d884d3a66910ca38e7fae8ec2f661d812a65c6b 100644 (file)
@@ -34,53 +34,63 @@ help: possible better candidate is found in another module, you can import it in
 LL | use m::S;
    |
 
-error[E0603]: tuple struct `Z` is private
+error[E0603]: tuple struct constructor `Z` is private
   --> $DIR/privacy-struct-ctor.rs:18:12
    |
+LL |         pub(in m) struct Z(pub(in m::n) u8);
+   |                            --------------- a constructor is private if any of the fields is private
+...
 LL |         n::Z;
    |            ^
-   |
-   = note: a tuple struct constructor is private if any of its fields is private
 
-error[E0603]: tuple struct `S` is private
+error[E0603]: tuple struct constructor `S` is private
   --> $DIR/privacy-struct-ctor.rs:29:8
    |
+LL |     pub struct S(u8);
+   |                  -- a constructor is private if any of the fields is private
+...
 LL |     m::S;
    |        ^
-   |
-   = note: a tuple struct constructor is private if any of its fields is private
 
-error[E0603]: tuple struct `S` is private
+error[E0603]: tuple struct constructor `S` is private
   --> $DIR/privacy-struct-ctor.rs:31:19
    |
+LL |     pub struct S(u8);
+   |                  -- a constructor is private if any of the fields is private
+...
 LL |     let _: S = m::S(2);
    |                   ^
-   |
-   = note: a tuple struct constructor is private if any of its fields is private
 
-error[E0603]: tuple struct `Z` is private
+error[E0603]: tuple struct constructor `Z` is private
   --> $DIR/privacy-struct-ctor.rs:35:11
    |
+LL |         pub(in m) struct Z(pub(in m::n) u8);
+   |                            --------------- a constructor is private if any of the fields is private
+...
 LL |     m::n::Z;
    |           ^
-   |
-   = note: a tuple struct constructor is private if any of its fields is private
 
-error[E0603]: tuple struct `S` is private
+error[E0603]: tuple struct constructor `S` is private
   --> $DIR/privacy-struct-ctor.rs:41:16
    |
 LL |     xcrate::m::S;
    |                ^
+   | 
+  ::: $DIR/auxiliary/privacy-struct-ctor.rs:2:18
    |
-   = note: a tuple struct constructor is private if any of its fields is private
+LL |     pub struct S(u8);
+   |                  -- a constructor is private if any of the fields is private
 
-error[E0603]: tuple struct `Z` is private
+error[E0603]: tuple struct constructor `Z` is private
   --> $DIR/privacy-struct-ctor.rs:45:19
    |
 LL |     xcrate::m::n::Z;
    |                   ^
+   | 
+  ::: $DIR/auxiliary/privacy-struct-ctor.rs:5:28
    |
-   = note: a tuple struct constructor is private if any of its fields is private
+LL |         pub(in m) struct Z(pub(in m::n) u8);
+   |                            --------------- a constructor is private if any of the fields is private
 
 error: aborting due to 10 previous errors
 
diff --git a/src/test/ui/rfc-2008-non-exhaustive/improper_ctypes/auxiliary/types.rs b/src/test/ui/rfc-2008-non-exhaustive/improper_ctypes/auxiliary/types.rs
new file mode 100644 (file)
index 0000000..1a187d0
--- /dev/null
@@ -0,0 +1,31 @@
+#![feature(non_exhaustive)]
+
+#[non_exhaustive]
+#[repr(C)]
+pub enum NonExhaustiveEnum {
+    Unit,
+    Tuple(u32),
+    Struct { field: u32 }
+}
+
+#[non_exhaustive]
+#[repr(C)]
+pub struct NormalStruct {
+    pub first_field: u16,
+    pub second_field: u16,
+}
+
+#[non_exhaustive]
+#[repr(C)]
+pub struct UnitStruct;
+
+#[non_exhaustive]
+#[repr(C)]
+pub struct TupleStruct (pub u16, pub u16);
+
+#[repr(C)]
+pub enum NonExhaustiveVariants {
+    #[non_exhaustive] Unit,
+    #[non_exhaustive] Tuple(u32),
+    #[non_exhaustive] Struct { field: u32 }
+}
diff --git a/src/test/ui/rfc-2008-non-exhaustive/improper_ctypes/extern_crate_improper.rs b/src/test/ui/rfc-2008-non-exhaustive/improper_ctypes/extern_crate_improper.rs
new file mode 100644 (file)
index 0000000..900b933
--- /dev/null
@@ -0,0 +1,24 @@
+// aux-build:types.rs
+#![deny(improper_ctypes)]
+
+extern crate types;
+
+// This test checks that non-exhaustive types with `#[repr(C)]` from an extern crate are considered
+// improper.
+
+use types::{NonExhaustiveEnum, NormalStruct, UnitStruct, TupleStruct, NonExhaustiveVariants};
+
+extern {
+    pub fn non_exhaustive_enum(_: NonExhaustiveEnum);
+    //~^ ERROR `extern` block uses type `types::NonExhaustiveEnum`, which is not FFI-safe
+    pub fn non_exhaustive_normal_struct(_: NormalStruct);
+    //~^ ERROR `extern` block uses type `types::NormalStruct`, which is not FFI-safe
+    pub fn non_exhaustive_unit_struct(_: UnitStruct);
+    //~^ ERROR `extern` block uses type `types::UnitStruct`, which is not FFI-safe
+    pub fn non_exhaustive_tuple_struct(_: TupleStruct);
+    //~^ ERROR `extern` block uses type `types::TupleStruct`, which is not FFI-safe
+    pub fn non_exhaustive_variant(_: NonExhaustiveVariants);
+    //~^ ERROR `extern` block uses type `types::NonExhaustiveVariants`, which is not FFI-safe
+}
+
+fn main() { }
diff --git a/src/test/ui/rfc-2008-non-exhaustive/improper_ctypes/extern_crate_improper.stderr b/src/test/ui/rfc-2008-non-exhaustive/improper_ctypes/extern_crate_improper.stderr
new file mode 100644 (file)
index 0000000..7fbf115
--- /dev/null
@@ -0,0 +1,47 @@
+error: `extern` block uses type `types::NonExhaustiveEnum`, which is not FFI-safe
+  --> $DIR/extern_crate_improper.rs:12:35
+   |
+LL |     pub fn non_exhaustive_enum(_: NonExhaustiveEnum);
+   |                                   ^^^^^^^^^^^^^^^^^ not FFI-safe
+   |
+note: lint level defined here
+  --> $DIR/extern_crate_improper.rs:2:9
+   |
+LL | #![deny(improper_ctypes)]
+   |         ^^^^^^^^^^^^^^^
+   = note: this enum is non-exhaustive
+
+error: `extern` block uses type `types::NormalStruct`, which is not FFI-safe
+  --> $DIR/extern_crate_improper.rs:14:44
+   |
+LL |     pub fn non_exhaustive_normal_struct(_: NormalStruct);
+   |                                            ^^^^^^^^^^^^ not FFI-safe
+   |
+   = note: this struct is non-exhaustive
+
+error: `extern` block uses type `types::UnitStruct`, which is not FFI-safe
+  --> $DIR/extern_crate_improper.rs:16:42
+   |
+LL |     pub fn non_exhaustive_unit_struct(_: UnitStruct);
+   |                                          ^^^^^^^^^^ not FFI-safe
+   |
+   = note: this struct is non-exhaustive
+
+error: `extern` block uses type `types::TupleStruct`, which is not FFI-safe
+  --> $DIR/extern_crate_improper.rs:18:43
+   |
+LL |     pub fn non_exhaustive_tuple_struct(_: TupleStruct);
+   |                                           ^^^^^^^^^^^ not FFI-safe
+   |
+   = note: this struct is non-exhaustive
+
+error: `extern` block uses type `types::NonExhaustiveVariants`, which is not FFI-safe
+  --> $DIR/extern_crate_improper.rs:20:38
+   |
+LL |     pub fn non_exhaustive_variant(_: NonExhaustiveVariants);
+   |                                      ^^^^^^^^^^^^^^^^^^^^^ not FFI-safe
+   |
+   = note: this enum has non-exhaustive variants
+
+error: aborting due to 5 previous errors
+
diff --git a/src/test/ui/rfc-2008-non-exhaustive/improper_ctypes/same_crate_proper.rs b/src/test/ui/rfc-2008-non-exhaustive/improper_ctypes/same_crate_proper.rs
new file mode 100644 (file)
index 0000000..3f38e3c
--- /dev/null
@@ -0,0 +1,46 @@
+// check-pass
+#![feature(non_exhaustive)]
+#![deny(improper_ctypes)]
+
+// This test checks that non-exhaustive types with `#[repr(C)]` are considered proper within
+// the defining crate.
+
+#[non_exhaustive]
+#[repr(C)]
+pub enum NonExhaustiveEnum {
+    Unit,
+    Tuple(u32),
+    Struct { field: u32 }
+}
+
+#[non_exhaustive]
+#[repr(C)]
+pub struct NormalStruct {
+    pub first_field: u16,
+    pub second_field: u16,
+}
+
+#[non_exhaustive]
+#[repr(C)]
+pub struct UnitStruct;
+
+#[non_exhaustive]
+#[repr(C)]
+pub struct TupleStruct (pub u16, pub u16);
+
+#[repr(C)]
+pub enum NonExhaustiveVariants {
+    #[non_exhaustive] Unit,
+    #[non_exhaustive] Tuple(u32),
+    #[non_exhaustive] Struct { field: u32 }
+}
+
+extern {
+    // Unit structs aren't tested here because they will trigger `improper_ctypes` anyway.
+    pub fn non_exhaustive_enum(_: NonExhaustiveEnum);
+    pub fn non_exhaustive_normal_struct(_: NormalStruct);
+    pub fn non_exhaustive_tuple_struct(_: TupleStruct);
+    pub fn non_exhaustive_variant(_: NonExhaustiveVariants);
+}
+
+fn main() { }
index 94ac588d24083d92a8ee1a0370f7e6d469d9bbbe..cf383a260e044be784f32d9ae48d9707eeb18d09 100644 (file)
@@ -21,7 +21,7 @@ fn main() {
     //~^ ERROR expected function, found struct `TupleStruct` [E0423]
 
     let ts_explicit = structs::TupleStruct(640, 480);
-    //~^ ERROR tuple struct `TupleStruct` is private [E0603]
+    //~^ ERROR tuple struct constructor `TupleStruct` is private [E0603]
 
     let TupleStruct { 0: first_field, 1: second_field } = ts;
     //~^ ERROR `..` required with struct marked as non-exhaustive
index d75a376286fcc946f1ff9f39273ac3ca33f8c128..d3686a1b869619357613b5c101839b88cb8f0e13 100644 (file)
@@ -10,13 +10,16 @@ error[E0423]: expected value, found struct `UnitStruct`
 LL |     let us = UnitStruct;
    |              ^^^^^^^^^^ constructor is not visible here due to private fields
 
-error[E0603]: tuple struct `TupleStruct` is private
+error[E0603]: tuple struct constructor `TupleStruct` is private
   --> $DIR/struct.rs:23:32
    |
 LL |     let ts_explicit = structs::TupleStruct(640, 480);
    |                                ^^^^^^^^^^^
+   | 
+  ::: $DIR/auxiliary/structs.rs:13:24
    |
-   = note: a tuple struct constructor is private if any of its fields is private
+LL | pub struct TupleStruct(pub u16, pub u16);
+   |                        ---------------- a constructor is private if any of the fields is private
 
 error[E0603]: unit struct `UnitStruct` is private
   --> $DIR/struct.rs:32:32
index ac0025ec758072fa3ddee48e88085c9b9055a067..d9d6ea21b8bd4dccce0e8365873285e99ed9e6b6 100644 (file)
@@ -3,8 +3,6 @@ error[E0603]: tuple variant `Tuple` is private
    |
 LL |     let variant_tuple = NonExhaustiveVariants::Tuple(640);
    |                                                ^^^^^
-   |
-   = note: a tuple variant constructor is private if any of its fields is private
 
 error[E0603]: unit variant `Unit` is private
   --> $DIR/variant.rs:14:47
@@ -23,16 +21,12 @@ error[E0603]: tuple variant `Tuple` is private
    |
 LL |         NonExhaustiveVariants::Tuple(fe_tpl) => "",
    |                                ^^^^^
-   |
-   = note: a tuple variant constructor is private if any of its fields is private
 
 error[E0603]: tuple variant `Tuple` is private
   --> $DIR/variant.rs:26:35
    |
 LL |     if let NonExhaustiveVariants::Tuple(fe_tpl) = variant_struct {
    |                                   ^^^^^
-   |
-   = note: a tuple variant constructor is private if any of its fields is private
 
 error[E0639]: cannot create non-exhaustive variant using struct expression
   --> $DIR/variant.rs:8:26
diff --git a/src/test/ui/rfc-2091-track-caller/error-odd-syntax.rs b/src/test/ui/rfc-2091-track-caller/error-odd-syntax.rs
new file mode 100644 (file)
index 0000000..d400db8
--- /dev/null
@@ -0,0 +1,7 @@
+#![feature(track_caller)] //~ WARN the feature `track_caller` is incomplete
+
+#[track_caller(1)]
+fn f() {}
+//~^^ ERROR malformed `track_caller` attribute input
+
+fn main() {}
diff --git a/src/test/ui/rfc-2091-track-caller/error-odd-syntax.stderr b/src/test/ui/rfc-2091-track-caller/error-odd-syntax.stderr
new file mode 100644 (file)
index 0000000..a53a8ee
--- /dev/null
@@ -0,0 +1,16 @@
+error: malformed `track_caller` attribute input
+  --> $DIR/error-odd-syntax.rs:3:1
+   |
+LL | #[track_caller(1)]
+   | ^^^^^^^^^^^^^^^^^^ help: must be of the form: `#[track_caller]`
+
+warning: the feature `track_caller` is incomplete and may cause the compiler to crash
+  --> $DIR/error-odd-syntax.rs:1:12
+   |
+LL | #![feature(track_caller)]
+   |            ^^^^^^^^^^^^
+   |
+   = note: `#[warn(incomplete_features)]` on by default
+
+error: aborting due to previous error
+
diff --git a/src/test/ui/rfc-2091-track-caller/error-with-invalid-abi.rs b/src/test/ui/rfc-2091-track-caller/error-with-invalid-abi.rs
new file mode 100644 (file)
index 0000000..2994f3c
--- /dev/null
@@ -0,0 +1,7 @@
+#![feature(track_caller)] //~ WARN the feature `track_caller` is incomplete
+
+#[track_caller]
+extern "C" fn f() {}
+//~^^ ERROR rust ABI is required to use `#[track_caller]`
+
+fn main() {}
diff --git a/src/test/ui/rfc-2091-track-caller/error-with-invalid-abi.stderr b/src/test/ui/rfc-2091-track-caller/error-with-invalid-abi.stderr
new file mode 100644 (file)
index 0000000..a34acf3
--- /dev/null
@@ -0,0 +1,17 @@
+warning: the feature `track_caller` is incomplete and may cause the compiler to crash
+  --> $DIR/error-with-invalid-abi.rs:1:12
+   |
+LL | #![feature(track_caller)]
+   |            ^^^^^^^^^^^^
+   |
+   = note: `#[warn(incomplete_features)]` on by default
+
+error[E0737]: rust ABI is required to use `#[track_caller]`
+  --> $DIR/error-with-invalid-abi.rs:3:1
+   |
+LL | #[track_caller]
+   | ^^^^^^^^^^^^^^^
+
+error: aborting due to previous error
+
+For more information about this error, try `rustc --explain E0737`.
diff --git a/src/test/ui/rfc-2091-track-caller/error-with-naked.rs b/src/test/ui/rfc-2091-track-caller/error-with-naked.rs
new file mode 100644 (file)
index 0000000..bbbcec3
--- /dev/null
@@ -0,0 +1,8 @@
+#![feature(naked_functions, track_caller)] //~ WARN the feature `track_caller` is incomplete
+
+#[track_caller]
+#[naked]
+fn f() {}
+//~^^^ ERROR cannot use `#[track_caller]` with `#[naked]`
+
+fn main() {}
diff --git a/src/test/ui/rfc-2091-track-caller/error-with-naked.stderr b/src/test/ui/rfc-2091-track-caller/error-with-naked.stderr
new file mode 100644 (file)
index 0000000..93e6f7a
--- /dev/null
@@ -0,0 +1,17 @@
+warning: the feature `track_caller` is incomplete and may cause the compiler to crash
+  --> $DIR/error-with-naked.rs:1:29
+   |
+LL | #![feature(naked_functions, track_caller)]
+   |                             ^^^^^^^^^^^^
+   |
+   = note: `#[warn(incomplete_features)]` on by default
+
+error[E0736]: cannot use `#[track_caller]` with `#[naked]`
+  --> $DIR/error-with-naked.rs:3:1
+   |
+LL | #[track_caller]
+   | ^^^^^^^^^^^^^^^
+
+error: aborting due to previous error
+
+For more information about this error, try `rustc --explain E0736`.
diff --git a/src/test/ui/rfc-2091-track-caller/error-with-trait-decl.rs b/src/test/ui/rfc-2091-track-caller/error-with-trait-decl.rs
new file mode 100644 (file)
index 0000000..1cd45c8
--- /dev/null
@@ -0,0 +1,13 @@
+#![feature(track_caller)] //~ WARN the feature `track_caller` is incomplete
+
+trait Trait {
+    #[track_caller]
+    fn unwrap(&self);
+    //~^^ ERROR: `#[track_caller]` is not supported in trait declarations.
+}
+
+impl Trait for u64 {
+    fn unwrap(&self) {}
+}
+
+fn main() {}
diff --git a/src/test/ui/rfc-2091-track-caller/error-with-trait-decl.stderr b/src/test/ui/rfc-2091-track-caller/error-with-trait-decl.stderr
new file mode 100644 (file)
index 0000000..fb3732b
--- /dev/null
@@ -0,0 +1,17 @@
+warning: the feature `track_caller` is incomplete and may cause the compiler to crash
+  --> $DIR/error-with-trait-decl.rs:1:12
+   |
+LL | #![feature(track_caller)]
+   |            ^^^^^^^^^^^^
+   |
+   = note: `#[warn(incomplete_features)]` on by default
+
+error[E0738]: `#[track_caller]` is not supported in trait declarations.
+  --> $DIR/error-with-trait-decl.rs:4:5
+   |
+LL |     #[track_caller]
+   |     ^^^^^^^^^^^^^^^
+
+error: aborting due to previous error
+
+For more information about this error, try `rustc --explain E0738`.
diff --git a/src/test/ui/rfc-2091-track-caller/error-with-trait-default-impl.rs b/src/test/ui/rfc-2091-track-caller/error-with-trait-default-impl.rs
new file mode 100644 (file)
index 0000000..0f2020d
--- /dev/null
@@ -0,0 +1,9 @@
+#![feature(track_caller)] //~ WARN the feature `track_caller` is incomplete
+
+trait Trait {
+    #[track_caller]
+    fn unwrap(&self) {}
+    //~^^ ERROR: `#[track_caller]` is not supported in trait declarations.
+}
+
+fn main() {}
diff --git a/src/test/ui/rfc-2091-track-caller/error-with-trait-default-impl.stderr b/src/test/ui/rfc-2091-track-caller/error-with-trait-default-impl.stderr
new file mode 100644 (file)
index 0000000..c212a71
--- /dev/null
@@ -0,0 +1,17 @@
+warning: the feature `track_caller` is incomplete and may cause the compiler to crash
+  --> $DIR/error-with-trait-default-impl.rs:1:12
+   |
+LL | #![feature(track_caller)]
+   |            ^^^^^^^^^^^^
+   |
+   = note: `#[warn(incomplete_features)]` on by default
+
+error[E0738]: `#[track_caller]` is not supported in trait declarations.
+  --> $DIR/error-with-trait-default-impl.rs:4:5
+   |
+LL |     #[track_caller]
+   |     ^^^^^^^^^^^^^^^
+
+error: aborting due to previous error
+
+For more information about this error, try `rustc --explain E0738`.
diff --git a/src/test/ui/rfc-2091-track-caller/error-with-trait-fn-impl.rs b/src/test/ui/rfc-2091-track-caller/error-with-trait-fn-impl.rs
new file mode 100644 (file)
index 0000000..1378eba
--- /dev/null
@@ -0,0 +1,13 @@
+#![feature(track_caller)] //~ WARN the feature `track_caller` is incomplete
+
+trait Trait {
+    fn unwrap(&self);
+}
+
+impl Trait for u64 {
+    #[track_caller]
+    fn unwrap(&self) {}
+    //~^^ ERROR: `#[track_caller]` is not supported in traits yet.
+}
+
+fn main() {}
diff --git a/src/test/ui/rfc-2091-track-caller/error-with-trait-fn-impl.stderr b/src/test/ui/rfc-2091-track-caller/error-with-trait-fn-impl.stderr
new file mode 100644 (file)
index 0000000..2662fbf
--- /dev/null
@@ -0,0 +1,17 @@
+warning: the feature `track_caller` is incomplete and may cause the compiler to crash
+  --> $DIR/error-with-trait-fn-impl.rs:1:12
+   |
+LL | #![feature(track_caller)]
+   |            ^^^^^^^^^^^^
+   |
+   = note: `#[warn(incomplete_features)]` on by default
+
+error[E0738]: `#[track_caller]` is not supported in traits yet.
+  --> $DIR/error-with-trait-fn-impl.rs:8:5
+   |
+LL |     #[track_caller]
+   |     ^^^^^^^^^^^^^^^
+
+error: aborting due to previous error
+
+For more information about this error, try `rustc --explain E0738`.
diff --git a/src/test/ui/rfc-2091-track-caller/only-for-fns.rs b/src/test/ui/rfc-2091-track-caller/only-for-fns.rs
new file mode 100644 (file)
index 0000000..01ebf13
--- /dev/null
@@ -0,0 +1,7 @@
+#![feature(track_caller)] //~ WARN the feature `track_caller` is incomplete
+
+#[track_caller]
+struct S;
+//~^^ ERROR attribute should be applied to function
+
+fn main() {}
diff --git a/src/test/ui/rfc-2091-track-caller/only-for-fns.stderr b/src/test/ui/rfc-2091-track-caller/only-for-fns.stderr
new file mode 100644 (file)
index 0000000..3301da7
--- /dev/null
@@ -0,0 +1,18 @@
+warning: the feature `track_caller` is incomplete and may cause the compiler to crash
+  --> $DIR/only-for-fns.rs:1:12
+   |
+LL | #![feature(track_caller)]
+   |            ^^^^^^^^^^^^
+   |
+   = note: `#[warn(incomplete_features)]` on by default
+
+error[E0739]: attribute should be applied to function
+  --> $DIR/only-for-fns.rs:3:1
+   |
+LL | #[track_caller]
+   | ^^^^^^^^^^^^^^^
+LL | struct S;
+   | --------- not a function
+
+error: aborting due to previous error
+
diff --git a/src/test/ui/rfc-2091-track-caller/pass.rs b/src/test/ui/rfc-2091-track-caller/pass.rs
new file mode 100644 (file)
index 0000000..f2c3f0d
--- /dev/null
@@ -0,0 +1,9 @@
+// run-pass
+#![feature(track_caller)] //~ WARN the feature `track_caller` is incomplete
+
+#[track_caller]
+fn f() {}
+
+fn main() {
+    f();
+}
diff --git a/src/test/ui/rfc-2091-track-caller/pass.stderr b/src/test/ui/rfc-2091-track-caller/pass.stderr
new file mode 100644 (file)
index 0000000..b1fd23a
--- /dev/null
@@ -0,0 +1,8 @@
+warning: the feature `track_caller` is incomplete and may cause the compiler to crash
+  --> $DIR/pass.rs:2:12
+   |
+LL | #![feature(track_caller)]
+   |            ^^^^^^^^^^^^
+   |
+   = note: `#[warn(incomplete_features)]` on by default
+
index be8b5c6446cadd2d03fd03d462343940ea648718..6efc1176d05b84e9497c2dded93319511bd727b9 100644 (file)
@@ -4,12 +4,12 @@ error[E0491]: in type `&'a rev_variant_struct_region::Foo<'b>`, reference has a
 LL |         type Out = &'a Foo<'b>;
    |         ^^^^^^^^^^^^^^^^^^^^^^^
    |
-note: the pointer is valid for the lifetime 'a as defined on the impl at 16:10
+note: the pointer is valid for the lifetime `'a` as defined on the impl at 16:10
   --> $DIR/regions-outlives-nominal-type-region-rev.rs:16:10
    |
 LL |     impl<'a, 'b> Trait<'a, 'b> for usize {
    |          ^^
-note: but the referenced data is only valid for the lifetime 'b as defined on the impl at 16:14
+note: but the referenced data is only valid for the lifetime `'b` as defined on the impl at 16:14
   --> $DIR/regions-outlives-nominal-type-region-rev.rs:16:14
    |
 LL |     impl<'a, 'b> Trait<'a, 'b> for usize {
index 9a3ba2d65cad76e22cd78b6eef55bb383dd3e32e..06e5f24dec97014cfa2f50ca81d481a3f3058cc7 100644 (file)
@@ -4,12 +4,12 @@ error[E0491]: in type `&'a variant_struct_region::Foo<'b>`, reference has a long
 LL |         type Out = &'a Foo<'b>;
    |         ^^^^^^^^^^^^^^^^^^^^^^^
    |
-note: the pointer is valid for the lifetime 'a as defined on the impl at 16:10
+note: the pointer is valid for the lifetime `'a` as defined on the impl at 16:10
   --> $DIR/regions-outlives-nominal-type-region.rs:16:10
    |
 LL |     impl<'a, 'b> Trait<'a, 'b> for usize {
    |          ^^
-note: but the referenced data is only valid for the lifetime 'b as defined on the impl at 16:14
+note: but the referenced data is only valid for the lifetime `'b` as defined on the impl at 16:14
   --> $DIR/regions-outlives-nominal-type-region.rs:16:14
    |
 LL |     impl<'a, 'b> Trait<'a, 'b> for usize {
index 5389beea3a70c6df25d4b076b9fe4771e06a0eda..d02f7b796218499bb1b208c133096fbe3e365a80 100644 (file)
@@ -4,12 +4,12 @@ error[E0491]: in type `&'a variant_struct_type::Foo<&'b i32>`, reference has a l
 LL |         type Out = &'a Foo<&'b i32>;
    |         ^^^^^^^^^^^^^^^^^^^^^^^^^^^^
    |
-note: the pointer is valid for the lifetime 'a as defined on the impl at 16:10
+note: the pointer is valid for the lifetime `'a` as defined on the impl at 16:10
   --> $DIR/regions-outlives-nominal-type-type-rev.rs:16:10
    |
 LL |     impl<'a, 'b> Trait<'a, 'b> for usize {
    |          ^^
-note: but the referenced data is only valid for the lifetime 'b as defined on the impl at 16:14
+note: but the referenced data is only valid for the lifetime `'b` as defined on the impl at 16:14
   --> $DIR/regions-outlives-nominal-type-type-rev.rs:16:14
    |
 LL |     impl<'a, 'b> Trait<'a, 'b> for usize {
index 2f3ef48a0544160c4ea6a1a889a90507a155b06a..40c70f53245cfeeeeb0d45bd6402ae40c37dbf9c 100644 (file)
@@ -4,12 +4,12 @@ error[E0491]: in type `&'a variant_struct_type::Foo<&'b i32>`, reference has a l
 LL |         type Out = &'a Foo<&'b i32>;
    |         ^^^^^^^^^^^^^^^^^^^^^^^^^^^^
    |
-note: the pointer is valid for the lifetime 'a as defined on the impl at 16:10
+note: the pointer is valid for the lifetime `'a` as defined on the impl at 16:10
   --> $DIR/regions-outlives-nominal-type-type.rs:16:10
    |
 LL |     impl<'a, 'b> Trait<'a, 'b> for usize {
    |          ^^
-note: but the referenced data is only valid for the lifetime 'b as defined on the impl at 16:14
+note: but the referenced data is only valid for the lifetime `'b` as defined on the impl at 16:14
   --> $DIR/regions-outlives-nominal-type-type.rs:16:14
    |
 LL |     impl<'a, 'b> Trait<'a, 'b> for usize {
index 5a11c5fb95fefc7191010bf9ac221de74a47ede2..825c1015c51d7e37c5543e797d8ae51b7c059f66 100644 (file)
@@ -32,12 +32,12 @@ error[E0491]: in type `&'a &'b T`, reference has a longer lifetime than the data
 LL |     type Out = &'a &'b T;
    |     ^^^^^^^^^^^^^^^^^^^^^
    |
-note: the pointer is valid for the lifetime 'a as defined on the impl at 24:6
+note: the pointer is valid for the lifetime `'a` as defined on the impl at 24:6
   --> $DIR/regions-struct-not-wf.rs:24:6
    |
 LL | impl<'a, 'b, T> Trait1<'a, 'b, T> for u32 {
    |      ^^
-note: but the referenced data is only valid for the lifetime 'b as defined on the impl at 24:10
+note: but the referenced data is only valid for the lifetime `'b` as defined on the impl at 24:10
   --> $DIR/regions-struct-not-wf.rs:24:10
    |
 LL | impl<'a, 'b, T> Trait1<'a, 'b, T> for u32 {
index 4edc00efc7e722cb37a4bfde5984912ef9c80f2e..ad4686c1915d67de9ef1e40b89cff1bca1e80770 100644 (file)
@@ -517,7 +517,10 @@ error[E0308]: mismatched types
   --> $DIR/disallowed-positions.rs:32:8
    |
 LL |     if &let 0 = 0 {}
-   |        ^^^^^^^^^^ expected bool, found &bool
+   |        ^^^^^^^^^^
+   |        |
+   |        expected bool, found &bool
+   |        help: consider removing the borrow: `let 0 = 0`
    |
    = note: expected type `bool`
               found type `&bool`
@@ -702,7 +705,10 @@ error[E0308]: mismatched types
   --> $DIR/disallowed-positions.rs:96:11
    |
 LL |     while &let 0 = 0 {}
-   |           ^^^^^^^^^^ expected bool, found &bool
+   |           ^^^^^^^^^^
+   |           |
+   |           expected bool, found &bool
+   |           help: consider removing the borrow: `let 0 = 0`
    |
    = note: expected type `bool`
               found type `&bool`
diff --git a/src/test/ui/rfc-2627-raw-dylib/feature-gate-raw-dylib-2.rs b/src/test/ui/rfc-2627-raw-dylib/feature-gate-raw-dylib-2.rs
new file mode 100644 (file)
index 0000000..14345ba
--- /dev/null
@@ -0,0 +1,8 @@
+#[link(name="foo")]
+extern {
+    #[link_ordinal(42)]
+    //~^ ERROR: the `#[link_ordinal]` attribute is an experimental feature
+    fn foo();
+}
+
+fn main() {}
diff --git a/src/test/ui/rfc-2627-raw-dylib/feature-gate-raw-dylib-2.stderr b/src/test/ui/rfc-2627-raw-dylib/feature-gate-raw-dylib-2.stderr
new file mode 100644 (file)
index 0000000..0869d7a
--- /dev/null
@@ -0,0 +1,12 @@
+error[E0658]: the `#[link_ordinal]` attribute is an experimental feature
+  --> $DIR/feature-gate-raw-dylib-2.rs:3:5
+   |
+LL |     #[link_ordinal(42)]
+   |     ^^^^^^^^^^^^^^^^^^^
+   |
+   = note: for more information, see https://github.com/rust-lang/rust/issues/58713
+   = help: add `#![feature(raw_dylib)]` to the crate attributes to enable
+
+error: aborting due to previous error
+
+For more information about this error, try `rustc --explain E0658`.
diff --git a/src/test/ui/rfc-2627-raw-dylib/feature-gate-raw-dylib.rs b/src/test/ui/rfc-2627-raw-dylib/feature-gate-raw-dylib.rs
new file mode 100644 (file)
index 0000000..f0f83e0
--- /dev/null
@@ -0,0 +1,5 @@
+#[link(name="foo", kind="raw-dylib")]
+//~^ ERROR: kind="raw-dylib" is unstable
+extern {}
+
+fn main() {}
diff --git a/src/test/ui/rfc-2627-raw-dylib/feature-gate-raw-dylib.stderr b/src/test/ui/rfc-2627-raw-dylib/feature-gate-raw-dylib.stderr
new file mode 100644 (file)
index 0000000..0ca9de2
--- /dev/null
@@ -0,0 +1,12 @@
+error[E0658]: kind="raw-dylib" is unstable
+  --> $DIR/feature-gate-raw-dylib.rs:1:1
+   |
+LL | #[link(name="foo", kind="raw-dylib")]
+   | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+   |
+   = note: for more information, see https://github.com/rust-lang/rust/issues/58713
+   = help: add `#![feature(raw_dylib)]` to the crate attributes to enable
+
+error: aborting due to previous error
+
+For more information about this error, try `rustc --explain E0658`.
diff --git a/src/test/ui/rfc-2627-raw-dylib/link-ordinal-and-name.rs b/src/test/ui/rfc-2627-raw-dylib/link-ordinal-and-name.rs
new file mode 100644 (file)
index 0000000..5769366
--- /dev/null
@@ -0,0 +1,12 @@
+#![feature(raw_dylib)]
+//~^ WARN the feature `raw_dylib` is incomplete and may cause the compiler to crash
+
+#[link(name="foo")]
+extern {
+    #[link_name="foo"]
+    #[link_ordinal(42)]
+    //~^ ERROR cannot use `#[link_name]` with `#[link_ordinal]`
+    fn foo();
+}
+
+fn main() {}
diff --git a/src/test/ui/rfc-2627-raw-dylib/link-ordinal-and-name.stderr b/src/test/ui/rfc-2627-raw-dylib/link-ordinal-and-name.stderr
new file mode 100644 (file)
index 0000000..303a1c0
--- /dev/null
@@ -0,0 +1,16 @@
+warning: the feature `raw_dylib` is incomplete and may cause the compiler to crash
+  --> $DIR/link-ordinal-and-name.rs:1:12
+   |
+LL | #![feature(raw_dylib)]
+   |            ^^^^^^^^^
+   |
+   = note: `#[warn(incomplete_features)]` on by default
+
+error: cannot use `#[link_name]` with `#[link_ordinal]`
+  --> $DIR/link-ordinal-and-name.rs:7:5
+   |
+LL |     #[link_ordinal(42)]
+   |     ^^^^^^^^^^^^^^^^^^^
+
+error: aborting due to previous error
+
diff --git a/src/test/ui/rfc-2627-raw-dylib/link-ordinal-invalid-format.rs b/src/test/ui/rfc-2627-raw-dylib/link-ordinal-invalid-format.rs
new file mode 100644 (file)
index 0000000..82fb115
--- /dev/null
@@ -0,0 +1,11 @@
+#![feature(raw_dylib)]
+//~^ WARN the feature `raw_dylib` is incomplete and may cause the compiler to crash
+
+#[link(name="foo")]
+extern {
+    #[link_ordinal("JustMonika")]
+    //~^ ERROR illegal ordinal format in `link_ordinal`
+    fn foo();
+}
+
+fn main() {}
diff --git a/src/test/ui/rfc-2627-raw-dylib/link-ordinal-invalid-format.stderr b/src/test/ui/rfc-2627-raw-dylib/link-ordinal-invalid-format.stderr
new file mode 100644 (file)
index 0000000..14556a7
--- /dev/null
@@ -0,0 +1,18 @@
+warning: the feature `raw_dylib` is incomplete and may cause the compiler to crash
+  --> $DIR/link-ordinal-invalid-format.rs:1:12
+   |
+LL | #![feature(raw_dylib)]
+   |            ^^^^^^^^^
+   |
+   = note: `#[warn(incomplete_features)]` on by default
+
+error: illegal ordinal format in `link_ordinal`
+  --> $DIR/link-ordinal-invalid-format.rs:6:5
+   |
+LL |     #[link_ordinal("JustMonika")]
+   |     ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+   |
+   = note: an unsuffixed integer value, e.g., `1`, is expected
+
+error: aborting due to previous error
+
diff --git a/src/test/ui/rfc-2627-raw-dylib/link-ordinal-too-large.rs b/src/test/ui/rfc-2627-raw-dylib/link-ordinal-too-large.rs
new file mode 100644 (file)
index 0000000..69596ad
--- /dev/null
@@ -0,0 +1,11 @@
+#![feature(raw_dylib)]
+//~^ WARN the feature `raw_dylib` is incomplete and may cause the compiler to crash
+
+#[link(name="foo")]
+extern {
+    #[link_ordinal(18446744073709551616)]
+    //~^ ERROR ordinal value in `link_ordinal` is too large: `18446744073709551616`
+    fn foo();
+}
+
+fn main() {}
diff --git a/src/test/ui/rfc-2627-raw-dylib/link-ordinal-too-large.stderr b/src/test/ui/rfc-2627-raw-dylib/link-ordinal-too-large.stderr
new file mode 100644 (file)
index 0000000..b3b22f9
--- /dev/null
@@ -0,0 +1,18 @@
+warning: the feature `raw_dylib` is incomplete and may cause the compiler to crash
+  --> $DIR/link-ordinal-too-large.rs:1:12
+   |
+LL | #![feature(raw_dylib)]
+   |            ^^^^^^^^^
+   |
+   = note: `#[warn(incomplete_features)]` on by default
+
+error: ordinal value in `link_ordinal` is too large: `18446744073709551616`
+  --> $DIR/link-ordinal-too-large.rs:6:5
+   |
+LL |     #[link_ordinal(18446744073709551616)]
+   |     ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+   |
+   = note: the value may not exceed `std::usize::MAX`
+
+error: aborting due to previous error
+
index 580e67513b3965936be67f17d5e8fdeb7da41f95..f8abd1b96d80e85ff959146c4f895107b28bae3b 100644 (file)
@@ -3,7 +3,7 @@ pub fn main() {
     let i = 5;
     match &&&&i {
         1 ..= 3 => panic!(),
-        3 ..= 8 => {},
+        4 ..= 8 => {},
         _ => panic!(),
     }
 }
index 030fa56dcff2fe9ab6d345a75e96b4a2792665bb..973c486970e6577b8ea0ae132d1fc9e250d7982a 100644 (file)
@@ -13,7 +13,7 @@ LL | use std::collections::hash_map::Drain;
    |
 LL | use std::collections::hash_set::Drain;
    |
-and 3 other candidates
+     and 3 other candidates
 
 error: aborting due to previous error
 
diff --git a/src/test/ui/save-analysis/issue-64659.rs b/src/test/ui/save-analysis/issue-64659.rs
new file mode 100644 (file)
index 0000000..a3d88a2
--- /dev/null
@@ -0,0 +1,10 @@
+// check-pass
+// compile-flags: -Zsave-analysis
+
+trait Trait { type Assoc; }
+
+fn main() {
+    struct Data<T: Trait> {
+        x: T::Assoc,
+    }
+}
diff --git a/src/test/ui/save-analysis/issue-65411.rs b/src/test/ui/save-analysis/issue-65411.rs
new file mode 100644 (file)
index 0000000..9e58b8d
--- /dev/null
@@ -0,0 +1,15 @@
+// check-pass
+// compile-flags: -Zsave-analysis
+
+trait Trait { type Assoc; }
+trait GenericTrait<T> {}
+struct Wrapper<B> { b: B }
+
+fn func() {
+    // Processing associated path in impl block definition inside a function
+    // body does not ICE
+    impl<B: Trait> GenericTrait<B::Assoc> for Wrapper<B> {}
+}
+
+
+fn main() {}
index 2fb152475a1effdf8332d70448c18a5c13e4ecd8..bce1900ca602c41e14ea4195e6cbc345531320e3 100644 (file)
@@ -6,12 +6,12 @@ LL |     async fn f(self: Pin<&Self>) -> impl Clone { self }
    |                |
    |                ...but this borrow...
    |
-note: ...can't outlive the lifetime '_ as defined on the method body at 8:26
+note: ...can't outlive the lifetime `'_` as defined on the method body at 8:26
   --> $DIR/arbitrary_self_types_pin_lifetime_impl_trait-async.rs:8:26
    |
 LL |     async fn f(self: Pin<&Self>) -> impl Clone { self }
    |                          ^
-help: you can add a constraint to the return type to make it last less than `'static` and match the lifetime '_ as defined on the method body at 8:26
+help: you can add a constraint to the return type to make it last less than `'static` and match the lifetime `'_` as defined on the method body at 8:26
    |
 LL |     async fn f(self: Pin<&Self>) -> impl Clone + '_ { self }
    |                                     ^^^^^^^^^^^^^^^
index 5cf975b5752f0e54c198792df47025ca627268d7..1e5555355c3b319d3d6586d4319fd2d2e44d4d6d 100644 (file)
@@ -22,7 +22,9 @@ pub trait Bar {
     fn bar(&self) -> i32 { 0 }
 }
 
-impl<T> Bar for T {} // use the provided method
+impl<T> Bar for T {
+    default fn bar(&self) -> i32 { 0 }
+}
 
 impl Bar for i32 {
     fn bar(&self) -> i32 { 1 }
index 45951561e7264a9af8ed0411396f4191e8292d8d..5275b7b1ddfa57e11fa37710d48c8ffc448a76b9 100644 (file)
@@ -2,9 +2,9 @@ error[E0277]: the trait bound `U: std::cmp::Eq` is not satisfied
   --> $DIR/specialization-wfcheck.rs:7:17
    |
 LL | default impl<U> Foo<'static, U> for () {}
-   |                 ^^^^^^^^^^^^^^^ the trait `std::cmp::Eq` is not implemented for `U`
-   |
-   = help: consider adding a `where U: std::cmp::Eq` bound
+   |              -  ^^^^^^^^^^^^^^^ the trait `std::cmp::Eq` is not implemented for `U`
+   |              |
+   |              help: consider restricting this bound: `U: std::cmp::Eq`
 
 error: aborting due to previous error
 
index 36cb939bc48fbdb069d08c28b966c5fa67e50746..9546a5dd5f51699d64f466a3c9c94b2c4198ad2c 100644 (file)
@@ -13,6 +13,10 @@ impl<'a, I, T: 'a> Iterator for Cloned<I>
     fn next(&mut self) -> Option<T> {
         unimplemented!()
     }
+
+    default fn count(self) -> usize where Self: Sized {
+        self.fold(0, |cnt, _| cnt + 1)
+    }
 }
 
 impl<'a, I, T: 'a> Iterator for Cloned<I>
diff --git a/src/test/ui/specialization/non-defaulted-item-fail.rs b/src/test/ui/specialization/non-defaulted-item-fail.rs
new file mode 100644 (file)
index 0000000..403f718
--- /dev/null
@@ -0,0 +1,53 @@
+#![feature(specialization, associated_type_defaults)]
+
+// Test that attempting to override a non-default method or one not in the
+// parent impl causes an error.
+
+trait Foo {
+    type Ty = ();
+    const CONST: u8 = 123;
+    fn foo(&self) -> bool { true }
+}
+
+// Specialization tree for Foo:
+//
+//       Box<T>              Vec<T>
+//        / \                 / \
+// Box<i32>  Box<i64>   Vec<()>  Vec<bool>
+
+impl<T> Foo for Box<T> {
+    type Ty = bool;
+    const CONST: u8 = 0;
+    fn foo(&self) -> bool { false }
+}
+
+// Allowed
+impl Foo for Box<i32> {}
+
+// Can't override a non-`default` fn
+impl Foo for Box<i64> {
+    type Ty = Vec<()>;
+//~^ error: `Ty` specializes an item from a parent `impl`, but that item is not marked `default`
+    const CONST: u8 = 42;
+//~^ error: `CONST` specializes an item from a parent `impl`, but that item is not marked `default`
+    fn foo(&self) -> bool { true }
+//~^ error: `foo` specializes an item from a parent `impl`, but that item is not marked `default`
+}
+
+
+// Doesn't mention the item = provided body/value is used and the method is final.
+impl<T> Foo for Vec<T> {}
+
+// Allowed
+impl Foo for Vec<()> {}
+
+impl Foo for Vec<bool> {
+    type Ty = Vec<()>;
+//~^ error: `Ty` specializes an item from a parent `impl`, but that item is not marked `default`
+    const CONST: u8 = 42;
+//~^ error: `CONST` specializes an item from a parent `impl`, but that item is not marked `default`
+    fn foo(&self) -> bool { true }
+//~^ error: `foo` specializes an item from a parent `impl`, but that item is not marked `default`
+}
+
+fn main() {}
diff --git a/src/test/ui/specialization/non-defaulted-item-fail.stderr b/src/test/ui/specialization/non-defaulted-item-fail.stderr
new file mode 100644 (file)
index 0000000..e6c5fc1
--- /dev/null
@@ -0,0 +1,81 @@
+error[E0520]: `Ty` specializes an item from a parent `impl`, but that item is not marked `default`
+  --> $DIR/non-defaulted-item-fail.rs:29:5
+   |
+LL | / impl<T> Foo for Box<T> {
+LL | |     type Ty = bool;
+LL | |     const CONST: u8 = 0;
+LL | |     fn foo(&self) -> bool { false }
+LL | | }
+   | |_- parent `impl` is here
+...
+LL |       type Ty = Vec<()>;
+   |       ^^^^^^^^^^^^^^^^^^ cannot specialize default item `Ty`
+   |
+   = note: to specialize, `Ty` in the parent `impl` must be marked `default`
+
+error[E0520]: `CONST` specializes an item from a parent `impl`, but that item is not marked `default`
+  --> $DIR/non-defaulted-item-fail.rs:31:5
+   |
+LL | / impl<T> Foo for Box<T> {
+LL | |     type Ty = bool;
+LL | |     const CONST: u8 = 0;
+LL | |     fn foo(&self) -> bool { false }
+LL | | }
+   | |_- parent `impl` is here
+...
+LL |       const CONST: u8 = 42;
+   |       ^^^^^^^^^^^^^^^^^^^^^ cannot specialize default item `CONST`
+   |
+   = note: to specialize, `CONST` in the parent `impl` must be marked `default`
+
+error[E0520]: `foo` specializes an item from a parent `impl`, but that item is not marked `default`
+  --> $DIR/non-defaulted-item-fail.rs:33:5
+   |
+LL | / impl<T> Foo for Box<T> {
+LL | |     type Ty = bool;
+LL | |     const CONST: u8 = 0;
+LL | |     fn foo(&self) -> bool { false }
+LL | | }
+   | |_- parent `impl` is here
+...
+LL |       fn foo(&self) -> bool { true }
+   |       ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ cannot specialize default item `foo`
+   |
+   = note: to specialize, `foo` in the parent `impl` must be marked `default`
+
+error[E0520]: `Ty` specializes an item from a parent `impl`, but that item is not marked `default`
+  --> $DIR/non-defaulted-item-fail.rs:45:5
+   |
+LL | impl<T> Foo for Vec<T> {}
+   | ------------------------- parent `impl` is here
+...
+LL |     type Ty = Vec<()>;
+   |     ^^^^^^^^^^^^^^^^^^ cannot specialize default item `Ty`
+   |
+   = note: to specialize, `Ty` in the parent `impl` must be marked `default`
+
+error[E0520]: `CONST` specializes an item from a parent `impl`, but that item is not marked `default`
+  --> $DIR/non-defaulted-item-fail.rs:47:5
+   |
+LL | impl<T> Foo for Vec<T> {}
+   | ------------------------- parent `impl` is here
+...
+LL |     const CONST: u8 = 42;
+   |     ^^^^^^^^^^^^^^^^^^^^^ cannot specialize default item `CONST`
+   |
+   = note: to specialize, `CONST` in the parent `impl` must be marked `default`
+
+error[E0520]: `foo` specializes an item from a parent `impl`, but that item is not marked `default`
+  --> $DIR/non-defaulted-item-fail.rs:49:5
+   |
+LL | impl<T> Foo for Vec<T> {}
+   | ------------------------- parent `impl` is here
+...
+LL |     fn foo(&self) -> bool { true }
+   |     ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ cannot specialize default item `foo`
+   |
+   = note: to specialize, `foo` in the parent `impl` must be marked `default`
+
+error: aborting due to 6 previous errors
+
+For more information about this error, try `rustc --explain E0520`.
index 5d65a0457e7916820df2618fb6babe28bbd66cbf..9ae3d1e9f3931cb58b3dd1f0e595a1a306903b4c 100644 (file)
@@ -55,8 +55,9 @@ fn bar(&self) -> i32 { 0 }
 //                   /  \
 //            Vec<i32>  $Vec<i64>
 
-// use the provided method
-impl<T> Bar for T {}
+impl<T> Bar for T {
+    default fn bar(&self) -> i32 { 0 }
+}
 
 impl Bar for i32 {
     fn bar(&self) -> i32 { 1 }
index ced78c03e09d64656ddb8c8cc527d7a1445dd7a2..99235e26e15e7a5a23afefd80b52671ebc3e877d 100644 (file)
@@ -6,3 +6,4 @@ LL |     static || {};
 
 error: aborting due to previous error
 
+For more information about this error, try `rustc --explain E0697`.
index 8516ac07b6cf3b6f87e2bf522cf065d089d22691..bda325dc0116045470e27a96e45ab553eff50bac 100644 (file)
@@ -4,7 +4,7 @@ error[E0478]: lifetime bound not satisfied
 LL | impl<'a, A: Clone> Arbitrary for ::std::borrow::Cow<'a, A> {}
    |                    ^^^^^^^^^
    |
-note: lifetime parameter instantiated with the lifetime 'a as defined on the impl at 3:6
+note: lifetime parameter instantiated with the lifetime `'a` as defined on the impl at 3:6
   --> $DIR/static-lifetime.rs:3:6
    |
 LL | impl<'a, A: Clone> Arbitrary for ::std::borrow::Cow<'a, A> {}
diff --git a/src/test/ui/suggestions/constrain-trait.fixed b/src/test/ui/suggestions/constrain-trait.fixed
new file mode 100644 (file)
index 0000000..dda9e93
--- /dev/null
@@ -0,0 +1,47 @@
+// run-rustfix
+// check-only
+
+#[derive(Debug)]
+struct Demo {
+    a: String
+}
+
+trait GetString {
+    fn get_a(&self) -> &String;
+}
+
+trait UseString: std::fmt::Debug + GetString {
+    fn use_string(&self) {
+        println!("{:?}", self.get_a()); //~ ERROR no method named `get_a` found for type `&Self`
+    }
+}
+
+trait UseString2: GetString {
+    fn use_string(&self) {
+        println!("{:?}", self.get_a()); //~ ERROR no method named `get_a` found for type `&Self`
+    }
+}
+
+impl GetString for Demo {
+    fn get_a(&self) -> &String {
+        &self.a
+    }
+}
+
+impl UseString for Demo {}
+impl UseString2 for Demo {}
+
+
+#[cfg(test)]
+mod tests {
+    use crate::{Demo, UseString};
+
+    #[test]
+    fn it_works() {
+        let d = Demo { a: "test".to_string() };
+        d.use_string();
+    }
+}
+
+
+fn main() {}
diff --git a/src/test/ui/suggestions/constrain-trait.rs b/src/test/ui/suggestions/constrain-trait.rs
new file mode 100644 (file)
index 0000000..4ef0eff
--- /dev/null
@@ -0,0 +1,47 @@
+// run-rustfix
+// check-only
+
+#[derive(Debug)]
+struct Demo {
+    a: String
+}
+
+trait GetString {
+    fn get_a(&self) -> &String;
+}
+
+trait UseString: std::fmt::Debug {
+    fn use_string(&self) {
+        println!("{:?}", self.get_a()); //~ ERROR no method named `get_a` found for type `&Self`
+    }
+}
+
+trait UseString2 {
+    fn use_string(&self) {
+        println!("{:?}", self.get_a()); //~ ERROR no method named `get_a` found for type `&Self`
+    }
+}
+
+impl GetString for Demo {
+    fn get_a(&self) -> &String {
+        &self.a
+    }
+}
+
+impl UseString for Demo {}
+impl UseString2 for Demo {}
+
+
+#[cfg(test)]
+mod tests {
+    use crate::{Demo, UseString};
+
+    #[test]
+    fn it_works() {
+        let d = Demo { a: "test".to_string() };
+        d.use_string();
+    }
+}
+
+
+fn main() {}
diff --git a/src/test/ui/suggestions/constrain-trait.stderr b/src/test/ui/suggestions/constrain-trait.stderr
new file mode 100644 (file)
index 0000000..3cc351a
--- /dev/null
@@ -0,0 +1,27 @@
+error[E0599]: no method named `get_a` found for type `&Self` in the current scope
+  --> $DIR/constrain-trait.rs:15:31
+   |
+LL |         println!("{:?}", self.get_a());
+   |                               ^^^^^ method not found in `&Self`
+   |
+   = help: items from traits can only be used if the type parameter is bounded by the trait
+help: the following trait defines an item `get_a`, perhaps you need to add another supertrait for it:
+   |
+LL | trait UseString: std::fmt::Debug + GetString {
+   |                                  ^^^^^^^^^^^
+
+error[E0599]: no method named `get_a` found for type `&Self` in the current scope
+  --> $DIR/constrain-trait.rs:21:31
+   |
+LL |         println!("{:?}", self.get_a());
+   |                               ^^^^^ method not found in `&Self`
+   |
+   = help: items from traits can only be used if the type parameter is bounded by the trait
+help: the following trait defines an item `get_a`, perhaps you need to add a supertrait for it:
+   |
+LL | trait UseString2: GetString {
+   |                 ^^^^^^^^^^^
+
+error: aborting due to 2 previous errors
+
+For more information about this error, try `rustc --explain E0599`.
diff --git a/src/test/ui/suggestions/imm-ref-trait-object-literal.rs b/src/test/ui/suggestions/imm-ref-trait-object-literal.rs
new file mode 100644 (file)
index 0000000..22ca6dd
--- /dev/null
@@ -0,0 +1,14 @@
+trait Trait {}
+
+struct S;
+
+impl<'a> Trait for &'a mut S {}
+
+fn foo<X: Trait>(_: X) {}
+
+
+fn main() {
+  let s = S;
+  foo(&s); //~ ERROR the trait bound `&S: Trait` is not satisfied
+  foo(s); //~ ERROR the trait bound `S: Trait` is not satisfied
+}
diff --git a/src/test/ui/suggestions/imm-ref-trait-object-literal.stderr b/src/test/ui/suggestions/imm-ref-trait-object-literal.stderr
new file mode 100644 (file)
index 0000000..ccaceef
--- /dev/null
@@ -0,0 +1,30 @@
+error[E0277]: the trait bound `&S: Trait` is not satisfied
+  --> $DIR/imm-ref-trait-object-literal.rs:12:7
+   |
+LL | fn foo<X: Trait>(_: X) {}
+   |    ---    ----- required by this bound in `foo`
+...
+LL |   foo(&s);
+   |       -^
+   |       |
+   |       the trait `Trait` is not implemented for `&S`
+   |       help: consider changing this borrow's mutability: `&mut`
+   |
+   = help: the following implementations were found:
+             <&'a mut S as Trait>
+
+error[E0277]: the trait bound `S: Trait` is not satisfied
+  --> $DIR/imm-ref-trait-object-literal.rs:13:7
+   |
+LL | fn foo<X: Trait>(_: X) {}
+   |    ---    ----- required by this bound in `foo`
+...
+LL |   foo(s);
+   |       ^ the trait `Trait` is not implemented for `S`
+   |
+   = help: the following implementations were found:
+             <&'a mut S as Trait>
+
+error: aborting due to 2 previous errors
+
+For more information about this error, try `rustc --explain E0277`.
diff --git a/src/test/ui/suggestions/imm-ref-trait-object.rs b/src/test/ui/suggestions/imm-ref-trait-object.rs
new file mode 100644 (file)
index 0000000..288d6c6
--- /dev/null
@@ -0,0 +1,8 @@
+fn test(t: &dyn Iterator<Item=&u64>) -> u64 {
+     t.min().unwrap() //~ ERROR the `min` method cannot be invoked on a trait object
+}
+
+fn main() {
+     let array = [0u64];
+     test(&mut array.iter());
+}
diff --git a/src/test/ui/suggestions/imm-ref-trait-object.stderr b/src/test/ui/suggestions/imm-ref-trait-object.stderr
new file mode 100644 (file)
index 0000000..9185eaa
--- /dev/null
@@ -0,0 +1,10 @@
+error: the `min` method cannot be invoked on a trait object
+  --> $DIR/imm-ref-trait-object.rs:2:8
+   |
+LL |      t.min().unwrap()
+   |        ^^^
+   |
+   = note: you need `&mut dyn std::iter::Iterator<Item = &u64>` instead of `&dyn std::iter::Iterator<Item = &u64>`
+
+error: aborting due to previous error
+
index fb3e1096ad54cbf86589f8e407e01a9e31b56da5..a1e1f4d13572a703736c33b7dbf8cdb5d53bfff4 100644 (file)
@@ -8,6 +8,7 @@ LL |     foo(String::new());
    |     ^^^ the trait `std::convert::From<std::string::String>` is not implemented for `&str`
    |
    = note: to coerce a `std::string::String` into a `&str`, use `&*` as a prefix
+   = note: `std::convert::From<std::string::String>` is implemented for `&mut str`, but not for `&str`
    = note: required because of the requirements on the impl of `std::convert::Into<&str>` for `std::string::String`
 
 error: aborting due to previous error
diff --git a/src/test/ui/suggestions/missing-assoc-type-bound-restriction.rs b/src/test/ui/suggestions/missing-assoc-type-bound-restriction.rs
new file mode 100644 (file)
index 0000000..265ccb3
--- /dev/null
@@ -0,0 +1,25 @@
+// Running rustfix would cause the same suggestion to be applied multiple times, which results in
+// invalid code.
+
+trait Parent {
+    type Ty;
+    type Assoc: Child<Self::Ty>;
+}
+
+trait Child<T> {}
+
+struct ChildWrapper<T>(T);
+
+impl<A, T> Child<A> for ChildWrapper<T> where T: Child<A> {}
+
+struct ParentWrapper<T>(T);
+
+impl<A, T: Parent<Ty = A>> Parent for ParentWrapper<T> {
+    //~^ ERROR the trait bound `<T as Parent>::Assoc: Child<A>` is not satisfied
+    //~| ERROR the trait bound `<T as Parent>::Assoc: Child<A>` is not satisfied
+    type Ty = A;
+    type Assoc = ChildWrapper<T::Assoc>;
+    //~^ ERROR the trait bound `<T as Parent>::Assoc: Child<A>` is not satisfied
+}
+
+fn main() {}
diff --git a/src/test/ui/suggestions/missing-assoc-type-bound-restriction.stderr b/src/test/ui/suggestions/missing-assoc-type-bound-restriction.stderr
new file mode 100644 (file)
index 0000000..bdea8ab
--- /dev/null
@@ -0,0 +1,43 @@
+error[E0277]: the trait bound `<T as Parent>::Assoc: Child<A>` is not satisfied
+  --> $DIR/missing-assoc-type-bound-restriction.rs:17:1
+   |
+LL |   trait Parent {
+   |   ------------ required by `Parent`
+...
+LL |   impl<A, T: Parent<Ty = A>> Parent for ParentWrapper<T> {
+   |   ^                                                     - help: consider further restricting the associated type: `where <T as Parent>::Assoc: Child<A>`
+   |  _|
+   | |
+LL | |
+LL | |
+LL | |     type Ty = A;
+LL | |     type Assoc = ChildWrapper<T::Assoc>;
+LL | |
+LL | | }
+   | |_^ the trait `Child<A>` is not implemented for `<T as Parent>::Assoc`
+
+error[E0277]: the trait bound `<T as Parent>::Assoc: Child<A>` is not satisfied
+  --> $DIR/missing-assoc-type-bound-restriction.rs:17:28
+   |
+LL | impl<A, T: Parent<Ty = A>> Parent for ParentWrapper<T> {
+   |                            ^^^^^^                     - help: consider further restricting the associated type: `where <T as Parent>::Assoc: Child<A>`
+   |                            |
+   |                            the trait `Child<A>` is not implemented for `<T as Parent>::Assoc`
+   |
+   = note: required because of the requirements on the impl of `Child<A>` for `ChildWrapper<<T as Parent>::Assoc>`
+
+error[E0277]: the trait bound `<T as Parent>::Assoc: Child<A>` is not satisfied
+  --> $DIR/missing-assoc-type-bound-restriction.rs:21:5
+   |
+LL | trait Parent {
+   | ------------ required by `Parent`
+...
+LL | impl<A, T: Parent<Ty = A>> Parent for ParentWrapper<T> {
+   |                                                       - help: consider further restricting the associated type: `where <T as Parent>::Assoc: Child<A>`
+...
+LL |     type Assoc = ChildWrapper<T::Assoc>;
+   |     ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ the trait `Child<A>` is not implemented for `<T as Parent>::Assoc`
+
+error: aborting due to 3 previous errors
+
+For more information about this error, try `rustc --explain E0277`.
diff --git a/src/test/ui/suggestions/mut-borrow-needed-by-trait.rs b/src/test/ui/suggestions/mut-borrow-needed-by-trait.rs
new file mode 100644 (file)
index 0000000..dcef2ad
--- /dev/null
@@ -0,0 +1,23 @@
+use std::env::args;
+use std::fs::File;
+use std::io::{stdout, Write, BufWriter};
+
+fn main() {
+    let mut args = args();
+    let _ = args.next();
+    let dest = args.next();
+
+    let h1; let h2; let h3;
+
+    let fp: &dyn Write = match dest {
+        Some(path) => { h1 = File::create(path).unwrap(); &h1 },
+        None => { h2 = stdout(); h3 = h2.lock(); &h3 }
+    };
+
+    let fp = BufWriter::new(fp);
+    //~^ ERROR the trait bound `&dyn std::io::Write: std::io::Write` is not satisfied
+    //~| ERROR the trait bound `&dyn std::io::Write: std::io::Write` is not satisfied
+    //~| ERROR the trait bound `&dyn std::io::Write: std::io::Write` is not satisfied
+
+    writeln!(fp, "hello world").unwrap(); //~ ERROR no method named `write_fmt` found for type
+}
diff --git a/src/test/ui/suggestions/mut-borrow-needed-by-trait.stderr b/src/test/ui/suggestions/mut-borrow-needed-by-trait.stderr
new file mode 100644 (file)
index 0000000..daa8e11
--- /dev/null
@@ -0,0 +1,41 @@
+error[E0277]: the trait bound `&dyn std::io::Write: std::io::Write` is not satisfied
+  --> $DIR/mut-borrow-needed-by-trait.rs:17:29
+   |
+LL |     let fp = BufWriter::new(fp);
+   |                             ^^ the trait `std::io::Write` is not implemented for `&dyn std::io::Write`
+   |
+   = note: `std::io::Write` is implemented for `&mut dyn std::io::Write`, but not for `&dyn std::io::Write`
+   = note: required by `std::io::BufWriter::<W>::new`
+
+error[E0277]: the trait bound `&dyn std::io::Write: std::io::Write` is not satisfied
+  --> $DIR/mut-borrow-needed-by-trait.rs:17:14
+   |
+LL |     let fp = BufWriter::new(fp);
+   |              ^^^^^^^^^^^^^^ the trait `std::io::Write` is not implemented for `&dyn std::io::Write`
+   |
+   = note: `std::io::Write` is implemented for `&mut dyn std::io::Write`, but not for `&dyn std::io::Write`
+   = note: required by `std::io::BufWriter`
+
+error[E0277]: the trait bound `&dyn std::io::Write: std::io::Write` is not satisfied
+  --> $DIR/mut-borrow-needed-by-trait.rs:17:14
+   |
+LL |     let fp = BufWriter::new(fp);
+   |              ^^^^^^^^^^^^^^^^^^ the trait `std::io::Write` is not implemented for `&dyn std::io::Write`
+   |
+   = note: `std::io::Write` is implemented for `&mut dyn std::io::Write`, but not for `&dyn std::io::Write`
+   = note: required by `std::io::BufWriter`
+
+error[E0599]: no method named `write_fmt` found for type `std::io::BufWriter<&dyn std::io::Write>` in the current scope
+  --> $DIR/mut-borrow-needed-by-trait.rs:22:5
+   |
+LL |     writeln!(fp, "hello world").unwrap();
+   |     ^^^^^^^^^^^^^^^^^^^^^^^^^^^ method not found in `std::io::BufWriter<&dyn std::io::Write>`
+   |
+   = note: the method `write_fmt` exists but the following trait bounds were not satisfied:
+           `std::io::BufWriter<&dyn std::io::Write> : std::io::Write`
+   = note: this error originates in a macro outside of the current crate (in Nightly builds, run with -Z external-macro-backtrace for more info)
+
+error: aborting due to 4 previous errors
+
+Some errors have detailed explanations: E0277, E0599.
+For more information about an error, try `rustc --explain E0277`.
diff --git a/src/test/ui/suggestions/remove-as_str.rs b/src/test/ui/suggestions/remove-as_str.rs
new file mode 100644 (file)
index 0000000..d10300b
--- /dev/null
@@ -0,0 +1,21 @@
+fn foo1(s: &str) {
+    s.as_str();
+    //~^ ERROR no method named `as_str` found for type `&str` in the current scope
+}
+
+fn foo2<'a>(s: &'a str) {
+    s.as_str();
+    //~^ ERROR no method named `as_str` found for type `&'a str` in the current scope
+}
+
+fn foo3(s: &mut str) {
+    s.as_str();
+    //~^ ERROR no method named `as_str` found for type `&mut str` in the current scope
+}
+
+fn foo4(s: &&str) {
+    s.as_str();
+    //~^ ERROR no method named `as_str` found for type `&&str` in the current scope
+}
+
+fn main() {}
diff --git a/src/test/ui/suggestions/remove-as_str.stderr b/src/test/ui/suggestions/remove-as_str.stderr
new file mode 100644 (file)
index 0000000..eae9cc0
--- /dev/null
@@ -0,0 +1,27 @@
+error[E0599]: no method named `as_str` found for type `&str` in the current scope
+  --> $DIR/remove-as_str.rs:2:7
+   |
+LL |     s.as_str();
+   |      -^^^^^^-- help: remove this method call
+
+error[E0599]: no method named `as_str` found for type `&'a str` in the current scope
+  --> $DIR/remove-as_str.rs:7:7
+   |
+LL |     s.as_str();
+   |      -^^^^^^-- help: remove this method call
+
+error[E0599]: no method named `as_str` found for type `&mut str` in the current scope
+  --> $DIR/remove-as_str.rs:12:7
+   |
+LL |     s.as_str();
+   |      -^^^^^^-- help: remove this method call
+
+error[E0599]: no method named `as_str` found for type `&&str` in the current scope
+  --> $DIR/remove-as_str.rs:17:7
+   |
+LL |     s.as_str();
+   |      -^^^^^^-- help: remove this method call
+
+error: aborting due to 4 previous errors
+
+For more information about this error, try `rustc --explain E0599`.
diff --git a/src/test/ui/suggestions/restrict-type-argument.rs b/src/test/ui/suggestions/restrict-type-argument.rs
new file mode 100644 (file)
index 0000000..c4ebfbe
--- /dev/null
@@ -0,0 +1,31 @@
+fn is_send<T: Send>(val: T) {}
+
+fn use_impl_sync(val: impl Sync) {
+    is_send(val); //~ ERROR `impl Sync` cannot be sent between threads safely
+}
+
+fn use_where<S>(val: S) where S: Sync {
+    is_send(val); //~ ERROR `S` cannot be sent between threads safely
+}
+
+fn use_bound<S: Sync>(val: S) {
+    is_send(val); //~ ERROR `S` cannot be sent between threads safely
+}
+
+fn use_bound_2<
+    S // Make sure we can synthesize a correct suggestion span for this case
+    :
+    Sync
+>(val: S) {
+    is_send(val); //~ ERROR `S` cannot be sent between threads safely
+}
+
+fn use_bound_and_where<S: Sync>(val: S) where S: std::fmt::Debug {
+    is_send(val); //~ ERROR `S` cannot be sent between threads safely
+}
+
+fn use_unbound<S>(val: S) {
+    is_send(val); //~ ERROR `S` cannot be sent between threads safely
+}
+
+fn main() {}
diff --git a/src/test/ui/suggestions/restrict-type-argument.stderr b/src/test/ui/suggestions/restrict-type-argument.stderr
new file mode 100644 (file)
index 0000000..d6840ca
--- /dev/null
@@ -0,0 +1,83 @@
+error[E0277]: `impl Sync` cannot be sent between threads safely
+  --> $DIR/restrict-type-argument.rs:4:13
+   |
+LL | fn is_send<T: Send>(val: T) {}
+   |    -------    ---- required by this bound in `is_send`
+LL | 
+LL | fn use_impl_sync(val: impl Sync) {
+   |                       --------- help: consider further restricting this bound: `impl Sync + std::marker::Send`
+LL |     is_send(val);
+   |             ^^^ `impl Sync` cannot be sent between threads safely
+   |
+   = help: the trait `std::marker::Send` is not implemented for `impl Sync`
+
+error[E0277]: `S` cannot be sent between threads safely
+  --> $DIR/restrict-type-argument.rs:8:13
+   |
+LL | fn is_send<T: Send>(val: T) {}
+   |    -------    ---- required by this bound in `is_send`
+...
+LL | fn use_where<S>(val: S) where S: Sync {
+   |                                      - help: consider further restricting type parameter `S`: `, S: std::marker::Send`
+LL |     is_send(val);
+   |             ^^^ `S` cannot be sent between threads safely
+   |
+   = help: the trait `std::marker::Send` is not implemented for `S`
+
+error[E0277]: `S` cannot be sent between threads safely
+  --> $DIR/restrict-type-argument.rs:12:13
+   |
+LL | fn is_send<T: Send>(val: T) {}
+   |    -------    ---- required by this bound in `is_send`
+...
+LL | fn use_bound<S: Sync>(val: S) {
+   |              -- help: consider further restricting this bound: `S: std::marker::Send +`
+LL |     is_send(val);
+   |             ^^^ `S` cannot be sent between threads safely
+   |
+   = help: the trait `std::marker::Send` is not implemented for `S`
+
+error[E0277]: `S` cannot be sent between threads safely
+  --> $DIR/restrict-type-argument.rs:20:13
+   |
+LL |   fn is_send<T: Send>(val: T) {}
+   |      -------    ---- required by this bound in `is_send`
+...
+LL | /     S // Make sure we can synthesize a correct suggestion span for this case
+LL | |     :
+   | |_____- help: consider further restricting this bound: `S: std::marker::Send +`
+...
+LL |       is_send(val);
+   |               ^^^ `S` cannot be sent between threads safely
+   |
+   = help: the trait `std::marker::Send` is not implemented for `S`
+
+error[E0277]: `S` cannot be sent between threads safely
+  --> $DIR/restrict-type-argument.rs:24:13
+   |
+LL | fn is_send<T: Send>(val: T) {}
+   |    -------    ---- required by this bound in `is_send`
+...
+LL | fn use_bound_and_where<S: Sync>(val: S) where S: std::fmt::Debug {
+   |                                                                 - help: consider further restricting type parameter `S`: `, S: std::marker::Send`
+LL |     is_send(val);
+   |             ^^^ `S` cannot be sent between threads safely
+   |
+   = help: the trait `std::marker::Send` is not implemented for `S`
+
+error[E0277]: `S` cannot be sent between threads safely
+  --> $DIR/restrict-type-argument.rs:28:13
+   |
+LL | fn is_send<T: Send>(val: T) {}
+   |    -------    ---- required by this bound in `is_send`
+...
+LL | fn use_unbound<S>(val: S) {
+   |                - help: consider restricting this bound: `S: std::marker::Send`
+LL |     is_send(val);
+   |             ^^^ `S` cannot be sent between threads safely
+   |
+   = help: the trait `std::marker::Send` is not implemented for `S`
+
+error: aborting due to 6 previous errors
+
+For more information about this error, try `rustc --explain E0277`.
diff --git a/src/test/ui/suggestions/suggest-assoc-fn-call-with-turbofish-through-deref.rs b/src/test/ui/suggestions/suggest-assoc-fn-call-with-turbofish-through-deref.rs
new file mode 100644 (file)
index 0000000..5480adb
--- /dev/null
@@ -0,0 +1,13 @@
+use std::cell::RefCell;
+
+struct HasAssocMethod;
+
+impl HasAssocMethod {
+    fn hello() {}
+}
+fn main() {
+    let shared_state = RefCell::new(HasAssocMethod);
+    let state = shared_state.borrow_mut();
+    state.hello();
+    //~^ ERROR no method named `hello` found for type `std::cell::RefMut<'_, HasAssocMethod>`
+}
diff --git a/src/test/ui/suggestions/suggest-assoc-fn-call-with-turbofish-through-deref.stderr b/src/test/ui/suggestions/suggest-assoc-fn-call-with-turbofish-through-deref.stderr
new file mode 100644 (file)
index 0000000..a1c0126
--- /dev/null
@@ -0,0 +1,19 @@
+error[E0599]: no method named `hello` found for type `std::cell::RefMut<'_, HasAssocMethod>` in the current scope
+  --> $DIR/suggest-assoc-fn-call-with-turbofish-through-deref.rs:11:11
+   |
+LL |     state.hello();
+   |     ------^^^^^
+   |     |     |
+   |     |     this is an associated function, not a method
+   |     help: use associated function syntax instead: `HasAssocMethod::hello`
+   |
+   = note: found the following associated functions; to be used as methods, functions must have a `self` parameter
+note: the candidate is defined in an impl for the type `HasAssocMethod`
+  --> $DIR/suggest-assoc-fn-call-with-turbofish-through-deref.rs:6:5
+   |
+LL |     fn hello() {}
+   |     ^^^^^^^^^^
+
+error: aborting due to previous error
+
+For more information about this error, try `rustc --explain E0599`.
diff --git a/src/test/ui/suggestions/suggest-assoc-fn-call-with-turbofish.rs b/src/test/ui/suggestions/suggest-assoc-fn-call-with-turbofish.rs
new file mode 100644 (file)
index 0000000..ef4b38d
--- /dev/null
@@ -0,0 +1,11 @@
+struct GenericAssocMethod<T>(T);
+
+impl<T> GenericAssocMethod<T> {
+    fn default_hello() {}
+}
+
+fn main() {
+    let x = GenericAssocMethod(33i32);
+    x.default_hello();
+    //~^ ERROR no method named `default_hello` found for type `GenericAssocMethod<i32>`
+}
diff --git a/src/test/ui/suggestions/suggest-assoc-fn-call-with-turbofish.stderr b/src/test/ui/suggestions/suggest-assoc-fn-call-with-turbofish.stderr
new file mode 100644 (file)
index 0000000..8cfa7de
--- /dev/null
@@ -0,0 +1,22 @@
+error[E0599]: no method named `default_hello` found for type `GenericAssocMethod<i32>` in the current scope
+  --> $DIR/suggest-assoc-fn-call-with-turbofish.rs:9:7
+   |
+LL | struct GenericAssocMethod<T>(T);
+   | -------------------------------- method `default_hello` not found for this
+...
+LL |     x.default_hello();
+   |     --^^^^^^^^^^^^^
+   |     | |
+   |     | this is an associated function, not a method
+   |     help: use associated function syntax instead: `GenericAssocMethod::<i32>::default_hello`
+   |
+   = note: found the following associated functions; to be used as methods, functions must have a `self` parameter
+note: the candidate is defined in an impl for the type `GenericAssocMethod<_>`
+  --> $DIR/suggest-assoc-fn-call-with-turbofish.rs:4:5
+   |
+LL |     fn default_hello() {}
+   |     ^^^^^^^^^^^^^^^^^^
+
+error: aborting due to previous error
+
+For more information about this error, try `rustc --explain E0599`.
index 6bef793e0e71bbb28700cb19f7a195c367d11e98..b831e624cb6ff4403bf69fc047169f75edace6fe 100644 (file)
@@ -42,4 +42,5 @@ LL |     rustfmt::skip;
 
 error: aborting due to 7 previous errors
 
-For more information about this error, try `rustc --explain E0423`.
+Some errors have detailed explanations: E0423, E0573.
+For more information about an error, try `rustc --explain E0423`.
index ca6d05847166788d7c2357caad07f1746aefe034..4355a517bd724dad62cbafa094128ed54d7012f0 100644 (file)
@@ -4,9 +4,10 @@ error[E0277]: the trait bound `T: Foo` is not satisfied
 LL | trait A<T: Foo> {}
    | --------------- required by `A`
 LL | trait B<T> = A<T>;
-   | ^^^^^^^^^^^^^^^^^^ the trait `Foo` is not implemented for `T`
-   |
-   = help: consider adding a `where T: Foo` bound
+   | ^^^^^^^^-^^^^^^^^^
+   | |       |
+   | |       help: consider restricting this bound: `T: Foo`
+   | the trait `Foo` is not implemented for `T`
 
 error: aborting due to previous error
 
index 434dcbc8736aa23b234f80c5f74d853f79133dce..e1d54fbf8aa7b3b63d97b21b3db74fdf74752526 100644 (file)
@@ -6,3 +6,4 @@ LL |     TraitNotAStruct{ value: 0 };
 
 error: aborting due to previous error
 
+For more information about this error, try `rustc --explain E0574`.
index bd76df8071a53f8a0dcfcf9cb45b20f3a69e1bcb..96bbd1f3e4f1753add73e5010fcab1dd0c79b6cf 100644 (file)
@@ -5,9 +5,9 @@ LL | struct Foo<T:Trait> {
    | ------------------- required by `Foo`
 ...
 LL | impl<T> Foo<T> {
-   |         ^^^^^^ the trait `Trait` is not implemented for `T`
-   |
-   = help: consider adding a `where T: Trait` bound
+   |      -  ^^^^^^ the trait `Trait` is not implemented for `T`
+   |      |
+   |      help: consider restricting this bound: `T: Trait`
 
 error[E0277]: the trait bound `isize: Trait` is not satisfied
   --> $DIR/trait-bounds-on-structs-and-enums.rs:19:5
@@ -33,10 +33,10 @@ error[E0277]: the trait bound `U: Trait` is not satisfied
 LL | struct Foo<T:Trait> {
    | ------------------- required by `Foo`
 ...
+LL | struct Badness<U> {
+   |                - help: consider restricting this bound: `U: Trait`
 LL |     b: Foo<U>,
    |     ^^^^^^^^^ the trait `Trait` is not implemented for `U`
-   |
-   = help: consider adding a `where U: Trait` bound
 
 error[E0277]: the trait bound `V: Trait` is not satisfied
   --> $DIR/trait-bounds-on-structs-and-enums.rs:31:21
@@ -44,10 +44,10 @@ error[E0277]: the trait bound `V: Trait` is not satisfied
 LL | enum Bar<T:Trait> {
    | ----------------- required by `Bar`
 ...
+LL | enum MoreBadness<V> {
+   |                  - help: consider restricting this bound: `V: Trait`
 LL |     EvenMoreBadness(Bar<V>),
    |                     ^^^^^^ the trait `Trait` is not implemented for `V`
-   |
-   = help: consider adding a `where V: Trait` bound
 
 error[E0277]: the trait bound `i32: Trait` is not satisfied
   --> $DIR/trait-bounds-on-structs-and-enums.rs:35:5
index 4a06cd777d49e18b4f962cb0c9657a293ba236a7..c62bcfca94de945bda627db8f6d109ffed2b1078 100644 (file)
@@ -6,3 +6,4 @@ LL | impl A for a {
 
 error: aborting due to previous error
 
+For more information about this error, try `rustc --explain E0573`.
index fb417b82d15ce6f98dd428181540c240dc3ef388..88c9c473eb0c7c0c53b32a5d83e7917ab0a54935 100644 (file)
@@ -4,12 +4,12 @@ error[E0495]: cannot infer an appropriate lifetime for lifetime parameter `'b` d
 LL | impl<'a,'b> T2<'a, 'b> for S<'a, 'b> {
    |             ^^^^^^^^^^
    |
-note: first, the lifetime cannot outlive the lifetime 'a as defined on the impl at 24:6...
+note: first, the lifetime cannot outlive the lifetime `'a` as defined on the impl at 24:6...
   --> $DIR/trait-impl-of-supertrait-has-wrong-lifetime-parameters.rs:24:6
    |
 LL | impl<'a,'b> T2<'a, 'b> for S<'a, 'b> {
    |      ^^
-note: ...but the lifetime must also be valid for the lifetime 'b as defined on the impl at 24:9...
+note: ...but the lifetime must also be valid for the lifetime `'b` as defined on the impl at 24:9...
   --> $DIR/trait-impl-of-supertrait-has-wrong-lifetime-parameters.rs:24:9
    |
 LL | impl<'a,'b> T2<'a, 'b> for S<'a, 'b> {
@@ -20,3 +20,4 @@ LL | impl<'a,'b> T2<'a, 'b> for S<'a, 'b> {
 
 error: aborting due to previous error
 
+For more information about this error, try `rustc --explain E0495`.
index 80c577674d166b2c8bbabb4a8d5a20f41b019cbf..e1ccde3c9d14a80752124822237784789a5705a0 100644 (file)
@@ -6,12 +6,12 @@ LL |     fn foo(x: Foo<'b,'a>) {
    |
    = note: expected type `fn(Foo<'a, 'b>)`
               found type `fn(Foo<'b, 'a>)`
-note: the lifetime 'b as defined on the impl at 13:9...
+note: the lifetime `'b` as defined on the impl at 13:9...
   --> $DIR/trait-matching-lifetimes.rs:13:9
    |
 LL | impl<'a,'b> Tr for Foo<'a,'b> {
    |         ^^
-note: ...does not necessarily outlive the lifetime 'a as defined on the impl at 13:6
+note: ...does not necessarily outlive the lifetime `'a` as defined on the impl at 13:6
   --> $DIR/trait-matching-lifetimes.rs:13:6
    |
 LL | impl<'a,'b> Tr for Foo<'a,'b> {
@@ -25,12 +25,12 @@ LL |     fn foo(x: Foo<'b,'a>) {
    |
    = note: expected type `fn(Foo<'a, 'b>)`
               found type `fn(Foo<'b, 'a>)`
-note: the lifetime 'a as defined on the impl at 13:6...
+note: the lifetime `'a` as defined on the impl at 13:6...
   --> $DIR/trait-matching-lifetimes.rs:13:6
    |
 LL | impl<'a,'b> Tr for Foo<'a,'b> {
    |      ^^
-note: ...does not necessarily outlive the lifetime 'b as defined on the impl at 13:9
+note: ...does not necessarily outlive the lifetime `'b` as defined on the impl at 13:9
   --> $DIR/trait-matching-lifetimes.rs:13:9
    |
 LL | impl<'a,'b> Tr for Foo<'a,'b> {
index d15edaa9c814683bc64ea3cd76c9cef10baa0852..f1004ea9dc6ee3e0eb823048ada3f9db22f2aaec 100644 (file)
@@ -1,6 +1,9 @@
 error[E0277]: the size for values of type `U` cannot be known at compilation time
   --> $DIR/trait-suggest-where-clause.rs:9:20
    |
+LL | fn check<T: Iterator, U: ?Sized>() {
+   |                       -- help: consider further restricting this bound: `U: std::marker::Sized +`
+LL |     // suggest a where-clause, if needed
 LL |     mem::size_of::<U>();
    |                    ^ doesn't have a size known at compile-time
    | 
@@ -11,11 +14,13 @@ LL | pub const fn size_of<T>() -> usize {
    |
    = help: the trait `std::marker::Sized` is not implemented for `U`
    = note: to learn more, visit <https://doc.rust-lang.org/book/ch19-04-advanced-types.html#dynamically-sized-types-and-the-sized-trait>
-   = help: consider adding a `where U: std::marker::Sized` bound
 
 error[E0277]: the size for values of type `U` cannot be known at compilation time
   --> $DIR/trait-suggest-where-clause.rs:12:5
    |
+LL | fn check<T: Iterator, U: ?Sized>() {
+   |                       -- help: consider further restricting this bound: `U: std::marker::Sized +`
+...
 LL |     mem::size_of::<Misc<U>>();
    |     ^^^^^^^^^^^^^^^^^^^^^^^ doesn't have a size known at compile-time
    | 
@@ -26,7 +31,6 @@ LL | pub const fn size_of<T>() -> usize {
    |
    = help: within `Misc<U>`, the trait `std::marker::Sized` is not implemented for `U`
    = note: to learn more, visit <https://doc.rust-lang.org/book/ch19-04-advanced-types.html#dynamically-sized-types-and-the-sized-trait>
-   = help: consider adding a `where U: std::marker::Sized` bound
    = note: required because it appears within the type `Misc<U>`
 
 error[E0277]: the trait bound `u64: std::convert::From<T>` is not satisfied
@@ -35,7 +39,6 @@ error[E0277]: the trait bound `u64: std::convert::From<T>` is not satisfied
 LL |     <u64 as From<T>>::from;
    |     ^^^^^^^^^^^^^^^^^^^^^^ the trait `std::convert::From<T>` is not implemented for `u64`
    |
-   = help: consider adding a `where u64: std::convert::From<T>` bound
    = note: required by `std::convert::From::from`
 
 error[E0277]: the trait bound `u64: std::convert::From<<T as std::iter::Iterator>::Item>` is not satisfied
@@ -44,7 +47,6 @@ error[E0277]: the trait bound `u64: std::convert::From<<T as std::iter::Iterator
 LL |     <u64 as From<<T as Iterator>::Item>>::from;
    |     ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ the trait `std::convert::From<<T as std::iter::Iterator>::Item>` is not implemented for `u64`
    |
-   = help: consider adding a `where u64: std::convert::From<<T as std::iter::Iterator>::Item>` bound
    = note: required by `std::convert::From::from`
 
 error[E0277]: the trait bound `Misc<_>: std::convert::From<T>` is not satisfied
index 40c2c2e4c9d10d3ed294b3e3d78645b72c80cbd1..63182a6bd95813290d685296e044ec295acd8eb4 100644 (file)
@@ -17,4 +17,5 @@ LL |     let (a, b) = copy(NoClone);
 
 error: aborting due to 2 previous errors
 
-For more information about this error, try `rustc --explain E0277`.
+Some errors have detailed explanations: E0277, E0568.
+For more information about an error, try `rustc --explain E0277`.
index db77e82adbd002bbe3b010cd78be122a8573d591..85c7a55c31379c14bf696804401ed5890d9fbe5a 100644 (file)
@@ -7,10 +7,10 @@ LL |     c.same_as(22)
 error[E0277]: the trait bound `C: CompareTo<i32>` is not satisfied
   --> $DIR/traits-repeated-supertrait-ambig.rs:30:7
    |
+LL | fn with_trait<C:CompareToInts>(c: &C) -> bool {
+   |               -- help: consider further restricting this bound: `C: CompareTo<i32> +`
 LL |     c.same_as(22)
    |       ^^^^^^^ the trait `CompareTo<i32>` is not implemented for `C`
-   |
-   = help: consider adding a `where C: CompareTo<i32>` bound
 
 error[E0277]: the trait bound `dyn CompareToInts: CompareTo<i32>` is not satisfied
   --> $DIR/traits-repeated-supertrait-ambig.rs:34:5
@@ -27,10 +27,10 @@ error[E0277]: the trait bound `C: CompareTo<i32>` is not satisfied
 LL |     fn same_as(&self, t: T) -> bool;
    |     -------------------------------- required by `CompareTo::same_as`
 ...
+LL | fn with_ufcs2<C:CompareToInts>(c: &C) -> bool {
+   |               -- help: consider further restricting this bound: `C: CompareTo<i32> +`
 LL |     CompareTo::same_as(c, 22)
    |     ^^^^^^^^^^^^^^^^^^ the trait `CompareTo<i32>` is not implemented for `C`
-   |
-   = help: consider adding a `where C: CompareTo<i32>` bound
 
 error[E0277]: the trait bound `i64: CompareTo<i32>` is not satisfied
   --> $DIR/traits-repeated-supertrait-ambig.rs:42:23
index 7034cdce7553de57fa6174612762a7d8c5462750..c94e43131faf0090fd230ad132a964be792c9f70 100644 (file)
@@ -21,3 +21,4 @@ LL |     let try_result: Option<_> = try {
 
 error: aborting due to 2 previous errors
 
+For more information about this error, try `rustc --explain E0574`.
diff --git a/src/test/ui/try_from.rs b/src/test/ui/try_from.rs
deleted file mode 100644 (file)
index 5045157..0000000
+++ /dev/null
@@ -1,37 +0,0 @@
-// run-pass
-// This test relies on `TryFrom` being blanket impl for all `T: Into`
-// and `TryInto` being blanket impl for all `U: TryFrom`
-
-// This test was added to show the motivation for doing this
-// over `TryFrom` being blanket impl for all `T: From`
-
-#![feature(never_type)]
-
-use std::convert::{TryInto, Infallible};
-
-struct Foo<T> {
-    t: T,
-}
-
-// This fails to compile due to coherence restrictions
-// as of Rust version 1.32.x, therefore it could not be used
-// instead of the `Into` version of the impl, and serves as
-// motivation for a blanket impl for all `T: Into`, instead
-// of a blanket impl for all `T: From`
-/*
-impl<T> From<Foo<T>> for Box<T> {
-    fn from(foo: Foo<T>) -> Box<T> {
-        Box::new(foo.t)
-    }
-}
-*/
-
-impl<T> Into<Vec<T>> for Foo<T> {
-    fn into(self) -> Vec<T> {
-        vec![self.t]
-    }
-}
-
-pub fn main() {
-    let _: Result<Vec<i32>, Infallible> = Foo { t: 10 }.try_into();
-}
index dd90dd1b06fe2e6c94679d240d049c29aa114345..63c07224353e2e018343760e098e2b0834a031bc 100644 (file)
@@ -9,8 +9,10 @@ error[E0277]: the trait bound `T: Trait` is not satisfied
    |
 LL | type Underconstrained<T: Trait> = impl 'static;
    | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ the trait `Trait` is not implemented for `T`
+...
+LL | fn underconstrain<T>(_: T) -> Underconstrained<T> {
+   |                   - help: consider restricting this bound: `T: Trait`
    |
-   = help: consider adding a `where T: Trait` bound
    = note: the return type of a function must have a statically known size
 
 error: aborting due to 2 previous errors
index 574432bdcf6de8e396467d665331f349893b390b..ba892f6ed7c7b7918811f34d0816e7fd1d92973a 100644 (file)
@@ -15,9 +15,11 @@ error[E0277]: `U` doesn't implement `std::fmt::Debug`
    |
 LL | type Underconstrained<T: std::fmt::Debug> = impl 'static;
    | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ `U` cannot be formatted using `{:?}` because it doesn't implement `std::fmt::Debug`
+...
+LL | fn underconstrained<U>(_: U) -> Underconstrained<U> {
+   |                     - help: consider restricting this bound: `U: std::fmt::Debug`
    |
    = help: the trait `std::fmt::Debug` is not implemented for `U`
-   = help: consider adding a `where U: std::fmt::Debug` bound
    = note: the return type of a function must have a statically known size
 
 error[E0277]: `V` doesn't implement `std::fmt::Debug`
@@ -25,9 +27,11 @@ error[E0277]: `V` doesn't implement `std::fmt::Debug`
    |
 LL | type Underconstrained2<T: std::fmt::Debug> = impl 'static;
    | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ `V` cannot be formatted using `{:?}` because it doesn't implement `std::fmt::Debug`
+...
+LL | fn underconstrained2<U, V>(_: U, _: V) -> Underconstrained2<V> {
+   |                         - help: consider restricting this bound: `V: std::fmt::Debug`
    |
    = help: the trait `std::fmt::Debug` is not implemented for `V`
-   = help: consider adding a `where V: std::fmt::Debug` bound
    = note: the return type of a function must have a statically known size
 
 error: aborting due to 4 previous errors
index 624c817e33e3280b01e7d0db91d39789eaed53cb..eeaca5300f9b68d38012f103c17bc93cbf73f9c8 100644 (file)
@@ -11,3 +11,4 @@ LL |     f();
 
 error: aborting due to previous error
 
+For more information about this error, try `rustc --explain E0573`.
index 742a709958fa09ae72c764a95e90c72cf4870bba..6802bc38b89c9fce2598826baec09d3d1edd6ab2 100644 (file)
@@ -52,9 +52,10 @@ error[E0277]: the trait bound `T: std::marker::Copy` is not satisfied
 LL | trait Super<T: Copy> { }
    | -------------------- required by `Super`
 LL | trait Base<T = String>: Super<T> { }
-   | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ the trait `std::marker::Copy` is not implemented for `T`
-   |
-   = help: consider adding a `where T: std::marker::Copy` bound
+   | ^^^^^^^^^^^-^^^^^^^^^^^^^^^^^^^^^^^^
+   | |          |
+   | |          help: consider restricting this bound: `T: std::marker::Copy`
+   | the trait `std::marker::Copy` is not implemented for `T`
 
 error[E0277]: cannot add `u8` to `i32`
   --> $DIR/type-check-defaults.rs:24:66
index 7d4bbc813c09c750dbe7e9d09ce545a7fc122376..7ce249a60b85eefa6bb0f11f5ac8b21c157719b3 100644 (file)
@@ -4,10 +4,10 @@ error[E0277]: the trait bound `Self: Tr<U>` is not satisfied
 LL |     fn op(_: T) -> Self;
    |     -------------------- required by `Tr::op`
 ...
+LL |     fn test<U>(u: U) -> Self {
+   |                             - help: consider further restricting `Self`: `where Self: Tr<U>`
 LL |         Tr::op(u)
    |         ^^^^^^ the trait `Tr<U>` is not implemented for `Self`
-   |
-   = help: consider adding a `where Self: Tr<U>` bound
 
 error[E0277]: the trait bound `Self: Tr<U>` is not satisfied
   --> $DIR/type-params-in-different-spaces-2.rs:16:9
@@ -15,10 +15,10 @@ error[E0277]: the trait bound `Self: Tr<U>` is not satisfied
 LL |     fn op(_: T) -> Self;
    |     -------------------- required by `Tr::op`
 ...
+LL |     fn test<U>(u: U) -> Self {
+   |                             - help: consider further restricting `Self`: `where Self: Tr<U>`
 LL |         Tr::op(u)
    |         ^^^^^^ the trait `Tr<U>` is not implemented for `Self`
-   |
-   = help: consider adding a `where Self: Tr<U>` bound
 
 error: aborting due to 2 previous errors
 
index 1184e30749fe009dc6efb00ba2df4d0c2f556b09..8755bcded9d2fc07161d6f459845348f4d00a02c 100644 (file)
@@ -6,3 +6,4 @@ LL | auto trait Magic : Sized where Option<Self> : Magic {}
 
 error: aborting due to previous error
 
+For more information about this error, try `rustc --explain E0568`.
index 7b45ca07b35c775c10a7a7cd5b27e0a6d834b331..5a38883490959d3cd8941cca5ccfe575c6ca4859 100644 (file)
@@ -6,3 +6,4 @@ LL | auto trait Magic: Copy {}
 
 error: aborting due to previous error
 
+For more information about this error, try `rustc --explain E0568`.
diff --git a/src/test/ui/typeck/typeck-default-trait-impl-assoc-type.fixed b/src/test/ui/typeck/typeck-default-trait-impl-assoc-type.fixed
new file mode 100644 (file)
index 0000000..7a108d8
--- /dev/null
@@ -0,0 +1,17 @@
+// run-rustfix
+// Test that we do not consider associated types to be sendable without
+// some applicable trait bound (and we don't ICE).
+#![allow(dead_code)]
+
+trait Trait {
+    type AssocType;
+    fn dummy(&self) { }
+}
+fn bar<T:Trait+Send>() where <T as Trait>::AssocType: std::marker::Send  {
+    is_send::<T::AssocType>(); //~ ERROR E0277
+}
+
+fn is_send<T:Send>() {
+}
+
+fn main() { }
index d64835393867455d814b93b02e473ebb701e2204..bafc1657737f44c0110ebfefd3e05e349e8d0894 100644 (file)
@@ -1,5 +1,7 @@
+// run-rustfix
 // Test that we do not consider associated types to be sendable without
 // some applicable trait bound (and we don't ICE).
+#![allow(dead_code)]
 
 trait Trait {
     type AssocType;
index b842d0ae1a2489d49617be319935f7167e542dee..2e54cdf01320d50beb13b6690b35102a2b709a5f 100644 (file)
@@ -1,6 +1,8 @@
 error[E0277]: `<T as Trait>::AssocType` cannot be sent between threads safely
-  --> $DIR/typeck-default-trait-impl-assoc-type.rs:9:5
+  --> $DIR/typeck-default-trait-impl-assoc-type.rs:11:5
    |
+LL | fn bar<T:Trait+Send>() {
+   |                       - help: consider further restricting the associated type: `where <T as Trait>::AssocType: std::marker::Send`
 LL |     is_send::<T::AssocType>();
    |     ^^^^^^^^^^^^^^^^^^^^^^^ `<T as Trait>::AssocType` cannot be sent between threads safely
 ...
@@ -8,7 +10,6 @@ LL | fn is_send<T:Send>() {
    |    -------   ---- required by this bound in `is_send`
    |
    = help: the trait `std::marker::Send` is not implemented for `<T as Trait>::AssocType`
-   = help: consider adding a `where <T as Trait>::AssocType: std::marker::Send` bound
 
 error: aborting due to previous error
 
index b3139083b1ac8ec7186785e3df8a7b5f5df81bc3..a84aef5fdbdb66d5090c008114f82b442f21b079 100644 (file)
@@ -1,6 +1,8 @@
 error[E0277]: `T` cannot be sent between threads safely
   --> $DIR/typeck-default-trait-impl-send-param.rs:5:15
    |
+LL | fn foo<T>() {
+   |        - help: consider restricting this bound: `T: std::marker::Send`
 LL |     is_send::<T>()
    |               ^ `T` cannot be sent between threads safely
 ...
@@ -8,7 +10,6 @@ LL | fn is_send<T:Send>() {
    |    -------   ---- required by this bound in `is_send`
    |
    = help: the trait `std::marker::Send` is not implemented for `T`
-   = help: consider adding a `where T: std::marker::Send` bound
 
 error: aborting due to previous error
 
index b2fe1b281fc999ee0ed8324d1b536abab6a2dc7f..de3a997a19ed8d65bfd70d86a534f3ba92a4021c 100644 (file)
@@ -38,7 +38,7 @@ note: the anonymous lifetime #1 defined on the method body at 37:5...
    |
 LL |     fn dummy2(self: &Bar<T>) {}
    |     ^^^^^^^^^^^^^^^^^^^^^^^^^^^
-note: ...does not necessarily outlive the lifetime 'a as defined on the impl at 35:6
+note: ...does not necessarily outlive the lifetime `'a` as defined on the impl at 35:6
   --> $DIR/ufcs-explicit-self-bad.rs:35:6
    |
 LL | impl<'a, T> SomeTrait for &'a Bar<T> {
@@ -52,7 +52,7 @@ LL |     fn dummy2(self: &Bar<T>) {}
    |
    = note: expected type `&'a Bar<T>`
               found type `&Bar<T>`
-note: the lifetime 'a as defined on the impl at 35:6...
+note: the lifetime `'a` as defined on the impl at 35:6...
   --> $DIR/ufcs-explicit-self-bad.rs:35:6
    |
 LL | impl<'a, T> SomeTrait for &'a Bar<T> {
@@ -76,7 +76,7 @@ note: the anonymous lifetime #2 defined on the method body at 39:5...
    |
 LL |     fn dummy3(self: &&Bar<T>) {}
    |     ^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-note: ...does not necessarily outlive the lifetime 'a as defined on the impl at 35:6
+note: ...does not necessarily outlive the lifetime `'a` as defined on the impl at 35:6
   --> $DIR/ufcs-explicit-self-bad.rs:35:6
    |
 LL | impl<'a, T> SomeTrait for &'a Bar<T> {
@@ -90,7 +90,7 @@ LL |     fn dummy3(self: &&Bar<T>) {}
    |
    = note: expected type `&'a Bar<T>`
               found type `&Bar<T>`
-note: the lifetime 'a as defined on the impl at 35:6...
+note: the lifetime `'a` as defined on the impl at 35:6...
   --> $DIR/ufcs-explicit-self-bad.rs:35:6
    |
 LL | impl<'a, T> SomeTrait for &'a Bar<T> {
index 5ee8adaaf270fbb5228c45417da2cb14dced15df..39752f66b9d65fa6be3e9094f8df930aca8ebd29 100644 (file)
@@ -200,5 +200,5 @@ LL |     <u8 as Dr>::X::N;
 
 error: aborting due to 32 previous errors
 
-Some errors have detailed explanations: E0223, E0433, E0599.
+Some errors have detailed explanations: E0223, E0433, E0575, E0599.
 For more information about an error, try `rustc --explain E0223`.
diff --git a/src/test/ui/underscore-imports/hygiene-2.rs b/src/test/ui/underscore-imports/hygiene-2.rs
new file mode 100644 (file)
index 0000000..bea61ea
--- /dev/null
@@ -0,0 +1,33 @@
+// Make sure that underscore imports with different contexts can exist in the
+// same scope.
+
+// check-pass
+
+#![feature(decl_macro)]
+
+mod x {
+    pub use std::ops::Deref as _;
+}
+
+macro n() {
+    pub use crate::x::*;
+}
+
+#[macro_export]
+macro_rules! p {
+    () => { pub use crate::x::*; }
+}
+
+macro m($y:ident) {
+    mod $y {
+        crate::n!(); // Reexport of `Deref` should not be imported in `main`
+        crate::p!(); // Reexport of `Deref` should be imported into `main`
+    }
+}
+
+m!(y);
+
+fn main() {
+    use crate::y::*;
+    (&()).deref();
+}
diff --git a/src/test/ui/underscore-imports/hygiene.rs b/src/test/ui/underscore-imports/hygiene.rs
new file mode 100644 (file)
index 0000000..a254f6e
--- /dev/null
@@ -0,0 +1,40 @@
+// Make sure that underscore imports have the same hygiene considerations as
+// other imports.
+
+#![feature(decl_macro)]
+
+mod x {
+    pub use std::ops::Deref as _;
+}
+
+
+macro glob_import() {
+    pub use crate::x::*;
+}
+
+macro underscore_import() {
+    use std::ops::DerefMut as _;
+}
+
+mod y {
+    crate::glob_import!();
+    crate::underscore_import!();
+}
+
+macro create_module($y:ident) {
+    mod $y {
+        crate::glob_import!();
+        crate::underscore_import!();
+    }
+}
+
+create_module!(z);
+
+fn main() {
+    use crate::y::*;
+    use crate::z::*;
+    glob_import!();
+    underscore_import!();
+    (&()).deref();              //~ ERROR no method named `deref`
+    (&mut ()).deref_mut();      //~ ERROR no method named `deref_mut`
+}
diff --git a/src/test/ui/underscore-imports/hygiene.stderr b/src/test/ui/underscore-imports/hygiene.stderr
new file mode 100644 (file)
index 0000000..44cfc5c
--- /dev/null
@@ -0,0 +1,27 @@
+error[E0599]: no method named `deref` found for type `&()` in the current scope
+  --> $DIR/hygiene.rs:38:11
+   |
+LL |     (&()).deref();
+   |           ^^^^^ method not found in `&()`
+   |
+   = help: items from traits can only be used if the trait is in scope
+help: the following trait is implemented but not in scope; perhaps add a `use` for it:
+   |
+LL | use std::ops::Deref;
+   |
+
+error[E0599]: no method named `deref_mut` found for type `&mut ()` in the current scope
+  --> $DIR/hygiene.rs:39:15
+   |
+LL |     (&mut ()).deref_mut();
+   |               ^^^^^^^^^ method not found in `&mut ()`
+   |
+   = help: items from traits can only be used if the trait is in scope
+help: the following trait is implemented but not in scope; perhaps add a `use` for it:
+   |
+LL | use std::ops::DerefMut;
+   |
+
+error: aborting due to 2 previous errors
+
+For more information about this error, try `rustc --explain E0599`.
diff --git a/src/test/ui/underscore-imports/macro-expanded.rs b/src/test/ui/underscore-imports/macro-expanded.rs
new file mode 100644 (file)
index 0000000..43f527b
--- /dev/null
@@ -0,0 +1,45 @@
+// Check that macro expanded underscore imports behave as expected
+
+// check-pass
+
+#![feature(decl_macro, rustc_attrs)]
+
+mod x {
+    pub use std::ops::Not as _;
+}
+
+macro m() {
+    mod w {
+        mod y {
+            pub use std::ops::Deref as _;
+        }
+        use crate::x::*;
+        use self::y::*;
+        use std::ops::DerefMut as _;
+        fn f() {
+            false.not();
+            (&()).deref();
+            (&mut ()).deref_mut();
+        }
+    }
+}
+
+#[rustc_macro_transparency = "transparent"]
+macro n() {
+    mod z {
+        pub use std::ops::Deref as _;
+    }
+    use crate::x::*;
+    use crate::z::*;
+    use std::ops::DerefMut as _;
+    fn f() {
+        false.not();
+        (&()).deref();
+        (&mut ()).deref_mut();
+    }
+}
+
+m!();
+n!();
+
+fn main() {}
index 92e5ac282e4d63a427229d18bfbcae51c00923ad..d0475bf08c38d2bb8fe602ac25f40ae2e1af2b4b 100644 (file)
@@ -24,3 +24,4 @@ LL |     Box::new(items.iter())
 
 error: aborting due to previous error
 
+For more information about this error, try `rustc --explain E0495`.
diff --git a/src/test/ui/uninhabited/always-inhabited-union-ref.rs b/src/test/ui/uninhabited/always-inhabited-union-ref.rs
new file mode 100644 (file)
index 0000000..11eae2a
--- /dev/null
@@ -0,0 +1,32 @@
+// The precise semantics of inhabitedness with respect to unions and references is currently
+// undecided. This test file currently checks a conservative choice.
+
+#![feature(exhaustive_patterns)]
+#![feature(never_type)]
+
+#![allow(dead_code)]
+#![allow(unreachable_code)]
+
+pub union Foo {
+    foo: !,
+}
+
+fn uninhab_ref() -> &'static ! {
+    unimplemented!()
+}
+
+fn uninhab_union() -> Foo {
+    unimplemented!()
+}
+
+fn match_on_uninhab() {
+    match uninhab_ref() {
+        //~^ ERROR non-exhaustive patterns: type `&'static !` is non-empty
+    }
+
+    match uninhab_union() {
+        //~^ ERROR non-exhaustive patterns: type `Foo` is non-empty
+    }
+}
+
+fn main() {}
diff --git a/src/test/ui/uninhabited/always-inhabited-union-ref.stderr b/src/test/ui/uninhabited/always-inhabited-union-ref.stderr
new file mode 100644 (file)
index 0000000..792ab6f
--- /dev/null
@@ -0,0 +1,19 @@
+error[E0004]: non-exhaustive patterns: type `&'static !` is non-empty
+  --> $DIR/always-inhabited-union-ref.rs:23:11
+   |
+LL |     match uninhab_ref() {
+   |           ^^^^^^^^^^^^^
+   |
+   = help: ensure that all possible cases are being handled, possibly by adding wildcards or more match arms
+
+error[E0004]: non-exhaustive patterns: type `Foo` is non-empty
+  --> $DIR/always-inhabited-union-ref.rs:27:11
+   |
+LL |     match uninhab_union() {
+   |           ^^^^^^^^^^^^^^^
+   |
+   = help: ensure that all possible cases are being handled, possibly by adding wildcards or more match arms
+
+error: aborting due to 2 previous errors
+
+For more information about this error, try `rustc --explain E0004`.
index 29ff1dc37608930ae1b13adc60009646b38616ca..26e1be34ea75dfb651fa8c4e38c791bb842a617f 100644 (file)
@@ -12,6 +12,13 @@ LL | | }
 ...
 LL |       let Foo::D(_y) = x;
    |           ^^^^^^^^^^ pattern `A(_)` not covered
+   |
+   = note: `let` bindings require an "irrefutable pattern", like a `struct` or an `enum` with only one variant
+   = note: for more information, visit https://doc.rust-lang.org/book/ch18-02-refutability.html
+help: you might want to use `if let` to ignore the variant that isn't matched
+   |
+LL |     if let Foo::D(_y) = x { /* */ }
+   |     ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
 
 error: aborting due to previous error
 
index 25519ab2d6a7d1cfc9130988a68bd7df10b6be6e..a49344e45cec652775d6737f3b8c178d9477e0fa 100644 (file)
@@ -51,6 +51,13 @@ error[E0005]: refutable pattern in local binding: `Err(_)` not covered
    |
 LL |     let Ok(x) = x;
    |         ^^^^^ pattern `Err(_)` not covered
+   |
+   = note: `let` bindings require an "irrefutable pattern", like a `struct` or an `enum` with only one variant
+   = note: for more information, visit https://doc.rust-lang.org/book/ch18-02-refutability.html
+help: you might want to use `if let` to ignore the variant that isn't matched
+   |
+LL |     if let Ok(x) = x { /* */ }
+   |
 
 error: aborting due to 7 previous errors
 
index 89140030683858aa4217271b860c5dea39bace8e..c9fec1d21d152026214290203c561e73e648843b 100644 (file)
@@ -1,34 +1,37 @@
 error[E0277]: the size for values of type `T` cannot be known at compilation time
   --> $DIR/union-sized-field.rs:4:5
    |
+LL | union Foo<T: ?Sized> {
+   |           -- help: consider further restricting this bound: `T: std::marker::Sized +`
 LL |     value: T,
    |     ^^^^^^^^ doesn't have a size known at compile-time
    |
    = help: the trait `std::marker::Sized` is not implemented for `T`
    = note: to learn more, visit <https://doc.rust-lang.org/book/ch19-04-advanced-types.html#dynamically-sized-types-and-the-sized-trait>
-   = help: consider adding a `where T: std::marker::Sized` bound
    = note: no field of a union may have a dynamically sized type
 
 error[E0277]: the size for values of type `T` cannot be known at compilation time
   --> $DIR/union-sized-field.rs:9:5
    |
+LL | struct Foo2<T: ?Sized> {
+   |             -- help: consider further restricting this bound: `T: std::marker::Sized +`
 LL |     value: T,
    |     ^^^^^^^^ doesn't have a size known at compile-time
    |
    = help: the trait `std::marker::Sized` is not implemented for `T`
    = note: to learn more, visit <https://doc.rust-lang.org/book/ch19-04-advanced-types.html#dynamically-sized-types-and-the-sized-trait>
-   = help: consider adding a `where T: std::marker::Sized` bound
    = note: only the last field of a struct may have a dynamically sized type
 
 error[E0277]: the size for values of type `T` cannot be known at compilation time
   --> $DIR/union-sized-field.rs:15:11
    |
+LL | enum Foo3<T: ?Sized> {
+   |           -- help: consider further restricting this bound: `T: std::marker::Sized +`
 LL |     Value(T),
    |           ^ doesn't have a size known at compile-time
    |
    = help: the trait `std::marker::Sized` is not implemented for `T`
    = note: to learn more, visit <https://doc.rust-lang.org/book/ch19-04-advanced-types.html#dynamically-sized-types-and-the-sized-trait>
-   = help: consider adding a `where T: std::marker::Sized` bound
    = note: no field of an enum variant may have a dynamically sized type
 
 error: aborting due to 3 previous errors
diff --git a/src/test/ui/unreachable/auxiliary/unreachable_variant.rs b/src/test/ui/unreachable/auxiliary/unreachable_variant.rs
deleted file mode 100644 (file)
index 4e94a4b..0000000
+++ /dev/null
@@ -1,5 +0,0 @@
-mod super_sekrit {
-    pub enum sooper_sekrit {
-        quux, baz
-    }
-}
diff --git a/src/test/ui/unreachable/unreachable-arm.rs b/src/test/ui/unreachable/unreachable-arm.rs
deleted file mode 100644 (file)
index 64c3896..0000000
+++ /dev/null
@@ -1,14 +0,0 @@
-#![feature(box_patterns)]
-#![feature(box_syntax)]
-#![allow(dead_code)]
-#![deny(unreachable_patterns)]
-
-enum Foo { A(Box<Foo>, isize), B(usize), }
-
-fn main() {
-    match Foo::B(1) {
-        Foo::B(_) | Foo::A(box _, 1) => { }
-        Foo::A(_, 1) => { } //~ ERROR unreachable pattern
-        _ => { }
-    }
-}
diff --git a/src/test/ui/unreachable/unreachable-arm.stderr b/src/test/ui/unreachable/unreachable-arm.stderr
deleted file mode 100644 (file)
index 8e65745..0000000
+++ /dev/null
@@ -1,14 +0,0 @@
-error: unreachable pattern
-  --> $DIR/unreachable-arm.rs:11:9
-   |
-LL |         Foo::A(_, 1) => { }
-   |         ^^^^^^^^^^^^
-   |
-note: lint level defined here
-  --> $DIR/unreachable-arm.rs:4:9
-   |
-LL | #![deny(unreachable_patterns)]
-   |         ^^^^^^^^^^^^^^^^^^^^
-
-error: aborting due to previous error
-
diff --git a/src/test/ui/unreachable/unreachable-code.rs b/src/test/ui/unreachable/unreachable-code.rs
deleted file mode 100644 (file)
index ad0dc8a..0000000
+++ /dev/null
@@ -1,8 +0,0 @@
-#![deny(unreachable_code)]
-#![allow(unused_variables)]
-
-fn main() {
-  loop{}
-
-  let a = 3; //~ ERROR: unreachable statement
-}
diff --git a/src/test/ui/unreachable/unreachable-code.stderr b/src/test/ui/unreachable/unreachable-code.stderr
deleted file mode 100644 (file)
index 184440d..0000000
+++ /dev/null
@@ -1,17 +0,0 @@
-error: unreachable statement
-  --> $DIR/unreachable-code.rs:7:3
-   |
-LL |   loop{}
-   |   ------ any code following this expression is unreachable
-LL | 
-LL |   let a = 3;
-   |   ^^^^^^^^^^ unreachable statement
-   |
-note: lint level defined here
-  --> $DIR/unreachable-code.rs:1:9
-   |
-LL | #![deny(unreachable_code)]
-   |         ^^^^^^^^^^^^^^^^
-
-error: aborting due to previous error
-
diff --git a/src/test/ui/unreachable/unreachable-in-call.rs b/src/test/ui/unreachable/unreachable-in-call.rs
deleted file mode 100644 (file)
index dd94e79..0000000
+++ /dev/null
@@ -1,22 +0,0 @@
-#![allow(dead_code)]
-#![deny(unreachable_code)]
-
-fn diverge() -> ! { panic!() }
-
-fn get_u8() -> u8 {
-    1
-}
-fn call(_: u8, _: u8) {
-
-}
-fn diverge_first() {
-    call(diverge(),
-         get_u8()); //~ ERROR unreachable expression
-}
-fn diverge_second() {
-    call( //~ ERROR unreachable call
-        get_u8(),
-        diverge());
-}
-
-fn main() {}
diff --git a/src/test/ui/unreachable/unreachable-in-call.stderr b/src/test/ui/unreachable/unreachable-in-call.stderr
deleted file mode 100644 (file)
index 1d081d1..0000000
+++ /dev/null
@@ -1,25 +0,0 @@
-error: unreachable expression
-  --> $DIR/unreachable-in-call.rs:14:10
-   |
-LL |     call(diverge(),
-   |          --------- any code following this expression is unreachable
-LL |          get_u8());
-   |          ^^^^^^^^ unreachable expression
-   |
-note: lint level defined here
-  --> $DIR/unreachable-in-call.rs:2:9
-   |
-LL | #![deny(unreachable_code)]
-   |         ^^^^^^^^^^^^^^^^
-
-error: unreachable call
-  --> $DIR/unreachable-in-call.rs:17:5
-   |
-LL |     call(
-   |     ^^^^ unreachable call
-LL |         get_u8(),
-LL |         diverge());
-   |         --------- any code following this expression is unreachable
-
-error: aborting due to 2 previous errors
-
diff --git a/src/test/ui/unreachable/unreachable-loop-patterns.rs b/src/test/ui/unreachable/unreachable-loop-patterns.rs
deleted file mode 100644 (file)
index 56ab1a2..0000000
+++ /dev/null
@@ -1,23 +0,0 @@
-// compile-fail
-
-#![feature(never_type)]
-#![feature(exhaustive_patterns)]
-
-#![allow(unreachable_code)]
-#![deny(unreachable_patterns)]
-
-enum Void {}
-
-impl Iterator for Void {
-    type Item = Void;
-
-    fn next(&mut self) -> Option<Void> {
-        None
-    }
-}
-
-fn main() {
-    for _ in unimplemented!() as Void {}
-    //~^ ERROR unreachable pattern
-    //~^^ ERROR unreachable pattern
-}
diff --git a/src/test/ui/unreachable/unreachable-loop-patterns.stderr b/src/test/ui/unreachable/unreachable-loop-patterns.stderr
deleted file mode 100644 (file)
index 254d117..0000000
+++ /dev/null
@@ -1,20 +0,0 @@
-error: unreachable pattern
-  --> $DIR/unreachable-loop-patterns.rs:20:9
-   |
-LL |     for _ in unimplemented!() as Void {}
-   |         ^
-   |
-note: lint level defined here
-  --> $DIR/unreachable-loop-patterns.rs:7:9
-   |
-LL | #![deny(unreachable_patterns)]
-   |         ^^^^^^^^^^^^^^^^^^^^
-
-error: unreachable pattern
-  --> $DIR/unreachable-loop-patterns.rs:20:14
-   |
-LL |     for _ in unimplemented!() as Void {}
-   |              ^^^^^^^^^^^^^^^^^^^^^^^^
-
-error: aborting due to 2 previous errors
-
diff --git a/src/test/ui/unreachable/unreachable-try-pattern.rs b/src/test/ui/unreachable/unreachable-try-pattern.rs
deleted file mode 100644 (file)
index cbc5fce..0000000
+++ /dev/null
@@ -1,41 +0,0 @@
-// build-pass (FIXME(62277): could be check-pass?)
-#![feature(never_type, exhaustive_patterns)]
-#![warn(unreachable_code)]
-#![warn(unreachable_patterns)]
-
-enum Void {}
-
-impl From<Void> for i32 {
-    fn from(v: Void) -> i32 {
-        match v {}
-    }
-}
-
-fn bar(x: Result<!, i32>) -> Result<u32, i32> {
-    x?
-}
-
-fn foo(x: Result<!, i32>) -> Result<u32, i32> {
-    let y = (match x { Ok(n) => Ok(n as u32), Err(e) => Err(e) })?;
-    //~^ WARN unreachable pattern
-    //~| WARN unreachable expression
-    Ok(y)
-}
-
-fn qux(x: Result<u32, Void>) -> Result<u32, i32> {
-    Ok(x?)
-}
-
-fn vom(x: Result<u32, Void>) -> Result<u32, i32> {
-    let y = (match x { Ok(n) => Ok(n), Err(e) => Err(e) })?;
-    //~^ WARN unreachable pattern
-    Ok(y)
-}
-
-
-fn main() {
-    let _ = bar(Err(123));
-    let _ = foo(Err(123));
-    let _ = qux(Ok(123));
-    let _ = vom(Ok(123));
-}
diff --git a/src/test/ui/unreachable/unreachable-try-pattern.stderr b/src/test/ui/unreachable/unreachable-try-pattern.stderr
deleted file mode 100644 (file)
index 7070384..0000000
+++ /dev/null
@@ -1,33 +0,0 @@
-warning: unreachable expression
-  --> $DIR/unreachable-try-pattern.rs:19:36
-   |
-LL |     let y = (match x { Ok(n) => Ok(n as u32), Err(e) => Err(e) })?;
-   |                                    -^^^^^^^
-   |                                    |
-   |                                    unreachable expression
-   |                                    any code following this expression is unreachable
-   |
-note: lint level defined here
-  --> $DIR/unreachable-try-pattern.rs:3:9
-   |
-LL | #![warn(unreachable_code)]
-   |         ^^^^^^^^^^^^^^^^
-
-warning: unreachable pattern
-  --> $DIR/unreachable-try-pattern.rs:19:24
-   |
-LL |     let y = (match x { Ok(n) => Ok(n as u32), Err(e) => Err(e) })?;
-   |                        ^^^^^
-   |
-note: lint level defined here
-  --> $DIR/unreachable-try-pattern.rs:4:9
-   |
-LL | #![warn(unreachable_patterns)]
-   |         ^^^^^^^^^^^^^^^^^^^^
-
-warning: unreachable pattern
-  --> $DIR/unreachable-try-pattern.rs:30:40
-   |
-LL |     let y = (match x { Ok(n) => Ok(n), Err(e) => Err(e) })?;
-   |                                        ^^^^^^
-
diff --git a/src/test/ui/unreachable/unreachable-variant.rs b/src/test/ui/unreachable/unreachable-variant.rs
deleted file mode 100644 (file)
index 008c2d4..0000000
+++ /dev/null
@@ -1,7 +0,0 @@
-// aux-build:unreachable_variant.rs
-
-extern crate unreachable_variant as other;
-
-fn main() {
-    let _x = other::super_sekrit::sooper_sekrit::baz; //~ ERROR is private
-}
diff --git a/src/test/ui/unreachable/unreachable-variant.stderr b/src/test/ui/unreachable/unreachable-variant.stderr
deleted file mode 100644 (file)
index 276c77f..0000000
+++ /dev/null
@@ -1,9 +0,0 @@
-error[E0603]: module `super_sekrit` is private
-  --> $DIR/unreachable-variant.rs:6:21
-   |
-LL |     let _x = other::super_sekrit::sooper_sekrit::baz;
-   |                     ^^^^^^^^^^^^
-
-error: aborting due to previous error
-
-For more information about this error, try `rustc --explain E0603`.
diff --git a/src/test/ui/unreachable/unwarned-match-on-never.rs b/src/test/ui/unreachable/unwarned-match-on-never.rs
deleted file mode 100644 (file)
index 71f8fe3..0000000
+++ /dev/null
@@ -1,24 +0,0 @@
-#![deny(unreachable_code)]
-#![allow(dead_code)]
-
-#![feature(never_type)]
-
-fn foo(x: !) -> bool {
-    // Explicit matches on the never type are unwarned.
-    match x {}
-    // But matches in unreachable code are warned.
-    match x {} //~ ERROR unreachable expression
-}
-
-fn bar() {
-    match (return) {
-        () => () //~ ERROR unreachable arm
-    }
-}
-
-fn main() {
-    return;
-    match () { //~ ERROR unreachable expression
-        () => (),
-    }
-}
diff --git a/src/test/ui/unreachable/unwarned-match-on-never.stderr b/src/test/ui/unreachable/unwarned-match-on-never.stderr
deleted file mode 100644 (file)
index 6b2fb4a..0000000
+++ /dev/null
@@ -1,35 +0,0 @@
-error: unreachable expression
-  --> $DIR/unwarned-match-on-never.rs:10:5
-   |
-LL |     match x {}
-   |           - any code following this expression is unreachable
-LL |     // But matches in unreachable code are warned.
-LL |     match x {}
-   |     ^^^^^^^^^^ unreachable expression
-   |
-note: lint level defined here
-  --> $DIR/unwarned-match-on-never.rs:1:9
-   |
-LL | #![deny(unreachable_code)]
-   |         ^^^^^^^^^^^^^^^^
-
-error: unreachable arm
-  --> $DIR/unwarned-match-on-never.rs:15:15
-   |
-LL |     match (return) {
-   |           -------- any code following this expression is unreachable
-LL |         () => ()
-   |               ^^ unreachable arm
-
-error: unreachable expression
-  --> $DIR/unwarned-match-on-never.rs:21:5
-   |
-LL |       return;
-   |       ------ any code following this expression is unreachable
-LL | /     match () {
-LL | |         () => (),
-LL | |     }
-   | |_____^ unreachable expression
-
-error: aborting due to 3 previous errors
-
index e56176690a113d9898e041cbff0885723f44ebc7..bd97b0203b5104fcde519a193e7bef21440efd9e 100644 (file)
@@ -4,11 +4,12 @@ error[E0277]: the size for values of type `T` cannot be known at compilation tim
 LL | fn bar<T: Sized>() { }
    |    --- - required by this bound in `bar`
 LL | fn foo<T: ?Sized>() { bar::<T>() }
-   |                             ^ doesn't have a size known at compile-time
+   |        --                   ^ doesn't have a size known at compile-time
+   |        |
+   |        help: consider further restricting this bound: `T: std::marker::Sized +`
    |
    = help: the trait `std::marker::Sized` is not implemented for `T`
    = note: to learn more, visit <https://doc.rust-lang.org/book/ch19-04-advanced-types.html#dynamically-sized-types-and-the-sized-trait>
-   = help: consider adding a `where T: std::marker::Sized` bound
 
 error: aborting due to previous error
 
index dff934834ef2c52002109ac172f54a37594216d9..341d3e4cc2df2926248e5ea81459b4b122e9c6a8 100644 (file)
@@ -5,11 +5,12 @@ LL | enum Foo<U> { FooSome(U), FooNone }
    | ----------- required by `Foo`
 LL | fn foo1<T>() { not_sized::<Foo<T>>() } // Hunky dory.
 LL | fn foo2<T: ?Sized>() { not_sized::<Foo<T>>() }
-   |                                    ^^^^^^ doesn't have a size known at compile-time
+   |         --                         ^^^^^^ doesn't have a size known at compile-time
+   |         |
+   |         help: consider further restricting this bound: `T: std::marker::Sized +`
    |
    = help: the trait `std::marker::Sized` is not implemented for `T`
    = note: to learn more, visit <https://doc.rust-lang.org/book/ch19-04-advanced-types.html#dynamically-sized-types-and-the-sized-trait>
-   = help: consider adding a `where T: std::marker::Sized` bound
 
 error: aborting due to previous error
 
index cdd5747d86b0f275b873a6d4072c5b58c8228bc2..e85b6d662f9d56b8e3c8cc5976b744818b2f928c 100644 (file)
@@ -1,45 +1,53 @@
 error[E0277]: the size for values of type `W` cannot be known at compilation time
   --> $DIR/unsized-enum2.rs:23:8
    |
+LL | enum E<W: ?Sized, X: ?Sized, Y: ?Sized, Z: ?Sized> {
+   |        -- help: consider further restricting this bound: `W: std::marker::Sized +`
+LL |     // parameter
 LL |     VA(W),
    |        ^ doesn't have a size known at compile-time
    |
    = help: the trait `std::marker::Sized` is not implemented for `W`
    = note: to learn more, visit <https://doc.rust-lang.org/book/ch19-04-advanced-types.html#dynamically-sized-types-and-the-sized-trait>
-   = help: consider adding a `where W: std::marker::Sized` bound
    = note: no field of an enum variant may have a dynamically sized type
 
 error[E0277]: the size for values of type `X` cannot be known at compilation time
   --> $DIR/unsized-enum2.rs:25:8
    |
+LL | enum E<W: ?Sized, X: ?Sized, Y: ?Sized, Z: ?Sized> {
+   |                   -- help: consider further restricting this bound: `X: std::marker::Sized +`
+...
 LL |     VB{x: X},
    |        ^^^^ doesn't have a size known at compile-time
    |
    = help: the trait `std::marker::Sized` is not implemented for `X`
    = note: to learn more, visit <https://doc.rust-lang.org/book/ch19-04-advanced-types.html#dynamically-sized-types-and-the-sized-trait>
-   = help: consider adding a `where X: std::marker::Sized` bound
    = note: no field of an enum variant may have a dynamically sized type
 
 error[E0277]: the size for values of type `Y` cannot be known at compilation time
   --> $DIR/unsized-enum2.rs:27:15
    |
+LL | enum E<W: ?Sized, X: ?Sized, Y: ?Sized, Z: ?Sized> {
+   |                              -- help: consider further restricting this bound: `Y: std::marker::Sized +`
+...
 LL |     VC(isize, Y),
    |               ^ doesn't have a size known at compile-time
    |
    = help: the trait `std::marker::Sized` is not implemented for `Y`
    = note: to learn more, visit <https://doc.rust-lang.org/book/ch19-04-advanced-types.html#dynamically-sized-types-and-the-sized-trait>
-   = help: consider adding a `where Y: std::marker::Sized` bound
    = note: no field of an enum variant may have a dynamically sized type
 
 error[E0277]: the size for values of type `Z` cannot be known at compilation time
   --> $DIR/unsized-enum2.rs:29:18
    |
+LL | enum E<W: ?Sized, X: ?Sized, Y: ?Sized, Z: ?Sized> {
+   |                                         -- help: consider further restricting this bound: `Z: std::marker::Sized +`
+...
 LL |     VD{u: isize, x: Z},
    |                  ^^^^ doesn't have a size known at compile-time
    |
    = help: the trait `std::marker::Sized` is not implemented for `Z`
    = note: to learn more, visit <https://doc.rust-lang.org/book/ch19-04-advanced-types.html#dynamically-sized-types-and-the-sized-trait>
-   = help: consider adding a `where Z: std::marker::Sized` bound
    = note: no field of an enum variant may have a dynamically sized type
 
 error[E0277]: the size for values of type `[u8]` cannot be known at compilation time
index 1a726bb089f6e22f5ea0437801f423048b0b8832..280b8fd43cab0ecc07fa18e55b8c44d3719fc6d1 100644 (file)
@@ -5,11 +5,12 @@ LL | struct S5<Y>(Y);
    | ---------------- required by `S5`
 LL | 
 LL | impl<X: ?Sized> S5<X> {
-   |                 ^^^^^ doesn't have a size known at compile-time
+   |      --         ^^^^^ doesn't have a size known at compile-time
+   |      |
+   |      help: consider further restricting this bound: `X: std::marker::Sized +`
    |
    = help: the trait `std::marker::Sized` is not implemented for `X`
    = note: to learn more, visit <https://doc.rust-lang.org/book/ch19-04-advanced-types.html#dynamically-sized-types-and-the-sized-trait>
-   = help: consider adding a `where X: std::marker::Sized` bound
 
 error: aborting due to previous error
 
index 0d4776ff6c25b6139d1d1734007a5344c488f2a5..2894d5d56710dd3384ad1b7ad6732863dc3e5590 100644 (file)
@@ -5,11 +5,12 @@ LL | struct Foo<T> { data: T }
    | ------------- required by `Foo`
 LL | fn foo1<T>() { not_sized::<Foo<T>>() } // Hunky dory.
 LL | fn foo2<T: ?Sized>() { not_sized::<Foo<T>>() }
-   |                                    ^^^^^^ doesn't have a size known at compile-time
+   |         --                         ^^^^^^ doesn't have a size known at compile-time
+   |         |
+   |         help: consider further restricting this bound: `T: std::marker::Sized +`
    |
    = help: the trait `std::marker::Sized` is not implemented for `T`
    = note: to learn more, visit <https://doc.rust-lang.org/book/ch19-04-advanced-types.html#dynamically-sized-types-and-the-sized-trait>
-   = help: consider adding a `where T: std::marker::Sized` bound
 
 error[E0277]: the size for values of type `T` cannot be known at compilation time
   --> $DIR/unsized-struct.rs:13:24
@@ -18,11 +19,12 @@ LL | fn is_sized<T:Sized>() { }
    |    -------- - required by this bound in `is_sized`
 ...
 LL | fn bar2<T: ?Sized>() { is_sized::<Bar<T>>() }
-   |                        ^^^^^^^^^^^^^^^^^^ doesn't have a size known at compile-time
+   |         --             ^^^^^^^^^^^^^^^^^^ doesn't have a size known at compile-time
+   |         |
+   |         help: consider further restricting this bound: `T: std::marker::Sized +`
    |
    = help: within `Bar<T>`, the trait `std::marker::Sized` is not implemented for `T`
    = note: to learn more, visit <https://doc.rust-lang.org/book/ch19-04-advanced-types.html#dynamically-sized-types-and-the-sized-trait>
-   = help: consider adding a `where T: std::marker::Sized` bound
    = note: required because it appears within the type `Bar<T>`
 
 error: aborting due to 2 previous errors
index f399f8ded10856510e93c47421f2123907ec4320..ba1550439c0d9b51bae147932d593f65fbf956d4 100644 (file)
@@ -5,11 +5,12 @@ LL | struct S5<Y>(Y);
    | ---------------- required by `S5`
 LL | 
 LL | impl<X: ?Sized> T3<X> for S5<X> {
-   |                 ^^^^^ doesn't have a size known at compile-time
+   |      --         ^^^^^ doesn't have a size known at compile-time
+   |      |
+   |      help: consider further restricting this bound: `X: std::marker::Sized +`
    |
    = help: the trait `std::marker::Sized` is not implemented for `X`
    = note: to learn more, visit <https://doc.rust-lang.org/book/ch19-04-advanced-types.html#dynamically-sized-types-and-the-sized-trait>
-   = help: consider adding a `where X: std::marker::Sized` bound
 
 error: aborting due to previous error
 
index ee0d5ccccfe11b296204457ddd5c6868b94f82e4..41371d63f9e53eaa0215188b7094934d07c2ed97 100644 (file)
@@ -2,11 +2,12 @@ error[E0277]: the size for values of type `X` cannot be known at compilation tim
   --> $DIR/unsized-trait-impl-trait-arg.rs:8:17
    |
 LL | impl<X: ?Sized> T2<X> for S4<X> {
-   |                 ^^^^^ doesn't have a size known at compile-time
+   |      --         ^^^^^ doesn't have a size known at compile-time
+   |      |
+   |      help: consider further restricting this bound: `X: std::marker::Sized +`
    |
    = help: the trait `std::marker::Sized` is not implemented for `X`
    = note: to learn more, visit <https://doc.rust-lang.org/book/ch19-04-advanced-types.html#dynamically-sized-types-and-the-sized-trait>
-   = help: consider adding a `where X: std::marker::Sized` bound
 
 error: aborting due to previous error
 
index c821a08f6b585d3519c0c496bae3b578227e7222..232296ad09126b3e7194f6d848e8c01a56fbae4b 100644 (file)
@@ -1,6 +1,8 @@
 error[E0277]: the size for values of type `X` cannot be known at compilation time
   --> $DIR/unsized3.rs:7:13
    |
+LL | fn f1<X: ?Sized>(x: &X) {
+   |       -- help: consider further restricting this bound: `X: std::marker::Sized +`
 LL |     f2::<X>(x);
    |             ^ doesn't have a size known at compile-time
 ...
@@ -9,11 +11,12 @@ LL | fn f2<X>(x: &X) {
    |
    = help: the trait `std::marker::Sized` is not implemented for `X`
    = note: to learn more, visit <https://doc.rust-lang.org/book/ch19-04-advanced-types.html#dynamically-sized-types-and-the-sized-trait>
-   = help: consider adding a `where X: std::marker::Sized` bound
 
 error[E0277]: the size for values of type `X` cannot be known at compilation time
   --> $DIR/unsized3.rs:18:13
    |
+LL | fn f3<X: ?Sized + T>(x: &X) {
+   |       -- help: consider further restricting this bound: `X: std::marker::Sized +`
 LL |     f4::<X>(x);
    |             ^ doesn't have a size known at compile-time
 ...
@@ -22,7 +25,6 @@ LL | fn f4<X: T>(x: &X) {
    |
    = help: the trait `std::marker::Sized` is not implemented for `X`
    = note: to learn more, visit <https://doc.rust-lang.org/book/ch19-04-advanced-types.html#dynamically-sized-types-and-the-sized-trait>
-   = help: consider adding a `where X: std::marker::Sized` bound
 
 error[E0277]: the size for values of type `X` cannot be known at compilation time
   --> $DIR/unsized3.rs:33:8
@@ -30,35 +32,38 @@ error[E0277]: the size for values of type `X` cannot be known at compilation tim
 LL | fn f5<Y>(x: &Y) {}
    |    -- - required by this bound in `f5`
 ...
+LL | fn f8<X: ?Sized>(x1: &S<X>, x2: &S<X>) {
+   |       -- help: consider further restricting this bound: `X: std::marker::Sized +`
 LL |     f5(x1);
    |        ^^ doesn't have a size known at compile-time
    |
    = help: within `S<X>`, the trait `std::marker::Sized` is not implemented for `X`
    = note: to learn more, visit <https://doc.rust-lang.org/book/ch19-04-advanced-types.html#dynamically-sized-types-and-the-sized-trait>
-   = help: consider adding a `where X: std::marker::Sized` bound
    = note: required because it appears within the type `S<X>`
 
 error[E0277]: the size for values of type `X` cannot be known at compilation time
   --> $DIR/unsized3.rs:40:8
    |
+LL | fn f9<X: ?Sized>(x1: Box<S<X>>) {
+   |       -- help: consider further restricting this bound: `X: std::marker::Sized +`
 LL |     f5(&(*x1, 34));
    |        ^^^^^^^^^^ doesn't have a size known at compile-time
    |
    = help: within `S<X>`, the trait `std::marker::Sized` is not implemented for `X`
    = note: to learn more, visit <https://doc.rust-lang.org/book/ch19-04-advanced-types.html#dynamically-sized-types-and-the-sized-trait>
-   = help: consider adding a `where X: std::marker::Sized` bound
    = note: required because it appears within the type `S<X>`
    = note: only the last element of a tuple may have a dynamically sized type
 
 error[E0277]: the size for values of type `X` cannot be known at compilation time
   --> $DIR/unsized3.rs:45:9
    |
+LL | fn f10<X: ?Sized>(x1: Box<S<X>>) {
+   |        -- help: consider further restricting this bound: `X: std::marker::Sized +`
 LL |     f5(&(32, *x1));
    |         ^^^^^^^^^ doesn't have a size known at compile-time
    |
    = help: within `({integer}, S<X>)`, the trait `std::marker::Sized` is not implemented for `X`
    = note: to learn more, visit <https://doc.rust-lang.org/book/ch19-04-advanced-types.html#dynamically-sized-types-and-the-sized-trait>
-   = help: consider adding a `where X: std::marker::Sized` bound
    = note: required because it appears within the type `S<X>`
    = note: required because it appears within the type `({integer}, S<X>)`
    = note: tuples must have a statically known size to be initialized
@@ -69,12 +74,13 @@ error[E0277]: the size for values of type `X` cannot be known at compilation tim
 LL | fn f5<Y>(x: &Y) {}
    |    -- - required by this bound in `f5`
 ...
+LL | fn f10<X: ?Sized>(x1: Box<S<X>>) {
+   |        -- help: consider further restricting this bound: `X: std::marker::Sized +`
 LL |     f5(&(32, *x1));
    |        ^^^^^^^^^^ doesn't have a size known at compile-time
    |
    = help: within `({integer}, S<X>)`, the trait `std::marker::Sized` is not implemented for `X`
    = note: to learn more, visit <https://doc.rust-lang.org/book/ch19-04-advanced-types.html#dynamically-sized-types-and-the-sized-trait>
-   = help: consider adding a `where X: std::marker::Sized` bound
    = note: required because it appears within the type `S<X>`
    = note: required because it appears within the type `({integer}, S<X>)`
 
index 6dce9a046066fbdea9edbca9347d81b20b9a9bfa..bfd3f4aa691eb7deae80216873bc2034816db17d 100644 (file)
@@ -1,23 +1,26 @@
 error[E0277]: the size for values of type `X` cannot be known at compilation time
   --> $DIR/unsized5.rs:4:5
    |
+LL | struct S1<X: ?Sized> {
+   |           -- help: consider further restricting this bound: `X: std::marker::Sized +`
 LL |     f1: X,
    |     ^^^^^ doesn't have a size known at compile-time
    |
    = help: the trait `std::marker::Sized` is not implemented for `X`
    = note: to learn more, visit <https://doc.rust-lang.org/book/ch19-04-advanced-types.html#dynamically-sized-types-and-the-sized-trait>
-   = help: consider adding a `where X: std::marker::Sized` bound
    = note: only the last field of a struct may have a dynamically sized type
 
 error[E0277]: the size for values of type `X` cannot be known at compilation time
   --> $DIR/unsized5.rs:10:5
    |
+LL | struct S2<X: ?Sized> {
+   |           -- help: consider further restricting this bound: `X: std::marker::Sized +`
+LL |     f: isize,
 LL |     g: X,
    |     ^^^^ doesn't have a size known at compile-time
    |
    = help: the trait `std::marker::Sized` is not implemented for `X`
    = note: to learn more, visit <https://doc.rust-lang.org/book/ch19-04-advanced-types.html#dynamically-sized-types-and-the-sized-trait>
-   = help: consider adding a `where X: std::marker::Sized` bound
    = note: only the last field of a struct may have a dynamically sized type
 
 error[E0277]: the size for values of type `str` cannot be known at compilation time
@@ -43,23 +46,25 @@ LL |     f: [u8],
 error[E0277]: the size for values of type `X` cannot be known at compilation time
   --> $DIR/unsized5.rs:25:8
    |
+LL | enum E<X: ?Sized> {
+   |        -- help: consider further restricting this bound: `X: std::marker::Sized +`
 LL |     V1(X, isize),
    |        ^ doesn't have a size known at compile-time
    |
    = help: the trait `std::marker::Sized` is not implemented for `X`
    = note: to learn more, visit <https://doc.rust-lang.org/book/ch19-04-advanced-types.html#dynamically-sized-types-and-the-sized-trait>
-   = help: consider adding a `where X: std::marker::Sized` bound
    = note: no field of an enum variant may have a dynamically sized type
 
 error[E0277]: the size for values of type `X` cannot be known at compilation time
   --> $DIR/unsized5.rs:29:8
    |
+LL | enum F<X: ?Sized> {
+   |        -- help: consider further restricting this bound: `X: std::marker::Sized +`
 LL |     V2{f1: X, f: isize},
    |        ^^^^^ doesn't have a size known at compile-time
    |
    = help: the trait `std::marker::Sized` is not implemented for `X`
    = note: to learn more, visit <https://doc.rust-lang.org/book/ch19-04-advanced-types.html#dynamically-sized-types-and-the-sized-trait>
-   = help: consider adding a `where X: std::marker::Sized` bound
    = note: no field of an enum variant may have a dynamically sized type
 
 error: aborting due to 6 previous errors
index a6a26573e7c596789835162adc531316988d0276..95acd987a5a272f11f9d92f1a8a449a8e861f06a 100644 (file)
 error[E0277]: the size for values of type `Y` cannot be known at compilation time
   --> $DIR/unsized6.rs:9:9
    |
+LL | fn f1<W: ?Sized, X: ?Sized, Y: ?Sized, Z: ?Sized>(x: &X) {
+   |                             -- help: consider further restricting this bound: `Y: std::marker::Sized +`
+...
 LL |     let y: Y;
    |         ^ doesn't have a size known at compile-time
    |
    = help: the trait `std::marker::Sized` is not implemented for `Y`
    = note: to learn more, visit <https://doc.rust-lang.org/book/ch19-04-advanced-types.html#dynamically-sized-types-and-the-sized-trait>
-   = help: consider adding a `where Y: std::marker::Sized` bound
    = note: all local variables must have a statically known size
    = help: unsized locals are gated as an unstable feature
 
 error[E0277]: the size for values of type `X` cannot be known at compilation time
   --> $DIR/unsized6.rs:7:12
    |
+LL | fn f1<W: ?Sized, X: ?Sized, Y: ?Sized, Z: ?Sized>(x: &X) {
+   |                  -- help: consider further restricting this bound: `X: std::marker::Sized +`
+LL |     let _: W; // <-- this is OK, no bindings created, no initializer.
 LL |     let _: (isize, (X, isize));
    |            ^^^^^^^^^^^^^^^^^^^ doesn't have a size known at compile-time
    |
    = help: the trait `std::marker::Sized` is not implemented for `X`
    = note: to learn more, visit <https://doc.rust-lang.org/book/ch19-04-advanced-types.html#dynamically-sized-types-and-the-sized-trait>
-   = help: consider adding a `where X: std::marker::Sized` bound
    = note: only the last element of a tuple may have a dynamically sized type
 
 error[E0277]: the size for values of type `Z` cannot be known at compilation time
   --> $DIR/unsized6.rs:11:12
    |
+LL | fn f1<W: ?Sized, X: ?Sized, Y: ?Sized, Z: ?Sized>(x: &X) {
+   |                                        -- help: consider further restricting this bound: `Z: std::marker::Sized +`
+...
 LL |     let y: (isize, (Z, usize));
    |            ^^^^^^^^^^^^^^^^^^^ doesn't have a size known at compile-time
    |
    = help: the trait `std::marker::Sized` is not implemented for `Z`
    = note: to learn more, visit <https://doc.rust-lang.org/book/ch19-04-advanced-types.html#dynamically-sized-types-and-the-sized-trait>
-   = help: consider adding a `where Z: std::marker::Sized` bound
    = note: only the last element of a tuple may have a dynamically sized type
 
 error[E0277]: the size for values of type `X` cannot be known at compilation time
   --> $DIR/unsized6.rs:15:9
    |
+LL | fn f2<X: ?Sized, Y: ?Sized>(x: &X) {
+   |       -- help: consider further restricting this bound: `X: std::marker::Sized +`
 LL |     let y: X;
    |         ^ doesn't have a size known at compile-time
    |
    = help: the trait `std::marker::Sized` is not implemented for `X`
    = note: to learn more, visit <https://doc.rust-lang.org/book/ch19-04-advanced-types.html#dynamically-sized-types-and-the-sized-trait>
-   = help: consider adding a `where X: std::marker::Sized` bound
    = note: all local variables must have a statically known size
    = help: unsized locals are gated as an unstable feature
 
 error[E0277]: the size for values of type `Y` cannot be known at compilation time
   --> $DIR/unsized6.rs:17:12
    |
+LL | fn f2<X: ?Sized, Y: ?Sized>(x: &X) {
+   |                  -- help: consider further restricting this bound: `Y: std::marker::Sized +`
+...
 LL |     let y: (isize, (Y, isize));
    |            ^^^^^^^^^^^^^^^^^^^ doesn't have a size known at compile-time
    |
    = help: the trait `std::marker::Sized` is not implemented for `Y`
    = note: to learn more, visit <https://doc.rust-lang.org/book/ch19-04-advanced-types.html#dynamically-sized-types-and-the-sized-trait>
-   = help: consider adding a `where Y: std::marker::Sized` bound
    = note: only the last element of a tuple may have a dynamically sized type
 
 error[E0277]: the size for values of type `X` cannot be known at compilation time
   --> $DIR/unsized6.rs:22:9
    |
+LL | fn f3<X: ?Sized>(x1: Box<X>, x2: Box<X>, x3: Box<X>) {
+   |       -- help: consider further restricting this bound: `X: std::marker::Sized +`
 LL |     let y: X = *x1;
    |         ^ doesn't have a size known at compile-time
    |
    = help: the trait `std::marker::Sized` is not implemented for `X`
    = note: to learn more, visit <https://doc.rust-lang.org/book/ch19-04-advanced-types.html#dynamically-sized-types-and-the-sized-trait>
-   = help: consider adding a `where X: std::marker::Sized` bound
    = note: all local variables must have a statically known size
    = help: unsized locals are gated as an unstable feature
 
 error[E0277]: the size for values of type `X` cannot be known at compilation time
   --> $DIR/unsized6.rs:24:9
    |
+LL | fn f3<X: ?Sized>(x1: Box<X>, x2: Box<X>, x3: Box<X>) {
+   |       -- help: consider further restricting this bound: `X: std::marker::Sized +`
+...
 LL |     let y = *x2;
    |         ^ doesn't have a size known at compile-time
    |
    = help: the trait `std::marker::Sized` is not implemented for `X`
    = note: to learn more, visit <https://doc.rust-lang.org/book/ch19-04-advanced-types.html#dynamically-sized-types-and-the-sized-trait>
-   = help: consider adding a `where X: std::marker::Sized` bound
    = note: all local variables must have a statically known size
    = help: unsized locals are gated as an unstable feature
 
 error[E0277]: the size for values of type `X` cannot be known at compilation time
   --> $DIR/unsized6.rs:26:10
    |
+LL | fn f3<X: ?Sized>(x1: Box<X>, x2: Box<X>, x3: Box<X>) {
+   |       -- help: consider further restricting this bound: `X: std::marker::Sized +`
+...
 LL |     let (y, z) = (*x3, 4);
    |          ^ doesn't have a size known at compile-time
    |
    = help: the trait `std::marker::Sized` is not implemented for `X`
    = note: to learn more, visit <https://doc.rust-lang.org/book/ch19-04-advanced-types.html#dynamically-sized-types-and-the-sized-trait>
-   = help: consider adding a `where X: std::marker::Sized` bound
    = note: all local variables must have a statically known size
    = help: unsized locals are gated as an unstable feature
 
 error[E0277]: the size for values of type `X` cannot be known at compilation time
   --> $DIR/unsized6.rs:30:9
    |
+LL | fn f4<X: ?Sized + T>(x1: Box<X>, x2: Box<X>, x3: Box<X>) {
+   |       -- help: consider further restricting this bound: `X: std::marker::Sized +`
 LL |     let y: X = *x1;
    |         ^ doesn't have a size known at compile-time
    |
    = help: the trait `std::marker::Sized` is not implemented for `X`
    = note: to learn more, visit <https://doc.rust-lang.org/book/ch19-04-advanced-types.html#dynamically-sized-types-and-the-sized-trait>
-   = help: consider adding a `where X: std::marker::Sized` bound
    = note: all local variables must have a statically known size
    = help: unsized locals are gated as an unstable feature
 
 error[E0277]: the size for values of type `X` cannot be known at compilation time
   --> $DIR/unsized6.rs:32:9
    |
+LL | fn f4<X: ?Sized + T>(x1: Box<X>, x2: Box<X>, x3: Box<X>) {
+   |       -- help: consider further restricting this bound: `X: std::marker::Sized +`
+...
 LL |     let y = *x2;
    |         ^ doesn't have a size known at compile-time
    |
    = help: the trait `std::marker::Sized` is not implemented for `X`
    = note: to learn more, visit <https://doc.rust-lang.org/book/ch19-04-advanced-types.html#dynamically-sized-types-and-the-sized-trait>
-   = help: consider adding a `where X: std::marker::Sized` bound
    = note: all local variables must have a statically known size
    = help: unsized locals are gated as an unstable feature
 
 error[E0277]: the size for values of type `X` cannot be known at compilation time
   --> $DIR/unsized6.rs:34:10
    |
+LL | fn f4<X: ?Sized + T>(x1: Box<X>, x2: Box<X>, x3: Box<X>) {
+   |       -- help: consider further restricting this bound: `X: std::marker::Sized +`
+...
 LL |     let (y, z) = (*x3, 4);
    |          ^ doesn't have a size known at compile-time
    |
    = help: the trait `std::marker::Sized` is not implemented for `X`
    = note: to learn more, visit <https://doc.rust-lang.org/book/ch19-04-advanced-types.html#dynamically-sized-types-and-the-sized-trait>
-   = help: consider adding a `where X: std::marker::Sized` bound
    = note: all local variables must have a statically known size
    = help: unsized locals are gated as an unstable feature
 
@@ -131,11 +150,12 @@ error[E0277]: the size for values of type `X` cannot be known at compilation tim
   --> $DIR/unsized6.rs:38:18
    |
 LL | fn g1<X: ?Sized>(x: X) {}
-   |                  ^ doesn't have a size known at compile-time
+   |       --         ^ doesn't have a size known at compile-time
+   |       |
+   |       help: consider further restricting this bound: `X: std::marker::Sized +`
    |
    = help: the trait `std::marker::Sized` is not implemented for `X`
    = note: to learn more, visit <https://doc.rust-lang.org/book/ch19-04-advanced-types.html#dynamically-sized-types-and-the-sized-trait>
-   = help: consider adding a `where X: std::marker::Sized` bound
    = note: all local variables must have a statically known size
    = help: unsized locals are gated as an unstable feature
 
@@ -143,11 +163,12 @@ error[E0277]: the size for values of type `X` cannot be known at compilation tim
   --> $DIR/unsized6.rs:40:22
    |
 LL | fn g2<X: ?Sized + T>(x: X) {}
-   |                      ^ doesn't have a size known at compile-time
+   |       --             ^ doesn't have a size known at compile-time
+   |       |
+   |       help: consider further restricting this bound: `X: std::marker::Sized +`
    |
    = help: the trait `std::marker::Sized` is not implemented for `X`
    = note: to learn more, visit <https://doc.rust-lang.org/book/ch19-04-advanced-types.html#dynamically-sized-types-and-the-sized-trait>
-   = help: consider adding a `where X: std::marker::Sized` bound
    = note: all local variables must have a statically known size
    = help: unsized locals are gated as an unstable feature
 
index bb83b1811844aa4f1fdde87f53acef5a098f095b..c77503a6f87aa705a74d87c466bae6425d3fbba0 100644 (file)
@@ -2,11 +2,12 @@ error[E0277]: the size for values of type `X` cannot be known at compilation tim
   --> $DIR/unsized7.rs:12:21
    |
 LL | impl<X: ?Sized + T> T1<X> for S3<X> {
-   |                     ^^^^^ doesn't have a size known at compile-time
+   |      --             ^^^^^ doesn't have a size known at compile-time
+   |      |
+   |      help: consider further restricting this bound: `X: std::marker::Sized +`
    |
    = help: the trait `std::marker::Sized` is not implemented for `X`
    = note: to learn more, visit <https://doc.rust-lang.org/book/ch19-04-advanced-types.html#dynamically-sized-types-and-the-sized-trait>
-   = help: consider adding a `where X: std::marker::Sized` bound
 
 error: aborting due to previous error
 
index 14e1bb624033b7b2e5f6f611b8e6bc431a0d6c7b..6c23178c700112f12f41a4a7272a4f57b8555d31 100644 (file)
@@ -6,3 +6,4 @@ LL |         Trait { x: 42 } => ()
 
 error: aborting due to previous error
 
+For more information about this error, try `rustc --explain E0574`.
index 49222fc7fa62708e571cd093db99472a5f004f2d..0f93927683ea677024ffe744918934e984b2289b 100644 (file)
@@ -6,7 +6,7 @@ LL |     v
    |
    = note: expected type `std::collections::btree_map::IterMut<'_, &'new (), _>`
               found type `std::collections::btree_map::IterMut<'_, &'static (), _>`
-note: the lifetime 'new as defined on the function body at 3:21...
+note: the lifetime `'new` as defined on the function body at 3:21...
   --> $DIR/variance-btree-invariant-types.rs:3:21
    |
 LL | fn iter_cov_key<'a, 'new>(v: IterMut<'a, &'static (), ()>) -> IterMut<'a, &'new (), ()> {
@@ -21,7 +21,7 @@ LL |     v
    |
    = note: expected type `std::collections::btree_map::IterMut<'_, _, &'new ()>`
               found type `std::collections::btree_map::IterMut<'_, _, &'static ()>`
-note: the lifetime 'new as defined on the function body at 6:21...
+note: the lifetime `'new` as defined on the function body at 6:21...
   --> $DIR/variance-btree-invariant-types.rs:6:21
    |
 LL | fn iter_cov_val<'a, 'new>(v: IterMut<'a, (), &'static ()>) -> IterMut<'a, (), &'new ()> {
@@ -36,7 +36,7 @@ LL |     v
    |
    = note: expected type `std::collections::btree_map::IterMut<'_, &'static (), _>`
               found type `std::collections::btree_map::IterMut<'_, &'new (), _>`
-note: the lifetime 'new as defined on the function body at 9:24...
+note: the lifetime `'new` as defined on the function body at 9:24...
   --> $DIR/variance-btree-invariant-types.rs:9:24
    |
 LL | fn iter_contra_key<'a, 'new>(v: IterMut<'a, &'new (), ()>) -> IterMut<'a, &'static (), ()> {
@@ -51,7 +51,7 @@ LL |     v
    |
    = note: expected type `std::collections::btree_map::IterMut<'_, _, &'static ()>`
               found type `std::collections::btree_map::IterMut<'_, _, &'new ()>`
-note: the lifetime 'new as defined on the function body at 12:24...
+note: the lifetime `'new` as defined on the function body at 12:24...
   --> $DIR/variance-btree-invariant-types.rs:12:24
    |
 LL | fn iter_contra_val<'a, 'new>(v: IterMut<'a, (), &'new ()>) -> IterMut<'a, (), &'static ()> {
@@ -66,7 +66,7 @@ LL |     v
    |
    = note: expected type `std::collections::btree_map::OccupiedEntry<'_, &'new (), _>`
               found type `std::collections::btree_map::OccupiedEntry<'_, &'static (), _>`
-note: the lifetime 'new as defined on the function body at 16:20...
+note: the lifetime `'new` as defined on the function body at 16:20...
   --> $DIR/variance-btree-invariant-types.rs:16:20
    |
 LL | fn occ_cov_key<'a, 'new>(v: OccupiedEntry<'a, &'static (), ()>)
@@ -81,7 +81,7 @@ LL |     v
    |
    = note: expected type `std::collections::btree_map::OccupiedEntry<'_, _, &'new ()>`
               found type `std::collections::btree_map::OccupiedEntry<'_, _, &'static ()>`
-note: the lifetime 'new as defined on the function body at 20:20...
+note: the lifetime `'new` as defined on the function body at 20:20...
   --> $DIR/variance-btree-invariant-types.rs:20:20
    |
 LL | fn occ_cov_val<'a, 'new>(v: OccupiedEntry<'a, (), &'static ()>)
@@ -96,7 +96,7 @@ LL |     v
    |
    = note: expected type `std::collections::btree_map::OccupiedEntry<'_, &'static (), _>`
               found type `std::collections::btree_map::OccupiedEntry<'_, &'new (), _>`
-note: the lifetime 'new as defined on the function body at 24:23...
+note: the lifetime `'new` as defined on the function body at 24:23...
   --> $DIR/variance-btree-invariant-types.rs:24:23
    |
 LL | fn occ_contra_key<'a, 'new>(v: OccupiedEntry<'a, &'new (), ()>)
@@ -111,7 +111,7 @@ LL |     v
    |
    = note: expected type `std::collections::btree_map::OccupiedEntry<'_, _, &'static ()>`
               found type `std::collections::btree_map::OccupiedEntry<'_, _, &'new ()>`
-note: the lifetime 'new as defined on the function body at 28:23...
+note: the lifetime `'new` as defined on the function body at 28:23...
   --> $DIR/variance-btree-invariant-types.rs:28:23
    |
 LL | fn occ_contra_val<'a, 'new>(v: OccupiedEntry<'a, (), &'new ()>)
@@ -126,7 +126,7 @@ LL |     v
    |
    = note: expected type `std::collections::btree_map::VacantEntry<'_, &'new (), _>`
               found type `std::collections::btree_map::VacantEntry<'_, &'static (), _>`
-note: the lifetime 'new as defined on the function body at 33:20...
+note: the lifetime `'new` as defined on the function body at 33:20...
   --> $DIR/variance-btree-invariant-types.rs:33:20
    |
 LL | fn vac_cov_key<'a, 'new>(v: VacantEntry<'a, &'static (), ()>)
@@ -141,7 +141,7 @@ LL |     v
    |
    = note: expected type `std::collections::btree_map::VacantEntry<'_, _, &'new ()>`
               found type `std::collections::btree_map::VacantEntry<'_, _, &'static ()>`
-note: the lifetime 'new as defined on the function body at 37:20...
+note: the lifetime `'new` as defined on the function body at 37:20...
   --> $DIR/variance-btree-invariant-types.rs:37:20
    |
 LL | fn vac_cov_val<'a, 'new>(v: VacantEntry<'a, (), &'static ()>)
@@ -156,7 +156,7 @@ LL |     v
    |
    = note: expected type `std::collections::btree_map::VacantEntry<'_, &'static (), _>`
               found type `std::collections::btree_map::VacantEntry<'_, &'new (), _>`
-note: the lifetime 'new as defined on the function body at 41:23...
+note: the lifetime `'new` as defined on the function body at 41:23...
   --> $DIR/variance-btree-invariant-types.rs:41:23
    |
 LL | fn vac_contra_key<'a, 'new>(v: VacantEntry<'a, &'new (), ()>)
@@ -171,7 +171,7 @@ LL |     v
    |
    = note: expected type `std::collections::btree_map::VacantEntry<'_, _, &'static ()>`
               found type `std::collections::btree_map::VacantEntry<'_, _, &'new ()>`
-note: the lifetime 'new as defined on the function body at 45:23...
+note: the lifetime `'new` as defined on the function body at 45:23...
   --> $DIR/variance-btree-invariant-types.rs:45:23
    |
 LL | fn vac_contra_val<'a, 'new>(v: VacantEntry<'a, (), &'new ()>)
index 263c849e19981378337f3bcf9f0ec5d6decee194..27017e5dc47d6120feae44151450a9b150dfa851 100644 (file)
@@ -6,12 +6,12 @@ LL |     v
    |
    = note: expected type `dyn Get<&'min i32>`
               found type `dyn Get<&'max i32>`
-note: the lifetime 'min as defined on the function body at 10:21...
+note: the lifetime `'min` as defined on the function body at 10:21...
   --> $DIR/variance-contravariant-arg-object.rs:10:21
    |
 LL | fn get_min_from_max<'min, 'max>(v: Box<dyn Get<&'max i32>>)
    |                     ^^^^
-note: ...does not necessarily outlive the lifetime 'max as defined on the function body at 10:27
+note: ...does not necessarily outlive the lifetime `'max` as defined on the function body at 10:27
   --> $DIR/variance-contravariant-arg-object.rs:10:27
    |
 LL | fn get_min_from_max<'min, 'max>(v: Box<dyn Get<&'max i32>>)
@@ -25,12 +25,12 @@ LL |     v
    |
    = note: expected type `dyn Get<&'max i32>`
               found type `dyn Get<&'min i32>`
-note: the lifetime 'min as defined on the function body at 17:21...
+note: the lifetime `'min` as defined on the function body at 17:21...
   --> $DIR/variance-contravariant-arg-object.rs:17:21
    |
 LL | fn get_max_from_min<'min, 'max, G>(v: Box<dyn Get<&'min i32>>)
    |                     ^^^^
-note: ...does not necessarily outlive the lifetime 'max as defined on the function body at 17:27
+note: ...does not necessarily outlive the lifetime `'max` as defined on the function body at 17:27
   --> $DIR/variance-contravariant-arg-object.rs:17:27
    |
 LL | fn get_max_from_min<'min, 'max, G>(v: Box<dyn Get<&'min i32>>)
index ffe690dd22073695476065992369ac6e60c37148..1752b3b36a484ab5918c2a02bb9b5351a861f7ef 100644 (file)
@@ -6,12 +6,12 @@ LL |     impls_get::<G,&'min i32>()
    |
    = note: expected type `Get<&'min i32>`
               found type `Get<&'max i32>`
-note: the lifetime 'min as defined on the function body at 10:21...
+note: the lifetime `'min` as defined on the function body at 10:21...
   --> $DIR/variance-contravariant-arg-trait-match.rs:10:21
    |
 LL | fn get_min_from_max<'min, 'max, G>()
    |                     ^^^^
-note: ...does not necessarily outlive the lifetime 'max as defined on the function body at 10:27
+note: ...does not necessarily outlive the lifetime `'max` as defined on the function body at 10:27
   --> $DIR/variance-contravariant-arg-trait-match.rs:10:27
    |
 LL | fn get_min_from_max<'min, 'max, G>()
@@ -25,12 +25,12 @@ LL |     impls_get::<G,&'max i32>()
    |
    = note: expected type `Get<&'max i32>`
               found type `Get<&'min i32>`
-note: the lifetime 'min as defined on the function body at 16:21...
+note: the lifetime `'min` as defined on the function body at 16:21...
   --> $DIR/variance-contravariant-arg-trait-match.rs:16:21
    |
 LL | fn get_max_from_min<'min, 'max, G>()
    |                     ^^^^
-note: ...does not necessarily outlive the lifetime 'max as defined on the function body at 16:27
+note: ...does not necessarily outlive the lifetime `'max` as defined on the function body at 16:27
   --> $DIR/variance-contravariant-arg-trait-match.rs:16:27
    |
 LL | fn get_max_from_min<'min, 'max, G>()
index 6f445d79bf5d28152cc5300b338a749007fb1130..e35aec4c0ce27a9b10ae2f0a93d72e89ea8ae287 100644 (file)
@@ -6,12 +6,12 @@ LL |     impls_get::<&'min G>();
    |
    = note: expected type `Get`
               found type `Get`
-note: the lifetime 'min as defined on the function body at 10:21...
+note: the lifetime `'min` as defined on the function body at 10:21...
   --> $DIR/variance-contravariant-self-trait-match.rs:10:21
    |
 LL | fn get_min_from_max<'min, 'max, G>()
    |                     ^^^^
-note: ...does not necessarily outlive the lifetime 'max as defined on the function body at 10:27
+note: ...does not necessarily outlive the lifetime `'max` as defined on the function body at 10:27
   --> $DIR/variance-contravariant-self-trait-match.rs:10:27
    |
 LL | fn get_min_from_max<'min, 'max, G>()
@@ -25,12 +25,12 @@ LL |     impls_get::<&'max G>();
    |
    = note: expected type `Get`
               found type `Get`
-note: the lifetime 'min as defined on the function body at 16:21...
+note: the lifetime `'min` as defined on the function body at 16:21...
   --> $DIR/variance-contravariant-self-trait-match.rs:16:21
    |
 LL | fn get_max_from_min<'min, 'max, G>()
    |                     ^^^^
-note: ...does not necessarily outlive the lifetime 'max as defined on the function body at 16:27
+note: ...does not necessarily outlive the lifetime `'max` as defined on the function body at 16:27
   --> $DIR/variance-contravariant-self-trait-match.rs:16:27
    |
 LL | fn get_max_from_min<'min, 'max, G>()
index 94f80c2b657f5247680121d67ba531f9a44e14f0..b986edb809f6c05effdab7a6df6d8b6f6c5e739b 100644 (file)
@@ -6,12 +6,12 @@ LL |     v
    |
    = note: expected type `dyn Get<&'min i32>`
               found type `dyn Get<&'max i32>`
-note: the lifetime 'min as defined on the function body at 10:21...
+note: the lifetime `'min` as defined on the function body at 10:21...
   --> $DIR/variance-covariant-arg-object.rs:10:21
    |
 LL | fn get_min_from_max<'min, 'max>(v: Box<dyn Get<&'max i32>>)
    |                     ^^^^
-note: ...does not necessarily outlive the lifetime 'max as defined on the function body at 10:27
+note: ...does not necessarily outlive the lifetime `'max` as defined on the function body at 10:27
   --> $DIR/variance-covariant-arg-object.rs:10:27
    |
 LL | fn get_min_from_max<'min, 'max>(v: Box<dyn Get<&'max i32>>)
@@ -25,12 +25,12 @@ LL |     v
    |
    = note: expected type `dyn Get<&'max i32>`
               found type `dyn Get<&'min i32>`
-note: the lifetime 'min as defined on the function body at 18:21...
+note: the lifetime `'min` as defined on the function body at 18:21...
   --> $DIR/variance-covariant-arg-object.rs:18:21
    |
 LL | fn get_max_from_min<'min, 'max, G>(v: Box<dyn Get<&'min i32>>)
    |                     ^^^^
-note: ...does not necessarily outlive the lifetime 'max as defined on the function body at 18:27
+note: ...does not necessarily outlive the lifetime `'max` as defined on the function body at 18:27
   --> $DIR/variance-covariant-arg-object.rs:18:27
    |
 LL | fn get_max_from_min<'min, 'max, G>(v: Box<dyn Get<&'min i32>>)
index c0209edc91553d0c3e126017f5d73b35ecce4f15..aa383fcc26280f99610de6819ce904d54be9129b 100644 (file)
@@ -6,12 +6,12 @@ LL |     impls_get::<G,&'min i32>()
    |
    = note: expected type `Get<&'min i32>`
               found type `Get<&'max i32>`
-note: the lifetime 'min as defined on the function body at 10:21...
+note: the lifetime `'min` as defined on the function body at 10:21...
   --> $DIR/variance-covariant-arg-trait-match.rs:10:21
    |
 LL | fn get_min_from_max<'min, 'max, G>()
    |                     ^^^^
-note: ...does not necessarily outlive the lifetime 'max as defined on the function body at 10:27
+note: ...does not necessarily outlive the lifetime `'max` as defined on the function body at 10:27
   --> $DIR/variance-covariant-arg-trait-match.rs:10:27
    |
 LL | fn get_min_from_max<'min, 'max, G>()
@@ -25,12 +25,12 @@ LL |     impls_get::<G,&'max i32>()
    |
    = note: expected type `Get<&'max i32>`
               found type `Get<&'min i32>`
-note: the lifetime 'min as defined on the function body at 17:21...
+note: the lifetime `'min` as defined on the function body at 17:21...
   --> $DIR/variance-covariant-arg-trait-match.rs:17:21
    |
 LL | fn get_max_from_min<'min, 'max, G>()
    |                     ^^^^
-note: ...does not necessarily outlive the lifetime 'max as defined on the function body at 17:27
+note: ...does not necessarily outlive the lifetime `'max` as defined on the function body at 17:27
   --> $DIR/variance-covariant-arg-trait-match.rs:17:27
    |
 LL | fn get_max_from_min<'min, 'max, G>()
index fe5fe105c6b3c113797a3fd95648d3e037a838d5..a25d1044d42565b80019b18b9b1908cee387e13b 100644 (file)
@@ -6,12 +6,12 @@ LL |     impls_get::<&'min G>();
    |
    = note: expected type `Get`
               found type `Get`
-note: the lifetime 'min as defined on the function body at 10:21...
+note: the lifetime `'min` as defined on the function body at 10:21...
   --> $DIR/variance-covariant-self-trait-match.rs:10:21
    |
 LL | fn get_min_from_max<'min, 'max, G>()
    |                     ^^^^
-note: ...does not necessarily outlive the lifetime 'max as defined on the function body at 10:27
+note: ...does not necessarily outlive the lifetime `'max` as defined on the function body at 10:27
   --> $DIR/variance-covariant-self-trait-match.rs:10:27
    |
 LL | fn get_min_from_max<'min, 'max, G>()
@@ -25,12 +25,12 @@ LL |     impls_get::<&'max G>();
    |
    = note: expected type `Get`
               found type `Get`
-note: the lifetime 'min as defined on the function body at 17:21...
+note: the lifetime `'min` as defined on the function body at 17:21...
   --> $DIR/variance-covariant-self-trait-match.rs:17:21
    |
 LL | fn get_max_from_min<'min, 'max, G>()
    |                     ^^^^
-note: ...does not necessarily outlive the lifetime 'max as defined on the function body at 17:27
+note: ...does not necessarily outlive the lifetime `'max` as defined on the function body at 17:27
   --> $DIR/variance-covariant-self-trait-match.rs:17:27
    |
 LL | fn get_max_from_min<'min, 'max, G>()
index 50a8697d4392f64f02332d714e5bddc1281d11b1..8ff1e23e8add826c9d80e291e937f8b34a2bdf6e 100644 (file)
@@ -6,12 +6,12 @@ LL |     v
    |
    = note: expected type `dyn Get<&'min i32>`
               found type `dyn Get<&'max i32>`
-note: the lifetime 'min as defined on the function body at 7:21...
+note: the lifetime `'min` as defined on the function body at 7:21...
   --> $DIR/variance-invariant-arg-object.rs:7:21
    |
 LL | fn get_min_from_max<'min, 'max>(v: Box<dyn Get<&'max i32>>)
    |                     ^^^^
-note: ...does not necessarily outlive the lifetime 'max as defined on the function body at 7:27
+note: ...does not necessarily outlive the lifetime `'max` as defined on the function body at 7:27
   --> $DIR/variance-invariant-arg-object.rs:7:27
    |
 LL | fn get_min_from_max<'min, 'max>(v: Box<dyn Get<&'max i32>>)
@@ -25,12 +25,12 @@ LL |     v
    |
    = note: expected type `dyn Get<&'max i32>`
               found type `dyn Get<&'min i32>`
-note: the lifetime 'min as defined on the function body at 14:21...
+note: the lifetime `'min` as defined on the function body at 14:21...
   --> $DIR/variance-invariant-arg-object.rs:14:21
    |
 LL | fn get_max_from_min<'min, 'max, G>(v: Box<dyn Get<&'min i32>>)
    |                     ^^^^
-note: ...does not necessarily outlive the lifetime 'max as defined on the function body at 14:27
+note: ...does not necessarily outlive the lifetime `'max` as defined on the function body at 14:27
   --> $DIR/variance-invariant-arg-object.rs:14:27
    |
 LL | fn get_max_from_min<'min, 'max, G>(v: Box<dyn Get<&'min i32>>)
index c8a1111e6237dfc84cf4d799f92bd68ab9a0189f..b58993737c783be9db2ace5dbf732ba0e71f0d16 100644 (file)
@@ -6,12 +6,12 @@ LL |     impls_get::<G,&'min i32>()
    |
    = note: expected type `Get<&'min i32>`
               found type `Get<&'max i32>`
-note: the lifetime 'min as defined on the function body at 7:21...
+note: the lifetime `'min` as defined on the function body at 7:21...
   --> $DIR/variance-invariant-arg-trait-match.rs:7:21
    |
 LL | fn get_min_from_max<'min, 'max, G>()
    |                     ^^^^
-note: ...does not necessarily outlive the lifetime 'max as defined on the function body at 7:27
+note: ...does not necessarily outlive the lifetime `'max` as defined on the function body at 7:27
   --> $DIR/variance-invariant-arg-trait-match.rs:7:27
    |
 LL | fn get_min_from_max<'min, 'max, G>()
@@ -25,12 +25,12 @@ LL |     impls_get::<G,&'max i32>()
    |
    = note: expected type `Get<&'max i32>`
               found type `Get<&'min i32>`
-note: the lifetime 'min as defined on the function body at 13:21...
+note: the lifetime `'min` as defined on the function body at 13:21...
   --> $DIR/variance-invariant-arg-trait-match.rs:13:21
    |
 LL | fn get_max_from_min<'min, 'max, G>()
    |                     ^^^^
-note: ...does not necessarily outlive the lifetime 'max as defined on the function body at 13:27
+note: ...does not necessarily outlive the lifetime `'max` as defined on the function body at 13:27
   --> $DIR/variance-invariant-arg-trait-match.rs:13:27
    |
 LL | fn get_max_from_min<'min, 'max, G>()
index cb03d95f77104eb74d8008492be2bfa5987e77be..4a1d4d28b4805ae356b48264edc9d028f51b6212 100644 (file)
@@ -6,12 +6,12 @@ LL |     impls_get::<&'min G>();
    |
    = note: expected type `Get`
               found type `Get`
-note: the lifetime 'min as defined on the function body at 7:21...
+note: the lifetime `'min` as defined on the function body at 7:21...
   --> $DIR/variance-invariant-self-trait-match.rs:7:21
    |
 LL | fn get_min_from_max<'min, 'max, G>()
    |                     ^^^^
-note: ...does not necessarily outlive the lifetime 'max as defined on the function body at 7:27
+note: ...does not necessarily outlive the lifetime `'max` as defined on the function body at 7:27
   --> $DIR/variance-invariant-self-trait-match.rs:7:27
    |
 LL | fn get_min_from_max<'min, 'max, G>()
@@ -25,12 +25,12 @@ LL |     impls_get::<&'max G>();
    |
    = note: expected type `Get`
               found type `Get`
-note: the lifetime 'min as defined on the function body at 13:21...
+note: the lifetime `'min` as defined on the function body at 13:21...
   --> $DIR/variance-invariant-self-trait-match.rs:13:21
    |
 LL | fn get_max_from_min<'min, 'max, G>()
    |                     ^^^^
-note: ...does not necessarily outlive the lifetime 'max as defined on the function body at 13:27
+note: ...does not necessarily outlive the lifetime `'max` as defined on the function body at 13:27
   --> $DIR/variance-invariant-self-trait-match.rs:13:27
    |
 LL | fn get_max_from_min<'min, 'max, G>()
index 7c433378df5c3abd1b72b9cdba6f7c0702c4b6ba..618f56da512d6aa6a71765fed3bc5a408f3192f7 100644 (file)
@@ -6,12 +6,12 @@ LL |     v
    |
    = note: expected type `SomeStruct<&'min ()>`
               found type `SomeStruct<&'max ()>`
-note: the lifetime 'min as defined on the function body at 8:8...
+note: the lifetime `'min` as defined on the function body at 8:8...
   --> $DIR/variance-use-contravariant-struct-1.rs:8:8
    |
 LL | fn foo<'min,'max>(v: SomeStruct<&'max ()>)
    |        ^^^^
-note: ...does not necessarily outlive the lifetime 'max as defined on the function body at 8:13
+note: ...does not necessarily outlive the lifetime `'max` as defined on the function body at 8:13
   --> $DIR/variance-use-contravariant-struct-1.rs:8:13
    |
 LL | fn foo<'min,'max>(v: SomeStruct<&'max ()>)
index 6ae7d12c4633fa120be06f91f301251dc080a014..0b3a8dcfc86f4ab731b0ce613a33dce286e223eb 100644 (file)
@@ -6,12 +6,12 @@ LL |     v
    |
    = note: expected type `SomeStruct<&'max ()>`
               found type `SomeStruct<&'min ()>`
-note: the lifetime 'min as defined on the function body at 6:8...
+note: the lifetime `'min` as defined on the function body at 6:8...
   --> $DIR/variance-use-covariant-struct-1.rs:6:8
    |
 LL | fn foo<'min,'max>(v: SomeStruct<&'min ()>)
    |        ^^^^
-note: ...does not necessarily outlive the lifetime 'max as defined on the function body at 6:13
+note: ...does not necessarily outlive the lifetime `'max` as defined on the function body at 6:13
   --> $DIR/variance-use-covariant-struct-1.rs:6:13
    |
 LL | fn foo<'min,'max>(v: SomeStruct<&'min ()>)
index 793954e3a1f0413f1357cb109c80d0a30828addd..31deefb535e94cafa528747f3d5e4a7bc3ae0934 100644 (file)
@@ -6,12 +6,12 @@ LL |     v
    |
    = note: expected type `SomeStruct<&'min ()>`
               found type `SomeStruct<&'max ()>`
-note: the lifetime 'min as defined on the function body at 8:8...
+note: the lifetime `'min` as defined on the function body at 8:8...
   --> $DIR/variance-use-invariant-struct-1.rs:8:8
    |
 LL | fn foo<'min,'max>(v: SomeStruct<&'max ()>)
    |        ^^^^
-note: ...does not necessarily outlive the lifetime 'max as defined on the function body at 8:13
+note: ...does not necessarily outlive the lifetime `'max` as defined on the function body at 8:13
   --> $DIR/variance-use-invariant-struct-1.rs:8:13
    |
 LL | fn foo<'min,'max>(v: SomeStruct<&'max ()>)
@@ -25,12 +25,12 @@ LL |     v
    |
    = note: expected type `SomeStruct<&'max ()>`
               found type `SomeStruct<&'min ()>`
-note: the lifetime 'min as defined on the function body at 15:8...
+note: the lifetime `'min` as defined on the function body at 15:8...
   --> $DIR/variance-use-invariant-struct-1.rs:15:8
    |
 LL | fn bar<'min,'max>(v: SomeStruct<&'min ()>)
    |        ^^^^
-note: ...does not necessarily outlive the lifetime 'max as defined on the function body at 15:13
+note: ...does not necessarily outlive the lifetime `'max` as defined on the function body at 15:13
   --> $DIR/variance-use-invariant-struct-1.rs:15:13
    |
 LL | fn bar<'min,'max>(v: SomeStruct<&'min ()>)
index 1138b69ae3bc3bd765df0b1509246a27a9fe1a4e..fdfc044d81f6cbcc24b124e0f975b49b7c5f7b48 100644 (file)
@@ -24,3 +24,4 @@ LL | impl Ty {}
 
 error: aborting due to 2 previous errors
 
+For more information about this error, try `rustc --explain E0573`.
diff --git a/src/test/ui/wf/issue-48638.rs b/src/test/ui/wf/issue-48638.rs
new file mode 100644 (file)
index 0000000..f078431
--- /dev/null
@@ -0,0 +1,21 @@
+// check-pass
+
+pub trait D {}
+pub struct DT;
+impl D for DT {}
+
+pub trait A<R: D>: Sized {
+    type AS;
+}
+
+pub struct As<R: D>(R);
+
+pub struct AT;
+impl<R: D> A<R> for AT {
+    type AS = As<R>;
+}
+
+#[repr(packed)]
+struct S(<AT as A<DT>>::AS);
+
+fn main() {}
index d5632f4a9c24ee8d81e27b913919600f93dfa187..eaacd6b6881eff2831f1479c23aaaa5be4abe570 100644 (file)
@@ -6,12 +6,11 @@ LL |   trait ExtraCopy<T:Copy> { }
 LL | 
 LL | / enum SomeEnum<T,U>
 LL | |     where T: ExtraCopy<U>
+   | |                          - help: consider further restricting type parameter `U`: `, U: std::marker::Copy`
 LL | | {
 LL | |     SomeVariant(T,U)
 LL | | }
    | |_^ the trait `std::marker::Copy` is not implemented for `U`
-   |
-   = help: consider adding a `where U: std::marker::Copy` bound
 
 error: aborting due to previous error
 
index 51ee23fc5aa6d52e62335ffd384e7a0a657ce522..52882c460d228ad7f707bd583dc1cdbdd2470c78 100644 (file)
@@ -4,10 +4,11 @@ error[E0277]: the trait bound `A: std::marker::Copy` is not satisfied
 LL | struct IsCopy<T:Copy> {
    | --------------------- required by `IsCopy`
 ...
+LL | enum AnotherEnum<A> {
+   |                  - help: consider restricting this bound: `A: std::marker::Copy`
+LL |     AnotherVariant {
 LL |         f: IsCopy<A>
    |         ^^^^^^^^^^^^ the trait `std::marker::Copy` is not implemented for `A`
-   |
-   = help: consider adding a `where A: std::marker::Copy` bound
 
 error: aborting due to previous error
 
index 5f4e7c66f54cadafc32764e0574a906911f68ed2..0fea35d68ea6ff435e206ae6ca743aedaae9790a 100644 (file)
@@ -4,10 +4,10 @@ error[E0277]: the trait bound `A: std::marker::Copy` is not satisfied
 LL | struct IsCopy<T:Copy> {
    | --------------------- required by `IsCopy`
 ...
+LL | enum SomeEnum<A> {
+   |               - help: consider restricting this bound: `A: std::marker::Copy`
 LL |     SomeVariant(IsCopy<A>)
    |                 ^^^^^^^^^ the trait `std::marker::Copy` is not implemented for `A`
-   |
-   = help: consider adding a `where A: std::marker::Copy` bound
 
 error: aborting due to previous error
 
index 4bc2e370f29faa25658bc82a03d202dfd4df76c9..9b8b04a7b86a3153d6236de59c7e8037552d340a 100644 (file)
@@ -4,12 +4,13 @@ error[E0277]: the trait bound `U: std::marker::Copy` is not satisfied
 LL |   trait ExtraCopy<T:Copy> { }
    |   ----------------------- required by `ExtraCopy`
 LL | 
-LL | / fn foo<T,U>() where T: ExtraCopy<U>
+LL |   fn foo<T,U>() where T: ExtraCopy<U>
+   |   ^                                  - help: consider further restricting type parameter `U`: `, U: std::marker::Copy`
+   |  _|
+   | |
 LL | | {
 LL | | }
    | |_^ the trait `std::marker::Copy` is not implemented for `U`
-   |
-   = help: consider adding a `where U: std::marker::Copy` bound
 
 error[E0277]: the size for values of type `(dyn std::marker::Copy + 'static)` cannot be known at compilation time
   --> $DIR/wf-fn-where-clause.rs:12:1
index ceafb4f61578b1424c784d4ab3003f55229c486a..6d71670e6a84c0e8b091891cd5cce241e27b207b 100644 (file)
@@ -4,10 +4,10 @@ error[E0277]: the trait bound `T: MyHash` is not satisfied
 LL | pub struct MySet<T:MyHash> {
    | -------------------------- required by `MySet`
 ...
+LL | impl<T> Foo for T {
+   |      - help: consider restricting this bound: `T: MyHash`
 LL |     type Bar = MySet<T>;
    |     ^^^^^^^^^^^^^^^^^^^^ the trait `MyHash` is not implemented for `T`
-   |
-   = help: consider adding a `where T: MyHash` bound
 
 error: aborting due to previous error
 
index e7432f81987344800eb1f379ef1b22556263be3c..3798ba1ec6e75a095479ffca6a14ce081c5f9cec 100644 (file)
@@ -4,12 +4,13 @@ error[E0277]: the trait bound `T: std::marker::Copy` is not satisfied
 LL |   struct MustBeCopy<T:Copy> {
    |   ------------------------- required by `MustBeCopy`
 ...
-LL | / fn bar<T>(_: &MustBeCopy<T>)
+LL |   fn bar<T>(_: &MustBeCopy<T>)
+   |   ^      - help: consider restricting this bound: `T: std::marker::Copy`
+   |  _|
+   | |
 LL | | {
 LL | | }
    | |_^ the trait `std::marker::Copy` is not implemented for `T`
-   |
-   = help: consider adding a `where T: std::marker::Copy` bound
 
 error: aborting due to previous error
 
index 005ffe84502de55f304887d3e37a5ffa40a42be9..2e46ce4900033f132b03ce3c392365f64424c9d5 100644 (file)
@@ -4,12 +4,13 @@ error[E0277]: the trait bound `T: std::marker::Copy` is not satisfied
 LL |   struct MustBeCopy<T:Copy> {
    |   ------------------------- required by `MustBeCopy`
 ...
-LL | / fn bar<T>() -> MustBeCopy<T>
+LL |   fn bar<T>() -> MustBeCopy<T>
+   |   ^      - help: consider restricting this bound: `T: std::marker::Copy`
+   |  _|
+   | |
 LL | | {
 LL | | }
    | |_^ the trait `std::marker::Copy` is not implemented for `T`
-   |
-   = help: consider adding a `where T: std::marker::Copy` bound
 
 error: aborting due to previous error
 
index b4cd921040225aeba7d6abcebd026548444a95e5..db4fb9f97f52d6377753177f0d9c404a3184edd6 100644 (file)
@@ -4,10 +4,11 @@ error[E0277]: the trait bound `T: std::marker::Copy` is not satisfied
 LL | struct MustBeCopy<T:Copy> {
    | ------------------------- required by `MustBeCopy`
 ...
+LL | struct Bar<T> {
+   |            - help: consider restricting this bound: `T: std::marker::Copy`
+LL |     // needs T: Copy
 LL |     x: fn(MustBeCopy<T>)
    |     ^^^^^^^^^^^^^^^^^^^^ the trait `std::marker::Copy` is not implemented for `T`
-   |
-   = help: consider adding a `where T: std::marker::Copy` bound
 
 error: aborting due to previous error
 
index 988fbed8e9107df59c5421f898696f51e1b715f9..09f8aa2a2018202c8f3a90ef2e5028c083234fe3 100644 (file)
@@ -4,10 +4,11 @@ error[E0277]: the trait bound `T: std::marker::Copy` is not satisfied
 LL | struct MustBeCopy<T:Copy> {
    | ------------------------- required by `MustBeCopy`
 ...
+LL | struct Foo<T> {
+   |            - help: consider restricting this bound: `T: std::marker::Copy`
+LL |     // needs T: 'static
 LL |     x: fn() -> MustBeCopy<T>
    |     ^^^^^^^^^^^^^^^^^^^^^^^^ the trait `std::marker::Copy` is not implemented for `T`
-   |
-   = help: consider adding a `where T: std::marker::Copy` bound
 
 error: aborting due to previous error
 
index 0af38ddcffea3d431506fa7e1e392b7a4afc99a8..979802dec49986d050d293ec70d790cfdbebf0ed 100644 (file)
@@ -6,11 +6,10 @@ LL |   trait MustBeCopy<T:Copy> {
 ...
 LL | / fn bar<T,U>()
 LL | |     where T: MustBeCopy<U>
+   | |                           - help: consider further restricting type parameter `U`: `, U: std::marker::Copy`
 LL | | {
 LL | | }
    | |_^ the trait `std::marker::Copy` is not implemented for `U`
-   |
-   = help: consider adding a `where U: std::marker::Copy` bound
 
 error: aborting due to previous error
 
index 0f4b4e417ca4f2af0f1df7712c38447608c7c0a1..2711820d82c651081bd2a150b9da056d7577249f 100644 (file)
@@ -4,10 +4,11 @@ error[E0277]: the trait bound `T: std::marker::Copy` is not satisfied
 LL | struct MustBeCopy<T:Copy> {
    | ------------------------- required by `MustBeCopy`
 ...
+LL | struct Bar<T> {
+   |            - help: consider restricting this bound: `T: std::marker::Copy`
+LL |     // needs T: Copy
 LL |     x: dyn Object<MustBeCopy<T>>
    |     ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ the trait `std::marker::Copy` is not implemented for `T`
-   |
-   = help: consider adding a `where T: std::marker::Copy` bound
 
 error: aborting due to previous error
 
index 1e258864d0367416177ecd7b31910cbb5cec9aaa..21f825ac9ef9af71085207a9ea7a00e766df68e0 100644 (file)
@@ -4,11 +4,11 @@ error[E0277]: the trait bound `U: std::marker::Copy` is not satisfied
 LL |   trait ExtraCopy<T:Copy> { }
    |   ----------------------- required by `ExtraCopy`
 ...
+LL |   impl<T,U> Foo<T,U> {
+   |          - help: consider restricting this bound: `U: std::marker::Copy`
 LL | /     fn foo(self) where T: ExtraCopy<U>
 LL | |     {}
    | |______^ the trait `std::marker::Copy` is not implemented for `U`
-   |
-   = help: consider adding a `where U: std::marker::Copy` bound
 
 error: aborting due to previous error
 
index 4c389b3ef3ef20ccbd84a2b16c5151c85391eaae..35b90933813299c85f42b27509d8556aebd88ca7 100644 (file)
@@ -4,12 +4,13 @@ error[E0277]: the trait bound `U: std::marker::Copy` is not satisfied
 LL |   trait ExtraCopy<T:Copy> { }
    |   ----------------------- required by `ExtraCopy`
 ...
-LL | / impl<T,U> Foo<T,U> where T: ExtraCopy<U>
+LL |   impl<T,U> Foo<T,U> where T: ExtraCopy<U>
+   |   ^                                       - help: consider further restricting type parameter `U`: `, U: std::marker::Copy`
+   |  _|
+   | |
 LL | | {
 LL | | }
    | |_^ the trait `std::marker::Copy` is not implemented for `U`
-   |
-   = help: consider adding a `where U: std::marker::Copy` bound
 
 error: aborting due to previous error
 
index 3ec90f00448a9af75800748d172939ce482adfa6..93d16514a50783240f65339e48ecb3275f74b805 100644 (file)
@@ -4,12 +4,12 @@ error[E0312]: lifetime of reference outlives lifetime of borrowed content...
 LL |         u
    |         ^
    |
-note: ...the reference is valid for the lifetime 'a as defined on the impl at 14:6...
+note: ...the reference is valid for the lifetime `'a` as defined on the impl at 14:6...
   --> $DIR/wf-static-method.rs:14:6
    |
 LL | impl<'a, 'b> Foo<'a, 'b, Evil<'a, 'b>> for () {
    |      ^^
-note: ...but the borrowed content is only valid for the lifetime 'b as defined on the impl at 14:10
+note: ...but the borrowed content is only valid for the lifetime `'b` as defined on the impl at 14:10
   --> $DIR/wf-static-method.rs:14:10
    |
 LL | impl<'a, 'b> Foo<'a, 'b, Evil<'a, 'b>> for () {
@@ -21,12 +21,12 @@ error[E0478]: lifetime bound not satisfied
 LL |         let me = Self::make_me();
    |                  ^^^^^^^^^^^^^
    |
-note: lifetime parameter instantiated with the lifetime 'b as defined on the impl at 23:10
+note: lifetime parameter instantiated with the lifetime `'b` as defined on the impl at 23:10
   --> $DIR/wf-static-method.rs:23:10
    |
 LL | impl<'a, 'b> Foo<'a, 'b, ()> for IndirectEvil<'a, 'b> {
    |          ^^
-note: but lifetime parameter must outlive the lifetime 'a as defined on the impl at 23:6
+note: but lifetime parameter must outlive the lifetime `'a` as defined on the impl at 23:6
   --> $DIR/wf-static-method.rs:23:6
    |
 LL | impl<'a, 'b> Foo<'a, 'b, ()> for IndirectEvil<'a, 'b> {
@@ -38,12 +38,12 @@ error[E0312]: lifetime of reference outlives lifetime of borrowed content...
 LL |         u
    |         ^
    |
-note: ...the reference is valid for the lifetime 'a as defined on the impl at 31:6...
+note: ...the reference is valid for the lifetime `'a` as defined on the impl at 31:6...
   --> $DIR/wf-static-method.rs:31:6
    |
 LL | impl<'a, 'b> Evil<'a, 'b> {
    |      ^^
-note: ...but the borrowed content is only valid for the lifetime 'b as defined on the impl at 31:10
+note: ...but the borrowed content is only valid for the lifetime `'b` as defined on the impl at 31:10
   --> $DIR/wf-static-method.rs:31:10
    |
 LL | impl<'a, 'b> Evil<'a, 'b> {
@@ -55,7 +55,7 @@ error[E0495]: cannot infer an appropriate lifetime for lifetime parameter `'b` d
 LL |     <()>::static_evil(b)
    |     ^^^^^^^^^^^^^^^^^
    |
-note: first, the lifetime cannot outlive the lifetime 'b as defined on the function body at 40:13...
+note: first, the lifetime cannot outlive the lifetime `'b` as defined on the function body at 40:13...
   --> $DIR/wf-static-method.rs:40:13
    |
 LL | fn evil<'a, 'b>(b: &'b u32) -> &'a u32 {
@@ -65,7 +65,7 @@ note: ...so that reference does not outlive borrowed content
    |
 LL |     <()>::static_evil(b)
    |                       ^
-note: but, the lifetime must be valid for the lifetime 'a as defined on the function body at 40:9...
+note: but, the lifetime must be valid for the lifetime `'a` as defined on the function body at 40:9...
   --> $DIR/wf-static-method.rs:40:9
    |
 LL | fn evil<'a, 'b>(b: &'b u32) -> &'a u32 {
@@ -82,7 +82,7 @@ error[E0495]: cannot infer an appropriate lifetime for lifetime parameter `'b` d
 LL |     <IndirectEvil>::static_evil(b)
    |     ^^^^^^^^^^^^^^^^^^^^^^^^^^^
    |
-note: first, the lifetime cannot outlive the lifetime 'b as defined on the function body at 44:22...
+note: first, the lifetime cannot outlive the lifetime `'b` as defined on the function body at 44:22...
   --> $DIR/wf-static-method.rs:44:22
    |
 LL | fn indirect_evil<'a, 'b>(b: &'b u32) -> &'a u32 {
@@ -92,7 +92,7 @@ note: ...so that reference does not outlive borrowed content
    |
 LL |     <IndirectEvil>::static_evil(b)
    |                                 ^
-note: but, the lifetime must be valid for the lifetime 'a as defined on the function body at 44:18...
+note: but, the lifetime must be valid for the lifetime `'a` as defined on the function body at 44:18...
   --> $DIR/wf-static-method.rs:44:18
    |
 LL | fn indirect_evil<'a, 'b>(b: &'b u32) -> &'a u32 {
@@ -105,5 +105,5 @@ LL |     <IndirectEvil>::static_evil(b)
 
 error: aborting due to 5 previous errors
 
-Some errors have detailed explanations: E0312, E0478.
+Some errors have detailed explanations: E0312, E0478, E0495.
 For more information about an error, try `rustc --explain E0312`.
index 2028a0baa17fb2eca35b735dbc2812bafe9bddb3..2155977349256cd9378a8583f0d5674b44c523b9 100644 (file)
@@ -6,12 +6,11 @@ LL |   trait ExtraCopy<T:Copy> { }
 LL | 
 LL | / struct SomeStruct<T,U>
 LL | |     where T: ExtraCopy<U>
+   | |                          - help: consider further restricting type parameter `U`: `, U: std::marker::Copy`
 LL | | {
 LL | |     data: (T,U)
 LL | | }
    | |_^ the trait `std::marker::Copy` is not implemented for `U`
-   |
-   = help: consider adding a `where U: std::marker::Copy` bound
 
 error: aborting due to previous error
 
index d2bff253678ee666b2ee3587924d8195854ccb42..6ac4f1e2da8d22a428a57a177ae0d66f2988e604 100644 (file)
@@ -4,10 +4,10 @@ error[E0277]: the trait bound `A: std::marker::Copy` is not satisfied
 LL | struct IsCopy<T:Copy> {
    | --------------------- required by `IsCopy`
 ...
+LL | struct SomeStruct<A> {
+   |                   - help: consider restricting this bound: `A: std::marker::Copy`
 LL |     data: IsCopy<A>
    |     ^^^^^^^^^^^^^^^ the trait `std::marker::Copy` is not implemented for `A`
-   |
-   = help: consider adding a `where A: std::marker::Copy` bound
 
 error: aborting due to previous error
 
index d5b2b5762a43a6c689642479463d292288fb5f98..af0433fd22f6e30f4997eeb8c86b785f62954180 100644 (file)
@@ -4,12 +4,13 @@ error[E0277]: the trait bound `T: std::marker::Copy` is not satisfied
 LL |   trait ExtraCopy<T:Copy> { }
    |   ----------------------- required by `ExtraCopy`
 LL | 
-LL | / trait SomeTrait<T> {
+LL |   trait SomeTrait<T> {
+   |   ^               - help: consider restricting this bound: `T: std::marker::Copy`
+   |  _|
+   | |
 LL | |     type Type1: ExtraCopy<T>;
 LL | | }
    | |_^ the trait `std::marker::Copy` is not implemented for `T`
-   |
-   = help: consider adding a `where T: std::marker::Copy` bound
 
 error: aborting due to previous error
 
index d8ab95504823949c732a7381073a589470889ef3..93cb948cdbfcb0674080e6729f698e8c582110b7 100644 (file)
@@ -3,11 +3,12 @@ error[E0277]: the trait bound `<Self as SomeTrait>::Type1: std::marker::Copy` is
    |
 LL | struct IsCopy<T:Copy> { x: T }
    | --------------------- required by `IsCopy`
-...
+LL | 
+LL | trait SomeTrait {
+   |                - help: consider further restricting the associated type: `where <Self as SomeTrait>::Type1: std::marker::Copy`
+LL |     type Type1;
 LL |     type Type2 = IsCopy<Self::Type1>;
    |     ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ the trait `std::marker::Copy` is not implemented for `<Self as SomeTrait>::Type1`
-   |
-   = help: consider adding a `where <Self as SomeTrait>::Type1: std::marker::Copy` bound
 
 error: aborting due to previous error
 
index 85f12b2de548931017b75aefef3c3daddf2775b3..13e2f8f590149f0e17633d4c00fa5a84754d14d0 100644 (file)
@@ -6,11 +6,10 @@ LL |   trait ExtraCopy<T:Copy> { }
 LL | 
 LL | / trait SomeTrait<T,U>
 LL | |     where T: ExtraCopy<U>
+   | |                          - help: consider further restricting type parameter `U`: `, U: std::marker::Copy`
 LL | | {
 LL | | }
    | |_^ the trait `std::marker::Copy` is not implemented for `U`
-   |
-   = help: consider adding a `where U: std::marker::Copy` bound
 
 error: aborting due to previous error
 
index 4d0e1f2f0f4cce1bc51ffdd815dba6cb52748fce..9f3545b9c6a6b3d10c5fc41c91899c95b38f07e9 100644 (file)
@@ -4,14 +4,15 @@ error[E0277]: the trait bound `Self: std::cmp::Eq` is not satisfied
 LL |   struct Bar<T:Eq+?Sized> { value: Box<T> }
    |   ----------------------- required by `Bar`
 ...
-LL | /     fn bar(&self, x: &Bar<Self>) {
+LL |       fn bar(&self, x: &Bar<Self>) {
+   |       ^                           - help: consider further restricting `Self`: `where Self: std::cmp::Eq`
+   |  _____|
+   | |
 LL | |
 LL | |         //
 LL | |         // Here, Eq ought to be implemented.
 LL | |     }
    | |_____^ the trait `std::cmp::Eq` is not implemented for `Self`
-   |
-   = help: consider adding a `where Self: std::cmp::Eq` bound
 
 error: aborting due to previous error
 
index e82b76b61c4a9fb622e5a9fa56bd87d9eff394c8..e32630a5a4a400025b55a06774fd01768d910fce 100644 (file)
@@ -4,15 +4,16 @@ error[E0277]: the trait bound `Self: std::cmp::Eq` is not satisfied
 LL |   struct Bar<T:Eq+?Sized> { value: Box<T> }
    |   ----------------------- required by `Bar`
 ...
-LL | /     fn bar(&self) -> Bar<Self> {
+LL |       fn bar(&self) -> Bar<Self> {
+   |       ^                         - help: consider further restricting `Self`: `where Self: std::cmp::Eq`
+   |  _____|
+   | |
 LL | |
 LL | |         //
 LL | |         // Here, Eq ought to be implemented.
 LL | |         loop { }
 LL | |     }
    | |_____^ the trait `std::cmp::Eq` is not implemented for `Self`
-   |
-   = help: consider adding a `where Self: std::cmp::Eq` bound
 
 error: aborting due to previous error
 
index 6504f6698d9b1642d0d9b3150d8012a3a0e91e68..a443ff1bb63969ed2a9ecd16f3c311f04b9e726e 100644 (file)
@@ -4,14 +4,15 @@ error[E0277]: the trait bound `Self: std::cmp::Eq` is not satisfied
 LL |   trait Bar<T:Eq+?Sized> { }
    |   ---------------------- required by `Bar`
 ...
-LL | /     fn bar<A>(&self) where A: Bar<Self> {
+LL |       fn bar<A>(&self) where A: Bar<Self> {
+   |       ^                                  - help: consider further restricting `Self`: `, Self: std::cmp::Eq`
+   |  _____|
+   | |
 LL | |
 LL | |         //
 LL | |         // Here, Eq ought to be implemented.
 LL | |     }
    | |_____^ the trait `std::cmp::Eq` is not implemented for `Self`
-   |
-   = help: consider adding a `where Self: std::cmp::Eq` bound
 
 error: aborting due to previous error
 
index 0887d4b2fcda21ae8ed3adb3110c4e0515ec03ba..42a28ee676373c9638965f9f8b8739d2944a4341 100644 (file)
@@ -5,9 +5,10 @@ LL | struct Bar<T:Eq+?Sized> { value: Box<T> }
    | ----------------------- required by `Bar`
 ...
 LL |     fn bar(&self, x: &Bar<Self>);
-   |     ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ the trait `std::cmp::Eq` is not implemented for `Self`
-   |
-   = help: consider adding a `where Self: std::cmp::Eq` bound
+   |     ^^^^^^^^^^^^^^^^^^^^^^^^^^^^-
+   |     |                           |
+   |     |                           help: consider further restricting `Self`: `where Self: std::cmp::Eq`
+   |     the trait `std::cmp::Eq` is not implemented for `Self`
 
 error: aborting due to previous error
 
index 5555081498c618261f79946e1ebc86156ea77b0b..7ec4dbe0056b4acdf505748aae7b422d74176494 100644 (file)
@@ -5,9 +5,10 @@ LL | struct Bar<T:Eq+?Sized> { value: Box<T> }
    | ----------------------- required by `Bar`
 ...
 LL |     fn bar(&self) -> &Bar<Self>;
-   |     ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ the trait `std::cmp::Eq` is not implemented for `Self`
-   |
-   = help: consider adding a `where Self: std::cmp::Eq` bound
+   |     ^^^^^^^^^^^^^^^^^^^^^^^^^^^-
+   |     |                          |
+   |     |                          help: consider further restricting `Self`: `where Self: std::cmp::Eq`
+   |     the trait `std::cmp::Eq` is not implemented for `Self`
 
 error: aborting due to previous error
 
index 5e8fd8982390df9d3e6b97b468ca7df8c130c33f..256edb5b2ca1d0a3fe8fc4855412268452279dc0 100644 (file)
@@ -5,9 +5,10 @@ LL | struct Bar<T:Eq+?Sized> { value: Box<T> }
    | ----------------------- required by `Bar`
 ...
 LL |     fn bar(&self) where Self: Sized, Bar<Self>: Copy;
-   |     ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ the trait `std::cmp::Eq` is not implemented for `Self`
-   |
-   = help: consider adding a `where Self: std::cmp::Eq` bound
+   |     ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^-
+   |     |                                               |
+   |     |                                               help: consider further restricting `Self`: `, Self: std::cmp::Eq`
+   |     the trait `std::cmp::Eq` is not implemented for `Self`
 
 error: aborting due to previous error
 
index 377ca640536cf09a6e6fa370335e64ba79383ac1..a61b8dd3a3849ffff54cf1f48ee62ae6213582ab 100644 (file)
@@ -4,11 +4,12 @@ error[E0277]: the trait bound `T: std::marker::Copy` is not satisfied
 LL |   trait ExtraCopy<T:Copy> { }
    |   ----------------------- required by `ExtraCopy`
 LL | 
-LL | / trait SomeTrait<T>: ExtraCopy<T> {
+LL |   trait SomeTrait<T>: ExtraCopy<T> {
+   |   ^               - help: consider restricting this bound: `T: std::marker::Copy`
+   |  _|
+   | |
 LL | | }
    | |_^ the trait `std::marker::Copy` is not implemented for `T`
-   |
-   = help: consider adding a `where T: std::marker::Copy` bound
 
 error: aborting due to previous error
 
index 727c9b8e0672141dbca32dc81a757f669815b9c8..995b544600389fb58411d3cd654eaf236142b601 100644 (file)
@@ -4,10 +4,11 @@ error[E0277]: the trait bound `T: std::marker::Copy` is not satisfied
 LL | fn require_copy<T: Copy>(x: T) {}
    |    ------------    ---- required by this bound in `require_copy`
 ...
+LL | impl<T> Foo<T> {
+   |      - help: consider restricting this bound: `T: std::marker::Copy`
+...
 LL |         require_copy(self.x);
    |                      ^^^^^^ the trait `std::marker::Copy` is not implemented for `T`
-   |
-   = help: consider adding a `where T: std::marker::Copy` bound
 
 error: aborting due to previous error
 
index 1c1937c3074db760b273a6261797ef32d95deeab..fe575f3a28a9ea13c32c7ec8a366480de2b546ff 100644 (file)
@@ -4,10 +4,11 @@ error[E0277]: the trait bound `T: std::marker::Copy` is not satisfied
 LL | fn require_copy<T: Copy>(x: T) {}
    |    ------------    ---- required by this bound in `require_copy`
 ...
+LL | impl<T> Foo<T> for Bar<T> {
+   |      - help: consider restricting this bound: `T: std::marker::Copy`
+...
 LL |         require_copy(self.x);
    |                      ^^^^^^ the trait `std::marker::Copy` is not implemented for `T`
-   |
-   = help: consider adding a `where T: std::marker::Copy` bound
 
 error: aborting due to previous error
 
index 8b0561d68f12eeb1d72e07ceef464ebf6032a1bc..3a9abe3f065554a7fbc59f440df2baba4a6e47ee 160000 (submodule)
@@ -1 +1 @@
-Subproject commit 8b0561d68f12eeb1d72e07ceef464ebf6032a1bc
+Subproject commit 3a9abe3f065554a7fbc59f440df2baba4a6e47ee
index 648e5b90b49af483d07caa8e413473a4517853d6..cbedd97b3a58023eff365a2fa74700d06115144a 160000 (submodule)
@@ -1 +1 @@
-Subproject commit 648e5b90b49af483d07caa8e413473a4517853d6
+Subproject commit cbedd97b3a58023eff365a2fa74700d06115144a
index 592b3f14c85aff7bf8309f75d44071385aafae54..c59533da1dc39c06eb0e982fafe5a2a21c4f6cdb 100644 (file)
@@ -15,7 +15,7 @@ fn main() {
             println!("cargo:rerun-if-changed={}", entry.path().to_str().unwrap());
             let file = fs::read_to_string(entry.path()).unwrap()
                 .replace("syntax::register_diagnostics!", "register_diagnostics!");
-            let contents = format!("(|| {{\n{}\n}})();", file);
+            let contents = format!("(|| {{\n{}\n}})()", file);
 
             fs::write(&out_dir.join(&format!("error_{}.rs", idx)), &contents).unwrap();
 
index 07ac10277ea5ad42efbb914da5844e0ab08efbf4..2adc39f27b7fd2d06b3d1d470827928766731a1d 160000 (submodule)
@@ -1 +1 @@
-Subproject commit 07ac10277ea5ad42efbb914da5844e0ab08efbf4
+Subproject commit 2adc39f27b7fd2d06b3d1d470827928766731a1d
index 7cf3cc7663b470f35dedeaaf07a118c94f9d1bcf..4383cd9d5be43a65c8b7fb778d7831eec8f53d6b 100755 (executable)
@@ -143,7 +143,7 @@ def issue(
         cc @{}, do you think you would have time to do the follow-up work?
         If so, that would be great!
 
-        cc @{}, the PR reviewer, and @rust-lang/compiler -- nominating for prioritization.
+        cc @{}, the PR reviewer, and nominating for compiler team prioritization.
 
         ''').format(
             relevant_pr_number, tool, status_description,
index 8dc9ba96d57c5705b99a18a380d41579e9d2d675..a18df16181947edd5eb593ea0f2321e0035448ee 160000 (submodule)
@@ -1 +1 @@
-Subproject commit 8dc9ba96d57c5705b99a18a380d41579e9d2d675
+Subproject commit a18df16181947edd5eb593ea0f2321e0035448ee
index afb1ee1c14594aed5bb4a762b357b01f13c9de10..33e3667085e4c73d4391c6168552458eb47664de 160000 (submodule)
@@ -1 +1 @@
-Subproject commit afb1ee1c14594aed5bb4a762b357b01f13c9de10
+Subproject commit 33e3667085e4c73d4391c6168552458eb47664de
index 1ed39f45d3e701ab7f97a72444dbb62557428532..8e46ca6cd2990e199e9ad47ab49b09ead44d7334 100644 (file)
@@ -89,6 +89,7 @@
     Crate("crc32fast"),
     Crate("crossbeam-deque"),
     Crate("crossbeam-epoch"),
+    Crate("crossbeam-queue"),
     Crate("crossbeam-utils"),
     Crate("datafrog"),
     Crate("dlmalloc"),
diff --git a/src/tools/tidy/src/error_codes_check.rs b/src/tools/tidy/src/error_codes_check.rs
new file mode 100644 (file)
index 0000000..159baff
--- /dev/null
@@ -0,0 +1,137 @@
+//! Checks that all error codes have at least one test to prevent having error
+//! codes that are silently not thrown by the compiler anymore.
+
+use std::collections::HashMap;
+use std::ffi::OsStr;
+use std::path::Path;
+
+// A few of those error codes can't be tested but all the others can and *should* be tested!
+const WHITELIST: &[&str] = &[
+    "E0183",
+    "E0227",
+    "E0279",
+    "E0280",
+    "E0311",
+    "E0313",
+    "E0314",
+    "E0315",
+    "E0377",
+    "E0456",
+    "E0461",
+    "E0462",
+    "E0464",
+    "E0465",
+    "E0472",
+    "E0473",
+    "E0474",
+    "E0475",
+    "E0476",
+    "E0479",
+    "E0480",
+    "E0481",
+    "E0482",
+    "E0483",
+    "E0484",
+    "E0485",
+    "E0486",
+    "E0487",
+    "E0488",
+    "E0489",
+    "E0514",
+    "E0519",
+    "E0523",
+    "E0526",
+    "E0554",
+    "E0570",
+    "E0629",
+    "E0630",
+    "E0640",
+    "E0717",
+    "E0727",
+    "E0729",
+];
+
+fn extract_error_codes(f: &str, error_codes: &mut HashMap<String, bool>) {
+    let mut reached_no_explanation = false;
+    let mut last_error_code = None;
+
+    for line in f.lines() {
+        let s = line.trim();
+        if s.starts_with('E') && s.ends_with(": r##\"") {
+            if let Some(err_code) = s.splitn(2, ':').next() {
+                let err_code = err_code.to_owned();
+                last_error_code = Some(err_code.clone());
+                if !error_codes.contains_key(&err_code) {
+                    error_codes.insert(err_code, false);
+                }
+            }
+        } else if s.starts_with("```") && s.contains("compile_fail") && s.contains('E') {
+            if let Some(err_code) = s.splitn(2, 'E').skip(1).next() {
+                if let Some(err_code) = err_code.splitn(2, ',').next() {
+                    let nb = error_codes.entry(format!("E{}", err_code)).or_insert(false);
+                    *nb = true;
+                }
+            }
+        } else if s == ";" {
+            reached_no_explanation = true;
+        } else if reached_no_explanation && s.starts_with('E') {
+            if let Some(err_code) = s.splitn(2, ',').next() {
+                let err_code = err_code.to_owned();
+                if !error_codes.contains_key(&err_code) { // this check should *never* fail!
+                    error_codes.insert(err_code, false);
+                }
+            }
+        } else if s.starts_with("#### Note: this error code is no longer emitted by the compiler") {
+            if let Some(last) = last_error_code {
+                error_codes.get_mut(&last).map(|x| *x = true);
+            }
+            last_error_code = None;
+        }
+    }
+}
+
+fn extract_error_codes_from_tests(f: &str, error_codes: &mut HashMap<String, bool>) {
+    for line in f.lines() {
+        let s = line.trim();
+        if s.starts_with("error[E") || s.starts_with("warning[E") {
+            if let Some(err_code) = s.splitn(2, ']').next() {
+                if let Some(err_code) = err_code.splitn(2, '[').skip(1).next() {
+                    let nb = error_codes.entry(err_code.to_owned()).or_insert(false);
+                    *nb = true;
+                }
+            }
+        }
+    }
+}
+
+pub fn check(path: &Path, bad: &mut bool) {
+    println!("Checking which error codes lack tests...");
+    let mut error_codes: HashMap<String, bool> = HashMap::new();
+    super::walk(path,
+                &mut |path| super::filter_dirs(path),
+                &mut |entry, contents| {
+        let file_name = entry.file_name();
+        if file_name == "error_codes.rs" {
+            extract_error_codes(contents, &mut error_codes);
+        } else if entry.path().extension() == Some(OsStr::new("stderr")) {
+            extract_error_codes_from_tests(contents, &mut error_codes);
+        }
+    });
+    println!("Found {} error codes", error_codes.len());
+
+    let mut errors = Vec::new();
+    for (err_code, nb) in &error_codes {
+        if !*nb && !WHITELIST.contains(&err_code.as_str()) {
+            errors.push(format!("Error code {} needs to have at least one UI test!", err_code));
+        }
+    }
+    errors.sort();
+    for err in &errors {
+        eprintln!("{}", err);
+    }
+    println!("Found {} error codes with no tests", errors.len());
+    if !errors.is_empty() {
+        *bad = true;
+    }
+    println!("Done!");
+}
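
Editorial sketch, not part of the commit: the snippet below isolates what the `.stderr` scanning in `extract_error_codes_from_tests` above recovers from a single diagnostic line. The function name `error_code_of` and the sample lines are invented for illustration.

// Mirrors the parsing above: given `error[E0277]: ...` or `warning[E...]: ...`,
// recover the bare error code ("E0277"); anything else yields None.
fn error_code_of(line: &str) -> Option<&str> {
    let s = line.trim();
    if s.starts_with("error[E") || s.starts_with("warning[E") {
        // "error[E0277]: ..." -> "error[E0277" -> "E0277"
        s.splitn(2, ']').next()?.splitn(2, '[').nth(1)
    } else {
        None
    }
}

fn main() {
    assert_eq!(
        error_code_of("error[E0277]: the trait bound `A: std::marker::Copy` is not satisfied"),
        Some("E0277")
    );
    assert_eq!(error_code_of("note: required by `IsCopy`"), None);
}

In the tidy check itself, each code found this way flips its entry in the `error_codes` map to `true`, and only codes still `false` (and not whitelisted) are reported as lacking a UI test.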
index 337f9c4d6dbedcc20b8c24a058dcdf5277658f51..eb93eb297479d03e37bc7fb15babcad4fb5c294b 100644 (file)
@@ -41,6 +41,7 @@ macro_rules! tidy_error {
 pub mod ui_tests;
 pub mod unit_tests;
 pub mod unstable_book;
+pub mod error_codes_check;
 
 fn filter_dirs(path: &Path) -> bool {
     let skip = [
index a57238ad8148ae81ad0c7e17778a53dff157fa07..e08c23c01fe2d337ef332352433716d0ceab0d8d 100644 (file)
@@ -35,6 +35,7 @@ fn main() {
     deps::check_whitelist(&path, &cargo, &mut bad);
     extdeps::check(&path, &mut bad);
     ui_tests::check(&path, &mut bad);
+    error_codes_check::check(&path, &mut bad);
 
     if bad {
         eprintln!("some tidy checks failed");
index f3509897bdf9c834db18b2bc7cbda5754c9af270..f0e3a99037b022118df470c8b7fe27578824e79e 100644 (file)
@@ -3,7 +3,19 @@ allow-unauthenticated = [
     "C-*", "A-*", "E-*", "NLL-*", "O-*", "S-*", "T-*", "WG-*", "F-*",
     "requires-nightly",
     # I-* without I-nominated
-    "I-compilemem", "I-compiletime", "I-crash", "I-hang", "I-ICE", "I-slow",
+    "I-*", "!I-nominated",
+    "AsyncAwait-OnDeck",
 ]
 
 [assign]
+
+[ping.icebreakers-llvm]
+message = """\
+Hey LLVM ICE-breakers! This bug has been identified as a good
+"LLVM ICE-breaking candidate". In case it's useful, here are some
+[instructions] for tackling these sorts of bugs. Maybe take a look?
+Thanks! <3
+
+[instructions]: https://rust-lang.github.io/rustc-guide/ice-breaker/llvm.html
+"""
+label = "ICEBreaker-LLVM"
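
Editorial note, not part of the diff: `[ping.icebreakers-llvm]` defines a triagebot ping group. As I understand triagebot's ping feature, the group is invoked from an issue comment naming the group, roughly:

@rustbot ping icebreakers-llvm

after which triagebot applies the configured `ICEBreaker-LLVM` label and posts the message above to the group's members. The exact command syntax is an assumption here; triagebot's own documentation is the source of truth.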