git.lizzy.rs Git - rust.git/commitdiff
Rollup merge of #61705 - petrhosek:llvm-cflags, r=alexcrichton
author Mazdak Farrokhzad <twingoow@gmail.com>
Wed, 12 Jun 2019 02:22:55 +0000 (04:22 +0200)
committer GitHub <noreply@github.com>
Wed, 12 Jun 2019 02:22:55 +0000 (04:22 +0200)
Pass cflags rather than cxxflags to LLVM as CMAKE_C_FLAGS

We mistakenly pass cxxflags from the configuration to the LLVM build as
CMAKE_C_FLAGS; pass the configured cflags there instead.
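
The hunk that implements this is not reproduced below. As an illustration only, here is a minimal sketch of the intended mapping, assuming the `cmake` crate's builder API that rustbuild uses to configure LLVM (the helper name and flag sources are hypothetical):

```rust
// Illustrative sketch only (hypothetical helper, not the actual rustbuild hunk):
// each flag set from the build configuration should feed the matching CMake
// variable, rather than cxxflags being reused for CMAKE_C_FLAGS.
use cmake::Config;

fn configure_llvm_flags(cfg: &mut Config, cflags: &str, cxxflags: &str) {
    cfg.define("CMAKE_C_FLAGS", cflags);     // previously received cxxflags by mistake
    cfg.define("CMAKE_CXX_FLAGS", cxxflags);
}
```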

492 files changed:
.azure-pipelines/steps/run.yml
.mailmap
Cargo.lock
README.md
src/bootstrap/builder.rs
src/bootstrap/config.rs
src/bootstrap/lib.rs
src/ci/cpu-usage-over-time.py [new file with mode: 0644]
src/ci/docker/asmjs/Dockerfile
src/ci/docker/x86_64-gnu-tools/checktools.sh
src/doc/book
src/doc/embedded-book
src/doc/reference
src/doc/rust-by-example
src/doc/rustc-guide
src/doc/rustc/src/lints/listing/warn-by-default.md
src/doc/unstable-book/src/language-features/plugin.md
src/doc/unstable-book/src/language-features/repr-align-enum.md [deleted file]
src/doc/unstable-book/src/language-features/transparent-enums.md [new file with mode: 0644]
src/doc/unstable-book/src/language-features/transparent-unions.md [new file with mode: 0644]
src/liballoc/alloc.rs
src/liballoc/lib.rs
src/libarena/lib.rs
src/libcore/cell.rs
src/libcore/intrinsics.rs
src/libcore/iter/range.rs
src/libcore/iter/traits/iterator.rs
src/libcore/lib.rs
src/libcore/mem/mod.rs
src/libcore/num/f32.rs
src/libcore/num/f64.rs
src/libcore/num/mod.rs
src/libcore/ops/range.rs
src/libcore/option.rs
src/libcore/pin.rs
src/libcore/result.rs
src/libcore/slice/mod.rs
src/libcore/str/mod.rs
src/libcore/tests/iter.rs
src/libcore/tests/lib.rs
src/libcore/tests/num/dec2flt/mod.rs
src/libcore/tests/ops.rs
src/libfmt_macros/Cargo.toml
src/libfmt_macros/lib.rs
src/librustc/hir/check_attr.rs
src/librustc/hir/intravisit.rs
src/librustc/hir/lowering.rs
src/librustc/hir/map/collector.rs
src/librustc/hir/map/def_collector.rs
src/librustc/hir/map/definitions.rs
src/librustc/hir/mod.rs
src/librustc/hir/print.rs
src/librustc/ich/hcx.rs
src/librustc/ich/impls_syntax.rs
src/librustc/ich/impls_ty.rs
src/librustc/infer/region_constraints/mod.rs
src/librustc/lib.rs
src/librustc/lint/context.rs
src/librustc/macros.rs
src/librustc/mir/cache.rs
src/librustc/mir/interpret/allocation.rs
src/librustc/mir/interpret/error.rs
src/librustc/mir/interpret/mod.rs
src/librustc/mir/interpret/pointer.rs
src/librustc/mir/interpret/value.rs
src/librustc/mir/mod.rs
src/librustc/mir/traversal.rs
src/librustc/mir/visit.rs
src/librustc/session/config.rs
src/librustc/traits/auto_trait.rs
src/librustc/traits/error_reporting.rs
src/librustc/traits/mod.rs
src/librustc/traits/on_unimplemented.rs
src/librustc/traits/query/type_op/normalize.rs
src/librustc/traits/select.rs
src/librustc/traits/structural_impls.rs
src/librustc/ty/constness.rs
src/librustc/ty/fast_reject.rs
src/librustc/ty/instance.rs
src/librustc/ty/layout.rs
src/librustc/ty/mod.rs
src/librustc/ty/print/mod.rs
src/librustc/ty/query/config.rs
src/librustc/ty/sty.rs
src/librustc/util/captures.rs
src/librustc_allocator/lib.rs
src/librustc_borrowck/borrowck/mod.rs
src/librustc_borrowck/lib.rs
src/librustc_codegen_llvm/asm.rs
src/librustc_codegen_llvm/builder.rs
src/librustc_codegen_llvm/context.rs
src/librustc_codegen_llvm/intrinsic.rs
src/librustc_codegen_llvm/lib.rs
src/librustc_codegen_ssa/lib.rs
src/librustc_codegen_ssa/mir/analyze.rs
src/librustc_codegen_ssa/mir/block.rs
src/librustc_codegen_ssa/mir/rvalue.rs
src/librustc_codegen_ssa/traits/asm.rs
src/librustc_codegen_ssa/traits/builder.rs
src/librustc_codegen_ssa/traits/mod.rs
src/librustc_codegen_ssa/traits/statics.rs
src/librustc_codegen_ssa/traits/type_.rs
src/librustc_codegen_utils/lib.rs
src/librustc_data_structures/bit_set.rs
src/librustc_data_structures/stable_hasher.rs
src/librustc_driver/lib.rs
src/librustc_errors/diagnostic_builder.rs
src/librustc_errors/emitter.rs
src/librustc_errors/lib.rs
src/librustc_incremental/assert_dep_graph.rs
src/librustc_incremental/lib.rs
src/librustc_interface/lib.rs
src/librustc_interface/util.rs
src/librustc_lint/builtin.rs
src/librustc_lint/lib.rs
src/librustc_lint/types.rs
src/librustc_metadata/creader.rs
src/librustc_metadata/cstore_impl.rs
src/librustc_metadata/decoder.rs
src/librustc_metadata/lib.rs
src/librustc_mir/borrow_check/borrow_set.rs
src/librustc_mir/borrow_check/conflict_errors.rs
src/librustc_mir/borrow_check/error_reporting.rs
src/librustc_mir/borrow_check/location.rs
src/librustc_mir/borrow_check/mod.rs
src/librustc_mir/borrow_check/move_errors.rs
src/librustc_mir/borrow_check/mutability_errors.rs
src/librustc_mir/borrow_check/nll/constraint_generation.rs
src/librustc_mir/borrow_check/nll/explain_borrow/find_use.rs
src/librustc_mir/borrow_check/nll/explain_borrow/mod.rs
src/librustc_mir/borrow_check/nll/invalidation.rs
src/librustc_mir/borrow_check/nll/mod.rs
src/librustc_mir/borrow_check/nll/region_infer/error_reporting/mod.rs
src/librustc_mir/borrow_check/nll/region_infer/error_reporting/region_name.rs
src/librustc_mir/borrow_check/nll/region_infer/error_reporting/var_name.rs
src/librustc_mir/borrow_check/nll/region_infer/mod.rs
src/librustc_mir/borrow_check/nll/region_infer/values.rs
src/librustc_mir/borrow_check/nll/renumber.rs
src/librustc_mir/borrow_check/nll/type_check/input_output.rs
src/librustc_mir/borrow_check/nll/type_check/liveness/local_use_map.rs
src/librustc_mir/borrow_check/nll/type_check/liveness/mod.rs
src/librustc_mir/borrow_check/nll/type_check/liveness/trace.rs
src/librustc_mir/borrow_check/nll/type_check/mod.rs
src/librustc_mir/borrow_check/path_utils.rs
src/librustc_mir/borrow_check/place_ext.rs
src/librustc_mir/borrow_check/places_conflict.rs
src/librustc_mir/borrow_check/prefixes.rs
src/librustc_mir/borrow_check/used_muts.rs
src/librustc_mir/build/matches/mod.rs
src/librustc_mir/build/matches/test.rs
src/librustc_mir/build/mod.rs
src/librustc_mir/const_eval.rs
src/librustc_mir/dataflow/at_location.rs
src/librustc_mir/dataflow/drop_flag_effects.rs
src/librustc_mir/dataflow/graphviz.rs
src/librustc_mir/dataflow/impls/borrowed_locals.rs
src/librustc_mir/dataflow/impls/borrows.rs
src/librustc_mir/dataflow/impls/mod.rs
src/librustc_mir/dataflow/impls/storage_liveness.rs
src/librustc_mir/dataflow/mod.rs
src/librustc_mir/dataflow/move_paths/builder.rs
src/librustc_mir/dataflow/move_paths/mod.rs
src/librustc_mir/hair/constant.rs
src/librustc_mir/interpret/cast.rs
src/librustc_mir/interpret/eval_context.rs
src/librustc_mir/interpret/intrinsics.rs
src/librustc_mir/interpret/machine.rs
src/librustc_mir/interpret/memory.rs
src/librustc_mir/interpret/operand.rs
src/librustc_mir/interpret/operator.rs
src/librustc_mir/interpret/place.rs
src/librustc_mir/interpret/snapshot.rs
src/librustc_mir/interpret/step.rs
src/librustc_mir/interpret/terminator.rs
src/librustc_mir/interpret/traits.rs
src/librustc_mir/interpret/validity.rs
src/librustc_mir/interpret/visitor.rs
src/librustc_mir/lib.rs
src/librustc_mir/lints.rs
src/librustc_mir/monomorphize/collector.rs
src/librustc_mir/shim.rs
src/librustc_mir/transform/add_call_guards.rs
src/librustc_mir/transform/add_moves_for_packed_drops.rs
src/librustc_mir/transform/add_retag.rs
src/librustc_mir/transform/check_unsafety.rs
src/librustc_mir/transform/cleanup_post_borrowck.rs
src/librustc_mir/transform/const_prop.rs
src/librustc_mir/transform/copy_prop.rs
src/librustc_mir/transform/deaggregator.rs
src/librustc_mir/transform/dump_mir.rs
src/librustc_mir/transform/elaborate_drops.rs
src/librustc_mir/transform/erase_regions.rs
src/librustc_mir/transform/generator.rs
src/librustc_mir/transform/inline.rs
src/librustc_mir/transform/instcombine.rs
src/librustc_mir/transform/lower_128bit.rs
src/librustc_mir/transform/mod.rs
src/librustc_mir/transform/no_landing_pads.rs
src/librustc_mir/transform/promote_consts.rs
src/librustc_mir/transform/qualify_consts.rs
src/librustc_mir/transform/qualify_min_const_fn.rs
src/librustc_mir/transform/remove_noop_landing_pads.rs
src/librustc_mir/transform/rustc_peek.rs
src/librustc_mir/transform/simplify.rs
src/librustc_mir/transform/simplify_branches.rs
src/librustc_mir/transform/uniform_array_move_out.rs
src/librustc_mir/util/aggregate.rs [new file with mode: 0644]
src/librustc_mir/util/def_use.rs
src/librustc_mir/util/elaborate_drops.rs
src/librustc_mir/util/graphviz.rs
src/librustc_mir/util/liveness.rs
src/librustc_mir/util/mod.rs
src/librustc_mir/util/patch.rs
src/librustc_mir/util/pretty.rs
src/librustc_passes/lib.rs
src/librustc_plugin/registry.rs
src/librustc_privacy/lib.rs
src/librustc_resolve/build_reduced_graph.rs
src/librustc_resolve/lib.rs
src/librustc_resolve/macros.rs
src/librustc_save_analysis/lib.rs
src/librustc_save_analysis/span_utils.rs
src/librustc_target/abi/mod.rs
src/librustc_target/lib.rs
src/librustc_traits/chalk_context/mod.rs
src/librustc_traits/lib.rs
src/librustc_typeck/astconv.rs
src/librustc_typeck/check/closure.rs
src/librustc_typeck/check/intrinsic.rs
src/librustc_typeck/check/mod.rs
src/librustc_typeck/collect.rs
src/librustc_typeck/error_codes.rs
src/librustc_typeck/lib.rs
src/librustdoc/clean/auto_trait.rs
src/librustdoc/clean/inline.rs
src/librustdoc/clean/mod.rs
src/librustdoc/html/highlight.rs
src/librustdoc/html/render.rs
src/librustdoc/lib.rs
src/librustdoc/passes/check_code_block_syntax.rs
src/librustdoc/passes/collect_intra_doc_links.rs
src/librustdoc/passes/mod.rs
src/libserialize/serialize.rs
src/libstd/Cargo.toml
src/libstd/f32.rs
src/libstd/f64.rs
src/libstd/primitive_docs.rs
src/libsyntax/ast.rs
src/libsyntax/attr/mod.rs
src/libsyntax/config.rs
src/libsyntax/diagnostics/plugin.rs
src/libsyntax/early_buffered_lints.rs
src/libsyntax/ext/base.rs
src/libsyntax/ext/expand.rs
src/libsyntax/ext/source_util.rs
src/libsyntax/ext/tt/macro_parser.rs
src/libsyntax/ext/tt/macro_rules.rs
src/libsyntax/ext/tt/quoted.rs
src/libsyntax/ext/tt/transcribe.rs
src/libsyntax/feature_gate.rs
src/libsyntax/lib.rs
src/libsyntax/mut_visit.rs
src/libsyntax/parse/attr.rs
src/libsyntax/parse/diagnostics.rs
src/libsyntax/parse/lexer/mod.rs
src/libsyntax/parse/lexer/tokentrees.rs
src/libsyntax/parse/literal.rs
src/libsyntax/parse/mod.rs
src/libsyntax/parse/parser.rs
src/libsyntax/parse/token.rs
src/libsyntax/parse/unescape.rs
src/libsyntax/parse/unescape_error_reporting.rs
src/libsyntax/print/pprust.rs
src/libsyntax/tokenstream.rs
src/libsyntax/util/parser.rs
src/libsyntax/visit.rs
src/libsyntax_ext/asm.rs
src/libsyntax_ext/assert.rs
src/libsyntax_ext/cfg.rs
src/libsyntax_ext/concat.rs
src/libsyntax_ext/concat_idents.rs
src/libsyntax_ext/deriving/cmp/ord.rs
src/libsyntax_ext/deriving/cmp/partial_eq.rs
src/libsyntax_ext/deriving/cmp/partial_ord.rs
src/libsyntax_ext/deriving/custom.rs
src/libsyntax_ext/deriving/hash.rs
src/libsyntax_ext/deriving/mod.rs
src/libsyntax_ext/format.rs
src/libsyntax_ext/format_foreign.rs
src/libsyntax_ext/lib.rs
src/libsyntax_ext/proc_macro_decls.rs
src/libsyntax_ext/proc_macro_server.rs
src/libsyntax_ext/trace_macros.rs
src/libsyntax_pos/lib.rs
src/libsyntax_pos/symbol.rs
src/libterm/lib.rs
src/test/codegen/align-enum.rs
src/test/codegen/exact_div.rs [deleted file]
src/test/codegen/function-arguments.rs
src/test/codegen/intrinsics/exact_div.rs [new file with mode: 0644]
src/test/codegen/intrinsics/likely.rs [new file with mode: 0644]
src/test/codegen/intrinsics/move-val-init.rs [new file with mode: 0644]
src/test/codegen/intrinsics/nontemporal.rs [new file with mode: 0644]
src/test/codegen/intrinsics/prefetch.rs [new file with mode: 0644]
src/test/codegen/intrinsics/unchecked_math.rs [new file with mode: 0644]
src/test/codegen/likely.rs [deleted file]
src/test/codegen/move-val-init.rs [deleted file]
src/test/codegen/nontemporal.rs [deleted file]
src/test/codegen/prefetch.rs [deleted file]
src/test/codegen/repr-transparent-aggregates-1.rs
src/test/codegen/repr-transparent-aggregates-2.rs
src/test/codegen/repr-transparent-aggregates-3.rs
src/test/codegen/repr-transparent.rs
src/test/codegen/unchecked_math.rs [deleted file]
src/test/mir-opt/const_prop/ref_deref.rs [new file with mode: 0644]
src/test/mir-opt/const_prop/reify_fn_ptr.rs [new file with mode: 0644]
src/test/mir-opt/const_prop/slice_len.rs
src/test/mir-opt/unusual-item-types.rs
src/test/pretty/attr-fn-inner.rs
src/test/pretty/attr-literals.rs
src/test/pretty/stmt_expr_attributes.rs
src/test/run-pass-fulldeps/auxiliary/custom-derive-partial-eq.rs [deleted file]
src/test/run-pass-fulldeps/auxiliary/custom-derive-plugin-attr.rs [deleted file]
src/test/run-pass-fulldeps/auxiliary/custom-derive-plugin.rs [deleted file]
src/test/run-pass-fulldeps/auxiliary/plugin-args.rs
src/test/run-pass-fulldeps/auxiliary/roman-numerals.rs
src/test/run-pass-fulldeps/custom-derive-partial-eq.rs [deleted file]
src/test/run-pass-fulldeps/derive-totalsum-attr.rs [deleted file]
src/test/run-pass-fulldeps/derive-totalsum.rs [deleted file]
src/test/run-pass-fulldeps/issue-40663.rs [deleted file]
src/test/run-pass/async-fn-size.rs [new file with mode: 0644]
src/test/run-pass/attr-before-view-item.rs [deleted file]
src/test/run-pass/attr-before-view-item2.rs [deleted file]
src/test/run-pass/attr-mix-new.rs [deleted file]
src/test/run-pass/auxiliary/allocator-dummy.rs [deleted file]
src/test/run-pass/check-static-recursion-foreign.rs
src/test/run-pass/const-int-conversion.rs
src/test/run-pass/const-int-overflowing.rs
src/test/run-pass/const-int-rotate.rs
src/test/run-pass/const-int-sign.rs
src/test/run-pass/const-int-wrapping.rs
src/test/run-pass/consts/const-endianess.rs
src/test/run-pass/consts/const-ptr-nonnull.rs
src/test/run-pass/consts/const-ptr-unique.rs
src/test/run-pass/generator/overlap-locals.rs [new file with mode: 0644]
src/test/run-pass/item-attributes.rs [deleted file]
src/test/run-pass/lexer-crlf-line-endings-string-literal-doc-comment.rs
src/test/run-pass/macros/macro-at-most-once-rep-2015.rs [new file with mode: 0644]
src/test/run-pass/macros/macro-at-most-once-rep-2018.rs [new file with mode: 0644]
src/test/run-pass/macros/macro-at-most-once-rep.rs [deleted file]
src/test/run-pass/macros/macro-attributes.rs
src/test/run-pass/methods/method-attributes.rs [deleted file]
src/test/run-pass/rfcs/rfc-1789-as-cell/from-mut.rs
src/test/run-pass/structs-enums/align-enum.rs
src/test/run-pass/structs-enums/class-attributes-1.rs [deleted file]
src/test/run-pass/structs-enums/class-attributes-2.rs [deleted file]
src/test/run-pass/structs-enums/enum-null-pointer-opt.rs
src/test/run-pass/variant-attributes.rs [deleted file]
src/test/run-pass/weird-exprs.rs
src/test/rustdoc/const-generics/const-generic-slice.rs [new file with mode: 0644]
src/test/ui/access-mode-in-closures.stderr
src/test/ui/async-await/issue-61452.rs [new file with mode: 0644]
src/test/ui/async-await/issue-61452.stderr [new file with mode: 0644]
src/test/ui/attr-eq-token-tree.rs
src/test/ui/attr-eq-token-tree.stderr
src/test/ui/attr-usage-repr.rs
src/test/ui/attr-usage-repr.stderr
src/test/ui/attributes/attr-before-view-item.rs [new file with mode: 0644]
src/test/ui/attributes/attr-before-view-item2.rs [new file with mode: 0644]
src/test/ui/attributes/attr-mix-new.rs [new file with mode: 0644]
src/test/ui/attributes/attrs-with-no-formal-in-generics-1.rs [new file with mode: 0644]
src/test/ui/attributes/attrs-with-no-formal-in-generics-1.stderr [new file with mode: 0644]
src/test/ui/attributes/attrs-with-no-formal-in-generics-2.rs [new file with mode: 0644]
src/test/ui/attributes/attrs-with-no-formal-in-generics-2.stderr [new file with mode: 0644]
src/test/ui/attributes/attrs-with-no-formal-in-generics-3.rs [new file with mode: 0644]
src/test/ui/attributes/attrs-with-no-formal-in-generics-3.stderr [new file with mode: 0644]
src/test/ui/attributes/class-attributes-1.rs [new file with mode: 0644]
src/test/ui/attributes/class-attributes-2.rs [new file with mode: 0644]
src/test/ui/attributes/item-attributes.rs [new file with mode: 0644]
src/test/ui/attributes/method-attributes.rs [new file with mode: 0644]
src/test/ui/attributes/obsolete-attr.rs [new file with mode: 0644]
src/test/ui/attributes/obsolete-attr.stderr [new file with mode: 0644]
src/test/ui/attributes/unknown-attr.rs [new file with mode: 0644]
src/test/ui/attributes/unknown-attr.stderr [new file with mode: 0644]
src/test/ui/attributes/variant-attributes.rs [new file with mode: 0644]
src/test/ui/attrs-with-no-formal-in-generics/attrs-with-no-formal-in-generics-1.rs [deleted file]
src/test/ui/attrs-with-no-formal-in-generics/attrs-with-no-formal-in-generics-1.stderr [deleted file]
src/test/ui/attrs-with-no-formal-in-generics/attrs-with-no-formal-in-generics-2.rs [deleted file]
src/test/ui/attrs-with-no-formal-in-generics/attrs-with-no-formal-in-generics-2.stderr [deleted file]
src/test/ui/attrs-with-no-formal-in-generics/attrs-with-no-formal-in-generics-3.rs [deleted file]
src/test/ui/attrs-with-no-formal-in-generics/attrs-with-no-formal-in-generics-3.stderr [deleted file]
src/test/ui/borrowck/borrowck-issue-2657-2.stderr
src/test/ui/borrowck/borrowck-move-error-with-note.stderr
src/test/ui/borrowck/borrowck-move-from-unsafe-ptr.stderr
src/test/ui/borrowck/borrowck-move-out-of-overloaded-deref.stderr
src/test/ui/borrowck/issue-54597-reject-move-out-of-borrow-via-pat.stderr
src/test/ui/const-generics/array-wrapper-struct-ctor.rs [new file with mode: 0644]
src/test/ui/const-generics/array-wrapper-struct-ctor.stderr [new file with mode: 0644]
src/test/ui/const-generics/derive-debug-array-wrapper.rs [new file with mode: 0644]
src/test/ui/const-generics/derive-debug-array-wrapper.stderr [new file with mode: 0644]
src/test/ui/const-generics/issue-61336-1.rs [new file with mode: 0644]
src/test/ui/const-generics/issue-61336-1.stderr [new file with mode: 0644]
src/test/ui/const-generics/issue-61336.rs [new file with mode: 0644]
src/test/ui/const-generics/issue-61336.stderr [new file with mode: 0644]
src/test/ui/consts/const-eval/promoted_errors.stderr
src/test/ui/consts/const_constructor/const-construct-call.rs [new file with mode: 0644]
src/test/ui/consts/const_constructor/feature-gate-const_constructor.const_fn.stderr [new file with mode: 0644]
src/test/ui/consts/const_constructor/feature-gate-const_constructor.min_const_fn.stderr [new file with mode: 0644]
src/test/ui/consts/const_constructor/feature-gate-const_constructor.rs [new file with mode: 0644]
src/test/ui/consts/min_const_fn/min_const_fn.nll.stderr
src/test/ui/consts/min_const_fn/min_const_fn.rs
src/test/ui/consts/min_const_fn/min_const_fn.stderr
src/test/ui/consts/single_variant_match_ice.rs
src/test/ui/consts/single_variant_match_ice.stderr
src/test/ui/custom-attribute-multisegment.rs
src/test/ui/custom-attribute-multisegment.stderr
src/test/ui/error-codes/E0517.stderr
src/test/ui/expanded-cfg.rs
src/test/ui/extenv/issue-55897.rs
src/test/ui/extenv/issue-55897.stderr
src/test/ui/feature-gates/feature-gate-repr_align_enum.rs [deleted file]
src/test/ui/feature-gates/feature-gate-repr_align_enum.stderr [deleted file]
src/test/ui/feature-gates/feature-gate-transparent_enums.rs [new file with mode: 0644]
src/test/ui/feature-gates/feature-gate-transparent_enums.stderr [new file with mode: 0644]
src/test/ui/feature-gates/feature-gate-transparent_unions.rs [new file with mode: 0644]
src/test/ui/feature-gates/feature-gate-transparent_unions.stderr [new file with mode: 0644]
src/test/ui/issues/issue-14309.stderr
src/test/ui/issues/issue-16250.stderr
src/test/ui/issues/issue-20801.stderr
src/test/ui/issues/issue-24434.rs
src/test/ui/issues/issue-31769.rs
src/test/ui/issues/issue-31769.stderr
src/test/ui/issues/issue-39388.rs
src/test/ui/issues/issue-39388.stderr
src/test/ui/issues/issue-43988.stderr
src/test/ui/lint/lint-ctypes-enum.rs
src/test/ui/lint/lint-ctypes-enum.stderr
src/test/ui/lint/lint-ctypes.stderr
src/test/ui/lint/lint-obsolete-attr.rs [deleted file]
src/test/ui/lint/lint-obsolete-attr.stderr [deleted file]
src/test/ui/lint/lint-unknown-attr.rs [deleted file]
src/test/ui/lint/lint-unknown-attr.stderr [deleted file]
src/test/ui/macros/macro-at-most-once-rep-2015-ques-rep.rs [deleted file]
src/test/ui/macros/macro-at-most-once-rep-2015-ques-rep.stderr [deleted file]
src/test/ui/macros/macro-at-most-once-rep-2015-ques-sep.rs [deleted file]
src/test/ui/macros/macro-at-most-once-rep-2015-ques-sep.stderr [deleted file]
src/test/ui/macros/macro-at-most-once-rep-2015.rs [new file with mode: 0644]
src/test/ui/macros/macro-at-most-once-rep-2015.stderr [new file with mode: 0644]
src/test/ui/macros/macro-at-most-once-rep-2018.rs
src/test/ui/macros/macro-at-most-once-rep-2018.stderr
src/test/ui/macros/macro-inner-attributes.rs
src/test/ui/macros/macro-input-future-proofing.stderr
src/test/ui/macros/macro-outer-attributes.rs
src/test/ui/macros/nonterminal-matching.stderr
src/test/ui/malformed/malformed-interpolated.rs
src/test/ui/malformed/malformed-interpolated.stderr
src/test/ui/nll/cannot-move-block-spans.stderr
src/test/ui/nll/move-errors.stderr
src/test/ui/nll/user-annotations/adt-tuple-struct-calls.rs [new file with mode: 0644]
src/test/ui/nll/user-annotations/adt-tuple-struct-calls.stderr [new file with mode: 0644]
src/test/ui/parser/lex-bare-cr-string-literal-doc-comment.stderr
src/test/ui/parser/macro/issue-33569.rs
src/test/ui/parser/macro/issue-33569.stderr
src/test/ui/parser/raw-byte-string-literals.rs
src/test/ui/parser/raw-byte-string-literals.stderr
src/test/ui/proc-macro/resolve-error.rs
src/test/ui/proc-macro/resolve-error.stderr
src/test/ui/repr/repr-align.rs
src/test/ui/repr/repr-align.stderr
src/test/ui/repr/repr-transparent-other-items.rs
src/test/ui/repr/repr-transparent-other-items.stderr
src/test/ui/repr/repr-transparent.rs
src/test/ui/repr/repr-transparent.stderr
src/test/ui/save-analysis/emit-notifications.nll.stderr [new file with mode: 0644]
src/test/ui/save-analysis/emit-notifications.rs [new file with mode: 0644]
src/test/ui/save-analysis/emit-notifications.stderr [new file with mode: 0644]
src/test/ui/std-uncopyable-atomics.stderr
src/test/ui/stmt_expr_attrs_no_feature.rs
src/test/ui/stmt_expr_attrs_no_feature.stderr
src/test/ui/suffixed-literal-meta.rs
src/test/ui/suffixed-literal-meta.stderr
src/test/ui/suggestions/dont-suggest-ref/simple.rs
src/test/ui/suggestions/dont-suggest-ref/simple.stderr
src/test/ui/tool-attributes/tool-attributes-misplaced-1.rs
src/test/ui/tool-attributes/tool-attributes-misplaced-1.stderr
src/test/ui/traits/trait-with-dst.rs [new file with mode: 0644]
src/test/ui/union/union-repr-c.stderr
src/test/ui/unrestricted-attribute-tokens.rs
src/test/ui/unused/unused-attr.rs
src/test/ui/unused/unused-attr.stderr
src/tools/clippy
src/tools/rls

index a646b34fe7d024adb4cf8f4ba1dbfa967c326e44..4875e2c6754a4724938cc590bb023946c43f75a9 100644 (file)
--- a/.azure-pipelines/steps/run.yml
+++ b/.azure-pipelines/steps/run.yml
@@ -11,6 +11,12 @@ steps:
 - checkout: self
   fetchDepth: 2
 
+# Spawn a background process to collect CPU usage statistics which we'll upload
+# at the end of the build. See the comments in the script here for more
+# information.
+- bash: python src/ci/cpu-usage-over-time.py &> cpu-usage.csv &
+  displayName: "Collect CPU-usage statistics in the background"
+
 - bash: printenv | sort
   displayName: Show environment variables
 
@@ -74,8 +80,9 @@ steps:
 # images, etc.
 - bash: |
     set -e
+    source src/ci/shared.sh
     sudo apt-get install -y python3-setuptools
-    pip3 install awscli --upgrade --user
+    retry pip3 install awscli --upgrade --user
     echo "##vso[task.prependpath]$HOME/.local/bin"
   displayName: Install awscli (Linux)
   condition: and(succeeded(), eq(variables['Agent.OS'], 'Linux'))
@@ -141,3 +148,13 @@ steps:
     AWS_SECRET_ACCESS_KEY: $(AWS_SECRET_ACCESS_KEY)
   condition: and(succeeded(), or(eq(variables.DEPLOY, '1'), eq(variables.DEPLOY_ALT, '1')))
   displayName: Upload artifacts
+
+# Upload CPU usage statistics that we've been gathering this whole time. Always
+# execute this step in case we want to inspect failed builds, but don't let
+# errors here ever fail the build since this is just informational.
+- bash: aws s3 cp --acl public-read cpu-usage.csv s3://$DEPLOY_BUCKET/rustc-builds/$BUILD_SOURCEVERSION/cpu-$SYSTEM_JOBNAME.csv
+  env:
+    AWS_SECRET_ACCESS_KEY: $(AWS_SECRET_ACCESS_KEY)
+  condition: contains(variables, 'AWS_SECRET_ACCESS_KEY')
+  continueOnError: true
+  displayName: Upload CPU usage statistics
index 679aa55d314585e447c68c07e8c3fa011cdd45c8..5673cc5cfbc9db3abd85d60332310f6348b7ff5e 100644 (file)
--- a/.mailmap
+++ b/.mailmap
@@ -56,6 +56,7 @@ Chris Thorn <chris@thorn.co> Chris Thorn <thorn@thoughtbot.com>
 Chris Vittal <christopher.vittal@gmail.com> Christopher Vittal <christopher.vittal@gmail.com>
 Christian Poveda <christianpoveda@protonmail.com> <cn.poveda.ruiz@gmail.com>
 Christian Poveda <christianpoveda@protonmail.com> <z1mvader@protonmail.com>
+Christian Poveda <christianpoveda@protonmail.com> <cpovedar@fnal.gov>
 Clark Gaebel <cg.wowus.cg@gmail.com> <cgaebel@mozilla.com>
 Clinton Ryan <clint.ryan3@gmail.com>
 Corey Richardson <corey@octayn.net> Elaine "See More" Nemo <corey@octayn.net>
@@ -139,6 +140,7 @@ Kang Seonghoon <kang.seonghoon@mearie.org> <public+git@mearie.org>
 Keegan McAllister <mcallister.keegan@gmail.com> <kmcallister@mozilla.com>
 Kevin Butler <haqkrs@gmail.com>
 Kyeongwoon Lee <kyeongwoon.lee@samsung.com>
+Laurențiu Nicola <lnicola@dend.ro>
 Lee Jeffery <leejeffery@gmail.com> Lee Jeffery <lee@leejeffery.co.uk>
 Lee Wondong <wdlee91@gmail.com>
 Lennart Kudling <github@kudling.de>
index 7892bc70695651b8a7cf42e4b09e420486e4e3af..10beb3af6ea40c7b5c6f60d5b7e22e573d4c9bc9 100644 (file)
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -25,7 +25,7 @@ dependencies = [
 name = "alloc"
 version = "0.0.0"
 dependencies = [
- "compiler_builtins 0.1.15 (registry+https://github.com/rust-lang/crates.io-index)",
+ "compiler_builtins 0.1.16 (registry+https://github.com/rust-lang/crates.io-index)",
  "core 0.0.0",
  "rand 0.6.1 (registry+https://github.com/rust-lang/crates.io-index)",
  "rand_xorshift 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -118,7 +118,7 @@ dependencies = [
  "autocfg 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)",
  "backtrace-sys 0.1.27 (registry+https://github.com/rust-lang/crates.io-index)",
  "cfg-if 0.1.8 (registry+https://github.com/rust-lang/crates.io-index)",
- "compiler_builtins 0.1.15 (registry+https://github.com/rust-lang/crates.io-index)",
+ "compiler_builtins 0.1.16 (registry+https://github.com/rust-lang/crates.io-index)",
  "libc 0.2.54 (registry+https://github.com/rust-lang/crates.io-index)",
  "rustc-demangle 0.1.15 (registry+https://github.com/rust-lang/crates.io-index)",
  "rustc-std-workspace-core 1.0.0",
@@ -130,7 +130,7 @@ version = "0.1.27"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
  "cc 1.0.35 (registry+https://github.com/rust-lang/crates.io-index)",
- "compiler_builtins 0.1.15 (registry+https://github.com/rust-lang/crates.io-index)",
+ "compiler_builtins 0.1.16 (registry+https://github.com/rust-lang/crates.io-index)",
  "libc 0.2.54 (registry+https://github.com/rust-lang/crates.io-index)",
  "rustc-std-workspace-core 1.0.0",
 ]
@@ -357,7 +357,7 @@ name = "cfg-if"
 version = "0.1.8"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
- "compiler_builtins 0.1.15 (registry+https://github.com/rust-lang/crates.io-index)",
+ "compiler_builtins 0.1.16 (registry+https://github.com/rust-lang/crates.io-index)",
  "rustc-std-workspace-core 1.0.0",
 ]
 
@@ -486,7 +486,7 @@ dependencies = [
 
 [[package]]
 name = "compiler_builtins"
-version = "0.1.15"
+version = "0.1.16"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
  "cc 1.0.35 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -768,7 +768,7 @@ name = "dlmalloc"
 version = "0.1.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
- "compiler_builtins 0.1.15 (registry+https://github.com/rust-lang/crates.io-index)",
+ "compiler_builtins 0.1.16 (registry+https://github.com/rust-lang/crates.io-index)",
  "libc 0.2.54 (registry+https://github.com/rust-lang/crates.io-index)",
  "rustc-std-workspace-core 1.0.0",
 ]
@@ -910,6 +910,9 @@ dependencies = [
 [[package]]
 name = "fmt_macros"
 version = "0.0.0"
+dependencies = [
+ "syntax_pos 0.0.0",
+]
 
 [[package]]
 name = "fnv"
@@ -934,7 +937,7 @@ name = "fortanix-sgx-abi"
 version = "0.3.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
- "compiler_builtins 0.1.15 (registry+https://github.com/rust-lang/crates.io-index)",
+ "compiler_builtins 0.1.16 (registry+https://github.com/rust-lang/crates.io-index)",
  "rustc-std-workspace-core 1.0.0",
 ]
 
@@ -1095,7 +1098,7 @@ name = "hashbrown"
 version = "0.4.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
- "compiler_builtins 0.1.15 (registry+https://github.com/rust-lang/crates.io-index)",
+ "compiler_builtins 0.1.16 (registry+https://github.com/rust-lang/crates.io-index)",
  "rustc-std-workspace-alloc 1.0.0",
  "rustc-std-workspace-core 1.0.0",
 ]
@@ -1277,7 +1280,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 
 [[package]]
 name = "jsonrpc-core"
-version = "10.0.1"
+version = "12.0.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
  "futures 0.1.21 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -1407,7 +1410,7 @@ dependencies = [
 
 [[package]]
 name = "lsp-types"
-version = "0.57.0"
+version = "0.57.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
  "bitflags 1.0.4 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -1803,7 +1806,7 @@ dependencies = [
 name = "panic_abort"
 version = "0.0.0"
 dependencies = [
- "compiler_builtins 0.1.15 (registry+https://github.com/rust-lang/crates.io-index)",
+ "compiler_builtins 0.1.16 (registry+https://github.com/rust-lang/crates.io-index)",
  "core 0.0.0",
  "libc 0.2.54 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
@@ -1813,7 +1816,7 @@ name = "panic_unwind"
 version = "0.0.0"
 dependencies = [
  "alloc 0.0.0",
- "compiler_builtins 0.1.15 (registry+https://github.com/rust-lang/crates.io-index)",
+ "compiler_builtins 0.1.16 (registry+https://github.com/rust-lang/crates.io-index)",
  "core 0.0.0",
  "libc 0.2.54 (registry+https://github.com/rust-lang/crates.io-index)",
  "unwind 0.0.0",
@@ -1998,7 +2001,7 @@ name = "profiler_builtins"
 version = "0.0.0"
 dependencies = [
  "cc 1.0.35 (registry+https://github.com/rust-lang/crates.io-index)",
- "compiler_builtins 0.1.15 (registry+https://github.com/rust-lang/crates.io-index)",
+ "compiler_builtins 0.1.16 (registry+https://github.com/rust-lang/crates.io-index)",
  "core 0.0.0",
 ]
 
@@ -2263,10 +2266,10 @@ dependencies = [
 
 [[package]]
 name = "rls"
-version = "1.36.0"
+version = "1.37.0"
 dependencies = [
  "cargo 0.38.0",
- "cargo_metadata 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "cargo_metadata 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)",
  "clippy_lints 0.0.212",
  "crossbeam-channel 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)",
  "difference 2.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -2276,11 +2279,11 @@ dependencies = [
  "heck 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)",
  "home 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)",
  "itertools 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)",
- "jsonrpc-core 10.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "jsonrpc-core 12.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
  "lazy_static 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)",
  "log 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)",
  "lsp-codec 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)",
- "lsp-types 0.57.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "lsp-types 0.57.2 (registry+https://github.com/rust-lang/crates.io-index)",
  "num_cpus 1.8.0 (registry+https://github.com/rust-lang/crates.io-index)",
  "ordslice 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)",
  "racer 2.1.22 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -2295,7 +2298,7 @@ dependencies = [
  "rls-vfs 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)",
  "rustc-serialize 0.3.24 (registry+https://github.com/rust-lang/crates.io-index)",
  "rustc-workspace-hack 1.0.0",
- "rustc_tools_util 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "rustc_tools_util 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
  "rustfmt-nightly 1.2.2",
  "serde 1.0.82 (registry+https://github.com/rust-lang/crates.io-index)",
  "serde_derive 1.0.81 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -2518,7 +2521,7 @@ name = "rustc-demangle"
 version = "0.1.15"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
- "compiler_builtins 0.1.15 (registry+https://github.com/rust-lang/crates.io-index)",
+ "compiler_builtins 0.1.16 (registry+https://github.com/rust-lang/crates.io-index)",
  "rustc-std-workspace-core 1.0.0",
 ]
 
@@ -2646,7 +2649,7 @@ dependencies = [
  "alloc 0.0.0",
  "build_helper 0.1.0",
  "cmake 0.1.38 (registry+https://github.com/rust-lang/crates.io-index)",
- "compiler_builtins 0.1.15 (registry+https://github.com/rust-lang/crates.io-index)",
+ "compiler_builtins 0.1.16 (registry+https://github.com/rust-lang/crates.io-index)",
  "core 0.0.0",
 ]
 
@@ -2878,7 +2881,7 @@ dependencies = [
  "alloc 0.0.0",
  "build_helper 0.1.0",
  "cmake 0.1.38 (registry+https://github.com/rust-lang/crates.io-index)",
- "compiler_builtins 0.1.15 (registry+https://github.com/rust-lang/crates.io-index)",
+ "compiler_builtins 0.1.16 (registry+https://github.com/rust-lang/crates.io-index)",
  "core 0.0.0",
 ]
 
@@ -2941,7 +2944,7 @@ dependencies = [
  "alloc 0.0.0",
  "build_helper 0.1.0",
  "cmake 0.1.38 (registry+https://github.com/rust-lang/crates.io-index)",
- "compiler_builtins 0.1.15 (registry+https://github.com/rust-lang/crates.io-index)",
+ "compiler_builtins 0.1.16 (registry+https://github.com/rust-lang/crates.io-index)",
  "core 0.0.0",
 ]
 
@@ -3030,12 +3033,12 @@ dependencies = [
 
 [[package]]
 name = "rustc_tools_util"
-version = "0.1.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
+version = "0.2.0"
 
 [[package]]
 name = "rustc_tools_util"
 version = "0.2.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
 
 [[package]]
 name = "rustc_traits"
@@ -3060,7 +3063,7 @@ dependencies = [
  "alloc 0.0.0",
  "build_helper 0.1.0",
  "cmake 0.1.38 (registry+https://github.com/rust-lang/crates.io-index)",
- "compiler_builtins 0.1.15 (registry+https://github.com/rust-lang/crates.io-index)",
+ "compiler_builtins 0.1.16 (registry+https://github.com/rust-lang/crates.io-index)",
  "core 0.0.0",
 ]
 
@@ -3338,7 +3341,7 @@ dependencies = [
  "alloc 0.0.0",
  "backtrace 0.3.29 (registry+https://github.com/rust-lang/crates.io-index)",
  "cc 1.0.35 (registry+https://github.com/rust-lang/crates.io-index)",
- "compiler_builtins 0.1.15 (registry+https://github.com/rust-lang/crates.io-index)",
+ "compiler_builtins 0.1.16 (registry+https://github.com/rust-lang/crates.io-index)",
  "core 0.0.0",
  "dlmalloc 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)",
  "fortanix-sgx-abi 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -3936,7 +3939,7 @@ name = "unwind"
 version = "0.0.0"
 dependencies = [
  "cc 1.0.35 (registry+https://github.com/rust-lang/crates.io-index)",
- "compiler_builtins 0.1.15 (registry+https://github.com/rust-lang/crates.io-index)",
+ "compiler_builtins 0.1.16 (registry+https://github.com/rust-lang/crates.io-index)",
  "core 0.0.0",
  "libc 0.2.54 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
@@ -4149,7 +4152,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 "checksum colored 1.6.0 (registry+https://github.com/rust-lang/crates.io-index)" = "b0aa3473e85a3161b59845d6096b289bb577874cafeaf75ea1b1beaa6572c7fc"
 "checksum commoncrypto 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "d056a8586ba25a1e4d61cb090900e495952c7886786fc55f909ab2f819b69007"
 "checksum commoncrypto-sys 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "1fed34f46747aa73dfaa578069fd8279d2818ade2b55f38f22a9401c7f4083e2"
-"checksum compiler_builtins 0.1.15 (registry+https://github.com/rust-lang/crates.io-index)" = "e899b947d7e71c3d35c0b6194d64025b84946640510e215090c815b20828964e"
+"checksum compiler_builtins 0.1.16 (registry+https://github.com/rust-lang/crates.io-index)" = "e79ed19793c99771b386d76e08c3419409bb3d418b81a8b8afc73524247461cf"
 "checksum compiletest_rs 0.3.22 (registry+https://github.com/rust-lang/crates.io-index)" = "f40ecc9332b68270998995c00f8051ee856121764a0d3230e64c9efd059d27b6"
 "checksum constant_time_eq 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)" = "8ff012e225ce166d4422e0e78419d901719760f62ae2b7969ca6b564d1b54a9e"
 "checksum core-foundation 0.6.3 (registry+https://github.com/rust-lang/crates.io-index)" = "4e2640d6d0bf22e82bed1b73c6aef8d5dd31e5abe6666c57e6d45e2649f4f887"
@@ -4229,7 +4232,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 "checksum jemalloc-sys 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "7bef0d4ce37578dfd80b466e3d8324bd9de788e249f1accebb0c472ea4b52bdc"
 "checksum jobserver 0.1.13 (registry+https://github.com/rust-lang/crates.io-index)" = "b3d51e24009d966c8285d524dbaf6d60926636b2a89caee9ce0bd612494ddc16"
 "checksum json 0.11.13 (registry+https://github.com/rust-lang/crates.io-index)" = "9ad0485404155f45cce53a40d4b2d6ac356418300daed05273d9e26f91c390be"
-"checksum jsonrpc-core 10.0.1 (registry+https://github.com/rust-lang/crates.io-index)" = "7a5152c3fda235dfd68341b3edf4121bc4428642c93acbd6de88c26bf95fc5d7"
+"checksum jsonrpc-core 12.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "288dca7f9713710a29e485076b9340156cb701edb46a881f5d0c31aa4f5b9143"
 "checksum kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "7507624b29483431c0ba2d82aece8ca6cdba9382bff4ddd0f7490560c056098d"
 "checksum lazy_static 0.2.11 (registry+https://github.com/rust-lang/crates.io-index)" = "76f033c7ad61445c5b347c7382dd1237847eb1bce590fe50365dcb33d546be73"
 "checksum lazy_static 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "bc5729f27f159ddd61f4df6228e827e86643d4d3e7c32183cb30a1c08f604a14"
@@ -4243,7 +4246,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 "checksum log 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)" = "c84ec4b527950aa83a329754b01dbe3f58361d1c5efacd1f6d68c494d08a17c6"
 "checksum log_settings 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "19af41f0565d7c19b2058153ad0b42d4d5ce89ec4dbf06ed6741114a8b63e7cd"
 "checksum lsp-codec 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "169d737ad89cf8ddd82d1804d9122f54568c49377665157277cc90d747b1d31a"
-"checksum lsp-types 0.57.0 (registry+https://github.com/rust-lang/crates.io-index)" = "d220de1fbbb12b60df17898272579c22329375fc4baa960402fbd17cf0cdd165"
+"checksum lsp-types 0.57.2 (registry+https://github.com/rust-lang/crates.io-index)" = "b62b77309737b1e262b3bbf37ff8faa740562c633b14702afe9be85dbcb6f88a"
 "checksum lzma-sys 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)" = "d1eaa027402541975218bb0eec67d6b0412f6233af96e0d096d31dbdfd22e614"
 "checksum mac 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "c41e0c4fef86961ac6d6f8a82609f55f31b05e4fce149ac5710e439df7619ba4"
 "checksum macro-utils 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "f2c4deaccc2ead6a28c16c0ba82f07d52b6475397415ce40876e559b0b0ea510"
@@ -4350,7 +4353,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 "checksum rustc-rayon-core 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "526e7b6d2707a5b9bec3927d424ad70fa3cfc68e0ac1b75e46cdbbc95adc5108"
 "checksum rustc-rayon-core 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "79d38ca7cbc22fa59f09d8534ea4b27f67b0facf0cbe274433aceea227a02543"
 "checksum rustc-serialize 0.3.24 (registry+https://github.com/rust-lang/crates.io-index)" = "dcf128d1287d2ea9d80910b5f1120d0b8eede3fbf1abe91c40d39ea7d51e6fda"
-"checksum rustc_tools_util 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "b3c5a95edfa0c893236ae4778bb7c4752760e4c0d245e19b5eff33c5aa5eb9dc"
+"checksum rustc_tools_util 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "b725dadae9fabc488df69a287f5a99c5eaf5d10853842a8a3dfac52476f544ee"
 "checksum rustc_version 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)" = "138e3e0acb6c9fb258b19b67cb8abd63c00679d2851805ea151465464fe9030a"
 "checksum rustfix 0.4.4 (registry+https://github.com/rust-lang/crates.io-index)" = "af7c21531a91512a4a51b490be6ba1c8eff34fdda0dc5bf87dc28d86748aac56"
 "checksum rusty-fork 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "9591f190d2852720b679c21f66ad929f9f1d7bb09d1193c26167586029d8489c"
index b522b161ecf939d23c877e6448ddcd88cb6bef4d..15d09f4aada3b0090780252a39f96eddb026c7ed 100644 (file)
--- a/README.md
+++ b/README.md
@@ -130,9 +130,9 @@ build.
 
 MSVC builds of Rust additionally require an installation of Visual Studio 2017
 (or later) so `rustc` can use its linker.  The simplest way is to get the
-[Visual Studio Build Tools] and check the “C++ build tools” workload.
+[Visual Studio] and check the “C++ build tools” and “Windows 10 SDK” workloads.
 
-[Visual Studio Build Tools]: https://visualstudio.microsoft.com/downloads/#build-tools-for-visual-studio-2019
+[Visual Studio]: https://visualstudio.microsoft.com/downloads/
 
 (If you're installing cmake yourself, be careful that “C++ CMake tools for
 Windows” doesn't get included under “Individual components”.)
index 1d3b4fe33c8119b40cea14d1ba1c9cee96331311..2281a45e014a9cb0d89ddb4004bc8ae01c274076 100644 (file)
--- a/src/bootstrap/builder.rs
+++ b/src/bootstrap/builder.rs
@@ -59,7 +59,7 @@ pub trait Step: 'static + Clone + Debug + PartialEq + Eq + Hash {
 
     const DEFAULT: bool = false;
 
-    /// Run this rule for all hosts without cross compiling.
+    /// If true, then this rule should be skipped if --target was specified, but --host was not
     const ONLY_HOSTS: bool = false;
 
     /// Primary function to execute this rule. Can call `builder.ensure()`
@@ -163,7 +163,7 @@ fn maybe_run(&self, builder: &Builder<'_>, pathset: &PathSet) {
 
         // Determine the targets participating in this rule.
         let targets = if self.only_hosts {
-            if !builder.config.run_host_only {
+            if builder.config.skip_only_host_steps {
                 return; // don't run anything
             } else {
                 &builder.hosts
@@ -1338,7 +1338,7 @@ fn configure(host: &[&str], target: &[&str]) -> Config {
         let mut config = Config::default_opts();
         // don't save toolstates
         config.save_toolstates = None;
-        config.run_host_only = true;
+        config.skip_only_host_steps = false;
         config.dry_run = true;
         // try to avoid spurious failures in dist where we create/delete each others file
         let dir = config.out.join("tmp-rustbuild-tests").join(
@@ -1583,7 +1583,7 @@ fn dist_with_targets_and_hosts() {
     #[test]
     fn dist_with_target_flag() {
         let mut config = configure(&["B"], &["C"]);
-        config.run_host_only = false; // as-if --target=C was passed
+        config.skip_only_host_steps = true; // as-if --target=C was passed
         let build = Build::new(config);
         let mut builder = Builder::new(&build);
         builder.run_step_descriptions(&Builder::get_step_descriptions(Kind::Dist), &[]);
@@ -1831,7 +1831,7 @@ fn build_default() {
     #[test]
     fn build_with_target_flag() {
         let mut config = configure(&["B"], &["C"]);
-        config.run_host_only = false;
+        config.skip_only_host_steps = true;
         let build = Build::new(config);
         let mut builder = Builder::new(&build);
         builder.run_step_descriptions(&Builder::get_step_descriptions(Kind::Build), &[]);
index edeb07fda1d59d923dc1f32403568ac927c57806..66f504ea924e9b880b525ea47409d29db868286e 100644 (file)
--- a/src/bootstrap/config.rs
+++ b/src/bootstrap/config.rs
@@ -51,7 +51,7 @@ pub struct Config {
     pub test_compare_mode: bool,
     pub llvm_libunwind: bool,
 
-    pub run_host_only: bool,
+    pub skip_only_host_steps: bool,
 
     pub on_fail: Option<String>,
     pub stage: Option<u32>,
@@ -416,7 +416,9 @@ pub fn parse(args: &[String]) -> Config {
         }
 
         // If --target was specified but --host wasn't specified, don't run any host-only tests.
-        config.run_host_only = !(flags.host.is_empty() && !flags.target.is_empty());
+        let has_hosts = !flags.host.is_empty();
+        let has_targets = !flags.target.is_empty();
+        config.skip_only_host_steps = !has_hosts && has_targets;
 
         let toml = file.map(|file| {
             let contents = t!(fs::read_to_string(&file));
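
In other words, the renamed flag inverts the old `run_host_only` boolean: host-only steps are skipped exactly when `--target` is given without `--host`. A standalone sketch of that predicate (a hypothetical free function mirroring the logic above, not a quote from the diff):

```rust
/// Hypothetical helper mirroring the new bootstrap logic: host-only steps are
/// skipped only when --target was passed without --host.
fn skip_only_host_steps(hosts: &[String], targets: &[String]) -> bool {
    let has_hosts = !hosts.is_empty();
    let has_targets = !targets.is_empty();
    !has_hosts && has_targets
}

fn main() {
    // --target only: skip host-only steps.
    assert!(skip_only_host_steps(&[], &["wasm32-unknown-unknown".to_string()]));
    // --host only, or neither flag: keep running host-only steps.
    assert!(!skip_only_host_steps(&["x86_64-unknown-linux-gnu".to_string()], &[]));
    assert!(!skip_only_host_steps(&[], &[]));
}
```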
index ca4489655ca7b46c77d6adf4eca7667e78d4b4c1..b9d287abb0c7e6c3371eaad39bab3a591bf1a90e 100644 (file)
--- a/src/bootstrap/lib.rs
+++ b/src/bootstrap/lib.rs
@@ -1214,8 +1214,7 @@ pub fn replace_in_file(&self, path: &Path, replacements: &[(&str, &str)]) {
     /// when this function is called.
     pub fn cp_r(&self, src: &Path, dst: &Path) {
         if self.config.dry_run { return; }
-        for f in t!(fs::read_dir(src)) {
-            let f = t!(f);
+        for f in self.read_dir(src) {
             let path = f.path();
             let name = path.file_name().unwrap();
             let dst = dst.join(name);
diff --git a/src/ci/cpu-usage-over-time.py b/src/ci/cpu-usage-over-time.py
new file mode 100644 (file)
index 0000000..78427a6
--- /dev/null
@@ -0,0 +1,175 @@
+#!/usr/bin/env python
+# ignore-tidy-linelength
+
+# This is a small script that we use on CI to collect CPU usage statistics of
+# our builders. By seeing graphs of CPU usage over time we hope to correlate
+# that with possible improvements to Rust's own build system, ideally diagnosing
+# that either builders are always fully using their CPU resources or they're
+# idle for long stretches of time.
+#
+# This script is relatively simple, but it's platform specific. Each platform
+# (OSX/Windows/Linux) has a different way of calculating the current state of
+# CPU at a point in time. We then compare two captured states to determine the
+# percentage of time spent in one state versus another. The state capturing is
+# all platform-specific but the loop at the bottom is the cross platform part
+# that executes everywhere.
+#
+# # Viewing statistics
+#
+# All builders will upload their CPU statistics as CSV files to our S3 buckets.
+# These URLS look like:
+#
+#   https://$bucket.s3.amazonaws.com/rustc-builds/$commit/cpu-$builder.csv
+#
+# for example
+#
+#   https://rust-lang-ci2.s3.amazonaws.com/rustc-builds/68baada19cd5340f05f0db15a3e16d6671609bcc/cpu-x86_64-apple.csv
+#
+# Each CSV file has two columns. The first is the timestamp of the measurement
+# and the second column is the % of idle cpu time in that time slice. Ideally
+# the second column is always zero.
+#
+# Once you've downloaded a file there's various ways to plot it and visualize
+# it. For command line usage you can use a script like so:
+#
+#      set timefmt '%Y-%m-%dT%H:%M:%S'
+#      set xdata time
+#      set ylabel "Idle CPU %"
+#      set xlabel "Time"
+#      set datafile sep ','
+#      set term png
+#      set output "printme.png"
+#      set grid
+#      builder = "i686-apple"
+#      plot "cpu-".builder.".csv" using 1:2 with lines title builder
+#
+# Executed as `gnuplot < ./foo.plot` it will generate a graph called
+# `printme.png` which you can then open up. If you know how to improve this
+# script or the viewing process that would be much appreciated :) (or even if
+# you know how to automate it!)
+
+import datetime
+import sys
+import time
+
+if sys.platform == 'linux2':
+    class State:
+        def __init__(self):
+            with open('/proc/stat', 'r') as file:
+                data = file.readline().split()
+            if data[0] != 'cpu':
+                raise Exception('did not start with "cpu"')
+            self.user = int(data[1])
+            self.nice = int(data[2])
+            self.system = int(data[3])
+            self.idle = int(data[4])
+            self.iowait = int(data[5])
+            self.irq = int(data[6])
+            self.softirq = int(data[7])
+            self.steal = int(data[8])
+            self.guest = int(data[9])
+            self.guest_nice = int(data[10])
+
+        def idle_since(self, prev):
+            user = self.user - prev.user
+            nice = self.nice - prev.nice
+            system = self.system - prev.system
+            idle = self.idle - prev.idle
+            iowait = self.iowait - prev.iowait
+            irq = self.irq - prev.irq
+            softirq = self.softirq - prev.softirq
+            steal = self.steal - prev.steal
+            guest = self.guest - prev.guest
+            guest_nice = self.guest_nice - prev.guest_nice
+            total = user + nice + system + idle + iowait + irq + softirq + steal + guest + guest_nice
+            return float(idle) / float(total) * 100
+
+elif sys.platform == 'win32':
+    from ctypes.wintypes import DWORD
+    from ctypes import Structure, windll, WinError, GetLastError, byref
+
+    class FILETIME(Structure):
+        _fields_ = [
+            ("dwLowDateTime", DWORD),
+            ("dwHighDateTime", DWORD),
+        ]
+
+    class State:
+        def __init__(self):
+            idle, kernel, user = FILETIME(), FILETIME(), FILETIME()
+
+            success = windll.kernel32.GetSystemTimes(
+                byref(idle),
+                byref(kernel),
+                byref(user),
+            )
+
+            assert success, WinError(GetLastError())[1]
+
+            self.idle = (idle.dwHighDateTime << 32) | idle.dwLowDateTime
+            self.kernel = (kernel.dwHighDateTime << 32) | kernel.dwLowDateTime
+            self.user = (user.dwHighDateTime << 32) | user.dwLowDateTime
+
+        def idle_since(self, prev):
+            idle = self.idle - prev.idle
+            user = self.user - prev.user
+            kernel = self.kernel - prev.kernel
+            return float(idle) / float(user + kernel) * 100
+
+elif sys.platform == 'darwin':
+    from ctypes import *
+    libc = cdll.LoadLibrary('/usr/lib/libc.dylib')
+
+    PROESSOR_CPU_LOAD_INFO = c_int(2)
+    CPU_STATE_USER = 0
+    CPU_STATE_SYSTEM = 1
+    CPU_STATE_IDLE = 2
+    CPU_STATE_NICE = 3
+    c_int_p = POINTER(c_int)
+
+    class State:
+        def __init__(self):
+            num_cpus_u = c_uint(0)
+            cpu_info = c_int_p()
+            cpu_info_cnt = c_int(0)
+            err = libc.host_processor_info(
+                libc.mach_host_self(),
+                PROESSOR_CPU_LOAD_INFO,
+                byref(num_cpus_u),
+                byref(cpu_info),
+                byref(cpu_info_cnt),
+            )
+            assert err == 0
+            self.user = 0
+            self.system = 0
+            self.idle = 0
+            self.nice = 0
+            cur = 0
+            while cur < cpu_info_cnt.value:
+                self.user += cpu_info[cur + CPU_STATE_USER]
+                self.system += cpu_info[cur + CPU_STATE_SYSTEM]
+                self.idle += cpu_info[cur + CPU_STATE_IDLE]
+                self.nice += cpu_info[cur + CPU_STATE_NICE]
+                cur += num_cpus_u.value
+
+        def idle_since(self, prev):
+            user = self.user - prev.user
+            system = self.system - prev.system
+            idle = self.idle - prev.idle
+            nice = self.nice - prev.nice
+            return float(idle) / float(user + system + idle + nice) * 100.0
+
+else:
+    print('unknown platform', sys.platform)
+    sys.exit(1)
+
+cur_state = State();
+print("Time,Idle")
+while True:
+    time.sleep(1);
+    next_state = State();
+    now = datetime.datetime.utcnow().isoformat()
+    idle = next_state.idle_since(cur_state)
+    print("%s,%s" % (now, idle))
+    sys.stdout.flush()
+    cur_state = next_state
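
The uploaded files follow the two-column `Time,Idle` format documented in the header comment above. Besides gnuplot, a throwaway reader works too; here is a std-only Rust sketch (the file name is just an example of a CSV downloaded from the rustc-builds bucket):

```rust
// Hypothetical std-only reader for the "Time,Idle" CSV described above.
use std::fs;

fn main() -> std::io::Result<()> {
    // Example name of a downloaded builder CSV.
    let data = fs::read_to_string("cpu-x86_64-apple.csv")?;
    for line in data.lines().skip(1) {      // skip the "Time,Idle" header row
        let mut cols = line.splitn(2, ',');
        let time = cols.next().unwrap_or("");
        let idle: f64 = cols
            .next()
            .and_then(|s| s.trim().parse().ok())
            .unwrap_or(0.0);
        println!("{}  idle = {:.1}%", time, idle);
    }
    Ok(())
}
```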
index 9eaffbf83eb4e43e897c93dfc814c4b53ec2f50e..01d6fce34186a50db647dbee2351f6ec46e824f5 100644 (file)
--- a/src/ci/docker/asmjs/Dockerfile
+++ b/src/ci/docker/asmjs/Dockerfile
@@ -37,3 +37,11 @@ ENV SCRIPT python2.7 ../x.py test --target $TARGETS \
   src/libstd \
   src/liballoc \
   src/libcore
+
+# Debug assertions in rustc are largely covered by other builders, and LLVM
+# assertions cause this builder to slow down by quite a large amount and don't
+# buy us a huge amount over other builders (not sure if we've ever seen an
+# asmjs-specific backend assertion trip), so disable assertions for these
+# tests.
+ENV NO_LLVM_ASSERTIONS=1
+ENV NO_DEBUG_ASSERTIONS=1
index af0198705a2fefda34bf4f1ea5efc881078d2081..978732e3c089f2b5b19eed0cc8794edc5e45f478 100755 (executable)
--- a/src/ci/docker/x86_64-gnu-tools/checktools.sh
+++ b/src/ci/docker/x86_64-gnu-tools/checktools.sh
@@ -35,12 +35,17 @@ set -e
 cat "$TOOLSTATE_FILE"
 echo
 
+# This function checks if a particular tool is *not* in status "test-pass".
+check_tool_failed() {
+    grep -vq '"'"$1"'":"test-pass"' "$TOOLSTATE_FILE"
+}
+
 # This function checks that if a tool's submodule changed, the tool's state must improve
 verify_status() {
     echo "Verifying status of $1..."
     if echo "$CHANGED_FILES" | grep -q "^M[[:blank:]]$2$"; then
         echo "This PR updated '$2', verifying if status is 'test-pass'..."
-        if grep -vq '"'"$1"'":"test-pass"' "$TOOLSTATE_FILE"; then
+        if check_tool_failed "$1"; then
             echo
             echo "⚠️ We detected that this PR updated '$1', but its tests failed."
             echo
@@ -55,14 +60,16 @@ verify_status() {
     fi
 }
 
-# deduplicates the submodule check and the assertion that on beta some tools MUST be passing
+# deduplicates the submodule check and the assertion that on beta some tools MUST be passing.
+# $1 should be "submodule_changed" to only check tools that got changed by this PR,
+# or "beta_required" to check all tools that have $2 set to "beta".
 check_dispatch() {
     if [ "$1" = submodule_changed ]; then
         # ignore $2 (branch id)
         verify_status $3 $4
     elif [ "$2" = beta ]; then
         echo "Requiring test passing for $3..."
-        if grep -q '"'"$3"'":"\(test\|build\)-fail"' "$TOOLSTATE_FILE"; then
+        if check_tool_failed "$3"; then
             exit 4
         fi
     fi
index 29fe982990e43b9367be0ff47abc82fb2123fd03..62a8c6f25fbd981c80a046f3b04be9684749af3b 160000 (submodule)
@@ -1 +1 @@
-Subproject commit 29fe982990e43b9367be0ff47abc82fb2123fd03
+Subproject commit 62a8c6f25fbd981c80a046f3b04be9684749af3b
index 9858872bd1b7dbba5ec27dc30d34eba00acd7ef9..f0c75b75f9c18537b78f5d17c1015247e9a49c86 160000 (submodule)
@@ -1 +1 @@
-Subproject commit 9858872bd1b7dbba5ec27dc30d34eba00acd7ef9
+Subproject commit f0c75b75f9c18537b78f5d17c1015247e9a49c86
index 862b669c395822bb0938781d74f860e5762ad4fb..f8ae436d936f6f4891d3c1bbb1af5865eb8aeadb 160000 (submodule)
@@ -1 +1 @@
-Subproject commit 862b669c395822bb0938781d74f860e5762ad4fb
+Subproject commit f8ae436d936f6f4891d3c1bbb1af5865eb8aeadb
index 811c697b232c611ed754d279ed20643a0c4096f6..18566f4dedc3ef5bf61f5f85685d5966db99cc11 160000 (submodule)
@@ -1 +1 @@
-Subproject commit 811c697b232c611ed754d279ed20643a0c4096f6
+Subproject commit 18566f4dedc3ef5bf61f5f85685d5966db99cc11
index 3cb727b62b953d59b4360d39aa68b6dc8f157655..3ac9cfc9c9ab2e366feebf18718112737f572352 160000 (submodule)
@@ -1 +1 @@
-Subproject commit 3cb727b62b953d59b4360d39aa68b6dc8f157655
+Subproject commit 3ac9cfc9c9ab2e366feebf18718112737f572352
index f090f142c0816cb17e5c363b7cc3bcd0e1f0e79c..6d4aa024c75b4ffa1ad23055e0f0af4fca25b400 100644 (file)
--- a/src/doc/rustc/src/lints/listing/warn-by-default.md
+++ b/src/doc/rustc/src/lints/listing/warn-by-default.md
@@ -728,19 +728,17 @@ This lint detects attributes that were not used by the compiler. Some
 example code that triggers this lint:
 
 ```rust
-#![feature(custom_attribute)]
-
-#![mutable_doc]
+#![macro_export]
 ```
 
 This will produce:
 
 ```text
 warning: unused attribute
- --> src/main.rs:4:1
+ --> src/main.rs:1:1
   |
-4 | #![mutable_doc]
-  | ^^^^^^^^^^^^^^^
+1 | #![macro_export]
+  | ^^^^^^^^^^^^^^^^
   |
 ```
 
index 43fffd680372fb1330454972561e42abd40a434d..1994cf491889bddd82e384f8a8aaa25504a6662e 100644 (file)
--- a/src/doc/unstable-book/src/language-features/plugin.md
+++ b/src/doc/unstable-book/src/language-features/plugin.md
@@ -56,7 +56,7 @@ extern crate syntax_pos;
 extern crate rustc;
 extern crate rustc_plugin;
 
-use syntax::parse::token;
+use syntax::parse::token::{self, Token};
 use syntax::tokenstream::TokenTree;
 use syntax::ext::base::{ExtCtxt, MacResult, DummyResult, MacEager};
 use syntax::ext::build::AstBuilder;  // A trait for expr_usize.
@@ -64,7 +64,7 @@ use syntax_pos::Span;
 use rustc_plugin::Registry;
 
 fn expand_rn(cx: &mut ExtCtxt, sp: Span, args: &[TokenTree])
-        -> Box<MacResult + 'static> {
+        -> Box<dyn MacResult + 'static> {
 
     static NUMERALS: &'static [(&'static str, usize)] = &[
         ("M", 1000), ("CM", 900), ("D", 500), ("CD", 400),
@@ -80,7 +80,7 @@ fn expand_rn(cx: &mut ExtCtxt, sp: Span, args: &[TokenTree])
     }
 
     let text = match args[0] {
-        TokenTree::Token(_, token::Ident(s)) => s.to_string(),
+        TokenTree::Token(Token { kind: token::Ident(s, _), .. }) => s.to_string(),
         _ => {
             cx.span_err(sp, "argument should be a single identifier");
             return DummyResult::any(sp);
diff --git a/src/doc/unstable-book/src/language-features/repr-align-enum.md b/src/doc/unstable-book/src/language-features/repr-align-enum.md
deleted file mode 100644 (file)
index 415c6eb..0000000
+++ /dev/null
@@ -1,42 +0,0 @@
-# `repr_align_enum`
-
-The tracking issue for this feature is: [#57996]
-
-[#57996]: https://github.com/rust-lang/rust/issues/57996
-
-------------------------
-
-The `repr_align_enum` feature allows using the `#[repr(align(x))]` attribute
-on enums, similarly to structs.
-
-# Examples
-
-```rust
-#![feature(repr_align_enum)]
-
-#[repr(align(8))]
-enum Aligned {
-    Foo,
-    Bar { value: u32 },
-}
-
-fn main() {
-    assert_eq!(std::mem::align_of::<Aligned>(), 8);
-}
-```
-
-This is equivalent to using an aligned wrapper struct everywhere:
-
-```rust
-#[repr(align(8))]
-struct Aligned(Unaligned);
-
-enum Unaligned {
-    Foo,
-    Bar { value: u32 },
-}
-
-fn main() {
-    assert_eq!(std::mem::align_of::<Aligned>(), 8);
-}
-```
diff --git a/src/doc/unstable-book/src/language-features/transparent-enums.md b/src/doc/unstable-book/src/language-features/transparent-enums.md
new file mode 100644 (file)
index 0000000..862411a
--- /dev/null
@@ -0,0 +1,93 @@
+# `transparent_enums`
+
+The tracking issue for this feature is: [#60405]
+
+[#60405]: https://github.com/rust-lang/rust/issues/60405
+
+----
+
+The `transparent_enums` feature allows you to mark `enum`s as
+`#[repr(transparent)]`. An `enum` may be `#[repr(transparent)]` if it has
+exactly one variant, and that variant satisfies the same requirements that a
+`struct` must satisfy for transparency. Some concrete illustrations follow.
+
+```rust
+#![feature(transparent_enums)]
+
+// This enum has the same representation as `f32`.
+#[repr(transparent)]
+enum SingleFieldEnum {
+    Variant(f32)
+}
+
+// This enum has the same representation as `usize`.
+#[repr(transparent)]
+enum MultiFieldEnum {
+    Variant { field: usize, nothing: () },
+}
+```
+
+For consistency with transparent `struct`s, a transparent `enum` must have
+exactly one non-zero-sized field. If all fields are zero-sized, the `enum` must
+not be `#[repr(transparent)]`:
+
+```rust
+#![feature(transparent_enums)]
+
+// This (non-transparent) enum is already valid in stable Rust:
+pub enum GoodEnum {
+    Nothing,
+}
+
+// Error: transparent enum needs exactly one non-zero-sized field, but has 0
+// #[repr(transparent)]
+// pub enum BadEnum {
+//     Nothing(()),
+// }
+
+// Error: transparent enum needs exactly one non-zero-sized field, but has 0
+// #[repr(transparent)]
+// pub enum BadEmptyEnum {
+//     Nothing,
+// }
+```
+
+The one exception is that if the `enum` is generic over `T` and has a field of
+type `T`, it may be `#[repr(transparent)]` even if `T` is a zero-sized type:
+
+```rust
+#![feature(transparent_enums)]
+
+// This enum has the same representation as `T`.
+#[repr(transparent)]
+pub enum GenericEnum<T> {
+    Variant(T, ()),
+}
+
+// This is okay even though `()` is a zero-sized type.
+pub const THIS_IS_OKAY: GenericEnum<()> = GenericEnum::Variant((), ());
+```
+
+Transparent `enum`s require exactly one variant:
+
+```rust
+// Error: transparent enum needs exactly one variant, but has 0
+// #[repr(transparent)]
+// pub enum TooFewVariants {
+// }
+
+// Error: transparent enum needs exactly one variant, but has 2
+// #[repr(transparent)]
+// pub enum TooManyVariants {
+//     First(usize),
+//     Second,
+// }
+```
+
+Like transparent `struct`s, a transparent `enum` of type `E` has the same layout,
+size, and ABI as its single non-ZST field. If it is generic over a type `T`, and
+all its fields are ZSTs except for exactly one field of type `T`, then it has
+the same layout and ABI as `T` (even if `T` is a ZST when monomorphized).
+
+Like transparent `struct`s, transparent `enum`s are FFI-safe if and only if
+their underlying representation type is also FFI-safe.
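+
+As a purely illustrative sketch (the `Handle` type and the foreign function
+below are hypothetical, not part of any real API), a transparent single-variant
+`enum` wrapping an FFI-safe type can appear in an `extern "C"` signature just
+like its underlying type:
+
+```rust
+#![feature(transparent_enums)]
+
+// Same layout and ABI as `u32`.
+#[repr(transparent)]
+enum Handle {
+    Raw(u32),
+}
+
+extern "C" {
+    // Hypothetical foreign function; `Handle` crosses the boundary as a `u32`.
+    fn consume_handle(handle: Handle);
+}
+```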
diff --git a/src/doc/unstable-book/src/language-features/transparent-unions.md b/src/doc/unstable-book/src/language-features/transparent-unions.md
new file mode 100644 (file)
index 0000000..b731c9e
--- /dev/null
@@ -0,0 +1,83 @@
+# `transparent_unions`
+
+The tracking issue for this feature is: [#60405]
+
+[#60405]: https://github.com/rust-lang/rust/issues/60405
+
+----
+
+The `transparent_unions` feature allows you to mark `union`s as
+`#[repr(transparent)]`. A `union` may be `#[repr(transparent)]` under exactly
+the same conditions in which a `struct` may be `#[repr(transparent)]`
+(generally, this means the `union` must have exactly one non-zero-sized field).
+Some concrete illustrations follow.
+
+```rust
+#![feature(transparent_unions)]
+
+// This union has the same representation as `f32`.
+#[repr(transparent)]
+union SingleFieldUnion {
+    field: f32,
+}
+
+// This union has the same representation as `usize`.
+#[repr(transparent)]
+union MultiFieldUnion {
+    field: usize,
+    nothing: (),
+}
+```
+
+For consistency with transparent `struct`s, a transparent `union` must have
+exactly one non-zero-sized field. If all fields are zero-sized, the `union` must
+not be `#[repr(transparent)]`:
+
+```rust
+#![feature(transparent_unions)]
+
+// This (non-transparent) union is already valid in stable Rust:
+pub union GoodUnion {
+    pub nothing: (),
+}
+
+// Error: transparent union needs exactly one non-zero-sized field, but has 0
+// #[repr(transparent)]
+// pub union BadUnion {
+//     pub nothing: (),
+// }
+```
+
+The one exception is that if the `union` is generic over `T` and has a field of
+type `T`, it may be `#[repr(transparent)]` even if `T` is a zero-sized type:
+
+```rust
+#![feature(transparent_unions)]
+
+// This union has the same representation as `T`.
+#[repr(transparent)]
+pub union GenericUnion<T: Copy> { // Unions with non-`Copy` fields are unstable.
+    pub field: T,
+    pub nothing: (),
+}
+
+// This is okay even though `()` is a zero-sized type.
+pub const THIS_IS_OKAY: GenericUnion<()> = GenericUnion { field: () };
+```
+
+Like transparent `struct`s, a transparent `union` of type `U` has the same
+layout, size, and ABI as its single non-ZST field. If it is generic over a type
+`T`, and all its fields are ZSTs except for exactly one field of type `T`, then
+it has the same layout and ABI as `T` (even if `T` is a ZST when monomorphized).
+
+Like transparent `struct`s, transparent `union`s are FFI-safe if and only if
+their underlying representation type is also FFI-safe.
+
+A `union` may not be eligible for the same nonnull-style optimizations that a
+`struct` or `enum` (with the same fields) is eligible for. Adding
+`#[repr(transparent)]` to a `union` does not change this. To give a more concrete
+example, it is unspecified whether `size_of::<T>()` is equal to
+`size_of::<Option<T>>()`, where `T` is a `union` (regardless of whether or not
+it is transparent). The Rust compiler is free to perform this optimization if
+possible, but is not required to, and different compiler versions may differ in
+their application of these optimizations.
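+
+As a purely illustrative sketch (the `Transparent` union below is invented for
+this example), the two sizes can be printed, but nothing in the text above
+guarantees that they are equal:
+
+```rust
+#![feature(transparent_unions)]
+
+use std::mem::size_of;
+
+#[repr(transparent)]
+union Transparent {
+    field: u32,
+    nothing: (),
+}
+
+fn main() {
+    // Whether `Option<Transparent>` gets a niche is unspecified, so we only
+    // observe the sizes rather than asserting anything about them.
+    println!("{} vs {}", size_of::<Transparent>(), size_of::<Option<Transparent>>());
+}
+```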
index 41ff06d70ff09eb7897af0f71c5385e42807483d..755feb849620357dda2d109220b5b5bf2e7536ff 100644 (file)
@@ -15,7 +15,8 @@
     // them from the `#[global_allocator]` attribute if there is one, or uses the
     // default implementations in libstd (`__rdl_alloc` etc in `src/libstd/alloc.rs`)
     // otherwise.
-    #[allocator]
+    #[cfg_attr(bootstrap, allocator)]
+    #[cfg_attr(not(bootstrap), rustc_allocator)]
     #[rustc_allocator_nounwind]
     fn __rust_alloc(size: usize, align: usize) -> *mut u8;
     #[rustc_allocator_nounwind]
index bfc008e14a486f870edffde929bdaca1f939d989..c530ac24275c2bb5f85b7431a28f43d009805bd3 100644 (file)
@@ -79,7 +79,7 @@
 #![feature(coerce_unsized)]
 #![feature(dispatch_from_dyn)]
 #![feature(core_intrinsics)]
-#![feature(custom_attribute)]
+#![cfg_attr(bootstrap, feature(custom_attribute))]
 #![feature(dropck_eyepatch)]
 #![feature(exact_size_is_empty)]
 #![feature(fmt_internals)]
index d6bec816e4ee60fcea0fb79e277a4ccd27d99990..1739b2236e89da3bc54685fc5f13bb43e80fd726 100644 (file)
@@ -13,6 +13,7 @@
 
 #![deny(rust_2018_idioms)]
 #![deny(internal)]
+#![deny(unused_lifetimes)]
 
 #![feature(core_intrinsics)]
 #![feature(dropck_eyepatch)]
index 239ff017cc2306b433c9d991336e21c3291d7d74..beafddc5a10196810c619c4cd4de5e1e4baecb71 100644 (file)
@@ -494,7 +494,6 @@ pub fn get_mut(&mut self) -> &mut T {
     /// # Examples
     ///
     /// ```
-    /// #![feature(as_cell)]
     /// use std::cell::Cell;
     ///
     /// let slice: &mut [i32] = &mut [1, 2, 3];
@@ -504,7 +503,7 @@ pub fn get_mut(&mut self) -> &mut T {
     /// assert_eq!(slice_cell.len(), 3);
     /// ```
     #[inline]
-    #[unstable(feature = "as_cell", issue="43038")]
+    #[stable(feature = "as_cell", since = "1.37.0")]
     pub fn from_mut(t: &mut T) -> &Cell<T> {
         unsafe {
             &*(t as *mut T as *const Cell<T>)
@@ -541,7 +540,6 @@ impl<T> Cell<[T]> {
     /// # Examples
     ///
     /// ```
-    /// #![feature(as_cell)]
     /// use std::cell::Cell;
     ///
     /// let slice: &mut [i32] = &mut [1, 2, 3];
@@ -550,7 +548,7 @@ impl<T> Cell<[T]> {
     ///
     /// assert_eq!(slice_cell.len(), 3);
     /// ```
-    #[unstable(feature = "as_cell", issue="43038")]
+    #[stable(feature = "as_cell", since = "1.37.0")]
     pub fn as_slice_of_cells(&self) -> &[Cell<T>] {
         unsafe {
             &*(self as *const Cell<[T]> as *const [Cell<T>])
index 84867264e70161212e9b65c37e10b459e4bbddf3..8d9a51742fd97ac9159c514c293f8cbb3bf19f9d 100644 (file)
@@ -1051,6 +1051,19 @@ pub fn volatile_copy_nonoverlapping_memory<T>(dst: *mut T, src: *const T,
     /// Returns the absolute value of an `f64`.
     pub fn fabsf64(x: f64) -> f64;
 
+    /// Returns the minimum of two `f32` values.
+    #[cfg(not(bootstrap))]
+    pub fn minnumf32(x: f32, y: f32) -> f32;
+    /// Returns the minimum of two `f64` values.
+    #[cfg(not(bootstrap))]
+    pub fn minnumf64(x: f64, y: f64) -> f64;
+    /// Returns the maximum of two `f32` values.
+    #[cfg(not(bootstrap))]
+    pub fn maxnumf32(x: f32, y: f32) -> f32;
+    /// Returns the maximum of two `f64` values.
+    #[cfg(not(bootstrap))]
+    pub fn maxnumf64(x: f64, y: f64) -> f64;
+
     /// Copies the sign from `y` to `x` for `f32` values.
     pub fn copysignf32(x: f32, y: f32) -> f32;
     /// Copies the sign from `y` to `x` for `f64` values.
@@ -1313,30 +1326,10 @@ pub fn volatile_copy_nonoverlapping_memory<T>(dst: *mut T, src: *const T,
     pub fn nontemporal_store<T>(ptr: *mut T, val: T);
 }
 
-mod real_intrinsics {
-  extern "rust-intrinsic" {
-    /// Copies `count * size_of::<T>()` bytes from `src` to `dst`. The source
-    /// and destination must *not* overlap.
-    /// For the full docs, see the stabilized wrapper [`copy_nonoverlapping`].
-    ///
-    /// [`copy_nonoverlapping`]: ../../std/ptr/fn.copy_nonoverlapping.html
-    pub fn copy_nonoverlapping<T>(src: *const T, dst: *mut T, count: usize);
-
-    /// Copies `count * size_of::<T>()` bytes from `src` to `dst`. The source
-    /// and destination may overlap.
-    /// For the full docs, see the stabilized wrapper [`copy`].
-    ///
-    /// [`copy`]: ../../std/ptr/fn.copy.html
-    pub fn copy<T>(src: *const T, dst: *mut T, count: usize);
-
-    /// Sets `count * size_of::<T>()` bytes of memory starting at `dst` to
-    /// `val`.
-    /// For the full docs, see the stabilized wrapper [`write_bytes`].
-    ///
-    /// [`write_bytes`]: ../../std/ptr/fn.write_bytes.html
-    pub fn write_bytes<T>(dst: *mut T, val: u8, count: usize);
-  }
-}
+// Some functions are defined here because they were accidentally made
+// available in this module on stable. See <https://github.com/rust-lang/rust/issues/15702>.
+// (`transmute` also falls into this category, but it cannot be wrapped due to the
+// check that `T` and `U` have the same size.)
 
 /// Copies `count * size_of::<T>()` bytes from `src` to `dst`. The source
 /// and destination must *not* overlap.
@@ -1424,7 +1417,10 @@ mod real_intrinsics {
 #[stable(feature = "rust1", since = "1.0.0")]
 #[inline]
 pub unsafe fn copy_nonoverlapping<T>(src: *const T, dst: *mut T, count: usize) {
-    real_intrinsics::copy_nonoverlapping(src, dst, count);
+    extern "rust-intrinsic" {
+        fn copy_nonoverlapping<T>(src: *const T, dst: *mut T, count: usize);
+    }
+    copy_nonoverlapping(src, dst, count);
 }
 
 /// Copies `count * size_of::<T>()` bytes from `src` to `dst`. The source
@@ -1481,7 +1477,10 @@ pub unsafe fn copy_nonoverlapping<T>(src: *const T, dst: *mut T, count: usize) {
 #[stable(feature = "rust1", since = "1.0.0")]
 #[inline]
 pub unsafe fn copy<T>(src: *const T, dst: *mut T, count: usize) {
-    real_intrinsics::copy(src, dst, count)
+    extern "rust-intrinsic" {
+        fn copy<T>(src: *const T, dst: *mut T, count: usize);
+    }
+    copy(src, dst, count)
 }
 
 /// Sets `count * size_of::<T>()` bytes of memory starting at `dst` to
@@ -1559,5 +1558,52 @@ pub unsafe fn copy<T>(src: *const T, dst: *mut T, count: usize) {
 #[stable(feature = "rust1", since = "1.0.0")]
 #[inline]
 pub unsafe fn write_bytes<T>(dst: *mut T, val: u8, count: usize) {
-    real_intrinsics::write_bytes(dst, val, count)
+    extern "rust-intrinsic" {
+        fn write_bytes<T>(dst: *mut T, val: u8, count: usize);
+    }
+    write_bytes(dst, val, count)
+}
+
+// Simple bootstrap implementations of minnum/maxnum for stage0 compilation.
+
+/// Returns the minimum of two `f32` values.
+#[cfg(bootstrap)]
+pub fn minnumf32(x: f32, y: f32) -> f32 {
+    // IEEE754 says: minNum(x, y) is the canonicalized number x if x < y, y if y < x, the
+    // canonicalized number if one operand is a number and the other a quiet NaN. Otherwise it
+    // is either x or y, canonicalized (this means results might differ among implementations).
+    // When either x or y is a signaling NaN, then the result is according to 6.2.
+    //
+    // Since we do not support sNaN in Rust yet, we do not need to handle them.
+    // FIXME(nagisa): due to https://bugs.llvm.org/show_bug.cgi?id=33303 we canonicalize by
+    // multiplying by 1.0. Should switch to the `canonicalize` when it works.
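+    // (`y != y` is true exactly when `y` is NaN; in that case `x` is returned,
+    // matching minNum's quiet-NaN handling.)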
+    (if x < y || y != y { x } else { y }) * 1.0
+}
+
+/// Returns the minimum of two `f64` values.
+#[cfg(bootstrap)]
+pub fn minnumf64(x: f64, y: f64) -> f64 {
+    // Identical to the `f32` case.
+    (if x < y || y != y { x } else { y }) * 1.0
+}
+
+/// Returns the maximum of two `f32` values.
+#[cfg(bootstrap)]
+pub fn maxnumf32(x: f32, y: f32) -> f32 {
+    // IEEE754 says: maxNum(x, y) is the canonicalized number y if x < y, x if y < x, the
+    // canonicalized number if one operand is a number and the other a quiet NaN. Otherwise it
+    // is either x or y, canonicalized (this means results might differ among implementations).
+    // When either x or y is a signaling NaN, then the result is according to 6.2.
+    //
+    // Since we do not support sNaN in Rust yet, we do not need to handle them.
+    // FIXME(nagisa): due to https://bugs.llvm.org/show_bug.cgi?id=33303 we canonicalize by
+    // multiplying by 1.0. Should switch to the `canonicalize` when it works.
+    (if x < y || x != x { y } else { x }) * 1.0
+}
+
+/// Returns the maximum of two `f64` values.
+#[cfg(bootstrap)]
+pub fn maxnumf64(x: f64, y: f64) -> f64 {
+    // Identical to the `f32` case.
+    (if x < y || x != x { y } else { x }) * 1.0
 }
index 6bbf776fb8f17bb7d6f14266e398ad3fc9e029bc..efda3b263cc97fae94f08fb2dda1522424763779 100644 (file)
@@ -281,6 +281,19 @@ fn next_back(&mut self) -> Option<A> {
             None
         }
     }
+
+    #[inline]
+    fn nth_back(&mut self, n: usize) -> Option<A> {
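+        // The nth element from the back of `start..end` is `end - n - 1`; it is
+        // only yielded if that index still lies within the range.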
+        if let Some(minus_n) = self.end.sub_usize(n) {
+            if minus_n > self.start {
+                self.end = minus_n.sub_one();
+                return Some(self.end.clone())
+            }
+        }
+
+        self.end = self.start.clone();
+        None
+    }
 }
 
 #[stable(feature = "fused", since = "1.26.0")]
@@ -438,6 +451,34 @@ fn next_back(&mut self) -> Option<A> {
         })
     }
 
+    #[inline]
+    fn nth_back(&mut self, n: usize) -> Option<A> {
+        self.compute_is_empty();
+        if self.is_empty.unwrap_or_default() {
+            return None;
+        }
+
+        if let Some(minus_n) = self.end.sub_usize(n) {
+            use crate::cmp::Ordering::*;
+
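+            // `minus_n` is the candidate element counted from the back: if it is
+            // still greater than `start` the range stays non-empty afterwards, if
+            // it equals `start` it is the final element, and otherwise the
+            // iterator is exhausted.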
+            match minus_n.partial_cmp(&self.start) {
+                Some(Greater) => {
+                    self.is_empty = Some(false);
+                    self.end = minus_n.sub_one();
+                    return Some(minus_n);
+                }
+                Some(Equal) => {
+                    self.is_empty = Some(true);
+                    return Some(minus_n);
+                }
+                _ => {}
+            }
+        }
+
+        self.is_empty = Some(true);
+        None
+    }
+
     #[inline]
     fn try_rfold<B, F, R>(&mut self, init: B, mut f: F) -> R where
         Self: Sized, F: FnMut(B, Self::Item) -> R, R: Try<Ok=B>
index d0fdd79473e6726eaf0cc48ffff0beb79e761822..30923c7414504c1a6863d9b50cec569c668ac430 100644 (file)
@@ -1495,13 +1495,13 @@ fn partition<B, F>(self, mut f: F) -> (B, B) where
         let mut left: B = Default::default();
         let mut right: B = Default::default();
 
-        for x in self {
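+        // Internal iteration via `for_each` lets iterators with specialized
+        // `fold` implementations drive this loop more efficiently than an
+        // external `for` loop would.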
+        self.for_each(|x| {
             if f(&x) {
                 left.extend(Some(x))
             } else {
                 right.extend(Some(x))
             }
-        }
+        });
 
         (left, right)
     }
index ad35204804755cb6cbcb0c29e07470a00662422f..030f4f1d12cc824a734b17a32e384ed9ad742efb 100644 (file)
 #![feature(arbitrary_self_types)]
 #![feature(asm)]
 #![feature(associated_type_defaults)]
+#![feature(bound_cloned)]
 #![feature(cfg_target_has_atomic)]
 #![feature(concat_idents)]
 #![feature(const_fn)]
 #![feature(const_fn_union)]
-#![feature(custom_attribute)]
 #![feature(doc_cfg)]
 #![feature(doc_spotlight)]
 #![feature(extern_types)]
index b43ba6ac34032da01596a867240bcded1d5b2f19..770d1ca8e750196de4c041861eef4a75fb5704ce 100644 (file)
@@ -503,6 +503,61 @@ pub fn swap<T>(x: &mut T, y: &mut T) {
     }
 }
 
+/// Replaces `dest` with the default value of `T`, returning the previous `dest` value.
+///
+/// # Examples
+///
+/// A simple example:
+///
+/// ```
+/// use std::mem;
+///
+/// let mut v: Vec<i32> = vec![1, 2];
+///
+/// let old_v = mem::take(&mut v);
+/// assert_eq!(vec![1, 2], old_v);
+/// assert!(v.is_empty());
+/// ```
+///
+/// `take` allows taking ownership of a struct field by replacing it with an "empty" value.
+/// Without `take` you can run into issues like these:
+///
+/// ```compile_fail,E0507
+/// struct Buffer<T> { buf: Vec<T> }
+///
+/// impl<T> Buffer<T> {
+///     fn get_and_reset(&mut self) -> Vec<T> {
+///         // error: cannot move out of dereference of `&mut`-pointer
+///         let buf = self.buf;
+///         self.buf = Vec::new();
+///         buf
+///     }
+/// }
+/// ```
+///
+/// Note that `T` does not necessarily implement [`Clone`], so we can't even clone and reset
+/// `self.buf`. But `take` can be used to disassociate the original value of `self.buf` from
+/// `self`, allowing it to be returned:
+///
+/// ```
+/// # #![allow(dead_code)]
+/// use std::mem;
+///
+/// # struct Buffer<T> { buf: Vec<T> }
+/// impl<T> Buffer<T> {
+///     fn get_and_reset(&mut self) -> Vec<T> {
+///         mem::take(&mut self.buf)
+///     }
+/// }
+/// ```
+///
+/// [`Clone`]: ../../std/clone/trait.Clone.html
+#[inline]
+#[unstable(feature = "mem_take", issue = "61129")]
+pub fn take<T: Default>(dest: &mut T) -> T {
+    replace(dest, T::default())
+}
+
 /// Moves `src` into the referenced `dest`, returning the previous `dest` value.
 ///
 /// Neither value is dropped.
index 3f8d142c8457adfd00837e5bd816e699e5ed32cd..0bcd371b528e43b5a0bd1a235cc36f8d9e98f35f 100644 (file)
@@ -7,6 +7,9 @@
 
 #![stable(feature = "rust1", since = "1.0.0")]
 
+#[cfg(not(test))]
+use crate::intrinsics;
+
 use crate::mem;
 use crate::num::FpCategory;
 
@@ -372,15 +375,7 @@ pub fn to_radians(self) -> f32 {
     #[stable(feature = "rust1", since = "1.0.0")]
     #[inline]
     pub fn max(self, other: f32) -> f32 {
-        // IEEE754 says: maxNum(x, y) is the canonicalized number y if x < y, x if y < x, the
-        // canonicalized number if one operand is a number and the other a quiet NaN. Otherwise it
-        // is either x or y, canonicalized (this means results might differ among implementations).
-        // When either x or y is a signalingNaN, then the result is according to 6.2.
-        //
-        // Since we do not support sNaN in Rust yet, we do not need to handle them.
-        // FIXME(nagisa): due to https://bugs.llvm.org/show_bug.cgi?id=33303 we canonicalize by
-        // multiplying by 1.0. Should switch to the `canonicalize` when it works.
-        (if self.is_nan() || self < other { other } else { self }) * 1.0
+        intrinsics::maxnumf32(self, other)
     }
 
     /// Returns the minimum of the two numbers.
@@ -396,15 +391,7 @@ pub fn max(self, other: f32) -> f32 {
     #[stable(feature = "rust1", since = "1.0.0")]
     #[inline]
     pub fn min(self, other: f32) -> f32 {
-        // IEEE754 says: minNum(x, y) is the canonicalized number x if x < y, y if y < x, the
-        // canonicalized number if one operand is a number and the other a quiet NaN. Otherwise it
-        // is either x or y, canonicalized (this means results might differ among implementations).
-        // When either x or y is a signalingNaN, then the result is according to 6.2.
-        //
-        // Since we do not support sNaN in Rust yet, we do not need to handle them.
-        // FIXME(nagisa): due to https://bugs.llvm.org/show_bug.cgi?id=33303 we canonicalize by
-        // multiplying by 1.0. Should switch to the `canonicalize` when it works.
-        (if other.is_nan() || self < other { self } else { other }) * 1.0
+        intrinsics::minnumf32(self, other)
     }
 
     /// Raw transmutation to `u32`.
index 7f19101fe6ef801d193ca8b9dc962e13e7cf8dbd..4d4a2c9c5a97cc7c63cd642ae25ea02125b98469 100644 (file)
@@ -7,6 +7,9 @@
 
 #![stable(feature = "rust1", since = "1.0.0")]
 
+#[cfg(not(test))]
+use crate::intrinsics;
+
 use crate::mem;
 use crate::num::FpCategory;
 
@@ -385,15 +388,7 @@ pub fn to_radians(self) -> f64 {
     #[stable(feature = "rust1", since = "1.0.0")]
     #[inline]
     pub fn max(self, other: f64) -> f64 {
-        // IEEE754 says: maxNum(x, y) is the canonicalized number y if x < y, x if y < x, the
-        // canonicalized number if one operand is a number and the other a quiet NaN. Otherwise it
-        // is either x or y, canonicalized (this means results might differ among implementations).
-        // When either x or y is a signalingNaN, then the result is according to 6.2.
-        //
-        // Since we do not support sNaN in Rust yet, we do not need to handle them.
-        // FIXME(nagisa): due to https://bugs.llvm.org/show_bug.cgi?id=33303 we canonicalize by
-        // multiplying by 1.0. Should switch to the `canonicalize` when it works.
-        (if self.is_nan() || self < other { other } else { self }) * 1.0
+        intrinsics::maxnumf64(self, other)
     }
 
     /// Returns the minimum of the two numbers.
@@ -409,15 +404,7 @@ pub fn max(self, other: f64) -> f64 {
     #[stable(feature = "rust1", since = "1.0.0")]
     #[inline]
     pub fn min(self, other: f64) -> f64 {
-        // IEEE754 says: minNum(x, y) is the canonicalized number x if x < y, y if y < x, the
-        // canonicalized number if one operand is a number and the other a quiet NaN. Otherwise it
-        // is either x or y, canonicalized (this means results might differ among implementations).
-        // When either x or y is a signalingNaN, then the result is according to 6.2.
-        //
-        // Since we do not support sNaN in Rust yet, we do not need to handle them.
-        // FIXME(nagisa): due to https://bugs.llvm.org/show_bug.cgi?id=33303 we canonicalize by
-        // multiplying by 1.0. Should switch to the `canonicalize` when it works.
-        (if other.is_nan() || self < other { self } else { other }) * 1.0
+        intrinsics::minnumf64(self, other)
     }
 
     /// Raw transmutation to `u64`.
index dd7090623f5b3bdd6628ebaf44874cae95560072..304b2fc9ebb06e56e61c600d9ad9b58091b3c9c3 100644 (file)
@@ -1993,13 +1993,10 @@ pub fn abs(self) -> Self {
 $EndFeature, "
 ```"),
             #[stable(feature = "rust1", since = "1.0.0")]
+            #[rustc_const_unstable(feature = "const_int_sign")]
             #[inline]
-            pub fn signum(self) -> Self {
-                match self {
-                    n if n > 0 =>  1,
-                    0          =>  0,
-                    _          => -1,
-                }
+            pub const fn signum(self) -> Self {
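+                // `(self > 0) as Self` and `(self < 0) as Self` are each 1 or 0,
+                // so the difference is 1, 0, or -1 without branching, which keeps
+                // the body valid in a `const fn`.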
+                (self > 0) as Self - (self < 0) as Self
             }
         }
 
index a707f0cc0627acdd6c3cf404c0cb30e8380039b1..1b4c4218cc15bb95e25ab47e803e6096414fdc1b 100644 (file)
@@ -696,6 +696,29 @@ pub enum Bound<T> {
     Unbounded,
 }
 
+impl<T: Clone> Bound<&T> {
+    /// Map a `Bound<&T>` to a `Bound<T>` by cloning the contents of the bound.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// #![feature(bound_cloned)]
+    /// use std::ops::Bound::*;
+    /// use std::ops::RangeBounds;
+    ///
+    /// assert_eq!((1..12).start_bound(), Included(&1));
+    /// assert_eq!((1..12).start_bound().cloned(), Included(1));
+    /// ```
+    #[unstable(feature = "bound_cloned", issue = "61356")]
+    pub fn cloned(self) -> Bound<T> {
+        match self {
+            Bound::Unbounded => Bound::Unbounded,
+            Bound::Included(x) => Bound::Included(x.clone()),
+            Bound::Excluded(x) => Bound::Excluded(x.clone()),
+        }
+    }
+}
+
 #[stable(feature = "collections_range", since = "1.28.0")]
 /// `RangeBounds` is implemented by Rust's built-in range types, produced
 /// by range syntax like `..`, `a..`, `..b`, `..=c`, `d..e`, or `f..=g`.
index 6b7f491effb30cf616e7a758ecddb1ba1c692f87..c75ecb059e8137c2c2552c85fccb04968bec4ad3 100644 (file)
 // which basically means it must be `Option`.
 
 /// The `Option` type. See [the module level documentation](index.html) for more.
-#[derive(Clone, Copy, PartialEq, PartialOrd, Eq, Ord, Debug, Hash)]
+#[derive(Copy, PartialEq, PartialOrd, Eq, Ord, Debug, Hash)]
 #[stable(feature = "rust1", since = "1.0.0")]
 pub enum Option<T> {
     /// No value
@@ -1040,6 +1040,25 @@ fn expect_failed(msg: &str) -> ! {
 // Trait implementations
 /////////////////////////////////////////////////////////////////////////////
 
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T: Clone> Clone for Option<T> {
+    #[inline]
+    fn clone(&self) -> Self {
+        match self {
+            Some(x) => Some(x.clone()),
+            None => None,
+        }
+    }
+
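+    // `clone_from` is overridden so that, when both values are `Some`, the
+    // existing payload is updated in place via `T::clone_from` instead of being
+    // dropped and replaced by a fresh clone.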
+    #[inline]
+    fn clone_from(&mut self, source: &Self) {
+        match (self, source) {
+            (Some(to), Some(from)) => to.clone_from(from),
+            (to, from) => *to = from.clone(),
+        }
+    }
+}
+
 #[stable(feature = "rust1", since = "1.0.0")]
 impl<T> Default for Option<T> {
     /// Returns [`None`][Option::None].
index 4ced860948bee82bb5b26037b24e2b439894cf3c..c5247e134c86a48e0c0dbeb50aad8c133fb3b586 100644 (file)
@@ -1,10 +1,10 @@
 //! Types that pin data to its location in memory.
 //!
-//! It is sometimes useful to have objects that are guaranteed to not move,
+//! It is sometimes useful to have objects that are guaranteed not to move,
 //! in the sense that their placement in memory does not change, and can thus be relied upon.
 //! A prime example of such a scenario would be building self-referential structs,
-//! since moving an object with pointers to itself will invalidate them,
-//! which could cause undefined behavior.
+//! as moving an object with pointers to itself will invalidate them, which could cause undefined
+//! behavior.
 //!
 //! A [`Pin<P>`] ensures that the pointee of any pointer type `P` has a stable location in memory,
 //! meaning it cannot be moved elsewhere and its memory cannot be deallocated
 //! moving the values they contain: you can move out of a `Box<T>`, or you can use [`mem::swap`].
 //! [`Pin<P>`] wraps a pointer type `P`, so `Pin<Box<T>>` functions much like a regular `Box<T>`:
 //! when a `Pin<Box<T>>` gets dropped, so do its contents, and the memory gets deallocated.
-//! Similarily, `Pin<&mut T>` is a lot like `&mut T`. However, [`Pin<P>`] does not let clients
+//! Similarly, `Pin<&mut T>` is a lot like `&mut T`. However, [`Pin<P>`] does not let clients
 //! actually obtain a `Box<T>` or `&mut T` to pinned data, which implies that you cannot use
 //! operations such as [`mem::swap`]:
+//!
 //! ```
 //! use std::pin::Pin;
 //! fn swap_pins<T>(x: Pin<&mut T>, y: Pin<&mut T>) {
 //! as a "`P`-style pointer" to a pinned `P::Target` -- so, a `Pin<Box<T>>` is
 //! an owned pointer to a pinned `T`, and a `Pin<Rc<T>>` is a reference-counted
 //! pointer to a pinned `T`.
-//! For correctness, [`Pin<P>`] relies on the [`Deref`] and [`DerefMut`] implementations
-//! to not move out of their `self` parameter, and to only ever return a pointer
-//! to pinned data when they are called on a pinned pointer.
+//! For correctness, [`Pin<P>`] relies on the implementations of [`Deref`] and
+//! [`DerefMut`] not to move out of their `self` parameter, and only ever to
+//! return a pointer to pinned data when they are called on a pinned pointer.
 //!
 //! # `Unpin`
 //!
-//! However, these restrictions are usually not necessary. Many types are always freely
-//! movable, even when pinned, because they do not rely on having a stable address.
-//! This includes all the basic types (like `bool`, `i32`, references)
-//! as well as types consisting solely of these types.
-//! Types that do not care about pinning implement the [`Unpin`] auto-trait, which
-//! cancels the effect of [`Pin<P>`]. For `T: Unpin`, `Pin<Box<T>>` and `Box<T>` function
-//! identically, as do `Pin<&mut T>` and `&mut T`.
+//! Many types are always freely movable, even when pinned, because they do not
+//! rely on having a stable address. This includes all the basic types (like
+//! `bool`, `i32`, and references) as well as types consisting solely of these
+//! types. Types that do not care about pinning implement the [`Unpin`]
+//! auto-trait, which cancels the effect of [`Pin<P>`]. For `T: Unpin`,
+//! `Pin<Box<T>>` and `Box<T>` function identically, as do `Pin<&mut T>` and
+//! `&mut T`.
 //!
 //! Note that pinning and `Unpin` only affect the pointed-to type `P::Target`, not the pointer
 //! type `P` itself that got wrapped in `Pin<P>`. For example, whether or not `Box<T>` is
 //! use std::marker::PhantomPinned;
 //! use std::ptr::NonNull;
 //!
-//! // This is a self-referential struct since the slice field points to the data field.
+//! // This is a self-referential struct because the slice field points to the data field.
 //! // We cannot inform the compiler about that with a normal reference,
-//! // since this pattern cannot be described with the usual borrowing rules.
-//! // Instead we use a raw pointer, though one which is known to not be null,
-//! // since we know it's pointing at the string.
+//! // as this pattern cannot be described with the usual borrowing rules.
+//! // Instead we use a raw pointer, though one which is known not to be null,
+//! // as we know it's pointing at the string.
 //! struct Unmovable {
 //!     data: String,
 //!     slice: NonNull<String>,
 //! section needs to function correctly.
 //!
 //! Notice that this guarantee does *not* mean that memory does not leak! It is still
-//! completely okay not to ever call `drop` on a pinned element (e.g., you can still
+//! completely okay not ever to call `drop` on a pinned element (e.g., you can still
 //! call [`mem::forget`] on a `Pin<Box<T>>`). In the example of the doubly-linked
 //! list, that element would just stay in the list. However you may not free or reuse the storage
 //! *without calling `drop`*.
 //!     `Unpin`. This is the default, but `Unpin` is a safe trait, so as the author of
 //!     the wrapper it is your responsibility *not* to add something like
 //!     `impl<T> Unpin for Wrapper<T>`. (Notice that adding a projection operation
-//!     requires unsafe code, so the fact that `Unpin` is a safe trait  does not break
+//!     requires unsafe code, so the fact that `Unpin` is a safe trait does not break
 //!     the principle that you only have to worry about any of this if you use `unsafe`.)
 //! 2.  The destructor of the wrapper must not move structural fields out of its argument. This
 //!     is the exact point that was raised in the [previous section][drop-impl]: `drop` takes
index bf8fd63b6446f3eaa081366195a98f46b5bd1e1e..8a09877ce1f4be591140be8f90e75a4d2773cad5 100644 (file)
 ///
 /// [`Ok`]: enum.Result.html#variant.Ok
 /// [`Err`]: enum.Result.html#variant.Err
-#[derive(Clone, Copy, PartialEq, PartialOrd, Eq, Ord, Debug, Hash)]
+#[derive(Copy, PartialEq, PartialOrd, Eq, Ord, Debug, Hash)]
 #[must_use = "this `Result` may be an `Err` variant, which should be handled"]
 #[stable(feature = "rust1", since = "1.0.0")]
 pub enum Result<T, E> {
@@ -1003,6 +1003,27 @@ fn unwrap_failed<E: fmt::Debug>(msg: &str, error: E) -> ! {
 // Trait implementations
 /////////////////////////////////////////////////////////////////////////////
 
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T: Clone, E: Clone> Clone for Result<T, E> {
+    #[inline]
+    fn clone(&self) -> Self {
+        match self {
+            Ok(x) => Ok(x.clone()),
+            Err(x) => Err(x.clone()),
+        }
+    }
+
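+    // As with `Option`, `clone_from` reuses the existing payload in place when
+    // both values hold the same variant.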
+    #[inline]
+    fn clone_from(&mut self, source: &Self) {
+        match (self, source) {
+            (Ok(to), Ok(from)) => to.clone_from(from),
+            (Err(to), Err(from)) => to.clone_from(from),
+            (to, from) => *to = from.clone(),
+        }
+    }
+}
+
+
 #[stable(feature = "rust1", since = "1.0.0")]
 impl<T, E> IntoIterator for Result<T, E> {
     type Item = T;
index 0e782bef39dd8ee6b4dd011f6df94da6369dee3a..c9c73f4d66ee9f85fe63f756fa8c6233dc558bcc 100644 (file)
@@ -5420,7 +5420,7 @@ impl $traitname for $ty { }
 }
 
 impl_marker_for!(BytewiseEquality,
-                 u8 i8 u16 i16 u32 i32 u64 i64 usize isize char bool);
+                 u8 i8 u16 i16 u32 i32 u64 i64 u128 i128 usize isize char bool);
 
 #[doc(hidden)]
 unsafe impl<'a, T> TrustedRandomAccess for Iter<'a, T> {
index ef4bd83cbc5a695997a044e9fdc0677a59fe7a19..34f2d8917ea472a9f5d25af24e3875448378bedf 100644 (file)
@@ -3971,6 +3971,16 @@ pub fn eq_ignore_ascii_case(&self, other: &str) -> bool {
     /// [`to_ascii_uppercase`].
     ///
     /// [`to_ascii_uppercase`]: #method.to_ascii_uppercase
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// let mut s = String::from("Grüße, Jürgen ❤");
+    ///
+    /// s.make_ascii_uppercase();
+    ///
+    /// assert_eq!("GRüßE, JüRGEN ❤", s);
+    /// ```
     #[stable(feature = "ascii_methods_on_intrinsics", since = "1.23.0")]
     pub fn make_ascii_uppercase(&mut self) {
         let me = unsafe { self.as_bytes_mut() };
@@ -3986,6 +3996,16 @@ pub fn make_ascii_uppercase(&mut self) {
     /// [`to_ascii_lowercase`].
     ///
     /// [`to_ascii_lowercase`]: #method.to_ascii_lowercase
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// let mut s = String::from("GRÜßE, JÜRGEN ❤");
+    ///
+    /// s.make_ascii_lowercase();
+    ///
+    /// assert_eq!("grÜße, jÜrgen ❤", s);
+    /// ```
     #[stable(feature = "ascii_methods_on_intrinsics", since = "1.23.0")]
     pub fn make_ascii_lowercase(&mut self) {
         let me = unsafe { self.as_bytes_mut() };
index bedb9e756129c5a0322e3dbe22bbbbeb74992950..020618ae7aeedb04a7898b3bed2ad28ebb6d41c4 100644 (file)
@@ -1657,6 +1657,23 @@ fn test_range_nth() {
     assert_eq!(r, 20..20);
 }
 
+#[test]
+fn test_range_nth_back() {
+    assert_eq!((10..15).nth_back(0), Some(14));
+    assert_eq!((10..15).nth_back(1), Some(13));
+    assert_eq!((10..15).nth_back(4), Some(10));
+    assert_eq!((10..15).nth_back(5), None);
+    assert_eq!((-120..80_i8).nth_back(199), Some(-120));
+
+    let mut r = 10..20;
+    assert_eq!(r.nth_back(2), Some(17));
+    assert_eq!(r, 10..17);
+    assert_eq!(r.nth_back(2), Some(14));
+    assert_eq!(r, 10..14);
+    assert_eq!(r.nth_back(10), None);
+    assert_eq!(r, 10..10);
+}
+
 #[test]
 fn test_range_from_nth() {
     assert_eq!((10..).nth(0), Some(10));
@@ -1714,6 +1731,26 @@ fn test_range_inclusive_nth() {
     assert_eq!(ExactSizeIterator::is_empty(&r), true);
 }
 
+#[test]
+fn test_range_inclusive_nth_back() {
+    assert_eq!((10..=15).nth_back(0), Some(15));
+    assert_eq!((10..=15).nth_back(1), Some(14));
+    assert_eq!((10..=15).nth_back(5), Some(10));
+    assert_eq!((10..=15).nth_back(6), None);
+    assert_eq!((-120..=80_i8).nth_back(200), Some(-120));
+
+    let mut r = 10_u8..=20;
+    assert_eq!(r.nth_back(2), Some(18));
+    assert_eq!(r, 10..=17);
+    assert_eq!(r.nth_back(2), Some(15));
+    assert_eq!(r, 10..=14);
+    assert_eq!(r.is_empty(), false);
+    assert_eq!(ExactSizeIterator::is_empty(&r), false);
+    assert_eq!(r.nth_back(10), None);
+    assert_eq!(r.is_empty(), true);
+    assert_eq!(ExactSizeIterator::is_empty(&r), true);
+}
+
 #[test]
 fn test_range_step() {
     #![allow(deprecated)]
index 5050842e409711488fd92462356593674494c40e..928bdd7a7600256cff29cea50e4c7a75a09f464a 100644 (file)
@@ -1,3 +1,4 @@
+#![feature(bound_cloned)]
 #![feature(box_syntax)]
 #![feature(cell_update)]
 #![feature(core_private_bignum)]
index faeaabbf95adacb75d7edd518f11ffdba021a087..0e71426c64108e8f742fe4b6aed976c5f547630b 100644 (file)
@@ -31,6 +31,7 @@ fn ordinary() {
     test_literal!(0.1);
     test_literal!(12345.);
     test_literal!(0.9999999);
+    #[cfg(not(miri))] // Miri is too slow
     test_literal!(2.2250738585072014e-308);
 }
 
@@ -77,6 +78,7 @@ fn zero() {
     test_literal!(0.0);
     test_literal!(1e-325);
     test_literal!(1e-326);
+    #[cfg(not(miri))] // Miri is too slow
     test_literal!(1e-500);
 }
 
index 78cf07119e729770a6c1ecf83c77efc912474651..48755ae4c16411914f3cbd7820f618a8718b1ce0 100644 (file)
@@ -1,4 +1,4 @@
-use core::ops::{Range, RangeFull, RangeFrom, RangeTo, RangeInclusive};
+use core::ops::{Bound, Range, RangeFull, RangeFrom, RangeTo, RangeInclusive};
 
 // Test the Range structs without the syntactic sugar.
 
@@ -82,3 +82,18 @@ fn test_range_is_empty() {
     assert!( (NAN ..= EPSILON).is_empty());
     assert!( (NAN ..= NAN).is_empty());
 }
+
+#[test]
+fn test_bound_cloned_unbounded() {
+    assert_eq!(Bound::<&u32>::Unbounded.cloned(), Bound::Unbounded);
+}
+
+#[test]
+fn test_bound_cloned_included() {
+    assert_eq!(Bound::Included(&3).cloned(), Bound::Included(3));
+}
+
+#[test]
+fn test_bound_cloned_excluded() {
+    assert_eq!(Bound::Excluded(&3).cloned(), Bound::Excluded(3));
+}
index 50779a2d9ad08d526e5c9a02def260618c422fe7..fc32f21ec4e0a8b5816489b2f22e6786168e94bc 100644 (file)
@@ -8,3 +8,6 @@ edition = "2018"
 name = "fmt_macros"
 path = "lib.rs"
 crate-type = ["dylib"]
+
+[dependencies]
+syntax_pos = { path = "../libsyntax_pos" }
index 6fed83021609d76a16a4cf8dc2302a836d92a0bb..7d0a0035dc846cda8ef330867a9503f97d66410e 100644 (file)
@@ -10,6 +10,7 @@
 
 #![deny(rust_2018_idioms)]
 #![deny(internal)]
+#![deny(unused_lifetimes)]
 
 #![feature(nll)]
 #![feature(rustc_private)]
 use std::string;
 use std::iter;
 
+use syntax_pos::{InnerSpan, Symbol};
+
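+/// An offset into the format string; two offsets can be combined into an
+/// `InnerSpan` (used for error reporting) via `to`.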
+#[derive(Copy, Clone)]
+struct InnerOffset(usize);
+
+impl InnerOffset {
+    fn to(self, end: InnerOffset) -> InnerSpan {
+        InnerSpan::new(self.0, end.0)
+    }
+}
+
 /// A piece is a portion of the format string which represents the next part
 /// to emit. These are emitted as a stream by the `Parser` class.
 #[derive(Copy, Clone, PartialEq)]
@@ -39,7 +51,7 @@ pub enum Piece<'a> {
 #[derive(Copy, Clone, PartialEq)]
 pub struct Argument<'a> {
     /// Where to find this argument
-    pub position: Position<'a>,
+    pub position: Position,
     /// How to format the argument
     pub format: FormatSpec<'a>,
 }
@@ -54,9 +66,9 @@ pub struct FormatSpec<'a> {
     /// Packed version of various flags provided
     pub flags: u32,
     /// The integer precision to use
-    pub precision: Count<'a>,
+    pub precision: Count,
     /// The string width requested for the resulting format
-    pub width: Count<'a>,
+    pub width: Count,
     /// The descriptor string representing the name of the format desired for
     /// this argument, this can be empty or any number of characters, although
     /// it is required to be one word.
@@ -65,16 +77,16 @@ pub struct FormatSpec<'a> {
 
 /// Enum describing where an argument for a format can be located.
 #[derive(Copy, Clone, PartialEq)]
-pub enum Position<'a> {
+pub enum Position {
     /// The argument is implied to be located at an index
     ArgumentImplicitlyIs(usize),
     /// The argument is located at a specific index given in the format
     ArgumentIs(usize),
     /// The argument has a name.
-    ArgumentNamed(&'a str),
+    ArgumentNamed(Symbol),
 }
 
-impl Position<'_> {
+impl Position {
     pub fn index(&self) -> Option<usize> {
         match self {
             ArgumentIs(i) | ArgumentImplicitlyIs(i) => Some(*i),
@@ -119,11 +131,11 @@ pub enum Flag {
 /// A count is used for the precision and width parameters of an integer, and
 /// can reference either an argument or a literal integer.
 #[derive(Copy, Clone, PartialEq)]
-pub enum Count<'a> {
+pub enum Count {
     /// The count is specified explicitly.
     CountIs(usize),
     /// The count is specified by the argument with the given name.
-    CountIsName(&'a str),
+    CountIsName(Symbol),
     /// The count is specified by the argument at the given index.
     CountIsParam(usize),
     /// The count is implied and cannot be explicitly specified.
@@ -134,9 +146,8 @@ pub struct ParseError {
     pub description: string::String,
     pub note: Option<string::String>,
     pub label: string::String,
-    pub start: SpanIndex,
-    pub end: SpanIndex,
-    pub secondary_label: Option<(string::String, SpanIndex, SpanIndex)>,
+    pub span: InnerSpan,
+    pub secondary_label: Option<(string::String, InnerSpan)>,
 }
 
 /// The parser structure for interpreting the input format string. This is
@@ -155,24 +166,15 @@ pub struct Parser<'a> {
     /// `Some(raw count)` when the string is "raw", used to position spans correctly
     style: Option<usize>,
     /// Start and end byte offset of every successfully parsed argument
-    pub arg_places: Vec<(SpanIndex, SpanIndex)>,
+    pub arg_places: Vec<InnerSpan>,
     /// Characters that need to be shifted
     skips: Vec<usize>,
-    /// Span offset of the last opening brace seen, used for error reporting
-    last_opening_brace_pos: Option<SpanIndex>,
+    /// Span of the last opening brace seen, used for error reporting
+    last_opening_brace: Option<InnerSpan>,
     /// Whether the source string comes from `println!` as opposed to `format!` or `print!`
     append_newline: bool,
 }
 
-#[derive(Clone, Copy, Debug)]
-pub struct SpanIndex(pub usize);
-
-impl SpanIndex {
-    pub fn unwrap(self) -> usize {
-        self.0
-    }
-}
-
 impl<'a> Iterator for Parser<'a> {
     type Item = Piece<'a>;
 
@@ -180,19 +182,20 @@ fn next(&mut self) -> Option<Piece<'a>> {
         if let Some(&(pos, c)) = self.cur.peek() {
             match c {
                 '{' => {
-                    let curr_last_brace = self.last_opening_brace_pos;
-                    self.last_opening_brace_pos = Some(self.to_span_index(pos));
+                    let curr_last_brace = self.last_opening_brace;
+                    let byte_pos = self.to_span_index(pos);
+                    self.last_opening_brace = Some(byte_pos.to(InnerOffset(byte_pos.0 + 1)));
                     self.cur.next();
                     if self.consume('{') {
-                        self.last_opening_brace_pos = curr_last_brace;
+                        self.last_opening_brace = curr_last_brace;
 
                         Some(String(self.string(pos + 1)))
                     } else {
                         let arg = self.argument();
-                        if let Some(arg_pos) = self.must_consume('}').map(|end| {
-                            (self.to_span_index(pos), self.to_span_index(end + 1))
-                        }) {
-                            self.arg_places.push(arg_pos);
+                        if let Some(end) = self.must_consume('}') {
+                            let start = self.to_span_index(pos);
+                            let end = self.to_span_index(end + 1);
+                            self.arg_places.push(start.to(end));
                         }
                         Some(NextArgument(arg))
                     }
@@ -207,8 +210,7 @@ fn next(&mut self) -> Option<Piece<'a>> {
                             "unmatched `}` found",
                             "unmatched `}`",
                             "if you intended to print `}`, you can escape it using `}}`",
-                            err_pos,
-                            err_pos,
+                            err_pos.to(err_pos),
                         );
                         None
                     }
@@ -240,7 +242,7 @@ pub fn new(
             style,
             arg_places: vec![],
             skips,
-            last_opening_brace_pos: None,
+            last_opening_brace: None,
             append_newline,
         }
     }
@@ -252,15 +254,13 @@ fn err<S1: Into<string::String>, S2: Into<string::String>>(
         &mut self,
         description: S1,
         label: S2,
-        start: SpanIndex,
-        end: SpanIndex,
+        span: InnerSpan,
     ) {
         self.errors.push(ParseError {
             description: description.into(),
             note: None,
             label: label.into(),
-            start,
-            end,
+            span,
             secondary_label: None,
         });
     }
@@ -273,15 +273,13 @@ fn err_with_note<S1: Into<string::String>, S2: Into<string::String>, S3: Into<st
         description: S1,
         label: S2,
         note: S3,
-        start: SpanIndex,
-        end: SpanIndex,
+        span: InnerSpan,
     ) {
         self.errors.push(ParseError {
             description: description.into(),
             note: Some(note.into()),
             label: label.into(),
-            start,
-            end,
+            span,
             secondary_label: None,
         });
     }
@@ -302,22 +300,21 @@ fn consume(&mut self, c: char) -> bool {
         }
     }
 
-    fn raw(&self) -> usize {
-        self.style.map(|raw| raw + 1).unwrap_or(0)
-    }
-
-    fn to_span_index(&self, pos: usize) -> SpanIndex {
+    fn to_span_index(&self, pos: usize) -> InnerOffset {
         let mut pos = pos;
+        // This handles the raw string case; the raw count is the number of `#`
+        // symbols in `r###"..."###` (we need to add one because of the `r`).
+        let raw = self.style.map(|raw| raw + 1).unwrap_or(0);
         for skip in &self.skips {
             if pos > *skip {
                 pos += 1;
-            } else if pos == *skip && self.raw() == 0 {
+            } else if pos == *skip && raw == 0 {
                 pos += 1;
             } else {
                 break;
             }
         }
-        SpanIndex(self.raw() + pos + 1)
+        InnerOffset(raw + pos + 1)
     }
 
     /// Forces consumption of the specified character. If the character is not
@@ -335,8 +332,8 @@ fn must_consume(&mut self, c: char) -> Option<usize> {
                 let label = "expected `}`".to_owned();
                 let (note, secondary_label) = if c == '}' {
                     (Some("if you intended to print `{`, you can escape it using `{{`".to_owned()),
-                     self.last_opening_brace_pos.map(|pos| {
-                        ("because of this opening brace".to_owned(), pos, pos)
+                     self.last_opening_brace.map(|sp| {
+                        ("because of this opening brace".to_owned(), sp)
                      }))
                 } else {
                     (None, None)
@@ -345,8 +342,7 @@ fn must_consume(&mut self, c: char) -> Option<usize> {
                     description,
                     note,
                     label,
-                    start: pos,
-                    end: pos,
+                    span: pos.to(pos),
                     secondary_label,
                 });
                 None
@@ -360,8 +356,8 @@ fn must_consume(&mut self, c: char) -> Option<usize> {
                 let label = format!("expected `{:?}`", c);
                 let (note, secondary_label) = if c == '}' {
                     (Some("if you intended to print `{`, you can escape it using `{{`".to_owned()),
-                     self.last_opening_brace_pos.map(|pos| {
-                        ("because of this opening brace".to_owned(), pos, pos)
+                     self.last_opening_brace.map(|sp| {
+                        ("because of this opening brace".to_owned(), sp)
                      }))
                 } else {
                     (None, None)
@@ -370,12 +366,11 @@ fn must_consume(&mut self, c: char) -> Option<usize> {
                     description,
                     note,
                     label,
-                    start: pos,
-                    end: pos,
+                    span: pos.to(pos),
                     secondary_label,
                 });
             } else {
-                self.err(description, format!("expected `{:?}`", c), pos, pos);
+                self.err(description, format!("expected `{:?}`", c), pos.to(pos));
             }
             None
         }
@@ -434,20 +429,24 @@ fn argument(&mut self) -> Argument<'a> {
     /// integer index of an argument, a named argument, or a blank string.
     /// Returns `Some(parsed_position)` if the position is not implicitly
     /// consuming a macro argument, or `None` if it is.
-    fn position(&mut self) -> Option<Position<'a>> {
+    fn position(&mut self) -> Option<Position> {
         if let Some(i) = self.integer() {
             Some(ArgumentIs(i))
         } else {
             match self.cur.peek() {
-                Some(&(_, c)) if c.is_alphabetic() => Some(ArgumentNamed(self.word())),
+                Some(&(_, c)) if c.is_alphabetic() => {
+                    Some(ArgumentNamed(Symbol::intern(self.word())))
+                }
                 Some(&(pos, c)) if c == '_' => {
                     let invalid_name = self.string(pos);
                     self.err_with_note(format!("invalid argument name `{}`", invalid_name),
                                        "invalid argument name",
                                        "argument names cannot start with an underscore",
-                                       self.to_span_index(pos),
-                                       self.to_span_index(pos + invalid_name.len()));
-                    Some(ArgumentNamed(invalid_name))
+                                        self.to_span_index(pos).to(
+                                            self.to_span_index(pos + invalid_name.len())));
+                    Some(ArgumentNamed(Symbol::intern(invalid_name)))
                 },
 
                 // This is an `ArgumentNext`.
@@ -555,7 +554,7 @@ fn format(&mut self) -> FormatSpec<'a> {
     /// Parses a Count parameter at the current position. This does not check
     /// for 'CountIsNextParam' because that is only used in precision, not
     /// width.
-    fn count(&mut self) -> Count<'a> {
+    fn count(&mut self) -> Count {
         if let Some(i) = self.integer() {
             if self.consume('$') {
                 CountIsParam(i)
@@ -569,7 +568,7 @@ fn count(&mut self) -> Count<'a> {
                 self.cur = tmp;
                 CountImplied
             } else if self.consume('$') {
-                CountIsName(word)
+                CountIsName(Symbol::intern(word))
             } else {
                 self.cur = tmp;
                 CountImplied
@@ -759,6 +758,8 @@ fn format_align_fill() {
     }
     #[test]
     fn format_counts() {
+        use syntax_pos::{GLOBALS, Globals, edition};
+        GLOBALS.set(&Globals::new(edition::DEFAULT_EDITION), || {
         same("{:10s}",
              &[NextArgument(Argument {
                    position: ArgumentImplicitlyIs(0),
@@ -814,11 +815,12 @@ fn format_counts() {
                        fill: None,
                        align: AlignUnknown,
                        flags: 0,
-                       precision: CountIsName("b"),
-                       width: CountIsName("a"),
+                       precision: CountIsName(Symbol::intern("b")),
+                       width: CountIsName(Symbol::intern("a")),
                        ty: "s",
                    },
                })]);
+        });
     }
     #[test]
     fn format_flags() {
index b199eee6dad87a2285f099fac8a22274264fdedb..f7d1094b3a2d1407c14c471f85a7b0d37afa9760 100644 (file)
@@ -181,12 +181,9 @@ fn check_repr(&self, item: &hir::Item, target: Target) {
             let (article, allowed_targets) = match hint.name_or_empty() {
                 name @ sym::C | name @ sym::align => {
                     is_c |= name == sym::C;
-                    if target != Target::Struct &&
-                            target != Target::Union &&
-                            target != Target::Enum {
-                                ("a", "struct, enum or union")
-                    } else {
-                        continue
+                    match target {
+                        Target::Struct | Target::Union | Target::Enum => continue,
+                        _ => ("a", "struct, enum, or union"),
                     }
                 }
                 sym::packed => {
@@ -207,10 +204,9 @@ fn check_repr(&self, item: &hir::Item, target: Target) {
                 }
                 sym::transparent => {
                     is_transparent = true;
-                    if target != Target::Struct {
-                        ("a", "struct")
-                    } else {
-                        continue
+                    match target {
+                        Target::Struct | Target::Union | Target::Enum => continue,
+                        _ => ("a", "struct, enum, or union"),
                     }
                 }
                 sym::i8  | sym::u8  | sym::i16 | sym::u16 |
@@ -241,7 +237,7 @@ fn check_repr(&self, item: &hir::Item, target: Target) {
         if is_transparent && hints.len() > 1 {
             let hint_spans: Vec<_> = hint_spans.clone().collect();
             span_err!(self.tcx.sess, hint_spans, E0692,
-                      "transparent struct cannot have other repr hints");
+                      "transparent {} cannot have other repr hints", target);
         }
         // Warn on repr(u8, u16), repr(C, simd), and c-like-enum-repr(C, u8)
         if (int_reprs > 1)
@@ -277,7 +273,7 @@ fn check_stmt_attributes(&self, stmt: &hir::Stmt) {
                         attr.span,
                         stmt.span,
                         "attribute should not be applied to a statement",
-                        "not a struct, enum or union",
+                        "not a struct, enum, or union",
                     );
                 }
             }
@@ -298,7 +294,7 @@ fn check_expr_attributes(&self, expr: &hir::Expr) {
                     attr.span,
                     expr.span,
                     "attribute should not be applied to an expression",
-                    "not defining a struct, enum or union",
+                    "not defining a struct, enum, or union",
                 );
             }
         }
index 1c66f8bdf81bdd865b77c3196b06acdfd1ba2c10..f4f9d6261de4806c6e07d834a756eae729142987 100644 (file)
@@ -1020,7 +1020,6 @@ pub fn walk_expr<'v, V: Visitor<'v>>(visitor: &mut V, expression: &'v Expr) {
         ExprKind::AddrOf(_, ref subexpression) | ExprKind::Unary(_, ref subexpression) => {
             visitor.visit_expr(subexpression)
         }
-        ExprKind::Lit(_) => {}
         ExprKind::Cast(ref subexpression, ref typ) | ExprKind::Type(ref subexpression, ref typ) => {
             visitor.visit_expr(subexpression);
             visitor.visit_ty(typ)
@@ -1093,7 +1092,7 @@ pub fn walk_expr<'v, V: Visitor<'v>>(visitor: &mut V, expression: &'v Expr) {
         ExprKind::Yield(ref subexpression) => {
             visitor.visit_expr(subexpression);
         }
-        ExprKind::Err => {}
+        ExprKind::Lit(_) | ExprKind::Err => {}
     }
 }
 
index d6ad335525c147312ff16004f46f492aaefe0e3f..b5e9f6bd3a610b3c887f46cc9f266b3782a7238a 100644 (file)
@@ -67,7 +67,7 @@
 use syntax::std_inject;
 use syntax::symbol::{kw, sym, Symbol};
 use syntax::tokenstream::{TokenStream, TokenTree};
-use syntax::parse::token::Token;
+use syntax::parse::token::{self, Token};
 use syntax::visit::{self, Visitor};
 use syntax_pos::{DUMMY_SP, edition, Span};
 
@@ -1328,7 +1328,7 @@ fn lower_token_stream(&mut self, tokens: TokenStream) -> TokenStream {
 
     fn lower_token_tree(&mut self, tree: TokenTree) -> TokenStream {
         match tree {
-            TokenTree::Token(span, token) => self.lower_token(token, span),
+            TokenTree::Token(token) => self.lower_token(token),
             TokenTree::Delimited(span, delim, tts) => TokenTree::Delimited(
                 span,
                 delim,
@@ -1337,13 +1337,13 @@ fn lower_token_tree(&mut self, tree: TokenTree) -> TokenStream {
         }
     }
 
-    fn lower_token(&mut self, token: Token, span: Span) -> TokenStream {
-        match token {
-            Token::Interpolated(nt) => {
-                let tts = nt.to_tokenstream(&self.sess.parse_sess, span);
+    fn lower_token(&mut self, token: Token) -> TokenStream {
+        match token.kind {
+            token::Interpolated(nt) => {
+                let tts = nt.to_tokenstream(&self.sess.parse_sess, token.span);
                 self.lower_token_stream(tts)
             }
-            other => TokenTree::Token(span, other).into(),
+            _ => TokenTree::Token(token).into(),
         }
     }
 
@@ -2168,7 +2168,7 @@ fn lower_path_segment(
         itctx: ImplTraitContext<'_>,
         explicit_owner: Option<NodeId>,
     ) -> hir::PathSegment {
-        let (mut generic_args, infer_types) = if let Some(ref generic_args) = segment.args {
+        let (mut generic_args, infer_args) = if let Some(ref generic_args) = segment.args {
             let msg = "parenthesized type parameters may only be used with a `Fn` trait";
             match **generic_args {
                 GenericArgs::AngleBracketed(ref data) => {
@@ -2230,9 +2230,9 @@ fn lower_path_segment(
                 .collect();
             if expected_lifetimes > 0 && param_mode == ParamMode::Explicit {
                 let anon_lt_suggestion = vec!["'_"; expected_lifetimes].join(", ");
-                let no_ty_args = generic_args.args.len() == expected_lifetimes;
+                let no_non_lt_args = generic_args.args.len() == expected_lifetimes;
                 let no_bindings = generic_args.bindings.is_empty();
-                let (incl_angl_brckt, insertion_span, suggestion) = if no_ty_args && no_bindings {
+                let (incl_angl_brckt, insertion_sp, suggestion) = if no_non_lt_args && no_bindings {
                     // If there are no (non-implicit) generic args or associated type
                     // bindings, our suggestion includes the angle brackets.
                     (true, path_span.shrink_to_hi(), format!("<{}>", anon_lt_suggestion))
@@ -2240,7 +2240,7 @@ fn lower_path_segment(
                     // Otherwise (sorry, this is kind of gross) we need to infer the
                     // place to splice in the `'_, ` from the generics that do exist.
                     let first_generic_span = first_generic_span
-                        .expect("already checked that type args or bindings exist");
+                        .expect("already checked that non-lifetime args or bindings exist");
                     (false, first_generic_span.shrink_to_lo(), format!("{}, ", anon_lt_suggestion))
                 };
                 match self.anonymous_lifetime_mode {
@@ -2263,7 +2263,7 @@ fn lower_path_segment(
                             expected_lifetimes,
                             path_span,
                             incl_angl_brckt,
-                            insertion_span,
+                            insertion_sp,
                             suggestion,
                         );
                         err.emit();
@@ -2280,7 +2280,7 @@ fn lower_path_segment(
                                 expected_lifetimes,
                                 path_span,
                                 incl_angl_brckt,
-                                insertion_span,
+                                insertion_sp,
                                 suggestion,
                             )
                         );
@@ -2305,7 +2305,7 @@ fn lower_path_segment(
             Some(id),
             Some(self.lower_res(res)),
             generic_args,
-            infer_types,
+            infer_args,
         )
     }
 
@@ -2316,9 +2316,10 @@ fn lower_angle_bracketed_parameter_data(
         mut itctx: ImplTraitContext<'_>,
     ) -> (hir::GenericArgs, bool) {
         let &AngleBracketedArgs { ref args, ref constraints, .. } = data;
-        let has_types = args.iter().any(|arg| match arg {
+        let has_non_lt_args = args.iter().any(|arg| match arg {
+            ast::GenericArg::Lifetime(_) => false,
             ast::GenericArg::Type(_) => true,
-            _ => false,
+            ast::GenericArg::Const(_) => true,
         });
         (
             hir::GenericArgs {
@@ -2328,7 +2329,7 @@ fn lower_angle_bracketed_parameter_data(
                     .collect(),
                 parenthesized: false,
             },
-            !has_types && param_mode == ParamMode::Optional
+            !has_non_lt_args && param_mode == ParamMode::Optional
         )
     }
 
index e66fa13f4fca21cfec4384bc02459a26a4ef7ccc..8a59f6b69bcd690ba3506db9b3536d2ea5025f61 100644 (file)
@@ -589,8 +589,9 @@ struct HirItemLike<T> {
     hash_bodies: bool,
 }
 
-impl<'a, 'hir, T> HashStable<StableHashingContext<'hir>> for HirItemLike<T>
-    where T: HashStable<StableHashingContext<'hir>>
+impl<'hir, T> HashStable<StableHashingContext<'hir>> for HirItemLike<T>
+where
+    T: HashStable<StableHashingContext<'hir>>,
 {
     fn hash_stable<W: StableHasherResult>(&self,
                                           hcx: &mut StableHashingContext<'hir>,
index a4484c81738983c53d434ec3be8086d6c5f78ec4..41073773e9f9bb246571ecc108695566fd4bcdd2 100644 (file)
@@ -326,7 +326,7 @@ fn visit_stmt(&mut self, stmt: &'a Stmt) {
     }
 
     fn visit_token(&mut self, t: Token) {
-        if let Token::Interpolated(nt) = t {
+        if let token::Interpolated(nt) = t.kind {
             if let token::NtExpr(ref expr) = *nt {
                 if let ExprKind::Mac(..) = expr.node {
                     self.visit_macro_invoc(expr.id);
index 3edd75fb725dafbd8fa66855c43135641019bb96..6a561f0c63a2afa556eb4563bbd33f7c414e7a7c 100644 (file)
@@ -582,9 +582,17 @@ pub fn to_string(&self) -> String {
     }
 }
 
+/// Evaluates to the number of tokens passed to it.
+///
+/// Logarithmic counting: every one or two recursive expansions, the number of
+/// tokens to count is divided by two, instead of being reduced by one.
+/// Therefore, the recursion depth is the binary logarithm of the number of
+/// tokens to count, and the expanded tree is likewise very small.
 macro_rules! count {
-    () => (0usize);
-    ( $x:tt $($xs:tt)* ) => (1usize + count!($($xs)*));
+    ()                     => (0usize);
+    ($one:tt)              => (1usize);
+    ($($pairs:tt $_p:tt)*) => (count!($($pairs)*) << 1usize);
+    ($odd:tt $($rest:tt)*) => (count!($($rest)*) | 1usize);
 }
 
 // We define the GlobalMetaDataKind enum with this macro because we want to
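
As a quick illustration of the logarithmic counting described in the new doc comment above, here is a minimal standalone sketch (not part of the commit; the macro body is copied from the hunk, only the assertion is new):

macro_rules! count {
    ()                     => (0usize);
    ($one:tt)              => (1usize);
    ($($pairs:tt $_p:tt)*) => (count!($($pairs)*) << 1usize);
    ($odd:tt $($rest:tt)*) => (count!($($rest)*) | 1usize);
}

fn main() {
    // Five tokens: the odd arm peels one token off (| 1), then the pair arm
    // halves `b c d e` to `b d` and `b d` to `b`, giving ((1 << 1) << 1) | 1 == 5.
    // The recursion depth is therefore roughly log2(5), not 5.
    assert_eq!(count!(a b c d e), 5);
}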
index 2aaf5ec775d49f98cb47398834696e74f9298dd5..1b4c56c3453a186f44b2afa17ed4b56daf6eb8ac 100644 (file)
@@ -348,7 +348,7 @@ pub struct PathSegment {
     /// This only applies to expression and pattern paths, and
     /// out of those only the segments with no type parameters
     /// to begin with, e.g., `Vec::new` is `<Vec<..>>::new::<..>`.
-    pub infer_types: bool,
+    pub infer_args: bool,
 }
 
 impl PathSegment {
@@ -358,7 +358,7 @@ pub fn from_ident(ident: Ident) -> PathSegment {
             ident,
             hir_id: None,
             res: None,
-            infer_types: true,
+            infer_args: true,
             args: None,
         }
     }
@@ -368,13 +368,13 @@ pub fn new(
         hir_id: Option<HirId>,
         res: Option<Res>,
         args: GenericArgs,
-        infer_types: bool,
+        infer_args: bool,
     ) -> Self {
         PathSegment {
             ident,
             hir_id,
             res,
-            infer_types,
+            infer_args,
             args: if args.is_empty() {
                 None
             } else {
@@ -2574,7 +2574,7 @@ pub struct CodegenFnAttrFlags: u32 {
         /// `#[cold]`: a hint to LLVM that this function, when called, is never on
         /// the hot path.
         const COLD                      = 1 << 0;
-        /// `#[allocator]`: a hint to LLVM that the pointer returned from this
+        /// `#[rustc_allocator]`: a hint to LLVM that the pointer returned from this
         /// function is never null.
         const ALLOCATOR                 = 1 << 1;
         /// `#[unwind]`: an indicator that this function may unwind despite what
index c8615f0ed1b93b0e8440ae89da3e798c115014be..7b0a499fa5c663fa43077443a3f08c95aea5e4ce 100644 (file)
@@ -1196,7 +1196,7 @@ fn print_expr_method_call(&mut self,
 
         segment.with_generic_args(|generic_args| {
             if !generic_args.args.is_empty() || !generic_args.bindings.is_empty() {
-                return self.print_generic_args(&generic_args, segment.infer_types, true);
+                return self.print_generic_args(&generic_args, segment.infer_args, true);
             }
             Ok(())
         })?;
@@ -1561,7 +1561,7 @@ pub fn print_path(&mut self,
             if segment.ident.name != kw::PathRoot {
                self.print_ident(segment.ident)?;
                segment.with_generic_args(|generic_args| {
-                   self.print_generic_args(generic_args, segment.infer_types,
+                   self.print_generic_args(generic_args, segment.infer_args,
                                            colons_before_params)
                })?;
             }
@@ -1574,7 +1574,7 @@ pub fn print_path_segment(&mut self, segment: &hir::PathSegment) -> io::Result<(
         if segment.ident.name != kw::PathRoot {
            self.print_ident(segment.ident)?;
            segment.with_generic_args(|generic_args| {
-               self.print_generic_args(generic_args, segment.infer_types, false)
+               self.print_generic_args(generic_args, segment.infer_args, false)
            })?;
         }
         Ok(())
@@ -1602,7 +1602,7 @@ pub fn print_qpath(&mut self,
                         self.print_ident(segment.ident)?;
                         segment.with_generic_args(|generic_args| {
                             self.print_generic_args(generic_args,
-                                                    segment.infer_types,
+                                                    segment.infer_args,
                                                     colons_before_params)
                         })?;
                     }
@@ -1614,7 +1614,7 @@ pub fn print_qpath(&mut self,
                 self.print_ident(item_segment.ident)?;
                 item_segment.with_generic_args(|generic_args| {
                     self.print_generic_args(generic_args,
-                                            item_segment.infer_types,
+                                            item_segment.infer_args,
                                             colons_before_params)
                 })
             }
@@ -1626,7 +1626,7 @@ pub fn print_qpath(&mut self,
                 self.print_ident(item_segment.ident)?;
                 item_segment.with_generic_args(|generic_args| {
                     self.print_generic_args(generic_args,
-                                            item_segment.infer_types,
+                                            item_segment.infer_args,
                                             colons_before_params)
                 })
             }
@@ -1635,7 +1635,7 @@ pub fn print_qpath(&mut self,
 
     fn print_generic_args(&mut self,
                              generic_args: &hir::GenericArgs,
-                             infer_types: bool,
+                             infer_args: bool,
                              colons_before_params: bool)
                              -> io::Result<()> {
         if generic_args.parenthesized {
@@ -1681,7 +1681,7 @@ fn print_generic_args(&mut self,
 
             // FIXME(eddyb): this would leak into error messages (e.g.,
             // "non-exhaustive patterns: `Some::<..>(_)` not covered").
-            if infer_types && false {
+            if infer_args && false {
                 start_or_comma(self)?;
                 self.s.word("..")?;
             }
index 8be610e8bf7af715691b4dd36bcd7fe1f432af6f..eff42efa67b92c6aaf9ddc13b765e1ea7d3cf7cf 100644 (file)
@@ -393,12 +393,13 @@ fn hash_stable<W: StableHasherResult>(
     }
 }
 
-pub fn hash_stable_trait_impls<'a, 'gcx, W>(
+pub fn hash_stable_trait_impls<'a, W>(
     hcx: &mut StableHashingContext<'a>,
     hasher: &mut StableHasher<W>,
     blanket_impls: &[DefId],
-    non_blanket_impls: &FxHashMap<fast_reject::SimplifiedType, Vec<DefId>>)
-    where W: StableHasherResult
+    non_blanket_impls: &FxHashMap<fast_reject::SimplifiedType, Vec<DefId>>,
+) where
+    W: StableHasherResult,
 {
     {
         let mut blanket_impls: SmallVec<[_; 8]> = blanket_impls
index 0cdd9a863ccb846cc5b0a5cb6ce9908229f821e3..e0c01277801d45b7107f24b83e9ec3d25018aca4 100644 (file)
@@ -170,14 +170,14 @@ fn hash_stable<W: StableHasherResult>(&self,
 
 impl_stable_hash_for!(enum ::syntax::ast::LitKind {
     Str(value, style),
-    Err(value),
     ByteStr(value),
     Byte(value),
     Char(value),
     Int(value, lit_int_type),
     Float(value, float_ty),
     FloatUnsuffixed(value),
-    Bool(value)
+    Bool(value),
+    Err(value)
 });
 
 impl_stable_hash_for_spanned!(::syntax::ast::LitKind);
@@ -261,9 +261,8 @@ fn hash_stable<W: StableHasherResult>(&self,
                                           hasher: &mut StableHasher<W>) {
         mem::discriminant(self).hash_stable(hcx, hasher);
         match *self {
-            tokenstream::TokenTree::Token(span, ref token) => {
-                span.hash_stable(hcx, hasher);
-                hash_token(token, hcx, hasher);
+            tokenstream::TokenTree::Token(ref token) => {
+                token.hash_stable(hcx, hasher);
             }
             tokenstream::TokenTree::Delimited(span, delim, ref tts) => {
                 span.hash_stable(hcx, hasher);
@@ -306,70 +305,75 @@ fn hash_stable<W: StableHasherResult>(&self,
     suffix
 });
 
-fn hash_token<'a, 'gcx, W: StableHasherResult>(
-    token: &token::Token,
-    hcx: &mut StableHashingContext<'a>,
-    hasher: &mut StableHasher<W>,
-) {
-    mem::discriminant(token).hash_stable(hcx, hasher);
-    match *token {
-        token::Token::Eq |
-        token::Token::Lt |
-        token::Token::Le |
-        token::Token::EqEq |
-        token::Token::Ne |
-        token::Token::Ge |
-        token::Token::Gt |
-        token::Token::AndAnd |
-        token::Token::OrOr |
-        token::Token::Not |
-        token::Token::Tilde |
-        token::Token::At |
-        token::Token::Dot |
-        token::Token::DotDot |
-        token::Token::DotDotDot |
-        token::Token::DotDotEq |
-        token::Token::Comma |
-        token::Token::Semi |
-        token::Token::Colon |
-        token::Token::ModSep |
-        token::Token::RArrow |
-        token::Token::LArrow |
-        token::Token::FatArrow |
-        token::Token::Pound |
-        token::Token::Dollar |
-        token::Token::Question |
-        token::Token::SingleQuote |
-        token::Token::Whitespace |
-        token::Token::Comment |
-        token::Token::Eof => {}
-
-        token::Token::BinOp(bin_op_token) |
-        token::Token::BinOpEq(bin_op_token) => {
-            std_hash::Hash::hash(&bin_op_token, hasher);
-        }
+impl<'a> HashStable<StableHashingContext<'a>> for token::TokenKind {
+    fn hash_stable<W: StableHasherResult>(&self,
+                                          hcx: &mut StableHashingContext<'a>,
+                                          hasher: &mut StableHasher<W>) {
+        mem::discriminant(self).hash_stable(hcx, hasher);
+        match *self {
+            token::Eq |
+            token::Lt |
+            token::Le |
+            token::EqEq |
+            token::Ne |
+            token::Ge |
+            token::Gt |
+            token::AndAnd |
+            token::OrOr |
+            token::Not |
+            token::Tilde |
+            token::At |
+            token::Dot |
+            token::DotDot |
+            token::DotDotDot |
+            token::DotDotEq |
+            token::Comma |
+            token::Semi |
+            token::Colon |
+            token::ModSep |
+            token::RArrow |
+            token::LArrow |
+            token::FatArrow |
+            token::Pound |
+            token::Dollar |
+            token::Question |
+            token::SingleQuote |
+            token::Whitespace |
+            token::Comment |
+            token::Eof => {}
+
+            token::BinOp(bin_op_token) |
+            token::BinOpEq(bin_op_token) => {
+                std_hash::Hash::hash(&bin_op_token, hasher);
+            }
 
-        token::Token::OpenDelim(delim_token) |
-        token::Token::CloseDelim(delim_token) => {
-            std_hash::Hash::hash(&delim_token, hasher);
-        }
-        token::Token::Literal(lit) => lit.hash_stable(hcx, hasher),
+            token::OpenDelim(delim_token) |
+            token::CloseDelim(delim_token) => {
+                std_hash::Hash::hash(&delim_token, hasher);
+            }
+            token::Literal(lit) => lit.hash_stable(hcx, hasher),
 
-        token::Token::Ident(ident, is_raw) => {
-            ident.name.hash_stable(hcx, hasher);
-            is_raw.hash_stable(hcx, hasher);
-        }
-        token::Token::Lifetime(ident) => ident.name.hash_stable(hcx, hasher),
+            token::Ident(name, is_raw) => {
+                name.hash_stable(hcx, hasher);
+                is_raw.hash_stable(hcx, hasher);
+            }
+            token::Lifetime(name) => name.hash_stable(hcx, hasher),
 
-        token::Token::Interpolated(_) => {
-            bug!("interpolated tokens should not be present in the HIR")
-        }
+            token::Interpolated(_) => {
+                bug!("interpolated tokens should not be present in the HIR")
+            }
 
-        token::Token::DocComment(val) |
-        token::Token::Shebang(val) => val.hash_stable(hcx, hasher),
+            token::DocComment(val) |
+            token::Shebang(val) => val.hash_stable(hcx, hasher),
+        }
     }
 }
 
+impl_stable_hash_for!(struct token::Token {
+    kind,
+    span
+});
+
 impl_stable_hash_for!(enum ::syntax::ast::NestedMetaItem {
     MetaItem(meta_item),
     Literal(lit)
index 563948a63514b300eb4c01a23398aa988ffb452c..89e79c56ca39d3629e190f470cbc5d2e1c953924 100644 (file)
@@ -135,8 +135,9 @@ fn hash_stable<W: StableHasherResult>(&self,
     }
 }
 
-impl<'a, 'gcx, T> HashStable<StableHashingContext<'a>> for ty::Binder<T>
-    where T: HashStable<StableHashingContext<'a>>
+impl<'a, T> HashStable<StableHashingContext<'a>> for ty::Binder<T>
+where
+    T: HashStable<StableHashingContext<'a>>,
 {
     fn hash_stable<W: StableHasherResult>(&self,
                                           hcx: &mut StableHashingContext<'a>,
@@ -192,9 +193,7 @@ fn to_stable_hash_key(&self, _: &StableHashingContext<'a>) -> region::Scope {
     }
 }
 
-impl<'a, 'gcx> HashStable<StableHashingContext<'a>>
-for ty::TyVid
-{
+impl<'a> HashStable<StableHashingContext<'a>> for ty::TyVid {
     fn hash_stable<W: StableHasherResult>(&self,
                                           _hcx: &mut StableHashingContext<'a>,
                                           _hasher: &mut StableHasher<W>) {
@@ -204,9 +203,7 @@ fn hash_stable<W: StableHasherResult>(&self,
     }
 }
 
-impl<'a, 'gcx> HashStable<StableHashingContext<'a>>
-for ty::IntVid
-{
+impl<'a> HashStable<StableHashingContext<'a>> for ty::IntVid {
     fn hash_stable<W: StableHasherResult>(&self,
                                           _hcx: &mut StableHashingContext<'a>,
                                           _hasher: &mut StableHasher<W>) {
@@ -216,9 +213,7 @@ fn hash_stable<W: StableHasherResult>(&self,
     }
 }
 
-impl<'a, 'gcx> HashStable<StableHashingContext<'a>>
-for ty::FloatVid
-{
+impl<'a> HashStable<StableHashingContext<'a>> for ty::FloatVid {
     fn hash_stable<W: StableHasherResult>(&self,
                                           _hcx: &mut StableHashingContext<'a>,
                                           _hasher: &mut StableHasher<W>) {
@@ -228,9 +223,9 @@ fn hash_stable<W: StableHasherResult>(&self,
     }
 }
 
-impl<'a, 'gcx, T> HashStable<StableHashingContext<'a>>
-for ty::steal::Steal<T>
-    where T: HashStable<StableHashingContext<'a>>
+impl<'a, T> HashStable<StableHashingContext<'a>> for ty::steal::Steal<T>
+where
+    T: HashStable<StableHashingContext<'a>>,
 {
     fn hash_stable<W: StableHasherResult>(&self,
                                           hcx: &mut StableHashingContext<'a>,
index ca766ea724f3db02379a4a398aeccf8c3496157d..b6fe57636e904c836615488504aedc157662c24b 100644 (file)
@@ -858,7 +858,7 @@ pub fn to_ty(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>) -> Ty<'tcx> {
     }
 }
 
-impl<'a, 'gcx, 'tcx> VerifyBound<'tcx> {
+impl<'tcx> VerifyBound<'tcx> {
     pub fn must_hold(&self) -> bool {
         match self {
             VerifyBound::IfEq(..) => false,
index 920f978054396b6e4d29553fb8b2a065d84f1101..e2f2799d9634dfadfd599ea5e66031379476b2ea 100644 (file)
@@ -30,6 +30,7 @@
 
 #![deny(rust_2018_idioms)]
 #![deny(internal)]
+#![deny(unused_lifetimes)]
 #![allow(explicit_outlives_requirements)]
 
 #![feature(arbitrary_self_types)]
index c5c6c4b944700c0fdef79fb6a6a9debda17d9528..b51257c520601ac734408b7dde5fd852897cdbb5 100644 (file)
@@ -566,7 +566,7 @@ impl LintPassObject for EarlyLintPassObject {}
 
 impl LintPassObject for LateLintPassObject {}
 
-pub trait LintContext<'tcx>: Sized {
+pub trait LintContext: Sized {
     type PassObject: LintPassObject;
 
     fn sess(&self) -> &Session;
@@ -700,7 +700,7 @@ fn exit_attrs(&mut self, attrs: &'a [ast::Attribute]) {
     }
 }
 
-impl<'a, 'tcx> LintContext<'tcx> for LateContext<'a, 'tcx> {
+impl LintContext for LateContext<'_, '_> {
     type PassObject = LateLintPassObject;
 
     /// Gets the overall compiler `Session` object.
@@ -728,7 +728,7 @@ fn lookup<S: Into<MultiSpan>>(&self,
     }
 }
 
-impl<'a> LintContext<'a> for EarlyContext<'a> {
+impl LintContext for EarlyContext<'_> {
     type PassObject = EarlyLintPassObject;
 
     /// Gets the overall compiler `Session` object.
index f8d7a5e29f6f612d1b51ca2d2da3c32ce41135eb..b4088201f3bf1fc17084bd699d59d877c2d0587e 100644 (file)
@@ -80,7 +80,7 @@ impl<> for enum $enum_name [ $enum_name ] { $( $variant
     // We want to use the enum name both in the `impl ... for $enum_name` as well as for
     // importing all the variants. Unfortunately it seems we have to take the name
     // twice for this purpose
-    (impl<$($lt:lifetime $(: $lt_bound:lifetime)? ),* $(,)? $($T:ident),* $(,)?>
+    (impl<$($T:ident),* $(,)?>
         for enum $enum_name:path
         [ $enum_path:path ]
     {
@@ -91,7 +91,7 @@ impl<> for enum $enum_name [ $enum_name ] { $( $variant
            $( { $($named_field:ident $(-> $named_delegate:tt)?),* } )?
         ),* $(,)?
     }) => {
-        impl<'a, $($lt $(: $lt_bound)?,)* $($T,)*>
+        impl<$($T,)*>
             ::rustc_data_structures::stable_hasher::HashStable<$crate::ich::StableHashingContext<'a>>
             for $enum_name
             where $($T: ::rustc_data_structures::stable_hasher::HashStable<$crate::ich::StableHashingContext<'a>>),*
@@ -117,13 +117,13 @@ fn hash_stable<W: ::rustc_data_structures::stable_hasher::StableHasherResult>(&s
     // Structs
     (struct $struct_name:path { $($field:ident $(-> $delegate:tt)?),* $(,)? }) => {
         impl_stable_hash_for!(
-            impl<'tcx> for struct $struct_name { $($field $(-> $delegate)?),* }
+            impl<> for struct $struct_name { $($field $(-> $delegate)?),* }
         );
     };
-    (impl<$($lt:lifetime $(: $lt_bound:lifetime)? ),* $(,)? $($T:ident),* $(,)?> for struct $struct_name:path {
+    (impl<$($T:ident),* $(,)?> for struct $struct_name:path {
         $($field:ident $(-> $delegate:tt)?),* $(,)?
     }) => {
-        impl<'a, $($lt $(: $lt_bound)?,)* $($T,)*>
+        impl<$($T,)*>
             ::rustc_data_structures::stable_hasher::HashStable<$crate::ich::StableHashingContext<'a>> for $struct_name
             where $($T: ::rustc_data_structures::stable_hasher::HashStable<$crate::ich::StableHashingContext<'a>>),*
         {
@@ -143,12 +143,12 @@ fn hash_stable<W: ::rustc_data_structures::stable_hasher::StableHasherResult>(&s
     // We cannot use normal parentheses here, the parser won't allow it
     (tuple_struct $struct_name:path { $($field:ident $(-> $delegate:tt)?),*  $(,)? }) => {
         impl_stable_hash_for!(
-            impl<'tcx> for tuple_struct $struct_name { $($field $(-> $delegate)?),* }
+            impl<> for tuple_struct $struct_name { $($field $(-> $delegate)?),* }
         );
     };
-    (impl<$($lt:lifetime $(: $lt_bound:lifetime)? ),* $(,)? $($T:ident),* $(,)?>
+    (impl<$($T:ident),* $(,)?>
      for tuple_struct $struct_name:path { $($field:ident $(-> $delegate:tt)?),*  $(,)? }) => {
-        impl<'a, $($lt $(: $lt_bound)?,)* $($T,)*>
+        impl<$($T,)*>
             ::rustc_data_structures::stable_hasher::HashStable<$crate::ich::StableHashingContext<'a>> for $struct_name
             where $($T: ::rustc_data_structures::stable_hasher::HashStable<$crate::ich::StableHashingContext<'a>>),*
         {
@@ -170,7 +170,7 @@ fn hash_stable<W: ::rustc_data_structures::stable_hasher::StableHasherResult>(&s
 macro_rules! impl_stable_hash_for_spanned {
     ($T:path) => (
 
-        impl<'a, 'tcx> HashStable<StableHashingContext<'a>> for ::syntax::source_map::Spanned<$T>
+        impl HashStable<StableHashingContext<'a>> for ::syntax::source_map::Spanned<$T>
         {
             #[inline]
             fn hash_stable<W: StableHasherResult>(&self,
index 007013f8f8287f108d93e6a55fbf42c6ccf7a117..d2cabb7e10bbff251713eb96a713874f2d222d67 100644 (file)
@@ -47,19 +47,19 @@ pub fn invalidate(&self) {
 
     pub fn predecessors(
         &self,
-        mir: &Body<'_>
+        body: &Body<'_>
     ) -> MappedReadGuard<'_, IndexVec<BasicBlock, Vec<BasicBlock>>> {
         if self.predecessors.borrow().is_none() {
-            *self.predecessors.borrow_mut() = Some(calculate_predecessors(mir));
+            *self.predecessors.borrow_mut() = Some(calculate_predecessors(body));
         }
 
         ReadGuard::map(self.predecessors.borrow(), |p| p.as_ref().unwrap())
     }
 }
 
-fn calculate_predecessors(mir: &Body<'_>) -> IndexVec<BasicBlock, Vec<BasicBlock>> {
-    let mut result = IndexVec::from_elem(vec![], mir.basic_blocks());
-    for (bb, data) in mir.basic_blocks().iter_enumerated() {
+fn calculate_predecessors(body: &Body<'_>) -> IndexVec<BasicBlock, Vec<BasicBlock>> {
+    let mut result = IndexVec::from_elem(vec![], body.basic_blocks());
+    for (bb, data) in body.basic_blocks().iter_enumerated() {
         if let Some(ref term) = data.terminator {
             for &tgt in term.successors() {
                 result[tgt].push(bb);
index 215d4295e44e1ed523bfe00a07aff0b8a9c215ad..d7caf950dcebde2e9ee0fcc9063ae5ec2ef31e86 100644 (file)
@@ -1,7 +1,7 @@
 //! The virtual memory representation of the MIR interpreter.
 
 use super::{
-    Pointer, EvalResult, AllocId, ScalarMaybeUndef, write_target_uint, read_target_uint, Scalar,
+    Pointer, InterpResult, AllocId, ScalarMaybeUndef, write_target_uint, read_target_uint, Scalar,
 };
 
 use crate::ty::layout::{Size, Align};
@@ -82,7 +82,7 @@ fn memory_read(
         _alloc: &Allocation<Tag, Self>,
         _ptr: Pointer<Tag>,
         _size: Size,
-    ) -> EvalResult<'tcx> {
+    ) -> InterpResult<'tcx> {
         Ok(())
     }
 
@@ -92,7 +92,7 @@ fn memory_written(
         _alloc: &mut Allocation<Tag, Self>,
         _ptr: Pointer<Tag>,
         _size: Size,
-    ) -> EvalResult<'tcx> {
+    ) -> InterpResult<'tcx> {
         Ok(())
     }
 
@@ -103,7 +103,7 @@ fn memory_deallocated(
         _alloc: &mut Allocation<Tag, Self>,
         _ptr: Pointer<Tag>,
         _size: Size,
-    ) -> EvalResult<'tcx> {
+    ) -> InterpResult<'tcx> {
         Ok(())
     }
 }
@@ -156,7 +156,7 @@ fn check_bounds_ptr(
         &self,
         ptr: Pointer<Tag>,
         msg: CheckInAllocMsg,
-    ) -> EvalResult<'tcx> {
+    ) -> InterpResult<'tcx> {
         let allocation_size = self.bytes.len() as u64;
         ptr.check_in_alloc(Size::from_bytes(allocation_size), msg)
     }
@@ -169,7 +169,7 @@ pub fn check_bounds(
         ptr: Pointer<Tag>,
         size: Size,
         msg: CheckInAllocMsg,
-    ) -> EvalResult<'tcx> {
+    ) -> InterpResult<'tcx> {
         // if ptr.offset is in bounds, then so is ptr (because offset checks for overflow)
         self.check_bounds_ptr(ptr.offset(size, cx)?, msg)
     }
@@ -191,7 +191,7 @@ fn get_bytes_internal(
         size: Size,
         check_defined_and_ptr: bool,
         msg: CheckInAllocMsg,
-    ) -> EvalResult<'tcx, &[u8]>
+    ) -> InterpResult<'tcx, &[u8]>
     {
         self.check_bounds(cx, ptr, size, msg)?;
 
@@ -217,7 +217,7 @@ pub fn get_bytes(
         cx: &impl HasDataLayout,
         ptr: Pointer<Tag>,
         size: Size,
-    ) -> EvalResult<'tcx, &[u8]>
+    ) -> InterpResult<'tcx, &[u8]>
     {
         self.get_bytes_internal(cx, ptr, size, true, CheckInAllocMsg::MemoryAccessTest)
     }
@@ -230,7 +230,7 @@ pub fn get_bytes_with_undef_and_ptr(
         cx: &impl HasDataLayout,
         ptr: Pointer<Tag>,
         size: Size,
-    ) -> EvalResult<'tcx, &[u8]>
+    ) -> InterpResult<'tcx, &[u8]>
     {
         self.get_bytes_internal(cx, ptr, size, false, CheckInAllocMsg::MemoryAccessTest)
     }
@@ -242,7 +242,7 @@ pub fn get_bytes_mut(
         cx: &impl HasDataLayout,
         ptr: Pointer<Tag>,
         size: Size,
-    ) -> EvalResult<'tcx, &mut [u8]>
+    ) -> InterpResult<'tcx, &mut [u8]>
     {
         assert_ne!(size.bytes(), 0, "0-sized accesses should never even get a `Pointer`");
         self.check_bounds(cx, ptr, size, CheckInAllocMsg::MemoryAccessTest)?;
@@ -267,7 +267,7 @@ pub fn read_c_str(
         &self,
         cx: &impl HasDataLayout,
         ptr: Pointer<Tag>,
-    ) -> EvalResult<'tcx, &[u8]>
+    ) -> InterpResult<'tcx, &[u8]>
     {
         assert_eq!(ptr.offset.bytes() as usize as u64, ptr.offset.bytes());
         let offset = ptr.offset.bytes() as usize;
@@ -292,7 +292,7 @@ pub fn check_bytes(
         ptr: Pointer<Tag>,
         size: Size,
         allow_ptr_and_undef: bool,
-    ) -> EvalResult<'tcx>
+    ) -> InterpResult<'tcx>
     {
         // Check bounds and relocations on the edges
         self.get_bytes_with_undef_and_ptr(cx, ptr, size)?;
@@ -312,7 +312,7 @@ pub fn write_bytes(
         cx: &impl HasDataLayout,
         ptr: Pointer<Tag>,
         src: &[u8],
-    ) -> EvalResult<'tcx>
+    ) -> InterpResult<'tcx>
     {
         let bytes = self.get_bytes_mut(cx, ptr, Size::from_bytes(src.len() as u64))?;
         bytes.clone_from_slice(src);
@@ -326,7 +326,7 @@ pub fn write_repeat(
         ptr: Pointer<Tag>,
         val: u8,
         count: Size
-    ) -> EvalResult<'tcx>
+    ) -> InterpResult<'tcx>
     {
         let bytes = self.get_bytes_mut(cx, ptr, count)?;
         for b in bytes {
@@ -348,7 +348,7 @@ pub fn read_scalar(
         cx: &impl HasDataLayout,
         ptr: Pointer<Tag>,
         size: Size
-    ) -> EvalResult<'tcx, ScalarMaybeUndef<Tag>>
+    ) -> InterpResult<'tcx, ScalarMaybeUndef<Tag>>
     {
         // get_bytes_unchecked tests relocation edges
         let bytes = self.get_bytes_with_undef_and_ptr(cx, ptr, size)?;
@@ -383,7 +383,7 @@ pub fn read_ptr_sized(
         &self,
         cx: &impl HasDataLayout,
         ptr: Pointer<Tag>,
-    ) -> EvalResult<'tcx, ScalarMaybeUndef<Tag>>
+    ) -> InterpResult<'tcx, ScalarMaybeUndef<Tag>>
     {
         self.read_scalar(cx, ptr, cx.data_layout().pointer_size)
     }
@@ -402,7 +402,7 @@ pub fn write_scalar(
         ptr: Pointer<Tag>,
         val: ScalarMaybeUndef<Tag>,
         type_size: Size,
-    ) -> EvalResult<'tcx>
+    ) -> InterpResult<'tcx>
     {
         let val = match val {
             ScalarMaybeUndef::Scalar(scalar) => scalar,
@@ -438,7 +438,7 @@ pub fn write_ptr_sized(
         cx: &impl HasDataLayout,
         ptr: Pointer<Tag>,
         val: ScalarMaybeUndef<Tag>
-    ) -> EvalResult<'tcx>
+    ) -> InterpResult<'tcx>
     {
         let ptr_size = cx.data_layout().pointer_size;
         self.write_scalar(cx, ptr.into(), val, ptr_size)
@@ -468,7 +468,7 @@ fn check_relocations(
         cx: &impl HasDataLayout,
         ptr: Pointer<Tag>,
         size: Size,
-    ) -> EvalResult<'tcx> {
+    ) -> InterpResult<'tcx> {
         if self.relocations(cx, ptr, size).is_empty() {
             Ok(())
         } else {
@@ -487,7 +487,7 @@ fn clear_relocations(
         cx: &impl HasDataLayout,
         ptr: Pointer<Tag>,
         size: Size,
-    ) -> EvalResult<'tcx> {
+    ) -> InterpResult<'tcx> {
         // Find the start and end of the given range and its outermost relocations.
         let (first, last) = {
             // Find all relocations overlapping the given range.
@@ -525,7 +525,7 @@ fn check_relocation_edges(
         cx: &impl HasDataLayout,
         ptr: Pointer<Tag>,
         size: Size,
-    ) -> EvalResult<'tcx> {
+    ) -> InterpResult<'tcx> {
         self.check_relocations(cx, ptr, Size::ZERO)?;
         self.check_relocations(cx, ptr.offset(size, cx)?, Size::ZERO)?;
         Ok(())
@@ -538,7 +538,7 @@ impl<'tcx, Tag, Extra> Allocation<Tag, Extra> {
     /// Checks that a range of bytes is defined. If not, returns the `ReadUndefBytes`
     /// error which will report the first byte which is undefined.
     #[inline]
-    fn check_defined(&self, ptr: Pointer<Tag>, size: Size) -> EvalResult<'tcx> {
+    fn check_defined(&self, ptr: Pointer<Tag>, size: Size) -> InterpResult<'tcx> {
         self.undef_mask.is_range_defined(
             ptr.offset,
             ptr.offset + size,
@@ -550,7 +550,7 @@ pub fn mark_definedness(
         ptr: Pointer<Tag>,
         size: Size,
         new_state: bool,
-    ) -> EvalResult<'tcx> {
+    ) -> InterpResult<'tcx> {
         if size.bytes() == 0 {
             return Ok(());
         }
index b4615aeb0db151959eb97f8df82caf29eb0dc7b1..a9a34f78f54d78c06ee58bdd1b41fadee70253b7 100644 (file)
@@ -179,13 +179,18 @@ pub fn struct_error<'a, 'gcx, 'tcx>(
     struct_span_err!(tcx.sess, tcx.span, E0080, "{}", msg)
 }
 
+/// Packages the kind of error we got from the const code interpreter
+/// up with a Rust-level backtrace of where the error occurred.
+/// These should always be constructed by calling `.into()` on
+/// an `InterpError`. In `librustc_mir::interpret`, we have the `err!`
+/// macro for this.
 #[derive(Debug, Clone)]
-pub struct EvalError<'tcx> {
+pub struct InterpErrorInfo<'tcx> {
     pub kind: InterpError<'tcx, u64>,
-    pub backtrace: Option<Box<Backtrace>>,
+    backtrace: Option<Box<Backtrace>>,
 }
 
-impl<'tcx> EvalError<'tcx> {
+impl<'tcx> InterpErrorInfo<'tcx> {
     pub fn print_backtrace(&mut self) {
         if let Some(ref mut backtrace) = self.backtrace {
             print_backtrace(&mut *backtrace);
@@ -198,7 +203,7 @@ fn print_backtrace(backtrace: &mut Backtrace) {
     eprintln!("\n\nAn error occurred in miri:\n{:?}", backtrace);
 }
 
-impl<'tcx> From<InterpError<'tcx, u64>> for EvalError<'tcx> {
+impl<'tcx> From<InterpError<'tcx, u64>> for InterpErrorInfo<'tcx> {
     fn from(kind: InterpError<'tcx, u64>) -> Self {
         let backtrace = match env::var("RUST_CTFE_BACKTRACE") {
             // Matching `RUST_BACKTRACE` -- we treat "0" the same as "not present".
@@ -215,7 +220,7 @@ fn from(kind: InterpError<'tcx, u64>) -> Self {
             },
             _ => None,
         };
-        EvalError {
+        InterpErrorInfo {
             kind,
             backtrace,
         }
@@ -315,7 +320,7 @@ pub enum InterpError<'tcx, O> {
     InfiniteLoop,
 }
 
-pub type EvalResult<'tcx, T = ()> = Result<T, EvalError<'tcx>>;
+pub type InterpResult<'tcx, T = ()> = Result<T, InterpErrorInfo<'tcx>>;
 
 impl<'tcx, O> InterpError<'tcx, O> {
     pub fn description(&self) -> &str {
@@ -451,7 +456,7 @@ pub fn description(&self) -> &str {
     }
 }
 
-impl<'tcx> fmt::Display for EvalError<'tcx> {
+impl<'tcx> fmt::Display for InterpErrorInfo<'tcx> {
     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
         write!(f, "{}", self.kind)
     }
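
The new doc comment on `InterpErrorInfo` describes a construct-via-`.into()` pattern; the following is a rough standalone analogue (illustrative names only, not rustc's actual types) of how a `From` impl can attach an optional backtrace gated on an environment variable, mirroring the `RUST_CTFE_BACKTRACE` handling in the hunk above:

use std::env;

#[derive(Debug)]
enum Kind {
    InfiniteLoop,
}

#[derive(Debug)]
struct ErrorInfo {
    kind: Kind,
    // Stands in for the `Option<Box<Backtrace>>` of the real type.
    backtrace: Option<String>,
}

impl From<Kind> for ErrorInfo {
    fn from(kind: Kind) -> Self {
        // Capture a backtrace only when the variable is set and not "0".
        let backtrace = match env::var("RUST_CTFE_BACKTRACE") {
            Ok(ref v) if v != "0" => Some(String::from("<backtrace would be captured here>")),
            _ => None,
        };
        ErrorInfo { kind, backtrace }
    }
}

fn main() {
    // Call sites only ever write `.into()`, as the doc comment recommends.
    let err: ErrorInfo = Kind::InfiniteLoop.into();
    println!("{:?}", err);
}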
index 964d6c01d74ad282e39b20e92d55b0c4f140328b..15b09f65c74c9b6ddb2175f140ce3b27599e45f7 100644 (file)
@@ -11,7 +11,7 @@ macro_rules! err {
 mod pointer;
 
 pub use self::error::{
-    EvalError, EvalResult, InterpError, AssertMessage, ConstEvalErr, struct_error,
+    InterpErrorInfo, InterpResult, InterpError, AssertMessage, ConstEvalErr, struct_error,
     FrameInfo, ConstEvalRawResult, ConstEvalResult, ErrorHandled,
 };
 
index 4aa83a79d52b8bef5a187e14679eefde153417a2..26002a411d4b6e48b785c770c52a7fdf9957cde9 100644 (file)
@@ -5,7 +5,7 @@
 use rustc_macros::HashStable;
 
 use super::{
-    AllocId, EvalResult, CheckInAllocMsg
+    AllocId, InterpResult, CheckInAllocMsg
 };
 
 ////////////////////////////////////////////////////////////////////////////////
@@ -52,13 +52,13 @@ fn overflowing_signed_offset(&self, val: u64, i: i128) -> (u64, bool) {
     }
 
     #[inline]
-    fn offset<'tcx>(&self, val: u64, i: u64) -> EvalResult<'tcx, u64> {
+    fn offset<'tcx>(&self, val: u64, i: u64) -> InterpResult<'tcx, u64> {
         let (res, over) = self.overflowing_offset(val, i);
         if over { err!(Overflow(mir::BinOp::Add)) } else { Ok(res) }
     }
 
     #[inline]
-    fn signed_offset<'tcx>(&self, val: u64, i: i64) -> EvalResult<'tcx, u64> {
+    fn signed_offset<'tcx>(&self, val: u64, i: i64) -> InterpResult<'tcx, u64> {
         let (res, over) = self.overflowing_signed_offset(val, i128::from(i));
         if over { err!(Overflow(mir::BinOp::Add)) } else { Ok(res) }
     }
@@ -105,7 +105,7 @@ fn from(alloc_id: AllocId) -> Self {
     }
 }
 
-impl<'tcx> Pointer<()> {
+impl Pointer<()> {
     #[inline(always)]
     pub fn new(alloc_id: AllocId, offset: Size) -> Self {
         Pointer { alloc_id, offset, tag: () }
@@ -125,7 +125,7 @@ pub fn new_with_tag(alloc_id: AllocId, offset: Size, tag: Tag) -> Self {
     }
 
     #[inline]
-    pub fn offset(self, i: Size, cx: &impl HasDataLayout) -> EvalResult<'tcx, Self> {
+    pub fn offset(self, i: Size, cx: &impl HasDataLayout) -> InterpResult<'tcx, Self> {
         Ok(Pointer::new_with_tag(
             self.alloc_id,
             Size::from_bytes(cx.data_layout().offset(self.offset.bytes(), i.bytes())?),
@@ -145,7 +145,7 @@ pub fn wrapping_offset(self, i: Size, cx: &impl HasDataLayout) -> Self {
     }
 
     #[inline]
-    pub fn signed_offset(self, i: i64, cx: &impl HasDataLayout) -> EvalResult<'tcx, Self> {
+    pub fn signed_offset(self, i: i64, cx: &impl HasDataLayout) -> InterpResult<'tcx, Self> {
         Ok(Pointer::new_with_tag(
             self.alloc_id,
             Size::from_bytes(cx.data_layout().signed_offset(self.offset.bytes(), i)?),
@@ -174,7 +174,7 @@ pub fn check_in_alloc(
         self,
         allocation_size: Size,
         msg: CheckInAllocMsg,
-    ) -> EvalResult<'tcx, ()> {
+    ) -> InterpResult<'tcx, ()> {
         if self.offset > allocation_size {
             err!(PointerOutOfBounds {
                 ptr: self.erase_tag(),
index b8d6c1224463128f861704503c083f0756c4de99..f7b3385668f7b2b79a784562e01e21102d2a39f3 100644 (file)
@@ -1,11 +1,12 @@
 use std::fmt;
 use rustc_macros::HashStable;
+use rustc_apfloat::{Float, ieee::{Double, Single}};
 
 use crate::ty::{Ty, InferConst, ParamConst, layout::{HasDataLayout, Size}, subst::SubstsRef};
 use crate::ty::PlaceholderConst;
 use crate::hir::def_id::DefId;
 
-use super::{EvalResult, Pointer, PointerArithmetic, Allocation, AllocId, sign_extend, truncate};
+use super::{InterpResult, Pointer, PointerArithmetic, Allocation, AllocId, sign_extend, truncate};
 
 /// Represents the result of a raw const operation, pre-validation.
 #[derive(Copy, Clone, Debug, Eq, PartialEq, RustcEncodable, RustcDecodable, Hash, HashStable)]
@@ -131,7 +132,21 @@ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
     }
 }
 
-impl<'tcx> Scalar<()> {
+impl<Tag> From<Single> for Scalar<Tag> {
+    #[inline(always)]
+    fn from(f: Single) -> Self {
+        Scalar::from_f32(f)
+    }
+}
+
+impl<Tag> From<Double> for Scalar<Tag> {
+    #[inline(always)]
+    fn from(f: Double) -> Self {
+        Scalar::from_f64(f)
+    }
+}
+
+impl Scalar<()> {
     #[inline(always)]
     fn check_data(data: u128, size: u8) {
         debug_assert_eq!(truncate(data, Size::from_bytes(size as u64)), data,
@@ -176,7 +191,7 @@ pub fn zst() -> Self {
     }
 
     #[inline]
-    pub fn ptr_offset(self, i: Size, cx: &impl HasDataLayout) -> EvalResult<'tcx, Self> {
+    pub fn ptr_offset(self, i: Size, cx: &impl HasDataLayout) -> InterpResult<'tcx, Self> {
         let dl = cx.data_layout();
         match self {
             Scalar::Raw { data, size } => {
@@ -206,7 +221,7 @@ pub fn ptr_wrapping_offset(self, i: Size, cx: &impl HasDataLayout) -> Self {
     }
 
     #[inline]
-    pub fn ptr_signed_offset(self, i: i64, cx: &impl HasDataLayout) -> EvalResult<'tcx, Self> {
+    pub fn ptr_signed_offset(self, i: i64, cx: &impl HasDataLayout) -> InterpResult<'tcx, Self> {
         let dl = cx.data_layout();
         match self {
             Scalar::Raw { data, size } => {
@@ -279,6 +294,26 @@ pub fn from_uint(i: impl Into<u128>, size: Size) -> Self {
         Scalar::Raw { data: i, size: size.bytes() as u8 }
     }
 
+    #[inline]
+    pub fn from_u8(i: u8) -> Self {
+        Scalar::Raw { data: i as u128, size: 1 }
+    }
+
+    #[inline]
+    pub fn from_u16(i: u16) -> Self {
+        Scalar::Raw { data: i as u128, size: 2 }
+    }
+
+    #[inline]
+    pub fn from_u32(i: u32) -> Self {
+        Scalar::Raw { data: i as u128, size: 4 }
+    }
+
+    #[inline]
+    pub fn from_u64(i: u64) -> Self {
+        Scalar::Raw { data: i as u128, size: 8 }
+    }
+
     #[inline]
     pub fn from_int(i: impl Into<i128>, size: Size) -> Self {
         let i = i.into();
@@ -292,13 +327,15 @@ pub fn from_int(i: impl Into<i128>, size: Size) -> Self {
     }
 
     #[inline]
-    pub fn from_f32(f: f32) -> Self {
-        Scalar::Raw { data: f.to_bits() as u128, size: 4 }
+    pub fn from_f32(f: Single) -> Self {
+        // We trust apfloat to give us properly truncated data.
+        Scalar::Raw { data: f.to_bits(), size: 4 }
     }
 
     #[inline]
-    pub fn from_f64(f: f64) -> Self {
-        Scalar::Raw { data: f.to_bits() as u128, size: 8 }
+    pub fn from_f64(f: Double) -> Self {
+        // We trust apfloat to give us properly truncated data.
+        Scalar::Raw { data: f.to_bits(), size: 8 }
     }
 
     #[inline]
@@ -322,7 +359,7 @@ pub fn to_bits_or_ptr(
     }
 
     #[inline]
-    pub fn to_bits(self, target_size: Size) -> EvalResult<'tcx, u128> {
+    pub fn to_bits(self, target_size: Size) -> InterpResult<'tcx, u128> {
         match self {
             Scalar::Raw { data, size } => {
                 assert_eq!(target_size.bytes(), size as u64);
@@ -335,7 +372,7 @@ pub fn to_bits(self, target_size: Size) -> EvalResult<'tcx, u128> {
     }
 
     #[inline]
-    pub fn to_ptr(self) -> EvalResult<'tcx, Pointer<Tag>> {
+    pub fn to_ptr(self) -> InterpResult<'tcx, Pointer<Tag>> {
         match self {
             Scalar::Raw { data: 0, .. } => err!(InvalidNullPointerUsage),
             Scalar::Raw { .. } => err!(ReadBytesAsPointer),
@@ -359,7 +396,7 @@ pub fn is_ptr(self) -> bool {
         }
     }
 
-    pub fn to_bool(self) -> EvalResult<'tcx, bool> {
+    pub fn to_bool(self) -> InterpResult<'tcx, bool> {
         match self {
             Scalar::Raw { data: 0, size: 1 } => Ok(false),
             Scalar::Raw { data: 1, size: 1 } => Ok(true),
@@ -367,7 +404,7 @@ pub fn to_bool(self) -> EvalResult<'tcx, bool> {
         }
     }
 
-    pub fn to_char(self) -> EvalResult<'tcx, char> {
+    pub fn to_char(self) -> InterpResult<'tcx, char> {
         let val = self.to_u32()?;
         match ::std::char::from_u32(val) {
             Some(c) => Ok(c),
@@ -375,51 +412,51 @@ pub fn to_char(self) -> EvalResult<'tcx, char> {
         }
     }
 
-    pub fn to_u8(self) -> EvalResult<'static, u8> {
+    pub fn to_u8(self) -> InterpResult<'static, u8> {
         let sz = Size::from_bits(8);
         let b = self.to_bits(sz)?;
         Ok(b as u8)
     }
 
-    pub fn to_u32(self) -> EvalResult<'static, u32> {
+    pub fn to_u32(self) -> InterpResult<'static, u32> {
         let sz = Size::from_bits(32);
         let b = self.to_bits(sz)?;
         Ok(b as u32)
     }
 
-    pub fn to_u64(self) -> EvalResult<'static, u64> {
+    pub fn to_u64(self) -> InterpResult<'static, u64> {
         let sz = Size::from_bits(64);
         let b = self.to_bits(sz)?;
         Ok(b as u64)
     }
 
-    pub fn to_usize(self, cx: &impl HasDataLayout) -> EvalResult<'static, u64> {
+    pub fn to_usize(self, cx: &impl HasDataLayout) -> InterpResult<'static, u64> {
         let b = self.to_bits(cx.data_layout().pointer_size)?;
         Ok(b as u64)
     }
 
-    pub fn to_i8(self) -> EvalResult<'static, i8> {
+    pub fn to_i8(self) -> InterpResult<'static, i8> {
         let sz = Size::from_bits(8);
         let b = self.to_bits(sz)?;
         let b = sign_extend(b, sz) as i128;
         Ok(b as i8)
     }
 
-    pub fn to_i32(self) -> EvalResult<'static, i32> {
+    pub fn to_i32(self) -> InterpResult<'static, i32> {
         let sz = Size::from_bits(32);
         let b = self.to_bits(sz)?;
         let b = sign_extend(b, sz) as i128;
         Ok(b as i32)
     }
 
-    pub fn to_i64(self) -> EvalResult<'static, i64> {
+    pub fn to_i64(self) -> InterpResult<'static, i64> {
         let sz = Size::from_bits(64);
         let b = self.to_bits(sz)?;
         let b = sign_extend(b, sz) as i128;
         Ok(b as i64)
     }
 
-    pub fn to_isize(self, cx: &impl HasDataLayout) -> EvalResult<'static, i64> {
+    pub fn to_isize(self, cx: &impl HasDataLayout) -> InterpResult<'static, i64> {
         let sz = cx.data_layout().pointer_size;
         let b = self.to_bits(sz)?;
         let b = sign_extend(b, sz) as i128;
@@ -427,13 +464,15 @@ pub fn to_isize(self, cx: &impl HasDataLayout) -> EvalResult<'static, i64> {
     }
 
     #[inline]
-    pub fn to_f32(self) -> EvalResult<'static, f32> {
-        Ok(f32::from_bits(self.to_u32()?))
+    pub fn to_f32(self) -> InterpResult<'static, Single> {
+        // Going through `u32` to check size and truncation.
+        Ok(Single::from_bits(self.to_u32()? as u128))
     }
 
     #[inline]
-    pub fn to_f64(self) -> EvalResult<'static, f64> {
-        Ok(f64::from_bits(self.to_u64()?))
+    pub fn to_f64(self) -> InterpResult<'static, Double> {
+        // Going through `u64` to check size and truncation.
+        Ok(Double::from_bits(self.to_u64()? as u128))
     }
 }
 
@@ -489,7 +528,7 @@ pub fn erase_tag(self) -> ScalarMaybeUndef
     }
 
     #[inline]
-    pub fn not_undef(self) -> EvalResult<'static, Scalar<Tag>> {
+    pub fn not_undef(self) -> InterpResult<'static, Scalar<Tag>> {
         match self {
             ScalarMaybeUndef::Scalar(scalar) => Ok(scalar),
             ScalarMaybeUndef::Undef => err!(ReadUndefBytes(Size::from_bytes(0))),
@@ -497,72 +536,72 @@ pub fn not_undef(self) -> EvalResult<'static, Scalar<Tag>> {
     }
 
     #[inline(always)]
-    pub fn to_ptr(self) -> EvalResult<'tcx, Pointer<Tag>> {
+    pub fn to_ptr(self) -> InterpResult<'tcx, Pointer<Tag>> {
         self.not_undef()?.to_ptr()
     }
 
     #[inline(always)]
-    pub fn to_bits(self, target_size: Size) -> EvalResult<'tcx, u128> {
+    pub fn to_bits(self, target_size: Size) -> InterpResult<'tcx, u128> {
         self.not_undef()?.to_bits(target_size)
     }
 
     #[inline(always)]
-    pub fn to_bool(self) -> EvalResult<'tcx, bool> {
+    pub fn to_bool(self) -> InterpResult<'tcx, bool> {
         self.not_undef()?.to_bool()
     }
 
     #[inline(always)]
-    pub fn to_char(self) -> EvalResult<'tcx, char> {
+    pub fn to_char(self) -> InterpResult<'tcx, char> {
         self.not_undef()?.to_char()
     }
 
     #[inline(always)]
-    pub fn to_f32(self) -> EvalResult<'tcx, f32> {
+    pub fn to_f32(self) -> InterpResult<'tcx, Single> {
         self.not_undef()?.to_f32()
     }
 
     #[inline(always)]
-    pub fn to_f64(self) -> EvalResult<'tcx, f64> {
+    pub fn to_f64(self) -> InterpResult<'tcx, Double> {
         self.not_undef()?.to_f64()
     }
 
     #[inline(always)]
-    pub fn to_u8(self) -> EvalResult<'tcx, u8> {
+    pub fn to_u8(self) -> InterpResult<'tcx, u8> {
         self.not_undef()?.to_u8()
     }
 
     #[inline(always)]
-    pub fn to_u32(self) -> EvalResult<'tcx, u32> {
+    pub fn to_u32(self) -> InterpResult<'tcx, u32> {
         self.not_undef()?.to_u32()
     }
 
     #[inline(always)]
-    pub fn to_u64(self) -> EvalResult<'tcx, u64> {
+    pub fn to_u64(self) -> InterpResult<'tcx, u64> {
         self.not_undef()?.to_u64()
     }
 
     #[inline(always)]
-    pub fn to_usize(self, cx: &impl HasDataLayout) -> EvalResult<'tcx, u64> {
+    pub fn to_usize(self, cx: &impl HasDataLayout) -> InterpResult<'tcx, u64> {
         self.not_undef()?.to_usize(cx)
     }
 
     #[inline(always)]
-    pub fn to_i8(self) -> EvalResult<'tcx, i8> {
+    pub fn to_i8(self) -> InterpResult<'tcx, i8> {
         self.not_undef()?.to_i8()
     }
 
     #[inline(always)]
-    pub fn to_i32(self) -> EvalResult<'tcx, i32> {
+    pub fn to_i32(self) -> InterpResult<'tcx, i32> {
         self.not_undef()?.to_i32()
     }
 
     #[inline(always)]
-    pub fn to_i64(self) -> EvalResult<'tcx, i64> {
+    pub fn to_i64(self) -> InterpResult<'tcx, i64> {
         self.not_undef()?.to_i64()
     }
 
     #[inline(always)]
-    pub fn to_isize(self, cx: &impl HasDataLayout) -> EvalResult<'tcx, i64> {
+    pub fn to_isize(self, cx: &impl HasDataLayout) -> InterpResult<'tcx, i64> {
         self.not_undef()?.to_isize(cx)
     }
 }
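
Since `from_f32`/`to_f32` above now go through rustc_apfloat's `Single` instead of the host `f32`, a small sketch of the bit-level round trip may help (it uses only the `from_bits`/`to_bits` calls visible in the hunks; the `rustc_apfloat` crate dependency is assumed):

use rustc_apfloat::{Float, ieee::{Double, Single}};

fn main() {
    // The same bit pattern flows through: a host float and its apfloat
    // counterpart agree bit-for-bit, which is why `Scalar` can store
    // `f.to_bits()` directly without further truncation.
    let host32 = 1.5f32;
    let soft32 = Single::from_bits(host32.to_bits() as u128);
    assert_eq!(soft32.to_bits(), host32.to_bits() as u128);

    let host64 = 2.25f64;
    let soft64 = Double::from_bits(host64.to_bits() as u128);
    assert_eq!(soft64.to_bits(), host64.to_bits() as u128);
}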
index c2b4a765c59fe7192e19df9e9888981838f2ac6d..310228838e0ad4bff571b500353acc3f735bfd48 100644 (file)
@@ -9,6 +9,7 @@
 use crate::hir::{self, InlineAsm as HirInlineAsm};
 use crate::mir::interpret::{ConstValue, InterpError, Scalar};
 use crate::mir::visit::MirVisitable;
+use rustc_data_structures::bit_set::BitMatrix;
 use rustc_data_structures::fx::FxHashSet;
 use rustc_data_structures::graph::dominators::{dominators, Dominators};
 use rustc_data_structures::graph::{self, GraphPredecessors, GraphSuccessors};
@@ -2916,7 +2917,7 @@ pub fn successor_within_block(&self) -> Location {
     }
 
     /// Returns `true` if `other` is earlier in the control flow graph than `self`.
-    pub fn is_predecessor_of<'tcx>(&self, other: Location, mir: &Body<'tcx>) -> bool {
+    pub fn is_predecessor_of<'tcx>(&self, other: Location, body: &Body<'tcx>) -> bool {
         // If we are in the same block as the other location and are an earlier statement
         // then we are a predecessor of `other`.
         if self.block == other.block && self.statement_index < other.statement_index {
@@ -2924,13 +2925,13 @@ pub fn is_predecessor_of<'tcx>(&self, other: Location, mir: &Body<'tcx>) -> bool
         }
 
         // If we're in another block, then we want to check that block is a predecessor of `other`.
-        let mut queue: Vec<BasicBlock> = mir.predecessors_for(other.block).clone();
+        let mut queue: Vec<BasicBlock> = body.predecessors_for(other.block).clone();
         let mut visited = FxHashSet::default();
 
         while let Some(block) = queue.pop() {
             // If we haven't visited this block before, then make sure we visit its predecessors.
             if visited.insert(block) {
-                queue.append(&mut mir.predecessors_for(block).clone());
+                queue.append(&mut body.predecessors_for(block).clone());
             } else {
                 continue;
             }
@@ -2997,6 +2998,11 @@ pub struct GeneratorLayout<'tcx> {
     /// be stored in multiple variants.
     pub variant_fields: IndexVec<VariantIdx, IndexVec<Field, GeneratorSavedLocal>>,
 
+    /// Which saved locals are storage-live at the same time. Locals that do not
+    /// have conflicts with each other are allowed to overlap in the computed
+    /// layout.
+    pub storage_conflicts: BitMatrix<GeneratorSavedLocal, GeneratorSavedLocal>,
+
     /// Names and scopes of all the stored generator locals.
     /// NOTE(tmandry) This is *strictly* a temporary hack for codegen
     /// debuginfo generation, and will be removed at some point.
@@ -3193,6 +3199,7 @@ impl<'tcx> TypeFoldable<'tcx> for Body<'tcx> {
     impl<'tcx> TypeFoldable<'tcx> for GeneratorLayout<'tcx> {
         field_tys,
         variant_fields,
+        storage_conflicts,
         __local_debuginfo_codegen_only_do_not_use,
     }
 }
@@ -3572,6 +3579,15 @@ fn super_visit_with<V: TypeVisitor<'tcx>>(&self, _: &mut V) -> bool {
     }
 }
 
+impl<'tcx, R: Idx, C: Idx> TypeFoldable<'tcx> for BitMatrix<R, C> {
+    fn super_fold_with<'gcx: 'tcx, F: TypeFolder<'gcx, 'tcx>>(&self, _: &mut F) -> Self {
+        self.clone()
+    }
+    fn super_visit_with<V: TypeVisitor<'tcx>>(&self, _: &mut V) -> bool {
+        false
+    }
+}
+
 impl<'tcx> TypeFoldable<'tcx> for Constant<'tcx> {
     fn super_fold_with<'gcx: 'tcx, F: TypeFolder<'gcx, 'tcx>>(&self, folder: &mut F) -> Self {
         Constant {
index 75d995d801d7853d6234fb2c551332fb18325fea..77af0e6661b732ea0f50032d90a96d939290ebff 100644 (file)
 /// A preorder traversal of this graph is either `A B D C` or `A C D B`
 #[derive(Clone)]
 pub struct Preorder<'a, 'tcx: 'a> {
-    mir: &'a Body<'tcx>,
+    body: &'a Body<'tcx>,
     visited: BitSet<BasicBlock>,
     worklist: Vec<BasicBlock>,
     root_is_start_block: bool,
 }
 
 impl<'a, 'tcx> Preorder<'a, 'tcx> {
-    pub fn new(mir: &'a Body<'tcx>, root: BasicBlock) -> Preorder<'a, 'tcx> {
+    pub fn new(body: &'a Body<'tcx>, root: BasicBlock) -> Preorder<'a, 'tcx> {
         let worklist = vec![root];
 
         Preorder {
-            mir,
-            visited: BitSet::new_empty(mir.basic_blocks().len()),
+            body,
+            visited: BitSet::new_empty(body.basic_blocks().len()),
             worklist,
             root_is_start_block: root == START_BLOCK,
         }
     }
 }
 
-pub fn preorder<'a, 'tcx>(mir: &'a Body<'tcx>) -> Preorder<'a, 'tcx> {
-    Preorder::new(mir, START_BLOCK)
+pub fn preorder<'a, 'tcx>(body: &'a Body<'tcx>) -> Preorder<'a, 'tcx> {
+    Preorder::new(body, START_BLOCK)
 }
 
 impl<'a, 'tcx> Iterator for Preorder<'a, 'tcx> {
@@ -53,7 +53,7 @@ fn next(&mut self) -> Option<(BasicBlock, &'a BasicBlockData<'tcx>)> {
                 continue;
             }
 
-            let data = &self.mir[idx];
+            let data = &self.body[idx];
 
             if let Some(ref term) = data.terminator {
                 self.worklist.extend(term.successors());
@@ -67,7 +67,7 @@ fn next(&mut self) -> Option<(BasicBlock, &'a BasicBlockData<'tcx>)> {
 
     fn size_hint(&self) -> (usize, Option<usize>) {
         // All the blocks, minus the number of blocks we've visited.
-        let upper = self.mir.basic_blocks().len() - self.visited.count();
+        let upper = self.body.basic_blocks().len() - self.visited.count();
 
         let lower = if self.root_is_start_block {
             // We will visit all remaining blocks exactly once.
@@ -99,23 +99,23 @@ fn size_hint(&self) -> (usize, Option<usize>) {
 ///
 /// A Postorder traversal of this graph is `D B C A` or `D C B A`
 pub struct Postorder<'a, 'tcx: 'a> {
-    mir: &'a Body<'tcx>,
+    body: &'a Body<'tcx>,
     visited: BitSet<BasicBlock>,
     visit_stack: Vec<(BasicBlock, Successors<'a>)>,
     root_is_start_block: bool,
 }
 
 impl<'a, 'tcx> Postorder<'a, 'tcx> {
-    pub fn new(mir: &'a Body<'tcx>, root: BasicBlock) -> Postorder<'a, 'tcx> {
+    pub fn new(body: &'a Body<'tcx>, root: BasicBlock) -> Postorder<'a, 'tcx> {
         let mut po = Postorder {
-            mir,
-            visited: BitSet::new_empty(mir.basic_blocks().len()),
+            body,
+            visited: BitSet::new_empty(body.basic_blocks().len()),
             visit_stack: Vec::new(),
             root_is_start_block: root == START_BLOCK,
         };
 
 
-        let data = &po.mir[root];
+        let data = &po.body[root];
 
         if let Some(ref term) = data.terminator {
             po.visited.insert(root);
@@ -186,7 +186,7 @@ fn traverse_successor(&mut self) {
             };
 
             if self.visited.insert(bb) {
-                if let Some(term) = &self.mir[bb].terminator {
+                if let Some(term) = &self.body[bb].terminator {
                     self.visit_stack.push((bb, term.successors()));
                 }
             }
@@ -194,8 +194,8 @@ fn traverse_successor(&mut self) {
     }
 }
 
-pub fn postorder<'a, 'tcx>(mir: &'a Body<'tcx>) -> Postorder<'a, 'tcx> {
-    Postorder::new(mir, START_BLOCK)
+pub fn postorder<'a, 'tcx>(body: &'a Body<'tcx>) -> Postorder<'a, 'tcx> {
+    Postorder::new(body, START_BLOCK)
 }
 
 impl<'a, 'tcx> Iterator for Postorder<'a, 'tcx> {
@@ -207,12 +207,12 @@ fn next(&mut self) -> Option<(BasicBlock, &'a BasicBlockData<'tcx>)> {
             self.traverse_successor();
         }
 
-        next.map(|(bb, _)| (bb, &self.mir[bb]))
+        next.map(|(bb, _)| (bb, &self.body[bb]))
     }
 
     fn size_hint(&self) -> (usize, Option<usize>) {
         // All the blocks, minus the number of blocks we've visited.
-        let upper = self.mir.basic_blocks().len() - self.visited.count();
+        let upper = self.body.basic_blocks().len() - self.visited.count();
 
         let lower = if self.root_is_start_block {
             // We will visit all remaining blocks exactly once.
@@ -252,19 +252,19 @@ fn size_hint(&self) -> (usize, Option<usize>) {
 /// to re-use the traversal
 #[derive(Clone)]
 pub struct ReversePostorder<'a, 'tcx: 'a> {
-    mir: &'a Body<'tcx>,
+    body: &'a Body<'tcx>,
     blocks: Vec<BasicBlock>,
     idx: usize
 }
 
 impl<'a, 'tcx> ReversePostorder<'a, 'tcx> {
-    pub fn new(mir: &'a Body<'tcx>, root: BasicBlock) -> ReversePostorder<'a, 'tcx> {
-        let blocks : Vec<_> = Postorder::new(mir, root).map(|(bb, _)| bb).collect();
+    pub fn new(body: &'a Body<'tcx>, root: BasicBlock) -> ReversePostorder<'a, 'tcx> {
+        let blocks : Vec<_> = Postorder::new(body, root).map(|(bb, _)| bb).collect();
 
         let len = blocks.len();
 
         ReversePostorder {
-            mir,
+            body,
             blocks,
             idx: len
         }
@@ -276,8 +276,8 @@ pub fn reset(&mut self) {
 }
 
 
-pub fn reverse_postorder<'a, 'tcx>(mir: &'a Body<'tcx>) -> ReversePostorder<'a, 'tcx> {
-    ReversePostorder::new(mir, START_BLOCK)
+pub fn reverse_postorder<'a, 'tcx>(body: &'a Body<'tcx>) -> ReversePostorder<'a, 'tcx> {
+    ReversePostorder::new(body, START_BLOCK)
 }
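
Reverse postorder visits each block after its predecessors (ignoring back edges), which is why it is a natural order for forward analyses, and the cached `blocks` vector plus `reset` make it cheap to replay. A hypothetical sketch:

    use rustc::mir::{traversal, Body};

    fn two_passes(body: &Body<'_>) {
        let mut rpo = traversal::reverse_postorder(body);
        for (bb, _data) in &mut rpo {
            // first walk over the blocks in reverse postorder
            let _ = bb;
        }
        rpo.reset();
        for (bb, _data) in &mut rpo {
            // second walk reuses the already-computed order
            let _ = bb;
        }
    }
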
 
 impl<'a, 'tcx> Iterator for ReversePostorder<'a, 'tcx> {
@@ -287,7 +287,7 @@ fn next(&mut self) -> Option<(BasicBlock, &'a BasicBlockData<'tcx>)> {
         if self.idx == 0 { return None; }
         self.idx -= 1;
 
-        self.blocks.get(self.idx).map(|&bb| (bb, &self.mir[bb]))
+        self.blocks.get(self.idx).map(|&bb| (bb, &self.body[bb]))
     }
 
     fn size_hint(&self) -> (usize, Option<usize>) {
index dd33fae0d6197ed509cd86a7d0d863bff3ad65a2..babce812d4a39157fb2c175f006249bc510236a5 100644 (file)
@@ -71,8 +71,8 @@ pub trait $visitor_trait_name<'tcx> {
             // Override these, and call `self.super_xxx` to revert back to the
             // default behavior.
 
-            fn visit_body(&mut self, mir: & $($mutability)? Body<'tcx>) {
-                self.super_body(mir);
+            fn visit_body(&mut self, body: & $($mutability)? Body<'tcx>) {
+                self.super_body(body);
             }
 
             fn visit_basic_block_data(&mut self,
@@ -151,6 +151,13 @@ fn visit_place(&mut self,
                 self.super_place(place, context, location);
             }
 
+            fn visit_place_base(&mut self,
+                                place_base: & $($mutability)? PlaceBase<'tcx>,
+                                context: PlaceContext,
+                                location: Location) {
+                self.super_place_base(place_base, context, location);
+            }
+
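
Following the "override these, and call `self.super_xxx`" convention above, a downstream visitor can now hook place bases directly. A hypothetical example using the immutable `Visitor` instantiation of this macro:

    use rustc::mir::visit::{PlaceContext, Visitor};
    use rustc::mir::{Local, Location, PlaceBase};

    // Collects every local that appears as the base of a place.
    struct BaseCollector {
        locals: Vec<Local>,
    }

    impl<'tcx> Visitor<'tcx> for BaseCollector {
        fn visit_place_base(&mut self,
                            place_base: &PlaceBase<'tcx>,
                            context: PlaceContext,
                            location: Location) {
            if let PlaceBase::Local(local) = place_base {
                self.locals.push(*local);
            }
            // Keep the default recursion (e.g. into a static's type).
            self.super_place_base(place_base, context, location);
        }
    }
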
             fn visit_projection(&mut self,
                                 place: & $($mutability)? Projection<'tcx>,
                                 context: PlaceContext,
@@ -158,12 +165,6 @@ fn visit_projection(&mut self,
                 self.super_projection(place, context, location);
             }
 
-            fn visit_projection_elem(&mut self,
-                                     place: & $($mutability)? PlaceElem<'tcx>,
-                                     location: Location) {
-                self.super_projection_elem(place, location);
-            }
-
             fn visit_constant(&mut self,
                               constant: & $($mutability)? Constant<'tcx>,
                               location: Location) {
@@ -252,41 +253,41 @@ fn visit_source_scope(&mut self,
             // not meant to be overridden.
 
             fn super_body(&mut self,
-                         mir: & $($mutability)? Body<'tcx>) {
-                if let Some(yield_ty) = &$($mutability)? mir.yield_ty {
+                         body: & $($mutability)? Body<'tcx>) {
+                if let Some(yield_ty) = &$($mutability)? body.yield_ty {
                     self.visit_ty(yield_ty, TyContext::YieldTy(SourceInfo {
-                        span: mir.span,
+                        span: body.span,
                         scope: OUTERMOST_SOURCE_SCOPE,
                     }));
                 }
 
                 // for best performance, we want to use an iterator rather
-                // than a for-loop, to avoid calling `mir::Body::invalidate` for
+                // than a for-loop, to avoid calling `Body::invalidate` for
                 // each basic block.
                 macro_rules! basic_blocks {
-                    (mut) => (mir.basic_blocks_mut().iter_enumerated_mut());
-                    () => (mir.basic_blocks().iter_enumerated());
+                    (mut) => (body.basic_blocks_mut().iter_enumerated_mut());
+                    () => (body.basic_blocks().iter_enumerated());
                 };
                 for (bb, data) in basic_blocks!($($mutability)?) {
                     self.visit_basic_block_data(bb, data);
                 }
 
-                for scope in &$($mutability)? mir.source_scopes {
+                for scope in &$($mutability)? body.source_scopes {
                     self.visit_source_scope_data(scope);
                 }
 
-                self.visit_ty(&$($mutability)? mir.return_ty(), TyContext::ReturnTy(SourceInfo {
-                    span: mir.span,
+                self.visit_ty(&$($mutability)? body.return_ty(), TyContext::ReturnTy(SourceInfo {
+                    span: body.span,
                     scope: OUTERMOST_SOURCE_SCOPE,
                 }));
 
-                for local in mir.local_decls.indices() {
-                    self.visit_local_decl(local, & $($mutability)? mir.local_decls[local]);
+                for local in body.local_decls.indices() {
+                    self.visit_local_decl(local, & $($mutability)? body.local_decls[local]);
                 }
 
                 macro_rules! type_annotations {
-                    (mut) => (mir.user_type_annotations.iter_enumerated_mut());
-                    () => (mir.user_type_annotations.iter_enumerated());
+                    (mut) => (body.user_type_annotations.iter_enumerated_mut());
+                    () => (body.user_type_annotations.iter_enumerated());
                 };
 
                 for (index, annotation) in type_annotations!($($mutability)?) {
@@ -295,7 +296,7 @@ macro_rules! type_annotations {
                     );
                 }
 
-                self.visit_span(&$($mutability)? mir.span);
+                self.visit_span(&$($mutability)? body.span);
             }
 
             fn super_basic_block_data(&mut self,
@@ -676,36 +677,44 @@ fn super_place(&mut self,
                             context: PlaceContext,
                             location: Location) {
                 match place {
-                    Place::Base(PlaceBase::Local(local)) => {
-                        self.visit_local(local, context, location);
-                    }
-                    Place::Base(PlaceBase::Static(box Static { kind: _, ty })) => {
-                        self.visit_ty(& $($mutability)? *ty, TyContext::Location(location));
+                    Place::Base(place_base) => {
+                        self.visit_place_base(place_base, context, location);
                     }
                     Place::Projection(proj) => {
+                        let context = if context.is_mutating_use() {
+                            PlaceContext::MutatingUse(MutatingUseContext::Projection)
+                        } else {
+                            PlaceContext::NonMutatingUse(NonMutatingUseContext::Projection)
+                        };
+
                         self.visit_projection(proj, context, location);
                     }
                 }
             }
 
-            fn super_projection(&mut self,
-                                proj: & $($mutability)? Projection<'tcx>,
+            fn super_place_base(&mut self,
+                                place_base: & $($mutability)? PlaceBase<'tcx>,
                                 context: PlaceContext,
                                 location: Location) {
-                let Projection { base, elem } = proj;
-                let context = if context.is_mutating_use() {
-                    PlaceContext::MutatingUse(MutatingUseContext::Projection)
-                } else {
-                    PlaceContext::NonMutatingUse(NonMutatingUseContext::Projection)
-                };
-                self.visit_place(base, context, location);
-                self.visit_projection_elem(elem, location);
+                match place_base {
+                    PlaceBase::Local(local) => {
+                        self.visit_local(local, context, location);
+                    }
+                    PlaceBase::Static(box Static { kind: _, ty }) => {
+                        self.visit_ty(& $($mutability)? *ty, TyContext::Location(location));
+                    }
+                }
             }
 
-            fn super_projection_elem(&mut self,
-                                     proj: & $($mutability)? PlaceElem<'tcx>,
-                                     location: Location) {
-                match proj {
+            fn super_projection(&mut self,
+                                proj: & $($mutability)? Projection<'tcx>,
+                                context: PlaceContext,
+                                location: Location) {
+                // this is calling `super_place` in preparation for changing `Place` to be
+                // a struct with a base and a slice of projections. `visit_place` should only ever
+                // be called for the outermost place now.
+                self.super_place(& $($mutability)? proj.base, context, location);
+                match & $($mutability)? proj.elem {
                     ProjectionElem::Deref => {
                     }
                     ProjectionElem::Subslice { from: _, to: _ } => {
@@ -825,8 +834,8 @@ fn super_closure_substs(&mut self,
 
             // Convenience methods
 
-            fn visit_location(&mut self, mir: & $($mutability)? Body<'tcx>, location: Location) {
-                let basic_block = & $($mutability)? mir[location.block];
+            fn visit_location(&mut self, body: & $($mutability)? Body<'tcx>, location: Location) {
+                let basic_block = & $($mutability)? body[location.block];
                 if basic_block.statements.len() == location.statement_index {
                     if let Some(ref $($mutability)? terminator) = basic_block.terminator {
                         self.visit_terminator(terminator, location)
@@ -958,7 +967,7 @@ pub enum PlaceContext {
     NonUse(NonUseContext),
 }
 
-impl<'tcx> PlaceContext {
+impl PlaceContext {
     /// Returns `true` if this place context represents a drop.
     pub fn is_drop(&self) -> bool {
         match *self {
index 003fdd501a35ab55e5fc04b54f1717b363e3cb46..f4d523b92338ce95de4a255b37642c396fffdf40 100644 (file)
@@ -227,13 +227,17 @@ pub fn extension(&self) -> &'static str {
     }
 }
 
+/// The type of diagnostics output to generate.
 #[derive(Clone, Copy, Debug, PartialEq, Eq)]
 pub enum ErrorOutputType {
+    /// Output meant for the consumption of humans.
     HumanReadable(HumanReadableErrorType),
+    /// Output that's consumed by other tools such as `rustfix` or the `RLS`.
     Json {
-        /// Render the json in a human readable way (with indents and newlines)
+        /// Render the JSON in a human readable way (with indents and newlines).
         pretty: bool,
-        /// The way the `rendered` field is created
+        /// The JSON output includes a `rendered` field containing the rendered
+        /// human-readable output.
         json_rendered: HumanReadableErrorType,
     },
 }
@@ -785,7 +789,7 @@ pub fn $buildfn(matches: &getopts::Matches, error_format: ErrorOutputType) -> $s
         return op;
     }
 
-    impl<'a> dep_tracking::DepTrackingHash for $struct_name {
+    impl dep_tracking::DepTrackingHash for $struct_name {
         fn hash(&self, hasher: &mut DefaultHasher, error_format: ErrorOutputType) {
             let mut sub_hashes = BTreeMap::new();
             $({
index 7505b3c1be8444b1995938f93b80c259bb4e2716..9ce35d15d3c334a74c274cce694ecfdc58bbed8c 100644 (file)
@@ -270,7 +270,7 @@ impl<'a, 'tcx> AutoTraitFinder<'a, 'tcx> {
     // the final synthesized generics: we don't want our generated docs page to contain something
     // like 'T: Copy + Clone', as that's redundant. Therefore, we keep track of a separate
     // 'user_env', which only holds the predicates that will actually be displayed to the user.
-    fn evaluate_predicates<'b, 'gcx, 'c>(
+    fn evaluate_predicates<'b, 'c>(
         &self,
         infcx: &InferCtxt<'b, 'tcx, 'c>,
         trait_did: DefId,
index 490501bde73e53ea5facc24f05f08f161b426747..50d2eeef421c13d767e6a4e50196dddeb8ebd2d0 100644 (file)
@@ -353,7 +353,7 @@ fn on_unimplemented_note(
             _ => {
                 // this is a "direct", user-specified, rather than derived,
                 // obligation.
-                flags.push(("direct".to_owned(), None));
+                flags.push((sym::direct, None));
             }
         }
 
@@ -365,27 +365,27 @@ fn on_unimplemented_note(
             // Currently I'm leaving it for what I need for `try`.
             if self.tcx.trait_of_item(item) == Some(trait_ref.def_id) {
                 let method = self.tcx.item_name(item);
-                flags.push(("from_method".to_owned(), None));
-                flags.push(("from_method".to_owned(), Some(method.to_string())));
+                flags.push((sym::from_method, None));
+                flags.push((sym::from_method, Some(method.to_string())));
             }
         }
         if let Some(t) = self.get_parent_trait_ref(&obligation.cause.code) {
-            flags.push(("parent_trait".to_owned(), Some(t)));
+            flags.push((sym::parent_trait, Some(t)));
         }
 
         if let Some(k) = obligation.cause.span.compiler_desugaring_kind() {
-            flags.push(("from_desugaring".to_owned(), None));
-            flags.push(("from_desugaring".to_owned(), Some(k.name().to_string())));
+            flags.push((sym::from_desugaring, None));
+            flags.push((sym::from_desugaring, Some(k.name().to_string())));
         }
         let generics = self.tcx.generics_of(def_id);
         let self_ty = trait_ref.self_ty();
         // This is also included through the generics list as `Self`,
         // but the parser won't allow you to use it
-        flags.push(("_Self".to_owned(), Some(self_ty.to_string())));
+        flags.push((sym::_Self, Some(self_ty.to_string())));
         if let Some(def) = self_ty.ty_adt_def() {
             // We also want to be able to select self's original
             // signature with no type arguments resolved
-            flags.push(("_Self".to_owned(), Some(self.tcx.type_of(def.did).to_string())));
+            flags.push((sym::_Self, Some(self.tcx.type_of(def.did).to_string())));
         }
 
         for param in generics.params.iter() {
@@ -396,38 +396,38 @@ fn on_unimplemented_note(
                 },
                 GenericParamDefKind::Lifetime => continue,
             };
-            let name = param.name.to_string();
+            let name = param.name.as_symbol();
             flags.push((name, Some(value)));
         }
 
         if let Some(true) = self_ty.ty_adt_def().map(|def| def.did.is_local()) {
-            flags.push(("crate_local".to_owned(), None));
+            flags.push((sym::crate_local, None));
         }
 
         // Allow targeting all integers using `{integral}`, even if the exact type was resolved
         if self_ty.is_integral() {
-            flags.push(("_Self".to_owned(), Some("{integral}".to_owned())));
+            flags.push((sym::_Self, Some("{integral}".to_owned())));
         }
 
         if let ty::Array(aty, len) = self_ty.sty {
-            flags.push(("_Self".to_owned(), Some("[]".to_owned())));
-            flags.push(("_Self".to_owned(), Some(format!("[{}]", aty))));
+            flags.push((sym::_Self, Some("[]".to_owned())));
+            flags.push((sym::_Self, Some(format!("[{}]", aty))));
             if let Some(def) = aty.ty_adt_def() {
                 // We also want to be able to select the array's type's original
                 // signature with no type arguments resolved
                 flags.push((
-                    "_Self".to_owned(),
+                    sym::_Self,
                     Some(format!("[{}]", self.tcx.type_of(def.did).to_string())),
                 ));
                 let tcx = self.tcx;
                 if let Some(len) = len.assert_usize(tcx) {
                     flags.push((
-                        "_Self".to_owned(),
+                        sym::_Self,
                         Some(format!("[{}; {}]", self.tcx.type_of(def.did).to_string(), len)),
                     ));
                 } else {
                     flags.push((
-                        "_Self".to_owned(),
+                        sym::_Self,
                         Some(format!("[{}; _]", self.tcx.type_of(def.did).to_string())),
                     ));
                 }
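
These `Symbol` flags are the condition keys that `#[rustc_on_unimplemented]` can match on, and the `_Self` special cases above (`{integral}`, the array forms) exist so an attribute can target whole families of types. An illustrative, hypothetical attribute using only keys and format arguments handled here (the attribute itself is internal and is used in `core`/`std`, not in ordinary crates):

    #[rustc_on_unimplemented(
        on(_Self = "{integral}",
           label = "`Example` is not implemented for any integer type"),
        message = "the trait bound `{Self}: Example` is not satisfied",
        label = "the trait `Example` is not implemented for `{Self}`"
    )]
    trait Example {}
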
index c135b0b759c6f2d3574917667d4e8db90c41a72a..fd2d8fd0661a5cf1276e953d98c30fc8b6a76114 100644 (file)
@@ -1191,7 +1191,7 @@ fn fold_ex_clause_with<'gcx: 'tcx, F: TypeFolder<'gcx, 'tcx>>(
         folder: &mut F,
     ) -> chalk_engine::ExClause<Self>;
 
-    fn visit_ex_clause_with<'gcx: 'tcx, V: TypeVisitor<'tcx>>(
+    fn visit_ex_clause_with<V: TypeVisitor<'tcx>>(
         ex_clause: &chalk_engine::ExClause<Self>,
         visitor: &mut V,
     ) -> bool;
index 1c17ace90c2fbbc458f1ea4e8dab464da5e67ed7..b78396c90dc657f2a70aba7c37922ac30048891d 100644 (file)
@@ -7,7 +7,7 @@
 
 use syntax::ast::{MetaItem, NestedMetaItem};
 use syntax::attr;
-use syntax::symbol::sym;
+use syntax::symbol::{Symbol, kw, sym};
 use syntax_pos::Span;
 use syntax_pos::symbol::LocalInternedString;
 
@@ -52,7 +52,7 @@ fn parse_error(tcx: TyCtxt<'_, '_, '_>, span: Span,
 }
 
 impl<'a, 'gcx, 'tcx> OnUnimplementedDirective {
-    pub fn parse(tcx: TyCtxt<'a, 'gcx, 'tcx>,
+    fn parse(tcx: TyCtxt<'a, 'gcx, 'tcx>,
                  trait_def_id: DefId,
                  items: &[NestedMetaItem],
                  span: Span,
@@ -167,7 +167,7 @@ pub fn of_item(tcx: TyCtxt<'a, 'gcx, 'tcx>,
     pub fn evaluate(&self,
                     tcx: TyCtxt<'a, 'gcx, 'tcx>,
                     trait_ref: ty::TraitRef<'tcx>,
-                    options: &[(String, Option<String>)])
+                    options: &[(Symbol, Option<String>)])
                     -> OnUnimplementedNote
     {
         let mut message = None;
@@ -180,7 +180,7 @@ pub fn evaluate(&self,
                 if !attr::eval_condition(condition, &tcx.sess.parse_sess, &mut |c| {
                     c.ident().map_or(false, |ident| {
                         options.contains(&(
-                            ident.to_string(),
+                            ident.name,
                             c.value_str().map(|s| s.as_str().to_string())
                         ))
                     })
@@ -203,8 +203,8 @@ pub fn evaluate(&self,
             }
         }
 
-        let options: FxHashMap<String, String> = options.into_iter()
-            .filter_map(|(k, v)| v.as_ref().map(|v| (k.to_owned(), v.to_owned())))
+        let options: FxHashMap<Symbol, String> = options.into_iter()
+            .filter_map(|(k, v)| v.as_ref().map(|v| (*k, v.to_owned())))
             .collect();
         OnUnimplementedNote {
             label: label.map(|l| l.format(tcx, trait_ref, &options)),
@@ -215,7 +215,7 @@ pub fn evaluate(&self,
 }
 
 impl<'a, 'gcx, 'tcx> OnUnimplementedFormatString {
-    pub fn try_parse(tcx: TyCtxt<'a, 'gcx, 'tcx>,
+    fn try_parse(tcx: TyCtxt<'a, 'gcx, 'tcx>,
                      trait_def_id: DefId,
                      from: LocalInternedString,
                      err_sp: Span)
@@ -241,16 +241,16 @@ fn verify(
                 Piece::String(_) => (), // Normal string, no need to check it
                 Piece::NextArgument(a) => match a.position {
                     // `{Self}` is allowed
-                    Position::ArgumentNamed(s) if s == "Self" => (),
+                    Position::ArgumentNamed(s) if s == kw::SelfUpper => (),
                     // `{ThisTraitsName}` is allowed
-                    Position::ArgumentNamed(s) if s == name.as_str() => (),
+                    Position::ArgumentNamed(s) if s == name => (),
                     // `{from_method}` is allowed
-                    Position::ArgumentNamed(s) if s == "from_method" => (),
+                    Position::ArgumentNamed(s) if s == sym::from_method => (),
                     // `{from_desugaring}` is allowed
-                    Position::ArgumentNamed(s) if s == "from_desugaring" => (),
+                    Position::ArgumentNamed(s) if s == sym::from_desugaring => (),
                     // So is `{A}` if A is a type parameter
                     Position::ArgumentNamed(s) => match generics.params.iter().find(|param| {
-                        param.name.as_str() == s
+                        param.name.as_symbol() == s
                     }) {
                         Some(_) => (),
                         None => {
@@ -276,7 +276,7 @@ pub fn format(
         &self,
         tcx: TyCtxt<'a, 'gcx, 'tcx>,
         trait_ref: ty::TraitRef<'tcx>,
-        options: &FxHashMap<String, String>,
+        options: &FxHashMap<Symbol, String>,
     ) -> String {
         let name = tcx.item_name(trait_ref.def_id);
         let trait_str = tcx.def_path_str(trait_ref.def_id);
@@ -289,9 +289,9 @@ pub fn format(
                 },
                 GenericParamDefKind::Lifetime => return None
             };
-            let name = param.name.to_string();
+            let name = param.name.as_symbol();
             Some((name, value))
-        }).collect::<FxHashMap<String, String>>();
+        }).collect::<FxHashMap<Symbol, String>>();
         let empty_string = String::new();
 
         let parser = Parser::new(&self.0, None, vec![], false);
@@ -299,15 +299,15 @@ pub fn format(
             match p {
                 Piece::String(s) => s,
                 Piece::NextArgument(a) => match a.position {
-                    Position::ArgumentNamed(s) => match generic_map.get(s) {
+                    Position::ArgumentNamed(s) => match generic_map.get(&s) {
                         Some(val) => val,
-                        None if s == name.as_str() => {
+                        None if s == name => {
                             &trait_str
                         }
                         None => {
-                            if let Some(val) = options.get(s) {
+                            if let Some(val) = options.get(&s) {
                                 val
-                            } else if s == "from_desugaring" || s == "from_method" {
+                            } else if s == sym::from_desugaring || s == sym::from_method {
                                 // don't break messages using these two arguments incorrectly
                                 &empty_string
                             } else {
index e3d7a4d57a5643841cc0104b8347e207e7124615..594f55e52f8e6b593b125cef0a2750390ec05bc1 100644 (file)
@@ -145,7 +145,7 @@ impl<'tcx, T> Lift<'tcx> for Normalize<T> {
 }
 
 impl_stable_hash_for! {
-    impl<'tcx, T> for struct Normalize<T> {
+    impl<T> for struct Normalize<T> {
         value
     }
 }
index 7810d65e88cc16315c3049ae0e1f0f798ad45e7c..af0b51e637a36f6b93cfa98b7901b4c490139f58 100644 (file)
@@ -104,10 +104,7 @@ pub enum IntercrateAmbiguityCause {
 impl IntercrateAmbiguityCause {
     /// Emits notes when the overlap is caused by complex intercrate ambiguities.
     /// See #23980 for details.
-    pub fn add_intercrate_ambiguity_hint<'a, 'tcx>(
-        &self,
-        err: &mut errors::DiagnosticBuilder<'_>,
-    ) {
+    pub fn add_intercrate_ambiguity_hint(&self, err: &mut errors::DiagnosticBuilder<'_>) {
         err.note(&self.intercrate_ambiguity_hint());
     }
 
@@ -2299,7 +2296,7 @@ fn assemble_candidates_for_trait_alias(
     /// candidates and prefer where-clause candidates.
     ///
     /// See the comment for "SelectionCandidate" for more details.
-    fn candidate_should_be_dropped_in_favor_of<'o>(
+    fn candidate_should_be_dropped_in_favor_of(
         &mut self,
         victim: &EvaluatedCandidate<'tcx>,
         other: &EvaluatedCandidate<'tcx>,
@@ -2423,7 +2420,7 @@ fn candidate_should_be_dropped_in_favor_of<'o>(
     // These cover the traits that are built-in to the language
     // itself: `Copy`, `Clone` and `Sized`.
 
-    fn assemble_builtin_bound_candidates<'o>(
+    fn assemble_builtin_bound_candidates(
         &mut self,
         conditions: BuiltinImplConditions<'tcx>,
         candidates: &mut SelectionCandidateSet<'tcx>,
index 4d382d6c45a76e1ad8d1269cf6b7b14e4a9ea793..f6108f2a28699479933ed1f2e339bd8e763570c7 100644 (file)
@@ -90,13 +90,13 @@ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
     }
 }
 
-impl<'tcx, N: fmt::Debug> fmt::Debug for traits::VtableBuiltinData<N> {
+impl<N: fmt::Debug> fmt::Debug for traits::VtableBuiltinData<N> {
     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
         write!(f, "VtableBuiltinData(nested={:?})", self.nested)
     }
 }
 
-impl<'tcx, N: fmt::Debug> fmt::Debug for traits::VtableAutoImplData<N> {
+impl<N: fmt::Debug> fmt::Debug for traits::VtableAutoImplData<N> {
     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
         write!(
             f,
index 7298b548f3197ab248581f3d9fa70aacffef76cb..56f75e800f2557739c7661595668bebed4f018e1 100644 (file)
@@ -2,7 +2,7 @@
 use crate::hir::def_id::DefId;
 use crate::hir;
 use crate::ty::TyCtxt;
-use syntax_pos::symbol::Symbol;
+use syntax_pos::symbol::{sym, Symbol};
 use crate::hir::map::blocks::FnLikeNode;
 use syntax::attr;
 
@@ -10,27 +10,30 @@ impl<'a, 'tcx> TyCtxt<'a, 'tcx, 'tcx> {
     /// Whether the `def_id` counts as const fn in your current crate, considering all active
     /// feature gates
     pub fn is_const_fn(self, def_id: DefId) -> bool {
-        self.is_const_fn_raw(def_id) && match self.lookup_stability(def_id) {
-            Some(stab) => match stab.const_stability {
+        self.is_const_fn_raw(def_id) && match self.is_unstable_const_fn(def_id) {
+            Some(feature_name) => {
                 // has a `rustc_const_unstable` attribute, check whether the user enabled the
-                // corresponding feature gate
-                Some(feature_name) => self.features()
+                // corresponding feature gate; `const_constructor` is not a lib feature, so it
+                // has to be checked separately.
+                self.features()
                     .declared_lib_features
                     .iter()
-                    .any(|&(sym, _)| sym == feature_name),
-                // the function has no stability attribute, it is stable as const fn or the user
-                // needs to use feature gates to use the function at all
-                None => true,
+                    .any(|&(sym, _)| sym == feature_name)
+                    || (feature_name == sym::const_constructor
+                        && self.features().const_constructor)
             },
-            // functions without stability are either stable user written const fn or the user is
-            // using feature gates and we thus don't care what they do
+            // functions without const stability are either stable user written
+            // const fn or the user is using feature gates and we thus don't
+            // care what they do
             None => true,
         }
     }
 
     /// Whether the `def_id` is an unstable const fn and what feature gate is necessary to enable it
     pub fn is_unstable_const_fn(self, def_id: DefId) -> Option<Symbol> {
-        if self.is_const_fn_raw(def_id) {
+        if self.is_constructor(def_id) {
+            Some(sym::const_constructor)
+        } else if self.is_const_fn_raw(def_id) {
             self.lookup_stability(def_id)?.const_stability
         } else {
             None
@@ -70,8 +73,11 @@ fn is_const_fn_raw<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, def_id: DefId) -> bool
         let hir_id = tcx.hir().as_local_hir_id(def_id)
                               .expect("Non-local call to local provider is_const_fn");
 
-        if let Some(fn_like) = FnLikeNode::from_node(tcx.hir().get_by_hir_id(hir_id)) {
+        let node = tcx.hir().get_by_hir_id(hir_id);
+        if let Some(fn_like) = FnLikeNode::from_node(node) {
             fn_like.constness() == hir::Constness::Const
+        } else if let hir::Node::Ctor(_) = node {
+            true
         } else {
             false
         }
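
A minimal sketch of how a hypothetical caller would see the new behaviour: constructors report `const_constructor` as their gating feature, and `is_const_fn` treats that gate specially because it is a language feature rather than a library one:

    use rustc::hir::def_id::DefId;
    use rustc::ty::TyCtxt;
    use syntax_pos::symbol::sym;

    fn classify(tcx: TyCtxt<'_, '_, '_>, def_id: DefId) -> &'static str {
        match tcx.is_unstable_const_fn(def_id) {
            Some(gate) if gate == sym::const_constructor => "constructor, gated on `const_constructor`",
            Some(_) => "const fn behind a `rustc_const_unstable` gate",
            None if tcx.is_const_fn_raw(def_id) => "stable const fn",
            None => "not a const fn",
        }
    }
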
index 59ab4561f2c8778811f567b6118e6b566aeed585..908bbbcd1d77d1759cac3520bcdf6b982fda083f 100644 (file)
@@ -154,9 +154,9 @@ pub fn map_def<U, F>(self, map: F) -> SimplifiedTypeGen<U>
     }
 }
 
-impl<'a, 'gcx, D> HashStable<StableHashingContext<'a>> for SimplifiedTypeGen<D>
-    where D: Copy + Debug + Ord + Eq + Hash +
-             HashStable<StableHashingContext<'a>>,
+impl<'a, D> HashStable<StableHashingContext<'a>> for SimplifiedTypeGen<D>
+where
+    D: Copy + Debug + Ord + Eq + Hash + HashStable<StableHashingContext<'a>>,
 {
     fn hash_stable<W: StableHasherResult>(&self,
                                           hcx: &mut StableHashingContext<'a>,
index f0251917074d44f6ec5c4f30240c5721349a1249..ba42cecbe5b83fe972264141d87132da50d652f8 100644 (file)
@@ -442,10 +442,10 @@ fn resolve_associated_item<'a, 'tcx>(
     }
 }
 
-fn needs_fn_once_adapter_shim<'a, 'tcx>(actual_closure_kind: ty::ClosureKind,
-                                        trait_closure_kind: ty::ClosureKind)
-    -> Result<bool, ()>
-{
+fn needs_fn_once_adapter_shim(
+    actual_closure_kind: ty::ClosureKind,
+    trait_closure_kind: ty::ClosureKind,
+) -> Result<bool, ()> {
     match (actual_closure_kind, trait_closure_kind) {
         (ty::ClosureKind::Fn, ty::ClosureKind::Fn) |
             (ty::ClosureKind::FnMut, ty::ClosureKind::FnMut) |
index 8e2c3dd3d8ad9d5ffb72d70838ebf6dcb86c0750..65cdd4ec4d7aab015760818f4e019b43071e75e0 100644 (file)
 
 use crate::hir;
 use crate::ich::StableHashingContext;
+use crate::mir::{GeneratorLayout, GeneratorSavedLocal};
+use crate::ty::GeneratorSubsts;
+use crate::ty::subst::Subst;
+use rustc_data_structures::bit_set::BitSet;
 use rustc_data_structures::indexed_vec::{IndexVec, Idx};
 use rustc_data_structures::stable_hasher::{HashStable, StableHasher,
                                            StableHasherResult};
@@ -212,260 +216,268 @@ pub struct LayoutCx<'tcx, C> {
     pub param_env: ty::ParamEnv<'tcx>,
 }
 
+#[derive(Copy, Clone, Debug)]
+enum StructKind {
+    /// A tuple, closure, or univariant which cannot be coerced to unsized.
+    AlwaysSized,
+    /// A univariant, the last field of which may be coerced to unsized.
+    MaybeUnsized,
+    /// A univariant, but with a prefix of an arbitrary size & alignment (e.g., enum tag).
+    Prefixed(Size, Align),
+}
+
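
The three kinds roughly correspond to the following illustrative cases (example types, not from this change):

    struct Tuple(u8, u32);                        // AlwaysSized: every field is sized
    struct Tail<T: ?Sized> { len: usize, last: T } // MaybeUnsized: the last field may be coerced to unsized
    // Enum variant payloads are laid out as Prefixed(tag_size, tag_align),
    // leaving room for the discriminant in front of the fields.
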
 impl<'a, 'tcx> LayoutCx<'tcx, TyCtxt<'a, 'tcx, 'tcx>> {
-    fn layout_raw_uncached(&self, ty: Ty<'tcx>) -> Result<&'tcx LayoutDetails, LayoutError<'tcx>> {
-        let tcx = self.tcx;
-        let param_env = self.param_env;
+    fn scalar_pair(&self, a: Scalar, b: Scalar) -> LayoutDetails {
         let dl = self.data_layout();
-        let scalar_unit = |value: Primitive| {
-            let bits = value.size(dl).bits();
-            assert!(bits <= 128);
-            Scalar {
-                value,
-                valid_range: 0..=(!0 >> (128 - bits))
-            }
-        };
-        let scalar = |value: Primitive| {
-            tcx.intern_layout(LayoutDetails::scalar(self, scalar_unit(value)))
-        };
-        let scalar_pair = |a: Scalar, b: Scalar| {
-            let b_align = b.value.align(dl);
-            let align = a.value.align(dl).max(b_align).max(dl.aggregate_align);
-            let b_offset = a.value.size(dl).align_to(b_align.abi);
-            let size = (b_offset + b.value.size(dl)).align_to(align.abi);
-            LayoutDetails {
-                variants: Variants::Single { index: VariantIdx::new(0) },
-                fields: FieldPlacement::Arbitrary {
-                    offsets: vec![Size::ZERO, b_offset],
-                    memory_index: vec![0, 1]
-                },
-                abi: Abi::ScalarPair(a, b),
-                align,
-                size
-            }
-        };
-
-        #[derive(Copy, Clone, Debug)]
-        enum StructKind {
-            /// A tuple, closure, or univariant which cannot be coerced to unsized.
-            AlwaysSized,
-            /// A univariant, the last field of which may be coerced to unsized.
-            MaybeUnsized,
-            /// A univariant, but with a prefix of an arbitrary size & alignment (e.g., enum tag).
-            Prefixed(Size, Align),
+        let b_align = b.value.align(dl);
+        let align = a.value.align(dl).max(b_align).max(dl.aggregate_align);
+        let b_offset = a.value.size(dl).align_to(b_align.abi);
+        let size = (b_offset + b.value.size(dl)).align_to(align.abi);
+        LayoutDetails {
+            variants: Variants::Single { index: VariantIdx::new(0) },
+            fields: FieldPlacement::Arbitrary {
+                offsets: vec![Size::ZERO, b_offset],
+                memory_index: vec![0, 1]
+            },
+            abi: Abi::ScalarPair(a, b),
+            align,
+            size
         }
+    }
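
To make the arithmetic concrete, a worked (hypothetical) instance for a pair of an `i32` scalar and an `i64` scalar on a 64-bit data layout, assuming the aggregate alignment does not exceed 8 bytes:

    a = i32 scalar (size 4, ABI align 4), b = i64 scalar (size 8, ABI align 8)
    b_offset = align_to(size(a), align(b))          = align_to(4, 8)  = 8
    align    = max(align(a), align(b), aggregate)   = 8
    size     = align_to(b_offset + size(b), align)  = align_to(16, 8) = 16
    fields   = Arbitrary { offsets: [0, 8], memory_index: [0, 1] }, abi = ScalarPair(a, b)
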
 
-        let univariant_uninterned = |fields: &[TyLayout<'_>], repr: &ReprOptions, kind| {
-            let packed = repr.packed();
-            if packed && repr.align > 0 {
-                bug!("struct cannot be packed and aligned");
-            }
+    fn univariant_uninterned(&self,
+                             ty: Ty<'tcx>,
+                             fields: &[TyLayout<'_>],
+                             repr: &ReprOptions,
+                             kind: StructKind) -> Result<LayoutDetails, LayoutError<'tcx>> {
+        let dl = self.data_layout();
+        let packed = repr.packed();
+        if packed && repr.align > 0 {
+            bug!("struct cannot be packed and aligned");
+        }
 
-            let pack = Align::from_bytes(repr.pack as u64).unwrap();
+        let pack = Align::from_bytes(repr.pack as u64).unwrap();
 
-            let mut align = if packed {
-                dl.i8_align
-            } else {
-                dl.aggregate_align
-            };
+        let mut align = if packed {
+            dl.i8_align
+        } else {
+            dl.aggregate_align
+        };
 
-            let mut sized = true;
-            let mut offsets = vec![Size::ZERO; fields.len()];
-            let mut inverse_memory_index: Vec<u32> = (0..fields.len() as u32).collect();
+        let mut sized = true;
+        let mut offsets = vec![Size::ZERO; fields.len()];
+        let mut inverse_memory_index: Vec<u32> = (0..fields.len() as u32).collect();
 
-            let mut optimize = !repr.inhibit_struct_field_reordering_opt();
-            if let StructKind::Prefixed(_, align) = kind {
-                optimize &= align.bytes() == 1;
-            }
+        let mut optimize = !repr.inhibit_struct_field_reordering_opt();
+        if let StructKind::Prefixed(_, align) = kind {
+            optimize &= align.bytes() == 1;
+        }
 
-            if optimize {
-                let end = if let StructKind::MaybeUnsized = kind {
-                    fields.len() - 1
-                } else {
-                    fields.len()
-                };
-                let optimizing = &mut inverse_memory_index[..end];
-                let field_align = |f: &TyLayout<'_>| {
-                    if packed { f.align.abi.min(pack) } else { f.align.abi }
-                };
-                match kind {
-                    StructKind::AlwaysSized |
-                    StructKind::MaybeUnsized => {
-                        optimizing.sort_by_key(|&x| {
-                            // Place ZSTs first to avoid "interesting offsets",
-                            // especially with only one or two non-ZST fields.
-                            let f = &fields[x as usize];
-                            (!f.is_zst(), cmp::Reverse(field_align(f)))
-                        });
-                    }
-                    StructKind::Prefixed(..) => {
-                        optimizing.sort_by_key(|&x| field_align(&fields[x as usize]));
-                    }
+        if optimize {
+            let end = if let StructKind::MaybeUnsized = kind {
+                fields.len() - 1
+            } else {
+                fields.len()
+            };
+            let optimizing = &mut inverse_memory_index[..end];
+            let field_align = |f: &TyLayout<'_>| {
+                if packed { f.align.abi.min(pack) } else { f.align.abi }
+            };
+            match kind {
+                StructKind::AlwaysSized |
+                StructKind::MaybeUnsized => {
+                    optimizing.sort_by_key(|&x| {
+                        // Place ZSTs first to avoid "interesting offsets",
+                        // especially with only one or two non-ZST fields.
+                        let f = &fields[x as usize];
+                        (!f.is_zst(), cmp::Reverse(field_align(f)))
+                    });
+                }
+                StructKind::Prefixed(..) => {
+                    optimizing.sort_by_key(|&x| field_align(&fields[x as usize]));
                 }
             }
+        }
 
-            // inverse_memory_index holds field indices by increasing memory offset.
-            // That is, if field 5 has offset 0, the first element of inverse_memory_index is 5.
-            // We now write field offsets to the corresponding offset slot;
-            // field 5 with offset 0 puts 0 in offsets[5].
-            // At the bottom of this function, we use inverse_memory_index to produce memory_index.
+        // inverse_memory_index holds field indices by increasing memory offset.
+        // That is, if field 5 has offset 0, the first element of inverse_memory_index is 5.
+        // We now write field offsets to the corresponding offset slot;
+        // field 5 with offset 0 puts 0 in offsets[5].
+        // At the bottom of this function, we use inverse_memory_index to produce memory_index.
 
-            let mut offset = Size::ZERO;
+        let mut offset = Size::ZERO;
 
-            if let StructKind::Prefixed(prefix_size, prefix_align) = kind {
-                let prefix_align = if packed {
-                    prefix_align.min(pack)
-                } else {
-                    prefix_align
-                };
-                align = align.max(AbiAndPrefAlign::new(prefix_align));
-                offset = prefix_size.align_to(prefix_align);
+        if let StructKind::Prefixed(prefix_size, prefix_align) = kind {
+            let prefix_align = if packed {
+                prefix_align.min(pack)
+            } else {
+                prefix_align
+            };
+            align = align.max(AbiAndPrefAlign::new(prefix_align));
+            offset = prefix_size.align_to(prefix_align);
+        }
+
+        for &i in &inverse_memory_index {
+            let field = fields[i as usize];
+            if !sized {
+                bug!("univariant: field #{} of `{}` comes after unsized field",
+                     offsets.len(), ty);
             }
 
-            for &i in &inverse_memory_index {
-                let field = fields[i as usize];
-                if !sized {
-                    bug!("univariant: field #{} of `{}` comes after unsized field",
-                         offsets.len(), ty);
-                }
+            if field.is_unsized() {
+                sized = false;
+            }
 
-                if field.is_unsized() {
-                    sized = false;
-                }
+            // Invariant: offset < dl.obj_size_bound() <= 1<<61
+            let field_align = if packed {
+                field.align.min(AbiAndPrefAlign::new(pack))
+            } else {
+                field.align
+            };
+            offset = offset.align_to(field_align.abi);
+            align = align.max(field_align);
 
-                // Invariant: offset < dl.obj_size_bound() <= 1<<61
-                let field_align = if packed {
-                    field.align.min(AbiAndPrefAlign::new(pack))
-                } else {
-                    field.align
-                };
-                offset = offset.align_to(field_align.abi);
-                align = align.max(field_align);
+            debug!("univariant offset: {:?} field: {:#?}", offset, field);
+            offsets[i as usize] = offset;
 
-                debug!("univariant offset: {:?} field: {:#?}", offset, field);
-                offsets[i as usize] = offset;
+            offset = offset.checked_add(field.size, dl)
+                .ok_or(LayoutError::SizeOverflow(ty))?;
+        }
 
-                offset = offset.checked_add(field.size, dl)
-                    .ok_or(LayoutError::SizeOverflow(ty))?;
-            }
+        if repr.align > 0 {
+            let repr_align = repr.align as u64;
+            align = align.max(AbiAndPrefAlign::new(Align::from_bytes(repr_align).unwrap()));
+            debug!("univariant repr_align: {:?}", repr_align);
+        }
 
-            if repr.align > 0 {
-                let repr_align = repr.align as u64;
-                align = align.max(AbiAndPrefAlign::new(Align::from_bytes(repr_align).unwrap()));
-                debug!("univariant repr_align: {:?}", repr_align);
-            }
+        debug!("univariant min_size: {:?}", offset);
+        let min_size = offset;
 
-            debug!("univariant min_size: {:?}", offset);
-            let min_size = offset;
+        // As stated above, inverse_memory_index holds field indices by increasing offset.
+        // This makes it an already-sorted view of the offsets vec.
+        // To invert it, consider:
+        // If field 5 has offset 0, offsets[0] is 5, and memory_index[5] should be 0.
+        // Field 5 would be the first element, so memory_index is i:
+        // Note: if we didn't optimize, it's already right.
 
-            // As stated above, inverse_memory_index holds field indices by increasing offset.
-            // This makes it an already-sorted view of the offsets vec.
-            // To invert it, consider:
-            // If field 5 has offset 0, offsets[0] is 5, and memory_index[5] should be 0.
-            // Field 5 would be the first element, so memory_index is i:
-            // Note: if we didn't optimize, it's already right.
+        let mut memory_index;
+        if optimize {
+            memory_index = vec![0; inverse_memory_index.len()];
 
-            let mut memory_index;
-            if optimize {
-                memory_index = vec![0; inverse_memory_index.len()];
+            for i in 0..inverse_memory_index.len() {
+                memory_index[inverse_memory_index[i] as usize]  = i as u32;
+            }
+        } else {
+            memory_index = inverse_memory_index;
+        }
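
Putting the pieces above together on a made-up example: for a struct with source fields `(u8, u64, u16)` and no ZSTs, the sort places the `u64` first, then the `u16`, then the `u8`, so

    inverse_memory_index = [1, 2, 0]    // field indices by increasing memory offset
    offsets              = [10, 0, 8]   // indexed by source order: u8 at 10, u64 at 0, u16 at 8
    memory_index         = [2, 0, 1]    // each source field's rank in memory
    min_size = 11, size = align_to(11, align = 8) = 16
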
 
-                for i in 0..inverse_memory_index.len() {
-                    memory_index[inverse_memory_index[i] as usize]  = i as u32;
-                }
-            } else {
-                memory_index = inverse_memory_index;
-            }
-
-            let size = min_size.align_to(align.abi);
-            let mut abi = Abi::Aggregate { sized };
-
-            // Unpack newtype ABIs and find scalar pairs.
-            if sized && size.bytes() > 0 {
-                // All other fields must be ZSTs, and we need them to all start at 0.
-                let mut zst_offsets =
-                    offsets.iter().enumerate().filter(|&(i, _)| fields[i].is_zst());
-                if zst_offsets.all(|(_, o)| o.bytes() == 0) {
-                    let mut non_zst_fields =
-                        fields.iter().enumerate().filter(|&(_, f)| !f.is_zst());
-
-                    match (non_zst_fields.next(), non_zst_fields.next(), non_zst_fields.next()) {
-                        // We have exactly one non-ZST field.
-                        (Some((i, field)), None, None) => {
-                            // Field fills the struct and it has a scalar or scalar pair ABI.
-                            if offsets[i].bytes() == 0 &&
-                               align.abi == field.align.abi &&
-                               size == field.size {
-                                match field.abi {
-                                    // For plain scalars, or vectors of them, we can't unpack
-                                    // newtypes for `#[repr(C)]`, as that affects C ABIs.
-                                    Abi::Scalar(_) | Abi::Vector { .. } if optimize => {
-                                        abi = field.abi.clone();
-                                    }
-                                    // But scalar pairs are Rust-specific and get
-                                    // treated as aggregates by C ABIs anyway.
-                                    Abi::ScalarPair(..) => {
-                                        abi = field.abi.clone();
-                                    }
-                                    _ => {}
+        let size = min_size.align_to(align.abi);
+        let mut abi = Abi::Aggregate { sized };
+
+        // Unpack newtype ABIs and find scalar pairs.
+        if sized && size.bytes() > 0 {
+            // All other fields must be ZSTs, and we need them to all start at 0.
+            let mut zst_offsets =
+                offsets.iter().enumerate().filter(|&(i, _)| fields[i].is_zst());
+            if zst_offsets.all(|(_, o)| o.bytes() == 0) {
+                let mut non_zst_fields =
+                    fields.iter().enumerate().filter(|&(_, f)| !f.is_zst());
+
+                match (non_zst_fields.next(), non_zst_fields.next(), non_zst_fields.next()) {
+                    // We have exactly one non-ZST field.
+                    (Some((i, field)), None, None) => {
+                        // Field fills the struct and it has a scalar or scalar pair ABI.
+                        if offsets[i].bytes() == 0 &&
+                           align.abi == field.align.abi &&
+                           size == field.size {
+                            match field.abi {
+                                // For plain scalars, or vectors of them, we can't unpack
+                                // newtypes for `#[repr(C)]`, as that affects C ABIs.
+                                Abi::Scalar(_) | Abi::Vector { .. } if optimize => {
+                                    abi = field.abi.clone();
                                 }
+                                // But scalar pairs are Rust-specific and get
+                                // treated as aggregates by C ABIs anyway.
+                                Abi::ScalarPair(..) => {
+                                    abi = field.abi.clone();
+                                }
+                                _ => {}
                             }
                         }
+                    }
 
-                        // Two non-ZST fields, and they're both scalars.
-                        (Some((i, &TyLayout {
-                            details: &LayoutDetails { abi: Abi::Scalar(ref a), .. }, ..
-                        })), Some((j, &TyLayout {
-                            details: &LayoutDetails { abi: Abi::Scalar(ref b), .. }, ..
-                        })), None) => {
-                            // Order by the memory placement, not source order.
-                            let ((i, a), (j, b)) = if offsets[i] < offsets[j] {
-                                ((i, a), (j, b))
-                            } else {
-                                ((j, b), (i, a))
-                            };
-                            let pair = scalar_pair(a.clone(), b.clone());
-                            let pair_offsets = match pair.fields {
-                                FieldPlacement::Arbitrary {
-                                    ref offsets,
-                                    ref memory_index
-                                } => {
-                                    assert_eq!(memory_index, &[0, 1]);
-                                    offsets
-                                }
-                                _ => bug!()
-                            };
-                            if offsets[i] == pair_offsets[0] &&
-                               offsets[j] == pair_offsets[1] &&
-                               align == pair.align &&
-                               size == pair.size {
-                                // We can use `ScalarPair` only when it matches our
-                                // already computed layout (including `#[repr(C)]`).
-                                abi = pair.abi;
+                    // Two non-ZST fields, and they're both scalars.
+                    (Some((i, &TyLayout {
+                        details: &LayoutDetails { abi: Abi::Scalar(ref a), .. }, ..
+                    })), Some((j, &TyLayout {
+                        details: &LayoutDetails { abi: Abi::Scalar(ref b), .. }, ..
+                    })), None) => {
+                        // Order by the memory placement, not source order.
+                        let ((i, a), (j, b)) = if offsets[i] < offsets[j] {
+                            ((i, a), (j, b))
+                        } else {
+                            ((j, b), (i, a))
+                        };
+                        let pair = self.scalar_pair(a.clone(), b.clone());
+                        let pair_offsets = match pair.fields {
+                            FieldPlacement::Arbitrary {
+                                ref offsets,
+                                ref memory_index
+                            } => {
+                                assert_eq!(memory_index, &[0, 1]);
+                                offsets
                             }
+                            _ => bug!()
+                        };
+                        if offsets[i] == pair_offsets[0] &&
+                           offsets[j] == pair_offsets[1] &&
+                           align == pair.align &&
+                           size == pair.size {
+                            // We can use `ScalarPair` only when it matches our
+                            // already computed layout (including `#[repr(C)]`).
+                            abi = pair.abi;
                         }
-
-                        _ => {}
                     }
+
+                    _ => {}
                 }
             }
+        }
 
-            if sized && fields.iter().any(|f| f.abi.is_uninhabited()) {
-                abi = Abi::Uninhabited;
-            }
+        if sized && fields.iter().any(|f| f.abi.is_uninhabited()) {
+            abi = Abi::Uninhabited;
+        }
 
-            Ok(LayoutDetails {
-                variants: Variants::Single { index: VariantIdx::new(0) },
-                fields: FieldPlacement::Arbitrary {
-                    offsets,
-                    memory_index
-                },
-                abi,
-                align,
-                size
-            })
+        Ok(LayoutDetails {
+            variants: Variants::Single { index: VariantIdx::new(0) },
+            fields: FieldPlacement::Arbitrary {
+                offsets,
+                memory_index
+            },
+            abi,
+            align,
+            size
+        })
+    }
+
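
Two illustrative consequences of the newtype/scalar-pair unpacking above (example types with the default `repr(Rust)`):

    struct Millis(u64);        // a single non-ZST field filling the struct:
                               // the newtype is given Abi::Scalar, like a bare u64
    struct Point(u32, u32);    // exactly two scalar fields matching the computed
                               // pair offsets: the struct is given Abi::ScalarPair
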
+    fn layout_raw_uncached(&self, ty: Ty<'tcx>) -> Result<&'tcx LayoutDetails, LayoutError<'tcx>> {
+        let tcx = self.tcx;
+        let param_env = self.param_env;
+        let dl = self.data_layout();
+        let scalar_unit = |value: Primitive| {
+            let bits = value.size(dl).bits();
+            assert!(bits <= 128);
+            Scalar {
+                value,
+                valid_range: 0..=(!0 >> (128 - bits))
+            }
+        };
+        let scalar = |value: Primitive| {
+            tcx.intern_layout(LayoutDetails::scalar(self, scalar_unit(value)))
         };
+
         let univariant = |fields: &[TyLayout<'_>], repr: &ReprOptions, kind| {
-            Ok(tcx.intern_layout(univariant_uninterned(fields, repr, kind)?))
+            Ok(tcx.intern_layout(self.univariant_uninterned(ty, fields, repr, kind)?))
         };
         debug_assert!(!ty.has_infer_types());
 
@@ -537,7 +549,7 @@ enum StructKind {
                 };
 
                 // Effectively a (ptr, meta) tuple.
-                tcx.intern_layout(scalar_pair(data_ptr, metadata))
+                tcx.intern_layout(self.scalar_pair(data_ptr, metadata))
             }
 
             // Arrays and slices.
@@ -602,7 +614,7 @@ enum StructKind {
                 univariant(&[], &ReprOptions::default(), StructKind::AlwaysSized)?
             }
             ty::Dynamic(..) | ty::Foreign(..) => {
-                let mut unit = univariant_uninterned(&[], &ReprOptions::default(),
+                let mut unit = self.univariant_uninterned(ty, &[], &ReprOptions::default(),
                   StructKind::AlwaysSized)?;
                 match unit.abi {
                     Abi::Aggregate { ref mut sized } => *sized = false,
@@ -611,64 +623,7 @@ enum StructKind {
                 tcx.intern_layout(unit)
             }
 
-            ty::Generator(def_id, ref substs, _) => {
-                // FIXME(tmandry): For fields that are repeated in multiple
-                // variants in the GeneratorLayout, we need code to ensure that
-                // the offset of these fields never change. Right now this is
-                // not an issue since every variant has every field, but once we
-                // optimize this we have to be more careful.
-
-                let discr_index = substs.prefix_tys(def_id, tcx).count();
-                let prefix_tys = substs.prefix_tys(def_id, tcx)
-                    .chain(iter::once(substs.discr_ty(tcx)));
-                let prefix = univariant_uninterned(
-                    &prefix_tys.map(|ty| self.layout_of(ty)).collect::<Result<Vec<_>, _>>()?,
-                    &ReprOptions::default(),
-                    StructKind::AlwaysSized)?;
-
-                let mut size = prefix.size;
-                let mut align = prefix.align;
-                let variants_tys = substs.state_tys(def_id, tcx);
-                let variants = variants_tys.enumerate().map(|(i, variant_tys)| {
-                    let mut variant = univariant_uninterned(
-                        &variant_tys.map(|ty| self.layout_of(ty)).collect::<Result<Vec<_>, _>>()?,
-                        &ReprOptions::default(),
-                        StructKind::Prefixed(prefix.size, prefix.align.abi))?;
-
-                    variant.variants = Variants::Single { index: VariantIdx::new(i) };
-
-                    size = size.max(variant.size);
-                    align = align.max(variant.align);
-
-                    Ok(variant)
-                }).collect::<Result<IndexVec<VariantIdx, _>, _>>()?;
-
-                let abi = if prefix.abi.is_uninhabited() ||
-                             variants.iter().all(|v| v.abi.is_uninhabited()) {
-                    Abi::Uninhabited
-                } else {
-                    Abi::Aggregate { sized: true }
-                };
-                let discr = match &self.layout_of(substs.discr_ty(tcx))?.abi {
-                    Abi::Scalar(s) => s.clone(),
-                    _ => bug!(),
-                };
-
-                let layout = tcx.intern_layout(LayoutDetails {
-                    variants: Variants::Multiple {
-                        discr,
-                        discr_kind: DiscriminantKind::Tag,
-                        discr_index,
-                        variants,
-                    },
-                    fields: prefix.fields,
-                    abi,
-                    size,
-                    align,
-                });
-                debug!("generator layout ({:?}): {:#?}", ty, layout);
-                layout
-            }
+            ty::Generator(def_id, substs, _) => self.generator_layout(ty, def_id, &substs)?,
 
             ty::Closure(def_id, ref substs) => {
                 let tys = substs.upvar_tys(def_id, tcx);
@@ -853,7 +808,7 @@ enum StructKind {
                         else { StructKind::AlwaysSized }
                     };
 
-                    let mut st = univariant_uninterned(&variants[v], &def.repr, kind)?;
+                    let mut st = self.univariant_uninterned(ty, &variants[v], &def.repr, kind)?;
                     st.variants = Variants::Single { index: v };
                     let (start, end) = self.tcx.layout_scalar_valid_range(def.did);
                     match st.abi {
@@ -932,7 +887,7 @@ enum StructKind {
 
                             let mut align = dl.aggregate_align;
                             let st = variants.iter_enumerated().map(|(j, v)| {
-                                let mut st = univariant_uninterned(v,
+                                let mut st = self.univariant_uninterned(ty, v,
                                     &def.repr, StructKind::AlwaysSized)?;
                                 st.variants = Variants::Single { index: j };
 
@@ -1040,7 +995,7 @@ enum StructKind {
 
                 // Create the set of structs that represent each variant.
                 let mut layout_variants = variants.iter_enumerated().map(|(i, field_layouts)| {
-                    let mut st = univariant_uninterned(&field_layouts,
+                    let mut st = self.univariant_uninterned(ty, &field_layouts,
                         &def.repr, StructKind::Prefixed(min_ity.size(), prefix_align))?;
                     st.variants = Variants::Single { index: i };
                     // Find the first field we can't move later
@@ -1172,7 +1127,7 @@ enum StructKind {
                         }
                     }
                     if let Some((prim, offset)) = common_prim {
-                        let pair = scalar_pair(tag.clone(), scalar_unit(prim));
+                        let pair = self.scalar_pair(tag.clone(), scalar_unit(prim));
                         let pair_offsets = match pair.fields {
                             FieldPlacement::Arbitrary {
                                 ref offsets,
@@ -1237,7 +1192,259 @@ enum StructKind {
             }
         })
     }
+}
 
+/// Overlap eligibility and variant assignment for each GeneratorSavedLocal.
+#[derive(Clone, Debug, PartialEq)]
+enum SavedLocalEligibility {
+    Unassigned,
+    Assigned(VariantIdx),
+    // FIXME: Use newtype_index so we aren't wasting bytes
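+    // `Ineligible(Some(idx))` means the local is stored in the generator
+    // prefix, at index `idx` among the promoted fields; `None` means that
+    // index has not been assigned yet.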
+    Ineligible(Option<u32>),
+}
+
+// When laying out generators, we divide our saved local fields into two
+// categories: overlap-eligible and overlap-ineligible.
+//
+// Those fields which are ineligible for overlap go in a "prefix" at the
+// beginning of the layout, and always have space reserved for them.
+//
+// Overlap-eligible fields are only assigned to one variant, so we lay
+// those fields out for each variant and put them right after the
+// prefix.
+//
+// Finally, in the layout details, we point to the fields from the
+// variants they are assigned to. It is possible for some fields to be
+// included in multiple variants. No field ever "moves around" in the
+// layout; its offset is always the same.
+//
+// Also included in the layout are the upvars and the discriminant.
+// These are included as fields on the "outer" layout; they are not part
+// of any variant.
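+//
+// As a (hypothetical) illustration, a generator with two suspension points
+// where local `a` is saved across both, while `b` and `c` are each saved
+// across only one, would be laid out roughly as:
+//
+//     prefix:    [ upvars | discriminant | a ]
+//     variant 0: [ ...prefix... | b ]
+//     variant 1: [ ...prefix... | c ]
+//
+// `a` is ineligible for overlap (it appears in more than one variant), so it
+// is promoted into the prefix; `b` and `c` stay overlap-eligible and may end
+// up sharing the same offset after the prefix.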
+impl<'a, 'tcx> LayoutCx<'tcx, TyCtxt<'a, 'tcx, 'tcx>> {
+    /// Compute the eligibility and assignment of each local.
+    fn generator_saved_local_eligibility(&self, info: &GeneratorLayout<'tcx>)
+    -> (BitSet<GeneratorSavedLocal>, IndexVec<GeneratorSavedLocal, SavedLocalEligibility>) {
+        use SavedLocalEligibility::*;
+
+        let mut assignments: IndexVec<GeneratorSavedLocal, SavedLocalEligibility> =
+            IndexVec::from_elem_n(Unassigned, info.field_tys.len());
+
+        // The saved locals not eligible for overlap. These will get
+        // "promoted" to the prefix of our generator.
+        let mut ineligible_locals = BitSet::new_empty(info.field_tys.len());
+
+        // Figure out which of our saved locals are fields in only
+        // one variant. The rest are deemed ineligible for overlap.
+        for (variant_index, fields) in info.variant_fields.iter_enumerated() {
+            for local in fields {
+                match assignments[*local] {
+                    Unassigned => {
+                        assignments[*local] = Assigned(variant_index);
+                    }
+                    Assigned(idx) => {
+                        // We've already seen this local at another suspension
+                        // point, so it is no longer a candidate.
+                        trace!("removing local {:?} in >1 variant ({:?}, {:?})",
+                               local, variant_index, idx);
+                        ineligible_locals.insert(*local);
+                        assignments[*local] = Ineligible(None);
+                    }
+                    Ineligible(_) => {},
+                }
+            }
+        }
+
+        // Next, check every pair of eligible locals to see if they
+        // conflict.
+        for local_a in info.storage_conflicts.rows() {
+            let conflicts_a = info.storage_conflicts.count(local_a);
+            if ineligible_locals.contains(local_a) {
+                continue;
+            }
+
+            for local_b in info.storage_conflicts.iter(local_a) {
+                // local_a and local_b are storage live at the same time, therefore they
+                // cannot overlap in the generator layout. The only way to guarantee
+                // this is if they are in the same variant, or one is ineligible
+                // (which means it is stored in every variant).
+                if ineligible_locals.contains(local_b) ||
+                    assignments[local_a] == assignments[local_b]
+                {
+                    continue;
+                }
+
+                // If they conflict, we will choose one to make ineligible.
+                // This is not always optimal; it's just a greedy heuristic that
+                // seems to produce good results most of the time.
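+                // For example (hypothetical locals), if `a` conflicts with
+                // both `b` and `c`, while `b` and `c` conflict only with `a`,
+                // then `a` (two conflicts) is made ineligible rather than
+                // both `b` and `c`.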
+                let conflicts_b = info.storage_conflicts.count(local_b);
+                let (remove, other) = if conflicts_a > conflicts_b {
+                    (local_a, local_b)
+                } else {
+                    (local_b, local_a)
+                };
+                ineligible_locals.insert(remove);
+                assignments[remove] = Ineligible(None);
+                trace!("removing local {:?} due to conflict with {:?}", remove, other);
+            }
+        }
+
+        // Write down the order of our locals that will be promoted to the prefix.
+        {
+            let mut idx = 0u32;
+            for local in ineligible_locals.iter() {
+                assignments[local] = Ineligible(Some(idx));
+                idx += 1;
+            }
+        }
+        debug!("generator saved local assignments: {:?}", assignments);
+
+        (ineligible_locals, assignments)
+    }
+
+    /// Compute the full generator layout.
+    fn generator_layout(
+        &self,
+        ty: Ty<'tcx>,
+        def_id: hir::def_id::DefId,
+        substs: &GeneratorSubsts<'tcx>,
+    ) -> Result<&'tcx LayoutDetails, LayoutError<'tcx>> {
+        use SavedLocalEligibility::*;
+        let tcx = self.tcx;
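+        // Rebuilds the source-order -> memory-order mapping from field offsets.
+        // For example (hypothetical values), offsets of [8, 0, 4] produce a
+        // memory_index of [2, 0, 1]: the field at offset 0 comes first in
+        // memory, then the field at offset 4, then the field at offset 8.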
+        let recompute_memory_index = |offsets: &[Size]| -> Vec<u32> {
+            debug!("recompute_memory_index({:?})", offsets);
+            let mut inverse_index = (0..offsets.len() as u32).collect::<Vec<_>>();
+            inverse_index.sort_unstable_by_key(|i| offsets[*i as usize]);
+
+            let mut index = vec![0; offsets.len()];
+            for i in 0..index.len() {
+                index[inverse_index[i] as usize] = i as u32;
+            }
+            debug!("recompute_memory_index() => {:?}", index);
+            index
+        };
+        let subst_field = |ty: Ty<'tcx>| { ty.subst(tcx, substs.substs) };
+
+        let info = tcx.generator_layout(def_id);
+        let (ineligible_locals, assignments) = self.generator_saved_local_eligibility(&info);
+
+        // Build a prefix layout, including "promoting" all ineligible
+        // locals as part of the prefix. We compute the layout of all of
+        // these fields at once to get optimal packing.
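+        // The prefix fields are laid out as: the upvars, then the discriminant
+        // (at `discr_index`), then the promoted (overlap-ineligible) locals.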
+        let discr_index = substs.prefix_tys(def_id, tcx).count();
+        let promoted_tys =
+            ineligible_locals.iter().map(|local| subst_field(info.field_tys[local]));
+        let prefix_tys = substs.prefix_tys(def_id, tcx)
+            .chain(iter::once(substs.discr_ty(tcx)))
+            .chain(promoted_tys);
+        let prefix = self.univariant_uninterned(
+            ty,
+            &prefix_tys.map(|ty| self.layout_of(ty)).collect::<Result<Vec<_>, _>>()?,
+            &ReprOptions::default(),
+            StructKind::AlwaysSized)?;
+        let (prefix_size, prefix_align) = (prefix.size, prefix.align);
+
+        // Split the prefix layout into the "outer" fields (upvars and
+        // discriminant) and the "promoted" fields. Promoted fields will
+        // get included in each variant that requested them in
+        // GeneratorLayout.
+        debug!("prefix = {:#?}", prefix);
+        let (outer_fields, promoted_offsets) = match prefix.fields {
+            FieldPlacement::Arbitrary { mut offsets, .. } => {
+                let offsets_b = offsets.split_off(discr_index + 1);
+                let offsets_a = offsets;
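+                // `offsets_a` now holds the "outer" (upvar and discriminant)
+                // offsets; `offsets_b` holds the offsets of the promoted locals.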
+
+                let memory_index = recompute_memory_index(&offsets_a);
+                let outer_fields = FieldPlacement::Arbitrary { offsets: offsets_a, memory_index };
+                (outer_fields, offsets_b)
+            }
+            _ => bug!(),
+        };
+
+        let mut size = prefix.size;
+        let mut align = prefix.align;
+        let variants = info.variant_fields.iter_enumerated().map(|(index, variant_fields)| {
+            // Only include overlap-eligible fields when we compute our variant layout.
+            let variant_only_tys = variant_fields
+                .iter()
+                .filter(|local| {
+                    match assignments[**local] {
+                        Unassigned => bug!(),
+                        Assigned(v) if v == index => true,
+                        Assigned(_) => bug!("assignment does not match variant"),
+                        Ineligible(_) => false,
+                    }
+                })
+                .map(|local| subst_field(info.field_tys[*local]));
+
+            let mut variant = self.univariant_uninterned(
+                ty,
+                &variant_only_tys
+                    .map(|ty| self.layout_of(ty))
+                    .collect::<Result<Vec<_>, _>>()?,
+                &ReprOptions::default(),
+                StructKind::Prefixed(prefix_size, prefix_align.abi))?;
+            variant.variants = Variants::Single { index };
+
+            let offsets = match variant.fields {
+                FieldPlacement::Arbitrary { offsets, .. } => offsets,
+                _ => bug!(),
+            };
+
+            // Now, stitch the promoted and variant-only fields back together in
+            // the order they are mentioned by our GeneratorLayout.
+            let mut next_variant_field = 0;
+            let mut combined_offsets = Vec::new();
+            for local in variant_fields.iter() {
+                match assignments[*local] {
+                    Unassigned => bug!(),
+                    Assigned(_) => {
+                        combined_offsets.push(offsets[next_variant_field]);
+                        next_variant_field += 1;
+                    }
+                    Ineligible(field_idx) => {
+                        let field_idx = field_idx.unwrap() as usize;
+                        combined_offsets.push(promoted_offsets[field_idx]);
+                    }
+                }
+            }
+            let memory_index = recompute_memory_index(&combined_offsets);
+            variant.fields = FieldPlacement::Arbitrary { offsets: combined_offsets, memory_index };
+
+            size = size.max(variant.size);
+            align = align.max(variant.align);
+            Ok(variant)
+        }).collect::<Result<IndexVec<VariantIdx, _>, _>>()?;
+
+        let abi = if prefix.abi.is_uninhabited() ||
+                     variants.iter().all(|v| v.abi.is_uninhabited()) {
+            Abi::Uninhabited
+        } else {
+            Abi::Aggregate { sized: true }
+        };
+        let discr = match &self.layout_of(substs.discr_ty(tcx))?.abi {
+            Abi::Scalar(s) => s.clone(),
+            _ => bug!(),
+        };
+
+        let layout = tcx.intern_layout(LayoutDetails {
+            variants: Variants::Multiple {
+                discr,
+                discr_kind: DiscriminantKind::Tag,
+                discr_index,
+                variants,
+            },
+            fields: outer_fields,
+            abi,
+            size,
+            align,
+        });
+        debug!("generator layout ({:?}): {:#?}", ty, layout);
+        Ok(layout)
+    }
+}
+
+impl<'a, 'tcx> LayoutCx<'tcx, TyCtxt<'a, 'tcx, 'tcx>> {
     /// This is invoked by the `layout_raw` query to record the final
     /// layout of each type.
     #[inline(always)]
@@ -1678,10 +1885,11 @@ pub fn layout_of(self, param_env_and_ty: ty::ParamEnvAnd<'tcx, Ty<'tcx>>)
     }
 }
 
-impl<'a, 'tcx, C> TyLayoutMethods<'tcx, C> for Ty<'tcx>
-    where C: LayoutOf<Ty = Ty<'tcx>> + HasTyCtxt<'tcx>,
-          C::TyLayout: MaybeResult<TyLayout<'tcx>>,
-          C: HasParamEnv<'tcx>
+impl<'tcx, C> TyLayoutMethods<'tcx, C> for Ty<'tcx>
+where
+    C: LayoutOf<Ty = Ty<'tcx>> + HasTyCtxt<'tcx>,
+    C::TyLayout: MaybeResult<TyLayout<'tcx>>,
+    C: HasParamEnv<'tcx>,
 {
     fn for_variant(this: TyLayout<'tcx>, cx: &C, variant_index: VariantIdx) -> TyLayout<'tcx> {
         let details = match this.variants {
index 69bf05c66f394eda1df54674e9db5c322226a746..d858d374244446bfb52becaa6cfd902f0ca2790a 100644 (file)
@@ -203,7 +203,7 @@ pub fn def_kind(&self) -> DefKind {
 
     /// Tests whether the associated item admits a non-trivial implementation
     /// for !
-    pub fn relevant_for_never<'tcx>(&self) -> bool {
+    pub fn relevant_for_never(&self) -> bool {
         match self.kind {
             AssocKind::Existential |
             AssocKind::Const |
@@ -1614,8 +1614,9 @@ pub struct Placeholder<T> {
     pub name: T,
 }
 
-impl<'a, 'gcx, T> HashStable<StableHashingContext<'a>> for Placeholder<T>
-    where T: HashStable<StableHashingContext<'a>>
+impl<'a, T> HashStable<StableHashingContext<'a>> for Placeholder<T>
+where
+    T: HashStable<StableHashingContext<'a>>,
 {
     fn hash_stable<W: StableHasherResult>(
         &self,
@@ -2303,7 +2304,7 @@ pub fn predicates(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>) -> &'tcx GenericPredicates
     /// Returns an iterator over all fields contained
     /// by this ADT.
     #[inline]
-    pub fn all_fields<'s>(&'s self) -> impl Iterator<Item = &'s FieldDef> {
+    pub fn all_fields<'s>(&'s self) -> impl Iterator<Item = &'s FieldDef> + Clone {
         self.variants.iter().flat_map(|v| v.fields.iter())
     }
 
index a7cb7bd3956f00457b30c76b636bc204fe40d574..d6bc4e537581be6cec5816cd1cac24dca242f4a5 100644 (file)
@@ -11,6 +11,8 @@
 
 pub mod obsolete;
 
+// FIXME(eddyb) false positive, the lifetime parameters are used with `P: Printer<...>`.
+#[allow(unused_lifetimes)]
 pub trait Print<'gcx, 'tcx, P> {
     type Output;
     type Error;
index 0eda92ea8b822b6816d6fdce975e0d42e6809830..286894c82b7e2bfe5bdb8d48521ebd2dac8d7021 100644 (file)
@@ -17,6 +17,8 @@
 
 // Query configuration and description traits.
 
+// FIXME(eddyb) false positive, the lifetime parameter is used for `Key`/`Value`.
+#[allow(unused_lifetimes)]
 pub trait QueryConfig<'tcx> {
     const NAME: QueryName;
     const CATEGORY: ProfileCategory;
index ddc4bd3f9f6c3c057cd918147a6eef1937be875e..0e5c906024c3fd8f781ed683d1fe6ed54e3eb24b 100644 (file)
@@ -886,7 +886,7 @@ pub fn dummy<'tcx>(value: T) -> Binder<T>
     }
 
     /// Wraps `value` in a binder, binding higher-ranked vars (if any).
-    pub fn bind<'tcx>(value: T) -> Binder<T> {
+    pub fn bind(value: T) -> Binder<T> {
         Binder(value)
     }
 
@@ -988,7 +988,7 @@ pub struct ProjectionTy<'tcx> {
     pub item_def_id: DefId,
 }
 
-impl<'a, 'tcx> ProjectionTy<'tcx> {
+impl<'tcx> ProjectionTy<'tcx> {
     /// Construct a `ProjectionTy` by searching the trait from `trait_ref` for the
     /// associated item named `item_name`.
     pub fn from_ref_and_name(
@@ -1665,6 +1665,7 @@ pub fn free_region_binding_scope(&self, tcx: TyCtxt<'_, '_, '_>) -> DefId {
 
 /// Type utilities
 impl<'a, 'gcx, 'tcx> TyS<'tcx> {
+    #[inline]
     pub fn is_unit(&self) -> bool {
         match self.sty {
             Tuple(ref tys) => tys.is_empty(),
@@ -1672,6 +1673,7 @@ pub fn is_unit(&self) -> bool {
         }
     }
 
+    #[inline]
     pub fn is_never(&self) -> bool {
         match self.sty {
             Never => true,
@@ -1726,6 +1728,7 @@ pub fn conservative_is_privately_uninhabited(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>)
         }
     }
 
+    #[inline]
     pub fn is_primitive(&self) -> bool {
         match self.sty {
             Bool | Char | Int(_) | Uint(_) | Float(_) => true,
@@ -1741,6 +1744,7 @@ pub fn is_ty_var(&self) -> bool {
         }
     }
 
+    #[inline]
     pub fn is_ty_infer(&self) -> bool {
         match self.sty {
             Infer(_) => true,
@@ -1748,6 +1752,7 @@ pub fn is_ty_infer(&self) -> bool {
         }
     }
 
+    #[inline]
     pub fn is_phantom_data(&self) -> bool {
         if let Adt(def, _) = self.sty {
             def.is_phantom_data()
@@ -1756,8 +1761,10 @@ pub fn is_phantom_data(&self) -> bool {
         }
     }
 
+    #[inline]
     pub fn is_bool(&self) -> bool { self.sty == Bool }
 
+    #[inline]
     pub fn is_param(&self, index: u32) -> bool {
         match self.sty {
             ty::Param(ref data) => data.index == index,
@@ -1765,6 +1772,7 @@ pub fn is_param(&self, index: u32) -> bool {
         }
     }
 
+    #[inline]
     pub fn is_self(&self) -> bool {
         match self.sty {
             Param(ref p) => p.is_self(),
@@ -1772,6 +1780,7 @@ pub fn is_self(&self) -> bool {
         }
     }
 
+    #[inline]
     pub fn is_slice(&self) -> bool {
         match self.sty {
             RawPtr(TypeAndMut { ty, .. }) | Ref(_, ty, _) => match ty.sty {
@@ -1814,6 +1823,7 @@ pub fn simd_size(&self, _cx: TyCtxt<'_, '_, '_>) -> usize {
         }
     }
 
+    #[inline]
     pub fn is_region_ptr(&self) -> bool {
         match self.sty {
             Ref(..) => true,
@@ -1821,6 +1831,7 @@ pub fn is_region_ptr(&self) -> bool {
         }
     }
 
+    #[inline]
     pub fn is_mutable_pointer(&self) -> bool {
         match self.sty {
             RawPtr(TypeAndMut { mutbl: hir::Mutability::MutMutable, .. }) |
@@ -1829,6 +1840,7 @@ pub fn is_mutable_pointer(&self) -> bool {
         }
     }
 
+    #[inline]
     pub fn is_unsafe_ptr(&self) -> bool {
         match self.sty {
             RawPtr(_) => return true,
@@ -1837,6 +1849,7 @@ pub fn is_unsafe_ptr(&self) -> bool {
     }
 
     /// Returns `true` if this type is an `Arc<T>`.
+    #[inline]
     pub fn is_arc(&self) -> bool {
         match self.sty {
             Adt(def, _) => def.is_arc(),
@@ -1845,6 +1858,7 @@ pub fn is_arc(&self) -> bool {
     }
 
     /// Returns `true` if this type is an `Rc<T>`.
+    #[inline]
     pub fn is_rc(&self) -> bool {
         match self.sty {
             Adt(def, _) => def.is_rc(),
@@ -1852,6 +1866,7 @@ pub fn is_rc(&self) -> bool {
         }
     }
 
+    #[inline]
     pub fn is_box(&self) -> bool {
         match self.sty {
             Adt(def, _) => def.is_box(),
@@ -1870,6 +1885,7 @@ pub fn boxed_ty(&self) -> Ty<'tcx> {
     /// A scalar type is one that denotes an atomic datum, with no sub-components.
     /// (A RawPtr is scalar because it represents a non-managed pointer, so its
     /// contents are abstract to rustc.)
+    #[inline]
     pub fn is_scalar(&self) -> bool {
         match self.sty {
             Bool | Char | Int(_) | Float(_) | Uint(_) |
@@ -1880,6 +1896,7 @@ pub fn is_scalar(&self) -> bool {
     }
 
     /// Returns `true` if this type is a floating point type.
+    #[inline]
     pub fn is_floating_point(&self) -> bool {
         match self.sty {
             Float(_) |
@@ -1888,6 +1905,7 @@ pub fn is_floating_point(&self) -> bool {
         }
     }
 
+    #[inline]
     pub fn is_trait(&self) -> bool {
         match self.sty {
             Dynamic(..) => true,
@@ -1895,6 +1913,7 @@ pub fn is_trait(&self) -> bool {
         }
     }
 
+    #[inline]
     pub fn is_enum(&self) -> bool {
         match self.sty {
             Adt(adt_def, _) => {
@@ -1904,6 +1923,7 @@ pub fn is_enum(&self) -> bool {
         }
     }
 
+    #[inline]
     pub fn is_closure(&self) -> bool {
         match self.sty {
             Closure(..) => true,
@@ -1911,6 +1931,7 @@ pub fn is_closure(&self) -> bool {
         }
     }
 
+    #[inline]
     pub fn is_generator(&self) -> bool {
         match self.sty {
             Generator(..) => true,
@@ -1926,6 +1947,7 @@ pub fn is_integral(&self) -> bool {
         }
     }
 
+    #[inline]
     pub fn is_fresh_ty(&self) -> bool {
         match self.sty {
             Infer(FreshTy(_)) => true,
@@ -1933,6 +1955,7 @@ pub fn is_fresh_ty(&self) -> bool {
         }
     }
 
+    #[inline]
     pub fn is_fresh(&self) -> bool {
         match self.sty {
             Infer(FreshTy(_)) => true,
@@ -1942,6 +1965,7 @@ pub fn is_fresh(&self) -> bool {
         }
     }
 
+    #[inline]
     pub fn is_char(&self) -> bool {
         match self.sty {
             Char => true,
@@ -1950,17 +1974,11 @@ pub fn is_char(&self) -> bool {
     }
 
     #[inline]
-    pub fn is_fp(&self) -> bool {
-        match self.sty {
-            Infer(FloatVar(_)) | Float(_) => true,
-            _ => false
-        }
-    }
-
     pub fn is_numeric(&self) -> bool {
-        self.is_integral() || self.is_fp()
+        self.is_integral() || self.is_floating_point()
     }
 
+    #[inline]
     pub fn is_signed(&self) -> bool {
         match self.sty {
             Int(_) => true,
@@ -1968,6 +1986,7 @@ pub fn is_signed(&self) -> bool {
         }
     }
 
+    #[inline]
     pub fn is_pointer_sized(&self) -> bool {
         match self.sty {
             Int(ast::IntTy::Isize) | Uint(ast::UintTy::Usize) => true,
@@ -1975,6 +1994,7 @@ pub fn is_pointer_sized(&self) -> bool {
         }
     }
 
+    #[inline]
     pub fn is_machine(&self) -> bool {
         match self.sty {
             Int(..) | Uint(..) | Float(..) => true,
@@ -1982,6 +2002,7 @@ pub fn is_machine(&self) -> bool {
         }
     }
 
+    #[inline]
     pub fn has_concrete_skeleton(&self) -> bool {
         match self.sty {
             Param(_) | Infer(_) | Error => false,
@@ -2028,6 +2049,7 @@ pub fn fn_sig(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>) -> PolyFnSig<'tcx> {
         }
     }
 
+    #[inline]
     pub fn is_fn(&self) -> bool {
         match self.sty {
             FnDef(..) | FnPtr(_) => true,
@@ -2035,6 +2057,15 @@ pub fn is_fn(&self) -> bool {
         }
     }
 
+    #[inline]
+    pub fn is_fn_ptr(&self) -> bool {
+        match self.sty {
+            FnPtr(_) => true,
+            _ => false,
+        }
+    }
+
+    #[inline]
     pub fn is_impl_trait(&self) -> bool {
         match self.sty {
             Opaque(..) => true,
index 09d576b23c0f53b31d4a314dfcdee168e57d89db..dee9a7c3f4a78b648e51533492892ab3223a6fe0 100644 (file)
@@ -3,6 +3,8 @@
 /// Basically a workaround; see [this comment] for details.
 ///
 /// [this comment]: https://github.com/rust-lang/rust/issues/34511#issuecomment-373423999
+// FIXME(eddyb) false positive, the lifetime parameter is "phantom" but needed.
+#[allow(unused_lifetimes)]
 pub trait Captures<'a> { }
 
 impl<'a, T: ?Sized> Captures<'a> for T { }
index 3aef5a76a3a05c9c652b7f825069fe6ce4d7d1ab..e7a70895a3023c884c91fd95a9bb16ef2d4065aa 100644 (file)
@@ -3,6 +3,7 @@
 
 #![deny(rust_2018_idioms)]
 #![deny(internal)]
+#![deny(unused_lifetimes)]
 
 pub mod expand;
 
index 7dca47485bb928d8f531083ef6b7d4affa8a3214..669fb9103aaaa9813f42a6db52a5166eb5cefea9 100644 (file)
@@ -1487,7 +1487,7 @@ fn initial_value(&self) -> bool {
     }
 }
 
-impl<'tcx> fmt::Debug for InteriorKind {
+impl fmt::Debug for InteriorKind {
     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
         match *self {
             InteriorField(mc::FieldIndex(_, info)) => write!(f, "{}", info),
index 14bc77f380accdb9654dabdc5bc8325c66e3c9b5..5d8b0cd14dd2775e1403d51412ce702eb8681754 100644 (file)
@@ -3,6 +3,7 @@
 #![allow(non_camel_case_types)]
 #![deny(rust_2018_idioms)]
 #![deny(internal)]
+#![deny(unused_lifetimes)]
 
 #![feature(nll)]
 
index 100a896ea0c7d703a6312dbc6636cdad98707fcb..81acc16b7abec6a05097d2cb7bfd1dea95d0f720 100644 (file)
@@ -112,7 +112,7 @@ fn codegen_inline_asm(
     }
 }
 
-impl AsmMethods<'tcx> for CodegenCx<'ll, 'tcx> {
+impl AsmMethods for CodegenCx<'ll, 'tcx> {
     fn codegen_global_asm(&self, ga: &hir::GlobalAsm) {
         let asm = CString::new(ga.asm.as_str().as_bytes()).unwrap();
         unsafe {
index 102e9e38612e065e227f73d36588be9a2249fcf7..982156321aa151edc6087a74c37b1e20cde71e58 100644 (file)
@@ -1074,8 +1074,8 @@ fn do_not_inline(&mut self, llret: &'ll Value) {
     }
 }
 
-impl StaticBuilderMethods<'tcx> for Builder<'a, 'll, 'tcx> {
-fn get_static(&mut self, def_id: DefId) -> &'ll Value {
+impl StaticBuilderMethods for Builder<'a, 'll, 'tcx> {
+    fn get_static(&mut self, def_id: DefId) -> &'ll Value {
         // Forward to the `get_static` method of `CodegenCx`
         self.cx().get_static(def_id)
     }
index ead9bad656db5605d2138cdcc8ae2ba97bc1bcca..2bc46334555ec88eaa0ce628b94611c28a0d69e1 100644 (file)
@@ -645,6 +645,11 @@ macro_rules! vector_types {
         ifn!("llvm.fabs.v4f64", fn(t_v4f64) -> t_v4f64);
         ifn!("llvm.fabs.v8f64", fn(t_v8f64) -> t_v8f64);
 
+        ifn!("llvm.minnum.f32", fn(t_f32, t_f32) -> t_f32);
+        ifn!("llvm.minnum.f64", fn(t_f64, t_f64) -> t_f64);
+        ifn!("llvm.maxnum.f32", fn(t_f32, t_f32) -> t_f32);
+        ifn!("llvm.maxnum.f64", fn(t_f64, t_f64) -> t_f64);
+
         ifn!("llvm.floor.f32", fn(t_f32) -> t_f32);
         ifn!("llvm.floor.v2f32", fn(t_v2f32) -> t_v2f32);
         ifn!("llvm.floor.v4f32", fn(t_v4f32) -> t_v4f32);
index f32dc4312654036c42182689bf465417c3dc9dda..875f1d0940a3a1a740aafab96f8470abf1d5f425 100644 (file)
@@ -55,6 +55,10 @@ fn get_simple_intrinsic(cx: &CodegenCx<'ll, '_>, name: &str) -> Option<&'ll Valu
         "fmaf64" => "llvm.fma.f64",
         "fabsf32" => "llvm.fabs.f32",
         "fabsf64" => "llvm.fabs.f64",
+        "minnumf32" => "llvm.minnum.f32",
+        "minnumf64" => "llvm.minnum.f64",
+        "maxnumf32" => "llvm.maxnum.f32",
+        "maxnumf64" => "llvm.maxnum.f64",
         "copysignf32" => "llvm.copysign.f32",
         "copysignf64" => "llvm.copysign.f64",
         "floorf32" => "llvm.floor.f32",
index 0fdd326a1882e667fb681bd4147a36c2d09b01d1..8391f02fc69ec07bcf3a62d7f779ad40504de64f 100644 (file)
@@ -10,7 +10,6 @@
 #![feature(box_syntax)]
 #![feature(const_cstr_unchecked)]
 #![feature(crate_visibility_modifier)]
-#![feature(custom_attribute)]
 #![feature(extern_types)]
 #![feature(in_band_lifetimes)]
 #![allow(unused_attributes)]
index 4d7af7a643b66003ed37b917660a826e807d97c6..71393e224e42db145197adacbfd5b92a099c25bb 100644 (file)
@@ -3,7 +3,6 @@
 #![feature(box_patterns)]
 #![feature(box_syntax)]
 #![feature(core_intrinsics)]
-#![feature(custom_attribute)]
 #![feature(libc)]
 #![feature(rustc_diagnostic_macros)]
 #![feature(stmt_expr_attributes)]
@@ -15,6 +14,7 @@
 #![allow(dead_code)]
 #![deny(rust_2018_idioms)]
 #![deny(internal)]
+#![deny(unused_lifetimes)]
 #![allow(explicit_outlives_requirements)]
 
 #![recursion_limit="256"]
index bb6a13ed15a52cdeb3f3485ac6c69b56825456e4..e2fd1c2bc38e7f8d669cb54c1efbf362d45ab154 100644 (file)
@@ -272,7 +272,7 @@ pub fn funclet_bb(self, for_bb: mir::BasicBlock) -> Option<mir::BasicBlock> {
     }
 }
 
-pub fn cleanup_kinds<'a, 'tcx>(mir: &mir::Body<'tcx>) -> IndexVec<mir::BasicBlock, CleanupKind> {
+pub fn cleanup_kinds<'tcx>(mir: &mir::Body<'tcx>) -> IndexVec<mir::BasicBlock, CleanupKind> {
     fn discover_masters<'tcx>(result: &mut IndexVec<mir::BasicBlock, CleanupKind>,
                               mir: &mir::Body<'tcx>) {
         for (bb, data) in mir.basic_blocks().iter_enumerated() {
index 7e5ee25d8ef78dd7ae3385ca2a9f390a78f03a7a..e4b82d8496669526c55e26b9ad2c9487feb3608f 100644 (file)
@@ -223,10 +223,7 @@ fn codegen_switchint_terminator<'b>(
         }
     }
 
-    fn codegen_return_terminator<'b>(
-        &mut self,
-        mut bx: Bx,
-    ) {
+    fn codegen_return_terminator(&mut self, mut bx: Bx) {
         if self.fn_ty.c_variadic {
             match self.va_list_ref {
                 Some(va_list) => {
index 7a2bd18df4ec9a67dac551df2f42a48d100242c9..87e15ba6aac5e414e7d519a3f7e7d905fb9599de 100644 (file)
@@ -429,7 +429,7 @@ pub fn codegen_rvalue_operand(
             mir::Rvalue::UnaryOp(op, ref operand) => {
                 let operand = self.codegen_operand(&mut bx, operand);
                 let lloperand = operand.immediate();
-                let is_float = operand.layout.ty.is_fp();
+                let is_float = operand.layout.ty.is_floating_point();
                 let llval = match op {
                     mir::UnOp::Not => bx.not(lloperand),
                     mir::UnOp::Neg => if is_float {
@@ -536,7 +536,7 @@ pub fn codegen_scalar_binop(
         rhs: Bx::Value,
         input_ty: Ty<'tcx>,
     ) -> Bx::Value {
-        let is_float = input_ty.is_fp();
+        let is_float = input_ty.is_floating_point();
         let is_signed = input_ty.is_signed();
         let is_unit = input_ty.is_unit();
         match op {
index a95bf3af5bf277c10c06e49dbcb7f1a7898821ac..fd3c868bbc50727251cf12ab6f1569ef1d8f7614 100644 (file)
@@ -12,6 +12,6 @@ fn codegen_inline_asm(
     ) -> bool;
 }
 
-pub trait AsmMethods<'tcx> {
+pub trait AsmMethods {
     fn codegen_global_asm(&self, ga: &GlobalAsm);
 }
index a11d1ba9231cc52dbd72949656b8db240cbe7ca9..2af57bcb064b7b21db8f2932a064666544b463af 100644 (file)
@@ -29,7 +29,7 @@ pub trait BuilderMethods<'a, 'tcx: 'a>:
     + AbiBuilderMethods<'tcx>
     + IntrinsicCallMethods<'tcx>
     + AsmBuilderMethods<'tcx>
-    + StaticBuilderMethods<'tcx>
+    + StaticBuilderMethods
     + HasParamEnv<'tcx>
     + HasTargetSpec
 
index 2bb619e79f5e06ee0b0781c46463e0433d94ca41..efe4a25570104c4f8881c97119111bcc2a77d1e2 100644 (file)
@@ -58,7 +58,7 @@ pub trait CodegenMethods<'tcx>:
     + StaticMethods
     + DebugInfoMethods<'tcx>
     + DeclareMethods<'tcx>
-    + AsmMethods<'tcx>
+    + AsmMethods
     + PreDefineMethods<'tcx>
     + HasParamEnv<'tcx>
     + HasTyCtxt<'tcx>
@@ -74,7 +74,7 @@ impl<'tcx, T> CodegenMethods<'tcx> for T where
         + StaticMethods
         + DebugInfoMethods<'tcx>
         + DeclareMethods<'tcx>
-        + AsmMethods<'tcx>
+        + AsmMethods
         + PreDefineMethods<'tcx>
         + HasParamEnv<'tcx>
         + HasTyCtxt<'tcx>
index d8992c159337dc3d3490b58258328555c7bdc3f5..6983311d797dcd4ee48a4655e74a414aec850716 100644 (file)
@@ -8,7 +8,7 @@ pub trait StaticMethods: BackendTypes {
     fn codegen_static(&self, def_id: DefId, is_mutable: bool);
 }
 
-pub trait StaticBuilderMethods<'tcx>: BackendTypes {
+pub trait StaticBuilderMethods: BackendTypes {
     fn get_static(&mut self, def_id: DefId) -> Self::Value;
     fn static_panic_msg(
         &mut self,
index efc18d401c082cd6ee9e184050b8290d50f05191..aa38d8d51848dec7df750321ec190032694b8b53 100644 (file)
@@ -101,7 +101,7 @@ pub trait LayoutTypeMethods<'tcx>: Backend<'tcx> {
     fn is_backend_immediate(&self, layout: TyLayout<'tcx>) -> bool;
     fn is_backend_scalar_pair(&self, layout: TyLayout<'tcx>) -> bool;
     fn backend_field_index(&self, layout: TyLayout<'tcx>, index: usize) -> u64;
-    fn scalar_pair_element_backend_type<'a>(
+    fn scalar_pair_element_backend_type(
         &self,
         layout: TyLayout<'tcx>,
         index: usize,
index ea1d08354528b3dc1f9bef55a053bb2c3c884cc9..3726e4f47b96b10422216096a5a9fcba4a809cf6 100644 (file)
@@ -8,7 +8,6 @@
 #![feature(box_patterns)]
 #![feature(box_syntax)]
 #![feature(core_intrinsics)]
-#![feature(custom_attribute)]
 #![feature(never_type)]
 #![feature(nll)]
 #![allow(unused_attributes)]
@@ -19,6 +18,7 @@
 
 #![deny(rust_2018_idioms)]
 #![deny(internal)]
+#![deny(unused_lifetimes)]
 
 #[macro_use]
 extern crate rustc;
index ec7ff3bd813975d835d00e20295bc634c6ec2cb3..7a11ca070071b6ec26509361700aad60fa41f566 100644 (file)
@@ -636,7 +636,7 @@ pub fn contains(&self, elem: T) -> bool {
 ///
 /// All operations that involve a row and/or column index will panic if the
 /// index exceeds the relevant bound.
-#[derive(Clone, Debug)]
+#[derive(Clone, Debug, Eq, PartialEq, RustcDecodable, RustcEncodable)]
 pub struct BitMatrix<R: Idx, C: Idx> {
     num_rows: usize,
     num_columns: usize,
@@ -658,6 +658,23 @@ pub fn new(num_rows: usize, num_columns: usize) -> BitMatrix<R, C> {
         }
     }
 
+    /// Creates a new matrix, with `row` used as the value for every row.
+    pub fn from_row_n(row: &BitSet<C>, num_rows: usize) -> BitMatrix<R, C> {
+        let num_columns = row.domain_size();
+        let words_per_row = num_words(num_columns);
+        assert_eq!(words_per_row, row.words().len());
+        BitMatrix {
+            num_rows,
+            num_columns,
+            words: iter::repeat(row.words()).take(num_rows).flatten().cloned().collect(),
+            marker: PhantomData,
+        }
+    }
+
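+    /// Returns an iterator over the indices of all rows in the matrix.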
+    pub fn rows(&self) -> impl Iterator<Item = R> {
+        (0..self.num_rows).map(R::new)
+    }
+
     /// The range of bits for a given row.
     fn range(&self, row: R) -> (usize, usize) {
         let words_per_row = num_words(self.num_columns);
@@ -737,6 +754,49 @@ pub fn union_rows(&mut self, read: R, write: R) -> bool {
         changed
     }
 
+    /// Adds the bits from `with` to the bits from row `write`, and
+    /// returns `true` if anything changed.
+    pub fn union_row_with(&mut self, with: &BitSet<C>, write: R) -> bool {
+        assert!(write.index() < self.num_rows);
+        assert_eq!(with.domain_size(), self.num_columns);
+        let (write_start, write_end) = self.range(write);
+        let mut changed = false;
+        for (read_index, write_index) in (0..with.words().len()).zip(write_start..write_end) {
+            let word = self.words[write_index];
+            let new_word = word | with.words()[read_index];
+            self.words[write_index] = new_word;
+            changed |= word != new_word;
+        }
+        changed
+    }
+
+    /// Sets every cell in `row` to true.
+    pub fn insert_all_into_row(&mut self, row: R) {
+        assert!(row.index() < self.num_rows);
+        let (start, end) = self.range(row);
+        let words = &mut self.words[..];
+        for index in start..end {
+            words[index] = !0;
+        }
+        self.clear_excess_bits(row);
+    }
+
+    /// Clears excess bits in the final word of the row.
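+    /// Keeping these bits zeroed ensures that `count` and row iteration never
+    /// observe columns past `num_columns`.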
+    fn clear_excess_bits(&mut self, row: R) {
+        let num_bits_in_final_word = self.num_columns % WORD_BITS;
+        if num_bits_in_final_word > 0 {
+            let mask = (1 << num_bits_in_final_word) - 1;
+            let (_, end) = self.range(row);
+            let final_word_idx = end - 1;
+            self.words[final_word_idx] &= mask;
+        }
+    }
+
+    /// Gets a slice of the underlying words.
+    pub fn words(&self) -> &[Word] {
+        &self.words
+    }
+
     /// Iterates through all the columns set to true in a given row of
     /// the matrix.
     pub fn iter<'a>(&'a self, row: R) -> BitIter<'a, C> {
@@ -748,6 +808,12 @@ pub fn iter<'a>(&'a self, row: R) -> BitIter<'a, C> {
             marker: PhantomData,
         }
     }
+
+    /// Returns the number of elements in `row`.
+    pub fn count(&self, row: R) -> usize {
+        let (start, end) = self.range(row);
+        self.words[start..end].iter().map(|e| e.count_ones() as usize).sum()
+    }
 }
 
 /// A fixed-column-size, variable-row-size 2D bit matrix with a moderately
@@ -1057,6 +1123,7 @@ fn matrix_iter() {
     matrix.insert(2, 99);
     matrix.insert(4, 0);
     matrix.union_rows(3, 5);
+    matrix.insert_all_into_row(6);
 
     let expected = [99];
     let mut iter = expected.iter();
@@ -1068,6 +1135,7 @@ fn matrix_iter() {
 
     let expected = [22, 75];
     let mut iter = expected.iter();
+    assert_eq!(matrix.count(3), expected.len());
     for i in matrix.iter(3) {
         let j = *iter.next().unwrap();
         assert_eq!(i, j);
@@ -1076,6 +1144,7 @@ fn matrix_iter() {
 
     let expected = [0];
     let mut iter = expected.iter();
+    assert_eq!(matrix.count(4), expected.len());
     for i in matrix.iter(4) {
         let j = *iter.next().unwrap();
         assert_eq!(i, j);
@@ -1084,11 +1153,24 @@ fn matrix_iter() {
 
     let expected = [22, 75];
     let mut iter = expected.iter();
+    assert_eq!(matrix.count(5), expected.len());
     for i in matrix.iter(5) {
         let j = *iter.next().unwrap();
         assert_eq!(i, j);
     }
     assert!(iter.next().is_none());
+
+    assert_eq!(matrix.count(6), 100);
+    let mut count = 0;
+    for (idx, i) in matrix.iter(6).enumerate() {
+        assert_eq!(idx, i);
+        count += 1;
+    }
+    assert_eq!(count, 100);
+
+    if let Some(i) = matrix.iter(7).next() {
+        panic!("expected no elements in row, but contains element {:?}", i);
+    }
 }
 
 #[test]
index 270d9520627646e23d919014943b01677ebd477b..0c81c27a96ee5e018cb7b52ea0f7d101c693d449 100644 (file)
@@ -503,6 +503,16 @@ fn hash_stable<W: StableHasherResult>(&self,
     }
 }
 
+impl<R: indexed_vec::Idx, C: indexed_vec::Idx, CTX> HashStable<CTX>
+for bit_set::BitMatrix<R, C>
+{
+    fn hash_stable<W: StableHasherResult>(&self,
+                                          ctx: &mut CTX,
+                                          hasher: &mut StableHasher<W>) {
+        self.words().hash_stable(ctx, hasher);
+    }
+}
+
 impl_stable_hash_via_hash!(::std::path::Path);
 impl_stable_hash_via_hash!(::std::path::PathBuf);
 
index 02f8eee67b15118ffed36a9b0b19f79054a286f7..5fb6ed31b0693df2d469866ff3c39b8c4054affe 100644 (file)
@@ -18,6 +18,7 @@
 
 #![deny(rust_2018_idioms)]
 #![deny(internal)]
+#![deny(unused_lifetimes)]
 
 pub extern crate getopts;
 #[cfg(unix)]
index 31f697a724a0357385bfb81487c7684c32f84041..fc74e43ff5739e770ec056b8edd45a84acdd18ae 100644 (file)
@@ -348,7 +348,7 @@ pub fn new(handler: &'a Handler, level: Level, message: &str) -> DiagnosticBuild
 
     /// Convenience function for internal use, clients should use one of the
     /// struct_* methods on Handler.
-    pub fn new_with_code(handler: &'a Handler,
+    crate fn new_with_code(handler: &'a Handler,
                          level: Level,
                          code: Option<DiagnosticId>,
                          message: &str)
index 3bf477efe35f91f05eb08e4bedb02ba8cb596ed9..fca8298409a61f5add93a7a787e6c4f6ba04cda2 100644 (file)
@@ -1,3 +1,12 @@
+//! The current rustc diagnostics emitter.
+//!
+//! An `Emitter` takes care of generating the output from a `DiagnosticBuilder` struct.
+//!
+//! There are various `Emitter` implementations that generate different output formats such as
+//! JSON and human readable output.
+//!
+//! The output types are defined in `librustc::session::config::ErrorOutputType`.
+
 use Destination::*;
 
 use syntax_pos::{SourceFile, Span, MultiSpan};
index 1831d58e736071561604afc393e92d29954331b9..05cee6dff230935626a535485da859ad0cf1011f 100644 (file)
@@ -1,12 +1,17 @@
+//! Diagnostics creation and emission for `rustc`.
+//!
+//! This module contains the code for creating and emitting diagnostics.
+
 #![doc(html_root_url = "https://doc.rust-lang.org/nightly/")]
 
-#![feature(custom_attribute)]
+#![feature(crate_visibility_modifier)]
 #![allow(unused_attributes)]
 #![cfg_attr(unix, feature(libc))]
 #![feature(nll)]
 #![feature(optin_builtin_traits)]
 #![deny(rust_2018_idioms)]
 #![deny(internal)]
+#![deny(unused_lifetimes)]
 
 #[allow(unused_extern_crates)]
 extern crate serialize as rustc_serialize; // used by deriving
index c964f4cb19b8b01fbda9db15c2ee5461494a096a..ea89d2ca76d9f1aa12d84225a1623bdb187075b1 100644 (file)
@@ -258,7 +258,7 @@ fn dump_graph(tcx: TyCtxt<'_, '_, '_>) {
 pub struct GraphvizDepGraph<'q>(FxHashSet<&'q DepNode>,
                                 Vec<(&'q DepNode, &'q DepNode)>);
 
-impl<'a, 'tcx, 'q> dot::GraphWalk<'a> for GraphvizDepGraph<'q> {
+impl<'a, 'q> dot::GraphWalk<'a> for GraphvizDepGraph<'q> {
     type Node = &'q DepNode;
     type Edge = (&'q DepNode, &'q DepNode);
     fn nodes(&self) -> dot::Nodes<'_, &'q DepNode> {
@@ -276,7 +276,7 @@ fn target(&self, edge: &(&'q DepNode, &'q DepNode)) -> &'q DepNode {
     }
 }
 
-impl<'a, 'tcx, 'q> dot::Labeller<'a> for GraphvizDepGraph<'q> {
+impl<'a, 'q> dot::Labeller<'a> for GraphvizDepGraph<'q> {
     type Node = &'q DepNode;
     type Edge = (&'q DepNode, &'q DepNode);
     fn graph_id(&self) -> dot::Id<'_> {
index eb82885abee06e6dd2f851261be2d219e456f4dc..50780ba4e7bb338b9efe2f45227eaba67be91e9e 100644 (file)
@@ -9,6 +9,7 @@
 
 #![deny(rust_2018_idioms)]
 #![deny(internal)]
+#![deny(unused_lifetimes)]
 
 #[macro_use] extern crate rustc;
 #[allow(unused_extern_crates)]
index d1e2a1a4ad5063baa20cfc70632da6758b1800b3..7fc311d40c3d09fb2c3870655678d7b7855a434d 100644 (file)
@@ -8,6 +8,7 @@
 
 #![deny(rust_2018_idioms)]
 #![deny(internal)]
+#![deny(unused_lifetimes)]
 
 #![allow(unused_imports)]
 
index f4f7456a97ad65b746e8eb41296340265e210348..a86d3cc43948d50a4a6865ad9e009559206f4ac0 100644 (file)
@@ -121,9 +121,13 @@ pub fn create_session(
 }
 
 // Temporarily have stack size set to 32MB to deal with various crates with long method
-// chains or deep syntax trees.
+// chains or deep syntax trees, except on Haiku, where a smaller 16MB stack is used.
 // FIXME(oli-obk): get https://github.com/rust-lang/rust/pull/55617 over the finish line
-const STACK_SIZE: usize = 32 * 1024 * 1024; // 32MB
+#[cfg(not(target_os = "haiku"))]
+const STACK_SIZE: usize = 32 * 1024 * 1024;
+
+#[cfg(target_os = "haiku")]
+const STACK_SIZE: usize = 16 * 1024 * 1024;
 
 fn get_stack_size() -> Option<usize> {
     // FIXME: Hacks on hacks. If the env is trying to override the stack size
index 937085c8ad8e8e1e17ab6e75479cfbb207645353..f3b9408569397507ea1e15149d67c5569f8ed1db 100644 (file)
@@ -96,7 +96,7 @@ fn check_expr(&mut self, cx: &LateContext<'_, '_>, e: &hir::Expr) {
 declare_lint_pass!(BoxPointers => [BOX_POINTERS]);
 
 impl BoxPointers {
-    fn check_heap_type<'a, 'tcx>(&self, cx: &LateContext<'_, '_>, span: Span, ty: Ty<'_>) {
+    fn check_heap_type(&self, cx: &LateContext<'_, '_>, span: Span, ty: Ty<'_>) {
         for leaf_ty in ty.walk() {
             if leaf_ty.is_box() {
                 let m = format!("type uses owned (Box type) pointers: {}", ty);
@@ -1414,15 +1414,9 @@ impl KeywordIdents {
     fn check_tokens(&mut self, cx: &EarlyContext<'_>, tokens: TokenStream) {
         for tt in tokens.into_trees() {
             match tt {
-                TokenTree::Token(span, tok) => match tok.ident() {
-                    // only report non-raw idents
-                    Some((ident, false)) => {
-                        self.check_ident_token(cx, UnderMacro(true), ast::Ident {
-                            span: span.substitute_dummy(ident.span),
-                            ..ident
-                        });
-                    }
-                    _ => {},
+                // Only report non-raw idents.
+                TokenTree::Token(token) => if let Some((ident, false)) = token.ident() {
+                    self.check_ident_token(cx, UnderMacro(true), ident);
                 }
                 TokenTree::Delimited(_, _, tts) => {
                     self.check_tokens(cx, tts)
index ba72beecc1a1800204902670d19e725959355e24..87d46a08d9aa64817ebd5740fe2022c40cee3fd7 100644 (file)
@@ -21,6 +21,7 @@
 
 #![deny(rust_2018_idioms)]
 #![deny(internal)]
+#![deny(unused_lifetimes)]
 
 #[macro_use]
 extern crate rustc;
index ac18e131c4a3d7aafcf62cb5a1dd0e6b02d6546f..45cef61064d7870a612f1ac52a56b37f4522bb5b 100644 (file)
@@ -202,11 +202,7 @@ fn report_bin_hex_error(
 //  - `uX` => `uY`
 //
 // No suggestion for: `isize`, `usize`.
-fn get_type_suggestion<'a>(
-    t: Ty<'_>,
-    val: u128,
-    negative: bool,
-) -> Option<String> {
+fn get_type_suggestion(t: Ty<'_>, val: u128, negative: bool) -> Option<String> {
     use syntax::ast::IntTy::*;
     use syntax::ast::UintTy::*;
     macro_rules! find_fit {
@@ -531,8 +527,8 @@ fn ty_is_known_nonnull<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, ty: Ty<'tcx>) -> b
     match ty.sty {
         ty::FnPtr(_) => true,
         ty::Ref(..) => true,
-        ty::Adt(field_def, substs) if field_def.repr.transparent() && field_def.is_struct() => {
-            for field in &field_def.non_enum_variant().fields {
+        ty::Adt(field_def, substs) if field_def.repr.transparent() && !field_def.is_union() => {
+            for field in field_def.all_fields() {
                 let field_ty = tcx.normalize_erasing_regions(
                     ParamEnv::reveal_all(),
                     field.ty(tcx, substs),
@@ -627,8 +623,8 @@ fn check_type_for_ffi(&self,
                             return FfiUnsafe {
                                 ty: ty,
                                 reason: "this struct has unspecified layout",
-                                help: Some("consider adding a #[repr(C)] or #[repr(transparent)] \
-                                            attribute to this struct"),
+                                help: Some("consider adding a `#[repr(C)]` or \
+                                            `#[repr(transparent)]` attribute to this struct"),
                             };
                         }
 
@@ -668,11 +664,12 @@ fn check_type_for_ffi(&self,
                         if all_phantom { FfiPhantom(ty) } else { FfiSafe }
                     }
                     AdtKind::Union => {
-                        if !def.repr.c() {
+                        if !def.repr.c() && !def.repr.transparent() {
                             return FfiUnsafe {
                                 ty: ty,
                                 reason: "this union has unspecified layout",
-                                help: Some("consider adding a #[repr(C)] attribute to this union"),
+                                help: Some("consider adding a `#[repr(C)]` or \
+                                            `#[repr(transparent)]` attribute to this union"),
                             };
                         }
 
@@ -690,6 +687,11 @@ fn check_type_for_ffi(&self,
                                 ParamEnv::reveal_all(),
                                 field.ty(cx, substs),
                             );
+                            // repr(transparent) types are allowed to have arbitrary ZSTs, not just
+                            // PhantomData -- skip checking all ZST fields.
+                            if def.repr.transparent() && is_zst(cx, field.did, field_ty) {
+                                continue;
+                            }
                             let r = self.check_type_for_ffi(cache, field_ty);
                             match r {
                                 FfiSafe => {
@@ -712,14 +714,15 @@ fn check_type_for_ffi(&self,
 
                         // Check for a repr() attribute to specify the size of the
                         // discriminant.
-                        if !def.repr.c() && def.repr.int.is_none() {
+                        if !def.repr.c() && !def.repr.transparent() && def.repr.int.is_none() {
                             // Special-case types like `Option<extern fn()>`.
                             if !is_repr_nullable_ptr(cx, ty, def, substs) {
                                 return FfiUnsafe {
                                     ty: ty,
                                     reason: "enum has no representation hint",
-                                    help: Some("consider adding a #[repr(...)] attribute \
-                                                to this enum"),
+                                    help: Some("consider adding a `#[repr(C)]`, \
+                                                `#[repr(transparent)]`, or integer `#[repr(...)]` \
+                                                attribute to this enum"),
                                 };
                             }
                         }
@@ -727,11 +730,16 @@ fn check_type_for_ffi(&self,
                         // Check the contained variants.
                         for variant in &def.variants {
                             for field in &variant.fields {
-                                let arg = cx.normalize_erasing_regions(
+                                let field_ty = cx.normalize_erasing_regions(
                                     ParamEnv::reveal_all(),
                                     field.ty(cx, substs),
                                 );
-                                let r = self.check_type_for_ffi(cache, arg);
+                                // repr(transparent) types are allowed to have arbitrary ZSTs, not
+                                // just PhantomData -- skip checking all ZST fields.
+                                if def.repr.transparent() && is_zst(cx, field.did, field_ty) {
+                                    continue;
+                                }
+                                let r = self.check_type_for_ffi(cache, field_ty);
                                 match r {
                                     FfiSafe => {}
                                     FfiUnsafe { .. } => {
index 991bebc647d0f7ca48822c1af358f2010ba1c2e7..7ffba41e2569a4f32e3077e4622ffc9e17596fa2 100644 (file)
@@ -614,7 +614,7 @@ fn load_derive_macros(&mut self, root: &CrateRoot<'_>, dylib: Option<PathBuf>, s
             match decl {
                 ProcMacro::CustomDerive { trait_name, attributes, client } => {
                     let attrs = attributes.iter().cloned().map(Symbol::intern).collect::<Vec<_>>();
-                    (trait_name, SyntaxExtension::ProcMacroDerive(
+                    (trait_name, SyntaxExtension::Derive(
                         Box::new(ProcMacroDerive {
                             client,
                             attrs: attrs.clone(),
@@ -624,13 +624,13 @@ fn load_derive_macros(&mut self, root: &CrateRoot<'_>, dylib: Option<PathBuf>, s
                     ))
                 }
                 ProcMacro::Attr { name, client } => {
-                    (name, SyntaxExtension::AttrProcMacro(
+                    (name, SyntaxExtension::Attr(
                         Box::new(AttrProcMacro { client }),
                         root.edition,
                     ))
                 }
                 ProcMacro::Bang { name, client } => {
-                    (name, SyntaxExtension::ProcMacro {
+                    (name, SyntaxExtension::Bang {
                         expander: Box::new(BangProcMacro { client }),
                         allow_internal_unstable: None,
                         edition: root.edition,
index db452bb4ac7bcbad232534fa8141fff760a1df96..35faa1df82b845f3a4ef0cf18b1a86cae2d174b3 100644 (file)
@@ -430,7 +430,7 @@ pub fn load_macro_untracked(&self, id: DefId, sess: &Session) -> LoadedMacro {
             use syntax_ext::proc_macro_impl::BangProcMacro;
 
             let client = proc_macro::bridge::client::Client::expand1(proc_macro::quote);
-            let ext = SyntaxExtension::ProcMacro {
+            let ext = SyntaxExtension::Bang {
                 expander: Box::new(BangProcMacro { client }),
                 allow_internal_unstable: Some(vec![sym::proc_macro_def_site].into()),
                 edition: data.root.edition,
index 10ff606b013a75e582296f04908e2bc5bd6c9644..e3e327d0a5bd0920d487f4298f9e2771c372f210 100644 (file)
@@ -1167,6 +1167,7 @@ pub fn get_macro(&self, id: DefIndex) -> MacroDef {
         let constness = match self.entry(id).kind {
             EntryKind::Method(data) => data.decode(self).fn_data.constness,
             EntryKind::Fn(data) => data.decode(self).constness,
+            EntryKind::Variant(..) | EntryKind::Struct(..) => hir::Constness::Const,
             _ => hir::Constness::NotConst,
         };
         constness == hir::Constness::Const
index e0b92341575cc98252776ec0609a6f175739f485..e49ca8acf6702e579ba10a0e60744eb2798b59b9 100644 (file)
@@ -16,6 +16,7 @@
 
 #![deny(rust_2018_idioms)]
 #![deny(internal)]
+#![deny(unused_lifetimes)]
 
 extern crate libc;
 #[allow(unused_extern_crates)]
index d9d6fe0affb32ee82ed8b29b6a829f2f0e0bf381..0fc72b83a1668cf05c31f989b0ac8c09ecc8bb32 100644 (file)
@@ -90,7 +90,7 @@ fn fmt(&self, w: &mut fmt::Formatter<'_>) -> fmt::Result {
 impl LocalsStateAtExit {
     fn build(
         locals_are_invalidated_at_exit: bool,
-        mir: &Body<'tcx>,
+        body: &Body<'tcx>,
         move_data: &MoveData<'tcx>
     ) -> Self {
         struct HasStorageDead(BitSet<Local>);
@@ -106,8 +106,8 @@ fn visit_local(&mut self, local: &Local, ctx: PlaceContext, _: Location) {
         if locals_are_invalidated_at_exit {
             LocalsStateAtExit::AllAreInvalidated
         } else {
-            let mut has_storage_dead = HasStorageDead(BitSet::new_empty(mir.local_decls.len()));
-            has_storage_dead.visit_body(mir);
+            let mut has_storage_dead = HasStorageDead(BitSet::new_empty(body.local_decls.len()));
+            has_storage_dead.visit_body(body);
             let mut has_storage_dead_or_moved = has_storage_dead.0;
             for move_out in &move_data.moves {
                 if let Some(index) = move_data.base_local(move_out.path) {
@@ -123,24 +123,24 @@ fn visit_local(&mut self, local: &Local, ctx: PlaceContext, _: Location) {
 impl<'tcx> BorrowSet<'tcx> {
     pub fn build(
         tcx: TyCtxt<'_, '_, 'tcx>,
-        mir: &Body<'tcx>,
+        body: &Body<'tcx>,
         locals_are_invalidated_at_exit: bool,
         move_data: &MoveData<'tcx>
     ) -> Self {
 
         let mut visitor = GatherBorrows {
             tcx,
-            mir,
+            body,
             idx_vec: IndexVec::new(),
             location_map: Default::default(),
             activation_map: Default::default(),
             local_map: Default::default(),
             pending_activations: Default::default(),
             locals_state_at_exit:
-                LocalsStateAtExit::build(locals_are_invalidated_at_exit, mir, move_data),
+                LocalsStateAtExit::build(locals_are_invalidated_at_exit, body, move_data),
         };
 
-        for (block, block_data) in traversal::preorder(mir) {
+        for (block, block_data) in traversal::preorder(body) {
             visitor.visit_basic_block_data(block, block_data);
         }
 
@@ -163,7 +163,7 @@ pub fn build(
 
 struct GatherBorrows<'a, 'gcx: 'tcx, 'tcx: 'a> {
     tcx: TyCtxt<'a, 'gcx, 'tcx>,
-    mir: &'a Body<'tcx>,
+    body: &'a Body<'tcx>,
     idx_vec: IndexVec<BorrowIndex, BorrowData<'tcx>>,
     location_map: FxHashMap<Location, BorrowIndex>,
     activation_map: FxHashMap<Location, Vec<BorrowIndex>>,
@@ -191,7 +191,7 @@ fn visit_assign(
     ) {
         if let mir::Rvalue::Ref(region, kind, ref borrowed_place) = *rvalue {
             if borrowed_place.ignore_borrow(
-                self.tcx, self.mir, &self.locals_state_at_exit) {
+                self.tcx, self.body, &self.locals_state_at_exit) {
                 return;
             }
 
@@ -246,7 +246,7 @@ fn visit_local(
             if let TwoPhaseActivation::ActivatedAt(other_location) =
                     borrow_data.activation_location {
                 span_bug!(
-                    self.mir.source_info(location).span,
+                    self.body.source_info(location).span,
                     "found two uses for 2-phase borrow temporary {:?}: \
                      {:?} and {:?}",
                     temp,
@@ -320,7 +320,7 @@ fn insert_as_pending_if_two_phase(
             temp
         } else {
             span_bug!(
-                self.mir.source_info(start_location).span,
+                self.body.source_info(start_location).span,
                 "expected 2-phase borrow to assign to a local, not `{:?}`",
                 assigned_place,
             );
@@ -339,7 +339,7 @@ fn insert_as_pending_if_two_phase(
         // assignment.
         let old_value = self.pending_activations.insert(temp, borrow_index);
         if let Some(old_index) = old_value {
-            span_bug!(self.mir.source_info(start_location).span,
+            span_bug!(self.body.source_info(start_location).span,
                       "found already pending activation for temp: {:?} \
                        at borrow_index: {:?} with associated data {:?}",
                       temp, old_index, self.idx_vec[old_index]);
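
The borrow-check hunks in the remainder of this commit follow the same mechanical pattern seen above: every field, parameter, and local of type `&Body<'tcx>` that was named `mir` is renamed to `body` to match the type name. A reduced sketch with a stand-in `Body` type (not rustc's real definitions):

    struct Body;                      // stand-in for rustc::mir::Body<'tcx>

    struct GatherBorrows<'a> {
        body: &'a Body,               // was: mir: &'a Body<'tcx>
    }
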
index f8e73e838df49263c383da04ad68ddd20adae38a..359e3be860df0adf9b38484cb08abde0d2f4eae3 100644 (file)
@@ -202,7 +202,7 @@ pub(super) fn report_use_of_moved_or_uninitialized(
                 );
             }
 
-            let ty = used_place.ty(self.mir, self.infcx.tcx).ty;
+            let ty = used_place.ty(self.body, self.infcx.tcx).ty;
             let needs_note = match ty.sty {
                 ty::Closure(id, _) => {
                     let tables = self.infcx.tcx.typeck_tables_of(id);
@@ -217,7 +217,7 @@ pub(super) fn report_use_of_moved_or_uninitialized(
                 let mpi = self.move_data.moves[move_out_indices[0]].path;
                 let place = &self.move_data.move_paths[mpi].place;
 
-                let ty = place.ty(self.mir, self.infcx.tcx).ty;
+                let ty = place.ty(self.body, self.infcx.tcx).ty;
                 let opt_name = self.describe_place_with_options(place, IncludingDowncast(true));
                 let note_msg = match opt_name {
                     Some(ref name) => format!("`{}`", name),
@@ -235,7 +235,7 @@ pub(super) fn report_use_of_moved_or_uninitialized(
                     }
                 }
                 let span = if let Place::Base(PlaceBase::Local(local)) = place {
-                    let decl = &self.mir.local_decls[*local];
+                    let decl = &self.body.local_decls[*local];
                     Some(decl.source_info.span)
                 } else {
                     None
@@ -305,7 +305,7 @@ pub(super) fn report_move_out_while_borrowed(
             location,
             borrow,
             None,
-        ).add_explanation_to_diagnostic(self.infcx.tcx, self.mir, &mut err, "", Some(borrow_span));
+        ).add_explanation_to_diagnostic(self.infcx.tcx, self.body, &mut err, "", Some(borrow_span));
         err.buffer(&mut self.errors_buffer);
     }
 
@@ -342,7 +342,7 @@ pub(super) fn report_use_while_mutably_borrowed(
         });
 
         self.explain_why_borrow_contains_point(location, borrow, None)
-            .add_explanation_to_diagnostic(self.infcx.tcx, self.mir, &mut err, "", None);
+            .add_explanation_to_diagnostic(self.infcx.tcx, self.body, &mut err, "", None);
         err
     }
 
@@ -552,7 +552,7 @@ pub(super) fn report_conflicting_borrow(
 
         explanation.add_explanation_to_diagnostic(
             self.infcx.tcx,
-            self.mir,
+            self.body,
             &mut err,
             first_borrow_desc,
             None,
@@ -592,7 +592,7 @@ pub(super) fn describe_place_for_conflicting_borrow(
         // Define a small closure that we can use to check if the type of a place
         // is a union.
         let union_ty = |place: &Place<'tcx>| -> Option<Ty<'tcx>> {
-            let ty = place.ty(self.mir, self.infcx.tcx).ty;
+            let ty = place.ty(self.body, self.infcx.tcx).ty;
             ty.ty_adt_def().filter(|adt| adt.is_union()).map(|_| ty)
         };
         let describe_place = |place| self.describe_place(place).unwrap_or_else(|| "_".to_owned());
@@ -687,7 +687,7 @@ pub(super) fn report_borrowed_value_does_not_live_long_enough(
         let borrow_span = borrow_spans.var_or_use();
 
         let proper_span = match *root_place {
-            Place::Base(PlaceBase::Local(local)) => self.mir.local_decls[local].source_info.span,
+            Place::Base(PlaceBase::Local(local)) => self.body.local_decls[local].source_info.span,
             _ => drop_span,
         };
 
@@ -876,7 +876,7 @@ fn report_local_value_does_not_live_long_enough(
             } else {
                 explanation.add_explanation_to_diagnostic(
                     self.infcx.tcx,
-                    self.mir,
+                    self.body,
                     &mut err,
                     "",
                     None,
@@ -900,7 +900,8 @@ fn report_local_value_does_not_live_long_enough(
                 format!("value captured here{}", within),
             );
 
-            explanation.add_explanation_to_diagnostic(self.infcx.tcx, self.mir, &mut err, "", None);
+            explanation.add_explanation_to_diagnostic(
+                self.infcx.tcx, self.body, &mut err, "", None);
         }
 
         err
@@ -960,7 +961,7 @@ fn report_borrow_conflicts_with_destructor(
             _ => {}
         }
 
-        explanation.add_explanation_to_diagnostic(self.infcx.tcx, self.mir, &mut err, "", None);
+        explanation.add_explanation_to_diagnostic(self.infcx.tcx, self.body, &mut err, "", None);
 
         err.buffer(&mut self.errors_buffer);
     }
@@ -1043,7 +1044,7 @@ fn report_temporary_value_does_not_live_long_enough(
             }
             _ => {}
         }
-        explanation.add_explanation_to_diagnostic(self.infcx.tcx, self.mir, &mut err, "", None);
+        explanation.add_explanation_to_diagnostic(self.infcx.tcx, self.body, &mut err, "", None);
 
         let within = if borrow_spans.for_generator() {
             " by generator"
@@ -1076,7 +1077,7 @@ fn try_report_cannot_return_reference_to_local(
         };
 
         // FIXME use a better heuristic than Spans
-        let reference_desc = if return_span == self.mir.source_info(borrow.reserve_location).span {
+        let reference_desc = if return_span == self.body.source_info(borrow.reserve_location).span {
             "reference to"
         } else {
             "value referencing"
@@ -1085,7 +1086,7 @@ fn try_report_cannot_return_reference_to_local(
         let (place_desc, note) = if let Some(place_desc) = opt_place_desc {
             let local_kind = match borrow.borrowed_place {
                 Place::Base(PlaceBase::Local(local)) => {
-                    match self.mir.local_kind(local) {
+                    match self.body.local_kind(local) {
                         LocalKind::ReturnPointer
                         | LocalKind::Temp => bug!("temporary or return pointer with a name"),
                         LocalKind::Var => "local variable ",
@@ -1114,7 +1115,7 @@ fn try_report_cannot_return_reference_to_local(
             } else {
                 bug!("try_report_cannot_return_reference_to_local: not a local")
             };
-            match self.mir.local_kind(local) {
+            match self.body.local_kind(local) {
                 LocalKind::ReturnPointer | LocalKind::Temp => {
                     (
                         "temporary value".to_string(),
@@ -1251,10 +1252,10 @@ fn report_escaping_data(
     }
 
     fn get_moved_indexes(&mut self, location: Location, mpi: MovePathIndex) -> Vec<MoveSite> {
-        let mir = self.mir;
+        let body = self.body;
 
         let mut stack = Vec::new();
-        stack.extend(mir.predecessor_locations(location).map(|predecessor| {
+        stack.extend(body.predecessor_locations(location).map(|predecessor| {
             let is_back_edge = location.dominates(predecessor, &self.dominators);
             (predecessor, is_back_edge)
         }));
@@ -1273,7 +1274,7 @@ fn get_moved_indexes(&mut self, location: Location, mpi: MovePathIndex) -> Vec<M
             }
 
             // check for moves
-            let stmt_kind = mir[location.block]
+            let stmt_kind = body[location.block]
                 .statements
                 .get(location.statement_index)
                 .map(|s| &s.kind);
@@ -1328,7 +1329,7 @@ fn get_moved_indexes(&mut self, location: Location, mpi: MovePathIndex) -> Vec<M
             let mut any_match = false;
             drop_flag_effects::for_location_inits(
                 self.infcx.tcx,
-                self.mir,
+                self.body,
                 self.move_data,
                 location,
                 |m| {
@@ -1341,7 +1342,7 @@ fn get_moved_indexes(&mut self, location: Location, mpi: MovePathIndex) -> Vec<M
                 continue 'dfs;
             }
 
-            stack.extend(mir.predecessor_locations(location).map(|predecessor| {
+            stack.extend(body.predecessor_locations(location).map(|predecessor| {
                 let back_edge = location.dominates(predecessor, &self.dominators);
                 (predecessor, is_back_edge || back_edge)
             }));
@@ -1391,7 +1392,7 @@ pub(super) fn report_illegal_mutation_of_borrowed(
         );
 
         self.explain_why_borrow_contains_point(location, loan, None)
-            .add_explanation_to_diagnostic(self.infcx.tcx, self.mir, &mut err, "", None);
+            .add_explanation_to_diagnostic(self.infcx.tcx, self.body, &mut err, "", None);
 
         err.buffer(&mut self.errors_buffer);
     }
@@ -1410,10 +1411,10 @@ pub(super) fn report_illegal_reassignment(
         err_place: &Place<'tcx>,
     ) {
         let (from_arg, local_decl) = if let Place::Base(PlaceBase::Local(local)) = *err_place {
-            if let LocalKind::Arg = self.mir.local_kind(local) {
-                (true, Some(&self.mir.local_decls[local]))
+            if let LocalKind::Arg = self.body.local_kind(local) {
+                (true, Some(&self.body.local_decls[local]))
             } else {
-                (false, Some(&self.mir.local_decls[local]))
+                (false, Some(&self.body.local_decls[local]))
             }
         } else {
             (false, None)
@@ -1493,7 +1494,7 @@ fn classify_drop_access_kind(&self, place: &Place<'tcx>) -> StorageDeadOrDrop<'t
                         StorageDeadOrDrop::LocalStorageDead
                         | StorageDeadOrDrop::BoxedStorageDead => {
                             assert!(
-                                base.ty(self.mir, tcx).ty.is_box(),
+                                base.ty(self.body, tcx).ty.is_box(),
                                 "Drop of value behind a reference or raw pointer"
                             );
                             StorageDeadOrDrop::BoxedStorageDead
@@ -1501,7 +1502,7 @@ fn classify_drop_access_kind(&self, place: &Place<'tcx>) -> StorageDeadOrDrop<'t
                         StorageDeadOrDrop::Destructor(_) => base_access,
                     },
                     ProjectionElem::Field(..) | ProjectionElem::Downcast(..) => {
-                        let base_ty = base.ty(self.mir, tcx).ty;
+                        let base_ty = base.ty(self.body, tcx).ty;
                         match base_ty.sty {
                             ty::Adt(def, _) if def.has_dtor(tcx) => {
                                 // Report the outermost adt with a destructor
@@ -1560,7 +1561,7 @@ fn annotate_argument_and_return_for_borrow(
             location
         );
         if let Some(&Statement { kind: StatementKind::Assign(ref reservation, _), ..})
-             = &self.mir[location.block].statements.get(location.statement_index)
+             = &self.body[location.block].statements.get(location.statement_index)
         {
             debug!(
                 "annotate_argument_and_return_for_borrow: reservation={:?}",
@@ -1569,14 +1570,14 @@ fn annotate_argument_and_return_for_borrow(
             // Check that the initial assignment of the reserve location is into a temporary.
             let mut target = *match reservation {
                 Place::Base(PlaceBase::Local(local))
-                    if self.mir.local_kind(*local) == LocalKind::Temp => local,
+                    if self.body.local_kind(*local) == LocalKind::Temp => local,
                 _ => return None,
             };
 
             // Next, look through the rest of the block, checking if we are assigning the
             // `target` (that is, the place that contains our borrow) to anything.
             let mut annotated_closure = None;
-            for stmt in &self.mir[location.block].statements[location.statement_index + 1..] {
+            for stmt in &self.body[location.block].statements[location.statement_index + 1..] {
                 debug!(
                     "annotate_argument_and_return_for_borrow: target={:?} stmt={:?}",
                     target, stmt
@@ -1701,7 +1702,7 @@ fn annotate_argument_and_return_for_borrow(
             }
 
             // Check the terminator if we didn't find anything in the statements.
-            let terminator = &self.mir[location.block].terminator();
+            let terminator = &self.body[location.block].terminator();
             debug!(
                 "annotate_argument_and_return_for_borrow: target={:?} terminator={:?}",
                 target, terminator
index 3f977ea198b6dd00c11bdbb91917f7019a2112cf..a124c78ab67961ff96c30323b36705c64f7308d7 100644 (file)
@@ -38,7 +38,7 @@ pub(super) fn add_moved_or_invoked_closure_note(
     ) {
         debug!("add_moved_or_invoked_closure_note: location={:?} place={:?}", location, place);
         let mut target = place.local_or_deref_local();
-        for stmt in &self.mir[location.block].statements[location.statement_index..] {
+        for stmt in &self.body[location.block].statements[location.statement_index..] {
             debug!("add_moved_or_invoked_closure_note: stmt={:?} target={:?}", stmt, target);
             if let StatementKind::Assign(into, box Rvalue::Use(from)) = &stmt.kind {
                 debug!("add_fnonce_closure_note: into={:?} from={:?}", into, from);
@@ -52,7 +52,7 @@ pub(super) fn add_moved_or_invoked_closure_note(
         }
 
         // Check if we are attempting to call a closure after it has been invoked.
-        let terminator = self.mir[location.block].terminator();
+        let terminator = self.body[location.block].terminator();
         debug!("add_moved_or_invoked_closure_note: terminator={:?}", terminator);
         if let TerminatorKind::Call {
             func: Operand::Constant(box Constant {
@@ -75,7 +75,7 @@ pub(super) fn add_moved_or_invoked_closure_note(
                 };
 
                 debug!("add_moved_or_invoked_closure_note: closure={:?}", closure);
-                if let ty::Closure(did, _) = self.mir.local_decls[closure].ty.sty {
+                if let ty::Closure(did, _) = self.body.local_decls[closure].ty.sty {
                     let hir_id = self.infcx.tcx.hir().as_local_hir_id(did).unwrap();
 
                     if let Some((span, name)) = self.infcx.tcx.typeck_tables_of(did)
@@ -98,7 +98,7 @@ pub(super) fn add_moved_or_invoked_closure_note(
 
         // Check if we are just moving a closure after it has been invoked.
         if let Some(target) = target {
-            if let ty::Closure(did, _) = self.mir.local_decls[target].ty.sty {
+            if let ty::Closure(did, _) = self.body.local_decls[target].ty.sty {
                 let hir_id = self.infcx.tcx.hir().as_local_hir_id(did).unwrap();
 
                 if let Some((span, name)) = self.infcx.tcx.typeck_tables_of(did)
@@ -180,7 +180,7 @@ fn append_place_to_string(
                                     &including_downcast,
                                 )?;
                             } else if let Place::Base(PlaceBase::Local(local)) = proj.base {
-                                if self.mir.local_decls[local].is_ref_for_guard() {
+                                if self.body.local_decls[local].is_ref_for_guard() {
                                     self.append_place_to_string(
                                         &proj.base,
                                         buf,
@@ -276,7 +276,7 @@ fn append_place_to_string(
     /// Appends end-user visible description of the `local` place to `buf`. If `local` doesn't have
     /// a name, or its name was generated by the compiler, then `Err` is returned
     fn append_local_to_string(&self, local_index: Local, buf: &mut String) -> Result<(), ()> {
-        let local = &self.mir.local_decls[local_index];
+        let local = &self.body.local_decls[local_index];
         match local.name {
             Some(name) if !local.from_compiler_desugaring() => {
                 buf.push_str(name.as_str().get());
@@ -290,7 +290,7 @@ fn append_local_to_string(&self, local_index: Local, buf: &mut String) -> Result
     fn describe_field(&self, base: &Place<'tcx>, field: Field) -> String {
         match *base {
             Place::Base(PlaceBase::Local(local)) => {
-                let local = &self.mir.local_decls[local];
+                let local = &self.body.local_decls[local];
                 self.describe_field_from_ty(&local.ty, field, None)
             }
             Place::Base(PlaceBase::Static(ref static_)) =>
@@ -298,7 +298,7 @@ fn describe_field(&self, base: &Place<'tcx>, field: Field) -> String {
             Place::Projection(ref proj) => match proj.elem {
                 ProjectionElem::Deref => self.describe_field(&proj.base, field),
                 ProjectionElem::Downcast(_, variant_index) => {
-                    let base_ty = base.ty(self.mir, self.infcx.tcx).ty;
+                    let base_ty = base.ty(self.body, self.infcx.tcx).ty;
                     self.describe_field_from_ty(&base_ty, field, Some(variant_index))
                 }
                 ProjectionElem::Field(_, field_type) => {
@@ -556,9 +556,9 @@ pub(super) fn move_spans(
     ) -> UseSpans {
         use self::UseSpans::*;
 
-        let stmt = match self.mir[location.block].statements.get(location.statement_index) {
+        let stmt = match self.body[location.block].statements.get(location.statement_index) {
             Some(stmt) => stmt,
-            None => return OtherUse(self.mir.source_info(location).span),
+            None => return OtherUse(self.body.source_info(location).span),
         };
 
         debug!("move_spans: moved_place={:?} location={:?} stmt={:?}", moved_place, location, stmt);
@@ -596,7 +596,7 @@ pub(super) fn borrow_spans(&self, use_span: Span, location: Location) -> UseSpan
         use self::UseSpans::*;
         debug!("borrow_spans: use_span={:?} location={:?}", use_span, location);
 
-        let target = match self.mir[location.block]
+        let target = match self.body[location.block]
             .statements
             .get(location.statement_index)
         {
@@ -607,12 +607,12 @@ pub(super) fn borrow_spans(&self, use_span: Span, location: Location) -> UseSpan
             _ => return OtherUse(use_span),
         };
 
-        if self.mir.local_kind(target) != LocalKind::Temp {
+        if self.body.local_kind(target) != LocalKind::Temp {
             // operands are always temporaries.
             return OtherUse(use_span);
         }
 
-        for stmt in &self.mir[location.block].statements[location.statement_index + 1..] {
+        for stmt in &self.body[location.block].statements[location.statement_index + 1..] {
             if let StatementKind::Assign(
                 _, box Rvalue::Aggregate(ref kind, ref places)
             ) = stmt.kind {
@@ -682,7 +682,7 @@ fn closure_span(
     /// Helper to retrieve span(s) of given borrow from the current MIR
     /// representation
     pub(super) fn retrieve_borrow_spans(&self, borrow: &BorrowData<'_>) -> UseSpans {
-        let span = self.mir.source_info(borrow.reserve_location).span;
+        let span = self.body.source_info(borrow.reserve_location).span;
         self.borrow_spans(span, borrow.reserve_location)
     }
 }
index feade0d2a4aeea4f8c992a8815a184f18804800e..cc44dc3f5d46bd34be686d7bcea9a36281710d7e 100644 (file)
@@ -30,9 +30,9 @@ pub struct LocationIndex {
 }
 
 impl LocationTable {
-    crate fn new(mir: &Body<'_>) -> Self {
+    crate fn new(body: &Body<'_>) -> Self {
         let mut num_points = 0;
-        let statements_before_block = mir.basic_blocks()
+        let statements_before_block = body.basic_blocks()
             .iter()
             .map(|block_data| {
                 let v = num_points;
index 4ae4d039d60340e53a00b625c302898c78c0edef..502d601f7ce612580531855a5d4da5890e94b6b2 100644 (file)
@@ -88,40 +88,12 @@ pub fn provide(providers: &mut Providers<'_>) {
 }
 
 fn mir_borrowck<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, def_id: DefId) -> BorrowCheckResult<'tcx> {
-    let input_mir = tcx.mir_validated(def_id);
+    let input_body = tcx.mir_validated(def_id);
     debug!("run query mir_borrowck: {}", tcx.def_path_str(def_id));
 
-    // We are not borrow checking the automatically generated struct/variant constructors
-    // because we want to accept structs such as this (taken from the `linked-hash-map`
-    // crate):
-    // ```rust
-    // struct Qey<Q: ?Sized>(Q);
-    // ```
-    // MIR of this struct constructor looks something like this:
-    // ```rust
-    // fn Qey(_1: Q) -> Qey<Q>{
-    //     let mut _0: Qey<Q>;                  // return place
-    //
-    //     bb0: {
-    //         (_0.0: Q) = move _1;             // bb0[0]: scope 0 at src/main.rs:1:1: 1:26
-    //         return;                          // bb0[1]: scope 0 at src/main.rs:1:1: 1:26
-    //     }
-    // }
-    // ```
-    // The problem here is that `(_0.0: Q) = move _1;` is valid only if `Q` is
-    // of statically known size, which is not known to be true because of the
-    // `Q: ?Sized` constraint. However, it is true because the constructor can be
-    // called only when `Q` is of statically known size.
-    if tcx.is_constructor(def_id) {
-        return BorrowCheckResult {
-            closure_requirements: None,
-            used_mut_upvars: SmallVec::new(),
-        };
-    }
-
     let opt_closure_req = tcx.infer_ctxt().enter(|infcx| {
-        let input_mir: &Body<'_> = &input_mir.borrow();
-        do_mir_borrowck(&infcx, input_mir, def_id)
+        let input_body: &Body<'_> = &input_body.borrow();
+        do_mir_borrowck(&infcx, input_body, def_id)
     });
     debug!("mir_borrowck done");
 
@@ -130,7 +102,7 @@ fn mir_borrowck<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, def_id: DefId) -> BorrowC
 
 fn do_mir_borrowck<'a, 'gcx, 'tcx>(
     infcx: &InferCtxt<'a, 'gcx, 'tcx>,
-    input_mir: &Body<'gcx>,
+    input_body: &Body<'gcx>,
     def_id: DefId,
 ) -> BorrowCheckResult<'gcx> {
     debug!("do_mir_borrowck(def_id = {:?})", def_id);
@@ -177,14 +149,14 @@ fn do_mir_borrowck<'a, 'gcx, 'tcx>(
     // requires first making our own copy of the MIR. This copy will
     // be modified (in place) to contain non-lexical lifetimes. It
     // will have a lifetime tied to the inference context.
-    let mut mir: Body<'tcx> = input_mir.clone();
-    let free_regions = nll::replace_regions_in_mir(infcx, def_id, param_env, &mut mir);
-    let mir = &mir; // no further changes
-    let location_table = &LocationTable::new(mir);
+    let mut body: Body<'tcx> = input_body.clone();
+    let free_regions = nll::replace_regions_in_mir(infcx, def_id, param_env, &mut body);
+    let body = &body; // no further changes
+    let location_table = &LocationTable::new(body);
 
     let mut errors_buffer = Vec::new();
     let (move_data, move_errors): (MoveData<'tcx>, Option<Vec<(Place<'tcx>, MoveError<'tcx>)>>) =
-        match MoveData::gather_moves(mir, tcx) {
+        match MoveData::gather_moves(body, tcx) {
             Ok(move_data) => (move_data, None),
             Err((move_data, move_errors)) => (move_data, Some(move_errors)),
         };
@@ -194,27 +166,27 @@ fn do_mir_borrowck<'a, 'gcx, 'tcx>(
         param_env: param_env,
     };
 
-    let dead_unwinds = BitSet::new_empty(mir.basic_blocks().len());
+    let dead_unwinds = BitSet::new_empty(body.basic_blocks().len());
     let mut flow_inits = FlowAtLocation::new(do_dataflow(
         tcx,
-        mir,
+        body,
         def_id,
         &attributes,
         &dead_unwinds,
-        MaybeInitializedPlaces::new(tcx, mir, &mdpe),
+        MaybeInitializedPlaces::new(tcx, body, &mdpe),
         |bd, i| DebugFormatted::new(&bd.move_data().move_paths[i]),
     ));
 
     let locals_are_invalidated_at_exit = tcx.hir().body_owner_kind_by_hir_id(id).is_fn_or_closure();
     let borrow_set = Rc::new(BorrowSet::build(
-            tcx, mir, locals_are_invalidated_at_exit, &mdpe.move_data));
+            tcx, body, locals_are_invalidated_at_exit, &mdpe.move_data));
 
     // If we are in non-lexical mode, compute the non-lexical lifetimes.
     let (regioncx, polonius_output, opt_closure_req) = nll::compute_regions(
         infcx,
         def_id,
         free_regions,
-        mir,
+        body,
         &upvars,
         location_table,
         param_env,
@@ -233,29 +205,29 @@ fn do_mir_borrowck<'a, 'gcx, 'tcx>(
 
     let flow_borrows = FlowAtLocation::new(do_dataflow(
         tcx,
-        mir,
+        body,
         def_id,
         &attributes,
         &dead_unwinds,
-        Borrows::new(tcx, mir, regioncx.clone(), &borrow_set),
+        Borrows::new(tcx, body, regioncx.clone(), &borrow_set),
         |rs, i| DebugFormatted::new(&rs.location(i)),
     ));
     let flow_uninits = FlowAtLocation::new(do_dataflow(
         tcx,
-        mir,
+        body,
         def_id,
         &attributes,
         &dead_unwinds,
-        MaybeUninitializedPlaces::new(tcx, mir, &mdpe),
+        MaybeUninitializedPlaces::new(tcx, body, &mdpe),
         |bd, i| DebugFormatted::new(&bd.move_data().move_paths[i]),
     ));
     let flow_ever_inits = FlowAtLocation::new(do_dataflow(
         tcx,
-        mir,
+        body,
         def_id,
         &attributes,
         &dead_unwinds,
-        EverInitializedPlaces::new(tcx, mir, &mdpe),
+        EverInitializedPlaces::new(tcx, body, &mdpe),
         |bd, i| DebugFormatted::new(&bd.move_data().inits[i]),
     ));
 
@@ -267,11 +239,11 @@ fn do_mir_borrowck<'a, 'gcx, 'tcx>(
         _ => true,
     };
 
-    let dominators = mir.dominators();
+    let dominators = body.dominators();
 
     let mut mbcx = MirBorrowckCtxt {
         infcx,
-        mir,
+        body,
         mir_def_id: def_id,
         move_data: &mdpe.move_data,
         location_table,
@@ -309,8 +281,8 @@ fn do_mir_borrowck<'a, 'gcx, 'tcx>(
         let mut initial_diag =
             mbcx.report_conflicting_borrow(location, (&place, span), bk, &borrow);
 
-        let lint_root = if let ClearCrossCrate::Set(ref vsi) = mbcx.mir.source_scope_local_data {
-            let scope = mbcx.mir.source_info(location).scope;
+        let lint_root = if let ClearCrossCrate::Set(ref vsi) = mbcx.body.source_scope_local_data {
+            let scope = mbcx.body.source_info(location).scope;
             vsi[scope].lint_root
         } else {
             id
@@ -333,22 +305,22 @@ fn do_mir_borrowck<'a, 'gcx, 'tcx>(
     // would have a chance of erroneously adding non-user-defined mutable vars
     // to the set.
     let temporary_used_locals: FxHashSet<Local> = mbcx.used_mut.iter()
-        .filter(|&local| mbcx.mir.local_decls[*local].is_user_variable.is_none())
+        .filter(|&local| mbcx.body.local_decls[*local].is_user_variable.is_none())
         .cloned()
         .collect();
     // For the remaining unused locals that are marked as mutable, we avoid linting any that
     // were never initialized. These locals may have been removed as unreachable code; or will be
     // linted as unused variables.
-    let unused_mut_locals = mbcx.mir.mut_vars_iter()
+    let unused_mut_locals = mbcx.body.mut_vars_iter()
         .filter(|local| !mbcx.used_mut.contains(local))
         .collect();
     mbcx.gather_used_muts(temporary_used_locals, unused_mut_locals);
 
     debug!("mbcx.used_mut: {:?}", mbcx.used_mut);
     let used_mut = mbcx.used_mut;
-    for local in mbcx.mir.mut_vars_and_args_iter().filter(|local| !used_mut.contains(local)) {
-        if let ClearCrossCrate::Set(ref vsi) = mbcx.mir.source_scope_local_data {
-            let local_decl = &mbcx.mir.local_decls[local];
+    for local in mbcx.body.mut_vars_and_args_iter().filter(|local| !used_mut.contains(local)) {
+        if let ClearCrossCrate::Set(ref vsi) = mbcx.body.source_scope_local_data {
+            let local_decl = &mbcx.body.local_decls[local];
 
             // Skip implicit `self` argument for closures
             if local.index() == 1 && tcx.is_closure(mbcx.mir_def_id) {
@@ -453,7 +425,7 @@ fn downgrade_if_error(diag: &mut Diagnostic) {
 
 pub struct MirBorrowckCtxt<'cx, 'gcx: 'tcx, 'tcx: 'cx> {
     infcx: &'cx InferCtxt<'cx, 'gcx, 'tcx>,
-    mir: &'cx Body<'tcx>,
+    body: &'cx Body<'tcx>,
     mir_def_id: DefId,
     move_data: &'cx MoveData<'tcx>,
 
@@ -539,8 +511,8 @@ pub struct MirBorrowckCtxt<'cx, 'gcx: 'tcx, 'tcx: 'cx> {
 impl<'cx, 'gcx, 'tcx> DataflowResultsConsumer<'cx, 'tcx> for MirBorrowckCtxt<'cx, 'gcx, 'tcx> {
     type FlowState = Flows<'cx, 'gcx, 'tcx>;
 
-    fn mir(&self) -> &'cx Body<'tcx> {
-        self.mir
+    fn body(&self) -> &'cx Body<'tcx> {
+        self.body
     }
 
     fn visit_block_entry(&mut self, bb: BasicBlock, flow_state: &Self::FlowState) {
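
A minimal sketch (stand-in types, not the crate's actual trait definition) of the renamed accessor that `MirBorrowckCtxt` implements here; the dataflow consumer now exposes its MIR through `body()` rather than `mir()`:

    struct Body;                          // stand-in for rustc::mir::Body<'tcx>

    trait DataflowResultsConsumer<'a> {
        fn body(&self) -> &'a Body;       // formerly `fn mir(&self) -> &'a Body<'tcx>`
    }
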
@@ -690,7 +662,7 @@ fn visit_terminator_entry(
                 let gcx = self.infcx.tcx.global_tcx();
 
                 // Compute the type with accurate region information.
-                let drop_place_ty = drop_place.ty(self.mir, self.infcx.tcx);
+                let drop_place_ty = drop_place.ty(self.body, self.infcx.tcx);
 
                 // Erase the regions.
                 let drop_place_ty = self.infcx.tcx.erase_regions(&drop_place_ty).ty;
@@ -1033,13 +1005,13 @@ fn check_access_for_conflict(
 
         let mut error_reported = false;
         let tcx = self.infcx.tcx;
-        let mir = self.mir;
+        let body = self.body;
         let location_table = self.location_table.start_index(location);
         let borrow_set = self.borrow_set.clone();
         each_borrow_involving_path(
             self,
             tcx,
-            mir,
+            body,
             location,
             (sd, place_span.0),
             &borrow_set,
@@ -1197,7 +1169,7 @@ fn mutate_place(
         // (e.g., `x = ...`) so long as it has never been initialized
         // before (at this point in the flow).
         if let &Place::Base(PlaceBase::Local(local)) = place_span.0 {
-            if let Mutability::Not = self.mir.local_decls[local].mutability {
+            if let Mutability::Not = self.body.local_decls[local].mutability {
                 // check for reassignments to immutable local variables
                 self.check_if_reassignment_to_immutable_state(
                     location,
@@ -1354,9 +1326,9 @@ fn propagate_closure_used_mut_upvar(&mut self, operand: &Operand<'tcx>) {
         match *operand {
             Operand::Move(Place::Base(PlaceBase::Local(local)))
             | Operand::Copy(Place::Base(PlaceBase::Local(local)))
-                if self.mir.local_decls[local].is_user_variable.is_none() =>
+                if self.body.local_decls[local].is_user_variable.is_none() =>
             {
-                if self.mir.local_decls[local].ty.is_mutable_pointer() {
+                if self.body.local_decls[local].ty.is_mutable_pointer() {
                     // The variable will be marked as mutable by the borrow.
                     return;
                 }
@@ -1387,7 +1359,7 @@ fn propagate_closure_used_mut_upvar(&mut self, operand: &Operand<'tcx>) {
                     _ => bug!("temporary initialized in arguments"),
                 };
 
-                let bbd = &self.mir[loc.block];
+                let bbd = &self.body[loc.block];
                 let stmt = &bbd.statements[loc.statement_index];
                 debug!("temporary assigned in: stmt={:?}", stmt);
 
@@ -1502,7 +1474,7 @@ fn check_for_invalidation_at_exit(
 
         if places_conflict::borrow_conflicts_with_place(
             self.infcx.tcx,
-            self.mir,
+            self.body,
             place,
             borrow.kind,
             root_place,
@@ -1589,7 +1561,7 @@ fn check_if_reassignment_to_immutable_state(
         if let Some(init_index) = self.is_local_ever_initialized(local, flow_state) {
             // And, if so, report an error.
             let init = &self.move_data.inits[init_index];
-            let span = init.span(&self.mir);
+            let span = init.span(&self.body);
             self.report_illegal_reassignment(
                 location, place_span, span, place_span.0
             );
@@ -1799,7 +1771,7 @@ fn check_if_assigned_path_is_moved(
                             // assigning to `P.f` requires `P` itself
                             // be already initialized
                             let tcx = self.infcx.tcx;
-                            match base.ty(self.mir, tcx).ty.sty {
+                            match base.ty(self.body, tcx).ty.sty {
                                 ty::Adt(def, _) if def.has_dtor(tcx) => {
                                     self.check_if_path_or_subpath_is_moved(
                                         location, InitializationRequiringAction::Assignment,
@@ -1903,11 +1875,11 @@ fn check_parent_of_field<'cx, 'gcx, 'tcx>(
                 // no move out from an earlier location) then this is an attempt at initialization
                 // of the union - we should error in that case.
                 let tcx = this.infcx.tcx;
-                if let ty::Adt(def, _) = base.ty(this.mir, tcx).ty.sty {
+                if let ty::Adt(def, _) = base.ty(this.body, tcx).ty.sty {
                     if def.is_union() {
                         if this.move_data.path_map[mpi].iter().any(|moi| {
                             this.move_data.moves[*moi].source.is_predecessor_of(
-                                location, this.mir,
+                                location, this.body,
                             )
                         }) {
                             return;
@@ -2125,7 +2097,7 @@ fn is_mutable<'d>(
     ) -> Result<RootPlace<'d, 'tcx>, &'d Place<'tcx>> {
         match *place {
             Place::Base(PlaceBase::Local(local)) => {
-                let local = &self.mir.local_decls[local];
+                let local = &self.body.local_decls[local];
                 match local.mutability {
                     Mutability::Not => match is_local_mutation_allowed {
                         LocalMutationIsAllowed::Yes => Ok(RootPlace {
@@ -2164,7 +2136,7 @@ fn is_mutable<'d>(
             Place::Projection(ref proj) => {
                 match proj.elem {
                     ProjectionElem::Deref => {
-                        let base_ty = proj.base.ty(self.mir, self.infcx.tcx).ty;
+                        let base_ty = proj.base.ty(self.body, self.infcx.tcx).ty;
 
                         // Check the kind of deref to decide
                         match base_ty.sty {
@@ -2292,7 +2264,7 @@ pub fn is_upvar_field_projection(&self, place: &Place<'tcx>) -> Option<Field> {
             Place::Projection(ref proj) => match proj.elem {
                 ProjectionElem::Field(field, _ty) => {
                     let tcx = self.infcx.tcx;
-                    let base_ty = proj.base.ty(self.mir, tcx).ty;
+                    let base_ty = proj.base.ty(self.body, tcx).ty;
 
                     if (base_ty.is_closure() || base_ty.is_generator()) &&
                         (!by_ref || self.upvars[field.index()].by_ref)
index f892c1597057bed8fa97f287d73b738e710efdb5..f4bc1bc54527aaed6ba74b905d905162ce302c12 100644 (file)
@@ -158,12 +158,12 @@ fn append_to_grouped_errors(
                 if let Some(StatementKind::Assign(
                     Place::Base(PlaceBase::Local(local)),
                     box Rvalue::Use(Operand::Move(move_from)),
-                )) = self.mir.basic_blocks()[location.block]
+                )) = self.body.basic_blocks()[location.block]
                     .statements
                     .get(location.statement_index)
                     .map(|stmt| &stmt.kind)
                 {
-                    let local_decl = &self.mir.local_decls[*local];
+                    let local_decl = &self.body.local_decls[*local];
                     // opt_match_place is the
                     // match_span is the span of the expression being matched on
                     // match *x.y { ... }        match_place is Some(*x.y)
@@ -178,7 +178,7 @@ fn append_to_grouped_errors(
                         pat_span: _,
                     }))) = local_decl.is_user_variable
                     {
-                        let stmt_source_info = self.mir.source_info(location);
+                        let stmt_source_info = self.body.source_info(location);
                         self.append_binding_error(
                             grouped_errors,
                             kind,
@@ -371,7 +371,7 @@ fn report_cannot_move_from_borrowed_content(
         // Inspect the type of the content behind the
         // borrow to provide feedback about why this
         // was a move rather than a copy.
-        let ty = deref_target_place.ty(self.mir, self.infcx.tcx).ty;
+        let ty = deref_target_place.ty(self.body, self.infcx.tcx).ty;
         let upvar_field = self.prefixes(&move_place, PrefixSet::All)
             .find_map(|p| self.is_upvar_field_projection(p));
 
@@ -381,7 +381,7 @@ fn report_cannot_move_from_borrowed_content(
         };
 
         if let Place::Base(PlaceBase::Local(local)) = *deref_base {
-            let decl = &self.mir.local_decls[local];
+            let decl = &self.body.local_decls[local];
             if decl.is_ref_for_guard() {
                 let mut err = self.infcx.tcx.cannot_move_out_of(
                     span,
@@ -470,7 +470,7 @@ fn report_cannot_move_from_borrowed_content(
         };
         let move_ty = format!(
             "{:?}",
-            move_place.ty(self.mir, self.infcx.tcx).ty,
+            move_place.ty(self.body, self.infcx.tcx).ty,
         );
         let snippet = self.infcx.tcx.sess.source_map().span_to_snippet(span).unwrap();
         let is_option = move_ty.starts_with("std::option::Option");
@@ -503,35 +503,15 @@ fn add_move_hints(
                 move_from,
                 ..
             } => {
-                let try_remove_deref = match move_from {
-                    Place::Projection(box Projection {
-                        elem: ProjectionElem::Deref,
-                        ..
-                    }) => true,
-                    _ => false,
-                };
-                if try_remove_deref && snippet.starts_with('*') {
-                    // The snippet doesn't start with `*` in (e.g.) index
-                    // expressions `a[b]`, which roughly desugar to
-                    // `*Index::index(&a, b)` or
-                    // `*IndexMut::index_mut(&mut a, b)`.
-                    err.span_suggestion(
-                        span,
-                        "consider removing the `*`",
-                        snippet[1..].to_owned(),
-                        Applicability::Unspecified,
-                    );
-                } else {
-                    err.span_suggestion(
-                        span,
-                        "consider borrowing here",
-                        format!("&{}", snippet),
-                        Applicability::Unspecified,
-                    );
-                }
+                err.span_suggestion(
+                    span,
+                    "consider borrowing here",
+                    format!("&{}", snippet),
+                    Applicability::Unspecified,
+                );
 
                 if binds_to.is_empty() {
-                    let place_ty = move_from.ty(self.mir, self.infcx.tcx).ty;
+                    let place_ty = move_from.ty(self.body, self.infcx.tcx).ty;
                     let place_desc = match self.describe_place(&move_from) {
                         Some(desc) => format!("`{}`", desc),
                         None => format!("value"),
@@ -559,7 +539,7 @@ fn add_move_hints(
             // No binding. Nothing to suggest.
             GroupedMoveError::OtherIllegalMove { ref original_path, use_spans, .. } => {
                 let span = use_spans.var_or_use();
-                let place_ty = original_path.ty(self.mir, self.infcx.tcx).ty;
+                let place_ty = original_path.ty(self.body, self.infcx.tcx).ty;
                 let place_desc = match self.describe_place(original_path) {
                     Some(desc) => format!("`{}`", desc),
                     None => format!("value"),
@@ -587,7 +567,7 @@ fn add_move_error_suggestions(
     ) {
         let mut suggestions: Vec<(Span, &str, String)> = Vec::new();
         for local in binds_to {
-            let bind_to = &self.mir.local_decls[*local];
+            let bind_to = &self.body.local_decls[*local];
             if let Some(
                 ClearCrossCrate::Set(BindingForm::Var(VarBindingForm {
                     pat_span,
@@ -637,7 +617,7 @@ fn add_move_error_details(
     ) {
         let mut noncopy_var_spans = Vec::new();
         for (j, local) in binds_to.into_iter().enumerate() {
-            let bind_to = &self.mir.local_decls[*local];
+            let bind_to = &self.body.local_decls[*local];
             let binding_span = bind_to.source_info.span;
 
             if j == 0 {
@@ -687,7 +667,7 @@ fn borrowed_content_source(&self, deref_base: &Place<'tcx>) -> BorrowedContentSo
                         _ => continue,
                     };
 
-                    let bbd = &self.mir[loc.block];
+                    let bbd = &self.body[loc.block];
                     let is_terminator = bbd.statements.len() == loc.statement_index;
                     debug!(
                         "borrowed_content_source: loc={:?} is_terminator={:?}",
@@ -705,7 +685,7 @@ fn borrowed_content_source(&self, deref_base: &Place<'tcx>) -> BorrowedContentSo
                         ..
                     }) = bbd.terminator {
                         if let Some(source)
-                            = BorrowedContentSource::from_call(func.ty(self.mir, tcx), tcx)
+                            = BorrowedContentSource::from_call(func.ty(self.body, tcx), tcx)
                         {
                             return source;
                         }
@@ -718,7 +698,7 @@ fn borrowed_content_source(&self, deref_base: &Place<'tcx>) -> BorrowedContentSo
 
         // If we didn't find an overloaded deref or index, then assume it's a
         // built in deref and check the type of the base.
-        let base_ty = deref_base.ty(self.mir, tcx).ty;
+        let base_ty = deref_base.ty(self.body, tcx).ty;
         if base_ty.is_unsafe_ptr() {
             BorrowedContentSource::DerefRawPointer
         } else if base_ty.is_mutable_pointer() {
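
Illustration (hypothetical code, not from the patch) of the kind of move the `add_move_hints` change above targets; after that hunk the suggestion machinery always offers "consider borrowing here", prefixing the moved expression with `&`, instead of the removed "consider removing the `*`" special case:

    fn take(s: String) { drop(s); }

    fn demo(r: &String) {
        take(*r);   // error[E0507]: cannot move out of `*r`
    }
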
index cbab84c2961a906dc20f97a2c3dfa1059ca5291b..46116e3e876bc0326a195a89eeaaaee881eb1d59 100644 (file)
@@ -52,7 +52,7 @@ pub(super) fn report_mutability_error(
                 if let Place::Base(PlaceBase::Local(_)) = access_place {
                     reason = ", as it is not declared as mutable".to_string();
                 } else {
-                    let name = self.mir.local_decls[*local]
+                    let name = self.body.local_decls[*local]
                         .name
                         .expect("immutable unnamed local");
                     reason = format!(", as `{}` is not declared as mutable", name);
@@ -64,7 +64,7 @@ pub(super) fn report_mutability_error(
                 elem: ProjectionElem::Field(upvar_index, _),
             }) => {
                 debug_assert!(is_closure_or_generator(
-                    base.ty(self.mir, self.infcx.tcx).ty
+                    base.ty(self.body, self.infcx.tcx).ty
                 ));
 
                 item_msg = format!("`{}`", access_place_desc.unwrap());
@@ -83,9 +83,9 @@ pub(super) fn report_mutability_error(
                 if *base == Place::Base(PlaceBase::Local(Local::new(1))) &&
                     !self.upvars.is_empty() {
                     item_msg = format!("`{}`", access_place_desc.unwrap());
-                    debug_assert!(self.mir.local_decls[Local::new(1)].ty.is_region_ptr());
+                    debug_assert!(self.body.local_decls[Local::new(1)].ty.is_region_ptr());
                     debug_assert!(is_closure_or_generator(
-                        the_place_err.ty(self.mir, self.infcx.tcx).ty
+                        the_place_err.ty(self.body, self.infcx.tcx).ty
                     ));
 
                     reason = if self.is_upvar_field_projection(access_place).is_some() {
@@ -95,7 +95,7 @@ pub(super) fn report_mutability_error(
                     }
                 } else if {
                     if let Place::Base(PlaceBase::Local(local)) = *base {
-                        self.mir.local_decls[local].is_ref_for_guard()
+                        self.body.local_decls[local].is_ref_for_guard()
                     } else {
                         false
                     }
@@ -104,7 +104,7 @@ pub(super) fn report_mutability_error(
                     reason = ", as it is immutable for the pattern guard".to_string();
                 } else {
                     let pointer_type =
-                        if base.ty(self.mir, self.infcx.tcx).ty.is_region_ptr() {
+                        if base.ty(self.body, self.infcx.tcx).ty.is_region_ptr() {
                             "`&` reference"
                         } else {
                             "`*const` pointer"
@@ -226,7 +226,7 @@ pub(super) fn report_mutability_error(
 
                 if let Some((span, message)) = annotate_struct_field(
                     self.infcx.tcx,
-                    base.ty(self.mir, self.infcx.tcx).ty,
+                    base.ty(self.body, self.infcx.tcx).ty,
                     field,
                 ) {
                     err.span_suggestion(
@@ -241,7 +241,7 @@ pub(super) fn report_mutability_error(
             // Suggest removing a `&mut` from the use of a mutable reference.
             Place::Base(PlaceBase::Local(local))
                 if {
-                    self.mir.local_decls.get(*local).map(|local_decl| {
+                    self.body.local_decls.get(*local).map(|local_decl| {
                         if let ClearCrossCrate::Set(
                             mir::BindingForm::ImplicitSelf(kind)
                         ) = local_decl.is_user_variable.as_ref().unwrap() {
@@ -275,12 +275,12 @@ pub(super) fn report_mutability_error(
             // We want to suggest users use `let mut` for local (user
             // variable) mutations...
             Place::Base(PlaceBase::Local(local))
-                if self.mir.local_decls[*local].can_be_made_mutable() => {
+                if self.body.local_decls[*local].can_be_made_mutable() => {
                 // ... but it doesn't make sense to suggest it on
                 // variables that are `ref x`, `ref mut x`, `&self`,
                 // or `&mut self` (such variables are simply not
                 // mutable).
-                let local_decl = &self.mir.local_decls[*local];
+                let local_decl = &self.body.local_decls[*local];
                 assert_eq!(local_decl.mutability, Mutability::Not);
 
                 err.span_label(span, format!("cannot {ACT}", ACT = act));
@@ -298,7 +298,7 @@ pub(super) fn report_mutability_error(
                 elem: ProjectionElem::Field(upvar_index, _),
             }) => {
                 debug_assert!(is_closure_or_generator(
-                    base.ty(self.mir, self.infcx.tcx).ty
+                    base.ty(self.body, self.infcx.tcx).ty
                 ));
 
                 err.span_label(span, format!("cannot {ACT}", ACT = act));
@@ -344,7 +344,7 @@ pub(super) fn report_mutability_error(
                 elem: ProjectionElem::Deref,
             }) if {
                 if let Some(ClearCrossCrate::Set(BindingForm::RefForGuard)) =
-                    self.mir.local_decls[*local].is_user_variable
+                    self.body.local_decls[*local].is_user_variable
                 {
                     true
                 } else {
@@ -366,9 +366,9 @@ pub(super) fn report_mutability_error(
             Place::Projection(box Projection {
                 base: Place::Base(PlaceBase::Local(local)),
                 elem: ProjectionElem::Deref,
-            }) if self.mir.local_decls[*local].is_user_variable.is_some() =>
+            }) if self.body.local_decls[*local].is_user_variable.is_some() =>
             {
-                let local_decl = &self.mir.local_decls[*local];
+                let local_decl = &self.body.local_decls[*local];
                 let suggestion = match local_decl.is_user_variable.as_ref().unwrap() {
                     ClearCrossCrate::Set(mir::BindingForm::ImplicitSelf(_)) => {
                         Some(suggest_ampmut_self(self.infcx.tcx, local_decl))
@@ -380,7 +380,7 @@ pub(super) fn report_mutability_error(
                         ..
                     })) => Some(suggest_ampmut(
                         self.infcx.tcx,
-                        self.mir,
+                        self.body,
                         *local,
                         local_decl,
                         *opt_ty_info,
@@ -451,7 +451,7 @@ pub(super) fn report_mutability_error(
             {
                 err.span_label(span, format!("cannot {ACT}", ACT = act));
                 err.span_help(
-                    self.mir.span,
+                    self.body.span,
                     "consider changing this to accept closures that implement `FnMut`"
                 );
             }
@@ -482,7 +482,7 @@ pub(super) fn report_mutability_error(
                                 },
                                 ..
                             }
-                        ) = &self.mir.basic_blocks()[location.block].terminator {
+                        ) = &self.body.basic_blocks()[location.block].terminator {
                             let index_trait = self.infcx.tcx.lang_items().index_trait();
                             if self.infcx.tcx.parent(id) == index_trait {
                                 let mut found = false;
@@ -557,14 +557,14 @@ fn suggest_ampmut_self<'cx, 'gcx, 'tcx>(
 // by trying (3.), then (2.) and finally falling back on (1.).
 fn suggest_ampmut<'cx, 'gcx, 'tcx>(
     tcx: TyCtxt<'cx, 'gcx, 'tcx>,
-    mir: &Body<'tcx>,
+    body: &Body<'tcx>,
     local: Local,
     local_decl: &mir::LocalDecl<'tcx>,
     opt_ty_info: Option<Span>,
 ) -> (Span, String) {
-    let locations = mir.find_assignments(local);
+    let locations = body.find_assignments(local);
     if !locations.is_empty() {
-        let assignment_rhs_span = mir.source_info(locations[0]).span;
+        let assignment_rhs_span = body.source_info(locations[0]).span;
         if let Ok(src) = tcx.sess.source_map().span_to_snippet(assignment_rhs_span) {
             if let (true, Some(ws_pos)) = (
                 src.starts_with("&'"),
index b485f37b78c2d8c5bfe35e6caf42b799c3e8cbc4..4a9aad2e80abcf3438684e719cec4b9cd12d3804 100644 (file)
@@ -18,7 +18,7 @@ pub(super) fn generate_constraints<'cx, 'gcx, 'tcx>(
     liveness_constraints: &mut LivenessValues<RegionVid>,
     all_facts: &mut Option<AllFacts>,
     location_table: &LocationTable,
-    mir: &Body<'tcx>,
+    body: &Body<'tcx>,
     borrow_set: &BorrowSet<'tcx>,
 ) {
     let mut cg = ConstraintGeneration {
@@ -29,7 +29,7 @@ pub(super) fn generate_constraints<'cx, 'gcx, 'tcx>(
         all_facts,
     };
 
-    for (bb, data) in mir.basic_blocks().iter_enumerated() {
+    for (bb, data) in body.basic_blocks().iter_enumerated() {
         cg.visit_basic_block_data(bb, data);
     }
 }
index 3921246b06d388a8d12fc206b2b5dc5d0696c750..31181e72ed7ef8f4b7271c8ad62949925b42799c 100644 (file)
 use rustc_data_structures::fx::FxHashSet;
 
 crate fn find<'tcx>(
-    mir: &Body<'tcx>,
+    body: &Body<'tcx>,
     regioncx: &Rc<RegionInferenceContext<'tcx>>,
     tcx: TyCtxt<'_, '_, 'tcx>,
     region_vid: RegionVid,
     start_point: Location,
 ) -> Option<Cause> {
     let mut uf = UseFinder {
-        mir,
+        body,
         regioncx,
         tcx,
         region_vid,
@@ -28,7 +28,7 @@
 }
 
 struct UseFinder<'cx, 'gcx: 'tcx, 'tcx: 'cx> {
-    mir: &'cx Body<'tcx>,
+    body: &'cx Body<'tcx>,
     regioncx: &'cx Rc<RegionInferenceContext<'tcx>>,
     tcx: TyCtxt<'cx, 'gcx, 'tcx>,
     region_vid: RegionVid,
@@ -50,7 +50,7 @@ fn find(&mut self) -> Option<Cause> {
                 continue;
             }
 
-            let block_data = &self.mir[p.block];
+            let block_data = &self.body[p.block];
 
             match self.def_use(p, block_data.visitable(p.statement_index)) {
                 Some(DefUseResult::Def) => {}
@@ -87,7 +87,7 @@ fn find(&mut self) -> Option<Cause> {
 
     fn def_use(&self, location: Location, thing: &dyn MirVisitable<'tcx>) -> Option<DefUseResult> {
         let mut visitor = DefUseVisitor {
-            mir: self.mir,
+            body: self.body,
             tcx: self.tcx,
             region_vid: self.region_vid,
             def_use_result: None,
@@ -100,7 +100,7 @@ fn def_use(&self, location: Location, thing: &dyn MirVisitable<'tcx>) -> Option<
 }
 
 struct DefUseVisitor<'cx, 'gcx: 'tcx, 'tcx: 'cx> {
-    mir: &'cx Body<'tcx>,
+    body: &'cx Body<'tcx>,
     tcx: TyCtxt<'cx, 'gcx, 'tcx>,
     region_vid: RegionVid,
     def_use_result: Option<DefUseResult>,
@@ -114,7 +114,7 @@ enum DefUseResult {
 
 impl<'cx, 'gcx, 'tcx> Visitor<'tcx> for DefUseVisitor<'cx, 'gcx, 'tcx> {
     fn visit_local(&mut self, &local: &Local, context: PlaceContext, _: Location) {
-        let local_ty = self.mir.local_decls[local].ty;
+        let local_ty = self.body.local_decls[local].ty;
 
         let mut found_it = false;
         self.tcx.for_each_free_region(&local_ty, |r| {
index 60c46b36f5f06bca6e025bb0b5c8a9147f2ad954..f0e6d5636ccd05740360c4df55488074b47e6ccf 100644 (file)
@@ -54,7 +54,7 @@ pub(in crate::borrow_check) fn is_explained(&self) -> bool {
     pub(in crate::borrow_check) fn add_explanation_to_diagnostic<'cx, 'gcx, 'tcx>(
         &self,
         tcx: TyCtxt<'cx, 'gcx, 'tcx>,
-        mir: &Body<'tcx>,
+        body: &Body<'tcx>,
         err: &mut DiagnosticBuilder<'_>,
         borrow_desc: &str,
         borrow_span: Option<Span>,
@@ -94,7 +94,7 @@ pub(in crate::borrow_check) fn add_explanation_to_diagnostic<'cx, 'gcx, 'tcx>(
                 dropped_local,
                 should_note_order,
             } => {
-                let local_decl = &mir.local_decls[dropped_local];
+                let local_decl = &body.local_decls[dropped_local];
                 let (dtor_desc, type_desc) = match local_decl.ty.sty {
                     // If type is an ADT that implements Drop, then
                     // simplify output by reporting just the ADT name.
@@ -121,7 +121,7 @@ pub(in crate::borrow_check) fn add_explanation_to_diagnostic<'cx, 'gcx, 'tcx>(
                             TYPE = type_desc,
                             DTOR = dtor_desc
                         );
-                        err.span_label(mir.source_info(drop_loc).span, message);
+                        err.span_label(body.source_info(drop_loc).span, message);
 
                         if should_note_order {
                             err.note(
@@ -147,7 +147,7 @@ pub(in crate::borrow_check) fn add_explanation_to_diagnostic<'cx, 'gcx, 'tcx>(
                             TYPE = type_desc,
                             DTOR = dtor_desc
                         );
-                        err.span_label(mir.source_info(drop_loc).span, message);
+                        err.span_label(body.source_info(drop_loc).span, message);
 
                         if let Some(info) = &local_decl.is_block_tail {
                             // FIXME: use span_suggestion instead, highlighting the
@@ -233,7 +233,7 @@ pub(in crate::borrow_check) fn explain_why_borrow_contains_point(
         );
 
         let regioncx = &self.nonlexical_regioncx;
-        let mir = self.mir;
+        let body = self.body;
         let tcx = self.infcx.tcx;
 
         let borrow_region_vid = borrow.region;
@@ -248,9 +248,9 @@ pub(in crate::borrow_check) fn explain_why_borrow_contains_point(
             region_sub
         );
 
-        match find_use::find(mir, regioncx, tcx, region_sub, location) {
+        match find_use::find(body, regioncx, tcx, region_sub, location) {
             Some(Cause::LiveVar(local, location)) => {
-                let span = mir.source_info(location).span;
+                let span = body.source_info(location).span;
                 let spans = self
                     .move_spans(&Place::Base(PlaceBase::Local(local)), location)
                     .or_else(|| self.borrow_spans(span, location));
@@ -270,10 +270,10 @@ pub(in crate::borrow_check) fn explain_why_borrow_contains_point(
 
             Some(Cause::DropVar(local, location)) => {
                 let mut should_note_order = false;
-                if mir.local_decls[local].name.is_some() {
+                if body.local_decls[local].name.is_some() {
                     if let Some((WriteKind::StorageDeadOrDrop, place)) = kind_place {
                         if let Place::Base(PlaceBase::Local(borrowed_local)) = place {
-                             if mir.local_decls[*borrowed_local].name.is_some()
+                             if body.local_decls[*borrowed_local].name.is_some()
                                 && local != *borrowed_local
                             {
                                 should_note_order = true;
@@ -293,7 +293,7 @@ pub(in crate::borrow_check) fn explain_why_borrow_contains_point(
                 if let Some(region) = regioncx.to_error_region_vid(borrow_region_vid) {
                     let (category, from_closure, span, region_name) =
                         self.nonlexical_regioncx.free_region_constraint_info(
-                            self.mir,
+                            self.body,
                         &self.upvars,
                             self.mir_def_id,
                             self.infcx,
@@ -359,7 +359,7 @@ fn reach_through_backedge(&self, from: Location, to: Location) -> Option<Locatio
                 return outmost_back_edge;
             }
 
-            let block = &self.mir.basic_blocks()[location.block];
+            let block = &self.body.basic_blocks()[location.block];
 
             if location.statement_index < block.statements.len() {
                 let successor = location.successor_within_block();
@@ -421,7 +421,7 @@ fn find_loop_head_dfs(
         }
 
         if loop_head.dominates(from, &self.dominators) {
-            let block = &self.mir.basic_blocks()[from.block];
+            let block = &self.body.basic_blocks()[from.block];
 
             if from.statement_index < block.statements.len() {
                 let successor = from.successor_within_block();
@@ -453,7 +453,7 @@ fn find_loop_head_dfs(
     /// True if an edge `source -> target` is a backedge -- in other words, if the target
     /// dominates the source.
     fn is_back_edge(&self, source: Location, target: Location) -> bool {
-        target.dominates(source, &self.mir.dominators())
+        target.dominates(source, &self.body.dominators())
     }
 
     /// Determine how the borrow was later used.
@@ -469,7 +469,7 @@ fn later_use_kind(
                 (LaterUseKind::ClosureCapture, var_span)
             }
             UseSpans::OtherUse(span) => {
-                let block = &self.mir.basic_blocks()[location.block];
+                let block = &self.body.basic_blocks()[location.block];
 
                 let kind = if let Some(&Statement {
                     kind: StatementKind::FakeRead(FakeReadCause::ForLet, _),
@@ -491,7 +491,7 @@ fn later_use_kind(
                             Operand::Constant(c) => c.span,
                             Operand::Copy(Place::Base(PlaceBase::Local(l))) |
                             Operand::Move(Place::Base(PlaceBase::Local(l))) => {
-                                let local_decl = &self.mir.local_decls[*l];
+                                let local_decl = &self.body.local_decls[*l];
                                 if local_decl.name.is_none() {
                                     local_decl.source_info.span
                                 } else {
@@ -519,7 +519,7 @@ fn later_use_kind(
     fn was_captured_by_trait_object(&self, borrow: &BorrowData<'tcx>) -> bool {
         // Start at the reserve location, find the place that we want to see cast to a trait object.
         let location = borrow.reserve_location;
-        let block = &self.mir[location.block];
+        let block = &self.body[location.block];
         let stmt = block.statements.get(location.statement_index);
         debug!(
             "was_captured_by_trait_object: location={:?} stmt={:?}",
@@ -546,7 +546,7 @@ fn was_captured_by_trait_object(&self, borrow: &BorrowData<'tcx>) -> bool {
         );
         while let Some(current_location) = queue.pop() {
             debug!("was_captured_by_trait: target={:?}", target);
-            let block = &self.mir[current_location.block];
+            let block = &self.body[current_location.block];
             // We need to check the current location to find out if it is a terminator.
             let is_terminator = current_location.statement_index == block.statements.len();
             if !is_terminator {
index 017f4d48c120050d2d833539a246913634e15933..516eb6d2941e52602c913a1d766fdd9e100136be 100644 (file)
@@ -21,7 +21,7 @@ pub(super) fn generate_invalidates<'cx, 'gcx, 'tcx>(
     tcx: TyCtxt<'cx, 'gcx, 'tcx>,
     all_facts: &mut Option<AllFacts>,
     location_table: &LocationTable,
-    mir: &Body<'tcx>,
+    body: &Body<'tcx>,
     borrow_set: &BorrowSet<'tcx>,
 ) {
     if all_facts.is_none() {
@@ -30,16 +30,16 @@ pub(super) fn generate_invalidates<'cx, 'gcx, 'tcx>(
     }
 
     if let Some(all_facts) = all_facts {
-        let dominators = mir.dominators();
+        let dominators = body.dominators();
         let mut ig = InvalidationGenerator {
             all_facts,
             borrow_set,
             tcx,
             location_table,
-            mir,
+            body,
             dominators,
         };
-        ig.visit_body(mir);
+        ig.visit_body(body);
     }
 }
 
@@ -47,7 +47,7 @@ struct InvalidationGenerator<'cx, 'tcx: 'cx, 'gcx: 'tcx> {
     tcx: TyCtxt<'cx, 'gcx, 'tcx>,
     all_facts: &'cx mut AllFacts,
     location_table: &'cx LocationTable,
-    mir: &'cx Body<'tcx>,
+    body: &'cx Body<'tcx>,
     dominators: Dominators<BasicBlock>,
     borrow_set: &'cx BorrowSet<'tcx>,
 }
@@ -258,7 +258,7 @@ fn visit_terminator_kind(
     }
 }
 
-impl<'cg, 'cx, 'tcx, 'gcx> InvalidationGenerator<'cx, 'tcx, 'gcx> {
+impl<'cx, 'tcx, 'gcx> InvalidationGenerator<'cx, 'tcx, 'gcx> {
     /// Simulates mutation of a place.
     fn mutate_place(
         &mut self,
@@ -400,13 +400,13 @@ fn check_access_for_conflict(
             rw,
         );
         let tcx = self.tcx;
-        let mir = self.mir;
+        let body = self.body;
         let borrow_set = self.borrow_set.clone();
         let indices = self.borrow_set.borrows.indices();
         each_borrow_involving_path(
             self,
             tcx,
-            mir,
+            body,
             location,
             (sd, place),
             &borrow_set.clone(),
index 0fb1705c8c294748864f778bd6bfa2521360de72..a260f5460e504da22ab232614b46d993e3b99153 100644 (file)
@@ -50,7 +50,7 @@ pub(in crate::borrow_check) fn replace_regions_in_mir<'cx, 'gcx, 'tcx>(
     infcx: &InferCtxt<'cx, 'gcx, 'tcx>,
     def_id: DefId,
     param_env: ty::ParamEnv<'tcx>,
-    mir: &mut Body<'tcx>,
+    body: &mut Body<'tcx>,
 ) -> UniversalRegions<'tcx> {
     debug!("replace_regions_in_mir(def_id={:?})", def_id);
 
@@ -58,10 +58,10 @@ pub(in crate::borrow_check) fn replace_regions_in_mir<'cx, 'gcx, 'tcx>(
     let universal_regions = UniversalRegions::new(infcx, def_id, param_env);
 
     // Replace all remaining regions with fresh inference variables.
-    renumber::renumber_mir(infcx, mir);
+    renumber::renumber_mir(infcx, body);
 
     let source = MirSource::item(def_id);
-    mir_util::dump_mir(infcx.tcx, None, "renumber", &0, source, mir, |_, _| Ok(()));
+    mir_util::dump_mir(infcx.tcx, None, "renumber", &0, source, body, |_, _| Ok(()));
 
     universal_regions
 }
@@ -73,7 +73,7 @@ pub(in crate::borrow_check) fn compute_regions<'cx, 'gcx, 'tcx>(
     infcx: &InferCtxt<'cx, 'gcx, 'tcx>,
     def_id: DefId,
     universal_regions: UniversalRegions<'tcx>,
-    mir: &Body<'tcx>,
+    body: &Body<'tcx>,
     upvars: &[Upvar],
     location_table: &LocationTable,
     param_env: ty::ParamEnv<'gcx>,
@@ -94,7 +94,7 @@ pub(in crate::borrow_check) fn compute_regions<'cx, 'gcx, 'tcx>(
 
     let universal_regions = Rc::new(universal_regions);
 
-    let elements = &Rc::new(RegionValueElements::new(mir));
+    let elements = &Rc::new(RegionValueElements::new(body));
 
     // Run the MIR type-checker.
     let MirTypeckResults {
@@ -103,7 +103,7 @@ pub(in crate::borrow_check) fn compute_regions<'cx, 'gcx, 'tcx>(
     } = type_check::type_check(
         infcx,
         param_env,
-        mir,
+        body,
         def_id,
         &universal_regions,
         location_table,
@@ -139,7 +139,7 @@ pub(in crate::borrow_check) fn compute_regions<'cx, 'gcx, 'tcx>(
         &mut liveness_constraints,
         &mut all_facts,
         location_table,
-        &mir,
+        &body,
         borrow_set,
     );
 
@@ -148,7 +148,7 @@ pub(in crate::borrow_check) fn compute_regions<'cx, 'gcx, 'tcx>(
         universal_regions,
         placeholder_indices,
         universal_region_relations,
-        mir,
+        body,
         outlives_constraints,
         closure_bounds_mapping,
         type_tests,
@@ -161,7 +161,7 @@ pub(in crate::borrow_check) fn compute_regions<'cx, 'gcx, 'tcx>(
         infcx.tcx,
         &mut all_facts,
         location_table,
-        &mir,
+        &body,
         borrow_set,
     );
 
@@ -191,21 +191,21 @@ pub(in crate::borrow_check) fn compute_regions<'cx, 'gcx, 'tcx>(
 
     // Solve the region constraints.
     let closure_region_requirements =
-        regioncx.solve(infcx, &mir, upvars, def_id, errors_buffer);
+        regioncx.solve(infcx, &body, upvars, def_id, errors_buffer);
 
     // Dump MIR results into a file, if that is enabled. This let us
     // write unit-tests, as well as helping with debugging.
     dump_mir_results(
         infcx,
         MirSource::item(def_id),
-        &mir,
+        &body,
         &regioncx,
         &closure_region_requirements,
     );
 
     // We also have a `#[rustc_nll]` annotation that causes us to dump
     // information
-    dump_annotation(infcx, &mir, def_id, &regioncx, &closure_region_requirements, errors_buffer);
+    dump_annotation(infcx, &body, def_id, &regioncx, &closure_region_requirements, errors_buffer);
 
     (regioncx, polonius_output, closure_region_requirements)
 }
@@ -213,7 +213,7 @@ pub(in crate::borrow_check) fn compute_regions<'cx, 'gcx, 'tcx>(
 fn dump_mir_results<'a, 'gcx, 'tcx>(
     infcx: &InferCtxt<'a, 'gcx, 'tcx>,
     source: MirSource<'tcx>,
-    mir: &Body<'tcx>,
+    body: &Body<'tcx>,
     regioncx: &RegionInferenceContext<'_>,
     closure_region_requirements: &Option<ClosureRegionRequirements<'_>>,
 ) {
@@ -227,7 +227,7 @@ fn dump_mir_results<'a, 'gcx, 'tcx>(
         "nll",
         &0,
         source,
-        mir,
+        body,
         |pass_where, out| {
             match pass_where {
                 // Before the CFG, dump out the values for each region variable.
@@ -273,7 +273,7 @@ fn dump_mir_results<'a, 'gcx, 'tcx>(
 
 fn dump_annotation<'a, 'gcx, 'tcx>(
     infcx: &InferCtxt<'a, 'gcx, 'tcx>,
-    mir: &Body<'tcx>,
+    body: &Body<'tcx>,
     mir_def_id: DefId,
     regioncx: &RegionInferenceContext<'tcx>,
     closure_region_requirements: &Option<ClosureRegionRequirements<'_>>,
@@ -296,7 +296,7 @@ fn dump_annotation<'a, 'gcx, 'tcx>(
         let mut err = tcx
             .sess
             .diagnostic()
-            .span_note_diag(mir.span, "External requirements");
+            .span_note_diag(body.span, "External requirements");
 
         regioncx.annotate(tcx, &mut err);
 
@@ -317,7 +317,7 @@ fn dump_annotation<'a, 'gcx, 'tcx>(
         let mut err = tcx
             .sess
             .diagnostic()
-            .span_note_diag(mir.span, "No external requirements");
+            .span_note_diag(body.span, "No external requirements");
         regioncx.annotate(tcx, &mut err);
 
         err.buffer(errors_buffer);
index 3bf0f7e04d29540ebcbc41131e63bde0303bd621..85937848eca96fe33912d978d0101d175d793d1e 100644 (file)
@@ -62,7 +62,7 @@ impl<'tcx> RegionInferenceContext<'tcx> {
     /// path to blame.
     fn best_blame_constraint(
         &self,
-        mir: &Body<'tcx>,
+        body: &Body<'tcx>,
         from_region: RegionVid,
         target_test: impl Fn(RegionVid) -> bool,
     ) -> (ConstraintCategory, bool, Span) {
@@ -88,9 +88,9 @@ fn best_blame_constraint(
         let mut categorized_path: Vec<(ConstraintCategory, bool, Span)> = path.iter()
             .map(|constraint| {
                 if constraint.category == ConstraintCategory::ClosureBounds {
-                    self.retrieve_closure_constraint_info(mir, &constraint)
+                    self.retrieve_closure_constraint_info(body, &constraint)
                 } else {
-                    (constraint.category, false, constraint.locations.span(mir))
+                    (constraint.category, false, constraint.locations.span(body))
                 }
             })
             .collect();
@@ -237,7 +237,7 @@ fn find_constraint_paths_between_regions(
     /// Here we would be invoked with `fr = 'a` and `outlived_fr = `'b`.
     pub(super) fn report_error(
         &self,
-        mir: &Body<'tcx>,
+        body: &Body<'tcx>,
         upvars: &[Upvar],
         infcx: &InferCtxt<'_, '_, 'tcx>,
         mir_def_id: DefId,
@@ -247,7 +247,7 @@ pub(super) fn report_error(
     ) {
         debug!("report_error(fr={:?}, outlived_fr={:?})", fr, outlived_fr);
 
-        let (category, _, span) = self.best_blame_constraint(mir, fr, |r| {
+        let (category, _, span) = self.best_blame_constraint(body, fr, |r| {
             self.provides_universal_region(r, fr, outlived_fr)
         });
 
@@ -274,7 +274,7 @@ pub(super) fn report_error(
         match (category, fr_is_local, outlived_fr_is_local) {
             (ConstraintCategory::Return, true, false) if self.is_closure_fn_mut(infcx, fr) => {
                 self.report_fnmut_error(
-                    mir,
+                    body,
                     upvars,
                     infcx,
                     mir_def_id,
@@ -286,7 +286,7 @@ pub(super) fn report_error(
             }
             (ConstraintCategory::Assignment, true, false)
             | (ConstraintCategory::CallArgument, true, false) => self.report_escaping_data_error(
-                mir,
+                body,
                 upvars,
                 infcx,
                 mir_def_id,
@@ -297,7 +297,7 @@ pub(super) fn report_error(
                 errors_buffer,
             ),
             _ => self.report_general_error(
-                mir,
+                body,
                 upvars,
                 infcx,
                 mir_def_id,
@@ -357,7 +357,7 @@ fn provides_universal_region(&self, r: RegionVid, fr1: RegionVid, fr2: RegionVid
     /// ```
     fn report_fnmut_error(
         &self,
-        mir: &Body<'tcx>,
+        body: &Body<'tcx>,
         upvars: &[Upvar],
         infcx: &InferCtxt<'_, '_, 'tcx>,
         mir_def_id: DefId,
@@ -383,7 +383,7 @@ fn report_fnmut_error(
 
         diag.span_label(span, message);
 
-        match self.give_region_a_name(infcx, mir, upvars, mir_def_id, outlived_fr, &mut 1)
+        match self.give_region_a_name(infcx, body, upvars, mir_def_id, outlived_fr, &mut 1)
             .unwrap().source
         {
             RegionNameSource::NamedEarlyBoundRegion(fr_span)
@@ -422,7 +422,7 @@ fn report_fnmut_error(
     /// ```
     fn report_escaping_data_error(
         &self,
-        mir: &Body<'tcx>,
+        body: &Body<'tcx>,
         upvars: &[Upvar],
         infcx: &InferCtxt<'_, '_, 'tcx>,
         mir_def_id: DefId,
@@ -433,9 +433,9 @@ fn report_escaping_data_error(
         errors_buffer: &mut Vec<Diagnostic>,
     ) {
         let fr_name_and_span =
-            self.get_var_name_and_span_for_region(infcx.tcx, mir, upvars, fr);
+            self.get_var_name_and_span_for_region(infcx.tcx, body, upvars, fr);
         let outlived_fr_name_and_span =
-            self.get_var_name_and_span_for_region(infcx.tcx, mir, upvars, outlived_fr);
+            self.get_var_name_and_span_for_region(infcx.tcx, body, upvars, outlived_fr);
 
         let escapes_from = match self.universal_regions.defining_ty {
             DefiningTy::Closure(..) => "closure",
@@ -451,7 +451,7 @@ fn report_escaping_data_error(
             || escapes_from == "const"
         {
             return self.report_general_error(
-                mir,
+                body,
                 upvars,
                 infcx,
                 mir_def_id,
@@ -514,7 +514,7 @@ fn report_escaping_data_error(
     /// ```
     fn report_general_error(
         &self,
-        mir: &Body<'tcx>,
+        body: &Body<'tcx>,
         upvars: &[Upvar],
         infcx: &InferCtxt<'_, '_, 'tcx>,
         mir_def_id: DefId,
@@ -532,10 +532,11 @@ fn report_general_error(
         );
 
         let counter = &mut 1;
-        let fr_name = self.give_region_a_name(infcx, mir, upvars, mir_def_id, fr, counter).unwrap();
+        let fr_name = self.give_region_a_name(
+            infcx, body, upvars, mir_def_id, fr, counter).unwrap();
         fr_name.highlight_region_name(&mut diag);
         let outlived_fr_name =
-            self.give_region_a_name(infcx, mir, upvars, mir_def_id, outlived_fr, counter).unwrap();
+            self.give_region_a_name(infcx, body, upvars, mir_def_id, outlived_fr, counter).unwrap();
         outlived_fr_name.highlight_region_name(&mut diag);
 
         let mir_def_name = if infcx.tcx.is_closure(mir_def_id) {
@@ -667,7 +668,7 @@ fn add_static_impl_trait_suggestion(
 
     crate fn free_region_constraint_info(
         &self,
-        mir: &Body<'tcx>,
+        body: &Body<'tcx>,
         upvars: &[Upvar],
         mir_def_id: DefId,
         infcx: &InferCtxt<'_, '_, 'tcx>,
@@ -675,12 +676,12 @@ fn add_static_impl_trait_suggestion(
         outlived_region: RegionVid,
     ) -> (ConstraintCategory, bool, Span, Option<RegionName>) {
         let (category, from_closure, span) = self.best_blame_constraint(
-            mir,
+            body,
             borrow_region,
             |r| self.provides_universal_region(r, borrow_region, outlived_region)
         );
         let outlived_fr_name =
-            self.give_region_a_name(infcx, mir, upvars, mir_def_id, outlived_region, &mut 1);
+            self.give_region_a_name(infcx, body, upvars, mir_def_id, outlived_region, &mut 1);
         (category, from_closure, span, outlived_fr_name)
     }
 
@@ -724,18 +725,18 @@ fn add_static_impl_trait_suggestion(
     // Finds a good span to blame for the fact that `fr1` outlives `fr2`.
     crate fn find_outlives_blame_span(
         &self,
-        mir: &Body<'tcx>,
+        body: &Body<'tcx>,
         fr1: RegionVid,
         fr2: RegionVid,
     ) -> (ConstraintCategory, Span) {
         let (category, _, span) =
-            self.best_blame_constraint(mir, fr1, |r| self.provides_universal_region(r, fr1, fr2));
+            self.best_blame_constraint(body, fr1, |r| self.provides_universal_region(r, fr1, fr2));
         (category, span)
     }
 
     fn retrieve_closure_constraint_info(
         &self,
-        mir: &Body<'tcx>,
+        body: &Body<'tcx>,
         constraint: &OutlivesConstraint,
     ) -> (ConstraintCategory, bool, Span) {
         let loc = match constraint.locations {
@@ -747,7 +748,7 @@ fn retrieve_closure_constraint_info(
             self.closure_bounds_mapping[&loc].get(&(constraint.sup, constraint.sub));
         opt_span_category
             .map(|&(category, span)| (category, true, span))
-            .unwrap_or((constraint.category, false, mir.source_info(loc).span))
+            .unwrap_or((constraint.category, false, body.source_info(loc).span))
     }
 
     /// Returns `true` if a closure is inferred to be an `FnMut` closure.
index 0d452c99ea16853776aa04db9b3b9e42ebd16ca2..82720d0091394b231cfb15205dbc0cdcc5cad988 100644 (file)
@@ -152,7 +152,7 @@ impl<'tcx> RegionInferenceContext<'tcx> {
     crate fn give_region_a_name(
         &self,
         infcx: &InferCtxt<'_, '_, 'tcx>,
-        mir: &Body<'tcx>,
+        body: &Body<'tcx>,
         upvars: &[Upvar],
         mir_def_id: DefId,
         fr: RegionVid,
@@ -165,7 +165,7 @@ impl<'tcx> RegionInferenceContext<'tcx> {
         let value = self.give_name_from_error_region(infcx.tcx, mir_def_id, fr, counter)
             .or_else(|| {
                 self.give_name_if_anonymous_region_appears_in_arguments(
-                    infcx, mir, mir_def_id, fr, counter,
+                    infcx, body, mir_def_id, fr, counter,
                 )
             })
             .or_else(|| {
@@ -175,12 +175,12 @@ impl<'tcx> RegionInferenceContext<'tcx> {
             })
             .or_else(|| {
                 self.give_name_if_anonymous_region_appears_in_output(
-                    infcx, mir, mir_def_id, fr, counter,
+                    infcx, body, mir_def_id, fr, counter,
                 )
             })
             .or_else(|| {
                 self.give_name_if_anonymous_region_appears_in_yield_ty(
-                    infcx, mir, mir_def_id, fr, counter,
+                    infcx, body, mir_def_id, fr, counter,
                 )
             });
 
@@ -332,7 +332,7 @@ fn get_named_span(
     fn give_name_if_anonymous_region_appears_in_arguments(
         &self,
         infcx: &InferCtxt<'_, '_, 'tcx>,
-        mir: &Body<'tcx>,
+        body: &Body<'tcx>,
         mir_def_id: DefId,
         fr: RegionVid,
         counter: &mut usize,
@@ -344,7 +344,7 @@ fn give_name_if_anonymous_region_appears_in_arguments(
             self.universal_regions.unnormalized_input_tys[implicit_inputs + argument_index];
         if let Some(region_name) = self.give_name_if_we_can_match_hir_ty_from_argument(
             infcx,
-            mir,
+            body,
             mir_def_id,
             fr,
             arg_ty,
@@ -354,13 +354,13 @@ fn give_name_if_anonymous_region_appears_in_arguments(
             return Some(region_name);
         }
 
-        self.give_name_if_we_cannot_match_hir_ty(infcx, mir, fr, arg_ty, counter)
+        self.give_name_if_we_cannot_match_hir_ty(infcx, body, fr, arg_ty, counter)
     }
 
     fn give_name_if_we_can_match_hir_ty_from_argument(
         &self,
         infcx: &InferCtxt<'_, '_, 'tcx>,
-        mir: &Body<'tcx>,
+        body: &Body<'tcx>,
         mir_def_id: DefId,
         needle_fr: RegionVid,
         argument_ty: Ty<'tcx>,
@@ -376,7 +376,7 @@ fn give_name_if_we_can_match_hir_ty_from_argument(
             // must highlight the variable.
             hir::TyKind::Infer => self.give_name_if_we_cannot_match_hir_ty(
                 infcx,
-                mir,
+                body,
                 needle_fr,
                 argument_ty,
                 counter,
@@ -406,7 +406,7 @@ fn give_name_if_we_can_match_hir_ty_from_argument(
     fn give_name_if_we_cannot_match_hir_ty(
         &self,
         infcx: &InferCtxt<'_, '_, 'tcx>,
-        mir: &Body<'tcx>,
+        body: &Body<'tcx>,
         needle_fr: RegionVid,
         argument_ty: Ty<'tcx>,
         counter: &mut usize,
@@ -422,7 +422,7 @@ fn give_name_if_we_cannot_match_hir_ty(
         let assigned_region_name = if type_name.find(&format!("'{}", counter)).is_some() {
             // Only add a label if we can confirm that a region was labelled.
             let argument_index = self.get_argument_index_for_region(infcx.tcx, needle_fr)?;
-            let (_, span) = self.get_argument_name_and_span_for_region(mir, argument_index);
+            let (_, span) = self.get_argument_name_and_span_for_region(body, argument_index);
 
             Some(RegionName {
                 // This counter value will already have been used, so this function will increment
@@ -676,7 +676,7 @@ fn give_name_if_anonymous_region_appears_in_upvars(
     fn give_name_if_anonymous_region_appears_in_output(
         &self,
         infcx: &InferCtxt<'_, '_, 'tcx>,
-        mir: &Body<'tcx>,
+        body: &Body<'tcx>,
         mir_def_id: DefId,
         fr: RegionVid,
         counter: &mut usize,
@@ -717,7 +717,7 @@ fn give_name_if_anonymous_region_appears_in_output(
                 node: hir::ImplItemKind::Method(method_sig, _),
                 ..
             }) => (method_sig.decl.output.span(), ""),
-            _ => (mir.span, ""),
+            _ => (body.span, ""),
         };
 
         Some(RegionName {
@@ -736,7 +736,7 @@ fn give_name_if_anonymous_region_appears_in_output(
     fn give_name_if_anonymous_region_appears_in_yield_ty(
         &self,
         infcx: &InferCtxt<'_, '_, 'tcx>,
-        mir: &Body<'tcx>,
+        body: &Body<'tcx>,
         mir_def_id: DefId,
         fr: RegionVid,
         counter: &mut usize,
@@ -768,7 +768,7 @@ fn give_name_if_anonymous_region_appears_in_yield_ty(
             }) => (
                 tcx.sess.source_map().end_point(*span)
             ),
-            _ => mir.span,
+            _ => body.span,
         };
 
         debug!(
index c02a492c34100295380e27095bf8a16570957eec..f2dbcc5db3ab1b56600eaa18dee46582f4669781 100644 (file)
@@ -11,7 +11,7 @@ impl<'tcx> RegionInferenceContext<'tcx> {
     crate fn get_var_name_and_span_for_region(
         &self,
         tcx: TyCtxt<'_, '_, 'tcx>,
-        mir: &Body<'tcx>,
+        body: &Body<'tcx>,
         upvars: &[Upvar],
         fr: RegionVid,
     ) -> Option<(Option<Symbol>, Span)> {
@@ -28,7 +28,7 @@ impl<'tcx> RegionInferenceContext<'tcx> {
             .or_else(|| {
                 debug!("get_var_name_and_span_for_region: attempting argument");
                 self.get_argument_index_for_region(tcx, fr)
-                    .map(|index| self.get_argument_name_and_span_for_region(mir, index))
+                    .map(|index| self.get_argument_name_and_span_for_region(body, index))
             })
     }
 
@@ -120,15 +120,15 @@ impl<'tcx> RegionInferenceContext<'tcx> {
     /// declared.
     crate fn get_argument_name_and_span_for_region(
         &self,
-        mir: &Body<'tcx>,
+        body: &Body<'tcx>,
         argument_index: usize,
     ) -> (Option<Symbol>, Span) {
         let implicit_inputs = self.universal_regions.defining_ty.implicit_inputs();
         let argument_local = Local::new(implicit_inputs + argument_index + 1);
         debug!("get_argument_name_and_span_for_region: argument_local={:?}", argument_local);
 
-        let argument_name = mir.local_decls[argument_local].name;
-        let argument_span = mir.local_decls[argument_local].source_info.span;
+        let argument_name = body.local_decls[argument_local].name;
+        let argument_span = body.local_decls[argument_local].source_info.span;
         debug!("get_argument_name_and_span_for_region: argument_name={:?} argument_span={:?}",
                argument_name, argument_span);
 
index 4a00571feb11045c59749994892ca7e0fc16aeeb..170f61a63a5b459c05519f1011abf4be25363e0a 100644 (file)
@@ -185,7 +185,7 @@ pub(crate) fn new(
         universal_regions: Rc<UniversalRegions<'tcx>>,
         placeholder_indices: Rc<PlaceholderIndices>,
         universal_region_relations: Rc<UniversalRegionRelations<'tcx>>,
-        _mir: &Body<'tcx>,
+        _body: &Body<'tcx>,
         outlives_constraints: ConstraintSet,
         closure_bounds_mapping: FxHashMap<
             Location,
@@ -400,7 +400,7 @@ pub fn to_region_vid(&self, r: ty::Region<'tcx>) -> RegionVid {
     pub(super) fn solve<'gcx>(
         &mut self,
         infcx: &InferCtxt<'_, 'gcx, 'tcx>,
-        mir: &Body<'tcx>,
+        body: &Body<'tcx>,
         upvars: &[Upvar],
         mir_def_id: DefId,
         errors_buffer: &mut Vec<Diagnostic>,
@@ -409,19 +409,19 @@ pub(super) fn solve<'gcx>(
             infcx.tcx.sess.time_extended(),
             Some(infcx.tcx.sess),
             &format!("solve_nll_region_constraints({:?})", mir_def_id),
-            || self.solve_inner(infcx, mir, upvars, mir_def_id, errors_buffer),
+            || self.solve_inner(infcx, body, upvars, mir_def_id, errors_buffer),
         )
     }
 
     fn solve_inner<'gcx>(
         &mut self,
         infcx: &InferCtxt<'_, 'gcx, 'tcx>,
-        mir: &Body<'tcx>,
+        body: &Body<'tcx>,
         upvars: &[Upvar],
         mir_def_id: DefId,
         errors_buffer: &mut Vec<Diagnostic>,
     ) -> Option<ClosureRegionRequirements<'gcx>> {
-        self.propagate_constraints(mir);
+        self.propagate_constraints(body);
 
         // If this is a closure, we can propagate unsatisfied
         // `outlives_requirements` to our creator, so create a vector
@@ -436,7 +436,7 @@ fn solve_inner<'gcx>(
 
         self.check_type_tests(
             infcx,
-            mir,
+            body,
             mir_def_id,
             outlives_requirements.as_mut(),
             errors_buffer,
@@ -444,7 +444,7 @@ fn solve_inner<'gcx>(
 
         self.check_universal_regions(
             infcx,
-            mir,
+            body,
             upvars,
             mir_def_id,
             outlives_requirements.as_mut(),
@@ -468,7 +468,7 @@ fn solve_inner<'gcx>(
     /// for each region variable until all the constraints are
     /// satisfied. Note that some values may grow **too** large to be
     /// feasible, but we check this later.
-    fn propagate_constraints(&mut self, _mir: &Body<'tcx>) {
+    fn propagate_constraints(&mut self, _body: &Body<'tcx>) {
         debug!("propagate_constraints()");
 
         debug!("propagate_constraints: constraints={:#?}", {
@@ -581,7 +581,7 @@ fn add_incompatible_universe(&mut self, scc: ConstraintSccIndex) {
     fn check_type_tests<'gcx>(
         &self,
         infcx: &InferCtxt<'_, 'gcx, 'tcx>,
-        mir: &Body<'tcx>,
+        body: &Body<'tcx>,
         mir_def_id: DefId,
         mut propagated_outlives_requirements: Option<&mut Vec<ClosureOutlivesRequirement<'gcx>>>,
         errors_buffer: &mut Vec<Diagnostic>,
@@ -599,7 +599,7 @@ fn check_type_tests<'gcx>(
             let generic_ty = type_test.generic_kind.to_ty(tcx);
             if self.eval_verify_bound(
                 tcx,
-                mir,
+                body,
                 generic_ty,
                 type_test.lower_bound,
                 &type_test.verify_bound,
@@ -610,7 +610,7 @@ fn check_type_tests<'gcx>(
             if let Some(propagated_outlives_requirements) = &mut propagated_outlives_requirements {
                 if self.try_promote_type_test(
                     infcx,
-                    mir,
+                    body,
                     type_test,
                     propagated_outlives_requirements,
                 ) {
@@ -624,7 +624,7 @@ fn check_type_tests<'gcx>(
             let lower_bound_region = self.to_error_region(type_test.lower_bound);
 
             // Skip duplicate-ish errors.
-            let type_test_span = type_test.locations.span(mir);
+            let type_test_span = type_test.locations.span(body);
             let erased_generic_kind = tcx.erase_regions(&type_test.generic_kind);
             if !deduplicate_errors.insert((
                 erased_generic_kind,
@@ -725,7 +725,7 @@ pub fn to_error_region_vid(&self, r: RegionVid) -> Option<RegionVid> {
     fn try_promote_type_test<'gcx>(
         &self,
         infcx: &InferCtxt<'_, 'gcx, 'tcx>,
-        mir: &Body<'tcx>,
+        body: &Body<'tcx>,
         type_test: &TypeTest<'tcx>,
         propagated_outlives_requirements: &mut Vec<ClosureOutlivesRequirement<'gcx>>,
     ) -> bool {
@@ -762,7 +762,7 @@ fn try_promote_type_test<'gcx>(
             // where `ur` is a local bound -- we are sometimes in a
             // position to prove things that our caller cannot.  See
             // #53570 for an example.
-            if self.eval_verify_bound(tcx, mir, generic_ty, ur, &type_test.verify_bound) {
+            if self.eval_verify_bound(tcx, body, generic_ty, ur, &type_test.verify_bound) {
                 continue;
             }
 
@@ -782,7 +782,7 @@ fn try_promote_type_test<'gcx>(
                 let requirement = ClosureOutlivesRequirement {
                     subject,
                     outlived_free_region: upper_bound,
-                    blame_span: locations.span(mir),
+                    blame_span: locations.span(body),
                     category: ConstraintCategory::Boring,
                 };
                 debug!("try_promote_type_test: pushing {:#?}", requirement);
@@ -944,7 +944,7 @@ fn universal_upper_bound(&self, r: RegionVid) -> RegionVid {
     fn eval_verify_bound(
         &self,
         tcx: TyCtxt<'_, '_, 'tcx>,
-        mir: &Body<'tcx>,
+        body: &Body<'tcx>,
         generic_ty: Ty<'tcx>,
         lower_bound: RegionVid,
         verify_bound: &VerifyBound<'tcx>,
@@ -956,20 +956,20 @@ fn eval_verify_bound(
 
         match verify_bound {
             VerifyBound::IfEq(test_ty, verify_bound1) => {
-                self.eval_if_eq(tcx, mir, generic_ty, lower_bound, test_ty, verify_bound1)
+                self.eval_if_eq(tcx, body, generic_ty, lower_bound, test_ty, verify_bound1)
             }
 
             VerifyBound::OutlivedBy(r) => {
                 let r_vid = self.to_region_vid(r);
-                self.eval_outlives(mir, r_vid, lower_bound)
+                self.eval_outlives(body, r_vid, lower_bound)
             }
 
             VerifyBound::AnyBound(verify_bounds) => verify_bounds.iter().any(|verify_bound| {
-                self.eval_verify_bound(tcx, mir, generic_ty, lower_bound, verify_bound)
+                self.eval_verify_bound(tcx, body, generic_ty, lower_bound, verify_bound)
             }),
 
             VerifyBound::AllBounds(verify_bounds) => verify_bounds.iter().all(|verify_bound| {
-                self.eval_verify_bound(tcx, mir, generic_ty, lower_bound, verify_bound)
+                self.eval_verify_bound(tcx, body, generic_ty, lower_bound, verify_bound)
             }),
         }
     }
@@ -977,7 +977,7 @@ fn eval_verify_bound(
     fn eval_if_eq(
         &self,
         tcx: TyCtxt<'_, '_, 'tcx>,
-        mir: &Body<'tcx>,
+        body: &Body<'tcx>,
         generic_ty: Ty<'tcx>,
         lower_bound: RegionVid,
         test_ty: Ty<'tcx>,
@@ -986,7 +986,7 @@ fn eval_if_eq(
         let generic_ty_normalized = self.normalize_to_scc_representatives(tcx, generic_ty);
         let test_ty_normalized = self.normalize_to_scc_representatives(tcx, test_ty);
         if generic_ty_normalized == test_ty_normalized {
-            self.eval_verify_bound(tcx, mir, generic_ty, lower_bound, verify_bound)
+            self.eval_verify_bound(tcx, body, generic_ty, lower_bound, verify_bound)
         } else {
             false
         }
@@ -1037,7 +1037,7 @@ fn normalize_to_scc_representatives<T>(&self, tcx: TyCtxt<'_, '_, 'tcx>, value:
     // Evaluate whether `sup_region: sub_region @ point`.
     fn eval_outlives(
         &self,
-        _mir: &Body<'tcx>,
+        _body: &Body<'tcx>,
         sup_region: RegionVid,
         sub_region: RegionVid,
     ) -> bool {
@@ -1105,7 +1105,7 @@ fn eval_outlives(
     fn check_universal_regions<'gcx>(
         &self,
         infcx: &InferCtxt<'_, 'gcx, 'tcx>,
-        mir: &Body<'tcx>,
+        body: &Body<'tcx>,
         upvars: &[Upvar],
         mir_def_id: DefId,
         mut propagated_outlives_requirements: Option<&mut Vec<ClosureOutlivesRequirement<'gcx>>>,
@@ -1119,7 +1119,7 @@ fn check_universal_regions<'gcx>(
                     // for our caller into the `outlives_requirements` vector.
                     self.check_universal_region(
                         infcx,
-                        mir,
+                        body,
                         upvars,
                         mir_def_id,
                         fr,
@@ -1129,7 +1129,7 @@ fn check_universal_regions<'gcx>(
                 }
 
                 NLLRegionVariableOrigin::Placeholder(placeholder) => {
-                    self.check_bound_universal_region(infcx, mir, mir_def_id, fr, placeholder);
+                    self.check_bound_universal_region(infcx, body, mir_def_id, fr, placeholder);
                 }
 
                 NLLRegionVariableOrigin::Existential => {
@@ -1150,7 +1150,7 @@ fn check_universal_regions<'gcx>(
     fn check_universal_region<'gcx>(
         &self,
         infcx: &InferCtxt<'_, 'gcx, 'tcx>,
-        mir: &Body<'tcx>,
+        body: &Body<'tcx>,
         upvars: &[Upvar],
         mir_def_id: DefId,
         longer_fr: RegionVid,
@@ -1183,7 +1183,7 @@ fn check_universal_region<'gcx>(
                 longer_fr,
                 representative,
                 infcx,
-                mir,
+                body,
                 upvars,
                 mir_def_id,
                 propagated_outlives_requirements,
@@ -1199,7 +1199,7 @@ fn check_universal_region<'gcx>(
                 longer_fr,
                 shorter_fr,
                 infcx,
-                mir,
+                body,
                 upvars,
                 mir_def_id,
                 propagated_outlives_requirements,
@@ -1216,7 +1216,7 @@ fn check_universal_region_relation(
         longer_fr: RegionVid,
         shorter_fr: RegionVid,
         infcx: &InferCtxt<'_, 'gcx, 'tcx>,
-        mir: &Body<'tcx>,
+        body: &Body<'tcx>,
         upvars: &[Upvar],
         mir_def_id: DefId,
         propagated_outlives_requirements: &mut Option<&mut Vec<ClosureOutlivesRequirement<'gcx>>>,
@@ -1245,7 +1245,8 @@ fn check_universal_region_relation(
             {
                 debug!("check_universal_region: fr_minus={:?}", fr_minus);
 
-                let blame_span_category = self.find_outlives_blame_span(mir, longer_fr, shorter_fr);
+                let blame_span_category =
+                    self.find_outlives_blame_span(body, longer_fr, shorter_fr);
 
                 // Grow `shorter_fr` until we find some non-local regions. (We
                 // always will.)  We'll call them `shorter_fr+` -- they're ever
@@ -1275,14 +1276,14 @@ fn check_universal_region_relation(
         //
         // Note: in this case, we use the unapproximated regions to report the
         // error. This gives better error messages in some cases.
-        self.report_error(mir, upvars, infcx, mir_def_id, longer_fr, shorter_fr, errors_buffer);
+        self.report_error(body, upvars, infcx, mir_def_id, longer_fr, shorter_fr, errors_buffer);
         Some(ErrorReported)
     }
 
     fn check_bound_universal_region<'gcx>(
         &self,
         infcx: &InferCtxt<'_, 'gcx, 'tcx>,
-        mir: &Body<'tcx>,
+        body: &Body<'tcx>,
         _mir_def_id: DefId,
         longer_fr: RegionVid,
         placeholder: ty::PlaceholderRegion,
@@ -1330,7 +1331,7 @@ fn check_bound_universal_region<'gcx>(
         };
 
         // Find the code to blame for the fact that `longer_fr` outlives `error_fr`.
-        let (_, span) = self.find_outlives_blame_span(mir, longer_fr, error_region);
+        let (_, span) = self.find_outlives_blame_span(body, longer_fr, error_region);
 
         // Obviously, this error message is far from satisfactory.
         // At present, though, it only appears in unit tests --
index 8822d7bb373934b4fb3129ca492a41c854e399d7..cfd80cecca510865849ec3829d0a7e208dce6d36 100644 (file)
@@ -20,9 +20,9 @@
 }
 
 impl RegionValueElements {
-    crate fn new(mir: &Body<'_>) -> Self {
+    crate fn new(body: &Body<'_>) -> Self {
         let mut num_points = 0;
-        let statements_before_block: IndexVec<BasicBlock, usize> = mir.basic_blocks()
+        let statements_before_block: IndexVec<BasicBlock, usize> = body.basic_blocks()
             .iter()
             .map(|block_data| {
                 let v = num_points;
@@ -37,7 +37,7 @@ impl RegionValueElements {
         debug!("RegionValueElements: num_points={:#?}", num_points);
 
         let mut basic_blocks = IndexVec::with_capacity(num_points);
-        for (bb, bb_data) in mir.basic_blocks().iter_enumerated() {
+        for (bb, bb_data) in body.basic_blocks().iter_enumerated() {
             basic_blocks.extend((0..=bb_data.statements.len()).map(|_| bb));
         }
 
@@ -92,7 +92,7 @@ impl RegionValueElements {
     /// Pushes all predecessors of `index` onto `stack`.
     crate fn push_predecessors(
         &self,
-        mir: &Body<'_>,
+        body: &Body<'_>,
         index: PointIndex,
         stack: &mut Vec<PointIndex>,
     ) {
@@ -104,9 +104,9 @@ impl RegionValueElements {
             // If this is a basic block head, then the predecessors are
             // the terminators of other basic blocks
             stack.extend(
-                mir.predecessors_for(block)
+                body.predecessors_for(block)
                     .iter()
-                    .map(|&pred_bb| mir.terminator_loc(pred_bb))
+                    .map(|&pred_bb| body.terminator_loc(pred_bb))
                     .map(|pred_loc| self.point_from_location(pred_loc)),
             );
         } else {
index a3b142c2ffcc309fa7e4c398ce53d4a4de8dbc20..b5fe3d7c8b32934ea127db869d0500b1b0835049 100644 (file)
@@ -6,12 +6,12 @@
 
 /// Replaces all free regions appearing in the MIR with fresh
 /// inference variables, returning the number of variables created.
-pub fn renumber_mir<'tcx>(infcx: &InferCtxt<'_, '_, 'tcx>, mir: &mut Body<'tcx>) {
+pub fn renumber_mir<'tcx>(infcx: &InferCtxt<'_, '_, 'tcx>, body: &mut Body<'tcx>) {
     debug!("renumber_mir()");
-    debug!("renumber_mir: mir.arg_count={:?}", mir.arg_count);
+    debug!("renumber_mir: body.arg_count={:?}", body.arg_count);
 
     let mut visitor = NLLVisitor { infcx };
-    visitor.visit_body(mir);
+    visitor.visit_body(body);
 }
 
 /// Replaces all regions appearing in `value` with fresh inference
@@ -47,12 +47,12 @@ fn renumber_regions<T>(&mut self, value: &T) -> T
 }
 
 impl<'a, 'gcx, 'tcx> MutVisitor<'tcx> for NLLVisitor<'a, 'gcx, 'tcx> {
-    fn visit_body(&mut self, mir: &mut Body<'tcx>) {
-        for promoted in mir.promoted.iter_mut() {
+    fn visit_body(&mut self, body: &mut Body<'tcx>) {
+        for promoted in body.promoted.iter_mut() {
             self.visit_body(promoted);
         }
 
-        self.super_body(mir);
+        self.super_body(body);
     }
 
     fn visit_ty(&mut self, ty: &mut Ty<'tcx>, ty_context: TyContext) {
index 120088e1784d432d44407764847d557e33296c9e..353c5a39b61b90a9aa797438cd877330cdd0e73b 100644 (file)
@@ -20,7 +20,7 @@
 impl<'a, 'gcx, 'tcx> TypeChecker<'a, 'gcx, 'tcx> {
     pub(super) fn equate_inputs_and_outputs(
         &mut self,
-        mir: &Body<'tcx>,
+        body: &Body<'tcx>,
         universal_regions: &UniversalRegions<'tcx>,
         normalized_inputs_and_output: &[Ty<'tcx>],
     ) {
@@ -43,7 +43,7 @@ pub(super) fn equate_inputs_and_outputs(
                     // user-provided signature (e.g., the `_` in the code
                     // above) with fresh variables.
                     let (poly_sig, _) = self.infcx.instantiate_canonical_with_fresh_inference_vars(
-                        mir.span,
+                        body.span,
                         &user_provided_poly_sig,
                     );
 
@@ -53,7 +53,7 @@ pub(super) fn equate_inputs_and_outputs(
                     Some(
                         self.infcx
                             .replace_bound_vars_with_fresh_vars(
-                                mir.span,
+                                body.span,
                                 LateBoundRegionConversionTime::FnCall,
                                 &poly_sig,
                             )
@@ -73,8 +73,8 @@ pub(super) fn equate_inputs_and_outputs(
                 normalized_input_ty
             );
 
-            let mir_input_ty = mir.local_decls[local].ty;
-            let mir_input_span = mir.local_decls[local].source_info.span;
+            let mir_input_ty = body.local_decls[local].ty;
+            let mir_input_span = body.local_decls[local].source_info.span;
             self.equate_normalized_input_or_output(
                 normalized_input_ty,
                 mir_input_ty,
@@ -89,8 +89,8 @@ pub(super) fn equate_inputs_and_outputs(
                 // In MIR, closures begin an implicit `self`, so
                 // argument N is stored in local N+2.
                 let local = Local::new(argument_index + 2);
-                let mir_input_ty = mir.local_decls[local].ty;
-                let mir_input_span = mir.local_decls[local].source_info.span;
+                let mir_input_ty = body.local_decls[local].ty;
+                let mir_input_span = body.local_decls[local].source_info.span;
 
                 // If the user explicitly annotated the input types, enforce those.
                 let user_provided_input_ty =
@@ -104,19 +104,19 @@ pub(super) fn equate_inputs_and_outputs(
         }
 
         assert!(
-            mir.yield_ty.is_some() && universal_regions.yield_ty.is_some()
-                || mir.yield_ty.is_none() && universal_regions.yield_ty.is_none()
+            body.yield_ty.is_some() && universal_regions.yield_ty.is_some()
+                || body.yield_ty.is_none() && universal_regions.yield_ty.is_none()
         );
-        if let Some(mir_yield_ty) = mir.yield_ty {
+        if let Some(mir_yield_ty) = body.yield_ty {
             let ur_yield_ty = universal_regions.yield_ty.unwrap();
-            let yield_span = mir.local_decls[RETURN_PLACE].source_info.span;
+            let yield_span = body.local_decls[RETURN_PLACE].source_info.span;
             self.equate_normalized_input_or_output(ur_yield_ty, mir_yield_ty, yield_span);
         }
 
         // Return types are a bit more complex. They may contain existential `impl Trait`
         // types.
-        let mir_output_ty = mir.local_decls[RETURN_PLACE].ty;
-        let output_span = mir.local_decls[RETURN_PLACE].source_info.span;
+        let mir_output_ty = body.local_decls[RETURN_PLACE].ty;
+        let output_span = body.local_decls[RETURN_PLACE].source_info.span;
         if let Err(terr) = self.eq_opaque_type_and_type(
             mir_output_ty,
             normalized_output_ty,
index e1a7b9babd48a7f16b08c5c4488335f0b3ce58a1..2a066538cc234427c27118fcaf0f0f3abdf539a6 100644 (file)
@@ -60,9 +60,9 @@ impl LocalUseMap {
     crate fn build(
         live_locals: &Vec<Local>,
         elements: &RegionValueElements,
-        mir: &Body<'_>,
+        body: &Body<'_>,
     ) -> Self {
-        let nones = IndexVec::from_elem_n(None, mir.local_decls.len());
+        let nones = IndexVec::from_elem_n(None, body.local_decls.len());
         let mut local_use_map = LocalUseMap {
             first_def_at: nones.clone(),
             first_use_at: nones.clone(),
@@ -71,7 +71,7 @@ impl LocalUseMap {
         };
 
         let mut locals_with_use_data: IndexVec<Local, bool> =
-            IndexVec::from_elem_n(false, mir.local_decls.len());
+            IndexVec::from_elem_n(false, body.local_decls.len());
         live_locals
             .iter()
             .for_each(|&local| locals_with_use_data[local] = true);
@@ -81,7 +81,7 @@ impl LocalUseMap {
             elements,
             locals_with_use_data,
         }
-        .visit_body(mir);
+        .visit_body(body);
 
         local_use_map
     }
index 3cefab36e23516575649f6d14ebdef2c732f9644..fb99382e1314ad33cd74c5008c5d33aed27b6026 100644 (file)
@@ -27,7 +27,7 @@
 /// performed before
 pub(super) fn generate<'gcx, 'tcx>(
     typeck: &mut TypeChecker<'_, 'gcx, 'tcx>,
-    mir: &Body<'tcx>,
+    body: &Body<'tcx>,
     elements: &Rc<RegionValueElements>,
     flow_inits: &mut FlowAtLocation<'tcx, MaybeInitializedPlaces<'_, 'gcx, 'tcx>>,
     move_data: &MoveData<'tcx>,
@@ -44,7 +44,7 @@ pub(super) fn generate<'gcx, 'tcx>(
         // of the `live_locals`.
         // FIXME: Review "live" terminology past this point, we should
         // not be naming the `Local`s as live.
-        mir.local_decls.indices().collect()
+        body.local_decls.indices().collect()
     } else {
         let free_regions = {
             regions_that_outlive_free_regions(
@@ -53,13 +53,13 @@ pub(super) fn generate<'gcx, 'tcx>(
                 &typeck.borrowck_context.constraints.outlives_constraints,
             )
         };
-        compute_live_locals(typeck.tcx(), &free_regions, mir)
+        compute_live_locals(typeck.tcx(), &free_regions, body)
     };
 
     if !live_locals.is_empty() {
         trace::trace(
             typeck,
-            mir,
+            body,
             elements,
             flow_inits,
             move_data,
@@ -77,9 +77,9 @@ pub(super) fn generate<'gcx, 'tcx>(
 fn compute_live_locals(
     tcx: TyCtxt<'_, '_, 'tcx>,
     free_regions: &FxHashSet<RegionVid>,
-    mir: &Body<'tcx>,
+    body: &Body<'tcx>,
 ) -> Vec<Local> {
-    let live_locals: Vec<Local> = mir
+    let live_locals: Vec<Local> = body
         .local_decls
         .iter_enumerated()
         .filter_map(|(local, local_decl)| {
@@ -93,7 +93,7 @@ fn compute_live_locals(
         })
         .collect();
 
-    debug!("{} total variables", mir.local_decls.len());
+    debug!("{} total variables", body.local_decls.len());
     debug!("{} variables need liveness", live_locals.len());
     debug!("{} regions outlive free regions", free_regions.len());
 
index 345780c4760eb45a642e45625d32993f29df5231..828cb4cdcd04fab9417897cd0c5effa9aef80ffd 100644 (file)
@@ -32,7 +32,7 @@
 /// this respects `#[may_dangle]` annotations).
 pub(super) fn trace(
     typeck: &mut TypeChecker<'_, 'gcx, 'tcx>,
-    mir: &Body<'tcx>,
+    body: &Body<'tcx>,
     elements: &Rc<RegionValueElements>,
     flow_inits: &mut FlowAtLocation<'tcx, MaybeInitializedPlaces<'_, 'gcx, 'tcx>>,
     move_data: &MoveData<'tcx>,
@@ -41,11 +41,11 @@ pub(super) fn trace(
 ) {
     debug!("trace()");
 
-    let local_use_map = &LocalUseMap::build(&live_locals, elements, mir);
+    let local_use_map = &LocalUseMap::build(&live_locals, elements, body);
 
     let cx = LivenessContext {
         typeck,
-        mir,
+        body,
         flow_inits,
         elements,
         local_use_map,
@@ -72,7 +72,7 @@ struct LivenessContext<'me, 'typeck, 'flow, 'gcx, 'tcx>
     elements: &'me RegionValueElements,
 
     /// MIR we are analyzing.
-    mir: &'me Body<'tcx>,
+    body: &'me Body<'tcx>,
 
     /// Mapping to/from the various indices used for initialization tracking.
     move_data: &'me MoveData<'tcx>,
@@ -145,7 +145,7 @@ fn compute_for_all_locals(&mut self, live_locals: Vec<Local>) {
             self.compute_use_live_points_for(local);
             self.compute_drop_live_points_for(local);
 
-            let local_ty = self.cx.mir.local_decls[local].ty;
+            let local_ty = self.cx.body.local_decls[local].ty;
 
             if !self.use_live_at.is_empty() {
                 self.cx.add_use_live_facts_for(local_ty, &self.use_live_at);
@@ -197,7 +197,7 @@ fn compute_use_live_points_for(&mut self, local: Local) {
             if self.use_live_at.insert(p) {
                 self.cx
                     .elements
-                    .push_predecessors(self.cx.mir, p, &mut self.stack)
+                    .push_predecessors(self.cx.body, p, &mut self.stack)
             }
         }
     }
@@ -220,7 +220,7 @@ fn compute_drop_live_points_for(&mut self, local: Local) {
         // Find the drops where `local` is initialized.
         for drop_point in self.cx.local_use_map.drops(local) {
             let location = self.cx.elements.to_location(drop_point);
-            debug_assert_eq!(self.cx.mir.terminator_loc(location.block), location,);
+            debug_assert_eq!(self.cx.body.terminator_loc(location.block), location,);
 
             if self.cx.initialized_at_terminator(location.block, mpi) {
                 if self.drop_live_at.insert(drop_point) {
@@ -270,7 +270,7 @@ fn compute_drop_live_points_for_block(&mut self, mpi: MovePathIndex, term_point:
         // live point.
         let term_location = self.cx.elements.to_location(term_point);
         debug_assert_eq!(
-            self.cx.mir.terminator_loc(term_location.block),
+            self.cx.body.terminator_loc(term_location.block),
             term_location,
         );
         let block = term_location.block;
@@ -297,7 +297,7 @@ fn compute_drop_live_points_for_block(&mut self, mpi: MovePathIndex, term_point:
             }
         }
 
-        for &pred_block in self.cx.mir.predecessors_for(block).iter() {
+        for &pred_block in self.cx.body.predecessors_for(block).iter() {
             debug!(
                 "compute_drop_live_points_for_block: pred_block = {:?}",
                 pred_block,
@@ -326,7 +326,7 @@ fn compute_drop_live_points_for_block(&mut self, mpi: MovePathIndex, term_point:
                 continue;
             }
 
-            let pred_term_loc = self.cx.mir.terminator_loc(pred_block);
+            let pred_term_loc = self.cx.body.terminator_loc(pred_block);
             let pred_term_point = self.cx.elements.point_from_location(pred_term_loc);
 
             // If the terminator of this predecessor either *assigns*
@@ -403,7 +403,7 @@ fn initialized_at_terminator(&mut self, block: BasicBlock, mpi: MovePathIndex) -
         // the effects of all statements. This is the only way to get
         // "just ahead" of a terminator.
         self.flow_inits.reset_to_entry_of(block);
-        for statement_index in 0..self.mir[block].statements.len() {
+        for statement_index in 0..self.body[block].statements.len() {
             let location = Location {
                 block,
                 statement_index,
@@ -485,7 +485,7 @@ fn add_drop_live_facts_for(
 
         drop_data.dropck_result.report_overflows(
             self.typeck.infcx.tcx,
-            self.mir.source_info(*drop_locations.first().unwrap()).span,
+            self.body.source_info(*drop_locations.first().unwrap()).span,
             dropped_ty,
         );
 
index 6a6ec8679018f7fd06dbe801155ff4718de5057f..d6da42c24cee433eac984c6d2dc9f475b0a7f43a 100644 (file)
@@ -112,7 +112,7 @@ macro_rules! span_mirbug_and_err {
 pub(crate) fn type_check<'gcx, 'tcx>(
     infcx: &InferCtxt<'_, 'gcx, 'tcx>,
     param_env: ty::ParamEnv<'gcx>,
-    mir: &Body<'tcx>,
+    body: &Body<'tcx>,
     mir_def_id: DefId,
     universal_regions: &Rc<UniversalRegions<'tcx>>,
     location_table: &LocationTable,
@@ -156,14 +156,14 @@ pub(crate) fn type_check<'gcx, 'tcx>(
         infcx,
         mir_def_id,
         param_env,
-        mir,
+        body,
         &region_bound_pairs,
         implicit_region_bound,
         &mut borrowck_context,
         &universal_region_relations,
         |mut cx| {
-            cx.equate_inputs_and_outputs(mir, universal_regions, &normalized_inputs_and_output);
-            liveness::generate(&mut cx, mir, elements, flow_inits, move_data, location_table);
+            cx.equate_inputs_and_outputs(body, universal_regions, &normalized_inputs_and_output);
+            liveness::generate(&mut cx, body, elements, flow_inits, move_data, location_table);
 
             translate_outlives_facts(cx.borrowck_context);
         },
@@ -179,7 +179,7 @@ fn type_check_internal<'a, 'gcx, 'tcx, R>(
     infcx: &'a InferCtxt<'a, 'gcx, 'tcx>,
     mir_def_id: DefId,
     param_env: ty::ParamEnv<'gcx>,
-    mir: &'a Body<'tcx>,
+    body: &'a Body<'tcx>,
     region_bound_pairs: &'a RegionBoundPairs<'tcx>,
     implicit_region_bound: ty::Region<'tcx>,
     borrowck_context: &'a mut BorrowCheckContext<'a, 'tcx>,
@@ -188,7 +188,7 @@ fn type_check_internal<'a, 'gcx, 'tcx, R>(
 ) -> R where {
     let mut checker = TypeChecker::new(
         infcx,
-        mir,
+        body,
         mir_def_id,
         param_env,
         region_bound_pairs,
@@ -197,14 +197,14 @@ fn type_check_internal<'a, 'gcx, 'tcx, R>(
         universal_region_relations,
     );
     let errors_reported = {
-        let mut verifier = TypeVerifier::new(&mut checker, mir);
-        verifier.visit_body(mir);
+        let mut verifier = TypeVerifier::new(&mut checker, body);
+        verifier.visit_body(body);
         verifier.errors_reported
     };
 
     if !errors_reported {
         // if verifier failed, don't do further checks to avoid ICEs
-        checker.typeck_mir(mir);
+        checker.typeck_mir(body);
     }
 
     extra(&mut checker)
@@ -253,7 +253,7 @@ enum FieldAccessError {
 /// is a problem.
 struct TypeVerifier<'a, 'b: 'a, 'gcx: 'tcx, 'tcx: 'b> {
     cx: &'a mut TypeChecker<'b, 'gcx, 'tcx>,
-    mir: &'b Body<'tcx>,
+    body: &'b Body<'tcx>,
     last_span: Span,
     mir_def_id: DefId,
     errors_reported: bool,
@@ -327,7 +327,7 @@ fn visit_constant(&mut self, constant: &Constant<'tcx>, location: Location) {
 
     fn visit_rvalue(&mut self, rvalue: &Rvalue<'tcx>, location: Location) {
         self.super_rvalue(rvalue, location);
-        let rval_ty = rvalue.ty(self.mir, self.tcx());
+        let rval_ty = rvalue.ty(self.body, self.tcx());
         self.sanitize_type(rvalue, rval_ty);
     }
 
@@ -368,25 +368,25 @@ fn visit_local_decl(&mut self, local: Local, local_decl: &LocalDecl<'tcx>) {
         }
     }
 
-    fn visit_body(&mut self, mir: &Body<'tcx>) {
-        self.sanitize_type(&"return type", mir.return_ty());
-        for local_decl in &mir.local_decls {
+    fn visit_body(&mut self, body: &Body<'tcx>) {
+        self.sanitize_type(&"return type", body.return_ty());
+        for local_decl in &body.local_decls {
             self.sanitize_type(local_decl, local_decl.ty);
         }
         if self.errors_reported {
             return;
         }
-        self.super_body(mir);
+        self.super_body(body);
     }
 }
 
 impl<'a, 'b, 'gcx, 'tcx> TypeVerifier<'a, 'b, 'gcx, 'tcx> {
-    fn new(cx: &'a mut TypeChecker<'b, 'gcx, 'tcx>, mir: &'b Body<'tcx>) -> Self {
+    fn new(cx: &'a mut TypeChecker<'b, 'gcx, 'tcx>, body: &'b Body<'tcx>) -> Self {
         TypeVerifier {
-            mir,
+            body,
             mir_def_id: cx.mir_def_id,
             cx,
-            last_span: mir.span,
+            last_span: body.span,
             errors_reported: false,
         }
     }
@@ -451,7 +451,7 @@ fn sanitize_place(
         place.iterate(|place_base, place_projection| {
             let mut place_ty = match place_base {
                 PlaceBase::Local(index) =>
-                    PlaceTy::from_ty(self.mir.local_decls[*index].ty),
+                    PlaceTy::from_ty(self.body.local_decls[*index].ty),
                 PlaceBase::Static(box Static { kind, ty: sty }) => {
                     let sty = self.sanitize_type(place, sty);
                     let check_err =
@@ -478,10 +478,10 @@ fn sanitize_place(
                     match kind {
                         StaticKind::Promoted(promoted) => {
                             if !self.errors_reported {
-                                let promoted_mir = &self.mir.promoted[*promoted];
-                                self.sanitize_promoted(promoted_mir, location);
+                                let promoted_body = &self.body.promoted[*promoted];
+                                self.sanitize_promoted(promoted_body, location);
 
-                                let promoted_ty = promoted_mir.return_ty();
+                                let promoted_ty = promoted_body.return_ty();
                                 check_err(self, place, promoted_ty, sty);
                             }
                         }
@@ -538,12 +538,12 @@ fn sanitize_place(
         })
     }
 
-    fn sanitize_promoted(&mut self, promoted_mir: &'b Body<'tcx>, location: Location) {
+    fn sanitize_promoted(&mut self, promoted_body: &'b Body<'tcx>, location: Location) {
         // Determine the constraints from the promoted MIR by running the type
         // checker on the promoted MIR, then transfer the constraints back to
         // the main MIR, changing the locations to the provided location.
 
-        let parent_mir = mem::replace(&mut self.mir, promoted_mir);
+        let parent_body = mem::replace(&mut self.body, promoted_body);
 
         let all_facts = &mut None;
         let mut constraints = Default::default();
@@ -562,14 +562,14 @@ fn sanitize_promoted(&mut self, promoted_mir: &'b Body<'tcx>, location: Location
             &mut closure_bounds
         );
 
-        self.visit_body(promoted_mir);
+        self.visit_body(promoted_body);
 
         if !self.errors_reported {
             // if verifier failed, don't do further checks to avoid ICEs
-            self.cx.typeck_mir(promoted_mir);
+            self.cx.typeck_mir(promoted_body);
         }
 
-        self.mir = parent_mir;
+        self.body = parent_body;
         // Merge the outlives constraints back in, at the given location.
         mem::swap(self.cx.borrowck_context.all_facts, all_facts);
         mem::swap(
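
The hunks above implement the swap-in/swap-back approach described in the comment on `sanitize_promoted`: the verifier temporarily replaces its body with the promoted body, runs the checks, then restores the parent. A hedged, self-contained sketch of that shape (placeholder types, not the real `TypeVerifier`):

    use std::mem;

    struct Verifier<'b> {
        body: &'b str, // placeholder for `&'b Body<'tcx>`
    }

    impl<'b> Verifier<'b> {
        fn sanitize_promoted(&mut self, promoted_body: &'b str) {
            // Swap the promoted body in, check it, then restore the parent body.
            let parent_body = mem::replace(&mut self.body, promoted_body);
            // ... visit/typecheck `self.body` (now the promoted body) here ...
            self.body = parent_body;
        }
    }
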
@@ -632,7 +632,7 @@ fn sanitize_projection(
                 )
             }
             ProjectionElem::Index(i) => {
-                let index_ty = Place::Base(PlaceBase::Local(i)).ty(self.mir, tcx).ty;
+                let index_ty = Place::Base(PlaceBase::Local(i)).ty(self.body, tcx).ty;
                 if index_ty != tcx.types.usize {
                     PlaceTy::from_ty(
                         span_mirbug_and_err!(self, i, "index by non-usize {:?}", i),
@@ -969,10 +969,10 @@ pub fn from_location(&self) -> Option<Location> {
     }
 
     /// Gets a span representing the location.
-    pub fn span(&self, mir: &Body<'_>) -> Span {
+    pub fn span(&self, body: &Body<'_>) -> Span {
         match self {
             Locations::All(span) => *span,
-            Locations::Single(l) => mir.source_info(*l).span,
+            Locations::Single(l) => body.source_info(*l).span,
         }
     }
 }
@@ -980,7 +980,7 @@ pub fn span(&self, mir: &Body<'_>) -> Span {
 impl<'a, 'gcx, 'tcx> TypeChecker<'a, 'gcx, 'tcx> {
     fn new(
         infcx: &'a InferCtxt<'a, 'gcx, 'tcx>,
-        mir: &'a Body<'tcx>,
+        body: &'a Body<'tcx>,
         mir_def_id: DefId,
         param_env: ty::ParamEnv<'gcx>,
         region_bound_pairs: &'a RegionBoundPairs<'tcx>,
@@ -992,7 +992,7 @@ fn new(
             infcx,
             last_span: DUMMY_SP,
             mir_def_id,
-            user_type_annotations: &mir.user_type_annotations,
+            user_type_annotations: &body.user_type_annotations,
             param_env,
             region_bound_pairs,
             implicit_region_bound,
@@ -1317,7 +1317,7 @@ fn tcx(&self) -> TyCtxt<'a, 'gcx, 'tcx> {
         self.infcx.tcx
     }
 
-    fn check_stmt(&mut self, mir: &Body<'tcx>, stmt: &Statement<'tcx>, location: Location) {
+    fn check_stmt(&mut self, body: &Body<'tcx>, stmt: &Statement<'tcx>, location: Location) {
         debug!("check_stmt: {:?}", stmt);
         let tcx = self.tcx();
         match stmt.kind {
@@ -1345,14 +1345,14 @@ fn check_stmt(&mut self, mir: &Body<'tcx>, stmt: &Statement<'tcx>, location: Loc
                         ConstraintCategory::Return
                     },
                     Place::Base(PlaceBase::Local(l))
-                        if !mir.local_decls[l].is_user_variable.is_some() => {
+                        if !body.local_decls[l].is_user_variable.is_some() => {
                         ConstraintCategory::Boring
                     }
                     _ => ConstraintCategory::Assignment,
                 };
 
-                let place_ty = place.ty(mir, tcx).ty;
-                let rv_ty = rv.ty(mir, tcx);
+                let place_ty = place.ty(body, tcx).ty;
+                let rv_ty = rv.ty(body, tcx);
                 if let Err(terr) =
                     self.sub_types_or_anon(rv_ty, place_ty, location.to_locations(), category)
                 {
@@ -1386,7 +1386,7 @@ fn check_stmt(&mut self, mir: &Body<'tcx>, stmt: &Statement<'tcx>, location: Loc
                     }
                 }
 
-                self.check_rvalue(mir, rv, location);
+                self.check_rvalue(body, rv, location);
                 if !self.tcx().features().unsized_locals {
                     let trait_ref = ty::TraitRef {
                         def_id: tcx.lang_items().sized_trait().unwrap(),
@@ -1403,7 +1403,7 @@ fn check_stmt(&mut self, mir: &Body<'tcx>, stmt: &Statement<'tcx>, location: Loc
                 ref place,
                 variant_index,
             } => {
-                let place_type = place.ty(mir, tcx).ty;
+                let place_type = place.ty(body, tcx).ty;
                 let adt = match place_type.sty {
                     ty::Adt(adt, _) if adt.is_enum() => adt,
                     _ => {
@@ -1425,7 +1425,7 @@ fn check_stmt(&mut self, mir: &Body<'tcx>, stmt: &Statement<'tcx>, location: Loc
                 };
             }
             StatementKind::AscribeUserType(ref place, variance, box ref projection) => {
-                let place_ty = place.ty(mir, tcx).ty;
+                let place_ty = place.ty(body, tcx).ty;
                 if let Err(terr) = self.relate_type_and_user_type(
                     place_ty,
                     variance,
@@ -1456,7 +1456,7 @@ fn check_stmt(&mut self, mir: &Body<'tcx>, stmt: &Statement<'tcx>, location: Loc
 
     fn check_terminator(
         &mut self,
-        mir: &Body<'tcx>,
+        body: &Body<'tcx>,
         term: &Terminator<'tcx>,
         term_location: Location,
     ) {
@@ -1481,8 +1481,8 @@ fn check_terminator(
                 target: _,
                 unwind: _,
             } => {
-                let place_ty = location.ty(mir, tcx).ty;
-                let rv_ty = value.ty(mir, tcx);
+                let place_ty = location.ty(body, tcx).ty;
+                let rv_ty = value.ty(body, tcx);
 
                 let locations = term_location.to_locations();
                 if let Err(terr) =
@@ -1503,7 +1503,7 @@ fn check_terminator(
                 switch_ty,
                 ..
             } => {
-                let discr_ty = discr.ty(mir, tcx);
+                let discr_ty = discr.ty(body, tcx);
                 if let Err(terr) = self.sub_types(
                     discr_ty,
                     switch_ty,
@@ -1531,7 +1531,7 @@ fn check_terminator(
                 from_hir_call,
                 ..
             } => {
-                let func_ty = func.ty(mir, tcx);
+                let func_ty = func.ty(body, tcx);
                 debug!("check_terminator: call, func_ty={:?}", func_ty);
                 let sig = match func_ty.sty {
                     ty::FnDef(..) | ty::FnPtr(_) => func_ty.fn_sig(tcx),
@@ -1546,7 +1546,7 @@ fn check_terminator(
                     &sig,
                 );
                 let sig = self.normalize(sig, term_location);
-                self.check_call_dest(mir, term, &sig, destination, term_location);
+                self.check_call_dest(body, term, &sig, destination, term_location);
 
                 self.prove_predicates(
                     sig.inputs_and_output.iter().map(|ty| ty::Predicate::WellFormed(ty)),
@@ -1571,28 +1571,28 @@ fn check_terminator(
                         .add_element(region_vid, term_location);
                 }
 
-                self.check_call_inputs(mir, term, &sig, args, term_location, from_hir_call);
+                self.check_call_inputs(body, term, &sig, args, term_location, from_hir_call);
             }
             TerminatorKind::Assert {
                 ref cond, ref msg, ..
             } => {
-                let cond_ty = cond.ty(mir, tcx);
+                let cond_ty = cond.ty(body, tcx);
                 if cond_ty != tcx.types.bool {
                     span_mirbug!(self, term, "bad Assert ({:?}, not bool", cond_ty);
                 }
 
                 if let BoundsCheck { ref len, ref index } = *msg {
-                    if len.ty(mir, tcx) != tcx.types.usize {
+                    if len.ty(body, tcx) != tcx.types.usize {
                         span_mirbug!(self, len, "bounds-check length non-usize {:?}", len)
                     }
-                    if index.ty(mir, tcx) != tcx.types.usize {
+                    if index.ty(body, tcx) != tcx.types.usize {
                         span_mirbug!(self, index, "bounds-check index non-usize {:?}", index)
                     }
                 }
             }
             TerminatorKind::Yield { ref value, .. } => {
-                let value_ty = value.ty(mir, tcx);
-                match mir.yield_ty {
+                let value_ty = value.ty(body, tcx);
+                match body.yield_ty {
                     None => span_mirbug!(self, term, "yield in non-generator"),
                     Some(ty) => {
                         if let Err(terr) = self.sub_types(
@@ -1618,7 +1618,7 @@ fn check_terminator(
 
     fn check_call_dest(
         &mut self,
-        mir: &Body<'tcx>,
+        body: &Body<'tcx>,
         term: &Terminator<'tcx>,
         sig: &ty::FnSig<'tcx>,
         destination: &Option<(Place<'tcx>, BasicBlock)>,
@@ -1627,7 +1627,7 @@ fn check_call_dest(
         let tcx = self.tcx();
         match *destination {
             Some((ref dest, _target_block)) => {
-                let dest_ty = dest.ty(mir, tcx).ty;
+                let dest_ty = dest.ty(body, tcx).ty;
                 let category = match *dest {
                     Place::Base(PlaceBase::Local(RETURN_PLACE)) => {
                         if let BorrowCheckContext {
@@ -1649,7 +1649,7 @@ fn check_call_dest(
                         }
                     }
                     Place::Base(PlaceBase::Local(l))
-                        if !mir.local_decls[l].is_user_variable.is_some() => {
+                        if !body.local_decls[l].is_user_variable.is_some() => {
                         ConstraintCategory::Boring
                     }
                     _ => ConstraintCategory::Assignment,
@@ -1687,7 +1687,7 @@ fn check_call_dest(
 
     fn check_call_inputs(
         &mut self,
-        mir: &Body<'tcx>,
+        body: &Body<'tcx>,
         term: &Terminator<'tcx>,
         sig: &ty::FnSig<'tcx>,
         args: &[Operand<'tcx>],
@@ -1706,7 +1706,7 @@ fn check_call_inputs(
             span_mirbug!(self, term, "call to {:?} with wrong # of args", sig);
         }
         for (n, (fn_arg, op_arg)) in inputs.iter().zip(args).enumerate() {
-            let op_arg_ty = op_arg.ty(mir, self.tcx());
+            let op_arg_ty = op_arg.ty(body, self.tcx());
             let category = if from_hir_call {
                 ConstraintCategory::CallArgument
             } else {
@@ -1728,15 +1728,15 @@ fn check_call_inputs(
         }
     }
 
-    fn check_iscleanup(&mut self, mir: &Body<'tcx>, block_data: &BasicBlockData<'tcx>) {
+    fn check_iscleanup(&mut self, body: &Body<'tcx>, block_data: &BasicBlockData<'tcx>) {
         let is_cleanup = block_data.is_cleanup;
         self.last_span = block_data.terminator().source_info.span;
         match block_data.terminator().kind {
             TerminatorKind::Goto { target } => {
-                self.assert_iscleanup(mir, block_data, target, is_cleanup)
+                self.assert_iscleanup(body, block_data, target, is_cleanup)
             }
             TerminatorKind::SwitchInt { ref targets, .. } => for target in targets {
-                self.assert_iscleanup(mir, block_data, *target, is_cleanup);
+                self.assert_iscleanup(body, block_data, *target, is_cleanup);
             },
             TerminatorKind::Resume => if !is_cleanup {
                 span_mirbug!(self, block_data, "resume on non-cleanup block!")
@@ -1754,9 +1754,9 @@ fn check_iscleanup(&mut self, mir: &Body<'tcx>, block_data: &BasicBlockData<'tcx
                 if is_cleanup {
                     span_mirbug!(self, block_data, "yield in cleanup block")
                 }
-                self.assert_iscleanup(mir, block_data, resume, is_cleanup);
+                self.assert_iscleanup(body, block_data, resume, is_cleanup);
                 if let Some(drop) = drop {
-                    self.assert_iscleanup(mir, block_data, drop, is_cleanup);
+                    self.assert_iscleanup(body, block_data, drop, is_cleanup);
                 }
             }
             TerminatorKind::Unreachable => {}
@@ -1767,12 +1767,12 @@ fn check_iscleanup(&mut self, mir: &Body<'tcx>, block_data: &BasicBlockData<'tcx
                 cleanup: unwind,
                 ..
             } => {
-                self.assert_iscleanup(mir, block_data, target, is_cleanup);
+                self.assert_iscleanup(body, block_data, target, is_cleanup);
                 if let Some(unwind) = unwind {
                     if is_cleanup {
                         span_mirbug!(self, block_data, "unwind on cleanup block")
                     }
-                    self.assert_iscleanup(mir, block_data, unwind, true);
+                    self.assert_iscleanup(body, block_data, unwind, true);
                 }
             }
             TerminatorKind::Call {
@@ -1781,29 +1781,29 @@ fn check_iscleanup(&mut self, mir: &Body<'tcx>, block_data: &BasicBlockData<'tcx
                 ..
             } => {
                 if let &Some((_, target)) = destination {
-                    self.assert_iscleanup(mir, block_data, target, is_cleanup);
+                    self.assert_iscleanup(body, block_data, target, is_cleanup);
                 }
                 if let Some(cleanup) = cleanup {
                     if is_cleanup {
                         span_mirbug!(self, block_data, "cleanup on cleanup block")
                     }
-                    self.assert_iscleanup(mir, block_data, cleanup, true);
+                    self.assert_iscleanup(body, block_data, cleanup, true);
                 }
             }
             TerminatorKind::FalseEdges {
                 real_target,
                 ref imaginary_targets,
             } => {
-                self.assert_iscleanup(mir, block_data, real_target, is_cleanup);
+                self.assert_iscleanup(body, block_data, real_target, is_cleanup);
                 for target in imaginary_targets {
-                    self.assert_iscleanup(mir, block_data, *target, is_cleanup);
+                    self.assert_iscleanup(body, block_data, *target, is_cleanup);
                 }
             }
             TerminatorKind::FalseUnwind {
                 real_target,
                 unwind,
             } => {
-                self.assert_iscleanup(mir, block_data, real_target, is_cleanup);
+                self.assert_iscleanup(body, block_data, real_target, is_cleanup);
                 if let Some(unwind) = unwind {
                     if is_cleanup {
                         span_mirbug!(
@@ -1812,7 +1812,7 @@ fn check_iscleanup(&mut self, mir: &Body<'tcx>, block_data: &BasicBlockData<'tcx
                             "cleanup in cleanup block via false unwind"
                         );
                     }
-                    self.assert_iscleanup(mir, block_data, unwind, true);
+                    self.assert_iscleanup(body, block_data, unwind, true);
                 }
             }
         }
@@ -1820,12 +1820,12 @@ fn check_iscleanup(&mut self, mir: &Body<'tcx>, block_data: &BasicBlockData<'tcx
 
     fn assert_iscleanup(
         &mut self,
-        mir: &Body<'tcx>,
+        body: &Body<'tcx>,
         ctxt: &dyn fmt::Debug,
         bb: BasicBlock,
         iscleanuppad: bool,
     ) {
-        if mir[bb].is_cleanup != iscleanuppad {
+        if body[bb].is_cleanup != iscleanuppad {
             span_mirbug!(
                 self,
                 ctxt,
@@ -1836,8 +1836,8 @@ fn assert_iscleanup(
         }
     }
 
-    fn check_local(&mut self, mir: &Body<'tcx>, local: Local, local_decl: &LocalDecl<'tcx>) {
-        match mir.local_kind(local) {
+    fn check_local(&mut self, body: &Body<'tcx>, local: Local, local_decl: &LocalDecl<'tcx>) {
+        match body.local_kind(local) {
             LocalKind::ReturnPointer | LocalKind::Arg => {
                 // return values of normal functions are required to be
                 // sized by typeck, but return values of ADT constructors are
@@ -1938,16 +1938,16 @@ fn aggregate_field_ty(
         }
     }
 
-    fn check_rvalue(&mut self, mir: &Body<'tcx>, rvalue: &Rvalue<'tcx>, location: Location) {
+    fn check_rvalue(&mut self, body: &Body<'tcx>, rvalue: &Rvalue<'tcx>, location: Location) {
         let tcx = self.tcx();
 
         match rvalue {
             Rvalue::Aggregate(ak, ops) => {
-                self.check_aggregate_rvalue(mir, rvalue, ak, ops, location)
+                self.check_aggregate_rvalue(body, rvalue, ak, ops, location)
             }
 
             Rvalue::Repeat(operand, len) => if *len > 1 {
-                let operand_ty = operand.ty(mir, tcx);
+                let operand_ty = operand.ty(body, tcx);
 
                 let trait_ref = ty::TraitRef {
                     def_id: tcx.lang_items().copy_trait().unwrap(),
@@ -1964,7 +1964,7 @@ fn check_rvalue(&mut self, mir: &Body<'tcx>, rvalue: &Rvalue<'tcx>, location: Lo
             Rvalue::NullaryOp(_, ty) => {
                 // Even with unsized locals cannot box an unsized value.
                 if self.tcx().features().unsized_locals {
-                    let span = mir.source_info(location).span;
+                    let span = body.source_info(location).span;
                     self.ensure_place_sized(ty, span);
                 }
 
@@ -1983,7 +1983,7 @@ fn check_rvalue(&mut self, mir: &Body<'tcx>, rvalue: &Rvalue<'tcx>, location: Lo
             Rvalue::Cast(cast_kind, op, ty) => {
                 match cast_kind {
                     CastKind::Pointer(PointerCast::ReifyFnPointer) => {
-                        let fn_sig = op.ty(mir, tcx).fn_sig(tcx);
+                        let fn_sig = op.ty(body, tcx).fn_sig(tcx);
 
                         // The type that we see in the fcx is like
                         // `foo::<'a, 'b>`, where `foo` is the path to a
@@ -2012,7 +2012,7 @@ fn check_rvalue(&mut self, mir: &Body<'tcx>, rvalue: &Rvalue<'tcx>, location: Lo
                     }
 
                     CastKind::Pointer(PointerCast::ClosureFnPointer(unsafety)) => {
-                        let sig = match op.ty(mir, tcx).sty {
+                        let sig = match op.ty(body, tcx).sty {
                             ty::Closure(def_id, substs) => {
                                 substs.closure_sig_ty(def_id, tcx).fn_sig(tcx)
                             }
@@ -2038,7 +2038,7 @@ fn check_rvalue(&mut self, mir: &Body<'tcx>, rvalue: &Rvalue<'tcx>, location: Lo
                     }
 
                     CastKind::Pointer(PointerCast::UnsafeFnPointer) => {
-                        let fn_sig = op.ty(mir, tcx).fn_sig(tcx);
+                        let fn_sig = op.ty(body, tcx).fn_sig(tcx);
 
                         // The type that we see in the fcx is like
                         // `foo::<'a, 'b>`, where `foo` is the path to a
@@ -2070,7 +2070,7 @@ fn check_rvalue(&mut self, mir: &Body<'tcx>, rvalue: &Rvalue<'tcx>, location: Lo
                         let &ty = ty;
                         let trait_ref = ty::TraitRef {
                             def_id: tcx.lang_items().coerce_unsized_trait().unwrap(),
-                            substs: tcx.mk_substs_trait(op.ty(mir, tcx), &[ty.into()]),
+                            substs: tcx.mk_substs_trait(op.ty(body, tcx), &[ty.into()]),
                         };
 
                         self.prove_trait_ref(
@@ -2081,7 +2081,7 @@ fn check_rvalue(&mut self, mir: &Body<'tcx>, rvalue: &Rvalue<'tcx>, location: Lo
                     }
 
                     CastKind::Pointer(PointerCast::MutToConstPointer) => {
-                        let ty_from = match op.ty(mir, tcx).sty {
+                        let ty_from = match op.ty(body, tcx).sty {
                             ty::RawPtr(ty::TypeAndMut {
                                 ty: ty_from,
                                 mutbl: hir::MutMutable,
@@ -2129,7 +2129,7 @@ fn check_rvalue(&mut self, mir: &Body<'tcx>, rvalue: &Rvalue<'tcx>, location: Lo
                     }
 
                     CastKind::Misc => {
-                        if let ty::Ref(_, mut ty_from, _) = op.ty(mir, tcx).sty {
+                        if let ty::Ref(_, mut ty_from, _) = op.ty(body, tcx).sty {
                             let (mut ty_to, mutability) = if let ty::RawPtr(ty::TypeAndMut {
                                 ty: ty_to,
                                 mutbl,
@@ -2140,7 +2140,7 @@ fn check_rvalue(&mut self, mir: &Body<'tcx>, rvalue: &Rvalue<'tcx>, location: Lo
                                     self,
                                     rvalue,
                                     "invalid cast types {:?} -> {:?}",
-                                    op.ty(mir, tcx),
+                                    op.ty(body, tcx),
                                     ty,
                                 );
                                 return;
@@ -2196,7 +2196,7 @@ fn check_rvalue(&mut self, mir: &Body<'tcx>, rvalue: &Rvalue<'tcx>, location: Lo
             }
 
             Rvalue::Ref(region, _borrow_kind, borrowed_place) => {
-                self.add_reborrow_constraint(mir, location, region, borrowed_place);
+                self.add_reborrow_constraint(body, location, region, borrowed_place);
             }
 
             Rvalue::BinaryOp(BinOp::Eq, left, right)
@@ -2205,13 +2205,13 @@ fn check_rvalue(&mut self, mir: &Body<'tcx>, rvalue: &Rvalue<'tcx>, location: Lo
             | Rvalue::BinaryOp(BinOp::Le, left, right)
             | Rvalue::BinaryOp(BinOp::Gt, left, right)
             | Rvalue::BinaryOp(BinOp::Ge, left, right) => {
-                let ty_left = left.ty(mir, tcx);
+                let ty_left = left.ty(body, tcx);
                 if let ty::RawPtr(_) | ty::FnPtr(_) = ty_left.sty {
-                    let ty_right = right.ty(mir, tcx);
+                    let ty_right = right.ty(body, tcx);
                     let common_ty = self.infcx.next_ty_var(
                         TypeVariableOrigin {
                             kind: TypeVariableOriginKind::MiscVariable,
-                            span: mir.source_info(location).span,
+                            span: body.source_info(location).span,
                         }
                     );
                     self.sub_types(
@@ -2277,7 +2277,7 @@ fn rvalue_user_ty(&self, rvalue: &Rvalue<'tcx>) -> Option<UserTypeAnnotationInde
 
     fn check_aggregate_rvalue(
         &mut self,
-        mir: &Body<'tcx>,
+        body: &Body<'tcx>,
         rvalue: &Rvalue<'tcx>,
         aggregate_kind: &AggregateKind<'tcx>,
         operands: &[Operand<'tcx>],
@@ -2306,7 +2306,7 @@ fn check_aggregate_rvalue(
                     continue;
                 }
             };
-            let operand_ty = operand.ty(mir, tcx);
+            let operand_ty = operand.ty(body, tcx);
 
             if let Err(terr) = self.sub_types(
                 operand_ty,
@@ -2335,7 +2335,7 @@ fn check_aggregate_rvalue(
     /// - `borrowed_place`: the place `P` being borrowed
     fn add_reborrow_constraint(
         &mut self,
-        mir: &Body<'tcx>,
+        body: &Body<'tcx>,
         location: Location,
         borrow_region: ty::Region<'tcx>,
         borrowed_place: &Place<'tcx>,
@@ -2382,7 +2382,7 @@ fn add_reborrow_constraint(
             match *elem {
                 ProjectionElem::Deref => {
                     let tcx = self.infcx.tcx;
-                    let base_ty = base.ty(mir, tcx).ty;
+                    let base_ty = base.ty(body, tcx).ty;
 
                     debug!("add_reborrow_constraint - base_ty = {:?}", base_ty);
                     match base_ty.sty {
@@ -2624,15 +2624,15 @@ fn prove_predicate(
         })
     }
 
-    fn typeck_mir(&mut self, mir: &Body<'tcx>) {
-        self.last_span = mir.span;
-        debug!("run_on_mir: {:?}", mir.span);
+    fn typeck_mir(&mut self, body: &Body<'tcx>) {
+        self.last_span = body.span;
+        debug!("run_on_mir: {:?}", body.span);
 
-        for (local, local_decl) in mir.local_decls.iter_enumerated() {
-            self.check_local(mir, local, local_decl);
+        for (local, local_decl) in body.local_decls.iter_enumerated() {
+            self.check_local(body, local, local_decl);
         }
 
-        for (block, block_data) in mir.basic_blocks().iter_enumerated() {
+        for (block, block_data) in body.basic_blocks().iter_enumerated() {
             let mut location = Location {
                 block,
                 statement_index: 0,
@@ -2641,12 +2641,12 @@ fn typeck_mir(&mut self, mir: &Body<'tcx>) {
                 if !stmt.source_info.span.is_dummy() {
                     self.last_span = stmt.source_info.span;
                 }
-                self.check_stmt(mir, stmt, location);
+                self.check_stmt(body, stmt, location);
                 location.statement_index += 1;
             }
 
-            self.check_terminator(mir, block_data.terminator(), location);
-            self.check_iscleanup(mir, block_data);
+            self.check_terminator(body, block_data.terminator(), location);
+            self.check_iscleanup(body, block_data);
         }
     }
 
index 557d235c23f0555b8ced991aa4b5b93d61f7edef..a11e5d9a5568728c13ebee24b79bd1f659b39258 100644 (file)
@@ -10,7 +10,7 @@
 /// Returns `true` if the borrow represented by `kind` is
 /// allowed to be split into separate Reservation and
 /// Activation phases.
-pub(super) fn allow_two_phase_borrow<'a, 'tcx, 'gcx: 'tcx>(kind: BorrowKind) -> bool {
+pub(super) fn allow_two_phase_borrow(kind: BorrowKind) -> bool {
     kind.allows_two_phase_borrow()
 }
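
The doc comment above refers to two-phase borrows, where a mutable borrow is first reserved (behaving like a shared borrow) and only later activated at the point of actual mutation. A small standalone illustration of the code this enables (not part of this diff):

    fn main() {
        let mut v = vec![1, 2, 3];
        // `v.push(..)` reserves a mutable borrow of `v`, yet `v.len()` can still
        // read `v` before that borrow is activated by the call itself.
        v.push(v.len());
        assert_eq!(v, vec![1, 2, 3, 3]);
    }
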
 
@@ -25,7 +25,7 @@ pub(super) enum Control {
 pub(super) fn each_borrow_involving_path<'a, 'tcx, 'gcx: 'tcx, F, I, S> (
     s: &mut S,
     tcx: TyCtxt<'a, 'gcx, 'tcx>,
-    mir: &Body<'tcx>,
+    body: &Body<'tcx>,
     _location: Location,
     access_place: (AccessDepth, &Place<'tcx>),
     borrow_set: &BorrowSet<'tcx>,
@@ -47,7 +47,7 @@ pub(super) fn each_borrow_involving_path<'a, 'tcx, 'gcx: 'tcx, F, I, S> (
 
         if places_conflict::borrow_conflicts_with_place(
             tcx,
-            mir,
+            body,
             &borrowed.borrowed_place,
             borrowed.kind,
             place,
index 9306e88e9ae9c804704a2a015a31700566d86c22..509bd16d4a080a8c6455498bf7a45c958363773b 100644 (file)
@@ -13,7 +13,7 @@
     fn ignore_borrow(
         &self,
         tcx: TyCtxt<'_, '_, 'tcx>,
-        mir: &Body<'tcx>,
+        body: &Body<'tcx>,
         locals_state_at_exit: &LocalsStateAtExit,
         ) -> bool;
 }
@@ -22,7 +22,7 @@ impl<'tcx> PlaceExt<'tcx> for Place<'tcx> {
     fn ignore_borrow(
         &self,
         tcx: TyCtxt<'_, '_, 'tcx>,
-        mir: &Body<'tcx>,
+        body: &Body<'tcx>,
         locals_state_at_exit: &LocalsStateAtExit,
     ) -> bool {
         self.iterate(|place_base, place_projection| {
@@ -40,7 +40,7 @@ fn ignore_borrow(
                         LocalsStateAtExit::AllAreInvalidated => false,
                         LocalsStateAtExit::SomeAreInvalidated { has_storage_dead_or_moved } => {
                             let ignore = !has_storage_dead_or_moved.contains(*index) &&
-                                mir.local_decls[*index].mutability == Mutability::Not;
+                                body.local_decls[*index].mutability == Mutability::Not;
                             debug!("ignore_borrow: local {:?} => {:?}", index, ignore);
                             ignore
                         }
@@ -55,7 +55,7 @@ fn ignore_borrow(
 
             for proj in place_projection {
                 if proj.elem == ProjectionElem::Deref {
-                    let ty = proj.base.ty(mir, tcx).ty;
+                    let ty = proj.base.ty(body, tcx).ty;
                     match ty.sty {
                         // For both derefs of raw pointers and `&T`
                         // references, the original path is `Copy` and
index 8aa27eef72a5e022efa1e534520245934713184a..a9ee0a65e3af6a93363fe91a31a1be79254dd16f 100644 (file)
 /// dataflow).
 crate fn places_conflict<'gcx, 'tcx>(
     tcx: TyCtxt<'_, 'gcx, 'tcx>,
-    mir: &Body<'tcx>,
+    body: &Body<'tcx>,
     borrow_place: &Place<'tcx>,
     access_place: &Place<'tcx>,
     bias: PlaceConflictBias,
 ) -> bool {
     borrow_conflicts_with_place(
         tcx,
-        mir,
+        body,
         borrow_place,
         BorrowKind::Mut { allow_two_phase_borrow: true },
         access_place,
@@ -48,7 +48,7 @@
 /// order to make the conservative choice and preserve soundness.
 pub(super) fn borrow_conflicts_with_place<'gcx, 'tcx>(
     tcx: TyCtxt<'_, 'gcx, 'tcx>,
-    mir: &Body<'tcx>,
+    body: &Body<'tcx>,
     borrow_place: &Place<'tcx>,
     borrow_kind: BorrowKind,
     access_place: &Place<'tcx>,
@@ -72,7 +72,7 @@ pub(super) fn borrow_conflicts_with_place<'gcx, 'tcx>(
         access_place.iterate(|access_base, access_projections| {
             place_components_conflict(
                 tcx,
-                mir,
+                body,
                 (borrow_base, borrow_projections),
                 borrow_kind,
                 (access_base, access_projections),
@@ -85,7 +85,7 @@ pub(super) fn borrow_conflicts_with_place<'gcx, 'tcx>(
 
 fn place_components_conflict<'gcx, 'tcx>(
     tcx: TyCtxt<'_, 'gcx, 'tcx>,
-    mir: &Body<'tcx>,
+    body: &Body<'tcx>,
     borrow_projections: (&PlaceBase<'tcx>, ProjectionsIter<'_, 'tcx>),
     borrow_kind: BorrowKind,
     access_projections: (&PlaceBase<'tcx>, ProjectionsIter<'_, 'tcx>),
@@ -175,7 +175,7 @@ fn place_components_conflict<'gcx, 'tcx>(
                 // check whether the components being borrowed vs
                 // accessed are disjoint (as in the second example,
                 // but not the first).
-                match place_projection_conflict(tcx, mir, borrow_c, access_c, bias) {
+                match place_projection_conflict(tcx, body, borrow_c, access_c, bias) {
                     Overlap::Arbitrary => {
                         // We have encountered different fields of potentially
                         // the same union - the borrow now partially overlaps.
@@ -214,7 +214,7 @@ fn place_components_conflict<'gcx, 'tcx>(
 
                 let base = &borrow_c.base;
                 let elem = &borrow_c.elem;
-                let base_ty = base.ty(mir, tcx).ty;
+                let base_ty = base.ty(body, tcx).ty;
 
                 match (elem, &base_ty.sty, access) {
                     (_, _, Shallow(Some(ArtificialField::ArrayLength)))
@@ -367,7 +367,7 @@ fn place_base_conflict<'a, 'gcx: 'tcx, 'tcx>(
 // between `elem1` and `elem2`.
 fn place_projection_conflict<'a, 'gcx: 'tcx, 'tcx>(
     tcx: TyCtxt<'a, 'gcx, 'tcx>,
-    mir: &Body<'tcx>,
+    body: &Body<'tcx>,
     pi1: &Projection<'tcx>,
     pi2: &Projection<'tcx>,
     bias: PlaceConflictBias,
@@ -384,7 +384,7 @@ fn place_projection_conflict<'a, 'gcx: 'tcx, 'tcx>(
                 debug!("place_element_conflict: DISJOINT-OR-EQ-FIELD");
                 Overlap::EqualOrDisjoint
             } else {
-                let ty = pi1.base.ty(mir, tcx).ty;
+                let ty = pi1.base.ty(body, tcx).ty;
                 match ty.sty {
                     ty::Adt(def, _) if def.is_union() => {
                         // Different fields of a union, we are basically stuck.
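
As the surrounding comments note, distinct struct fields occupy disjoint storage while distinct union fields share it, so the conflict analysis must treat the latter as overlapping. A standalone illustration (not from this diff):

    struct S { a: u32, b: u32 }
    union U { a: u32, b: f32 }

    fn main() {
        let mut s = S { a: 1, b: 2 };
        let (x, y) = (&mut s.a, &mut s.b); // disjoint fields: both borrows allowed
        *x += 1;
        *y += 1;

        let u = U { a: 0x3f80_0000 };
        // `u.a` and `u.b` alias the same bytes, so accesses through different
        // union fields have to be treated conservatively as conflicting.
        let (bits, float) = unsafe { (u.a, u.b) };
        println!("{} {} {} {}", s.a, s.b, bits, float);
    }
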
index 0e1abeba70dffb0a834faa654fcea54c746905bc..416de1c67e727c311866d92f22088b416fa8677c 100644 (file)
@@ -38,7 +38,7 @@ fn is_prefix_of(&self, other: &Place<'tcx>) -> bool {
 
 
 pub(super) struct Prefixes<'cx, 'gcx: 'tcx, 'tcx: 'cx> {
-    mir: &'cx Body<'tcx>,
+    body: &'cx Body<'tcx>,
     tcx: TyCtxt<'cx, 'gcx, 'tcx>,
     kind: PrefixSet,
     next: Option<&'cx Place<'tcx>>,
@@ -68,7 +68,7 @@ pub(super) fn prefixes(
         Prefixes {
             next: Some(place),
             kind,
-            mir: self.mir,
+            body: self.body,
             tcx: self.infcx.tcx,
         }
     }
@@ -139,7 +139,7 @@ fn next(&mut self) -> Option<Self::Item> {
             // derefs, except we stop at the deref of a shared
             // reference.
 
-            let ty = proj.base.ty(self.mir, self.tcx).ty;
+            let ty = proj.base.ty(self.body, self.tcx).ty;
             match ty.sty {
                 ty::RawPtr(_) |
                 ty::Ref(
index 4a4787337ab582d92b2fffc82fafc21d0c22a59a..7b2f662c7033d1ef4e584933a31617089bb4e20c 100644 (file)
@@ -34,7 +34,7 @@ impl<'cx, 'gcx, 'tcx> MirBorrowckCtxt<'cx, 'gcx, 'tcx> {
                 never_initialized_mut_locals: &mut never_initialized_mut_locals,
                 mbcx: self,
             };
-            visitor.visit_body(visitor.mbcx.mir);
+            visitor.visit_body(visitor.mbcx.body);
         }
 
         // Take the union of the existed `used_mut` set with those variables we've found were
index 20d1423f8a8b6ef97256e633273b50e0fedfe536..0aabebcf9dc2c17636c086a61b7a2a4517c7ec68 100644 (file)
@@ -1575,11 +1575,7 @@ fn bind_and_guard_matched_candidate<'pat>(
 
     /// Append `AscribeUserType` statements onto the end of `block`
     /// for each ascription
-    fn ascribe_types<'pat>(
-        &mut self,
-        block: BasicBlock,
-        ascriptions: &[Ascription<'tcx>],
-    ) {
+    fn ascribe_types(&mut self, block: BasicBlock, ascriptions: &[Ascription<'tcx>]) {
         for ascription in ascriptions {
             let source_info = self.source_info(ascription.span);
 
index 0995a2f7fdf491ec37bf0cb0adc4453e5e4f0c1d..c767fff9253222347b40af0452252a5714211050 100644 (file)
@@ -463,7 +463,7 @@ fn compare(&mut self,
     /// that it *doesn't* apply. For now, we return false, indicate that the
     /// test does not apply to this candidate, but it might be we can get
     /// tighter match code if we do something a bit different.
-    pub fn sort_candidate<'pat, 'cand>(
+    pub fn sort_candidate<'pat>(
         &mut self,
         test_place: &Place<'tcx>,
         test: &Test<'tcx>,
index c8a31ecffb84dc03a6ffb345ab12dc817228f97c..65ece3fa82ff829e620c3ba505e9eb2bb157610a 100644 (file)
@@ -2,7 +2,6 @@
 use crate::build::scope::DropKind;
 use crate::hair::cx::Cx;
 use crate::hair::{LintLevel, BindingMode, PatternKind};
-use crate::shim;
 use crate::transform::MirSource;
 use crate::util as mir_util;
 use rustc::hir;
@@ -31,8 +30,6 @@ pub fn mir_build<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, def_id: DefId) -> Body<'
 
     // Figure out what primary body this item has.
     let (body_id, return_ty_span) = match tcx.hir().get_by_hir_id(id) {
-        Node::Ctor(ctor) => return create_constructor_shim(tcx, id, ctor),
-
         Node::Expr(hir::Expr { node: hir::ExprKind::Closure(_, decl, body_id, _, _), .. })
         | Node::Item(hir::Item { node: hir::ItemKind::Fn(decl, _, _, body_id), .. })
         | Node::ImplItem(
@@ -69,7 +66,7 @@ pub fn mir_build<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, def_id: DefId) -> Body<'
 
     tcx.infer_ctxt().enter(|infcx| {
         let cx = Cx::new(&infcx, id);
-        let mut mir = if cx.tables().tainted_by_errors {
+        let mut body = if cx.tables().tainted_by_errors {
             build::construct_error(cx, body_id)
         } else if cx.body_owner_kind.is_fn_or_closure() {
             // fetch the fully liberated fn signature (that is, all bound
@@ -168,19 +165,19 @@ pub fn mir_build<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, def_id: DefId) -> Body<'
         // Convert the `mir::Body` to global types.
         let mut globalizer = GlobalizeMir {
             tcx,
-            span: mir.span
+            span: body.span
         };
-        globalizer.visit_body(&mut mir);
-        let mir = unsafe {
-            mem::transmute::<Body<'_>, Body<'tcx>>(mir)
+        globalizer.visit_body(&mut body);
+        let body = unsafe {
+            mem::transmute::<Body<'_>, Body<'tcx>>(body)
         };
 
         mir_util::dump_mir(tcx, None, "mir_map", &0,
-                           MirSource::item(def_id), &mir, |_, _| Ok(()) );
+                           MirSource::item(def_id), &body, |_, _| Ok(()) );
 
-        lints::check(tcx, &mir, def_id);
+        lints::check(tcx, &body, def_id);
 
-        mir
+        body
     })
 }
 
@@ -234,38 +231,6 @@ fn visit_substs(&mut self, substs: &mut SubstsRef<'tcx>, _: Location) {
     }
 }
 
-fn create_constructor_shim<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
-                                     ctor_id: hir::HirId,
-                                     v: &'tcx hir::VariantData)
-                                     -> Body<'tcx>
-{
-    let span = tcx.hir().span_by_hir_id(ctor_id);
-    if let hir::VariantData::Tuple(ref fields, ctor_id) = *v {
-        tcx.infer_ctxt().enter(|infcx| {
-            let mut mir = shim::build_adt_ctor(&infcx, ctor_id, fields, span);
-
-            // Convert the `mir::Body` to global types.
-            let tcx = infcx.tcx.global_tcx();
-            let mut globalizer = GlobalizeMir {
-                tcx,
-                span: mir.span
-            };
-            globalizer.visit_body(&mut mir);
-            let mir = unsafe {
-                mem::transmute::<Body<'_>, Body<'tcx>>(mir)
-            };
-
-            mir_util::dump_mir(tcx, None, "mir_map", &0,
-                               MirSource::item(tcx.hir().local_def_id_from_hir_id(ctor_id)),
-                               &mir, |_, _| Ok(()) );
-
-            mir
-        })
-    } else {
-        span_bug!(span, "attempting to create MIR for non-tuple variant {:?}", v);
-    }
-}
-
 ///////////////////////////////////////////////////////////////////////////
 // BuildMir -- walks a crate, looking for fn items and methods to build MIR from
 
@@ -735,9 +700,9 @@ fn construct_fn<'a, 'gcx, 'tcx, A>(hir: Cx<'a, 'gcx, 'tcx>,
     info!("fn_id {:?} has attrs {:?}", fn_def_id,
           tcx.get_attrs(fn_def_id));
 
-    let mut mir = builder.finish(yield_ty);
-    mir.spread_arg = spread_arg;
-    mir
+    let mut body = builder.finish(yield_ty);
+    body.spread_arg = spread_arg;
+    body
 }
 
 fn construct_const<'a, 'gcx, 'tcx>(
index d118a61bcc6ce06f03b9776f9f3cafea56b08218..b938e86ffad16c5458c04a812914a33b0a36e75c 100644 (file)
@@ -24,7 +24,7 @@
 use crate::interpret::{self,
     PlaceTy, MPlaceTy, MemPlace, OpTy, ImmTy, Immediate, Scalar,
     RawConst, ConstValue,
-    EvalResult, EvalError, InterpError, GlobalId, InterpretCx, StackPopCleanup,
+    InterpResult, InterpErrorInfo, InterpError, GlobalId, InterpretCx, StackPopCleanup,
     Allocation, AllocId, MemoryKind,
     snapshot, RefTracking,
 };
@@ -55,12 +55,12 @@ pub(crate) fn mk_eval_cx<'a, 'mir, 'tcx>(
 pub(crate) fn eval_promoted<'a, 'mir, 'tcx>(
     tcx: TyCtxt<'a, 'tcx, 'tcx>,
     cid: GlobalId<'tcx>,
-    mir: &'mir mir::Body<'tcx>,
+    body: &'mir mir::Body<'tcx>,
     param_env: ty::ParamEnv<'tcx>,
-) -> EvalResult<'tcx, MPlaceTy<'tcx>> {
+) -> InterpResult<'tcx, MPlaceTy<'tcx>> {
     let span = tcx.def_span(cid.instance.def_id());
     let mut ecx = mk_eval_cx(tcx, span, param_env);
-    eval_body_using_ecx(&mut ecx, cid, mir, param_env)
+    eval_body_using_ecx(&mut ecx, cid, body, param_env)
 }
 
 fn mplace_to_const<'tcx>(
@@ -139,23 +139,23 @@ fn op_to_const<'tcx>(
 fn eval_body_using_ecx<'mir, 'tcx>(
     ecx: &mut CompileTimeEvalContext<'_, 'mir, 'tcx>,
     cid: GlobalId<'tcx>,
-    mir: &'mir mir::Body<'tcx>,
+    body: &'mir mir::Body<'tcx>,
     param_env: ty::ParamEnv<'tcx>,
-) -> EvalResult<'tcx, MPlaceTy<'tcx>> {
+) -> InterpResult<'tcx, MPlaceTy<'tcx>> {
     debug!("eval_body_using_ecx: {:?}, {:?}", cid, param_env);
     let tcx = ecx.tcx.tcx;
-    let layout = ecx.layout_of(mir.return_ty().subst(tcx, cid.instance.substs))?;
+    let layout = ecx.layout_of(body.return_ty().subst(tcx, cid.instance.substs))?;
     assert!(!layout.is_unsized());
     let ret = ecx.allocate(layout, MemoryKind::Stack);
 
     let name = ty::tls::with(|tcx| tcx.def_path_str(cid.instance.def_id()));
     let prom = cid.promoted.map_or(String::new(), |p| format!("::promoted[{:?}]", p));
     trace!("eval_body_using_ecx: pushing stack frame for global: {}{}", name, prom);
-    assert!(mir.arg_count == 0);
+    assert!(body.arg_count == 0);
     ecx.push_stack_frame(
         cid.instance,
-        mir.span,
-        mir,
+        body.span,
+        body,
         Some(ret.into()),
         StackPopCleanup::None { cleanup: false },
     )?;
@@ -165,7 +165,7 @@ fn eval_body_using_ecx<'mir, 'tcx>(
 
     // Intern the result
     let mutability = if tcx.is_mutable_static(cid.instance.def_id()) ||
-                     !layout.ty.is_freeze(tcx, param_env, mir.span) {
+                     !layout.ty.is_freeze(tcx, param_env, body.span) {
         Mutability::Mutable
     } else {
         Mutability::Immutable
@@ -176,8 +176,8 @@ fn eval_body_using_ecx<'mir, 'tcx>(
     Ok(ret)
 }
 
-impl<'tcx> Into<EvalError<'tcx>> for ConstEvalError {
-    fn into(self) -> EvalError<'tcx> {
+impl<'tcx> Into<InterpErrorInfo<'tcx>> for ConstEvalError {
+    fn into(self) -> InterpErrorInfo<'tcx> {
         InterpError::MachineError(self.to_string()).into()
     }
 }
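
For readers tracking the `EvalResult`/`EvalError` to `InterpResult`/`InterpErrorInfo` rename in this file: the result alias defaults its success type, which is why plain `EvalResult<'tcx>` becomes plain `InterpResult<'tcx>` in the signatures below. Roughly the following shape, hedged with placeholders; the real definitions live in the interpreter's error module:

    use std::marker::PhantomData;

    // Placeholder standing in for the real error payload type.
    pub struct InterpErrorInfo<'tcx>(PhantomData<&'tcx ()>);

    // The success type defaults to `()`, so `InterpResult<'tcx>` alone covers the
    // common "may fail, returns nothing" case seen throughout these hunks.
    pub type InterpResult<'tcx, T = ()> = Result<T, InterpErrorInfo<'tcx>>;
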
@@ -333,7 +333,7 @@ fn find_fn(
         args: &[OpTy<'tcx>],
         dest: Option<PlaceTy<'tcx>>,
         ret: Option<mir::BasicBlock>,
-    ) -> EvalResult<'tcx, Option<&'mir mir::Body<'tcx>>> {
+    ) -> InterpResult<'tcx, Option<&'mir mir::Body<'tcx>>> {
         debug!("eval_fn_call: {:?}", instance);
         // Only check non-glue functions
         if let ty::InstanceDef::Item(def_id) = instance.def {
@@ -354,7 +354,7 @@ fn find_fn(
         }
         // This is a const fn. Call it.
         Ok(Some(match ecx.load_mir(instance.def) {
-            Ok(mir) => mir,
+            Ok(body) => body,
             Err(err) => {
                 if let InterpError::NoMirFor(ref path) = err.kind {
                     return Err(
@@ -372,7 +372,7 @@ fn call_intrinsic(
         instance: ty::Instance<'tcx>,
         args: &[OpTy<'tcx>],
         dest: PlaceTy<'tcx>,
-    ) -> EvalResult<'tcx> {
+    ) -> InterpResult<'tcx> {
         if ecx.emulate_intrinsic(instance, args, dest)? {
             return Ok(());
         }
@@ -388,7 +388,7 @@ fn ptr_op(
         _bin_op: mir::BinOp,
         _left: ImmTy<'tcx>,
         _right: ImmTy<'tcx>,
-    ) -> EvalResult<'tcx, (Scalar, bool)> {
+    ) -> InterpResult<'tcx, (Scalar, bool)> {
         Err(
             ConstEvalError::NeedsRfc("pointer arithmetic or comparison".to_string()).into(),
         )
@@ -397,7 +397,7 @@ fn ptr_op(
     fn find_foreign_static(
         _def_id: DefId,
         _tcx: TyCtxtAt<'a, 'tcx, 'tcx>,
-    ) -> EvalResult<'tcx, Cow<'tcx, Allocation<Self::PointerTag>>> {
+    ) -> InterpResult<'tcx, Cow<'tcx, Allocation<Self::PointerTag>>> {
         err!(ReadForeignStatic)
     }
 
@@ -423,13 +423,13 @@ fn tag_static_base_pointer(
     fn box_alloc(
         _ecx: &mut InterpretCx<'a, 'mir, 'tcx, Self>,
         _dest: PlaceTy<'tcx>,
-    ) -> EvalResult<'tcx> {
+    ) -> InterpResult<'tcx> {
         Err(
             ConstEvalError::NeedsRfc("heap allocations via `box` keyword".to_string()).into(),
         )
     }
 
-    fn before_terminator(ecx: &mut InterpretCx<'a, 'mir, 'tcx, Self>) -> EvalResult<'tcx> {
+    fn before_terminator(ecx: &mut InterpretCx<'a, 'mir, 'tcx, Self>) -> InterpResult<'tcx> {
         {
             let steps = &mut ecx.machine.steps_since_detector_enabled;
 
@@ -456,7 +456,7 @@ fn before_terminator(ecx: &mut InterpretCx<'a, 'mir, 'tcx, Self>) -> EvalResult<
     #[inline(always)]
     fn stack_push(
         _ecx: &mut InterpretCx<'a, 'mir, 'tcx, Self>,
-    ) -> EvalResult<'tcx> {
+    ) -> InterpResult<'tcx> {
         Ok(())
     }
 
@@ -465,7 +465,7 @@ fn stack_push(
     fn stack_pop(
         _ecx: &mut InterpretCx<'a, 'mir, 'tcx, Self>,
         _extra: (),
-    ) -> EvalResult<'tcx> {
+    ) -> InterpResult<'tcx> {
         Ok(())
     }
 }
@@ -511,7 +511,7 @@ pub fn const_variant_index<'a, 'tcx>(
 
 pub fn error_to_const_error<'a, 'mir, 'tcx>(
     ecx: &InterpretCx<'a, 'mir, 'tcx, CompileTimeInterpreter<'a, 'mir, 'tcx>>,
-    mut error: EvalError<'tcx>
+    mut error: InterpErrorInfo<'tcx>
 ) -> ConstEvalErr<'tcx> {
     error.print_backtrace();
     let stacktrace = ecx.generate_stacktrace(None);
@@ -628,14 +628,14 @@ pub fn const_eval_raw_provider<'a, 'tcx>(
     let mut ecx = InterpretCx::new(tcx.at(span), key.param_env, CompileTimeInterpreter::new());
 
     let res = ecx.load_mir(cid.instance.def);
-    res.map(|mir| {
+    res.map(|body| {
         if let Some(index) = cid.promoted {
-            &mir.promoted[index]
+            &body.promoted[index]
         } else {
-            mir
+            body
         }
     }).and_then(
-        |mir| eval_body_using_ecx(&mut ecx, cid, mir, key.param_env)
+        |body| eval_body_using_ecx(&mut ecx, cid, body, key.param_env)
     ).and_then(|place| {
         Ok(RawConst {
             alloc_id: place.to_ptr().expect("we allocated this ptr!").alloc_id,
index d43fa4257e06cce9cf3a79238416a6e278a8877d..9cba34b425350bbcb8e4e5bb6512eb8bd1b3914c 100644 (file)
@@ -131,6 +131,11 @@ pub fn with_iter_outgoing<F>(&self, f: F)
         curr_state.subtract(&self.stmt_kill);
         f(curr_state.iter());
     }
+
+    /// Returns a bitset of the elements present in the current state.
+    pub fn as_dense(&self) -> &BitSet<BD::Idx> {
+        &self.curr_state
+    }
 }
 
 impl<'tcx, BD> FlowsAtLocation for FlowAtLocation<'tcx, BD>
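
The `as_dense` accessor added above exposes the current dataflow state as a borrowed bitset instead of forcing callers to clone it. A tiny, self-contained analogue of that design choice (plain `Vec<bool>` standing in for `BitSet`):

    struct FlowAt {
        curr_state: Vec<bool>, // stand-in for BitSet<BD::Idx>
    }

    impl FlowAt {
        /// Borrow the dense current state rather than copying it out.
        fn as_dense(&self) -> &[bool] {
            &self.curr_state
        }
    }

    fn main() {
        let flow = FlowAt { curr_state: vec![true, false, true] };
        for (idx, set) in flow.as_dense().iter().enumerate() {
            if *set {
                println!("element {} is set in the current state", idx);
            }
        }
    }
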
index f9d88ab879596f916438b9d240730ad324ac80ba..b77fdcdd7b6de6896d2827b0f2a2cb6a69cd8420 100644 (file)
@@ -47,9 +47,9 @@ pub fn move_path_children_matching<'tcx, F>(move_data: &MoveData<'tcx>,
 //
 // FIXME: we have to do something for moving slice patterns.
 fn place_contents_drop_state_cannot_differ<'a, 'gcx, 'tcx>(tcx: TyCtxt<'a, 'gcx, 'tcx>,
-                                                            mir: &Body<'tcx>,
+                                                            body: &Body<'tcx>,
                                                             place: &mir::Place<'tcx>) -> bool {
-    let ty = place.ty(mir, tcx).ty;
+    let ty = place.ty(body, tcx).ty;
     match ty.sty {
         ty::Array(..) => {
             debug!("place_contents_drop_state_cannot_differ place: {:?} ty: {:?} => false",
@@ -74,7 +74,7 @@ fn place_contents_drop_state_cannot_differ<'a, 'gcx, 'tcx>(tcx: TyCtxt<'a, 'gcx,
 
 pub(crate) fn on_lookup_result_bits<'a, 'gcx, 'tcx, F>(
     tcx: TyCtxt<'a, 'gcx, 'tcx>,
-    mir: &Body<'tcx>,
+    body: &Body<'tcx>,
     move_data: &MoveData<'tcx>,
     lookup_result: LookupResult,
     each_child: F)
@@ -85,14 +85,14 @@ pub(crate) fn on_lookup_result_bits<'a, 'gcx, 'tcx, F>(
             // access to untracked value - do not touch children
         }
         LookupResult::Exact(e) => {
-            on_all_children_bits(tcx, mir, move_data, e, each_child)
+            on_all_children_bits(tcx, body, move_data, e, each_child)
         }
     }
 }
 
 pub(crate) fn on_all_children_bits<'a, 'gcx, 'tcx, F>(
     tcx: TyCtxt<'a, 'gcx, 'tcx>,
-    mir: &Body<'tcx>,
+    body: &Body<'tcx>,
     move_data: &MoveData<'tcx>,
     move_path_index: MovePathIndex,
     mut each_child: F)
@@ -100,17 +100,17 @@ pub(crate) fn on_all_children_bits<'a, 'gcx, 'tcx, F>(
 {
     fn is_terminal_path<'a, 'gcx, 'tcx>(
         tcx: TyCtxt<'a, 'gcx, 'tcx>,
-        mir: &Body<'tcx>,
+        body: &Body<'tcx>,
         move_data: &MoveData<'tcx>,
         path: MovePathIndex) -> bool
     {
         place_contents_drop_state_cannot_differ(
-            tcx, mir, &move_data.move_paths[path].place)
+            tcx, body, &move_data.move_paths[path].place)
     }
 
     fn on_all_children_bits<'a, 'gcx, 'tcx, F>(
         tcx: TyCtxt<'a, 'gcx, 'tcx>,
-        mir: &Body<'tcx>,
+        body: &Body<'tcx>,
         move_data: &MoveData<'tcx>,
         move_path_index: MovePathIndex,
         each_child: &mut F)
@@ -118,30 +118,30 @@ fn on_all_children_bits<'a, 'gcx, 'tcx, F>(
     {
         each_child(move_path_index);
 
-        if is_terminal_path(tcx, mir, move_data, move_path_index) {
+        if is_terminal_path(tcx, body, move_data, move_path_index) {
             return
         }
 
         let mut next_child_index = move_data.move_paths[move_path_index].first_child;
         while let Some(child_index) = next_child_index {
-            on_all_children_bits(tcx, mir, move_data, child_index, each_child);
+            on_all_children_bits(tcx, body, move_data, child_index, each_child);
             next_child_index = move_data.move_paths[child_index].next_sibling;
         }
     }
-    on_all_children_bits(tcx, mir, move_data, move_path_index, &mut each_child);
+    on_all_children_bits(tcx, body, move_data, move_path_index, &mut each_child);
 }
 
 pub(crate) fn on_all_drop_children_bits<'a, 'gcx, 'tcx, F>(
     tcx: TyCtxt<'a, 'gcx, 'tcx>,
-    mir: &Body<'tcx>,
+    body: &Body<'tcx>,
     ctxt: &MoveDataParamEnv<'gcx, 'tcx>,
     path: MovePathIndex,
     mut each_child: F)
     where F: FnMut(MovePathIndex)
 {
-    on_all_children_bits(tcx, mir, &ctxt.move_data, path, |child| {
+    on_all_children_bits(tcx, body, &ctxt.move_data, path, |child| {
         let place = &ctxt.move_data.move_paths[path].place;
-        let ty = place.ty(mir, tcx).ty;
+        let ty = place.ty(body, tcx).ty;
         debug!("on_all_drop_children_bits({:?}, {:?} : {:?})", path, place, ty);
 
         let gcx = tcx.global_tcx();
@@ -156,16 +156,16 @@ pub(crate) fn on_all_drop_children_bits<'a, 'gcx, 'tcx, F>(
 
 pub(crate) fn drop_flag_effects_for_function_entry<'a, 'gcx, 'tcx, F>(
     tcx: TyCtxt<'a, 'gcx, 'tcx>,
-    mir: &Body<'tcx>,
+    body: &Body<'tcx>,
     ctxt: &MoveDataParamEnv<'gcx, 'tcx>,
     mut callback: F)
     where F: FnMut(MovePathIndex, DropFlagState)
 {
     let move_data = &ctxt.move_data;
-    for arg in mir.args_iter() {
+    for arg in body.args_iter() {
         let place = mir::Place::Base(mir::PlaceBase::Local(arg));
         let lookup_result = move_data.rev_lookup.find(&place);
-        on_lookup_result_bits(tcx, mir, move_data,
+        on_lookup_result_bits(tcx, body, move_data,
                               lookup_result,
                               |mpi| callback(mpi, DropFlagState::Present));
     }
@@ -173,7 +173,7 @@ pub(crate) fn drop_flag_effects_for_function_entry<'a, 'gcx, 'tcx, F>(
 
 pub(crate) fn drop_flag_effects_for_location<'a, 'gcx, 'tcx, F>(
     tcx: TyCtxt<'a, 'gcx, 'tcx>,
-    mir: &Body<'tcx>,
+    body: &Body<'tcx>,
     ctxt: &MoveDataParamEnv<'gcx, 'tcx>,
     loc: Location,
     mut callback: F)
@@ -187,7 +187,7 @@ pub(crate) fn drop_flag_effects_for_location<'a, 'gcx, 'tcx, F>(
         let path = mi.move_path_index(move_data);
         debug!("moving out of path {:?}", move_data.move_paths[path]);
 
-        on_all_children_bits(tcx, mir, move_data,
+        on_all_children_bits(tcx, body, move_data,
                              path,
                              |mpi| callback(mpi, DropFlagState::Absent))
     }
@@ -196,7 +196,7 @@ pub(crate) fn drop_flag_effects_for_location<'a, 'gcx, 'tcx, F>(
 
     for_location_inits(
         tcx,
-        mir,
+        body,
         move_data,
         loc,
         |mpi| callback(mpi, DropFlagState::Present)
@@ -205,7 +205,7 @@ pub(crate) fn drop_flag_effects_for_location<'a, 'gcx, 'tcx, F>(
 
 pub(crate) fn for_location_inits<'a, 'gcx, 'tcx, F>(
     tcx: TyCtxt<'a, 'gcx, 'tcx>,
-    mir: &Body<'tcx>,
+    body: &Body<'tcx>,
     move_data: &MoveData<'tcx>,
     loc: Location,
     mut callback: F)
@@ -217,7 +217,7 @@ pub(crate) fn for_location_inits<'a, 'gcx, 'tcx, F>(
             InitKind::Deep => {
                 let path = init.path;
 
-                on_all_children_bits(tcx, mir, move_data,
+                on_all_children_bits(tcx, body, move_data,
                                     path,
                                     &mut callback)
             },
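
`on_all_children_bits` above walks the move-path tree through its `first_child`/`next_sibling` links, invoking the callback for the path and every descendant. A hedged, self-contained sketch of that traversal over plain indices:

    struct MovePathNode {
        first_child: Option<usize>,
        next_sibling: Option<usize>,
    }

    fn on_all_children<F: FnMut(usize)>(paths: &[MovePathNode], path: usize, each: &mut F) {
        each(path);
        let mut next = paths[path].first_child;
        while let Some(child) = next {
            on_all_children(paths, child, &mut *each);
            next = paths[child].next_sibling;
        }
    }

    fn main() {
        // One root (0) with two children (1 and 2) linked as siblings.
        let paths = vec![
            MovePathNode { first_child: Some(1), next_sibling: None },
            MovePathNode { first_child: None, next_sibling: Some(2) },
            MovePathNode { first_child: None, next_sibling: None },
        ];
        let mut visited = Vec::new();
        on_all_children(&paths, 0, &mut |i| visited.push(i));
        assert_eq!(visited, vec![0, 1, 2]);
    }
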
index 4965f1a585d482aad4e9d6f2b0569d047879ae39..f62ad2fbef71f06bcd8d3d8e7cac5f55440f761f 100644 (file)
@@ -17,7 +17,7 @@
 pub trait MirWithFlowState<'tcx> {
     type BD: BitDenotation<'tcx>;
     fn def_id(&self) -> DefId;
-    fn mir(&self) -> &Body<'tcx>;
+    fn body(&self) -> &Body<'tcx>;
     fn flow_state(&self) -> &DataflowState<'tcx, Self::BD>;
 }
 
@@ -26,7 +26,7 @@ impl<'a, 'tcx, BD> MirWithFlowState<'tcx> for DataflowBuilder<'a, 'tcx, BD>
 {
     type BD = BD;
     fn def_id(&self) -> DefId { self.def_id }
-    fn mir(&self) -> &Body<'tcx> { self.flow_state.mir() }
+    fn body(&self) -> &Body<'tcx> { self.flow_state.body() }
     fn flow_state(&self) -> &DataflowState<'tcx, Self::BD> { &self.flow_state.flow_state }
 }
 
@@ -59,8 +59,8 @@ pub(crate) fn print_borrowck_graph_to<'a, 'tcx, BD, P>(
 #[derive(Copy, Clone, PartialEq, Eq, Debug)]
 pub struct Edge { source: BasicBlock, index: usize }
 
-fn outgoing(mir: &Body<'_>, bb: BasicBlock) -> Vec<Edge> {
-    (0..mir[bb].terminator().successors().count())
+fn outgoing(body: &Body<'_>, bb: BasicBlock) -> Vec<Edge> {
+    (0..body[bb].terminator().successors().count())
         .map(|index| Edge { source: bb, index: index}).collect()
 }
 
@@ -99,7 +99,7 @@ fn node_label(&self, n: &Node) -> dot::LabelText<'_> {
         // | [00-00] | _7 = const Foo::twiddle(move _8) | [0c-00]          | [f3-0f]          |
         // +---------+----------------------------------+------------------+------------------+
         let mut v = Vec::new();
-        self.node_label_internal(n, &mut v, *n, self.mbcx.mir()).unwrap();
+        self.node_label_internal(n, &mut v, *n, self.mbcx.body()).unwrap();
         dot::LabelText::html(String::from_utf8(v).unwrap())
     }
 
@@ -109,7 +109,7 @@ fn node_shape(&self, _n: &Node) -> Option<dot::LabelText<'_>> {
     }
 
     fn edge_label(&'a self, e: &Edge) -> dot::LabelText<'a> {
-        let term = self.mbcx.mir()[e.source].terminator();
+        let term = self.mbcx.body()[e.source].terminator();
         let label = &term.kind.fmt_successor_labels()[e.index];
         dot::LabelText::label(label.clone())
     }
@@ -124,7 +124,7 @@ fn node_label_internal<W: io::Write>(&self,
                                          n: &Node,
                                          w: &mut W,
                                          block: BasicBlock,
-                                         mir: &Body<'_>) -> io::Result<()> {
+                                         body: &Body<'_>) -> io::Result<()> {
         // Header rows
         const HDRS: [&str; 4] = ["ENTRY", "MIR", "BLOCK GENS", "BLOCK KILLS"];
         const HDR_FMT: &str = "bgcolor=\"grey\"";
@@ -137,8 +137,8 @@ fn node_label_internal<W: io::Write>(&self,
         write!(w, "</tr>")?;
 
         // Data row
-        self.node_label_verbose_row(n, w, block, mir)?;
-        self.node_label_final_row(n, w, block, mir)?;
+        self.node_label_verbose_row(n, w, block, body)?;
+        self.node_label_final_row(n, w, block, body)?;
         write!(w, "</table>")?;
 
         Ok(())
@@ -149,7 +149,7 @@ fn node_label_verbose_row<W: io::Write>(&self,
                                             n: &Node,
                                             w: &mut W,
                                             block: BasicBlock,
-                                            mir: &Body<'_>)
+                                            body: &Body<'_>)
                                             -> io::Result<()> {
         let i = n.index();
 
@@ -175,7 +175,7 @@ macro_rules! dump_set_for {
         // MIR statements
         write!(w, "<td>")?;
         {
-            let data = &mir[block];
+            let data = &body[block];
             for (i, statement) in data.statements.iter().enumerate() {
                 write!(w, "{}<br align=\"left\"/>",
                        dot::escape_html(&format!("{:3}: {:?}", i, statement)))?;
@@ -199,7 +199,7 @@ fn node_label_final_row<W: io::Write>(&self,
                                           n: &Node,
                                           w: &mut W,
                                           block: BasicBlock,
-                                          mir: &Body<'_>)
+                                          body: &Body<'_>)
                                           -> io::Result<()> {
         let i = n.index();
 
@@ -214,7 +214,7 @@ fn node_label_final_row<W: io::Write>(&self,
         // Terminator
         write!(w, "<td>")?;
         {
-            let data = &mir[block];
+            let data = &body[block];
             let mut terminator_head = String::new();
             data.terminator().kind.fmt_head(&mut terminator_head).unwrap();
             write!(w, "{}", dot::escape_html(&terminator_head))?;
@@ -241,7 +241,7 @@ impl<'a, 'tcx, MWF, P> dot::GraphWalk<'a> for Graph<'a, 'tcx, MWF, P>
     type Node = Node;
     type Edge = Edge;
     fn nodes(&self) -> dot::Nodes<'_, Node> {
-        self.mbcx.mir()
+        self.mbcx.body()
             .basic_blocks()
             .indices()
             .collect::<Vec<_>>()
@@ -249,11 +249,11 @@ fn nodes(&self) -> dot::Nodes<'_, Node> {
     }
 
     fn edges(&self) -> dot::Edges<'_, Edge> {
-        let mir = self.mbcx.mir();
+        let body = self.mbcx.body();
 
-        mir.basic_blocks()
+        body.basic_blocks()
            .indices()
-           .flat_map(|bb| outgoing(mir, bb))
+           .flat_map(|bb| outgoing(body, bb))
            .collect::<Vec<_>>()
            .into()
     }
@@ -263,7 +263,7 @@ fn source(&self, edge: &Edge) -> Node {
     }
 
     fn target(&self, edge: &Edge) -> Node {
-        let mir = self.mbcx.mir();
-        *mir[edge.source].terminator().successors().nth(edge.index).unwrap()
+        let body = self.mbcx.body();
+        *body[edge.source].terminator().successors().nth(edge.index).unwrap()
     }
 }
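
The GraphWalk/Labeller impls above enumerate one Edge per (block, successor-position) pair and resolve an edge to its target through the terminator's successor list. The same enumeration idea on a toy CFG, sketched with plain indices instead of BasicBlock and Body (illustrative only, not the compiler's API):

// A block is just a list of successor block indices here.
#[derive(Copy, Clone, PartialEq, Eq, Debug)]
struct Edge { source: usize, index: usize }

struct Cfg { succs: Vec<Vec<usize>> }

impl Cfg {
    // One Edge per (block, successor position), mirroring `outgoing` above.
    fn outgoing(&self, bb: usize) -> Vec<Edge> {
        (0..self.succs[bb].len()).map(|index| Edge { source: bb, index }).collect()
    }

    // All edges of the graph, mirroring GraphWalk::edges.
    fn edges(&self) -> Vec<Edge> {
        (0..self.succs.len()).flat_map(|bb| self.outgoing(bb)).collect()
    }

    // Resolve an edge to its target block, mirroring GraphWalk::target.
    fn target(&self, e: &Edge) -> usize {
        self.succs[e.source][e.index]
    }
}

fn main() {
    let cfg = Cfg { succs: vec![vec![1, 2], vec![2], vec![]] };
    for e in cfg.edges() {
        println!("bb{} --[{}]--> bb{}", e.source, e.index, cfg.target(&e));
    }
}
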
index 55ec46566a487997748fa17e4aba50e6941a67c6..a5e1b4ebaafee58d810a1e85c75820d9f16feed2 100644 (file)
 /// immovable generators.
 #[derive(Copy, Clone)]
 pub struct HaveBeenBorrowedLocals<'a, 'tcx: 'a> {
-    mir: &'a Body<'tcx>,
+    body: &'a Body<'tcx>,
 }
 
 impl<'a, 'tcx: 'a> HaveBeenBorrowedLocals<'a, 'tcx> {
-    pub fn new(mir: &'a Body<'tcx>)
+    pub fn new(body: &'a Body<'tcx>)
                -> Self {
-        HaveBeenBorrowedLocals { mir }
+        HaveBeenBorrowedLocals { body }
     }
 
-    pub fn mir(&self) -> &Body<'tcx> {
-        self.mir
+    pub fn body(&self) -> &Body<'tcx> {
+        self.body
     }
 }
 
@@ -30,7 +30,7 @@ impl<'a, 'tcx> BitDenotation<'tcx> for HaveBeenBorrowedLocals<'a, 'tcx> {
     type Idx = Local;
     fn name() -> &'static str { "has_been_borrowed_locals" }
     fn bits_per_block(&self) -> usize {
-        self.mir.local_decls.len()
+        self.body.local_decls.len()
     }
 
     fn start_block_effect(&self, _sets: &mut BitSet<Local>) {
@@ -40,7 +40,7 @@ fn start_block_effect(&self, _sets: &mut BitSet<Local>) {
     fn statement_effect(&self,
                         sets: &mut BlockSets<'_, Local>,
                         loc: Location) {
-        let stmt = &self.mir[loc.block].statements[loc.statement_index];
+        let stmt = &self.body[loc.block].statements[loc.statement_index];
 
         BorrowedLocalsVisitor {
             sets,
@@ -56,7 +56,7 @@ fn statement_effect(&self,
     fn terminator_effect(&self,
                          sets: &mut BlockSets<'_, Local>,
                          loc: Location) {
-        let terminator = self.mir[loc.block].terminator();
+        let terminator = self.body[loc.block].terminator();
         BorrowedLocalsVisitor {
             sets,
         }.visit_terminator(terminator, loc);
index 99051fb37f1488b9e91ada1e849971d2a75d4326..eedb936aed93789450e878b8a38cd2a0aa5f5df8 100644 (file)
@@ -31,7 +31,7 @@ pub struct BorrowIndex {
 /// borrows in compact bitvectors.
 pub struct Borrows<'a, 'gcx: 'tcx, 'tcx: 'a> {
     tcx: TyCtxt<'a, 'gcx, 'tcx>,
-    mir: &'a Body<'tcx>,
+    body: &'a Body<'tcx>,
 
     borrow_set: Rc<BorrowSet<'tcx>>,
     borrows_out_of_scope_at_location: FxHashMap<Location, Vec<BorrowIndex>>,
@@ -48,7 +48,7 @@ struct StackEntry {
 }
 
 fn precompute_borrows_out_of_scope<'tcx>(
-    mir: &Body<'tcx>,
+    body: &Body<'tcx>,
     regioncx: &Rc<RegionInferenceContext<'tcx>>,
     borrows_out_of_scope_at_location: &mut FxHashMap<Location, Vec<BorrowIndex>>,
     borrow_index: BorrowIndex,
@@ -72,7 +72,7 @@ fn precompute_borrows_out_of_scope<'tcx>(
     stack.push(StackEntry {
         bb: location.block,
         lo: location.statement_index,
-        hi: mir[location.block].statements.len(),
+        hi: body[location.block].statements.len(),
         first_part_only: false,
     });
 
@@ -95,7 +95,7 @@ fn precompute_borrows_out_of_scope<'tcx>(
 
         if !finished_early {
             // Add successor BBs to the work list, if necessary.
-            let bb_data = &mir[bb];
+            let bb_data = &body[bb];
             assert!(hi == bb_data.statements.len());
             for &succ_bb in bb_data.terminator.as_ref().unwrap().successors() {
                 visited.entry(succ_bb)
@@ -121,7 +121,7 @@ fn precompute_borrows_out_of_scope<'tcx>(
                         stack.push(StackEntry {
                             bb: succ_bb,
                             lo: 0,
-                            hi: mir[succ_bb].statements.len(),
+                            hi: body[succ_bb].statements.len(),
                             first_part_only: false,
                         });
                         // Insert 0 for this BB, to represent the whole BB
@@ -136,7 +136,7 @@ fn precompute_borrows_out_of_scope<'tcx>(
 impl<'a, 'gcx, 'tcx> Borrows<'a, 'gcx, 'tcx> {
     crate fn new(
         tcx: TyCtxt<'a, 'gcx, 'tcx>,
-        mir: &'a Body<'tcx>,
+        body: &'a Body<'tcx>,
         nonlexical_regioncx: Rc<RegionInferenceContext<'tcx>>,
         borrow_set: &Rc<BorrowSet<'tcx>>,
     ) -> Self {
@@ -145,14 +145,14 @@ impl<'a, 'gcx, 'tcx> Borrows<'a, 'gcx, 'tcx> {
             let borrow_region = borrow_data.region.to_region_vid();
             let location = borrow_set.borrows[borrow_index].reserve_location;
 
-            precompute_borrows_out_of_scope(mir, &nonlexical_regioncx,
+            precompute_borrows_out_of_scope(body, &nonlexical_regioncx,
                                             &mut borrows_out_of_scope_at_location,
                                             borrow_index, borrow_region, location);
         }
 
         Borrows {
             tcx: tcx,
-            mir: mir,
+            body: body,
             borrow_set: borrow_set.clone(),
             borrows_out_of_scope_at_location,
             _nonlexical_regioncx: nonlexical_regioncx,
@@ -219,7 +219,7 @@ fn kill_borrows_on_place(
             // locations.
             if places_conflict::places_conflict(
                 self.tcx,
-                self.mir,
+                self.body,
                 &borrow_data.borrowed_place,
                 place,
                 places_conflict::PlaceConflictBias::NoOverlap,
@@ -257,7 +257,7 @@ fn before_statement_effect(&self,
     fn statement_effect(&self, sets: &mut BlockSets<'_, BorrowIndex>, location: Location) {
         debug!("Borrows::statement_effect: sets={:?} location={:?}", sets, location);
 
-        let block = &self.mir.basic_blocks().get(location.block).unwrap_or_else(|| {
+        let block = &self.body.basic_blocks().get(location.block).unwrap_or_else(|| {
             panic!("could not find block at location {:?}", location);
         });
         let stmt = block.statements.get(location.statement_index).unwrap_or_else(|| {
@@ -274,7 +274,7 @@ fn statement_effect(&self, sets: &mut BlockSets<'_, BorrowIndex>, location: Loca
                 if let mir::Rvalue::Ref(_, _, ref place) = **rhs {
                     if place.ignore_borrow(
                         self.tcx,
-                        self.mir,
+                        self.body,
                         &self.borrow_set.locals_state_at_exit,
                     ) {
                         return;
index 4f3b180edd111812323a536db9e87da2cc3a7a09..55ef861d7926efbd46d4c6a87bac9478c54e45f6 100644 (file)
 /// places that would require a dynamic drop-flag at that statement.
 pub struct MaybeInitializedPlaces<'a, 'gcx: 'tcx, 'tcx: 'a> {
     tcx: TyCtxt<'a, 'gcx, 'tcx>,
-    mir: &'a Body<'tcx>,
+    body: &'a Body<'tcx>,
     mdpe: &'a MoveDataParamEnv<'gcx, 'tcx>,
 }
 
 impl<'a, 'gcx: 'tcx, 'tcx> MaybeInitializedPlaces<'a, 'gcx, 'tcx> {
     pub fn new(tcx: TyCtxt<'a, 'gcx, 'tcx>,
-               mir: &'a Body<'tcx>,
+               body: &'a Body<'tcx>,
                mdpe: &'a MoveDataParamEnv<'gcx, 'tcx>)
                -> Self
     {
-        MaybeInitializedPlaces { tcx: tcx, mir: mir, mdpe: mdpe }
+        MaybeInitializedPlaces { tcx: tcx, body: body, mdpe: mdpe }
     }
 }
 
@@ -120,17 +120,17 @@ fn move_data(&self) -> &MoveData<'tcx> { &self.mdpe.move_data }
 /// places that would require a dynamic drop-flag at that statement.
 pub struct MaybeUninitializedPlaces<'a, 'gcx: 'tcx, 'tcx: 'a> {
     tcx: TyCtxt<'a, 'gcx, 'tcx>,
-    mir: &'a Body<'tcx>,
+    body: &'a Body<'tcx>,
     mdpe: &'a MoveDataParamEnv<'gcx, 'tcx>,
 }
 
 impl<'a, 'gcx, 'tcx> MaybeUninitializedPlaces<'a, 'gcx, 'tcx> {
     pub fn new(tcx: TyCtxt<'a, 'gcx, 'tcx>,
-               mir: &'a Body<'tcx>,
+               body: &'a Body<'tcx>,
                mdpe: &'a MoveDataParamEnv<'gcx, 'tcx>)
                -> Self
     {
-        MaybeUninitializedPlaces { tcx: tcx, mir: mir, mdpe: mdpe }
+        MaybeUninitializedPlaces { tcx: tcx, body: body, mdpe: mdpe }
     }
 }
 
@@ -174,17 +174,17 @@ fn move_data(&self) -> &MoveData<'tcx> { &self.mdpe.move_data }
 /// that would require a dynamic drop-flag at that statement.
 pub struct DefinitelyInitializedPlaces<'a, 'gcx: 'tcx, 'tcx: 'a> {
     tcx: TyCtxt<'a, 'gcx, 'tcx>,
-    mir: &'a Body<'tcx>,
+    body: &'a Body<'tcx>,
     mdpe: &'a MoveDataParamEnv<'gcx, 'tcx>,
 }
 
 impl<'a, 'gcx, 'tcx: 'a> DefinitelyInitializedPlaces<'a, 'gcx, 'tcx> {
     pub fn new(tcx: TyCtxt<'a, 'gcx, 'tcx>,
-               mir: &'a Body<'tcx>,
+               body: &'a Body<'tcx>,
                mdpe: &'a MoveDataParamEnv<'gcx, 'tcx>)
                -> Self
     {
-        DefinitelyInitializedPlaces { tcx: tcx, mir: mir, mdpe: mdpe }
+        DefinitelyInitializedPlaces { tcx: tcx, body: body, mdpe: mdpe }
     }
 }
 
@@ -223,17 +223,17 @@ fn move_data(&self) -> &MoveData<'tcx> { &self.mdpe.move_data }
 /// ```
 pub struct EverInitializedPlaces<'a, 'gcx: 'tcx, 'tcx: 'a> {
     tcx: TyCtxt<'a, 'gcx, 'tcx>,
-    mir: &'a Body<'tcx>,
+    body: &'a Body<'tcx>,
     mdpe: &'a MoveDataParamEnv<'gcx, 'tcx>,
 }
 
 impl<'a, 'gcx: 'tcx, 'tcx: 'a> EverInitializedPlaces<'a, 'gcx, 'tcx> {
     pub fn new(tcx: TyCtxt<'a, 'gcx, 'tcx>,
-               mir: &'a Body<'tcx>,
+               body: &'a Body<'tcx>,
                mdpe: &'a MoveDataParamEnv<'gcx, 'tcx>)
                -> Self
     {
-        EverInitializedPlaces { tcx: tcx, mir: mir, mdpe: mdpe }
+        EverInitializedPlaces { tcx: tcx, body: body, mdpe: mdpe }
     }
 }
 
@@ -284,7 +284,7 @@ fn bits_per_block(&self) -> usize {
 
     fn start_block_effect(&self, entry_set: &mut BitSet<MovePathIndex>) {
         drop_flag_effects_for_function_entry(
-            self.tcx, self.mir, self.mdpe,
+            self.tcx, self.body, self.mdpe,
             |path, s| {
                 assert!(s == DropFlagState::Present);
                 entry_set.insert(path);
@@ -296,7 +296,7 @@ fn statement_effect(&self,
                         location: Location)
     {
         drop_flag_effects_for_location(
-            self.tcx, self.mir, self.mdpe,
+            self.tcx, self.body, self.mdpe,
             location,
             |path, s| Self::update_bits(sets, path, s)
         )
@@ -307,7 +307,7 @@ fn terminator_effect(&self,
                          location: Location)
     {
         drop_flag_effects_for_location(
-            self.tcx, self.mir, self.mdpe,
+            self.tcx, self.body, self.mdpe,
             location,
             |path, s| Self::update_bits(sets, path, s)
         )
@@ -322,7 +322,7 @@ fn propagate_call_return(
     ) {
         // when a call returns successfully, that means we need to set
         // the bits for that dest_place to 1 (initialized).
-        on_lookup_result_bits(self.tcx, self.mir, self.move_data(),
+        on_lookup_result_bits(self.tcx, self.body, self.move_data(),
                               self.move_data().rev_lookup.find(dest_place),
                               |mpi| { in_out.insert(mpi); });
     }
@@ -342,7 +342,7 @@ fn start_block_effect(&self, entry_set: &mut BitSet<MovePathIndex>) {
         entry_set.insert_all();
 
         drop_flag_effects_for_function_entry(
-            self.tcx, self.mir, self.mdpe,
+            self.tcx, self.body, self.mdpe,
             |path, s| {
                 assert!(s == DropFlagState::Present);
                 entry_set.remove(path);
@@ -354,7 +354,7 @@ fn statement_effect(&self,
                         location: Location)
     {
         drop_flag_effects_for_location(
-            self.tcx, self.mir, self.mdpe,
+            self.tcx, self.body, self.mdpe,
             location,
             |path, s| Self::update_bits(sets, path, s)
         )
@@ -365,7 +365,7 @@ fn terminator_effect(&self,
                          location: Location)
     {
         drop_flag_effects_for_location(
-            self.tcx, self.mir, self.mdpe,
+            self.tcx, self.body, self.mdpe,
             location,
             |path, s| Self::update_bits(sets, path, s)
         )
@@ -380,7 +380,7 @@ fn propagate_call_return(
     ) {
         // when a call returns successfully, that means we need to set
         // the bits for that dest_place to 0 (initialized).
-        on_lookup_result_bits(self.tcx, self.mir, self.move_data(),
+        on_lookup_result_bits(self.tcx, self.body, self.move_data(),
                               self.move_data().rev_lookup.find(dest_place),
                               |mpi| { in_out.remove(mpi); });
     }
@@ -398,7 +398,7 @@ fn start_block_effect(&self, entry_set: &mut BitSet<MovePathIndex>) {
         entry_set.clear();
 
         drop_flag_effects_for_function_entry(
-            self.tcx, self.mir, self.mdpe,
+            self.tcx, self.body, self.mdpe,
             |path, s| {
                 assert!(s == DropFlagState::Present);
                 entry_set.insert(path);
@@ -410,7 +410,7 @@ fn statement_effect(&self,
                         location: Location)
     {
         drop_flag_effects_for_location(
-            self.tcx, self.mir, self.mdpe,
+            self.tcx, self.body, self.mdpe,
             location,
             |path, s| Self::update_bits(sets, path, s)
         )
@@ -421,7 +421,7 @@ fn terminator_effect(&self,
                          location: Location)
     {
         drop_flag_effects_for_location(
-            self.tcx, self.mir, self.mdpe,
+            self.tcx, self.body, self.mdpe,
             location,
             |path, s| Self::update_bits(sets, path, s)
         )
@@ -436,7 +436,7 @@ fn propagate_call_return(
     ) {
         // when a call returns successfully, that means we need to set
         // the bits for that dest_place to 1 (initialized).
-        on_lookup_result_bits(self.tcx, self.mir, self.move_data(),
+        on_lookup_result_bits(self.tcx, self.body, self.move_data(),
                               self.move_data().rev_lookup.find(dest_place),
                               |mpi| { in_out.insert(mpi); });
     }
@@ -450,7 +450,7 @@ fn bits_per_block(&self) -> usize {
     }
 
     fn start_block_effect(&self, entry_set: &mut BitSet<InitIndex>) {
-        for arg_init in 0..self.mir.arg_count {
+        for arg_init in 0..self.body.arg_count {
             entry_set.insert(InitIndex::new(arg_init));
         }
     }
@@ -458,8 +458,8 @@ fn start_block_effect(&self, entry_set: &mut BitSet<InitIndex>) {
     fn statement_effect(&self,
                         sets: &mut BlockSets<'_, InitIndex>,
                         location: Location) {
-        let (_, mir, move_data) = (self.tcx, self.mir, self.move_data());
-        let stmt = &mir[location.block].statements[location.statement_index];
+        let (_, body, move_data) = (self.tcx, self.body, self.move_data());
+        let stmt = &body[location.block].statements[location.statement_index];
         let init_path_map = &move_data.init_path_map;
         let init_loc_map = &move_data.init_loc_map;
         let rev_lookup = &move_data.rev_lookup;
@@ -485,8 +485,8 @@ fn terminator_effect(&self,
                          sets: &mut BlockSets<'_, InitIndex>,
                          location: Location)
     {
-        let (mir, move_data) = (self.mir, self.move_data());
-        let term = mir[location.block].terminator();
+        let (body, move_data) = (self.body, self.move_data());
+        let term = body[location.block].terminator();
         let init_loc_map = &move_data.init_loc_map;
         debug!("terminator {:?} at loc {:?} initializes move_indexes {:?}",
                term, location, &init_loc_map[location]);
@@ -510,7 +510,7 @@ fn propagate_call_return(
 
         let call_loc = Location {
             block: call_bb,
-            statement_index: self.mir[call_bb].statements.len(),
+            statement_index: self.body[call_bb].statements.len(),
         };
         for init_index in &init_loc_map[call_loc] {
             assert!(init_index.index() < bits_per_block);
index 9bf346f5f62d64814b4d4b018cefd17fcb6f35c7..fed56e987ef4982f0af24d5a469c5b289e1b5e91 100644 (file)
@@ -5,17 +5,17 @@
 
 #[derive(Copy, Clone)]
 pub struct MaybeStorageLive<'a, 'tcx: 'a> {
-    mir: &'a Body<'tcx>,
+    body: &'a Body<'tcx>,
 }
 
 impl<'a, 'tcx: 'a> MaybeStorageLive<'a, 'tcx> {
-    pub fn new(mir: &'a Body<'tcx>)
+    pub fn new(body: &'a Body<'tcx>)
                -> Self {
-        MaybeStorageLive { mir }
+        MaybeStorageLive { body }
     }
 
-    pub fn mir(&self) -> &Body<'tcx> {
-        self.mir
+    pub fn body(&self) -> &Body<'tcx> {
+        self.body
     }
 }
 
@@ -23,7 +23,7 @@ impl<'a, 'tcx> BitDenotation<'tcx> for MaybeStorageLive<'a, 'tcx> {
     type Idx = Local;
     fn name() -> &'static str { "maybe_storage_live" }
     fn bits_per_block(&self) -> usize {
-        self.mir.local_decls.len()
+        self.body.local_decls.len()
     }
 
     fn start_block_effect(&self, _sets: &mut BitSet<Local>) {
@@ -33,7 +33,7 @@ fn start_block_effect(&self, _sets: &mut BitSet<Local>) {
     fn statement_effect(&self,
                         sets: &mut BlockSets<'_, Local>,
                         loc: Location) {
-        let stmt = &self.mir[loc.block].statements[loc.statement_index];
+        let stmt = &self.body[loc.block].statements[loc.statement_index];
 
         match stmt.kind {
             StatementKind::StorageLive(l) => sets.gen(l),
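
MaybeStorageLive is a plain gen/kill problem: StorageLive gens the local's bit, StorageDead kills it, and the dataflow framework then joins the per-block sets over the CFG. A toy version of the transfer function, with hypothetical types standing in for BlockSets, BitSet and StatementKind:

#[derive(Debug)]
struct BlockSets { gen_set: Vec<bool>, kill_set: Vec<bool> }

impl BlockSets {
    fn new(locals: usize) -> Self {
        BlockSets { gen_set: vec![false; locals], kill_set: vec![false; locals] }
    }
    // Corresponds to sets.gen(l): the bit becomes live and any pending kill is dropped.
    fn gen_bit(&mut self, local: usize) {
        self.gen_set[local] = true;
        self.kill_set[local] = false;
    }
    // Corresponds to sets.kill(l).
    fn kill_bit(&mut self, local: usize) {
        self.kill_set[local] = true;
        self.gen_set[local] = false;
    }
}

enum Stmt { StorageLive(usize), StorageDead(usize), Other }

fn statement_effect(sets: &mut BlockSets, stmt: &Stmt) {
    match stmt {
        Stmt::StorageLive(l) => sets.gen_bit(*l),
        Stmt::StorageDead(l) => sets.kill_bit(*l),
        Stmt::Other => {}
    }
}

fn main() {
    let mut sets = BlockSets::new(3);
    statement_effect(&mut sets, &Stmt::StorageLive(1));
    statement_effect(&mut sets, &Stmt::StorageDead(1));
    println!("{:?}", sets); // local 1 ends the block killed
}
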
index 26bad0cb04d7061271f9b85deefd1b2bae33aff3..8e2068269ceaa883bd72a16ca710f746b406220f 100644 (file)
@@ -122,7 +122,7 @@ pub struct MoveDataParamEnv<'gcx, 'tcx> {
 }
 
 pub(crate) fn do_dataflow<'a, 'gcx, 'tcx, BD, P>(tcx: TyCtxt<'a, 'gcx, 'tcx>,
-                                                 mir: &'a Body<'tcx>,
+                                                 body: &'a Body<'tcx>,
                                                  def_id: DefId,
                                                  attributes: &[ast::Attribute],
                                                  dead_unwinds: &BitSet<BasicBlock>,
@@ -132,7 +132,7 @@ pub(crate) fn do_dataflow<'a, 'gcx, 'tcx, BD, P>(tcx: TyCtxt<'a, 'gcx, 'tcx>,
     where BD: BitDenotation<'tcx> + InitialFlow,
           P: Fn(&BD, BD::Idx) -> DebugFormatted
 {
-    let flow_state = DataflowAnalysis::new(mir, dead_unwinds, bd);
+    let flow_state = DataflowAnalysis::new(body, dead_unwinds, bd);
     flow_state.run(tcx, def_id, attributes, p)
 }
 
@@ -195,7 +195,7 @@ fn build_sets(&mut self) {
             self.flow_state.operator.start_block_effect(&mut sets.on_entry);
         }
 
-        for (bb, data) in self.mir.basic_blocks().iter_enumerated() {
+        for (bb, data) in self.body.basic_blocks().iter_enumerated() {
             let &mir::BasicBlockData { ref statements, ref terminator, is_cleanup: _ } = data;
 
             let mut interim_state;
@@ -231,10 +231,10 @@ impl<'b, 'a: 'b, 'tcx: 'a, BD> PropagationContext<'b, 'a, 'tcx, BD> where BD: Bi
 {
     fn walk_cfg(&mut self, in_out: &mut BitSet<BD::Idx>) {
         let mut dirty_queue: WorkQueue<mir::BasicBlock> =
-            WorkQueue::with_all(self.builder.mir.basic_blocks().len());
-        let mir = self.builder.mir;
+            WorkQueue::with_all(self.builder.body.basic_blocks().len());
+        let body = self.builder.body;
         while let Some(bb) = dirty_queue.pop() {
-            let bb_data = &mir[bb];
+            let bb_data = &body[bb];
             {
                 let sets = self.builder.flow_state.sets.for_block(bb.index());
                 debug_assert!(in_out.words().len() == sets.on_entry.words().len());
@@ -312,7 +312,7 @@ fn visit_terminator_entry(&mut self,
 
     fn analyze_results(&mut self, flow_uninit: &mut Self::FlowState) {
         let flow = flow_uninit;
-        for (bb, _) in traversal::reverse_postorder(self.mir()) {
+        for (bb, _) in traversal::reverse_postorder(self.body()) {
             flow.reset_to_entry_of(bb);
             self.process_basic_block(bb, flow);
         }
@@ -320,7 +320,7 @@ fn analyze_results(&mut self, flow_uninit: &mut Self::FlowState) {
 
     fn process_basic_block(&mut self, bb: BasicBlock, flow_state: &mut Self::FlowState) {
         let BasicBlockData { ref statements, ref terminator, is_cleanup: _ } =
-            self.mir()[bb];
+            self.body()[bb];
         let mut location = Location { block: bb, statement_index: 0 };
         for stmt in statements.iter() {
             flow_state.reconstruct_statement_effect(location);
@@ -343,13 +343,13 @@ fn process_basic_block(&mut self, bb: BasicBlock, flow_state: &mut Self::FlowSta
 
     // Delegated Hooks: Provide access to the MIR and process the flow state.
 
-    fn mir(&self) -> &'a Body<'tcx>;
+    fn body(&self) -> &'a Body<'tcx>;
 }
 
 pub fn state_for_location<'tcx, T: BitDenotation<'tcx>>(loc: Location,
                                                         analysis: &T,
                                                         result: &DataflowResults<'tcx, T>,
-                                                        mir: &Body<'tcx>)
+                                                        body: &Body<'tcx>)
     -> BitSet<T::Idx> {
     let mut on_entry = result.sets().on_entry_set_for(loc.block.index()).to_owned();
     let mut kill_set = on_entry.to_hybrid();
@@ -370,7 +370,7 @@ pub fn state_for_location<'tcx, T: BitDenotation<'tcx>>(loc: Location,
         }
 
         // Apply the pre-statement effect of the statement we're evaluating.
-        if loc.statement_index == mir[loc.block].statements.len() {
+        if loc.statement_index == body[loc.block].statements.len() {
             analysis.before_terminator_effect(&mut sets, loc);
         } else {
             analysis.before_statement_effect(&mut sets, loc);
@@ -384,7 +384,7 @@ pub struct DataflowAnalysis<'a, 'tcx: 'a, O> where O: BitDenotation<'tcx>
 {
     flow_state: DataflowState<'tcx, O>,
     dead_unwinds: &'a BitSet<mir::BasicBlock>,
-    mir: &'a Body<'tcx>,
+    body: &'a Body<'tcx>,
 }
 
 impl<'a, 'tcx: 'a, O> DataflowAnalysis<'a, 'tcx, O> where O: BitDenotation<'tcx>
@@ -393,7 +393,7 @@ pub fn results(self) -> DataflowResults<'tcx, O> {
         DataflowResults(self.flow_state)
     }
 
-    pub fn mir(&self) -> &'a Body<'tcx> { self.mir }
+    pub fn body(&self) -> &'a Body<'tcx> { self.body }
 }
 
 pub struct DataflowResults<'tcx, O>(pub(crate) DataflowState<'tcx, O>) where O: BitDenotation<'tcx>;
@@ -697,11 +697,11 @@ fn propagate_call_return(
 
 impl<'a, 'tcx, D> DataflowAnalysis<'a, 'tcx, D> where D: BitDenotation<'tcx>
 {
-    pub fn new(mir: &'a Body<'tcx>,
+    pub fn new(body: &'a Body<'tcx>,
                dead_unwinds: &'a BitSet<mir::BasicBlock>,
                denotation: D) -> Self where D: InitialFlow {
         let bits_per_block = denotation.bits_per_block();
-        let num_blocks = mir.basic_blocks().len();
+        let num_blocks = body.basic_blocks().len();
 
         let on_entry_sets = if D::bottom_value() {
             vec![BitSet::new_filled(bits_per_block); num_blocks]
@@ -712,7 +712,7 @@ pub fn new(mir: &'a Body<'tcx>,
         let kill_sets = gen_sets.clone();
 
         DataflowAnalysis {
-            mir,
+            body,
             dead_unwinds,
             flow_state: DataflowState {
                 sets: AllSets {
index 816a269625399e7706a946a4658287c1c816c0f1..90e6c46f2817e221af2437bee395857a03d22469 100644 (file)
 use super::IllegalMoveOriginKind::*;
 
 struct MoveDataBuilder<'a, 'gcx: 'tcx, 'tcx: 'a> {
-    mir: &'a Body<'tcx>,
+    body: &'a Body<'tcx>,
     tcx: TyCtxt<'a, 'gcx, 'tcx>,
     data: MoveData<'tcx>,
     errors: Vec<(Place<'tcx>, MoveError<'tcx>)>,
 }
 
 impl<'a, 'gcx, 'tcx> MoveDataBuilder<'a, 'gcx, 'tcx> {
-    fn new(mir: &'a Body<'tcx>, tcx: TyCtxt<'a, 'gcx, 'tcx>) -> Self {
+    fn new(body: &'a Body<'tcx>, tcx: TyCtxt<'a, 'gcx, 'tcx>) -> Self {
         let mut move_paths = IndexVec::new();
         let mut path_map = IndexVec::new();
         let mut init_path_map = IndexVec::new();
 
         MoveDataBuilder {
-            mir,
+            body,
             tcx,
             errors: Vec::new(),
             data: MoveData {
                 moves: IndexVec::new(),
-                loc_map: LocationMap::new(mir),
+                loc_map: LocationMap::new(body),
                 rev_lookup: MovePathLookup {
-                    locals: mir.local_decls.indices().map(PlaceBase::Local).map(|v| {
+                    locals: body.local_decls.indices().map(PlaceBase::Local).map(|v| {
                         Self::new_move_path(
                             &mut move_paths,
                             &mut path_map,
@@ -47,7 +47,7 @@ fn new(mir: &'a Body<'tcx>, tcx: TyCtxt<'a, 'gcx, 'tcx>) -> Self {
                 move_paths,
                 path_map,
                 inits: IndexVec::new(),
-                init_loc_map: LocationMap::new(mir),
+                init_loc_map: LocationMap::new(body),
                 init_path_map,
             }
         }
@@ -104,9 +104,9 @@ fn move_path_for(&mut self, place: &Place<'tcx>)
             };
 
             for proj in place_projection {
-                let mir = self.builder.mir;
+                let body = self.builder.body;
                 let tcx = self.builder.tcx;
-                let place_ty = proj.base.ty(mir, tcx).ty;
+                let place_ty = proj.base.ty(body, tcx).ty;
                 match place_ty.sty {
                     ty::Ref(..) | ty::RawPtr(..) =>
                         return Err(MoveError::cannot_move_out_of(
@@ -183,11 +183,11 @@ fn finalize(
         self
     ) -> Result<MoveData<'tcx>, (MoveData<'tcx>, Vec<(Place<'tcx>, MoveError<'tcx>)>)> {
         debug!("{}", {
-            debug!("moves for {:?}:", self.mir.span);
+            debug!("moves for {:?}:", self.body.span);
             for (j, mo) in self.data.moves.iter_enumerated() {
                 debug!("    {:?} = {:?}", j, mo);
             }
-            debug!("move paths for {:?}:", self.mir.span);
+            debug!("move paths for {:?}:", self.body.span);
             for (j, path) in self.data.move_paths.iter_enumerated() {
                 debug!("    {:?} = {:?}", j, path);
             }
@@ -203,14 +203,14 @@ fn finalize(
 }
 
 pub(super) fn gather_moves<'a, 'gcx, 'tcx>(
-    mir: &Body<'tcx>,
+    body: &Body<'tcx>,
     tcx: TyCtxt<'a, 'gcx, 'tcx>
 ) -> Result<MoveData<'tcx>, (MoveData<'tcx>, Vec<(Place<'tcx>, MoveError<'tcx>)>)> {
-    let mut builder = MoveDataBuilder::new(mir, tcx);
+    let mut builder = MoveDataBuilder::new(body, tcx);
 
     builder.gather_args();
 
-    for (bb, block) in mir.basic_blocks().iter_enumerated() {
+    for (bb, block) in body.basic_blocks().iter_enumerated() {
         for (i, stmt) in block.statements.iter().enumerate() {
             let source = Location { block: bb, statement_index: i };
             builder.gather_statement(source, stmt);
@@ -228,7 +228,7 @@ pub(super) fn gather_moves<'a, 'gcx, 'tcx>(
 
 impl<'a, 'gcx, 'tcx> MoveDataBuilder<'a, 'gcx, 'tcx> {
     fn gather_args(&mut self) {
-        for arg in self.mir.args_iter() {
+        for arg in self.body.args_iter() {
             let path = self.data.rev_lookup.locals[arg];
 
             let init = self.data.inits.push(Init {
@@ -429,7 +429,7 @@ fn gather_init(&mut self, place: &Place<'tcx>, kind: InitKind) {
             Place::Projection(box Projection {
                 base,
                 elem: ProjectionElem::Field(_, _),
-            }) if match base.ty(self.builder.mir, self.builder.tcx).ty.sty {
+            }) if match base.ty(self.builder.body, self.builder.tcx).ty.sty {
                     ty::Adt(def, _) if def.is_union() => true,
                     _ => false,
             } => base,
index 0d20040d0d2d7d187fec366870ef3bc0129dcca9..7d75d352f94c9b081944aff9d7ea1bb94e8001b1 100644 (file)
@@ -138,9 +138,9 @@ fn index_mut(&mut self, index: Location) -> &mut Self::Output {
 }
 
 impl<T> LocationMap<T> where T: Default + Clone {
-    fn new(mir: &Body<'_>) -> Self {
+    fn new(body: &Body<'_>) -> Self {
         LocationMap {
-            map: mir.basic_blocks().iter().map(|block| {
+            map: body.basic_blocks().iter().map(|block| {
                 vec![T::default(); block.statements.len()+1]
             }).collect()
         }
@@ -205,10 +205,10 @@ fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
 }
 
 impl Init {
-    crate fn span<'gcx>(&self, mir: &Body<'gcx>) -> Span {
+    crate fn span<'gcx>(&self, body: &Body<'gcx>) -> Span {
         match self.location {
-            InitLocation::Argument(local) => mir.local_decls[local].source_info.span,
-            InitLocation::Statement(location) => mir.source_info(location).span,
+            InitLocation::Argument(local) => body.local_decls[local].source_info.span,
+            InitLocation::Statement(location) => body.source_info(location).span,
         }
     }
 }
@@ -306,9 +306,9 @@ fn cannot_move_out_of(location: Location, kind: IllegalMoveOriginKind<'tcx>) ->
 }
 
 impl<'a, 'gcx, 'tcx> MoveData<'tcx> {
-    pub fn gather_moves(mir: &Body<'tcx>, tcx: TyCtxt<'a, 'gcx, 'tcx>)
+    pub fn gather_moves(body: &Body<'tcx>, tcx: TyCtxt<'a, 'gcx, 'tcx>)
                         -> Result<Self, (Self, Vec<(Place<'tcx>, MoveError<'tcx>)>)> {
-        builder::gather_moves(mir, tcx)
+        builder::gather_moves(body, tcx)
     }
 
     /// For the move path `mpi`, returns the root local variable (if any) that starts the path.
index a5be55d16d488b7049ce0c5239d2804988b3db99..37a2e79dae91ff62d89ff94efebcf2648d1c266c 100644 (file)
             let allocation = tcx.intern_const_alloc(allocation);
             ConstValue::Slice { data: allocation, start: 0, end: s.len() }
         },
-        LitKind::Err(ref s) => {
-            let s = s.as_str();
-            let allocation = Allocation::from_byte_aligned_bytes(s.as_bytes());
-            let allocation = tcx.intern_const_alloc(allocation);
-            return Ok(tcx.mk_const(ty::Const {
-                val: ConstValue::Slice{ data: allocation, start: 0, end: s.len() },
-                ty: tcx.types.err,
-            }));
-        },
         LitKind::ByteStr(ref data) => {
             let id = tcx.allocate_bytes(data);
             ConstValue::Scalar(Scalar::Ptr(id.into()))
@@ -66,6 +57,7 @@
         }
         LitKind::Bool(b) => ConstValue::Scalar(Scalar::from_bool(b)),
         LitKind::Char(c) => ConstValue::Scalar(Scalar::from_char(c)),
+        LitKind::Err(_) => unreachable!(),
     };
     Ok(tcx.mk_const(ty::Const { val: lit, ty }))
 }
@@ -77,8 +69,7 @@ fn parse_float<'tcx>(
 ) -> Result<ConstValue<'tcx>, ()> {
     let num = num.as_str();
     use rustc_apfloat::ieee::{Single, Double};
-    use rustc_apfloat::Float;
-    let (data, size) = match fty {
+    let scalar = match fty {
         ast::FloatTy::F32 => {
             num.parse::<f32>().map_err(|_| ())?;
             let mut f = num.parse::<Single>().unwrap_or_else(|e| {
@@ -87,19 +78,19 @@ fn parse_float<'tcx>(
             if neg {
                 f = -f;
             }
-            (f.to_bits(), 4)
+            Scalar::from_f32(f)
         }
         ast::FloatTy::F64 => {
             num.parse::<f64>().map_err(|_| ())?;
             let mut f = num.parse::<Double>().unwrap_or_else(|e| {
-                panic!("apfloat::ieee::Single failed to parse `{}`: {:?}", num, e)
+                panic!("apfloat::ieee::Double failed to parse `{}`: {:?}", num, e)
             });
             if neg {
                 f = -f;
             }
-            (f.to_bits(), 8)
+            Scalar::from_f64(f)
         }
     };
 
-    Ok(ConstValue::Scalar(Scalar::from_uint(data, Size::from_bytes(size))))
+    Ok(ConstValue::Scalar(scalar))
 }
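
For comparison with the hunk above: the literal is still parsed twice (host parser to validate, apfloat to obtain the value); what changes is that the result is now built with the typed Scalar::from_f32/from_f64 constructors instead of a raw (bits, size) pair. A small self-contained sketch of the apfloat side, assuming the rustc_apfloat crate as a dependency and leaving Scalar out:

use rustc_apfloat::ieee::Single;
use rustc_apfloat::Float; // for to_bits()

// Parse an f32 literal the way parse_float does above: validate with the host
// parser, build the soft-float value, then apply the sign of the literal.
fn parse_f32_bits(num: &str, neg: bool) -> Option<u128> {
    num.parse::<f32>().ok()?;                // reject malformed literals
    let mut f = num.parse::<Single>().ok()?; // soft-float parse
    if neg {
        f = -f;
    }
    Some(f.to_bits())                        // raw IEEE-754 bits as u128
}

fn main() {
    assert_eq!(parse_f32_bits("1.5", false), Some(0x3FC0_0000)); // 1.5f32
    assert_eq!(parse_f32_bits("1.5", true),  Some(0xBFC0_0000)); // -1.5f32
    assert_eq!(parse_f32_bits("not a float", false), None);
}
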
index ff7a9984da4ee164d11a58741a5dd3b67b591fba..6392e0996aec233eaec48a8a645df8fe2de22cb8 100644 (file)
@@ -5,11 +5,11 @@
 use syntax::symbol::sym;
 
 use rustc_apfloat::ieee::{Single, Double};
+use rustc_apfloat::{Float, FloatConvert};
 use rustc::mir::interpret::{
-    Scalar, EvalResult, Pointer, PointerArithmetic, InterpError,
+    Scalar, InterpResult, Pointer, PointerArithmetic, InterpError,
 };
 use rustc::mir::CastKind;
-use rustc_apfloat::Float;
 
 use super::{InterpretCx, Machine, PlaceTy, OpTy, Immediate};
 
@@ -28,7 +28,7 @@ pub fn cast(
         src: OpTy<'tcx, M::PointerTag>,
         kind: CastKind,
         dest: PlaceTy<'tcx, M::PointerTag>,
-    ) -> EvalResult<'tcx> {
+    ) -> InterpResult<'tcx> {
         use rustc::mir::CastKind::*;
         match kind {
             Pointer(PointerCast::Unsize) => {
@@ -80,7 +80,7 @@ pub fn cast(
                         if self.tcx.has_attr(def_id, sym::rustc_args_required_const) {
                             bug!("reifying a fn ptr that requires const arguments");
                         }
-                        let instance: EvalResult<'tcx, _> = ty::Instance::resolve(
+                        let instance: InterpResult<'tcx, _> = ty::Instance::resolve(
                             *self.tcx,
                             self.param_env,
                             def_id,
@@ -126,21 +126,33 @@ pub fn cast(
         Ok(())
     }
 
-    pub(super) fn cast_scalar(
+    fn cast_scalar(
         &self,
         val: Scalar<M::PointerTag>,
         src_layout: TyLayout<'tcx>,
         dest_layout: TyLayout<'tcx>,
-    ) -> EvalResult<'tcx, Scalar<M::PointerTag>> {
+    ) -> InterpResult<'tcx, Scalar<M::PointerTag>> {
         use rustc::ty::TyKind::*;
         trace!("Casting {:?}: {:?} to {:?}", val, src_layout.ty, dest_layout.ty);
 
-        match val.to_bits_or_ptr(src_layout.size, self) {
-            Err(ptr) => self.cast_from_ptr(ptr, dest_layout.ty),
-            Ok(data) => {
-                match src_layout.ty.sty {
-                    Float(fty) => self.cast_from_float(data, fty, dest_layout.ty),
-                    _ => self.cast_from_int(data, src_layout, dest_layout),
+        match src_layout.ty.sty {
+            // Floating point
+            Float(FloatTy::F32) => self.cast_from_float(val.to_f32()?, dest_layout.ty),
+            Float(FloatTy::F64) => self.cast_from_float(val.to_f64()?, dest_layout.ty),
+            // Integer(-like), including fn ptr casts and casts from enums that
+            // are represented as integers (this excludes univariant enums, which
+            // are handled in `cast` directly).
+            _ => {
+                assert!(
+                    src_layout.ty.is_bool()       || src_layout.ty.is_char()     ||
+                    src_layout.ty.is_enum()       || src_layout.ty.is_integral() ||
+                    src_layout.ty.is_unsafe_ptr() || src_layout.ty.is_fn_ptr()   ||
+                    src_layout.ty.is_region_ptr(),
+                    "Unexpected cast from type {:?}", src_layout.ty
+                );
+                match val.to_bits_or_ptr(src_layout.size, self) {
+                    Err(ptr) => self.cast_from_ptr(ptr, dest_layout.ty),
+                    Ok(data) => self.cast_from_int(data, src_layout, dest_layout),
                 }
             }
         }
@@ -148,10 +160,11 @@ pub(super) fn cast_scalar(
 
     fn cast_from_int(
         &self,
-        v: u128,
+        v: u128, // raw bits
         src_layout: TyLayout<'tcx>,
         dest_layout: TyLayout<'tcx>,
-    ) -> EvalResult<'tcx, Scalar<M::PointerTag>> {
+    ) -> InterpResult<'tcx, Scalar<M::PointerTag>> {
+        // Let's make sure v is sign-extended *if* it has a signed type.
         let signed = src_layout.abi.is_signed();
         let v = if signed {
             self.sign_extend(v, src_layout)
@@ -166,21 +179,17 @@ fn cast_from_int(
                 Ok(Scalar::from_uint(v, dest_layout.size))
             }
 
-            Float(FloatTy::F32) if signed => Ok(Scalar::from_uint(
-                Single::from_i128(v as i128).value.to_bits(),
-                Size::from_bits(32)
+            Float(FloatTy::F32) if signed => Ok(Scalar::from_f32(
+                Single::from_i128(v as i128).value
             )),
-            Float(FloatTy::F64) if signed => Ok(Scalar::from_uint(
-                Double::from_i128(v as i128).value.to_bits(),
-                Size::from_bits(64)
+            Float(FloatTy::F64) if signed => Ok(Scalar::from_f64(
+                Double::from_i128(v as i128).value
             )),
-            Float(FloatTy::F32) => Ok(Scalar::from_uint(
-                Single::from_u128(v).value.to_bits(),
-                Size::from_bits(32)
+            Float(FloatTy::F32) => Ok(Scalar::from_f32(
+                Single::from_u128(v).value
             )),
-            Float(FloatTy::F64) => Ok(Scalar::from_uint(
-                Double::from_u128(v).value.to_bits(),
-                Size::from_bits(64)
+            Float(FloatTy::F64) => Ok(Scalar::from_f64(
+                Double::from_u128(v).value
             )),
 
             Char => {
@@ -194,52 +203,36 @@ fn cast_from_int(
         }
     }
 
-    fn cast_from_float(
+    fn cast_from_float<F>(
         &self,
-        bits: u128,
-        fty: FloatTy,
+        f: F,
         dest_ty: Ty<'tcx>
-    ) -> EvalResult<'tcx, Scalar<M::PointerTag>> {
+    ) -> InterpResult<'tcx, Scalar<M::PointerTag>>
+    where F: Float + Into<Scalar<M::PointerTag>> + FloatConvert<Single> + FloatConvert<Double>
+    {
         use rustc::ty::TyKind::*;
-        use rustc_apfloat::FloatConvert;
         match dest_ty.sty {
             // float -> uint
             Uint(t) => {
                 let width = t.bit_width().unwrap_or_else(|| self.pointer_size().bits() as usize);
-                let v = match fty {
-                    FloatTy::F32 => Single::from_bits(bits).to_u128(width).value,
-                    FloatTy::F64 => Double::from_bits(bits).to_u128(width).value,
-                };
+                let v = f.to_u128(width).value;
                 // This should already fit the bit width
                 Ok(Scalar::from_uint(v, Size::from_bits(width as u64)))
             },
             // float -> int
             Int(t) => {
                 let width = t.bit_width().unwrap_or_else(|| self.pointer_size().bits() as usize);
-                let v = match fty {
-                    FloatTy::F32 => Single::from_bits(bits).to_i128(width).value,
-                    FloatTy::F64 => Double::from_bits(bits).to_i128(width).value,
-                };
+                let v = f.to_i128(width).value;
                 Ok(Scalar::from_int(v, Size::from_bits(width as u64)))
             },
-            // f64 -> f32
-            Float(FloatTy::F32) if fty == FloatTy::F64 => {
-                Ok(Scalar::from_uint(
-                    Single::to_bits(Double::from_bits(bits).convert(&mut false).value),
-                    Size::from_bits(32),
-                ))
-            },
-            // f32 -> f64
-            Float(FloatTy::F64) if fty == FloatTy::F32 => {
-                Ok(Scalar::from_uint(
-                    Double::to_bits(Single::from_bits(bits).convert(&mut false).value),
-                    Size::from_bits(64),
-                ))
-            },
-            // identity cast
-            Float(FloatTy:: F64) => Ok(Scalar::from_uint(bits, Size::from_bits(64))),
-            Float(FloatTy:: F32) => Ok(Scalar::from_uint(bits, Size::from_bits(32))),
-            _ => err!(Unimplemented(format!("float to {:?} cast", dest_ty))),
+            // float -> f32
+            Float(FloatTy::F32) =>
+                Ok(Scalar::from_f32(f.convert(&mut false).value)),
+            // float -> f64
+            Float(FloatTy::F64) =>
+                Ok(Scalar::from_f64(f.convert(&mut false).value)),
+            // That's it.
+            _ => bug!("invalid float to {:?} cast", dest_ty),
         }
     }
 
@@ -247,7 +240,7 @@ fn cast_from_ptr(
         &self,
         ptr: Pointer<M::PointerTag>,
         ty: Ty<'tcx>
-    ) -> EvalResult<'tcx, Scalar<M::PointerTag>> {
+    ) -> InterpResult<'tcx, Scalar<M::PointerTag>> {
         use rustc::ty::TyKind::*;
         match ty.sty {
             // Casting to a reference or fn pointer is not permitted by rustc,
@@ -267,7 +260,7 @@ fn unsize_into_ptr(
         // The pointee types
         sty: Ty<'tcx>,
         dty: Ty<'tcx>,
-    ) -> EvalResult<'tcx> {
+    ) -> InterpResult<'tcx> {
         // A<Struct> -> A<Trait> conversion
         let (src_pointee_ty, dest_pointee_ty) = self.tcx.struct_lockstep_tails(sty, dty);
 
@@ -305,7 +298,7 @@ fn unsize_into(
         &mut self,
         src: OpTy<'tcx, M::PointerTag>,
         dest: PlaceTy<'tcx, M::PointerTag>,
-    ) -> EvalResult<'tcx> {
+    ) -> InterpResult<'tcx> {
         trace!("Unsizing {:?} into {:?}", src, dest);
         match (&src.layout.ty.sty, &dest.layout.ty.sty) {
             (&ty::Ref(_, s, _), &ty::Ref(_, d, _)) |
index a6153bf055d600a5223ae3c0cf2e6551d0afe6e8..a34889e6f33bc0a62d66777548251d052cf1b553 100644 (file)
@@ -16,7 +16,7 @@
 use rustc::mir::interpret::{
     ErrorHandled,
     GlobalId, Scalar, Pointer, FrameInfo, AllocId,
-    EvalResult, InterpError,
+    InterpResult, InterpError,
     truncate, sign_extend,
 };
 use rustc_data_structures::fx::FxHashMap;
@@ -56,7 +56,7 @@ pub struct Frame<'mir, 'tcx: 'mir, Tag=(), Extra=()> {
     // Function and callsite information
     ////////////////////////////////////////////////////////////////////////////////
     /// The MIR for the function called on this frame.
-    pub mir: &'mir mir::Body<'tcx>,
+    pub body: &'mir mir::Body<'tcx>,
 
     /// The def_id and substs of the current function.
     pub instance: ty::Instance<'tcx>,
@@ -135,7 +135,7 @@ pub enum LocalValue<Tag=(), Id=AllocId> {
 }
 
 impl<'tcx, Tag: Copy + 'static> LocalState<'tcx, Tag> {
-    pub fn access(&self) -> EvalResult<'tcx, Operand<Tag>> {
+    pub fn access(&self) -> InterpResult<'tcx, Operand<Tag>> {
         match self.value {
             LocalValue::Dead => err!(DeadLocal),
             LocalValue::Uninitialized =>
@@ -148,7 +148,7 @@ pub fn access(&self) -> EvalResult<'tcx, Operand<Tag>> {
     /// to do so; otherwise return the `MemPlace` to consult instead.
     pub fn access_mut(
         &mut self,
-    ) -> EvalResult<'tcx, Result<&mut LocalValue<Tag>, MemPlace<Tag>>> {
+    ) -> InterpResult<'tcx, Result<&mut LocalValue<Tag>, MemPlace<Tag>>> {
         match self.value {
             LocalValue::Dead => err!(DeadLocal),
             LocalValue::Live(Operand::Indirect(mplace)) => Ok(Err(mplace)),
@@ -190,7 +190,7 @@ impl<'a, 'mir, 'tcx, M: Machine<'a, 'mir, 'tcx>> LayoutOf
     for InterpretCx<'a, 'mir, 'tcx, M>
 {
     type Ty = Ty<'tcx>;
-    type TyLayout = EvalResult<'tcx, TyLayout<'tcx>>;
+    type TyLayout = InterpResult<'tcx, TyLayout<'tcx>>;
 
     #[inline]
     fn layout_of(&self, ty: Ty<'tcx>) -> Self::TyLayout {
@@ -252,14 +252,14 @@ pub fn frame_mut(&mut self) -> &mut Frame<'mir, 'tcx, M::PointerTag, M::FrameExt
     }
 
     #[inline(always)]
-    pub(super) fn mir(&self) -> &'mir mir::Body<'tcx> {
-        self.frame().mir
+    pub(super) fn body(&self) -> &'mir mir::Body<'tcx> {
+        self.frame().body
     }
 
     pub(super) fn subst_and_normalize_erasing_regions<T: TypeFoldable<'tcx>>(
         &self,
         substs: T,
-    ) -> EvalResult<'tcx, T> {
+    ) -> InterpResult<'tcx, T> {
         match self.stack.last() {
             Some(frame) => Ok(self.tcx.subst_and_normalize_erasing_regions(
                 frame.instance.substs,
@@ -278,7 +278,7 @@ pub(super) fn resolve(
         &self,
         def_id: DefId,
         substs: SubstsRef<'tcx>
-    ) -> EvalResult<'tcx, ty::Instance<'tcx>> {
+    ) -> InterpResult<'tcx, ty::Instance<'tcx>> {
         trace!("resolve: {:?}, {:#?}", def_id, substs);
         trace!("param_env: {:#?}", self.param_env);
         let substs = self.subst_and_normalize_erasing_regions(substs)?;
@@ -302,7 +302,7 @@ pub fn type_is_freeze(&self, ty: Ty<'tcx>) -> bool {
     pub fn load_mir(
         &self,
         instance: ty::InstanceDef<'tcx>,
-    ) -> EvalResult<'tcx, &'tcx mir::Body<'tcx>> {
+    ) -> InterpResult<'tcx, &'tcx mir::Body<'tcx>> {
         // do not continue if typeck errors occurred (can only occur in local crate)
         let did = instance.def_id();
         if did.is_local()
@@ -325,7 +325,7 @@ pub fn load_mir(
     pub(super) fn monomorphize<T: TypeFoldable<'tcx> + Subst<'tcx>>(
         &self,
         t: T,
-    ) -> EvalResult<'tcx, T> {
+    ) -> InterpResult<'tcx, T> {
         match self.stack.last() {
             Some(frame) => Ok(self.monomorphize_with_substs(t, frame.instance.substs)),
             None => if t.needs_subst() {
@@ -352,11 +352,11 @@ pub fn layout_of_local(
         frame: &Frame<'mir, 'tcx, M::PointerTag, M::FrameExtra>,
         local: mir::Local,
         layout: Option<TyLayout<'tcx>>,
-    ) -> EvalResult<'tcx, TyLayout<'tcx>> {
+    ) -> InterpResult<'tcx, TyLayout<'tcx>> {
         match frame.locals[local].layout.get() {
             None => {
                 let layout = crate::interpret::operand::from_known_layout(layout, || {
-                    let local_ty = frame.mir.local_decls[local].ty;
+                    let local_ty = frame.body.local_decls[local].ty;
                     let local_ty = self.monomorphize_with_substs(local_ty, frame.instance.substs);
                     self.layout_of(local_ty)
                 })?;
@@ -375,7 +375,7 @@ pub(super) fn size_and_align_of(
         &self,
         metadata: Option<Scalar<M::PointerTag>>,
         layout: TyLayout<'tcx>,
-    ) -> EvalResult<'tcx, Option<(Size, Align)>> {
+    ) -> InterpResult<'tcx, Option<(Size, Align)>> {
         if !layout.is_unsized() {
             return Ok(Some((layout.size, layout.align.abi)));
         }
@@ -467,7 +467,7 @@ pub(super) fn size_and_align_of(
     pub fn size_and_align_of_mplace(
         &self,
         mplace: MPlaceTy<'tcx, M::PointerTag>
-    ) -> EvalResult<'tcx, Option<(Size, Align)>> {
+    ) -> InterpResult<'tcx, Option<(Size, Align)>> {
         self.size_and_align_of(mplace.meta, mplace.layout)
     }
 
@@ -475,10 +475,10 @@ pub fn push_stack_frame(
         &mut self,
         instance: ty::Instance<'tcx>,
         span: source_map::Span,
-        mir: &'mir mir::Body<'tcx>,
+        body: &'mir mir::Body<'tcx>,
         return_place: Option<PlaceTy<'tcx, M::PointerTag>>,
         return_to_block: StackPopCleanup,
-    ) -> EvalResult<'tcx> {
+    ) -> InterpResult<'tcx> {
         if self.stack.len() > 0 {
             info!("PAUSING({}) {}", self.cur_frame(), self.frame().instance);
         }
@@ -487,7 +487,7 @@ pub fn push_stack_frame(
         // first push a stack frame so we have access to the local substs
         let extra = M::stack_push(self)?;
         self.stack.push(Frame {
-            mir,
+            body,
             block: mir::START_BLOCK,
             return_to_block,
             return_place,
@@ -501,13 +501,13 @@ pub fn push_stack_frame(
         });
 
         // don't allocate at all for trivial constants
-        if mir.local_decls.len() > 1 {
+        if body.local_decls.len() > 1 {
             // Locals are initially uninitialized.
             let dummy = LocalState {
                 value: LocalValue::Uninitialized,
                 layout: Cell::new(None),
             };
-            let mut locals = IndexVec::from_elem(dummy, &mir.local_decls);
+            let mut locals = IndexVec::from_elem(dummy, &body.local_decls);
             // Return place is handled specially by the `eval_place` functions, and the
             // entry in `locals` should never be used. Make it dead, to be sure.
             locals[mir::RETURN_PLACE].value = LocalValue::Dead;
@@ -518,8 +518,8 @@ pub fn push_stack_frame(
                 | Some(DefKind::Const)
                 | Some(DefKind::AssocConst) => {},
                 _ => {
-                    trace!("push_stack_frame: {:?}: num_bbs: {}", span, mir.basic_blocks().len());
-                    for block in mir.basic_blocks() {
+                    trace!("push_stack_frame: {:?}: num_bbs: {}", span, body.basic_blocks().len());
+                    for block in body.basic_blocks() {
                         for stmt in block.statements.iter() {
                             use rustc::mir::StatementKind::{StorageDead, StorageLive};
                             match stmt.kind {
@@ -546,7 +546,7 @@ pub fn push_stack_frame(
         }
     }
 
-    pub(super) fn pop_stack_frame(&mut self) -> EvalResult<'tcx> {
+    pub(super) fn pop_stack_frame(&mut self) -> InterpResult<'tcx> {
         info!("LEAVING({}) {}", self.cur_frame(), self.frame().instance);
         ::log_settings::settings().indentation -= 1;
         let frame = self.stack.pop().expect(
@@ -611,7 +611,7 @@ pub(super) fn pop_stack_frame(&mut self) -> EvalResult<'tcx> {
     pub fn storage_live(
         &mut self,
         local: mir::Local
-    ) -> EvalResult<'tcx, LocalValue<M::PointerTag>> {
+    ) -> InterpResult<'tcx, LocalValue<M::PointerTag>> {
         assert!(local != mir::RETURN_PLACE, "Cannot make return place live");
         trace!("{:?} is now live", local);
 
@@ -634,7 +634,7 @@ pub fn storage_dead(&mut self, local: mir::Local) -> LocalValue<M::PointerTag> {
     pub(super) fn deallocate_local(
         &mut self,
         local: LocalValue<M::PointerTag>,
-    ) -> EvalResult<'tcx> {
+    ) -> InterpResult<'tcx> {
         // FIXME: should we tell the user that there was a local which was never written to?
         if let LocalValue::Live(Operand::Indirect(MemPlace { ptr, .. })) = local {
             trace!("deallocating local");
@@ -648,7 +648,7 @@ pub(super) fn deallocate_local(
     pub fn const_eval_raw(
         &self,
         gid: GlobalId<'tcx>,
-    ) -> EvalResult<'tcx, MPlaceTy<'tcx, M::PointerTag>> {
+    ) -> InterpResult<'tcx, MPlaceTy<'tcx, M::PointerTag>> {
         let param_env = if self.tcx.is_static(gid.instance.def_id()) {
             ty::ParamEnv::reveal_all()
         } else {
@@ -734,7 +734,7 @@ pub fn dump_place(&self, place: Place<M::PointerTag>) {
     pub fn generate_stacktrace(&self, explicit_span: Option<Span>) -> Vec<FrameInfo<'tcx>> {
         let mut last_span = None;
         let mut frames = Vec::new();
-        for &Frame { instance, span, mir, block, stmt, .. } in self.stack().iter().rev() {
+        for &Frame { instance, span, body, block, stmt, .. } in self.stack().iter().rev() {
             // make sure we don't emit frames that are duplicates of the previous
             if explicit_span == Some(span) {
                 last_span = Some(span);
@@ -747,13 +747,13 @@ pub fn generate_stacktrace(&self, explicit_span: Option<Span>) -> Vec<FrameInfo<
             } else {
                 last_span = Some(span);
             }
-            let block = &mir.basic_blocks()[block];
+            let block = &body.basic_blocks()[block];
             let source_info = if stmt < block.statements.len() {
                 block.statements[stmt].source_info
             } else {
                 block.terminator().source_info
             };
-            let lint_root = match mir.source_scope_local_data {
+            let lint_root = match body.source_scope_local_data {
                 mir::ClearCrossCrate::Set(ref ivs) => Some(ivs[source_info.scope].lint_root),
                 mir::ClearCrossCrate::Clear => None,
             };
index 4de2bd48eeb7eb8c5766b2a3a948203476c033c9..8888d7ded8bb4fa2a8407a9f89d6a4be48ab9c6f 100644 (file)
@@ -7,7 +7,7 @@
 use rustc::ty::layout::{LayoutOf, Primitive, Size};
 use rustc::mir::BinOp;
 use rustc::mir::interpret::{
-    EvalResult, InterpError, Scalar,
+    InterpResult, InterpError, Scalar,
 };
 
 use super::{
@@ -22,7 +22,7 @@ fn numeric_intrinsic<'tcx, Tag>(
     name: &str,
     bits: u128,
     kind: Primitive,
-) -> EvalResult<'tcx, Scalar<Tag>> {
+) -> InterpResult<'tcx, Scalar<Tag>> {
     let size = match kind {
         Primitive::Int(integer, _) => integer.size(),
         _ => bug!("invalid `{}` argument: {:?}", name, bits),
@@ -46,7 +46,7 @@ pub fn emulate_intrinsic(
         instance: ty::Instance<'tcx>,
         args: &[OpTy<'tcx, M::PointerTag>],
         dest: PlaceTy<'tcx, M::PointerTag>,
-    ) -> EvalResult<'tcx, bool> {
+    ) -> InterpResult<'tcx, bool> {
         let substs = instance.substs;
 
         let intrinsic_name = &self.tcx.item_name(instance.def_id()).as_str()[..];
@@ -231,7 +231,7 @@ pub fn hook_fn(
         instance: ty::Instance<'tcx>,
         args: &[OpTy<'tcx, M::PointerTag>],
         dest: Option<PlaceTy<'tcx, M::PointerTag>>,
-    ) -> EvalResult<'tcx, bool> {
+    ) -> InterpResult<'tcx, bool> {
         let def_id = instance.def_id();
         // Some fn calls are actually BinOp intrinsics
         if let Some((op, oflo)) = self.tcx.is_binop_lang_item(def_id) {
index fa0750fa82aa98a988207883546634e85b4c7200..7ee77a9a05f8b323145c51d963d3370c0b14c88c 100644 (file)
@@ -10,7 +10,7 @@
 use rustc::ty::{self, query::TyCtxtAt};
 
 use super::{
-    Allocation, AllocId, EvalResult, Scalar, AllocationExtra,
+    Allocation, AllocId, InterpResult, Scalar, AllocationExtra,
     InterpretCx, PlaceTy, OpTy, ImmTy, MemoryKind,
 };
 
@@ -99,7 +99,7 @@ pub trait Machine<'a, 'mir, 'tcx>: Sized {
 
     /// Called before a basic block terminator is executed.
     /// You can use this to detect endlessly running programs.
-    fn before_terminator(ecx: &mut InterpretCx<'a, 'mir, 'tcx, Self>) -> EvalResult<'tcx>;
+    fn before_terminator(ecx: &mut InterpretCx<'a, 'mir, 'tcx, Self>) -> InterpResult<'tcx>;
 
     /// Entry point to all function calls.
     ///
@@ -117,7 +117,7 @@ fn find_fn(
         args: &[OpTy<'tcx, Self::PointerTag>],
         dest: Option<PlaceTy<'tcx, Self::PointerTag>>,
         ret: Option<mir::BasicBlock>,
-    ) -> EvalResult<'tcx, Option<&'mir mir::Body<'tcx>>>;
+    ) -> InterpResult<'tcx, Option<&'mir mir::Body<'tcx>>>;
 
     /// Directly process an intrinsic without pushing a stack frame.
     /// If this returns successfully, the engine will take care of jumping to the next block.
@@ -126,7 +126,7 @@ fn call_intrinsic(
         instance: ty::Instance<'tcx>,
         args: &[OpTy<'tcx, Self::PointerTag>],
         dest: PlaceTy<'tcx, Self::PointerTag>,
-    ) -> EvalResult<'tcx>;
+    ) -> InterpResult<'tcx>;
 
     /// Called for read access to a foreign static item.
     ///
@@ -138,7 +138,7 @@ fn call_intrinsic(
     fn find_foreign_static(
         def_id: DefId,
         tcx: TyCtxtAt<'a, 'tcx, 'tcx>,
-    ) -> EvalResult<'tcx, Cow<'tcx, Allocation>>;
+    ) -> InterpResult<'tcx, Cow<'tcx, Allocation>>;
 
     /// Called for all binary operations on integer(-like) types when one operand is a pointer
     /// value, and for the `Offset` operation that is inherently about pointers.
@@ -149,13 +149,13 @@ fn ptr_op(
         bin_op: mir::BinOp,
         left: ImmTy<'tcx, Self::PointerTag>,
         right: ImmTy<'tcx, Self::PointerTag>,
-    ) -> EvalResult<'tcx, (Scalar<Self::PointerTag>, bool)>;
+    ) -> InterpResult<'tcx, (Scalar<Self::PointerTag>, bool)>;
 
     /// Heap allocations via the `box` keyword.
     fn box_alloc(
         ecx: &mut InterpretCx<'a, 'mir, 'tcx, Self>,
         dest: PlaceTy<'tcx, Self::PointerTag>,
-    ) -> EvalResult<'tcx>;
+    ) -> InterpResult<'tcx>;
 
     /// Called to initialize the "extra" state of an allocation and make the pointers
     /// it contains (in relocations) tagged.  The way we construct allocations is
@@ -196,18 +196,18 @@ fn retag(
         _ecx: &mut InterpretCx<'a, 'mir, 'tcx, Self>,
         _kind: mir::RetagKind,
         _place: PlaceTy<'tcx, Self::PointerTag>,
-    ) -> EvalResult<'tcx> {
+    ) -> InterpResult<'tcx> {
         Ok(())
     }
 
     /// Called immediately before a new stack frame gets pushed
     fn stack_push(
         ecx: &mut InterpretCx<'a, 'mir, 'tcx, Self>,
-    ) -> EvalResult<'tcx, Self::FrameExtra>;
+    ) -> InterpResult<'tcx, Self::FrameExtra>;
 
     /// Called immediately after a stack frame gets popped
     fn stack_pop(
         ecx: &mut InterpretCx<'a, 'mir, 'tcx, Self>,
         extra: Self::FrameExtra,
-    ) -> EvalResult<'tcx>;
+    ) -> InterpResult<'tcx>;
 }
index 7db963a89aaef94531433be4dd34fd7816f0ea3c..7126cd86a19599e6aac579798ca40b8c6385cc38 100644 (file)
@@ -18,7 +18,7 @@
 
 use super::{
     Pointer, AllocId, Allocation, GlobalId, AllocationExtra,
-    EvalResult, Scalar, InterpError, GlobalAlloc, PointerArithmetic,
+    InterpResult, Scalar, InterpError, GlobalAlloc, PointerArithmetic,
     Machine, AllocMap, MayLeak, ErrorHandled, CheckInAllocMsg, InboundsCheck,
 };
 
@@ -156,7 +156,7 @@ pub fn reallocate(
         new_size: Size,
         new_align: Align,
         kind: MemoryKind<M::MemoryKinds>,
-    ) -> EvalResult<'tcx, Pointer<M::PointerTag>> {
+    ) -> InterpResult<'tcx, Pointer<M::PointerTag>> {
         if ptr.offset.bytes() != 0 {
             return err!(ReallocateNonBasePtr);
         }
@@ -178,7 +178,7 @@ pub fn reallocate(
     }
 
     /// Deallocate a local, or do nothing if that local has been made into a static
-    pub fn deallocate_local(&mut self, ptr: Pointer<M::PointerTag>) -> EvalResult<'tcx> {
+    pub fn deallocate_local(&mut self, ptr: Pointer<M::PointerTag>) -> InterpResult<'tcx> {
         // The allocation might already have been removed by static interning.
         // This can only really happen in the CTFE instance, not in miri.
         if self.alloc_map.contains_key(&ptr.alloc_id) {
@@ -193,7 +193,7 @@ pub fn deallocate(
         ptr: Pointer<M::PointerTag>,
         size_and_align: Option<(Size, Align)>,
         kind: MemoryKind<M::MemoryKinds>,
-    ) -> EvalResult<'tcx> {
+    ) -> InterpResult<'tcx> {
         trace!("deallocating: {}", ptr.alloc_id);
 
         if ptr.offset.bytes() != 0 {
@@ -257,7 +257,7 @@ pub fn check_align(
         &self,
         ptr: Scalar<M::PointerTag>,
         required_align: Align
-    ) -> EvalResult<'tcx> {
+    ) -> InterpResult<'tcx> {
         // Check non-NULL/Undef, extract offset
         let (offset, alloc_align) = match ptr.to_bits_or_ptr(self.pointer_size(), self) {
             Err(ptr) => {
@@ -304,7 +304,7 @@ pub fn check_bounds_ptr(
         ptr: Pointer<M::PointerTag>,
         liveness: InboundsCheck,
         msg: CheckInAllocMsg,
-    ) -> EvalResult<'tcx, Align> {
+    ) -> InterpResult<'tcx, Align> {
         let (allocation_size, align) = self.get_size_and_align(ptr.alloc_id, liveness)?;
         ptr.check_in_alloc(allocation_size, msg)?;
         Ok(align)
@@ -331,7 +331,7 @@ fn get_static_alloc(
         id: AllocId,
         tcx: TyCtxtAt<'a, 'tcx, 'tcx>,
         memory_extra: &M::MemoryExtra,
-    ) -> EvalResult<'tcx, Cow<'tcx, Allocation<M::PointerTag, M::AllocExtra>>> {
+    ) -> InterpResult<'tcx, Cow<'tcx, Allocation<M::PointerTag, M::AllocExtra>>> {
         let alloc = tcx.alloc_map.lock().get(id);
         let alloc = match alloc {
             Some(GlobalAlloc::Memory(mem)) =>
@@ -381,11 +381,14 @@ fn get_static_alloc(
         ).0)
     }
 
-    pub fn get(&self, id: AllocId) -> EvalResult<'tcx, &Allocation<M::PointerTag, M::AllocExtra>> {
+    pub fn get(
+        &self,
+        id: AllocId,
+    ) -> InterpResult<'tcx, &Allocation<M::PointerTag, M::AllocExtra>> {
         // The error type of the inner closure here is somewhat funny.  We have two
         // ways of "erroring": An actual error, or because we got a reference from
         // `get_static_alloc` that we can actually use directly without inserting anything anywhere.
-        // So the error type is `EvalResult<'tcx, &Allocation<M::PointerTag>>`.
+        // So the error type is `InterpResult<'tcx, &Allocation<M::PointerTag>>`.
         let a = self.alloc_map.get_or(id, || {
             let alloc = Self::get_static_alloc(id, self.tcx, &self.extra).map_err(Err)?;
             match alloc {
@@ -414,7 +417,7 @@ pub fn get(&self, id: AllocId) -> EvalResult<'tcx, &Allocation<M::PointerTag, M:
     pub fn get_mut(
         &mut self,
         id: AllocId,
-    ) -> EvalResult<'tcx, &mut Allocation<M::PointerTag, M::AllocExtra>> {
+    ) -> InterpResult<'tcx, &mut Allocation<M::PointerTag, M::AllocExtra>> {
         let tcx = self.tcx;
         let memory_extra = &self.extra;
         let a = self.alloc_map.get_mut_or(id, || {
@@ -450,7 +453,7 @@ pub fn get_size_and_align(
         &self,
         id: AllocId,
         liveness: InboundsCheck,
-    ) -> EvalResult<'static, (Size, Align)> {
+    ) -> InterpResult<'static, (Size, Align)> {
         if let Ok(alloc) = self.get(id) {
             return Ok((Size::from_bytes(alloc.bytes.len() as u64), alloc.align));
         }
@@ -477,7 +480,7 @@ pub fn get_size_and_align(
         }
     }
 
-    pub fn get_fn(&self, ptr: Pointer<M::PointerTag>) -> EvalResult<'tcx, Instance<'tcx>> {
+    pub fn get_fn(&self, ptr: Pointer<M::PointerTag>) -> InterpResult<'tcx, Instance<'tcx>> {
         if ptr.offset.bytes() != 0 {
             return err!(InvalidFunctionPointer);
         }
@@ -488,7 +491,7 @@ pub fn get_fn(&self, ptr: Pointer<M::PointerTag>) -> EvalResult<'tcx, Instance<'
         }
     }
 
-    pub fn mark_immutable(&mut self, id: AllocId) -> EvalResult<'tcx> {
+    pub fn mark_immutable(&mut self, id: AllocId) -> InterpResult<'tcx> {
         self.get_mut(id)?.mutability = Mutability::Immutable;
         Ok(())
     }
@@ -625,7 +628,7 @@ pub fn read_bytes(
         &self,
         ptr: Scalar<M::PointerTag>,
         size: Size,
-    ) -> EvalResult<'tcx, &[u8]> {
+    ) -> InterpResult<'tcx, &[u8]> {
         if size.bytes() == 0 {
             Ok(&[])
         } else {
@@ -647,7 +650,7 @@ pub fn intern_static(
         &mut self,
         alloc_id: AllocId,
         mutability: Mutability,
-    ) -> EvalResult<'tcx> {
+    ) -> InterpResult<'tcx> {
         trace!(
             "mark_static_initialized {:?}, mutability: {:?}",
             alloc_id,
@@ -695,7 +698,7 @@ pub fn copy(
         dest_align: Align,
         size: Size,
         nonoverlapping: bool,
-    ) -> EvalResult<'tcx> {
+    ) -> InterpResult<'tcx> {
         self.copy_repeatedly(src, src_align, dest, dest_align, size, 1, nonoverlapping)
     }
 
@@ -708,7 +711,7 @@ pub fn copy_repeatedly(
         size: Size,
         length: u64,
         nonoverlapping: bool,
-    ) -> EvalResult<'tcx> {
+    ) -> InterpResult<'tcx> {
         self.check_align(src, src_align)?;
         self.check_align(dest, dest_align)?;
         if size.bytes() == 0 {
@@ -811,7 +814,7 @@ fn copy_undef_mask(
         dest: Pointer<M::PointerTag>,
         size: Size,
         repeat: u64,
-    ) -> EvalResult<'tcx> {
+    ) -> InterpResult<'tcx> {
         // The bits have to be saved locally before writing to dest in case src and dest overlap.
         assert_eq!(size.bytes() as usize as u64, size.bytes());
 
index 5c75c323ddd7d773ffb80d1bcb9fc07c247733c8..7c83bf1d27d941c2571ee23def2fc053e8bd0bd0 100644 (file)
@@ -9,7 +9,7 @@
 use rustc::mir::interpret::{
     GlobalId, AllocId, CheckInAllocMsg,
     ConstValue, Pointer, Scalar,
-    EvalResult, InterpError, InboundsCheck,
+    InterpResult, InterpError, InboundsCheck,
     sign_extend, truncate,
 };
 use super::{
@@ -61,12 +61,12 @@ pub fn to_scalar_or_undef(self) -> ScalarMaybeUndef<Tag> {
     }
 
     #[inline]
-    pub fn to_scalar(self) -> EvalResult<'tcx, Scalar<Tag>> {
+    pub fn to_scalar(self) -> InterpResult<'tcx, Scalar<Tag>> {
         self.to_scalar_or_undef().not_undef()
     }
 
     #[inline]
-    pub fn to_scalar_pair(self) -> EvalResult<'tcx, (Scalar<Tag>, Scalar<Tag>)> {
+    pub fn to_scalar_pair(self) -> InterpResult<'tcx, (Scalar<Tag>, Scalar<Tag>)> {
         match self {
             Immediate::Scalar(..) => bug!("Got a thin pointer where a scalar pair was expected"),
             Immediate::ScalarPair(a, b) => Ok((a.not_undef()?, b.not_undef()?))
@@ -76,7 +76,7 @@ pub fn to_scalar_pair(self) -> EvalResult<'tcx, (Scalar<Tag>, Scalar<Tag>)> {
     /// Converts the immediate into a pointer (or a pointer-sized integer).
     /// Throws away the second half of a ScalarPair!
     #[inline]
-    pub fn to_scalar_ptr(self) -> EvalResult<'tcx, Scalar<Tag>> {
+    pub fn to_scalar_ptr(self) -> InterpResult<'tcx, Scalar<Tag>> {
         match self {
             Immediate::Scalar(ptr) |
             Immediate::ScalarPair(ptr, _) => ptr.not_undef(),
@@ -86,7 +86,7 @@ pub fn to_scalar_ptr(self) -> EvalResult<'tcx, Scalar<Tag>> {
     /// Converts the value into its metadata.
     /// Throws away the first half of a ScalarPair!
     #[inline]
-    pub fn to_meta(self) -> EvalResult<'tcx, Option<Scalar<Tag>>> {
+    pub fn to_meta(self) -> InterpResult<'tcx, Option<Scalar<Tag>>> {
         Ok(match self {
             Immediate::Scalar(_) => None,
             Immediate::ScalarPair(_, meta) => Some(meta.not_undef()?),
@@ -185,7 +185,7 @@ pub fn from_scalar(val: Scalar<Tag>, layout: TyLayout<'tcx>) -> Self {
     }
 
     #[inline]
-    pub fn to_bits(self) -> EvalResult<'tcx, u128> {
+    pub fn to_bits(self) -> InterpResult<'tcx, u128> {
         self.to_scalar()?.to_bits(self.layout.size)
     }
 }
@@ -195,8 +195,8 @@ pub fn to_bits(self) -> EvalResult<'tcx, u128> {
 #[inline(always)]
 pub(super) fn from_known_layout<'tcx>(
     layout: Option<TyLayout<'tcx>>,
-    compute: impl FnOnce() -> EvalResult<'tcx, TyLayout<'tcx>>
-) -> EvalResult<'tcx, TyLayout<'tcx>> {
+    compute: impl FnOnce() -> InterpResult<'tcx, TyLayout<'tcx>>
+) -> InterpResult<'tcx, TyLayout<'tcx>> {
     match layout {
         None => compute(),
         Some(layout) => {
@@ -217,7 +217,7 @@ impl<'a, 'mir, 'tcx, M: Machine<'a, 'mir, 'tcx>> InterpretCx<'a, 'mir, 'tcx, M>
     fn try_read_immediate_from_mplace(
         &self,
         mplace: MPlaceTy<'tcx, M::PointerTag>,
-    ) -> EvalResult<'tcx, Option<Immediate<M::PointerTag>>> {
+    ) -> InterpResult<'tcx, Option<Immediate<M::PointerTag>>> {
         if mplace.layout.is_unsized() {
             // Don't touch unsized
             return Ok(None);
@@ -271,7 +271,7 @@ fn try_read_immediate_from_mplace(
     pub(crate) fn try_read_immediate(
         &self,
         src: OpTy<'tcx, M::PointerTag>,
-    ) -> EvalResult<'tcx, Result<Immediate<M::PointerTag>, MemPlace<M::PointerTag>>> {
+    ) -> InterpResult<'tcx, Result<Immediate<M::PointerTag>, MemPlace<M::PointerTag>>> {
         Ok(match src.try_as_mplace() {
             Ok(mplace) => {
                 if let Some(val) = self.try_read_immediate_from_mplace(mplace)? {
@@ -289,7 +289,7 @@ pub(crate) fn try_read_immediate(
     pub fn read_immediate(
         &self,
         op: OpTy<'tcx, M::PointerTag>
-    ) -> EvalResult<'tcx, ImmTy<'tcx, M::PointerTag>> {
+    ) -> InterpResult<'tcx, ImmTy<'tcx, M::PointerTag>> {
         if let Ok(imm) = self.try_read_immediate(op)? {
             Ok(ImmTy { imm, layout: op.layout })
         } else {
@@ -301,7 +301,7 @@ pub fn read_immediate(
     pub fn read_scalar(
         &self,
         op: OpTy<'tcx, M::PointerTag>
-    ) -> EvalResult<'tcx, ScalarMaybeUndef<M::PointerTag>> {
+    ) -> InterpResult<'tcx, ScalarMaybeUndef<M::PointerTag>> {
         Ok(self.read_immediate(op)?.to_scalar_or_undef())
     }
 
@@ -309,7 +309,7 @@ pub fn read_scalar(
     pub fn read_str(
         &self,
         mplace: MPlaceTy<'tcx, M::PointerTag>,
-    ) -> EvalResult<'tcx, &str> {
+    ) -> InterpResult<'tcx, &str> {
         let len = mplace.len(self)?;
         let bytes = self.memory.read_bytes(mplace.ptr, Size::from_bytes(len as u64))?;
         let str = ::std::str::from_utf8(bytes)
@@ -322,7 +322,7 @@ pub fn operand_field(
         &self,
         op: OpTy<'tcx, M::PointerTag>,
         field: u64,
-    ) -> EvalResult<'tcx, OpTy<'tcx, M::PointerTag>> {
+    ) -> InterpResult<'tcx, OpTy<'tcx, M::PointerTag>> {
         let base = match op.try_as_mplace() {
             Ok(mplace) => {
                 // The easy case
@@ -357,7 +357,7 @@ pub fn operand_downcast(
         &self,
         op: OpTy<'tcx, M::PointerTag>,
         variant: VariantIdx,
-    ) -> EvalResult<'tcx, OpTy<'tcx, M::PointerTag>> {
+    ) -> InterpResult<'tcx, OpTy<'tcx, M::PointerTag>> {
         // Downcasts only change the layout
         Ok(match op.try_as_mplace() {
             Ok(mplace) => {
@@ -374,7 +374,7 @@ pub fn operand_projection(
         &self,
         base: OpTy<'tcx, M::PointerTag>,
         proj_elem: &mir::PlaceElem<'tcx>,
-    ) -> EvalResult<'tcx, OpTy<'tcx, M::PointerTag>> {
+    ) -> InterpResult<'tcx, OpTy<'tcx, M::PointerTag>> {
         use rustc::mir::ProjectionElem::*;
         Ok(match *proj_elem {
             Field(field, _) => self.operand_field(base, field.index() as u64)?,
@@ -401,7 +401,7 @@ pub fn access_local(
         frame: &super::Frame<'mir, 'tcx, M::PointerTag, M::FrameExtra>,
         local: mir::Local,
         layout: Option<TyLayout<'tcx>>,
-    ) -> EvalResult<'tcx, OpTy<'tcx, M::PointerTag>> {
+    ) -> InterpResult<'tcx, OpTy<'tcx, M::PointerTag>> {
         assert_ne!(local, mir::RETURN_PLACE);
         let layout = self.layout_of_local(frame, local, layout)?;
         let op = if layout.is_zst() {
@@ -418,7 +418,7 @@ pub fn access_local(
     pub fn place_to_op(
         &self,
         place: PlaceTy<'tcx, M::PointerTag>
-    ) -> EvalResult<'tcx, OpTy<'tcx, M::PointerTag>> {
+    ) -> InterpResult<'tcx, OpTy<'tcx, M::PointerTag>> {
         let op = match *place {
             Place::Ptr(mplace) => {
                 Operand::Indirect(mplace)
@@ -435,7 +435,7 @@ pub(super) fn eval_place_to_op(
         &self,
         mir_place: &mir::Place<'tcx>,
         layout: Option<TyLayout<'tcx>>,
-    ) -> EvalResult<'tcx, OpTy<'tcx, M::PointerTag>> {
+    ) -> InterpResult<'tcx, OpTy<'tcx, M::PointerTag>> {
         use rustc::mir::Place;
         use rustc::mir::PlaceBase;
 
@@ -475,7 +475,7 @@ pub fn eval_operand(
         &self,
         mir_op: &mir::Operand<'tcx>,
         layout: Option<TyLayout<'tcx>>,
-    ) -> EvalResult<'tcx, OpTy<'tcx, M::PointerTag>> {
+    ) -> InterpResult<'tcx, OpTy<'tcx, M::PointerTag>> {
         use rustc::mir::Operand::*;
         let op = match *mir_op {
             // FIXME: do some more logic on `move` to invalidate the old location
@@ -493,7 +493,7 @@ pub fn eval_operand(
     pub(super) fn eval_operands(
         &self,
         ops: &[mir::Operand<'tcx>],
-    ) -> EvalResult<'tcx, Vec<OpTy<'tcx, M::PointerTag>>> {
+    ) -> InterpResult<'tcx, Vec<OpTy<'tcx, M::PointerTag>>> {
         ops.into_iter()
             .map(|op| self.eval_operand(op, None))
             .collect()
@@ -505,7 +505,7 @@ pub(super) fn eval_operands(
         &self,
         val: &'tcx ty::Const<'tcx>,
         layout: Option<TyLayout<'tcx>>,
-    ) -> EvalResult<'tcx, OpTy<'tcx, M::PointerTag>> {
+    ) -> InterpResult<'tcx, OpTy<'tcx, M::PointerTag>> {
         let tag_scalar = |scalar| match scalar {
             Scalar::Ptr(ptr) => Scalar::Ptr(self.tag_static_base_pointer(ptr)),
             Scalar::Raw { data, size } => Scalar::Raw { data, size },
@@ -561,7 +561,7 @@ pub(super) fn eval_operands(
     pub fn read_discriminant(
         &self,
         rval: OpTy<'tcx, M::PointerTag>,
-    ) -> EvalResult<'tcx, (u128, VariantIdx)> {
+    ) -> InterpResult<'tcx, (u128, VariantIdx)> {
         trace!("read_discriminant_value {:#?}", rval.layout);
 
         let (discr_kind, discr_index) = match rval.layout.variants {
index 488f81d8f740ec776ff4897ffd1620a0fe0f7299..db7da9359de7b7762bfc153120bdce4bd56dc699 100644 (file)
@@ -1,9 +1,8 @@
 use rustc::mir;
-use rustc::ty::{self, layout::{Size, TyLayout}};
+use rustc::ty::{self, layout::TyLayout};
 use syntax::ast::FloatTy;
-use rustc_apfloat::ieee::{Double, Single};
 use rustc_apfloat::Float;
-use rustc::mir::interpret::{EvalResult, Scalar};
+use rustc::mir::interpret::{InterpResult, Scalar};
 
 use super::{InterpretCx, PlaceTy, Immediate, Machine, ImmTy};
 
@@ -17,7 +16,7 @@ pub fn binop_with_overflow(
         left: ImmTy<'tcx, M::PointerTag>,
         right: ImmTy<'tcx, M::PointerTag>,
         dest: PlaceTy<'tcx, M::PointerTag>,
-    ) -> EvalResult<'tcx> {
+    ) -> InterpResult<'tcx> {
         let (val, overflowed) = self.binary_op(op, left, right)?;
         let val = Immediate::ScalarPair(val.into(), Scalar::from_bool(overflowed).into());
         self.write_immediate(val, dest)
@@ -31,7 +30,7 @@ pub fn binop_ignore_overflow(
         left: ImmTy<'tcx, M::PointerTag>,
         right: ImmTy<'tcx, M::PointerTag>,
         dest: PlaceTy<'tcx, M::PointerTag>,
-    ) -> EvalResult<'tcx> {
+    ) -> InterpResult<'tcx> {
         let (val, _overflowed) = self.binary_op(op, left, right)?;
         self.write_scalar(val, dest)
     }
@@ -43,7 +42,7 @@ fn binary_char_op(
         bin_op: mir::BinOp,
         l: char,
         r: char,
-    ) -> EvalResult<'tcx, (Scalar<M::PointerTag>, bool)> {
+    ) -> (Scalar<M::PointerTag>, bool) {
         use rustc::mir::BinOp::*;
 
         let res = match bin_op {
@@ -55,7 +54,7 @@ fn binary_char_op(
             Ge => l >= r,
             _ => bug!("Invalid operation on char: {:?}", bin_op),
         };
-        return Ok((Scalar::from_bool(res), false));
+        return (Scalar::from_bool(res), false);
     }
 
     fn binary_bool_op(
@@ -63,7 +62,7 @@ fn binary_bool_op(
         bin_op: mir::BinOp,
         l: bool,
         r: bool,
-    ) -> EvalResult<'tcx, (Scalar<M::PointerTag>, bool)> {
+    ) -> (Scalar<M::PointerTag>, bool) {
         use rustc::mir::BinOp::*;
 
         let res = match bin_op {
@@ -78,46 +77,32 @@ fn binary_bool_op(
             BitXor => l ^ r,
             _ => bug!("Invalid operation on bool: {:?}", bin_op),
         };
-        return Ok((Scalar::from_bool(res), false));
+        return (Scalar::from_bool(res), false);
     }
 
-    fn binary_float_op(
+    fn binary_float_op<F: Float + Into<Scalar<M::PointerTag>>>(
         &self,
         bin_op: mir::BinOp,
-        fty: FloatTy,
-        // passing in raw bits
-        l: u128,
-        r: u128,
-    ) -> EvalResult<'tcx, (Scalar<M::PointerTag>, bool)> {
+        l: F,
+        r: F,
+    ) -> (Scalar<M::PointerTag>, bool) {
         use rustc::mir::BinOp::*;
 
-        macro_rules! float_math {
-            ($ty:path, $size:expr) => {{
-                let l = <$ty>::from_bits(l);
-                let r = <$ty>::from_bits(r);
-                let bitify = |res: ::rustc_apfloat::StatusAnd<$ty>|
-                    Scalar::from_uint(res.value.to_bits(), Size::from_bytes($size));
-                let val = match bin_op {
-                    Eq => Scalar::from_bool(l == r),
-                    Ne => Scalar::from_bool(l != r),
-                    Lt => Scalar::from_bool(l < r),
-                    Le => Scalar::from_bool(l <= r),
-                    Gt => Scalar::from_bool(l > r),
-                    Ge => Scalar::from_bool(l >= r),
-                    Add => bitify(l + r),
-                    Sub => bitify(l - r),
-                    Mul => bitify(l * r),
-                    Div => bitify(l / r),
-                    Rem => bitify(l % r),
-                    _ => bug!("invalid float op: `{:?}`", bin_op),
-                };
-                return Ok((val, false));
-            }};
-        }
-        match fty {
-            FloatTy::F32 => float_math!(Single, 4),
-            FloatTy::F64 => float_math!(Double, 8),
-        }
+        let val = match bin_op {
+            Eq => Scalar::from_bool(l == r),
+            Ne => Scalar::from_bool(l != r),
+            Lt => Scalar::from_bool(l < r),
+            Le => Scalar::from_bool(l <= r),
+            Gt => Scalar::from_bool(l > r),
+            Ge => Scalar::from_bool(l >= r),
+            Add => (l + r).value.into(),
+            Sub => (l - r).value.into(),
+            Mul => (l * r).value.into(),
+            Div => (l / r).value.into(),
+            Rem => (l % r).value.into(),
+            _ => bug!("invalid float op: `{:?}`", bin_op),
+        };
+        return (val, false);
     }
 
     fn binary_int_op(
@@ -128,7 +113,7 @@ fn binary_int_op(
         left_layout: TyLayout<'tcx>,
         r: u128,
         right_layout: TyLayout<'tcx>,
-    ) -> EvalResult<'tcx, (Scalar<M::PointerTag>, bool)> {
+    ) -> InterpResult<'tcx, (Scalar<M::PointerTag>, bool)> {
         use rustc::mir::BinOp::*;
 
         // Shift ops can have an RHS with a different numeric type.
@@ -279,35 +264,42 @@ pub fn binary_op(
         bin_op: mir::BinOp,
         left: ImmTy<'tcx, M::PointerTag>,
         right: ImmTy<'tcx, M::PointerTag>,
-    ) -> EvalResult<'tcx, (Scalar<M::PointerTag>, bool)> {
+    ) -> InterpResult<'tcx, (Scalar<M::PointerTag>, bool)> {
         trace!("Running binary op {:?}: {:?} ({:?}), {:?} ({:?})",
             bin_op, *left, left.layout.ty, *right, right.layout.ty);
 
         match left.layout.ty.sty {
             ty::Char => {
                 assert_eq!(left.layout.ty, right.layout.ty);
-                let left = left.to_scalar()?.to_char()?;
-                let right = right.to_scalar()?.to_char()?;
-                self.binary_char_op(bin_op, left, right)
+                let left = left.to_scalar()?;
+                let right = right.to_scalar()?;
+                Ok(self.binary_char_op(bin_op, left.to_char()?, right.to_char()?))
             }
             ty::Bool => {
                 assert_eq!(left.layout.ty, right.layout.ty);
-                let left = left.to_scalar()?.to_bool()?;
-                let right = right.to_scalar()?.to_bool()?;
-                self.binary_bool_op(bin_op, left, right)
+                let left = left.to_scalar()?;
+                let right = right.to_scalar()?;
+                Ok(self.binary_bool_op(bin_op, left.to_bool()?, right.to_bool()?))
             }
             ty::Float(fty) => {
                 assert_eq!(left.layout.ty, right.layout.ty);
-                let left = left.to_bits()?;
-                let right = right.to_bits()?;
-                self.binary_float_op(bin_op, fty, left, right)
+                let left = left.to_scalar()?;
+                let right = right.to_scalar()?;
+                Ok(match fty {
+                    FloatTy::F32 => self.binary_float_op(bin_op, left.to_f32()?, right.to_f32()?),
+                    FloatTy::F64 => self.binary_float_op(bin_op, left.to_f64()?, right.to_f64()?),
+                })
             }
             _ => {
                 // Must be integer(-like) types.  Don't forget about == on fn pointers.
-                assert!(left.layout.ty.is_integral() || left.layout.ty.is_unsafe_ptr() ||
-                    left.layout.ty.is_fn());
-                assert!(right.layout.ty.is_integral() || right.layout.ty.is_unsafe_ptr() ||
-                    right.layout.ty.is_fn());
+                assert!(
+                    left.layout.ty.is_integral()   ||
+                    left.layout.ty.is_unsafe_ptr() || left.layout.ty.is_fn_ptr(),
+                    "Unexpected LHS type {:?} for BinOp {:?}", left.layout.ty, bin_op);
+                assert!(
+                    right.layout.ty.is_integral()   ||
+                    right.layout.ty.is_unsafe_ptr() || right.layout.ty.is_fn_ptr(),
+                    "Unexpected RHS type {:?} for BinOp {:?}", right.layout.ty, bin_op);
 
                 // Handle operations that support pointer values
                 if left.to_scalar_ptr()?.is_ptr() ||
@@ -329,7 +321,7 @@ pub fn unary_op(
         &self,
         un_op: mir::UnOp,
         val: ImmTy<'tcx, M::PointerTag>,
-    ) -> EvalResult<'tcx, Scalar<M::PointerTag>> {
+    ) -> InterpResult<'tcx, Scalar<M::PointerTag>> {
         use rustc::mir::UnOp::*;
 
         let layout = val.layout;
@@ -346,13 +338,12 @@ pub fn unary_op(
                 Ok(Scalar::from_bool(res))
             }
             ty::Float(fty) => {
-                let val = val.to_bits(layout.size)?;
                 let res = match (un_op, fty) {
-                    (Neg, FloatTy::F32) => Single::to_bits(-Single::from_bits(val)),
-                    (Neg, FloatTy::F64) => Double::to_bits(-Double::from_bits(val)),
+                    (Neg, FloatTy::F32) => Scalar::from_f32(-val.to_f32()?),
+                    (Neg, FloatTy::F64) => Scalar::from_f64(-val.to_f64()?),
                     _ => bug!("Invalid float op {:?}", un_op)
                 };
-                Ok(Scalar::from_uint(res, layout.size))
+                Ok(res)
             }
             _ => {
                 assert!(layout.ty.is_integral());
index fac9665d968e2a9b7e21d6119ca56c4ca2e081e7..758230e2b7dcb8c271ed90531de5fc9908ba5784 100644 (file)
@@ -12,7 +12,7 @@
 use rustc::ty::TypeFoldable;
 
 use super::{
-    GlobalId, AllocId, Allocation, Scalar, EvalResult, Pointer, PointerArithmetic,
+    GlobalId, AllocId, Allocation, Scalar, InterpResult, Pointer, PointerArithmetic,
     InterpretCx, Machine, AllocMap, AllocationExtra,
     RawConst, Immediate, ImmTy, ScalarMaybeUndef, Operand, OpTy, MemoryKind, LocalValue
 };
@@ -130,7 +130,7 @@ pub fn to_scalar_ptr_align(self) -> (Scalar<Tag>, Align) {
 
     /// Extract the ptr part of the mplace
     #[inline(always)]
-    pub fn to_ptr(self) -> EvalResult<'tcx, Pointer<Tag>> {
+    pub fn to_ptr(self) -> InterpResult<'tcx, Pointer<Tag>> {
         // At this point, we forget about the alignment information --
         // the place has been turned into a reference, and no matter where it came from,
         // it now must be aligned.
@@ -152,7 +152,7 @@ pub fn offset(
         offset: Size,
         meta: Option<Scalar<Tag>>,
         cx: &impl HasDataLayout,
-    ) -> EvalResult<'tcx, Self> {
+    ) -> InterpResult<'tcx, Self> {
         Ok(MemPlace {
             ptr: self.ptr.ptr_offset(offset, cx)?,
             align: self.align.restrict_for_offset(offset),
@@ -190,7 +190,7 @@ pub fn offset(
         meta: Option<Scalar<Tag>>,
         layout: TyLayout<'tcx>,
         cx: &impl HasDataLayout,
-    ) -> EvalResult<'tcx, Self> {
+    ) -> InterpResult<'tcx, Self> {
         Ok(MPlaceTy {
             mplace: self.mplace.offset(offset, meta, cx)?,
             layout,
@@ -203,7 +203,7 @@ fn from_aligned_ptr(ptr: Pointer<Tag>, layout: TyLayout<'tcx>) -> Self {
     }
 
     #[inline]
-    pub(super) fn len(self, cx: &impl HasDataLayout) -> EvalResult<'tcx, u64> {
+    pub(super) fn len(self, cx: &impl HasDataLayout) -> InterpResult<'tcx, u64> {
         if self.layout.is_unsized() {
             // We need to consult `meta` metadata
             match self.layout.ty.sty {
@@ -222,7 +222,7 @@ pub(super) fn len(self, cx: &impl HasDataLayout) -> EvalResult<'tcx, u64> {
     }
 
     #[inline]
-    pub(super) fn vtable(self) -> EvalResult<'tcx, Pointer<Tag>> {
+    pub(super) fn vtable(self) -> InterpResult<'tcx, Pointer<Tag>> {
         match self.layout.ty.sty {
             ty::Dynamic(..) => self.mplace.meta.unwrap().to_ptr(),
             _ => bug!("vtable not supported on type {:?}", self.layout.ty),
@@ -277,7 +277,7 @@ pub fn to_scalar_ptr_align(self) -> (Scalar<Tag>, Align) {
     }
 
     #[inline]
-    pub fn to_ptr(self) -> EvalResult<'tcx, Pointer<Tag>> {
+    pub fn to_ptr(self) -> InterpResult<'tcx, Pointer<Tag>> {
         self.to_mem_place().to_ptr()
     }
 }
@@ -306,7 +306,7 @@ impl<'a, 'mir, 'tcx, Tag, M> InterpretCx<'a, 'mir, 'tcx, M>
     pub fn ref_to_mplace(
         &self,
         val: ImmTy<'tcx, M::PointerTag>,
-    ) -> EvalResult<'tcx, MPlaceTy<'tcx, M::PointerTag>> {
+    ) -> InterpResult<'tcx, MPlaceTy<'tcx, M::PointerTag>> {
         let pointee_type = val.layout.ty.builtin_deref(true).unwrap().ty;
         let layout = self.layout_of(pointee_type)?;
 
@@ -327,7 +327,7 @@ pub fn ref_to_mplace(
     pub fn deref_operand(
         &self,
         src: OpTy<'tcx, M::PointerTag>,
-    ) -> EvalResult<'tcx, MPlaceTy<'tcx, M::PointerTag>> {
+    ) -> InterpResult<'tcx, MPlaceTy<'tcx, M::PointerTag>> {
         let val = self.read_immediate(src)?;
         trace!("deref to {} on {:?}", val.layout.ty, *val);
         self.ref_to_mplace(val)
@@ -341,7 +341,7 @@ pub fn mplace_field(
         &self,
         base: MPlaceTy<'tcx, M::PointerTag>,
         field: u64,
-    ) -> EvalResult<'tcx, MPlaceTy<'tcx, M::PointerTag>> {
+    ) -> InterpResult<'tcx, MPlaceTy<'tcx, M::PointerTag>> {
         // Not using the layout method because we want to compute on u64
         let offset = match base.layout.fields {
             layout::FieldPlacement::Arbitrary { ref offsets, .. } =>
@@ -397,7 +397,7 @@ pub fn mplace_array_fields(
         &self,
         base: MPlaceTy<'tcx, Tag>,
     ) ->
-        EvalResult<'tcx, impl Iterator<Item=EvalResult<'tcx, MPlaceTy<'tcx, Tag>>> + 'a>
+        InterpResult<'tcx, impl Iterator<Item=InterpResult<'tcx, MPlaceTy<'tcx, Tag>>> + 'a>
     {
         let len = base.len(self)?; // also asserts that we have a type where this makes sense
         let stride = match base.layout.fields {
@@ -414,7 +414,7 @@ pub fn mplace_subslice(
         base: MPlaceTy<'tcx, M::PointerTag>,
         from: u64,
         to: u64,
-    ) -> EvalResult<'tcx, MPlaceTy<'tcx, M::PointerTag>> {
+    ) -> InterpResult<'tcx, MPlaceTy<'tcx, M::PointerTag>> {
         let len = base.len(self)?; // also asserts that we have a type where this makes sense
         assert!(from <= len - to);
 
@@ -448,7 +448,7 @@ pub fn mplace_downcast(
         &self,
         base: MPlaceTy<'tcx, M::PointerTag>,
         variant: VariantIdx,
-    ) -> EvalResult<'tcx, MPlaceTy<'tcx, M::PointerTag>> {
+    ) -> InterpResult<'tcx, MPlaceTy<'tcx, M::PointerTag>> {
         // Downcasts only change the layout
         assert!(base.meta.is_none());
         Ok(MPlaceTy { layout: base.layout.for_variant(self, variant), ..base })
@@ -459,7 +459,7 @@ pub fn mplace_projection(
         &self,
         base: MPlaceTy<'tcx, M::PointerTag>,
         proj_elem: &mir::PlaceElem<'tcx>,
-    ) -> EvalResult<'tcx, MPlaceTy<'tcx, M::PointerTag>> {
+    ) -> InterpResult<'tcx, MPlaceTy<'tcx, M::PointerTag>> {
         use rustc::mir::ProjectionElem::*;
         Ok(match *proj_elem {
             Field(field, _) => self.mplace_field(base, field.index() as u64)?,
@@ -504,7 +504,7 @@ pub fn place_field(
         &mut self,
         base: PlaceTy<'tcx, M::PointerTag>,
         field: u64,
-    ) -> EvalResult<'tcx, PlaceTy<'tcx, M::PointerTag>> {
+    ) -> InterpResult<'tcx, PlaceTy<'tcx, M::PointerTag>> {
         // FIXME: We could try to be smarter and avoid allocation for fields that span the
         // entire place.
         let mplace = self.force_allocation(base)?;
@@ -515,7 +515,7 @@ pub fn place_downcast(
         &self,
         base: PlaceTy<'tcx, M::PointerTag>,
         variant: VariantIdx,
-    ) -> EvalResult<'tcx, PlaceTy<'tcx, M::PointerTag>> {
+    ) -> InterpResult<'tcx, PlaceTy<'tcx, M::PointerTag>> {
         // Downcast just changes the layout
         Ok(match base.place {
             Place::Ptr(mplace) =>
@@ -532,7 +532,7 @@ pub fn place_projection(
         &mut self,
         base: PlaceTy<'tcx, M::PointerTag>,
         proj_elem: &mir::ProjectionElem<mir::Local, Ty<'tcx>>,
-    ) -> EvalResult<'tcx, PlaceTy<'tcx, M::PointerTag>> {
+    ) -> InterpResult<'tcx, PlaceTy<'tcx, M::PointerTag>> {
         use rustc::mir::ProjectionElem::*;
         Ok(match *proj_elem {
             Field(field, _) =>  self.place_field(base, field.index() as u64)?,
@@ -552,7 +552,7 @@ pub fn place_projection(
     pub(super) fn eval_static_to_mplace(
         &self,
         place_static: &mir::Static<'tcx>
-    ) -> EvalResult<'tcx, MPlaceTy<'tcx, M::PointerTag>> {
+    ) -> InterpResult<'tcx, MPlaceTy<'tcx, M::PointerTag>> {
         use rustc::mir::StaticKind;
 
         Ok(match place_static.kind {
@@ -600,7 +600,7 @@ pub(super) fn eval_static_to_mplace(
     pub fn eval_place(
         &mut self,
         mir_place: &mir::Place<'tcx>,
-    ) -> EvalResult<'tcx, PlaceTy<'tcx, M::PointerTag>> {
+    ) -> InterpResult<'tcx, PlaceTy<'tcx, M::PointerTag>> {
         use rustc::mir::PlaceBase;
 
         mir_place.iterate(|place_base, place_projection| {
@@ -612,7 +612,7 @@ pub fn eval_place(
                         PlaceTy {
                             place: *return_place,
                             layout: self
-                                .layout_of(self.monomorphize(self.frame().mir.return_ty())?)?,
+                                .layout_of(self.monomorphize(self.frame().body.return_ty())?)?,
                         }
                     }
                     None => return err!(InvalidNullPointerUsage),
@@ -642,7 +642,7 @@ pub fn write_scalar(
         &mut self,
         val: impl Into<ScalarMaybeUndef<M::PointerTag>>,
         dest: PlaceTy<'tcx, M::PointerTag>,
-    ) -> EvalResult<'tcx> {
+    ) -> InterpResult<'tcx> {
         self.write_immediate(Immediate::Scalar(val.into()), dest)
     }
 
@@ -652,7 +652,7 @@ pub fn write_immediate(
         &mut self,
         src: Immediate<M::PointerTag>,
         dest: PlaceTy<'tcx, M::PointerTag>,
-    ) -> EvalResult<'tcx> {
+    ) -> InterpResult<'tcx> {
         self.write_immediate_no_validate(src, dest)?;
 
         if M::enforce_validity(self) {
@@ -663,6 +663,23 @@ pub fn write_immediate(
         Ok(())
     }
 
+    /// Write an `Immediate` to memory.
+    #[inline(always)]
+    pub fn write_immediate_to_mplace(
+        &mut self,
+        src: Immediate<M::PointerTag>,
+        dest: MPlaceTy<'tcx, M::PointerTag>,
+    ) -> InterpResult<'tcx> {
+        self.write_immediate_to_mplace_no_validate(src, dest)?;
+
+        if M::enforce_validity(self) {
+            // Data got changed, better make sure it matches the type!
+            self.validate_operand(dest.into(), vec![], None, /*const_mode*/ false)?;
+        }
+
+        Ok(())
+    }
+
     /// Write an immediate to a place.
     /// If you use this you are responsible for validating that things got copied at the
     /// right type.
@@ -670,7 +687,7 @@ fn write_immediate_no_validate(
         &mut self,
         src: Immediate<M::PointerTag>,
         dest: PlaceTy<'tcx, M::PointerTag>,
-    ) -> EvalResult<'tcx> {
+    ) -> InterpResult<'tcx> {
         if cfg!(debug_assertions) {
             // This is a very common path, avoid some checks in release mode
             assert!(!dest.layout.is_unsized(), "Cannot write unsized data");
@@ -720,7 +737,7 @@ fn write_immediate_to_mplace_no_validate(
         &mut self,
         value: Immediate<M::PointerTag>,
         dest: MPlaceTy<'tcx, M::PointerTag>,
-    ) -> EvalResult<'tcx> {
+    ) -> InterpResult<'tcx> {
         let (ptr, ptr_align) = dest.to_scalar_ptr_align();
         // Note that it is really important that the type here is the right one, and matches the
         // type things are read at. In case `src_val` is a `ScalarPair`, we don't do any magic here
@@ -784,7 +801,7 @@ pub fn copy_op(
         &mut self,
         src: OpTy<'tcx, M::PointerTag>,
         dest: PlaceTy<'tcx, M::PointerTag>,
-    ) -> EvalResult<'tcx> {
+    ) -> InterpResult<'tcx> {
         self.copy_op_no_validate(src, dest)?;
 
         if M::enforce_validity(self) {
@@ -803,7 +820,7 @@ fn copy_op_no_validate(
         &mut self,
         src: OpTy<'tcx, M::PointerTag>,
         dest: PlaceTy<'tcx, M::PointerTag>,
-    ) -> EvalResult<'tcx> {
+    ) -> InterpResult<'tcx> {
         // We do NOT compare the types for equality, because well-typed code can
         // actually "transmute" `&mut T` to `&T` in an assignment without a cast.
         assert!(src.layout.details == dest.layout.details,
@@ -848,7 +865,7 @@ pub fn copy_op_transmute(
         &mut self,
         src: OpTy<'tcx, M::PointerTag>,
         dest: PlaceTy<'tcx, M::PointerTag>,
-    ) -> EvalResult<'tcx> {
+    ) -> InterpResult<'tcx> {
         if src.layout.details == dest.layout.details {
             // Fast path: Just use normal `copy_op`
             return self.copy_op(src, dest);
@@ -895,7 +912,7 @@ pub fn force_allocation_maybe_sized(
         &mut self,
         place: PlaceTy<'tcx, M::PointerTag>,
         meta: Option<Scalar<M::PointerTag>>,
-    ) -> EvalResult<'tcx, (MPlaceTy<'tcx, M::PointerTag>, Option<Size>)> {
+    ) -> InterpResult<'tcx, (MPlaceTy<'tcx, M::PointerTag>, Option<Size>)> {
         let (mplace, size) = match place.place {
             Place::Local { frame, local } => {
                 match self.stack[frame].locals[local].access_mut()? {
@@ -948,7 +965,7 @@ pub fn force_allocation_maybe_sized(
     pub fn force_allocation(
         &mut self,
         place: PlaceTy<'tcx, M::PointerTag>,
-    ) -> EvalResult<'tcx, MPlaceTy<'tcx, M::PointerTag>> {
+    ) -> InterpResult<'tcx, MPlaceTy<'tcx, M::PointerTag>> {
         Ok(self.force_allocation_maybe_sized(place, None)?.0)
     }
 
@@ -965,7 +982,7 @@ pub fn write_discriminant_index(
         &mut self,
         variant_index: VariantIdx,
         dest: PlaceTy<'tcx, M::PointerTag>,
-    ) -> EvalResult<'tcx> {
+    ) -> InterpResult<'tcx> {
         match dest.layout.variants {
             layout::Variants::Single { index } => {
                 assert_eq!(index, variant_index);
@@ -1021,7 +1038,7 @@ pub fn write_discriminant_index(
     pub fn raw_const_to_mplace(
         &self,
         raw: RawConst<'tcx>,
-    ) -> EvalResult<'tcx, MPlaceTy<'tcx, M::PointerTag>> {
+    ) -> InterpResult<'tcx, MPlaceTy<'tcx, M::PointerTag>> {
         // This must be an allocation in `tcx`
         assert!(self.tcx.alloc_map.lock().get(raw.alloc_id).is_some());
         let ptr = self.tag_static_base_pointer(Pointer::from(raw.alloc_id));
@@ -1032,7 +1049,7 @@ pub fn raw_const_to_mplace(
     /// Turn a place with a `dyn Trait` type into a place with the actual dynamic type.
     /// Also return some more information so drop doesn't have to run the same code twice.
     pub(super) fn unpack_dyn_trait(&self, mplace: MPlaceTy<'tcx, M::PointerTag>)
-    -> EvalResult<'tcx, (ty::Instance<'tcx>, MPlaceTy<'tcx, M::PointerTag>)> {
+    -> InterpResult<'tcx, (ty::Instance<'tcx>, MPlaceTy<'tcx, M::PointerTag>)> {
         let vtable = mplace.vtable()?; // also sanity checks the type
         let (instance, ty) = self.read_drop_type_from_vtable(vtable)?;
         let layout = self.layout_of(ty)?;
index c0bc7ce6b39ad09af7c5d7e790fef386c4f68a78..4e13291c787da81245ae24138452d5f4fafb7850 100644 (file)
@@ -12,7 +12,7 @@
 use rustc::mir::interpret::{
     AllocId, Pointer, Scalar,
     Relocations, Allocation, UndefMask,
-    EvalResult, InterpError,
+    InterpResult, InterpError,
 };
 
 use rustc::ty::{self, TyCtxt};
 
 #[derive(Default)]
 pub(crate) struct InfiniteLoopDetector<'a, 'mir, 'tcx: 'a + 'mir> {
-    /// The set of all `EvalSnapshot` *hashes* observed by this detector.
+    /// The set of all `InterpSnapshot` *hashes* observed by this detector.
     ///
     /// When a collision occurs in this table, we store the full snapshot in
     /// `snapshots`.
     hashes: FxHashSet<u64>,
 
-    /// The set of all `EvalSnapshot`s observed by this detector.
+    /// The set of all `InterpSnapshot`s observed by this detector.
     ///
-    /// An `EvalSnapshot` will only be fully cloned once it has caused a
+    /// An `InterpSnapshot` will only be fully cloned once it has caused a
     /// collision in `hashes`. As a result, the detector must observe at least
     /// *two* full cycles of an infinite loop before it triggers.
-    snapshots: FxHashSet<EvalSnapshot<'a, 'mir, 'tcx>>,
+    snapshots: FxHashSet<InterpSnapshot<'a, 'mir, 'tcx>>,
 }
 
 impl<'a, 'mir, 'tcx> InfiniteLoopDetector<'a, 'mir, 'tcx>
@@ -51,7 +51,7 @@ pub fn observe_and_analyze<'b>(
         span: Span,
         memory: &Memory<'a, 'mir, 'tcx, CompileTimeInterpreter<'a, 'mir, 'tcx>>,
         stack: &[Frame<'mir, 'tcx>],
-    ) -> EvalResult<'tcx, ()> {
+    ) -> InterpResult<'tcx, ()> {
         // Compute stack's hash before copying anything
         let mut hcx = tcx.get_stable_hashing_context();
         let mut hasher = StableHasher::<u64>::new();
@@ -72,7 +72,7 @@ pub fn observe_and_analyze<'b>(
         // We need to make a full copy. NOW things start to get really expensive.
         info!("snapshotting the state of the interpreter");
 
-        if self.snapshots.insert(EvalSnapshot::new(memory, stack)) {
+        if self.snapshots.insert(InterpSnapshot::new(memory, stack)) {
             // Spurious collision or first cycle
             return Ok(())
         }
@@ -315,8 +315,8 @@ struct FrameSnapshot<'a, 'tcx: 'a> {
     stmt: usize,
 }
 
-impl_stable_hash_for!(impl<'mir, 'tcx: 'mir> for struct Frame<'mir, 'tcx> {
-    mir,
+impl_stable_hash_for!(impl<> for struct Frame<'mir, 'tcx> {
+    body,
     instance,
     span,
     return_to_block,
@@ -334,7 +334,7 @@ impl<'a, 'mir, 'tcx, Ctx> Snapshot<'a, Ctx> for &'a Frame<'mir, 'tcx>
 
     fn snapshot(&self, ctx: &'a Ctx) -> Self::Item {
         let Frame {
-            mir: _,
+            body: _,
             instance,
             span,
             return_to_block,
@@ -384,18 +384,18 @@ fn resolve(&'b self, id: &AllocId) -> Option<&'b Allocation> {
 /// The virtual machine state during const-evaluation at a given point in time.
 /// We assume the `CompileTimeInterpreter` has no interesting extra state that
 /// is worth considering here.
-struct EvalSnapshot<'a, 'mir, 'tcx: 'a + 'mir> {
+struct InterpSnapshot<'a, 'mir, 'tcx: 'a + 'mir> {
     memory: Memory<'a, 'mir, 'tcx, CompileTimeInterpreter<'a, 'mir, 'tcx>>,
     stack: Vec<Frame<'mir, 'tcx>>,
 }
 
-impl<'a, 'mir, 'tcx: 'a + 'mir> EvalSnapshot<'a, 'mir, 'tcx>
+impl<'a, 'mir, 'tcx: 'a + 'mir> InterpSnapshot<'a, 'mir, 'tcx>
 {
     fn new(
         memory: &Memory<'a, 'mir, 'tcx, CompileTimeInterpreter<'a, 'mir, 'tcx>>,
         stack: &[Frame<'mir, 'tcx>]
     ) -> Self {
-        EvalSnapshot {
+        InterpSnapshot {
             memory: memory.clone(),
             stack: stack.into(),
         }
@@ -411,7 +411,7 @@ fn snapshot(&'b self)
 
 }
 
-impl<'a, 'mir, 'tcx> Hash for EvalSnapshot<'a, 'mir, 'tcx>
+impl<'a, 'mir, 'tcx> Hash for InterpSnapshot<'a, 'mir, 'tcx>
 {
     fn hash<H: Hasher>(&self, state: &mut H) {
         // Implement in terms of hash stable, so that k1 == k2 -> hash(k1) == hash(k2)
@@ -422,16 +422,16 @@ fn hash<H: Hasher>(&self, state: &mut H) {
     }
 }
 
-impl_stable_hash_for!(impl<'tcx, 'b, 'mir> for struct EvalSnapshot<'b, 'mir, 'tcx> {
+impl_stable_hash_for!(impl<> for struct InterpSnapshot<'_, 'mir, 'tcx> {
     // Not hashing memory: Avoid hashing memory all the time during execution
     memory -> _,
     stack,
 });
 
-impl<'a, 'mir, 'tcx> Eq for EvalSnapshot<'a, 'mir, 'tcx>
+impl<'a, 'mir, 'tcx> Eq for InterpSnapshot<'a, 'mir, 'tcx>
 {}
 
-impl<'a, 'mir, 'tcx> PartialEq for EvalSnapshot<'a, 'mir, 'tcx>
+impl<'a, 'mir, 'tcx> PartialEq for InterpSnapshot<'a, 'mir, 'tcx>
 {
     fn eq(&self, other: &Self) -> bool {
         // FIXME: This looks to be a *ridiculously expensive* comparison operation.
index 4ca865cc8449945ffbbf642515d3ba1c4046f0ac..9312d71188c97355f86d867e7320face59f613f1 100644 (file)
@@ -4,7 +4,7 @@
 
 use rustc::mir;
 use rustc::ty::layout::LayoutOf;
-use rustc::mir::interpret::{EvalResult, Scalar, PointerArithmetic};
+use rustc::mir::interpret::{InterpResult, Scalar, PointerArithmetic};
 
 use super::{InterpretCx, Machine};
 
@@ -36,7 +36,7 @@ fn binop_right_homogeneous(op: mir::BinOp) -> bool {
 }
 
 impl<'a, 'mir, 'tcx, M: Machine<'a, 'mir, 'tcx>> InterpretCx<'a, 'mir, 'tcx, M> {
-    pub fn run(&mut self) -> EvalResult<'tcx> {
+    pub fn run(&mut self) -> InterpResult<'tcx> {
         while self.step()? {}
         Ok(())
     }
@@ -44,15 +44,15 @@ pub fn run(&mut self) -> EvalResult<'tcx> {
     /// Returns `true` as long as there are more things to do.
     ///
     /// This is used by [priroda](https://github.com/oli-obk/priroda)
-    pub fn step(&mut self) -> EvalResult<'tcx, bool> {
+    pub fn step(&mut self) -> InterpResult<'tcx, bool> {
         if self.stack.is_empty() {
             return Ok(false);
         }
 
         let block = self.frame().block;
         let stmt_id = self.frame().stmt;
-        let mir = self.mir();
-        let basic_block = &mir.basic_blocks()[block];
+        let body = self.body();
+        let basic_block = &body.basic_blocks()[block];
 
         let old_frames = self.cur_frame();
 
@@ -70,7 +70,7 @@ pub fn step(&mut self) -> EvalResult<'tcx, bool> {
         Ok(true)
     }
 
-    fn statement(&mut self, stmt: &mir::Statement<'tcx>) -> EvalResult<'tcx> {
+    fn statement(&mut self, stmt: &mir::Statement<'tcx>) -> InterpResult<'tcx> {
         info!("{:?}", stmt);
 
         use rustc::mir::StatementKind::*;
@@ -136,7 +136,7 @@ fn eval_rvalue_into_place(
         &mut self,
         rvalue: &mir::Rvalue<'tcx>,
         place: &mir::Place<'tcx>,
-    ) -> EvalResult<'tcx> {
+    ) -> InterpResult<'tcx> {
         let dest = self.eval_place(place)?;
 
         use rustc::mir::Rvalue::*;
@@ -277,7 +277,7 @@ fn eval_rvalue_into_place(
         Ok(())
     }
 
-    fn terminator(&mut self, terminator: &mir::Terminator<'tcx>) -> EvalResult<'tcx> {
+    fn terminator(&mut self, terminator: &mir::Terminator<'tcx>) -> InterpResult<'tcx> {
         info!("{:?}", terminator.kind);
         self.tcx.span = terminator.source_info.span;
         self.memory.tcx.span = terminator.source_info.span;
index a39af9640ac343bd6254de6692fc2a8863c99aa4..ff8d6804febbd057e5377d25c321dbd8b0e1e34e 100644 (file)
@@ -6,14 +6,14 @@
 use syntax::source_map::Span;
 use rustc_target::spec::abi::Abi;
 
-use rustc::mir::interpret::{EvalResult, PointerArithmetic, InterpError, Scalar};
+use rustc::mir::interpret::{InterpResult, PointerArithmetic, InterpError, Scalar};
 use super::{
     InterpretCx, Machine, Immediate, OpTy, ImmTy, PlaceTy, MPlaceTy, StackPopCleanup
 };
 
 impl<'a, 'mir, 'tcx, M: Machine<'a, 'mir, 'tcx>> InterpretCx<'a, 'mir, 'tcx, M> {
     #[inline]
-    pub fn goto_block(&mut self, target: Option<mir::BasicBlock>) -> EvalResult<'tcx> {
+    pub fn goto_block(&mut self, target: Option<mir::BasicBlock>) -> InterpResult<'tcx> {
         if let Some(target) = target {
             self.frame_mut().block = target;
             self.frame_mut().stmt = 0;
@@ -26,7 +26,7 @@ pub fn goto_block(&mut self, target: Option<mir::BasicBlock>) -> EvalResult<'tcx
     pub(super) fn eval_terminator(
         &mut self,
         terminator: &mir::Terminator<'tcx>,
-    ) -> EvalResult<'tcx> {
+    ) -> InterpResult<'tcx> {
         use rustc::mir::TerminatorKind::*;
         match terminator.kind {
             Return => {
@@ -206,7 +206,7 @@ fn pass_argument(
         rust_abi: bool,
         caller_arg: &mut impl Iterator<Item=OpTy<'tcx, M::PointerTag>>,
         callee_arg: PlaceTy<'tcx, M::PointerTag>,
-    ) -> EvalResult<'tcx> {
+    ) -> InterpResult<'tcx> {
         if rust_abi && callee_arg.layout.is_zst() {
             // Nothing to do.
             trace!("Skipping callee ZST");
@@ -234,7 +234,7 @@ fn eval_fn_call(
         args: &[OpTy<'tcx, M::PointerTag>],
         dest: Option<PlaceTy<'tcx, M::PointerTag>>,
         ret: Option<mir::BasicBlock>,
-    ) -> EvalResult<'tcx> {
+    ) -> InterpResult<'tcx> {
         trace!("eval_fn_call: {:#?}", instance);
 
         match instance.def {
@@ -281,15 +281,15 @@ fn eval_fn_call(
                 }
 
                 // We need MIR for this fn
-                let mir = match M::find_fn(self, instance, args, dest, ret)? {
-                    Some(mir) => mir,
+                let body = match M::find_fn(self, instance, args, dest, ret)? {
+                    Some(body) => body,
                     None => return Ok(()),
                 };
 
                 self.push_stack_frame(
                     instance,
                     span,
-                    mir,
+                    body,
                     dest,
                     StackPopCleanup::Goto(ret),
                 )?;
@@ -307,8 +307,8 @@ fn eval_fn_call(
                     );
                     trace!(
                         "spread_arg: {:?}, locals: {:#?}",
-                        mir.spread_arg,
-                        mir.args_iter()
+                        body.spread_arg,
+                        body.args_iter()
                             .map(|local|
                                 (local, self.layout_of_local(self.frame(), local, None).unwrap().ty)
                             )
@@ -337,7 +337,7 @@ fn eval_fn_call(
                                 .chain((0..untuple_arg.layout.fields.count()).into_iter()
                                     .map(|i| self.operand_field(untuple_arg, i as u64))
                                 )
-                                .collect::<EvalResult<'_, Vec<OpTy<'tcx, M::PointerTag>>>>()?)
+                                .collect::<InterpResult<'_, Vec<OpTy<'tcx, M::PointerTag>>>>()?)
                         } else {
                             // Plain arg passing
                             Cow::from(args)
@@ -352,12 +352,12 @@ fn eval_fn_call(
                     // this is a single iterator (that handles `spread_arg`), then
                     // `pass_argument` would be the loop body. It takes care to
                     // not advance `caller_iter` for ZSTs.
-                    let mut locals_iter = mir.args_iter();
+                    let mut locals_iter = body.args_iter();
                     while let Some(local) = locals_iter.next() {
                         let dest = self.eval_place(
                             &mir::Place::Base(mir::PlaceBase::Local(local))
                         )?;
-                        if Some(local) == mir.spread_arg {
+                        if Some(local) == body.spread_arg {
                             // Must be a tuple
                             for i in 0..dest.layout.fields.count() {
                                 let dest = self.place_field(dest, i as u64)?;
@@ -457,7 +457,7 @@ fn drop_in_place(
         instance: ty::Instance<'tcx>,
         span: Span,
         target: mir::BasicBlock,
-    ) -> EvalResult<'tcx> {
+    ) -> InterpResult<'tcx> {
         trace!("drop_in_place: {:?},\n  {:?}, {:?}", *place, place.layout.ty, instance);
         // We take the address of the object.  This may well be unaligned, which is fine
         // for us here.  However, unaligned accesses will probably make the actual drop
index 33cb1a097175d5df2755c16c98a34e8349c907a0..220f3e8b936419f99717bae1edbd19898aca0b13 100644 (file)
@@ -1,6 +1,6 @@
 use rustc::ty::{self, Ty, Instance};
 use rustc::ty::layout::{Size, Align, LayoutOf};
-use rustc::mir::interpret::{Scalar, Pointer, EvalResult, PointerArithmetic};
+use rustc::mir::interpret::{Scalar, Pointer, InterpResult, PointerArithmetic};
 
 use super::{InterpretCx, InterpError, Machine, MemoryKind};
 
@@ -15,7 +15,7 @@ pub fn get_vtable(
         &mut self,
         ty: Ty<'tcx>,
         poly_trait_ref: Option<ty::PolyExistentialTraitRef<'tcx>>,
-    ) -> EvalResult<'tcx, Pointer<M::PointerTag>> {
+    ) -> InterpResult<'tcx, Pointer<M::PointerTag>> {
         trace!("get_vtable(trait_ref={:?})", poly_trait_ref);
 
         let (ty, poly_trait_ref) = self.tcx.erase_regions(&(ty, poly_trait_ref));
@@ -102,7 +102,7 @@ pub fn get_vtable(
     pub fn read_drop_type_from_vtable(
         &self,
         vtable: Pointer<M::PointerTag>,
-    ) -> EvalResult<'tcx, (ty::Instance<'tcx>, Ty<'tcx>)> {
+    ) -> InterpResult<'tcx, (ty::Instance<'tcx>, Ty<'tcx>)> {
         // we don't care about the pointee type, we just want a pointer
         self.memory.check_align(vtable.into(), self.tcx.data_layout.pointer_align.abi)?;
         let drop_fn = self.memory
@@ -121,7 +121,7 @@ pub fn read_drop_type_from_vtable(
     pub fn read_size_and_align_from_vtable(
         &self,
         vtable: Pointer<M::PointerTag>,
-    ) -> EvalResult<'tcx, (Size, Align)> {
+    ) -> InterpResult<'tcx, (Size, Align)> {
         let pointer_size = self.pointer_size();
         self.memory.check_align(vtable.into(), self.tcx.data_layout.pointer_align.abi)?;
         let alloc = self.memory.get(vtable.alloc_id)?;
index 967496e59645a857a549d0b83fc82e4c3d72ab1a..6768d9ec6bc1999d200e32d70247aa1fb49a2bfd 100644 (file)
@@ -8,7 +8,7 @@
 use rustc::ty;
 use rustc_data_structures::fx::FxHashSet;
 use rustc::mir::interpret::{
-    Scalar, GlobalAlloc, EvalResult, InterpError, CheckInAllocMsg,
+    Scalar, GlobalAlloc, InterpResult, InterpError, CheckInAllocMsg,
 };
 
 use super::{
@@ -81,7 +81,7 @@ pub struct RefTracking<T> {
     pub todo: Vec<(T, Vec<PathElem>)>,
 }
 
-impl<'tcx, T: Copy + Eq + Hash> RefTracking<T> {
+impl<T: Copy + Eq + Hash> RefTracking<T> {
     pub fn new(op: T) -> Self {
         let mut ref_tracking = RefTracking {
             seen: FxHashSet::default(),
@@ -223,7 +223,7 @@ fn visit_elem(
         &mut self,
         new_op: OpTy<'tcx, M::PointerTag>,
         elem: PathElem,
-    ) -> EvalResult<'tcx> {
+    ) -> InterpResult<'tcx> {
         // Remember the old state
         let path_len = self.path.len();
         // Perform operation
@@ -251,7 +251,7 @@ fn visit_field(
         old_op: OpTy<'tcx, M::PointerTag>,
         field: usize,
         new_op: OpTy<'tcx, M::PointerTag>
-    ) -> EvalResult<'tcx> {
+    ) -> InterpResult<'tcx> {
         let elem = self.aggregate_field_path_elem(old_op.layout, field);
         self.visit_elem(new_op, elem)
     }
@@ -262,7 +262,7 @@ fn visit_variant(
         old_op: OpTy<'tcx, M::PointerTag>,
         variant_id: VariantIdx,
         new_op: OpTy<'tcx, M::PointerTag>
-    ) -> EvalResult<'tcx> {
+    ) -> InterpResult<'tcx> {
         let name = match old_op.layout.ty.sty {
             ty::Adt(adt, _) => PathElem::Variant(adt.variants[variant_id].ident.name),
             // Generators also have variants
@@ -273,7 +273,7 @@ fn visit_variant(
     }
 
     #[inline]
-    fn visit_value(&mut self, op: OpTy<'tcx, M::PointerTag>) -> EvalResult<'tcx>
+    fn visit_value(&mut self, op: OpTy<'tcx, M::PointerTag>) -> InterpResult<'tcx>
     {
         trace!("visit_value: {:?}, {:?}", *op, op.layout);
         // Translate some possible errors to something nicer.
@@ -293,7 +293,7 @@ fn visit_value(&mut self, op: OpTy<'tcx, M::PointerTag>) -> EvalResult<'tcx>
         }
     }
 
-    fn visit_primitive(&mut self, value: OpTy<'tcx, M::PointerTag>) -> EvalResult<'tcx>
+    fn visit_primitive(&mut self, value: OpTy<'tcx, M::PointerTag>) -> InterpResult<'tcx>
     {
         let value = self.ecx.read_immediate(value)?;
         // Go over all the primitive types
@@ -449,7 +449,7 @@ fn visit_primitive(&mut self, value: OpTy<'tcx, M::PointerTag>) -> EvalResult<'t
         Ok(())
     }
 
-    fn visit_uninhabited(&mut self) -> EvalResult<'tcx>
+    fn visit_uninhabited(&mut self) -> InterpResult<'tcx>
     {
         validation_failure!("a value of an uninhabited type", self.path)
     }
@@ -458,7 +458,7 @@ fn visit_scalar(
         &mut self,
         op: OpTy<'tcx, M::PointerTag>,
         layout: &layout::Scalar,
-    ) -> EvalResult<'tcx> {
+    ) -> InterpResult<'tcx> {
         let value = self.ecx.read_scalar(op)?;
         // Determine the allowed range
         let (lo, hi) = layout.valid_range.clone().into_inner();
@@ -526,8 +526,8 @@ fn visit_scalar(
     fn visit_aggregate(
         &mut self,
         op: OpTy<'tcx, M::PointerTag>,
-        fields: impl Iterator<Item=EvalResult<'tcx, Self::V>>,
-    ) -> EvalResult<'tcx> {
+        fields: impl Iterator<Item=InterpResult<'tcx, Self::V>>,
+    ) -> InterpResult<'tcx> {
         match op.layout.ty.sty {
             ty::Str => {
                 let mplace = op.to_mem_place(); // strings are never immediate
@@ -621,7 +621,7 @@ pub fn validate_operand(
         path: Vec<PathElem>,
         ref_tracking: Option<&mut RefTracking<MPlaceTy<'tcx, M::PointerTag>>>,
         const_mode: bool,
-    ) -> EvalResult<'tcx> {
+    ) -> InterpResult<'tcx> {
         trace!("validate_operand: {:?}, {:?}", *op, op.layout.ty);
 
         // Construct a visitor
index cf67b0a97bcf8dd23ee1b7ca7369f4ebe15e2139..b5477c68610195878a5243a7661614451ec01bf0 100644 (file)
@@ -4,7 +4,7 @@
 use rustc::ty::layout::{self, TyLayout, VariantIdx};
 use rustc::ty;
 use rustc::mir::interpret::{
-    EvalResult,
+    InterpResult,
 };
 
 use super::{
@@ -23,7 +23,7 @@ pub trait Value<'a, 'mir, 'tcx, M: Machine<'a, 'mir, 'tcx>>: Copy
     fn to_op(
         self,
         ecx: &InterpretCx<'a, 'mir, 'tcx, M>,
-    ) -> EvalResult<'tcx, OpTy<'tcx, M::PointerTag>>;
+    ) -> InterpResult<'tcx, OpTy<'tcx, M::PointerTag>>;
 
     /// Creates this from an `MPlaceTy`.
     fn from_mem_place(mplace: MPlaceTy<'tcx, M::PointerTag>) -> Self;
@@ -33,14 +33,14 @@ fn project_downcast(
         self,
         ecx: &InterpretCx<'a, 'mir, 'tcx, M>,
         variant: VariantIdx,
-    ) -> EvalResult<'tcx, Self>;
+    ) -> InterpResult<'tcx, Self>;
 
     /// Projects to the n-th field.
     fn project_field(
         self,
         ecx: &InterpretCx<'a, 'mir, 'tcx, M>,
         field: u64,
-    ) -> EvalResult<'tcx, Self>;
+    ) -> InterpResult<'tcx, Self>;
 }
 
 // Operands and memory-places are both values.
@@ -57,7 +57,7 @@ fn layout(&self) -> TyLayout<'tcx> {
     fn to_op(
         self,
         _ecx: &InterpretCx<'a, 'mir, 'tcx, M>,
-    ) -> EvalResult<'tcx, OpTy<'tcx, M::PointerTag>> {
+    ) -> InterpResult<'tcx, OpTy<'tcx, M::PointerTag>> {
         Ok(self)
     }
 
@@ -71,7 +71,7 @@ fn project_downcast(
         self,
         ecx: &InterpretCx<'a, 'mir, 'tcx, M>,
         variant: VariantIdx,
-    ) -> EvalResult<'tcx, Self> {
+    ) -> InterpResult<'tcx, Self> {
         ecx.operand_downcast(self, variant)
     }
 
@@ -80,7 +80,7 @@ fn project_field(
         self,
         ecx: &InterpretCx<'a, 'mir, 'tcx, M>,
         field: u64,
-    ) -> EvalResult<'tcx, Self> {
+    ) -> InterpResult<'tcx, Self> {
         ecx.operand_field(self, field)
     }
 }
@@ -96,7 +96,7 @@ fn layout(&self) -> TyLayout<'tcx> {
     fn to_op(
         self,
         _ecx: &InterpretCx<'a, 'mir, 'tcx, M>,
-    ) -> EvalResult<'tcx, OpTy<'tcx, M::PointerTag>> {
+    ) -> InterpResult<'tcx, OpTy<'tcx, M::PointerTag>> {
         Ok(self.into())
     }
 
@@ -110,7 +110,7 @@ fn project_downcast(
         self,
         ecx: &InterpretCx<'a, 'mir, 'tcx, M>,
         variant: VariantIdx,
-    ) -> EvalResult<'tcx, Self> {
+    ) -> InterpResult<'tcx, Self> {
         ecx.mplace_downcast(self, variant)
     }
 
@@ -119,7 +119,7 @@ fn project_field(
         self,
         ecx: &InterpretCx<'a, 'mir, 'tcx, M>,
         field: u64,
-    ) -> EvalResult<'tcx, Self> {
+    ) -> InterpResult<'tcx, Self> {
         ecx.mplace_field(self, field)
     }
 }
@@ -137,25 +137,25 @@ fn ecx(&$($mutability)? self)
             // Recursive actions, ready to be overloaded.
             /// Visits the given value, dispatching as appropriate to more specialized visitors.
             #[inline(always)]
-            fn visit_value(&mut self, v: Self::V) -> EvalResult<'tcx>
+            fn visit_value(&mut self, v: Self::V) -> InterpResult<'tcx>
             {
                 self.walk_value(v)
             }
             /// Visits the given value as a union. No automatic recursion can happen here.
             #[inline(always)]
-            fn visit_union(&mut self, _v: Self::V) -> EvalResult<'tcx>
+            fn visit_union(&mut self, _v: Self::V) -> InterpResult<'tcx>
             {
                 Ok(())
             }
             /// Visits this value as an aggregate; you get an iterator yielding
-            /// all the fields (still in an `EvalResult`, you have to do error handling yourself).
+            /// all the fields (still in an `InterpResult`, you have to do error handling yourself).
             /// Recurses into the fields.
             #[inline(always)]
             fn visit_aggregate(
                 &mut self,
                 v: Self::V,
-                fields: impl Iterator<Item=EvalResult<'tcx, Self::V>>,
-            ) -> EvalResult<'tcx> {
+                fields: impl Iterator<Item=InterpResult<'tcx, Self::V>>,
+            ) -> InterpResult<'tcx> {
                 self.walk_aggregate(v, fields)
             }
 
@@ -170,7 +170,7 @@ fn visit_field(
                 _old_val: Self::V,
                 _field: usize,
                 new_val: Self::V,
-            ) -> EvalResult<'tcx> {
+            ) -> InterpResult<'tcx> {
                 self.visit_value(new_val)
             }
 
@@ -181,7 +181,7 @@ fn visit_variant(
                 _old_val: Self::V,
                 _variant: VariantIdx,
                 new_val: Self::V,
-            ) -> EvalResult<'tcx> {
+            ) -> InterpResult<'tcx> {
                 self.visit_value(new_val)
             }
 
@@ -191,7 +191,7 @@ fn visit_variant(
             /// it is meant to provide the chance for additional checks when a value of uninhabited
             /// layout is detected.
             #[inline(always)]
-            fn visit_uninhabited(&mut self) -> EvalResult<'tcx>
+            fn visit_uninhabited(&mut self) -> InterpResult<'tcx>
             { Ok(()) }
             /// Called whenever we reach a value with scalar layout.
             /// We do NOT provide a `ScalarMaybeUndef` here to avoid accessing memory if the
@@ -201,7 +201,7 @@ fn visit_uninhabited(&mut self) -> EvalResult<'tcx>
             /// it is meant to provide the chance for additional checks when a value of scalar
             /// layout is detected.
             #[inline(always)]
-            fn visit_scalar(&mut self, _v: Self::V, _layout: &layout::Scalar) -> EvalResult<'tcx>
+            fn visit_scalar(&mut self, _v: Self::V, _layout: &layout::Scalar) -> InterpResult<'tcx>
             { Ok(()) }
 
             /// Called whenever we reach a value of primitive type. There can be no recursion
@@ -209,22 +209,22 @@ fn visit_scalar(&mut self, _v: Self::V, _layout: &layout::Scalar) -> EvalResult<
             /// We do *not* provide an `ImmTy` here because some implementations might want
             /// to write to the place this primitive lives in.
             #[inline(always)]
-            fn visit_primitive(&mut self, _v: Self::V) -> EvalResult<'tcx>
+            fn visit_primitive(&mut self, _v: Self::V) -> InterpResult<'tcx>
             { Ok(()) }
 
             // Default recursors. Not meant to be overloaded.
             fn walk_aggregate(
                 &mut self,
                 v: Self::V,
-                fields: impl Iterator<Item=EvalResult<'tcx, Self::V>>,
-            ) -> EvalResult<'tcx> {
+                fields: impl Iterator<Item=InterpResult<'tcx, Self::V>>,
+            ) -> InterpResult<'tcx> {
                 // Now iterate over it.
                 for (idx, field_val) in fields.enumerate() {
                     self.visit_field(v, idx, field_val?)?;
                 }
                 Ok(())
             }
-            fn walk_value(&mut self, v: Self::V) -> EvalResult<'tcx>
+            fn walk_value(&mut self, v: Self::V) -> InterpResult<'tcx>
             {
                 trace!("walk_value: type: {}", v.layout().ty);
                 // If this is a multi-variant layout, we have to find the right one and proceed with
@@ -306,7 +306,7 @@ fn walk_value(&mut self, v: Self::V) -> EvalResult<'tcx>
                     layout::FieldPlacement::Arbitrary { ref offsets, .. } => {
                         // FIXME: We collect in a vec because otherwise there are lifetime
                         // errors: Projecting to a field needs access to `ecx`.
-                        let fields: Vec<EvalResult<'tcx, Self::V>> =
+                        let fields: Vec<InterpResult<'tcx, Self::V>> =
                             (0..offsets.len()).map(|i| {
                                 v.project_field(self.ecx(), i as u64)
                             })
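
The hooks above follow one pattern: `visit_*` methods are the overridable entry points, `walk_*` methods are the default recursors, and aggregate fields are handed over as an iterator of fallible projections so the visitor decides how to surface errors. A toy rendition of that shape (stand-alone types, not rustc's actual visitor traits, with a plain `String` standing in for the interpreter's error type):

    type SketchResult<T = ()> = Result<T, String>;

    trait ValueVisitor {
        type V;

        // Overridable entry point for a single field.
        fn visit_field(&mut self, idx: usize, val: Self::V) -> SketchResult;

        // Default recursor: enumerate the fallible fields, bubbling up the
        // first projection error with `?`.
        fn walk_aggregate(
            &mut self,
            fields: impl Iterator<Item = SketchResult<Self::V>>,
        ) -> SketchResult {
            for (idx, field) in fields.enumerate() {
                self.visit_field(idx, field?)?;
            }
            Ok(())
        }
    }

    struct PrintVisitor;

    impl ValueVisitor for PrintVisitor {
        type V = u64;
        fn visit_field(&mut self, idx: usize, val: u64) -> SketchResult {
            println!("field {} = {}", idx, val);
            Ok(())
        }
    }

    fn main() -> SketchResult {
        // Projection of the third field fails, so walking stops there.
        let fields = vec![Ok(1_u64), Ok(2), Err("bad field".to_string())];
        PrintVisitor.walk_aggregate(fields.into_iter())
    }
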
index 9213a009ea740374641900fa189a332c0c2b5f1e..a7cbe84330d899a9e10a5b760cb3649f928361a3 100644 (file)
 #![feature(unicode_internals)]
 #![feature(step_trait)]
 #![feature(slice_concat_ext)]
+#![feature(trusted_len)]
 #![feature(try_blocks)]
 
 #![recursion_limit="256"]
 
 #![deny(rust_2018_idioms)]
 #![deny(internal)]
+#![deny(unused_lifetimes)]
 #![allow(explicit_outlives_requirements)]
 
 #[macro_use] extern crate log;
index cbfc89934718600c9337c4b40e0b81fab9bc4a6b..e15c8a4b416643c1b807705fe60902e4cf4ee4df 100644 (file)
@@ -8,18 +8,18 @@
 use rustc::ty::subst::InternalSubsts;
 
 pub fn check(tcx: TyCtxt<'a, 'tcx, 'tcx>,
-             mir: &Body<'tcx>,
+             body: &Body<'tcx>,
              def_id: DefId) {
     let hir_id = tcx.hir().as_local_hir_id(def_id).unwrap();
 
     if let Some(fn_like_node) = FnLikeNode::from_node(tcx.hir().get_by_hir_id(hir_id)) {
-        check_fn_for_unconditional_recursion(tcx, fn_like_node.kind(), mir, def_id);
+        check_fn_for_unconditional_recursion(tcx, fn_like_node.kind(), body, def_id);
     }
 }
 
 fn check_fn_for_unconditional_recursion(tcx: TyCtxt<'a, 'tcx, 'tcx>,
                                         fn_kind: FnKind<'_>,
-                                        mir: &Body<'tcx>,
+                                        body: &Body<'tcx>,
                                         def_id: DefId) {
     if let FnKind::Closure(_) = fn_kind {
         // closures can't recur, so they don't matter.
@@ -54,7 +54,7 @@ fn check_fn_for_unconditional_recursion(tcx: TyCtxt<'a, 'tcx, 'tcx>,
     // to have behaviour like the above, rather than
     // e.g., accidentally recursing after an assert.
 
-    let basic_blocks = mir.basic_blocks();
+    let basic_blocks = body.basic_blocks();
     let mut reachable_without_self_call_queue = vec![mir::START_BLOCK];
     let mut reached_exit_without_self_call = false;
     let mut self_call_locations = vec![];
@@ -84,7 +84,7 @@ fn check_fn_for_unconditional_recursion(tcx: TyCtxt<'a, 'tcx, 'tcx>,
         if let Some(ref terminator) = block.terminator {
             match terminator.kind {
                 TerminatorKind::Call { ref func, .. } => {
-                    let func_ty = func.ty(mir, tcx);
+                    let func_ty = func.ty(body, tcx);
 
                     if let ty::FnDef(fn_def_id, substs) = func_ty.sty {
                         let (call_fn_id, call_substs) =
index eed051449e155637663f2a581bb3712d97e156d2..706ace0c1f1bca4354feb53234c24e6bfb0c2b4b 100644 (file)
 use rustc::ty::print::obsolete::DefPathBasedNames;
 use rustc::ty::adjustment::{CustomCoerceUnsized, PointerCast};
 use rustc::session::config::EntryFnType;
-use rustc::mir::{self, Location, Place, PlaceBase, Promoted, Static, StaticKind};
+use rustc::mir::{self, Location, PlaceBase, Promoted, Static, StaticKind};
 use rustc::mir::visit::Visitor as MirVisitor;
 use rustc::mir::mono::{MonoItem, InstantiationMode};
 use rustc::mir::interpret::{Scalar, GlobalId, GlobalAlloc, ErrorHandled};
@@ -516,7 +516,7 @@ fn check_type_length_limit<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
 
 struct MirNeighborCollector<'a, 'tcx: 'a> {
     tcx: TyCtxt<'a, 'tcx, 'tcx>,
-    mir: &'a mir::Body<'tcx>,
+    body: &'a mir::Body<'tcx>,
     output: &'a mut Vec<MonoItem<'tcx>>,
     param_substs: SubstsRef<'tcx>,
 }
@@ -538,7 +538,7 @@ fn visit_rvalue(&mut self, rvalue: &mir::Rvalue<'tcx>, location: Location) {
                     ty::ParamEnv::reveal_all(),
                     &target_ty,
                 );
-                let source_ty = operand.ty(self.mir, self.tcx);
+                let source_ty = operand.ty(self.body, self.tcx);
                 let source_ty = self.tcx.subst_and_normalize_erasing_regions(
                     self.param_substs,
                     ty::ParamEnv::reveal_all(),
@@ -560,7 +560,7 @@ fn visit_rvalue(&mut self, rvalue: &mir::Rvalue<'tcx>, location: Location) {
             mir::Rvalue::Cast(
                 mir::CastKind::Pointer(PointerCast::ReifyFnPointer), ref operand, _
             ) => {
-                let fn_ty = operand.ty(self.mir, self.tcx);
+                let fn_ty = operand.ty(self.body, self.tcx);
                 let fn_ty = self.tcx.subst_and_normalize_erasing_regions(
                     self.param_substs,
                     ty::ParamEnv::reveal_all(),
@@ -571,7 +571,7 @@ fn visit_rvalue(&mut self, rvalue: &mir::Rvalue<'tcx>, location: Location) {
             mir::Rvalue::Cast(
                 mir::CastKind::Pointer(PointerCast::ClosureFnPointer(_)), ref operand, _
             ) => {
-                let source_ty = operand.ty(self.mir, self.tcx);
+                let source_ty = operand.ty(self.body, self.tcx);
                 let source_ty = self.tcx.subst_and_normalize_erasing_regions(
                     self.param_substs,
                     ty::ParamEnv::reveal_all(),
@@ -621,7 +621,7 @@ fn visit_terminator_kind(&mut self,
         let tcx = self.tcx;
         match *kind {
             mir::TerminatorKind::Call { ref func, .. } => {
-                let callee_ty = func.ty(self.mir, tcx);
+                let callee_ty = func.ty(self.body, tcx);
                 let callee_ty = tcx.subst_and_normalize_erasing_regions(
                     self.param_substs,
                     ty::ParamEnv::reveal_all(),
@@ -631,7 +631,7 @@ fn visit_terminator_kind(&mut self,
             }
             mir::TerminatorKind::Drop { ref location, .. } |
             mir::TerminatorKind::DropAndReplace { ref location, .. } => {
-                let ty = location.ty(self.mir, self.tcx).ty;
+                let ty = location.ty(self.body, self.tcx).ty;
                 let ty = tcx.subst_and_normalize_erasing_regions(
                     self.param_substs,
                     ty::ParamEnv::reveal_all(),
@@ -655,14 +655,12 @@ fn visit_terminator_kind(&mut self,
         self.super_terminator_kind(kind, location);
     }
 
-    fn visit_place(&mut self,
-                    place: &mir::Place<'tcx>,
-                    context: mir::visit::PlaceContext,
-                    location: Location) {
-        match place {
-            Place::Base(
-                PlaceBase::Static(box Static{ kind:StaticKind::Static(def_id), .. })
-            ) => {
+    fn visit_place_base(&mut self,
+                        place_base: &mir::PlaceBase<'tcx>,
+                        _context: mir::visit::PlaceContext,
+                        location: Location) {
+        match place_base {
+            PlaceBase::Static(box Static { kind: StaticKind::Static(def_id), .. }) => {
                 debug!("visiting static {:?} @ {:?}", def_id, location);
 
                 let tcx = self.tcx;
@@ -671,10 +669,13 @@ fn visit_place(&mut self,
                     self.output.push(MonoItem::Static(*def_id));
                 }
             }
-            _ => {}
+            PlaceBase::Static(box Static { kind: StaticKind::Promoted(_), .. }) => {
+                // FIXME: should we handle promoteds here instead of eagerly in collect_neighbours?
+            }
+            PlaceBase::Local(_) => {
+                // Locals have no relevance for the collector
+            }
         }
-
-        self.super_place(place, context, location);
     }
 }
 
@@ -906,7 +907,7 @@ fn find_vtable_types_for_unsizing<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
     }
 }
 
-fn create_fn_mono_item<'a, 'tcx>(instance: Instance<'tcx>) -> MonoItem<'tcx> {
+fn create_fn_mono_item<'tcx>(instance: Instance<'tcx>) -> MonoItem<'tcx> {
     debug!("create_fn_mono_item(instance={})", instance);
     MonoItem::Fn(instance)
 }
@@ -1210,16 +1211,16 @@ fn collect_neighbours<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
                                 instance: Instance<'tcx>,
                                 output: &mut Vec<MonoItem<'tcx>>)
 {
-    let mir = tcx.instance_mir(instance.def);
+    let body = tcx.instance_mir(instance.def);
 
     MirNeighborCollector {
         tcx,
-        mir: &mir,
+        body: &body,
         output,
         param_substs: instance.substs,
-    }.visit_body(&mir);
+    }.visit_body(&body);
     let param_env = ty::ParamEnv::reveal_all();
-    for i in 0..mir.promoted.len() {
+    for i in 0..body.promoted.len() {
         use rustc_data_structures::indexed_vec::Idx;
         let i = Promoted::new(i);
         let cid = GlobalId {
@@ -1230,7 +1231,7 @@ fn collect_neighbours<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
             Ok(val) => collect_const(tcx, val, instance.substs, output),
             Err(ErrorHandled::Reported) => {},
             Err(ErrorHandled::TooGeneric) => span_bug!(
-                mir.promoted[i].span, "collection encountered polymorphic constant",
+                body.promoted[i].span, "collection encountered polymorphic constant",
             ),
         }
     }
index 0cefc8c3a92ab2ac3703033f11f865fb0805f0bf..0f2196b5da6beb532eccfb35832c2ab0cd23e1b4 100644 (file)
@@ -1,6 +1,5 @@
 use rustc::hir;
 use rustc::hir::def_id::DefId;
-use rustc::infer;
 use rustc::mir::*;
 use rustc::ty::{self, Ty, TyCtxt};
 use rustc::ty::layout::VariantIdx;
@@ -21,6 +20,7 @@
 };
 use crate::util::elaborate_drops::{self, DropElaborator, DropStyle, DropFlagMode};
 use crate::util::patch::MirPatch;
+use crate::util::expand_aggregate;
 
 pub fn provide(providers: &mut Providers<'_>) {
     providers.mir_shims = make_shim;
@@ -175,8 +175,8 @@ fn build_drop_shim<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
 
     // Check if this is a generator, if so, return the drop glue for it
     if let Some(&ty::TyS { sty: ty::Generator(gen_def_id, substs, _), .. }) = ty {
-        let mir = &**tcx.optimized_mir(gen_def_id).generator_drop.as_ref().unwrap();
-        return mir.subst(tcx, substs.substs);
+        let body = &**tcx.optimized_mir(gen_def_id).generator_drop.as_ref().unwrap();
+        return body.subst(tcx, substs.substs);
     }
 
     let substs = if let Some(ty) = ty {
@@ -202,7 +202,7 @@ fn build_drop_shim<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
     block(&mut blocks, TerminatorKind::Goto { target: return_block });
     block(&mut blocks, TerminatorKind::Return);
 
-    let mut mir = Body::new(
+    let mut body = Body::new(
         blocks,
         IndexVec::from_elem_n(
             SourceScopeData { span: span, parent_scope: None }, 1
@@ -223,7 +223,7 @@ fn build_drop_shim<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
         let dropee_ptr = Place::Base(PlaceBase::Local(Local::new(1+0)));
         if tcx.sess.opts.debugging_opts.mir_emit_retag {
             // Function arguments should be retagged, and we make this one raw.
-            mir.basic_blocks_mut()[START_BLOCK].statements.insert(0, Statement {
+            body.basic_blocks_mut()[START_BLOCK].statements.insert(0, Statement {
                 source_info,
                 kind: StatementKind::Retag(RetagKind::Raw, dropee_ptr.clone()),
             });
@@ -231,8 +231,8 @@ fn build_drop_shim<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
         let patch = {
             let param_env = tcx.param_env(def_id).with_reveal_all();
             let mut elaborator = DropShimElaborator {
-                mir: &mir,
-                patch: MirPatch::new(&mir),
+                body: &body,
+                patch: MirPatch::new(&body),
                 tcx,
                 param_env
             };
@@ -249,14 +249,14 @@ fn build_drop_shim<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
             );
             elaborator.patch
         };
-        patch.apply(&mut mir);
+        patch.apply(&mut body);
     }
 
-    mir
+    body
 }
 
 pub struct DropShimElaborator<'a, 'tcx: 'a> {
-    pub mir: &'a Body<'tcx>,
+    pub body: &'a Body<'tcx>,
     pub patch: MirPatch<'tcx>,
     pub tcx: TyCtxt<'a, 'tcx, 'tcx>,
     pub param_env: ty::ParamEnv<'tcx>,
@@ -272,7 +272,7 @@ impl<'a, 'tcx> DropElaborator<'a, 'tcx> for DropShimElaborator<'a, 'tcx> {
     type Path = ();
 
     fn patch(&mut self) -> &mut MirPatch<'tcx> { &mut self.patch }
-    fn mir(&self) -> &'a Body<'tcx> { self.mir }
+    fn body(&self) -> &'a Body<'tcx> { self.body }
     fn tcx(&self) -> TyCtxt<'a, 'tcx, 'tcx> { self.tcx }
     fn param_env(&self) -> ty::ParamEnv<'tcx> { self.param_env }
 
@@ -821,7 +821,7 @@ fn build_call_shim<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
         block(&mut blocks, vec![], TerminatorKind::Resume, true);
     }
 
-    let mut mir = Body::new(
+    let mut body = Body::new(
         blocks,
         IndexVec::from_elem_n(
             SourceScopeData { span: span, parent_scope: None }, 1
@@ -837,34 +837,31 @@ fn build_call_shim<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
         vec![],
     );
     if let Abi::RustCall = sig.abi {
-        mir.spread_arg = Some(Local::new(sig.inputs().len()));
+        body.spread_arg = Some(Local::new(sig.inputs().len()));
     }
-    mir
+    body
 }
 
-pub fn build_adt_ctor<'a, 'gcx, 'tcx>(infcx: &infer::InferCtxt<'a, 'gcx, 'tcx>,
-                                      ctor_id: hir::HirId,
-                                      fields: &[hir::StructField],
-                                      span: Span)
-                                      -> Body<'tcx>
-{
-    let tcx = infcx.tcx;
-    let gcx = tcx.global_tcx();
-    let def_id = tcx.hir().local_def_id_from_hir_id(ctor_id);
-    let param_env = gcx.param_env(def_id);
+pub fn build_adt_ctor<'gcx>(tcx: TyCtxt<'_, 'gcx, 'gcx>, ctor_id: DefId) -> &'gcx Body<'gcx> {
+    debug_assert!(tcx.is_constructor(ctor_id));
+
+    let span = tcx.hir().span_if_local(ctor_id)
+        .unwrap_or_else(|| bug!("no span for ctor {:?}", ctor_id));
+
+    let param_env = tcx.param_env(ctor_id);
 
     // Normalize the sig.
-    let sig = gcx.fn_sig(def_id)
+    let sig = tcx.fn_sig(ctor_id)
         .no_bound_vars()
         .expect("LBR in ADT constructor signature");
-    let sig = gcx.normalize_erasing_regions(param_env, sig);
+    let sig = tcx.normalize_erasing_regions(param_env, sig);
 
     let (adt_def, substs) = match sig.output().sty {
         ty::Adt(adt_def, substs) => (adt_def, substs),
         _ => bug!("unexpected type for ADT ctor {:?}", sig.output())
     };
 
-    debug!("build_ctor: def_id={:?} sig={:?} fields={:?}", def_id, sig, fields);
+    debug!("build_ctor: ctor_id={:?} sig={:?}", ctor_id, sig);
 
     let local_decls = local_decls_for_sig(&sig, span);
 
@@ -873,26 +870,37 @@ pub fn build_adt_ctor<'a, 'gcx, 'tcx>(infcx: &infer::InferCtxt<'a, 'gcx, 'tcx>,
         scope: OUTERMOST_SOURCE_SCOPE
     };
 
-    let variant_no = if adt_def.is_enum() {
-        adt_def.variant_index_with_ctor_id(def_id)
+    let variant_index = if adt_def.is_enum() {
+        adt_def.variant_index_with_ctor_id(ctor_id)
     } else {
         VariantIdx::new(0)
     };
 
-    // return = ADT(arg0, arg1, ...); return
+    // Generate the following MIR:
+    //
+    // (return as Variant).field0 = arg0;
+    // (return as Variant).field1 = arg1;
+    //
+    // return;
+    debug!("build_ctor: variant_index={:?}", variant_index);
+
+    let statements = expand_aggregate(
+        Place::RETURN_PLACE,
+        adt_def
+            .variants[variant_index]
+            .fields
+            .iter()
+            .enumerate()
+            .map(|(idx, field_def)| (
+                Operand::Move(Place::Base(PlaceBase::Local(Local::new(idx + 1)))),
+                field_def.ty(tcx, substs),
+            )),
+        AggregateKind::Adt(adt_def, variant_index, substs, None, None),
+        source_info,
+    ).collect();
+
     let start_block = BasicBlockData {
-        statements: vec![Statement {
-            source_info,
-            kind: StatementKind::Assign(
-                Place::RETURN_PLACE,
-                box Rvalue::Aggregate(
-                    box AggregateKind::Adt(adt_def, variant_no, substs, None, None),
-                    (1..sig.inputs().len()+1).map(|i| {
-                        Operand::Move(Place::Base(PlaceBase::Local(Local::new(i))))
-                    }).collect()
-                )
-            )
-        }],
+        statements,
         terminator: Some(Terminator {
             source_info,
             kind: TerminatorKind::Return,
@@ -900,7 +908,7 @@ pub fn build_adt_ctor<'a, 'gcx, 'tcx>(infcx: &infer::InferCtxt<'a, 'gcx, 'tcx>,
         is_cleanup: false
     };
 
-    Body::new(
+    let body = Body::new(
         IndexVec::from_elem_n(start_block, 1),
         IndexVec::from_elem_n(
             SourceScopeData { span: span, parent_scope: None }, 1
@@ -914,5 +922,17 @@ pub fn build_adt_ctor<'a, 'gcx, 'tcx>(infcx: &infer::InferCtxt<'a, 'gcx, 'tcx>,
         vec![],
         span,
         vec![],
-    )
+    );
+
+    crate::util::dump_mir(
+        tcx,
+        None,
+        "mir_map",
+        &0,
+        crate::transform::MirSource::item(ctor_id),
+        &body,
+        |_, _| Ok(()),
+    );
+
+    tcx.arena.alloc(body)
 }
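
The rewritten `build_adt_ctor` above no longer assigns a single `Rvalue::Aggregate`; it asks `util::expand_aggregate` to pre-expand the constructor into one assignment per field, matching the `(return as Variant).fieldN = argN; ...; return;` shape sketched in its comment. A minimal stand-alone model of that expansion (toy statement type, names invented for illustration):

    #[derive(Debug)]
    enum Stmt {
        // "(return as Variant).fieldN = argN" from the comment above.
        AssignField { variant: usize, field: usize, from_arg: usize },
        Return,
    }

    // Expand an n-ary constructor for `variant` into per-field assignments.
    // Argument locals start at 1 because local 0 is the return place.
    fn expand_ctor(variant: usize, num_fields: usize) -> Vec<Stmt> {
        (0..num_fields)
            .map(|i| Stmt::AssignField { variant, field: i, from_arg: i + 1 })
            .chain(std::iter::once(Stmt::Return))
            .collect()
    }

    fn main() {
        // A two-field constructor of variant 1: two assignments, then `return`.
        for stmt in expand_ctor(1, 2) {
            println!("{:?}", stmt);
        }
    }
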
index 712e9b1fe25059021e4d0275d73bc2dd42d503ac..40af3579766363923d002425c503bbec375616d3 100644 (file)
@@ -34,22 +34,22 @@ impl MirPass for AddCallGuards {
     fn run_pass<'a, 'tcx>(&self,
                           _tcx: TyCtxt<'a, 'tcx, 'tcx>,
                           _src: MirSource<'tcx>,
-                          mir: &mut Body<'tcx>) {
-        self.add_call_guards(mir);
+                          body: &mut Body<'tcx>) {
+        self.add_call_guards(body);
     }
 }
 
 impl AddCallGuards {
-    pub fn add_call_guards(&self, mir: &mut Body<'_>) {
+    pub fn add_call_guards(&self, body: &mut Body<'_>) {
         let pred_count: IndexVec<_, _> =
-            mir.predecessors().iter().map(|ps| ps.len()).collect();
+            body.predecessors().iter().map(|ps| ps.len()).collect();
 
         // We need a place to store the new blocks generated
         let mut new_blocks = Vec::new();
 
-        let cur_len = mir.basic_blocks().len();
+        let cur_len = body.basic_blocks().len();
 
-        for block in mir.basic_blocks_mut() {
+        for block in body.basic_blocks_mut() {
             match block.terminator {
                 Some(Terminator {
                     kind: TerminatorKind::Call {
@@ -81,6 +81,6 @@ pub fn add_call_guards(&self, mir: &mut Body<'_>) {
 
         debug!("Broke {} N edges", new_blocks.len());
 
-        mir.basic_blocks_mut().extend(new_blocks);
+        body.basic_blocks_mut().extend(new_blocks);
     }
 }
index f7a4bf759545cf9903cd22997e2d42226839ccea..23f5e636b7f8a56351e1e5807ba8b013e0f63615 100644 (file)
@@ -43,40 +43,40 @@ impl MirPass for AddMovesForPackedDrops {
     fn run_pass<'a, 'tcx>(&self,
                           tcx: TyCtxt<'a, 'tcx, 'tcx>,
                           src: MirSource<'tcx>,
-                          mir: &mut Body<'tcx>)
+                          body: &mut Body<'tcx>)
     {
-        debug!("add_moves_for_packed_drops({:?} @ {:?})", src, mir.span);
-        add_moves_for_packed_drops(tcx, mir, src.def_id());
+        debug!("add_moves_for_packed_drops({:?} @ {:?})", src, body.span);
+        add_moves_for_packed_drops(tcx, body, src.def_id());
     }
 }
 
 pub fn add_moves_for_packed_drops<'a, 'tcx>(
     tcx: TyCtxt<'a, 'tcx, 'tcx>,
-    mir: &mut Body<'tcx>,
+    body: &mut Body<'tcx>,
     def_id: DefId)
 {
-    let patch = add_moves_for_packed_drops_patch(tcx, mir, def_id);
-    patch.apply(mir);
+    let patch = add_moves_for_packed_drops_patch(tcx, body, def_id);
+    patch.apply(body);
 }
 
 fn add_moves_for_packed_drops_patch<'a, 'tcx>(
     tcx: TyCtxt<'a, 'tcx, 'tcx>,
-    mir: &Body<'tcx>,
+    body: &Body<'tcx>,
     def_id: DefId)
     -> MirPatch<'tcx>
 {
-    let mut patch = MirPatch::new(mir);
+    let mut patch = MirPatch::new(body);
     let param_env = tcx.param_env(def_id);
 
-    for (bb, data) in mir.basic_blocks().iter_enumerated() {
+    for (bb, data) in body.basic_blocks().iter_enumerated() {
         let loc = Location { block: bb, statement_index: data.statements.len() };
         let terminator = data.terminator();
 
         match terminator.kind {
             TerminatorKind::Drop { ref location, .. }
-                if util::is_disaligned(tcx, mir, param_env, location) =>
+                if util::is_disaligned(tcx, body, param_env, location) =>
             {
-                add_move_for_packed_drop(tcx, mir, &mut patch, terminator,
+                add_move_for_packed_drop(tcx, body, &mut patch, terminator,
                                          loc, data.is_cleanup);
             }
             TerminatorKind::DropAndReplace { .. } => {
@@ -92,7 +92,7 @@ fn add_moves_for_packed_drops_patch<'a, 'tcx>(
 
 fn add_move_for_packed_drop<'a, 'tcx>(
     tcx: TyCtxt<'a, 'tcx, 'tcx>,
-    mir: &Body<'tcx>,
+    body: &Body<'tcx>,
     patch: &mut MirPatch<'tcx>,
     terminator: &Terminator<'tcx>,
     loc: Location,
@@ -106,7 +106,7 @@ fn add_move_for_packed_drop<'a, 'tcx>(
     };
 
     let source_info = terminator.source_info;
-    let ty = location.ty(mir, tcx).ty;
+    let ty = location.ty(body, tcx).ty;
     let temp = patch.new_temp(ty, terminator.source_info.span);
 
     let storage_dead_block = patch.new_block(BasicBlockData {
index 23319f7055183e2ceb3360705d560fa00a87c3b9..bea95bcd567adcc46103110e519016518002b9de 100644 (file)
@@ -77,13 +77,13 @@ impl MirPass for AddRetag {
     fn run_pass<'a, 'tcx>(&self,
                           tcx: TyCtxt<'a, 'tcx, 'tcx>,
                           _src: MirSource<'tcx>,
-                          mir: &mut Body<'tcx>)
+                          body: &mut Body<'tcx>)
     {
         if !tcx.sess.opts.debugging_opts.mir_emit_retag {
             return;
         }
-        let (span, arg_count) = (mir.span, mir.arg_count);
-        let (basic_blocks, local_decls) = mir.basic_blocks_and_local_decls_mut();
+        let (span, arg_count) = (body.span, body.arg_count);
+        let (basic_blocks, local_decls) = body.basic_blocks_and_local_decls_mut();
         let needs_retag = |place: &Place<'tcx>| {
             // FIXME: Instead of giving up for unstable places, we should introduce
             // a temporary and retag on that.
index 8ec8a8fa12eeced2231e51515b8268f3e83397fc..b8077d224e4efd9bc2cf1121d53d5d155990af9c 100644 (file)
@@ -19,7 +19,7 @@
 use crate::util;
 
 pub struct UnsafetyChecker<'a, 'tcx: 'a> {
-    mir: &'a Body<'tcx>,
+    body: &'a Body<'tcx>,
     const_context: bool,
     min_const_fn: bool,
     source_scope_local_data: &'a IndexVec<SourceScope, SourceScopeLocalData>,
@@ -32,11 +32,11 @@ pub struct UnsafetyChecker<'a, 'tcx: 'a> {
     inherited_blocks: Vec<(hir::HirId, bool)>,
 }
 
-impl<'a, 'gcx, 'tcx> UnsafetyChecker<'a, 'tcx> {
+impl<'a, 'tcx> UnsafetyChecker<'a, 'tcx> {
     fn new(
         const_context: bool,
         min_const_fn: bool,
-        mir: &'a Body<'tcx>,
+        body: &'a Body<'tcx>,
         source_scope_local_data: &'a IndexVec<SourceScope, SourceScopeLocalData>,
         tcx: TyCtxt<'a, 'tcx, 'tcx>,
         param_env: ty::ParamEnv<'tcx>,
@@ -46,13 +46,13 @@ fn new(
             assert!(const_context);
         }
         Self {
-            mir,
+            body,
             const_context,
             min_const_fn,
             source_scope_local_data,
             violations: vec![],
             source_info: SourceInfo {
-                span: mir.span,
+                span: body.span,
                 scope: OUTERMOST_SOURCE_SCOPE
             },
             tcx,
@@ -87,7 +87,7 @@ fn visit_terminator(&mut self,
             }
 
             TerminatorKind::Call { ref func, .. } => {
-                let func_ty = func.ty(self.mir, self.tcx);
+                let func_ty = func.ty(self.body, self.tcx);
                 let sig = func_ty.fn_sig(self.tcx);
                 if let hir::Unsafety::Unsafe = sig.unsafety() {
                     self.require_unsafe("call to unsafe function",
@@ -159,7 +159,7 @@ fn visit_rvalue(&mut self,
             // pointers during const evaluation have no integral address, only an abstract one
             Rvalue::Cast(CastKind::Misc, ref operand, cast_ty)
             if self.const_context && self.tcx.features().const_raw_ptr_to_usize_cast => {
-                let operand_ty = operand.ty(self.mir, self.tcx);
+                let operand_ty = operand.ty(self.body, self.tcx);
                 let cast_in = CastTy::from_ty(operand_ty).expect("bad input type for cast");
                 let cast_out = CastTy::from_ty(cast_ty).expect("bad output type for cast");
                 match (cast_in, cast_out) {
@@ -182,7 +182,7 @@ fn visit_rvalue(&mut self,
             // result of a comparison of addresses would differ between runtime and compile-time.
             Rvalue::BinaryOp(_, ref lhs, _)
             if self.const_context && self.tcx.features().const_compare_raw_pointers => {
-                if let ty::RawPtr(_) | ty::FnPtr(..) = lhs.ty(self.mir, self.tcx).sty {
+                if let ty::RawPtr(_) | ty::FnPtr(..) = lhs.ty(self.body, self.tcx).sty {
                     self.register_violations(&[UnsafetyViolation {
                         source_info: self.source_info,
                         description: InternedString::intern("pointer operation"),
@@ -199,13 +199,41 @@ fn visit_rvalue(&mut self,
     fn visit_place(&mut self,
                     place: &Place<'tcx>,
                     context: PlaceContext,
-                    location: Location) {
-        match place {
-            &Place::Projection(box Projection {
-                ref base, ref elem
-            }) => {
+                    _location: Location) {
+        place.iterate(|place_base, place_projections| {
+            match place_base {
+                PlaceBase::Local(..) => {
+                    // Locals are safe.
+                }
+                PlaceBase::Static(box Static { kind: StaticKind::Promoted(_), .. }) => {
+                    bug!("unsafety checking should happen before promotion")
+                }
+                PlaceBase::Static(box Static { kind: StaticKind::Static(def_id), .. }) => {
+                    if self.tcx.is_mutable_static(*def_id) {
+                        self.require_unsafe("use of mutable static",
+                            "mutable statics can be mutated by multiple threads: aliasing \
+                             violations or data races will cause undefined behavior",
+                             UnsafetyViolationKind::General);
+                    } else if self.tcx.is_foreign_item(*def_id) {
+                        let source_info = self.source_info;
+                        let lint_root =
+                            self.source_scope_local_data[source_info.scope].lint_root;
+                        self.register_violations(&[UnsafetyViolation {
+                            source_info,
+                            description: InternedString::intern("use of extern static"),
+                            details: InternedString::intern(
+                                "extern statics are not controlled by the Rust type system: \
+                                invalid data, aliasing violations or data races will cause \
+                                undefined behavior"),
+                            kind: UnsafetyViolationKind::ExternStatic(lint_root)
+                        }], &[]);
+                    }
+                }
+            }
+
+            for proj in place_projections {
                 if context.is_borrow() {
-                    if util::is_disaligned(self.tcx, self.mir, self.param_env, place) {
+                    if util::is_disaligned(self.tcx, self.body, self.param_env, place) {
                         let source_info = self.source_info;
                         let lint_root =
                             self.source_scope_local_data[source_info.scope].lint_root;
@@ -220,8 +248,8 @@ fn visit_place(&mut self,
                         }], &[]);
                     }
                 }
-                let is_borrow_of_interior_mut = context.is_borrow() && !base
-                    .ty(self.mir, self.tcx)
+                let is_borrow_of_interior_mut = context.is_borrow() && !proj.base
+                    .ty(self.body, self.tcx)
                     .ty
                     .is_freeze(self.tcx, self.param_env, self.source_info.span);
                 // prevent
@@ -236,15 +264,15 @@ fn visit_place(&mut self,
                     );
                 }
                 let old_source_info = self.source_info;
-                if let &Place::Base(PlaceBase::Local(local)) = base {
-                    if self.mir.local_decls[local].internal {
+                if let Place::Base(PlaceBase::Local(local)) = proj.base {
+                    if self.body.local_decls[local].internal {
                         // Internal locals are used in the `move_val_init` desugaring.
                         // We want to check unsafety against the source info of the
                         // desugaring, rather than the source info of the RHS.
-                        self.source_info = self.mir.local_decls[local].source_info;
+                        self.source_info = self.body.local_decls[local].source_info;
                     }
                 }
-                let base_ty = base.ty(self.mir, self.tcx).ty;
+                let base_ty = proj.base.ty(self.body, self.tcx).ty;
                 match base_ty.sty {
                     ty::RawPtr(..) => {
                         self.require_unsafe("dereference of raw pointer",
@@ -260,8 +288,8 @@ fn visit_place(&mut self,
                                     MutatingUseContext::AsmOutput
                                 )
                             {
-                                let elem_ty = match elem {
-                                    &ProjectionElem::Field(_, ty) => ty,
+                                let elem_ty = match proj.elem {
+                                    ProjectionElem::Field(_, ty) => ty,
                                     _ => span_bug!(
                                         self.source_info.span,
                                         "non-field projection {:?} from union?",
@@ -292,36 +320,7 @@ fn visit_place(&mut self,
                 }
                 self.source_info = old_source_info;
             }
-            &Place::Base(PlaceBase::Local(..)) => {
-                // locals are safe
-            }
-            &Place::Base(PlaceBase::Static(box Static { kind: StaticKind::Promoted(_), .. })) => {
-                bug!("unsafety checking should happen before promotion")
-            }
-            &Place::Base(
-                PlaceBase::Static(box Static { kind: StaticKind::Static(def_id), .. })
-            ) => {
-                if self.tcx.is_mutable_static(def_id) {
-                    self.require_unsafe("use of mutable static",
-                        "mutable statics can be mutated by multiple threads: aliasing violations \
-                         or data races will cause undefined behavior",
-                         UnsafetyViolationKind::General);
-                } else if self.tcx.is_foreign_item(def_id) {
-                    let source_info = self.source_info;
-                    let lint_root =
-                        self.source_scope_local_data[source_info.scope].lint_root;
-                    self.register_violations(&[UnsafetyViolation {
-                        source_info,
-                        description: InternedString::intern("use of extern static"),
-                        details: InternedString::intern(
-                            "extern statics are not controlled by the Rust type system: invalid \
-                            data, aliasing violations or data races will cause undefined behavior"),
-                        kind: UnsafetyViolationKind::ExternStatic(lint_root)
-                    }], &[]);
-                }
-            }
-        };
-        self.super_place(place, context, location);
+        });
     }
 }
 
@@ -413,7 +412,7 @@ fn check_mut_borrowing_layout_constrained_field(
         }) = place {
             match *elem {
                 ProjectionElem::Field(..) => {
-                    let ty = base.ty(&self.mir.local_decls, self.tcx).ty;
+                    let ty = base.ty(&self.body.local_decls, self.tcx).ty;
                     match ty.sty {
                         ty::Adt(def, _) => match self.tcx.layout_scalar_valid_range(def.did) {
                             (Bound::Unbounded, Bound::Unbounded) => {},
@@ -513,9 +512,9 @@ fn unsafety_check_result<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, def_id: DefId)
 
     // N.B., this borrow is valid because all the consumers of
     // `mir_built` force this.
-    let mir = &tcx.mir_built(def_id).borrow();
+    let body = &tcx.mir_built(def_id).borrow();
 
-    let source_scope_local_data = match mir.source_scope_local_data {
+    let source_scope_local_data = match body.source_scope_local_data {
         ClearCrossCrate::Set(ref data) => data,
         ClearCrossCrate::Clear => {
             debug!("unsafety_violations: {:?} - remote, skipping", def_id);
@@ -537,8 +536,8 @@ fn unsafety_check_result<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, def_id: DefId)
     };
     let mut checker = UnsafetyChecker::new(
         const_context, min_const_fn,
-        mir, source_scope_local_data, tcx, param_env);
-    checker.visit_body(mir);
+        body, source_scope_local_data, tcx, param_env);
+    checker.visit_body(body);
 
     check_unused_unsafe(tcx, def_id, &checker.used_unsafe, &mut checker.inherited_blocks);
     UnsafetyCheckResult {
index 63a1b059d90178ed14e8f63a2d49cae988c30c9d..2bbd6ff21047b5632272907061ef1f618d4e4192 100644 (file)
@@ -30,9 +30,9 @@ impl MirPass for CleanupNonCodegenStatements {
     fn run_pass<'a, 'tcx>(&self,
                           _tcx: TyCtxt<'a, 'tcx, 'tcx>,
                           _source: MirSource<'tcx>,
-                          mir: &mut Body<'tcx>) {
+                          body: &mut Body<'tcx>) {
         let mut delete = DeleteNonCodegenStatements;
-        delete.visit_body(mir);
+        delete.visit_body(body);
     }
 }
 
index dbaa4e557c66f0f2a26a7f7c4a3b6763e0faad73..b112643e2cd4ab77eee88147ed43ed50c72e7e3c 100644 (file)
 use rustc::mir::visit::{
     Visitor, PlaceContext, MutatingUseContext, MutVisitor, NonMutatingUseContext,
 };
-use rustc::mir::interpret::{InterpError, Scalar, GlobalId, EvalResult};
+use rustc::mir::interpret::{InterpError, Scalar, GlobalId, InterpResult};
 use rustc::ty::{self, Instance, ParamEnv, Ty, TyCtxt};
 use syntax_pos::{Span, DUMMY_SP};
 use rustc::ty::subst::InternalSubsts;
 use rustc_data_structures::indexed_vec::IndexVec;
 use rustc::ty::layout::{
-    LayoutOf, TyLayout, LayoutError,
-    HasTyCtxt, TargetDataLayout, HasDataLayout,
+    LayoutOf, TyLayout, LayoutError, HasTyCtxt, TargetDataLayout, HasDataLayout, Size,
 };
 
 use crate::interpret::{
@@ -35,7 +34,7 @@ impl MirPass for ConstProp {
     fn run_pass<'a, 'tcx>(&self,
                           tcx: TyCtxt<'a, 'tcx, 'tcx>,
                           source: MirSource<'tcx>,
-                          mir: &mut Body<'tcx>) {
+                          body: &mut Body<'tcx>) {
         // will be evaluated by miri and produce its errors there
         if source.promoted.is_some() {
             return;
@@ -64,16 +63,16 @@ fn run_pass<'a, 'tcx>(&self,
         // constants, instead of just checking for const-folding succeeding.
         // That would require a uniform one-def no-mutation analysis
         // and RPO (or recursing when needing the value of a local).
-        let mut optimization_finder = ConstPropagator::new(mir, tcx, source);
-        optimization_finder.visit_body(mir);
+        let mut optimization_finder = ConstPropagator::new(body, tcx, source);
+        optimization_finder.visit_body(body);
 
         // put back the data we stole from `body`
         std::mem::replace(
-            &mut mir.source_scope_local_data,
+            &mut body.source_scope_local_data,
             optimization_finder.source_scope_local_data
         );
         std::mem::replace(
-            &mut mir.promoted,
+            &mut body.promoted,
             optimization_finder.promoted
         );
 
@@ -121,19 +120,19 @@ fn tcx<'c>(&'c self) -> TyCtxt<'c, 'tcx, 'tcx> {
 
 impl<'a, 'mir, 'tcx> ConstPropagator<'a, 'mir, 'tcx> {
     fn new(
-        mir: &mut Body<'tcx>,
+        body: &mut Body<'tcx>,
         tcx: TyCtxt<'a, 'tcx, 'tcx>,
         source: MirSource<'tcx>,
     ) -> ConstPropagator<'a, 'mir, 'tcx> {
         let param_env = tcx.param_env(source.def_id());
         let ecx = mk_eval_cx(tcx, tcx.def_span(source.def_id()), param_env);
-        let can_const_prop = CanConstProp::check(mir);
+        let can_const_prop = CanConstProp::check(body);
         let source_scope_local_data = std::mem::replace(
-            &mut mir.source_scope_local_data,
+            &mut body.source_scope_local_data,
             ClearCrossCrate::Clear
         );
         let promoted = std::mem::replace(
-            &mut mir.promoted,
+            &mut body.promoted,
             IndexVec::new()
         );
 
@@ -143,10 +142,10 @@ fn new(
             source,
             param_env,
             can_const_prop,
-            places: IndexVec::from_elem(None, &mir.local_decls),
+            places: IndexVec::from_elem(None, &body.local_decls),
             source_scope_local_data,
             //FIXME(wesleywiser) we can't steal this because `Visitor::super_visit_body()` needs it
-            local_decls: mir.local_decls.clone(),
+            local_decls: body.local_decls.clone(),
             promoted,
         }
     }
@@ -157,7 +156,7 @@ fn use_ecx<F, T>(
         f: F
     ) -> Option<T>
     where
-        F: FnOnce(&mut Self) -> EvalResult<'tcx, T>,
+        F: FnOnce(&mut Self) -> InterpResult<'tcx, T>,
     {
         self.ecx.tcx.span = source_info.span;
         let lint_root = match self.source_scope_local_data {
@@ -316,8 +315,8 @@ fn eval_place(&mut self, place: &Place<'tcx>, source_info: SourceInfo) -> Option
                     // cannot use `const_eval` here, because that would require having the MIR
                     // for the current function available, but we're producing said MIR right now
                     let res = self.use_ecx(source_info, |this| {
-                        let mir = &this.promoted[*promoted];
-                        eval_promoted(this.tcx, cid, mir, this.param_env)
+                        let body = &this.promoted[*promoted];
+                        eval_promoted(this.tcx, cid, body, this.param_env)
                     })?;
                     trace!("evaluated promoted {:?} to {:?}", promoted, res);
                     res.into()
@@ -333,6 +332,12 @@ fn eval_place(&mut self, place: &Place<'tcx>, source_info: SourceInfo) -> Option
                             this.ecx.operand_field(eval, field.index() as u64)
                         })?;
                     },
+                    ProjectionElem::Deref => {
+                        trace!("processing deref");
+                        eval = self.use_ecx(source_info, |this| {
+                            this.ecx.deref_operand(eval)
+                        })?.into();
+                    }
                     // We could get more projections by using e.g., `operand_projection`,
                     // but we do not even have the stack frame set up properly so
                     // an `Index` projection would throw us off-track.
@@ -363,8 +368,12 @@ fn const_prop(
             Rvalue::Use(ref op) => {
                 self.eval_operand(op, source_info)
             },
+            Rvalue::Ref(_, _, ref place) => {
+                let src = self.eval_place(place, source_info)?;
+                let mplace = src.try_as_mplace().ok()?;
+                Some(ImmTy::from_scalar(mplace.ptr.into(), place_layout).into())
+            },
             Rvalue::Repeat(..) |
-            Rvalue::Ref(..) |
             Rvalue::Aggregate(..) |
             Rvalue::NullaryOp(NullOp::Box, _) |
             Rvalue::Discriminant(..) => None,
@@ -376,10 +385,30 @@ fn const_prop(
                     this.ecx.cast(op, kind, dest.into())?;
                     Ok(dest.into())
                 })
-            }
+            },
+            Rvalue::Len(ref place) => {
+                let place = self.eval_place(&place, source_info)?;
+                let mplace = place.try_as_mplace().ok()?;
+
+                if let ty::Slice(_) = mplace.layout.ty.sty {
+                    let len = mplace.meta.unwrap().to_usize(&self.ecx).unwrap();
 
-            // FIXME(oli-obk): evaluate static/constant slice lengths
-            Rvalue::Len(_) => None,
+                    Some(ImmTy {
+                        imm: Immediate::Scalar(
+                            Scalar::from_uint(
+                                len,
+                                Size::from_bits(
+                                    self.tcx.sess.target.usize_ty.bit_width().unwrap() as u64
+                                )
+                            ).into(),
+                        ),
+                        layout: self.tcx.layout_of(self.param_env.and(self.tcx.types.usize)).ok()?,
+                    }.into())
+                } else {
+                    trace!("not slice: {:?}", mplace.layout.ty.sty);
+                    None
+                }
+            },
             Rvalue::NullaryOp(NullOp::SizeOf, ty) => {
                 type_size_of(self.tcx, self.param_env, ty).and_then(|n| Some(
                     ImmTy {
@@ -525,12 +554,10 @@ fn replace_with_const(
         source_info: SourceInfo,
     ) {
         trace!("attepting to replace {:?} with {:?}", rval, value);
-        self.ecx.validate_operand(
-            value,
-            vec![],
-            None,
-            true,
-        ).expect("value should already be a valid const");
+        if let Err(e) = self.ecx.validate_operand(value, vec![], None, true) {
+            trace!("validation error, attempt failed: {:?}", e);
+            return;
+        }
 
         // FIXME: figure out what to do when try_read_immediate fails
         let imm = self.use_ecx(source_info, |this| {
@@ -586,10 +613,10 @@ struct CanConstProp {
 
 impl CanConstProp {
     /// Returns `true` if `local` can be propagated.
-    fn check(mir: &Body<'_>) -> IndexVec<Local, bool> {
+    fn check(body: &Body<'_>) -> IndexVec<Local, bool> {
         let mut cpv = CanConstProp {
-            can_const_prop: IndexVec::from_elem(true, &mir.local_decls),
-            found_assignment: IndexVec::from_elem(false, &mir.local_decls),
+            can_const_prop: IndexVec::from_elem(true, &body.local_decls),
+            found_assignment: IndexVec::from_elem(false, &body.local_decls),
         };
         for (local, val) in cpv.can_const_prop.iter_enumerated_mut() {
             // cannot use args at all
@@ -597,13 +624,13 @@ fn check(mir: &Body<'_>) -> IndexVec<Local, bool> {
             //        lint for x != y
             // FIXME(oli-obk): lint variables until they are used in a condition
             // FIXME(oli-obk): lint if return value is constant
-            *val = mir.local_kind(local) == LocalKind::Temp;
+            *val = body.local_kind(local) == LocalKind::Temp;
 
             if !*val {
                 trace!("local {:?} can't be propagated because it's not a temporary", local);
             }
         }
-        cpv.visit_body(mir);
+        cpv.visit_body(body);
         cpv.can_const_prop
     }
 }
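
Beyond the `mir` → `body` rename, the const-propagation hunks above teach the pass to fold `Rvalue::Ref` (by taking the address of an already-evaluated place) and, for slices, `Rvalue::Len` (by reading the length out of the place's metadata) instead of returning `None`. A rough source-level illustration of the kind of code this helps (a sketch of the intended effect, not of the pass itself):

    fn main() {
        // `&[1, 2, 3]` is promoted to a 'static allocation, so the slice
        // behind `xs` is statically known to the compiler.
        let xs: &[u8] = &[1, 2, 3];

        // Indexing lowers to a `Len(*xs)` rvalue feeding a bounds-check
        // `Assert` in MIR; with the new `Rvalue::Ref`/`Rvalue::Len` handling
        // the pass can evaluate that length to 3 and, in principle, discharge
        // the check at compile time.
        let last = xs[2];
        assert_eq!(last, 3);
    }
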
index c48d2d295711a495f59d94713a15566a64cfa755..45b3fb79dd9fa70dc0d048cb1eae8e810f1e7662 100644 (file)
@@ -33,23 +33,23 @@ impl MirPass for CopyPropagation {
     fn run_pass<'a, 'tcx>(&self,
                           tcx: TyCtxt<'a, 'tcx, 'tcx>,
                           _source: MirSource<'tcx>,
-                          mir: &mut Body<'tcx>) {
+                          body: &mut Body<'tcx>) {
         // We only run when the MIR optimization level is > 1.
         // This avoids a slow pass, and messing up debug info.
         if tcx.sess.opts.debugging_opts.mir_opt_level <= 1 {
             return;
         }
 
-        let mut def_use_analysis = DefUseAnalysis::new(mir);
+        let mut def_use_analysis = DefUseAnalysis::new(body);
         loop {
-            def_use_analysis.analyze(mir);
+            def_use_analysis.analyze(body);
 
-            if eliminate_self_assignments(mir, &def_use_analysis) {
-                def_use_analysis.analyze(mir);
+            if eliminate_self_assignments(body, &def_use_analysis) {
+                def_use_analysis.analyze(body);
             }
 
             let mut changed = false;
-            for dest_local in mir.local_decls.indices() {
+            for dest_local in body.local_decls.indices() {
                 debug!("Considering destination local: {:?}", dest_local);
 
                 let action;
@@ -76,7 +76,7 @@ fn run_pass<'a, 'tcx>(&self,
                     }
                     // Conservatively gives up if the dest is an argument,
                     // because there may be uses of the original argument value.
-                    if mir.local_kind(dest_local) == LocalKind::Arg {
+                    if body.local_kind(dest_local) == LocalKind::Arg {
                         debug!("  Can't copy-propagate local: dest {:?} (argument)",
                             dest_local);
                         continue;
@@ -84,7 +84,7 @@ fn run_pass<'a, 'tcx>(&self,
                     let dest_place_def = dest_use_info.defs_not_including_drop().next().unwrap();
                     location = dest_place_def.location;
 
-                    let basic_block = &mir[location.block];
+                    let basic_block = &body[location.block];
                     let statement_index = location.statement_index;
                     let statement = match basic_block.statements.get(statement_index) {
                         Some(statement) => statement,
@@ -103,7 +103,7 @@ fn run_pass<'a, 'tcx>(&self,
                             let maybe_action = match *operand {
                                 Operand::Copy(ref src_place) |
                                 Operand::Move(ref src_place) => {
-                                    Action::local_copy(&mir, &def_use_analysis, src_place)
+                                    Action::local_copy(&body, &def_use_analysis, src_place)
                                 }
                                 Operand::Constant(ref src_constant) => {
                                     Action::constant(src_constant)
@@ -122,7 +122,7 @@ fn run_pass<'a, 'tcx>(&self,
                     }
                 }
 
-                changed = action.perform(mir, &def_use_analysis, dest_local, location) || changed;
+                changed = action.perform(body, &def_use_analysis, dest_local, location) || changed;
                 // FIXME(pcwalton): Update the use-def chains to delete the instructions instead of
                 // regenerating the chains.
                 break
@@ -135,17 +135,17 @@ fn run_pass<'a, 'tcx>(&self,
 }
 
 fn eliminate_self_assignments(
-    mir: &mut Body<'_>,
+    body: &mut Body<'_>,
     def_use_analysis: &DefUseAnalysis,
 ) -> bool {
     let mut changed = false;
 
-    for dest_local in mir.local_decls.indices() {
+    for dest_local in body.local_decls.indices() {
         let dest_use_info = def_use_analysis.local_info(dest_local);
 
         for def in dest_use_info.defs_not_including_drop() {
             let location = def.location;
-            if let Some(stmt) = mir[location.block].statements.get(location.statement_index) {
+            if let Some(stmt) = body[location.block].statements.get(location.statement_index) {
                 match stmt.kind {
                     StatementKind::Assign(
                         Place::Base(PlaceBase::Local(local)),
@@ -163,7 +163,7 @@ fn eliminate_self_assignments(
                 continue;
             }
             debug!("Deleting a self-assignment for {:?}", dest_local);
-            mir.make_statement_nop(location);
+            body.make_statement_nop(location);
             changed = true;
         }
     }
@@ -177,7 +177,7 @@ enum Action<'tcx> {
 }
 
 impl<'tcx> Action<'tcx> {
-    fn local_copy(mir: &Body<'tcx>, def_use_analysis: &DefUseAnalysis, src_place: &Place<'tcx>)
+    fn local_copy(body: &Body<'tcx>, def_use_analysis: &DefUseAnalysis, src_place: &Place<'tcx>)
                   -> Option<Action<'tcx>> {
         // The source must be a local.
         let src_local = if let Place::Base(PlaceBase::Local(local)) = *src_place {
@@ -214,7 +214,7 @@ fn local_copy(mir: &Body<'tcx>, def_use_analysis: &DefUseAnalysis, src_place: &P
         //     USE(SRC);
         let src_def_count = src_use_info.def_count_not_including_drop();
         // allow function arguments to be propagated
-        let is_arg = mir.local_kind(src_local) == LocalKind::Arg;
+        let is_arg = body.local_kind(src_local) == LocalKind::Arg;
         if (is_arg && src_def_count != 0) || (!is_arg && src_def_count != 1) {
             debug!(
                 "  Can't copy-propagate local: {} defs of src{}",
@@ -232,7 +232,7 @@ fn constant(src_constant: &Constant<'tcx>) -> Option<Action<'tcx>> {
     }
 
     fn perform(self,
-               mir: &mut Body<'tcx>,
+               body: &mut Body<'tcx>,
                def_use_analysis: &DefUseAnalysis,
                dest_local: Local,
                location: Location)
@@ -249,21 +249,21 @@ fn perform(self,
                        src_local);
                 for place_use in &def_use_analysis.local_info(dest_local).defs_and_uses {
                     if place_use.context.is_storage_marker() {
-                        mir.make_statement_nop(place_use.location)
+                        body.make_statement_nop(place_use.location)
                     }
                 }
                 for place_use in &def_use_analysis.local_info(src_local).defs_and_uses {
                     if place_use.context.is_storage_marker() {
-                        mir.make_statement_nop(place_use.location)
+                        body.make_statement_nop(place_use.location)
                     }
                 }
 
                 // Replace all uses of the destination local with the source local.
-                def_use_analysis.replace_all_defs_and_uses_with(dest_local, mir, src_local);
+                def_use_analysis.replace_all_defs_and_uses_with(dest_local, body, src_local);
 
                 // Finally, zap the now-useless assignment instruction.
                 debug!("  Deleting assignment");
-                mir.make_statement_nop(location);
+                body.make_statement_nop(location);
 
                 true
             }
@@ -277,7 +277,7 @@ fn perform(self,
                 let dest_local_info = def_use_analysis.local_info(dest_local);
                 for place_use in &dest_local_info.defs_and_uses {
                     if place_use.context.is_storage_marker() {
-                        mir.make_statement_nop(place_use.location)
+                        body.make_statement_nop(place_use.location)
                     }
                 }
 
@@ -285,7 +285,7 @@ fn perform(self,
                 let mut visitor = ConstantPropagationVisitor::new(dest_local,
                                                                   src_constant);
                 for dest_place_use in &dest_local_info.defs_and_uses {
-                    visitor.visit_location(mir, dest_place_use.location)
+                    visitor.visit_location(body, dest_place_use.location)
                 }
 
                 // Zap the assignment instruction if we eliminated all the uses. We won't have been
@@ -296,7 +296,7 @@ fn perform(self,
                     debug!("  {} of {} use(s) replaced; deleting assignment",
                            visitor.uses_replaced,
                            use_count);
-                    mir.make_statement_nop(location);
+                    body.make_statement_nop(location);
                     true
                 } else if visitor.uses_replaced == 0 {
                     debug!("  No uses replaced; not deleting assignment");
index 7da37f956cedd2fc23c0468acd17caaaac5514c3..78725f7523aab4afae9c5b62a163aa1acace545f 100644 (file)
@@ -1,8 +1,7 @@
 use rustc::mir::*;
 use rustc::ty::TyCtxt;
-use rustc::ty::layout::VariantIdx;
-use rustc_data_structures::indexed_vec::Idx;
 use crate::transform::{MirPass, MirSource};
+use crate::util::expand_aggregate;
 
 pub struct Deaggregator;
 
@@ -10,8 +9,8 @@ impl MirPass for Deaggregator {
     fn run_pass<'a, 'tcx>(&self,
                           tcx: TyCtxt<'a, 'tcx, 'tcx>,
                           _source: MirSource<'tcx>,
-                          mir: &mut Body<'tcx>) {
-        let (basic_blocks, local_decls) = mir.basic_blocks_and_local_decls_mut();
+                          body: &mut Body<'tcx>) {
+        let (basic_blocks, local_decls) = body.basic_blocks_and_local_decls_mut();
         let local_decls = &*local_decls;
         for bb in basic_blocks {
             bb.expand_statements(|stmt| {
@@ -31,7 +30,7 @@ fn run_pass<'a, 'tcx>(&self,
 
                 let stmt = stmt.replace_nop();
                 let source_info = stmt.source_info;
-                let (mut lhs, kind, operands) = match stmt.kind {
+                let (lhs, kind, operands) = match stmt.kind {
                     StatementKind::Assign(lhs, box rvalue) => {
                         match rvalue {
                             Rvalue::Aggregate(kind, operands) => (lhs, kind, operands),
@@ -41,62 +40,15 @@ fn run_pass<'a, 'tcx>(&self,
                     _ => bug!()
                 };
 
-                let mut set_discriminant = None;
-                let active_field_index = match *kind {
-                    AggregateKind::Adt(adt_def, variant_index, _, _, active_field_index) => {
-                        if adt_def.is_enum() {
-                            set_discriminant = Some(Statement {
-                                kind: StatementKind::SetDiscriminant {
-                                    place: lhs.clone(),
-                                    variant_index,
-                                },
-                                source_info,
-                            });
-                            lhs = lhs.downcast(adt_def, variant_index);
-                        }
-                        active_field_index
-                    }
-                    AggregateKind::Generator(..) => {
-                        // Right now we only support initializing generators to
-                        // variant 0 (Unresumed).
-                        let variant_index = VariantIdx::new(0);
-                        set_discriminant = Some(Statement {
-                            kind: StatementKind::SetDiscriminant {
-                                place: lhs.clone(),
-                                variant_index,
-                            },
-                            source_info,
-                        });
-
-                        // Operands are upvars stored on the base place, so no
-                        // downcast is necessary.
-
-                        None
-                    }
-                    _ => None
-                };
-
-                Some(operands.into_iter().enumerate().map(move |(i, op)| {
-                    let lhs_field = if let AggregateKind::Array(_) = *kind {
-                        // FIXME(eddyb) `offset` should be u64.
-                        let offset = i as u32;
-                        assert_eq!(offset as usize, i);
-                        lhs.clone().elem(ProjectionElem::ConstantIndex {
-                            offset,
-                            // FIXME(eddyb) `min_length` doesn't appear to be used.
-                            min_length: offset + 1,
-                            from_end: false
-                        })
-                    } else {
+                Some(expand_aggregate(
+                    lhs,
+                    operands.into_iter().map(|op| {
                         let ty = op.ty(local_decls, tcx);
-                        let field = Field::new(active_field_index.unwrap_or(i));
-                        lhs.clone().field(field, ty)
-                    };
-                    Statement {
-                        source_info,
-                        kind: StatementKind::Assign(lhs_field, box Rvalue::Use(op)),
-                    }
-                }).chain(set_discriminant))
+                        (op, ty)
+                    }),
+                    *kind,
+                    source_info,
+                ))
             });
         }
     }
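
The pass above now delegates to the shared `util::expand_aggregate` helper; the removed inline code shows what the expansion does: one aggregate assignment becomes one assignment per field, with a discriminant write chained on for enum (and generator) aggregates. A rough stand-alone model of that expansion, with hypothetical Stmt and place types rather than the real MIR ones:

#[derive(Debug)]
enum Stmt {
    AssignField { place: String, field: usize, operand: String },
    SetDiscriminant { place: String, variant: usize },
}

fn expand_aggregate(place: &str, variant: Option<usize>, operands: &[&str]) -> Vec<Stmt> {
    let mut out: Vec<Stmt> = operands
        .iter()
        .enumerate()
        .map(|(field, op)| Stmt::AssignField {
            place: place.to_string(),
            field,
            operand: op.to_string(),
        })
        .collect();
    // Mirrors the `.chain(set_discriminant)` in the removed code: for enum
    // variants the discriminant is written after the fields are filled in.
    if let Some(variant) = variant {
        out.push(Stmt::SetDiscriminant { place: place.to_string(), variant });
    }
    out
}

fn main() {
    // _1 = Some(_2)  expands to  ((_1 as Some).0) = _2; discriminant(_1) = 1;
    let stmts = expand_aggregate("_1", Some(1), &["_2"]);
    println!("{:#?}", stmts);
}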
index ebb65094a306df2681a4cfdef5dbb4e24fd29eff..9d88a2c8027e383acf35291c3515b03def905217 100644 (file)
@@ -21,7 +21,7 @@ fn name<'a>(&'a self) -> Cow<'a, str> {
     fn run_pass<'a, 'tcx>(&self,
                           _tcx: TyCtxt<'a, 'tcx, 'tcx>,
                           _source: MirSource<'tcx>,
-                          _mir: &mut Body<'tcx>)
+                          _body: &mut Body<'tcx>)
     {
     }
 }
@@ -42,7 +42,7 @@ pub fn on_mir_pass<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
                              pass_num: &dyn fmt::Display,
                              pass_name: &str,
                              source: MirSource<'tcx>,
-                             mir: &Body<'tcx>,
+                             body: &Body<'tcx>,
                              is_after: bool) {
     if mir_util::dump_enabled(tcx, pass_name, source) {
         mir_util::dump_mir(tcx,
@@ -50,7 +50,7 @@ pub fn on_mir_pass<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
                            pass_name,
                            &Disambiguator { is_after },
                            source,
-                           mir,
+                           body,
                            |_, _| Ok(()) );
     }
 }
index c833af29c36f0ed1427be0ac03b7830649d52ee1..c48b94bded65179a80b09cc60748b622813a6e5c 100644 (file)
@@ -24,13 +24,13 @@ impl MirPass for ElaborateDrops {
     fn run_pass<'a, 'tcx>(&self,
                           tcx: TyCtxt<'a, 'tcx, 'tcx>,
                           src: MirSource<'tcx>,
-                          mir: &mut Body<'tcx>)
+                          body: &mut Body<'tcx>)
     {
-        debug!("elaborate_drops({:?} @ {:?})", src, mir.span);
+        debug!("elaborate_drops({:?} @ {:?})", src, body.span);
 
         let def_id = src.def_id();
         let param_env = tcx.param_env(src.def_id()).with_reveal_all();
-        let move_data = match MoveData::gather_moves(mir, tcx) {
+        let move_data = match MoveData::gather_moves(body, tcx) {
             Ok(move_data) => move_data,
             Err((move_data, _move_errors)) => {
                 // The only way we should be allowing any move_errors
@@ -45,32 +45,32 @@ fn run_pass<'a, 'tcx>(&self,
             }
         };
         let elaborate_patch = {
-            let mir = &*mir;
+            let body = &*body;
             let env = MoveDataParamEnv {
                 move_data,
                 param_env,
             };
-            let dead_unwinds = find_dead_unwinds(tcx, mir, def_id, &env);
+            let dead_unwinds = find_dead_unwinds(tcx, body, def_id, &env);
             let flow_inits =
-                do_dataflow(tcx, mir, def_id, &[], &dead_unwinds,
-                            MaybeInitializedPlaces::new(tcx, mir, &env),
+                do_dataflow(tcx, body, def_id, &[], &dead_unwinds,
+                            MaybeInitializedPlaces::new(tcx, body, &env),
                             |bd, p| DebugFormatted::new(&bd.move_data().move_paths[p]));
             let flow_uninits =
-                do_dataflow(tcx, mir, def_id, &[], &dead_unwinds,
-                            MaybeUninitializedPlaces::new(tcx, mir, &env),
+                do_dataflow(tcx, body, def_id, &[], &dead_unwinds,
+                            MaybeUninitializedPlaces::new(tcx, body, &env),
                             |bd, p| DebugFormatted::new(&bd.move_data().move_paths[p]));
 
             ElaborateDropsCtxt {
                 tcx,
-                mir,
+                body,
                 env: &env,
                 flow_inits,
                 flow_uninits,
                 drop_flags: Default::default(),
-                patch: MirPatch::new(mir),
+                patch: MirPatch::new(body),
             }.elaborate()
         };
-        elaborate_patch.apply(mir);
+        elaborate_patch.apply(body);
     }
 }
 
@@ -79,20 +79,20 @@ fn run_pass<'a, 'tcx>(&self,
 /// that can't drop anything.
 fn find_dead_unwinds<'a, 'tcx>(
     tcx: TyCtxt<'a, 'tcx, 'tcx>,
-    mir: &Body<'tcx>,
+    body: &Body<'tcx>,
     def_id: hir::def_id::DefId,
     env: &MoveDataParamEnv<'tcx, 'tcx>)
     -> BitSet<BasicBlock>
 {
-    debug!("find_dead_unwinds({:?})", mir.span);
+    debug!("find_dead_unwinds({:?})", body.span);
     // We only need to do this pass once, because unwind edges can only
     // reach cleanup blocks, which can't have unwind edges themselves.
-    let mut dead_unwinds = BitSet::new_empty(mir.basic_blocks().len());
+    let mut dead_unwinds = BitSet::new_empty(body.basic_blocks().len());
     let flow_inits =
-        do_dataflow(tcx, mir, def_id, &[], &dead_unwinds,
-                    MaybeInitializedPlaces::new(tcx, mir, &env),
+        do_dataflow(tcx, body, def_id, &[], &dead_unwinds,
+                    MaybeInitializedPlaces::new(tcx, body, &env),
                     |bd, p| DebugFormatted::new(&bd.move_data().move_paths[p]));
-    for (bb, bb_data) in mir.basic_blocks().iter_enumerated() {
+    for (bb, bb_data) in body.basic_blocks().iter_enumerated() {
         let location = match bb_data.terminator().kind {
             TerminatorKind::Drop { ref location, unwind: Some(_), .. } |
             TerminatorKind::DropAndReplace { ref location, unwind: Some(_), .. } => location,
@@ -107,7 +107,7 @@ fn find_dead_unwinds<'a, 'tcx>(
                bb, bb_data, init_data.live);
         for stmt in 0..bb_data.statements.len() {
             let loc = Location { block: bb, statement_index: stmt };
-            init_data.apply_location(tcx, mir, env, loc);
+            init_data.apply_location(tcx, body, env, loc);
         }
 
         let path = match env.move_data.rev_lookup.find(location) {
@@ -121,7 +121,7 @@ fn find_dead_unwinds<'a, 'tcx>(
         debug!("find_dead_unwinds @ {:?}: path({:?})={:?}", bb, location, path);
 
         let mut maybe_live = false;
-        on_all_drop_children_bits(tcx, mir, &env, path, |child| {
+        on_all_drop_children_bits(tcx, body, &env, path, |child| {
             let (child_maybe_live, _) = init_data.state(child);
             maybe_live |= child_maybe_live;
         });
@@ -143,11 +143,11 @@ struct InitializationData {
 impl InitializationData {
     fn apply_location<'a,'tcx>(&mut self,
                                tcx: TyCtxt<'a, 'tcx, 'tcx>,
-                               mir: &Body<'tcx>,
+                               body: &Body<'tcx>,
                                env: &MoveDataParamEnv<'tcx, 'tcx>,
                                loc: Location)
     {
-        drop_flag_effects_for_location(tcx, mir, env, loc, |path, df| {
+        drop_flag_effects_for_location(tcx, body, env, loc, |path, df| {
             debug!("at location {:?}: setting {:?} to {:?}",
                    loc, path, df);
             match df {
@@ -186,8 +186,8 @@ fn patch(&mut self) -> &mut MirPatch<'tcx> {
         &mut self.ctxt.patch
     }
 
-    fn mir(&self) -> &'a Body<'tcx> {
-        self.ctxt.mir
+    fn body(&self) -> &'a Body<'tcx> {
+        self.ctxt.body
     }
 
     fn tcx(&self) -> TyCtxt<'a, 'tcx, 'tcx> {
@@ -206,7 +206,7 @@ fn drop_style(&self, path: Self::Path, mode: DropFlagMode) -> DropStyle {
                 let mut some_dead = false;
                 let mut children_count = 0;
                 on_all_drop_children_bits(
-                    self.tcx(), self.mir(), self.ctxt.env, path, |child| {
+                    self.tcx(), self.body(), self.ctxt.env, path, |child| {
                         let (live, dead) = self.init_data.state(child);
                         debug!("elaborate_drop: state({:?}) = {:?}",
                                child, (live, dead));
@@ -232,7 +232,7 @@ fn clear_drop_flag(&mut self, loc: Location, path: Self::Path, mode: DropFlagMod
             }
             DropFlagMode::Deep => {
                 on_all_children_bits(
-                    self.tcx(), self.mir(), self.ctxt.move_data(), path,
+                    self.tcx(), self.body(), self.ctxt.move_data(), path,
                     |child| self.ctxt.set_drop_flag(loc, child, DropFlagState::Absent)
                  );
             }
@@ -291,7 +291,7 @@ fn get_drop_flag(&mut self, path: Self::Path) -> Option<Operand<'tcx>> {
 
 struct ElaborateDropsCtxt<'a, 'tcx: 'a> {
     tcx: TyCtxt<'a, 'tcx, 'tcx>,
-    mir: &'a Body<'tcx>,
+    body: &'a Body<'tcx>,
     env: &'a MoveDataParamEnv<'tcx, 'tcx>,
     flow_inits: DataflowResults<'tcx, MaybeInitializedPlaces<'a, 'tcx, 'tcx>>,
     flow_uninits:  DataflowResults<'tcx, MaybeUninitializedPlaces<'a, 'tcx, 'tcx>>,
@@ -314,7 +314,7 @@ fn initialization_data_at(&self, loc: Location) -> InitializationData {
                 .to_owned(),
         };
         for stmt in 0..loc.statement_index {
-            data.apply_location(self.tcx, self.mir, self.env,
+            data.apply_location(self.tcx, self.body, self.env,
                                 Location { block: loc.block, statement_index: stmt });
         }
         data
@@ -323,7 +323,7 @@ fn initialization_data_at(&self, loc: Location) -> InitializationData {
     fn create_drop_flag(&mut self, index: MovePathIndex, span: Span) {
         let tcx = self.tcx;
         let patch = &mut self.patch;
-        debug!("create_drop_flag({:?})", self.mir.span);
+        debug!("create_drop_flag({:?})", self.body.span);
         self.drop_flags.entry(index).or_insert_with(|| {
             patch.new_internal(tcx.types.bool, span)
         });
@@ -351,7 +351,7 @@ fn elaborate(mut self) -> MirPatch<'tcx>
 
     fn collect_drop_flags(&mut self)
     {
-        for (bb, data) in self.mir.basic_blocks().iter_enumerated() {
+        for (bb, data) in self.body.basic_blocks().iter_enumerated() {
             let terminator = data.terminator();
             let location = match terminator.kind {
                 TerminatorKind::Drop { ref location, .. } |
@@ -382,7 +382,7 @@ fn collect_drop_flags(&mut self)
                 }
             };
 
-            on_all_drop_children_bits(self.tcx, self.mir, self.env, path, |child| {
+            on_all_drop_children_bits(self.tcx, self.body, self.env, path, |child| {
                 let (maybe_live, maybe_dead) = init_data.state(child);
                 debug!("collect_drop_flags: collecting {:?} from {:?}@{:?} - {:?}",
                        child, location, path, (maybe_live, maybe_dead));
@@ -395,7 +395,7 @@ fn collect_drop_flags(&mut self)
 
     fn elaborate_drops(&mut self)
     {
-        for (bb, data) in self.mir.basic_blocks().iter_enumerated() {
+        for (bb, data) in self.body.basic_blocks().iter_enumerated() {
             let loc = Location { block: bb, statement_index: data.statements.len() };
             let terminator = data.terminator();
 
@@ -464,7 +464,7 @@ fn elaborate_replace(
         unwind: Option<BasicBlock>)
     {
         let bb = loc.block;
-        let data = &self.mir[bb];
+        let data = &self.body[bb];
         let terminator = data.terminator();
         assert!(!data.is_cleanup, "DropAndReplace in unwind path not supported");
 
@@ -508,7 +508,7 @@ fn elaborate_replace(
                     target,
                     Unwind::To(unwind),
                     bb);
-                on_all_children_bits(self.tcx, self.mir, self.move_data(), path, |child| {
+                on_all_children_bits(self.tcx, self.body, self.move_data(), path, |child| {
                     self.set_drop_flag(Location { block: target, statement_index: 0 },
                                        child, DropFlagState::Present);
                     self.set_drop_flag(Location { block: unwind, statement_index: 0 },
@@ -539,7 +539,7 @@ fn constant_bool(&self, span: Span, val: bool) -> Rvalue<'tcx> {
 
     fn set_drop_flag(&mut self, loc: Location, path: MovePathIndex, val: DropFlagState) {
         if let Some(&flag) = self.drop_flags.get(&path) {
-            let span = self.patch.source_info_for_location(self.mir, loc).span;
+            let span = self.patch.source_info_for_location(self.body, loc).span;
             let val = self.constant_bool(span, val.value());
             self.patch.add_assign(loc, Place::Base(PlaceBase::Local(flag)), val);
         }
@@ -547,7 +547,7 @@ fn set_drop_flag(&mut self, loc: Location, path: MovePathIndex, val: DropFlagSta
 
     fn drop_flags_on_init(&mut self) {
         let loc = Location::START;
-        let span = self.patch.source_info_for_location(self.mir, loc).span;
+        let span = self.patch.source_info_for_location(self.body, loc).span;
         let false_ = self.constant_bool(span, false);
         for flag in self.drop_flags.values() {
             self.patch.add_assign(loc, Place::Base(PlaceBase::Local(*flag)), false_.clone());
@@ -555,7 +555,7 @@ fn drop_flags_on_init(&mut self) {
     }
 
     fn drop_flags_for_fn_rets(&mut self) {
-        for (bb, data) in self.mir.basic_blocks().iter_enumerated() {
+        for (bb, data) in self.body.basic_blocks().iter_enumerated() {
             if let TerminatorKind::Call {
                 destination: Some((ref place, tgt)), cleanup: Some(_), ..
             } = data.terminator().kind {
@@ -564,7 +564,7 @@ fn drop_flags_for_fn_rets(&mut self) {
                 let loc = Location { block: tgt, statement_index: 0 };
                 let path = self.move_data().rev_lookup.find(place);
                 on_lookup_result_bits(
-                    self.tcx, self.mir, self.move_data(), path,
+                    self.tcx, self.body, self.move_data(), path,
                     |child| self.set_drop_flag(loc, child, DropFlagState::Present)
                 );
             }
@@ -574,7 +574,7 @@ fn drop_flags_for_fn_rets(&mut self) {
     fn drop_flags_for_args(&mut self) {
         let loc = Location::START;
         dataflow::drop_flag_effects_for_function_entry(
-            self.tcx, self.mir, self.env, |path, ds| {
+            self.tcx, self.body, self.env, |path, ds| {
                 self.set_drop_flag(loc, path, ds);
             }
         )
@@ -587,7 +587,7 @@ fn drop_flags_for_locs(&mut self) {
         // drop flags by themselves, to avoid the drop flags being
         // clobbered before they are read.
 
-        for (bb, data) in self.mir.basic_blocks().iter_enumerated() {
+        for (bb, data) in self.body.basic_blocks().iter_enumerated() {
             debug!("drop_flags_for_locs({:?})", data);
             for i in 0..(data.statements.len()+1) {
                 debug!("drop_flag_for_locs: stmt {}", i);
@@ -619,7 +619,7 @@ fn drop_flags_for_locs(&mut self) {
                 }
                 let loc = Location { block: bb, statement_index: i };
                 dataflow::drop_flag_effects_for_location(
-                    self.tcx, self.mir, self.env, loc, |path, ds| {
+                    self.tcx, self.body, self.env, loc, |path, ds| {
                         if ds == DropFlagState::Absent || allow_initializations {
                             self.set_drop_flag(loc, path, ds)
                         }
@@ -638,7 +638,7 @@ fn drop_flags_for_locs(&mut self) {
                 let loc = Location { block: bb, statement_index: data.statements.len() };
                 let path = self.move_data().rev_lookup.find(place);
                 on_lookup_result_bits(
-                    self.tcx, self.mir, self.move_data(), path,
+                    self.tcx, self.body, self.move_data(), path,
                     |child| self.set_drop_flag(loc, child, DropFlagState::Present)
                 );
             }
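
The drop-flag plumbing renamed throughout this file follows a simple protocol: each tracked move path gets a boolean flag that is set when the path is initialized, cleared when it is moved out of or dropped, and consulted before the destructor runs. A schematic stand-alone sketch of that bookkeeping, using a plain HashMap keyed by strings in place of rustc's MirPatch and MovePathIndex machinery:

use std::collections::HashMap;

#[derive(Default)]
struct DropFlags {
    flags: HashMap<&'static str, bool>,
}

impl DropFlags {
    fn set(&mut self, path: &'static str, present: bool) {
        self.flags.insert(path, present);
    }
    fn needs_drop(&self, path: &str) -> bool {
        *self.flags.get(path).unwrap_or(&false)
    }
}

fn main() {
    let mut flags = DropFlags::default();
    flags.set("x", true);   // `x` initialized: flag set (Present)
    flags.set("x", false);  // `x` moved out of: flag cleared (Absent)
    // The elaborated drop consults the flag and becomes a no-op here.
    assert!(!flags.needs_drop("x"));
}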
index ffc5bba6d608660c4b10e4f6df4f448a314ec729..5ed7abc88b8f172c0781b719b639e32826e11caf 100644 (file)
@@ -53,7 +53,7 @@ impl MirPass for EraseRegions {
     fn run_pass<'a, 'tcx>(&self,
                           tcx: TyCtxt<'a, 'tcx, 'tcx>,
                           _: MirSource<'tcx>,
-                          mir: &mut Body<'tcx>) {
-        EraseRegionsVisitor::new(tcx).visit_body(mir);
+                          body: &mut Body<'tcx>) {
+        EraseRegionsVisitor::new(tcx).visit_body(body);
     }
 }
index f36ede4e8d9cdcce31ae31210a718efafd986b2a..0665d09d756aa7d1e0aa71bdf9f0e938e8b932f1 100644 (file)
 use rustc::ty::subst::SubstsRef;
 use rustc_data_structures::fx::FxHashMap;
 use rustc_data_structures::indexed_vec::{Idx, IndexVec};
-use rustc_data_structures::bit_set::BitSet;
+use rustc_data_structures::bit_set::{BitSet, BitMatrix};
 use std::borrow::Cow;
 use std::iter;
 use std::mem;
 use crate::transform::{MirPass, MirSource};
 use crate::transform::simplify;
 use crate::transform::no_landing_pads::no_landing_pads;
+use crate::dataflow::{DataflowResults, DataflowResultsConsumer, FlowAtLocation};
 use crate::dataflow::{do_dataflow, DebugFormatted, state_for_location};
 use crate::dataflow::{MaybeStorageLive, HaveBeenBorrowedLocals};
 use crate::util::dump_mir;
@@ -103,11 +104,11 @@ fn visit_place(&mut self,
                     place: &mut Place<'tcx>,
                     context: PlaceContext,
                     location: Location) {
-        if *place == Place::Base(PlaceBase::Local(self_arg())) {
-            *place = Place::Projection(Box::new(Projection {
-                base: place.clone(),
+        if place.base_local() == Some(self_arg()) {
+            replace_base(place, Place::Projection(Box::new(Projection {
+                base: Place::Base(PlaceBase::Local(self_arg())),
                 elem: ProjectionElem::Deref,
-            }));
+            })));
         } else {
             self.super_place(place, context, location);
         }
@@ -130,17 +131,25 @@ fn visit_place(&mut self,
                     place: &mut Place<'tcx>,
                     context: PlaceContext,
                     location: Location) {
-        if *place == Place::Base(PlaceBase::Local(self_arg())) {
-            *place = Place::Projection(Box::new(Projection {
-                base: place.clone(),
+        if place.base_local() == Some(self_arg()) {
+            replace_base(place, Place::Projection(Box::new(Projection {
+                base: Place::Base(PlaceBase::Local(self_arg())),
                 elem: ProjectionElem::Field(Field::new(0), self.ref_gen_ty),
-            }));
+            })));
         } else {
             self.super_place(place, context, location);
         }
     }
 }
 
+fn replace_base(place: &mut Place<'tcx>, new_base: Place<'tcx>) {
+    if let Place::Projection(proj) = place {
+        replace_base(&mut proj.base, new_base);
+    } else {
+        *place = new_base;
+    }
+}
+
 fn self_arg() -> Local {
     Local::new(1)
 }
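
The new `replace_base` helper above recurses to the innermost base of a projection chain and swaps it out, instead of wrapping the whole place as the old code did. A self-contained miniature of the same recursion, with a hypothetical two-variant Place enum rather than rustc's:

#[derive(Debug, PartialEq)]
enum Place {
    Local(usize),
    Deref(Box<Place>),
}

fn replace_base(place: &mut Place, new_base: Place) {
    if let Place::Deref(inner) = place {
        // Keep walking through projections until the base local is reached.
        replace_base(inner, new_base);
    } else {
        *place = new_base;
    }
}

fn main() {
    // (*_1) with its base replaced by _9 becomes (*_9), projections intact.
    let mut place = Place::Deref(Box::new(Place::Local(1)));
    replace_base(&mut place, Place::Local(9));
    assert_eq!(place, Place::Deref(Box::new(Place::Local(9))));
}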
@@ -210,14 +219,14 @@ fn set_discr(&self, state_disc: VariantIdx, source_info: SourceInfo) -> Statemen
     }
 
     // Create a statement which reads the discriminant into a temporary
-    fn get_discr(&self, mir: &mut Body<'tcx>) -> (Statement<'tcx>, Place<'tcx>) {
-        let temp_decl = LocalDecl::new_internal(self.tcx.types.isize, mir.span);
-        let local_decls_len = mir.local_decls.push(temp_decl);
+    fn get_discr(&self, body: &mut Body<'tcx>) -> (Statement<'tcx>, Place<'tcx>) {
+        let temp_decl = LocalDecl::new_internal(self.tcx.types.isize, body.span);
+        let local_decls_len = body.local_decls.push(temp_decl);
         let temp = Place::Base(PlaceBase::Local(local_decls_len));
 
         let self_place = Place::Base(PlaceBase::Local(self_arg()));
         let assign = Statement {
-            source_info: source_info(mir),
+            source_info: source_info(body),
             kind: StatementKind::Assign(temp.clone(), box Rvalue::Discriminant(self_place)),
         };
         (assign, temp)
@@ -236,10 +245,10 @@ fn visit_place(&mut self,
                     place: &mut Place<'tcx>,
                     context: PlaceContext,
                     location: Location) {
-        if let Place::Base(PlaceBase::Local(l)) = *place {
+        if let Some(l) = place.base_local() {
             // Replace an Local in the remap with a generator struct access
             if let Some(&(ty, variant_index, idx)) = self.remap.get(&l) {
-                *place = self.make_field(variant_index, idx, ty);
+                replace_base(place, self.make_field(variant_index, idx, ty));
             }
         } else {
             self.super_place(place, context, location);
@@ -304,8 +313,8 @@ fn visit_basic_block_data(&mut self,
 fn make_generator_state_argument_indirect<'a, 'tcx>(
                 tcx: TyCtxt<'a, 'tcx, 'tcx>,
                 def_id: DefId,
-                mir: &mut Body<'tcx>) {
-    let gen_ty = mir.local_decls.raw[1].ty;
+                body: &mut Body<'tcx>) {
+    let gen_ty = body.local_decls.raw[1].ty;
 
     let region = ty::ReFree(ty::FreeRegion {
         scope: def_id,
@@ -320,16 +329,16 @@ fn make_generator_state_argument_indirect<'a, 'tcx>(
     });
 
     // Replace the by value generator argument
-    mir.local_decls.raw[1].ty = ref_gen_ty;
+    body.local_decls.raw[1].ty = ref_gen_ty;
 
     // Add a deref to accesses of the generator state
-    DerefArgVisitor.visit_body(mir);
+    DerefArgVisitor.visit_body(body);
 }
 
 fn make_generator_state_argument_pinned<'a, 'tcx>(
                 tcx: TyCtxt<'a, 'tcx, 'tcx>,
-                mir: &mut Body<'tcx>) {
-    let ref_gen_ty = mir.local_decls.raw[1].ty;
+                body: &mut Body<'tcx>) {
+    let ref_gen_ty = body.local_decls.raw[1].ty;
 
     let pin_did = tcx.lang_items().pin_type().unwrap();
     let pin_adt_ref = tcx.adt_def(pin_did);
@@ -337,17 +346,17 @@ fn make_generator_state_argument_pinned<'a, 'tcx>(
     let pin_ref_gen_ty = tcx.mk_adt(pin_adt_ref, substs);
 
     // Replace the by ref generator argument
-    mir.local_decls.raw[1].ty = pin_ref_gen_ty;
+    body.local_decls.raw[1].ty = pin_ref_gen_ty;
 
     // Add the Pin field access to accesses of the generator state
-    PinArgVisitor { ref_gen_ty }.visit_body(mir);
+    PinArgVisitor { ref_gen_ty }.visit_body(body);
 }
 
 fn replace_result_variable<'tcx>(
     ret_ty: Ty<'tcx>,
-    mir: &mut Body<'tcx>,
+    body: &mut Body<'tcx>,
 ) -> Local {
-    let source_info = source_info(mir);
+    let source_info = source_info(body);
     let new_ret = LocalDecl {
         mutability: Mutability::Mut,
         ty: ret_ty,
@@ -359,14 +368,14 @@ fn replace_result_variable<'tcx>(
         is_block_tail: None,
         is_user_variable: None,
     };
-    let new_ret_local = Local::new(mir.local_decls.len());
-    mir.local_decls.push(new_ret);
-    mir.local_decls.swap(RETURN_PLACE, new_ret_local);
+    let new_ret_local = Local::new(body.local_decls.len());
+    body.local_decls.push(new_ret);
+    body.local_decls.swap(RETURN_PLACE, new_ret_local);
 
     RenameLocalVisitor {
         from: RETURN_PLACE,
         to: new_ret_local,
-    }.visit_body(mir);
+    }.visit_body(body);
 
     new_ret_local
 }
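
`replace_result_variable` above pushes a fresh declaration, swaps it with the return place, and then renames old uses of `_0` to the new local. A toy version of the push-and-swap step, using an ordinary Vec of strings in place of the local_decls IndexVec (the renaming visitor is omitted):

fn replace_result_variable(local_decls: &mut Vec<&'static str>) -> usize {
    let new_ret_local = local_decls.len();
    local_decls.push("generator state (new contents of _0)");
    // Slot 0 is the return place; the old return value moves to the new index.
    local_decls.swap(0, new_ret_local);
    new_ret_local
}

fn main() {
    let mut decls = vec!["old return value", "self", "temp"];
    let renamed_to = replace_result_variable(&mut decls);
    assert_eq!(renamed_to, 3);
    assert_eq!(decls[0], "generator state (new contents of _0)");
    assert_eq!(decls[3], "old return value");
}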
@@ -385,65 +394,79 @@ fn visit_statement(&mut self,
     }
 }
 
+struct LivenessInfo {
+    /// Which locals are live across any suspension point.
+    ///
+    /// GeneratorSavedLocal is indexed in terms of the elements in this set;
+    /// i.e. GeneratorSavedLocal::new(1) corresponds to the second local
+    /// included in this set.
+    live_locals: liveness::LiveVarSet,
+
+    /// The set of saved locals live at each suspension point.
+    live_locals_at_suspension_points: Vec<BitSet<GeneratorSavedLocal>>,
+
+    /// For every saved local, the set of other saved locals that are
+    /// storage-live at the same time as this local. We cannot overlap locals in
+    /// the layout which have conflicting storage.
+    storage_conflicts: BitMatrix<GeneratorSavedLocal, GeneratorSavedLocal>,
+
+    /// For every suspending block, the locals which are storage-live across
+    /// that suspension point.
+    storage_liveness: FxHashMap<BasicBlock, liveness::LiveVarSet>,
+}
+
 fn locals_live_across_suspend_points(
     tcx: TyCtxt<'a, 'tcx, 'tcx>,
-    mir: &Body<'tcx>,
+    body: &Body<'tcx>,
     source: MirSource<'tcx>,
     movable: bool,
-) -> (
-    liveness::LiveVarSet,
-    FxHashMap<BasicBlock, liveness::LiveVarSet>,
-    BitSet<BasicBlock>,
-) {
-    let dead_unwinds = BitSet::new_empty(mir.basic_blocks().len());
+) -> LivenessInfo {
+    let dead_unwinds = BitSet::new_empty(body.basic_blocks().len());
     let def_id = source.def_id();
 
     // Calculate when MIR locals have live storage. This gives us an upper bound of their
     // lifetimes.
-    let storage_live_analysis = MaybeStorageLive::new(mir);
+    let storage_live_analysis = MaybeStorageLive::new(body);
     let storage_live =
-        do_dataflow(tcx, mir, def_id, &[], &dead_unwinds, storage_live_analysis,
-                    |bd, p| DebugFormatted::new(&bd.mir().local_decls[p]));
+        do_dataflow(tcx, body, def_id, &[], &dead_unwinds, storage_live_analysis,
+                    |bd, p| DebugFormatted::new(&bd.body().local_decls[p]));
 
     // Find the MIR locals which do not use StorageLive/StorageDead statements.
     // The storage of these locals are always live.
-    let mut ignored = StorageIgnored(BitSet::new_filled(mir.local_decls.len()));
-    ignored.visit_body(mir);
+    let mut ignored = StorageIgnored(BitSet::new_filled(body.local_decls.len()));
+    ignored.visit_body(body);
 
     // Calculate the MIR locals which have been previously
     // borrowed (even if they are still active).
     // This is only used for immovable generators.
     let borrowed_locals = if !movable {
-        let analysis = HaveBeenBorrowedLocals::new(mir);
+        let analysis = HaveBeenBorrowedLocals::new(body);
         let result =
-            do_dataflow(tcx, mir, def_id, &[], &dead_unwinds, analysis,
-                        |bd, p| DebugFormatted::new(&bd.mir().local_decls[p]));
+            do_dataflow(tcx, body, def_id, &[], &dead_unwinds, analysis,
+                        |bd, p| DebugFormatted::new(&bd.body().local_decls[p]));
         Some((analysis, result))
     } else {
         None
     };
 
     // Calculate the liveness of MIR locals ignoring borrows.
-    let mut set = liveness::LiveVarSet::new_empty(mir.local_decls.len());
+    let mut live_locals = liveness::LiveVarSet::new_empty(body.local_decls.len());
     let mut liveness = liveness::liveness_of_locals(
-        mir,
+        body,
     );
     liveness::dump_mir(
         tcx,
         "generator_liveness",
         source,
-        mir,
+        body,
         &liveness,
     );
 
     let mut storage_liveness_map = FxHashMap::default();
+    let mut live_locals_at_suspension_points = Vec::new();
 
-    let mut suspending_blocks = BitSet::new_empty(mir.basic_blocks().len());
-
-    for (block, data) in mir.basic_blocks().iter_enumerated() {
+    for (block, data) in body.basic_blocks().iter_enumerated() {
         if let TerminatorKind::Yield { .. } = data.terminator().kind {
-            suspending_blocks.insert(block);
-
             let loc = Location {
                 block: block,
                 statement_index: data.statements.len(),
@@ -453,7 +476,7 @@ fn locals_live_across_suspend_points(
                 let borrowed_locals = state_for_location(loc,
                                                          analysis,
                                                          result,
-                                                         mir);
+                                                         body);
                 // The `liveness` variable contains the liveness of MIR locals ignoring borrows.
                 // This is correct for movable generators since borrows cannot live across
                 // suspension points. However for immovable generators we need to account for
@@ -470,7 +493,7 @@ fn locals_live_across_suspend_points(
             let mut storage_liveness = state_for_location(loc,
                                                           &storage_live_analysis,
                                                           &storage_live,
-                                                          mir);
+                                                          body);
 
             // Store the storage liveness for later use so we can restore the state
             // after a suspension point
@@ -482,20 +505,177 @@ fn locals_live_across_suspend_points(
             // Locals live are live at this point only if they are used across
             // suspension points (the `liveness` variable)
             // and their storage is live (the `storage_liveness` variable)
-            storage_liveness.intersect(&liveness.outs[block]);
+            let mut live_locals_here = storage_liveness;
+            live_locals_here.intersect(&liveness.outs[block]);
 
-            let live_locals = storage_liveness;
+            // The generator argument is ignored
+            live_locals_here.remove(self_arg());
 
-            // Add the locals life at this suspension point to the set of locals which live across
+            // Add the locals live at this suspension point to the set of locals which live across
             // any suspension points
-            set.union(&live_locals);
+            live_locals.union(&live_locals_here);
+
+            live_locals_at_suspension_points.push(live_locals_here);
+        }
+    }
+
+    // Renumber our liveness_map bitsets to include only the locals we are
+    // saving.
+    let live_locals_at_suspension_points = live_locals_at_suspension_points
+        .iter()
+        .map(|live_here| renumber_bitset(&live_here, &live_locals))
+        .collect();
+
+    let storage_conflicts = compute_storage_conflicts(
+        body,
+        &live_locals,
+        &ignored,
+        storage_live,
+        storage_live_analysis);
+
+    LivenessInfo {
+        live_locals,
+        live_locals_at_suspension_points,
+        storage_conflicts,
+        storage_liveness: storage_liveness_map,
+    }
+}
+
+/// Renumbers the items present in `stored_locals` and applies the renumbering
+/// to `input`.
+///
+/// For example, if `stored_locals = [1, 3, 5]`, this would be renumbered to
+/// `[0, 1, 2]`. Thus, if `input = [3, 5]` we would return `[1, 2]`.
+fn renumber_bitset(input: &BitSet<Local>, stored_locals: &liveness::LiveVarSet)
+-> BitSet<GeneratorSavedLocal> {
+    assert!(stored_locals.superset(&input), "{:?} not a superset of {:?}", stored_locals, input);
+    let mut out = BitSet::new_empty(stored_locals.count());
+    for (idx, local) in stored_locals.iter().enumerate() {
+        let saved_local = GeneratorSavedLocal::from(idx);
+        if input.contains(local) {
+            out.insert(saved_local);
+        }
+    }
+    debug!("renumber_bitset({:?}, {:?}) => {:?}", input, stored_locals, out);
+    out
+}
+
+/// For every saved local, looks for which locals are StorageLive at the same
+/// time. Generates a bitset for every local of all the other locals that may be
+/// StorageLive simultaneously with that local. This is used in the layout
+/// computation; see `GeneratorLayout` for more.
+fn compute_storage_conflicts(
+    body: &'mir Body<'tcx>,
+    stored_locals: &liveness::LiveVarSet,
+    ignored: &StorageIgnored,
+    storage_live: DataflowResults<'tcx, MaybeStorageLive<'mir, 'tcx>>,
+    _storage_live_analysis: MaybeStorageLive<'mir, 'tcx>,
+) -> BitMatrix<GeneratorSavedLocal, GeneratorSavedLocal> {
+    assert_eq!(body.local_decls.len(), ignored.0.domain_size());
+    assert_eq!(body.local_decls.len(), stored_locals.domain_size());
+    debug!("compute_storage_conflicts({:?})", body.span);
+    debug!("ignored = {:?}", ignored.0);
+
+    // Storage ignored locals are not eligible for overlap, since their storage
+    // is always live.
+    let mut ineligible_locals = ignored.0.clone();
+    ineligible_locals.intersect(&stored_locals);
+
+    // Compute the storage conflicts for all eligible locals.
+    let mut visitor = StorageConflictVisitor {
+        body,
+        stored_locals: &stored_locals,
+        local_conflicts: BitMatrix::from_row_n(&ineligible_locals, body.local_decls.len())
+    };
+    let mut state = FlowAtLocation::new(storage_live);
+    visitor.analyze_results(&mut state);
+    let local_conflicts = visitor.local_conflicts;
+
+    // Compress the matrix using only stored locals (Local -> GeneratorSavedLocal).
+    //
+    // NOTE: Today we store a full conflict bitset for every local. Technically
+    // this is twice as many bits as we need, since the relation is symmetric.
+    // However, in practice these bitsets are not usually large. The layout code
+    // also needs to keep track of how many conflicts each local has, so it's
+    // simpler to keep it this way for now.
+    let mut storage_conflicts = BitMatrix::new(stored_locals.count(), stored_locals.count());
+    for (idx_a, local_a) in stored_locals.iter().enumerate() {
+        let saved_local_a = GeneratorSavedLocal::new(idx_a);
+        if ineligible_locals.contains(local_a) {
+            // Conflicts with everything.
+            storage_conflicts.insert_all_into_row(saved_local_a);
+        } else {
+            // Keep overlap information only for stored locals.
+            for (idx_b, local_b) in stored_locals.iter().enumerate() {
+                let saved_local_b = GeneratorSavedLocal::new(idx_b);
+                if local_conflicts.contains(local_a, local_b) {
+                    storage_conflicts.insert(saved_local_a, saved_local_b);
+                }
+            }
         }
     }
+    storage_conflicts
+}
+
+struct StorageConflictVisitor<'body, 'tcx: 'body, 's> {
+    body: &'body Body<'tcx>,
+    stored_locals: &'s liveness::LiveVarSet,
+    // FIXME(tmandry): Consider using sparse bitsets here once we have good
+    // benchmarks for generators.
+    local_conflicts: BitMatrix<Local, Local>,
+}
+
+impl<'body, 'tcx: 'body, 's> DataflowResultsConsumer<'body, 'tcx>
+for StorageConflictVisitor<'body, 'tcx, 's> {
+    type FlowState = FlowAtLocation<'tcx, MaybeStorageLive<'body, 'tcx>>;
+
+    fn body(&self) -> &'body Body<'tcx> {
+        self.body
+    }
+
+    fn visit_block_entry(&mut self,
+                         block: BasicBlock,
+                         flow_state: &Self::FlowState) {
+        // statement_index is only used for logging, so this is fine.
+        self.apply_state(flow_state, Location { block, statement_index: 0 });
+    }
 
-    // The generator argument is ignored
-    set.remove(self_arg());
+    fn visit_statement_entry(&mut self,
+                             loc: Location,
+                             _stmt: &Statement<'tcx>,
+                             flow_state: &Self::FlowState) {
+        self.apply_state(flow_state, loc);
+    }
 
-    (set, storage_liveness_map, suspending_blocks)
+    fn visit_terminator_entry(&mut self,
+                              loc: Location,
+                              _term: &Terminator<'tcx>,
+                              flow_state: &Self::FlowState) {
+        self.apply_state(flow_state, loc);
+    }
+}
+
+impl<'body, 'tcx: 'body, 's> StorageConflictVisitor<'body, 'tcx, 's> {
+    fn apply_state(&mut self,
+                   flow_state: &FlowAtLocation<'tcx, MaybeStorageLive<'body, 'tcx>>,
+                   loc: Location) {
+        // Ignore unreachable blocks.
+        match self.body.basic_blocks()[loc.block].terminator().kind {
+            TerminatorKind::Unreachable => return,
+            _ => (),
+        };
+
+        let mut eligible_storage_live = flow_state.as_dense().clone();
+        eligible_storage_live.intersect(&self.stored_locals);
+
+        for local in eligible_storage_live.iter() {
+            self.local_conflicts.union_row_with(&eligible_storage_live, local);
+        }
+
+        if eligible_storage_live.count() > 1 {
+            trace!("at {:?}, eligible_storage_live={:?}", loc, eligible_storage_live);
+        }
+    }
 }
 
 fn compute_layout<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
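
`renumber_bitset` above re-expresses a set of MIR locals in terms of their positions within the ordered set of saved locals, so the per-suspension-point sets and the storage-conflict matrix can be indexed by GeneratorSavedLocal. A small stand-alone analogue using BTreeSet and usize indices instead of rustc's BitSet and newtyped indices:

use std::collections::BTreeSet;

fn renumber(input: &BTreeSet<usize>, stored_locals: &BTreeSet<usize>) -> BTreeSet<usize> {
    assert!(input.is_subset(stored_locals));
    stored_locals
        .iter()
        .enumerate()
        // Position within the saved-locals set plays the role of GeneratorSavedLocal.
        .filter(|(_, local)| input.contains(*local))
        .map(|(saved_idx, _)| saved_idx)
        .collect()
}

fn main() {
    // stored_locals = {1, 3, 5} are renumbered to saved locals {0, 1, 2},
    // so input = {3, 5} maps to {1, 2}, as in the doc comment above.
    let stored: BTreeSet<usize> = [1, 3, 5].into_iter().collect();
    let input: BTreeSet<usize> = [3, 5].into_iter().collect();
    let expected: BTreeSet<usize> = [1, 2].into_iter().collect();
    assert_eq!(renumber(&input, &stored), expected);
}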
@@ -503,14 +683,15 @@ fn compute_layout<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
                             upvars: &Vec<Ty<'tcx>>,
                             interior: Ty<'tcx>,
                             movable: bool,
-                            mir: &mut Body<'tcx>)
+                            body: &mut Body<'tcx>)
     -> (FxHashMap<Local, (Ty<'tcx>, VariantIdx, usize)>,
         GeneratorLayout<'tcx>,
         FxHashMap<BasicBlock, liveness::LiveVarSet>)
 {
     // Use a liveness analysis to compute locals which are live across a suspension point
-    let (live_locals, storage_liveness, suspending_blocks) =
-        locals_live_across_suspend_points(tcx, mir, source, movable);
+    let LivenessInfo {
+        live_locals, live_locals_at_suspension_points, storage_conflicts, storage_liveness
+    } = locals_live_across_suspend_points(tcx, body, source, movable);
 
     // Erase regions from the types passed in from typeck so we can compare them with
     // MIR types
@@ -520,7 +701,7 @@ fn compute_layout<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
         _ => bug!(),
     };
 
-    for (local, decl) in mir.local_decls.iter_enumerated() {
+    for (local, decl) in body.local_decls.iter_enumerated() {
         // Ignore locals which are internal or not live
         if !live_locals.contains(local) || decl.internal {
             continue;
@@ -529,7 +710,7 @@ fn compute_layout<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
         // Sanity check that typeck knows about the type of locals which are
         // live across a suspension point
         if !allowed.contains(&decl.ty) && !allowed_upvars.contains(&decl.ty) {
-            span_bug!(mir.span,
+            span_bug!(body.span,
                       "Broken MIR: generator contains type {} in MIR, \
                        but typeck only knows about {}",
                       decl.ty,
@@ -537,51 +718,61 @@ fn compute_layout<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
         }
     }
 
-    let dummy_local = LocalDecl::new_internal(tcx.mk_unit(), mir.span);
-
-    // Gather live locals and their indices replacing values in mir.local_decls with a dummy
-    // to avoid changing local indices
-    let live_decls = live_locals.iter().map(|local| {
-        let var = mem::replace(&mut mir.local_decls[local], dummy_local.clone());
-        (local, var)
-    });
+    let dummy_local = LocalDecl::new_internal(tcx.mk_unit(), body.span);
+
+    // Gather live locals and their indices replacing values in body.local_decls
+    // with a dummy to avoid changing local indices.
+    let mut locals = IndexVec::<GeneratorSavedLocal, _>::new();
+    let mut tys = IndexVec::<GeneratorSavedLocal, _>::new();
+    let mut decls = IndexVec::<GeneratorSavedLocal, _>::new();
+    for (idx, local) in live_locals.iter().enumerate() {
+        let var = mem::replace(&mut body.local_decls[local], dummy_local.clone());
+        locals.push(local);
+        tys.push(var.ty);
+        decls.push(var);
+        debug!("generator saved local {:?} => {:?}", GeneratorSavedLocal::from(idx), local);
+    }
 
-    // For now we will access everything via variant #3, leaving empty variants
-    // for the UNRESUMED, RETURNED, and POISONED states.
-    // If there were a yield-less generator without a variant #3, it would not
-    // have any vars to remap, so we would never use this.
-    let variant_index = VariantIdx::new(3);
+    // Leave empty variants for the UNRESUMED, RETURNED, and POISONED states.
+    const RESERVED_VARIANTS: usize = 3;
 
+    // Build the generator variant field list.
     // Create a map from local indices to generator struct indices.
-    // We also create a vector of the LocalDecls of these locals.
+    let mut variant_fields: IndexVec<VariantIdx, IndexVec<Field, GeneratorSavedLocal>> =
+        iter::repeat(IndexVec::new()).take(RESERVED_VARIANTS).collect();
     let mut remap = FxHashMap::default();
-    let mut decls = IndexVec::new();
-    for (idx, (local, var)) in live_decls.enumerate() {
-        remap.insert(local, (var.ty, variant_index, idx));
-        decls.push(var);
+    for (suspension_point_idx, live_locals) in live_locals_at_suspension_points.iter().enumerate() {
+        let variant_index = VariantIdx::from(RESERVED_VARIANTS + suspension_point_idx);
+        let mut fields = IndexVec::new();
+        for (idx, saved_local) in live_locals.iter().enumerate() {
+            fields.push(saved_local);
+            // Note that if a field is included in multiple variants, we will
+            // just use the first one here. That's fine; fields do not move
+            // around inside generators, so it doesn't matter which variant
+            // index we access them by.
+            remap.entry(locals[saved_local]).or_insert((tys[saved_local], variant_index, idx));
+        }
+        variant_fields.push(fields);
     }
-    let field_tys = decls.iter().map(|field| field.ty).collect::<IndexVec<_, _>>();
-
-    // Put every var in each variant, for now.
-    let all_vars = (0..field_tys.len()).map(GeneratorSavedLocal::from).collect();
-    let empty_variants = iter::repeat(IndexVec::new()).take(3);
-    let state_variants = iter::repeat(all_vars).take(suspending_blocks.count());
+    debug!("generator variant_fields = {:?}", variant_fields);
+    debug!("generator storage_conflicts = {:#?}", storage_conflicts);
 
     let layout = GeneratorLayout {
-        field_tys,
-        variant_fields: empty_variants.chain(state_variants).collect(),
+        field_tys: tys,
+        variant_fields,
+        storage_conflicts,
         __local_debuginfo_codegen_only_do_not_use: decls,
     };
 
     (remap, layout, storage_liveness)
 }
 
-fn insert_switch<'a, 'tcx>(mir: &mut Body<'tcx>,
+fn insert_switch<'a, 'tcx>(body: &mut Body<'tcx>,
                            cases: Vec<(usize, BasicBlock)>,
                            transform: &TransformVisitor<'a, 'tcx>,
                            default: TerminatorKind<'tcx>) {
-    let default_block = insert_term_block(mir, default);
-    let (assign, discr) = transform.get_discr(mir);
+    let default_block = insert_term_block(body, default);
+    let (assign, discr) = transform.get_discr(body);
     let switch = TerminatorKind::SwitchInt {
         discr: Operand::Move(discr),
         switch_ty: transform.discr_ty,
@@ -589,8 +780,8 @@ fn insert_switch<'a, 'tcx>(mir: &mut Body<'tcx>,
         targets: cases.iter().map(|&(_, d)| d).chain(iter::once(default_block)).collect(),
     };
 
-    let source_info = source_info(mir);
-    mir.basic_blocks_mut().raw.insert(0, BasicBlockData {
+    let source_info = source_info(body);
+    body.basic_blocks_mut().raw.insert(0, BasicBlockData {
         statements: vec![assign],
         terminator: Some(Terminator {
             source_info,
@@ -599,7 +790,7 @@ fn insert_switch<'a, 'tcx>(mir: &mut Body<'tcx>,
         is_cleanup: false,
     });
 
-    let blocks = mir.basic_blocks_mut().iter_mut();
+    let blocks = body.basic_blocks_mut().iter_mut();
 
     for target in blocks.flat_map(|b| b.terminator_mut().successors_mut()) {
         *target = BasicBlock::new(target.index() + 1);
@@ -608,7 +799,7 @@ fn insert_switch<'a, 'tcx>(mir: &mut Body<'tcx>,
 
 fn elaborate_generator_drops<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
                                        def_id: DefId,
-                                       mir: &mut Body<'tcx>) {
+                                       body: &mut Body<'tcx>) {
     use crate::util::elaborate_drops::{elaborate_drop, Unwind};
     use crate::util::patch::MirPatch;
     use crate::shim::DropShimElaborator;
@@ -621,13 +812,13 @@ fn elaborate_generator_drops<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
     let gen = self_arg();
 
     let mut elaborator = DropShimElaborator {
-        mir: mir,
-        patch: MirPatch::new(mir),
+        body: body,
+        patch: MirPatch::new(body),
         tcx,
         param_env
     };
 
-    for (block, block_data) in mir.basic_blocks().iter_enumerated() {
+    for (block, block_data) in body.basic_blocks().iter_enumerated() {
         let (target, unwind, source_info) = match block_data.terminator() {
             &Terminator {
                 source_info,
@@ -654,7 +845,7 @@ fn elaborate_generator_drops<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
             block,
         );
     }
-    elaborator.patch.apply(mir);
+    elaborator.patch.apply(body);
 }
 
 fn create_generator_drop_shim<'a, 'tcx>(
@@ -663,22 +854,22 @@ fn create_generator_drop_shim<'a, 'tcx>(
                 def_id: DefId,
                 source: MirSource<'tcx>,
                 gen_ty: Ty<'tcx>,
-                mir: &Body<'tcx>,
+                body: &Body<'tcx>,
                 drop_clean: BasicBlock) -> Body<'tcx> {
-    let mut mir = mir.clone();
+    let mut body = body.clone();
 
-    let source_info = source_info(&mir);
+    let source_info = source_info(&body);
 
-    let mut cases = create_cases(&mut mir, transform, |point| point.drop);
+    let mut cases = create_cases(&mut body, transform, |point| point.drop);
 
     cases.insert(0, (UNRESUMED, drop_clean));
 
     // The returned state and the poisoned state fall through to the default
     // case which is just to return
 
-    insert_switch(&mut mir, cases, &transform, TerminatorKind::Return);
+    insert_switch(&mut body, cases, &transform, TerminatorKind::Return);
 
-    for block in mir.basic_blocks_mut() {
+    for block in body.basic_blocks_mut() {
         let kind = &mut block.terminator_mut().kind;
         if let TerminatorKind::GeneratorDrop = *kind {
             *kind = TerminatorKind::Return;
@@ -686,7 +877,7 @@ fn create_generator_drop_shim<'a, 'tcx>(
     }
 
     // Replace the return variable
-    mir.local_decls[RETURN_PLACE] = LocalDecl {
+    body.local_decls[RETURN_PLACE] = LocalDecl {
         mutability: Mutability::Mut,
         ty: tcx.mk_unit(),
         user_ty: UserTypeProjections::none(),
@@ -698,10 +889,10 @@ fn create_generator_drop_shim<'a, 'tcx>(
         is_user_variable: None,
     };
 
-    make_generator_state_argument_indirect(tcx, def_id, &mut mir);
+    make_generator_state_argument_indirect(tcx, def_id, &mut body);
 
     // Change the generator argument from &mut to *mut
-    mir.local_decls[self_arg()] = LocalDecl {
+    body.local_decls[self_arg()] = LocalDecl {
         mutability: Mutability::Mut,
         ty: tcx.mk_ptr(ty::TypeAndMut {
             ty: gen_ty,
@@ -717,27 +908,27 @@ fn create_generator_drop_shim<'a, 'tcx>(
     };
     if tcx.sess.opts.debugging_opts.mir_emit_retag {
         // Alias tracking must know we changed the type
-        mir.basic_blocks_mut()[START_BLOCK].statements.insert(0, Statement {
+        body.basic_blocks_mut()[START_BLOCK].statements.insert(0, Statement {
             source_info,
             kind: StatementKind::Retag(RetagKind::Raw, Place::Base(PlaceBase::Local(self_arg()))),
         })
     }
 
-    no_landing_pads(tcx, &mut mir);
+    no_landing_pads(tcx, &mut body);
 
     // Make sure we remove dead blocks to remove
     // unrelated code from the resume part of the function
-    simplify::remove_dead_blocks(&mut mir);
+    simplify::remove_dead_blocks(&mut body);
 
-    dump_mir(tcx, None, "generator_drop", &0, source, &mut mir, |_, _| Ok(()) );
+    dump_mir(tcx, None, "generator_drop", &0, source, &mut body, |_, _| Ok(()) );
 
-    mir
+    body
 }
 
-fn insert_term_block<'tcx>(mir: &mut Body<'tcx>, kind: TerminatorKind<'tcx>) -> BasicBlock {
-    let term_block = BasicBlock::new(mir.basic_blocks().len());
-    let source_info = source_info(mir);
-    mir.basic_blocks_mut().push(BasicBlockData {
+fn insert_term_block<'tcx>(body: &mut Body<'tcx>, kind: TerminatorKind<'tcx>) -> BasicBlock {
+    let term_block = BasicBlock::new(body.basic_blocks().len());
+    let source_info = source_info(body);
+    body.basic_blocks_mut().push(BasicBlockData {
         statements: Vec::new(),
         terminator: Some(Terminator {
             source_info,
@@ -749,12 +940,12 @@ fn insert_term_block<'tcx>(mir: &mut Body<'tcx>, kind: TerminatorKind<'tcx>) ->
 }
 
 fn insert_panic_block<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
-                                mir: &mut Body<'tcx>,
+                                body: &mut Body<'tcx>,
                                 message: AssertMessage<'tcx>) -> BasicBlock {
-    let assert_block = BasicBlock::new(mir.basic_blocks().len());
+    let assert_block = BasicBlock::new(body.basic_blocks().len());
     let term = TerminatorKind::Assert {
         cond: Operand::Constant(box Constant {
-            span: mir.span,
+            span: body.span,
             ty: tcx.types.bool,
             user_ty: None,
             literal: ty::Const::from_bool(tcx, false),
@@ -765,8 +956,8 @@ fn insert_panic_block<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
         cleanup: None,
     };
 
-    let source_info = source_info(mir);
-    mir.basic_blocks_mut().push(BasicBlockData {
+    let source_info = source_info(body);
+    body.basic_blocks_mut().push(BasicBlockData {
         statements: Vec::new(),
         terminator: Some(Terminator {
             source_info,
@@ -783,9 +974,9 @@ fn create_generator_resume_function<'a, 'tcx>(
         transform: TransformVisitor<'a, 'tcx>,
         def_id: DefId,
         source: MirSource<'tcx>,
-        mir: &mut Body<'tcx>) {
+        body: &mut Body<'tcx>) {
     // Poison the generator when it unwinds
-    for block in mir.basic_blocks_mut() {
+    for block in body.basic_blocks_mut() {
         let source_info = block.terminator().source_info;
         if let &TerminatorKind::Resume = &block.terminator().kind {
             block.statements.push(
@@ -793,7 +984,7 @@ fn create_generator_resume_function<'a, 'tcx>(
         }
     }
 
-    let mut cases = create_cases(mir, &transform, |point| Some(point.resume));
+    let mut cases = create_cases(body, &transform, |point| Some(point.resume));
 
     use rustc::mir::interpret::InterpError::{
         GeneratorResumedAfterPanic,
@@ -803,43 +994,43 @@ fn create_generator_resume_function<'a, 'tcx>(
     // Jump to the entry point on the unresumed state
     cases.insert(0, (UNRESUMED, BasicBlock::new(0)));
     // Panic when resumed on the returned state
-    cases.insert(1, (RETURNED, insert_panic_block(tcx, mir, GeneratorResumedAfterReturn)));
+    cases.insert(1, (RETURNED, insert_panic_block(tcx, body, GeneratorResumedAfterReturn)));
     // Panic when resumed on the poisoned state
-    cases.insert(2, (POISONED, insert_panic_block(tcx, mir, GeneratorResumedAfterPanic)));
+    cases.insert(2, (POISONED, insert_panic_block(tcx, body, GeneratorResumedAfterPanic)));
 
-    insert_switch(mir, cases, &transform, TerminatorKind::Unreachable);
+    insert_switch(body, cases, &transform, TerminatorKind::Unreachable);
 
-    make_generator_state_argument_indirect(tcx, def_id, mir);
-    make_generator_state_argument_pinned(tcx, mir);
+    make_generator_state_argument_indirect(tcx, def_id, body);
+    make_generator_state_argument_pinned(tcx, body);
 
-    no_landing_pads(tcx, mir);
+    no_landing_pads(tcx, body);
 
     // Make sure we remove dead blocks to remove
     // unrelated code from the drop part of the function
-    simplify::remove_dead_blocks(mir);
+    simplify::remove_dead_blocks(body);
 
-    dump_mir(tcx, None, "generator_resume", &0, source, mir, |_, _| Ok(()) );
+    dump_mir(tcx, None, "generator_resume", &0, source, body, |_, _| Ok(()) );
 }
 
-fn source_info<'a, 'tcx>(mir: &Body<'tcx>) -> SourceInfo {
+fn source_info<'tcx>(body: &Body<'tcx>) -> SourceInfo {
     SourceInfo {
-        span: mir.span,
+        span: body.span,
         scope: OUTERMOST_SOURCE_SCOPE,
     }
 }
 
-fn insert_clean_drop<'a, 'tcx>(mir: &mut Body<'tcx>) -> BasicBlock {
-    let return_block = insert_term_block(mir, TerminatorKind::Return);
+fn insert_clean_drop<'tcx>(body: &mut Body<'tcx>) -> BasicBlock {
+    let return_block = insert_term_block(body, TerminatorKind::Return);
 
     // Create a block to destroy an unresumed generator. This can only destroy upvars.
-    let drop_clean = BasicBlock::new(mir.basic_blocks().len());
+    let drop_clean = BasicBlock::new(body.basic_blocks().len());
     let term = TerminatorKind::Drop {
         location: Place::Base(PlaceBase::Local(self_arg())),
         target: return_block,
         unwind: None,
     };
-    let source_info = source_info(mir);
-    mir.basic_blocks_mut().push(BasicBlockData {
+    let source_info = source_info(body);
+    body.basic_blocks_mut().push(BasicBlockData {
         statements: Vec::new(),
         terminator: Some(Terminator {
             source_info,
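
For orientation: the RETURNED/POISONED panic blocks wired up above are what a user hits when resuming an already-finished generator. A minimal sketch on a nightly of this era, using the unstable generator features (illustrative only, not part of the patch):

    #![feature(generators, generator_trait)]
    use std::ops::{Generator, GeneratorState};
    use std::pin::Pin;

    fn main() {
        let mut g = || {
            yield 1;
            "done"
        };
        assert_eq!(Pin::new(&mut g).resume(), GeneratorState::Yielded(1));
        assert_eq!(Pin::new(&mut g).resume(), GeneratorState::Complete("done"));
        // A third resume takes the RETURNED arm of the switch built above and
        // panics with a "generator resumed after completion" message.
        // Pin::new(&mut g).resume();
    }
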
@@ -851,20 +1042,20 @@ fn insert_clean_drop<'a, 'tcx>(mir: &mut Body<'tcx>) -> BasicBlock {
     drop_clean
 }
 
-fn create_cases<'a, 'tcx, F>(mir: &mut Body<'tcx>,
+fn create_cases<'a, 'tcx, F>(body: &mut Body<'tcx>,
                           transform: &TransformVisitor<'a, 'tcx>,
                           target: F) -> Vec<(usize, BasicBlock)>
     where F: Fn(&SuspensionPoint) -> Option<BasicBlock> {
-    let source_info = source_info(mir);
+    let source_info = source_info(body);
 
     transform.suspension_points.iter().filter_map(|point| {
         // Find the target for this suspension point, if applicable
         target(point).map(|target| {
-            let block = BasicBlock::new(mir.basic_blocks().len());
+            let block = BasicBlock::new(body.basic_blocks().len());
             let mut statements = Vec::new();
 
             // Create StorageLive instructions for locals with live storage
-            for i in 0..(mir.local_decls.len()) {
+            for i in 0..(body.local_decls.len()) {
                 let l = Local::new(i);
                 if point.storage_liveness.contains(l) && !transform.remap.contains_key(&l) {
                     statements.push(Statement {
@@ -875,7 +1066,7 @@ fn create_cases<'a, 'tcx, F>(mir: &mut Body<'tcx>,
             }
 
             // Then jump to the real target
-            mir.basic_blocks_mut().push(BasicBlockData {
+            body.basic_blocks_mut().push(BasicBlockData {
                 statements,
                 terminator: Some(Terminator {
                     source_info,
@@ -895,20 +1086,20 @@ impl MirPass for StateTransform {
     fn run_pass<'a, 'tcx>(&self,
                     tcx: TyCtxt<'a, 'tcx, 'tcx>,
                     source: MirSource<'tcx>,
-                    mir: &mut Body<'tcx>) {
-        let yield_ty = if let Some(yield_ty) = mir.yield_ty {
+                    body: &mut Body<'tcx>) {
+        let yield_ty = if let Some(yield_ty) = body.yield_ty {
             yield_ty
         } else {
             // This only applies to generators
             return
         };
 
-        assert!(mir.generator_drop.is_none());
+        assert!(body.generator_drop.is_none());
 
         let def_id = source.def_id();
 
         // The first argument is the generator type passed by value
-        let gen_ty = mir.local_decls.raw[1].ty;
+        let gen_ty = body.local_decls.raw[1].ty;
 
         // Get the interior types and substs which typeck computed
         let (upvars, interior, discr_ty, movable) = match gen_ty.sty {
@@ -926,13 +1117,13 @@ fn run_pass<'a, 'tcx>(&self,
         let state_adt_ref = tcx.adt_def(state_did);
         let state_substs = tcx.intern_substs(&[
             yield_ty.into(),
-            mir.return_ty().into(),
+            body.return_ty().into(),
         ]);
         let ret_ty = tcx.mk_adt(state_adt_ref, state_substs);
 
         // We rename RETURN_PLACE which has type mir.return_ty to new_ret_local
         // RETURN_PLACE then is a fresh unused local with type ret_ty.
-        let new_ret_local = replace_result_variable(ret_ty, mir);
+        let new_ret_local = replace_result_variable(ret_ty, body);
 
         // Extract locals which are live across suspension point into `layout`
         // `remap` gives a mapping from local indices onto generator struct indices
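
The state ADT referenced here (`state_adt_ref`) is the standard library's `GeneratorState` enum; conceptually the `ret_ty` constructed above is that enum instantiated with the generator's yield and return types (shown for orientation only, not part of the patch):

    // std::ops::GeneratorState — `ret_ty` above is roughly
    // GeneratorState<yield_ty, body.return_ty()>.
    pub enum GeneratorState<Y, R> {
        /// The generator suspended with a value.
        Yielded(Y),
        /// The generator completed with a return value.
        Complete(R),
    }
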
@@ -943,7 +1134,7 @@ fn run_pass<'a, 'tcx>(&self,
             &upvars,
             interior,
             movable,
-            mir);
+            body);
 
         // Run the transformation which converts Places from Local to generator struct
         // accesses for locals in `remap`.
@@ -959,27 +1150,27 @@ fn run_pass<'a, 'tcx>(&self,
             new_ret_local,
             discr_ty,
         };
-        transform.visit_body(mir);
+        transform.visit_body(body);
 
         // Update our MIR struct to reflect the changes we've made
-        mir.yield_ty = None;
-        mir.arg_count = 1;
-        mir.spread_arg = None;
-        mir.generator_layout = Some(layout);
+        body.yield_ty = None;
+        body.arg_count = 1;
+        body.spread_arg = None;
+        body.generator_layout = Some(layout);
 
         // Insert `drop(generator_struct)` which is used to drop upvars for generators in
         // the unresumed state.
         // This is expanded to a drop ladder in `elaborate_generator_drops`.
-        let drop_clean = insert_clean_drop(mir);
+        let drop_clean = insert_clean_drop(body);
 
-        dump_mir(tcx, None, "generator_pre-elab", &0, source, mir, |_, _| Ok(()) );
+        dump_mir(tcx, None, "generator_pre-elab", &0, source, body, |_, _| Ok(()) );
 
         // Expand `drop(generator_struct)` to a drop ladder which destroys upvars.
         // If any upvars are moved out of, drop elaboration will handle upvar destruction.
         // However we need to also elaborate the code generated by `insert_clean_drop`.
-        elaborate_generator_drops(tcx, def_id, mir);
+        elaborate_generator_drops(tcx, def_id, body);
 
-        dump_mir(tcx, None, "generator_post-transform", &0, source, mir, |_, _| Ok(()) );
+        dump_mir(tcx, None, "generator_post-transform", &0, source, body, |_, _| Ok(()) );
 
         // Create a copy of our MIR and use it to create the drop shim for the generator
         let drop_shim = create_generator_drop_shim(tcx,
@@ -987,12 +1178,12 @@ fn run_pass<'a, 'tcx>(&self,
             def_id,
             source,
             gen_ty,
-            &mir,
+            &body,
             drop_clean);
 
-        mir.generator_drop = Some(box drop_shim);
+        body.generator_drop = Some(box drop_shim);
 
         // Create the Generator::resume function
-        create_generator_resume_function(tcx, transform, def_id, source, mir);
+        create_generator_resume_function(tcx, transform, def_id, source, body);
     }
 }
index 0fac6868f57d3cd5900fbf51e5609a5b5d5325dd..651910c64924dd801144d2cbd1174c539ad2a1e2 100644 (file)
@@ -41,9 +41,9 @@ impl MirPass for Inline {
     fn run_pass<'a, 'tcx>(&self,
                           tcx: TyCtxt<'a, 'tcx, 'tcx>,
                           source: MirSource<'tcx>,
-                          mir: &mut Body<'tcx>) {
+                          body: &mut Body<'tcx>) {
         if tcx.sess.opts.debugging_opts.mir_opt_level >= 2 {
-            Inliner { tcx, source }.run_pass(mir);
+            Inliner { tcx, source }.run_pass(body);
         }
     }
 }
@@ -54,7 +54,7 @@ struct Inliner<'a, 'tcx: 'a> {
 }
 
 impl<'a, 'tcx> Inliner<'a, 'tcx> {
-    fn run_pass(&self, caller_mir: &mut Body<'tcx>) {
+    fn run_pass(&self, caller_body: &mut Body<'tcx>) {
         // Keep a queue of callsites to try inlining on. We take
         // advantage of the fact that queries detect cycles here to
         // allow us to try and fetch the fully optimized MIR of a
@@ -76,10 +76,10 @@ fn run_pass(&self, caller_mir: &mut Body<'tcx>) {
         if self.tcx.hir().body_owner_kind_by_hir_id(id).is_fn_or_closure()
             && self.source.promoted.is_none()
         {
-            for (bb, bb_data) in caller_mir.basic_blocks().iter_enumerated() {
+            for (bb, bb_data) in caller_body.basic_blocks().iter_enumerated() {
                 if let Some(callsite) = self.get_valid_function_call(bb,
                                                                     bb_data,
-                                                                    caller_mir,
+                                                                    caller_body,
                                                                     param_env) {
                     callsites.push_back(callsite);
                 }
@@ -103,7 +103,7 @@ fn run_pass(&self, caller_mir: &mut Body<'tcx>) {
                 let self_node_id = self.tcx.hir().as_local_node_id(self.source.def_id()).unwrap();
                 let callee_node_id = self.tcx.hir().as_local_node_id(callsite.callee);
 
-                let callee_mir = if let Some(callee_node_id) = callee_node_id {
+                let callee_body = if let Some(callee_node_id) = callee_node_id {
                     // Avoid a cycle here by only using `optimized_mir` if we have
                     // a lower node id than the callee. This ensures that the callee will
                     // not inline us. This trick only works without incremental compilation.
@@ -120,29 +120,29 @@ fn run_pass(&self, caller_mir: &mut Body<'tcx>) {
                     self.tcx.optimized_mir(callsite.callee)
                 };
 
-                let callee_mir = if self.consider_optimizing(callsite, callee_mir) {
+                let callee_body = if self.consider_optimizing(callsite, callee_body) {
                     self.tcx.subst_and_normalize_erasing_regions(
                         &callsite.substs,
                         param_env,
-                        callee_mir,
+                        callee_body,
                     )
                 } else {
                     continue;
                 };
 
-                let start = caller_mir.basic_blocks().len();
-                debug!("attempting to inline callsite {:?} - mir={:?}", callsite, callee_mir);
-                if !self.inline_call(callsite, caller_mir, callee_mir) {
+                let start = caller_body.basic_blocks().len();
+                debug!("attempting to inline callsite {:?} - body={:?}", callsite, callee_body);
+                if !self.inline_call(callsite, caller_body, callee_body) {
                     debug!("attempting to inline callsite {:?} - failure", callsite);
                     continue;
                 }
                 debug!("attempting to inline callsite {:?} - success", callsite);
 
                 // Add callsites from inlined function
-                for (bb, bb_data) in caller_mir.basic_blocks().iter_enumerated().skip(start) {
+                for (bb, bb_data) in caller_body.basic_blocks().iter_enumerated().skip(start) {
                     if let Some(new_callsite) = self.get_valid_function_call(bb,
                                                                              bb_data,
-                                                                             caller_mir,
+                                                                             caller_body,
                                                                              param_env) {
                         // Don't inline the same function multiple times.
                         if callsite.callee != new_callsite.callee {
@@ -163,15 +163,15 @@ fn run_pass(&self, caller_mir: &mut Body<'tcx>) {
         // Simplify if we inlined anything.
         if changed {
             debug!("Running simplify cfg on {:?}", self.source);
-            CfgSimplifier::new(caller_mir).simplify();
-            remove_dead_blocks(caller_mir);
+            CfgSimplifier::new(caller_body).simplify();
+            remove_dead_blocks(caller_body);
         }
     }
 
     fn get_valid_function_call(&self,
                                bb: BasicBlock,
                                bb_data: &BasicBlockData<'tcx>,
-                               caller_mir: &Body<'tcx>,
+                               caller_body: &Body<'tcx>,
                                param_env: ParamEnv<'tcx>,
     ) -> Option<CallSite<'tcx>> {
         // Don't inline calls that are in cleanup blocks.
@@ -180,7 +180,7 @@ fn get_valid_function_call(&self,
         // Only consider direct calls to functions
         let terminator = bb_data.terminator();
         if let TerminatorKind::Call { func: ref op, .. } = terminator.kind {
-            if let ty::FnDef(callee_def_id, substs) = op.ty(caller_mir, self.tcx).sty {
+            if let ty::FnDef(callee_def_id, substs) = op.ty(caller_body, self.tcx).sty {
                 let instance = Instance::resolve(self.tcx,
                                                  param_env,
                                                  callee_def_id,
@@ -204,19 +204,19 @@ fn get_valid_function_call(&self,
 
     fn consider_optimizing(&self,
                            callsite: CallSite<'tcx>,
-                           callee_mir: &Body<'tcx>)
+                           callee_body: &Body<'tcx>)
                            -> bool
     {
         debug!("consider_optimizing({:?})", callsite);
-        self.should_inline(callsite, callee_mir)
+        self.should_inline(callsite, callee_body)
             && self.tcx.consider_optimizing(|| format!("Inline {:?} into {:?}",
-                                                       callee_mir.span,
+                                                       callee_body.span,
                                                        callsite))
     }
 
     fn should_inline(&self,
                      callsite: CallSite<'tcx>,
-                     callee_mir: &Body<'tcx>)
+                     callee_body: &Body<'tcx>)
                      -> bool
     {
         debug!("should_inline({:?})", callsite);
@@ -224,13 +224,13 @@ fn should_inline(&self,
 
         // Don't inline closures that have capture debuginfo
         // FIXME: Handle closures better
-        if callee_mir.__upvar_debuginfo_codegen_only_do_not_use.len() > 0 {
+        if callee_body.__upvar_debuginfo_codegen_only_do_not_use.len() > 0 {
             debug!("    upvar debuginfo present - not inlining");
             return false;
         }
 
         // Cannot inline generators which haven't been transformed yet
-        if callee_mir.yield_ty.is_some() {
+        if callee_body.yield_ty.is_some() {
             debug!("    yield ty present - not inlining");
             return false;
         }
@@ -281,7 +281,7 @@ fn should_inline(&self,
         // Give a bonus to functions with a small number of blocks,
         // We normally have two or three blocks for even
         // very small functions.
-        if callee_mir.basic_blocks().len() <= 3 {
+        if callee_body.basic_blocks().len() <= 3 {
             threshold += threshold / 4;
         }
         debug!("    final inline threshold = {}", threshold);
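
As a worked example: assuming a base threshold of 50 (the pass's default, defined elsewhere in this file), a callee with at most three basic blocks is judged against 50 + 50/4 = 62 (integer division), i.e. it receives a 25% cost allowance bonus.
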
@@ -296,10 +296,10 @@ fn should_inline(&self,
         // Traverse the MIR manually so we can account for the effects of
         // inlining on the CFG.
         let mut work_list = vec![START_BLOCK];
-        let mut visited = BitSet::new_empty(callee_mir.basic_blocks().len());
+        let mut visited = BitSet::new_empty(callee_body.basic_blocks().len());
         while let Some(bb) = work_list.pop() {
             if !visited.insert(bb.index()) { continue; }
-            let blk = &callee_mir.basic_blocks()[bb];
+            let blk = &callee_body.basic_blocks()[bb];
 
             for stmt in &blk.statements {
                 // Don't count StorageLive/StorageDead in the inlining cost.
@@ -319,7 +319,7 @@ fn should_inline(&self,
                     work_list.push(target);
                     // If the location doesn't actually need dropping, treat it like
                     // a regular goto.
-                    let ty = location.ty(callee_mir, tcx).subst(tcx, callsite.substs).ty;
+                    let ty = location.ty(callee_body, tcx).subst(tcx, callsite.substs).ty;
                     if ty.needs_drop(tcx, param_env) {
                         cost += CALL_PENALTY;
                         if let Some(unwind) = unwind {
@@ -366,8 +366,8 @@ fn should_inline(&self,
 
         let ptr_size = tcx.data_layout.pointer_size.bytes();
 
-        for v in callee_mir.vars_and_temps_iter() {
-            let v = &callee_mir.local_decls[v];
+        for v in callee_body.vars_and_temps_iter() {
+            let v = &callee_body.local_decls[v];
             let ty = v.ty.subst(tcx, callsite.substs);
             // Cost of the var is the size in machine-words, if we know
             // it.
@@ -394,44 +394,44 @@ fn should_inline(&self,
 
     fn inline_call(&self,
                    callsite: CallSite<'tcx>,
-                   caller_mir: &mut Body<'tcx>,
-                   mut callee_mir: Body<'tcx>) -> bool {
-        let terminator = caller_mir[callsite.bb].terminator.take().unwrap();
+                   caller_body: &mut Body<'tcx>,
+                   mut callee_body: Body<'tcx>) -> bool {
+        let terminator = caller_body[callsite.bb].terminator.take().unwrap();
         match terminator.kind {
             // FIXME: Handle inlining of diverging calls
             TerminatorKind::Call { args, destination: Some(destination), cleanup, .. } => {
                 debug!("Inlined {:?} into {:?}", callsite.callee, self.source);
 
-                let mut local_map = IndexVec::with_capacity(callee_mir.local_decls.len());
-                let mut scope_map = IndexVec::with_capacity(callee_mir.source_scopes.len());
-                let mut promoted_map = IndexVec::with_capacity(callee_mir.promoted.len());
+                let mut local_map = IndexVec::with_capacity(callee_body.local_decls.len());
+                let mut scope_map = IndexVec::with_capacity(callee_body.source_scopes.len());
+                let mut promoted_map = IndexVec::with_capacity(callee_body.promoted.len());
 
-                for mut scope in callee_mir.source_scopes.iter().cloned() {
+                for mut scope in callee_body.source_scopes.iter().cloned() {
                     if scope.parent_scope.is_none() {
                         scope.parent_scope = Some(callsite.location.scope);
-                        scope.span = callee_mir.span;
+                        scope.span = callee_body.span;
                     }
 
                     scope.span = callsite.location.span;
 
-                    let idx = caller_mir.source_scopes.push(scope);
+                    let idx = caller_body.source_scopes.push(scope);
                     scope_map.push(idx);
                 }
 
-                for loc in callee_mir.vars_and_temps_iter() {
-                    let mut local = callee_mir.local_decls[loc].clone();
+                for loc in callee_body.vars_and_temps_iter() {
+                    let mut local = callee_body.local_decls[loc].clone();
 
                     local.source_info.scope =
                         scope_map[local.source_info.scope];
                     local.source_info.span = callsite.location.span;
                     local.visibility_scope = scope_map[local.visibility_scope];
 
-                    let idx = caller_mir.local_decls.push(local);
+                    let idx = caller_body.local_decls.push(local);
                     local_map.push(idx);
                 }
 
                 promoted_map.extend(
-                    callee_mir.promoted.iter().cloned().map(|p| caller_mir.promoted.push(p))
+                    callee_body.promoted.iter().cloned().map(|p| caller_body.promoted.push(p))
                 );
 
                 // If the call is something like `a[*i] = f(i)`, where
@@ -465,18 +465,18 @@ fn dest_needs_borrow(place: &Place<'_>) -> bool {
                         BorrowKind::Mut { allow_two_phase_borrow: false },
                         destination.0);
 
-                    let ty = dest.ty(caller_mir, self.tcx);
+                    let ty = dest.ty(caller_body, self.tcx);
 
                     let temp = LocalDecl::new_temp(ty, callsite.location.span);
 
-                    let tmp = caller_mir.local_decls.push(temp);
+                    let tmp = caller_body.local_decls.push(temp);
                     let tmp = Place::Base(PlaceBase::Local(tmp));
 
                     let stmt = Statement {
                         source_info: callsite.location,
                         kind: StatementKind::Assign(tmp.clone(), box dest)
                     };
-                    caller_mir[callsite.bb]
+                    caller_body[callsite.bb]
                         .statements.push(stmt);
                     tmp.deref()
                 } else {
@@ -486,9 +486,9 @@ fn dest_needs_borrow(place: &Place<'_>) -> bool {
                 let return_block = destination.1;
 
                 // Copy the arguments if needed.
-                let args: Vec<_> = self.make_call_args(args, &callsite, caller_mir);
+                let args: Vec<_> = self.make_call_args(args, &callsite, caller_body);
 
-                let bb_len = caller_mir.basic_blocks().len();
+                let bb_len = caller_body.basic_blocks().len();
                 let mut integrator = Integrator {
                     block_idx: bb_len,
                     args: &args,
@@ -503,9 +503,9 @@ fn dest_needs_borrow(place: &Place<'_>) -> bool {
                 };
 
 
-                for (bb, mut block) in callee_mir.basic_blocks_mut().drain_enumerated(..) {
+                for (bb, mut block) in callee_body.basic_blocks_mut().drain_enumerated(..) {
                     integrator.visit_basic_block_data(bb, &mut block);
-                    caller_mir.basic_blocks_mut().push(block);
+                    caller_body.basic_blocks_mut().push(block);
                 }
 
                 let terminator = Terminator {
@@ -513,12 +513,12 @@ fn dest_needs_borrow(place: &Place<'_>) -> bool {
                     kind: TerminatorKind::Goto { target: BasicBlock::new(bb_len) }
                 };
 
-                caller_mir[callsite.bb].terminator = Some(terminator);
+                caller_body[callsite.bb].terminator = Some(terminator);
 
                 true
             }
             kind => {
-                caller_mir[callsite.bb].terminator = Some(Terminator {
+                caller_body[callsite.bb].terminator = Some(Terminator {
                     source_info: terminator.source_info,
                     kind,
                 });
@@ -531,7 +531,7 @@ fn make_call_args(
         &self,
         args: Vec<Operand<'tcx>>,
         callsite: &CallSite<'tcx>,
-        caller_mir: &mut Body<'tcx>,
+        caller_body: &mut Body<'tcx>,
     ) -> Vec<Local> {
         let tcx = self.tcx;
 
@@ -560,12 +560,12 @@ fn make_call_args(
         // and the vector is `[closure_ref, tmp0, tmp1, tmp2]`.
         if tcx.is_closure(callsite.callee) {
             let mut args = args.into_iter();
-            let self_ = self.create_temp_if_necessary(args.next().unwrap(), callsite, caller_mir);
-            let tuple = self.create_temp_if_necessary(args.next().unwrap(), callsite, caller_mir);
+            let self_ = self.create_temp_if_necessary(args.next().unwrap(), callsite, caller_body);
+            let tuple = self.create_temp_if_necessary(args.next().unwrap(), callsite, caller_body);
             assert!(args.next().is_none());
 
             let tuple = Place::Base(PlaceBase::Local(tuple));
-            let tuple_tys = if let ty::Tuple(s) = tuple.ty(caller_mir, tcx).ty.sty {
+            let tuple_tys = if let ty::Tuple(s) = tuple.ty(caller_body, tcx).ty.sty {
                 s
             } else {
                 bug!("Closure arguments are not passed as a tuple");
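
The closure-specific path above exists because closure calls pass their arguments packed into a tuple. A hypothetical illustration of the shape the inliner sees (names illustrative, not part of the patch):

    let f = |a: u32, b: u32| a + b;
    let x = f(1, 2);
    // Before inlining this is roughly `Fn::call(&f, (1, 2))`, so the callsite's
    // arguments are `[closure_ref, (1, 2)]`; the code that follows spills the
    // tuple's fields into temporaries, producing `[closure_ref, tmp0, tmp1]`.
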
@@ -584,13 +584,13 @@ fn make_call_args(
                     ));
 
                     // Spill to a local to make e.g., `tmp0`.
-                    self.create_temp_if_necessary(tuple_field, callsite, caller_mir)
+                    self.create_temp_if_necessary(tuple_field, callsite, caller_body)
                 });
 
             closure_ref_arg.chain(tuple_tmp_args).collect()
         } else {
             args.into_iter()
-                .map(|a| self.create_temp_if_necessary(a, callsite, caller_mir))
+                .map(|a| self.create_temp_if_necessary(a, callsite, caller_body))
                 .collect()
         }
     }
@@ -601,13 +601,13 @@ fn create_temp_if_necessary(
         &self,
         arg: Operand<'tcx>,
         callsite: &CallSite<'tcx>,
-        caller_mir: &mut Body<'tcx>,
+        caller_body: &mut Body<'tcx>,
     ) -> Local {
         // FIXME: Analysis of the usage of the arguments to avoid
         // unnecessary temporaries.
 
         if let Operand::Move(Place::Base(PlaceBase::Local(local))) = arg {
-            if caller_mir.local_kind(local) == LocalKind::Temp {
+            if caller_body.local_kind(local) == LocalKind::Temp {
                 // Reuse the operand if it's a temporary already
                 return local;
             }
@@ -617,16 +617,16 @@ fn create_temp_if_necessary(
         // Otherwise, create a temporary for the arg
         let arg = Rvalue::Use(arg);
 
-        let ty = arg.ty(caller_mir, self.tcx);
+        let ty = arg.ty(caller_body, self.tcx);
 
         let arg_tmp = LocalDecl::new_temp(ty, callsite.location.span);
-        let arg_tmp = caller_mir.local_decls.push(arg_tmp);
+        let arg_tmp = caller_body.local_decls.push(arg_tmp);
 
         let stmt = Statement {
             source_info: callsite.location,
             kind: StatementKind::Assign(Place::Base(PlaceBase::Local(arg_tmp)), box arg),
         };
-        caller_mir[callsite.bb].statements.push(stmt);
+        caller_body[callsite.bb].statements.push(stmt);
         arg_tmp
     }
 }
index 1b92b1acac55d6e46eb11960f330305473cac2a7..2899112b7b542943d2446d847f1f8d67bac4b606 100644 (file)
@@ -15,7 +15,7 @@ impl MirPass for InstCombine {
     fn run_pass<'a, 'tcx>(&self,
                           tcx: TyCtxt<'a, 'tcx, 'tcx>,
                           _: MirSource<'tcx>,
-                          mir: &mut Body<'tcx>) {
+                          body: &mut Body<'tcx>) {
         // We only run when optimizing MIR (at any level).
         if tcx.sess.opts.debugging_opts.mir_opt_level == 0 {
             return
@@ -25,13 +25,13 @@ fn run_pass<'a, 'tcx>(&self,
         // read-only so that we can do global analyses on the MIR in the process (e.g.
         // `Place::ty()`).
         let optimizations = {
-            let mut optimization_finder = OptimizationFinder::new(mir, tcx);
-            optimization_finder.visit_body(mir);
+            let mut optimization_finder = OptimizationFinder::new(body, tcx);
+            optimization_finder.visit_body(body);
             optimization_finder.optimizations
         };
 
         // Then carry out those optimizations.
-        MutVisitor::visit_body(&mut InstCombineVisitor { optimizations }, mir);
+        MutVisitor::visit_body(&mut InstCombineVisitor { optimizations }, body);
     }
 }
 
@@ -64,15 +64,15 @@ fn visit_rvalue(&mut self, rvalue: &mut Rvalue<'tcx>, location: Location) {
 
 /// Finds optimization opportunities on the MIR.
 struct OptimizationFinder<'b, 'a, 'tcx:'a+'b> {
-    mir: &'b Body<'tcx>,
+    body: &'b Body<'tcx>,
     tcx: TyCtxt<'a, 'tcx, 'tcx>,
     optimizations: OptimizationList<'tcx>,
 }
 
 impl<'b, 'a, 'tcx:'b> OptimizationFinder<'b, 'a, 'tcx> {
-    fn new(mir: &'b Body<'tcx>, tcx: TyCtxt<'a, 'tcx, 'tcx>) -> OptimizationFinder<'b, 'a, 'tcx> {
+    fn new(body: &'b Body<'tcx>, tcx: TyCtxt<'a, 'tcx, 'tcx>) -> OptimizationFinder<'b, 'a, 'tcx> {
         OptimizationFinder {
-            mir,
+            body,
             tcx,
             optimizations: OptimizationList::default(),
         }
@@ -83,16 +83,16 @@ impl<'b, 'a, 'tcx> Visitor<'tcx> for OptimizationFinder<'b, 'a, 'tcx> {
     fn visit_rvalue(&mut self, rvalue: &Rvalue<'tcx>, location: Location) {
         if let Rvalue::Ref(_, _, Place::Projection(ref projection)) = *rvalue {
             if let ProjectionElem::Deref = projection.elem {
-                if projection.base.ty(self.mir, self.tcx).ty.is_region_ptr() {
+                if projection.base.ty(self.body, self.tcx).ty.is_region_ptr() {
                     self.optimizations.and_stars.insert(location);
                 }
             }
         }
 
         if let Rvalue::Len(ref place) = *rvalue {
-            let place_ty = place.ty(&self.mir.local_decls, self.tcx).ty;
+            let place_ty = place.ty(&self.body.local_decls, self.tcx).ty;
             if let ty::Array(_, len) = place_ty.sty {
-                let span = self.mir.source_info(location).span;
+                let span = self.body.source_info(location).span;
                 let ty = self.tcx.types.usize;
                 let constant = Constant { span, ty, literal: len, user_ty: None };
                 self.optimizations.arrays_lengths.insert(location, constant);
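
Both optimizations collected here are easiest to see at the source level; a minimal sketch of inputs they match (illustrative only):

    // `&*` of a reference is turned into a plain copy of the reference
    fn reborrow(x: &u32) -> &u32 {
        &*x
    }

    // the bounds check for `a[i]` computes `Len(a)`, which this pass replaces
    // with the constant 4 because the array length is known
    fn index(a: [u8; 4], i: usize) -> u8 {
        a[i]
    }
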
index 8c19637a955f158885aabacc815831111d391ec3..189258c2e92c471b264b5c54a4bf09ad2eea7f4f 100644 (file)
@@ -13,23 +13,23 @@ impl MirPass for Lower128Bit {
     fn run_pass<'a, 'tcx>(&self,
                           tcx: TyCtxt<'a, 'tcx, 'tcx>,
                           _src: MirSource<'tcx>,
-                          mir: &mut Body<'tcx>) {
+                          body: &mut Body<'tcx>) {
         let debugging_override = tcx.sess.opts.debugging_opts.lower_128bit_ops;
         let target_default = tcx.sess.host.options.i128_lowering;
         if !debugging_override.unwrap_or(target_default) {
             return
         }
 
-        self.lower_128bit_ops(tcx, mir);
+        self.lower_128bit_ops(tcx, body);
     }
 }
 
 impl Lower128Bit {
-    fn lower_128bit_ops<'a, 'tcx>(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>, mir: &mut Body<'tcx>) {
+    fn lower_128bit_ops<'a, 'tcx>(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>, body: &mut Body<'tcx>) {
         let mut new_blocks = Vec::new();
-        let cur_len = mir.basic_blocks().len();
+        let cur_len = body.basic_blocks().len();
 
-        let (basic_blocks, local_decls) = mir.basic_blocks_and_local_decls_mut();
+        let (basic_blocks, local_decls) = body.basic_blocks_and_local_decls_mut();
         for block in basic_blocks.iter_mut() {
             for i in (0..block.statements.len()).rev() {
                 let (lang_item, rhs_kind) =
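
When the lowering is enabled, 128-bit arithmetic in a body is rewritten into calls to compiler-provided helpers instead of native wide instructions. A conceptual before/after, with an illustrative helper name rather than the real lang item:

    fn mul(a: i128, b: i128) -> i128 {
        a * b
        // conceptually becomes a call such as `__rust_i128_mul(a, b)` after this pass
    }
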
index 82193d98655d6a79b19d73bcf9af49654a434a63..b6220ac1d21fff2e575130f19e81501f59d635f7 100644 (file)
@@ -1,4 +1,4 @@
-use crate::build;
+use crate::{build, shim};
 use rustc::hir::def_id::{CrateNum, DefId, LOCAL_CRATE};
 use rustc::mir::{Body, MirPhase, Promoted};
 use rustc::ty::{TyCtxt, InstanceDef};
@@ -145,20 +145,20 @@ fn name<'a>(&'a self) -> Cow<'a, str> {
     fn run_pass<'a, 'tcx>(&self,
                           tcx: TyCtxt<'a, 'tcx, 'tcx>,
                           source: MirSource<'tcx>,
-                          mir: &mut Body<'tcx>);
+                          body: &mut Body<'tcx>);
 }
 
 pub fn run_passes(
     tcx: TyCtxt<'a, 'tcx, 'tcx>,
-    mir: &mut Body<'tcx>,
+    body: &mut Body<'tcx>,
     instance: InstanceDef<'tcx>,
     mir_phase: MirPhase,
     passes: &[&dyn MirPass],
 ) {
     let phase_index = mir_phase.phase_index();
 
-    let run_passes = |mir: &mut Body<'tcx>, promoted| {
-        if mir.phase >= mir_phase {
+    let run_passes = |body: &mut Body<'tcx>, promoted| {
+        if body.phase >= mir_phase {
             return;
         }
 
@@ -168,13 +168,13 @@ pub fn run_passes(
         };
         let mut index = 0;
         let mut run_pass = |pass: &dyn MirPass| {
-            let run_hooks = |mir: &_, index, is_after| {
+            let run_hooks = |body: &_, index, is_after| {
                 dump_mir::on_mir_pass(tcx, &format_args!("{:03}-{:03}", phase_index, index),
-                                      &pass.name(), source, mir, is_after);
+                                      &pass.name(), source, body, is_after);
             };
-            run_hooks(mir, index, false);
-            pass.run_pass(tcx, source, mir);
-            run_hooks(mir, index, true);
+            run_hooks(body, index, false);
+            pass.run_pass(tcx, source, body);
+            run_hooks(body, index, true);
 
             index += 1;
         };
@@ -183,16 +183,16 @@ pub fn run_passes(
             run_pass(*pass);
         }
 
-        mir.phase = mir_phase;
+        body.phase = mir_phase;
     };
 
-    run_passes(mir, None);
+    run_passes(body, None);
 
-    for (index, promoted_mir) in mir.promoted.iter_enumerated_mut() {
-        run_passes(promoted_mir, Some(index));
+    for (index, promoted_body) in body.promoted.iter_enumerated_mut() {
+        run_passes(promoted_body, Some(index));
 
         // Let's make sure we don't miss any nested instances
-        assert!(promoted_mir.promoted.is_empty())
+        assert!(promoted_body.promoted.is_empty())
     }
 }
 
@@ -200,14 +200,14 @@ fn mir_const<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, def_id: DefId) -> &'tcx Stea
     // Unsafety check uses the raw mir, so make sure it is run
     let _ = tcx.unsafety_check_result(def_id);
 
-    let mut mir = tcx.mir_built(def_id).steal();
-    run_passes(tcx, &mut mir, InstanceDef::Item(def_id), MirPhase::Const, &[
+    let mut body = tcx.mir_built(def_id).steal();
+    run_passes(tcx, &mut body, InstanceDef::Item(def_id), MirPhase::Const, &[
         // What we need to do constant evaluation.
         &simplify::SimplifyCfg::new("initial"),
         &rustc_peek::SanityCheck,
         &uniform_array_move_out::UniformArrayMoveOut,
     ]);
-    tcx.alloc_steal_mir(mir)
+    tcx.alloc_steal_mir(body)
 }
 
 fn mir_validated<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, def_id: DefId) -> &'tcx Steal<Body<'tcx>> {
@@ -218,17 +218,25 @@ fn mir_validated<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, def_id: DefId) -> &'tcx
         let _ = tcx.mir_const_qualif(def_id);
     }
 
-    let mut mir = tcx.mir_const(def_id).steal();
-    run_passes(tcx, &mut mir, InstanceDef::Item(def_id), MirPhase::Validated, &[
+    let mut body = tcx.mir_const(def_id).steal();
+    run_passes(tcx, &mut body, InstanceDef::Item(def_id), MirPhase::Validated, &[
         // What we need to run borrowck etc.
         &qualify_consts::QualifyAndPromoteConstants,
         &simplify::SimplifyCfg::new("qualify-consts"),
     ]);
-    tcx.alloc_steal_mir(mir)
+    tcx.alloc_steal_mir(body)
 }
 
 fn optimized_mir<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, def_id: DefId) -> &'tcx Body<'tcx> {
-    // `mir_borrowck` uses `mir_validated`, so we have to force it to
+    if tcx.is_constructor(def_id) {
+        // There's no reason to run all of the MIR passes on constructors when
+        // we can just output the MIR we want directly. This also saves const
+        // qualification and borrow checking the trouble of special casing
+        // constructors.
+        return shim::build_adt_ctor(tcx, def_id);
+    }
+
+    // (Mir-)Borrowck uses `mir_validated`, so we have to force it to
     // execute before we can steal.
     tcx.ensure().mir_borrowck(def_id);
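
The new early return covers ADT constructors: the function values that tuple structs and tuple enum variants provide. A small illustration of what `shim::build_adt_ctor` produces MIR for (not part of the patch):

    struct Wrapper(u32);        // `Wrapper` is also usable as a fn(u32) -> Wrapper
    enum E { Variant(bool) }    // likewise `E::Variant` is a fn(bool) -> E

    fn main() {
        let w = Some(7).map(Wrapper);   // the constructor's MIR is just the shim
        let e = E::Variant(true);
        let _ = (w, e);
    }
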
 
@@ -236,8 +244,8 @@ fn optimized_mir<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, def_id: DefId) -> &'tcx
         tcx.ensure().borrowck(def_id);
     }
 
-    let mut mir = tcx.mir_validated(def_id).steal();
-    run_passes(tcx, &mut mir, InstanceDef::Item(def_id), MirPhase::Optimized, &[
+    let mut body = tcx.mir_validated(def_id).steal();
+    run_passes(tcx, &mut body, InstanceDef::Item(def_id), MirPhase::Optimized, &[
         // Remove all things only needed by analysis
         &no_landing_pads::NoLandingPads,
         &simplify_branches::SimplifyBranches::new("initial"),
@@ -290,5 +298,5 @@ fn optimized_mir<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, def_id: DefId) -> &'tcx
         &add_call_guards::CriticalCallEdges,
         &dump_mir::Marker("PreCodegen"),
     ]);
-    tcx.arena.alloc(mir)
+    tcx.arena.alloc(body)
 }
index 719e22ca1502380b8139279c2afb6579d5f9aea3..a987c19c2aba088ac171eace93aeb8914e36e5e0 100644 (file)
@@ -12,14 +12,14 @@ impl MirPass for NoLandingPads {
     fn run_pass<'a, 'tcx>(&self,
                           tcx: TyCtxt<'a, 'tcx, 'tcx>,
                           _: MirSource<'tcx>,
-                          mir: &mut Body<'tcx>) {
-        no_landing_pads(tcx, mir)
+                          body: &mut Body<'tcx>) {
+        no_landing_pads(tcx, body)
     }
 }
 
-pub fn no_landing_pads<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, mir: &mut Body<'tcx>) {
+pub fn no_landing_pads<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, body: &mut Body<'tcx>) {
     if tcx.sess.no_landing_pads() {
-        NoLandingPads.visit_body(mir);
+        NoLandingPads.visit_body(body);
     }
 }
 
index 4b95fbf4b7d2f500cb2a6b100e6eeef33c38d339..4fbb95ec4ad351babfd72361d94d1cfa257beafa 100644 (file)
@@ -71,7 +71,7 @@ pub enum Candidate {
 struct TempCollector<'tcx> {
     temps: IndexVec<Local, TempState>,
     span: Span,
-    mir: &'tcx Body<'tcx>,
+    body: &'tcx Body<'tcx>,
 }
 
 impl<'tcx> Visitor<'tcx> for TempCollector<'tcx> {
@@ -81,7 +81,7 @@ fn visit_local(&mut self,
                    location: Location) {
         debug!("visit_local: index={:?} context={:?} location={:?}", index, context, location);
         // We're only interested in temporaries and the return place
-        match self.mir.local_kind(index) {
+        match self.body.local_kind(index) {
             | LocalKind::Temp
             | LocalKind::ReturnPointer
             => {},
@@ -134,12 +134,12 @@ fn visit_source_info(&mut self, source_info: &SourceInfo) {
     }
 }
 
-pub fn collect_temps(mir: &Body<'_>,
+pub fn collect_temps(body: &Body<'_>,
                      rpo: &mut ReversePostorder<'_, '_>) -> IndexVec<Local, TempState> {
     let mut collector = TempCollector {
-        temps: IndexVec::from_elem(TempState::Undefined, &mir.local_decls),
-        span: mir.span,
-        mir,
+        temps: IndexVec::from_elem(TempState::Undefined, &body.local_decls),
+        span: body.span,
+        body,
     };
     for (bb, data) in rpo {
         collector.visit_basic_block_data(bb, data);
@@ -369,7 +369,7 @@ fn visit_local(&mut self,
     }
 }
 
-pub fn promote_candidates<'a, 'tcx>(mir: &mut Body<'tcx>,
+pub fn promote_candidates<'a, 'tcx>(body: &mut Body<'tcx>,
                                     tcx: TyCtxt<'a, 'tcx, 'tcx>,
                                     mut temps: IndexVec<Local, TempState>,
                                     candidates: Vec<Candidate>) {
@@ -379,7 +379,7 @@ pub fn promote_candidates<'a, 'tcx>(mir: &mut Body<'tcx>,
     for candidate in candidates.into_iter().rev() {
         match candidate {
             Candidate::Ref(Location { block, statement_index }) => {
-                match mir[block].statements[statement_index].kind {
+                match body[block].statements[statement_index].kind {
                     StatementKind::Assign(Place::Base(PlaceBase::Local(local)), _) => {
                         if temps[local] == TempState::PromotedOut {
                             // Already promoted.
@@ -395,7 +395,7 @@ pub fn promote_candidates<'a, 'tcx>(mir: &mut Body<'tcx>,
 
         // Declare return place local so that `mir::Body::new` doesn't complain.
         let initial_locals = iter::once(
-            LocalDecl::new_return_place(tcx.types.never, mir.span)
+            LocalDecl::new_return_place(tcx.types.never, body.span)
         ).collect();
 
         let promoter = Promoter {
@@ -403,19 +403,19 @@ pub fn promote_candidates<'a, 'tcx>(mir: &mut Body<'tcx>,
                 IndexVec::new(),
                 // FIXME: maybe try to filter this to avoid blowing up
                 // memory usage?
-                mir.source_scopes.clone(),
-                mir.source_scope_local_data.clone(),
+                body.source_scopes.clone(),
+                body.source_scope_local_data.clone(),
                 IndexVec::new(),
                 None,
                 initial_locals,
                 IndexVec::new(),
                 0,
                 vec![],
-                mir.span,
+                body.span,
                 vec![],
             ),
             tcx,
-            source: mir,
+            source: body,
             temps: &mut temps,
             keep_original: false
         };
@@ -424,7 +424,7 @@ pub fn promote_candidates<'a, 'tcx>(mir: &mut Body<'tcx>,
 
     // Eliminate assignments to, and drops of promoted temps.
     let promoted = |index: Local| temps[index] == TempState::PromotedOut;
-    for block in mir.basic_blocks_mut() {
+    for block in body.basic_blocks_mut() {
         block.statements.retain(|statement| {
             match statement.kind {
                 StatementKind::Assign(Place::Base(PlaceBase::Local(index)), _) |
index 8696291e05875e2aeaa816d2913a21bf9e188c84..19bd812ec80c0106ab784b3dc4ab24321ac0eec8 100644 (file)
 /// What kind of item we are in.
 #[derive(Copy, Clone, Debug, PartialEq, Eq)]
 enum Mode {
-    Const,
+    /// A `static` item.
     Static,
+    /// A `static mut` item.
     StaticMut,
+    /// A `const fn` item.
     ConstFn,
-    Fn
+    /// A `const` item or an anonymous constant (e.g. in array lengths).
+    Const,
+    /// Other type of `fn`.
+    NonConstFn,
+}
+
+impl Mode {
+    /// Determine whether we have to do full const-checking because, syntactically, we
+    /// are required to be "const".
+    #[inline]
+    fn requires_const_checking(self) -> bool {
+        self != Mode::NonConstFn
+    }
 }
 
 impl fmt::Display for Mode {
@@ -48,7 +62,7 @@ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
             Mode::Const => write!(f, "constant"),
             Mode::Static | Mode::StaticMut => write!(f, "static"),
             Mode::ConstFn => write!(f, "constant function"),
-            Mode::Fn => write!(f, "function")
+            Mode::NonConstFn => write!(f, "function")
         }
     }
 }
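
The renamed variants line up with item kinds roughly as sketched below; `NonConstFn` is the only mode for which the new `requires_const_checking()` returns false (illustrative):

    static S: u32 = 0;               // Mode::Static
    static mut SM: u32 = 0;          // Mode::StaticMut
    const fn cf() -> u32 { 0 }       // Mode::ConstFn
    const C: [u8; 1 + 1] = [0; 2];   // Mode::Const (the length `1 + 1` is an
                                     // anonymous constant, also Mode::Const)
    fn f() -> u32 { 0 }              // Mode::NonConstFn
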
@@ -113,7 +127,7 @@ struct ConstCx<'a, 'tcx> {
     tcx: TyCtxt<'a, 'tcx, 'tcx>,
     param_env: ty::ParamEnv<'tcx>,
     mode: Mode,
-    mir: &'a Body<'tcx>,
+    body: &'a Body<'tcx>,
 
     per_local: PerQualif<BitSet<Local>>,
 }
@@ -135,6 +149,12 @@ enum ValueSource<'a, 'tcx> {
     },
 }
 
+/// A "qualif"(-ication) is a way to look for something "bad" in the MIR that would disqualify some
+/// code for promotion or prevent it from evaluating at compile time. So `return true` means
+/// "I found something bad, no reason to go on searching". `false` is only returned if we
+/// definitely cannot find anything bad anywhere.
+///
+/// The default implementations proceed structurally.
 trait Qualif {
     const IDX: usize;
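
The "found something bad, stop searching" convention described above has the same shape as a plain structural search; a toy sketch, not part of the compiler:

    enum Expr { Bad, Leaf, Node(Vec<Expr>) }

    fn has_bad(e: &Expr) -> bool {
        match e {
            Expr::Bad => true,                       // found something bad, stop
            Expr::Leaf => false,                     // definitely nothing bad here
            Expr::Node(children) => children.iter().any(has_bad),
        }
    }
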
 
@@ -166,7 +186,7 @@ fn in_projection_structurally(
         let base_qualif = Self::in_place(cx, &proj.base);
         let qualif = base_qualif && Self::mask_for_ty(
             cx,
-            proj.base.ty(cx.mir, cx.tcx)
+            proj.base.ty(cx.body, cx.tcx)
                 .projection_ty(cx.tcx, &proj.elem)
                 .ty,
         );
@@ -245,7 +265,7 @@ fn in_rvalue_structurally(cx: &ConstCx<'_, 'tcx>, rvalue: &Rvalue<'tcx>) -> bool
                 // Special-case reborrows to be more like a copy of the reference.
                 if let Place::Projection(ref proj) = *place {
                     if let ProjectionElem::Deref = proj.elem {
-                        let base_ty = proj.base.ty(cx.mir, cx.tcx).ty;
+                        let base_ty = proj.base.ty(cx.body, cx.tcx).ty;
                         if let ty::Ref(..) = base_ty.sty {
                             return Self::in_place(cx, &proj.base);
                         }
@@ -285,7 +305,11 @@ fn in_value(cx: &ConstCx<'_, 'tcx>, source: ValueSource<'_, 'tcx>) -> bool {
     }
 }
 
-// Constant containing interior mutability (UnsafeCell).
+/// Constant containing interior mutability (`UnsafeCell<T>`).
+/// This must be ruled out to make sure that evaluating the constant at compile-time
+/// and at *any point* during the run-time would produce the same result. In particular,
+/// promotion of temporaries must not change program behavior; if the promoted could be
+/// written to, that would be a problem.
 struct HasMutInterior;
 
 impl Qualif for HasMutInterior {
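
A concrete case of what this qualif rules out (illustrative only):

    use std::cell::Cell;

    fn main() {
        let c = &Cell::new(0);   // not promoted to a 'static allocation: the cell
        c.set(1);                // can be written through at run-time, so a read-only
        assert_eq!(c.get(), 1);  // promoted copy would change observable behaviour
    }
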
@@ -301,7 +325,7 @@ fn in_rvalue(cx: &ConstCx<'_, 'tcx>, rvalue: &Rvalue<'tcx>) -> bool {
             // allowed in constants (and the `Checker` will error), and/or it
             // won't be promoted, due to `&mut ...` or interior mutability.
             Rvalue::Ref(_, kind, ref place) => {
-                let ty = place.ty(cx.mir, cx.tcx).ty;
+                let ty = place.ty(cx.body, cx.tcx).ty;
 
                 if let BorrowKind::Mut { .. } = kind {
                     // In theory, any zero-sized value could be borrowed
@@ -314,10 +338,10 @@ fn in_rvalue(cx: &ConstCx<'_, 'tcx>, rvalue: &Rvalue<'tcx>) -> bool {
                             _ => return true,
                         }
                     } else if let ty::Array(_, len) = ty.sty {
-                        // FIXME(eddyb) the `cx.mode == Mode::Fn` condition
+                        // FIXME(eddyb) the `cx.mode == Mode::NonConstFn` condition
                         // seems unnecessary, given that this is merely a ZST.
                         match len.assert_usize(cx.tcx) {
-                            Some(0) if cx.mode == Mode::Fn => {},
+                            Some(0) if cx.mode == Mode::NonConstFn => {},
                             _ => return true,
                         }
                     } else {
@@ -329,7 +353,7 @@ fn in_rvalue(cx: &ConstCx<'_, 'tcx>, rvalue: &Rvalue<'tcx>) -> bool {
             Rvalue::Aggregate(ref kind, _) => {
                 if let AggregateKind::Adt(def, ..) = **kind {
                     if Some(def.did) == cx.tcx.lang_items().unsafe_cell_type() {
-                        let ty = rvalue.ty(cx.mir, cx.tcx);
+                        let ty = rvalue.ty(cx.body, cx.tcx);
                         assert_eq!(Self::in_any_value_of_ty(cx, ty), Some(true));
                         return true;
                     }
@@ -343,7 +367,10 @@ fn in_rvalue(cx: &ConstCx<'_, 'tcx>, rvalue: &Rvalue<'tcx>) -> bool {
     }
 }
 
-// Constant containing an ADT that implements Drop.
+/// Constant containing an ADT that implements `Drop`.
+/// This must be ruled out (a) because we cannot run `Drop` during compile-time
+/// as that might not be a `const fn`, and (b) because implicit promotion would
+/// remove side-effects that occur as part of dropping that value.
 struct NeedsDrop;
 
 impl Qualif for NeedsDrop {
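
And for this qualif: promoting a value whose type implements `Drop` would silently discard the drop and its side effect (illustrative only):

    struct Loud;
    impl Drop for Loud {
        fn drop(&mut self) { println!("dropped"); }  // an observable side effect
    }

    fn main() {
        let r = &Loud;   // not promoted: the temporary must still be dropped
        let _ = r;
    }                    // prints "dropped" when the temporary goes out of scope
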
@@ -366,8 +393,12 @@ fn in_rvalue(cx: &ConstCx<'_, 'tcx>, rvalue: &Rvalue<'tcx>) -> bool {
     }
 }
 
-// Not promotable at all - non-`const fn` calls, asm!,
-// pointer comparisons, ptr-to-int casts, etc.
+/// Not promotable at all - non-`const fn` calls, `asm!`,
+/// pointer comparisons, ptr-to-int casts, etc.
+/// Inside a const context all constness rules apply, so promotion simply has to follow the regular
+/// constant rules (modulo interior mutability or `Drop` rules, which are handled by `HasMutInterior`
+/// and `NeedsDrop` respectively). Basically this duplicates the checks that the const-checking
+/// visitor enforces by emitting errors when working in const context.
 struct IsNotPromotable;
 
 impl Qualif for IsNotPromotable {
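
The "regular constant rules" this qualif mirrors are the ones users hit directly in const contexts, for example (illustrative, error text approximate):

    fn helper() -> u32 { 42 }

    const C: u32 = helper();
    // error[E0015]: calls in constants are limited to constant functions, ...
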
@@ -398,9 +429,10 @@ fn in_projection(cx: &ConstCx<'_, 'tcx>, proj: &Projection<'tcx>) -> bool {
             ProjectionElem::Index(_) => {}
 
             ProjectionElem::Field(..) => {
-                if cx.mode == Mode::Fn {
-                    let base_ty = proj.base.ty(cx.mir, cx.tcx).ty;
+                if cx.mode == Mode::NonConstFn {
+                    let base_ty = proj.base.ty(cx.body, cx.tcx).ty;
                     if let Some(def) = base_ty.ty_adt_def() {
+                        // No promotion of union field accesses.
                         if def.is_union() {
                             return true;
                         }
@@ -414,8 +446,8 @@ fn in_projection(cx: &ConstCx<'_, 'tcx>, proj: &Projection<'tcx>) -> bool {
 
     fn in_rvalue(cx: &ConstCx<'_, 'tcx>, rvalue: &Rvalue<'tcx>) -> bool {
         match *rvalue {
-            Rvalue::Cast(CastKind::Misc, ref operand, cast_ty) if cx.mode == Mode::Fn => {
-                let operand_ty = operand.ty(cx.mir, cx.tcx);
+            Rvalue::Cast(CastKind::Misc, ref operand, cast_ty) if cx.mode == Mode::NonConstFn => {
+                let operand_ty = operand.ty(cx.body, cx.tcx);
                 let cast_in = CastTy::from_ty(operand_ty).expect("bad input type for cast");
                 let cast_out = CastTy::from_ty(cast_ty).expect("bad output type for cast");
                 match (cast_in, cast_out) {
@@ -428,8 +460,8 @@ fn in_rvalue(cx: &ConstCx<'_, 'tcx>, rvalue: &Rvalue<'tcx>) -> bool {
                 }
             }
 
-            Rvalue::BinaryOp(op, ref lhs, _) if cx.mode == Mode::Fn => {
-                if let ty::RawPtr(_) | ty::FnPtr(..) = lhs.ty(cx.mir, cx.tcx).sty {
+            Rvalue::BinaryOp(op, ref lhs, _) if cx.mode == Mode::NonConstFn => {
+                if let ty::RawPtr(_) | ty::FnPtr(..) = lhs.ty(cx.body, cx.tcx).sty {
                     assert!(op == BinOp::Eq || op == BinOp::Ne ||
                             op == BinOp::Le || op == BinOp::Lt ||
                             op == BinOp::Ge || op == BinOp::Gt ||
@@ -454,7 +486,7 @@ fn in_call(
         args: &[Operand<'tcx>],
         _return_ty: Ty<'tcx>,
     ) -> bool {
-        let fn_ty = callee.ty(cx.mir, cx.tcx);
+        let fn_ty = callee.ty(cx.body, cx.tcx);
         match fn_ty.sty {
             ty::FnDef(def_id, _) => {
                 match cx.tcx.fn_sig(def_id).abi() {
@@ -511,12 +543,9 @@ fn in_call(
 
 /// Refers to temporaries which cannot be promoted *implicitly*.
 /// Explicit promotion happens e.g. for constant arguments declared via `rustc_args_required_const`.
-/// Inside a const context all constness rules
-/// apply, so implicit promotion simply has to follow the regular constant rules (modulo interior
-/// mutability or `Drop` rules which are handled `HasMutInterior` and `NeedsDrop` respectively).
-/// Implicit promotion inside regular functions does not happen if `const fn` calls are involved,
-/// as the call may be perfectly alright at runtime, but fail at compile time e.g. due to addresses
-/// being compared inside the function.
+/// Implicit promotion has almost the same rules, except that it also disallows `const fn` calls
+/// unless they are marked `#[rustc_promotable]`. This is to avoid changing a legitimate run-time operation
+/// into a failing compile-time operation e.g. due to addresses being compared inside the function.
 struct IsNotImplicitlyPromotable;
 
 impl Qualif for IsNotImplicitlyPromotable {
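
Here the interesting case is a `const fn` that is perfectly fine at run-time but is not marked `#[rustc_promotable]`: borrowing its result in a regular function yields a temporary rather than a promoted 'static (illustrative, error text approximate):

    const fn compute() -> u32 { 0 }

    fn f() -> &'static u32 {
        &compute()
        // error[E0515]: cannot return reference to temporary value
        // (the call is not implicitly promoted, unlike e.g. `&0`)
    }
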
@@ -528,8 +557,8 @@ fn in_call(
         args: &[Operand<'tcx>],
         _return_ty: Ty<'tcx>,
     ) -> bool {
-        if cx.mode == Mode::Fn {
-            if let ty::FnDef(def_id, _) = callee.ty(cx.mir, cx.tcx).sty {
+        if cx.mode == Mode::NonConstFn {
+            if let ty::FnDef(def_id, _) = callee.ty(cx.body, cx.tcx).sty {
                 // Never promote runtime `const fn` calls of
                 // functions without `#[rustc_promotable]`.
                 if !cx.tcx.is_promotable_const_fn(def_id) {
@@ -589,6 +618,11 @@ fn qualifs_in_value(&self, source: ValueSource<'_, 'tcx>) -> PerQualif<bool> {
     }
 }
 
+/// Checks MIR for being admissible as a compile-time constant, using `ConstCx`
+/// for value qualifications, and accumulates writes of
+/// rvalue/call results to locals, in `per_local`.
+/// It also records candidates for promotion in `promotion_candidates`,
+/// both in functions and const/static items.
 struct Checker<'a, 'tcx> {
     cx: ConstCx<'a, 'tcx>,
 
@@ -620,12 +654,12 @@ fn deref(&self) -> &Self::Target {
 impl<'a, 'tcx> Checker<'a, 'tcx> {
     fn new(tcx: TyCtxt<'a, 'tcx, 'tcx>,
            def_id: DefId,
-           mir: &'a Body<'tcx>,
+           body: &'a Body<'tcx>,
            mode: Mode)
            -> Self {
         assert!(def_id.is_local());
-        let mut rpo = traversal::reverse_postorder(mir);
-        let temps = promote_consts::collect_temps(mir, &mut rpo);
+        let mut rpo = traversal::reverse_postorder(body);
+        let temps = promote_consts::collect_temps(body, &mut rpo);
         rpo.reset();
 
         let param_env = tcx.param_env(def_id);
@@ -634,12 +668,12 @@ fn new(tcx: TyCtxt<'a, 'tcx, 'tcx>,
             tcx,
             param_env,
             mode,
-            mir,
-            per_local: PerQualif::new(BitSet::new_empty(mir.local_decls.len())),
+            body,
+            per_local: PerQualif::new(BitSet::new_empty(body.local_decls.len())),
         };
 
-        for (local, decl) in mir.local_decls.iter_enumerated() {
-            if let LocalKind::Arg = mir.local_kind(local) {
+        for (local, decl) in body.local_decls.iter_enumerated() {
+            if let LocalKind::Arg = body.local_kind(local) {
                 let qualifs = cx.qualifs_in_any_value_of_ty(decl.ty);
                 for (per_local, qualif) in &mut cx.per_local.as_mut().zip(qualifs).0 {
                     if *qualif {
@@ -650,7 +684,7 @@ fn new(tcx: TyCtxt<'a, 'tcx, 'tcx>,
             if !temps[local].is_promotable() {
                 cx.per_local[IsNotPromotable].insert(local);
             }
-            if let LocalKind::Var = mir.local_kind(local) {
+            if let LocalKind::Var = body.local_kind(local) {
                 // Sanity check to prevent implicit and explicit promotion of
                 // named locals
                 assert!(cx.per_local[IsNotPromotable].contains(local));
@@ -659,7 +693,7 @@ fn new(tcx: TyCtxt<'a, 'tcx, 'tcx>,
 
         Checker {
             cx,
-            span: mir.span,
+            span: body.span,
             def_id,
             rpo,
             temp_promotion_state: temps,
@@ -672,7 +706,7 @@ fn new(tcx: TyCtxt<'a, 'tcx, 'tcx>,
     // slightly pointless (even with feature-gating).
     fn not_const(&mut self) {
         unleash_miri!(self);
-        if self.mode != Mode::Fn {
+        if self.mode.requires_const_checking() {
             let mut err = struct_span_err!(
                 self.tcx.sess,
                 self.span,
@@ -707,7 +741,7 @@ fn assign(&mut self, dest: &Place<'tcx>, source: ValueSource<'_, 'tcx>, location
                 qualifs[HasMutInterior] = false;
                 qualifs[IsNotPromotable] = true;
 
-                if self.mode != Mode::Fn {
+                if self.mode.requires_const_checking() {
                     if let BorrowKind::Mut { .. } = kind {
                         let mut err = struct_span_err!(self.tcx.sess,  self.span, E0017,
                                                        "references in {}s may only refer \
@@ -737,7 +771,7 @@ fn assign(&mut self, dest: &Place<'tcx>, source: ValueSource<'_, 'tcx>, location
 
                 // We might have a candidate for promotion.
                 let candidate = Candidate::Ref(location);
-                // We can only promote interior borrows of promotable temps.
+                // Start by traversing to the "base", with non-deref projections removed.
                 let mut place = place;
                 while let Place::Projection(ref proj) = *place {
                     if proj.elem == ProjectionElem::Deref {
@@ -746,8 +780,12 @@ fn assign(&mut self, dest: &Place<'tcx>, source: ValueSource<'_, 'tcx>, location
                     place = &proj.base;
                 }
                 debug!("qualify_consts: promotion candidate: place={:?}", place);
+                // We can only promote interior borrows of promotable temps (non-temps
+                // don't get promoted anyway).
+                // (If we bailed out of the loop due to a `Deref` above, we will definitely
+                // not enter the conditional here.)
                 if let Place::Base(PlaceBase::Local(local)) = *place {
-                    if self.mir.local_kind(local) == LocalKind::Temp {
+                    if self.body.local_kind(local) == LocalKind::Temp {
                         debug!("qualify_consts: promotion candidate: local={:?}", local);
                         // The borrowed place doesn't have `HasMutInterior`
                         // (from `in_rvalue`), so we can safely ignore
@@ -756,6 +794,10 @@ fn assign(&mut self, dest: &Place<'tcx>, source: ValueSource<'_, 'tcx>, location
                         // `HasMutInterior`, from a type that does, e.g.:
                         // `let _: &'static _ = &(Cell::new(1), 2).1;`
                         let mut local_qualifs = self.qualifs_in_local(local);
+                        // Any qualifications, except HasMutInterior (see above), disqualify
+                        // from promotion.
+                        // This is, in particular, the "implicit promotion" version of
+                        // the check making sure that we don't run drop glue during const-eval.
                         local_qualifs[HasMutInterior] = false;
                         if !local_qualifs.0.iter().any(|&qualif| qualif) {
                             debug!("qualify_consts: promotion candidate: {:?}", candidate);
@@ -799,11 +841,11 @@ fn assign(&mut self, dest: &Place<'tcx>, source: ValueSource<'_, 'tcx>, location
             }
         };
 
-        let kind = self.mir.local_kind(index);
+        let kind = self.body.local_kind(index);
         debug!("store to {:?} {:?}", kind, index);
 
         // Only handle promotable temps in non-const functions.
-        if self.mode == Mode::Fn {
+        if self.mode == Mode::NonConstFn {
             if kind != LocalKind::Temp ||
                !self.temp_promotion_state[index].is_promotable() {
                 return;
@@ -837,16 +879,16 @@ fn assign(&mut self, dest: &Place<'tcx>, source: ValueSource<'_, 'tcx>, location
     fn check_const(&mut self) -> (u8, &'tcx BitSet<Local>) {
         debug!("const-checking {} {:?}", self.mode, self.def_id);
 
-        let mir = self.mir;
+        let body = self.body;
 
-        let mut seen_blocks = BitSet::new_empty(mir.basic_blocks().len());
+        let mut seen_blocks = BitSet::new_empty(body.basic_blocks().len());
         let mut bb = START_BLOCK;
         loop {
             seen_blocks.insert(bb.index());
 
-            self.visit_basic_block_data(bb, &mir[bb]);
+            self.visit_basic_block_data(bb, &body[bb]);
 
-            let target = match mir[bb].terminator().kind {
+            let target = match body[bb].terminator().kind {
                 TerminatorKind::Goto { target } |
                 TerminatorKind::Drop { target, .. } |
                 TerminatorKind::Assert { target, .. } |
@@ -894,7 +936,7 @@ fn check_const(&mut self) -> (u8, &'tcx BitSet<Local>) {
         for candidate in &self.promotion_candidates {
             match *candidate {
                 Candidate::Ref(Location { block: bb, statement_index: stmt_idx }) => {
-                    match self.mir[bb].statements[stmt_idx].kind {
+                    match self.body[bb].statements[stmt_idx].kind {
                         StatementKind::Assign(
                             _,
                             box Rvalue::Ref(_, _, Place::Base(PlaceBase::Local(index)))
@@ -913,138 +955,143 @@ fn check_const(&mut self) -> (u8, &'tcx BitSet<Local>) {
         // Account for errors in consts by using the
         // conservative type qualification instead.
         if qualifs[IsNotPromotable] {
-            qualifs = self.qualifs_in_any_value_of_ty(mir.return_ty());
+            qualifs = self.qualifs_in_any_value_of_ty(body.return_ty());
         }
 
         (qualifs.encode_to_bits(), self.tcx.arena.alloc(promoted_temps))
     }
 }
 
-/// Checks MIR for const-correctness, using `ConstCx`
-/// for value qualifications, and accumulates writes of
-/// rvalue/call results to locals, in `local_qualif`.
-/// For functions (constant or not), it also records
-/// candidates for promotion in `promotion_candidates`.
 impl<'a, 'tcx> Visitor<'tcx> for Checker<'a, 'tcx> {
-    fn visit_place(&mut self,
-                    place: &Place<'tcx>,
-                    context: PlaceContext,
-                    location: Location) {
-        debug!("visit_place: place={:?} context={:?} location={:?}", place, context, location);
-        place.iterate(|place_base, place_projections| {
-            match place_base {
-                PlaceBase::Local(_) => {}
-                PlaceBase::Static(box Static{ kind: StaticKind::Promoted(_), .. }) => {
-                    unreachable!()
-                }
-                PlaceBase::Static(box Static{ kind: StaticKind::Static(def_id), .. }) => {
-                    if self.tcx
-                           .get_attrs(*def_id)
-                           .iter()
-                           .any(|attr| attr.check_name(sym::thread_local)) {
-                        if self.mode != Mode::Fn {
-                            span_err!(self.tcx.sess, self.span, E0625,
-                                      "thread-local statics cannot be \
-                                       accessed at compile-time");
-                        }
-                        return;
+    fn visit_place_base(
+        &mut self,
+        place_base: &PlaceBase<'tcx>,
+        context: PlaceContext,
+        location: Location,
+    ) {
+        self.super_place_base(place_base, context, location);
+        match place_base {
+            PlaceBase::Local(_) => {}
+            PlaceBase::Static(box Static{ kind: StaticKind::Promoted(_), .. }) => {
+                unreachable!()
+            }
+            PlaceBase::Static(box Static{ kind: StaticKind::Static(def_id), .. }) => {
+                if self.tcx
+                        .get_attrs(*def_id)
+                        .iter()
+                        .any(|attr| attr.check_name(sym::thread_local)) {
+                    if self.mode.requires_const_checking() {
+                        span_err!(self.tcx.sess, self.span, E0625,
+                                    "thread-local statics cannot be \
+                                    accessed at compile-time");
                     }
+                    return;
+                }
 
-                    // Only allow statics (not consts) to refer to other statics.
-                    if self.mode == Mode::Static || self.mode == Mode::StaticMut {
-                        if self.mode == Mode::Static && context.is_mutating_use() {
-                            // this is not strictly necessary as miri will also bail out
-                            // For interior mutability we can't really catch this statically as that
-                            // goes through raw pointers and intermediate temporaries, so miri has
-                            // to catch this anyway
-                            self.tcx.sess.span_err(
-                                self.span,
-                                "cannot mutate statics in the initializer of another static",
-                            );
-                        }
-                        return;
+                // Only allow statics (not consts) to refer to other statics.
+                if self.mode == Mode::Static || self.mode == Mode::StaticMut {
+                    if self.mode == Mode::Static && context.is_mutating_use() {
+                        // This is not strictly necessary, as miri will also bail out.
+                        // For interior mutability we can't really catch this statically, as that
+                        // goes through raw pointers and intermediate temporaries, so miri has
+                        // to catch this anyway.
+                        self.tcx.sess.span_err(
+                            self.span,
+                            "cannot mutate statics in the initializer of another static",
+                        );
                     }
-                    unleash_miri!(self);
+                    return;
+                }
+                unleash_miri!(self);
 
-                    if self.mode != Mode::Fn {
-                        let mut err = struct_span_err!(self.tcx.sess, self.span, E0013,
-                                                       "{}s cannot refer to statics, use \
-                                                        a constant instead", self.mode);
-                        if self.tcx.sess.teach(&err.get_code().unwrap()) {
-                            err.note(
-                                "Static and const variables can refer to other const variables. \
-                                 But a const variable cannot refer to a static variable."
-                            );
-                            err.help(
-                                "To fix this, the value can be extracted as a const and then used."
-                            );
-                        }
-                        err.emit()
+                if self.mode.requires_const_checking() {
+                    let mut err = struct_span_err!(self.tcx.sess, self.span, E0013,
+                                                    "{}s cannot refer to statics, use \
+                                                    a constant instead", self.mode);
+                    if self.tcx.sess.teach(&err.get_code().unwrap()) {
+                        err.note(
+                            "Static and const variables can refer to other const variables. \
+                                But a const variable cannot refer to a static variable."
+                        );
+                        err.help(
+                            "To fix this, the value can be extracted as a const and then used."
+                        );
                     }
+                    err.emit()
                 }
             }
+        }
+    }
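
A small sketch of what this arm accepts and rejects (illustrative only; the item names are invented):

    static GLOBAL: i32 = 42;

    // Allowed: a static may refer to another static.
    static REF_TO_GLOBAL: &i32 = &GLOBAL;

    // Rejected with E0013: constants (and const fn bodies) may not refer to
    // statics; the suggested fix is to turn `GLOBAL` into a `const`.
    // const BAD: &i32 = &GLOBAL;
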
 
-            for proj in place_projections {
-                match proj.elem {
-                    ProjectionElem::Deref => {
-                        if context.is_mutating_use() {
-                            // `not_const` errors out in const contexts
-                            self.not_const()
-                        }
-                        let base_ty = proj.base.ty(self.mir, self.tcx).ty;
-                        match self.mode {
-                            Mode::Fn => {},
-                            _ => {
-                                if let ty::RawPtr(_) = base_ty.sty {
-                                    if !self.tcx.features().const_raw_ptr_deref {
-                                        emit_feature_err(
-                                            &self.tcx.sess.parse_sess, sym::const_raw_ptr_deref,
-                                            self.span, GateIssue::Language,
-                                            &format!(
-                                                "dereferencing raw pointers in {}s is unstable",
-                                                self.mode,
-                                            ),
-                                        );
-                                    }
-                                }
+    fn visit_projection(
+        &mut self,
+        proj: &Projection<'tcx>,
+        context: PlaceContext,
+        location: Location,
+    ) {
+        debug!(
+            "visit_place_projection: proj={:?} context={:?} location={:?}",
+            proj, context, location,
+        );
+        self.super_projection(proj, context, location);
+        match proj.elem {
+            ProjectionElem::Deref => {
+                if context.is_mutating_use() {
+                    // `not_const` errors out in const contexts
+                    self.not_const()
+                }
+                let base_ty = proj.base.ty(self.body, self.tcx).ty;
+                match self.mode {
+                    Mode::NonConstFn => {},
+                    _ => {
+                        if let ty::RawPtr(_) = base_ty.sty {
+                            if !self.tcx.features().const_raw_ptr_deref {
+                                emit_feature_err(
+                                    &self.tcx.sess.parse_sess, sym::const_raw_ptr_deref,
+                                    self.span, GateIssue::Language,
+                                    &format!(
+                                        "dereferencing raw pointers in {}s is unstable",
+                                        self.mode,
+                                    ),
+                                );
                             }
                         }
                     }
+                }
+            }
 
-                    ProjectionElem::ConstantIndex {..} |
-                    ProjectionElem::Subslice {..} |
-                    ProjectionElem::Field(..) |
-                    ProjectionElem::Index(_) => {
-                        let base_ty = proj.base.ty(self.mir, self.tcx).ty;
-                        if let Some(def) = base_ty.ty_adt_def() {
-                            if def.is_union() {
-                                match self.mode {
-                                    Mode::ConstFn => {
-                                        if !self.tcx.features().const_fn_union {
-                                            emit_feature_err(
-                                                &self.tcx.sess.parse_sess, sym::const_fn_union,
-                                                self.span, GateIssue::Language,
-                                                "unions in const fn are unstable",
-                                            );
-                                        }
-                                    },
-
-                                    | Mode::Fn
-                                    | Mode::Static
-                                    | Mode::StaticMut
-                                    | Mode::Const
-                                    => {},
+            ProjectionElem::ConstantIndex {..} |
+            ProjectionElem::Subslice {..} |
+            ProjectionElem::Field(..) |
+            ProjectionElem::Index(_) => {
+                let base_ty = proj.base.ty(self.body, self.tcx).ty;
+                if let Some(def) = base_ty.ty_adt_def() {
+                    if def.is_union() {
+                        match self.mode {
+                            Mode::ConstFn => {
+                                if !self.tcx.features().const_fn_union {
+                                    emit_feature_err(
+                                        &self.tcx.sess.parse_sess, sym::const_fn_union,
+                                        self.span, GateIssue::Language,
+                                        "unions in const fn are unstable",
+                                    );
                                 }
-                            }
-                        }
-                    }
+                            },
 
-                    ProjectionElem::Downcast(..) => {
-                        self.not_const()
+                            | Mode::NonConstFn
+                            | Mode::Static
+                            | Mode::StaticMut
+                            | Mode::Const
+                            => {},
+                        }
                     }
                 }
             }
-        });
+
+            ProjectionElem::Downcast(..) => {
+                self.not_const()
+            }
+        }
     }
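
The projection checks above gate two operations behind feature flags; a hedged sketch of what they reject on stable (type and item names are invented):

    union Bits {
        int: u32,
        float: f32,
    }

    // Accessing a union field inside a `const fn` is gated behind
    // `const_fn_union`; on stable this is an error.
    // const fn to_bits(b: Bits) -> u32 {
    //     unsafe { b.int }
    // }

    // Dereferencing a raw pointer in any const context is gated behind
    // `const_raw_ptr_deref`.
    // const SEVEN: i32 = unsafe { *(&7 as *const i32) };
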
 
     fn visit_operand(&mut self, operand: &Operand<'tcx>, location: Location) {
@@ -1069,17 +1116,17 @@ fn visit_rvalue(&mut self, rvalue: &Rvalue<'tcx>, location: Location) {
         // Check nested operands and places.
         if let Rvalue::Ref(_, kind, ref place) = *rvalue {
             // Special-case reborrows.
-            let mut is_reborrow = false;
+            let mut reborrow_place = None;
             if let Place::Projection(ref proj) = *place {
                 if let ProjectionElem::Deref = proj.elem {
-                    let base_ty = proj.base.ty(self.mir, self.tcx).ty;
+                    let base_ty = proj.base.ty(self.body, self.tcx).ty;
                     if let ty::Ref(..) = base_ty.sty {
-                        is_reborrow = true;
+                        reborrow_place = Some(&proj.base);
                     }
                 }
             }
 
-            if is_reborrow {
+            if let Some(place) = reborrow_place {
                 let ctx = match kind {
                     BorrowKind::Shared => PlaceContext::NonMutatingUse(
                         NonMutatingUseContext::SharedBorrow,
@@ -1094,7 +1141,7 @@ fn visit_rvalue(&mut self, rvalue: &Rvalue<'tcx>, location: Location) {
                         MutatingUseContext::Borrow,
                     ),
                 };
-                self.super_place(place, ctx, location);
+                self.visit_place(place, ctx, location);
             } else {
                 self.super_rvalue(rvalue, location);
             }
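
For context, a minimal sketch of the reborrow case this branch special-cases (the function name is invented); the checker now visits the base place of the reborrow instead of treating the expression as a fresh borrow:

    fn reborrow(r: &Vec<i32>) -> &Vec<i32> {
        // `&*r` re-borrows what `r` already points to; it is visited with a
        // borrow `PlaceContext` on the base place rather than being considered
        // a new borrow (and thus a potential promotion candidate).
        &*r
    }
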
@@ -1116,12 +1163,12 @@ fn visit_rvalue(&mut self, rvalue: &Rvalue<'tcx>, location: Location) {
             Rvalue::Aggregate(..) => {}
 
             Rvalue::Cast(CastKind::Misc, ref operand, cast_ty) => {
-                let operand_ty = operand.ty(self.mir, self.tcx);
+                let operand_ty = operand.ty(self.body, self.tcx);
                 let cast_in = CastTy::from_ty(operand_ty).expect("bad input type for cast");
                 let cast_out = CastTy::from_ty(cast_ty).expect("bad output type for cast");
                 match (cast_in, cast_out) {
                     (CastTy::Ptr(_), CastTy::Int(_)) |
-                    (CastTy::FnPtr, CastTy::Int(_)) if self.mode != Mode::Fn => {
+                    (CastTy::FnPtr, CastTy::Int(_)) if self.mode != Mode::NonConstFn => {
                         unleash_miri!(self);
                         if !self.tcx.features().const_raw_ptr_to_usize_cast {
                             // in const fn and constants this cast requires the feature gate
@@ -1141,14 +1188,16 @@ fn visit_rvalue(&mut self, rvalue: &Rvalue<'tcx>, location: Location) {
             }
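
A hedged sketch of the cast handled by this arm (item names are invented): pointer-to-integer casts are unrestricted in ordinary functions but feature-gated in const contexts:

    // Fine in a non-const function.
    fn address_of(x: &i32) -> usize {
        x as *const i32 as usize
    }

    // In a `const` or `const fn`, the same cast is gated behind
    // `const_raw_ptr_to_usize_cast` and rejected on stable.
    // const ADDR: usize = &7 as *const i32 as usize;
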
 
             Rvalue::BinaryOp(op, ref lhs, _) => {
-                if let ty::RawPtr(_) | ty::FnPtr(..) = lhs.ty(self.mir, self.tcx).sty {
+                if let ty::RawPtr(_) | ty::FnPtr(..) = lhs.ty(self.body, self.tcx).sty {
                     assert!(op == BinOp::Eq || op == BinOp::Ne ||
                             op == BinOp::Le || op == BinOp::Lt ||
                             op == BinOp::Ge || op == BinOp::Gt ||
                             op == BinOp::Offset);
 
                     unleash_miri!(self);
-                    if self.mode != Mode::Fn && !self.tcx.features().const_compare_raw_pointers {
+                    if self.mode.requires_const_checking() &&
+                        !self.tcx.features().const_compare_raw_pointers
+                    {
                         // require the feature gate inside constants and const fn
                         // FIXME: make it unsafe to use these operations
                         emit_feature_err(
@@ -1164,7 +1213,7 @@ fn visit_rvalue(&mut self, rvalue: &Rvalue<'tcx>, location: Location) {
 
             Rvalue::NullaryOp(NullOp::Box, _) => {
                 unleash_miri!(self);
-                if self.mode != Mode::Fn {
+                if self.mode.requires_const_checking() {
                     let mut err = struct_span_err!(self.tcx.sess, self.span, E0010,
                                                    "allocations are not allowed in {}s", self.mode);
                     err.span_label(self.span, format!("allocation not allowed in {}s", self.mode));
@@ -1191,11 +1240,11 @@ fn visit_terminator_kind(&mut self,
                 self.assign(dest, ValueSource::Call {
                     callee: func,
                     args,
-                    return_ty: dest.ty(self.mir, self.tcx).ty,
+                    return_ty: dest.ty(self.body, self.tcx).ty,
                 }, location);
             }
 
-            let fn_ty = func.ty(self.mir, self.tcx);
+            let fn_ty = func.ty(self.body, self.tcx);
             let mut callee_def_id = None;
             let mut is_shuffle = false;
             match fn_ty.sty {
@@ -1209,8 +1258,7 @@ fn visit_terminator_kind(&mut self,
                                 // special intrinsic that can be called directly without an intrinsic
                                 // feature gate needs a language feature gate
                                 "transmute" => {
-                                    // never promote transmute calls
-                                    if self.mode != Mode::Fn {
+                                    if self.mode.requires_const_checking() {
                                         // const eval transmute calls only with the feature gate
                                         if !self.tcx.features().const_transmute {
                                             emit_feature_err(
@@ -1233,7 +1281,7 @@ fn visit_terminator_kind(&mut self,
                         }
                         _ => {
                             // In normal functions no calls are feature-gated.
-                            if self.mode != Mode::Fn {
+                            if self.mode.requires_const_checking() {
                                 let unleash_miri = self
                                     .tcx
                                     .sess
@@ -1292,7 +1340,7 @@ fn visit_terminator_kind(&mut self,
                     }
                 }
                 ty::FnPtr(_) => {
-                    if self.mode != Mode::Fn {
+                    if self.mode.requires_const_checking() {
                         let mut err = self.tcx.sess.struct_span_err(
                             self.span,
                             &format!("function pointers are not allowed in const fn"));
@@ -1351,13 +1399,13 @@ fn visit_terminator_kind(&mut self,
             self.super_terminator_kind(kind, location);
 
             // Deny *any* live drops anywhere other than functions.
-            if self.mode != Mode::Fn {
+            if self.mode.requires_const_checking() {
                 unleash_miri!(self);
                 // HACK(eddyb): emulate a bit of dataflow analysis,
                 // conservatively, that drop elaboration will do.
                 let needs_drop = if let Place::Base(PlaceBase::Local(local)) = *place {
                     if NeedsDrop::in_local(self, local) {
-                        Some(self.mir.local_decls[local].source_info.span)
+                        Some(self.body.local_decls[local].source_info.span)
                     } else {
                         None
                     }
@@ -1367,7 +1415,7 @@ fn visit_terminator_kind(&mut self,
 
                 if let Some(span) = needs_drop {
                     // Double-check the type being dropped, to minimize false positives.
-                    let ty = place.ty(self.mir, self.tcx).ty;
+                    let ty = place.ty(self.body, self.tcx).ty;
                     if ty.needs_drop(self.tcx, self.param_env) {
                         struct_span_err!(self.tcx.sess, span, E0493,
                                          "destructors cannot be evaluated at compile-time")
@@ -1431,14 +1479,14 @@ fn mir_const_qualif<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
     // cannot yet be stolen), because `mir_validated()`, which steals
     // from `mir_const()`, forces this query to execute before
     // performing the steal.
-    let mir = &tcx.mir_const(def_id).borrow();
+    let body = &tcx.mir_const(def_id).borrow();
 
-    if mir.return_ty().references_error() {
-        tcx.sess.delay_span_bug(mir.span, "mir_const_qualif: MIR had errors");
+    if body.return_ty().references_error() {
+        tcx.sess.delay_span_bug(body.span, "mir_const_qualif: MIR had errors");
         return (1 << IsNotPromotable::IDX, tcx.arena.alloc(BitSet::new_empty(0)));
     }
 
-    Checker::new(tcx, def_id, mir, Mode::Const).check_const()
+    Checker::new(tcx, def_id, body, Mode::Const).check_const()
 }
 
 pub struct QualifyAndPromoteConstants;
@@ -1447,10 +1495,10 @@ impl MirPass for QualifyAndPromoteConstants {
     fn run_pass<'a, 'tcx>(&self,
                           tcx: TyCtxt<'a, 'tcx, 'tcx>,
                           src: MirSource<'tcx>,
-                          mir: &mut Body<'tcx>) {
+                          body: &mut Body<'tcx>) {
         // There's not really any point in promoting errorful MIR.
-        if mir.return_ty().references_error() {
-            tcx.sess.delay_span_bug(mir.span, "QualifyAndPromoteConstants: MIR had errors");
+        if body.return_ty().references_error() {
+            tcx.sess.delay_span_bug(body.span, "QualifyAndPromoteConstants: MIR had errors");
             return;
         }
 
@@ -1462,12 +1510,12 @@ fn run_pass<'a, 'tcx>(&self,
         let id = tcx.hir().as_local_hir_id(def_id).unwrap();
         let mut const_promoted_temps = None;
         let mode = match tcx.hir().body_owner_kind_by_hir_id(id) {
-            hir::BodyOwnerKind::Closure => Mode::Fn,
+            hir::BodyOwnerKind::Closure => Mode::NonConstFn,
             hir::BodyOwnerKind::Fn => {
                 if tcx.is_const_fn(def_id) {
                     Mode::ConstFn
                 } else {
-                    Mode::Fn
+                    Mode::NonConstFn
                 }
             }
             hir::BodyOwnerKind::Const => {
@@ -1479,18 +1527,18 @@ fn run_pass<'a, 'tcx>(&self,
         };
 
         debug!("run_pass: mode={:?}", mode);
-        if mode == Mode::Fn || mode == Mode::ConstFn {
+        if mode == Mode::NonConstFn || mode == Mode::ConstFn {
             // This is ugly because Checker holds onto the body,
             // which can't be mutated until its scope ends.
             let (temps, candidates) = {
-                let mut checker = Checker::new(tcx, def_id, mir, mode);
+                let mut checker = Checker::new(tcx, def_id, body, mode);
                 if mode == Mode::ConstFn {
                     if tcx.sess.opts.debugging_opts.unleash_the_miri_inside_of_you {
                         checker.check_const();
                     } else if tcx.is_min_const_fn(def_id) {
                         // enforce `min_const_fn` for stable const fns
                         use super::qualify_min_const_fn::is_min_const_fn;
-                        if let Err((span, err)) = is_min_const_fn(tcx, def_id, mir) {
+                        if let Err((span, err)) = is_min_const_fn(tcx, def_id, body) {
                             let mut diag = struct_span_err!(
                                 tcx.sess,
                                 span,
@@ -1523,12 +1571,12 @@ fn run_pass<'a, 'tcx>(&self,
             };
 
             // Do the actual promotion, now that we know what's viable.
-            promote_consts::promote_candidates(mir, tcx, temps, candidates);
+            promote_consts::promote_candidates(body, tcx, temps, candidates);
         } else {
-            if !mir.control_flow_destroyed.is_empty() {
-                let mut locals = mir.vars_iter();
+            if !body.control_flow_destroyed.is_empty() {
+                let mut locals = body.vars_iter();
                 if let Some(local) = locals.next() {
-                    let span = mir.local_decls[local].source_info.span;
+                    let span = body.local_decls[local].source_info.span;
                     let mut error = tcx.sess.struct_span_err(
                         span,
                         &format!(
@@ -1537,7 +1585,7 @@ fn run_pass<'a, 'tcx>(&self,
                             mode,
                         ),
                     );
-                    for (span, kind) in mir.control_flow_destroyed.iter() {
+                    for (span, kind) in body.control_flow_destroyed.iter() {
                         error.span_note(
                             *span,
                             &format!("use of {} here does not actually short circuit due to \
@@ -1547,7 +1595,7 @@ fn run_pass<'a, 'tcx>(&self,
                         );
                     }
                     for local in locals {
-                        let span = mir.local_decls[local].source_info.span;
+                        let span = body.local_decls[local].source_info.span;
                         error.span_note(
                             span,
                             "more locals defined here",
@@ -1560,14 +1608,14 @@ fn run_pass<'a, 'tcx>(&self,
                 // Already computed by `mir_const_qualif`.
                 const_promoted_temps.unwrap()
             } else {
-                Checker::new(tcx, def_id, mir, mode).check_const().1
+                Checker::new(tcx, def_id, body, mode).check_const().1
             };
 
             // In `const` and `static` everything without `StorageDead`
             // is `'static`, we don't have to create promoted MIR fragments,
             // just remove `Drop` and `StorageDead` on "promoted" locals.
             debug!("run_pass: promoted_temps={:?}", promoted_temps);
-            for block in mir.basic_blocks_mut() {
+            for block in body.basic_blocks_mut() {
                 block.statements.retain(|statement| {
                     match statement.kind {
                         StatementKind::StorageDead(index) => {
@@ -1602,10 +1650,10 @@ fn run_pass<'a, 'tcx>(&self,
                     return;
                 }
             }
-            let ty = mir.return_ty();
+            let ty = body.return_ty();
             tcx.infer_ctxt().enter(|infcx| {
                 let param_env = ty::ParamEnv::empty();
-                let cause = traits::ObligationCause::new(mir.span, id, traits::SharedStatic);
+                let cause = traits::ObligationCause::new(body.span, id, traits::SharedStatic);
                 let mut fulfillment_cx = traits::FulfillmentContext::new();
                 fulfillment_cx.register_bound(&infcx,
                                               param_env,
index 4811380e238603bec257fef91a51a5595cf37ec4..7bafef79acd1a48ef5e18ce9ff6d5e3c4aea0b2b 100644 (file)
@@ -11,7 +11,7 @@
 pub fn is_min_const_fn(
     tcx: TyCtxt<'a, 'tcx, 'tcx>,
     def_id: DefId,
-    mir: &'a Body<'tcx>,
+    body: &'a Body<'tcx>,
 ) -> McfResult {
     let mut current = def_id;
     loop {
@@ -59,21 +59,21 @@ pub fn is_min_const_fn(
         }
     }
 
-    for local in &mir.local_decls {
+    for local in &body.local_decls {
         check_ty(tcx, local.ty, local.source_info.span, def_id)?;
     }
     // impl trait is gone in MIR, so check the return type manually
     check_ty(
         tcx,
         tcx.fn_sig(def_id).output().skip_binder(),
-        mir.local_decls.iter().next().unwrap().source_info.span,
+        body.local_decls.iter().next().unwrap().source_info.span,
         def_id,
     )?;
 
-    for bb in mir.basic_blocks() {
-        check_terminator(tcx, mir, bb.terminator())?;
+    for bb in body.basic_blocks() {
+        check_terminator(tcx, body, bb.terminator())?;
         for stmt in &bb.statements {
-            check_statement(tcx, mir, stmt)?;
+            check_statement(tcx, body, stmt)?;
         }
     }
     Ok(())
@@ -130,7 +130,7 @@ fn check_ty(
 
 fn check_rvalue(
     tcx: TyCtxt<'a, 'tcx, 'tcx>,
-    mir: &'a Body<'tcx>,
+    body: &'a Body<'tcx>,
     rvalue: &Rvalue<'tcx>,
     span: Span,
 ) -> McfResult {
@@ -143,7 +143,7 @@ fn check_rvalue(
         }
         Rvalue::Cast(CastKind::Misc, operand, cast_ty) => {
             use rustc::ty::cast::CastTy;
-            let cast_in = CastTy::from_ty(operand.ty(mir, tcx)).expect("bad input type for cast");
+            let cast_in = CastTy::from_ty(operand.ty(body, tcx)).expect("bad input type for cast");
             let cast_out = CastTy::from_ty(cast_ty).expect("bad output type for cast");
             match (cast_in, cast_out) {
                 (CastTy::Ptr(_), CastTy::Int(_)) | (CastTy::FnPtr, CastTy::Int(_)) => Err((
@@ -173,7 +173,7 @@ fn check_rvalue(
         Rvalue::BinaryOp(_, lhs, rhs) | Rvalue::CheckedBinaryOp(_, lhs, rhs) => {
             check_operand(lhs, span)?;
             check_operand(rhs, span)?;
-            let ty = lhs.ty(mir, tcx);
+            let ty = lhs.ty(body, tcx);
             if ty.is_integral() || ty.is_bool() || ty.is_char() {
                 Ok(())
             } else {
@@ -189,7 +189,7 @@ fn check_rvalue(
             "heap allocations are not allowed in const fn".into(),
         )),
         Rvalue::UnaryOp(_, operand) => {
-            let ty = operand.ty(mir, tcx);
+            let ty = operand.ty(body, tcx);
             if ty.is_integral() || ty.is_bool() {
                 check_operand(operand, span)
             } else {
@@ -210,14 +210,14 @@ fn check_rvalue(
 
 fn check_statement(
     tcx: TyCtxt<'a, 'tcx, 'tcx>,
-    mir: &'a Body<'tcx>,
+    body: &'a Body<'tcx>,
     statement: &Statement<'tcx>,
 ) -> McfResult {
     let span = statement.source_info.span;
     match &statement.kind {
         StatementKind::Assign(place, rval) => {
             check_place(place, span)?;
-            check_rvalue(tcx, mir, rval, span)
+            check_rvalue(tcx, body, rval, span)
         }
 
         StatementKind::FakeRead(_, place) => check_place(place, span),
@@ -280,7 +280,7 @@ fn check_place(
 
 fn check_terminator(
     tcx: TyCtxt<'a, 'tcx, 'tcx>,
-    mir: &'a Body<'tcx>,
+    body: &'a Body<'tcx>,
     terminator: &Terminator<'tcx>,
 ) -> McfResult {
     let span = terminator.source_info.span;
@@ -299,7 +299,7 @@ fn check_terminator(
 
         TerminatorKind::FalseEdges { .. } | TerminatorKind::SwitchInt { .. } => Err((
             span,
-            "`if`, `match`, `&&` and `||` are not stable in const fn".into(),
+            "loops and conditional expressions are not stable in const fn".into(),
         )),
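
For reference, a hedged sketch of the restriction behind the reworded message (function names are invented): `if` and `match` lower to `SwitchInt` terminators, which `is_min_const_fn` does not accept on stable at this point:

    // Accepted: straight-line MIR only.
    const fn double(x: u32) -> u32 {
        x * 2
    }

    // Rejected on stable: the `if` produces a `SwitchInt` terminator.
    // const fn pick(flag: bool) -> u32 {
    //     if flag { 1 } else { 0 }
    // }
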
         | TerminatorKind::Abort | TerminatorKind::Unreachable => {
             Err((span, "const fn with unreachable code is not stable".into()))
@@ -315,7 +315,7 @@ fn check_terminator(
             destination: _,
             cleanup: _,
         } => {
-            let fn_ty = func.ty(mir, tcx);
+            let fn_ty = func.ty(body, tcx);
             if let ty::FnDef(def_id, _) = fn_ty.sty {
 
                 // some intrinsics are waved through if called inside the
index 42818a571151e1a25efb409ec527de390c8b6e8c..30edf7f14a3d3c4ec44a9a06c60ca6943d912903 100644 (file)
 
 pub fn remove_noop_landing_pads<'a, 'tcx>(
     tcx: TyCtxt<'a, 'tcx, 'tcx>,
-    mir: &mut Body<'tcx>)
+    body: &mut Body<'tcx>)
 {
     if tcx.sess.no_landing_pads() {
         return
     }
-    debug!("remove_noop_landing_pads({:?})", mir);
+    debug!("remove_noop_landing_pads({:?})", body);
 
-    RemoveNoopLandingPads.remove_nop_landing_pads(mir)
+    RemoveNoopLandingPads.remove_nop_landing_pads(body)
 }
 
 impl MirPass for RemoveNoopLandingPads {
     fn run_pass<'a, 'tcx>(&self,
                           tcx: TyCtxt<'a, 'tcx, 'tcx>,
                           _src: MirSource<'tcx>,
-                          mir: &mut Body<'tcx>) {
-        remove_noop_landing_pads(tcx, mir);
+                          body: &mut Body<'tcx>) {
+        remove_noop_landing_pads(tcx, body);
     }
 }
 
@@ -34,10 +34,10 @@ impl RemoveNoopLandingPads {
     fn is_nop_landing_pad(
         &self,
         bb: BasicBlock,
-        mir: &Body<'_>,
+        body: &Body<'_>,
         nop_landing_pads: &BitSet<BasicBlock>,
     ) -> bool {
-        for stmt in &mir[bb].statements {
+        for stmt in &body[bb].statements {
             match stmt.kind {
                 StatementKind::FakeRead(..) |
                 StatementKind::StorageLive(_) |
@@ -61,7 +61,7 @@ fn is_nop_landing_pad(
             }
         }
 
-        let terminator = mir[bb].terminator();
+        let terminator = body[bb].terminator();
         match terminator.kind {
             TerminatorKind::Goto { .. } |
             TerminatorKind::Resume |
@@ -86,26 +86,26 @@ fn is_nop_landing_pad(
         }
     }
 
-    fn remove_nop_landing_pads(&self, mir: &mut Body<'_>) {
+    fn remove_nop_landing_pads(&self, body: &mut Body<'_>) {
         // make sure there's a single resume block
         let resume_block = {
-            let patch = MirPatch::new(mir);
+            let patch = MirPatch::new(body);
             let resume_block = patch.resume_block();
-            patch.apply(mir);
+            patch.apply(body);
             resume_block
         };
         debug!("remove_noop_landing_pads: resume block is {:?}", resume_block);
 
         let mut jumps_folded = 0;
         let mut landing_pads_removed = 0;
-        let mut nop_landing_pads = BitSet::new_empty(mir.basic_blocks().len());
+        let mut nop_landing_pads = BitSet::new_empty(body.basic_blocks().len());
 
         // This is a post-order traversal, so that if A post-dominates B
         // then A will be visited before B.
-        let postorder: Vec<_> = traversal::postorder(mir).map(|(bb, _)| bb).collect();
+        let postorder: Vec<_> = traversal::postorder(body).map(|(bb, _)| bb).collect();
         for bb in postorder {
             debug!("  processing {:?}", bb);
-            for target in mir[bb].terminator_mut().successors_mut() {
+            for target in body[bb].terminator_mut().successors_mut() {
                 if *target != resume_block && nop_landing_pads.contains(*target) {
                     debug!("    folding noop jump to {:?} to resume block", target);
                     *target = resume_block;
@@ -113,7 +113,7 @@ fn remove_nop_landing_pads(&self, mir: &mut Body<'_>) {
                 }
             }
 
-            match mir[bb].terminator_mut().unwind_mut() {
+            match body[bb].terminator_mut().unwind_mut() {
                 Some(unwind) => {
                     if *unwind == Some(resume_block) {
                         debug!("    removing noop landing pad");
@@ -125,7 +125,7 @@ fn remove_nop_landing_pads(&self, mir: &mut Body<'_>) {
                 _ => {}
             }
 
-            let is_nop_landing_pad = self.is_nop_landing_pad(bb, mir, &nop_landing_pads);
+            let is_nop_landing_pad = self.is_nop_landing_pad(bb, body, &nop_landing_pads);
             if is_nop_landing_pad {
                 nop_landing_pads.insert(bb);
             }
index 2b3eb9e1edf307ce91a9bef2f3dbe70dee72ac10..a6ae03e834b5e8fcf713e39de1380cd57358b2c0 100644 (file)
@@ -26,7 +26,7 @@
 
 impl MirPass for SanityCheck {
     fn run_pass<'a, 'tcx>(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>,
-                          src: MirSource<'tcx>, mir: &mut Body<'tcx>) {
+                          src: MirSource<'tcx>, body: &mut Body<'tcx>) {
         let def_id = src.def_id();
         if !tcx.has_attr(def_id, sym::rustc_mir) {
             debug!("skipping rustc_peek::SanityCheck on {}", tcx.def_path_str(def_id));
@@ -37,30 +37,30 @@ fn run_pass<'a, 'tcx>(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>,
 
         let attributes = tcx.get_attrs(def_id);
         let param_env = tcx.param_env(def_id);
-        let move_data = MoveData::gather_moves(mir, tcx).unwrap();
+        let move_data = MoveData::gather_moves(body, tcx).unwrap();
         let mdpe = MoveDataParamEnv { move_data: move_data, param_env: param_env };
-        let dead_unwinds = BitSet::new_empty(mir.basic_blocks().len());
+        let dead_unwinds = BitSet::new_empty(body.basic_blocks().len());
         let flow_inits =
-            do_dataflow(tcx, mir, def_id, &attributes, &dead_unwinds,
-                        MaybeInitializedPlaces::new(tcx, mir, &mdpe),
+            do_dataflow(tcx, body, def_id, &attributes, &dead_unwinds,
+                        MaybeInitializedPlaces::new(tcx, body, &mdpe),
                         |bd, i| DebugFormatted::new(&bd.move_data().move_paths[i]));
         let flow_uninits =
-            do_dataflow(tcx, mir, def_id, &attributes, &dead_unwinds,
-                        MaybeUninitializedPlaces::new(tcx, mir, &mdpe),
+            do_dataflow(tcx, body, def_id, &attributes, &dead_unwinds,
+                        MaybeUninitializedPlaces::new(tcx, body, &mdpe),
                         |bd, i| DebugFormatted::new(&bd.move_data().move_paths[i]));
         let flow_def_inits =
-            do_dataflow(tcx, mir, def_id, &attributes, &dead_unwinds,
-                        DefinitelyInitializedPlaces::new(tcx, mir, &mdpe),
+            do_dataflow(tcx, body, def_id, &attributes, &dead_unwinds,
+                        DefinitelyInitializedPlaces::new(tcx, body, &mdpe),
                         |bd, i| DebugFormatted::new(&bd.move_data().move_paths[i]));
 
         if has_rustc_mir_with(&attributes, sym::rustc_peek_maybe_init).is_some() {
-            sanity_check_via_rustc_peek(tcx, mir, def_id, &attributes, &flow_inits);
+            sanity_check_via_rustc_peek(tcx, body, def_id, &attributes, &flow_inits);
         }
         if has_rustc_mir_with(&attributes, sym::rustc_peek_maybe_uninit).is_some() {
-            sanity_check_via_rustc_peek(tcx, mir, def_id, &attributes, &flow_uninits);
+            sanity_check_via_rustc_peek(tcx, body, def_id, &attributes, &flow_uninits);
         }
         if has_rustc_mir_with(&attributes, sym::rustc_peek_definite_init).is_some() {
-            sanity_check_via_rustc_peek(tcx, mir, def_id, &attributes, &flow_def_inits);
+            sanity_check_via_rustc_peek(tcx, body, def_id, &attributes, &flow_def_inits);
         }
         if has_rustc_mir_with(&attributes, sym::stop_after_dataflow).is_some() {
             tcx.sess.fatal("stop_after_dataflow ended compilation");
@@ -85,7 +85,7 @@ fn run_pass<'a, 'tcx>(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>,
 /// expression form above, then that emits an error as well, but those
 /// errors are not intended to be used for unit tests.)
 pub fn sanity_check_via_rustc_peek<'a, 'tcx, O>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
-                                                mir: &Body<'tcx>,
+                                                body: &Body<'tcx>,
                                                 def_id: DefId,
                                                 _attributes: &[ast::Attribute],
                                                 results: &DataflowResults<'tcx, O>)
@@ -96,19 +96,19 @@ pub fn sanity_check_via_rustc_peek<'a, 'tcx, O>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
     // `dataflow::build_sets`. (But note it is doing non-standard
     // stuff, so such generalization may not be realistic.)
 
-    for bb in mir.basic_blocks().indices() {
-        each_block(tcx, mir, results, bb);
+    for bb in body.basic_blocks().indices() {
+        each_block(tcx, body, results, bb);
     }
 }
 
 fn each_block<'a, 'tcx, O>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
-                           mir: &Body<'tcx>,
+                           body: &Body<'tcx>,
                            results: &DataflowResults<'tcx, O>,
                            bb: mir::BasicBlock) where
     O: BitDenotation<'tcx, Idx=MovePathIndex> + HasMoveData<'tcx>
 {
     let move_data = results.0.operator.move_data();
-    let mir::BasicBlockData { ref statements, ref terminator, is_cleanup: _ } = mir[bb];
+    let mir::BasicBlockData { ref statements, ref terminator, is_cleanup: _ } = body[bb];
 
     let (args, span) = match is_rustc_peek(tcx, terminator) {
         Some(args_and_span) => args_and_span,
index e7be238e850f22a70dfffa9eb72294c51691a9d0..4d1c90bc56d9e4ff7689dc04a67e4dcc9cfe76e2 100644 (file)
@@ -44,12 +44,12 @@ pub fn new(label: &str) -> Self {
     }
 }
 
-pub fn simplify_cfg(mir: &mut Body<'_>) {
-    CfgSimplifier::new(mir).simplify();
-    remove_dead_blocks(mir);
+pub fn simplify_cfg(body: &mut Body<'_>) {
+    CfgSimplifier::new(body).simplify();
+    remove_dead_blocks(body);
 
     // FIXME: Should probably be moved into some kind of pass manager
-    mir.basic_blocks_mut().raw.shrink_to_fit();
+    body.basic_blocks_mut().raw.shrink_to_fit();
 }
 
 impl MirPass for SimplifyCfg {
@@ -60,9 +60,9 @@ fn name<'a>(&'a self) -> Cow<'a, str> {
     fn run_pass<'a, 'tcx>(&self,
                           _tcx: TyCtxt<'a, 'tcx, 'tcx>,
                           _src: MirSource<'tcx>,
-                          mir: &mut Body<'tcx>) {
-        debug!("SimplifyCfg({:?}) - simplifying {:?}", self.label, mir);
-        simplify_cfg(mir);
+                          body: &mut Body<'tcx>) {
+        debug!("SimplifyCfg({:?}) - simplifying {:?}", self.label, body);
+        simplify_cfg(body);
     }
 }
 
@@ -72,14 +72,14 @@ pub struct CfgSimplifier<'a, 'tcx: 'a> {
 }
 
 impl<'a, 'tcx: 'a> CfgSimplifier<'a, 'tcx> {
-    pub fn new(mir: &'a mut Body<'tcx>) -> Self {
-        let mut pred_count = IndexVec::from_elem(0u32, mir.basic_blocks());
+    pub fn new(body: &'a mut Body<'tcx>) -> Self {
+        let mut pred_count = IndexVec::from_elem(0u32, body.basic_blocks());
 
         // we can't use body.predecessors() here because that counts
         // dead blocks, which we don't want to.
         pred_count[START_BLOCK] = 1;
 
-        for (_, data) in traversal::preorder(mir) {
+        for (_, data) in traversal::preorder(body) {
             if let Some(ref term) = data.terminator {
                 for &tgt in term.successors() {
                     pred_count[tgt] += 1;
@@ -87,7 +87,7 @@ pub fn new(mir: &'a mut Body<'tcx>) -> Self {
             }
         }
 
-        let basic_blocks = mir.basic_blocks_mut();
+        let basic_blocks = body.basic_blocks_mut();
 
         CfgSimplifier {
             basic_blocks,
@@ -263,13 +263,13 @@ fn strip_nops(&mut self) {
     }
 }
 
-pub fn remove_dead_blocks(mir: &mut Body<'_>) {
-    let mut seen = BitSet::new_empty(mir.basic_blocks().len());
-    for (bb, _) in traversal::preorder(mir) {
+pub fn remove_dead_blocks(body: &mut Body<'_>) {
+    let mut seen = BitSet::new_empty(body.basic_blocks().len());
+    for (bb, _) in traversal::preorder(body) {
         seen.insert(bb.index());
     }
 
-    let basic_blocks = mir.basic_blocks_mut();
+    let basic_blocks = body.basic_blocks_mut();
 
     let num_blocks = basic_blocks.len();
     let mut replacements : Vec<_> = (0..num_blocks).map(BasicBlock::new).collect();
@@ -299,31 +299,31 @@ impl MirPass for SimplifyLocals {
     fn run_pass<'a, 'tcx>(&self,
                           tcx: TyCtxt<'a, 'tcx, 'tcx>,
                           _: MirSource<'tcx>,
-                          mir: &mut Body<'tcx>) {
-        let mut marker = DeclMarker { locals: BitSet::new_empty(mir.local_decls.len()) };
-        marker.visit_body(mir);
+                          body: &mut Body<'tcx>) {
+        let mut marker = DeclMarker { locals: BitSet::new_empty(body.local_decls.len()) };
+        marker.visit_body(body);
         // Return pointer and arguments are always live
         marker.locals.insert(RETURN_PLACE);
-        for arg in mir.args_iter() {
+        for arg in body.args_iter() {
             marker.locals.insert(arg);
         }
 
         // We may need to keep dead user variables live for debuginfo.
         if tcx.sess.opts.debuginfo == DebugInfo::Full {
-            for local in mir.vars_iter() {
+            for local in body.vars_iter() {
                 marker.locals.insert(local);
             }
         }
 
-        let map = make_local_map(&mut mir.local_decls, marker.locals);
+        let map = make_local_map(&mut body.local_decls, marker.locals);
         // Update references to all vars and tmps now
-        LocalUpdater { map }.visit_body(mir);
-        mir.local_decls.shrink_to_fit();
+        LocalUpdater { map }.visit_body(body);
+        body.local_decls.shrink_to_fit();
     }
 }
 
 /// Construct the mapping while swapping out unused stuff from the `vec`.
-fn make_local_map<'tcx, V>(
+fn make_local_map<V>(
     vec: &mut IndexVec<Local, V>,
     mask: BitSet<Local>,
 ) -> IndexVec<Local, Option<Local>> {
index 53949bcfcd707737340477b2943aba88c9915a52..938fa772e90807e1f0482ccf01360622d4eceac2 100644 (file)
@@ -22,8 +22,8 @@ fn name<'a>(&'a self) -> Cow<'a, str> {
     fn run_pass<'a, 'tcx>(&self,
                           tcx: TyCtxt<'a, 'tcx, 'tcx>,
                           _src: MirSource<'tcx>,
-                          mir: &mut Body<'tcx>) {
-        for block in mir.basic_blocks_mut() {
+                          body: &mut Body<'tcx>) {
+        for block in body.basic_blocks_mut() {
             let terminator = block.terminator_mut();
             terminator.kind = match terminator.kind {
                 TerminatorKind::SwitchInt {
index ee1d2ca2a891ce1907e303fe7e730076d4ea97dd..fbfc296cc31fae4ef56150bc0da5ba9169924b38 100644 (file)
@@ -40,18 +40,18 @@ impl MirPass for UniformArrayMoveOut {
     fn run_pass<'a, 'tcx>(&self,
                           tcx: TyCtxt<'a, 'tcx, 'tcx>,
                           _src: MirSource<'tcx>,
-                          mir: &mut Body<'tcx>) {
-        let mut patch = MirPatch::new(mir);
+                          body: &mut Body<'tcx>) {
+        let mut patch = MirPatch::new(body);
         {
-            let mut visitor = UniformArrayMoveOutVisitor{mir, patch: &mut patch, tcx};
-            visitor.visit_body(mir);
+            let mut visitor = UniformArrayMoveOutVisitor{body, patch: &mut patch, tcx};
+            visitor.visit_body(body);
         }
-        patch.apply(mir);
+        patch.apply(body);
     }
 }
 
 struct UniformArrayMoveOutVisitor<'a, 'tcx: 'a> {
-    mir: &'a Body<'tcx>,
+    body: &'a Body<'tcx>,
     patch: &'a mut MirPatch<'tcx>,
     tcx: TyCtxt<'a, 'tcx, 'tcx>,
 }
@@ -68,7 +68,7 @@ fn visit_assign(&mut self,
                                                      from_end: false} = proj.elem {
                     // no need for a transformation
                 } else {
-                    let place_ty = proj.base.ty(self.mir, self.tcx).ty;
+                    let place_ty = proj.base.ty(self.body, self.tcx).ty;
                     if let ty::Array(item_ty, const_size) = place_ty.sty {
                         if let Some(size) = const_size.assert_usize(self.tcx) {
                             assert!(size <= u32::max_value() as u64,
@@ -97,7 +97,7 @@ fn uniform(&mut self,
             ProjectionElem::Subslice{from, to} => {
                 self.patch.make_nop(location);
                 let temps : Vec<_> = (from..(size-to)).map(|i| {
-                    let temp = self.patch.new_temp(item_ty, self.mir.source_info(location).span);
+                    let temp = self.patch.new_temp(item_ty, self.body.source_info(location).span);
                     self.patch.add_statement(location, StatementKind::StorageLive(temp));
                     self.patch.add_assign(location,
                                           Place::Base(PlaceBase::Local(temp)),
@@ -165,23 +165,24 @@ impl MirPass for RestoreSubsliceArrayMoveOut {
     fn run_pass<'a, 'tcx>(&self,
                           tcx: TyCtxt<'a, 'tcx, 'tcx>,
                           _src: MirSource<'tcx>,
-                          mir: &mut Body<'tcx>) {
-        let mut patch = MirPatch::new(mir);
+                          body: &mut Body<'tcx>) {
+        let mut patch = MirPatch::new(body);
         {
             let mut visitor = RestoreDataCollector {
-                locals_use: IndexVec::from_elem(LocalUse::new(), &mir.local_decls),
+                locals_use: IndexVec::from_elem(LocalUse::new(), &body.local_decls),
                 candidates: vec![],
             };
-            visitor.visit_body(mir);
+            visitor.visit_body(body);
 
             for candidate in &visitor.candidates {
-                let statement = &mir[candidate.block].statements[candidate.statement_index];
+                let statement = &body[candidate.block].statements[candidate.statement_index];
                 if let StatementKind::Assign(ref dst_place, ref rval) = statement.kind {
                     if let Rvalue::Aggregate(box AggregateKind::Array(_), ref items) = **rval {
                         let items : Vec<_> = items.iter().map(|item| {
                             if let Operand::Move(Place::Base(PlaceBase::Local(local))) = item {
                                 let local_use = &visitor.locals_use[*local];
-                                let opt_index_and_place = Self::try_get_item_source(local_use, mir);
+                                let opt_index_and_place =
+                                    Self::try_get_item_source(local_use, body);
                                 // each local should be used twice:
                                 //  in assign and in aggregate statements
                                 if local_use.use_count == 2 && opt_index_and_place.is_some() {
@@ -194,7 +195,7 @@ fn run_pass<'a, 'tcx>(&self,
 
                         let opt_src_place = items.first().and_then(|x| *x).map(|x| x.2);
                         let opt_size = opt_src_place.and_then(|src_place| {
-                            let src_ty = src_place.ty(mir, tcx).ty;
+                            let src_ty = src_place.ty(body, tcx).ty;
                             if let ty::Array(_, ref size_o) = src_ty.sty {
                                 size_o.assert_usize(tcx)
                             } else {
@@ -206,7 +207,7 @@ fn run_pass<'a, 'tcx>(&self,
                 }
             }
         }
-        patch.apply(mir);
+        patch.apply(body);
     }
 }
 
@@ -254,9 +255,9 @@ fn check_and_patch<'tcx>(candidate: Location,
     }
 
     fn try_get_item_source<'a, 'tcx>(local_use: &LocalUse,
-                                     mir: &'a Body<'tcx>) -> Option<(u32, &'a Place<'tcx>)> {
+                                     body: &'a Body<'tcx>) -> Option<(u32, &'a Place<'tcx>)> {
         if let Some(location) = local_use.first_use {
-            let block = &mir[location.block];
+            let block = &body[location.block];
             if block.statements.len() > location.statement_index {
                 let statement = &block.statements[location.statement_index];
                 if let StatementKind::Assign(
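The run_pass hunk above shows the usual shape of a patching MIR pass after this rename: build a MirPatch from the body, record edits while visiting the body immutably, then apply them all at once. A minimal sketch of that pattern, assuming the body-taking signatures introduced in this commit; the pass function below is illustrative and not part of the patch:

    fn run_example_pass<'tcx>(body: &mut Body<'tcx>) {
        // Collect edits against an immutable view of the body.
        let mut patch = MirPatch::new(body);
        {
            // A MIR visitor would walk `body` here and only record changes on
            // `patch`, e.g. `patch.make_nop(loc)` or
            // `patch.add_statement(loc, StatementKind::StorageLive(temp))`.
        }
        // Splice all recorded statements, blocks, and locals into the body.
        patch.apply(body);
    }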
diff --git a/src/librustc_mir/util/aggregate.rs b/src/librustc_mir/util/aggregate.rs
new file mode 100644 (file)
index 0000000..98e7067
--- /dev/null
+++ b/src/librustc_mir/util/aggregate.rs
@@ -0,0 +1,76 @@
+use rustc::mir::*;
+use rustc::ty::Ty;
+use rustc::ty::layout::VariantIdx;
+use rustc_data_structures::indexed_vec::Idx;
+
+use std::iter::TrustedLen;
+
+/// Expand `lhs = Rvalue::Aggregate(kind, operands)` into assignments to the fields.
+///
+/// Produces something like
+///
+/// (lhs as Variant).field0 = arg0;     // We only have a downcast if this is an enum
+/// (lhs as Variant).field1 = arg1;
+/// discriminant(lhs) = variant_index;  // If lhs is an enum or generator.
+pub fn expand_aggregate<'tcx>(
+    mut lhs: Place<'tcx>,
+    operands: impl Iterator<Item=(Operand<'tcx>, Ty<'tcx>)> + TrustedLen,
+    kind: AggregateKind<'tcx>,
+    source_info: SourceInfo,
+) -> impl Iterator<Item=Statement<'tcx>> + TrustedLen {
+    let mut set_discriminant = None;
+    let active_field_index = match kind {
+        AggregateKind::Adt(adt_def, variant_index, _, _, active_field_index) => {
+            if adt_def.is_enum() {
+                set_discriminant = Some(Statement {
+                    kind: StatementKind::SetDiscriminant {
+                        place: lhs.clone(),
+                        variant_index,
+                    },
+                    source_info,
+                });
+                lhs = lhs.downcast(adt_def, variant_index);
+            }
+            active_field_index
+        }
+        AggregateKind::Generator(..) => {
+            // Right now we only support initializing generators to
+            // variant 0 (Unresumed).
+            let variant_index = VariantIdx::new(0);
+            set_discriminant = Some(Statement {
+                kind: StatementKind::SetDiscriminant {
+                    place: lhs.clone(),
+                    variant_index,
+                },
+                source_info,
+            });
+
+            // Operands are upvars stored on the base place, so no
+            // downcast is necessary.
+
+            None
+        }
+        _ => None
+    };
+
+    operands.into_iter().enumerate().map(move |(i, (op, ty))| {
+        let lhs_field = if let AggregateKind::Array(_) = kind {
+            // FIXME(eddyb) `offset` should be u64.
+            let offset = i as u32;
+            assert_eq!(offset as usize, i);
+            lhs.clone().elem(ProjectionElem::ConstantIndex {
+                offset,
+                // FIXME(eddyb) `min_length` doesn't appear to be used.
+                min_length: offset + 1,
+                from_end: false
+            })
+        } else {
+            let field = Field::new(active_field_index.unwrap_or(i));
+            lhs.clone().field(field, ty)
+        };
+        Statement {
+            source_info,
+            kind: StatementKind::Assign(lhs_field, box Rvalue::Use(op)),
+        }
+    }).chain(set_discriminant)
+}
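The doc comment on expand_aggregate above describes the per-field expansion it produces. As a hedged sketch of how a caller inside librustc_mir might collect those statements, assuming the items of this new module are in scope (the helper and its arguments are hypothetical, not part of the patch):

    fn lower_aggregate<'tcx>(
        lhs: Place<'tcx>,
        ops: Vec<(Operand<'tcx>, Ty<'tcx>)>, // operands paired with their field types
        kind: AggregateKind<'tcx>,
        source_info: SourceInfo,
    ) -> Vec<Statement<'tcx>> {
        // `vec::IntoIter` implements `TrustedLen`, satisfying the bound above.
        expand_aggregate(lhs, ops.into_iter(), kind, source_info).collect()
    }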
index ba0190756c54277c43b1f0906d7b2ee5c87c4bc2..fac752dbf023e9675fd7231f39b35bc7a90b2c65 100644 (file)
@@ -21,19 +21,19 @@ pub struct Use {
 }
 
 impl DefUseAnalysis {
-    pub fn new(mir: &Body<'_>) -> DefUseAnalysis {
+    pub fn new(body: &Body<'_>) -> DefUseAnalysis {
         DefUseAnalysis {
-            info: IndexVec::from_elem_n(Info::new(), mir.local_decls.len()),
+            info: IndexVec::from_elem_n(Info::new(), body.local_decls.len()),
         }
     }
 
-    pub fn analyze(&mut self, mir: &Body<'_>) {
+    pub fn analyze(&mut self, body: &Body<'_>) {
         self.clear();
 
         let mut finder = DefUseFinder {
             info: mem::replace(&mut self.info, IndexVec::new()),
         };
-        finder.visit_body(mir);
+        finder.visit_body(body);
         self.info = finder.info
     }
 
@@ -47,23 +47,23 @@ pub fn local_info(&self, local: Local) -> &Info {
         &self.info[local]
     }
 
-    fn mutate_defs_and_uses<F>(&self, local: Local, mir: &mut Body<'_>, mut callback: F)
+    fn mutate_defs_and_uses<F>(&self, local: Local, body: &mut Body<'_>, mut callback: F)
                                where F: for<'a> FnMut(&'a mut Local,
                                                       PlaceContext,
                                                       Location) {
         for place_use in &self.info[local].defs_and_uses {
             MutateUseVisitor::new(local,
                                   &mut callback,
-                                  mir).visit_location(mir, place_use.location)
+                                  body).visit_location(body, place_use.location)
         }
     }
 
     // FIXME(pcwalton): this should update the def-use chains.
     pub fn replace_all_defs_and_uses_with(&self,
                                           local: Local,
-                                          mir: &mut Body<'_>,
+                                          body: &mut Body<'_>,
                                           new_local: Local) {
-        self.mutate_defs_and_uses(local, mir, |local, _, _| *local = new_local)
+        self.mutate_defs_and_uses(local, body, |local, _, _| *local = new_local)
     }
 }
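A sketch of how the renamed DefUseAnalysis entry points compose: analyze the body, then rewrite every def and use of one local to refer to another. The helper below is illustrative only and assumes it runs inside a pass that already has mutable access to the body:

    fn rename_local(body: &mut Body<'_>, old: Local, new: Local) {
        let mut def_use = DefUseAnalysis::new(body);
        def_use.analyze(body);
        // Rewrites every recorded def and use of `old` so it names `new` instead.
        def_use.replace_all_defs_and_uses_with(old, body, new);
    }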
 
index 076ba60c6441c5da540fda8630424784236511de..b8ce31d0adfdc37502bb997644d5ea3b8497f9ef 100644 (file)
@@ -74,7 +74,7 @@ pub trait DropElaborator<'a, 'tcx: 'a> : fmt::Debug {
     type Path : Copy + fmt::Debug;
 
     fn patch(&mut self) -> &mut MirPatch<'tcx>;
-    fn mir(&self) -> &'a Body<'tcx>;
+    fn body(&self) -> &'a Body<'tcx>;
     fn tcx(&self) -> TyCtxt<'a, 'tcx, 'tcx>;
     fn param_env(&self) -> ty::ParamEnv<'tcx>;
 
@@ -119,10 +119,11 @@ pub fn elaborate_drop<'b, 'tcx, D>(
 }
 
 impl<'l, 'b, 'tcx, D> DropCtxt<'l, 'b, 'tcx, D>
-    where D: DropElaborator<'b, 'tcx>
+where
+    D: DropElaborator<'b, 'tcx>,
 {
     fn place_ty(&self, place: &Place<'tcx>) -> Ty<'tcx> {
-        place.ty(self.elaborator.mir(), self.tcx()).ty
+        place.ty(self.elaborator.body(), self.tcx()).ty
     }
 
     fn tcx(&self) -> TyCtxt<'b, 'tcx, 'tcx> {
@@ -147,7 +148,7 @@ fn tcx(&self) -> TyCtxt<'b, 'tcx, 'tcx> {
     //
     // FIXME: I think we should just control the flags externally,
     // and then we do not need this machinery.
-    pub fn elaborate_drop<'a>(&mut self, bb: BasicBlock) {
+    pub fn elaborate_drop(&mut self, bb: BasicBlock) {
         debug!("elaborate_drop({:?})", self);
         let style = self.elaborator.drop_style(self.path, DropFlagMode::Deep);
         debug!("elaborate_drop({:?}): live - {:?}", self, style);
@@ -285,12 +286,12 @@ fn drop_ladder_bottom(&mut self) -> (BasicBlock, Unwind) {
     ///
     /// NOTE: this does not clear the master drop flag, so you need
     /// to point succ/unwind on a `drop_ladder_bottom`.
-    fn drop_ladder<'a>(&mut self,
-                       fields: Vec<(Place<'tcx>, Option<D::Path>)>,
-                       succ: BasicBlock,
-                       unwind: Unwind)
-                       -> (BasicBlock, Unwind)
-    {
+    fn drop_ladder(
+        &mut self,
+        fields: Vec<(Place<'tcx>, Option<D::Path>)>,
+        succ: BasicBlock,
+        unwind: Unwind,
+    ) -> (BasicBlock, Unwind) {
         debug!("drop_ladder({:?}, {:?})", self, fields);
 
         let mut fields = fields;
@@ -314,9 +315,7 @@ fn drop_ladder<'a>(&mut self,
         (*normal_ladder.last().unwrap(), *unwind_ladder.last().unwrap())
     }
 
-    fn open_drop_for_tuple<'a>(&mut self, tys: &[Ty<'tcx>])
-                               -> BasicBlock
-    {
+    fn open_drop_for_tuple(&mut self, tys: &[Ty<'tcx>]) -> BasicBlock {
         debug!("open_drop_for_tuple({:?}, {:?})", self, tys);
 
         let fields = tys.iter().enumerate().map(|(i, &ty)| {
@@ -328,9 +327,7 @@ fn open_drop_for_tuple<'a>(&mut self, tys: &[Ty<'tcx>])
         self.drop_ladder(fields, succ, unwind).0
     }
 
-    fn open_drop_for_box<'a>(&mut self, adt: &'tcx ty::AdtDef, substs: SubstsRef<'tcx>)
-                             -> BasicBlock
-    {
+    fn open_drop_for_box(&mut self, adt: &'tcx ty::AdtDef, substs: SubstsRef<'tcx>) -> BasicBlock {
         debug!("open_drop_for_box({:?}, {:?}, {:?})", self, adt, substs);
 
         let interior = self.place.clone().deref();
@@ -346,8 +343,7 @@ fn open_drop_for_box<'a>(&mut self, adt: &'tcx ty::AdtDef, substs: SubstsRef<'tc
         self.drop_subpath(&interior, interior_path, succ, unwind_succ)
     }
 
-    fn open_drop_for_adt<'a>(&mut self, adt: &'tcx ty::AdtDef, substs: SubstsRef<'tcx>)
-                             -> BasicBlock {
+    fn open_drop_for_adt(&mut self, adt: &'tcx ty::AdtDef, substs: SubstsRef<'tcx>) -> BasicBlock {
         debug!("open_drop_for_adt({:?}, {:?}, {:?})", self, adt, substs);
         if adt.variants.len() == 0 {
             return self.elaborator.patch().new_block(BasicBlockData {
@@ -505,9 +501,7 @@ fn adt_switch_block(&mut self,
         self.drop_flag_test_block(switch_block, succ, unwind)
     }
 
-    fn destructor_call_block<'a>(&mut self, (succ, unwind): (BasicBlock, Unwind))
-                                 -> BasicBlock
-    {
+    fn destructor_call_block(&mut self, (succ, unwind): (BasicBlock, Unwind)) -> BasicBlock {
         debug!("destructor_call_block({:?}, {:?})", self, succ);
         let tcx = self.tcx();
         let drop_trait = tcx.lang_items().drop_trait().unwrap();
@@ -787,7 +781,7 @@ fn drop_loop_pair(
     ///
     /// This creates a "drop ladder" that drops the needed fields of the
     /// ADT, both in the success case and if one of the destructors fails.
-    fn open_drop<'a>(&mut self) -> BasicBlock {
+    fn open_drop(&mut self) -> BasicBlock {
         let ty = self.place_ty(self.place);
         match ty.sty {
             ty::Closure(def_id, substs) => {
@@ -799,7 +793,7 @@ fn open_drop<'a>(&mut self) -> BasicBlock {
             // within that own generator's resume function.
             // This should only happen for the self argument on the resume function.
             // It effectively only contains upvars until the generator transformation runs.
             // See librustc_mir/transform/generator.rs for more details.
             ty::Generator(def_id, substs, _) => {
                 let tys : Vec<_> = substs.upvar_tys(def_id, self.tcx()).collect();
                 self.open_drop_for_tuple(&tys)
@@ -837,11 +831,12 @@ fn open_drop<'a>(&mut self) -> BasicBlock {
     /// if FLAG(self.path)
     ///     if let Some(mode) = mode: FLAG(self.path)[mode] = false
     ///     drop(self.place)
-    fn complete_drop<'a>(&mut self,
-                         drop_mode: Option<DropFlagMode>,
-                         succ: BasicBlock,
-                         unwind: Unwind) -> BasicBlock
-    {
+    fn complete_drop(
+        &mut self,
+        drop_mode: Option<DropFlagMode>,
+        succ: BasicBlock,
+        unwind: Unwind,
+    ) -> BasicBlock {
         debug!("complete_drop({:?},{:?})", self, drop_mode);
 
         let drop_block = self.drop_block(succ, unwind);
@@ -867,7 +862,7 @@ fn drop_flag_reset_block(&mut self,
         block
     }
 
-    fn elaborated_drop_block<'a>(&mut self) -> BasicBlock {
+    fn elaborated_drop_block(&mut self) -> BasicBlock {
         debug!("elaborated_drop_block({:?})", self);
         let unwind = self.unwind; // FIXME(#43234)
         let succ = self.succ;
@@ -876,7 +871,7 @@ fn elaborated_drop_block<'a>(&mut self) -> BasicBlock {
         blk
     }
 
-    fn box_free_block<'a>(
+    fn box_free_block(
         &mut self,
         adt: &'tcx ty::AdtDef,
         substs: SubstsRef<'tcx>,
@@ -887,12 +882,12 @@ fn box_free_block<'a>(
         self.drop_flag_test_block(block, target, unwind)
     }
 
-    fn unelaborated_free_block<'a>(
+    fn unelaborated_free_block(
         &mut self,
         adt: &'tcx ty::AdtDef,
         substs: SubstsRef<'tcx>,
         target: BasicBlock,
-        unwind: Unwind
+        unwind: Unwind,
     ) -> BasicBlock {
         let tcx = self.tcx();
         let unit_temp = Place::Base(PlaceBase::Local(self.new_temp(tcx.mk_unit())));
@@ -917,7 +912,7 @@ fn unelaborated_free_block<'a>(
         free_block
     }
 
-    fn drop_block<'a>(&mut self, target: BasicBlock, unwind: Unwind) -> BasicBlock {
+    fn drop_block(&mut self, target: BasicBlock, unwind: Unwind) -> BasicBlock {
         let block = TerminatorKind::Drop {
             location: self.place.clone(),
             target,
@@ -947,11 +942,7 @@ fn drop_flag_test_block(&mut self,
         }
     }
 
-    fn new_block<'a>(&mut self,
-                     unwind: Unwind,
-                     k: TerminatorKind<'tcx>)
-                     -> BasicBlock
-    {
+    fn new_block(&mut self, unwind: Unwind, k: TerminatorKind<'tcx>) -> BasicBlock {
         self.elaborator.patch().new_block(BasicBlockData {
             statements: vec![],
             terminator: Some(Terminator {
@@ -966,8 +957,8 @@ fn new_temp(&mut self, ty: Ty<'tcx>) -> Local {
     }
 
     fn terminator_loc(&mut self, bb: BasicBlock) -> Location {
-        let mir = self.elaborator.mir();
-        self.elaborator.patch().terminator_loc(mir, bb)
+        let body = self.elaborator.body();
+        self.elaborator.patch().terminator_loc(body, bb)
     }
 
     fn constant_usize(&self, val: u16) -> Operand<'tcx> {
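With mir() renamed to body(), code that is generic over a DropElaborator resolves place types the same way place_ty does above. A small sketch of that idiom; the free function itself is hypothetical:

    fn place_ty_via_elaborator<'b, 'tcx, D>(elab: &D, place: &Place<'tcx>) -> Ty<'tcx>
    where
        D: DropElaborator<'b, 'tcx>,
        'tcx: 'b,
    {
        // Mirrors `DropCtxt::place_ty`: look the place up in the elaborator's body.
        place.ty(elab.body(), elab.tcx()).ty
    }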
index 20d92da9ba330e86e6635e145fbf1f3b221e4edd..fe0a1198701401028942fa0f00c65e7020dd3519 100644 (file)
@@ -15,8 +15,8 @@ pub fn write_mir_graphviz<'tcx, W>(tcx: TyCtxt<'_, '_, 'tcx>,
     where W: Write
 {
     for def_id in dump_mir_def_ids(tcx, single) {
-        let mir = &tcx.optimized_mir(def_id);
-        write_mir_fn_graphviz(tcx, def_id, mir, w)?;
+        let body = &tcx.optimized_mir(def_id);
+        write_mir_fn_graphviz(tcx, def_id, body, w)?;
     }
     Ok(())
 }
@@ -34,7 +34,7 @@ pub fn graphviz_safe_def_name(def_id: DefId) -> String {
 /// Write a graphviz DOT graph of the MIR.
 pub fn write_mir_fn_graphviz<'tcx, W>(tcx: TyCtxt<'_, '_, 'tcx>,
                                       def_id: DefId,
-                                      mir: &Body<'_>,
+                                      body: &Body<'_>,
                                       w: &mut W) -> io::Result<()>
     where W: Write
 {
@@ -46,16 +46,16 @@ pub fn write_mir_fn_graphviz<'tcx, W>(tcx: TyCtxt<'_, '_, 'tcx>,
     writeln!(w, r#"    edge [fontname="monospace"];"#)?;
 
     // Graph label
-    write_graph_label(tcx, def_id, mir, w)?;
+    write_graph_label(tcx, def_id, body, w)?;
 
     // Nodes
-    for (block, _) in mir.basic_blocks().iter_enumerated() {
-        write_node(block, mir, w)?;
+    for (block, _) in body.basic_blocks().iter_enumerated() {
+        write_node(block, body, w)?;
     }
 
     // Edges
-    for (source, _) in mir.basic_blocks().iter_enumerated() {
-        write_edges(source, mir, w)?;
+    for (source, _) in body.basic_blocks().iter_enumerated() {
+        write_edges(source, body, w)?;
     }
     writeln!(w, "}}")
 }
@@ -68,7 +68,7 @@ pub fn write_mir_fn_graphviz<'tcx, W>(tcx: TyCtxt<'_, '_, 'tcx>,
 /// `init` and `fini` are callbacks for emitting additional rows of
 /// data (using HTML enclosed with `<tr>` in the emitted text).
 pub fn write_node_label<W: Write, INIT, FINI>(block: BasicBlock,
-                                              mir: &Body<'_>,
+                                              body: &Body<'_>,
                                               w: &mut W,
                                               num_cols: u32,
                                               init: INIT,
@@ -76,7 +76,7 @@ pub fn write_node_label<W: Write, INIT, FINI>(block: BasicBlock,
     where INIT: Fn(&mut W) -> io::Result<()>,
           FINI: Fn(&mut W) -> io::Result<()>
 {
-    let data = &mir[block];
+    let data = &body[block];
 
     write!(w, r#"<table border="0" cellborder="1" cellspacing="0">"#)?;
 
@@ -110,17 +110,17 @@ pub fn write_node_label<W: Write, INIT, FINI>(block: BasicBlock,
 }
 
 /// Write a graphviz DOT node for the given basic block.
-fn write_node<W: Write>(block: BasicBlock, mir: &Body<'_>, w: &mut W) -> io::Result<()> {
+fn write_node<W: Write>(block: BasicBlock, body: &Body<'_>, w: &mut W) -> io::Result<()> {
     // Start a new node with the label to follow, in one of DOT's pseudo-HTML tables.
     write!(w, r#"    {} [shape="none", label=<"#, node(block))?;
-    write_node_label(block, mir, w, 1, |_| Ok(()), |_| Ok(()))?;
+    write_node_label(block, body, w, 1, |_| Ok(()), |_| Ok(()))?;
     // Close the node label and the node itself.
     writeln!(w, ">];")
 }
 
 /// Write graphviz DOT edges with labels between the given basic block and all of its successors.
-fn write_edges<W: Write>(source: BasicBlock, mir: &Body<'_>, w: &mut W) -> io::Result<()> {
-    let terminator = mir[source].terminator();
+fn write_edges<W: Write>(source: BasicBlock, body: &Body<'_>, w: &mut W) -> io::Result<()> {
+    let terminator = body[source].terminator();
     let labels = terminator.kind.fmt_successor_labels();
 
     for (&target, label) in terminator.successors().zip(labels) {
@@ -135,28 +135,28 @@ fn write_edges<W: Write>(source: BasicBlock, mir: &Body<'_>, w: &mut W) -> io::R
 /// all the variables and temporaries.
 fn write_graph_label<'a, 'gcx, 'tcx, W: Write>(tcx: TyCtxt<'a, 'gcx, 'tcx>,
                                                def_id: DefId,
-                                               mir: &Body<'_>,
+                                               body: &Body<'_>,
                                                w: &mut W)
                                                -> io::Result<()> {
     write!(w, "    label=<fn {}(", dot::escape_html(&tcx.def_path_str(def_id)))?;
 
     // fn argument types.
-    for (i, arg) in mir.args_iter().enumerate() {
+    for (i, arg) in body.args_iter().enumerate() {
         if i > 0 {
             write!(w, ", ")?;
         }
         write!(w,
                "{:?}: {}",
                Place::Base(PlaceBase::Local(arg)),
-               escape(&mir.local_decls[arg].ty)
+               escape(&body.local_decls[arg].ty)
         )?;
     }
 
-    write!(w, ") -&gt; {}", escape(&mir.return_ty()))?;
+    write!(w, ") -&gt; {}", escape(&body.return_ty()))?;
     write!(w, r#"<br align="left"/>"#)?;
 
-    for local in mir.vars_and_temps_iter() {
-        let decl = &mir.local_decls[local];
+    for local in body.vars_and_temps_iter() {
+        let decl = &body.local_decls[local];
 
         write!(w, "let ")?;
         if decl.mutability == Mutability::Mut {
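These functions emit a DOT rendering of a body: write_mir_fn_graphviz writes the graph label, one node per basic block, and the edges between them. A hedged sketch of driving it into an in-memory buffer, assuming a TyCtxt and DefId are at hand and this module's imports are in scope (the helper name is illustrative):

    fn mir_dot_string<'tcx>(tcx: TyCtxt<'_, '_, 'tcx>, def_id: DefId) -> io::Result<String> {
        let body = &tcx.optimized_mir(def_id);
        let mut buf = Vec::new();
        // `Vec<u8>` implements `io::Write`, so the DOT text lands in `buf`.
        write_mir_fn_graphviz(tcx, def_id, body, &mut buf)?;
        Ok(String::from_utf8(buf).expect("graphviz output is UTF-8"))
    }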
index a3317d3956be43015ffbc99766df5c963fd0beda..82ec5ab5f8a6afa5d164192d756d0a9eb0c8a075 100644 (file)
@@ -57,17 +57,17 @@ pub struct LivenessResult {
 /// Computes which local variables are live within the given function
 /// `body`, including drops.
 pub fn liveness_of_locals<'tcx>(
-    mir: &Body<'tcx>,
+    body: &Body<'tcx>,
 ) -> LivenessResult {
-    let num_live_vars = mir.local_decls.len();
+    let num_live_vars = body.local_decls.len();
 
-    let def_use: IndexVec<_, DefsUses> = mir
+    let def_use: IndexVec<_, DefsUses> = body
         .basic_blocks()
         .iter()
         .map(|b| block(b, num_live_vars))
         .collect();
 
-    let mut outs: IndexVec<_, LiveVarSet> = mir
+    let mut outs: IndexVec<_, LiveVarSet> = body
         .basic_blocks()
         .indices()
         .map(|_| LiveVarSet::new_empty(num_live_vars))
@@ -77,9 +77,9 @@ pub fn liveness_of_locals<'tcx>(
 
     // queue of things that need to be re-processed, and a set containing
     // the things currently in the queue
-    let mut dirty_queue: WorkQueue<BasicBlock> = WorkQueue::with_all(mir.basic_blocks().len());
+    let mut dirty_queue: WorkQueue<BasicBlock> = WorkQueue::with_all(body.basic_blocks().len());
 
-    let predecessors = mir.predecessors();
+    let predecessors = body.predecessors();
 
     while let Some(bb) = dirty_queue.pop() {
         // bits = use ∪ (bits - def)
@@ -109,7 +109,7 @@ pub enum DefUse {
     Drop,
 }
 
-pub fn categorize<'tcx>(context: PlaceContext) -> Option<DefUse> {
+pub fn categorize(context: PlaceContext) -> Option<DefUse> {
     match context {
         ///////////////////////////////////////////////////////////////////////////
         // DEFS
@@ -258,7 +258,7 @@ pub fn dump_mir<'a, 'tcx>(
     tcx: TyCtxt<'a, 'tcx, 'tcx>,
     pass_name: &str,
     source: MirSource<'tcx>,
-    mir: &Body<'tcx>,
+    body: &Body<'tcx>,
     result: &LivenessResult,
 ) {
     if !dump_enabled(tcx, pass_name, source) {
@@ -268,7 +268,7 @@ pub fn dump_mir<'a, 'tcx>(
         // see notes on #41697 below
         tcx.def_path_str(source.def_id())
     });
-    dump_matched_mir_node(tcx, pass_name, &node_path, source, mir, result);
+    dump_matched_mir_node(tcx, pass_name, &node_path, source, body, result);
 }
 
 fn dump_matched_mir_node<'a, 'tcx>(
@@ -276,7 +276,7 @@ fn dump_matched_mir_node<'a, 'tcx>(
     pass_name: &str,
     node_path: &str,
     source: MirSource<'tcx>,
-    mir: &Body<'tcx>,
+    body: &Body<'tcx>,
     result: &LivenessResult,
 ) {
     let mut file_path = PathBuf::new();
@@ -289,7 +289,7 @@ fn dump_matched_mir_node<'a, 'tcx>(
         writeln!(file, "// source = {:?}", source)?;
         writeln!(file, "// pass_name = {}", pass_name)?;
         writeln!(file, "")?;
-        write_mir_fn(tcx, source, mir, &mut file, result)?;
+        write_mir_fn(tcx, source, body, &mut file, result)?;
         Ok(())
     });
 }
@@ -297,12 +297,12 @@ fn dump_matched_mir_node<'a, 'tcx>(
 pub fn write_mir_fn<'a, 'tcx>(
     tcx: TyCtxt<'a, 'tcx, 'tcx>,
     src: MirSource<'tcx>,
-    mir: &Body<'tcx>,
+    body: &Body<'tcx>,
     w: &mut dyn Write,
     result: &LivenessResult,
 ) -> io::Result<()> {
-    write_mir_intro(tcx, src, mir, w)?;
-    for block in mir.basic_blocks().indices() {
+    write_mir_intro(tcx, src, body, w)?;
+    for block in body.basic_blocks().indices() {
         let print = |w: &mut dyn Write, prefix, result: &IndexVec<BasicBlock, LiveVarSet>| {
             let live: Vec<String> = result[block]
                 .iter()
@@ -310,9 +310,9 @@ pub fn write_mir_fn<'a, 'tcx>(
                 .collect();
             writeln!(w, "{} {{{}}}", prefix, live.join(", "))
         };
-        write_basic_block(tcx, block, mir, &mut |_, _| Ok(()), w)?;
+        write_basic_block(tcx, block, body, &mut |_, _| Ok(()), w)?;
         print(w, "   ", &result.outs)?;
-        if block.index() + 1 != mir.basic_blocks().len() {
+        if block.index() + 1 != body.basic_blocks().len() {
             writeln!(w, "")?;
         }
     }
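liveness_of_locals, as documented above, runs a backwards dataflow over the body (the worklist computes bits = use ∪ (bits - def) per block) and returns the live-out set of each block. A small usage sketch, assuming a &Body is available and this module's items are in scope (the printing helper is illustrative):

    fn print_live_outs(body: &Body<'_>) {
        let result = liveness_of_locals(body);
        for (bb, live) in result.outs.iter_enumerated() {
            // `live` holds the locals that are live on exit from `bb`.
            let names: Vec<String> = live.iter().map(|l| format!("{:?}", l)).collect();
            println!("{:?}: {{{}}}", bb, names.join(", "));
        }
    }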
index 0e7f473a3e70d2af498488f8531661fdf6cac6bd..e340029434d815416e4b667de3beb1b2dc4aebbb 100644 (file)
@@ -2,6 +2,7 @@
 use rustc::ty::TyCtxt;
 use syntax_pos::Span;
 
+pub mod aggregate;
 pub mod borrowck_errors;
 pub mod elaborate_drops;
 pub mod def_use;
@@ -13,6 +14,7 @@
 pub mod liveness;
 pub mod collect_writes;
 
+pub use self::aggregate::expand_aggregate;
 pub use self::alignment::is_disaligned;
 pub use self::pretty::{dump_enabled, dump_mir, write_mir_pretty, PassWhere};
 pub use self::graphviz::{graphviz_safe_def_name, write_mir_graphviz};
index 974dda867bc9df5e7a7f0b5fef6dc6e337ee6e98..eb457dacf8467982a39222e1330580d982ac024f 100644 (file)
@@ -17,13 +17,13 @@ pub struct MirPatch<'tcx> {
 }
 
 impl<'tcx> MirPatch<'tcx> {
-    pub fn new(mir: &Body<'tcx>) -> Self {
+    pub fn new(body: &Body<'tcx>) -> Self {
         let mut result = MirPatch {
-            patch_map: IndexVec::from_elem(None, mir.basic_blocks()),
+            patch_map: IndexVec::from_elem(None, body.basic_blocks()),
             new_blocks: vec![],
             new_statements: vec![],
             new_locals: vec![],
-            next_local: mir.local_decls.len(),
+            next_local: body.local_decls.len(),
             resume_block: START_BLOCK,
             make_nop: vec![]
         };
@@ -35,7 +35,7 @@ pub fn new(mir: &Body<'tcx>) -> Self {
 
         let mut resume_block = None;
         let mut resume_stmt_block = None;
-        for (bb, block) in mir.basic_blocks().iter_enumerated() {
+        for (bb, block) in body.basic_blocks().iter_enumerated() {
             if let TerminatorKind::Resume = block.terminator().kind {
                 if block.statements.len() > 0 {
                     assert!(resume_stmt_block.is_none());
@@ -51,7 +51,7 @@ pub fn new(mir: &Body<'tcx>) -> Self {
                 statements: vec![],
                 terminator: Some(Terminator {
                     source_info: SourceInfo {
-                        span: mir.span,
+                        span: body.span,
                         scope: OUTERMOST_SOURCE_SCOPE
                     },
                     kind: TerminatorKind::Resume
@@ -75,10 +75,10 @@ pub fn is_patched(&self, bb: BasicBlock) -> bool {
         self.patch_map[bb].is_some()
     }
 
-    pub fn terminator_loc(&self, mir: &Body<'tcx>, bb: BasicBlock) -> Location {
-        let offset = match bb.index().checked_sub(mir.basic_blocks().len()) {
+    pub fn terminator_loc(&self, body: &Body<'tcx>, bb: BasicBlock) -> Location {
+        let offset = match bb.index().checked_sub(body.basic_blocks().len()) {
             Some(index) => self.new_blocks[index].statements.len(),
-            None => mir[bb].statements.len()
+            None => body[bb].statements.len()
         };
         Location {
             block: bb,
@@ -127,21 +127,21 @@ pub fn make_nop(&mut self, loc: Location) {
         self.make_nop.push(loc);
     }
 
-    pub fn apply(self, mir: &mut Body<'tcx>) {
+    pub fn apply(self, body: &mut Body<'tcx>) {
         debug!("MirPatch: make nops at: {:?}", self.make_nop);
         for loc in self.make_nop {
-            mir.make_statement_nop(loc);
+            body.make_statement_nop(loc);
         }
         debug!("MirPatch: {:?} new temps, starting from index {}: {:?}",
-               self.new_locals.len(), mir.local_decls.len(), self.new_locals);
+               self.new_locals.len(), body.local_decls.len(), self.new_locals);
         debug!("MirPatch: {} new blocks, starting from index {}",
-               self.new_blocks.len(), mir.basic_blocks().len());
-        mir.basic_blocks_mut().extend(self.new_blocks);
-        mir.local_decls.extend(self.new_locals);
+               self.new_blocks.len(), body.basic_blocks().len());
+        body.basic_blocks_mut().extend(self.new_blocks);
+        body.local_decls.extend(self.new_locals);
         for (src, patch) in self.patch_map.into_iter_enumerated() {
             if let Some(patch) = patch {
                 debug!("MirPatch: patching block {:?}", src);
-                mir[src].terminator_mut().kind = patch;
+                body[src].terminator_mut().kind = patch;
             }
         }
 
@@ -159,9 +159,9 @@ pub fn apply(self, mir: &mut Body<'tcx>) {
                    stmt, loc, delta);
             loc.statement_index += delta;
             let source_info = Self::source_info_for_index(
-                &mir[loc.block], loc
+                &body[loc.block], loc
             );
-            mir[loc.block].statements.insert(
+            body[loc.block].statements.insert(
                 loc.statement_index, Statement {
                     source_info,
                     kind: stmt
@@ -177,10 +177,10 @@ pub fn source_info_for_index(data: &BasicBlockData<'_>, loc: Location) -> Source
         }
     }
 
-    pub fn source_info_for_location(&self, mir: &Body<'_>, loc: Location) -> SourceInfo {
-        let data = match loc.block.index().checked_sub(mir.basic_blocks().len()) {
+    pub fn source_info_for_location(&self, body: &Body<'_>, loc: Location) -> SourceInfo {
+        let data = match loc.block.index().checked_sub(body.basic_blocks().len()) {
             Some(new) => &self.new_blocks[new],
-            None => &mir[loc.block]
+            None => &body[loc.block]
         };
         Self::source_info_for_index(data, loc)
     }
index aec8ce7ced6c7609ee3eb62a6dc4b439c6597d4d..2de58d2f3d60b829343770f979c6571b7ba42e43 100644 (file)
@@ -68,7 +68,7 @@ pub fn dump_mir<'a, 'gcx, 'tcx, F>(
     pass_name: &str,
     disambiguator: &dyn Display,
     source: MirSource<'tcx>,
-    mir: &Body<'tcx>,
+    body: &Body<'tcx>,
     extra_data: F,
 ) where
     F: FnMut(PassWhere, &mut dyn Write) -> io::Result<()>,
@@ -88,7 +88,7 @@ pub fn dump_mir<'a, 'gcx, 'tcx, F>(
         &node_path,
         disambiguator,
         source,
-        mir,
+        body,
         extra_data,
     );
 }
@@ -124,7 +124,7 @@ fn dump_matched_mir_node<'a, 'gcx, 'tcx, F>(
     node_path: &str,
     disambiguator: &dyn Display,
     source: MirSource<'tcx>,
-    mir: &Body<'tcx>,
+    body: &Body<'tcx>,
     mut extra_data: F,
 ) where
     F: FnMut(PassWhere, &mut dyn Write) -> io::Result<()>,
@@ -135,13 +135,13 @@ fn dump_matched_mir_node<'a, 'gcx, 'tcx, F>(
         writeln!(file, "// source = {:?}", source)?;
         writeln!(file, "// pass_name = {}", pass_name)?;
         writeln!(file, "// disambiguator = {}", disambiguator)?;
-        if let Some(ref layout) = mir.generator_layout {
+        if let Some(ref layout) = body.generator_layout {
             writeln!(file, "// generator_layout = {:?}", layout)?;
         }
         writeln!(file, "")?;
         extra_data(PassWhere::BeforeCFG, &mut file)?;
-        write_user_type_annotations(mir, &mut file)?;
-        write_mir_fn(tcx, source, mir, &mut extra_data, &mut file)?;
+        write_user_type_annotations(body, &mut file)?;
+        write_mir_fn(tcx, source, body, &mut extra_data, &mut file)?;
         extra_data(PassWhere::AfterCFG, &mut file)?;
     };
 
@@ -149,7 +149,7 @@ fn dump_matched_mir_node<'a, 'gcx, 'tcx, F>(
         let _: io::Result<()> = try {
             let mut file =
                 create_dump_file(tcx, "dot", pass_num, pass_name, disambiguator, source)?;
-            write_mir_fn_graphviz(tcx, source.def_id(), mir, &mut file)?;
+            write_mir_fn_graphviz(tcx, source.def_id(), body, &mut file)?;
         };
     }
 }
@@ -256,7 +256,7 @@ pub fn write_mir_pretty<'a, 'gcx, 'tcx>(
 
     let mut first = true;
     for def_id in dump_mir_def_ids(tcx, single) {
-        let mir = &tcx.optimized_mir(def_id);
+        let body = &tcx.optimized_mir(def_id);
 
         if first {
             first = false;
@@ -265,15 +265,15 @@ pub fn write_mir_pretty<'a, 'gcx, 'tcx>(
             writeln!(w, "")?;
         }
 
-        write_mir_fn(tcx, MirSource::item(def_id), mir, &mut |_, _| Ok(()), w)?;
+        write_mir_fn(tcx, MirSource::item(def_id), body, &mut |_, _| Ok(()), w)?;
 
-        for (i, mir) in mir.promoted.iter_enumerated() {
+        for (i, body) in body.promoted.iter_enumerated() {
             writeln!(w, "")?;
             let src = MirSource {
                 instance: ty::InstanceDef::Item(def_id),
                 promoted: Some(i),
             };
-            write_mir_fn(tcx, src, mir, &mut |_, _| Ok(()), w)?;
+            write_mir_fn(tcx, src, body, &mut |_, _| Ok(()), w)?;
         }
     }
     Ok(())
@@ -282,18 +282,18 @@ pub fn write_mir_pretty<'a, 'gcx, 'tcx>(
 pub fn write_mir_fn<'a, 'gcx, 'tcx, F>(
     tcx: TyCtxt<'a, 'gcx, 'tcx>,
     src: MirSource<'tcx>,
-    mir: &Body<'tcx>,
+    body: &Body<'tcx>,
     extra_data: &mut F,
     w: &mut dyn Write,
 ) -> io::Result<()>
 where
     F: FnMut(PassWhere, &mut dyn Write) -> io::Result<()>,
 {
-    write_mir_intro(tcx, src, mir, w)?;
-    for block in mir.basic_blocks().indices() {
+    write_mir_intro(tcx, src, body, w)?;
+    for block in body.basic_blocks().indices() {
         extra_data(PassWhere::BeforeBlock(block), w)?;
-        write_basic_block(tcx, block, mir, extra_data, w)?;
-        if block.index() + 1 != mir.basic_blocks().len() {
+        write_basic_block(tcx, block, body, extra_data, w)?;
+        if block.index() + 1 != body.basic_blocks().len() {
             writeln!(w, "")?;
         }
     }
@@ -306,14 +306,14 @@ pub fn write_mir_fn<'a, 'gcx, 'tcx, F>(
 pub fn write_basic_block<'cx, 'gcx, 'tcx, F>(
     tcx: TyCtxt<'cx, 'gcx, 'tcx>,
     block: BasicBlock,
-    mir: &Body<'tcx>,
+    body: &Body<'tcx>,
     extra_data: &mut F,
     w: &mut dyn Write,
 ) -> io::Result<()>
 where
     F: FnMut(PassWhere, &mut dyn Write) -> io::Result<()>,
 {
-    let data = &mir[block];
+    let data = &body[block];
 
     // Basic block label at the top.
     let cleanup_text = if data.is_cleanup { " (cleanup)" } else { "" };
@@ -326,11 +326,11 @@ pub fn write_basic_block<'cx, 'gcx, 'tcx, F>(
     };
     for statement in &data.statements {
         extra_data(PassWhere::BeforeLocation(current_location), w)?;
-        let indented_mir = format!("{0}{0}{1:?};", INDENT, statement);
+        let indented_body = format!("{0}{0}{1:?};", INDENT, statement);
         writeln!(
             w,
             "{:A$} // {:?}: {}",
-            indented_mir,
+            indented_body,
             current_location,
             comment(tcx, statement.source_info),
             A = ALIGN,
@@ -464,7 +464,7 @@ fn comment(tcx: TyCtxt<'_, '_, '_>, SourceInfo { span, scope }: SourceInfo) -> S
 /// Prints local variables in a scope tree.
 fn write_scope_tree(
     tcx: TyCtxt<'_, '_, '_>,
-    mir: &Body<'_>,
+    body: &Body<'_>,
     scope_tree: &FxHashMap<SourceScope, Vec<SourceScope>>,
     w: &mut dyn Write,
     parent: SourceScope,
@@ -473,8 +473,8 @@ fn write_scope_tree(
     let indent = depth * INDENT.len();
 
     // Local variable types (including the user's name in a comment).
-    for (local, local_decl) in mir.local_decls.iter_enumerated() {
-        if (1..mir.arg_count+1).contains(&local.index()) {
+    for (local, local_decl) in body.local_decls.iter_enumerated() {
+        if (1..body.arg_count+1).contains(&local.index()) {
             // Skip over argument locals, they're printed in the signature.
             continue;
         }
@@ -527,9 +527,9 @@ fn write_scope_tree(
     };
 
     for &child in children {
-        assert_eq!(mir.source_scopes[child].parent_scope, Some(parent));
+        assert_eq!(body.source_scopes[child].parent_scope, Some(parent));
         writeln!(w, "{0:1$}scope {2} {{", "", indent, child.index())?;
-        write_scope_tree(tcx, mir, scope_tree, w, child, depth + 1)?;
+        write_scope_tree(tcx, body, scope_tree, w, child, depth + 1)?;
         writeln!(w, "{0:1$}}}", "", depth * INDENT.len())?;
     }
 
@@ -541,15 +541,15 @@ fn write_scope_tree(
 pub fn write_mir_intro<'a, 'gcx, 'tcx>(
     tcx: TyCtxt<'a, 'gcx, 'tcx>,
     src: MirSource<'tcx>,
-    mir: &Body<'_>,
+    body: &Body<'_>,
     w: &mut dyn Write,
 ) -> io::Result<()> {
-    write_mir_sig(tcx, src, mir, w)?;
+    write_mir_sig(tcx, src, body, w)?;
     writeln!(w, "{{")?;
 
     // construct a scope tree and write it out
     let mut scope_tree: FxHashMap<SourceScope, Vec<SourceScope>> = Default::default();
-    for (index, scope_data) in mir.source_scopes.iter().enumerate() {
+    for (index, scope_data) in body.source_scopes.iter().enumerate() {
         if let Some(parent) = scope_data.parent_scope {
             scope_tree
                 .entry(parent)
@@ -561,7 +561,7 @@ pub fn write_mir_intro<'a, 'gcx, 'tcx>(
         }
     }
 
-    write_scope_tree(tcx, mir, &scope_tree, w, OUTERMOST_SOURCE_SCOPE, 1)?;
+    write_scope_tree(tcx, body, &scope_tree, w, OUTERMOST_SOURCE_SCOPE, 1)?;
 
     // Add an empty line before the first block is printed.
     writeln!(w, "")?;
@@ -572,7 +572,7 @@ pub fn write_mir_intro<'a, 'gcx, 'tcx>(
 fn write_mir_sig(
     tcx: TyCtxt<'_, '_, '_>,
     src: MirSource<'tcx>,
-    mir: &Body<'_>,
+    body: &Body<'_>,
     w: &mut dyn Write,
 ) -> io::Result<()> {
     use rustc::hir::def::DefKind;
@@ -605,20 +605,20 @@ fn write_mir_sig(
         write!(w, "(")?;
 
         // fn argument types.
-        for (i, arg) in mir.args_iter().enumerate() {
+        for (i, arg) in body.args_iter().enumerate() {
             if i != 0 {
                 write!(w, ", ")?;
             }
-            write!(w, "{:?}: {}", Place::Base(PlaceBase::Local(arg)), mir.local_decls[arg].ty)?;
+            write!(w, "{:?}: {}", Place::Base(PlaceBase::Local(arg)), body.local_decls[arg].ty)?;
         }
 
-        write!(w, ") -> {}", mir.return_ty())?;
+        write!(w, ") -> {}", body.return_ty())?;
     } else {
-        assert_eq!(mir.arg_count, 0);
-        write!(w, ": {} =", mir.return_ty())?;
+        assert_eq!(body.arg_count, 0);
+        write!(w, ": {} =", body.return_ty())?;
     }
 
-    if let Some(yield_ty) = mir.yield_ty {
+    if let Some(yield_ty) = body.yield_ty {
         writeln!(w)?;
         writeln!(w, "yields {}", yield_ty)?;
     }
@@ -629,14 +629,14 @@ fn write_mir_sig(
     Ok(())
 }
 
-fn write_user_type_annotations(mir: &Body<'_>, w: &mut dyn Write) -> io::Result<()> {
-    if !mir.user_type_annotations.is_empty() {
+fn write_user_type_annotations(body: &Body<'_>, w: &mut dyn Write) -> io::Result<()> {
+    if !body.user_type_annotations.is_empty() {
         writeln!(w, "| User Type Annotations")?;
     }
-    for (index, annotation) in mir.user_type_annotations.iter_enumerated() {
+    for (index, annotation) in body.user_type_annotations.iter_enumerated() {
         writeln!(w, "| {:?}: {:?} at {:?}", index.index(), annotation.user_ty, annotation.span)?;
     }
-    if !mir.user_type_annotations.is_empty() {
+    if !body.user_type_annotations.is_empty() {
         writeln!(w, "|")?;
     }
     Ok(())
index 7c48feecb211096d4b7f822ae4cc3c81ddc89240..fbd6ddf84c33aab69b988fd208a478ed341a0976 100644 (file)
@@ -13,6 +13,7 @@
 
 #![deny(rust_2018_idioms)]
 #![deny(internal)]
+#![deny(unused_lifetimes)]
 
 #[macro_use]
 extern crate rustc;
index 2ed6f868fa1ee7cd818a675eaea810b59cbb5b1e..dd5e42684c4276581471ff68738e55078e4c8e44 100644 (file)
@@ -4,8 +4,9 @@
 use rustc::session::Session;
 use rustc::util::nodemap::FxHashMap;
 
-use syntax::ext::base::{SyntaxExtension, NamedSyntaxExtension, NormalTT, IdentTT};
+use syntax::ext::base::{SyntaxExtension, NamedSyntaxExtension};
 use syntax::ext::base::MacroExpanderFn;
+use syntax::ext::hygiene::Transparency;
 use syntax::symbol::{Symbol, sym};
 use syntax::ast;
 use syntax::feature_gate::AttributeType;
@@ -84,47 +85,26 @@ pub fn args<'b>(&'b self) -> &'b [ast::NestedMetaItem] {
     /// Register a syntax extension of any kind.
     ///
     /// This is the most general hook into `libsyntax`'s expansion behavior.
-    pub fn register_syntax_extension(&mut self, name: ast::Name, extension: SyntaxExtension) {
+    pub fn register_syntax_extension(&mut self, name: ast::Name, mut extension: SyntaxExtension) {
         if name == sym::macro_rules {
             panic!("user-defined macros may not be named `macro_rules`");
         }
-        self.syntax_exts.push((name, match extension {
-            NormalTT {
-                expander,
-                def_info: _,
-                allow_internal_unstable,
-                allow_internal_unsafe,
-                local_inner_macros,
-                unstable_feature,
-                edition,
-            } => {
-                let nid = ast::CRATE_NODE_ID;
-                NormalTT {
-                    expander,
-                    def_info: Some((nid, self.krate_span)),
-                    allow_internal_unstable,
-                    allow_internal_unsafe,
-                    local_inner_macros,
-                    unstable_feature,
-                    edition,
-                }
-            }
-            IdentTT { expander, span: _, allow_internal_unstable } => {
-                IdentTT { expander, span: Some(self.krate_span), allow_internal_unstable }
-            }
-            _ => extension,
-        }));
+        if let SyntaxExtension::LegacyBang { def_info: ref mut def_info @ None, .. } = extension {
+            *def_info = Some((ast::CRATE_NODE_ID, self.krate_span));
+        }
+        self.syntax_exts.push((name, extension));
     }
 
     /// Register a macro of the usual kind.
     ///
     /// This is a convenience wrapper for `register_syntax_extension`.
-    /// It builds for you a `NormalTT` that calls `expander`,
+    /// It builds for you a `SyntaxExtension::LegacyBang` that calls `expander`,
     /// and also takes care of interning the macro's name.
     pub fn register_macro(&mut self, name: &str, expander: MacroExpanderFn) {
-        self.register_syntax_extension(Symbol::intern(name), NormalTT {
+        self.register_syntax_extension(Symbol::intern(name), SyntaxExtension::LegacyBang {
             expander: Box::new(expander),
             def_info: None,
+            transparency: Transparency::SemiTransparent,
             allow_internal_unstable: None,
             allow_internal_unsafe: false,
             local_inner_macros: false,
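As the doc comments say, register_macro is a convenience wrapper that builds the SyntaxExtension::LegacyBang for you. A hedged sketch of a plugin registrar using it, in the style of the plugin interface of this era; expand_roman stands in for any function with the MacroExpanderFn signature and is not defined here:

    #[plugin_registrar]
    pub fn plugin_registrar(reg: &mut Registry<'_>) {
        // Registers `roman_numeral!(...)` to be expanded by `expand_roman`
        // (an assumed `MacroExpanderFn`; both names are illustrative).
        reg.register_macro("roman_numeral", expand_roman);
    }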
index 6d01328cd16e686bb08ed3342bcf96efc8010cfd..42b70dd181b3cfa2ed56f213d24140e205ac5429 100644 (file)
@@ -2,6 +2,7 @@
 
 #![deny(rust_2018_idioms)]
 #![deny(internal)]
+#![deny(unused_lifetimes)]
 
 #![feature(nll)]
 #![feature(rustc_diagnostic_macros)]
index 92faab192fa9ed6a66e8c9e23aaf81775e685d05..6d0b142fb2409e713d21ccd7b5789144797bfb64 100644 (file)
@@ -1053,7 +1053,7 @@ fn visit_trait_item(&mut self, item: &'a TraitItem) {
     }
 
     fn visit_token(&mut self, t: Token) {
-        if let Token::Interpolated(nt) = t {
+        if let token::Interpolated(nt) = t.kind {
             if let token::NtExpr(ref expr) = *nt {
                 if let ast::ExprKind::Mac(..) = expr.node {
                     self.visit_invoc(expr.id);
index 18f3c0285c210238563eda858b33e17861246191..fec7bf3b273ee891d77e134da25a3c5cad89a5e5 100644 (file)
@@ -12,6 +12,7 @@
 
 #![deny(rust_2018_idioms)]
 #![deny(internal)]
+#![deny(unused_lifetimes)]
 
 pub use rustc::hir::def::{Namespace, PerNS};
 
index 08ab5b853252255e996d7fab640b3b5d264731f0..2369bddf4f75fca13ae27304aa474f65e4dc9c21 100644 (file)
@@ -242,8 +242,7 @@ fn resolve_macro_path(&mut self, path: &ast::Path, kind: MacroKind, invoc_id: Ma
     fn check_unused_macros(&self) {
         for did in self.unused_macros.iter() {
             let id_span = match *self.macro_map[did] {
-                SyntaxExtension::NormalTT { def_info, .. } |
-                SyntaxExtension::DeclMacro { def_info, .. } => def_info,
+                SyntaxExtension::LegacyBang { def_info, .. } => def_info,
                 _ => None,
             };
             if let Some((id, span)) = id_span {
@@ -587,7 +586,7 @@ struct Flags: u8 {
                         match self.resolve_macro_to_res(derive, MacroKind::Derive,
                                                         &parent_scope, true, force) {
                             Ok((_, ext)) => {
-                                if let SyntaxExtension::ProcMacroDerive(_, helpers, _) = &*ext {
+                                if let SyntaxExtension::Derive(_, helpers, _) = &*ext {
                                     if helpers.contains(&ident.name) {
                                         let binding =
                                             (Res::NonMacroAttr(NonMacroAttrKind::DeriveHelper),
index cca5682d90a9bc6abe35e059048d8b33070293d4..30b0250cec1825ae068980c94c3354386dc8e471 100644 (file)
@@ -1,8 +1,8 @@
 #![doc(html_root_url = "https://doc.rust-lang.org/nightly/")]
-#![feature(custom_attribute)]
 #![feature(nll)]
 #![deny(rust_2018_idioms)]
 #![deny(internal)]
+#![deny(unused_lifetimes)]
 #![allow(unused_attributes)]
 
 #![recursion_limit="256"]
@@ -1017,7 +1017,7 @@ pub fn new(odir: Option<&'a Path>, cratename: &str) -> DumpHandler<'a> {
         }
     }
 
-    fn output_file(&self, ctx: &SaveContext<'_, '_>) -> File {
+    fn output_file(&self, ctx: &SaveContext<'_, '_>) -> (File, PathBuf) {
         let sess = &ctx.tcx.sess;
         let file_name = match ctx.config.output_file {
             Some(ref s) => PathBuf::from(s),
@@ -1055,7 +1055,7 @@ fn output_file(&self, ctx: &SaveContext<'_, '_>) -> File {
             |e| sess.fatal(&format!("Could not open {}: {}", file_name.display(), e)),
         );
 
-        output_file
+        (output_file, file_name)
     }
 }
 
@@ -1067,13 +1067,23 @@ fn save<'l, 'tcx>(
         cratename: &str,
         input: &'l Input,
     ) {
-        let output = &mut self.output_file(&save_ctxt);
-        let mut dumper = JsonDumper::new(output, save_ctxt.config.clone());
-        let mut visitor = DumpVisitor::new(save_ctxt, &mut dumper);
+        let sess = &save_ctxt.tcx.sess;
+        let file_name = {
+            let (mut output, file_name) = self.output_file(&save_ctxt);
+            let mut dumper = JsonDumper::new(&mut output, save_ctxt.config.clone());
+            let mut visitor = DumpVisitor::new(save_ctxt, &mut dumper);
 
-        visitor.dump_crate_info(cratename, krate);
-        visitor.dump_compilation_options(input, cratename);
-        visit::walk_crate(&mut visitor, krate);
+            visitor.dump_crate_info(cratename, krate);
+            visitor.dump_compilation_options(input, cratename);
+            visit::walk_crate(&mut visitor, krate);
+
+            file_name
+        };
+
+        if sess.opts.debugging_opts.emit_artifact_notifications {
+            sess.parse_sess.span_diagnostic
+                .emit_artifact_notification(&file_name, "save-analysis");
+        }
     }
 }
 
index e2c93b6d331586bbd7e5f33daab25af7c46c6b51..5831b0bcd8fa37a65e8cdb307dc13d611d227d5d 100644 (file)
@@ -5,7 +5,7 @@
 use std::cell::Cell;
 
 use syntax::parse::lexer::{self, StringReader};
-use syntax::parse::token::{self, Token};
+use syntax::parse::token::{self, TokenKind};
 use syntax_pos::*;
 
 #[derive(Clone)]
@@ -56,15 +56,15 @@ pub fn retokenise_span(&self, span: Span) -> StringReader<'a> {
         lexer::StringReader::retokenize(&self.sess.parse_sess, span)
     }
 
-    pub fn sub_span_of_token(&self, span: Span, tok: Token) -> Option<Span> {
+    pub fn sub_span_of_token(&self, span: Span, tok: TokenKind) -> Option<Span> {
         let mut toks = self.retokenise_span(span);
         loop {
             let next = toks.real_token();
-            if next.tok == token::Eof {
+            if next == token::Eof {
                 return None;
             }
-            if next.tok == tok {
-                return Some(next.sp);
+            if next == tok {
+                return Some(next.span);
             }
         }
     }
@@ -74,12 +74,12 @@ pub fn sub_span_of_token(&self, span: Span, tok: Token) -> Option<Span> {
     //     let mut toks = self.retokenise_span(span);
     //     loop {
     //         let ts = toks.real_token();
-    //         if ts.tok == token::Eof {
+    //         if ts == token::Eof {
     //             return None;
     //         }
-    //         if ts.tok == token::Not {
+    //         if ts == token::Not {
     //             let ts = toks.real_token();
-    //             if ts.tok.is_ident() {
+    //             if ts.kind.is_ident() {
     //                 return Some(ts.sp);
     //             } else {
     //                 return None;
@@ -93,12 +93,12 @@ pub fn sub_span_of_token(&self, span: Span, tok: Token) -> Option<Span> {
     //     let mut toks = self.retokenise_span(span);
     //     let mut prev = toks.real_token();
     //     loop {
-    //         if prev.tok == token::Eof {
+    //         if prev == token::Eof {
     //             return None;
     //         }
     //         let ts = toks.real_token();
-    //         if ts.tok == token::Not {
-    //             if prev.tok.is_ident() {
+    //         if ts == token::Not {
+    //             if prev.kind.is_ident() {
     //                 return Some(prev.sp);
     //             } else {
     //                 return None;
index 8fc5e6aae34d4b8051ef89b9afa6ea25a52d7bc1..77493fbc5dfc71e9da5cd16522b571db8db8e19a 100644 (file)
@@ -583,7 +583,7 @@ pub enum Primitive {
     Pointer
 }
 
-impl<'a, 'tcx> Primitive {
+impl Primitive {
     pub fn size<C: HasDataLayout>(self, cx: &C) -> Size {
         let dl = cx.data_layout();
 
index 3fcf08adb5c8e2766c1c58630ab1b60ae93e8f19..1bebe420251fda2520ec7d9d9bf9ebcc755102d1 100644 (file)
@@ -17,6 +17,7 @@
 
 #![deny(rust_2018_idioms)]
 #![deny(internal)]
+#![deny(unused_lifetimes)]
 
 #[macro_use] extern crate log;
 
index 96c647ca31e6fca339194abb83faba601471d514..bf61a558a46f6cc59d4439c59ca410573b891124 100644 (file)
@@ -575,7 +575,7 @@ fn fold_ex_clause_with<'gcx: 'tcx, F: TypeFolder<'gcx, 'tcx>>(
         }
     }
 
-    fn visit_ex_clause_with<'gcx: 'tcx, V: TypeVisitor<'tcx>>(
+    fn visit_ex_clause_with<V: TypeVisitor<'tcx>>(
         ex_clause: &ExClause<Self>,
         visitor: &mut V,
     ) -> bool {
index 959130f1cf2289e344345314ae5b2a3bd82b7d87..7311fd96dadc78f7e271f23eb7dcd1e8984d80e3 100644 (file)
@@ -3,6 +3,7 @@
 
 #![deny(rust_2018_idioms)]
 #![deny(internal)]
+#![deny(unused_lifetimes)]
 
 #![feature(crate_visibility_modifier)]
 #![feature(in_band_lifetimes)]
index 34f817ba570e705d4a7809e95844537e2a5666c0..5a46c9d440b5d82bb1378d636ed8fc8201409b93 100644 (file)
@@ -13,7 +13,7 @@
 use crate::namespace::Namespace;
 use rustc::lint::builtin::AMBIGUOUS_ASSOCIATED_ITEMS;
 use rustc::traits;
-use rustc::ty::{self, DefIdTree, Ty, TyCtxt, ToPredicate, TypeFoldable};
+use rustc::ty::{self, DefIdTree, Ty, TyCtxt, Const, ToPredicate, TypeFoldable};
 use rustc::ty::{GenericParamDef, GenericParamDefKind};
 use rustc::ty::subst::{Kind, Subst, InternalSubsts, SubstsRef};
 use rustc::ty::wf::object_region_bounds;
@@ -49,18 +49,23 @@ fn get_type_parameter_bounds(&self, span: Span, def_id: DefId)
                                  -> &'tcx ty::GenericPredicates<'tcx>;
 
     /// Returns the lifetime to use when a lifetime is omitted (and not elided).
-    fn re_infer(&self, span: Span, _def: Option<&ty::GenericParamDef>)
+    fn re_infer(
+        &self,
+        param: Option<&ty::GenericParamDef>,
+        span: Span,
+    )
                 -> Option<ty::Region<'tcx>>;
 
     /// Returns the type to use when a type is omitted.
-    fn ty_infer(&self, span: Span) -> Ty<'tcx>;
+    fn ty_infer(&self, param: Option<&ty::GenericParamDef>, span: Span) -> Ty<'tcx>;
 
-    /// Same as `ty_infer`, but with a known type parameter definition.
-    fn ty_infer_for_def(&self,
-                        _def: &ty::GenericParamDef,
-                        span: Span) -> Ty<'tcx> {
-        self.ty_infer(span)
-    }
+    /// Returns the const to use when a const is omitted.
+    fn ct_infer(
+        &self,
+        ty: Ty<'tcx>,
+        param: Option<&ty::GenericParamDef>,
+        span: Span,
+    ) -> &'tcx Const<'tcx>;
 
     /// Projecting an associated type from a (potentially)
     /// higher-ranked trait reference is more complicated, because of
@@ -156,7 +161,7 @@ pub fn ast_region_to_region(&self,
             }
 
             None => {
-                self.re_infer(lifetime.span, def)
+                self.re_infer(def, lifetime.span)
                     .unwrap_or_else(|| {
                         // This indicates an illegal lifetime
                         // elision. `resolve_lifetime` should have
@@ -191,7 +196,7 @@ pub fn ast_path_substs_for_ty(&self,
                 span,
                 def_id,
                 generic_args,
-                item_segment.infer_types,
+                item_segment.infer_args,
                 None,
             )
         });
@@ -208,7 +213,7 @@ fn check_impl_trait(
         seg: &hir::PathSegment,
         generics: &ty::Generics,
     ) -> bool {
-        let explicit = !seg.infer_types;
+        let explicit = !seg.infer_args;
         let impl_trait = generics.params.iter().any(|param| match param.kind {
             ty::GenericParamDefKind::Type {
                 synthetic: Some(hir::SyntheticTyParamKind::ImplTrait), ..
@@ -259,7 +264,7 @@ pub fn check_generic_arg_count_for_call(
                 GenericArgPosition::Value
             },
             def.parent.is_none() && def.has_self, // `has_self`
-            seg.infer_types || suppress_mismatch, // `infer_types`
+            seg.infer_args || suppress_mismatch, // `infer_args`
         ).0
     }
 
@@ -272,7 +277,7 @@ fn check_generic_arg_count(
         args: &hir::GenericArgs,
         position: GenericArgPosition,
         has_self: bool,
-        infer_types: bool,
+        infer_args: bool,
     ) -> (bool, Option<Vec<Span>>) {
         // At this stage we are guaranteed that the generic arguments are in the correct order, e.g.
         // that lifetimes will precede types. So it suffices to check the number of each generic
@@ -280,7 +285,6 @@ fn check_generic_arg_count(
         let param_counts = def.own_counts();
         let arg_counts = args.own_counts();
         let infer_lifetimes = position != GenericArgPosition::Type && arg_counts.lifetimes == 0;
-        let infer_consts = position != GenericArgPosition::Type && arg_counts.consts == 0;
 
         let mut defaults: ty::GenericParamCount = Default::default();
         for param in &def.params {
@@ -333,7 +337,7 @@ fn check_generic_arg_count(
                 offset
             );
             // We enforce the following: `required` <= `provided` <= `permitted`.
-            // For kinds without defaults (i.e., lifetimes), `required == permitted`.
+            // For kinds without defaults (e.g., lifetimes), `required == permitted`.
             // For other kinds (i.e., types), `permitted` may be greater than `required`.
             if required <= provided && provided <= permitted {
                 return (reported_late_bound_region_err.unwrap_or(false), None);
@@ -404,7 +408,7 @@ fn check_generic_arg_count(
             );
         }
         // FIXME(const_generics:defaults)
-        if !infer_consts || arg_counts.consts > param_counts.consts {
+        if !infer_args || arg_counts.consts > param_counts.consts {
             check_kind_count(
                 "const",
                 param_counts.consts,
@@ -414,7 +418,7 @@ fn check_generic_arg_count(
             );
         }
         // Note that type errors are currently emitted *after* const errors.
-        if !infer_types
+        if !infer_args
             || arg_counts.types > param_counts.types - defaults.types - has_self as usize {
             check_kind_count(
                 "type",
@@ -511,7 +515,7 @@ pub fn create_substs_for_generic_args<'a, 'b>(
             }
 
             // Check whether this segment takes generic arguments and the user has provided any.
-            let (generic_args, infer_types) = args_for_def_id(def_id);
+            let (generic_args, infer_args) = args_for_def_id(def_id);
 
             let mut args = generic_args.iter().flat_map(|generic_args| generic_args.args.iter())
                 .peekable();
@@ -535,7 +539,7 @@ pub fn create_substs_for_generic_args<'a, 'b>(
                             | (GenericArg::Const(_), GenericParamDefKind::Lifetime) => {
                                 // We expected a lifetime argument, but got a type or const
                                 // argument. That means we're inferring the lifetimes.
-                                substs.push(inferred_kind(None, param, infer_types));
+                                substs.push(inferred_kind(None, param, infer_args));
                                 params.next();
                             }
                             (_, _) => {
@@ -556,7 +560,7 @@ pub fn create_substs_for_generic_args<'a, 'b>(
                     (None, Some(&param)) => {
                         // If there are fewer arguments than parameters, it means
                         // we're inferring the remaining arguments.
-                        substs.push(inferred_kind(Some(&substs), param, infer_types));
+                        substs.push(inferred_kind(Some(&substs), param, infer_args));
                         args.next();
                         params.next();
                     }
@@ -592,7 +596,7 @@ fn create_substs_for_ast_path<'a>(&self,
         span: Span,
         def_id: DefId,
         generic_args: &'a hir::GenericArgs,
-        infer_types: bool,
+        infer_args: bool,
         self_ty: Option<Ty<'tcx>>)
         -> (SubstsRef<'tcx>, Vec<ConvertedBinding<'tcx>>, Option<Vec<Span>>)
     {
@@ -617,7 +621,7 @@ fn create_substs_for_ast_path<'a>(&self,
             &generic_args,
             GenericArgPosition::Type,
             has_self,
-            infer_types,
+            infer_args,
         );
 
         let is_object = self_ty.map_or(false, |ty| {
@@ -644,7 +648,7 @@ fn create_substs_for_ast_path<'a>(&self,
             self_ty.is_some(),
             self_ty,
             // Provide the generic args, and whether types should be inferred.
-            |_| (Some(generic_args), infer_types),
+            |_| (Some(generic_args), infer_args),
             // Provide substitutions for parameters for which (valid) arguments have been provided.
             |param, arg| {
                 match (&param.kind, arg) {
@@ -661,11 +665,11 @@ fn create_substs_for_ast_path<'a>(&self,
                 }
             },
             // Provide substitutions for parameters for which arguments are inferred.
-            |substs, param, infer_types| {
+            |substs, param, infer_args| {
                 match param.kind {
                     GenericParamDefKind::Lifetime => tcx.lifetimes.re_static.into(),
                     GenericParamDefKind::Type { has_default, .. } => {
-                        if !infer_types && has_default {
+                        if !infer_args && has_default {
                             // No type parameter provided, but a default exists.
 
                             // If we are converting an object type, then the
@@ -693,13 +697,14 @@ fn create_substs_for_ast_path<'a>(&self,
                                        .subst_spanned(tcx, substs.unwrap(), Some(span))
                                 ).into()
                             }
-                        } else if infer_types {
+                        } else if infer_args {
                             // No type parameters were provided, we can infer all.
-                            if !default_needs_object_self(param) {
-                                self.ty_infer_for_def(param, span).into()
+                            let param = if !default_needs_object_self(param) {
+                                Some(param)
                             } else {
-                                self.ty_infer(span).into()
-                            }
+                                None
+                            };
+                            self.ty_infer(param, span).into()
                         } else {
                             // We've already errored above about the mismatch.
                             tcx.types.err.into()
@@ -707,8 +712,14 @@ fn create_substs_for_ast_path<'a>(&self,
                     }
                     GenericParamDefKind::Const => {
                         // FIXME(const_generics:defaults)
-                        // We've already errored above about the mismatch.
-                        tcx.consts.err.into()
+                        if infer_args {
+                            // No const parameters were provided, we can infer all.
+                            let ty = tcx.at(span).type_of(param.def_id);
+                            self.ct_infer(ty, Some(param), span).into()
+                        } else {
+                            // We've already errored above about the mismatch.
+                            tcx.consts.err.into()
+                        }
                     }
                 }
             },
@@ -880,7 +891,7 @@ fn create_substs_for_ast_trait_ref(
             self.create_substs_for_ast_path(span,
                                             trait_def_id,
                                             generic_args,
-                                            trait_segment.infer_types,
+                                            trait_segment.infer_args,
                                             Some(self_ty))
         })
     }
@@ -1428,7 +1439,7 @@ fn conv_object_ty_poly_trait_ref(&self,
                 if tcx.named_region(lifetime.hir_id).is_some() {
                     self.ast_region_to_region(lifetime, None)
                 } else {
-                    self.re_infer(span, None).unwrap_or_else(|| {
+                    self.re_infer(None, span).unwrap_or_else(|| {
                         span_err!(tcx.sess, span, E0228,
                             "the lifetime bound for this object type cannot be deduced \
                              from context; please supply an explicit bound");
@@ -2122,7 +2133,7 @@ pub fn ast_ty_to_ty(&self, ast_ty: &hir::Ty) -> Ty<'tcx> {
                 // values in a ExprKind::Closure, or as
                 // the type of local variables. Both of these cases are
                 // handled specially and will not descend into this routine.
-                self.ty_infer(ast_ty.span)
+                self.ty_infer(None, ast_ty.span)
             }
             hir::TyKind::CVarArgs(lt) => {
                 let va_list_did = match tcx.lang_items().va_list() {
@@ -2144,6 +2155,17 @@ pub fn ast_ty_to_ty(&self, ast_ty: &hir::Ty) -> Ty<'tcx> {
         result_ty
     }
 
+    /// Returns the `DefId` of the constant parameter that the provided expression is a path to.
+    pub fn const_param_def_id(&self, expr: &hir::Expr) -> Option<DefId> {
+        match &expr.node {
+            ExprKind::Path(hir::QPath::Resolved(_, path)) => match path.res {
+                Res::Def(DefKind::ConstParam, did) => Some(did),
+                _ => None,
+            },
+            _ => None,
+        }
+    }
+
     pub fn ast_const_to_const(
         &self,
         ast_const: &hir::AnonConst,
@@ -2174,19 +2196,17 @@ pub fn ast_const_to_const(
             }
         }
 
-        if let ExprKind::Path(ref qpath) = expr.node {
-            if let hir::QPath::Resolved(_, ref path) = qpath {
-                if let Res::Def(DefKind::ConstParam, def_id) = path.res {
-                    let node_id = tcx.hir().as_local_node_id(def_id).unwrap();
-                    let item_id = tcx.hir().get_parent_node(node_id);
-                    let item_def_id = tcx.hir().local_def_id(item_id);
-                    let generics = tcx.generics_of(item_def_id);
-                    let index = generics.param_def_id_to_index[&tcx.hir().local_def_id(node_id)];
-                    let name = tcx.hir().name(node_id).as_interned_str();
-                    const_.val = ConstValue::Param(ty::ParamConst::new(index, name));
-                }
-            }
-        };
+        if let Some(def_id) = self.const_param_def_id(expr) {
+            // Find the name and index of the const parameter by indexing the generics of the
+            // parent item and construct a `ParamConst`.
+            let node_id = tcx.hir().as_local_node_id(def_id).unwrap();
+            let item_id = tcx.hir().get_parent_node(node_id);
+            let item_def_id = tcx.hir().local_def_id(item_id);
+            let generics = tcx.generics_of(item_def_id);
+            let index = generics.param_def_id_to_index[&tcx.hir().local_def_id(node_id)];
+            let name = tcx.hir().name(node_id).as_interned_str();
+            const_.val = ConstValue::Param(ty::ParamConst::new(index, name));
+        }
 
         tcx.mk_const(const_)
     }
index 4427a83562e9ed50f7b993d78f9a9be3a8f4f0b9..b894fc8c83c106f2fa2afb323ffaa6f307f29059 100644 (file)
@@ -598,7 +598,7 @@ fn supplied_sig_of_closure(
         let supplied_arguments = decl.inputs.iter().map(|a| astconv.ast_ty_to_ty(a));
         let supplied_return = match decl.output {
             hir::Return(ref output) => astconv.ast_ty_to_ty(&output),
-            hir::DefaultReturn(_) => astconv.ty_infer(decl.output.span()),
+            hir::DefaultReturn(_) => astconv.ty_infer(None, decl.output.span()),
         };
 
         let result = ty::Binder::bind(self.tcx.mk_fn_sig(
index 0b14ff1db59d460d580c21b49e387ac4e1bce1ae..11598ad4c9c02dcbbe4f4d4179d05f12089bd107 100644 (file)
@@ -70,7 +70,8 @@ pub fn intrisic_operation_unsafety(intrinsic: &str) -> hir::Unsafety {
         "overflowing_add" | "overflowing_sub" | "overflowing_mul" |
         "saturating_add" | "saturating_sub" |
         "rotate_left" | "rotate_right" |
-        "ctpop" | "ctlz" | "cttz" | "bswap" | "bitreverse"
+        "ctpop" | "ctlz" | "cttz" | "bswap" | "bitreverse" |
+        "minnumf32" | "minnumf64" | "maxnumf32" | "maxnumf64"
         => hir::Unsafety::Normal,
         _ => hir::Unsafety::Unsafe,
     }
@@ -272,6 +273,10 @@ pub fn check_intrinsic_type<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
             }
             "fabsf32"      => (0, vec![ tcx.types.f32 ], tcx.types.f32),
             "fabsf64"      => (0, vec![ tcx.types.f64 ], tcx.types.f64),
+            "minnumf32"    => (0, vec![ tcx.types.f32, tcx.types.f32 ], tcx.types.f32),
+            "minnumf64"    => (0, vec![ tcx.types.f64, tcx.types.f64 ], tcx.types.f64),
+            "maxnumf32"    => (0, vec![ tcx.types.f32, tcx.types.f32 ], tcx.types.f32),
+            "maxnumf64"    => (0, vec![ tcx.types.f64, tcx.types.f64 ], tcx.types.f64),
             "copysignf32"  => (0, vec![ tcx.types.f32, tcx.types.f32 ], tcx.types.f32),
             "copysignf64"  => (0, vec![ tcx.types.f64, tcx.types.f64 ], tcx.types.f64),
             "floorf32"     => (0, vec![ tcx.types.f32 ], tcx.types.f32),
index a111851aa37977bea9140e75053b43a4f72cdecb..2ab8d14ed32e250691525cf5865717a9b99f7e50 100644 (file)
 use rustc_target::spec::abi::Abi;
 use rustc::infer::opaque_types::OpaqueTypeDecl;
 use rustc::infer::type_variable::{TypeVariableOrigin, TypeVariableOriginKind};
+use rustc::infer::unify_key::{ConstVariableOrigin, ConstVariableOriginKind};
 use rustc::middle::region;
 use rustc::mir::interpret::{ConstValue, GlobalId};
 use rustc::traits::{self, ObligationCause, ObligationCauseCode, TraitEngine};
 use rustc::ty::{
-    self, AdtKind, CanonicalUserType, Ty, TyCtxt, GenericParamDefKind, Visibility,
+    self, AdtKind, CanonicalUserType, Ty, TyCtxt, Const, GenericParamDefKind, Visibility,
     ToPolyTraitRef, ToPredicate, RegionKind, UserType
 };
 use rustc::ty::adjustment::{
@@ -1309,7 +1310,7 @@ fn check_union<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
     let def = tcx.adt_def(def_id);
     def.destructor(tcx); // force the destructor to be evaluated
     check_representable(tcx, span, def_id);
-
+    check_transparent(tcx, span, def_id);
     check_packed(tcx, span, def_id);
 }
 
@@ -1806,8 +1807,43 @@ fn check_transparent<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, sp: Span, def_id: De
         return;
     }
 
+    if adt.is_enum() {
+        if !tcx.features().transparent_enums {
+            emit_feature_err(&tcx.sess.parse_sess,
+                             sym::transparent_enums,
+                             sp,
+                             GateIssue::Language,
+                             "transparent enums are unstable");
+        }
+        if adt.variants.len() != 1 {
+            let variant_spans: Vec<_> = adt.variants.iter().map(|variant| {
+                tcx.hir().span_if_local(variant.def_id).unwrap()
+            }).collect();
+            let mut err = struct_span_err!(tcx.sess, sp, E0731,
+                            "transparent enum needs exactly one variant, but has {}",
+                            adt.variants.len());
+            if !variant_spans.is_empty() {
+                err.span_note(variant_spans, &format!("the following variants exist on `{}`",
+                                                      tcx.def_path_str(def_id)));
+            }
+            err.emit();
+            if adt.variants.is_empty() {
+                // Don't bother checking the fields. No variants (and thus no fields) exist.
+                return;
+            }
+        }
+    }
+
+    if adt.is_union() && !tcx.features().transparent_unions {
+        emit_feature_err(&tcx.sess.parse_sess,
+                         sym::transparent_unions,
+                         sp,
+                         GateIssue::Language,
+                         "transparent unions are unstable");
+    }
+
     // For each field, figure out if it's known to be a ZST and align(1)
-    let field_infos = adt.non_enum_variant().fields.iter().map(|field| {
+    let field_infos = adt.all_fields().map(|field| {
         let ty = field.ty(tcx, InternalSubsts::identity_for_item(tcx, field.did));
         let param_env = tcx.param_env(field.did);
         let layout = tcx.layout_of(param_env.and(ty));
@@ -1822,16 +1858,24 @@ fn check_transparent<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, sp: Span, def_id: De
     let non_zst_count = non_zst_fields.clone().count();
     if non_zst_count != 1 {
         let field_spans: Vec<_> = non_zst_fields.map(|(span, _zst, _align1)| span).collect();
-        struct_span_err!(tcx.sess, sp, E0690,
-                         "transparent struct needs exactly one non-zero-sized field, but has {}",
-                         non_zst_count)
-        .span_note(field_spans, "non-zero-sized field")
-        .emit();
+
+        let mut err = struct_span_err!(tcx.sess, sp, E0690,
+                         "{}transparent {} needs exactly one non-zero-sized field, but has {}",
+                         if adt.is_enum() { "the variant of a " } else { "" },
+                         adt.descr(),
+                         non_zst_count);
+        if !field_spans.is_empty() {
+            err.span_note(field_spans,
+                          &format!("the following non-zero-sized fields exist on `{}`:",
+                                   tcx.def_path_str(def_id)));
+        }
+        err.emit();
     }
     for (span, zst, align1) in field_infos {
         if zst && !align1 {
             span_err!(tcx.sess, span, E0691,
-                      "zero-sized field in transparent struct has alignment larger than 1");
+                      "zero-sized field in transparent {} has alignment larger than 1",
+                      adt.descr());
         }
     }
 }
@@ -1898,6 +1942,7 @@ pub fn check_enum<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
     }
 
     check_representable(tcx, sp, def_id);
+    check_transparent(tcx, sp, def_id);
 }
 
 fn report_unexpected_variant_res<'a, 'gcx, 'tcx>(tcx: TyCtxt<'a, 'gcx, 'tcx>,
@@ -1938,8 +1983,11 @@ fn get_type_parameter_bounds(&self, _: Span, def_id: DefId)
         })
     }
 
-    fn re_infer(&self, span: Span, def: Option<&ty::GenericParamDef>)
-                -> Option<ty::Region<'tcx>> {
+    fn re_infer(
+        &self,
+        def: Option<&ty::GenericParamDef>,
+        span: Span,
+    ) -> Option<ty::Region<'tcx>> {
         let v = match def {
             Some(def) => infer::EarlyBoundRegion(span, def.name),
             None => infer::MiscVariable(span)
@@ -1947,20 +1995,37 @@ fn re_infer(&self, span: Span, def: Option<&ty::GenericParamDef>)
         Some(self.next_region_var(v))
     }
 
-    fn ty_infer(&self, span: Span) -> Ty<'tcx> {
-        self.next_ty_var(TypeVariableOrigin {
-            kind: TypeVariableOriginKind::TypeInference,
-            span,
-        })
+    fn ty_infer(&self, param: Option<&ty::GenericParamDef>, span: Span) -> Ty<'tcx> {
+        if let Some(param) = param {
+            if let UnpackedKind::Type(ty) = self.var_for_def(span, param).unpack() {
+                return ty;
+            }
+            unreachable!()
+        } else {
+            self.next_ty_var(TypeVariableOrigin {
+                kind: TypeVariableOriginKind::TypeInference,
+                span,
+            })
+        }
     }
 
-    fn ty_infer_for_def(&self,
-                        ty_param_def: &ty::GenericParamDef,
-                        span: Span) -> Ty<'tcx> {
-        if let UnpackedKind::Type(ty) = self.var_for_def(span, ty_param_def).unpack() {
-            return ty;
+    fn ct_infer(
+        &self,
+        ty: Ty<'tcx>,
+        param: Option<&ty::GenericParamDef>,
+        span: Span,
+    ) -> &'tcx Const<'tcx> {
+        if let Some(param) = param {
+            if let UnpackedKind::Const(ct) = self.var_for_def(span, param).unpack() {
+                return ct;
+            }
+            unreachable!()
+        } else {
+            self.next_const_var(ty, ConstVariableOrigin {
+                kind: ConstVariableOriginKind::ConstInference,
+                span,
+            })
         }
-        unreachable!()
     }
 
     fn projected_ty_from_poly_trait_ref(&self,
@@ -2439,6 +2504,11 @@ pub fn to_ty_saving_user_provided_ty(&self, ast_ty: &hir::Ty) -> Ty<'tcx> {
         ty
     }
 
+    /// Returns the `DefId` of the constant parameter that the provided expression is a path to.
+    pub fn const_param_def_id(&self, hir_c: &hir::AnonConst) -> Option<DefId> {
+        AstConv::const_param_def_id(self, &self.tcx.hir().body(hir_c.body).value)
+    }
+
     pub fn to_const(&self, ast_c: &hir::AnonConst, ty: Ty<'tcx>) -> &'tcx ty::Const<'tcx> {
         AstConv::ast_const_to_const(self, ast_c, ty)
     }
@@ -4024,7 +4094,7 @@ fn check_expr_kind(
                         hir::UnNeg => {
                             let result = self.check_user_unop(expr, oprnd_t, unop);
                             // If it's builtin, we can reuse the type, this helps inference.
-                            if !(oprnd_t.is_integral() || oprnd_t.is_fp()) {
+                            if !oprnd_t.is_numeric() {
                                 oprnd_t = result;
                             }
                         }
@@ -4414,19 +4484,24 @@ fn check_expr_kind(
             }
             ExprKind::Repeat(ref element, ref count) => {
                 let count_def_id = tcx.hir().local_def_id_from_hir_id(count.hir_id);
-                let param_env = ty::ParamEnv::empty();
-                let substs = InternalSubsts::identity_for_item(tcx.global_tcx(), count_def_id);
-                let instance = ty::Instance::resolve(
-                    tcx.global_tcx(),
-                    param_env,
-                    count_def_id,
-                    substs,
-                ).unwrap();
-                let global_id = GlobalId {
-                    instance,
-                    promoted: None
+                let count = if self.const_param_def_id(count).is_some() {
+                    Ok(self.to_const(count, self.tcx.type_of(count_def_id)))
+                } else {
+                    let param_env = ty::ParamEnv::empty();
+                    let substs = InternalSubsts::identity_for_item(tcx.global_tcx(), count_def_id);
+                    let instance = ty::Instance::resolve(
+                        tcx.global_tcx(),
+                        param_env,
+                        count_def_id,
+                        substs,
+                    ).unwrap();
+                    let global_id = GlobalId {
+                        instance,
+                        promoted: None
+                    };
+
+                    tcx.const_eval(param_env.and(global_id))
                 };
-                let count = tcx.const_eval(param_env.and(global_id));
 
                 let uty = match expected {
                     ExpectHasType(uty) => {
@@ -5419,10 +5494,10 @@ pub fn instantiate_value_path(&self,
                     if !infer_args_for_err.contains(&index) {
                         // Check whether the user has provided generic arguments.
                         if let Some(ref data) = segments[index].args {
-                            return (Some(data), segments[index].infer_types);
+                            return (Some(data), segments[index].infer_args);
                         }
                     }
-                    return (None, segments[index].infer_types);
+                    return (None, segments[index].infer_args);
                 }
 
                 (None, true)
@@ -5443,13 +5518,13 @@ pub fn instantiate_value_path(&self,
                 }
             },
             // Provide substitutions for parameters for which arguments are inferred.
-            |substs, param, infer_types| {
+            |substs, param, infer_args| {
                 match param.kind {
                     GenericParamDefKind::Lifetime => {
-                        self.re_infer(span, Some(param)).unwrap().into()
+                        self.re_infer(Some(param), span).unwrap().into()
                     }
                     GenericParamDefKind::Type { has_default, .. } => {
-                        if !infer_types && has_default {
+                        if !infer_args && has_default {
                             // If we have a default, then it doesn't matter that we're not
                             // inferring the type arguments: we provide the default where any
                             // is missing.
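The repeat-expression hunk earlier in this file routes a count that is a path to a const parameter through `to_const` (recognised via `const_param_def_id`) instead of eagerly const-evaluating it with identity substitutions. A minimal sketch of the kind of code this is meant to accept; const generics were still an unstable, incomplete feature at the time of this commit, so this is illustrative only (it compiles on a current compiler, where the relevant subset is stable):

```rust
// `N` in `[0u8; N]` is a path to a const parameter, not an evaluable constant,
// which is exactly the case the changed code now handles.
fn zeroed<const N: usize>() -> [u8; N] {
    [0u8; N]
}

fn main() {
    let buf = zeroed::<4>();
    assert_eq!(buf, [0u8; 4]);
}
```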
index 2751cd0a37ec0f7fcbce879a32a18c31161d4429..5d91794506c6a9e79ff73fd39f01624c91610ce8 100644 (file)
@@ -26,7 +26,7 @@
 use rustc::ty::util::Discr;
 use rustc::ty::util::IntTypeExt;
 use rustc::ty::subst::UnpackedKind;
-use rustc::ty::{self, AdtKind, DefIdTree, ToPolyTraitRef, Ty, TyCtxt};
+use rustc::ty::{self, AdtKind, DefIdTree, ToPolyTraitRef, Ty, TyCtxt, Const};
 use rustc::ty::{ReprOptions, ToPredicate};
 use rustc::util::captures::Captures;
 use rustc::util::nodemap::FxHashMap;
@@ -47,7 +47,7 @@
 use rustc::hir::GenericParamKind;
 use rustc::hir::{self, CodegenFnAttrFlags, CodegenFnAttrs, Unsafety};
 
-use errors::Applicability;
+use errors::{Applicability, DiagnosticId};
 
 use std::iter;
 
@@ -186,24 +186,39 @@ fn get_type_parameter_bounds(&self, span: Span, def_id: DefId)
 
     fn re_infer(
         &self,
-        _span: Span,
-        _def: Option<&ty::GenericParamDef>,
+        _: Option<&ty::GenericParamDef>,
+        _: Span,
     ) -> Option<ty::Region<'tcx>> {
         None
     }
 
-    fn ty_infer(&self, span: Span) -> Ty<'tcx> {
-        struct_span_err!(
-            self.tcx().sess,
+    fn ty_infer(&self, _: Option<&ty::GenericParamDef>, span: Span) -> Ty<'tcx> {
+        self.tcx().sess.struct_span_err_with_code(
             span,
-            E0121,
-            "the type placeholder `_` is not allowed within types on item signatures"
+            "the type placeholder `_` is not allowed within types on item signatures",
+            DiagnosticId::Error("E0121".into()),
         ).span_label(span, "not allowed in type signatures")
          .emit();
 
         self.tcx().types.err
     }
 
+    fn ct_infer(
+        &self,
+        _: Ty<'tcx>,
+        _: Option<&ty::GenericParamDef>,
+        span: Span,
+    ) -> &'tcx Const<'tcx> {
+        self.tcx().sess.struct_span_err_with_code(
+            span,
+            "the const placeholder `_` is not allowed within types on item signatures",
+            DiagnosticId::Error("E0121".into()),
+        ).span_label(span, "not allowed in type signatures")
+         .emit();
+
+        self.tcx().consts.err
+    }
+
     fn projected_ty_from_poly_trait_ref(
         &self,
         span: Span,
@@ -2445,7 +2460,7 @@ fn codegen_fn_attrs<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, id: DefId) -> Codegen
     for attr in attrs.iter() {
         if attr.check_name(sym::cold) {
             codegen_fn_attrs.flags |= CodegenFnAttrFlags::COLD;
-        } else if attr.check_name(sym::allocator) {
+        } else if attr.check_name(sym::rustc_allocator) {
             codegen_fn_attrs.flags |= CodegenFnAttrFlags::ALLOCATOR;
         } else if attr.check_name(sym::unwind) {
             codegen_fn_attrs.flags |= CodegenFnAttrFlags::UNWIND;
index b5a50f43875816debe1ef11b32b13a456dcc7cd1..0b618cdf1dbb71d1b2fc67551d59a8615ec0bb78 100644 (file)
@@ -1482,8 +1482,8 @@ fn drop(&mut self) {}
 "##,
 
 E0121: r##"
-In order to be consistent with Rust's lack of global type inference, type
-placeholders are disallowed by design in item signatures.
+In order to be consistent with Rust's lack of global type inference,
+type and const placeholders are disallowed by design in item signatures.
 
 Examples of this error include:
 
@@ -4484,7 +4484,7 @@ pub fn bar() {}
 
 E0690: r##"
 A struct with the representation hint `repr(transparent)` had zero or more than
-on fields that were not guaranteed to be zero-sized.
+one field that was not guaranteed to be zero-sized.
 
 Erroneous code example:
 
@@ -4519,8 +4519,8 @@ struct LengthWithUnit<U> {
 "##,
 
 E0691: r##"
-A struct with the `repr(transparent)` representation hint contains a zero-sized
-field that requires non-trivial alignment.
+A struct, enum, or union with the `repr(transparent)` representation hint
+contains a zero-sized field that requires non-trivial alignment.
 
 Erroneous code example:
 
@@ -4535,11 +4535,11 @@ struct LengthWithUnit<U> {
                                    //        struct has alignment larger than 1
 ```
 
-A transparent struct is supposed to be represented exactly like the piece of
-data it contains. Zero-sized fields with different alignment requirements
-potentially conflict with this property. In the example above, `Wrapper` would
-have to be aligned to 32 bytes even though `f32` has a smaller alignment
-requirement.
+A transparent struct, enum, or union is supposed to be represented exactly like
+the piece of data it contains. Zero-sized fields with different alignment
+requirements potentially conflict with this property. In the example above,
+`Wrapper` would have to be aligned to 32 bytes even though `f32` has a smaller
+alignment requirement.
 
 Consider removing the over-aligned zero-sized field:
 
@@ -4569,7 +4569,6 @@ struct LengthWithUnit<U> {
 alignment.
 "##,
 
-
 E0699: r##"
 A method was called on a raw pointer whose inner type wasn't completely known.
 
@@ -4680,6 +4679,26 @@ fn make_recursive_type() -> impl Sized {
 ```
 "##,
 
+E0731: r##"
+An enum with the representation hint `repr(transparent)` had zero or more than
+one variant.
+
+Erroneous code example:
+
+```compile_fail,E0731
+#[repr(transparent)]
+enum Status { // error: transparent enum needs exactly one variant, but has 2
+    Errno(u32),
+    Ok,
+}
+```
+
+Because transparent enums are represented exactly like one of their variants at
+run time, said variant must be uniquely determined. If there is no variant, or
+if there are multiple variants, it is not clear how the enum should be
+represented.
+"##,
+
 }
 
 register_diagnostics! {
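For contrast with the erroneous E0731 example above, a single-variant enum passes the new check; a sketch (on a compiler of that era it would additionally need the `transparent_enums` feature gate introduced earlier in this change, omitted here so the snippet stands alone on a current compiler):

```rust
#[repr(transparent)]
enum Errno {
    // Exactly one variant, with exactly one non-zero-sized field,
    // so the layout is exactly that of `u32`.
    Code(u32),
}

fn main() {
    let e = Errno::Code(2);
    let Errno::Code(n) = e; // irrefutable: there is only one variant
    assert_eq!(n, 2);
}
```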
index 30993b86a385fa6c2b4166d64747af2110dd2f4e..14dec423c57a9da21f227730d01e5d15a3705cba 100644 (file)
@@ -72,6 +72,7 @@
 
 #![deny(rust_2018_idioms)]
 #![deny(internal)]
+#![deny(unused_lifetimes)]
 #![allow(explicit_outlives_requirements)]
 
 #[macro_use] extern crate log;
index 60d6c33f81afcf910d930682f846411a8f3a8f4a..7b58312a5edef70ad55a04e8a2f76617ca901602 100644 (file)
@@ -341,7 +341,7 @@ fn extract_for_generics<'b, 'c, 'd>(
             .collect()
     }
 
-    fn make_final_bounds<'b, 'c, 'cx>(
+    fn make_final_bounds(
         &self,
         ty_to_bounds: FxHashMap<Type, FxHashSet<GenericBound>>,
         ty_to_fn: FxHashMap<Type, (Option<PolyTrait>, Option<Type>)>,
index 15108a7dbb91c392836ea59e926961ce55f79721..5a5540e7e38556096e7f0206aa626d407de3d59f 100644 (file)
@@ -471,7 +471,7 @@ fn build_macro(cx: &DocContext<'_>, did: DefId, name: ast::Name) -> clean::ItemE
         }
         LoadedMacro::ProcMacro(ext) => {
             let helpers = match &*ext {
-                &SyntaxExtension::ProcMacroDerive(_, ref syms, ..) => { syms.clean(cx) }
+                &SyntaxExtension::Derive(_, ref syms, ..) => { syms.clean(cx) }
                 _ => Vec::new(),
             };
 
index 03d16feb483a9c5110bc1e257e5115d2429f58ab..e44c35b55ffadca8325835d77034d3cfd98fe2e7 100644 (file)
@@ -1298,7 +1298,7 @@ fn clean(&self, cx: &DocContext<'_>) -> Constant {
     }
 }
 
-impl<'tcx> Clean<Lifetime> for ty::GenericParamDef {
+impl Clean<Lifetime> for ty::GenericParamDef {
     fn clean(&self, _cx: &DocContext<'_>) -> Lifetime {
         Lifetime(self.name.to_string())
     }
@@ -2039,7 +2039,7 @@ fn clean(&self, cx: &DocContext<'_>) -> FnDecl {
     }
 }
 
-impl<'a, 'tcx> Clean<FnDecl> for (DefId, ty::PolyFnSig<'tcx>) {
+impl<'tcx> Clean<FnDecl> for (DefId, ty::PolyFnSig<'tcx>) {
     fn clean(&self, cx: &DocContext<'_>) -> FnDecl {
         let (did, sig) = *self;
         let mut names = if cx.tcx.hir().as_local_hir_id(did).is_some() {
@@ -2276,7 +2276,7 @@ fn clean(&self, cx: &DocContext<'_>) -> Item {
     }
 }
 
-impl<'tcx> Clean<Item> for ty::AssocItem {
+impl Clean<Item> for ty::AssocItem {
     fn clean(&self, cx: &DocContext<'_>) -> Item {
         let inner = match self.kind {
             ty::AssocKind::Const => {
@@ -2768,7 +2768,10 @@ fn clean(&self, cx: &DocContext<'_>) -> Type {
                 };
                 let length = match cx.tcx.const_eval(param_env.and(cid)) {
                     Ok(length) => print_const(cx, length),
-                    Err(_) => "_".to_string(),
+                    Err(_) => cx.sess()
+                                .source_map()
+                                .span_to_snippet(cx.tcx.def_span(def_id))
+                                .unwrap_or_else(|_| "_".to_string()),
                 };
                 Array(box ty.clean(cx), length)
             },
@@ -3171,7 +3174,7 @@ fn clean(&self, cx: &DocContext<'_>) -> Item {
     }
 }
 
-impl<'tcx> Clean<Item> for ty::FieldDef {
+impl Clean<Item> for ty::FieldDef {
     fn clean(&self, cx: &DocContext<'_>) -> Item {
         Item {
             name: Some(self.ident.name).clean(cx),
@@ -3339,7 +3342,7 @@ fn clean(&self, cx: &DocContext<'_>) -> Item {
     }
 }
 
-impl<'tcx> Clean<Item> for ty::VariantDef {
+impl Clean<Item> for ty::VariantDef {
     fn clean(&self, cx: &DocContext<'_>) -> Item {
         let kind = match self.ctor_kind {
             CtorKind::Const => VariantKind::CLike,
index 932419c78f22c1f03d6d09828e53e51d5e39b966..99ca8c43cfbe202b577e848f0752d03d83aad3f1 100644 (file)
@@ -12,8 +12,8 @@
 use std::io::prelude::*;
 
 use syntax::source_map::{SourceMap, FilePathMapping};
-use syntax::parse::lexer::{self, TokenAndSpan};
-use syntax::parse::token;
+use syntax::parse::lexer;
+use syntax::parse::token::{self, Token};
 use syntax::parse;
 use syntax::symbol::{kw, sym};
 use syntax_pos::{Span, FileName};
@@ -186,9 +186,9 @@ fn new(lexer: lexer::StringReader<'a>, source_map: &'a SourceMap) -> Classifier<
     }
 
     /// Gets the next token out of the lexer.
-    fn try_next_token(&mut self) -> Result<TokenAndSpan, HighlightError> {
+    fn try_next_token(&mut self) -> Result<Token, HighlightError> {
         match self.lexer.try_next_token() {
-            Ok(tas) => Ok(tas),
+            Ok(token) => Ok(token),
             Err(_) => Err(HighlightError::LexError),
         }
     }
@@ -205,7 +205,7 @@ fn write_source<W: Writer>(&mut self,
                                    -> Result<(), HighlightError> {
         loop {
             let next = self.try_next_token()?;
-            if next.tok == token::Eof {
+            if next == token::Eof {
                 break;
             }
 
@@ -218,9 +218,9 @@ fn write_source<W: Writer>(&mut self,
     // Handles an individual token from the lexer.
     fn write_token<W: Writer>(&mut self,
                               out: &mut W,
-                              tas: TokenAndSpan)
+                              token: Token)
                               -> Result<(), HighlightError> {
-        let klass = match tas.tok {
+        let klass = match token.kind {
             token::Shebang(s) => {
                 out.string(Escape(&s.as_str()), Class::None)?;
                 return Ok(());
@@ -234,7 +234,7 @@ fn write_token<W: Writer>(&mut self,
             // reference or dereference operator or a reference or pointer type, instead of the
             // bit-and or multiplication operator.
             token::BinOp(token::And) | token::BinOp(token::Star)
-                if self.lexer.peek().tok != token::Whitespace => Class::RefKeyWord,
+                if self.lexer.peek() != &token::Whitespace => Class::RefKeyWord,
 
             // Consider this as part of a macro invocation if there was a
             // leading identifier.
@@ -257,7 +257,7 @@ fn write_token<W: Writer>(&mut self,
             token::Question => Class::QuestionMark,
 
             token::Dollar => {
-                if self.lexer.peek().tok.is_ident() {
+                if self.lexer.peek().is_ident() {
                     self.in_macro_nonterminal = true;
                     Class::MacroNonTerminal
                 } else {
@@ -280,9 +280,9 @@ fn write_token<W: Writer>(&mut self,
                 // as an attribute.
 
                 // Case 1: #![inner_attribute]
-                if self.lexer.peek().tok == token::Not {
+                if self.lexer.peek() == &token::Not {
                     self.try_next_token()?; // NOTE: consumes `!` token!
-                    if self.lexer.peek().tok == token::OpenDelim(token::Bracket) {
+                    if self.lexer.peek() == &token::OpenDelim(token::Bracket) {
                         self.in_attribute = true;
                         out.enter_span(Class::Attribute)?;
                     }
@@ -292,7 +292,7 @@ fn write_token<W: Writer>(&mut self,
                 }
 
                 // Case 2: #[outer_attribute]
-                if self.lexer.peek().tok == token::OpenDelim(token::Bracket) {
+                if self.lexer.peek() == &token::OpenDelim(token::Bracket) {
                     self.in_attribute = true;
                     out.enter_span(Class::Attribute)?;
                 }
@@ -325,8 +325,8 @@ fn write_token<W: Writer>(&mut self,
             }
 
             // Keywords are also included in the identifier set.
-            token::Ident(ident, is_raw) => {
-                match ident.name {
+            token::Ident(name, is_raw) => {
+                match name {
                     kw::Ref | kw::Mut if !is_raw => Class::RefKeyWord,
 
                     kw::SelfLower | kw::SelfUpper => Class::Self_,
@@ -335,13 +335,13 @@ fn write_token<W: Writer>(&mut self,
                     sym::Option | sym::Result => Class::PreludeTy,
                     sym::Some | sym::None | sym::Ok | sym::Err => Class::PreludeVal,
 
-                    _ if tas.tok.is_reserved_ident() => Class::KeyWord,
+                    _ if token.is_reserved_ident() => Class::KeyWord,
 
                     _ => {
                         if self.in_macro_nonterminal {
                             self.in_macro_nonterminal = false;
                             Class::MacroNonTerminal
-                        } else if self.lexer.peek().tok == token::Not {
+                        } else if self.lexer.peek() == &token::Not {
                             self.in_macro = true;
                             Class::Macro
                         } else {
@@ -359,7 +359,7 @@ fn write_token<W: Writer>(&mut self,
 
         // Anything that didn't return above is the simple case where the
         // class just spans a single token, so we can use the `string` method.
-        out.string(Escape(&self.snip(tas.sp)), klass)?;
+        out.string(Escape(&self.snip(token.span)), klass)?;
 
         Ok(())
     }
index 3555e2e7498b8c1ead3c20043b21d143f6c61cc0..362706820593e7137bd8ce982788c02045b916d9 100644 (file)
@@ -1764,7 +1764,7 @@ fn fold_item(&mut self, item: clean::Item) -> Option<clean::Item> {
     }
 }
 
-impl<'a> Cache {
+impl Cache {
     fn generics(&mut self, generics: &clean::Generics) {
         for param in &generics.params {
             match param.kind {
index ffe277ae50bdc39616b7230ba7b0f760f43712e1..3b4d65352a34c69c1540c662bd3a06d5b9f359d6 100644 (file)
@@ -1,5 +1,6 @@
 #![deny(rust_2018_idioms)]
 #![deny(internal)]
+#![deny(unused_lifetimes)]
 
 #![doc(html_root_url = "https://doc.rust-lang.org/nightly/",
        html_playground_url = "https://play.rust-lang.org/")]
index 0556852c54ac2e741aa64b62fad4369b4c804e70..6d51278b4e5e88315e74c52248e1499aa8bdd6c2 100644 (file)
@@ -1,8 +1,8 @@
 use errors::Applicability;
-use syntax::parse::lexer::{TokenAndSpan, StringReader as Lexer};
+use syntax::parse::lexer::{StringReader as Lexer};
 use syntax::parse::{ParseSess, token};
 use syntax::source_map::FilePathMapping;
-use syntax_pos::FileName;
+use syntax_pos::{InnerSpan, FileName};
 
 use crate::clean;
 use crate::core::DocContext;
@@ -33,8 +33,8 @@ fn check_rust_syntax(&self, item: &clean::Item, dox: &str, code_block: RustCodeB
         );
 
         let errors = Lexer::new_or_buffered_errs(&sess, source_file, None).and_then(|mut lexer| {
-            while let Ok(TokenAndSpan { tok, .. }) = lexer.try_next_token() {
-                if tok == token::Eof {
+            while let Ok(token::Token { kind, .. }) = lexer.try_next_token() {
+                if kind == token::Eof {
                     break;
                 }
             }
@@ -63,7 +63,7 @@ fn check_rust_syntax(&self, item: &clean::Item, dox: &str, code_block: RustCodeB
                 }
 
                 if code_block.syntax.is_none() && code_block.is_fenced {
-                    let sp = sp.from_inner_byte_pos(0, 3);
+                    let sp = sp.from_inner(InnerSpan::new(0, 3));
                     diag.span_suggestion(
                         sp,
                         "mark blocks that do not contain Rust code as text",
index 860ea18a58ad0f8b78aa2bd038d5d6815f4a9e80..7fbfc3e1fc0f4a1030de025ee972c1a7582da564 100644 (file)
@@ -433,7 +433,7 @@ fn macro_resolve(cx: &DocContext<'_>, path_str: &str) -> Option<Res> {
             if let Res::Def(DefKind::Macro(MacroKind::ProcMacroStub), _) = res {
                 // skip proc-macro stubs, they'll cause `get_macro` to crash
             } else {
-                if let SyntaxExtension::DeclMacro { .. } = *resolver.get_macro(res) {
+                if let SyntaxExtension::LegacyBang { .. } = *resolver.get_macro(res) {
                     return Some(res.map_id(|_| panic!("unexpected id")));
                 }
             }
index 018ab5dea6081f54a9dc431ff508f65d36fbd1fa..8fc6b9fdbe6b93d9b4f99636d6f1e7f01375b2b6 100644 (file)
@@ -6,7 +6,7 @@
 use rustc::middle::privacy::AccessLevels;
 use rustc::util::nodemap::DefIdSet;
 use std::mem;
-use syntax_pos::{DUMMY_SP, Span};
+use syntax_pos::{DUMMY_SP, InnerSpan, Span};
 use std::ops::Range;
 
 use crate::clean::{self, GetDefId, Item};
@@ -440,10 +440,10 @@ fn add_test(&mut self, _: String, _: LangString, _: usize) {
         }
     }
 
-    let sp = span_of_attrs(attrs).from_inner_byte_pos(
+    let sp = span_of_attrs(attrs).from_inner(InnerSpan::new(
         md_range.start + start_bytes,
         md_range.end + start_bytes + end_bytes,
-    );
+    ));
 
     Some(sp)
 }
index 36a1628014ddb6b47bb1cf116e52c8529cb09eac..95095c712d2c58631b8e381ac4052f85d96f75c7 100644 (file)
@@ -723,10 +723,17 @@ macro_rules! peel {
     ($name:ident, $($other:ident,)*) => (tuple! { $($other,)* })
 }
 
-/// Evaluates to the number of identifiers passed to it, for example: `count_idents!(a, b, c) == 3
-macro_rules! count_idents {
-    () => { 0 };
-    ($_i:ident, $($rest:ident,)*) => { 1 + count_idents!($($rest,)*) }
+/// Evaluates to the number of tokens passed to it.
+///
+/// Logarithmic counting: every one or two recursive expansions, the number of
+/// tokens to count is divided by two, instead of being reduced by one.
+/// Therefore, the recursion depth is the binary logarithm of the number of
+/// tokens to count, and the expanded tree is likewise very small.
+macro_rules! count {
+    ()                     => (0usize);
+    ($one:tt)              => (1usize);
+    ($($pairs:tt $_p:tt)*) => (count!($($pairs)*) << 1usize);
+    ($odd:tt $($rest:tt)*) => (count!($($rest)*) | 1usize);
 }
 
 macro_rules! tuple {
@@ -735,7 +742,7 @@ macro_rules! tuple {
         impl<$($name:Decodable),*> Decodable for ($($name,)*) {
             #[allow(non_snake_case)]
             fn decode<D: Decoder>(d: &mut D) -> Result<($($name,)*), D::Error> {
-                let len: usize = count_idents!($($name,)*);
+                let len: usize = count!($($name)*);
                 d.read_tuple(len, |d| {
                     let mut i = 0;
                     let ret = ($(d.read_tuple_arg({ i+=1; i-1 }, |d| -> Result<$name, D::Error> {
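To illustrate the logarithmic counting described in the new doc comment, a standalone copy of the `count!` macro behaves like this:

```rust
// Standalone copy of the `count!` macro from the hunk above, for illustration.
macro_rules! count {
    ()                     => (0usize);
    ($one:tt)              => (1usize);
    ($($pairs:tt $_p:tt)*) => (count!($($pairs)*) << 1usize);
    ($odd:tt $($rest:tt)*) => (count!($($rest)*) | 1usize);
}

fn main() {
    // An even number of tokens is counted by counting one token per pair
    // and doubling the result (shift left by one).
    assert_eq!(count!(a b c d), 4);
    // An odd number peels off one token and sets the low bit of the
    // count of the remaining (even) tokens.
    assert_eq!(count!(a b c d e), 5);
    assert_eq!(count!(), 0);
}
```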
index 816a191a2a9305bc32646da04fbe7aae8b304380..30e23f1007f20b7a7e4d14f69b03a9f45298b502 100644 (file)
@@ -19,7 +19,7 @@ panic_unwind = { path = "../libpanic_unwind", optional = true }
 panic_abort = { path = "../libpanic_abort" }
 core = { path = "../libcore" }
 libc = { version = "0.2.51", default-features = false, features = ['rustc-dep-of-std'] }
-compiler_builtins = { version = "0.1.15" }
+compiler_builtins = { version = "0.1.16" }
 profiler_builtins = { path = "../libprofiler_builtins", optional = true }
 unwind = { path = "../libunwind" }
 hashbrown = { version = "0.4.0", features = ['rustc-dep-of-std'] }
@@ -87,3 +87,5 @@ std_detect_dlsym_getauxval = []
 [package.metadata.fortanix-sgx]
 # Maximum possible number of threads when testing
 threads = 125
+# Maximum heap size
+heap_size = 0x8000000
index 133540ed6b9ca8cc74a19daad483866c12b89269..7254c621611615c119cc736d6a9e9b970e155b7e 100644 (file)
@@ -188,7 +188,7 @@ pub fn signum(self) -> f32 {
         if self.is_nan() {
             NAN
         } else {
-            unsafe { intrinsics::copysignf32(1.0, self) }
+            1.0_f32.copysign(self)
         }
     }
 
index 87467aeed8b203bb147397ea2a389f779e338c29..f8bb36ad0a89ed0fa4d6342d27a8af6152a0148f 100644 (file)
@@ -166,7 +166,7 @@ pub fn signum(self) -> f64 {
         if self.is_nan() {
             NAN
         } else {
-            unsafe { intrinsics::copysignf64(1.0, self) }
+            1.0_f64.copysign(self)
         }
     }
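Both hunks replace the raw `copysignf32`/`copysignf64` intrinsic calls with the `copysign` method; `a.copysign(b)` keeps `a`'s magnitude and takes `b`'s sign, so `signum` still returns ±1.0 matching the sign of `self`:

```rust
fn main() {
    // `a.copysign(b)` keeps `a`'s magnitude and takes `b`'s sign...
    assert_eq!(1.0_f32.copysign(-3.5), -1.0);
    // ...so `signum` continues to return ±1.0 for non-NaN values.
    assert_eq!((-0.7_f64).signum(), -1.0);
    assert_eq!(42.0_f32.signum(), 1.0);
}
```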
 
index 24f728158c47260e85cdbaa3ef80094ac328c86b..e78a5defdf3bb793c06b6a56ddce25a0c0d99632 100644 (file)
@@ -482,8 +482,8 @@ mod prim_pointer { }
 /// an array. Indeed, this provides most of the API for working with arrays.
 /// Slices have a dynamic size and do not coerce to arrays.
 ///
-/// There is no way to move elements out of an array. See [`mem::replace`][replace]
-/// for an alternative.
+/// You can move elements out of an array with a slice pattern. If you want
+/// one element, see [`mem::replace`][replace].
 ///
 /// # Examples
 ///
@@ -525,6 +525,16 @@ mod prim_pointer { }
 /// for x in &array { }
 /// ```
 ///
+/// You can use a slice pattern to move elements out of an array:
+///
+/// ```
+/// fn move_away(_: String) { /* Do interesting things. */ }
+///
+/// let [john, roa] = ["John".to_string(), "Roa".to_string()];
+/// move_away(john);
+/// move_away(roa);
+/// ```
+///
 /// [slice]: primitive.slice.html
 /// [copy]: marker/trait.Copy.html
 /// [clone]: clone/trait.Clone.html
@@ -683,6 +693,10 @@ mod prim_str { }
 /// assert_eq!(tuple.2, 'c');
 /// ```
 ///
+/// The sequential nature of the tuple applies to its implementations of various
+/// traits.  For example, in `PartialOrd` and `Ord`, the elements are compared
+/// sequentially until the first non-equal set is found.
+///
 /// For more about tuples, see [the book](../book/ch03-02-data-types.html#the-tuple-type).
 ///
 /// # Trait implementations
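For reference, the sequential (lexicographic) comparison described in the added paragraph works like this:

```rust
use std::cmp::Ordering;

fn main() {
    // Elements are compared left to right; the first unequal pair decides.
    assert!((1, 2, 9) < (1, 3, 0));
    // If all compared elements are equal, the tuples compare equal.
    assert_eq!((1, 2).cmp(&(1, 2)), Ordering::Equal);
}
```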
index 598232f9f8f22e4de8bbe1506b3f5791bd8fc685..02fbcb14fa599a25b20811ffb92a922bb6f98294 100644 (file)
@@ -1368,7 +1368,7 @@ pub enum LitKind {
     FloatUnsuffixed(Symbol),
     /// A boolean literal.
     Bool(bool),
-    /// A recovered character literal that contains mutliple `char`s, most likely a typo.
+    /// Placeholder for a literal that wasn't well-formed in some way.
     Err(Symbol),
 }
 
@@ -1406,10 +1406,10 @@ pub fn is_unsuffixed(&self) -> bool {
             | LitKind::ByteStr(..)
             | LitKind::Byte(..)
             | LitKind::Char(..)
-            | LitKind::Err(..)
             | LitKind::Int(_, LitIntType::Unsuffixed)
             | LitKind::FloatUnsuffixed(..)
-            | LitKind::Bool(..) => true,
+            | LitKind::Bool(..)
+            | LitKind::Err(..) => true,
             // suffixed variants
             LitKind::Int(_, LitIntType::Signed(..))
             | LitKind::Int(_, LitIntType::Unsigned(..))
index 48948e4d0d79c1829c30cbbf199319c4ab046737..d7e43f645df7bbb05fd3fa27c90bcae5ea935ebb 100644 (file)
@@ -465,10 +465,9 @@ fn tokens(&self) -> TokenStream {
                 let mod_sep_span = Span::new(last_pos,
                                              segment.ident.span.lo(),
                                              segment.ident.span.ctxt());
-                idents.push(TokenTree::Token(mod_sep_span, Token::ModSep).into());
+                idents.push(TokenTree::token(token::ModSep, mod_sep_span).into());
             }
-            idents.push(TokenTree::Token(segment.ident.span,
-                                         Token::from_ast_ident(segment.ident)).into());
+            idents.push(TokenTree::Token(Token::from_ast_ident(segment.ident)).into());
             last_pos = segment.ident.span.hi();
         }
         self.node.tokens(self.span).append_to_tree_and_joint_vec(&mut idents);
@@ -480,26 +479,28 @@ fn from_tokens<I>(tokens: &mut iter::Peekable<I>) -> Option<MetaItem>
     {
         // FIXME: Share code with `parse_path`.
         let path = match tokens.next() {
-            Some(TokenTree::Token(span, token @ Token::Ident(..))) |
-            Some(TokenTree::Token(span, token @ Token::ModSep)) => 'arm: {
-                let mut segments = if let Token::Ident(ident, _) = token {
-                    if let Some(TokenTree::Token(_, Token::ModSep)) = tokens.peek() {
+            Some(TokenTree::Token(Token { kind: kind @ token::Ident(..), span })) |
+            Some(TokenTree::Token(Token { kind: kind @ token::ModSep, span })) => 'arm: {
+                let mut segments = if let token::Ident(name, _) = kind {
+                    if let Some(TokenTree::Token(Token { kind: token::ModSep, .. }))
+                            = tokens.peek() {
                         tokens.next();
-                        vec![PathSegment::from_ident(ident.with_span_pos(span))]
+                        vec![PathSegment::from_ident(Ident::new(name, span))]
                     } else {
-                        break 'arm Path::from_ident(ident.with_span_pos(span));
+                        break 'arm Path::from_ident(Ident::new(name, span));
                     }
                 } else {
                     vec![PathSegment::path_root(span)]
                 };
                 loop {
-                    if let Some(TokenTree::Token(span,
-                                                    Token::Ident(ident, _))) = tokens.next() {
-                        segments.push(PathSegment::from_ident(ident.with_span_pos(span)));
+                    if let Some(TokenTree::Token(Token { kind: token::Ident(name, _), span }))
+                            = tokens.next() {
+                        segments.push(PathSegment::from_ident(Ident::new(name, span)));
                     } else {
                         return None;
                     }
-                    if let Some(TokenTree::Token(_, Token::ModSep)) = tokens.peek() {
+                    if let Some(TokenTree::Token(Token { kind: token::ModSep, .. }))
+                            = tokens.peek() {
                         tokens.next();
                     } else {
                         break;
@@ -508,7 +509,7 @@ fn from_tokens<I>(tokens: &mut iter::Peekable<I>) -> Option<MetaItem>
                 let span = span.with_hi(segments.last().unwrap().ident.span.hi());
                 Path { span, segments }
             }
-            Some(TokenTree::Token(_, Token::Interpolated(nt))) => match *nt {
+            Some(TokenTree::Token(Token { kind: token::Interpolated(nt), .. })) => match *nt {
                 token::Nonterminal::NtIdent(ident, _) => Path::from_ident(ident),
                 token::Nonterminal::NtMeta(ref meta) => return Some(meta.clone()),
                 token::Nonterminal::NtPath(ref path) => path.clone(),
@@ -533,7 +534,7 @@ pub fn tokens(&self, span: Span) -> TokenStream {
         match *self {
             MetaItemKind::Word => TokenStream::empty(),
             MetaItemKind::NameValue(ref lit) => {
-                let mut vec = vec![TokenTree::Token(span, Token::Eq).into()];
+                let mut vec = vec![TokenTree::token(token::Eq, span).into()];
                 lit.tokens().append_to_tree_and_joint_vec(&mut vec);
                 TokenStream::new(vec)
             }
@@ -541,7 +542,7 @@ pub fn tokens(&self, span: Span) -> TokenStream {
                 let mut tokens = Vec::new();
                 for (i, item) in list.iter().enumerate() {
                     if i > 0 {
-                        tokens.push(TokenTree::Token(span, Token::Comma).into());
+                        tokens.push(TokenTree::token(token::Comma, span).into());
                     }
                     item.tokens().append_to_tree_and_joint_vec(&mut tokens);
                 }
@@ -558,10 +559,10 @@ fn from_tokens<I>(tokens: &mut iter::Peekable<I>) -> Option<MetaItemKind>
         where I: Iterator<Item = TokenTree>,
     {
         let delimited = match tokens.peek().cloned() {
-            Some(TokenTree::Token(_, token::Eq)) => {
+            Some(TokenTree::Token(token)) if token == token::Eq => {
                 tokens.next();
-                return if let Some(TokenTree::Token(span, token)) = tokens.next() {
-                    Lit::from_token(&token, span).ok().map(MetaItemKind::NameValue)
+                return if let Some(TokenTree::Token(token)) = tokens.next() {
+                    Lit::from_token(&token).ok().map(MetaItemKind::NameValue)
                 } else {
                     None
                 };
@@ -579,7 +580,7 @@ fn from_tokens<I>(tokens: &mut iter::Peekable<I>) -> Option<MetaItemKind>
             let item = NestedMetaItem::from_tokens(&mut tokens)?;
             result.push(item);
             match tokens.next() {
-                None | Some(TokenTree::Token(_, Token::Comma)) => {}
+                None | Some(TokenTree::Token(Token { kind: token::Comma, .. })) => {}
                 _ => return None,
             }
         }
@@ -605,8 +606,8 @@ fn tokens(&self) -> TokenStream {
     fn from_tokens<I>(tokens: &mut iter::Peekable<I>) -> Option<NestedMetaItem>
         where I: Iterator<Item = TokenTree>,
     {
-        if let Some(TokenTree::Token(span, token)) = tokens.peek().cloned() {
-            if let Ok(lit) = Lit::from_token(&token, span) {
+        if let Some(TokenTree::Token(token)) = tokens.peek() {
+            if let Ok(lit) = Lit::from_token(token) {
                 tokens.next();
                 return Some(NestedMetaItem::Literal(lit));
             }
@@ -733,9 +734,9 @@ pub fn inject(mut krate: ast::Crate, parse_sess: &ParseSess, attrs: &[String]) -
             raw_attr.clone(),
         );
 
-        let start_span = parser.span;
+        let start_span = parser.token.span;
         let (path, tokens) = panictry!(parser.parse_meta_item_unrestricted());
-        let end_span = parser.span;
+        let end_span = parser.token.span;
         if parser.token != token::Eof {
             parse_sess.span_diagnostic
                 .span_err(start_span.to(end_span), "invalid crate attribute");
index fc413caa428dd748e8cf7610f10ac424034021fa..6123e95ccf8213ea31b8c0762f1d61bc14a74cba 100644 (file)
@@ -121,7 +121,7 @@ fn process_cfg_attr(&mut self, attr: ast::Attribute) -> Vec<ast::Attribute> {
             let mut expanded_attrs = Vec::with_capacity(1);
 
             while !parser.check(&token::CloseDelim(token::Paren)) {
-                let lo = parser.span.lo();
+                let lo = parser.token.span.lo();
                 let (path, tokens) = parser.parse_meta_item_unrestricted()?;
                 expanded_attrs.push((path, tokens, parser.prev_span.with_lo(lo)));
                 parser.expect_one_of(&[token::Comma], &[token::CloseDelim(token::Paren)])?;
index 0c57c23b2b5c4102e61a83e5be5f4e99fafb46b4..98351048c35266c85707d10d7b17dc49fbc3862c 100644 (file)
@@ -5,7 +5,7 @@
 use crate::source_map;
 use crate::ext::base::{ExtCtxt, MacEager, MacResult};
 use crate::ext::build::AstBuilder;
-use crate::parse::token;
+use crate::parse::token::{self, Token};
 use crate::ptr::P;
 use crate::symbol::kw;
 use crate::tokenstream::{TokenTree};
@@ -33,13 +33,15 @@ pub fn expand_diagnostic_used<'cx>(ecx: &'cx mut ExtCtxt<'_>,
                                    span: Span,
                                    token_tree: &[TokenTree])
                                    -> Box<dyn MacResult+'cx> {
-    let code = match (token_tree.len(), token_tree.get(0)) {
-        (1, Some(&TokenTree::Token(_, token::Ident(code, _)))) => code,
+    let code = match token_tree {
+        [
+            TokenTree::Token(Token { kind: token::Ident(code, _), .. })
+        ] => code,
         _ => unreachable!()
     };
 
     ecx.parse_sess.registered_diagnostics.with_lock(|diagnostics| {
-        match diagnostics.get_mut(&code.name) {
+        match diagnostics.get_mut(&code) {
             // Previously used errors.
             Some(&mut ErrorInfo { description: _, use_site: Some(previous_span) }) => {
                 ecx.struct_span_warn(span, &format!(
@@ -66,20 +68,19 @@ pub fn expand_register_diagnostic<'cx>(ecx: &'cx mut ExtCtxt<'_>,
                                        span: Span,
                                        token_tree: &[TokenTree])
                                        -> Box<dyn MacResult+'cx> {
-    let (code, description) = match (
-        token_tree.len(),
-        token_tree.get(0),
-        token_tree.get(1),
-        token_tree.get(2)
-    ) {
-        (1, Some(&TokenTree::Token(_, token::Ident(ref code, _))), None, None) => {
-            (code, None)
+    let (code, description) = match token_tree {
+        [
+            TokenTree::Token(Token { kind: token::Ident(code, _), .. })
+        ] => {
+            (*code, None)
+        },
+        [
+            TokenTree::Token(Token { kind: token::Ident(code, _), .. }),
+            TokenTree::Token(Token { kind: token::Comma, .. }),
+            TokenTree::Token(Token { kind: token::Literal(token::Lit { symbol, .. }), ..})
+        ] => {
+            (*code, Some(*symbol))
         },
-        (3, Some(&TokenTree::Token(_, token::Ident(ref code, _))),
-            Some(&TokenTree::Token(_, token::Comma)),
-            Some(&TokenTree::Token(_, token::Literal(token::Lit { symbol, .. })))) => {
-            (code, Some(symbol))
-        }
         _ => unreachable!()
     };
 
@@ -112,7 +113,7 @@ pub fn expand_register_diagnostic<'cx>(ecx: &'cx mut ExtCtxt<'_>,
             description,
             use_site: None
         };
-        if diagnostics.insert(code.name, info).is_some() {
+        if diagnostics.insert(code, info).is_some() {
             ecx.span_err(span, &format!(
                 "diagnostic code {} already registered", code
             ));
@@ -140,13 +141,13 @@ pub fn expand_build_diagnostic_array<'cx>(ecx: &'cx mut ExtCtxt<'_>,
                                           token_tree: &[TokenTree])
                                           -> Box<dyn MacResult+'cx> {
     assert_eq!(token_tree.len(), 3);
-    let (crate_name, name) = match (&token_tree[0], &token_tree[2]) {
+    let (crate_name, ident) = match (&token_tree[0], &token_tree[2]) {
         (
             // Crate name.
-            &TokenTree::Token(_, token::Ident(ref crate_name, _)),
+            &TokenTree::Token(Token { kind: token::Ident(crate_name, _), .. }),
             // DIAGNOSTICS ident.
-            &TokenTree::Token(_, token::Ident(ref name, _))
-        ) => (*&crate_name, name),
+            &TokenTree::Token(Token { kind: token::Ident(name, _), span })
+        ) => (crate_name, Ident::new(name, span)),
         _ => unreachable!()
     };
 
@@ -209,7 +210,7 @@ pub fn expand_build_diagnostic_array<'cx>(ecx: &'cx mut ExtCtxt<'_>,
 
     MacEager::items(smallvec![
         P(ast::Item {
-            ident: *name,
+            ident,
             attrs: Vec::new(),
             id: ast::DUMMY_NODE_ID,
             node: ast::ItemKind::Const(
index 977e6d4587709be238c9b86080c42c7ee20e54d2..598c8459d159053ce822ef235e088fcff96707ba 100644 (file)
@@ -3,7 +3,7 @@
 //! Since we cannot have a dependency on `librustc`, we implement some types here that are somewhat
 //! redundant. Later, these types can be converted to types for use by the rest of the compiler.
 
-use crate::syntax::ast::NodeId;
+use crate::ast::NodeId;
 use syntax_pos::MultiSpan;
 
 /// Since we cannot import `LintId`s from `rustc::lint`, we define some Ids here which can later be
index 4b5b9ff7bbeeee5b9febe53a99f84b9c7184ca29..38b7dee40c447f52a09ba554340a4d79cb6481b6 100644 (file)
@@ -1,6 +1,4 @@
-pub use SyntaxExtension::*;
-
-use crate::ast::{self, Attribute, Name, PatKind, MetaItem};
+use crate::ast::{self, Attribute, Name, PatKind};
 use crate::attr::HasAttrs;
 use crate::source_map::{SourceMap, Spanned, respan};
 use crate::edition::Edition;
@@ -137,29 +135,6 @@ pub fn derive_allowed(&self) -> bool {
     }
 }
 
-// A more flexible ItemDecorator.
-pub trait MultiItemDecorator {
-    fn expand(&self,
-              ecx: &mut ExtCtxt<'_>,
-              sp: Span,
-              meta_item: &ast::MetaItem,
-              item: &Annotatable,
-              push: &mut dyn FnMut(Annotatable));
-}
-
-impl<F> MultiItemDecorator for F
-    where F : Fn(&mut ExtCtxt<'_>, Span, &ast::MetaItem, &Annotatable, &mut dyn FnMut(Annotatable))
-{
-    fn expand(&self,
-              ecx: &mut ExtCtxt<'_>,
-              sp: Span,
-              meta_item: &ast::MetaItem,
-              item: &Annotatable,
-              push: &mut dyn FnMut(Annotatable)) {
-        (*self)(ecx, sp, meta_item, item, push)
-    }
-}
-
 // `meta_item` is the annotation, and `item` is the item being modified.
 // FIXME Decorators should follow the same pattern too.
 pub trait MultiItemModifier {
@@ -265,10 +240,13 @@ fn expand<'cx>(
 
         impl MutVisitor for AvoidInterpolatedIdents {
             fn visit_tt(&mut self, tt: &mut tokenstream::TokenTree) {
-                if let tokenstream::TokenTree::Token(_, token::Interpolated(nt)) = tt {
-                    if let token::NtIdent(ident, is_raw) = **nt {
-                        *tt = tokenstream::TokenTree::Token(ident.span,
-                                                            token::Ident(ident, is_raw));
+                if let tokenstream::TokenTree::Token(token) = tt {
+                    if let token::Interpolated(nt) = &token.kind {
+                        if let token::NtIdent(ident, is_raw) = **nt {
+                            *tt = tokenstream::TokenTree::token(
+                                token::Ident(ident.name, is_raw), ident.span
+                            );
+                        }
                     }
                 }
                 mut_visit::noop_visit_tt(tt, self)
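The visitor above now reads the interpolated fragment through the token's kind field and rebuilds a plain identifier token with TokenTree::token. A rough sketch of the "flatten an interpolated identifier" step, with hypothetical simplified types rather than the real AST:

    // Hypothetical simplified token model for illustration only.
    #[derive(Clone, Debug, PartialEq)]
    enum Nonterminal {
        NtIdent(String),
        NtExpr(String),
    }

    #[derive(Clone, Debug, PartialEq)]
    enum TokKind {
        Ident(String),
        Interpolated(Box<Nonterminal>),
    }

    // Replace `Interpolated(NtIdent(..))` with the equivalent plain `Ident(..)` token.
    fn avoid_interpolated_ident(kind: &mut TokKind) {
        if let TokKind::Interpolated(nt) = kind {
            if let Nonterminal::NtIdent(name) = nt.as_ref() {
                let name = name.clone(); // end the borrow before overwriting the token
                *kind = TokKind::Ident(name);
            }
        }
    }

    fn main() {
        let mut tok = TokKind::Interpolated(Box::new(Nonterminal::NtIdent("x".into())));
        avoid_interpolated_ident(&mut tok);
        assert_eq!(tok, TokKind::Ident("x".into()));
    }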
@@ -285,34 +263,6 @@ fn visit_mac(&mut self, mac: &mut ast::Mac) {
     }
 }
 
-pub trait IdentMacroExpander {
-    fn expand<'cx>(&self,
-                   cx: &'cx mut ExtCtxt<'_>,
-                   sp: Span,
-                   ident: ast::Ident,
-                   token_tree: Vec<tokenstream::TokenTree>)
-                   -> Box<dyn MacResult+'cx>;
-}
-
-pub type IdentMacroExpanderFn =
-    for<'cx> fn(&'cx mut ExtCtxt<'_>, Span, ast::Ident, Vec<tokenstream::TokenTree>)
-                -> Box<dyn MacResult+'cx>;
-
-impl<F> IdentMacroExpander for F
-    where F : for<'cx> Fn(&'cx mut ExtCtxt<'_>, Span, ast::Ident,
-                          Vec<tokenstream::TokenTree>) -> Box<dyn MacResult+'cx>
-{
-    fn expand<'cx>(&self,
-                   cx: &'cx mut ExtCtxt<'_>,
-                   sp: Span,
-                   ident: ast::Ident,
-                   token_tree: Vec<tokenstream::TokenTree>)
-                   -> Box<dyn MacResult+'cx>
-    {
-        (*self)(cx, sp, ident, token_tree)
-    }
-}
-
 // Use a macro because forwarding to a simple function has type system issues
 macro_rules! make_stmts_default {
     ($me:expr) => {
@@ -567,9 +517,6 @@ fn make_ty(self: Box<DummyResult>) -> Option<P<ast::Ty>> {
     }
 }
 
-pub type BuiltinDeriveFn =
-    for<'cx> fn(&'cx mut ExtCtxt<'_>, Span, &MetaItem, &Annotatable, &mut dyn FnMut(Annotatable));
-
 /// Represents different kinds of macro invocations that can be resolved.
 #[derive(Clone, Copy, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
 pub enum MacroKind {
@@ -603,129 +550,116 @@ pub fn article(self) -> &'static str {
 
 /// An enum representing the different kinds of syntax extensions.
 pub enum SyntaxExtension {
-    /// A trivial "extension" that does nothing, only keeps the attribute and marks it as known.
-    NonMacroAttr { mark_used: bool },
-
-    /// A syntax extension that is attached to an item and creates new items
-    /// based upon it.
-    ///
-    /// `#[derive(...)]` is a `MultiItemDecorator`.
-    ///
-    /// Prefer ProcMacro or MultiModifier since they are more flexible.
-    MultiDecorator(Box<dyn MultiItemDecorator + sync::Sync + sync::Send>),
-
-    /// A syntax extension that is attached to an item and modifies it
-    /// in-place. Also allows decoration, i.e., creating new items.
-    MultiModifier(Box<dyn MultiItemModifier + sync::Sync + sync::Send>),
-
-    /// A function-like procedural macro. TokenStream -> TokenStream.
-    ProcMacro {
+    /// A token-based function-like macro.
+    Bang {
+        /// An expander with signature TokenStream -> TokenStream.
         expander: Box<dyn ProcMacro + sync::Sync + sync::Send>,
-        /// Whitelist of unstable features that are treated as stable inside this macro
+        /// Whitelist of unstable features that are treated as stable inside this macro.
         allow_internal_unstable: Option<Lrc<[Symbol]>>,
+        /// Edition of the crate in which this macro is defined.
         edition: Edition,
     },
 
-    /// An attribute-like procedural macro. TokenStream, TokenStream -> TokenStream.
-    /// The first TokenSteam is the attribute, the second is the annotated item.
-    /// Allows modification of the input items and adding new items, similar to
-    /// MultiModifier, but uses TokenStreams, rather than AST nodes.
-    AttrProcMacro(Box<dyn AttrProcMacro + sync::Sync + sync::Send>, Edition),
-
-    /// A normal, function-like syntax extension.
-    ///
-    /// `bytes!` is a `NormalTT`.
-    NormalTT {
+    /// An AST-based function-like macro.
+    LegacyBang {
+        /// An expander with signature TokenStream -> AST.
         expander: Box<dyn TTMacroExpander + sync::Sync + sync::Send>,
+        /// Some info about the macro's definition point.
         def_info: Option<(ast::NodeId, Span)>,
-        /// Whether the contents of the macro can
-        /// directly use `#[unstable]` things.
-        ///
-        /// Only allows things that require a feature gate in the given whitelist
+        /// Hygienic properties of identifiers produced by this macro.
+        transparency: Transparency,
+        /// Whitelist of unstable features that are treated as stable inside this macro.
         allow_internal_unstable: Option<Lrc<[Symbol]>>,
-        /// Whether the contents of the macro can use `unsafe`
-        /// without triggering the `unsafe_code` lint.
+        /// Suppresses the `unsafe_code` lint for code produced by this macro.
         allow_internal_unsafe: bool,
-        /// Enables the macro helper hack (`ident!(...)` -> `$crate::ident!(...)`)
-        /// for a given macro.
+        /// Enables the macro helper hack (`ident!(...)` -> `$crate::ident!(...)`) for this macro.
         local_inner_macros: bool,
-        /// The macro's feature name if it is unstable, and the stability feature
+        /// The macro's feature name and tracking issue number if it is unstable.
         unstable_feature: Option<(Symbol, u32)>,
-        /// Edition of the crate in which the macro is defined
+        /// Edition of the crate in which this macro is defined.
         edition: Edition,
     },
 
-    /// A function-like syntax extension that has an extra ident before
-    /// the block.
-    IdentTT {
-        expander: Box<dyn IdentMacroExpander + sync::Sync + sync::Send>,
-        span: Option<Span>,
-        allow_internal_unstable: Option<Lrc<[Symbol]>>,
+    /// A token-based attribute macro.
+    Attr(
+        /// An expander with signature (TokenStream, TokenStream) -> TokenStream.
+        /// The first TokenStream is the attribute itself, the second is the annotated item.
+        /// The produced TokenStream replaces the input TokenStream.
+        Box<dyn AttrProcMacro + sync::Sync + sync::Send>,
+        /// Edition of the crate in which this macro is defined.
+        Edition,
+    ),
+
+    /// An AST-based attribute macro.
+    LegacyAttr(
+        /// An expander with signature (AST, AST) -> AST.
+        /// The first AST fragment is the attribute itself, the second is the annotated item.
+        /// The produced AST fragment replaces the input AST fragment.
+        Box<dyn MultiItemModifier + sync::Sync + sync::Send>,
+    ),
+
+    /// A trivial attribute "macro" that does nothing,
+    /// only keeps the attribute and marks it as known.
+    NonMacroAttr {
+        /// Suppresses the `unused_attributes` lint for this attribute.
+        mark_used: bool,
     },
 
-    /// An attribute-like procedural macro. TokenStream -> TokenStream.
-    /// The input is the annotated item.
-    /// Allows generating code to implement a Trait for a given struct
-    /// or enum item.
-    ProcMacroDerive(Box<dyn MultiItemModifier + sync::Sync + sync::Send>,
-                    Vec<Symbol> /* inert attribute names */, Edition),
-
-    /// An attribute-like procedural macro that derives a builtin trait.
-    BuiltinDerive(BuiltinDeriveFn),
-
-    /// A declarative macro, e.g., `macro m() {}`.
-    DeclMacro {
-        expander: Box<dyn TTMacroExpander + sync::Sync + sync::Send>,
-        def_info: Option<(ast::NodeId, Span)>,
-        is_transparent: bool,
-        edition: Edition,
-    }
+    /// A token-based derive macro.
+    Derive(
+        /// An expander with signature TokenStream -> TokenStream (not yet).
+        /// The produced TokenStream is appended to the input TokenStream.
+        Box<dyn MultiItemModifier + sync::Sync + sync::Send>,
+        /// Names of helper attributes registered by this macro.
+        Vec<Symbol>,
+        /// Edition of the crate in which this macro is defined.
+        Edition,
+    ),
+
+    /// An AST-based derive macro.
+    LegacyDerive(
+        /// An expander with signature AST -> AST.
+        /// The produced AST fragment is appended to the input AST fragment.
+        Box<dyn MultiItemModifier + sync::Sync + sync::Send>,
+    ),
 }
 
 impl SyntaxExtension {
     /// Returns which kind of macro calls this syntax extension.
     pub fn kind(&self) -> MacroKind {
         match *self {
-            SyntaxExtension::DeclMacro { .. } |
-            SyntaxExtension::NormalTT { .. } |
-            SyntaxExtension::IdentTT { .. } |
-            SyntaxExtension::ProcMacro { .. } =>
-                MacroKind::Bang,
-            SyntaxExtension::NonMacroAttr { .. } |
-            SyntaxExtension::MultiDecorator(..) |
-            SyntaxExtension::MultiModifier(..) |
-            SyntaxExtension::AttrProcMacro(..) =>
-                MacroKind::Attr,
-            SyntaxExtension::ProcMacroDerive(..) |
-            SyntaxExtension::BuiltinDerive(..) =>
-                MacroKind::Derive,
+            SyntaxExtension::Bang { .. } |
+            SyntaxExtension::LegacyBang { .. } => MacroKind::Bang,
+            SyntaxExtension::Attr(..) |
+            SyntaxExtension::LegacyAttr(..) |
+            SyntaxExtension::NonMacroAttr { .. } => MacroKind::Attr,
+            SyntaxExtension::Derive(..) |
+            SyntaxExtension::LegacyDerive(..) => MacroKind::Derive,
         }
     }
 
     pub fn default_transparency(&self) -> Transparency {
         match *self {
-            SyntaxExtension::ProcMacro { .. } |
-            SyntaxExtension::AttrProcMacro(..) |
-            SyntaxExtension::ProcMacroDerive(..) |
-            SyntaxExtension::DeclMacro { is_transparent: false, .. } => Transparency::Opaque,
-            SyntaxExtension::DeclMacro { is_transparent: true, .. } => Transparency::Transparent,
-            _ => Transparency::SemiTransparent,
+            SyntaxExtension::LegacyBang { transparency, .. } => transparency,
+            SyntaxExtension::Bang { .. } |
+            SyntaxExtension::Attr(..) |
+            SyntaxExtension::Derive(..) |
+            SyntaxExtension::NonMacroAttr { .. } => Transparency::Opaque,
+            SyntaxExtension::LegacyAttr(..) |
+            SyntaxExtension::LegacyDerive(..) => Transparency::SemiTransparent,
         }
     }
 
     pub fn edition(&self, default_edition: Edition) -> Edition {
         match *self {
-            SyntaxExtension::NormalTT { edition, .. } |
-            SyntaxExtension::DeclMacro { edition, .. } |
-            SyntaxExtension::ProcMacro { edition, .. } |
-            SyntaxExtension::AttrProcMacro(.., edition) |
-            SyntaxExtension::ProcMacroDerive(.., edition) => edition,
+            SyntaxExtension::Bang { edition, .. } |
+            SyntaxExtension::LegacyBang { edition, .. } |
+            SyntaxExtension::Attr(.., edition) |
+            SyntaxExtension::Derive(.., edition) => edition,
             // Unstable legacy stuff
             SyntaxExtension::NonMacroAttr { .. } |
-            SyntaxExtension::IdentTT { .. } |
-            SyntaxExtension::MultiDecorator(..) |
-            SyntaxExtension::MultiModifier(..) |
-            SyntaxExtension::BuiltinDerive(..) => default_edition,
+            SyntaxExtension::LegacyAttr(..) |
+            SyntaxExtension::LegacyDerive(..) => default_edition,
         }
     }
 }
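The doc comments above describe the new grouping: token-based Bang/Attr/Derive extensions, their Legacy* AST-based counterparts, and the inert NonMacroAttr, all classified by kind(). A hedged sketch of that classification with the variants reduced to unit markers (fields and expander types omitted; this is only a model of the enum above):

    // Simplified model: the real variants carry expander boxes, editions, and other metadata.
    #[derive(Debug, PartialEq)]
    enum MacroKind { Bang, Attr, Derive }

    enum Ext {
        Bang,         // token-based function-like macro
        LegacyBang,   // AST-based function-like macro
        Attr,         // token-based attribute macro
        LegacyAttr,   // AST-based attribute macro
        NonMacroAttr, // inert attribute that is only marked as known
        Derive,       // token-based derive macro
        LegacyDerive, // AST-based derive macro
    }

    fn kind(ext: &Ext) -> MacroKind {
        match ext {
            Ext::Bang | Ext::LegacyBang => MacroKind::Bang,
            Ext::Attr | Ext::LegacyAttr | Ext::NonMacroAttr => MacroKind::Attr,
            Ext::Derive | Ext::LegacyDerive => MacroKind::Derive,
        }
    }

    fn main() {
        assert_eq!(kind(&Ext::NonMacroAttr), MacroKind::Attr);
    }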
index c2a73b662c680b7137f6e70813e98f84fb7f6291..084d4fd3820172258374abbf414471d5a82fb624 100644 (file)
@@ -10,7 +10,7 @@
 use crate::feature_gate::{self, Features, GateIssue, is_builtin_attr, emit_feature_err};
 use crate::mut_visit::*;
 use crate::parse::{DirectoryOwnership, PResult, ParseSess};
-use crate::parse::token::{self, Token};
+use crate::parse::token;
 use crate::parse::parser::Parser;
 use crate::ptr::P;
 use crate::symbol::Symbol;
@@ -389,7 +389,7 @@ fn expand_fragment(&mut self, input_fragment: AstFragment) -> AstFragment {
                         let item = match self.cx.resolver.resolve_macro_path(
                                 path, MacroKind::Derive, Mark::root(), Vec::new(), false) {
                             Ok(ext) => match *ext {
-                                BuiltinDerive(..) => item_with_markers.clone(),
+                                SyntaxExtension::LegacyDerive(..) => item_with_markers.clone(),
                                 _ => item.clone(),
                             },
                             _ => item.clone(),
@@ -548,7 +548,7 @@ fn expand_attr_invoc(&mut self,
             _ => unreachable!(),
         };
 
-        if let NonMacroAttr { mark_used: false } = *ext {} else {
+        if let SyntaxExtension::NonMacroAttr { mark_used: false } = *ext {} else {
             // Macro attrs are always used when expanded,
             // non-macro attrs are considered used when the field says so.
             attr::mark_used(&attr);
@@ -564,35 +564,27 @@ fn expand_attr_invoc(&mut self,
         });
 
         match *ext {
-            NonMacroAttr { .. } => {
+            SyntaxExtension::NonMacroAttr { .. } => {
                 attr::mark_known(&attr);
                 item.visit_attrs(|attrs| attrs.push(attr));
                 Some(invoc.fragment_kind.expect_from_annotatables(iter::once(item)))
             }
-            MultiModifier(ref mac) => {
+            SyntaxExtension::LegacyAttr(ref mac) => {
                 let meta = attr.parse_meta(self.cx.parse_sess)
                                .map_err(|mut e| { e.emit(); }).ok()?;
                 let item = mac.expand(self.cx, attr.span, &meta, item);
                 Some(invoc.fragment_kind.expect_from_annotatables(item))
             }
-            MultiDecorator(ref mac) => {
-                let mut items = Vec::new();
-                let meta = attr.parse_meta(self.cx.parse_sess)
-                               .expect("derive meta should already have been parsed");
-                mac.expand(self.cx, attr.span, &meta, &item, &mut |item| items.push(item));
-                items.push(item);
-                Some(invoc.fragment_kind.expect_from_annotatables(items))
-            }
-            AttrProcMacro(ref mac, ..) => {
+            SyntaxExtension::Attr(ref mac, ..) => {
                 self.gate_proc_macro_attr_item(attr.span, &item);
-                let item_tok = TokenTree::Token(DUMMY_SP, Token::Interpolated(Lrc::new(match item {
+                let item_tok = TokenTree::token(token::Interpolated(Lrc::new(match item {
                     Annotatable::Item(item) => token::NtItem(item),
                     Annotatable::TraitItem(item) => token::NtTraitItem(item.into_inner()),
                     Annotatable::ImplItem(item) => token::NtImplItem(item.into_inner()),
                     Annotatable::ForeignItem(item) => token::NtForeignItem(item.into_inner()),
                     Annotatable::Stmt(stmt) => token::NtStmt(stmt.into_inner()),
                     Annotatable::Expr(expr) => token::NtExpr(expr),
-                }))).into();
+                })), DUMMY_SP).into();
                 let input = self.extract_proc_macro_attr_input(attr.tokens, attr.span);
                 let tok_result = mac.expand(self.cx, attr.span, input, item_tok);
                 let res = self.parse_ast_fragment(tok_result, invoc.fragment_kind,
@@ -600,7 +592,7 @@ fn expand_attr_invoc(&mut self,
                 self.gate_proc_macro_expansion(attr.span, &res);
                 res
             }
-            ProcMacroDerive(..) | BuiltinDerive(..) => {
+            SyntaxExtension::Derive(..) | SyntaxExtension::LegacyDerive(..) => {
                 self.cx.span_err(attr.span, &format!("`{}` is a derive macro", attr.path));
                 self.cx.trace_macros_diag();
                 invoc.fragment_kind.dummy(attr.span)
@@ -755,17 +747,7 @@ fn expand_bang_invoc(&mut self,
         };
 
         let opt_expanded = match *ext {
-            DeclMacro { ref expander, def_info, edition, .. } => {
-                if let Err(dummy_span) = validate_and_set_expn_info(self, def_info.map(|(_, s)| s),
-                                                                    None, false, false, None,
-                                                                    edition) {
-                    dummy_span
-                } else {
-                    kind.make_from(expander.expand(self.cx, span, mac.node.stream(), None))
-                }
-            }
-
-            NormalTT {
+            SyntaxExtension::LegacyBang {
                 ref expander,
                 def_info,
                 ref allow_internal_unstable,
@@ -773,6 +755,7 @@ fn expand_bang_invoc(&mut self,
                 local_inner_macros,
                 unstable_feature,
                 edition,
+                ..
             } => {
                 if let Err(dummy_span) = validate_and_set_expn_info(self, def_info.map(|(_, s)| s),
                                                                     allow_internal_unstable.clone(),
@@ -791,43 +774,22 @@ fn expand_bang_invoc(&mut self,
                 }
             }
 
-            IdentTT { ref expander, span: tt_span, ref allow_internal_unstable } => {
-                if ident.name == kw::Invalid {
-                    self.cx.span_err(path.span,
-                                    &format!("macro {}! expects an ident argument", path));
-                    self.cx.trace_macros_diag();
-                    kind.dummy(span)
-                } else {
-                    invoc.expansion_data.mark.set_expn_info(ExpnInfo {
-                        call_site: span,
-                        def_site: tt_span,
-                        format: macro_bang_format(path),
-                        allow_internal_unstable: allow_internal_unstable.clone(),
-                        allow_internal_unsafe: false,
-                        local_inner_macros: false,
-                        edition: self.cx.parse_sess.edition,
-                    });
-
-                    let input: Vec<_> = mac.node.stream().into_trees().collect();
-                    kind.make_from(expander.expand(self.cx, span, ident, input))
-                }
-            }
-
-            MultiDecorator(..) | MultiModifier(..) |
-            AttrProcMacro(..) | SyntaxExtension::NonMacroAttr { .. } => {
+            SyntaxExtension::Attr(..) |
+            SyntaxExtension::LegacyAttr(..) |
+            SyntaxExtension::NonMacroAttr { .. } => {
                 self.cx.span_err(path.span,
                                  &format!("`{}` can only be used in attributes", path));
                 self.cx.trace_macros_diag();
                 kind.dummy(span)
             }
 
-            ProcMacroDerive(..) | BuiltinDerive(..) => {
+            SyntaxExtension::Derive(..) | SyntaxExtension::LegacyDerive(..) => {
                 self.cx.span_err(path.span, &format!("`{}` is a derive macro", path));
                 self.cx.trace_macros_diag();
                 kind.dummy(span)
             }
 
-            SyntaxExtension::ProcMacro { ref expander, ref allow_internal_unstable, edition } => {
+            SyntaxExtension::Bang { ref expander, ref allow_internal_unstable, edition } => {
                 if ident.name != kw::Invalid {
                     let msg =
                         format!("macro {}! expects no ident argument, given '{}'", path, ident);
@@ -924,29 +886,29 @@ fn expand_derive_invoc(&mut self,
             edition: ext.edition(self.cx.parse_sess.edition),
         };
 
-        match *ext {
-            ProcMacroDerive(ref ext, ..) => {
-                invoc.expansion_data.mark.set_expn_info(expn_info);
-                let span = span.with_ctxt(self.cx.backtrace());
-                let dummy = ast::MetaItem { // FIXME(jseyfried) avoid this
-                    path: Path::from_ident(Ident::invalid()),
-                    span: DUMMY_SP,
-                    node: ast::MetaItemKind::Word,
+        match ext {
+            SyntaxExtension::Derive(expander, ..) | SyntaxExtension::LegacyDerive(expander) => {
+                let meta = match ext {
+                    SyntaxExtension::Derive(..) => ast::MetaItem { // FIXME(jseyfried) avoid this
+                        path: Path::from_ident(Ident::invalid()),
+                        span: DUMMY_SP,
+                        node: ast::MetaItemKind::Word,
+                    },
+                    _ => {
+                        expn_info.allow_internal_unstable = Some(vec![
+                            sym::rustc_attrs,
+                            Symbol::intern("derive_clone_copy"),
+                            Symbol::intern("derive_eq"),
+                            // RustcDeserialize and RustcSerialize
+                            Symbol::intern("libstd_sys_internals"),
+                        ].into());
+                        attr.meta()?
+                    }
                 };
-                let items = ext.expand(self.cx, span, &dummy, item);
-                Some(invoc.fragment_kind.expect_from_annotatables(items))
-            }
-            BuiltinDerive(func) => {
-                expn_info.allow_internal_unstable = Some(vec![
-                    sym::rustc_attrs,
-                    Symbol::intern("derive_clone_copy"),
-                    Symbol::intern("derive_eq"),
-                    Symbol::intern("libstd_sys_internals"), // RustcDeserialize and RustcSerialize
-                ].into());
+
                 invoc.expansion_data.mark.set_expn_info(expn_info);
                 let span = span.with_ctxt(self.cx.backtrace());
-                let mut items = Vec::new();
-                func(self.cx, span, &attr.meta()?, &item, &mut |a| items.push(a));
+                let items = expander.expand(self.cx, span, &meta, item);
                 Some(invoc.fragment_kind.expect_from_annotatables(items))
             }
             _ => {
@@ -1041,7 +1003,7 @@ pub fn ensure_complete_parse(&mut self, macro_path: &Path, kind_name: &str, span
             let msg = format!("macro expansion ignores token `{}` and any following",
                               self.this_token_to_string());
             // Avoid emitting backtrace info twice.
-            let def_site_span = self.span.with_ctxt(SyntaxContext::empty());
+            let def_site_span = self.token.span.with_ctxt(SyntaxContext::empty());
             let mut err = self.diagnostic().struct_span_err(def_site_span, &msg);
             err.span_label(span, "caused by the macro expansion here");
             let msg = format!(
index e1cb90d9e71d652033ad2bb4c250516d0e34b055..4e2aab46542d203030675f84fb5c3045157ffbb8 100644 (file)
@@ -105,7 +105,7 @@ fn make_expr(mut self: Box<ExpandResult<'a>>) -> Option<P<ast::Expr>> {
             while self.p.token != token::Eof {
                 match panictry!(self.p.parse_item()) {
                     Some(item) => ret.push(item),
-                    None => self.p.diagnostic().span_fatal(self.p.span,
+                    None => self.p.diagnostic().span_fatal(self.p.token.span,
                                                            &format!("expected item, found `{}`",
                                                                     self.p.this_token_to_string()))
                                                .raise()
index 7b7cf80760f5c3f920feda9ba0eeb6f45f60e71d..4758b6a50e520f8a4f3c5b9ff3e77b16a0fe65fa 100644 (file)
@@ -74,7 +74,7 @@
 pub use ParseResult::*;
 use TokenTreeOrTokenTreeSlice::*;
 
-use crate::ast::Ident;
+use crate::ast::{Ident, Name};
 use crate::ext::tt::quoted::{self, TokenTree};
 use crate::parse::{Directory, ParseSess};
 use crate::parse::parser::{Parser, PathStyle};
@@ -273,7 +273,7 @@ pub enum ParseResult<T> {
     Success(T),
     /// Arm failed to match. If the second parameter is `token::Eof`, it indicates an unexpected
     /// end of macro invocation. Otherwise, it indicates that no rules expected the given token.
-    Failure(syntax_pos::Span, Token, &'static str),
+    Failure(Token, &'static str),
     /// Fatal error (malformed macro?). Abort compilation.
     Error(syntax_pos::Span, String),
 }
@@ -417,24 +417,24 @@ fn n_rec<I: Iterator<Item = NamedMatch>>(
 
 /// Generates an appropriate parsing failure message. For EOF, this is "unexpected end...". For
 /// other tokens, this is "unexpected token...".
-pub fn parse_failure_msg(tok: Token) -> String {
-    match tok {
+pub fn parse_failure_msg(tok: &Token) -> String {
+    match tok.kind {
         token::Eof => "unexpected end of macro invocation".to_string(),
         _ => format!(
             "no rules expected the token `{}`",
-            pprust::token_to_string(&tok)
+            pprust::token_to_string(tok)
         ),
     }
 }
 
 /// Performs a token equality check, ignoring syntax context (that is, an unhygienic comparison)
 fn token_name_eq(t1: &Token, t2: &Token) -> bool {
-    if let (Some((id1, is_raw1)), Some((id2, is_raw2))) = (t1.ident(), t2.ident()) {
-        id1.name == id2.name && is_raw1 == is_raw2
-    } else if let (Some(id1), Some(id2)) = (t1.lifetime(), t2.lifetime()) {
-        id1.name == id2.name
+    if let (Some((ident1, is_raw1)), Some((ident2, is_raw2))) = (t1.ident(), t2.ident()) {
+        ident1.name == ident2.name && is_raw1 == is_raw2
+    } else if let (Some(ident1), Some(ident2)) = (t1.lifetime(), t2.lifetime()) {
+        ident1.name == ident2.name
     } else {
-        *t1 == *t2
+        t1.kind == t2.kind
     }
 }
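token_name_eq above compares identifiers and lifetimes by name only, ignoring spans and hygiene, and otherwise falls back to comparing token kinds. A self-contained sketch of that comparison with simplified token types (illustrative only, not the parser's real definitions):

    // Simplified token for illustration; real tokens carry a span and hygiene data.
    #[derive(Clone, PartialEq)]
    enum Kind {
        Ident(String, bool /* is_raw */),
        Lifetime(String),
        Comma,
    }

    struct Token {
        kind: Kind,
        span: u32, // stand-in for a source span; deliberately ignored below
    }

    // Unhygienic equality: names must match, spans are ignored.
    fn token_name_eq(t1: &Token, t2: &Token) -> bool {
        match (&t1.kind, &t2.kind) {
            (Kind::Ident(n1, r1), Kind::Ident(n2, r2)) => n1 == n2 && r1 == r2,
            (Kind::Lifetime(n1), Kind::Lifetime(n2)) => n1 == n2,
            _ => t1.kind == t2.kind,
        }
    }

    fn main() {
        let a = Token { kind: Kind::Ident("x".into(), false), span: 1 };
        let b = Token { kind: Kind::Ident("x".into(), false), span: 99 };
        assert!(token_name_eq(&a, &b)); // same name, different spans
    }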
 
@@ -467,7 +467,6 @@ fn inner_parse_loop<'root, 'tt>(
     eof_items: &mut SmallVec<[MatcherPosHandle<'root, 'tt>; 1]>,
     bb_items: &mut SmallVec<[MatcherPosHandle<'root, 'tt>; 1]>,
     token: &Token,
-    span: syntax_pos::Span,
 ) -> ParseResult<()> {
     // Pop items from `cur_items` until it is empty.
     while let Some(mut item) = cur_items.pop() {
@@ -510,7 +509,7 @@ fn inner_parse_loop<'root, 'tt>(
                     // Add matches from this repetition to the `matches` of `up`
                     for idx in item.match_lo..item.match_hi {
                         let sub = item.matches[idx].clone();
-                        let span = DelimSpan::from_pair(item.sp_open, span);
+                        let span = DelimSpan::from_pair(item.sp_open, token.span);
                         new_pos.push_match(idx, MatchedSeq(sub, span));
                     }
 
@@ -598,7 +597,7 @@ fn inner_parse_loop<'root, 'tt>(
                 TokenTree::MetaVarDecl(_, _, id) => {
                     // Built-in nonterminals never start with these tokens,
                     // so we can eliminate them from consideration.
-                    if may_begin_with(id.name, token) {
+                    if may_begin_with(token, id.name) {
                         bb_items.push(item);
                     }
                 }
@@ -609,7 +608,8 @@ fn inner_parse_loop<'root, 'tt>(
                 //
                 // At the beginning of the loop, if we reach the end of the delimited submatcher,
                 // we pop the stack to backtrack out of the descent.
-                seq @ TokenTree::Delimited(..) | seq @ TokenTree::Token(_, DocComment(..)) => {
+                seq @ TokenTree::Delimited(..) |
+                seq @ TokenTree::Token(Token { kind: DocComment(..), .. }) => {
                     let lower_elts = mem::replace(&mut item.top_elts, Tt(seq));
                     let idx = item.idx;
                     item.stack.push(MatcherTtFrame {
@@ -621,7 +621,7 @@ fn inner_parse_loop<'root, 'tt>(
                 }
 
                 // We just matched a normal token. We can just advance the parser.
-                TokenTree::Token(_, ref t) if token_name_eq(t, token) => {
+                TokenTree::Token(t) if token_name_eq(&t, token) => {
                     item.idx += 1;
                     next_items.push(item);
                 }
@@ -675,7 +675,7 @@ pub fn parse(
     //
     // This MatcherPos instance is allocated on the stack. All others -- and
     // there are frequently *no* others! -- are allocated on the heap.
-    let mut initial = initial_matcher_pos(ms, parser.span);
+    let mut initial = initial_matcher_pos(ms, parser.token.span);
     let mut cur_items = smallvec![MatcherPosHandle::Ref(&mut initial)];
     let mut next_items = Vec::new();
 
@@ -697,10 +697,9 @@ pub fn parse(
             &mut eof_items,
             &mut bb_items,
             &parser.token,
-            parser.span,
         ) {
             Success(_) => {}
-            Failure(sp, tok, t) => return Failure(sp, tok, t),
+            Failure(token, msg) => return Failure(token, msg),
             Error(sp, msg) => return Error(sp, msg),
         }
 
@@ -713,7 +712,7 @@ pub fn parse(
 
         // If we reached the EOF, check that there is EXACTLY ONE possible matcher. Otherwise,
         // either the parse is ambiguous (which should never happen) or there is a syntax error.
-        if token_name_eq(&parser.token, &token::Eof) {
+        if parser.token == token::Eof {
             if eof_items.len() == 1 {
                 let matches = eof_items[0]
                     .matches
@@ -722,17 +721,16 @@ pub fn parse(
                 return nameize(sess, ms, matches);
             } else if eof_items.len() > 1 {
                 return Error(
-                    parser.span,
+                    parser.token.span,
                     "ambiguity: multiple successful parses".to_string(),
                 );
             } else {
                 return Failure(
-                    if parser.span.is_dummy() {
-                        parser.span
+                    Token::new(token::Eof, if parser.token.span.is_dummy() {
+                        parser.token.span
                     } else {
-                        sess.source_map().next_point(parser.span)
-                    },
-                    token::Eof,
+                        sess.source_map().next_point(parser.token.span)
+                    }),
                     "missing tokens in macro arguments",
                 );
             }
@@ -755,7 +753,7 @@ pub fn parse(
                 .join(" or ");
 
             return Error(
-                parser.span,
+                parser.token.span,
                 format!(
                     "local ambiguity: multiple parsing options: {}",
                     match next_items.len() {
@@ -770,8 +768,7 @@ pub fn parse(
         // then there is a syntax error.
         else if bb_items.is_empty() && next_items.is_empty() {
             return Failure(
-                parser.span,
-                parser.token.clone(),
+                parser.token.take(),
                 "no rules expected this token in macro call",
             );
         }
@@ -807,10 +804,9 @@ pub fn parse(
 
 /// The token is an identifier, but not `_`.
 /// We prohibit passing `_` to macros expecting `ident` for now.
-fn get_macro_ident(token: &Token) -> Option<(Ident, bool)> {
-    match *token {
-        token::Ident(ident, is_raw) if ident.name != kw::Underscore =>
-            Some((ident, is_raw)),
+fn get_macro_name(token: &Token) -> Option<(Name, bool)> {
+    match token.kind {
+        token::Ident(name, is_raw) if name != kw::Underscore => Some((name, is_raw)),
         _ => None,
     }
 }
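get_macro_name above now yields the identifier's interned name rather than a full Ident, still rejecting `_` so that `$x:ident` never matches it. A tiny sketch of that filter, with plain strings standing in for interned symbols:

    // Plain strings stand in for interned Symbols; `is_raw` marks r#ident forms.
    fn get_macro_name(ident: &str, is_raw: bool) -> Option<(String, bool)> {
        if ident == "_" { None } else { Some((ident.to_string(), is_raw)) }
    }

    fn main() {
        assert_eq!(get_macro_name("_", false), None); // `_` is not accepted for `$x:ident`
        assert_eq!(get_macro_name("foo", false), Some(("foo".to_string(), false)));
    }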
@@ -819,7 +815,7 @@ fn get_macro_ident(token: &Token) -> Option<(Ident, bool)> {
 ///
 /// Returning `false` is a *stability guarantee* that such a matcher will *never* begin with that
 /// token. Be conservative (return true) if not sure.
-fn may_begin_with(name: Symbol, token: &Token) -> bool {
+fn may_begin_with(token: &Token, name: Name) -> bool {
     /// Checks whether the non-terminal may contain a single (non-keyword) identifier.
     fn may_be_ident(nt: &token::Nonterminal) -> bool {
         match *nt {
@@ -831,16 +827,16 @@ fn may_be_ident(nt: &token::Nonterminal) -> bool {
     match name {
         sym::expr => token.can_begin_expr(),
         sym::ty => token.can_begin_type(),
-        sym::ident => get_macro_ident(token).is_some(),
+        sym::ident => get_macro_name(token).is_some(),
         sym::literal => token.can_begin_literal_or_bool(),
-        sym::vis => match *token {
+        sym::vis => match token.kind {
             // The follow-set of :vis + "priv" keyword + interpolated
-            Token::Comma | Token::Ident(..) | Token::Interpolated(_) => true,
+            token::Comma | token::Ident(..) | token::Interpolated(_) => true,
             _ => token.can_begin_type(),
         },
-        sym::block => match *token {
-            Token::OpenDelim(token::Brace) => true,
-            Token::Interpolated(ref nt) => match **nt {
+        sym::block => match token.kind {
+            token::OpenDelim(token::Brace) => true,
+            token::Interpolated(ref nt) => match **nt {
                 token::NtItem(_)
                 | token::NtPat(_)
                 | token::NtTy(_)
@@ -852,39 +848,39 @@ fn may_be_ident(nt: &token::Nonterminal) -> bool {
             },
             _ => false,
         },
-        sym::path | sym::meta => match *token {
-            Token::ModSep | Token::Ident(..) => true,
-            Token::Interpolated(ref nt) => match **nt {
+        sym::path | sym::meta => match token.kind {
+            token::ModSep | token::Ident(..) => true,
+            token::Interpolated(ref nt) => match **nt {
                 token::NtPath(_) | token::NtMeta(_) => true,
                 _ => may_be_ident(&nt),
             },
             _ => false,
         },
-        sym::pat => match *token {
-            Token::Ident(..) |               // box, ref, mut, and other identifiers (can stricten)
-            Token::OpenDelim(token::Paren) |    // tuple pattern
-            Token::OpenDelim(token::Bracket) |  // slice pattern
-            Token::BinOp(token::And) |          // reference
-            Token::BinOp(token::Minus) |        // negative literal
-            Token::AndAnd |                     // double reference
-            Token::Literal(..) |                // literal
-            Token::DotDot |                     // range pattern (future compat)
-            Token::DotDotDot |                  // range pattern (future compat)
-            Token::ModSep |                     // path
-            Token::Lt |                         // path (UFCS constant)
-            Token::BinOp(token::Shl) => true,   // path (double UFCS)
-            Token::Interpolated(ref nt) => may_be_ident(nt),
+        sym::pat => match token.kind {
+            token::Ident(..) |               // box, ref, mut, and other identifiers (can stricten)
+            token::OpenDelim(token::Paren) |    // tuple pattern
+            token::OpenDelim(token::Bracket) |  // slice pattern
+            token::BinOp(token::And) |          // reference
+            token::BinOp(token::Minus) |        // negative literal
+            token::AndAnd |                     // double reference
+            token::Literal(..) |                // literal
+            token::DotDot |                     // range pattern (future compat)
+            token::DotDotDot |                  // range pattern (future compat)
+            token::ModSep |                     // path
+            token::Lt |                         // path (UFCS constant)
+            token::BinOp(token::Shl) => true,   // path (double UFCS)
+            token::Interpolated(ref nt) => may_be_ident(nt),
             _ => false,
         },
-        sym::lifetime => match *token {
-            Token::Lifetime(_) => true,
-            Token::Interpolated(ref nt) => match **nt {
+        sym::lifetime => match token.kind {
+            token::Lifetime(_) => true,
+            token::Interpolated(ref nt) => match **nt {
                 token::NtLifetime(_) | token::NtTT(_) => true,
                 _ => false,
             },
             _ => false,
         },
-        _ => match *token {
+        _ => match token.kind {
             token::CloseDelim(_) => false,
             _ => true,
         },
@@ -930,10 +926,10 @@ fn parse_nt<'a>(p: &mut Parser<'a>, sp: Span, name: Symbol) -> Nonterminal {
         sym::literal => token::NtLiteral(panictry!(p.parse_literal_maybe_minus())),
         sym::ty => token::NtTy(panictry!(p.parse_ty())),
         // this could be handled like a token, since it is one
-        sym::ident => if let Some((ident, is_raw)) = get_macro_ident(&p.token) {
-            let span = p.span;
+        sym::ident => if let Some((name, is_raw)) = get_macro_name(&p.token) {
+            let span = p.token.span;
             p.bump();
-            token::NtIdent(Ident::new(ident.name, span), is_raw)
+            token::NtIdent(Ident::new(name, span), is_raw)
         } else {
             let token_str = pprust::token_to_string(&p.token);
             p.fatal(&format!("expected ident, found {}", &token_str)).emit();
index 285c88357a6a8c7dd3a6718e2bf14e945eab2717..5dbf21867afa6df02811e60f539ef32559878ec2 100644 (file)
@@ -1,8 +1,8 @@
 use crate::{ast, attr};
 use crate::edition::Edition;
-use crate::ext::base::{DummyResult, ExtCtxt, MacResult, SyntaxExtension};
-use crate::ext::base::{NormalTT, TTMacroExpander};
+use crate::ext::base::{DummyResult, ExtCtxt, MacResult, SyntaxExtension, TTMacroExpander};
 use crate::ext::expand::{AstFragment, AstFragmentKind};
+use crate::ext::hygiene::Transparency;
 use crate::ext::tt::macro_parser::{Success, Error, Failure};
 use crate::ext::tt::macro_parser::{MatchedSeq, MatchedNonterminal};
 use crate::ext::tt::macro_parser::{parse, parse_failure_msg};
 use crate::feature_gate::Features;
 use crate::parse::{Directory, ParseSess};
 use crate::parse::parser::Parser;
-use crate::parse::token::{self, NtTT};
-use crate::parse::token::Token::*;
+use crate::parse::token::{self, Token, NtTT};
+use crate::parse::token::TokenKind::*;
 use crate::symbol::{Symbol, kw, sym};
 use crate::tokenstream::{DelimSpan, TokenStream, TokenTree};
 
 use errors::FatalError;
-use syntax_pos::{Span, DUMMY_SP, symbol::Ident};
+use syntax_pos::{Span, symbol::Ident};
 use log::debug;
 
 use rustc_data_structures::fx::{FxHashMap};
@@ -47,7 +47,7 @@ pub fn make(mut self: Box<ParserAnyMacro<'a>>, kind: AstFragmentKind) -> AstFrag
         let fragment = panictry!(parser.parse_ast_fragment(kind, true).map_err(|mut e| {
             if parser.token == token::Eof && e.message().ends_with(", found `<eof>`") {
                 if !e.span.is_dummy() {  // early end of macro arm (#52866)
-                    e.replace_span_with(parser.sess.source_map().next_point(parser.span));
+                    e.replace_span_with(parser.sess.source_map().next_point(parser.token.span));
                 }
                 let msg = &e.message[0];
                 e.message[0] = (
@@ -63,7 +63,7 @@ pub fn make(mut self: Box<ParserAnyMacro<'a>>, kind: AstFragmentKind) -> AstFrag
                 if parser.sess.source_map().span_to_filename(arm_span).is_real() {
                     e.span_label(arm_span, "in this macro arm");
                 }
-            } else if !parser.sess.source_map().span_to_filename(parser.span).is_real() {
+            } else if !parser.sess.source_map().span_to_filename(parser.token.span).is_real() {
                 e.span_label(site_span, "in this macro invocation");
             }
             e
@@ -130,9 +130,7 @@ fn generic_extension<'cx>(cx: &'cx mut ExtCtxt<'_>,
     }
 
     // Which arm's failure should we report? (the one furthest along)
-    let mut best_fail_spot = DUMMY_SP;
-    let mut best_fail_tok = None;
-    let mut best_fail_text = None;
+    let mut best_failure: Option<(Token, &str)> = None;
 
     for (i, lhs) in lhses.iter().enumerate() { // try each arm's matchers
         let lhs_tt = match *lhs {
@@ -190,21 +188,20 @@ fn generic_extension<'cx>(cx: &'cx mut ExtCtxt<'_>,
                     arm_span,
                 })
             }
-            Failure(sp, tok, t) => if sp.lo() >= best_fail_spot.lo() {
-                best_fail_spot = sp;
-                best_fail_tok = Some(tok);
-                best_fail_text = Some(t);
-            },
+            Failure(token, msg) => match best_failure {
+                Some((ref best_token, _)) if best_token.span.lo() >= token.span.lo() => {}
+                _ => best_failure = Some((token, msg))
+            }
             Error(err_sp, ref msg) => {
                 cx.span_fatal(err_sp.substitute_dummy(sp), &msg[..])
             }
         }
     }
 
-    let best_fail_msg = parse_failure_msg(best_fail_tok.expect("ran no matchers"));
-    let span = best_fail_spot.substitute_dummy(sp);
-    let mut err = cx.struct_span_err(span, &best_fail_msg);
-    err.span_label(span, best_fail_text.unwrap_or(&best_fail_msg));
+    let (token, label) = best_failure.expect("ran no matchers");
+    let span = token.span.substitute_dummy(sp);
+    let mut err = cx.struct_span_err(span, &parse_failure_msg(&token));
+    err.span_label(span, label);
     if let Some(sp) = def_span {
         if cx.source_map().span_to_filename(sp).is_real() && !sp.is_dummy() {
             err.span_label(cx.source_map().def_span(sp), "when calling this macro");
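With the Failure variant now carrying the failing Token (and thus its span), generic_extension keeps a single Option<(Token, &str)> and prefers the arm whose match got furthest into the input. A small sketch of that selection rule, using a plain integer position as a stand-in for a span:

    // Hypothetical stand-ins: a failure is (position where matching stopped, message).
    fn pick_best_failure(failures: Vec<(u32, &'static str)>) -> Option<(u32, &'static str)> {
        let mut best: Option<(u32, &'static str)> = None;
        for (pos, msg) in failures {
            match best {
                // Keep the existing failure if it got at least as far.
                Some((best_pos, _)) if best_pos >= pos => {}
                _ => best = Some((pos, msg)),
            }
        }
        best
    }

    fn main() {
        let failures = vec![(3, "no rules expected `,`"), (7, "no rules expected `;`")];
        assert_eq!(pick_best_failure(failures), Some((7, "no rules expected `;`")));
    }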
@@ -269,17 +266,19 @@ pub fn compile(
     let argument_gram = vec![
         quoted::TokenTree::Sequence(DelimSpan::dummy(), Lrc::new(quoted::SequenceRepetition {
             tts: vec![
-                quoted::TokenTree::MetaVarDecl(DUMMY_SP, lhs_nm, ast::Ident::from_str("tt")),
-                quoted::TokenTree::Token(DUMMY_SP, token::FatArrow),
-                quoted::TokenTree::MetaVarDecl(DUMMY_SP, rhs_nm, ast::Ident::from_str("tt")),
+                quoted::TokenTree::MetaVarDecl(def.span, lhs_nm, ast::Ident::from_str("tt")),
+                quoted::TokenTree::token(token::FatArrow, def.span),
+                quoted::TokenTree::MetaVarDecl(def.span, rhs_nm, ast::Ident::from_str("tt")),
             ],
-            separator: Some(if body.legacy { token::Semi } else { token::Comma }),
+            separator: Some(Token::new(
+                if body.legacy { token::Semi } else { token::Comma }, def.span
+            )),
             op: quoted::KleeneOp::OneOrMore,
             num_captures: 2,
         })),
         // to phase into semicolon-termination instead of semicolon-separation
         quoted::TokenTree::Sequence(DelimSpan::dummy(), Lrc::new(quoted::SequenceRepetition {
-            tts: vec![quoted::TokenTree::Token(DUMMY_SP, token::Semi)],
+            tts: vec![quoted::TokenTree::token(token::Semi, def.span)],
             separator: None,
             op: quoted::KleeneOp::ZeroOrMore,
             num_captures: 0
@@ -288,11 +287,11 @@ pub fn compile(
 
     let argument_map = match parse(sess, body.stream(), &argument_gram, None, true) {
         Success(m) => m,
-        Failure(sp, tok, t) => {
-            let s = parse_failure_msg(tok);
-            let sp = sp.substitute_dummy(def.span);
+        Failure(token, msg) => {
+            let s = parse_failure_msg(&token);
+            let sp = token.span.substitute_dummy(def.span);
             let mut err = sess.span_diagnostic.struct_span_fatal(sp, &s);
-            err.span_label(sp, t);
+            err.span_label(sp, msg);
             err.emit();
             FatalError.raise();
         }
@@ -375,65 +374,65 @@ pub fn compile(
         valid,
     });
 
-    if body.legacy {
-        let allow_internal_unstable = attr::find_by_name(&def.attrs, sym::allow_internal_unstable)
-            .map(|attr| attr
-                .meta_item_list()
-                .map(|list| list.iter()
-                    .filter_map(|it| {
-                        let name = it.ident().map(|ident| ident.name);
-                        if name.is_none() {
-                            sess.span_diagnostic.span_err(it.span(),
-                                "allow internal unstable expects feature names")
-                        }
-                        name
-                    })
-                    .collect::<Vec<Symbol>>().into()
-                )
-                .unwrap_or_else(|| {
-                    sess.span_diagnostic.span_warn(
-                        attr.span, "allow_internal_unstable expects list of feature names. In the \
-                        future this will become a hard error. Please use `allow_internal_unstable(\
-                        foo, bar)` to only allow the `foo` and `bar` features",
-                    );
-                    vec![sym::allow_internal_unstable_backcompat_hack].into()
+    let transparency = if attr::contains_name(&def.attrs, sym::rustc_transparent_macro) {
+        Transparency::Transparent
+    } else if body.legacy {
+        Transparency::SemiTransparent
+    } else {
+        Transparency::Opaque
+    };
+
+    let allow_internal_unstable = attr::find_by_name(&def.attrs, sym::allow_internal_unstable)
+        .map(|attr| attr
+            .meta_item_list()
+            .map(|list| list.iter()
+                .filter_map(|it| {
+                    let name = it.ident().map(|ident| ident.name);
+                    if name.is_none() {
+                        sess.span_diagnostic.span_err(it.span(),
+                            "allow internal unstable expects feature names")
+                    }
+                    name
                 })
-            );
-        let allow_internal_unsafe = attr::contains_name(&def.attrs, sym::allow_internal_unsafe);
-        let mut local_inner_macros = false;
-        if let Some(macro_export) = attr::find_by_name(&def.attrs, sym::macro_export) {
-            if let Some(l) = macro_export.meta_item_list() {
-                local_inner_macros = attr::list_contains_name(&l, sym::local_inner_macros);
-            }
-        }
+                .collect::<Vec<Symbol>>().into()
+            )
+            .unwrap_or_else(|| {
+                sess.span_diagnostic.span_warn(
+                    attr.span, "allow_internal_unstable expects list of feature names. In the \
+                    future this will become a hard error. Please use `allow_internal_unstable(\
+                    foo, bar)` to only allow the `foo` and `bar` features",
+                );
+                vec![sym::allow_internal_unstable_backcompat_hack].into()
+            })
+        );
 
-        let unstable_feature = attr::find_stability(&sess,
-                                                    &def.attrs, def.span).and_then(|stability| {
-            if let attr::StabilityLevel::Unstable { issue, .. } = stability.level {
-                Some((stability.feature, issue))
-            } else {
-                None
-            }
-        });
-
-        NormalTT {
-            expander,
-            def_info: Some((def.id, def.span)),
-            allow_internal_unstable,
-            allow_internal_unsafe,
-            local_inner_macros,
-            unstable_feature,
-            edition,
+    let allow_internal_unsafe = attr::contains_name(&def.attrs, sym::allow_internal_unsafe);
+
+    let mut local_inner_macros = false;
+    if let Some(macro_export) = attr::find_by_name(&def.attrs, sym::macro_export) {
+        if let Some(l) = macro_export.meta_item_list() {
+            local_inner_macros = attr::list_contains_name(&l, sym::local_inner_macros);
         }
-    } else {
-        let is_transparent = attr::contains_name(&def.attrs, sym::rustc_transparent_macro);
+    }
 
-        SyntaxExtension::DeclMacro {
-            expander,
-            def_info: Some((def.id, def.span)),
-            is_transparent,
-            edition,
+    let unstable_feature = attr::find_stability(&sess,
+                                                &def.attrs, def.span).and_then(|stability| {
+        if let attr::StabilityLevel::Unstable { issue, .. } = stability.level {
+            Some((stability.feature, issue))
+        } else {
+            None
         }
+    });
+
+    SyntaxExtension::LegacyBang {
+        expander,
+        def_info: Some((def.id, def.span)),
+        transparency,
+        allow_internal_unstable,
+        allow_internal_unsafe,
+        local_inner_macros,
+        unstable_feature,
+        edition,
     }
 }
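The compile() changes above fold the declarative/legacy split into a single LegacyBang extension and compute the macro's hygiene transparency up front: rustc_transparent_macro forces Transparent, macro_rules! (legacy) defaults to SemiTransparent, and macro items default to Opaque. A minimal sketch of that decision, with a hypothetical MacroDef stand-in for the attribute lookup:

    // Hypothetical simplified input; the real code inspects attributes on the item.
    struct MacroDef {
        legacy: bool,
        has_rustc_transparent_macro: bool,
    }

    #[derive(Debug, PartialEq)]
    enum Transparency { Transparent, SemiTransparent, Opaque }

    fn transparency(def: &MacroDef) -> Transparency {
        if def.has_rustc_transparent_macro {
            Transparency::Transparent
        } else if def.legacy {
            Transparency::SemiTransparent // macro_rules! style
        } else {
            Transparency::Opaque          // `macro m() {}` style
        }
    }

    fn main() {
        let m = MacroDef { legacy: true, has_rustc_transparent_macro: false };
        assert_eq!(transparency(&m), Transparency::SemiTransparent);
    }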
 
@@ -611,9 +610,8 @@ fn build_recur(sets: &mut FirstSets, tts: &[TokenTree]) -> TokenSet {
                         // If the sequence contents can be empty, then the first
                         // token could be the separator token itself.
 
-                        if let (Some(ref sep), true) = (seq_rep.separator.clone(),
-                                                        subfirst.maybe_empty) {
-                            first.add_one_maybe(TokenTree::Token(sp.entire(), sep.clone()));
+                        if let (Some(sep), true) = (&seq_rep.separator, subfirst.maybe_empty) {
+                            first.add_one_maybe(TokenTree::Token(sep.clone()));
                         }
 
                         // Reverse scan: Sequence comes before `first`.
@@ -661,9 +659,8 @@ fn first(&self, tts: &[quoted::TokenTree]) -> TokenSet {
                             // If the sequence contents can be empty, then the first
                             // token could be the separator token itself.
 
-                            if let (Some(ref sep), true) = (seq_rep.separator.clone(),
-                                                            subfirst.maybe_empty) {
-                                first.add_one_maybe(TokenTree::Token(sp.entire(), sep.clone()));
+                            if let (Some(sep), true) = (&seq_rep.separator, subfirst.maybe_empty) {
+                                first.add_one_maybe(TokenTree::Token(sep.clone()));
                             }
 
                             assert!(first.maybe_empty);
@@ -854,7 +851,7 @@ fn check_matcher_core(sess: &ParseSess,
                 // against SUFFIX
                 continue 'each_token;
             }
-            TokenTree::Sequence(sp, ref seq_rep) => {
+            TokenTree::Sequence(_, ref seq_rep) => {
                 suffix_first = build_suffix_first();
                 // The trick here: when we check the interior, we want
                 // to include the separator (if any) as a potential
@@ -867,9 +864,9 @@ fn check_matcher_core(sess: &ParseSess,
                 // work of cloning it? But then again, this way I may
                 // get a "tighter" span?
                 let mut new;
-                let my_suffix = if let Some(ref u) = seq_rep.separator {
+                let my_suffix = if let Some(sep) = &seq_rep.separator {
                     new = suffix_first.clone();
-                    new.add_one_maybe(TokenTree::Token(sp.entire(), u.clone()));
+                    new.add_one_maybe(TokenTree::Token(sep.clone()));
                     &new
                 } else {
                     &suffix_first
@@ -912,7 +909,7 @@ fn check_matcher_core(sess: &ParseSess,
                             continue 'each_last;
                         }
                         IsInFollow::Yes => {}
-                        IsInFollow::No(ref possible) => {
+                        IsInFollow::No(possible) => {
                             let may_be = if last.tokens.len() == 1 &&
                                 suffix_first.tokens.len() == 1
                             {
@@ -936,7 +933,7 @@ fn check_matcher_core(sess: &ParseSess,
                                 format!("not allowed after `{}` fragments", frag_spec),
                             );
                             let msg = "allowed there are: ";
-                            match &possible[..] {
+                            match possible {
                                 &[] => {}
                                 &[t] => {
                                     err.note(&format!(
@@ -1000,7 +997,7 @@ fn frag_can_be_followed_by_any(frag: &str) -> bool {
 
 enum IsInFollow {
     Yes,
-    No(Vec<&'static str>),
+    No(&'static [&'static str]),
     Invalid(String, &'static str),
 }
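Switching IsInFollow::No to hold a &'static [&'static str] lets each follow-set live in a const table instead of allocating a fresh Vec every time a follow-set check fails. A tiny sketch of the pattern (the token strings below are just an example set):

    // A const slice avoids allocating a new Vec each time the error path is hit.
    const STMT_EXPR_FOLLOW: &[&str] = &["`=>`", "`,`", "`;`"];

    enum IsInFollow {
        Yes,
        No(&'static [&'static str]),
    }

    fn check(tok: &str) -> IsInFollow {
        match tok {
            "=>" | "," | ";" => IsInFollow::Yes,
            _ => IsInFollow::No(STMT_EXPR_FOLLOW),
        }
    }

    fn main() {
        if let IsInFollow::No(allowed) = check("+") {
            println!("allowed there are: {}", allowed.join(", "));
        }
    }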
 
@@ -1015,7 +1012,7 @@ enum IsInFollow {
 fn is_in_follow(tok: &quoted::TokenTree, frag: &str) -> IsInFollow {
     use quoted::TokenTree;
 
-    if let TokenTree::Token(_, token::CloseDelim(_)) = *tok {
+    if let TokenTree::Token(Token { kind: token::CloseDelim(_), .. }) = *tok {
         // closing a token tree can never be matched by any fragment;
         // iow, we always require that `(` and `)` match, etc.
         IsInFollow::Yes
@@ -1032,45 +1029,44 @@ fn is_in_follow(tok: &quoted::TokenTree, frag: &str) -> IsInFollow {
                 IsInFollow::Yes
             },
             "stmt" | "expr"  => {
-                let tokens = vec!["`=>`", "`,`", "`;`"];
-                match *tok {
-                    TokenTree::Token(_, ref tok) => match *tok {
+                const TOKENS: &[&str] = &["`=>`", "`,`", "`;`"];
+                match tok {
+                    TokenTree::Token(token) => match token.kind {
                         FatArrow | Comma | Semi => IsInFollow::Yes,
-                        _ => IsInFollow::No(tokens),
+                        _ => IsInFollow::No(TOKENS),
                     },
-                    _ => IsInFollow::No(tokens),
+                    _ => IsInFollow::No(TOKENS),
                 }
             },
             "pat" => {
-                let tokens = vec!["`=>`", "`,`", "`=`", "`|`", "`if`", "`in`"];
-                match *tok {
-                    TokenTree::Token(_, ref tok) => match *tok {
+                const TOKENS: &[&str] = &["`=>`", "`,`", "`=`", "`|`", "`if`", "`in`"];
+                match tok {
+                    TokenTree::Token(token) => match token.kind {
                         FatArrow | Comma | Eq | BinOp(token::Or) => IsInFollow::Yes,
-                        Ident(i, false) if i.name == kw::If ||
-                                           i.name == kw::In => IsInFollow::Yes,
-                        _ => IsInFollow::No(tokens),
+                        Ident(name, false) if name == kw::If || name == kw::In => IsInFollow::Yes,
+                        _ => IsInFollow::No(TOKENS),
                     },
-                    _ => IsInFollow::No(tokens),
+                    _ => IsInFollow::No(TOKENS),
                 }
             },
             "path" | "ty" => {
-                let tokens = vec![
+                const TOKENS: &[&str] = &[
                     "`{`", "`[`", "`=>`", "`,`", "`>`","`=`", "`:`", "`;`", "`|`", "`as`",
                     "`where`",
                 ];
-                match *tok {
-                    TokenTree::Token(_, ref tok) => match *tok {
+                match tok {
+                    TokenTree::Token(token) => match token.kind {
                         OpenDelim(token::DelimToken::Brace) |
                         OpenDelim(token::DelimToken::Bracket) |
                         Comma | FatArrow | Colon | Eq | Gt | BinOp(token::Shr) | Semi |
                         BinOp(token::Or) => IsInFollow::Yes,
-                        Ident(i, false) if i.name == kw::As ||
-                                           i.name == kw::Where => IsInFollow::Yes,
-                        _ => IsInFollow::No(tokens),
+                        Ident(name, false) if name == kw::As ||
+                                              name == kw::Where => IsInFollow::Yes,
+                        _ => IsInFollow::No(TOKENS),
                     },
                     TokenTree::MetaVarDecl(_, _, frag) if frag.name == sym::block =>
                         IsInFollow::Yes,
-                    _ => IsInFollow::No(tokens),
+                    _ => IsInFollow::No(TOKENS),
                 }
             },
             "ident" | "lifetime" => {
@@ -1088,23 +1084,22 @@ fn is_in_follow(tok: &quoted::TokenTree, frag: &str) -> IsInFollow {
             },
             "vis" => {
                 // Explicitly disallow `priv`, on the off chance it comes back.
-                let tokens = vec!["`,`", "an ident", "a type"];
-                match *tok {
-                    TokenTree::Token(_, ref tok) => match *tok {
+                const TOKENS: &[&str] = &["`,`", "an ident", "a type"];
+                match tok {
+                    TokenTree::Token(token) => match token.kind {
                         Comma => IsInFollow::Yes,
-                        Ident(i, is_raw) if is_raw || i.name != kw::Priv =>
-                            IsInFollow::Yes,
-                        ref tok => if tok.can_begin_type() {
+                        Ident(name, is_raw) if is_raw || name != kw::Priv => IsInFollow::Yes,
+                        _ => if token.can_begin_type() {
                             IsInFollow::Yes
                         } else {
-                            IsInFollow::No(tokens)
+                            IsInFollow::No(TOKENS)
                         }
                     },
                     TokenTree::MetaVarDecl(_, _, frag) if frag.name == sym::ident
                                                        || frag.name == sym::ty
                                                        || frag.name == sym::path =>
                         IsInFollow::Yes,
-                    _ => IsInFollow::No(tokens),
+                    _ => IsInFollow::No(TOKENS),
                 }
             },
             "" => IsInFollow::Yes, // kw::Invalid
@@ -1150,7 +1145,7 @@ fn is_legal_fragment_specifier(_sess: &ParseSess,
 
 fn quoted_tt_to_string(tt: &quoted::TokenTree) -> String {
     match *tt {
-        quoted::TokenTree::Token(_, ref tok) => crate::print::pprust::token_to_string(tok),
+        quoted::TokenTree::Token(ref token) => crate::print::pprust::token_to_string(&token),
         quoted::TokenTree::MetaVar(_, name) => format!("${}", name),
         quoted::TokenTree::MetaVarDecl(_, name, kind) => format!("${}:{}", name, kind),
         _ => panic!("unexpected quoted::TokenTree::{{Sequence or Delimited}} \
index a029c6546595217abe0855f1f74ebb06fd5c2995..b52e3b715056741b5e7e1435cd17c43287d06b2a 100644 (file)
@@ -1,8 +1,8 @@
 use crate::ast::NodeId;
-use crate::early_buffered_lints::BufferedEarlyLintId;
 use crate::ext::tt::macro_parser;
 use crate::feature_gate::Features;
-use crate::parse::{token, ParseSess};
+use crate::parse::token::{self, Token, TokenKind};
+use crate::parse::ParseSess;
 use crate::print::pprust;
 use crate::tokenstream::{self, DelimSpan};
 use crate::ast;
@@ -22,16 +22,6 @@ pub struct Delimited {
 }
 
 impl Delimited {
-    /// Returns the opening delimiter (possibly `NoDelim`).
-    pub fn open_token(&self) -> token::Token {
-        token::OpenDelim(self.delim)
-    }
-
-    /// Returns the closing delimiter (possibly `NoDelim`).
-    pub fn close_token(&self) -> token::Token {
-        token::CloseDelim(self.delim)
-    }
-
     /// Returns a `self::TokenTree` with a `Span` corresponding to the opening delimiter.
     pub fn open_tt(&self, span: Span) -> TokenTree {
         let open_span = if span.is_dummy() {
@@ -39,7 +29,7 @@ pub fn open_tt(&self, span: Span) -> TokenTree {
         } else {
             span.with_lo(span.lo() + BytePos(self.delim.len() as u32))
         };
-        TokenTree::Token(open_span, self.open_token())
+        TokenTree::token(token::OpenDelim(self.delim), open_span)
     }
 
     /// Returns a `self::TokenTree` with a `Span` corresponding to the closing delimiter.
@@ -49,7 +39,7 @@ pub fn close_tt(&self, span: Span) -> TokenTree {
         } else {
             span.with_lo(span.hi() - BytePos(self.delim.len() as u32))
         };
-        TokenTree::Token(close_span, self.close_token())
+        TokenTree::token(token::CloseDelim(self.delim), close_span)
     }
 }
 
@@ -58,7 +48,7 @@ pub struct SequenceRepetition {
     /// The sequence of token trees
     pub tts: Vec<TokenTree>,
     /// The optional separator
-    pub separator: Option<token::Token>,
+    pub separator: Option<Token>,
     /// Whether the sequence can be repeated zero (*), or one or more times (+)
     pub op: KleeneOp,
     /// The number of `Match`s that appear in the sequence (and subsequences)
@@ -81,7 +71,7 @@ pub enum KleeneOp {
 /// are "first-class" token trees. Useful for parsing macros.
 #[derive(Debug, Clone, PartialEq, RustcEncodable, RustcDecodable)]
 pub enum TokenTree {
-    Token(Span, token::Token),
+    Token(Token),
     Delimited(DelimSpan, Lrc<Delimited>),
     /// A kleene-style repetition sequence
     Sequence(DelimSpan, Lrc<SequenceRepetition>),
@@ -144,13 +134,17 @@ pub fn get_tt(&self, index: usize) -> TokenTree {
     /// Retrieves the `TokenTree`'s span.
     pub fn span(&self) -> Span {
         match *self {
-            TokenTree::Token(sp, _)
-            | TokenTree::MetaVar(sp, _)
-            | TokenTree::MetaVarDecl(sp, _, _) => sp,
-            TokenTree::Delimited(sp, _)
-            | TokenTree::Sequence(sp, _) => sp.entire(),
+            TokenTree::Token(Token { span, .. })
+            | TokenTree::MetaVar(span, _)
+            | TokenTree::MetaVarDecl(span, _, _) => span,
+            TokenTree::Delimited(span, _)
+            | TokenTree::Sequence(span, _) => span.entire(),
         }
     }
+
+    crate fn token(kind: TokenKind, span: Span) -> TokenTree {
+        TokenTree::Token(Token::new(kind, span))
+    }
 }
 
 /// Takes a `tokenstream::TokenStream` and returns a `Vec<self::TokenTree>`. Specifically, this
@@ -205,20 +199,21 @@ pub fn parse(
         match tree {
             TokenTree::MetaVar(start_sp, ident) if expect_matchers => {
                 let span = match trees.next() {
-                    Some(tokenstream::TokenTree::Token(span, token::Colon)) => match trees.next() {
-                        Some(tokenstream::TokenTree::Token(end_sp, ref tok)) => match tok.ident() {
-                            Some((kind, _)) => {
-                                let span = end_sp.with_lo(start_sp.lo());
-                                result.push(TokenTree::MetaVarDecl(span, ident, kind));
-                                continue;
-                            }
-                            _ => end_sp,
+                    Some(tokenstream::TokenTree::Token(Token { kind: token::Colon, span })) =>
+                        match trees.next() {
+                            Some(tokenstream::TokenTree::Token(token)) => match token.ident() {
+                                Some((kind, _)) => {
+                                    let span = token.span.with_lo(start_sp.lo());
+                                    result.push(TokenTree::MetaVarDecl(span, ident, kind));
+                                    continue;
+                                }
+                                _ => token.span,
+                            },
+                            tree => tree
+                                .as_ref()
+                                .map(tokenstream::TokenTree::span)
+                                .unwrap_or(span),
                         },
-                        tree => tree
-                            .as_ref()
-                            .map(tokenstream::TokenTree::span)
-                            .unwrap_or(span),
-                    },
                     tree => tree
                         .as_ref()
                         .map(tokenstream::TokenTree::span)
@@ -254,29 +249,26 @@ pub fn parse(
 /// - `sess`: the parsing session. Any errors will be emitted to this session.
 /// - `features`, `attrs`: language feature flags and attributes so that we know whether to use
 ///   unstable features or not.
-fn parse_tree<I>(
+fn parse_tree(
     tree: tokenstream::TokenTree,
-    trees: &mut Peekable<I>,
+    trees: &mut Peekable<impl Iterator<Item = tokenstream::TokenTree>>,
     expect_matchers: bool,
     sess: &ParseSess,
     features: &Features,
     attrs: &[ast::Attribute],
     edition: Edition,
     macro_node_id: NodeId,
-) -> TokenTree
-where
-    I: Iterator<Item = tokenstream::TokenTree>,
-{
+) -> TokenTree {
     // Depending on what `tree` is, we could be parsing different parts of a macro
     match tree {
         // `tree` is a `$` token. Look at the next token in `trees`
-        tokenstream::TokenTree::Token(span, token::Dollar) => match trees.next() {
+        tokenstream::TokenTree::Token(Token { kind: token::Dollar, span }) => match trees.next() {
             // `tree` is followed by a delimited set of token trees. This indicates the beginning
             // of a repetition sequence in the macro (e.g. `$(pat)*`).
             Some(tokenstream::TokenTree::Delimited(span, delim, tts)) => {
                 // Must have `(` not `{` or `[`
                 if delim != token::Paren {
-                    let tok = pprust::token_to_string(&token::OpenDelim(delim));
+                    let tok = pprust::token_kind_to_string(&token::OpenDelim(delim));
                     let msg = format!("expected `(`, found `{}`", tok);
                     sess.span_diagnostic.span_err(span.entire(), &msg);
                 }
@@ -291,16 +283,7 @@ fn parse_tree<I>(
                     macro_node_id,
                 );
                 // Get the Kleene operator and optional separator
-                let (separator, op) =
-                    parse_sep_and_kleene_op(
-                        trees,
-                        span.entire(),
-                        sess,
-                        features,
-                        attrs,
-                        edition,
-                        macro_node_id,
-                    );
+                let (separator, op) = parse_sep_and_kleene_op(trees, span.entire(), sess);
                 // Count the number of captured "names" (i.e., named metavars)
                 let name_captures = macro_parser::count_names(&sequence);
                 TokenTree::Sequence(
@@ -316,33 +299,32 @@ fn parse_tree<I>(
 
             // `tree` is followed by an `ident`. This could be `$meta_var` or the `$crate` special
             // metavariable that names the crate of the invocation.
-            Some(tokenstream::TokenTree::Token(ident_span, ref token)) if token.is_ident() => {
+            Some(tokenstream::TokenTree::Token(token)) if token.is_ident() => {
                 let (ident, is_raw) = token.ident().unwrap();
-                let span = ident_span.with_lo(span.lo());
+                let span = ident.span.with_lo(span.lo());
                 if ident.name == kw::Crate && !is_raw {
-                    let ident = ast::Ident::new(kw::DollarCrate, ident.span);
-                    TokenTree::Token(span, token::Ident(ident, is_raw))
+                    TokenTree::token(token::Ident(kw::DollarCrate, is_raw), span)
                 } else {
                     TokenTree::MetaVar(span, ident)
                 }
             }
 
             // `tree` is followed by a random token. This is an error.
-            Some(tokenstream::TokenTree::Token(span, tok)) => {
+            Some(tokenstream::TokenTree::Token(token)) => {
                 let msg = format!(
                     "expected identifier, found `{}`",
-                    pprust::token_to_string(&tok)
+                    pprust::token_to_string(&token),
                 );
-                sess.span_diagnostic.span_err(span, &msg);
-                TokenTree::MetaVar(span, ast::Ident::invalid())
+                sess.span_diagnostic.span_err(token.span, &msg);
+                TokenTree::MetaVar(token.span, ast::Ident::invalid())
             }
 
             // There are no more tokens. Just return the `$` we already have.
-            None => TokenTree::Token(span, token::Dollar),
+            None => TokenTree::token(token::Dollar, span),
         },
 
         // `tree` is an arbitrary token. Keep it.
-        tokenstream::TokenTree::Token(span, tok) => TokenTree::Token(span, tok),
+        tokenstream::TokenTree::Token(token) => TokenTree::Token(token),
 
         // `tree` is the beginning of a delimited set of tokens (e.g., `(` or `{`). We need to
         // descend into the delimited set and further parse it.
@@ -366,8 +348,8 @@ fn parse_tree<I>(
 
 /// Takes a token and returns `Some(KleeneOp)` if the token is `+` `*` or `?`. Otherwise, return
 /// `None`.
-fn kleene_op(token: &token::Token) -> Option<KleeneOp> {
-    match *token {
+fn kleene_op(token: &Token) -> Option<KleeneOp> {
+    match token.kind {
         token::BinOp(token::Star) => Some(KleeneOp::ZeroOrMore),
         token::BinOp(token::Plus) => Some(KleeneOp::OneOrMore),
         token::Question => Some(KleeneOp::ZeroOrOne),
@@ -380,17 +362,14 @@ fn kleene_op(token: &token::Token) -> Option<KleeneOp> {
 /// - Ok(Ok((op, span))) if the next token tree is a KleeneOp
 /// - Ok(Err(tok, span)) if the next token tree is a token but not a KleeneOp
 /// - Err(span) if the next token tree is not a token
-fn parse_kleene_op<I>(
-    input: &mut I,
+fn parse_kleene_op(
+    input: &mut impl Iterator<Item = tokenstream::TokenTree>,
     span: Span,
-) -> Result<Result<(KleeneOp, Span), (token::Token, Span)>, Span>
-where
-    I: Iterator<Item = tokenstream::TokenTree>,
-{
+) -> Result<Result<(KleeneOp, Span), Token>, Span> {
     match input.next() {
-        Some(tokenstream::TokenTree::Token(span, tok)) => match kleene_op(&tok) {
-            Some(op) => Ok(Ok((op, span))),
-            None => Ok(Err((tok, span))),
+        Some(tokenstream::TokenTree::Token(token)) => match kleene_op(&token) {
+            Some(op) => Ok(Ok((op, token.span))),
+            None => Ok(Err(token)),
         },
         tree => Err(tree
             .as_ref()
@@ -411,181 +390,23 @@ fn parse_kleene_op<I>(
 /// session `sess`. If the next one (or possibly two) tokens in `input` correspond to a Kleene
 /// operator and separator, then a tuple with `(separator, KleeneOp)` is returned. Otherwise, an
 /// error with the appropriate span is emitted to `sess` and a dummy value is returned.
-///
-/// N.B., in the 2015 edition, `*` and `+` are the only Kleene operators, and `?` is a separator.
-/// In the 2018 edition however, `?` is a Kleene operator, and not a separator.
-fn parse_sep_and_kleene_op<I>(
-    input: &mut Peekable<I>,
-    span: Span,
-    sess: &ParseSess,
-    features: &Features,
-    attrs: &[ast::Attribute],
-    edition: Edition,
-    macro_node_id: NodeId,
-) -> (Option<token::Token>, KleeneOp)
-where
-    I: Iterator<Item = tokenstream::TokenTree>,
-{
-    match edition {
-        Edition::Edition2015 => parse_sep_and_kleene_op_2015(
-            input,
-            span,
-            sess,
-            features,
-            attrs,
-            macro_node_id,
-        ),
-        Edition::Edition2018 => parse_sep_and_kleene_op_2018(input, span, sess, features, attrs),
-    }
-}
-
-// `?` is a separator (with a migration warning) and never a KleeneOp.
-fn parse_sep_and_kleene_op_2015<I>(
-    input: &mut Peekable<I>,
-    span: Span,
-    sess: &ParseSess,
-    _features: &Features,
-    _attrs: &[ast::Attribute],
-    macro_node_id: NodeId,
-) -> (Option<token::Token>, KleeneOp)
-where
-    I: Iterator<Item = tokenstream::TokenTree>,
-{
-    // We basically look at two token trees here, denoted as #1 and #2 below
-    let span = match parse_kleene_op(input, span) {
-        // #1 is a `+` or `*` KleeneOp
-        //
-        // `?` is ambiguous: it could be a separator (warning) or a Kleene::ZeroOrOne (error), so
-        // we need to look ahead one more token to be sure.
-        Ok(Ok((op, _))) if op != KleeneOp::ZeroOrOne => return (None, op),
-
-        // #1 is `?` token, but it could be a Kleene::ZeroOrOne (error in 2015) without a separator
-        // or it could be a `?` separator followed by any Kleene operator. We need to look ahead 1
-        // token to find out which.
-        Ok(Ok((op, op1_span))) => {
-            assert_eq!(op, KleeneOp::ZeroOrOne);
-
-            // Lookahead at #2. If it is a KleenOp, then #1 is a separator.
-            let is_1_sep = if let Some(&tokenstream::TokenTree::Token(_, ref tok2)) = input.peek() {
-                kleene_op(tok2).is_some()
-            } else {
-                false
-            };
-
-            if is_1_sep {
-                // #1 is a separator and #2 should be a KleepeOp.
-                // (N.B. We need to advance the input iterator.)
-                match parse_kleene_op(input, span) {
-                    // #2 is `?`, which is not allowed as a Kleene op in 2015 edition,
-                    // but is allowed in the 2018 edition.
-                    Ok(Ok((op, op2_span))) if op == KleeneOp::ZeroOrOne => {
-                        sess.span_diagnostic
-                            .struct_span_err(op2_span, "expected `*` or `+`")
-                            .note("`?` is not a macro repetition operator in the 2015 edition, \
-                                 but is accepted in the 2018 edition")
-                            .emit();
-
-                        // Return a dummy
-                        return (None, KleeneOp::ZeroOrMore);
-                    }
-
-                    // #2 is a Kleene op, which is the only valid option
-                    Ok(Ok((op, _))) => {
-                        // Warn that `?` as a separator will be deprecated
-                        sess.buffer_lint(
-                            BufferedEarlyLintId::QuestionMarkMacroSep,
-                            op1_span,
-                            macro_node_id,
-                            "using `?` as a separator is deprecated and will be \
-                             a hard error in an upcoming edition",
-                        );
-
-                        return (Some(token::Question), op);
-                    }
-
-                    // #2 is a random token (this is an error) :(
-                    Ok(Err((_, _))) => op1_span,
-
-                    // #2 is not even a token at all :(
-                    Err(_) => op1_span,
-                }
-            } else {
-                // `?` is not allowed as a Kleene op in 2015,
-                // but is allowed in the 2018 edition
-                sess.span_diagnostic
-                    .struct_span_err(op1_span, "expected `*` or `+`")
-                    .note("`?` is not a macro repetition operator in the 2015 edition, \
-                         but is accepted in the 2018 edition")
-                    .emit();
-
-                // Return a dummy
-                return (None, KleeneOp::ZeroOrMore);
-            }
-        }
-
-        // #1 is a separator followed by #2, a KleeneOp
-        Ok(Err((tok, span))) => match parse_kleene_op(input, span) {
-            // #2 is a `?`, which is not allowed as a Kleene op in 2015 edition,
-            // but is allowed in the 2018 edition
-            Ok(Ok((op, op2_span))) if op == KleeneOp::ZeroOrOne => {
-                sess.span_diagnostic
-                    .struct_span_err(op2_span, "expected `*` or `+`")
-                    .note("`?` is not a macro repetition operator in the 2015 edition, \
-                        but is accepted in the 2018 edition")
-                    .emit();
-
-                // Return a dummy
-                return (None, KleeneOp::ZeroOrMore);
-            }
-
-            // #2 is a KleeneOp :D
-            Ok(Ok((op, _))) => return (Some(tok), op),
-
-            // #2 is a random token :(
-            Ok(Err((_, span))) => span,
-
-            // #2 is not a token at all :(
-            Err(span) => span,
-        },
-
-        // #1 is not a token
-        Err(span) => span,
-    };
-
-    sess.span_diagnostic.span_err(span, "expected `*` or `+`");
-
-    // Return a dummy
-    (None, KleeneOp::ZeroOrMore)
-}
-
-// `?` is a Kleene op, not a separator
-fn parse_sep_and_kleene_op_2018<I>(
-    input: &mut Peekable<I>,
+fn parse_sep_and_kleene_op(
+    input: &mut Peekable<impl Iterator<Item = tokenstream::TokenTree>>,
     span: Span,
     sess: &ParseSess,
-    _features: &Features,
-    _attrs: &[ast::Attribute],
-) -> (Option<token::Token>, KleeneOp)
-where
-    I: Iterator<Item = tokenstream::TokenTree>,
-{
+) -> (Option<Token>, KleeneOp) {
     // We basically look at two token trees here, denoted as #1 and #2 below
     let span = match parse_kleene_op(input, span) {
-        // #1 is a `?` (needs feature gate)
-        Ok(Ok((op, _op1_span))) if op == KleeneOp::ZeroOrOne => {
-            return (None, op);
-        }
-
-        // #1 is a `+` or `*` KleeneOp
+        // #1 is a `?`, `+`, or `*` KleeneOp
         Ok(Ok((op, _))) => return (None, op),
 
         // #1 is a separator followed by #2, a KleeneOp
-        Ok(Err((tok, span))) => match parse_kleene_op(input, span) {
+        Ok(Err(token)) => match parse_kleene_op(input, token.span) {
             // #2 is the `?` Kleene op, which does not take a separator (error)
-            Ok(Ok((op, _op2_span))) if op == KleeneOp::ZeroOrOne => {
+            Ok(Ok((KleeneOp::ZeroOrOne, _))) => {
                 // Error!
                 sess.span_diagnostic.span_err(
-                    span,
+                    token.span,
                     "the `?` macro repetition operator does not take a separator",
                 );
 
@@ -594,13 +415,10 @@ fn parse_sep_and_kleene_op_2018<I>(
             }
 
             // #2 is a KleeneOp :D
-            Ok(Ok((op, _))) => return (Some(tok), op),
-
-            // #2 is a random token :(
-            Ok(Err((_, span))) => span,
+            Ok(Ok((op, _))) => return (Some(token), op),
 
-            // #2 is not a token at all :(
-            Err(span) => span,
+            // #2 is a random token or not a token at all :(
+            Ok(Err(Token { span, .. })) | Err(span) => span,
         },
 
         // #1 is not a token
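
Throughout this file the old two-field `TokenTree::Token(Span, token::Token)` shape becomes `TokenTree::Token(Token)`, with the span stored inside the token itself and a small `TokenTree::token(kind, span)` constructor for callers. A reduced sketch of that layout; the types below are simplified stand-ins, not libsyntax's real definitions:

    #[derive(Clone, Copy, Debug)]
    struct Span(u32, u32);

    #[derive(Clone, Debug)]
    enum TokenKind {
        Dollar,
        Comma,
    }

    #[derive(Clone, Debug)]
    struct Token {
        kind: TokenKind,
        span: Span,
    }

    impl Token {
        fn new(kind: TokenKind, span: Span) -> Token {
            Token { kind, span }
        }
    }

    #[derive(Debug)]
    enum TokenTree {
        Token(Token),
        // Delimited, Sequence, MetaVar, ... elided in this sketch
    }

    impl TokenTree {
        // Mirrors the `crate fn token(kind, span)` helper added above.
        fn token(kind: TokenKind, span: Span) -> TokenTree {
            TokenTree::Token(Token::new(kind, span))
        }

        // The span now comes out of the embedded token, not a separate field.
        fn span(&self) -> Span {
            match self {
                TokenTree::Token(Token { span, .. }) => *span,
            }
        }
    }

    fn main() {
        let dollar = TokenTree::token(TokenKind::Dollar, Span(0, 1));
        let comma = TokenTree::token(TokenKind::Comma, Span(3, 4));
        println!("{:?} {:?}", dollar.span(), comma.span());
    }
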
index e6b49e61937d6aae3e96670b44fd4e72213f2b02..c51f4b20c31c04689e7d2f5f68606adf259450f5 100644 (file)
@@ -8,7 +8,6 @@
 use crate::tokenstream::{DelimSpan, TokenStream, TokenTree, TreeAndJoint};
 
 use smallvec::{smallvec, SmallVec};
-use syntax_pos::DUMMY_SP;
 
 use rustc_data_structures::fx::FxHashMap;
 use rustc_data_structures::sync::Lrc;
@@ -109,17 +108,13 @@ pub fn transcribe(
         else {
             // Otherwise, if we have just reached the end of a sequence and we can keep repeating,
             // go back to the beginning of the sequence.
-            if let Frame::Sequence { ref mut idx, ref sep, .. } = *stack.last_mut().unwrap() {
-                let (ref mut repeat_idx, repeat_len) = *repeats.last_mut().unwrap();
+            if let Frame::Sequence { idx, sep, .. } = stack.last_mut().unwrap() {
+                let (repeat_idx, repeat_len) = repeats.last_mut().unwrap();
                 *repeat_idx += 1;
-                if *repeat_idx < repeat_len {
+                if repeat_idx < repeat_len {
                     *idx = 0;
-                    if let Some(sep) = sep.clone() {
-                        let prev_span = match result.last() {
-                            Some((tt, _)) => tt.span(),
-                            None => DUMMY_SP,
-                        };
-                        result.push(TokenTree::Token(prev_span, sep).into());
+                    if let Some(sep) = sep {
+                        result.push(TokenTree::Token(sep.clone()).into());
                     }
                     continue;
                 }
@@ -225,7 +220,7 @@ pub fn transcribe(
                             result.push(tt.clone().into());
                         } else {
                             sp = sp.apply_mark(cx.current_expansion.mark);
-                            let token = TokenTree::Token(sp, Token::Interpolated(nt.clone()));
+                            let token = TokenTree::token(token::Interpolated(nt.clone()), sp);
                             result.push(token.into());
                         }
                     } else {
@@ -241,8 +236,8 @@ pub fn transcribe(
                     let ident =
                         Ident::new(ident.name, ident.span.apply_mark(cx.current_expansion.mark));
                     sp = sp.apply_mark(cx.current_expansion.mark);
-                    result.push(TokenTree::Token(sp, token::Dollar).into());
-                    result.push(TokenTree::Token(sp, token::Token::from_ast_ident(ident)).into());
+                    result.push(TokenTree::token(token::Dollar, sp).into());
+                    result.push(TokenTree::Token(Token::from_ast_ident(ident)).into());
                 }
             }
 
@@ -259,9 +254,9 @@ pub fn transcribe(
 
             // Nothing much to do here. Just push the token to the result, being careful to
             // preserve syntax context.
-            quoted::TokenTree::Token(sp, tok) => {
+            quoted::TokenTree::Token(token) => {
                 let mut marker = Marker(cx.current_expansion.mark);
-                let mut tt = TokenTree::Token(sp, tok);
+                let mut tt = TokenTree::Token(token);
                 noop_visit_tt(&mut tt, &mut marker);
                 result.push(tt.into());
             }
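
The transcriber hunk above also drops the explicit `ref mut`/`ref` patterns: matching on `stack.last_mut().unwrap()` (a `&mut Frame`) now binds `idx` and `sep` as mutable references automatically via default binding modes, and the separator token can simply be cloned because it already carries its own span. A small self-contained illustration of that binding-mode change, with a hypothetical `Frame` type:

    // Hypothetical frame type, only to demonstrate default binding modes.
    enum Frame {
        Delimited,
        Sequence { idx: usize, sep: Option<char> },
    }

    fn advance(stack: &mut Vec<Frame>) -> Option<char> {
        // `stack.last_mut().unwrap()` is a `&mut Frame`, so the pattern binds
        // `idx` as `&mut usize` and `sep` as `&mut Option<char>` without any
        // explicit `ref mut` keywords.
        if let Frame::Sequence { idx, sep } = stack.last_mut().unwrap() {
            *idx += 1;
            *sep
        } else {
            None
        }
    }

    fn main() {
        let mut stack = vec![Frame::Delimited, Frame::Sequence { idx: 0, sep: Some(',') }];
        assert_eq!(advance(&mut stack), Some(','));
        match stack.last() {
            Some(Frame::Sequence { idx, .. }) => assert_eq!(*idx, 1),
            _ => panic!("expected a sequence frame"),
        }
    }
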
index 4a95b6f69a161a7834f4908f868c155c3adc9f38..044c4b18905ef5e874e2bc2737816138573c916e 100644 (file)
@@ -551,15 +551,22 @@ pub fn walk_feature_fields<F>(&self, mut f: F)
     // Allows using `#[optimize(X)]`.
     (active, optimize_attribute, "1.34.0", Some(54882), None),
 
-    // Allows using `#[repr(align(X))]` on enums.
-    (active, repr_align_enum, "1.34.0", Some(57996), None),
-
     // Allows using C-variadics.
     (active, c_variadic, "1.34.0", Some(44930), None),
 
     // Allows the use of associated type bounds.
     (active, associated_type_bounds, "1.34.0", Some(52662), None),
 
+    // Allows calling constructor functions in `const fn`
+    // FIXME Create issue
+    (active, const_constructor, "1.37.0", Some(61456), None),
+
+    // #[repr(transparent)] on enums.
+    (active, transparent_enums, "1.37.0", Some(60405), None),
+
+    // #[repr(transparent)] on unions.
+    (active, transparent_unions, "1.37.0", Some(60405), None),
+
     // -------------------------------------------------------------------------
     // feature-group-end: actual feature gates
     // -------------------------------------------------------------------------
@@ -839,6 +846,9 @@ pub fn walk_feature_fields<F>(&self, mut f: F)
     (accepted, extern_crate_self, "1.34.0", Some(56409), None),
     // Allows arbitrary delimited token streams in non-macro attributes.
     (accepted, unrestricted_attribute_tokens, "1.34.0", Some(55208), None),
+    // Allows using `#[repr(align(X))]` on enums with equivalent semantics
+    // to wrapping an enum in a wrapper struct with `#[repr(align(X))]`.
+    (accepted, repr_align_enum, "1.37.0", Some(57996), None),
 
     // -------------------------------------------------------------------------
     // feature-group-end: accepted features
@@ -1327,6 +1337,16 @@ pub fn is_builtin_attr(attr: &ast::Attribute) -> bool {
                                                 "internal implementation detail",
                                                 cfg_fn!(rustc_attrs))),
 
+    (sym::rustc_allocator, Whitelisted, template!(Word), Gated(Stability::Unstable,
+                                                sym::rustc_attrs,
+                                                "internal implementation detail",
+                                                cfg_fn!(rustc_attrs))),
+
+    (sym::rustc_dummy, Normal, template!(Word /* doesn't matter*/), Gated(Stability::Unstable,
+                                         sym::rustc_attrs,
+                                         "used by the test suite",
+                                         cfg_fn!(rustc_attrs))),
+
     // FIXME: #14408 whitelist docs since rustdoc looks at them
     (
         sym::doc,
@@ -1953,14 +1973,14 @@ fn visit_attribute(&mut self, attr: &ast::Attribute) {
         }
 
         match attr_info {
-            Some(&(name, _, template, _)) => self.check_builtin_attribute(
-                attr,
-                name,
-                template
-            ),
-            None => if let Some(TokenTree::Token(_, token::Eq)) = attr.tokens.trees().next() {
-                // All key-value attributes are restricted to meta-item syntax.
-                attr.parse_meta(self.context.parse_sess).map_err(|mut err| err.emit()).ok();
+            // `rustc_dummy` doesn't have any restrictions specific to built-in attributes.
+            Some(&(name, _, template, _)) if name != sym::rustc_dummy =>
+                self.check_builtin_attribute(attr, name, template),
+            _ => if let Some(TokenTree::Token(token)) = attr.tokens.trees().next() {
+                if token == token::Eq {
+                    // All key-value attributes are restricted to meta-item syntax.
+                    attr.parse_meta(self.context.parse_sess).map_err(|mut err| err.emit()).ok();
+                }
             }
         }
     }
@@ -2019,17 +2039,6 @@ fn visit_item(&mut self, i: &'a ast::Item) {
                 }
             }
 
-            ast::ItemKind::Enum(..) => {
-                for attr in attr::filter_by_name(&i.attrs[..], sym::repr) {
-                    for item in attr.meta_item_list().unwrap_or_else(Vec::new) {
-                        if item.check_name(sym::align) {
-                            gate_feature_post!(&self, repr_align_enum, attr.span,
-                                               "`#[repr(align(x))]` on enums is experimental");
-                        }
-                    }
-                }
-            }
-
             ast::ItemKind::Impl(_, polarity, defaultness, _, _, _, _) => {
                 if polarity == ast::ImplPolarity::Negative {
                     gate_feature_post!(&self, optin_builtin_traits,
index 4229121b3d0759cdf370a535e66700b1aac43679..55db8da327673adda805db0b50eb807054f52716 100644 (file)
@@ -9,7 +9,9 @@
 
 #![deny(rust_2018_idioms)]
 #![deny(internal)]
+#![deny(unused_lifetimes)]
 
+#![feature(bind_by_move_pattern_guards)]
 #![feature(crate_visibility_modifier)]
 #![feature(label_break_value)]
 #![feature(nll)]
@@ -136,12 +138,6 @@ pub mod util {
 
 pub mod json;
 
-pub mod syntax {
-    pub use crate::ext;
-    pub use crate::parse;
-    pub use crate::ast;
-}
-
 pub mod ast;
 pub mod attr;
 pub mod source_map;
index fb1a7a680baaf532552ccbcca6790bf2bb1e0488..2889f8edfc64cc4d4cc9ea441c8c852f35d1b624 100644 (file)
@@ -576,9 +576,8 @@ pub fn noop_visit_arg<T: MutVisitor>(Arg { id, pat, ty }: &mut Arg, vis: &mut T)
 
 pub fn noop_visit_tt<T: MutVisitor>(tt: &mut TokenTree, vis: &mut T) {
     match tt {
-        TokenTree::Token(span, tok) => {
-            vis.visit_span(span);
-            vis.visit_token(tok);
+        TokenTree::Token(token) => {
+            vis.visit_token(token);
         }
         TokenTree::Delimited(DelimSpan { open, close }, _delim, tts) => {
             vis.visit_span(open);
@@ -595,17 +594,26 @@ pub fn noop_visit_tts<T: MutVisitor>(TokenStream(tts): &mut TokenStream, vis: &m
     })
 }
 
-// apply ident visitor if it's an ident, apply other visits to interpolated nodes
+// Apply ident visitor if it's an ident, apply other visits to interpolated nodes.
+// In practice the ident part is not actually used by specific visitors right now,
+// but there's a test below checking that it works.
 pub fn noop_visit_token<T: MutVisitor>(t: &mut Token, vis: &mut T) {
-    match t {
-        token::Ident(id, _is_raw) => vis.visit_ident(id),
-        token::Lifetime(id) => vis.visit_ident(id),
+    let Token { kind, span } = t;
+    match kind {
+        token::Ident(name, _) | token::Lifetime(name) => {
+            let mut ident = Ident::new(*name, *span);
+            vis.visit_ident(&mut ident);
+            *name = ident.name;
+            *span = ident.span;
+            return; // avoid visiting the span for the second time
+        }
         token::Interpolated(nt) => {
             let mut nt = Lrc::make_mut(nt);
             vis.visit_interpolated(&mut nt);
         }
         _ => {}
     }
+    vis.visit_span(span);
 }
 
 /// Apply visitor to elements of interpolated nodes.
@@ -1093,7 +1101,6 @@ pub fn noop_visit_expr<T: MutVisitor>(Expr { node, id, span, attrs }: &mut Expr,
             vis.visit_expr(rhs);
         }
         ExprKind::Unary(_unop, ohs) => vis.visit_expr(ohs),
-        ExprKind::Lit(_lit) => {}
         ExprKind::Cast(expr, ty) => {
             vis.visit_expr(expr);
             vis.visit_ty(ty);
@@ -1217,7 +1224,7 @@ pub fn noop_visit_expr<T: MutVisitor>(Expr { node, id, span, attrs }: &mut Expr,
         }
         ExprKind::Try(expr) => vis.visit_expr(expr),
         ExprKind::TryBlock(body) => vis.visit_block(body),
-        ExprKind::Err => {}
+        ExprKind::Lit(_) | ExprKind::Err => {}
     }
     vis.visit_id(id);
     vis.visit_span(span);
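
The new `noop_visit_token` above rebuilds an `Ident` from the token's name and span, lets the visitor mutate it, writes both pieces back, and returns early so the span is not visited a second time. A reduced sketch of that round-trip, using a plain `String` where the compiler uses an interned `Symbol`:

    #[derive(Clone, Copy, Debug)]
    struct Span(u32, u32);

    #[derive(Debug)]
    struct Ident {
        name: String,
        span: Span,
    }

    #[derive(Debug)]
    enum TokenKind {
        Ident(String),
        Comma,
    }

    #[derive(Debug)]
    struct Token {
        kind: TokenKind,
        span: Span,
    }

    trait MutVisitor {
        fn visit_ident(&mut self, _ident: &mut Ident) {}
        fn visit_span(&mut self, _span: &mut Span) {}
    }

    fn noop_visit_token<V: MutVisitor>(t: &mut Token, vis: &mut V) {
        let Token { kind, span } = t;
        match kind {
            TokenKind::Ident(name) => {
                // Rebuild a temporary Ident, let the visitor change it, then
                // copy the (possibly updated) name and span back into the token.
                let mut ident = Ident { name: name.clone(), span: *span };
                vis.visit_ident(&mut ident);
                *name = ident.name;
                *span = ident.span;
                return; // the ident visit already covered the span
            }
            _ => {}
        }
        vis.visit_span(span);
    }

    struct Upcaser;
    impl MutVisitor for Upcaser {
        fn visit_ident(&mut self, ident: &mut Ident) {
            ident.name = ident.name.to_uppercase();
        }
    }

    fn main() {
        let mut ident_tok = Token { kind: TokenKind::Ident("foo".into()), span: Span(0, 3) };
        let mut comma_tok = Token { kind: TokenKind::Comma, span: Span(3, 4) };
        noop_visit_token(&mut ident_tok, &mut Upcaser);
        noop_visit_token(&mut comma_tok, &mut Upcaser);
        println!("{:?} {:?}", ident_tok, comma_tok);
    }
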
index e99a86e807f7f6294e79356f1e777e65d6f7d728..77a87e26e60d596ff4543d5fc794a0d309b46ad6 100644 (file)
@@ -24,7 +24,7 @@ impl<'a> Parser<'a> {
         let mut just_parsed_doc_comment = false;
         loop {
             debug!("parse_outer_attributes: self.token={:?}", self.token);
-            match self.token {
+            match self.token.kind {
                 token::Pound => {
                     let inner_error_reason = if just_parsed_doc_comment {
                         "an inner attribute is not permitted following an outer doc comment"
@@ -39,7 +39,7 @@ impl<'a> Parser<'a> {
                     just_parsed_doc_comment = false;
                 }
                 token::DocComment(s) => {
-                    let attr = attr::mk_sugared_doc_attr(attr::mk_attr_id(), s, self.span);
+                    let attr = attr::mk_sugared_doc_attr(attr::mk_attr_id(), s, self.token.span);
                     if attr.style != ast::AttrStyle::Outer {
                         let mut err = self.fatal("expected outer doc comment");
                         err.note("inner doc comments like this (starting with \
@@ -81,9 +81,9 @@ fn parse_attribute_with_inner_parse_policy(&mut self,
         debug!("parse_attribute_with_inner_parse_policy: inner_parse_policy={:?} self.token={:?}",
                inner_parse_policy,
                self.token);
-        let (span, path, tokens, style) = match self.token {
+        let (span, path, tokens, style) = match self.token.kind {
             token::Pound => {
-                let lo = self.span;
+                let lo = self.token.span;
                 self.bump();
 
                 if let InnerAttributeParsePolicy::Permitted = inner_parse_policy {
@@ -93,7 +93,7 @@ fn parse_attribute_with_inner_parse_policy(&mut self,
                     self.bump();
                     if let InnerAttributeParsePolicy::NotPermitted { reason } = inner_parse_policy
                     {
-                        let span = self.span;
+                        let span = self.token.span;
                         self.diagnostic()
                             .struct_span_err(span, reason)
                             .note("inner attributes, like `#![no_std]`, annotate the item \
@@ -140,7 +140,7 @@ fn parse_attribute_with_inner_parse_policy(&mut self,
     /// PATH `=` TOKEN_TREE
     /// The delimiters or `=` are still put into the resulting token stream.
     crate fn parse_meta_item_unrestricted(&mut self) -> PResult<'a, (ast::Path, TokenStream)> {
-        let meta = match self.token {
+        let meta = match self.token.kind {
             token::Interpolated(ref nt) => match **nt {
                 Nonterminal::NtMeta(ref meta) => Some(meta.clone()),
                 _ => None,
@@ -157,9 +157,9 @@ fn parse_attribute_with_inner_parse_policy(&mut self,
                self.check(&token::OpenDelim(DelimToken::Brace)) {
                    self.parse_token_tree().into()
             } else if self.eat(&token::Eq) {
-                let eq = TokenTree::Token(self.prev_span, token::Eq);
+                let eq = TokenTree::token(token::Eq, self.prev_span);
                 let mut is_interpolated_expr = false;
-                if let token::Interpolated(nt) = &self.token {
+                if let token::Interpolated(nt) = &self.token.kind {
                     if let token::NtExpr(..) = **nt {
                         is_interpolated_expr = true;
                     }
@@ -188,7 +188,7 @@ fn parse_attribute_with_inner_parse_policy(&mut self,
     crate fn parse_inner_attributes(&mut self) -> PResult<'a, Vec<ast::Attribute>> {
         let mut attrs: Vec<ast::Attribute> = vec![];
         loop {
-            match self.token {
+            match self.token.kind {
                 token::Pound => {
                     // Don't even try to parse if it's not an inner attribute.
                     if !self.look_ahead(1, |t| t == &token::Not) {
@@ -201,7 +201,7 @@ fn parse_attribute_with_inner_parse_policy(&mut self,
                 }
                 token::DocComment(s) => {
                     // we need to get the position of this token before we bump.
-                    let attr = attr::mk_sugared_doc_attr(attr::mk_attr_id(), s, self.span);
+                    let attr = attr::mk_sugared_doc_attr(attr::mk_attr_id(), s, self.token.span);
                     if attr.style == ast::AttrStyle::Inner {
                         attrs.push(attr);
                         self.bump();
@@ -236,7 +236,7 @@ fn parse_unsuffixed_lit(&mut self) -> PResult<'a, ast::Lit> {
     /// meta_item : IDENT ( '=' UNSUFFIXED_LIT | '(' meta_item_inner? ')' )? ;
     /// meta_item_inner : (meta_item | UNSUFFIXED_LIT) (',' meta_item_inner)? ;
     pub fn parse_meta_item(&mut self) -> PResult<'a, ast::MetaItem> {
-        let nt_meta = match self.token {
+        let nt_meta = match self.token.kind {
             token::Interpolated(ref nt) => match **nt {
                 token::NtMeta(ref e) => Some(e.clone()),
                 _ => None,
@@ -249,7 +249,7 @@ pub fn parse_meta_item(&mut self) -> PResult<'a, ast::MetaItem> {
             return Ok(meta);
         }
 
-        let lo = self.span;
+        let lo = self.token.span;
         let path = self.parse_path(PathStyle::Mod)?;
         let node = self.parse_meta_item_kind()?;
         let span = lo.to(self.prev_span);
@@ -284,7 +284,7 @@ fn parse_meta_item_inner(&mut self) -> PResult<'a, ast::NestedMetaItem> {
 
         let found = self.this_token_to_string();
         let msg = format!("expected unsuffixed literal or identifier, found `{}`", found);
-        Err(self.diagnostic().struct_span_err(self.span, &msg))
+        Err(self.diagnostic().struct_span_err(self.token.span, &msg))
     }
 
     /// matches meta_seq = ( COMMASEP(meta_item_inner) )
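
These parser hunks consistently replace the parser's old `self.span` shorthand with `self.token.span` and match on `self.token.kind`. A tiny sketch of a cursor laid out that way; `Parser` here is a hypothetical simplification, not the real one:

    #[derive(Clone, Copy, Debug)]
    struct Span(u32, u32);

    #[derive(Debug)]
    enum TokenKind {
        Pound,
        DocComment(String),
        Eof,
    }

    #[derive(Debug)]
    struct Token {
        kind: TokenKind,
        span: Span,
    }

    // Hypothetical cursor: there is no separate `span` field any more;
    // every question about the current position goes through `token.span`.
    struct Parser {
        token: Token,
    }

    impl Parser {
        fn describe(&self) -> String {
            match &self.token.kind {
                TokenKind::Pound => format!("`#` at {:?}", self.token.span),
                TokenKind::DocComment(text) => {
                    format!("doc comment {:?} at {:?}", text, self.token.span)
                }
                TokenKind::Eof => format!("end of input at {:?}", self.token.span),
            }
        }
    }

    fn main() {
        let tokens = vec![
            Token { kind: TokenKind::Pound, span: Span(10, 11) },
            Token { kind: TokenKind::DocComment("/// hi".to_string()), span: Span(11, 17) },
            Token { kind: TokenKind::Eof, span: Span(17, 17) },
        ];
        for token in tokens {
            println!("{}", Parser { token }.describe());
        }
    }
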
index 5df22f28797a4813a49d7530e517e1f2f123de3d..9d2ac5b4b51688a82c2e94f86f4c3f650b3da221 100644 (file)
@@ -2,8 +2,9 @@
     self, Arg, BinOpKind, BindingMode, BlockCheckMode, Expr, ExprKind, Ident, Item, ItemKind,
     Mutability, Pat, PatKind, PathSegment, QSelf, Ty, TyKind, VariantData,
 };
-use crate::parse::{SeqSep, token, PResult, Parser};
+use crate::parse::{SeqSep, PResult, Parser};
 use crate::parse::parser::{BlockMode, PathStyle, SemiColonMode, TokenType, TokenExpectType};
+use crate::parse::token::{self, TokenKind};
 use crate::print::pprust;
 use crate::ptr::P;
 use crate::source_map::Spanned;
@@ -161,7 +162,7 @@ fn recovered(qself: Option<QSelf>, path: ast::Path) -> Self {
 
 impl<'a> Parser<'a> {
     pub fn fatal(&self, m: &str) -> DiagnosticBuilder<'a> {
-        self.span_fatal(self.span, m)
+        self.span_fatal(self.token.span, m)
     }
 
     pub fn span_fatal<S: Into<MultiSpan>>(&self, sp: S, m: &str) -> DiagnosticBuilder<'a> {
@@ -173,7 +174,7 @@ pub fn span_fatal_err<S: Into<MultiSpan>>(&self, sp: S, err: Error) -> Diagnosti
     }
 
     pub fn bug(&self, m: &str) -> ! {
-        self.sess.span_diagnostic.span_bug(self.span, m)
+        self.sess.span_diagnostic.span_bug(self.token.span, m)
     }
 
     pub fn span_err<S: Into<MultiSpan>>(&self, sp: S, m: &str) {
@@ -198,26 +199,26 @@ pub fn span_err<S: Into<MultiSpan>>(&self, sp: S, m: &str) {
 
     crate fn expected_ident_found(&self) -> DiagnosticBuilder<'a> {
         let mut err = self.struct_span_err(
-            self.span,
+            self.token.span,
             &format!("expected identifier, found {}", self.this_token_descr()),
         );
-        if let token::Ident(ident, false) = &self.token {
-            if ident.is_raw_guess() {
+        if let token::Ident(name, false) = self.token.kind {
+            if Ident::new(name, self.token.span).is_raw_guess() {
                 err.span_suggestion(
-                    self.span,
+                    self.token.span,
                     "you can escape reserved keywords to use them as identifiers",
-                    format!("r#{}", ident),
+                    format!("r#{}", name),
                     Applicability::MaybeIncorrect,
                 );
             }
         }
         if let Some(token_descr) = self.token_descr() {
-            err.span_label(self.span, format!("expected identifier, found {}", token_descr));
+            err.span_label(self.token.span, format!("expected identifier, found {}", token_descr));
         } else {
-            err.span_label(self.span, "expected identifier");
+            err.span_label(self.token.span, "expected identifier");
             if self.token == token::Comma && self.look_ahead(1, |t| t.is_ident()) {
                 err.span_suggestion(
-                    self.span,
+                    self.token.span,
                     "remove this comma",
                     String::new(),
                     Applicability::MachineApplicable,
@@ -229,8 +230,8 @@ pub fn span_err<S: Into<MultiSpan>>(&self, sp: S, m: &str) {
 
     pub fn expected_one_of_not_found(
         &mut self,
-        edible: &[token::Token],
-        inedible: &[token::Token],
+        edible: &[TokenKind],
+        inedible: &[TokenKind],
     ) -> PResult<'a, bool /* recovered */> {
         fn tokens_to_string(tokens: &[TokenType]) -> String {
             let mut i = tokens.iter();
@@ -276,11 +277,11 @@ fn tokens_to_string(tokens: &[TokenType]) -> String {
                 (self.sess.source_map().next_point(self.prev_span),
                 format!("expected {} here", expect)))
         };
-        self.last_unexpected_token_span = Some(self.span);
+        self.last_unexpected_token_span = Some(self.token.span);
         let mut err = self.fatal(&msg_exp);
         if self.token.is_ident_named(sym::and) {
             err.span_suggestion_short(
-                self.span,
+                self.token.span,
                 "use `&&` instead of `and` for the boolean operator",
                 "&&".to_string(),
                 Applicability::MaybeIncorrect,
@@ -288,13 +289,13 @@ fn tokens_to_string(tokens: &[TokenType]) -> String {
         }
         if self.token.is_ident_named(sym::or) {
             err.span_suggestion_short(
-                self.span,
+                self.token.span,
                 "use `||` instead of `or` for the boolean operator",
                 "||".to_string(),
                 Applicability::MaybeIncorrect,
             );
         }
-        let sp = if self.token == token::Token::Eof {
+        let sp = if self.token == token::Eof {
             // This is EOF, don't want to point at the following char, but rather the last token
             self.prev_span
         } else {
@@ -325,7 +326,7 @@ fn tokens_to_string(tokens: &[TokenType]) -> String {
             self.token.is_keyword(kw::While)
         );
         let cm = self.sess.source_map();
-        match (cm.lookup_line(self.span.lo()), cm.lookup_line(sp.lo())) {
+        match (cm.lookup_line(self.token.span.lo()), cm.lookup_line(sp.lo())) {
             (Ok(ref a), Ok(ref b)) if a.line != b.line && is_semi_suggestable => {
                 // The spans are in different lines, expected `;` and found `let` or `return`.
                 // High likelihood that it is only a missing `;`.
@@ -351,16 +352,16 @@ fn tokens_to_string(tokens: &[TokenType]) -> String {
                 //   |                   -^^^^^ unexpected token
                 //   |                   |
                 //   |                   expected one of 8 possible tokens here
-                err.span_label(self.span, label_exp);
+                err.span_label(self.token.span, label_exp);
             }
             _ if self.prev_span == syntax_pos::DUMMY_SP => {
                 // Account for macro context where the previous span might not be
                 // available to avoid incorrect output (#54841).
-                err.span_label(self.span, "unexpected token");
+                err.span_label(self.token.span, "unexpected token");
             }
             _ => {
                 err.span_label(sp, label_exp);
-                err.span_label(self.span, "unexpected token");
+                err.span_label(self.token.span, "unexpected token");
             }
         }
         Err(err)
@@ -368,7 +369,7 @@ fn tokens_to_string(tokens: &[TokenType]) -> String {
 
     /// Eats and discards tokens until one of `kets` is encountered. Respects token trees,
     /// passes through any errors encountered. Used for error recovery.
-    crate fn eat_to_tokens(&mut self, kets: &[&token::Token]) {
+    crate fn eat_to_tokens(&mut self, kets: &[&TokenKind]) {
         let handler = self.diagnostic();
 
         if let Err(ref mut err) = self.parse_seq_to_before_tokens(
@@ -388,7 +389,7 @@ fn tokens_to_string(tokens: &[TokenType]) -> String {
     /// let _ = vec![1, 2, 3].into_iter().collect::<Vec<usize>>>>();
     ///                                                        ^^ help: remove extra angle brackets
     /// ```
-    crate fn check_trailing_angle_brackets(&mut self, segment: &PathSegment, end: token::Token) {
+    crate fn check_trailing_angle_brackets(&mut self, segment: &PathSegment, end: TokenKind) {
         // This function is intended to be invoked after parsing a path segment where there are two
         // cases:
         //
@@ -428,7 +429,7 @@ fn tokens_to_string(tokens: &[TokenType]) -> String {
 
         // Keep the span at the start so we can highlight the sequence of `>` characters to be
         // removed.
-        let lo = self.span;
+        let lo = self.token.span;
 
         // We need to look-ahead to see if we have `>` characters without moving the cursor forward
         // (since we might have the field access case and the characters we're eating are
@@ -473,7 +474,7 @@ fn tokens_to_string(tokens: &[TokenType]) -> String {
             // Eat from where we started until the end token so that parsing can continue
             // as if we didn't have those extra angle brackets.
             self.eat_to_tokens(&[&end]);
-            let span = lo.until(self.span);
+            let span = lo.until(self.token.span);
 
             let plural = number_of_gt > 1 || number_of_shr >= 1;
             self.diagnostic()
@@ -501,7 +502,7 @@ fn tokens_to_string(tokens: &[TokenType]) -> String {
         match lhs.node {
             ExprKind::Binary(op, _, _) if op.node.is_comparison() => {
                 // respan to include both operators
-                let op_span = op.span.to(self.span);
+                let op_span = op.span.to(self.token.span);
                 let mut err = self.diagnostic().struct_span_err(op_span,
                     "chained comparison operators require parentheses");
                 if op.node == BinOpKind::Lt &&
@@ -726,28 +727,28 @@ fn tokens_to_string(tokens: &[TokenType]) -> String {
     /// closing delimiter.
     pub fn unexpected_try_recover(
         &mut self,
-        t: &token::Token,
+        t: &TokenKind,
     ) -> PResult<'a, bool /* recovered */> {
-        let token_str = pprust::token_to_string(t);
+        let token_str = pprust::token_kind_to_string(t);
         let this_token_str = self.this_token_descr();
-        let (prev_sp, sp) = match (&self.token, self.subparser_name) {
+        let (prev_sp, sp) = match (&self.token.kind, self.subparser_name) {
             // Point at the end of the macro call when reaching end of macro arguments.
-            (token::Token::Eof, Some(_)) => {
-                let sp = self.sess.source_map().next_point(self.span);
+            (token::Eof, Some(_)) => {
+                let sp = self.sess.source_map().next_point(self.token.span);
                 (sp, sp)
             }
             // We don't want to point at the following span after DUMMY_SP.
             // This happens when the parser finds an empty TokenStream.
-            _ if self.prev_span == DUMMY_SP => (self.span, self.span),
+            _ if self.prev_span == DUMMY_SP => (self.token.span, self.token.span),
             // EOF, don't want to point at the following char, but rather the last token.
-            (token::Token::Eof, None) => (self.prev_span, self.span),
-            _ => (self.sess.source_map().next_point(self.prev_span), self.span),
+            (token::Eof, None) => (self.prev_span, self.token.span),
+            _ => (self.sess.source_map().next_point(self.prev_span), self.token.span),
         };
         let msg = format!(
             "expected `{}`, found {}",
             token_str,
-            match (&self.token, self.subparser_name) {
-                (token::Token::Eof, Some(origin)) => format!("end of {}", origin),
+            match (&self.token.kind, self.subparser_name) {
+                (token::Eof, Some(origin)) => format!("end of {}", origin),
                 _ => this_token_str,
             },
         );
@@ -788,7 +789,7 @@ pub fn unexpected_try_recover(
             // interpreting `await { <expr> }?` as `<expr>?.await`.
             self.parse_block_expr(
                 None,
-                self.span,
+                self.token.span,
                 BlockCheckMode::Default,
                 ThinVec::new(),
             )
@@ -818,9 +819,9 @@ pub fn unexpected_try_recover(
             self.look_ahead(1, |t| t == &token::CloseDelim(token::Paren))
         {
             // future.await()
-            let lo = self.span;
+            let lo = self.token.span;
             self.bump(); // (
-            let sp = lo.to(self.span);
+            let sp = lo.to(self.token.span);
             self.bump(); // )
             self.struct_span_err(sp, "incorrect use of `await`")
                 .span_suggestion(
@@ -853,7 +854,7 @@ pub fn unexpected_try_recover(
         next_sp: Span,
         maybe_path: bool,
     ) {
-        err.span_label(self.span, "expecting a type here because of type ascription");
+        err.span_label(self.token.span, "expecting a type here because of type ascription");
         let cm = self.sess.source_map();
         let next_pos = cm.lookup_char_pos(next_sp.lo());
         let op_pos = cm.lookup_char_pos(cur_op_span.hi());
@@ -903,14 +904,14 @@ pub fn unexpected_try_recover(
 
     crate fn recover_closing_delimiter(
         &mut self,
-        tokens: &[token::Token],
+        tokens: &[TokenKind],
         mut err: DiagnosticBuilder<'a>,
     ) -> PResult<'a, bool> {
         let mut pos = None;
         // we want to use the last closing delim that would apply
         for (i, unmatched) in self.unclosed_delims.iter().enumerate().rev() {
             if tokens.contains(&token::CloseDelim(unmatched.expected_delim))
-                && Some(self.span) > unmatched.unclosed_span
+                && Some(self.token.span) > unmatched.unclosed_span
             {
                 pos = Some(i);
             }
@@ -989,7 +990,7 @@ pub fn unexpected_try_recover(
                break_on_semi, break_on_block);
         loop {
             debug!("recover_stmt_ loop {:?}", self.token);
-            match self.token {
+            match self.token.kind {
                 token::OpenDelim(token::DelimToken::Brace) => {
                     brace_depth += 1;
                     self.bump();
@@ -1069,28 +1070,28 @@ pub fn unexpected_try_recover(
     crate fn expected_semi_or_open_brace(&mut self) -> PResult<'a, ast::TraitItem> {
         let token_str = self.this_token_descr();
         let mut err = self.fatal(&format!("expected `;` or `{{`, found {}", token_str));
-        err.span_label(self.span, "expected `;` or `{`");
+        err.span_label(self.token.span, "expected `;` or `{`");
         Err(err)
     }
 
     crate fn eat_incorrect_doc_comment(&mut self, applied_to: &str) {
-        if let token::DocComment(_) = self.token {
+        if let token::DocComment(_) = self.token.kind {
             let mut err = self.diagnostic().struct_span_err(
-                self.span,
+                self.token.span,
                 &format!("documentation comments cannot be applied to {}", applied_to),
             );
-            err.span_label(self.span, "doc comments are not allowed here");
+            err.span_label(self.token.span, "doc comments are not allowed here");
             err.emit();
             self.bump();
         } else if self.token == token::Pound && self.look_ahead(1, |t| {
             *t == token::OpenDelim(token::Bracket)
         }) {
-            let lo = self.span;
+            let lo = self.token.span;
             // Skip every token until next possible arg.
             while self.token != token::CloseDelim(token::Bracket) {
                 self.bump();
             }
-            let sp = lo.to(self.span);
+            let sp = lo.to(self.token.span);
             self.bump();
             let mut err = self.diagnostic().struct_span_err(
                 sp,
@@ -1214,18 +1215,18 @@ pub fn unexpected_try_recover(
     }
 
     crate fn expected_expression_found(&self) -> DiagnosticBuilder<'a> {
-        let (span, msg) = match (&self.token, self.subparser_name) {
-            (&token::Token::Eof, Some(origin)) => {
-                let sp = self.sess.source_map().next_point(self.span);
+        let (span, msg) = match (&self.token.kind, self.subparser_name) {
+            (&token::Eof, Some(origin)) => {
+                let sp = self.sess.source_map().next_point(self.token.span);
                 (sp, format!("expected expression, found end of {}", origin))
             }
-            _ => (self.span, format!(
+            _ => (self.token.span, format!(
                 "expected expression, found {}",
                 self.this_token_descr(),
             )),
         };
         let mut err = self.struct_span_err(span, &msg);
-        let sp = self.sess.source_map().start_point(self.span);
+        let sp = self.sess.source_map().start_point(self.token.span);
         if let Some(sp) = self.sess.ambiguous_block_expr_parse.borrow().get(&sp) {
             self.sess.expr_parentheses_needed(&mut err, *sp, None);
         }
index a06a84f162a96a266b7b78e38d4acde9911fdf37..9df2898696ea62d8d041a5eb9d2dc944fd65ee73 100644 (file)
@@ -1,6 +1,6 @@
-use crate::ast::{self, Ident};
+use crate::ast;
 use crate::parse::ParseSess;
-use crate::parse::token::{self, Token};
+use crate::parse::token::{self, Token, TokenKind};
 use crate::symbol::{sym, Symbol};
 use crate::parse::unescape;
 use crate::parse::unescape_error_reporting::{emit_unescape_error, push_escaped_char};
@@ -12,7 +12,6 @@
 use std::borrow::Cow;
 use std::char;
 use std::iter;
-use std::mem::replace;
 use rustc_data_structures::sync::Lrc;
 use log::debug;
 
 mod tokentrees;
 mod unicode_chars;
 
-#[derive(Clone, Debug)]
-pub struct TokenAndSpan {
-    pub tok: Token,
-    pub sp: Span,
-}
-
-impl Default for TokenAndSpan {
-    fn default() -> Self {
-        TokenAndSpan {
-            tok: token::Whitespace,
-            sp: syntax_pos::DUMMY_SP,
-        }
-    }
-}
-
 #[derive(Clone, Debug)]
 pub struct UnmatchedBrace {
     pub expected_delim: token::DelimToken,
@@ -56,8 +40,7 @@ pub struct StringReader<'a> {
     /// Stop reading src at this index.
     crate end_src_index: usize,
     // cached:
-    peek_tok: Token,
-    peek_span: Span,
+    peek_token: Token,
     peek_span_src_raw: Span,
     fatal_errs: Vec<DiagnosticBuilder<'a>>,
     // cache a direct reference to the source text, so that we don't have to
@@ -78,16 +61,7 @@ fn mk_sp_and_raw(&self, lo: BytePos, hi: BytePos) -> (Span, Span) {
         (real, raw)
     }
 
-    fn mk_ident(&self, string: &str) -> Ident {
-        let mut ident = Ident::from_str(string);
-        if let Some(span) = self.override_span {
-            ident.span = span;
-        }
-
-        ident
-    }
-
-    fn unwrap_or_abort(&mut self, res: Result<TokenAndSpan, ()>) -> TokenAndSpan {
+    fn unwrap_or_abort(&mut self, res: Result<Token, ()>) -> Token {
         match res {
             Ok(tok) => tok,
             Err(_) => {
@@ -97,18 +71,15 @@ fn unwrap_or_abort(&mut self, res: Result<TokenAndSpan, ()>) -> TokenAndSpan {
         }
     }
 
-    fn next_token(&mut self) -> TokenAndSpan where Self: Sized {
+    fn next_token(&mut self) -> Token where Self: Sized {
         let res = self.try_next_token();
         self.unwrap_or_abort(res)
     }
 
     /// Returns the next token. EFFECT: advances the string_reader.
-    pub fn try_next_token(&mut self) -> Result<TokenAndSpan, ()> {
+    pub fn try_next_token(&mut self) -> Result<Token, ()> {
         assert!(self.fatal_errs.is_empty());
-        let ret_val = TokenAndSpan {
-            tok: replace(&mut self.peek_tok, token::Whitespace),
-            sp: self.peek_span,
-        };
+        let ret_val = self.peek_token.take();
         self.advance_token()?;
         Ok(ret_val)
     }
@@ -135,10 +106,10 @@ fn peek_delimited(&self, from_ch: char, to_ch: char) -> Option<String> {
         return None;
     }
 
-    fn try_real_token(&mut self) -> Result<TokenAndSpan, ()> {
+    fn try_real_token(&mut self) -> Result<Token, ()> {
         let mut t = self.try_next_token()?;
         loop {
-            match t.tok {
+            match t.kind {
                 token::Whitespace | token::Comment | token::Shebang(_) => {
                     t = self.try_next_token()?;
                 }
@@ -149,7 +120,7 @@ fn try_real_token(&mut self) -> Result<TokenAndSpan, ()> {
         Ok(t)
     }
 
-    pub fn real_token(&mut self) -> TokenAndSpan {
+    pub fn real_token(&mut self) -> Token {
         let res = self.try_real_token();
         self.unwrap_or_abort(res)
     }
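
For context on `real_token`/`try_real_token` above: the string reader's raw stream still yields whitespace, comment and shebang tokens, and `real_token` simply loops until it pulls something that is not trivia. A small sketch of that filtering loop under simplified, hypothetical types (shebang omitted, and an exhausted stream treated as Eof just for the sketch):

#[derive(Clone, Debug, PartialEq)]
enum TokenKind { Whitespace, Comment, Ident(&'static str), Eof }

// Keep pulling raw tokens until one is not trivia.
fn real_token<I: Iterator<Item = TokenKind>>(raw: &mut I) -> TokenKind {
    loop {
        match raw.next().unwrap_or(TokenKind::Eof) {
            TokenKind::Whitespace | TokenKind::Comment => continue,
            t => return t,
        }
    }
}

fn main() {
    let mut raw = vec![TokenKind::Comment, TokenKind::Whitespace, TokenKind::Ident("fn")].into_iter();
    assert_eq!(real_token(&mut raw), TokenKind::Ident("fn"));
    assert_eq!(real_token(&mut raw), TokenKind::Eof);
}
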
@@ -159,7 +130,7 @@ fn is_eof(&self) -> bool {
         self.ch.is_none()
     }
 
-    fn fail_unterminated_raw_string(&self, pos: BytePos, hash_count: u16) {
+    fn fail_unterminated_raw_string(&self, pos: BytePos, hash_count: u16) -> ! {
         let mut err = self.struct_span_fatal(pos, pos, "unterminated raw string");
         err.span_label(self.mk_sp(pos, pos), "unterminated raw string");
 
@@ -173,7 +144,7 @@ fn fail_unterminated_raw_string(&self, pos: BytePos, hash_count: u16) {
     }
 
     fn fatal(&self, m: &str) -> FatalError {
-        self.fatal_span(self.peek_span, m)
+        self.fatal_span(self.peek_token.span, m)
     }
 
     crate fn emit_fatal_errors(&mut self) {
@@ -194,12 +165,8 @@ pub fn buffer_fatal_errors(&mut self) -> Vec<Diagnostic> {
         buffer
     }
 
-    pub fn peek(&self) -> TokenAndSpan {
-        // FIXME(pcwalton): Bad copy!
-        TokenAndSpan {
-            tok: self.peek_tok.clone(),
-            sp: self.peek_span,
-        }
+    pub fn peek(&self) -> &Token {
+        &self.peek_token
     }
 
     /// For comments.rs, which hackily pokes into next_pos and ch
@@ -229,9 +196,7 @@ fn new_raw_internal(sess: &'a ParseSess, source_file: Lrc<syntax_pos::SourceFile
             ch: Some('\n'),
             source_file,
             end_src_index: src.len(),
-            // dummy values; not read
-            peek_tok: token::Eof,
-            peek_span: syntax_pos::DUMMY_SP,
+            peek_token: Token::dummy(),
             peek_span_src_raw: syntax_pos::DUMMY_SP,
             src,
             fatal_errs: Vec::new(),
@@ -327,40 +292,24 @@ fn struct_fatal_span_char(&self, from_pos: BytePos, to_pos: BytePos, m: &str, c:
         self.sess.span_diagnostic.struct_span_fatal(self.mk_sp(from_pos, to_pos), &m[..])
     }
 
-    /// Report a lexical error spanning [`from_pos`, `to_pos`), appending an
-    /// escaped character to the error message
-    fn err_span_char(&self, from_pos: BytePos, to_pos: BytePos, m: &str, c: char) {
-        let mut m = m.to_string();
-        m.push_str(": ");
-        push_escaped_char(&mut m, c);
-        self.err_span_(from_pos, to_pos, &m[..]);
-    }
-
-    /// Advance peek_tok and peek_span to refer to the next token, and
+    /// Advance peek_token to refer to the next token, and
     /// possibly update the interner.
     fn advance_token(&mut self) -> Result<(), ()> {
         match self.scan_whitespace_or_comment() {
             Some(comment) => {
-                self.peek_span_src_raw = comment.sp;
-                self.peek_span = comment.sp;
-                self.peek_tok = comment.tok;
+                self.peek_span_src_raw = comment.span;
+                self.peek_token = comment;
             }
             None => {
-                if self.is_eof() {
-                    self.peek_tok = token::Eof;
-                    let (real, raw) = self.mk_sp_and_raw(
-                        self.source_file.end_pos,
-                        self.source_file.end_pos,
-                    );
-                    self.peek_span = real;
-                    self.peek_span_src_raw = raw;
+                let (kind, start_pos, end_pos) = if self.is_eof() {
+                    (token::Eof, self.source_file.end_pos, self.source_file.end_pos)
                 } else {
-                    let start_bytepos = self.pos;
-                    self.peek_tok = self.next_token_inner()?;
-                    let (real, raw) = self.mk_sp_and_raw(start_bytepos, self.pos);
-                    self.peek_span = real;
-                    self.peek_span_src_raw = raw;
+                    let start_pos = self.pos;
+                    (self.next_token_inner()?, start_pos, self.pos)
                 };
+                let (real, raw) = self.mk_sp_and_raw(start_pos, end_pos);
+                self.peek_token = Token::new(kind, real);
+                self.peek_span_src_raw = raw;
             }
         }
 
@@ -527,7 +476,7 @@ fn scan_optional_raw_name(&mut self) -> Option<ast::Name> {
 
     /// PRECONDITION: self.ch is not whitespace
     /// Eats any kind of comment.
-    fn scan_comment(&mut self) -> Option<TokenAndSpan> {
+    fn scan_comment(&mut self) -> Option<Token> {
         if let Some(c) = self.ch {
             if c.is_whitespace() {
                 let msg = "called consume_any_line_comment, but there was whitespace";
@@ -563,14 +512,14 @@ fn scan_comment(&mut self) -> Option<TokenAndSpan> {
                         self.bump();
                     }
 
-                    let tok = if doc_comment {
+                    let kind = if doc_comment {
                         self.with_str_from(start_bpos, |string| {
                             token::DocComment(Symbol::intern(string))
                         })
                     } else {
                         token::Comment
                     };
-                    Some(TokenAndSpan { tok, sp: self.mk_sp(start_bpos, self.pos) })
+                    Some(Token::new(kind, self.mk_sp(start_bpos, self.pos)))
                 }
                 Some('*') => {
                     self.bump();
@@ -594,10 +543,10 @@ fn scan_comment(&mut self) -> Option<TokenAndSpan> {
                     while !self.ch_is('\n') && !self.is_eof() {
                         self.bump();
                     }
-                    return Some(TokenAndSpan {
-                        tok: token::Shebang(self.name_from(start)),
-                        sp: self.mk_sp(start, self.pos),
-                    });
+                    return Some(Token::new(
+                        token::Shebang(self.name_from(start)),
+                        self.mk_sp(start, self.pos),
+                    ));
                 }
             }
             None
@@ -608,7 +557,7 @@ fn scan_comment(&mut self) -> Option<TokenAndSpan> {
 
     /// If there is whitespace, shebang, or a comment, scan it. Otherwise,
     /// return `None`.
-    fn scan_whitespace_or_comment(&mut self) -> Option<TokenAndSpan> {
+    fn scan_whitespace_or_comment(&mut self) -> Option<Token> {
         match self.ch.unwrap_or('\0') {
             // # to handle shebang at start of file -- this is the entry point
             // for skipping over all "junk"
@@ -622,10 +571,7 @@ fn scan_whitespace_or_comment(&mut self) -> Option<TokenAndSpan> {
                 while is_pattern_whitespace(self.ch) {
                     self.bump();
                 }
-                let c = Some(TokenAndSpan {
-                    tok: token::Whitespace,
-                    sp: self.mk_sp(start_bpos, self.pos),
-                });
+                let c = Some(Token::new(token::Whitespace, self.mk_sp(start_bpos, self.pos)));
                 debug!("scanning whitespace: {:?}", c);
                 c
             }
@@ -634,7 +580,7 @@ fn scan_whitespace_or_comment(&mut self) -> Option<TokenAndSpan> {
     }
 
     /// Might return a sugared-doc-attr
-    fn scan_block_comment(&mut self) -> Option<TokenAndSpan> {
+    fn scan_block_comment(&mut self) -> Option<Token> {
         // block comments starting with "/**" or "/*!" are doc-comments
         let is_doc_comment = self.ch_is('*') || self.ch_is('!');
         let start_bpos = self.pos - BytePos(2);
@@ -671,7 +617,7 @@ fn scan_block_comment(&mut self) -> Option<TokenAndSpan> {
 
         self.with_str_from(start_bpos, |string| {
             // but comments with only "*"s between two "/"s are not
-            let tok = if is_block_doc_comment(string) {
+            let kind = if is_block_doc_comment(string) {
                 let string = if has_cr {
                     self.translate_crlf(start_bpos,
                                         string,
@@ -684,10 +630,7 @@ fn scan_block_comment(&mut self) -> Option<TokenAndSpan> {
                 token::Comment
             };
 
-            Some(TokenAndSpan {
-                tok,
-                sp: self.mk_sp(start_bpos, self.pos),
-            })
+            Some(Token::new(kind, self.mk_sp(start_bpos, self.pos)))
         })
     }
 
@@ -847,7 +790,7 @@ fn check_float_base(&mut self, start_bpos: BytePos, last_bpos: BytePos, base: us
         }
     }
 
-    fn binop(&mut self, op: token::BinOpToken) -> Token {
+    fn binop(&mut self, op: token::BinOpToken) -> TokenKind {
         self.bump();
         if self.ch_is('=') {
             self.bump();
@@ -859,7 +802,7 @@ fn binop(&mut self, op: token::BinOpToken) -> Token {
 
     /// Returns the next token from the string, advances the input past that
     /// token, and updates the interner
-    fn next_token_inner(&mut self) -> Result<Token, ()> {
+    fn next_token_inner(&mut self) -> Result<TokenKind, ()> {
         let c = self.ch;
 
         if ident_start(c) {
@@ -897,17 +840,17 @@ fn next_token_inner(&mut self) -> Result<Token, ()> {
 
                 return Ok(self.with_str_from(start, |string| {
                     // FIXME: perform NFKC normalization here. (Issue #2253)
-                    let ident = self.mk_ident(string);
+                    let name = ast::Name::intern(string);
 
                     if is_raw_ident {
                         let span = self.mk_sp(raw_start, self.pos);
-                        if !ident.can_be_raw() {
-                            self.err_span(span, &format!("`{}` cannot be a raw identifier", ident));
+                        if !name.can_be_raw() {
+                            self.err_span(span, &format!("`{}` cannot be a raw identifier", name));
                         }
                         self.sess.raw_identifier_spans.borrow_mut().push(span);
                     }
 
-                    token::Ident(ident, is_raw_ident)
+                    token::Ident(name, is_raw_ident)
                 }));
             }
         }
@@ -916,7 +859,7 @@ fn next_token_inner(&mut self) -> Result<Token, ()> {
             let (kind, symbol) = self.scan_number(c.unwrap());
             let suffix = self.scan_optional_raw_name();
             debug!("next_token_inner: scanned number {:?}, {:?}, {:?}", kind, symbol, suffix);
-            return Ok(Token::lit(kind, symbol, suffix));
+            return Ok(TokenKind::lit(kind, symbol, suffix));
         }
 
         match c.expect("next_token_inner called at EOF") {
@@ -1077,16 +1020,9 @@ fn next_token_inner(&mut self) -> Result<Token, ()> {
                         let symbol = self.name_from(start);
                         self.bump();
                         self.validate_char_escape(start_with_quote);
-                        return Ok(Token::lit(token::Char, symbol, None));
+                        return Ok(TokenKind::lit(token::Char, symbol, None));
                     }
 
-                    // Include the leading `'` in the real identifier, for macro
-                    // expansion purposes. See #12512 for the gory details of why
-                    // this is necessary.
-                    let ident = self.with_str_from(start_with_quote, |lifetime_name| {
-                        self.mk_ident(lifetime_name)
-                    });
-
                     if starts_with_number {
                         // this is a recovered lifetime written `'1`, error but accept it
                         self.err_span_(
@@ -1096,13 +1032,16 @@ fn next_token_inner(&mut self) -> Result<Token, ()> {
                         );
                     }
 
-                    return Ok(token::Lifetime(ident));
+                    // Include the leading `'` in the real identifier, for macro
+                    // expansion purposes. See #12512 for the gory details of why
+                    // this is necessary.
+                    return Ok(token::Lifetime(self.name_from(start_with_quote)));
                 }
                 let msg = "unterminated character literal";
                 let symbol = self.scan_single_quoted_string(start_with_quote, msg);
                 self.validate_char_escape(start_with_quote);
                 let suffix = self.scan_optional_raw_name();
-                Ok(Token::lit(token::Char, symbol, suffix))
+                Ok(TokenKind::lit(token::Char, symbol, suffix))
             }
             'b' => {
                 self.bump();
@@ -1122,12 +1061,18 @@ fn next_token_inner(&mut self) -> Result<Token, ()> {
                         self.validate_byte_str_escape(start_with_quote);
                         (token::ByteStr, symbol)
                     },
-                    Some('r') => self.scan_raw_byte_string(),
+                    Some('r') => {
+                        let (start, end, hash_count) = self.scan_raw_string();
+                        let symbol = self.name_from_to(start, end);
+                        self.validate_raw_byte_str_escape(start, end);
+
+                        (token::ByteStrRaw(hash_count), symbol)
+                    }
                     _ => unreachable!(),  // Should have been a token::Ident above.
                 };
                 let suffix = self.scan_optional_raw_name();
 
-                Ok(Token::lit(kind, symbol, suffix))
+                Ok(TokenKind::lit(kind, symbol, suffix))
             }
             '"' => {
                 let start_with_quote = self.pos;
@@ -1135,85 +1080,15 @@ fn next_token_inner(&mut self) -> Result<Token, ()> {
                 let symbol = self.scan_double_quoted_string(msg);
                 self.validate_str_escape(start_with_quote);
                 let suffix = self.scan_optional_raw_name();
-                Ok(Token::lit(token::Str, symbol, suffix))
+                Ok(TokenKind::lit(token::Str, symbol, suffix))
             }
             'r' => {
-                let start_bpos = self.pos;
-                self.bump();
-                let mut hash_count: u16 = 0;
-                while self.ch_is('#') {
-                    if hash_count == 65535 {
-                        let bpos = self.next_pos;
-                        self.fatal_span_(start_bpos,
-                                         bpos,
-                                         "too many `#` symbols: raw strings may be \
-                                         delimited by up to 65535 `#` symbols").raise();
-                    }
-                    self.bump();
-                    hash_count += 1;
-                }
-
-                if self.is_eof() {
-                    self.fail_unterminated_raw_string(start_bpos, hash_count);
-                } else if !self.ch_is('"') {
-                    let last_bpos = self.pos;
-                    let curr_char = self.ch.unwrap();
-                    self.fatal_span_char(start_bpos,
-                                         last_bpos,
-                                         "found invalid character; only `#` is allowed \
-                                         in raw string delimitation",
-                                         curr_char).raise();
-                }
-                self.bump();
-                let content_start_bpos = self.pos;
-                let mut content_end_bpos;
-                let mut valid = true;
-                'outer: loop {
-                    if self.is_eof() {
-                        self.fail_unterminated_raw_string(start_bpos, hash_count);
-                    }
-                    // if self.ch_is('"') {
-                    // content_end_bpos = self.pos;
-                    // for _ in 0..hash_count {
-                    // self.bump();
-                    // if !self.ch_is('#') {
-                    // continue 'outer;
-                    let c = self.ch.unwrap();
-                    match c {
-                        '"' => {
-                            content_end_bpos = self.pos;
-                            for _ in 0..hash_count {
-                                self.bump();
-                                if !self.ch_is('#') {
-                                    continue 'outer;
-                                }
-                            }
-                            break;
-                        }
-                        '\r' => {
-                            if !self.nextch_is('\n') {
-                                let last_bpos = self.pos;
-                                self.err_span_(start_bpos,
-                                               last_bpos,
-                                               "bare CR not allowed in raw string, use \\r \
-                                                instead");
-                                valid = false;
-                            }
-                        }
-                        _ => (),
-                    }
-                    self.bump();
-                }
-
-                self.bump();
-                let symbol = if valid {
-                    self.name_from_to(content_start_bpos, content_end_bpos)
-                } else {
-                    Symbol::intern("??")
-                };
+                let (start, end, hash_count) = self.scan_raw_string();
+                let symbol = self.name_from_to(start, end);
+                self.validate_raw_str_escape(start, end);
                 let suffix = self.scan_optional_raw_name();
 
-                Ok(Token::lit(token::StrRaw(hash_count), symbol, suffix))
+                Ok(TokenKind::lit(token::StrRaw(hash_count), symbol, suffix))
             }
             '-' => {
                 if self.nextch_is('>') {
@@ -1367,16 +1242,18 @@ fn scan_double_quoted_string(&mut self, unterminated_msg: &str) -> ast::Name {
         id
     }
 
-    fn scan_raw_byte_string(&mut self) -> (token::LitKind, Symbol) {
+    /// Scans a raw (byte) string, returning byte position range for `"<literal>"`
+    /// (including quotes) along with `#` character count in `(b)r##..."<literal>"##...`;
+    fn scan_raw_string(&mut self) -> (BytePos, BytePos, u16) {
         let start_bpos = self.pos;
         self.bump();
-        let mut hash_count = 0;
+        let mut hash_count: u16 = 0;
         while self.ch_is('#') {
             if hash_count == 65535 {
                 let bpos = self.next_pos;
                 self.fatal_span_(start_bpos,
                                  bpos,
-                                 "too many `#` symbols: raw byte strings may be \
+                                 "too many `#` symbols: raw strings may be \
                                  delimited by up to 65535 `#` symbols").raise();
             }
             self.bump();
@@ -1386,13 +1263,13 @@ fn scan_raw_byte_string(&mut self) -> (token::LitKind, Symbol) {
         if self.is_eof() {
             self.fail_unterminated_raw_string(start_bpos, hash_count);
         } else if !self.ch_is('"') {
-            let pos = self.pos;
-            let ch = self.ch.unwrap();
+            let last_bpos = self.pos;
+            let curr_char = self.ch.unwrap();
             self.fatal_span_char(start_bpos,
-                                        pos,
-                                        "found invalid character; only `#` is allowed in raw \
-                                         string delimitation",
-                                        ch).raise();
+                                 last_bpos,
+                                 "found invalid character; only `#` is allowed \
+                                 in raw string delimitation",
+                                 curr_char).raise();
         }
         self.bump();
         let content_start_bpos = self.pos;
@@ -1412,19 +1289,14 @@ fn scan_raw_byte_string(&mut self) -> (token::LitKind, Symbol) {
                     }
                     break;
                 }
-                Some(c) => {
-                    if c > '\x7F' {
-                        let pos = self.pos;
-                        self.err_span_char(pos, pos, "raw byte string must be ASCII", c);
-                    }
-                }
+                _ => (),
             }
             self.bump();
         }
 
         self.bump();
 
-        (token::ByteStrRaw(hash_count), self.name_from_to(content_start_bpos, content_end_bpos))
+        (content_start_bpos, content_end_bpos, hash_count)
     }
 
     fn validate_char_escape(&self, start_with_quote: BytePos) {
@@ -1474,6 +1346,40 @@ fn validate_str_escape(&self, start_with_quote: BytePos) {
         });
     }
 
+    fn validate_raw_str_escape(&self, content_start: BytePos, content_end: BytePos) {
+        self.with_str_from_to(content_start, content_end, |lit: &str| {
+            unescape::unescape_raw_str(lit, &mut |range, c| {
+                if let Err(err) = c {
+                    emit_unescape_error(
+                        &self.sess.span_diagnostic,
+                        lit,
+                        self.mk_sp(content_start - BytePos(1), content_end + BytePos(1)),
+                        unescape::Mode::Str,
+                        range,
+                        err,
+                    )
+                }
+            })
+        });
+    }
+
+    fn validate_raw_byte_str_escape(&self, content_start: BytePos, content_end: BytePos) {
+        self.with_str_from_to(content_start, content_end, |lit: &str| {
+            unescape::unescape_raw_byte_str(lit, &mut |range, c| {
+                if let Err(err) = c {
+                    emit_unescape_error(
+                        &self.sess.span_diagnostic,
+                        lit,
+                        self.mk_sp(content_start - BytePos(1), content_end + BytePos(1)),
+                        unescape::Mode::ByteStr,
+                        range,
+                        err,
+                    )
+                }
+            })
+        });
+    }
+
     fn validate_byte_str_escape(&self, start_with_quote: BytePos) {
         self.with_str_from_to(start_with_quote + BytePos(1), self.pos - BytePos(1), |lit| {
             unescape::unescape_byte_str(lit, &mut |range, c| {
@@ -1553,7 +1459,7 @@ fn char_at(s: &str, byte: usize) -> char {
 mod tests {
     use super::*;
 
-    use crate::ast::{Ident, CrateConfig};
+    use crate::ast::CrateConfig;
     use crate::symbol::Symbol;
     use crate::source_map::{SourceMap, FilePathMapping};
     use crate::feature_gate::UnstableFeatures;
@@ -1610,27 +1516,26 @@ fn t1() {
                                         &sh,
                                         "/* my source file */ fn main() { println!(\"zebra\"); }\n"
                                             .to_string());
-            let id = Ident::from_str("fn");
-            assert_eq!(string_reader.next_token().tok, token::Comment);
-            assert_eq!(string_reader.next_token().tok, token::Whitespace);
+            assert_eq!(string_reader.next_token(), token::Comment);
+            assert_eq!(string_reader.next_token(), token::Whitespace);
             let tok1 = string_reader.next_token();
-            let tok2 = TokenAndSpan {
-                tok: token::Ident(id, false),
-                sp: Span::new(BytePos(21), BytePos(23), NO_EXPANSION),
-            };
-            assert_eq!(tok1.tok, tok2.tok);
-            assert_eq!(tok1.sp, tok2.sp);
-            assert_eq!(string_reader.next_token().tok, token::Whitespace);
+            let tok2 = Token::new(
+                mk_ident("fn"),
+                Span::new(BytePos(21), BytePos(23), NO_EXPANSION),
+            );
+            assert_eq!(tok1.kind, tok2.kind);
+            assert_eq!(tok1.span, tok2.span);
+            assert_eq!(string_reader.next_token(), token::Whitespace);
             // the 'main' id is already read:
             assert_eq!(string_reader.pos.clone(), BytePos(28));
             // read another token:
             let tok3 = string_reader.next_token();
-            let tok4 = TokenAndSpan {
-                tok: mk_ident("main"),
-                sp: Span::new(BytePos(24), BytePos(28), NO_EXPANSION),
-            };
-            assert_eq!(tok3.tok, tok4.tok);
-            assert_eq!(tok3.sp, tok4.sp);
+            let tok4 = Token::new(
+                mk_ident("main"),
+                Span::new(BytePos(24), BytePos(28), NO_EXPANSION),
+            );
+            assert_eq!(tok3.kind, tok4.kind);
+            assert_eq!(tok3.span, tok4.span);
             // the lparen is already read:
             assert_eq!(string_reader.pos.clone(), BytePos(29))
         })
@@ -1638,19 +1543,19 @@ fn t1() {
 
     // check that the given reader produces the desired stream
     // of tokens (stop checking after exhausting the expected vec)
-    fn check_tokenization(mut string_reader: StringReader<'_>, expected: Vec<Token>) {
+    fn check_tokenization(mut string_reader: StringReader<'_>, expected: Vec<TokenKind>) {
         for expected_tok in &expected {
-            assert_eq!(&string_reader.next_token().tok, expected_tok);
+            assert_eq!(&string_reader.next_token(), expected_tok);
         }
     }
 
     // make the identifier by looking up the string in the interner
-    fn mk_ident(id: &str) -> Token {
-        Token::from_ast_ident(Ident::from_str(id))
+    fn mk_ident(id: &str) -> TokenKind {
+        token::Ident(Symbol::intern(id), false)
     }
 
-    fn mk_lit(kind: token::LitKind, symbol: &str, suffix: Option<&str>) -> Token {
-        Token::lit(kind, Symbol::intern(symbol), suffix.map(Symbol::intern))
+    fn mk_lit(kind: token::LitKind, symbol: &str, suffix: Option<&str>) -> TokenKind {
+        TokenKind::lit(kind, Symbol::intern(symbol), suffix.map(Symbol::intern))
     }
 
     #[test]
@@ -1698,7 +1603,7 @@ fn character_a() {
         with_default_globals(|| {
             let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
             let sh = mk_sess(sm.clone());
-            assert_eq!(setup(&sm, &sh, "'a'".to_string()).next_token().tok,
+            assert_eq!(setup(&sm, &sh, "'a'".to_string()).next_token(),
                        mk_lit(token::Char, "a", None));
         })
     }
@@ -1708,7 +1613,7 @@ fn character_space() {
         with_default_globals(|| {
             let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
             let sh = mk_sess(sm.clone());
-            assert_eq!(setup(&sm, &sh, "' '".to_string()).next_token().tok,
+            assert_eq!(setup(&sm, &sh, "' '".to_string()).next_token(),
                        mk_lit(token::Char, " ", None));
         })
     }
@@ -1718,7 +1623,7 @@ fn character_escaped() {
         with_default_globals(|| {
             let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
             let sh = mk_sess(sm.clone());
-            assert_eq!(setup(&sm, &sh, "'\\n'".to_string()).next_token().tok,
+            assert_eq!(setup(&sm, &sh, "'\\n'".to_string()).next_token(),
                        mk_lit(token::Char, "\\n", None));
         })
     }
@@ -1728,8 +1633,8 @@ fn lifetime_name() {
         with_default_globals(|| {
             let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
             let sh = mk_sess(sm.clone());
-            assert_eq!(setup(&sm, &sh, "'abc".to_string()).next_token().tok,
-                       token::Lifetime(Ident::from_str("'abc")));
+            assert_eq!(setup(&sm, &sh, "'abc".to_string()).next_token(),
+                       token::Lifetime(Symbol::intern("'abc")));
         })
     }
 
@@ -1738,7 +1643,7 @@ fn raw_string() {
         with_default_globals(|| {
             let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
             let sh = mk_sess(sm.clone());
-            assert_eq!(setup(&sm, &sh, "r###\"\"#a\\b\x00c\"\"###".to_string()).next_token().tok,
+            assert_eq!(setup(&sm, &sh, "r###\"\"#a\\b\x00c\"\"###".to_string()).next_token(),
                        mk_lit(token::StrRaw(3), "\"#a\\b\x00c\"", None));
         })
     }
@@ -1750,10 +1655,10 @@ fn literal_suffixes() {
             let sh = mk_sess(sm.clone());
             macro_rules! test {
                 ($input: expr, $tok_type: ident, $tok_contents: expr) => {{
-                    assert_eq!(setup(&sm, &sh, format!("{}suffix", $input)).next_token().tok,
+                    assert_eq!(setup(&sm, &sh, format!("{}suffix", $input)).next_token(),
                                mk_lit(token::$tok_type, $tok_contents, Some("suffix")));
                     // with a whitespace separator:
-                    assert_eq!(setup(&sm, &sh, format!("{} suffix", $input)).next_token().tok,
+                    assert_eq!(setup(&sm, &sh, format!("{} suffix", $input)).next_token(),
                                mk_lit(token::$tok_type, $tok_contents, None));
                 }}
             }
@@ -1768,11 +1673,11 @@ macro_rules! test {
             test!("1.0", Float, "1.0");
             test!("1.0e10", Float, "1.0e10");
 
-            assert_eq!(setup(&sm, &sh, "2us".to_string()).next_token().tok,
+            assert_eq!(setup(&sm, &sh, "2us".to_string()).next_token(),
                        mk_lit(token::Integer, "2", Some("us")));
-            assert_eq!(setup(&sm, &sh, "r###\"raw\"###suffix".to_string()).next_token().tok,
+            assert_eq!(setup(&sm, &sh, "r###\"raw\"###suffix".to_string()).next_token(),
                        mk_lit(token::StrRaw(3), "raw", Some("suffix")));
-            assert_eq!(setup(&sm, &sh, "br###\"raw\"###suffix".to_string()).next_token().tok,
+            assert_eq!(setup(&sm, &sh, "br###\"raw\"###suffix".to_string()).next_token(),
                        mk_lit(token::ByteStrRaw(3), "raw", Some("suffix")));
         })
     }
@@ -1790,11 +1695,8 @@ fn nested_block_comments() {
             let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
             let sh = mk_sess(sm.clone());
             let mut lexer = setup(&sm, &sh, "/* /* */ */'a'".to_string());
-            match lexer.next_token().tok {
-                token::Comment => {}
-                _ => panic!("expected a comment!"),
-            }
-            assert_eq!(lexer.next_token().tok, mk_lit(token::Char, "a", None));
+            assert_eq!(lexer.next_token(), token::Comment);
+            assert_eq!(lexer.next_token(), mk_lit(token::Char, "a", None));
         })
     }
 
@@ -1805,11 +1707,10 @@ fn crlf_comments() {
             let sh = mk_sess(sm.clone());
             let mut lexer = setup(&sm, &sh, "// test\r\n/// test\r\n".to_string());
             let comment = lexer.next_token();
-            assert_eq!(comment.tok, token::Comment);
-            assert_eq!((comment.sp.lo(), comment.sp.hi()), (BytePos(0), BytePos(7)));
-            assert_eq!(lexer.next_token().tok, token::Whitespace);
-            assert_eq!(lexer.next_token().tok,
-                    token::DocComment(Symbol::intern("/// test")));
+            assert_eq!(comment.kind, token::Comment);
+            assert_eq!((comment.span.lo(), comment.span.hi()), (BytePos(0), BytePos(7)));
+            assert_eq!(lexer.next_token(), token::Whitespace);
+            assert_eq!(lexer.next_token(), token::DocComment(Symbol::intern("/// test")));
         })
     }
 }
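
The dominant change in this file is visible in the hunks above: the cached `peek_tok`/`peek_span` pair becomes a single `peek_token: Token`, `peek()` can hand out a reference instead of cloning, and `try_next_token` swaps the cached token out with a `take`-style replace against a dummy. A minimal sketch of that mechanism with simplified, hypothetical types (the real `advance_token` refills the cache by scanning the source):

use std::mem;

#[derive(Clone, Copy, Debug, PartialEq)]
struct Span { lo: u32, hi: u32 }
const DUMMY_SP: Span = Span { lo: 0, hi: 0 };

#[derive(Clone, Debug, PartialEq)]
enum TokenKind { Whitespace, Ident(String) }

#[derive(Clone, Debug, PartialEq)]
struct Token { kind: TokenKind, span: Span }

impl Token {
    fn dummy() -> Token { Token { kind: TokenKind::Whitespace, span: DUMMY_SP } }
    // Swap the token out, leaving a dummy behind (mirrors `self.peek_token.take()`).
    fn take(&mut self) -> Token { mem::replace(self, Token::dummy()) }
}

struct StringReader { peek_token: Token }

impl StringReader {
    // One cached token; peeking is now a cheap borrow, not a clone.
    fn peek(&self) -> &Token { &self.peek_token }
    fn next_token(&mut self) -> Token {
        let ret = self.peek_token.take();
        // a real lexer would refill `peek_token` here by scanning further input
        ret
    }
}

fn main() {
    let mut reader = StringReader {
        peek_token: Token { kind: TokenKind::Ident("fn".to_string()), span: Span { lo: 0, hi: 2 } },
    };
    assert_eq!(reader.peek().span, Span { lo: 0, hi: 2 });
    assert_eq!(reader.next_token().kind, TokenKind::Ident("fn".to_string()));
    assert_eq!(reader.peek().kind, TokenKind::Whitespace);
}
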
index 4bfc5bb16c0bb236d50d140960b4648cbfbbaeb2..99d9d40a45b931b353ee3e8eb037e206c51269f0 100644 (file)
@@ -2,15 +2,15 @@
 
 use crate::print::pprust::token_to_string;
 use crate::parse::lexer::{StringReader, UnmatchedBrace};
-use crate::parse::{token, PResult};
+use crate::parse::token::{self, Token};
+use crate::parse::PResult;
 use crate::tokenstream::{DelimSpan, IsJoint::*, TokenStream, TokenTree, TreeAndJoint};
 
 impl<'a> StringReader<'a> {
     crate fn into_token_trees(self) -> (PResult<'a, TokenStream>, Vec<UnmatchedBrace>) {
         let mut tt_reader = TokenTreesReader {
             string_reader: self,
-            token: token::Eof,
-            span: syntax_pos::DUMMY_SP,
+            token: Token::dummy(),
             open_braces: Vec::new(),
             unmatched_braces: Vec::new(),
             matching_delim_spans: Vec::new(),
@@ -23,8 +23,7 @@ impl<'a> StringReader<'a> {
 
 struct TokenTreesReader<'a> {
     string_reader: StringReader<'a>,
-    token: token::Token,
-    span: Span,
+    token: Token,
     /// Stack of open delimiters and their spans. Used for error message.
     open_braces: Vec<(token::DelimToken, Span)>,
     unmatched_braces: Vec<UnmatchedBrace>,
@@ -52,7 +51,7 @@ fn parse_all_token_trees(&mut self) -> PResult<'a, TokenStream> {
     fn parse_token_trees_until_close_delim(&mut self) -> TokenStream {
         let mut tts = vec![];
         loop {
-            if let token::CloseDelim(..) = self.token {
+            if let token::CloseDelim(..) = self.token.kind {
                 return TokenStream::new(tts);
             }
 
@@ -68,11 +67,11 @@ fn parse_token_trees_until_close_delim(&mut self) -> TokenStream {
 
     fn parse_token_tree(&mut self) -> PResult<'a, TreeAndJoint> {
         let sm = self.string_reader.sess.source_map();
-        match self.token {
+        match self.token.kind {
             token::Eof => {
                 let msg = "this file contains an un-closed delimiter";
                 let mut err = self.string_reader.sess.span_diagnostic
-                    .struct_span_err(self.span, msg);
+                    .struct_span_err(self.token.span, msg);
                 for &(_, sp) in &self.open_braces {
                     err.span_label(sp, "un-closed delimiter");
                 }
@@ -102,10 +101,10 @@ fn parse_token_tree(&mut self) -> PResult<'a, TreeAndJoint> {
             },
             token::OpenDelim(delim) => {
                 // The span for beginning of the delimited section
-                let pre_span = self.span;
+                let pre_span = self.token.span;
 
                 // Parse the open delimiter.
-                self.open_braces.push((delim, self.span));
+                self.open_braces.push((delim, self.token.span));
                 self.real_token();
 
                 // Parse the token trees within the delimiters.
@@ -114,9 +113,9 @@ fn parse_token_tree(&mut self) -> PResult<'a, TreeAndJoint> {
                 let tts = self.parse_token_trees_until_close_delim();
 
                 // Expand to cover the entire delimited token tree
-                let delim_span = DelimSpan::from_pair(pre_span, self.span);
+                let delim_span = DelimSpan::from_pair(pre_span, self.token.span);
 
-                match self.token {
+                match self.token.kind {
                     // Correct delimiter.
                     token::CloseDelim(d) if d == delim => {
                         let (open_brace, open_brace_span) = self.open_braces.pop().unwrap();
@@ -126,7 +125,7 @@ fn parse_token_tree(&mut self) -> PResult<'a, TreeAndJoint> {
                             self.matching_delim_spans.clear();
                         } else {
                             self.matching_delim_spans.push(
-                                (open_brace, open_brace_span, self.span),
+                                (open_brace, open_brace_span, self.token.span),
                             );
                         }
                         // Parse the close delimiter.
@@ -136,16 +135,16 @@ fn parse_token_tree(&mut self) -> PResult<'a, TreeAndJoint> {
                     token::CloseDelim(other) => {
                         let mut unclosed_delimiter = None;
                         let mut candidate = None;
-                        if self.last_unclosed_found_span != Some(self.span) {
+                        if self.last_unclosed_found_span != Some(self.token.span) {
                             // do not complain about the same unclosed delimiter multiple times
-                            self.last_unclosed_found_span = Some(self.span);
+                            self.last_unclosed_found_span = Some(self.token.span);
                             // This is a conservative error: only report the last unclosed
                             // delimiter. The previous unclosed delimiters could actually be
                             // closed! The parser just hasn't gotten to them yet.
                             if let Some(&(_, sp)) = self.open_braces.last() {
                                 unclosed_delimiter = Some(sp);
                             };
-                            if let Some(current_padding) = sm.span_to_margin(self.span) {
+                            if let Some(current_padding) = sm.span_to_margin(self.token.span) {
                                 for (brace, brace_span) in &self.open_braces {
                                     if let Some(padding) = sm.span_to_margin(*brace_span) {
                                         // high likelihood of these two corresponding
@@ -159,7 +158,7 @@ fn parse_token_tree(&mut self) -> PResult<'a, TreeAndJoint> {
                             self.unmatched_braces.push(UnmatchedBrace {
                                 expected_delim: tok,
                                 found_delim: other,
-                                found_span: self.span,
+                                found_span: self.token.span,
                                 unclosed_span: unclosed_delimiter,
                                 candidate_span: candidate,
                             });
@@ -198,12 +197,12 @@ fn parse_token_tree(&mut self) -> PResult<'a, TreeAndJoint> {
                 let token_str = token_to_string(&self.token);
                 let msg = format!("unexpected close delimiter: `{}`", token_str);
                 let mut err = self.string_reader.sess.span_diagnostic
-                    .struct_span_err(self.span, &msg);
-                err.span_label(self.span, "unexpected close delimiter");
+                    .struct_span_err(self.token.span, &msg);
+                err.span_label(self.token.span, "unexpected close delimiter");
                 Err(err)
             },
             _ => {
-                let tt = TokenTree::Token(self.span, self.token.clone());
+                let tt = TokenTree::Token(self.token.take());
                 // Note that testing for joint-ness here is done via the raw
                 // source span as the joint-ness is a property of the raw source
                 // rather than wanting to take `override_span` into account.
@@ -212,15 +211,13 @@ fn parse_token_tree(&mut self) -> PResult<'a, TreeAndJoint> {
                 let raw = self.string_reader.peek_span_src_raw;
                 self.real_token();
                 let is_joint = raw.hi() == self.string_reader.peek_span_src_raw.lo()
-                    && token::is_op(&self.token);
+                    && self.token.is_op();
                 Ok((tt, if is_joint { Joint } else { NonJoint }))
             }
         }
     }
 
     fn real_token(&mut self) {
-        let t = self.string_reader.real_token();
-        self.token = t.tok;
-        self.span = t.sp;
+        self.token = self.string_reader.real_token();
     }
 }
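
Beyond the same Token/TokenKind plumbing, the hunks above show how `TokenTreesReader` tracks delimiters: each open delimiter is pushed onto `open_braces` together with its span, a matching close pops it, and mismatches or leftovers are reported as `UnmatchedBrace`s. A toy version of that bookkeeping, with deliberately simplified types and only two of the real struct's fields:

#[derive(Clone, Copy, Debug, PartialEq)]
enum DelimToken { Paren, Brace }

#[derive(Clone, Copy, Debug, PartialEq)]
struct Span { lo: u32, hi: u32 }

#[derive(Debug)]
struct UnmatchedBrace { expected_delim: DelimToken, unclosed_span: Span }

// `true` marks an opening delimiter, `false` a closing one.
fn check_delims(tokens: &[(DelimToken, bool, Span)]) -> Vec<UnmatchedBrace> {
    let mut open_braces: Vec<(DelimToken, Span)> = Vec::new();
    let mut unmatched = Vec::new();
    for &(delim, is_open, span) in tokens {
        if is_open {
            open_braces.push((delim, span));
        } else {
            match open_braces.pop() {
                Some((open, _)) if open == delim => {}          // correct close
                Some((open, open_span)) => unmatched.push(UnmatchedBrace {
                    expected_delim: open,
                    unclosed_span: open_span,
                }),
                None => {}  // stray close; the real reader emits its own error here
            }
        }
    }
    // anything still on the stack was never closed
    for (delim, span) in open_braces {
        unmatched.push(UnmatchedBrace { expected_delim: delim, unclosed_span: span });
    }
    unmatched
}

fn main() {
    let sp = |lo, hi| Span { lo, hi };
    let toks = [(DelimToken::Brace, true, sp(0, 1)), (DelimToken::Paren, false, sp(2, 3))];
    let report = check_delims(&toks);
    assert_eq!(report.len(), 1);
    assert_eq!(report[0].expected_delim, DelimToken::Brace);
}
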
index 18019a89130e7f49219f62bb86a2139c7f574dda..ef55bf6b929336511e4dc5f3fb5d0317e500535f 100644 (file)
@@ -1,10 +1,12 @@
 //! Code related to parsing literals.
 
-use crate::ast::{self, Ident, Lit, LitKind};
+use crate::ast::{self, Lit, LitKind};
 use crate::parse::parser::Parser;
 use crate::parse::PResult;
-use crate::parse::token::{self, Token};
-use crate::parse::unescape::{unescape_str, unescape_char, unescape_byte_str, unescape_byte};
+use crate::parse::token::{self, Token, TokenKind};
+use crate::parse::unescape::{unescape_char, unescape_byte};
+use crate::parse::unescape::{unescape_str, unescape_byte_str};
+use crate::parse::unescape::{unescape_raw_str, unescape_raw_byte_str};
 use crate::print::pprust;
 use crate::symbol::{kw, sym, Symbol};
 use crate::tokenstream::{TokenStream, TokenTree};
@@ -141,7 +143,17 @@ fn from_lit_token(lit: token::Lit) -> Result<LitKind, LitError> {
                 // Ditto.
                 let s = symbol.as_str();
                 let symbol = if s.contains('\r') {
-                    Symbol::intern(&raw_str_lit(&s))
+                    let mut buf = String::with_capacity(s.len());
+                    let mut error = Ok(());
+                    unescape_raw_str(&s, &mut |_, unescaped_char| {
+                        match unescaped_char {
+                            Ok(c) => buf.push(c),
+                            Err(_) => error = Err(LitError::LexerError),
+                        }
+                    });
+                    error?;
+                    buf.shrink_to_fit();
+                    Symbol::intern(&buf)
                 } else {
                     symbol
                 };
@@ -161,7 +173,26 @@ fn from_lit_token(lit: token::Lit) -> Result<LitKind, LitError> {
                 buf.shrink_to_fit();
                 LitKind::ByteStr(Lrc::new(buf))
             }
-            token::ByteStrRaw(_) => LitKind::ByteStr(Lrc::new(symbol.to_string().into_bytes())),
+            token::ByteStrRaw(_) => {
+                let s = symbol.as_str();
+                let bytes = if s.contains('\r') {
+                    let mut buf = Vec::with_capacity(s.len());
+                    let mut error = Ok(());
+                    unescape_raw_byte_str(&s, &mut |_, unescaped_byte| {
+                        match unescaped_byte {
+                            Ok(c) => buf.push(c),
+                            Err(_) => error = Err(LitError::LexerError),
+                        }
+                    });
+                    error?;
+                    buf.shrink_to_fit();
+                    buf
+                } else {
+                    symbol.to_string().into_bytes()
+                };
+
+                LitKind::ByteStr(Lrc::new(bytes))
+            },
             token::Err => LitKind::Err(symbol),
         })
     }
@@ -228,10 +259,10 @@ fn from_lit_token(token: token::Lit, span: Span) -> Result<Lit, LitError> {
     }
 
     /// Converts arbitrary token into an AST literal.
-    crate fn from_token(token: &Token, span: Span) -> Result<Lit, LitError> {
-        let lit = match *token {
-            token::Ident(ident, false) if ident.name == kw::True || ident.name == kw::False =>
-                token::Lit::new(token::Bool, ident.name, None),
+    crate fn from_token(token: &Token) -> Result<Lit, LitError> {
+        let lit = match token.kind {
+            token::Ident(name, false) if name == kw::True || name == kw::False =>
+                token::Lit::new(token::Bool, name, None),
             token::Literal(lit) =>
                 lit,
             token::Interpolated(ref nt) => {
@@ -245,7 +276,7 @@ fn from_lit_token(token: token::Lit, span: Span) -> Result<Lit, LitError> {
             _ => return Err(LitError::NotLiteral)
         };
 
-        Lit::from_lit_token(lit, span)
+        Lit::from_lit_token(lit, token.span)
     }
 
     /// Attempts to recover an AST literal from semantic literal.
@@ -258,10 +289,10 @@ pub fn from_lit_kind(node: LitKind, span: Span) -> Lit {
     /// Losslessly convert an AST literal into a token stream.
     crate fn tokens(&self) -> TokenStream {
         let token = match self.token.kind {
-            token::Bool => token::Ident(Ident::new(self.token.symbol, self.span), false),
+            token::Bool => token::Ident(self.token.symbol, false),
             _ => token::Literal(self.token),
         };
-        TokenTree::Token(self.span, token).into()
+        TokenTree::token(token, self.span).into()
     }
 }
 
@@ -271,48 +302,50 @@ impl<'a> Parser<'a> {
         let mut recovered = None;
         if self.token == token::Dot {
             // Attempt to recover `.4` as `0.4`.
-            recovered = self.look_ahead(1, |t| {
-                if let token::Literal(token::Lit { kind: token::Integer, symbol, suffix }) = *t {
-                    let next_span = self.look_ahead_span(1);
-                    if self.span.hi() == next_span.lo() {
+            recovered = self.look_ahead(1, |next_token| {
+                if let token::Literal(token::Lit { kind: token::Integer, symbol, suffix })
+                        = next_token.kind {
+                    if self.token.span.hi() == next_token.span.lo() {
                         let s = String::from("0.") + &symbol.as_str();
-                        let token = Token::lit(token::Float, Symbol::intern(&s), suffix);
-                        return Some((token, self.span.to(next_span)));
+                        let kind = TokenKind::lit(token::Float, Symbol::intern(&s), suffix);
+                        return Some(Token::new(kind, self.token.span.to(next_token.span)));
                     }
                 }
                 None
             });
-            if let Some((ref token, span)) = recovered {
+            if let Some(token) = &recovered {
                 self.bump();
                 self.diagnostic()
-                    .struct_span_err(span, "float literals must have an integer part")
+                    .struct_span_err(token.span, "float literals must have an integer part")
                     .span_suggestion(
-                        span,
+                        token.span,
                         "must have an integer part",
-                        pprust::token_to_string(&token),
+                        pprust::token_to_string(token),
                         Applicability::MachineApplicable,
                     )
                     .emit();
             }
         }
 
-        let (token, span) = recovered.as_ref().map_or((&self.token, self.span),
-                                                      |(token, span)| (token, *span));
-
-        match Lit::from_token(token, span) {
+        let token = recovered.as_ref().unwrap_or(&self.token);
+        match Lit::from_token(token) {
             Ok(lit) => {
                 self.bump();
                 Ok(lit)
             }
             Err(LitError::NotLiteral) => {
                 let msg = format!("unexpected token: {}", self.this_token_descr());
-                Err(self.span_fatal(span, &msg))
+                Err(self.span_fatal(token.span, &msg))
             }
             Err(err) => {
-                let lit = token.expect_lit();
+                let (lit, span) = (token.expect_lit(), token.span);
                 self.bump();
                 err.report(&self.sess.span_diagnostic, lit, span);
-                let lit = token::Lit::new(token::Err, lit.symbol, lit.suffix);
+                // Pack possible quotes and prefixes from the original literal into
+                // the error literal's symbol so they can be pretty-printed faithfully.
+                let suffixless_lit = token::Lit::new(lit.kind, lit.symbol, None);
+                let symbol = Symbol::intern(&pprust::literal_to_string(suffixless_lit));
+                let lit = token::Lit::new(token::Err, symbol, lit.suffix);
                 Lit::from_lit_token(lit, span).map_err(|_| unreachable!())
             }
         }
@@ -351,29 +384,6 @@ impl<'a> Parser<'a> {
     }
 }
 
-/// Parses a string representing a raw string literal into its final form. The
-/// only operation this does is convert embedded CRLF into a single LF.
-fn raw_str_lit(lit: &str) -> String {
-    debug!("raw_str_lit: {:?}", lit);
-    let mut res = String::with_capacity(lit.len());
-
-    let mut chars = lit.chars().peekable();
-    while let Some(c) = chars.next() {
-        if c == '\r' {
-            if *chars.peek().unwrap() != '\n' {
-                panic!("lexer accepted bare CR");
-            }
-            chars.next();
-            res.push('\n');
-        } else {
-            res.push(c);
-        }
-    }
-
-    res.shrink_to_fit();
-    res
-}
-
 // Checks if `s` looks like i32 or u1234 etc.
 fn looks_like_width_suffix(first_chars: &[char], s: &str) -> bool {
     s.len() > 1 && s.starts_with(first_chars) && s[1..].chars().all(|c| c.is_ascii_digit())
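
The `StrRaw`/`ByteStrRaw` hunks above replace the old `raw_str_lit` helper with the `unescape_raw_str`/`unescape_raw_byte_str` callbacks, but the observable job is the same: when a raw literal contains a carriage return, collapse CRLF into LF and treat a bare CR as a lexer error. A standalone sketch of that normalization (a hypothetical helper for illustration only, not the compiler's API):

fn normalize_raw_str(lit: &str) -> Result<String, &'static str> {
    let mut buf = String::with_capacity(lit.len());
    let mut chars = lit.chars().peekable();
    while let Some(c) = chars.next() {
        if c == '\r' {
            match chars.peek() {
                Some(&'\n') => { chars.next(); buf.push('\n'); }   // CRLF -> LF
                _ => return Err("bare CR not allowed in raw string"),
            }
        } else {
            buf.push(c);
        }
    }
    buf.shrink_to_fit();
    Ok(buf)
}

fn main() {
    assert_eq!(normalize_raw_str("a\r\nb").unwrap(), "a\nb");
    assert!(normalize_raw_str("a\rb").is_err());
}
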
index f7a7aba9ecbaa1fdf3063012ebc4ed77a3d2b065..dd96c63ab0ed66a1f895319259d22982559d9623 100644 (file)
@@ -5,10 +5,11 @@
 use crate::source_map::{SourceMap, FilePathMapping};
 use crate::feature_gate::UnstableFeatures;
 use crate::parse::parser::Parser;
-use crate::syntax::parse::parser::emit_unclosed_delims;
+use crate::parse::parser::emit_unclosed_delims;
+use crate::parse::token::TokenKind;
 use crate::tokenstream::{TokenStream, TokenTree};
 use crate::diagnostics::plugin::ErrorMap;
-use crate::print::pprust::token_to_string;
+use crate::print::pprust;
 
 use errors::{Applicability, FatalError, Level, Handler, ColorConfig, Diagnostic, DiagnosticBuilder};
 use rustc_data_structures::sync::{Lrc, Lock};
@@ -238,8 +239,8 @@ fn maybe_source_file_to_parser(
     let (stream, unclosed_delims) = maybe_file_to_stream(sess, source_file, None)?;
     let mut parser = stream_to_parser(sess, stream, None);
     parser.unclosed_delims = unclosed_delims;
-    if parser.token == token::Eof && parser.span.is_dummy() {
-        parser.span = Span::new(end_pos, end_pos, parser.span.ctxt());
+    if parser.token == token::Eof && parser.token.span.is_dummy() {
+        parser.token.span = Span::new(end_pos, end_pos, parser.token.span.ctxt());
     }
 
     Ok(parser)
@@ -311,7 +312,7 @@ pub fn maybe_file_to_stream(
             for unmatched in unmatched_braces {
                 let mut db = sess.span_diagnostic.struct_span_err(unmatched.found_span, &format!(
                     "incorrect close delimiter: `{}`",
-                    token_to_string(&token::Token::CloseDelim(unmatched.found_delim)),
+                    pprust::token_kind_to_string(&token::CloseDelim(unmatched.found_delim)),
                 ));
                 db.span_label(unmatched.found_span, "incorrect close delimiter");
                 if let Some(sp) = unmatched.candidate_span {
@@ -358,13 +359,13 @@ pub fn stream_to_parser_with_base_dir<'a>(
 /// A sequence separator.
 pub struct SeqSep {
     /// The separator token.
-    pub sep: Option<token::Token>,
+    pub sep: Option<TokenKind>,
     /// `true` if a trailing separator is allowed.
     pub trailing_sep_allowed: bool,
 }
 
 impl SeqSep {
-    pub fn trailing_allowed(t: token::Token) -> SeqSep {
+    pub fn trailing_allowed(t: TokenKind) -> SeqSep {
         SeqSep {
             sep: Some(t),
             trailing_sep_allowed: true,
@@ -382,10 +383,12 @@ pub fn none() -> SeqSep {
 #[cfg(test)]
 mod tests {
     use super::*;
-    use crate::ast::{self, Ident, PatKind};
+    use crate::ast::{self, Name, PatKind};
     use crate::attr::first_attr_value_str_by_name;
     use crate::ptr::P;
+    use crate::parse::token::Token;
     use crate::print::pprust::item_to_string;
+    use crate::symbol::{kw, sym};
     use crate::tokenstream::{DelimSpan, TokenTree};
     use crate::util::parser_testing::string_to_stream;
     use crate::util::parser_testing::{string_to_expr, string_to_item};
@@ -417,49 +420,42 @@ fn sp(a: u32, b: u32) -> Span {
     #[test]
     fn string_to_tts_macro () {
         with_default_globals(|| {
-            use crate::symbol::sym;
-
             let tts: Vec<_> =
                 string_to_stream("macro_rules! zip (($a)=>($a))".to_string()).trees().collect();
             let tts: &[TokenTree] = &tts[..];
 
-            match (tts.len(), tts.get(0), tts.get(1), tts.get(2), tts.get(3)) {
-                (
-                    4,
-                    Some(&TokenTree::Token(_, token::Ident(name_macro_rules, false))),
-                    Some(&TokenTree::Token(_, token::Not)),
-                    Some(&TokenTree::Token(_, token::Ident(name_zip, false))),
-                    Some(&TokenTree::Delimited(_, macro_delim, ref macro_tts)),
-                )
-                if name_macro_rules.name == sym::macro_rules
-                && name_zip.name.as_str() == "zip" => {
+            match tts {
+                [
+                    TokenTree::Token(Token { kind: token::Ident(name_macro_rules, false), .. }),
+                    TokenTree::Token(Token { kind: token::Not, .. }),
+                    TokenTree::Token(Token { kind: token::Ident(name_zip, false), .. }),
+                    TokenTree::Delimited(_, macro_delim,  macro_tts)
+                ]
+                if name_macro_rules == &sym::macro_rules && name_zip.as_str() == "zip" => {
                     let tts = &macro_tts.trees().collect::<Vec<_>>();
-                    match (tts.len(), tts.get(0), tts.get(1), tts.get(2)) {
-                        (
-                            3,
-                            Some(&TokenTree::Delimited(_, first_delim, ref first_tts)),
-                            Some(&TokenTree::Token(_, token::FatArrow)),
-                            Some(&TokenTree::Delimited(_, second_delim, ref second_tts)),
-                        )
-                        if macro_delim == token::Paren => {
+                    match &tts[..] {
+                        [
+                            TokenTree::Delimited(_, first_delim, first_tts),
+                            TokenTree::Token(Token { kind: token::FatArrow, .. }),
+                            TokenTree::Delimited(_, second_delim, second_tts),
+                        ]
+                        if macro_delim == &token::Paren => {
                             let tts = &first_tts.trees().collect::<Vec<_>>();
-                            match (tts.len(), tts.get(0), tts.get(1)) {
-                                (
-                                    2,
-                                    Some(&TokenTree::Token(_, token::Dollar)),
-                                    Some(&TokenTree::Token(_, token::Ident(ident, false))),
-                                )
-                                if first_delim == token::Paren && ident.name.as_str() == "a" => {},
+                            match &tts[..] {
+                                [
+                                    TokenTree::Token(Token { kind: token::Dollar, .. }),
+                                    TokenTree::Token(Token { kind: token::Ident(name, false), .. }),
+                                ]
+                                if first_delim == &token::Paren && name.as_str() == "a" => {},
                                 _ => panic!("value 3: {:?} {:?}", first_delim, first_tts),
                             }
                             let tts = &second_tts.trees().collect::<Vec<_>>();
-                            match (tts.len(), tts.get(0), tts.get(1)) {
-                                (
-                                    2,
-                                    Some(&TokenTree::Token(_, token::Dollar)),
-                                    Some(&TokenTree::Token(_, token::Ident(ident, false))),
-                                )
-                                if second_delim == token::Paren && ident.name.as_str() == "a" => {},
+                            match &tts[..] {
+                                [
+                                    TokenTree::Token(Token { kind: token::Dollar, .. }),
+                                    TokenTree::Token(Token { kind: token::Ident(name, false), .. }),
+                                ]
+                                if second_delim == &token::Paren && name.as_str() == "a" => {},
                                 _ => panic!("value 4: {:?} {:?}", second_delim, second_tts),
                             }
                         },
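
A minimal, standalone sketch of the idiom change in the hunk above: instead of
matching a tuple of `(v.len(), v.get(0), v.get(1), ...)`, the slice is matched
directly with a slice pattern plus a guard. Toy data, not the rustc test itself:

fn looks_like_macro_call(tokens: &[&str]) -> bool {
    // Old style: match (tokens.len(), tokens.get(0), tokens.get(1)) with a length check.
    // New style: match the slice directly.
    match tokens {
        [name, "!"] if !name.is_empty() => true,
        _ => false,
    }
}

fn main() {
    assert!(looks_like_macro_call(&["println", "!"]));
    assert!(!looks_like_macro_call(&["println"]));
}
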
@@ -477,26 +473,23 @@ fn string_to_tts_1() {
             let tts = string_to_stream("fn a (b : i32) { b; }".to_string());
 
             let expected = TokenStream::new(vec![
-                TokenTree::Token(sp(0, 2), token::Ident(Ident::from_str("fn"), false)).into(),
-                TokenTree::Token(sp(3, 4), token::Ident(Ident::from_str("a"), false)).into(),
+                TokenTree::token(token::Ident(kw::Fn, false), sp(0, 2)).into(),
+                TokenTree::token(token::Ident(Name::intern("a"), false), sp(3, 4)).into(),
                 TokenTree::Delimited(
                     DelimSpan::from_pair(sp(5, 6), sp(13, 14)),
                     token::DelimToken::Paren,
                     TokenStream::new(vec![
-                        TokenTree::Token(sp(6, 7),
-                                         token::Ident(Ident::from_str("b"), false)).into(),
-                        TokenTree::Token(sp(8, 9), token::Colon).into(),
-                        TokenTree::Token(sp(10, 13),
-                                         token::Ident(Ident::from_str("i32"), false)).into(),
+                        TokenTree::token(token::Ident(Name::intern("b"), false), sp(6, 7)).into(),
+                        TokenTree::token(token::Colon, sp(8, 9)).into(),
+                        TokenTree::token(token::Ident(sym::i32, false), sp(10, 13)).into(),
                     ]).into(),
                 ).into(),
                 TokenTree::Delimited(
                     DelimSpan::from_pair(sp(15, 16), sp(20, 21)),
                     token::DelimToken::Brace,
                     TokenStream::new(vec![
-                        TokenTree::Token(sp(17, 18),
-                                         token::Ident(Ident::from_str("b"), false)).into(),
-                        TokenTree::Token(sp(18, 19), token::Semi).into(),
+                        TokenTree::token(token::Ident(Name::intern("b"), false), sp(17, 18)).into(),
+                        TokenTree::token(token::Semi, sp(18, 19)).into(),
                     ]).into(),
                 ).into()
             ]);
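
For orientation, a rough sketch of the data-shape change behind the new
`TokenTree::token(kind, span)` calls above: the former `(Span, TokenKind)` pair
is bundled into a single token value. The types here are simplified stand-ins,
not the actual libsyntax definitions:

#[derive(Clone, Copy, Debug, PartialEq)]
struct Span { lo: u32, hi: u32 }

#[allow(dead_code)] // only `Ident` is exercised below
#[derive(Clone, Debug, PartialEq)]
enum TokenKind { Ident(&'static str), Colon, Semi }

#[derive(Clone, Debug, PartialEq)]
struct Token { kind: TokenKind, span: Span }

impl Token {
    // Mirrors the constructor style used in the rewritten test.
    fn new(kind: TokenKind, span: Span) -> Self {
        Token { kind, span }
    }
}

fn main() {
    let tok = Token::new(TokenKind::Ident("b"), Span { lo: 6, hi: 7 });
    assert_eq!(tok.kind, TokenKind::Ident("b"));
    assert_eq!((tok.span.lo, tok.span.hi), (6, 7));
}
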
@@ -603,8 +596,6 @@ fn wb() -> c_int { O_WRONLY as c_int }
 
     #[test] fn crlf_doc_comments() {
         with_default_globals(|| {
-            use crate::symbol::sym;
-
             let sess = ParseSess::new(FilePathMapping::empty());
 
             let name_1 = FileName::Custom("crlf_source_1".to_string());
index 790013f6eb1286f6d81b51bcdd23743e990d5b15..d9eba3bbadb687a1964868c433541c8431766981 100644 (file)
 use crate::ast::{RangeEnd, RangeSyntax};
 use crate::{ast, attr};
 use crate::ext::base::DummyResult;
+use crate::ext::hygiene::SyntaxContext;
 use crate::source_map::{self, SourceMap, Spanned, respan};
 use crate::parse::{SeqSep, classify, literal, token};
-use crate::parse::lexer::{TokenAndSpan, UnmatchedBrace};
+use crate::parse::lexer::UnmatchedBrace;
 use crate::parse::lexer::comments::{doc_comment_style, strip_doc_comment_decoration};
-use crate::parse::token::DelimToken;
+use crate::parse::token::{Token, TokenKind, DelimToken};
 use crate::parse::{new_sub_parser_from_file, ParseSess, Directory, DirectoryOwnership};
 use crate::util::parser::{AssocOp, Fixity};
 use crate::print::pprust;
@@ -121,7 +122,7 @@ pub enum PathStyle {
 /// `token::Interpolated` tokens.
 macro_rules! maybe_whole_expr {
     ($p:expr) => {
-        if let token::Interpolated(nt) = &$p.token {
+        if let token::Interpolated(nt) = &$p.token.kind {
             match &**nt {
                 token::NtExpr(e) | token::NtLiteral(e) => {
                     let e = e.clone();
@@ -131,12 +132,16 @@ macro_rules! maybe_whole_expr {
                 token::NtPath(path) => {
                     let path = path.clone();
                     $p.bump();
-                    return Ok($p.mk_expr($p.span, ExprKind::Path(None, path), ThinVec::new()));
+                    return Ok($p.mk_expr(
+                        $p.token.span, ExprKind::Path(None, path), ThinVec::new()
+                    ));
                 }
                 token::NtBlock(block) => {
                     let block = block.clone();
                     $p.bump();
-                    return Ok($p.mk_expr($p.span, ExprKind::Block(block, None), ThinVec::new()));
+                    return Ok($p.mk_expr(
+                        $p.token.span, ExprKind::Block(block, None), ThinVec::new()
+                    ));
                 }
                 _ => {},
             };
@@ -147,7 +152,7 @@ macro_rules! maybe_whole_expr {
 /// As maybe_whole_expr, but for things other than expressions
 macro_rules! maybe_whole {
     ($p:expr, $constructor:ident, |$x:ident| $e:expr) => {
-        if let token::Interpolated(nt) = &$p.token {
+        if let token::Interpolated(nt) = &$p.token.kind {
             if let token::$constructor(x) = &**nt {
                 let $x = x.clone();
                 $p.bump();
@@ -161,7 +166,7 @@ macro_rules! maybe_whole {
 macro_rules! maybe_recover_from_interpolated_ty_qpath {
     ($self: expr, $allow_qpath_recovery: expr) => {
         if $allow_qpath_recovery && $self.look_ahead(1, |t| t == &token::ModSep) {
-            if let token::Interpolated(nt) = &$self.token {
+            if let token::Interpolated(nt) = &$self.token.kind {
                 if let token::NtTy(ty) = &**nt {
                     let ty = ty.clone();
                     $self.bump();
@@ -196,14 +201,17 @@ enum PrevTokenKind {
 #[derive(Clone)]
 pub struct Parser<'a> {
     pub sess: &'a ParseSess,
-    /// The current token.
-    pub token: token::Token,
-    /// The span of the current token.
-    pub span: Span,
+    /// The current normalized token.
+    /// "Normalized" means that some interpolated tokens
+    /// (`$i: ident` and `$l: lifetime` meta-variables) are replaced
+    /// with non-interpolated identifier and lifetime tokens they refer to.
+    /// Perhaps the normalized / non-normalized setup can be simplified somehow.
+    pub token: Token,
+    /// Span of the current non-normalized token.
     meta_var_span: Option<Span>,
-    /// The span of the previous token.
+    /// Span of the previous non-normalized token.
     pub prev_span: Span,
-    /// The kind of the previous troken.
+    /// Kind of the previous normalized token (in simplified form).
     prev_token_kind: PrevTokenKind,
     restrictions: Restrictions,
     /// Used to determine the path to externally loaded source files.
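
The "normalized token" comments above are about metavariables captured by
`macro_rules!`: when a macro body is re-parsed, a captured `$name:ident`
fragment reaches the parser as an interpolated token and is replaced with the
plain identifier it refers to. A user-level example of such a capture
(ordinary Rust, not parser internals):

macro_rules! make_getter {
    // `$name:ident` is the kind of metavariable the comment describes.
    ($name:ident) => {
        fn $name() -> u32 { 42 }
    };
}

make_getter!(answer);

fn main() {
    assert_eq!(answer(), 42);
}
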
@@ -295,7 +303,7 @@ fn new(sp: DelimSpan, delim: DelimToken, tts: &TokenStream) -> Self {
 }
 
 impl TokenCursor {
-    fn next(&mut self) -> TokenAndSpan {
+    fn next(&mut self) -> Token {
         loop {
             let tree = if !self.frame.open_delim {
                 self.frame.open_delim = true;
@@ -309,7 +317,7 @@ fn next(&mut self) -> TokenAndSpan {
                 self.frame = frame;
                 continue
             } else {
-                return TokenAndSpan { tok: token::Eof, sp: DUMMY_SP }
+                return Token::new(token::Eof, DUMMY_SP);
             };
 
             match self.frame.last_token {
@@ -318,7 +326,7 @@ fn next(&mut self) -> TokenAndSpan {
             }
 
             match tree {
-                TokenTree::Token(sp, tok) => return TokenAndSpan { tok: tok, sp: sp },
+                TokenTree::Token(token) => return token,
                 TokenTree::Delimited(sp, delim, tts) => {
                     let frame = TokenCursorFrame::new(sp, delim, &tts);
                     self.stack.push(mem::replace(&mut self.frame, frame));
@@ -327,9 +335,9 @@ fn next(&mut self) -> TokenAndSpan {
         }
     }
 
-    fn next_desugared(&mut self) -> TokenAndSpan {
-        let (sp, name) = match self.next() {
-            TokenAndSpan { sp, tok: token::DocComment(name) } => (sp, name),
+    fn next_desugared(&mut self) -> Token {
+        let (name, sp) = match self.next() {
+            Token { kind: token::DocComment(name), span } => (name, span),
             tok => return tok,
         };
 
@@ -353,11 +361,11 @@ fn next_desugared(&mut self) -> TokenAndSpan {
             delim_span,
             token::Bracket,
             [
-                TokenTree::Token(sp, token::Ident(ast::Ident::with_empty_ctxt(sym::doc), false)),
-                TokenTree::Token(sp, token::Eq),
-                TokenTree::Token(sp, token::Token::lit(
+                TokenTree::token(token::Ident(sym::doc, false), sp),
+                TokenTree::token(token::Eq, sp),
+                TokenTree::token(TokenKind::lit(
                     token::StrRaw(num_of_hashes), Symbol::intern(&stripped), None
-                )),
+                ), sp),
             ]
             .iter().cloned().collect::<TokenStream>().into(),
         );
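
`next_desugared` above rewrites a doc comment into `#[doc = ...]` attribute
token trees. At the surface level the two spellings are equivalent, which a
plain example shows:

/// Adds one.
fn plus_one_a(x: u32) -> u32 { x + 1 }

// The attribute form the desugaring corresponds to.
#[doc = " Adds one."]
fn plus_one_b(x: u32) -> u32 { x + 1 }

fn main() {
    assert_eq!(plus_one_a(1), plus_one_b(1));
}
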
@@ -366,10 +374,10 @@ fn next_desugared(&mut self) -> TokenAndSpan {
             delim_span,
             token::NoDelim,
             &if doc_comment_style(&name.as_str()) == AttrStyle::Inner {
-                [TokenTree::Token(sp, token::Pound), TokenTree::Token(sp, token::Not), body]
+                [TokenTree::token(token::Pound, sp), TokenTree::token(token::Not, sp), body]
                     .iter().cloned().collect::<TokenStream>().into()
             } else {
-                [TokenTree::Token(sp, token::Pound), body]
+                [TokenTree::token(token::Pound, sp), body]
                     .iter().cloned().collect::<TokenStream>().into()
             },
         )));
@@ -380,7 +388,7 @@ fn next_desugared(&mut self) -> TokenAndSpan {
 
 #[derive(Clone, PartialEq)]
 crate enum TokenType {
-    Token(token::Token),
+    Token(TokenKind),
     Keyword(Symbol),
     Operator,
     Lifetime,
@@ -393,7 +401,7 @@ fn next_desugared(&mut self) -> TokenAndSpan {
 impl TokenType {
     crate fn to_string(&self) -> String {
         match *self {
-            TokenType::Token(ref t) => format!("`{}`", pprust::token_to_string(t)),
+            TokenType::Token(ref t) => format!("`{}`", pprust::token_kind_to_string(t)),
             TokenType::Keyword(kw) => format!("`{}`", kw),
             TokenType::Operator => "an operator".to_string(),
             TokenType::Lifetime => "lifetime".to_string(),
@@ -410,7 +418,7 @@ impl TokenType {
 ///
 /// Types can also be of the form `IDENT(u8, u8) -> u8`, however this assumes
 /// that `IDENT` is not the ident of a fn trait.
-fn can_continue_type_after_non_fn_ident(t: &token::Token) -> bool {
+fn can_continue_type_after_non_fn_ident(t: &Token) -> bool {
     t == &token::ModSep || t == &token::Lt ||
     t == &token::BinOp(token::Shl)
 }
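
The doc comment above distinguishes two ways a type can continue after an
identifier: ordinary path/generic continuations (`::`, `<`) versus the
`IDENT(u8, u8) -> u8` fn-trait sugar. Both shapes in ordinary Rust:

type A = std::vec::Vec<u8>;     // identifier continued by `::` and `<`
type B = dyn Fn(u8, u8) -> u8;  // the parenthesized fn-trait form

fn main() {
    let _v: A = Vec::new();
    let _f: Box<B> = Box::new(|x: u8, y: u8| x + y);
}
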
@@ -468,8 +476,7 @@ pub fn new(
     ) -> Self {
         let mut parser = Parser {
             sess,
-            token: token::Whitespace,
-            span: DUMMY_SP,
+            token: Token::dummy(),
             prev_span: DUMMY_SP,
             meta_var_span: None,
             prev_token_kind: PrevTokenKind::Other,
@@ -498,14 +505,13 @@ pub fn new(
             subparser_name,
         };
 
-        let tok = parser.next_tok();
-        parser.token = tok.tok;
-        parser.span = tok.sp;
+        parser.token = parser.next_tok();
 
         if let Some(directory) = directory {
             parser.directory = directory;
-        } else if !parser.span.is_dummy() {
-            if let FileName::Real(mut path) = sess.source_map().span_to_unmapped_path(parser.span) {
+        } else if !parser.token.span.is_dummy() {
+            if let FileName::Real(mut path) =
+                    sess.source_map().span_to_unmapped_path(parser.token.span) {
                 path.pop();
                 parser.directory.path = Cow::from(path);
             }
@@ -515,15 +521,15 @@ pub fn new(
         parser
     }
 
-    fn next_tok(&mut self) -> TokenAndSpan {
+    fn next_tok(&mut self) -> Token {
         let mut next = if self.desugar_doc_comments {
             self.token_cursor.next_desugared()
         } else {
             self.token_cursor.next()
         };
-        if next.sp.is_dummy() {
+        if next.span.is_dummy() {
             // Tweak the location for better diagnostics, but keep syntactic context intact.
-            next.sp = self.prev_span.with_ctxt(next.sp.ctxt());
+            next.span = self.prev_span.with_ctxt(next.span.ctxt());
         }
         next
     }
@@ -534,10 +540,10 @@ pub fn this_token_to_string(&self) -> String {
     }
 
     crate fn token_descr(&self) -> Option<&'static str> {
-        Some(match &self.token {
-            t if t.is_special_ident() => "reserved identifier",
-            t if t.is_used_keyword() => "keyword",
-            t if t.is_unused_keyword() => "reserved keyword",
+        Some(match &self.token.kind {
+            _ if self.token.is_special_ident() => "reserved identifier",
+            _ if self.token.is_used_keyword() => "keyword",
+            _ if self.token.is_unused_keyword() => "reserved keyword",
             token::DocComment(..) => "doc comment",
             _ => return None,
         })
@@ -559,7 +565,7 @@ pub fn this_token_to_string(&self) -> String {
     }
 
     /// Expects and consumes the token `t`. Signals an error if the next token is not `t`.
-    pub fn expect(&mut self, t: &token::Token) -> PResult<'a, bool /* recovered */> {
+    pub fn expect(&mut self, t: &TokenKind) -> PResult<'a, bool /* recovered */> {
         if self.expected_tokens.is_empty() {
             if self.token == *t {
                 self.bump();
@@ -577,16 +583,16 @@ pub fn expect(&mut self, t: &token::Token) -> PResult<'a, bool /* recovered */>
     /// anything.  Signal a fatal error if next token is unexpected.
     pub fn expect_one_of(
         &mut self,
-        edible: &[token::Token],
-        inedible: &[token::Token],
+        edible: &[TokenKind],
+        inedible: &[TokenKind],
     ) -> PResult<'a, bool /* recovered */> {
-        if edible.contains(&self.token) {
+        if edible.contains(&self.token.kind) {
             self.bump();
             Ok(false)
-        } else if inedible.contains(&self.token) {
+        } else if inedible.contains(&self.token.kind) {
             // leave it in the input
             Ok(false)
-        } else if self.last_unexpected_token_span == Some(self.span) {
+        } else if self.last_unexpected_token_span == Some(self.token.span) {
             FatalError.raise();
         } else {
             self.expected_one_of_not_found(edible, inedible)
@@ -612,8 +618,8 @@ pub fn parse_ident(&mut self) -> PResult<'a, ast::Ident> {
     }
 
     fn parse_ident_common(&mut self, recover: bool) -> PResult<'a, ast::Ident> {
-        match self.token {
-            token::Ident(ident, _) => {
+        match self.token.kind {
+            token::Ident(name, _) => {
                 if self.token.is_reserved_ident() {
                     let mut err = self.expected_ident_found();
                     if recover {
@@ -622,9 +628,9 @@ fn parse_ident_common(&mut self, recover: bool) -> PResult<'a, ast::Ident> {
                         return Err(err);
                     }
                 }
-                let span = self.span;
+                let span = self.token.span;
                 self.bump();
-                Ok(Ident::new(ident.name, span))
+                Ok(Ident::new(name, span))
             }
             _ => {
                 Err(if self.prev_token_kind == PrevTokenKind::DocComment {
@@ -640,14 +646,14 @@ fn parse_ident_common(&mut self, recover: bool) -> PResult<'a, ast::Ident> {
     ///
     /// This method will automatically add `tok` to `expected_tokens` if `tok` is not
     /// encountered.
-    crate fn check(&mut self, tok: &token::Token) -> bool {
+    crate fn check(&mut self, tok: &TokenKind) -> bool {
         let is_present = self.token == *tok;
         if !is_present { self.expected_tokens.push(TokenType::Token(tok.clone())); }
         is_present
     }
 
     /// Consumes a token 'tok' if it exists. Returns whether the given token was present.
-    pub fn eat(&mut self, tok: &token::Token) -> bool {
+    pub fn eat(&mut self, tok: &TokenKind) -> bool {
         let is_present = self.check(tok);
         if is_present { self.bump() }
         is_present
@@ -732,13 +738,13 @@ fn check_const_arg(&mut self) -> bool {
     /// See issue #47856 for an example of when this may occur.
     fn eat_plus(&mut self) -> bool {
         self.expected_tokens.push(TokenType::Token(token::BinOp(token::Plus)));
-        match self.token {
+        match self.token.kind {
             token::BinOp(token::Plus) => {
                 self.bump();
                 true
             }
             token::BinOpEq(token::Plus) => {
-                let span = self.span.with_lo(self.span.lo() + BytePos(1));
+                let span = self.token.span.with_lo(self.token.span.lo() + BytePos(1));
                 self.bump_with(token::Eq, span);
                 true
             }
@@ -763,13 +769,13 @@ fn check_plus(&mut self) -> bool {
     /// `&` and continues. If an `&` is not seen, signals an error.
     fn expect_and(&mut self) -> PResult<'a, ()> {
         self.expected_tokens.push(TokenType::Token(token::BinOp(token::And)));
-        match self.token {
+        match self.token.kind {
             token::BinOp(token::And) => {
                 self.bump();
                 Ok(())
             }
             token::AndAnd => {
-                let span = self.span.with_lo(self.span.lo() + BytePos(1));
+                let span = self.token.span.with_lo(self.token.span.lo() + BytePos(1));
                 Ok(self.bump_with(token::BinOp(token::And), span))
             }
             _ => self.unexpected()
@@ -780,13 +786,13 @@ fn expect_and(&mut self) -> PResult<'a, ()> {
     /// `|` and continues. If an `|` is not seen, signals an error.
     fn expect_or(&mut self) -> PResult<'a, ()> {
         self.expected_tokens.push(TokenType::Token(token::BinOp(token::Or)));
-        match self.token {
+        match self.token.kind {
             token::BinOp(token::Or) => {
                 self.bump();
                 Ok(())
             }
             token::OrOr => {
-                let span = self.span.with_lo(self.span.lo() + BytePos(1));
+                let span = self.token.span.with_lo(self.token.span.lo() + BytePos(1));
                 Ok(self.bump_with(token::BinOp(token::Or), span))
             }
             _ => self.unexpected()
@@ -805,18 +811,18 @@ fn expect_no_suffix(&self, sp: Span, kind: &str, suffix: Option<ast::Name>) {
     /// starting token.
     fn eat_lt(&mut self) -> bool {
         self.expected_tokens.push(TokenType::Token(token::Lt));
-        let ate = match self.token {
+        let ate = match self.token.kind {
             token::Lt => {
                 self.bump();
                 true
             }
             token::BinOp(token::Shl) => {
-                let span = self.span.with_lo(self.span.lo() + BytePos(1));
+                let span = self.token.span.with_lo(self.token.span.lo() + BytePos(1));
                 self.bump_with(token::Lt, span);
                 true
             }
             token::LArrow => {
-                let span = self.span.with_lo(self.span.lo() + BytePos(1));
+                let span = self.token.span.with_lo(self.token.span.lo() + BytePos(1));
                 self.bump_with(token::BinOp(token::Minus), span);
                 true
             }
@@ -845,21 +851,21 @@ fn expect_lt(&mut self) -> PResult<'a, ()> {
     /// with a single `>` and continues. If a `>` is not seen, signals an error.
     fn expect_gt(&mut self) -> PResult<'a, ()> {
         self.expected_tokens.push(TokenType::Token(token::Gt));
-        let ate = match self.token {
+        let ate = match self.token.kind {
             token::Gt => {
                 self.bump();
                 Some(())
             }
             token::BinOp(token::Shr) => {
-                let span = self.span.with_lo(self.span.lo() + BytePos(1));
+                let span = self.token.span.with_lo(self.token.span.lo() + BytePos(1));
                 Some(self.bump_with(token::Gt, span))
             }
             token::BinOpEq(token::Shr) => {
-                let span = self.span.with_lo(self.span.lo() + BytePos(1));
+                let span = self.token.span.with_lo(self.token.span.lo() + BytePos(1));
                 Some(self.bump_with(token::Ge, span))
             }
             token::Ge => {
-                let span = self.span.with_lo(self.span.lo() + BytePos(1));
+                let span = self.token.span.with_lo(self.token.span.lo() + BytePos(1));
                 Some(self.bump_with(token::Eq, span))
             }
             _ => None,
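
`eat_lt` and `expect_gt` above use `bump_with` to split multi-character tokens
such as `<<`, `>>`, and `>=` into their first character plus a remainder, so
the nested-generics syntax below parses even though its closing `>>` is lexed
as a single token:

fn main() {
    // The trailing `>>` is one lexer token; the parser consumes it as two `>`s.
    let nested: Vec<Vec<u8>> = vec![vec![1, 2], vec![3]];
    assert_eq!(nested.len(), 2);
    assert_eq!(nested[0].len(), 2);
}
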
@@ -883,7 +889,7 @@ fn expect_gt(&mut self) -> PResult<'a, ()> {
     /// `f` must consume tokens until reaching the next separator or
     /// closing bracket.
     pub fn parse_seq_to_end<T, F>(&mut self,
-                                  ket: &token::Token,
+                                  ket: &TokenKind,
                                   sep: SeqSep,
                                   f: F)
                                   -> PResult<'a, Vec<T>> where
@@ -901,7 +907,7 @@ pub fn parse_seq_to_end<T, F>(&mut self,
     /// closing bracket.
     pub fn parse_seq_to_before_end<T, F>(
         &mut self,
-        ket: &token::Token,
+        ket: &TokenKind,
         sep: SeqSep,
         f: F,
     ) -> PResult<'a, (Vec<T>, bool)>
@@ -912,7 +918,7 @@ pub fn parse_seq_to_before_end<T, F>(
 
     crate fn parse_seq_to_before_tokens<T, F>(
         &mut self,
-        kets: &[&token::Token],
+        kets: &[&TokenKind],
         sep: SeqSep,
         expect: TokenExpectType,
         mut f: F,
@@ -928,7 +934,7 @@ pub fn parse_seq_to_before_end<T, F>(
                     TokenExpectType::NoExpect => self.token == **k,
                 }
             }) {
-            match self.token {
+            match self.token.kind {
                 token::CloseDelim(..) | token::Eof => break,
                 _ => {}
             };
@@ -945,7 +951,7 @@ pub fn parse_seq_to_before_end<T, F>(
                         Err(mut e) => {
                             // Attempt to keep parsing if it was a similar separator
                             if let Some(ref tokens) = t.similar_tokens() {
-                                if tokens.contains(&self.token) {
+                                if tokens.contains(&self.token.kind) {
                                     self.bump();
                                 }
                             }
@@ -986,8 +992,8 @@ pub fn parse_seq_to_before_end<T, F>(
     /// closing bracket.
     fn parse_unspanned_seq<T, F>(
         &mut self,
-        bra: &token::Token,
-        ket: &token::Token,
+        bra: &TokenKind,
+        ket: &TokenKind,
         sep: SeqSep,
         f: F,
     ) -> PResult<'a, Vec<T>> where
@@ -1008,10 +1014,10 @@ pub fn bump(&mut self) {
             self.bug("attempted to bump the parser past EOF (may be stuck in a loop)");
         }
 
-        self.prev_span = self.meta_var_span.take().unwrap_or(self.span);
+        self.prev_span = self.meta_var_span.take().unwrap_or(self.token.span);
 
         // Record last token kind for possible error recovery.
-        self.prev_token_kind = match self.token {
+        self.prev_token_kind = match self.token.kind {
             token::DocComment(..) => PrevTokenKind::DocComment,
             token::Comma => PrevTokenKind::Comma,
             token::BinOp(token::Plus) => PrevTokenKind::Plus,
@@ -1022,9 +1028,7 @@ pub fn bump(&mut self) {
             _ => PrevTokenKind::Other,
         };
 
-        let next = self.next_tok();
-        self.span = next.sp;
-        self.token = next.tok;
+        self.token = self.next_tok();
         self.expected_tokens.clear();
         // check after each token
         self.process_potential_macro_variable();
@@ -1032,45 +1036,34 @@ pub fn bump(&mut self) {
 
     /// Advance the parser using provided token as a next one. Use this when
     /// consuming a part of a token. For example a single `<` from `<<`.
-    fn bump_with(&mut self, next: token::Token, span: Span) {
-        self.prev_span = self.span.with_hi(span.lo());
+    fn bump_with(&mut self, next: TokenKind, span: Span) {
+        self.prev_span = self.token.span.with_hi(span.lo());
         // It would be incorrect to record the kind of the current token, but
         // fortunately for tokens currently using `bump_with`, the
         // prev_token_kind will be of no use anyway.
         self.prev_token_kind = PrevTokenKind::Other;
-        self.span = span;
-        self.token = next;
+        self.token = Token::new(next, span);
         self.expected_tokens.clear();
     }
 
     pub fn look_ahead<R, F>(&self, dist: usize, f: F) -> R where
-        F: FnOnce(&token::Token) -> R,
+        F: FnOnce(&Token) -> R,
     {
         if dist == 0 {
-            return f(&self.token)
+            return f(&self.token);
         }
 
-        f(&match self.token_cursor.frame.tree_cursor.look_ahead(dist - 1) {
+        let frame = &self.token_cursor.frame;
+        f(&match frame.tree_cursor.look_ahead(dist - 1) {
             Some(tree) => match tree {
-                TokenTree::Token(_, tok) => tok,
-                TokenTree::Delimited(_, delim, _) => token::OpenDelim(delim),
-            },
-            None => token::CloseDelim(self.token_cursor.frame.delim),
+                TokenTree::Token(token) => token,
+                TokenTree::Delimited(dspan, delim, _) =>
+                    Token::new(token::OpenDelim(delim), dspan.open),
+            }
+            None => Token::new(token::CloseDelim(frame.delim), frame.span.close)
         })
     }
 
-    crate fn look_ahead_span(&self, dist: usize) -> Span {
-        if dist == 0 {
-            return self.span
-        }
-
-        match self.token_cursor.frame.tree_cursor.look_ahead(dist - 1) {
-            Some(TokenTree::Token(span, _)) => span,
-            Some(TokenTree::Delimited(span, ..)) => span.entire(),
-            None => self.look_ahead_span(dist - 1),
-        }
-    }
-
     /// Returns whether any of the given keywords are `dist` tokens ahead of the current one.
     fn is_keyword_ahead(&self, dist: usize, kws: &[Symbol]) -> bool {
         self.look_ahead(dist, |t| kws.iter().any(|&kw| t.is_keyword(kw)))
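
A minimal sketch of the lookahead behaviour that `look_ahead` and
`is_keyword_ahead` above rely on: inspect the token `dist` positions ahead
without consuming it. Toy types with a flat token buffer are assumed here,
not the real `TokenCursor` frames:

struct Cursor<'a> {
    tokens: &'a [&'a str],
    pos: usize,
}

impl<'a> Cursor<'a> {
    // Apply `f` to the token `dist` positions ahead (None past the end) without advancing.
    fn look_ahead<R>(&self, dist: usize, f: impl FnOnce(Option<&str>) -> R) -> R {
        f(self.tokens.get(self.pos + dist).copied())
    }

    // Consume the current token.
    fn bump(&mut self) {
        self.pos += 1;
    }
}

fn main() {
    let toks = ["fn", "main", "(", ")"];
    let mut c = Cursor { tokens: &toks, pos: 0 };
    assert!(c.look_ahead(1, |t| t == Some("main")));
    c.bump();
    assert!(c.look_ahead(0, |t| t == Some("main")));
}
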
@@ -1162,7 +1155,7 @@ pub fn parse_trait_item(&mut self, at_end: &mut bool) -> PResult<'a, TraitItem>
     fn parse_trait_item_(&mut self,
                          at_end: &mut bool,
                          mut attrs: Vec<Attribute>) -> PResult<'a, TraitItem> {
-        let lo = self.span;
+        let lo = self.token.span;
         self.eat_bad_pub();
         let (name, node, generics) = if self.eat_keyword(kw::Type) {
             self.parse_trait_item_assoc_ty()?
@@ -1195,7 +1188,7 @@ fn parse_trait_item_(&mut self,
                 // definition...
 
                 // We don't allow argument names to be left off in edition 2018.
-                p.parse_arg_general(p.span.rust_2018(), true, false)
+                p.parse_arg_general(p.token.span.rust_2018(), true, false)
             })?;
             generics.where_clause = self.parse_where_clause()?;
 
@@ -1209,7 +1202,7 @@ fn parse_trait_item_(&mut self,
                 decl,
             };
 
-            let body = match self.token {
+            let body = match self.token.kind {
                 token::Semi => {
                     self.bump();
                     *at_end = true;
@@ -1259,7 +1252,7 @@ fn parse_ret_ty(&mut self, allow_plus: bool) -> PResult<'a, FunctionRetTy> {
         if self.eat(&token::RArrow) {
             Ok(FunctionRetTy::Ty(self.parse_ty_common(allow_plus, true, false)?))
         } else {
-            Ok(FunctionRetTy::Default(self.span.shrink_to_lo()))
+            Ok(FunctionRetTy::Default(self.token.span.shrink_to_lo()))
         }
     }
 
@@ -1283,7 +1276,7 @@ fn parse_ty_common(&mut self, allow_plus: bool, allow_qpath_recovery: bool,
         maybe_recover_from_interpolated_ty_qpath!(self, allow_qpath_recovery);
         maybe_whole!(self, NtTy, |x| x);
 
-        let lo = self.span;
+        let lo = self.token.span;
         let mut impl_dyn_multi = false;
         let node = if self.eat(&token::OpenDelim(token::Paren)) {
             // `(TYPE)` is a parenthesized type.
@@ -1367,7 +1360,7 @@ fn parse_ty_common(&mut self, allow_plus: bool, allow_qpath_recovery: bool,
             // Function pointer type or bound list (trait object type) starting with a poly-trait.
             //   `for<'lt> [unsafe] [extern "ABI"] fn (&'lt S) -> T`
             //   `for<'lt> Trait1<'lt> + Trait2 + 'a`
-            let lo = self.span;
+            let lo = self.token.span;
             let lifetime_defs = self.parse_late_bound_lifetime_defs()?;
             if self.token_is_bare_fn_keyword() {
                 self.parse_ty_bare_fn(lifetime_defs)?
@@ -1382,7 +1375,7 @@ fn parse_ty_common(&mut self, allow_plus: bool, allow_qpath_recovery: bool,
             impl_dyn_multi = bounds.len() > 1 || self.prev_token_kind == PrevTokenKind::Plus;
             TyKind::ImplTrait(ast::DUMMY_NODE_ID, bounds)
         } else if self.check_keyword(kw::Dyn) &&
-                  (self.span.rust_2018() ||
+                  (self.token.span.rust_2018() ||
                    self.look_ahead(1, |t| t.can_begin_bound() &&
                                           !can_continue_type_after_non_fn_ident(t))) {
             self.bump(); // `dyn`
@@ -1477,7 +1470,7 @@ fn parse_ptr(&mut self) -> PResult<'a, MutTy> {
     }
 
     fn is_named_argument(&self) -> bool {
-        let offset = match self.token {
+        let offset = match self.token.kind {
             token::Interpolated(ref nt) => match **nt {
                 token::NtPat(..) => return self.look_ahead(1, |t| t == &token::Colon),
                 _ => 0,
@@ -1595,9 +1588,9 @@ fn maybe_parse_fixed_length_of_vec(&mut self) -> PResult<'a, Option<P<ast::Expr>
     crate fn parse_literal_maybe_minus(&mut self) -> PResult<'a, P<Expr>> {
         maybe_whole_expr!(self);
 
-        let minus_lo = self.span;
+        let minus_lo = self.token.span;
         let minus_present = self.eat(&token::BinOp(token::Minus));
-        let lo = self.span;
+        let lo = self.token.span;
         let literal = self.parse_lit()?;
         let hi = self.prev_span;
         let expr = self.mk_expr(lo.to(hi), ExprKind::Lit(literal), ThinVec::new());
@@ -1612,22 +1605,22 @@ fn maybe_parse_fixed_length_of_vec(&mut self) -> PResult<'a, Option<P<ast::Expr>
     }
 
     fn parse_path_segment_ident(&mut self) -> PResult<'a, ast::Ident> {
-        match self.token {
-            token::Ident(ident, _) if self.token.is_path_segment_keyword() => {
-                let span = self.span;
+        match self.token.kind {
+            token::Ident(name, _) if name.is_path_segment_keyword() => {
+                let span = self.token.span;
                 self.bump();
-                Ok(Ident::new(ident.name, span))
+                Ok(Ident::new(name, span))
             }
             _ => self.parse_ident(),
         }
     }
 
     fn parse_ident_or_underscore(&mut self) -> PResult<'a, ast::Ident> {
-        match self.token {
-            token::Ident(ident, false) if ident.name == kw::Underscore => {
-                let span = self.span;
+        match self.token.kind {
+            token::Ident(name, false) if name == kw::Underscore => {
+                let span = self.token.span;
                 self.bump();
-                Ok(Ident::new(ident.name, span))
+                Ok(Ident::new(name, span))
             }
             _ => self.parse_ident(),
         }
@@ -1653,11 +1646,11 @@ fn parse_qpath(&mut self, style: PathStyle) -> PResult<'a, (QSelf, ast::Path)> {
         // span in the case of something like `<T>::Bar`.
         let (mut path, path_span);
         if self.eat_keyword(kw::As) {
-            let path_lo = self.span;
+            let path_lo = self.token.span;
             path = self.parse_path(PathStyle::Type)?;
             path_span = path_lo.to(self.prev_span);
         } else {
-            path_span = self.span.to(self.span);
+            path_span = self.token.span.to(self.token.span);
             path = ast::Path { segments: Vec::new(), span: path_span };
         }
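
`parse_qpath` above handles qualified paths such as `<T as Trait>::Item`. A
self-contained example of that form in ordinary Rust:

fn first<I: IntoIterator>(iter: I) -> Option<<I as IntoIterator>::Item> {
    iter.into_iter().next()
}

fn main() {
    assert_eq!(first(vec![10, 20, 30]), Some(10));
}
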
 
@@ -1695,9 +1688,9 @@ pub fn parse_path(&mut self, style: PathStyle) -> PResult<'a, ast::Path> {
             path
         });
 
-        let lo = self.meta_var_span.unwrap_or(self.span);
+        let lo = self.meta_var_span.unwrap_or(self.token.span);
         let mut segments = Vec::new();
-        let mod_sep_ctxt = self.span.ctxt();
+        let mod_sep_ctxt = self.token.span.ctxt();
         if self.eat(&token::ModSep) {
             segments.push(PathSegment::path_root(lo.shrink_to_lo().with_ctxt(mod_sep_ctxt)));
         }
@@ -1710,7 +1703,7 @@ pub fn parse_path(&mut self, style: PathStyle) -> PResult<'a, ast::Path> {
     /// backwards-compatibility. This is used when parsing derive macro paths in `#[derive]`
     /// attributes.
     pub fn parse_path_allowing_meta(&mut self, style: PathStyle) -> PResult<'a, ast::Path> {
-        let meta_ident = match self.token {
+        let meta_ident = match self.token.kind {
             token::Interpolated(ref nt) => match **nt {
                 token::NtMeta(ref meta) => match meta.node {
                     ast::MetaItemKind::Word => Some(meta.path.clone()),
@@ -1763,7 +1756,7 @@ pub fn parse_path_allowing_meta(&mut self, style: PathStyle) -> PResult<'a, ast:
     fn parse_path_segment(&mut self, style: PathStyle) -> PResult<'a, PathSegment> {
         let ident = self.parse_path_segment_ident()?;
 
-        let is_args_start = |token: &token::Token| match *token {
+        let is_args_start = |token: &Token| match token.kind {
             token::Lt | token::BinOp(token::Shl) | token::OpenDelim(token::Paren)
             | token::LArrow => true,
             _ => false,
@@ -1788,7 +1781,7 @@ fn parse_path_segment(&mut self, style: PathStyle) -> PResult<'a, PathSegment> {
 
             // Generic arguments are found - `<`, `(`, `::<` or `::(`.
             self.eat(&token::ModSep);
-            let lo = self.span;
+            let lo = self.token.span;
             let args = if self.eat_lt() {
                 // `<'a, T, A = U>`
                 let (args, constraints) =
@@ -1831,17 +1824,17 @@ fn parse_path_segment(&mut self, style: PathStyle) -> PResult<'a, PathSegment> {
     /// Parses a single lifetime `'a` or panics.
     crate fn expect_lifetime(&mut self) -> Lifetime {
         if let Some(ident) = self.token.lifetime() {
-            let span = self.span;
+            let span = self.token.span;
             self.bump();
             Lifetime { ident: Ident::new(ident.name, span), id: ast::DUMMY_NODE_ID }
         } else {
-            self.span_bug(self.span, "not a lifetime")
+            self.span_bug(self.token.span, "not a lifetime")
         }
     }
 
     fn eat_label(&mut self) -> Option<Label> {
         if let Some(ident) = self.token.lifetime() {
-            let span = self.span;
+            let span = self.token.span;
             self.bump();
             Some(Label { ident: Ident::new(ident.name, span) })
         } else {
@@ -1859,8 +1852,9 @@ fn parse_mutability(&mut self) -> Mutability {
     }
 
     fn parse_field_name(&mut self) -> PResult<'a, Ident> {
-        if let token::Literal(token::Lit { kind: token::Integer, symbol, suffix }) = self.token {
-            self.expect_no_suffix(self.span, "a tuple index", suffix);
+        if let token::Literal(token::Lit { kind: token::Integer, symbol, suffix }) =
+                self.token.kind {
+            self.expect_no_suffix(self.token.span, "a tuple index", suffix);
             self.bump();
             Ok(Ident::new(symbol, self.prev_span))
         } else {
@@ -1871,7 +1865,7 @@ fn parse_field_name(&mut self) -> PResult<'a, Ident> {
     /// Parse ident (COLON expr)?
     fn parse_field(&mut self) -> PResult<'a, Field> {
         let attrs = self.parse_outer_attributes()?;
-        let lo = self.span;
+        let lo = self.token.span;
 
         // Check if a colon exists one ahead. This means we're parsing a fieldname.
         let (fieldname, expr, is_shorthand) = if self.look_ahead(1, |t| {
@@ -1883,9 +1877,9 @@ fn parse_field(&mut self) -> PResult<'a, Field> {
             // initialize a field with an eq rather than a colon.
             if self.token == token::Eq {
                 self.diagnostic()
-                    .struct_span_err(self.span, "expected `:`, found `=`")
+                    .struct_span_err(self.token.span, "expected `:`, found `=`")
                     .span_suggestion(
-                        fieldname.span.shrink_to_hi().to(self.span),
+                        fieldname.span.shrink_to_hi().to(self.token.span),
                         "replace equals symbol with a colon",
                         ":".to_string(),
                         Applicability::MachineApplicable,
@@ -1937,7 +1931,7 @@ fn mk_range(&self,
                     limits: RangeLimits)
                     -> PResult<'a, ast::ExprKind> {
         if end.is_none() && limits == RangeLimits::Closed {
-            Err(self.span_fatal_err(self.span, Error::InclusiveRangeWithNoEnd))
+            Err(self.span_fatal_err(self.token.span, Error::InclusiveRangeWithNoEnd))
         } else {
             Ok(ExprKind::Range(start, end, limits))
         }
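
`mk_range` above rejects an inclusive range with no end. The rule it enforces,
in plain Rust:

fn main() {
    let inclusive = 1..=3;   // inclusive range with an end
    let open_ended = 1..;    // half-open `RangeFrom`; no end required
    // let bad = 1..=;       // rejected: inclusive range with no end
    assert_eq!(inclusive.sum::<u32>(), 6);
    assert_eq!(open_ended.take(3).sum::<u32>(), 6);
}
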
@@ -1949,12 +1943,12 @@ fn mk_assign_op(&self, binop: ast::BinOp,
     }
 
     fn expect_delimited_token_tree(&mut self) -> PResult<'a, (MacDelimiter, TokenStream)> {
-        let delim = match self.token {
+        let delim = match self.token.kind {
             token::OpenDelim(delim) => delim,
             _ => {
                 let msg = "expected open delimiter";
                 let mut err = self.fatal(msg);
-                err.span_label(self.span, msg);
+                err.span_label(self.token.span, msg);
                 return Err(err)
             }
         };
@@ -1987,13 +1981,13 @@ fn parse_bottom_expr(&mut self) -> PResult<'a, P<Expr>> {
         // attributes by giving them a empty "already parsed" list.
         let mut attrs = ThinVec::new();
 
-        let lo = self.span;
-        let mut hi = self.span;
+        let lo = self.token.span;
+        let mut hi = self.token.span;
 
         let ex: ExprKind;
 
-        // Note: when adding new syntax here, don't forget to adjust Token::can_begin_expr().
-        match self.token {
+        // Note: when adding new syntax here, don't forget to adjust TokenKind::can_begin_expr().
+        match self.token.kind {
             token::OpenDelim(token::Paren) => {
                 self.bump();
 
@@ -2117,7 +2111,7 @@ fn parse_bottom_expr(&mut self) -> PResult<'a, P<Expr>> {
                     }
                     let msg = "expected `while`, `for`, `loop` or `{` after a label";
                     let mut err = self.fatal(msg);
-                    err.span_label(self.span, msg);
+                    err.span_label(self.token.span, msg);
                     return Err(err);
                 }
                 if self.eat_keyword(kw::Loop) {
@@ -2150,13 +2144,13 @@ fn parse_bottom_expr(&mut self) -> PResult<'a, P<Expr>> {
                     return Err(db);
                 }
                 if self.is_try_block() {
-                    let lo = self.span;
+                    let lo = self.token.span;
                     assert!(self.eat_keyword(kw::Try));
                     return self.parse_try_block(lo, attrs);
                 }
 
                 // Span::rust_2018() is somewhat expensive; don't get it repeatedly.
-                let is_span_rust_2018 = self.span.rust_2018();
+                let is_span_rust_2018 = self.token.span.rust_2018();
                 if is_span_rust_2018 && self.check_keyword(kw::Async) {
                     return if self.is_async_block() { // check for `async {` and `async move {`
                         self.parse_async_block(attrs)
@@ -2196,7 +2190,7 @@ fn parse_bottom_expr(&mut self) -> PResult<'a, P<Expr>> {
                     // Catch this syntax error here, instead of in `parse_ident`, so
                     // that we can explicitly mention that let is not to be used as an expression
                     let mut db = self.fatal("expected expression, found statement (`let`)");
-                    db.span_label(self.span, "expected expression");
+                    db.span_label(self.token.span, "expected expression");
                     db.note("variable declaration using `let` is a statement");
                     return Err(db);
                 } else if is_span_rust_2018 && self.eat_keyword(kw::Await) {
@@ -2237,7 +2231,7 @@ fn parse_bottom_expr(&mut self) -> PResult<'a, P<Expr>> {
                         //   |             ^ expected expression
                         // ```
                         self.bump();
-                        return Ok(self.mk_expr(self.span, ExprKind::Err, ThinVec::new()));
+                        return Ok(self.mk_expr(self.token.span, ExprKind::Err, ThinVec::new()));
                     }
                     match self.parse_literal_maybe_minus() {
                         Ok(expr) => {
@@ -2350,7 +2344,7 @@ fn parse_struct_expr(&mut self, lo: Span, pth: ast::Path, mut attrs: ThinVec<Att
                         "cannot use a comma after the base struct",
                     );
                     err.span_suggestion_short(
-                        self.span,
+                        self.token.span,
                         "remove this comma",
                         String::new(),
                         Applicability::MachineApplicable
@@ -2363,15 +2357,13 @@ fn parse_struct_expr(&mut self, lo: Span, pth: ast::Path, mut attrs: ThinVec<Att
             }
 
             let mut recovery_field = None;
-            if let token::Ident(ident, _) = self.token {
+            if let token::Ident(name, _) = self.token.kind {
                 if !self.token.is_reserved_ident() && self.look_ahead(1, |t| *t == token::Colon) {
                     // Use in case of error after field-looking code: `S { foo: () with a }`
-                    let mut ident = ident.clone();
-                    ident.span = self.span;
                     recovery_field = Some(ast::Field {
-                        ident,
-                        span: self.span,
-                        expr: self.mk_expr(self.span, ExprKind::Err, ThinVec::new()),
+                        ident: Ident::new(name, self.token.span),
+                        span: self.token.span,
+                        expr: self.mk_expr(self.token.span, ExprKind::Err, ThinVec::new()),
                         is_shorthand: false,
                         attrs: ThinVec::new(),
                     });
@@ -2414,7 +2406,7 @@ fn parse_struct_expr(&mut self, lo: Span, pth: ast::Path, mut attrs: ThinVec<Att
             }
         }
 
-        let span = lo.to(self.span);
+        let span = lo.to(self.token.span);
         self.expect(&token::CloseDelim(token::Brace))?;
         return Ok(self.mk_expr(span, ExprKind::Struct(pth, fields, base), attrs));
     }
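
The recovery above concerns struct-update syntax: `..base` must come last in
the literal and is not followed by a comma. In ordinary Rust:

struct Config { retries: u32, verbose: bool }

fn main() {
    let base = Config { retries: 3, verbose: false };
    // `Config { verbose: true, ..base, }` is rejected with
    // "cannot use a comma after the base struct".
    let overridden = Config { verbose: true, ..base };
    assert_eq!(overridden.retries, 3);
    assert!(overridden.verbose);
}
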
@@ -2490,7 +2482,7 @@ fn parse_dot_or_call_expr_with(&mut self,
 
     // Assuming we have just parsed `.`, continue parsing into an expression.
     fn parse_dot_suffix(&mut self, self_arg: P<Expr>, lo: Span) -> PResult<'a, P<Expr>> {
-        if self.span.rust_2018() && self.eat_keyword(kw::Await) {
+        if self.token.span.rust_2018() && self.eat_keyword(kw::Await) {
             let span = lo.to(self.prev_span);
             let await_expr = self.mk_expr(
                 span,
@@ -2503,7 +2495,7 @@ fn parse_dot_suffix(&mut self, self_arg: P<Expr>, lo: Span) -> PResult<'a, P<Exp
         let segment = self.parse_path_segment(PathStyle::Expr)?;
         self.check_trailing_angle_brackets(&segment, token::OpenDelim(token::Paren));
 
-        Ok(match self.token {
+        Ok(match self.token.kind {
             token::OpenDelim(token::Paren) => {
                 // Method call `expr.f()`
                 let mut args = self.parse_unspanned_seq(
@@ -2542,12 +2534,12 @@ fn parse_dot_or_call_expr_with_(&mut self, e0: P<Expr>, lo: Span) -> PResult<'a,
 
             // expr.f
             if self.eat(&token::Dot) {
-                match self.token {
+                match self.token.kind {
                     token::Ident(..) => {
                         e = self.parse_dot_suffix(e, lo)?;
                     }
                     token::Literal(token::Lit { kind: token::Integer, symbol, suffix }) => {
-                        let span = self.span;
+                        let span = self.token.span;
                         self.bump();
                         let field = ExprKind::Field(e, Ident::new(symbol, span));
                         e = self.mk_expr(lo.to(span), field, ThinVec::new());
@@ -2588,13 +2580,13 @@ fn parse_dot_or_call_expr_with_(&mut self, e0: P<Expr>, lo: Span) -> PResult<'a,
                     _ => {
                         // FIXME Could factor this out into non_fatal_unexpected or something.
                         let actual = self.this_token_to_string();
-                        self.span_err(self.span, &format!("unexpected token: `{}`", actual));
+                        self.span_err(self.token.span, &format!("unexpected token: `{}`", actual));
                     }
                 }
                 continue;
             }
             if self.expr_is_complete(&e) { break; }
-            match self.token {
+            match self.token.kind {
                 // expr(...)
                 token::OpenDelim(token::Paren) => {
                     let seq = self.parse_unspanned_seq(
@@ -2615,7 +2607,7 @@ fn parse_dot_or_call_expr_with_(&mut self, e0: P<Expr>, lo: Span) -> PResult<'a,
                 token::OpenDelim(token::Bracket) => {
                     self.bump();
                     let ix = self.parse_expr()?;
-                    hi = self.span;
+                    hi = self.token.span;
                     self.expect(&token::CloseDelim(token::Bracket))?;
                     let index = self.mk_index(e, ix);
                     e = self.mk_expr(lo.to(hi), index, ThinVec::new())
@@ -2627,43 +2619,45 @@ fn parse_dot_or_call_expr_with_(&mut self, e0: P<Expr>, lo: Span) -> PResult<'a,
     }
 
     crate fn process_potential_macro_variable(&mut self) {
-        let (token, span) = match self.token {
-            token::Dollar if self.span.ctxt() != syntax_pos::hygiene::SyntaxContext::empty() &&
+        self.token = match self.token.kind {
+            token::Dollar if self.token.span.ctxt() != SyntaxContext::empty() &&
                              self.look_ahead(1, |t| t.is_ident()) => {
                 self.bump();
-                let name = match self.token {
-                    token::Ident(ident, _) => ident,
+                let name = match self.token.kind {
+                    token::Ident(name, _) => name,
                     _ => unreachable!()
                 };
-                let mut err = self.fatal(&format!("unknown macro variable `{}`", name));
-                err.span_label(self.span, "unknown macro variable");
-                err.emit();
+                let span = self.prev_span.to(self.token.span);
+                self.diagnostic()
+                    .struct_span_fatal(span, &format!("unknown macro variable `{}`", name))
+                    .span_label(span, "unknown macro variable")
+                    .emit();
                 self.bump();
                 return
             }
             token::Interpolated(ref nt) => {
-                self.meta_var_span = Some(self.span);
+                self.meta_var_span = Some(self.token.span);
                 // Interpolated identifier and lifetime tokens are replaced with usual identifier
                 // and lifetime tokens, so the former are never encountered during normal parsing.
                 match **nt {
-                    token::NtIdent(ident, is_raw) => (token::Ident(ident, is_raw), ident.span),
-                    token::NtLifetime(ident) => (token::Lifetime(ident), ident.span),
+                    token::NtIdent(ident, is_raw) =>
+                        Token::new(token::Ident(ident.name, is_raw), ident.span),
+                    token::NtLifetime(ident) =>
+                        Token::new(token::Lifetime(ident.name), ident.span),
                     _ => return,
                 }
             }
             _ => return,
         };
-        self.token = token;
-        self.span = span;
     }
 
     /// Parses a single token tree from the input.
     crate fn parse_token_tree(&mut self) -> TokenTree {
-        match self.token {
+        match self.token.kind {
             token::OpenDelim(..) => {
                 let frame = mem::replace(&mut self.token_cursor.frame,
                                          self.token_cursor.stack.pop().unwrap());
-                self.span = frame.span.entire();
+                self.token.span = frame.span.entire();
                 self.bump();
                 TokenTree::Delimited(
                     frame.span,
@@ -2673,9 +2667,9 @@ fn parse_dot_or_call_expr_with_(&mut self, e0: P<Expr>, lo: Span) -> PResult<'a,
             },
             token::CloseDelim(_) | token::Eof => unreachable!(),
             _ => {
-                let (token, span) = (mem::replace(&mut self.token, token::Whitespace), self.span);
+                let token = self.token.take();
                 self.bump();
-                TokenTree::Token(span, token)
+                TokenTree::Token(token)
             }
         }
     }
@@ -2692,7 +2686,7 @@ pub fn parse_all_token_trees(&mut self) -> PResult<'a, Vec<TokenTree>> {
     pub fn parse_tokens(&mut self) -> TokenStream {
         let mut result = Vec::new();
         loop {
-            match self.token {
+            match self.token.kind {
                 token::Eof | token::CloseDelim(..) => break,
                 _ => result.push(self.parse_token_tree().into()),
             }
@@ -2705,9 +2699,9 @@ fn parse_prefix_expr(&mut self,
                              already_parsed_attrs: Option<ThinVec<Attribute>>)
                              -> PResult<'a, P<Expr>> {
         let attrs = self.parse_or_use_outer_attributes(already_parsed_attrs)?;
-        let lo = self.span;
-        // Note: when adding new unary operators, don't forget to adjust Token::can_begin_expr()
-        let (hi, ex) = match self.token {
+        let lo = self.token.span;
+        // Note: when adding new unary operators, don't forget to adjust TokenKind::can_begin_expr()
+        let (hi, ex) = match self.token.kind {
             token::Not => {
                 self.bump();
                 let e = self.parse_prefix_expr(None);
@@ -2760,10 +2754,10 @@ fn parse_prefix_expr(&mut self,
                 // `not` is just an ordinary identifier in Rust-the-language,
                 // but as `rustc`-the-compiler, we can issue clever diagnostics
                 // for confused users who really want to say `!`
-                let token_cannot_continue_expr = |t: &token::Token| match *t {
+                let token_cannot_continue_expr = |t: &Token| match t.kind {
                     // These tokens can start an expression after `!`, but
                     // can't continue an expression after an ident
-                    token::Ident(ident, is_raw) => token::ident_can_begin_expr(ident, is_raw),
+                    token::Ident(name, is_raw) => token::ident_can_begin_expr(name, t.span, is_raw),
                     token::Literal(..) | token::Pound => true,
                     token::Interpolated(ref nt) => match **nt {
                         token::NtIdent(..) | token::NtExpr(..) |
@@ -2777,13 +2771,13 @@ fn parse_prefix_expr(&mut self,
                     self.bump();
                     // Emit the error ...
                     let mut err = self.diagnostic()
-                        .struct_span_err(self.span,
+                        .struct_span_err(self.token.span,
                                          &format!("unexpected {} after identifier",
                                                   self.this_token_descr()));
                     // span the `not` plus trailing whitespace to avoid
                     // trailing whitespace after the `!` in our suggestion
                     let to_replace = self.sess.source_map()
-                        .span_until_non_whitespace(lo.to(self.span));
+                        .span_until_non_whitespace(lo.to(self.token.span));
                     err.span_suggestion_short(
                         to_replace,
                         "use `!` to perform logical negation",
@@ -2828,7 +2822,7 @@ fn parse_assoc_expr_with(&mut self,
                 LhsExpr::AttributesParsed(attrs) => Some(attrs),
                 _ => None,
             };
-            if [token::DotDot, token::DotDotDot, token::DotDotEq].contains(&self.token) {
+            if [token::DotDot, token::DotDotDot, token::DotDotEq].contains(&self.token.kind) {
                 return self.parse_prefix_range_expr(attrs);
             } else {
                 self.parse_prefix_expr(attrs)?
@@ -2852,7 +2846,7 @@ fn parse_assoc_expr_with(&mut self,
             // `if x { a } else { b } && if y { c } else { d }`
             if !self.look_ahead(1, |t| t.is_reserved_ident()) => {
                 // These cases are ambiguous and can't be identified in the parser alone
-                let sp = self.sess.source_map().start_point(self.span);
+                let sp = self.sess.source_map().start_point(self.token.span);
                 self.sess.ambiguous_block_expr_parse.borrow_mut().insert(sp, lhs.span);
                 return Ok(lhs);
             }
@@ -2863,11 +2857,11 @@ fn parse_assoc_expr_with(&mut self,
                 // We've found an expression that would be parsed as a statement, but the next
                 // token implies this should be parsed as an expression.
                 // For example: `if let Some(x) = x { x } else { 0 } / 2`
-                let mut err = self.sess.span_diagnostic.struct_span_err(self.span, &format!(
+                let mut err = self.sess.span_diagnostic.struct_span_err(self.token.span, &format!(
                     "expected expression, found `{}`",
                     pprust::token_to_string(&self.token),
                 ));
-                err.span_label(self.span, "expected expression");
+                err.span_label(self.token.span, "expected expression");
                 self.sess.expr_parentheses_needed(
                     &mut err,
                     lhs.span,
@@ -2890,7 +2884,7 @@ fn parse_assoc_expr_with(&mut self,
                 _ => lhs.span,
             };
 
-            let cur_op_span = self.span;
+            let cur_op_span = self.token.span;
             let restrictions = if op.is_assign_like() {
                 self.restrictions & Restrictions::NO_STRUCT_LITERAL
             } else {
@@ -2902,7 +2896,7 @@ fn parse_assoc_expr_with(&mut self,
             }
             // Check for deprecated `...` syntax
             if self.token == token::DotDotDot && op == AssocOp::DotDotEq {
-                self.err_dotdotdot_syntax(self.span);
+                self.err_dotdotdot_syntax(self.token.span);
             }
 
             self.bump();
@@ -2915,7 +2909,7 @@ fn parse_assoc_expr_with(&mut self,
                 continue
             } else if op == AssocOp::Colon {
                 let maybe_path = self.could_ascription_be_path(&lhs.node);
-                let next_sp = self.span;
+                let next_sp = self.token.span;
 
                 lhs = match self.parse_assoc_op_cast(lhs, lhs_span, ExprKind::Type) {
                     Ok(lhs) => lhs,
@@ -3040,7 +3034,7 @@ fn parse_assoc_op_cast(&mut self, lhs: P<Expr>, lhs_span: Span,
 
                 match self.parse_path(PathStyle::Expr) {
                     Ok(path) => {
-                        let (op_noun, op_verb) = match self.token {
+                        let (op_noun, op_verb) = match self.token.kind {
                             token::Lt => ("comparison", "comparing"),
                             token::BinOp(token::Shl) => ("shift", "shifting"),
                             _ => {
@@ -3060,10 +3054,12 @@ fn parse_assoc_op_cast(&mut self, lhs: P<Expr>, lhs_span: Span,
                         // in AST and continue parsing.
                         let msg = format!("`<` is interpreted as a start of generic \
                                            arguments for `{}`, not a {}", path, op_noun);
-                        let mut err = self.sess.span_diagnostic.struct_span_err(self.span, &msg);
-                        err.span_label(self.look_ahead_span(1).to(parser_snapshot_after_type.span),
+                        let mut err =
+                            self.sess.span_diagnostic.struct_span_err(self.token.span, &msg);
+                        let span_after_type = parser_snapshot_after_type.token.span;
+                        err.span_label(self.look_ahead(1, |t| t.span).to(span_after_type),
                                        "interpreted as generic arguments");
-                        err.span_label(self.span, format!("not interpreted as {}", op_noun));
+                        err.span_label(self.token.span, format!("not interpreted as {}", op_noun));
 
                         let expr = mk_expr(self, P(Ty {
                             span: path.span,
@@ -3100,16 +3096,16 @@ fn parse_prefix_range_expr(&mut self,
                                -> PResult<'a, P<Expr>> {
         // Check for deprecated `...` syntax
         if self.token == token::DotDotDot {
-            self.err_dotdotdot_syntax(self.span);
+            self.err_dotdotdot_syntax(self.token.span);
         }
 
-        debug_assert!([token::DotDot, token::DotDotDot, token::DotDotEq].contains(&self.token),
+        debug_assert!([token::DotDot, token::DotDotDot, token::DotDotEq].contains(&self.token.kind),
                       "parse_prefix_range_expr: token {:?} is not DotDot/DotDotEq",
                       self.token);
         let tok = self.token.clone();
         let attrs = self.parse_or_use_outer_attributes(already_parsed_attrs)?;
-        let lo = self.span;
-        let mut hi = self.span;
+        let lo = self.token.span;
+        let mut hi = self.token.span;
         self.bump();
         let opt_end = if self.is_at_start_of_range_notation_rhs() {
             // RHS must be parsed with more associativity than the dots.
@@ -3204,13 +3200,13 @@ fn parse_lambda_expr(&mut self,
                              attrs: ThinVec<Attribute>)
                              -> PResult<'a, P<Expr>>
     {
-        let lo = self.span;
+        let lo = self.token.span;
         let movability = if self.eat_keyword(kw::Static) {
             Movability::Static
         } else {
             Movability::Movable
         };
-        let asyncness = if self.span.rust_2018() {
+        let asyncness = if self.token.span.rust_2018() {
             self.parse_asyncness()
         } else {
             IsAsync::NotAsync
@@ -3230,7 +3226,7 @@ fn parse_lambda_expr(&mut self,
             _ => {
                 // If an explicit return type is given, require a
                 // block to appear (RFC 968).
-                let body_lo = self.span;
+                let body_lo = self.token.span;
                 self.parse_block_expr(None, body_lo, BlockCheckMode::Default, ThinVec::new())?
             }
         };
@@ -3259,7 +3255,7 @@ fn parse_for_expr(&mut self, opt_label: Option<Label>,
 
         let pat = self.parse_top_level_pat()?;
         if !self.eat_keyword(kw::In) {
-            let in_span = self.prev_span.between(self.span);
+            let in_span = self.prev_span.between(self.token.span);
             let mut err = self.sess.span_diagnostic
                 .struct_span_err(in_span, "missing `in` in `for` loop");
             err.span_suggestion_short(
@@ -3321,7 +3317,7 @@ fn parse_loop_expr(&mut self, opt_label: Option<Label>,
     pub fn parse_async_block(&mut self, mut attrs: ThinVec<Attribute>)
         -> PResult<'a, P<Expr>>
     {
-        let span_lo = self.span;
+        let span_lo = self.token.span;
         self.expect_keyword(kw::Async)?;
         let capture_clause = if self.eat_keyword(kw::Move) {
             CaptureBy::Value
@@ -3359,7 +3355,7 @@ fn parse_match_expr(&mut self, mut attrs: ThinVec<Attribute>) -> PResult<'a, P<E
         let discriminant = self.parse_expr_res(Restrictions::NO_STRUCT_LITERAL,
                                                None)?;
         if let Err(mut e) = self.expect(&token::OpenDelim(token::Brace)) {
-            if self.token == token::Token::Semi {
+            if self.token == token::Semi {
                 e.span_suggestion_short(
                     match_span,
                     "try removing this `match`",
@@ -3379,7 +3375,7 @@ fn parse_match_expr(&mut self, mut attrs: ThinVec<Attribute>) -> PResult<'a, P<E
                     // Recover by skipping to the end of the block.
                     e.emit();
                     self.recover_stmt();
-                    let span = lo.to(self.span);
+                    let span = lo.to(self.token.span);
                     if self.token == token::CloseDelim(token::Brace) {
                         self.bump();
                     }
@@ -3387,23 +3383,23 @@ fn parse_match_expr(&mut self, mut attrs: ThinVec<Attribute>) -> PResult<'a, P<E
                 }
             }
         }
-        let hi = self.span;
+        let hi = self.token.span;
         self.bump();
         return Ok(self.mk_expr(lo.to(hi), ExprKind::Match(discriminant, arms), attrs));
     }
 
     crate fn parse_arm(&mut self) -> PResult<'a, Arm> {
         let attrs = self.parse_outer_attributes()?;
-        let lo = self.span;
+        let lo = self.token.span;
         let pats = self.parse_pats()?;
         let guard = if self.eat_keyword(kw::If) {
             Some(Guard::If(self.parse_expr()?))
         } else {
             None
         };
-        let arrow_span = self.span;
+        let arrow_span = self.token.span;
         self.expect(&token::FatArrow)?;
-        let arm_start_span = self.span;
+        let arm_start_span = self.token.span;
 
         let expr = self.parse_expr_res(Restrictions::STMT_EXPR, None)
             .map_err(|mut err| {
@@ -3414,7 +3410,7 @@ fn parse_match_expr(&mut self, mut attrs: ThinVec<Attribute>) -> PResult<'a, P<E
         let require_comma = classify::expr_requires_semi_to_be_stmt(&expr)
             && self.token != token::CloseDelim(token::Brace);
 
-        let hi = self.span;
+        let hi = self.token.span;
 
         if require_comma {
             let cm = self.sess.source_map();
@@ -3433,7 +3429,7 @@ fn parse_match_expr(&mut self, mut attrs: ThinVec<Attribute>) -> PResult<'a, P<E
                             //   |        |
                             //   |        arrow_span
                             // X |     &X => "x"
-                            //   |      - ^^ self.span
+                            //   |      - ^^ self.token.span
                             //   |      |
                             //   |      parsed until here as `"y" & X`
                             err.span_suggestion_short(
@@ -3512,10 +3508,10 @@ fn parse_pats(&mut self) -> PResult<'a, Vec<P<Pat>>> {
             pats.push(self.parse_top_level_pat()?);
 
             if self.token == token::OrOr {
-                let mut err = self.struct_span_err(self.span,
+                let mut err = self.struct_span_err(self.token.span,
                                                    "unexpected token `||` after pattern");
                 err.span_suggestion(
-                    self.span,
+                    self.token.span,
                     "use a single `|` to specify multiple patterns",
                     "|".to_owned(),
                     Applicability::MachineApplicable
@@ -3664,7 +3660,7 @@ fn parse_pat_field(
         } else {
             // Parsing a pattern of the form "(box) (ref) (mut) fieldname"
             let is_box = self.eat_keyword(kw::Box);
-            let boxed_span = self.span;
+            let boxed_span = self.token.span;
             let is_ref = self.eat_keyword(kw::Ref);
             let is_mut = self.eat_keyword(kw::Mut);
             let fieldname = self.parse_ident()?;
@@ -3715,7 +3711,7 @@ fn parse_pat_fields(&mut self) -> PResult<'a, (Vec<source_map::Spanned<ast::Fiel
 
         while self.token != token::CloseDelim(token::Brace) {
             let attrs = self.parse_outer_attributes()?;
-            let lo = self.span;
+            let lo = self.token.span;
 
             // check that a comma comes after every field
             if !ate_comma {
@@ -3729,14 +3725,14 @@ fn parse_pat_fields(&mut self) -> PResult<'a, (Vec<source_map::Spanned<ast::Fiel
 
             if self.check(&token::DotDot) || self.token == token::DotDotDot {
                 etc = true;
-                let mut etc_sp = self.span;
+                let mut etc_sp = self.token.span;
 
                 if self.token == token::DotDotDot { // Issue #46718
                     // Accept `...` as if it were `..` to avoid further errors
-                    let mut err = self.struct_span_err(self.span,
+                    let mut err = self.struct_span_err(self.token.span,
                                                        "expected field pattern, found `...`");
                     err.span_suggestion(
-                        self.span,
+                        self.token.span,
                         "to omit remaining fields, use one fewer `.`",
                         "..".to_owned(),
                         Applicability::MachineApplicable
@@ -3752,18 +3748,19 @@ fn parse_pat_fields(&mut self) -> PResult<'a, (Vec<source_map::Spanned<ast::Fiel
                 let token_str = self.this_token_descr();
                 let mut err = self.fatal(&format!("expected `}}`, found {}", token_str));
 
-                err.span_label(self.span, "expected `}`");
+                err.span_label(self.token.span, "expected `}`");
                 let mut comma_sp = None;
                 if self.token == token::Comma { // Issue #49257
-                    etc_sp = etc_sp.to(self.sess.source_map().span_until_non_whitespace(self.span));
+                    let nw_span = self.sess.source_map().span_until_non_whitespace(self.token.span);
+                    etc_sp = etc_sp.to(nw_span);
                     err.span_label(etc_sp,
                                    "`..` must be at the end and cannot have a trailing comma");
-                    comma_sp = Some(self.span);
+                    comma_sp = Some(self.token.span);
                     self.bump();
                     ate_comma = true;
                 }
 
-                etc_span = Some(etc_sp.until(self.span));
+                etc_span = Some(etc_sp.until(self.token.span));
                 if self.token == token::CloseDelim(token::Brace) {
                     // If the struct looks otherwise well formed, recover and continue.
                     if let Some(sp) = comma_sp {
@@ -3813,7 +3810,7 @@ fn parse_pat_fields(&mut self) -> PResult<'a, (Vec<source_map::Spanned<ast::Fiel
                     "move the `..` to the end of the field list",
                     vec![
                         (etc_span, String::new()),
-                        (self.span, format!("{}.. }}", if ate_comma { "" } else { ", " })),
+                        (self.token.span, format!("{}.. }}", if ate_comma { "" } else { ", " })),
                     ],
                     Applicability::MachineApplicable,
                 );
@@ -3825,7 +3822,7 @@ fn parse_pat_fields(&mut self) -> PResult<'a, (Vec<source_map::Spanned<ast::Fiel
 
     fn parse_pat_range_end(&mut self) -> PResult<'a, P<Expr>> {
         if self.token.is_path_start() {
-            let lo = self.span;
+            let lo = self.token.span;
             let (qself, path) = if self.eat_lt() {
                 // Parse a qualified path
                 let (qself, path) = self.parse_qpath(PathStyle::Expr)?;
@@ -3844,14 +3841,14 @@ fn parse_pat_range_end(&mut self) -> PResult<'a, P<Expr>> {
     // helper function to decide whether to parse as ident binding or to try to do
     // something more complex like range patterns
     fn parse_as_ident(&mut self) -> bool {
-        self.look_ahead(1, |t| match *t {
+        self.look_ahead(1, |t| match t.kind {
             token::OpenDelim(token::Paren) | token::OpenDelim(token::Brace) |
             token::DotDotDot | token::DotDotEq | token::ModSep | token::Not => Some(false),
             // ensure slice patterns [a, b.., c] and [a, b, c..] don't go into the
             // range pattern branch
             token::DotDot => None,
             _ => Some(true),
-        }).unwrap_or_else(|| self.look_ahead(2, |t| match *t {
+        }).unwrap_or_else(|| self.look_ahead(2, |t| match t.kind {
             token::Comma | token::CloseDelim(token::Bracket) => true,
             _ => false,
         }))
@@ -3868,7 +3865,7 @@ fn parse_top_level_pat(&mut self) -> PResult<'a, P<Pat>> {
             // parentheses in what should have been a tuple pattern; return a
             // suggestion-enhanced error here rather than choking on the comma
             // later.
-            let comma_span = self.span;
+            let comma_span = self.token.span;
             self.bump();
             if let Err(mut err) = self.parse_pat_list() {
                 // We didn't expect this to work anyway; we just wanted
@@ -3912,17 +3909,16 @@ fn parse_pat_with_range_pat(
         maybe_recover_from_interpolated_ty_qpath!(self, true);
         maybe_whole!(self, NtPat, |x| x);
 
-        let lo = self.span;
+        let lo = self.token.span;
         let pat;
-        match self.token {
+        match self.token.kind {
             token::BinOp(token::And) | token::AndAnd => {
                 // Parse &pat / &mut pat
                 self.expect_and()?;
                 let mutbl = self.parse_mutability();
-                if let token::Lifetime(ident) = self.token {
-                    let mut err = self.fatal(&format!("unexpected lifetime `{}` in pattern",
-                                                      ident));
-                    err.span_label(self.span, "unexpected lifetime");
+                if let token::Lifetime(name) = self.token.kind {
+                    let mut err = self.fatal(&format!("unexpected lifetime `{}` in pattern", name));
+                    err.span_label(self.token.span, "unexpected lifetime");
                     return Err(err);
                 }
                 let subpat = self.parse_pat_with_range_pat(false, expected)?;
@@ -3950,7 +3946,7 @@ fn parse_pat_with_range_pat(
                 pat = PatKind::Wild;
             } else if self.eat_keyword(kw::Mut) {
                 // Parse mut ident @ pat / mut ref ident @ pat
-                let mutref_span = self.prev_span.to(self.span);
+                let mutref_span = self.prev_span.to(self.token.span);
                 let binding_mode = if self.eat_keyword(kw::Ref) {
                     self.diagnostic()
                         .struct_span_err(mutref_span, "the order of `mut` and `ref` is incorrect")
@@ -3990,7 +3986,7 @@ fn parse_pat_with_range_pat(
                     // Parse an unqualified path
                     (None, self.parse_path(PathStyle::Expr)?)
                 };
-                match self.token {
+                match self.token.kind {
                     token::Not if qself.is_none() => {
                         // Parse macro invocation
                         self.bump();
@@ -3999,14 +3995,14 @@ fn parse_pat_with_range_pat(
                         pat = PatKind::Mac(mac);
                     }
                     token::DotDotDot | token::DotDotEq | token::DotDot => {
-                        let end_kind = match self.token {
+                        let end_kind = match self.token.kind {
                             token::DotDot => RangeEnd::Excluded,
                             token::DotDotDot => RangeEnd::Included(RangeSyntax::DotDotDot),
                             token::DotDotEq => RangeEnd::Included(RangeSyntax::DotDotEq),
                             _ => panic!("can only parse `..`/`...`/`..=` for ranges \
                                          (checked above)"),
                         };
-                        let op_span = self.span;
+                        let op_span = self.token.span;
                         // Parse range
                         let span = lo.to(self.prev_span);
                         let begin = self.mk_expr(span, ExprKind::Path(qself, path), ThinVec::new());
@@ -4019,7 +4015,7 @@ fn parse_pat_with_range_pat(
                         if qself.is_some() {
                             let msg = "unexpected `{` after qualified path";
                             let mut err = self.fatal(msg);
-                            err.span_label(self.span, msg);
+                            err.span_label(self.token.span, msg);
                             return Err(err);
                         }
                         // Parse struct pattern
@@ -4036,7 +4032,7 @@ fn parse_pat_with_range_pat(
                         if qself.is_some() {
                             let msg = "unexpected `(` after qualified path";
                             let mut err = self.fatal(msg);
-                            err.span_label(self.span, msg);
+                            err.span_label(self.token.span, msg);
                             return Err(err);
                         }
                         // Parse tuple struct or enum pattern
@@ -4049,7 +4045,7 @@ fn parse_pat_with_range_pat(
                 // Try to parse everything else as literal with optional minus
                 match self.parse_literal_maybe_minus() {
                     Ok(begin) => {
-                        let op_span = self.span;
+                        let op_span = self.token.span;
                         if self.check(&token::DotDot) || self.check(&token::DotDotEq) ||
                                 self.check(&token::DotDotDot) {
                             let end_kind = if self.eat(&token::DotDotDot) {
@@ -4078,8 +4074,8 @@ fn parse_pat_with_range_pat(
                             self.this_token_descr(),
                         );
                         let mut err = self.fatal(&msg);
-                        err.span_label(self.span, format!("expected {}", expected));
-                        let sp = self.sess.source_map().start_point(self.span);
+                        err.span_label(self.token.span, format!("expected {}", expected));
+                        let sp = self.sess.source_map().start_point(self.token.span);
                         if let Some(sp) = self.sess.ambiguous_block_expr_parse.borrow().get(&sp) {
                             self.sess.expr_parentheses_needed(&mut err, *sp, None);
                         }
@@ -4205,7 +4201,7 @@ fn parse_local(&mut self, attrs: ThinVec<Attribute>) -> PResult<'a, P<Local>> {
             }
         };
         let hi = if self.token == token::Semi {
-            self.span
+            self.token.span
         } else {
             self.prev_span
         };
@@ -4293,7 +4289,7 @@ fn is_do_catch_block(&self) -> bool {
     fn is_try_block(&self) -> bool {
         self.token.is_keyword(kw::Try) &&
         self.look_ahead(1, |t| *t == token::OpenDelim(token::Brace)) &&
-        self.span.rust_2018() &&
+        self.token.span.rust_2018() &&
         // prevent `while try {} {}`, `if try {} {} else {}`, etc.
         !self.restrictions.contains(Restrictions::NO_STRUCT_LITERAL)
     }
@@ -4324,9 +4320,9 @@ fn is_auto_trait_item(&self) -> bool {
 
     fn eat_macro_def(&mut self, attrs: &[Attribute], vis: &Visibility, lo: Span)
                      -> PResult<'a, Option<P<Item>>> {
-        let token_lo = self.span;
-        let (ident, def) = match self.token {
-            token::Ident(ident, false) if ident.name == kw::Macro => {
+        let token_lo = self.token.span;
+        let (ident, def) = match self.token.kind {
+            token::Ident(name, false) if name == kw::Macro => {
                 self.bump();
                 let ident = self.parse_ident()?;
                 let tokens = if self.check(&token::OpenDelim(token::Brace)) {
@@ -4344,7 +4340,7 @@ fn eat_macro_def(&mut self, attrs: &[Attribute], vis: &Visibility, lo: Span)
                     };
                     TokenStream::new(vec![
                         args.into(),
-                        TokenTree::Token(token_lo.to(self.prev_span), token::FatArrow).into(),
+                        TokenTree::token(token::FatArrow, token_lo.to(self.prev_span)).into(),
                         body.into(),
                     ])
                 } else {
@@ -4354,8 +4350,8 @@ fn eat_macro_def(&mut self, attrs: &[Attribute], vis: &Visibility, lo: Span)
 
                 (ident, ast::MacroDef { tokens: tokens.into(), legacy: false })
             }
-            token::Ident(ident, _) if ident.name == sym::macro_rules &&
-                                   self.look_ahead(1, |t| *t == token::Not) => {
+            token::Ident(name, _) if name == sym::macro_rules &&
+                                     self.look_ahead(1, |t| *t == token::Not) => {
                 let prev_span = self.prev_span;
                 self.complain_if_pub_macro(&vis.node, prev_span);
                 self.bump();
@@ -4382,7 +4378,7 @@ fn parse_stmt_without_recovery(&mut self,
         maybe_whole!(self, NtStmt, |x| Some(x));
 
         let attrs = self.parse_outer_attributes()?;
-        let lo = self.span;
+        let lo = self.token.span;
 
         Ok(Some(if self.eat_keyword(kw::Let) {
             Stmt {
@@ -4436,7 +4432,7 @@ fn parse_stmt_without_recovery(&mut self,
             }
 
             // it's a macro invocation
-            let id = match self.token {
+            let id = match self.token.kind {
                 token::OpenDelim(_) => Ident::invalid(), // no special identifier
                 _ => self.parse_ident()?,
             };
@@ -4444,7 +4440,7 @@ fn parse_stmt_without_recovery(&mut self,
             // check that we're pointing at delimiters (need to check
             // again after the `if`, because of `parse_ident`
             // consuming more tokens).
-            match self.token {
+            match self.token.kind {
                 token::OpenDelim(_) => {}
                 _ => {
                     // we only expect an ident if we didn't parse one
@@ -4458,7 +4454,7 @@ fn parse_stmt_without_recovery(&mut self,
                     let mut err = self.fatal(&format!("expected {}`(` or `{{`, found {}",
                                                       ident_str,
                                                       tok_str));
-                    err.span_label(self.span, format!("expected {}`(` or `{{`", ident_str));
+                    err.span_label(self.token.span, format!("expected {}`(` or `{{`", ident_str));
                     return Err(err)
                 },
             }
@@ -4481,7 +4477,9 @@ fn parse_stmt_without_recovery(&mut self,
                 // We used to incorrectly stop parsing macro-expanded statements here.
                 // If the next token will be an error anyway but could have parsed with the
                 // earlier behavior, stop parsing here and emit a warning to avoid breakage.
-                else if macro_legacy_warnings && self.token.can_begin_expr() && match self.token {
+                else if macro_legacy_warnings &&
+                        self.token.can_begin_expr() &&
+                        match self.token.kind {
                     // These can continue an expression, so we can't stop parsing and warn.
                     token::OpenDelim(token::Paren) | token::OpenDelim(token::Bracket) |
                     token::BinOp(token::Minus) | token::BinOp(token::Star) |
@@ -4543,7 +4541,9 @@ fn parse_stmt_without_recovery(&mut self,
                             if s.prev_token_kind == PrevTokenKind::DocComment {
                                 s.span_fatal_err(s.prev_span, Error::UselessDocComment).emit();
                             } else if attrs.iter().any(|a| a.style == AttrStyle::Outer) {
-                                s.span_err(s.span, "expected statement after outer attribute");
+                                s.span_err(
+                                    s.token.span, "expected statement after outer attribute"
+                                );
                             }
                         }
                     };
@@ -4583,10 +4583,10 @@ fn expr_is_complete(&self, e: &Expr) -> bool {
     pub fn parse_block(&mut self) -> PResult<'a, P<Block>> {
         maybe_whole!(self, NtBlock, |x| x);
 
-        let lo = self.span;
+        let lo = self.token.span;
 
         if !self.eat(&token::OpenDelim(token::Brace)) {
-            let sp = self.span;
+            let sp = self.token.span;
             let tok = self.this_token_descr();
             let mut e = self.span_fatal(sp, &format!("expected `{{`, found {}", tok));
             let do_not_suggest_help =
@@ -4594,7 +4594,7 @@ pub fn parse_block(&mut self) -> PResult<'a, P<Block>> {
 
             if self.token.is_ident_named(sym::and) {
                 e.span_suggestion_short(
-                    self.span,
+                    self.token.span,
                     "use `&&` instead of `and` for the boolean operator",
                     "&&".to_string(),
                     Applicability::MaybeIncorrect,
@@ -4602,7 +4602,7 @@ pub fn parse_block(&mut self) -> PResult<'a, P<Block>> {
             }
             if self.token.is_ident_named(sym::or) {
                 e.span_suggestion_short(
-                    self.span,
+                    self.token.span,
                     "use `||` instead of `or` for the boolean operator",
                     "||".to_string(),
                     Applicability::MaybeIncorrect,
@@ -4661,7 +4661,7 @@ pub fn parse_block(&mut self) -> PResult<'a, P<Block>> {
     fn parse_inner_attrs_and_block(&mut self) -> PResult<'a, (Vec<Attribute>, P<Block>)> {
         maybe_whole!(self, NtBlock, |x| (Vec::new(), x));
 
-        let lo = self.span;
+        let lo = self.token.span;
         self.expect(&token::OpenDelim(token::Brace))?;
         Ok((self.parse_inner_attributes()?,
             self.parse_block_tail(lo, BlockCheckMode::Default)?))
@@ -4678,8 +4678,8 @@ fn parse_block_tail(&mut self, lo: Span, s: BlockCheckMode) -> PResult<'a, P<Blo
                     self.recover_stmt_(SemiColonMode::Ignore, BlockMode::Ignore);
                     Some(Stmt {
                         id: ast::DUMMY_NODE_ID,
-                        node: StmtKind::Expr(DummyResult::raw_expr(self.span, true)),
-                        span: self.span,
+                        node: StmtKind::Expr(DummyResult::raw_expr(self.token.span, true)),
+                        span: self.token.span,
                     })
                 }
                 Ok(stmt) => stmt,
@@ -4744,7 +4744,7 @@ fn parse_block_tail(&mut self, lo: Span, s: BlockCheckMode) -> PResult<'a, P<Blo
     }
 
     fn warn_missing_semicolon(&self) {
-        self.diagnostic().struct_span_warn(self.span, {
+        self.diagnostic().struct_span_warn(self.token.span, {
             &format!("expected `;`, found {}", self.this_token_descr())
         }).note({
             "This was erroneously allowed and will become a hard error in a future release"
@@ -4779,16 +4779,16 @@ fn parse_generic_bounds_common(&mut self,
         let mut last_plus_span = None;
         let mut was_negative = false;
         loop {
-            // This needs to be synchronized with `Token::can_begin_bound`.
+            // This needs to be synchronized with `TokenKind::can_begin_bound`.
             let is_bound_start = self.check_path() || self.check_lifetime() ||
                                  self.check(&token::Not) || // used for error reporting only
                                  self.check(&token::Question) ||
                                  self.check_keyword(kw::For) ||
                                  self.check(&token::OpenDelim(token::Paren));
             if is_bound_start {
-                let lo = self.span;
+                let lo = self.token.span;
                 let has_parens = self.eat(&token::OpenDelim(token::Paren));
-                let inner_lo = self.span;
+                let inner_lo = self.token.span;
                 let is_negative = self.eat(&token::Not);
                 let question = if self.eat(&token::Question) { Some(self.prev_span) } else { None };
                 if self.token.is_lifetime() {
@@ -5044,13 +5044,13 @@ fn parse_const_param(&mut self, preceding_attrs: Vec<Attribute>) -> PResult<'a,
     ///                  | ( < lifetimes , typaramseq ( , )? > )
     /// where   typaramseq = ( typaram ) | ( typaram , typaramseq )
     fn parse_generics(&mut self) -> PResult<'a, ast::Generics> {
-        let span_lo = self.span;
+        let span_lo = self.token.span;
         let (params, span) = if self.eat_lt() {
             let params = self.parse_generic_params()?;
             self.expect_gt()?;
             (params, span_lo.to(self.prev_span))
         } else {
-            (vec![], self.prev_span.between(self.span))
+            (vec![], self.prev_span.between(self.token.span))
         };
         Ok(ast::Generics {
             params,
@@ -5217,7 +5217,7 @@ fn parse_generic_args(&mut self) -> PResult<'a, (Vec<GenericArg>, Vec<AssocTyCon
         let mut misplaced_assoc_ty_constraints: Vec<Span> = Vec::new();
         let mut assoc_ty_constraints: Vec<Span> = Vec::new();
 
-        let args_lo = self.span;
+        let args_lo = self.token.span;
 
         loop {
             if self.check_lifetime() && self.look_ahead(1, |t| !t.is_like_plus()) {
@@ -5227,7 +5227,7 @@ fn parse_generic_args(&mut self) -> PResult<'a, (Vec<GenericArg>, Vec<AssocTyCon
             } else if self.check_ident() && self.look_ahead(1,
                     |t| t == &token::Eq || t == &token::Colon) {
                 // Parse associated type constraint.
-                let lo = self.span;
+                let lo = self.token.span;
                 let ident = self.parse_ident()?;
                 let kind = if self.eat(&token::Eq) {
                     AssocTyConstraintKind::Equality {
@@ -5250,8 +5250,10 @@ fn parse_generic_args(&mut self) -> PResult<'a, (Vec<GenericArg>, Vec<AssocTyCon
                 assoc_ty_constraints.push(span);
             } else if self.check_const_arg() {
                 // Parse const argument.
-                let expr = if let token::OpenDelim(token::Brace) = self.token {
-                    self.parse_block_expr(None, self.span, BlockCheckMode::Default, ThinVec::new())?
+                let expr = if let token::OpenDelim(token::Brace) = self.token.kind {
+                    self.parse_block_expr(
+                        None, self.token.span, BlockCheckMode::Default, ThinVec::new()
+                    )?
                 } else if self.token.is_ident() {
                     // FIXME(const_generics): to distinguish between idents for types and consts,
                     // we should introduce a GenericArg::Ident in the AST and distinguish when
@@ -5336,7 +5338,7 @@ fn parse_where_clause(&mut self) -> PResult<'a, WhereClause> {
         }
 
         loop {
-            let lo = self.span;
+            let lo = self.token.span;
             if self.check_lifetime() && self.look_ahead(1, |t| !t.is_like_plus()) {
                 let lifetime = self.expect_lifetime();
                 // Bounds starting with a colon are mandatory, but possibly empty.
@@ -5404,7 +5406,7 @@ fn parse_fn_args(&mut self, named_args: bool, allow_c_variadic: bool)
                      -> PResult<'a, (Vec<Arg> , bool)> {
         self.expect(&token::OpenDelim(token::Paren))?;
 
-        let sp = self.span;
+        let sp = self.token.span;
         let mut c_variadic = false;
         let (args, recovered): (Vec<Option<Arg>>, bool) =
             self.parse_seq_to_before_end(
@@ -5424,7 +5426,7 @@ fn parse_fn_args(&mut self, named_args: bool, allow_c_variadic: bool)
                             if let TyKind::CVarArgs = arg.ty.node {
                                 c_variadic = true;
                                 if p.token != token::CloseDelim(token::Paren) {
-                                    let span = p.span;
+                                    let span = p.token.span;
                                     p.span_err(span,
                                         "`...` must be the last argument of a C-variadic function");
                                     Ok(None)
@@ -5477,10 +5479,10 @@ fn parse_fn_decl(&mut self, allow_c_variadic: bool) -> PResult<'a, P<FnDecl>> {
 
     /// Returns the parsed optional self argument and whether a self shortcut was used.
     fn parse_self_arg(&mut self) -> PResult<'a, Option<Arg>> {
-        let expect_ident = |this: &mut Self| match this.token {
+        let expect_ident = |this: &mut Self| match this.token.kind {
             // Preserve hygienic context.
-            token::Ident(ident, _) =>
-                { let span = this.span; this.bump(); Ident::new(ident.name, span) }
+            token::Ident(name, _) =>
+                { let span = this.token.span; this.bump(); Ident::new(name, span) }
             _ => unreachable!()
         };
         let isolated_self = |this: &mut Self, n| {
@@ -5491,8 +5493,8 @@ fn parse_self_arg(&mut self) -> PResult<'a, Option<Arg>> {
         // Parse optional `self` parameter of a method.
         // Only a limited set of initial token sequences is considered `self` parameters; anything
         // else is parsed as a normal function parameter list, so some lookahead is required.
-        let eself_lo = self.span;
-        let (eself, eself_ident, eself_hi) = match self.token {
+        let eself_lo = self.token.span;
+        let (eself, eself_ident, eself_hi) = match self.token.kind {
             token::BinOp(token::And) => {
                 // `&self`
                 // `&mut self`
@@ -5532,16 +5534,16 @@ fn parse_self_arg(&mut self) -> PResult<'a, Option<Arg>> {
                 let msg = "cannot pass `self` by raw pointer";
                 (if isolated_self(self, 1) {
                     self.bump();
-                    self.struct_span_err(self.span, msg)
-                        .span_label(self.span, msg)
+                    self.struct_span_err(self.token.span, msg)
+                        .span_label(self.token.span, msg)
                         .emit();
                     SelfKind::Value(Mutability::Immutable)
                 } else if self.look_ahead(1, |t| t.is_mutability()) &&
                           isolated_self(self, 2) {
                     self.bump();
                     self.bump();
-                    self.struct_span_err(self.span, msg)
-                        .span_label(self.span, msg)
+                    self.struct_span_err(self.token.span, msg)
+                        .span_label(self.token.span, msg)
                         .emit();
                     SelfKind::Value(Mutability::Immutable)
                 } else {
@@ -5759,7 +5761,7 @@ pub fn parse_impl_item(&mut self, at_end: &mut bool) -> PResult<'a, ImplItem> {
     fn parse_impl_item_(&mut self,
                         at_end: &mut bool,
                         mut attrs: Vec<Attribute>) -> PResult<'a, ImplItem> {
-        let lo = self.span;
+        let lo = self.token.span;
         let vis = self.parse_visibility(false)?;
         let defaultness = self.parse_defaultness();
         let (name, node, generics) = if let Some(type_) = self.eat_type() {
@@ -5803,11 +5805,7 @@ fn complain_if_pub_macro(&self, vis: &VisibilityKind, sp: Span) {
         match *vis {
             VisibilityKind::Inherited => {}
             _ => {
-                let is_macro_rules: bool = match self.token {
-                    token::Ident(sid, _) => sid.name == sym::macro_rules,
-                    _ => false,
-                };
-                let mut err = if is_macro_rules {
+                let mut err = if self.token.is_keyword(sym::macro_rules) {
                     let mut err = self.diagnostic()
                         .struct_span_err(sp, "can't qualify macro_rules invocation with `pub`");
                     err.span_suggestion(
@@ -5918,11 +5916,11 @@ fn parse_item_trait(&mut self, is_auto: IsAuto, unsafety: Unsafety) -> PResult<'
             self.expect(&token::OpenDelim(token::Brace))?;
             let mut trait_items = vec![];
             while !self.eat(&token::CloseDelim(token::Brace)) {
-                if let token::DocComment(_) = self.token {
+                if let token::DocComment(_) = self.token.kind {
                     if self.look_ahead(1,
-                    |tok| tok == &token::Token::CloseDelim(token::Brace)) {
+                    |tok| tok == &token::CloseDelim(token::Brace)) {
                         let mut err = self.diagnostic().struct_span_err_with_code(
-                            self.span,
+                            self.token.span,
                             "found a documentation comment that doesn't document anything",
                             DiagnosticId::Error("E0584".into()),
                         );
@@ -6024,7 +6022,7 @@ fn parse_item_impl(&mut self, unsafety: Unsafety, defaultness: Defaultness)
         let err_path = |span| ast::Path::from_ident(Ident::new(kw::Invalid, span));
         let ty_first = if self.token.is_keyword(kw::For) &&
                           self.look_ahead(1, |t| t != &token::Lt) {
-            let span = self.prev_span.between(self.span);
+            let span = self.prev_span.between(self.token.span);
             self.struct_span_err(span, "missing trait in a trait impl").emit();
             P(Ty { node: TyKind::Path(None, err_path(span)), span, id: ast::DUMMY_NODE_ID })
         } else {
@@ -6033,7 +6031,7 @@ fn parse_item_impl(&mut self, unsafety: Unsafety, defaultness: Defaultness)
 
         // If `for` is missing we try to recover.
         let has_for = self.eat_keyword(kw::For);
-        let missing_for_span = self.prev_span.between(self.span);
+        let missing_for_span = self.prev_span.between(self.token.span);
 
         let ty_second = if self.token == token::DotDot {
             // We need to report this error after `cfg` expansion for compatibility reasons
@@ -6148,7 +6146,7 @@ fn parse_item_struct(&mut self) -> PResult<'a, ItemInfo> {
                 "expected `where`, `{{`, `(`, or `;` after struct name, found {}",
                 token_str
             ));
-            err.span_label(self.span, "expected `where`, `{`, `(`, or `;` after struct name");
+            err.span_label(self.token.span, "expected `where`, `{`, `(`, or `;` after struct name");
             return Err(err);
         };
 
@@ -6172,7 +6170,7 @@ fn parse_item_union(&mut self) -> PResult<'a, ItemInfo> {
             let token_str = self.this_token_descr();
             let mut err = self.fatal(&format!(
                 "expected `where` or `{{` after union name, found {}", token_str));
-            err.span_label(self.span, "expected `where` or `{` after union name");
+            err.span_label(self.token.span, "expected `where` or `{` after union name");
             return Err(err);
         };
 
@@ -6203,7 +6201,7 @@ fn parse_record_struct_body(
             let token_str = self.this_token_descr();
             let mut err = self.fatal(&format!(
                     "expected `where`, or `{{` after struct name, found {}", token_str));
-            err.span_label(self.span, "expected `where`, or `{` after struct name");
+            err.span_label(self.token.span, "expected `where`, or `{` after struct name");
             return Err(err);
         }
 
@@ -6219,7 +6217,7 @@ fn parse_tuple_struct_body(&mut self) -> PResult<'a, Vec<StructField>> {
             SeqSep::trailing_allowed(token::Comma),
             |p| {
                 let attrs = p.parse_outer_attributes()?;
-                let lo = p.span;
+                let lo = p.token.span;
                 let vis = p.parse_visibility(true)?;
                 let ty = p.parse_ty()?;
                 Ok(StructField {
@@ -6246,14 +6244,14 @@ fn parse_single_struct_field(&mut self,
         if self.token == token::Comma {
             seen_comma = true;
         }
-        match self.token {
+        match self.token.kind {
             token::Comma => {
                 self.bump();
             }
             token::CloseDelim(token::Brace) => {}
             token::DocComment(_) => {
                 let previous_span = self.prev_span;
-                let mut err = self.span_fatal_err(self.span, Error::UselessDocComment);
+                let mut err = self.span_fatal_err(self.token.span, Error::UselessDocComment);
                 self.bump(); // consume the doc comment
                 let comma_after_doc_seen = self.eat(&token::Comma);
                 // `seen_comma` is always false, because we are inside doc block
@@ -6300,7 +6298,7 @@ fn parse_single_struct_field(&mut self,
     /// Parses an element of a struct declaration.
     fn parse_struct_decl_field(&mut self) -> PResult<'a, StructField> {
         let attrs = self.parse_outer_attributes()?;
-        let lo = self.span;
+        let lo = self.token.span;
         let vis = self.parse_visibility(false)?;
         self.parse_single_struct_field(lo, vis, attrs)
     }
@@ -6323,7 +6321,7 @@ pub fn parse_visibility(&mut self, can_take_tuple: bool) -> PResult<'a, Visibili
             // We need a span for our `Spanned<VisibilityKind>`, but there's inherently no
             // keyword to grab a span from for inherited visibility; an empty span at the
             // beginning of the current token would seem to be the "Schelling span".
-            return Ok(respan(self.span.shrink_to_lo(), VisibilityKind::Inherited))
+            return Ok(respan(self.token.span.shrink_to_lo(), VisibilityKind::Inherited))
         }
         let lo = self.prev_span;
 
@@ -6413,7 +6411,7 @@ fn parse_defaultness(&mut self) -> Defaultness {
     }
 
     /// Given a termination token, parses all of the items in a module.
-    fn parse_mod_items(&mut self, term: &token::Token, inner_lo: Span) -> PResult<'a, Mod> {
+    fn parse_mod_items(&mut self, term: &TokenKind, inner_lo: Span) -> PResult<'a, Mod> {
         let mut items = vec![];
         while let Some(item) = self.parse_item()? {
             items.push(item);
@@ -6424,12 +6422,12 @@ fn parse_mod_items(&mut self, term: &token::Token, inner_lo: Span) -> PResult<'a
             let token_str = self.this_token_descr();
             if !self.maybe_consume_incorrect_semicolon(&items) {
                 let mut err = self.fatal(&format!("expected item, found {}", token_str));
-                err.span_label(self.span, "expected item");
+                err.span_label(self.token.span, "expected item");
                 return Err(err);
             }
         }
 
-        let hi = if self.span.is_dummy() {
+        let hi = if self.token.span.is_dummy() {
             inner_lo
         } else {
             self.prev_span
@@ -6468,7 +6466,7 @@ fn parse_item_mod(&mut self, outer_attrs: &[Attribute]) -> PResult<'a, ItemInfo>
             (!self.cfg_mods || strip_unconfigured.in_cfg(&outer_attrs), outer_attrs)
         };
 
-        let id_span = self.span;
+        let id_span = self.token.span;
         let id = self.parse_ident()?;
         if self.eat(&token::Semi) {
             if in_cfg && self.recurse_into_file_modules {
@@ -6505,7 +6503,7 @@ fn parse_item_mod(&mut self, outer_attrs: &[Attribute]) -> PResult<'a, ItemInfo>
             self.push_directory(id, &outer_attrs);
 
             self.expect(&token::OpenDelim(token::Brace))?;
-            let mod_inner_lo = self.span;
+            let mod_inner_lo = self.token.span;
             let attrs = self.parse_inner_attributes()?;
             let module = self.parse_mod_items(&token::CloseDelim(token::Brace), mod_inner_lo)?;
 
@@ -6717,7 +6715,7 @@ fn eval_src_mod(&mut self,
         let mut p0 =
             new_sub_parser_from_file(self.sess, &path, directory_ownership, Some(name), id_sp);
         p0.cfg_mods = self.cfg_mods;
-        let mod_inner_lo = p0.span;
+        let mod_inner_lo = p0.token.span;
         let mod_attrs = p0.parse_inner_attributes()?;
         let mut m0 = p0.parse_mod_items(&token::Eof, mod_inner_lo)?;
         m0.inline = false;
@@ -6733,7 +6731,7 @@ fn parse_item_foreign_fn(&mut self, vis: ast::Visibility, lo: Span, attrs: Vec<A
         let (ident, mut generics) = self.parse_fn_header()?;
         let decl = self.parse_fn_decl(true)?;
         generics.where_clause = self.parse_where_clause()?;
-        let hi = self.span;
+        let hi = self.token.span;
         self.expect(&token::Semi)?;
         Ok(ast::ForeignItem {
             ident,
@@ -6753,7 +6751,7 @@ fn parse_item_foreign_static(&mut self, vis: ast::Visibility, lo: Span, attrs: V
         let ident = self.parse_ident()?;
         self.expect(&token::Colon)?;
         let ty = self.parse_ty()?;
-        let hi = self.span;
+        let hi = self.token.span;
         self.expect(&token::Semi)?;
         Ok(ForeignItem {
             ident,
@@ -6771,7 +6769,7 @@ fn parse_item_foreign_type(&mut self, vis: ast::Visibility, lo: Span, attrs: Vec
         self.expect_keyword(kw::Type)?;
 
         let ident = self.parse_ident()?;
-        let hi = self.span;
+        let hi = self.token.span;
         self.expect(&token::Semi)?;
         Ok(ast::ForeignItem {
             ident: ident,
@@ -6796,7 +6794,7 @@ fn parse_crate_name_with_dashes(&mut self) -> PResult<'a, ast::Ident> {
         let mut replacement = vec![];
         let mut fixed_crate_name = false;
         // Accept `extern crate name-like-this` for better diagnostics
-        let dash = token::Token::BinOp(token::BinOpToken::Minus);
+        let dash = token::BinOp(token::BinOpToken::Minus);
         if self.token == dash {  // Do not include `-` as part of the expected tokens list
             while self.eat(&dash) {
                 fixed_crate_name = true;
@@ -6934,7 +6932,7 @@ fn parse_enum_def(&mut self, _generics: &ast::Generics) -> PResult<'a, EnumDef>
         let mut any_disr = vec![];
         while self.token != token::CloseDelim(token::Brace) {
             let variant_attrs = self.parse_outer_attributes()?;
-            let vlo = self.span;
+            let vlo = self.token.span;
 
             let struct_def;
             let mut disr_expr = None;
@@ -7011,10 +7009,10 @@ fn parse_item_enum(&mut self) -> PResult<'a, ItemInfo> {
     /// Parses a string as an ABI spec on an extern type or module. Consumes
     /// the `extern` keyword, if one is found.
     fn parse_opt_abi(&mut self) -> PResult<'a, Option<Abi>> {
-        match self.token {
+        match self.token.kind {
             token::Literal(token::Lit { kind: token::Str, symbol, suffix }) |
             token::Literal(token::Lit { kind: token::StrRaw(..), symbol, suffix }) => {
-                let sp = self.span;
+                let sp = self.token.span;
                 self.expect_no_suffix(sp, "an ABI spec", suffix);
                 self.bump();
                 match abi::lookup(&symbol.as_str()) {
@@ -7046,7 +7044,7 @@ fn is_static_global(&mut self) -> bool {
                 if token.is_keyword(kw::Move) {
                     return true;
                 }
-                match *token {
+                match token.kind {
                     token::BinOp(token::Or) | token::OrOr => true,
                     _ => false,
                 }
@@ -7109,7 +7107,7 @@ fn parse_item_implementation(
             Some(P(item))
         });
 
-        let lo = self.span;
+        let lo = self.token.span;
 
         let visibility = self.parse_visibility(false)?;
 
@@ -7219,7 +7217,7 @@ fn parse_item_implementation(
 
         // Parse `async unsafe? fn`.
         if self.check_keyword(kw::Async) {
-            let async_span = self.span;
+            let async_span = self.token.span;
             if self.is_keyword_ahead(1, &[kw::Fn])
                 || self.is_keyword_ahead(2, &[kw::Fn])
             {
@@ -7242,7 +7240,7 @@ fn parse_item_implementation(
                                         item_,
                                         visibility,
                                         maybe_append(attrs, extra_attrs));
-                if self.span.rust_2015() {
+                if self.token.span.rust_2015() {
                     self.diagnostic().struct_span_err_with_code(
                         async_span,
                         "`async fn` is not permitted in the 2015 edition",
@@ -7428,9 +7426,9 @@ fn parse_item_implementation(
             //
             //     pub   S {}
             //        ^^^ `sp` points here
-            let sp = self.prev_span.between(self.span);
-            let full_sp = self.prev_span.to(self.span);
-            let ident_sp = self.span;
+            let sp = self.prev_span.between(self.token.span);
+            let full_sp = self.prev_span.to(self.token.span);
+            let ident_sp = self.token.span;
             if self.look_ahead(1, |t| *t == token::OpenDelim(token::Brace)) {
                 // possible public struct definition where `struct` was forgotten
                 let ident = self.parse_ident().unwrap();
@@ -7527,7 +7525,7 @@ fn parse_item_implementation(
         maybe_whole!(self, NtForeignItem, |ni| ni);
 
         let attrs = self.parse_outer_attributes()?;
-        let lo = self.span;
+        let lo = self.token.span;
         let visibility = self.parse_visibility(false)?;
 
         // FOREIGN STATIC ITEM
@@ -7535,9 +7533,9 @@ fn parse_item_implementation(
         if self.check_keyword(kw::Static) || self.token.is_keyword(kw::Const) {
             if self.token.is_keyword(kw::Const) {
                 self.diagnostic()
-                    .struct_span_err(self.span, "extern items cannot be `const`")
+                    .struct_span_err(self.token.span, "extern items cannot be `const`")
                     .span_suggestion(
-                        self.span,
+                        self.token.span,
                         "try using a static value",
                         "static".to_owned(),
                         Applicability::MachineApplicable
@@ -7588,13 +7586,13 @@ fn parse_macro_use_or_failure(
         visibility: Visibility
     ) -> PResult<'a, Option<P<Item>>> {
         if macros_allowed && self.token.is_path_start() &&
-                !(self.is_async_fn() && self.span.rust_2015()) {
+                !(self.is_async_fn() && self.token.span.rust_2015()) {
             // MACRO INVOCATION ITEM
 
             let prev_span = self.prev_span;
             self.complain_if_pub_macro(&visibility.node, prev_span);
 
-            let mac_lo = self.span;
+            let mac_lo = self.token.span;
 
             // item macro.
             let pth = self.parse_path(PathStyle::Mod)?;
@@ -7639,9 +7637,9 @@ fn parse_assoc_macro_invoc(&mut self, item_kind: &str, vis: Option<&Visibility>,
                                at_end: &mut bool) -> PResult<'a, Option<Mac>>
     {
         if self.token.is_path_start() &&
-                !(self.is_async_fn() && self.span.rust_2015()) {
+                !(self.is_async_fn() && self.token.span.rust_2015()) {
             let prev_span = self.prev_span;
-            let lo = self.span;
+            let lo = self.token.span;
             let pth = self.parse_path(PathStyle::Mod)?;
 
             if pth.segments.len() == 1 {
@@ -7748,14 +7746,14 @@ fn is_import_coupler(&mut self) -> bool {
     ///            PATH [`as` IDENT]
     /// ```
     fn parse_use_tree(&mut self) -> PResult<'a, UseTree> {
-        let lo = self.span;
+        let lo = self.token.span;
 
         let mut prefix = ast::Path { segments: Vec::new(), span: lo.shrink_to_lo() };
         let kind = if self.check(&token::OpenDelim(token::Brace)) ||
                       self.check(&token::BinOp(token::Star)) ||
                       self.is_import_coupler() {
             // `use *;` or `use ::*;` or `use {...};` or `use ::{...};`
-            let mod_sep_ctxt = self.span.ctxt();
+            let mod_sep_ctxt = self.token.span.ctxt();
             if self.eat(&token::ModSep) {
                 prefix.segments.push(
                     PathSegment::path_root(lo.shrink_to_lo().with_ctxt(mod_sep_ctxt))
@@ -7808,17 +7806,17 @@ fn parse_rename(&mut self) -> PResult<'a, Option<Ident>> {
 
     /// Parses a source module as a crate. This is the main entry point for the parser.
     pub fn parse_crate_mod(&mut self) -> PResult<'a, Crate> {
-        let lo = self.span;
+        let lo = self.token.span;
         let krate = Ok(ast::Crate {
             attrs: self.parse_inner_attributes()?,
             module: self.parse_mod_items(&token::Eof, lo)?,
-            span: lo.to(self.span),
+            span: lo.to(self.token.span),
         });
         krate
     }
 
     pub fn parse_optional_str(&mut self) -> Option<(Symbol, ast::StrStyle, Option<ast::Name>)> {
-        let ret = match self.token {
+        let ret = match self.token.kind {
             token::Literal(token::Lit { kind: token::Str, symbol, suffix }) =>
                 (symbol, ast::StrStyle::Cooked, suffix),
             token::Literal(token::Lit { kind: token::StrRaw(n), symbol, suffix }) =>
@@ -7839,7 +7837,7 @@ pub fn parse_str(&mut self) -> PResult<'a, (Symbol, StrStyle)> {
             _ => {
                 let msg = "expected string literal";
                 let mut err = self.fatal(msg);
-                err.span_label(self.span, msg);
+                err.span_label(self.token.span, msg);
                 Err(err)
             }
         }
@@ -7869,7 +7867,7 @@ pub fn emit_unclosed_delims(unclosed_delims: &mut Vec<UnmatchedBrace>, handler:
     for unmatched in unclosed_delims.iter() {
         let mut err = handler.struct_span_err(unmatched.found_span, &format!(
             "incorrect close delimiter: `{}`",
-            pprust::token_to_string(&token::Token::CloseDelim(unmatched.found_delim)),
+            pprust::token_kind_to_string(&token::CloseDelim(unmatched.found_delim)),
         ));
         err.span_label(unmatched.found_span, "incorrect close delimiter");
         if let Some(sp) = unmatched.candidate_span {
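
The hunks above are largely mechanical: every read of `self.span` becomes `self.token.span`, and matches on the token value (`match self.token`, `match *t`) become matches on `self.token.kind` (`match t.kind`), since the current token now carries its own span. A rough, self-contained sketch of the resulting shape, using simplified stand-in types rather than the real libsyntax definitions:

#[derive(Clone, Copy, Debug, PartialEq)]
struct Span {
    lo: u32,
    hi: u32,
}

// Stand-in for the real token kinds; only what the example needs.
#[derive(Clone, Debug, PartialEq)]
enum TokenKind {
    Semi,
    Eof,
}

#[derive(Clone, Debug, PartialEq)]
struct Token {
    kind: TokenKind,
    span: Span,
}

// Hypothetical, stripped-down parser; the real one holds many more fields.
struct Parser {
    token: Token, // the current token, with its span attached
}

impl Parser {
    // Diagnostics take their span from the current token itself.
    fn current_span(&self) -> Span {
        self.token.span
    }

    // Dispatch is on `self.token.kind` rather than on the token as a whole.
    fn at_semi(&self) -> bool {
        match self.token.kind {
            TokenKind::Semi => true,
            _ => false,
        }
    }
}

fn main() {
    let p = Parser {
        token: Token { kind: TokenKind::Semi, span: Span { lo: 10, hi: 11 } },
    };
    assert!(p.at_semi());
    assert_eq!(p.current_span(), Span { lo: 10, hi: 11 });

    let eof = Parser {
        token: Token { kind: TokenKind::Eof, span: Span { lo: 11, hi: 11 } },
    };
    assert!(!eof.at_semi());
}

`Parser`, `current_span` and `at_semi` are illustrative names only; in the diff the same access pattern appears inline in methods such as `parse_prefix_expr` and `parse_block`.
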
index 47185df8d616563a0cf23bf24372cb5ad32d9a71..cc34883e2e8151fde9b682c6635c59995dae2f46 100644 (file)
@@ -2,18 +2,17 @@
 pub use Nonterminal::*;
 pub use DelimToken::*;
 pub use LitKind::*;
-pub use Token::*;
+pub use TokenKind::*;
 
 use crate::ast::{self};
-use crate::parse::ParseSess;
+use crate::parse::{parse_stream_from_source_str, ParseSess};
 use crate::print::pprust;
 use crate::ptr::P;
 use crate::symbol::kw;
-use crate::syntax::parse::parse_stream_from_source_str;
 use crate::tokenstream::{self, DelimSpan, TokenStream, TokenTree};
 
-use syntax_pos::symbol::{self, Symbol};
-use syntax_pos::{self, Span, FileName};
+use syntax_pos::symbol::Symbol;
+use syntax_pos::{self, Span, FileName, DUMMY_SP};
 use log::info;
 
 use std::fmt;
@@ -117,8 +116,8 @@ pub fn new(kind: LitKind, symbol: Symbol, suffix: Option<Symbol>) -> Lit {
     }
 }
 
-pub(crate) fn ident_can_begin_expr(ident: ast::Ident, is_raw: bool) -> bool {
-    let ident_token: Token = Ident(ident, is_raw);
+pub(crate) fn ident_can_begin_expr(name: ast::Name, span: Span, is_raw: bool) -> bool {
+    let ident_token = Token::new(Ident(name, is_raw), span);
 
     !ident_token.is_reserved_ident() ||
     ident_token.is_path_segment_keyword() ||
@@ -145,11 +144,11 @@ pub(crate) fn ident_can_begin_expr(ident: ast::Ident, is_raw: bool) -> bool {
         kw::While,
         kw::Yield,
         kw::Static,
-    ].contains(&ident.name)
+    ].contains(&name)
 }
 
-fn ident_can_begin_type(ident: ast::Ident, is_raw: bool) -> bool {
-    let ident_token: Token = Ident(ident, is_raw);
+fn ident_can_begin_type(name: ast::Name, span: Span, is_raw: bool) -> bool {
+    let ident_token = Token::new(Ident(name, is_raw), span);
 
     !ident_token.is_reserved_ident() ||
     ident_token.is_path_segment_keyword() ||
@@ -162,11 +161,11 @@ fn ident_can_begin_type(ident: ast::Ident, is_raw: bool) -> bool {
         kw::Extern,
         kw::Typeof,
         kw::Dyn,
-    ].contains(&ident.name)
+    ].contains(&name)
 }
 
-#[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Debug)]
-pub enum Token {
+#[derive(Clone, PartialEq, RustcEncodable, RustcDecodable, Debug)]
+pub enum TokenKind {
     /* Expression-operator symbols. */
     Eq,
     Lt,
@@ -209,8 +208,8 @@ pub enum Token {
     Literal(Lit),
 
     /* Name components */
-    Ident(ast::Ident, /* is_raw */ bool),
-    Lifetime(ast::Ident),
+    Ident(ast::Name, /* is_raw */ bool),
+    Lifetime(ast::Name),
 
     Interpolated(Lrc<Nonterminal>),
 
@@ -231,18 +230,63 @@ pub enum Token {
     Eof,
 }
 
-// `Token` is used a lot. Make sure it doesn't unintentionally get bigger.
+// `TokenKind` is used a lot. Make sure it doesn't unintentionally get bigger.
 #[cfg(target_arch = "x86_64")]
-static_assert_size!(Token, 16);
+static_assert_size!(TokenKind, 16);
+
+#[derive(Clone, PartialEq, RustcEncodable, RustcDecodable, Debug)]
+pub struct Token {
+    pub kind: TokenKind,
+    pub span: Span,
+}
+
+impl TokenKind {
+    pub fn lit(kind: LitKind, symbol: Symbol, suffix: Option<Symbol>) -> TokenKind {
+        Literal(Lit::new(kind, symbol, suffix))
+    }
+
+    /// Returns tokens that are likely to be typed accidentally instead of the current token.
+    /// Enables better error recovery when the wrong token is found.
+    crate fn similar_tokens(&self) -> Option<Vec<TokenKind>> {
+        match *self {
+            Comma => Some(vec![Dot, Lt, Semi]),
+            Semi => Some(vec![Colon, Comma]),
+            _ => None
+        }
+    }
+}
 
 impl Token {
+    crate fn new(kind: TokenKind, span: Span) -> Self {
+        Token { kind, span }
+    }
+
+    /// Some token that will be thrown away later.
+    crate fn dummy() -> Self {
+        Token::new(TokenKind::Whitespace, DUMMY_SP)
+    }
+
     /// Recovers a `Token` from an `ast::Ident`. This creates a raw identifier if necessary.
-    pub fn from_ast_ident(ident: ast::Ident) -> Token {
-        Ident(ident, ident.is_raw_guess())
+    crate fn from_ast_ident(ident: ast::Ident) -> Self {
+        Token::new(Ident(ident.name, ident.is_raw_guess()), ident.span)
+    }
+
+    /// Return this token by value and leave a dummy token in its place.
+    crate fn take(&mut self) -> Self {
+        mem::replace(self, Token::dummy())
+    }
+
+    crate fn is_op(&self) -> bool {
+        match self.kind {
+            OpenDelim(..) | CloseDelim(..) | Literal(..) | DocComment(..) |
+            Ident(..) | Lifetime(..) | Interpolated(..) |
+            Whitespace | Comment | Shebang(..) | Eof => false,
+            _ => true,
+        }
     }
 
     crate fn is_like_plus(&self) -> bool {
-        match *self {
+        match self.kind {
             BinOp(Plus) | BinOpEq(Plus) => true,
             _ => false,
         }
@@ -250,9 +294,9 @@ pub fn from_ast_ident(ident: ast::Ident) -> Token {
 
     /// Returns `true` if the token can appear at the start of an expression.
     crate fn can_begin_expr(&self) -> bool {
-        match *self {
-            Ident(ident, is_raw)              =>
-                ident_can_begin_expr(ident, is_raw), // value name or keyword
+        match self.kind {
+            Ident(name, is_raw)              =>
+                ident_can_begin_expr(name, self.span, is_raw), // value name or keyword
             OpenDelim(..)                     | // tuple, array or block
             Literal(..)                       | // literal
             Not                               | // operator not
@@ -282,9 +326,9 @@ pub fn from_ast_ident(ident: ast::Ident) -> Token {
 
     /// Returns `true` if the token can appear at the start of a type.
     crate fn can_begin_type(&self) -> bool {
-        match *self {
-            Ident(ident, is_raw)        =>
-                ident_can_begin_type(ident, is_raw), // type name or keyword
+        match self.kind {
+            Ident(name, is_raw)        =>
+                ident_can_begin_type(name, self.span, is_raw), // type name or keyword
             OpenDelim(Paren)            | // tuple
             OpenDelim(Bracket)          | // array
             Not                         | // never
@@ -304,8 +348,8 @@ pub fn from_ast_ident(ident: ast::Ident) -> Token {
     }
 
     /// Returns `true` if the token can appear at the start of a const param.
-    pub fn can_begin_const_arg(&self) -> bool {
-        match self {
+    crate fn can_begin_const_arg(&self) -> bool {
+        match self.kind {
             OpenDelim(Brace) => true,
             Interpolated(ref nt) => match **nt {
                 NtExpr(..) => true,
@@ -323,20 +367,16 @@ pub fn can_begin_const_arg(&self) -> bool {
         self == &Question || self == &OpenDelim(Paren)
     }
 
-    pub fn lit(kind: LitKind, symbol: Symbol, suffix: Option<Symbol>) -> Token {
-        Literal(Lit::new(kind, symbol, suffix))
-    }
-
     /// Returns `true` if the token is any literal
     crate fn is_lit(&self) -> bool {
-        match *self {
+        match self.kind {
             Literal(..) => true,
             _           => false,
         }
     }
 
     crate fn expect_lit(&self) -> Lit {
-        match *self {
+        match self.kind {
             Literal(lit) => lit,
             _ => panic!("`expect_lit` called on non-literal"),
         }
@@ -345,11 +385,11 @@ pub fn lit(kind: LitKind, symbol: Symbol, suffix: Option<Symbol>) -> Token {
     /// Returns `true` if the token is any literal, a minus (which can prefix a literal,
     /// for example '-42'), or one of the boolean idents.
     crate fn can_begin_literal_or_bool(&self) -> bool {
-        match *self {
+        match self.kind {
             Literal(..)  => true,
             BinOp(Minus) => true,
-            Ident(ident, false) if ident.name == kw::True => true,
-            Ident(ident, false) if ident.name == kw::False => true,
+            Ident(name, false) if name == kw::True => true,
+            Ident(name, false) if name == kw::False => true,
             Interpolated(ref nt) => match **nt {
                 NtLiteral(..) => true,
                 _             => false,
@@ -360,8 +400,8 @@ pub fn lit(kind: LitKind, symbol: Symbol, suffix: Option<Symbol>) -> Token {
 
     /// Returns an identifier if this token is an identifier.
     pub fn ident(&self) -> Option<(ast::Ident, /* is_raw */ bool)> {
-        match *self {
-            Ident(ident, is_raw) => Some((ident, is_raw)),
+        match self.kind {
+            Ident(name, is_raw) => Some((ast::Ident::new(name, self.span), is_raw)),
             Interpolated(ref nt) => match **nt {
                 NtIdent(ident, is_raw) => Some((ident, is_raw)),
                 _ => None,
@@ -369,10 +409,11 @@ pub fn ident(&self) -> Option<(ast::Ident, /* is_raw */ bool)> {
             _ => None,
         }
     }
+
     /// Returns a lifetime identifier if this token is a lifetime.
     pub fn lifetime(&self) -> Option<ast::Ident> {
-        match *self {
-            Lifetime(ident) => Some(ident),
+        match self.kind {
+            Lifetime(name) => Some(ast::Ident::new(name, self.span)),
             Interpolated(ref nt) => match **nt {
                 NtLifetime(ident) => Some(ident),
                 _ => None,
@@ -380,6 +421,7 @@ pub fn lifetime(&self) -> Option<ast::Ident> {
             _ => None,
         }
     }
+
     /// Returns `true` if the token is an identifier.
     pub fn is_ident(&self) -> bool {
         self.ident().is_some()
@@ -392,15 +434,12 @@ pub fn is_ident(&self) -> bool {
     /// Returns `true` if the token is an identifier whose name is the given
     /// symbol.
     crate fn is_ident_named(&self, name: Symbol) -> bool {
-        match self.ident() {
-            Some((ident, _)) => ident.name == name,
-            None => false
-        }
+        self.ident().map_or(false, |(ident, _)| ident.name == name)
     }
 
     /// Returns `true` if the token is an interpolated path.
     fn is_path(&self) -> bool {
-        if let Interpolated(ref nt) = *self {
+        if let Interpolated(ref nt) = self.kind {
             if let NtPath(..) = **nt {
                 return true;
             }
@@ -425,10 +464,10 @@ fn is_path(&self) -> bool {
 
     /// Returns `true` if the token is a given keyword, `kw`.
     pub fn is_keyword(&self, kw: Symbol) -> bool {
-        self.ident().map(|(ident, is_raw)| ident.name == kw && !is_raw).unwrap_or(false)
+        self.ident().map(|(id, is_raw)| id.name == kw && !is_raw).unwrap_or(false)
     }
 
-    pub fn is_path_segment_keyword(&self) -> bool {
+    crate fn is_path_segment_keyword(&self) -> bool {
         match self.ident() {
             Some((id, false)) => id.is_path_segment_keyword(),
             _ => false,
@@ -437,7 +476,7 @@ pub fn is_path_segment_keyword(&self) -> bool {
 
     // Returns true for reserved identifiers used internally for elided lifetimes,
     // unnamed method parameters, crate root module, error recovery etc.
-    pub fn is_special_ident(&self) -> bool {
+    crate fn is_special_ident(&self) -> bool {
         match self.ident() {
             Some((id, false)) => id.is_special(),
             _ => false,
@@ -469,58 +508,52 @@ pub fn is_reserved_ident(&self) -> bool {
     }
 
     crate fn glue(self, joint: Token) -> Option<Token> {
-        Some(match self {
-            Eq => match joint {
+        let kind = match self.kind {
+            Eq => match joint.kind {
                 Eq => EqEq,
                 Gt => FatArrow,
                 _ => return None,
             },
-            Lt => match joint {
+            Lt => match joint.kind {
                 Eq => Le,
                 Lt => BinOp(Shl),
                 Le => BinOpEq(Shl),
                 BinOp(Minus) => LArrow,
                 _ => return None,
             },
-            Gt => match joint {
+            Gt => match joint.kind {
                 Eq => Ge,
                 Gt => BinOp(Shr),
                 Ge => BinOpEq(Shr),
                 _ => return None,
             },
-            Not => match joint {
+            Not => match joint.kind {
                 Eq => Ne,
                 _ => return None,
             },
-            BinOp(op) => match joint {
+            BinOp(op) => match joint.kind {
                 Eq => BinOpEq(op),
                 BinOp(And) if op == And => AndAnd,
                 BinOp(Or) if op == Or => OrOr,
                 Gt if op == Minus => RArrow,
                 _ => return None,
             },
-            Dot => match joint {
+            Dot => match joint.kind {
                 Dot => DotDot,
                 DotDot => DotDotDot,
                 _ => return None,
             },
-            DotDot => match joint {
+            DotDot => match joint.kind {
                 Dot => DotDotDot,
                 Eq => DotDotEq,
                 _ => return None,
             },
-            Colon => match joint {
+            Colon => match joint.kind {
                 Colon => ModSep,
                 _ => return None,
             },
-            SingleQuote => match joint {
-                Ident(ident, false) => {
-                    let name = Symbol::intern(&format!("'{}", ident));
-                    Lifetime(symbol::Ident {
-                        name,
-                        span: ident.span,
-                    })
-                }
+            SingleQuote => match joint.kind {
+                Ident(name, false) => Lifetime(Symbol::intern(&format!("'{}", name))),
                 _ => return None,
             },
 
@@ -529,26 +562,18 @@ pub fn is_reserved_ident(&self) -> bool {
             Question | OpenDelim(..) | CloseDelim(..) |
             Literal(..) | Ident(..) | Lifetime(..) | Interpolated(..) | DocComment(..) |
             Whitespace | Comment | Shebang(..) | Eof => return None,
-        })
-    }
+        };
 
-    /// Returns tokens that are likely to be typed accidentally instead of the current token.
-    /// Enables better error recovery when the wrong token is found.
-    crate fn similar_tokens(&self) -> Option<Vec<Token>> {
-        match *self {
-            Comma => Some(vec![Dot, Lt, Semi]),
-            Semi => Some(vec![Colon, Comma]),
-            _ => None
-        }
+        Some(Token::new(kind, self.span.to(joint.span)))
     }
 
     // See comments in `Nonterminal::to_tokenstream` for why we care about
     // *probably* equal here rather than actual equality
     crate fn probably_equal_for_proc_macro(&self, other: &Token) -> bool {
-        if mem::discriminant(self) != mem::discriminant(other) {
+        if mem::discriminant(&self.kind) != mem::discriminant(&other.kind) {
             return false
         }
-        match (self, other) {
+        match (&self.kind, &other.kind) {
             (&Eq, &Eq) |
             (&Lt, &Lt) |
             (&Le, &Le) |
@@ -590,10 +615,10 @@ pub fn is_reserved_ident(&self) -> bool {
 
             (&Literal(a), &Literal(b)) => a == b,
 
-            (&Lifetime(a), &Lifetime(b)) => a.name == b.name,
-            (&Ident(a, b), &Ident(c, d)) => b == d && (a.name == c.name ||
-                                                       a.name == kw::DollarCrate ||
-                                                       c.name == kw::DollarCrate),
+            (&Lifetime(a), &Lifetime(b)) => a == b,
+            (&Ident(a, b), &Ident(c, d)) => b == d && (a == c ||
+                                                       a == kw::DollarCrate ||
+                                                       c == kw::DollarCrate),
 
             (&Interpolated(_), &Interpolated(_)) => false,
 
@@ -602,6 +627,12 @@ pub fn is_reserved_ident(&self) -> bool {
     }
 }
 
+impl PartialEq<TokenKind> for Token {
+    fn eq(&self, rhs: &TokenKind) -> bool {
+        self.kind == *rhs
+    }
+}
+
 #[derive(Clone, RustcEncodable, RustcDecodable)]
 /// For interpolation during macro expansion.
 pub enum Nonterminal {
@@ -691,12 +722,10 @@ pub fn to_tokenstream(&self, sess: &ParseSess, span: Span) -> TokenStream {
                 prepend_attrs(sess, &item.attrs, item.tokens.as_ref(), span)
             }
             Nonterminal::NtIdent(ident, is_raw) => {
-                let token = Token::Ident(ident, is_raw);
-                Some(TokenTree::Token(ident.span, token).into())
+                Some(TokenTree::token(Ident(ident.name, is_raw), ident.span).into())
             }
             Nonterminal::NtLifetime(ident) => {
-                let token = Token::Lifetime(ident);
-                Some(TokenTree::Token(ident.span, token).into())
+                Some(TokenTree::token(Lifetime(ident.name), ident.span).into())
             }
             Nonterminal::NtTT(ref tt) => {
                 Some(tt.clone().into())
@@ -743,15 +772,6 @@ pub fn to_tokenstream(&self, sess: &ParseSess, span: Span) -> TokenStream {
     }
 }
 
-crate fn is_op(tok: &Token) -> bool {
-    match *tok {
-        OpenDelim(..) | CloseDelim(..) | Literal(..) | DocComment(..) |
-        Ident(..) | Lifetime(..) | Interpolated(..) |
-        Whitespace | Comment | Shebang(..) | Eof => false,
-        _ => true,
-    }
-}
-
 fn prepend_attrs(sess: &ParseSess,
                  attrs: &[ast::Attribute],
                  tokens: Option<&tokenstream::TokenStream>,
@@ -781,8 +801,8 @@ fn prepend_attrs(sess: &ParseSess,
         // For simple paths, push the identifier directly
         if attr.path.segments.len() == 1 && attr.path.segments[0].args.is_none() {
             let ident = attr.path.segments[0].ident;
-            let token = Ident(ident, ident.as_str().starts_with("r#"));
-            brackets.push(tokenstream::TokenTree::Token(ident.span, token));
+            let token = Ident(ident.name, ident.as_str().starts_with("r#"));
+            brackets.push(tokenstream::TokenTree::token(token, ident.span));
 
         // ... and for more complicated paths, fall back to a reparse hack that
         // should eventually be removed.
@@ -796,7 +816,7 @@ fn prepend_attrs(sess: &ParseSess,
         // The spans we list here for `#` and for `[ ... ]` are both wrong in
         // that they encompass more than each token, but hopefully they are "good
         // enough" for now at least.
-        builder.push(tokenstream::TokenTree::Token(attr.span, Pound));
+        builder.push(tokenstream::TokenTree::token(Pound, attr.span));
         let delim_span = DelimSpan::from_single(attr.span);
         builder.push(tokenstream::TokenTree::Delimited(
             delim_span, DelimToken::Bracket, brackets.build().into()));
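
The heart of this file's change is splitting the old `Token` enum into a plain `TokenKind` enum plus a `Token` struct that carries the kind together with its `Span`, and adding `PartialEq<TokenKind>` so kind-only comparisons such as `parser.token == token::Semi` keep reading naturally. A minimal, self-contained sketch of that shape (a toy kind enum and a simplified `Span`; the real types have many more variants and derives):

    #[derive(Clone, Copy, PartialEq, Debug)]
    struct Span { lo: u32, hi: u32 }

    #[derive(Clone, Copy, PartialEq, Debug)]
    #[allow(dead_code)]
    enum TokenKind { Eq, Semi, Comma }

    #[derive(Clone, Copy, PartialEq, Debug)]
    struct Token { kind: TokenKind, span: Span }

    impl Token {
        fn new(kind: TokenKind, span: Span) -> Self { Token { kind, span } }
    }

    // Comparing a `Token` directly against a `TokenKind`, as the parser does all over.
    impl PartialEq<TokenKind> for Token {
        fn eq(&self, rhs: &TokenKind) -> bool { self.kind == *rhs }
    }

    fn main() {
        let tok = Token::new(TokenKind::Semi, Span { lo: 0, hi: 1 });
        assert!(tok == TokenKind::Semi);             // kind-only comparison
        assert_eq!(tok.span, Span { lo: 0, hi: 1 }); // the span now travels with the token
    }

Folding the span into the token is what lets the parser hunks elsewhere in this diff replace `self.span` with `self.token.span`.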
index 90ee549db017b5641c8b338a9dc9b765d424edbe..22cce67b5eeb7b5202fd5b4ae50fae1bfa832617 100644 (file)
@@ -1,4 +1,4 @@
-//! Utilities for validating  string and char literals and turning them into
+//! Utilities for validating string and char literals and turning them into
 //! values they represent.
 
 use std::str::Chars;
@@ -12,6 +12,7 @@ pub(crate) enum EscapeError {
     LoneSlash,
     InvalidEscape,
     BareCarriageReturn,
+    BareCarriageReturnInRawString,
     EscapeOnlyChar,
 
     TooShortHexEscape,
@@ -29,6 +30,7 @@ pub(crate) enum EscapeError {
 
     UnicodeEscapeInByte,
     NonAsciiCharInByte,
+    NonAsciiCharInByteString,
 }
 
 /// Takes the contents of a char literal (without quotes), and returns an
@@ -66,6 +68,30 @@ pub(crate) fn unescape_byte_str<F>(literal_text: &str, callback: &mut F)
     })
 }
 
+/// Takes the contents of a raw string literal (without quotes) and produces a
+/// sequence of characters or errors.
+/// NOTE: Raw strings do not perform any explicit character escaping; here we
+/// only translate CRLF to LF and produce errors on bare CR.
+pub(crate) fn unescape_raw_str<F>(literal_text: &str, callback: &mut F)
+where
+    F: FnMut(Range<usize>, Result<char, EscapeError>),
+{
+    unescape_raw_str_or_byte_str(literal_text, Mode::Str, callback)
+}
+
+/// Takes the contents of a raw byte string literal (without quotes) and produces a
+/// sequence of bytes or errors.
+/// NOTE: Raw strings do not perform any explicit character escaping; here we
+/// only translate CRLF to LF and produce errors on bare CR.
+pub(crate) fn unescape_raw_byte_str<F>(literal_text: &str, callback: &mut F)
+where
+    F: FnMut(Range<usize>, Result<u8, EscapeError>),
+{
+    unescape_raw_str_or_byte_str(literal_text, Mode::ByteStr, &mut |range, char| {
+        callback(range, char.map(byte_from_char))
+    })
+}
+
 #[derive(Debug, Clone, Copy)]
 pub(crate) enum Mode {
     Char,
@@ -254,9 +280,40 @@ fn skip_ascii_whitespace(chars: &mut Chars<'_>) {
     }
 }
 
+/// Takes the contents of a raw string or raw byte string literal (without quotes)
+/// and produces a sequence of characters or errors.
+/// NOTE: Raw strings do not perform any explicit character escaping; here we
+/// only translate CRLF to LF and produce errors on bare CR.
+fn unescape_raw_str_or_byte_str<F>(literal_text: &str, mode: Mode, callback: &mut F)
+where
+    F: FnMut(Range<usize>, Result<char, EscapeError>),
+{
+    assert!(mode.in_double_quotes());
+    let initial_len = literal_text.len();
+
+    let mut chars = literal_text.chars();
+    while let Some(curr) = chars.next() {
+        let start = initial_len - chars.as_str().len() - curr.len_utf8();
+
+        let result = match (curr, chars.clone().next()) {
+            ('\r', Some('\n')) => {
+                chars.next();
+                Ok('\n')
+            },
+            ('\r', _) => Err(EscapeError::BareCarriageReturnInRawString),
+            (c, _) if mode.is_bytes() && !c.is_ascii() =>
+                Err(EscapeError::NonAsciiCharInByteString),
+            (c, _) => Ok(c),
+        };
+        let end = initial_len - chars.as_str().len();
+
+        callback(start..end, result);
+    }
+}
+
 fn byte_from_char(c: char) -> u8 {
     let res = c as u32;
-    assert!(res <= u8::max_value() as u32, "guaranteed because of Mode::Byte");
+    assert!(res <= u8::max_value() as u32, "guaranteed because of Mode::Byte(Str)");
     res as u8
 }
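
The raw-string path added above deliberately performs no escape processing: it only folds CRLF into LF, reports bare CR, and, in byte-string mode, rejects non-ASCII characters. A self-contained sketch of that scanning loop, with a simplified error enum standing in for `EscapeError`:

    #[derive(Debug, PartialEq)]
    enum RawStrError { BareCr, NonAsciiByte }

    // Walks `literal_text`, reporting each resulting char (or error) together with
    // its byte range in the input, mirroring `unescape_raw_str_or_byte_str` above.
    fn scan_raw_str<F>(literal_text: &str, is_bytes: bool, callback: &mut F)
    where
        F: FnMut(std::ops::Range<usize>, Result<char, RawStrError>),
    {
        let initial_len = literal_text.len();
        let mut chars = literal_text.chars();
        while let Some(curr) = chars.next() {
            let start = initial_len - chars.as_str().len() - curr.len_utf8();
            let result = match (curr, chars.clone().next()) {
                ('\r', Some('\n')) => { chars.next(); Ok('\n') } // CRLF -> LF
                ('\r', _) => Err(RawStrError::BareCr),           // bare CR is an error
                (c, _) if is_bytes && !c.is_ascii() => Err(RawStrError::NonAsciiByte),
                (c, _) => Ok(c),
            };
            let end = initial_len - chars.as_str().len();
            callback(start..end, result);
        }
    }

    fn main() {
        let mut out = Vec::new();
        scan_raw_str("a\r\nb\rc", false, &mut |range, res| out.push((range, res)));
        assert_eq!(out[1], (1..3, Ok('\n')));                 // CRLF collapsed to LF
        assert_eq!(out[3], (4..5, Err(RawStrError::BareCr))); // bare CR reported with its range
    }

The byte ranges handed to the callback are what the error-reporting code in the next file turns into spans for diagnostics.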
 
index 22777c0884f47b786d1c3d55f8db6fd323fb410e..71b41161ad8c6fd00553ad0f21e526a818e04c42 100644 (file)
@@ -80,6 +80,11 @@ pub(crate) fn emit_unescape_error(
             };
             handler.span_err(span, msg);
         }
+        EscapeError::BareCarriageReturnInRawString => {
+            assert!(mode.in_double_quotes());
+            let msg = "bare CR not allowed in raw string";
+            handler.span_err(span, msg);
+        }
         EscapeError::InvalidEscape => {
             let (c, span) = last_char();
 
@@ -124,6 +129,11 @@ pub(crate) fn emit_unescape_error(
             handler.span_err(span, "byte constant must be ASCII. \
                                     Use a \\xHH escape for a non-ASCII byte")
         }
+        EscapeError::NonAsciiCharInByteString => {
+            assert!(mode.is_bytes());
+            let (_c, span) = last_char();
+            handler.span_err(span, "raw byte string must be ASCII")
+        }
         EscapeError::OutOfRangeHexEscape => {
             handler.span_err(span, "this form of character escape may only be used \
                                     with characters in the range [\\x00-\\x7f]")
index 57c01e9e3efea2aa8fe72497588151afce4c6214..4cbe590d44bfee9da94ac36e265f07c7bba801c6 100644 (file)
@@ -6,7 +6,7 @@
 use crate::util::parser::{self, AssocOp, Fixity};
 use crate::attr;
 use crate::source_map::{self, SourceMap, Spanned};
-use crate::parse::token::{self, BinOpToken, Nonterminal, Token};
+use crate::parse::token::{self, BinOpToken, Nonterminal, Token, TokenKind};
 use crate::parse::lexer::comments;
 use crate::parse::{self, ParseSess};
 use crate::print::pp::{self, Breaks};
@@ -168,9 +168,6 @@ pub fn literal_to_string(lit: token::Lit) -> String {
     let mut out = match kind {
         token::Byte          => format!("b'{}'", symbol),
         token::Char          => format!("'{}'", symbol),
-        token::Bool          |
-        token::Float         |
-        token::Integer       => symbol.to_string(),
         token::Str           => format!("\"{}\"", symbol),
         token::StrRaw(n)     => format!("r{delim}\"{string}\"{delim}",
                                         delim="#".repeat(n as usize),
@@ -179,7 +176,10 @@ pub fn literal_to_string(lit: token::Lit) -> String {
         token::ByteStrRaw(n) => format!("br{delim}\"{string}\"{delim}",
                                         delim="#".repeat(n as usize),
                                         string=symbol),
-        token::Err           => format!("'{}'", symbol),
+        token::Integer       |
+        token::Float         |
+        token::Bool          |
+        token::Err           => symbol.to_string(),
     };
 
     if let Some(suffix) = suffix {
@@ -189,7 +189,7 @@ pub fn literal_to_string(lit: token::Lit) -> String {
     out
 }
 
-pub fn token_to_string(tok: &Token) -> String {
+pub fn token_kind_to_string(tok: &TokenKind) -> String {
     match *tok {
         token::Eq                   => "=".to_string(),
         token::Lt                   => "<".to_string(),
@@ -250,6 +250,10 @@ pub fn token_to_string(tok: &Token) -> String {
     }
 }
 
+pub fn token_to_string(token: &Token) -> String {
+    token_kind_to_string(&token.kind)
+}
+
 pub fn nonterminal_to_string(nt: &Nonterminal) -> String {
     match *nt {
         token::NtExpr(ref e)        => expr_to_string(e),
@@ -724,21 +728,21 @@ fn print_meta_item(&mut self, item: &ast::MetaItem) -> io::Result<()> {
     /// expression arguments as expressions). It can be done! I think.
     fn print_tt(&mut self, tt: tokenstream::TokenTree) -> io::Result<()> {
         match tt {
-            TokenTree::Token(_, ref tk) => {
-                self.writer().word(token_to_string(tk))?;
-                match *tk {
-                    parse::token::DocComment(..) => {
+            TokenTree::Token(ref token) => {
+                self.writer().word(token_to_string(&token))?;
+                match token.kind {
+                    token::DocComment(..) => {
                         self.writer().hardbreak()
                     }
                     _ => Ok(())
                 }
             }
             TokenTree::Delimited(_, delim, tts) => {
-                self.writer().word(token_to_string(&token::OpenDelim(delim)))?;
+                self.writer().word(token_kind_to_string(&token::OpenDelim(delim)))?;
                 self.writer().space()?;
                 self.print_tts(tts)?;
                 self.writer().space()?;
-                self.writer().word(token_to_string(&token::CloseDelim(delim)))
+                self.writer().word(token_kind_to_string(&token::CloseDelim(delim)))
             },
         }
     }
index 397fb45513c15a08529608781d2c15b729c14e32..2daec9702798fd1ef5c97e2a2a5231929743f168 100644 (file)
@@ -16,7 +16,7 @@
 use crate::ext::base;
 use crate::ext::tt::{macro_parser, quoted};
 use crate::parse::Directory;
-use crate::parse::token::{self, DelimToken, Token};
+use crate::parse::token::{self, DelimToken, Token, TokenKind};
 use crate::print::pprust;
 
 use syntax_pos::{BytePos, Mark, Span, DUMMY_SP};
@@ -44,7 +44,7 @@
 #[derive(Debug, Clone, PartialEq, RustcEncodable, RustcDecodable)]
 pub enum TokenTree {
     /// A single token
-    Token(Span, token::Token),
+    Token(Token),
     /// A delimited sequence of token trees
     Delimited(DelimSpan, DelimToken, TokenStream),
 }
@@ -53,8 +53,7 @@ pub enum TokenTree {
 #[cfg(parallel_compiler)]
 fn _dummy()
 where
-    Span: Send + Sync,
-    token::Token: Send + Sync,
+    Token: Send + Sync,
     DelimSpan: Send + Sync,
     DelimToken: Send + Sync,
     TokenStream: Send + Sync,
@@ -86,12 +85,11 @@ pub fn parse(cx: &base::ExtCtxt<'_>, mtch: &[quoted::TokenTree], tts: TokenStrea
     /// Checks if this TokenTree is equal to the other, regardless of span information.
     pub fn eq_unspanned(&self, other: &TokenTree) -> bool {
         match (self, other) {
-            (&TokenTree::Token(_, ref tk), &TokenTree::Token(_, ref tk2)) => tk == tk2,
-            (&TokenTree::Delimited(_, delim, ref tts),
-             &TokenTree::Delimited(_, delim2, ref tts2)) => {
+            (TokenTree::Token(token), TokenTree::Token(token2)) => token.kind == token2.kind,
+            (TokenTree::Delimited(_, delim, tts), TokenTree::Delimited(_, delim2, tts2)) => {
                 delim == delim2 && tts.eq_unspanned(&tts2)
             }
-            (_, _) => false,
+            _ => false,
         }
     }
 
@@ -102,38 +100,29 @@ pub fn eq_unspanned(&self, other: &TokenTree) -> bool {
     // different method.
     pub fn probably_equal_for_proc_macro(&self, other: &TokenTree) -> bool {
         match (self, other) {
-            (&TokenTree::Token(_, ref tk), &TokenTree::Token(_, ref tk2)) => {
-                tk.probably_equal_for_proc_macro(tk2)
+            (TokenTree::Token(token), TokenTree::Token(token2)) => {
+                token.probably_equal_for_proc_macro(token2)
             }
-            (&TokenTree::Delimited(_, delim, ref tts),
-             &TokenTree::Delimited(_, delim2, ref tts2)) => {
+            (TokenTree::Delimited(_, delim, tts), TokenTree::Delimited(_, delim2, tts2)) => {
                 delim == delim2 && tts.probably_equal_for_proc_macro(&tts2)
             }
-            (_, _) => false,
+            _ => false,
         }
     }
 
     /// Retrieves the TokenTree's span.
     pub fn span(&self) -> Span {
-        match *self {
-            TokenTree::Token(sp, _) => sp,
+        match self {
+            TokenTree::Token(token) => token.span,
             TokenTree::Delimited(sp, ..) => sp.entire(),
         }
     }
 
     /// Modify the `TokenTree`'s span in-place.
     pub fn set_span(&mut self, span: Span) {
-        match *self {
-            TokenTree::Token(ref mut sp, _) => *sp = span,
-            TokenTree::Delimited(ref mut sp, ..) => *sp = DelimSpan::from_single(span),
-        }
-    }
-
-    /// Indicates if the stream is a token that is equal to the provided token.
-    pub fn eq_token(&self, t: Token) -> bool {
-        match *self {
-            TokenTree::Token(_, ref tk) => *tk == t,
-            _ => false,
+        match self {
+            TokenTree::Token(token) => token.span = span,
+            TokenTree::Delimited(dspan, ..) => *dspan = DelimSpan::from_single(span),
         }
     }
 
@@ -141,6 +130,10 @@ pub fn joint(self) -> TokenStream {
         TokenStream::new(vec![(self, Joint)])
     }
 
+    pub fn token(kind: TokenKind, span: Span) -> TokenTree {
+        TokenTree::Token(Token::new(kind, span))
+    }
+
     /// Returns the opening delimiter as a token tree.
     pub fn open_tt(span: Span, delim: DelimToken) -> TokenTree {
         let open_span = if span.is_dummy() {
@@ -148,7 +141,7 @@ pub fn open_tt(span: Span, delim: DelimToken) -> TokenTree {
         } else {
             span.with_hi(span.lo() + BytePos(delim.len() as u32))
         };
-        TokenTree::Token(open_span, token::OpenDelim(delim))
+        TokenTree::token(token::OpenDelim(delim), open_span)
     }
 
     /// Returns the closing delimiter as a token tree.
@@ -158,7 +151,7 @@ pub fn close_tt(span: Span, delim: DelimToken) -> TokenTree {
         } else {
             span.with_lo(span.hi() - BytePos(delim.len() as u32))
         };
-        TokenTree::Token(close_span, token::CloseDelim(delim))
+        TokenTree::token(token::CloseDelim(delim), close_span)
     }
 }
 
@@ -167,7 +160,7 @@ pub fn close_tt(span: Span, delim: DelimToken) -> TokenTree {
 /// A `TokenStream` is an abstract sequence of tokens, organized into `TokenTree`s.
 /// The goal is for procedural macros to work with `TokenStream`s and `TokenTree`s
 /// instead of a representation of the abstract syntax tree.
-/// Today's `TokenTree`s can still contain AST via `Token::Interpolated` for back-compat.
+/// Today's `TokenTree`s can still contain AST via `token::Interpolated` for back-compat.
 ///
 /// The use of `Option` is an optimization that avoids the need for an
 /// allocation when the stream is empty. However, it is not guaranteed that an
@@ -201,18 +194,18 @@ pub(crate) fn add_comma(&self) -> Option<(TokenStream, Span)> {
             while let Some((pos, ts)) = iter.next() {
                 if let Some((_, next)) = iter.peek() {
                     let sp = match (&ts, &next) {
-                        (_, (TokenTree::Token(_, token::Token::Comma), _)) => continue,
-                        ((TokenTree::Token(sp, token_left), NonJoint),
-                         (TokenTree::Token(_, token_right), _))
+                        (_, (TokenTree::Token(Token { kind: token::Comma, .. }), _)) => continue,
+                        ((TokenTree::Token(token_left), NonJoint),
+                         (TokenTree::Token(token_right), _))
                         if ((token_left.is_ident() && !token_left.is_reserved_ident())
                             || token_left.is_lit()) &&
                             ((token_right.is_ident() && !token_right.is_reserved_ident())
-                            || token_right.is_lit()) => *sp,
+                            || token_right.is_lit()) => token_left.span,
                         ((TokenTree::Delimited(sp, ..), NonJoint), _) => sp.entire(),
                         _ => continue,
                     };
                     let sp = sp.shrink_to_hi();
-                    let comma = (TokenTree::Token(sp, token::Comma), NonJoint);
+                    let comma = (TokenTree::token(token::Comma, sp), NonJoint);
                     suggestion = Some((pos, comma, sp));
                 }
             }
@@ -241,12 +234,6 @@ fn from(tree: TokenTree) -> TreeAndJoint {
     }
 }
 
-impl From<Token> for TokenStream {
-    fn from(token: Token) -> TokenStream {
-        TokenTree::Token(DUMMY_SP, token).into()
-    }
-}
-
 impl<T: Into<TokenStream>> iter::FromIterator<T> for TokenStream {
     fn from_iter<I: IntoIterator<Item = T>>(iter: I) -> Self {
         TokenStream::from_streams(iter.into_iter().map(Into::into).collect::<SmallVec<_>>())
@@ -349,22 +336,25 @@ pub fn probably_equal_for_proc_macro(&self, other: &TokenStream) -> bool {
         // streams, making a comparison between a token stream generated from an
         // AST and a token stream which was parsed into an AST more reliable.
         fn semantic_tree(tree: &TokenTree) -> bool {
-            match tree {
-                // The pretty printer tends to add trailing commas to
-                // everything, and in particular, after struct fields.
-                | TokenTree::Token(_, Token::Comma)
-                // The pretty printer emits `NoDelim` as whitespace.
-                | TokenTree::Token(_, Token::OpenDelim(DelimToken::NoDelim))
-                | TokenTree::Token(_, Token::CloseDelim(DelimToken::NoDelim))
-                // The pretty printer collapses many semicolons into one.
-                | TokenTree::Token(_, Token::Semi)
-                // The pretty printer collapses whitespace arbitrarily and can
-                // introduce whitespace from `NoDelim`.
-                | TokenTree::Token(_, Token::Whitespace)
-                // The pretty printer can turn `$crate` into `::crate_name`
-                | TokenTree::Token(_, Token::ModSep) => false,
-                _ => true
+            if let TokenTree::Token(token) = tree {
+                if let
+                    // The pretty printer tends to add trailing commas to
+                    // everything, and in particular, after struct fields.
+                    | token::Comma
+                    // The pretty printer emits `NoDelim` as whitespace.
+                    | token::OpenDelim(DelimToken::NoDelim)
+                    | token::CloseDelim(DelimToken::NoDelim)
+                    // The pretty printer collapses many semicolons into one.
+                    | token::Semi
+                    // The pretty printer collapses whitespace arbitrarily and can
+                    // introduce whitespace from `NoDelim`.
+                    | token::Whitespace
+                    // The pretty printer can turn `$crate` into `::crate_name`
+                    | token::ModSep = token.kind {
+                    return false;
+                }
             }
+            true
         }
 
         let mut t1 = self.trees().filter(semantic_tree);
@@ -430,13 +420,12 @@ pub fn new() -> TokenStreamBuilder {
     pub fn push<T: Into<TokenStream>>(&mut self, stream: T) {
         let stream = stream.into();
         let last_tree_if_joint = self.0.last().and_then(TokenStream::last_tree_if_joint);
-        if let Some(TokenTree::Token(last_span, last_tok)) = last_tree_if_joint {
-            if let Some((TokenTree::Token(span, tok), is_joint)) = stream.first_tree_and_joint() {
-                if let Some(glued_tok) = last_tok.glue(tok) {
+        if let Some(TokenTree::Token(last_token)) = last_tree_if_joint {
+            if let Some((TokenTree::Token(token), is_joint)) = stream.first_tree_and_joint() {
+                if let Some(glued_tok) = last_token.glue(token) {
                     let last_stream = self.0.pop().unwrap();
                     self.push_all_but_last_tree(&last_stream);
-                    let glued_span = last_span.to(span);
-                    let glued_tt = TokenTree::Token(glued_span, glued_tok);
+                    let glued_tt = TokenTree::Token(glued_tok);
                     let glued_tokenstream = TokenStream::new(vec![(glued_tt, is_joint)]);
                     self.0.push(glued_tokenstream);
                     self.push_all_but_first_tree(&stream);
@@ -578,9 +567,8 @@ pub fn apply_mark(self, mark: Mark) -> Self {
 #[cfg(test)]
 mod tests {
     use super::*;
-    use crate::syntax::ast::Ident;
+    use crate::ast::Name;
     use crate::with_default_globals;
-    use crate::parse::token::Token;
     use crate::util::parser_testing::string_to_stream;
     use syntax_pos::{Span, BytePos, NO_EXPANSION};
 
@@ -664,7 +652,7 @@ fn test_is_empty() {
         with_default_globals(|| {
             let test0: TokenStream = Vec::<TokenTree>::new().into_iter().collect();
             let test1: TokenStream =
-                TokenTree::Token(sp(0, 1), Token::Ident(Ident::from_str("a"), false)).into();
+                TokenTree::token(token::Ident(Name::intern("a"), false), sp(0, 1)).into();
             let test2 = string_to_ts("foo(bar::baz)");
 
             assert_eq!(test0.is_empty(), true);
@@ -677,9 +665,9 @@ fn test_is_empty() {
     fn test_dotdotdot() {
         with_default_globals(|| {
             let mut builder = TokenStreamBuilder::new();
-            builder.push(TokenTree::Token(sp(0, 1), Token::Dot).joint());
-            builder.push(TokenTree::Token(sp(1, 2), Token::Dot).joint());
-            builder.push(TokenTree::Token(sp(2, 3), Token::Dot));
+            builder.push(TokenTree::token(token::Dot, sp(0, 1)).joint());
+            builder.push(TokenTree::token(token::Dot, sp(1, 2)).joint());
+            builder.push(TokenTree::token(token::Dot, sp(2, 3)));
             let stream = builder.build();
             assert!(stream.eq_unspanned(&string_to_ts("...")));
             assert_eq!(stream.trees().count(), 1);
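
Two new helpers show up throughout this file: `TokenTree::token(kind, span)` builds a `TokenTree::Token` from its parts, and `TokenStreamBuilder::push` now glues whole `Token`s (kind plus span) instead of gluing kinds and stitching spans together by hand. A minimal sketch of the span-merging glue step, assuming a toy three-variant kind (the real `Token::glue` handles many more pairs):

    #[derive(Clone, Copy, PartialEq, Debug)]
    enum TokenKind { Dot, DotDot, DotDotDot }

    #[derive(Clone, Copy, PartialEq, Debug)]
    struct Span { lo: u32, hi: u32 }

    #[derive(Clone, Copy, PartialEq, Debug)]
    struct Token { kind: TokenKind, span: Span }

    impl Token {
        fn new(kind: TokenKind, span: Span) -> Self { Token { kind, span } }

        // Glue `self` to an adjacent joint token, merging both the kinds and the spans,
        // the way `Token::glue` now returns a whole `Token` rather than just a kind.
        fn glue(self, joint: Token) -> Option<Token> {
            use TokenKind::*;
            let kind = match (self.kind, joint.kind) {
                (Dot, Dot) => DotDot,
                (Dot, DotDot) | (DotDot, Dot) => DotDotDot,
                _ => return None,
            };
            Some(Token::new(kind, Span { lo: self.span.lo, hi: joint.span.hi }))
        }
    }

    fn main() {
        let a = Token::new(TokenKind::Dot, Span { lo: 0, hi: 1 });
        let b = Token::new(TokenKind::Dot, Span { lo: 1, hi: 2 });
        let c = Token::new(TokenKind::Dot, Span { lo: 2, hi: 3 });
        let glued = a.glue(b).and_then(|ab| ab.glue(c)).unwrap();
        assert_eq!(glued.kind, TokenKind::DotDotDot);
        assert_eq!(glued.span, Span { lo: 0, hi: 3 }); // the glued span covers all three dots
    }

This is roughly what the `test_dotdotdot` case above exercises through `TokenStreamBuilder`.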
index 7e306d59e35ce5363fe946b03c24111a87c7fb14..69dd96625cc020d694e9aaa039f51658237745ab 100644 (file)
@@ -1,4 +1,4 @@
-use crate::parse::token::{Token, BinOpToken};
+use crate::parse::token::{self, Token, BinOpToken};
 use crate::symbol::kw;
 use crate::ast::{self, BinOpKind};
 
@@ -71,32 +71,32 @@ impl AssocOp {
     /// Creates a new AssocOp from a token.
     pub fn from_token(t: &Token) -> Option<AssocOp> {
         use AssocOp::*;
-        match *t {
-            Token::BinOpEq(k) => Some(AssignOp(k)),
-            Token::Eq => Some(Assign),
-            Token::BinOp(BinOpToken::Star) => Some(Multiply),
-            Token::BinOp(BinOpToken::Slash) => Some(Divide),
-            Token::BinOp(BinOpToken::Percent) => Some(Modulus),
-            Token::BinOp(BinOpToken::Plus) => Some(Add),
-            Token::BinOp(BinOpToken::Minus) => Some(Subtract),
-            Token::BinOp(BinOpToken::Shl) => Some(ShiftLeft),
-            Token::BinOp(BinOpToken::Shr) => Some(ShiftRight),
-            Token::BinOp(BinOpToken::And) => Some(BitAnd),
-            Token::BinOp(BinOpToken::Caret) => Some(BitXor),
-            Token::BinOp(BinOpToken::Or) => Some(BitOr),
-            Token::Lt => Some(Less),
-            Token::Le => Some(LessEqual),
-            Token::Ge => Some(GreaterEqual),
-            Token::Gt => Some(Greater),
-            Token::EqEq => Some(Equal),
-            Token::Ne => Some(NotEqual),
-            Token::AndAnd => Some(LAnd),
-            Token::OrOr => Some(LOr),
-            Token::DotDot => Some(DotDot),
-            Token::DotDotEq => Some(DotDotEq),
+        match t.kind {
+            token::BinOpEq(k) => Some(AssignOp(k)),
+            token::Eq => Some(Assign),
+            token::BinOp(BinOpToken::Star) => Some(Multiply),
+            token::BinOp(BinOpToken::Slash) => Some(Divide),
+            token::BinOp(BinOpToken::Percent) => Some(Modulus),
+            token::BinOp(BinOpToken::Plus) => Some(Add),
+            token::BinOp(BinOpToken::Minus) => Some(Subtract),
+            token::BinOp(BinOpToken::Shl) => Some(ShiftLeft),
+            token::BinOp(BinOpToken::Shr) => Some(ShiftRight),
+            token::BinOp(BinOpToken::And) => Some(BitAnd),
+            token::BinOp(BinOpToken::Caret) => Some(BitXor),
+            token::BinOp(BinOpToken::Or) => Some(BitOr),
+            token::Lt => Some(Less),
+            token::Le => Some(LessEqual),
+            token::Ge => Some(GreaterEqual),
+            token::Gt => Some(Greater),
+            token::EqEq => Some(Equal),
+            token::Ne => Some(NotEqual),
+            token::AndAnd => Some(LAnd),
+            token::OrOr => Some(LOr),
+            token::DotDot => Some(DotDot),
+            token::DotDotEq => Some(DotDotEq),
             // DotDotDot is no longer supported, but we need some way to display the error
-            Token::DotDotDot => Some(DotDotEq),
-            Token::Colon => Some(Colon),
+            token::DotDotDot => Some(DotDotEq),
+            token::Colon => Some(Colon),
             _ if t.is_keyword(kw::As) => Some(As),
             _ => None
         }
index 334709b1521972d300eb69fa1cd6a8bfbdb78a82..24b0c37247191efb3ae0b19bf7bfd70e7b186764 100644 (file)
@@ -714,7 +714,6 @@ pub fn walk_expr<'a, V: Visitor<'a>>(visitor: &mut V, expression: &'a Expr) {
         ExprKind::AddrOf(_, ref subexpression) | ExprKind::Unary(_, ref subexpression) => {
             visitor.visit_expr(subexpression)
         }
-        ExprKind::Lit(_) => {}
         ExprKind::Cast(ref subexpression, ref typ) | ExprKind::Type(ref subexpression, ref typ) => {
             visitor.visit_expr(subexpression);
             visitor.visit_ty(typ)
@@ -826,7 +825,7 @@ pub fn walk_expr<'a, V: Visitor<'a>>(visitor: &mut V, expression: &'a Expr) {
         ExprKind::TryBlock(ref body) => {
             visitor.visit_block(body)
         }
-        ExprKind::Err => {}
+        ExprKind::Lit(_) | ExprKind::Err => {}
     }
 
     visitor.visit_expr_post(expression)
@@ -855,7 +854,7 @@ pub fn walk_attribute<'a, V: Visitor<'a>>(visitor: &mut V, attr: &'a Attribute)
 
 pub fn walk_tt<'a, V: Visitor<'a>>(visitor: &mut V, tt: TokenTree) {
     match tt {
-        TokenTree::Token(_, tok) => visitor.visit_token(tok),
+        TokenTree::Token(token) => visitor.visit_token(token),
         TokenTree::Delimited(_, _, tts) => visitor.visit_tts(tts),
     }
 }
index 4d7083c1a790b955abbef10861be3c7f156d0c80..b015815ac9c1ea2b79b1554e3dc36c07dad40346 100644 (file)
@@ -9,7 +9,8 @@
 use syntax::ast;
 use syntax::ext::base::{self, *};
 use syntax::feature_gate;
-use syntax::parse::{self, token};
+use syntax::parse;
+use syntax::parse::token::{self, Token};
 use syntax::ptr::P;
 use syntax::symbol::{kw, sym, Symbol};
 use syntax::ast::AsmDialect;
@@ -86,8 +87,8 @@ fn parse_inline_asm<'a>(
     let first_colon = tts.iter()
         .position(|tt| {
             match *tt {
-                tokenstream::TokenTree::Token(_, token::Colon) |
-                tokenstream::TokenTree::Token(_, token::ModSep) => true,
+                tokenstream::TokenTree::Token(Token { kind: token::Colon, .. }) |
+                tokenstream::TokenTree::Token(Token { kind: token::ModSep, .. }) => true,
                 _ => false,
             }
         })
@@ -259,7 +260,7 @@ fn parse_inline_asm<'a>(
         loop {
             // MOD_SEP is a double colon '::' without space in between.
             // When encountered, the state must be advanced twice.
-            match (&p.token, state.next(), state.next().next()) {
+            match (&p.token.kind, state.next(), state.next().next()) {
                 (&token::Colon, StateNone, _) |
                 (&token::ModSep, _, StateNone) => {
                     p.bump();
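
With the span folded into `Token`, code that used to match the tuple variant `TokenTree::Token(_, token::Colon)` now matches a struct pattern and ignores the span with `..`, as in the hunk above. A small standalone illustration of that pattern (toy types, not the rustc ones):

    #[derive(Debug)]
    #[allow(dead_code)]
    enum TokenKind { Colon, ModSep, Ident(&'static str) }

    #[derive(Debug)]
    struct Token { kind: TokenKind, span: (u32, u32) }

    #[allow(dead_code)]
    enum TokenTree { Token(Token), Delimited(Vec<TokenTree>) }

    // The `..` in the struct pattern skips the `span` field; only the kind matters here.
    fn is_colon_or_modsep(tt: &TokenTree) -> bool {
        match tt {
            TokenTree::Token(Token { kind: TokenKind::Colon, .. })
            | TokenTree::Token(Token { kind: TokenKind::ModSep, .. }) => true,
            _ => false,
        }
    }

    fn main() {
        let colon = TokenTree::Token(Token { kind: TokenKind::Colon, span: (0, 1) });
        let ident = TokenTree::Token(Token { kind: TokenKind::Ident("x"), span: (0, 1) });
        assert!(is_colon_or_modsep(&colon));
        assert!(!is_colon_or_modsep(&ident));
    }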
index 13342c8e28e2ff3bf4d7e22c9ed0b6c2c6bffeeb..10d323ffb89f55b06cd4175912254d283421e048 100644 (file)
@@ -4,7 +4,7 @@
 use syntax::source_map::Spanned;
 use syntax::ext::base::*;
 use syntax::ext::build::AstBuilder;
-use syntax::parse::token::{self, Token};
+use syntax::parse::token::{self, TokenKind};
 use syntax::parse::parser::Parser;
 use syntax::print::pprust;
 use syntax::ptr::P;
@@ -29,12 +29,12 @@ pub fn expand_assert<'cx>(
     let panic_call = Mac_ {
         path: Path::from_ident(Ident::new(sym::panic, sp)),
         tts: custom_message.unwrap_or_else(|| {
-            TokenStream::from(TokenTree::Token(
-                DUMMY_SP,
-                Token::lit(token::Str, Symbol::intern(&format!(
+            TokenStream::from(TokenTree::token(
+                TokenKind::lit(token::Str, Symbol::intern(&format!(
                     "assertion failed: {}",
                     pprust::expr_to_string(&cond_expr).escape_debug()
                 )), None),
+                DUMMY_SP,
             ))
         }).into(),
         delim: MacDelimiter::Parenthesis,
@@ -85,7 +85,7 @@ fn parse_assert<'a>(
     if parser.token == token::Semi {
         let mut err = cx.struct_span_warn(sp, "macro requires an expression as an argument");
         err.span_suggestion(
-            parser.span,
+            parser.token.span,
             "try removing semicolon",
             String::new(),
             Applicability::MaybeIncorrect
@@ -103,8 +103,9 @@ fn parse_assert<'a>(
     //
     // Parse this as an actual message, and suggest inserting a comma. Eventually, this should be
     // turned into an error.
-    let custom_message = if let token::Literal(token::Lit { kind: token::Str, .. }) = parser.token {
-        let mut err = cx.struct_span_warn(parser.span, "unexpected string literal");
+    let custom_message = if let token::Literal(token::Lit { kind: token::Str, .. })
+                                = parser.token.kind {
+        let mut err = cx.struct_span_warn(parser.token.span, "unexpected string literal");
         let comma_span = cx.source_map().next_point(parser.prev_span);
         err.span_suggestion_short(
             comma_span,
index 090d730289d26b1655eabeaeba5ca76156fa15fd..e952515bfe0ebf978b68d3fb0f40ec6a14cd65e4 100644 (file)
 use syntax::parse::token;
 use syntax_pos::Span;
 
-pub fn expand_cfg<'cx>(cx: &mut ExtCtxt<'_>,
-                       sp: Span,
-                       tts: &[tokenstream::TokenTree])
-                       -> Box<dyn base::MacResult + 'static> {
+pub fn expand_cfg(
+    cx: &mut ExtCtxt<'_>,
+    sp: Span,
+    tts: &[tokenstream::TokenTree],
+) -> Box<dyn base::MacResult + 'static> {
     let sp = sp.apply_mark(cx.current_expansion.mark);
 
     match parse_cfg(cx, sp, tts) {
index 230b00c0f8f552767f0c061c10354cb7dc834e96..68d5178372eba7b774b61dca11a1faa607937a4f 100644 (file)
@@ -22,7 +22,6 @@ pub fn expand_syntax_ext(
         match e.node {
             ast::ExprKind::Lit(ref lit) => match lit.node {
                 ast::LitKind::Str(ref s, _)
-                | ast::LitKind::Err(ref s)
                 | ast::LitKind::Float(ref s, _)
                 | ast::LitKind::FloatUnsuffixed(ref s) => {
                     accumulator.push_str(&s.as_str());
@@ -41,6 +40,9 @@ pub fn expand_syntax_ext(
                 ast::LitKind::Byte(..) | ast::LitKind::ByteStr(..) => {
                     cx.span_err(e.span, "cannot concatenate a byte string literal");
                 }
+                ast::LitKind::Err(_) => {
+                    has_errors = true;
+                }
             },
             ast::ExprKind::Err => {
                 has_errors = true;
index 77c53f402cc9f03b7f21bdea386aa457ac368d2d..8f061abc77b8d0f91eae10c5e79af82dd397c7a2 100644 (file)
@@ -3,7 +3,7 @@
 use syntax::ast;
 use syntax::ext::base::{self, *};
 use syntax::feature_gate;
-use syntax::parse::token;
+use syntax::parse::token::{self, Token};
 use syntax::ptr::P;
 use syntax_pos::Span;
 use syntax_pos::symbol::{Symbol, sym};
@@ -30,7 +30,7 @@ pub fn expand_syntax_ext<'cx>(cx: &'cx mut ExtCtxt<'_>,
     for (i, e) in tts.iter().enumerate() {
         if i & 1 == 1 {
             match *e {
-                TokenTree::Token(_, token::Comma) => {}
+                TokenTree::Token(Token { kind: token::Comma, .. }) => {}
                 _ => {
                     cx.span_err(sp, "concat_idents! expecting comma.");
                     return DummyResult::any(sp);
@@ -38,8 +38,8 @@ pub fn expand_syntax_ext<'cx>(cx: &'cx mut ExtCtxt<'_>,
             }
         } else {
             match *e {
-                TokenTree::Token(_, token::Ident(ident, _)) =>
-                    res_str.push_str(&ident.as_str()),
+                TokenTree::Token(Token { kind: token::Ident(name, _), .. }) =>
+                    res_str.push_str(&name.as_str()),
                 _ => {
                     cx.span_err(sp, "concat_idents! requires ident args.");
                     return DummyResult::any(sp);
index b25a9e4c50fbe9e87735ede37ae3a3b03e094e3c..844865d57c7ad04102534e86736abd736c2fabe2 100644 (file)
@@ -82,8 +82,8 @@ pub fn cs_cmp(cx: &mut ExtCtxt<'_>, span: Span, substr: &Substructure<'_>) -> P<
         // }
 
         let new = {
-            let other_f = match (other_fs.len(), other_fs.get(0)) {
-                (1, Some(o_f)) => o_f,
+            let other_f = match other_fs {
+                [o_f] => o_f,
                 _ => cx.span_bug(span, "not exactly 2 arguments in `derive(Ord)`"),
             };
 
index 6172f27261ecf730971d8d11994208d7dc4cf84c..732bb234389a0aedd1ccf474f36d27ec3621d8b2 100644 (file)
@@ -25,8 +25,8 @@ fn cs_op(cx: &mut ExtCtxt<'_>,
              -> P<Expr>
     {
         let op = |cx: &mut ExtCtxt<'_>, span: Span, self_f: P<Expr>, other_fs: &[P<Expr>]| {
-            let other_f = match (other_fs.len(), other_fs.get(0)) {
-                (1, Some(o_f)) => o_f,
+            let other_f = match other_fs {
+                [o_f] => o_f,
                 _ => cx.span_bug(span, "not exactly 2 arguments in `derive(PartialEq)`"),
             };
 
index 3980741f252dd947e92e15fc7644405728c97bdf..a30a7d78222f47202e348f5a53b6ee694e67c242 100644 (file)
@@ -143,8 +143,8 @@ pub fn cs_partial_cmp(cx: &mut ExtCtxt<'_>, span: Span, substr: &Substructure<'_
                 // }
 
                 let new = {
-                    let other_f = match (other_fs.len(), other_fs.get(0)) {
-                        (1, Some(o_f)) => o_f,
+                    let other_f = match other_fs {
+                        [o_f] => o_f,
                                 _ => {
                                     cx.span_bug(span,
                                         "not exactly 2 arguments in `derive(PartialOrd)`")
@@ -193,8 +193,8 @@ fn cs_op(less: bool,
     };
 
     let par_cmp = |cx: &mut ExtCtxt<'_>, span, self_f: P<Expr>, other_fs: &[P<Expr>], default| {
-        let other_f = match (other_fs.len(), other_fs.get(0)) {
-            (1, Some(o_f)) => o_f,
+        let other_f = match other_fs {
+            [o_f] => o_f,
             _ => cx.span_bug(span, "not exactly 2 arguments in `derive(PartialOrd)`"),
         };
 
index 975d96951dc5571d54d8090ca80b1362e5ecb44d..98465d75e4680e9d31beb6665f871e22e0ade22b 100644 (file)
@@ -8,7 +8,7 @@
 use syntax::source_map::Span;
 use syntax::ext::base::*;
 use syntax::parse;
-use syntax::parse::token::{self, Token};
+use syntax::parse::token;
 use syntax::tokenstream;
 use syntax::visit::Visitor;
 use syntax_pos::DUMMY_SP;
@@ -68,8 +68,8 @@ fn expand(&self,
         // Mark attributes as known, and used.
         MarkAttrs(&self.attrs).visit_item(&item);
 
-        let token = Token::Interpolated(Lrc::new(token::NtItem(item)));
-        let input = tokenstream::TokenTree::Token(DUMMY_SP, token).into();
+        let token = token::Interpolated(Lrc::new(token::NtItem(item)));
+        let input = tokenstream::TokenTree::token(token, DUMMY_SP).into();
 
         let server = proc_macro_server::Rustc::new(ecx);
         let stream = match self.client.run(&EXEC_STRATEGY, server, input) {
index e7f99d4578226382ea4c67f501c4488433bde4ff..7ad04aebf6e2ea6e4d5bfc2c5c49b0d3f5165216 100644 (file)
@@ -52,8 +52,8 @@ pub fn expand_deriving_hash(cx: &mut ExtCtxt<'_>,
 }
 
 fn hash_substructure(cx: &mut ExtCtxt<'_>, trait_span: Span, substr: &Substructure<'_>) -> P<Expr> {
-    let state_expr = match (substr.nonself_args.len(), substr.nonself_args.get(0)) {
-        (1, Some(o_f)) => o_f,
+    let state_expr = match &substr.nonself_args {
+        &[o_f] => o_f,
         _ => {
             cx.span_bug(trait_span,
                         "incorrect number of arguments in `derive(Hash)`")
index ac41f30e6b39fafd57748a06c5010d50588bda8d..cf54eacc3d46c8d715218c557f79a9c603750881 100644 (file)
@@ -1,8 +1,8 @@
 //! The compiler code necessary to implement the `#[derive]` extensions.
 
 use rustc_data_structures::sync::Lrc;
-use syntax::ast;
-use syntax::ext::base::{Annotatable, ExtCtxt, SyntaxExtension, Resolver};
+use syntax::ast::{self, MetaItem};
+use syntax::ext::base::{Annotatable, ExtCtxt, SyntaxExtension, Resolver, MultiItemModifier};
 use syntax::ext::build::AstBuilder;
 use syntax::ext::hygiene::{Mark, SyntaxContext};
 use syntax::ptr::P;
 #[path="cmp/ord.rs"]
 pub mod ord;
 
-
 pub mod generic;
 
+struct BuiltinDerive(
+    fn(&mut ExtCtxt<'_>, Span, &MetaItem, &Annotatable, &mut dyn FnMut(Annotatable))
+);
+
+impl MultiItemModifier for BuiltinDerive {
+    fn expand(&self,
+              ecx: &mut ExtCtxt<'_>,
+              span: Span,
+              meta_item: &MetaItem,
+              item: Annotatable)
+              -> Vec<Annotatable> {
+        let mut items = Vec::new();
+        (self.0)(ecx, span, meta_item, &item, &mut |a| items.push(a));
+        items
+    }
+}
+
 macro_rules! derive_traits {
     ($( $name:expr => $func:path, )+) => {
         pub fn is_builtin_trait(name: ast::Name) -> bool {
@@ -55,7 +71,7 @@ pub fn register_builtin_derives(resolver: &mut dyn Resolver) {
             $(
                 resolver.add_builtin(
                     ast::Ident::with_empty_ctxt(Symbol::intern($name)),
-                    Lrc::new(SyntaxExtension::BuiltinDerive($func))
+                    Lrc::new(SyntaxExtension::LegacyDerive(Box::new(BuiltinDerive($func))))
                 );
             )*
         }
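
With registration switched from `SyntaxExtension::BuiltinDerive($func)` to `SyntaxExtension::LegacyDerive(...)`, the plain derive functions are wrapped in the `BuiltinDerive` newtype so they can be passed as boxed `MultiItemModifier` trait objects. The wrap-a-fn-pointer-in-a-newtype pattern, reduced to a self-contained sketch with hypothetical stand-in types (`Ctx`, `Meta`, `Item`, `Modifier` are not the rustc names):

    // Hypothetical stand-ins for ExtCtxt / MetaItem / Annotatable.
    struct Ctx;
    struct Meta;
    #[derive(Debug, PartialEq)]
    struct Item(&'static str);

    trait Modifier {
        fn expand(&self, cx: &mut Ctx, meta: &Meta, item: Item) -> Vec<Item>;
    }

    // A newtype over a plain fn pointer lets a free function live behind a trait
    // object, which is what `BuiltinDerive` does for `MultiItemModifier` above.
    struct FnModifier(fn(&mut Ctx, &Meta, &Item, &mut dyn FnMut(Item)));

    impl Modifier for FnModifier {
        fn expand(&self, cx: &mut Ctx, meta: &Meta, item: Item) -> Vec<Item> {
            let mut items = Vec::new();
            (self.0)(cx, meta, &item, &mut |i| items.push(i));
            items
        }
    }

    fn derive_clone(_cx: &mut Ctx, _meta: &Meta, _item: &Item, push: &mut dyn FnMut(Item)) {
        push(Item("impl Clone"));
    }

    fn main() {
        let modifier: Box<dyn Modifier> = Box::new(FnModifier(derive_clone));
        let out = modifier.expand(&mut Ctx, &Meta, Item("struct S;"));
        assert_eq!(out, vec![Item("impl Clone")]);
    }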
index b5be45547cfbe3149545aa775736162bdef7ac7b..85b524786b2f5d8eb572acabcaa0eac8c392d61a 100644 (file)
@@ -28,7 +28,7 @@ enum ArgumentType {
 
 enum Position {
     Exact(usize),
-    Named(String),
+    Named(Symbol),
 }
 
 struct Context<'a, 'b: 'a> {
@@ -57,7 +57,7 @@ struct Context<'a, 'b: 'a> {
     /// Unique format specs seen for each argument.
     arg_unique_types: Vec<Vec<ArgumentType>>,
     /// Map from named arguments to their resolved indices.
-    names: FxHashMap<String, usize>,
+    names: FxHashMap<Symbol, usize>,
 
     /// The latest consecutive literal strings, or empty if there weren't any.
     literal: String,
@@ -127,9 +127,9 @@ fn parse_args<'a>(
     ecx: &mut ExtCtxt<'a>,
     sp: Span,
     tts: &[tokenstream::TokenTree]
-) -> Result<(P<ast::Expr>, Vec<P<ast::Expr>>, FxHashMap<String, usize>), DiagnosticBuilder<'a>> {
+) -> Result<(P<ast::Expr>, Vec<P<ast::Expr>>, FxHashMap<Symbol, usize>), DiagnosticBuilder<'a>> {
     let mut args = Vec::<P<ast::Expr>>::new();
-    let mut names = FxHashMap::<String, usize>::default();
+    let mut names = FxHashMap::<Symbol, usize>::default();
 
     let mut p = ecx.new_parser_from_tts(tts);
 
@@ -142,27 +142,26 @@ fn parse_args<'a>(
 
     while p.token != token::Eof {
         if !p.eat(&token::Comma) {
-            return Err(ecx.struct_span_err(p.span, "expected token: `,`"));
+            return Err(ecx.struct_span_err(p.token.span, "expected token: `,`"));
         }
         if p.token == token::Eof {
             break;
         } // accept trailing commas
         if named || (p.token.is_ident() && p.look_ahead(1, |t| *t == token::Eq)) {
             named = true;
-            let ident = if let token::Ident(i, _) = p.token {
+            let name = if let token::Ident(name, _) = p.token.kind {
                 p.bump();
-                i
+                name
             } else {
                 return Err(ecx.struct_span_err(
-                    p.span,
+                    p.token.span,
                     "expected ident, positional arguments cannot follow named arguments",
                 ));
             };
-            let name: &str = &ident.as_str();
 
             p.expect(&token::Eq)?;
             let e = p.parse_expr()?;
-            if let Some(prev) = names.get(name) {
+            if let Some(prev) = names.get(&name) {
                 ecx.struct_span_err(e.span, &format!("duplicate argument named `{}`", name))
                     .span_note(args[*prev].span, "previously here")
                     .emit();
@@ -174,7 +173,7 @@ fn parse_args<'a>(
             // if the input is valid, we can simply append to the positional
             // args. And remember the names.
             let slot = args.len();
-            names.insert(name.to_string(), slot);
+            names.insert(name, slot);
             args.push(e);
         } else {
             let e = p.parse_expr()?;
@@ -188,7 +187,7 @@ impl<'a, 'b> Context<'a, 'b> {
     fn resolve_name_inplace(&self, p: &mut parse::Piece<'_>) {
         // NOTE: the `unwrap_or` branch is needed in case of invalid format
         // arguments, e.g., `format_args!("{foo}")`.
-        let lookup = |s| *self.names.get(s).unwrap_or(&0);
+        let lookup = |s: Symbol| *self.names.get(&s).unwrap_or(&0);
 
         match *p {
             parse::String(_) => {}
@@ -222,7 +221,7 @@ fn verify_piece(&mut self, p: &parse::Piece<'_>) {
                 // it's written second, so it should come after width/precision.
                 let pos = match arg.position {
                     parse::ArgumentIs(i) | parse::ArgumentImplicitlyIs(i) => Exact(i),
-                    parse::ArgumentNamed(s) => Named(s.to_string()),
+                    parse::ArgumentNamed(s) => Named(s),
                 };
 
                 let ty = Placeholder(arg.format.ty.to_string());
@@ -232,7 +231,7 @@ fn verify_piece(&mut self, p: &parse::Piece<'_>) {
         }
     }
 
-    fn verify_count(&mut self, c: parse::Count<'_>) {
+    fn verify_count(&mut self, c: parse::Count) {
         match c {
             parse::CountImplied |
             parse::CountIs(..) => {}
@@ -240,7 +239,7 @@ fn verify_count(&mut self, c: parse::Count<'_>) {
                 self.verify_arg_type(Exact(i), Count);
             }
             parse::CountIsName(s) => {
-                self.verify_arg_type(Named(s.to_string()), Count);
+                self.verify_arg_type(Named(s), Count);
             }
         }
     }
@@ -390,7 +389,7 @@ fn rtpath(ecx: &ExtCtxt<'_>, s: &str) -> Vec<ast::Ident> {
         ecx.std_path(&[sym::fmt, sym::rt, sym::v1, Symbol::intern(s)])
     }
 
-    fn build_count(&self, c: parse::Count<'_>) -> P<ast::Expr> {
+    fn build_count(&self, c: parse::Count) -> P<ast::Expr> {
         let sp = self.macsp;
         let count = |c, arg| {
             let mut path = Context::rtpath(self.ecx, "Count");
@@ -739,7 +738,7 @@ pub fn expand_preparsed_format_args(ecx: &mut ExtCtxt<'_>,
                                     sp: Span,
                                     efmt: P<ast::Expr>,
                                     args: Vec<P<ast::Expr>>,
-                                    names: FxHashMap<String, usize>,
+                                    names: FxHashMap<Symbol, usize>,
                                     append_newline: bool)
                                     -> P<ast::Expr> {
     // NOTE: this verbose way of initializing `Vec<Vec<ArgumentType>>` is because
@@ -901,15 +900,15 @@ fn find_skips(snippet: &str, is_raw: bool) -> Vec<usize> {
 
     if !parser.errors.is_empty() {
         let err = parser.errors.remove(0);
-        let sp = fmt.span.from_inner_byte_pos(err.start.unwrap(), err.end.unwrap());
+        let sp = fmt.span.from_inner(err.span);
         let mut e = ecx.struct_span_err(sp, &format!("invalid format string: {}",
                                                      err.description));
         e.span_label(sp, err.label + " in format string");
         if let Some(note) = err.note {
             e.note(&note);
         }
-        if let Some((label, start, end)) = err.secondary_label {
-            let sp = fmt.span.from_inner_byte_pos(start.unwrap(), end.unwrap());
+        if let Some((label, span)) = err.secondary_label {
+            let sp = fmt.span.from_inner(span);
             e.span_label(sp, label);
         }
         e.emit();
@@ -917,9 +916,7 @@ fn find_skips(snippet: &str, is_raw: bool) -> Vec<usize> {
     }
 
     let arg_spans = parser.arg_places.iter()
-        .map(|&(parse::SpanIndex(start), parse::SpanIndex(end))| {
-            fmt.span.from_inner_byte_pos(start, end)
-        })
+        .map(|span| fmt.span.from_inner(*span))
         .collect();
 
     let mut cx = Context {
@@ -1044,7 +1041,9 @@ macro_rules! check_foreign {
                     let mut show_doc_note = false;
 
                     let mut suggestions = vec![];
-                    for sub in foreign::$kind::iter_subs(fmt_str) {
+                    // account for `"` and account for raw strings `r#`
+                    let padding = str_style.map(|i| i + 2).unwrap_or(1);
+                    for sub in foreign::$kind::iter_subs(fmt_str, padding) {
                         let trn = match sub.translate() {
                             Some(trn) => trn,
 
@@ -1064,10 +1063,8 @@ macro_rules! check_foreign {
                             show_doc_note = true;
                         }
 
-                        if let Some((start, end)) = pos {
-                            // account for `"` and account for raw strings `r#`
-                            let padding = str_style.map(|i| i + 2).unwrap_or(1);
-                            let sp = fmt_sp.from_inner_byte_pos(start + padding, end + padding);
+                        if let Some(inner_sp) = pos {
+                            let sp = fmt_sp.from_inner(inner_sp);
                             suggestions.push((sp, trn));
                         } else {
                             diag.help(&format!("`{}` should be written as `{}`", sub, trn));
index 261b2f373cefd340370f46fff0cb28fca0649eb4..7ad5997bf2c097c0942bdb4f55615e0a78c14344 100644 (file)
@@ -1,5 +1,6 @@
 pub mod printf {
     use super::strcursor::StrCursor as Cur;
+    use syntax_pos::InnerSpan;
 
     /// Represents a single `printf`-style substitution.
     #[derive(Clone, PartialEq, Debug)]
@@ -18,7 +19,7 @@ pub fn as_str(&self) -> &str {
             }
         }
 
-        pub fn position(&self) -> Option<(usize, usize)> {
+        pub fn position(&self) -> Option<InnerSpan> {
             match *self {
                 Substitution::Format(ref fmt) => Some(fmt.position),
                 _ => None,
@@ -28,7 +29,7 @@ pub fn position(&self) -> Option<(usize, usize)> {
         pub fn set_position(&mut self, start: usize, end: usize) {
             match self {
                 Substitution::Format(ref mut fmt) => {
-                    fmt.position = (start, end);
+                    fmt.position = InnerSpan::new(start, end);
                 }
                 _ => {}
             }
@@ -65,7 +66,7 @@ pub struct Format<'a> {
         /// Type of parameter being converted.
         pub type_: &'a str,
         /// Byte offset for the start and end of this formatting directive.
-        pub position: (usize, usize),
+        pub position: InnerSpan,
     }
 
     impl Format<'_> {
@@ -263,10 +264,10 @@ fn translate(&self, s: &mut String) -> std::fmt::Result {
     }
 
     /// Returns an iterator over all substitutions in a given string.
-    pub fn iter_subs(s: &str) -> Substitutions<'_> {
+    pub fn iter_subs(s: &str, start_pos: usize) -> Substitutions<'_> {
         Substitutions {
             s,
-            pos: 0,
+            pos: start_pos,
         }
     }
 
@@ -282,9 +283,9 @@ fn next(&mut self) -> Option<Self::Item> {
             let (mut sub, tail) = parse_next_substitution(self.s)?;
             self.s = tail;
             match sub {
-                Substitution::Format(_) => if let Some((start, end)) = sub.position() {
-                    sub.set_position(start + self.pos, end + self.pos);
-                    self.pos += end;
+                Substitution::Format(_) => if let Some(inner_span) = sub.position() {
+                    sub.set_position(inner_span.start + self.pos, inner_span.end + self.pos);
+                    self.pos += inner_span.end;
                 }
                 Substitution::Escape => self.pos += 2,
             }
@@ -373,7 +374,7 @@ macro_rules! move_to {
                     precision: None,
                     length: None,
                     type_: at.slice_between(next).unwrap(),
-                    position: (start.at, next.at),
+                    position: InnerSpan::new(start.at, next.at),
                 }),
                 next.slice_after()
             ));
@@ -560,7 +561,7 @@ macro_rules! move_to {
         drop(next);
 
         end = at;
-        let position = (start.at, end.at);
+        let position = InnerSpan::new(start.at, end.at);
 
         let f = Format {
             span: start.slice_between(end).unwrap(),
@@ -650,7 +651,7 @@ macro_rules! assert_pns_eq_sub {
                                 precision: $prec,
                                 length: $len,
                                 type_: $type_,
-                                position: $pos,
+                                position: syntax_pos::InnerSpan::new($pos.0, $pos.1),
                             }),
                             "!"
                         ))
@@ -711,7 +712,7 @@ macro_rules! assert_pns_eq_sub {
         #[test]
         fn test_iter() {
             let s = "The %d'th word %% is: `%.*s` %!\n";
-            let subs: Vec<_> = iter_subs(s).map(|sub| sub.translate()).collect();
+            let subs: Vec<_> = iter_subs(s, 0).map(|sub| sub.translate()).collect();
             assert_eq!(
                 subs.iter().map(|ms| ms.as_ref().map(|s| &s[..])).collect::<Vec<_>>(),
                 vec![Some("{}"), None, Some("{:.*}"), None]
@@ -761,6 +762,7 @@ fn test_translation() {
 
 pub mod shell {
     use super::strcursor::StrCursor as Cur;
+    use syntax_pos::InnerSpan;
 
     #[derive(Clone, PartialEq, Debug)]
     pub enum Substitution<'a> {
@@ -778,11 +780,11 @@ pub fn as_str(&self) -> String {
             }
         }
 
-        pub fn position(&self) -> Option<(usize, usize)> {
+        pub fn position(&self) -> Option<InnerSpan> {
             match self {
                 Substitution::Ordinal(_, pos) |
                 Substitution::Name(_, pos) |
-                Substitution::Escape(pos) => Some(*pos),
+                Substitution::Escape(pos) => Some(InnerSpan::new(pos.0, pos.1)),
             }
         }
 
@@ -804,10 +806,10 @@ pub fn translate(&self) -> Option<String> {
     }
 
     /// Returns an iterator over all substitutions in a given string.
-    pub fn iter_subs(s: &str) -> Substitutions<'_> {
+    pub fn iter_subs(s: &str, start_pos: usize) -> Substitutions<'_> {
         Substitutions {
             s,
-            pos: 0,
+            pos: start_pos,
         }
     }
 
@@ -823,7 +825,7 @@ fn next(&mut self) -> Option<Self::Item> {
             match parse_next_substitution(self.s) {
                 Some((mut sub, tail)) => {
                     self.s = tail;
-                    if let Some((start, end)) = sub.position() {
+                    if let Some(InnerSpan { start, end }) = sub.position() {
                         sub.set_position(start + self.pos, end + self.pos);
                         self.pos += end;
                     }
@@ -940,7 +942,7 @@ macro_rules! assert_pns_eq_sub {
         fn test_iter() {
             use super::iter_subs;
             let s = "The $0'th word $$ is: `$WORD` $!\n";
-            let subs: Vec<_> = iter_subs(s).map(|sub| sub.translate()).collect();
+            let subs: Vec<_> = iter_subs(s, 0).map(|sub| sub.translate()).collect();
             assert_eq!(
                 subs.iter().map(|ms| ms.as_ref().map(|s| &s[..])).collect::<Vec<_>>(),
                 vec![Some("{0}"), None, Some("{WORD}")]
index fc0015442750123062ad42535a0fca8a2ae18238..3dd17207cb8e93ed6d41e2122771a269b9115d60 100644 (file)
@@ -4,6 +4,7 @@
 
 #![deny(rust_2018_idioms)]
 #![deny(internal)]
+#![deny(unused_lifetimes)]
 
 #![feature(in_band_lifetimes)]
 #![feature(proc_macro_diagnostic)]
@@ -41,7 +42,9 @@
 
 use rustc_data_structures::sync::Lrc;
 use syntax::ast;
-use syntax::ext::base::{MacroExpanderFn, NormalTT, NamedSyntaxExtension, MultiModifier};
+
+use syntax::ext::base::{MacroExpanderFn, NamedSyntaxExtension, SyntaxExtension};
+use syntax::ext::hygiene::Transparency;
 use syntax::edition::Edition;
 use syntax::symbol::{sym, Symbol};
 
@@ -56,9 +59,10 @@ pub fn register_builtins(resolver: &mut dyn syntax::ext::base::Resolver,
     macro_rules! register {
         ($( $name:ident: $f:expr, )*) => { $(
             register(Symbol::intern(stringify!($name)),
-                     NormalTT {
+                     SyntaxExtension::LegacyBang {
                         expander: Box::new($f as MacroExpanderFn),
                         def_info: None,
+                        transparency: Transparency::SemiTransparent,
                         allow_internal_unstable: None,
                         allow_internal_unsafe: false,
                         local_inner_macros: false,
@@ -93,15 +97,16 @@ macro_rules! register {
         assert: assert::expand_assert,
     }
 
-    register(sym::test_case, MultiModifier(Box::new(test_case::expand)));
-    register(sym::test, MultiModifier(Box::new(test::expand_test)));
-    register(sym::bench, MultiModifier(Box::new(test::expand_bench)));
+    register(sym::test_case, SyntaxExtension::LegacyAttr(Box::new(test_case::expand)));
+    register(sym::test, SyntaxExtension::LegacyAttr(Box::new(test::expand_test)));
+    register(sym::bench, SyntaxExtension::LegacyAttr(Box::new(test::expand_bench)));
 
     // format_args uses `unstable` things internally.
     register(Symbol::intern("format_args"),
-             NormalTT {
+             SyntaxExtension::LegacyBang {
                 expander: Box::new(format::expand_format_args),
                 def_info: None,
+                transparency: Transparency::SemiTransparent,
                 allow_internal_unstable: Some(vec![sym::fmt_internals].into()),
                 allow_internal_unsafe: false,
                 local_inner_macros: false,
@@ -109,9 +114,10 @@ macro_rules! register {
                 edition,
             });
     register(sym::format_args_nl,
-             NormalTT {
+             SyntaxExtension::LegacyBang {
                  expander: Box::new(format::expand_format_args_nl),
                  def_info: None,
+                 transparency: Transparency::SemiTransparent,
                  allow_internal_unstable: Some(vec![sym::fmt_internals].into()),
                  allow_internal_unsafe: false,
                  local_inner_macros: false,
index de8b689396fb9bb94cd83a461b4f7df78150a512..29297aa913ed418c11c3e41f812db6e4478e5bd7 100644 (file)
@@ -132,7 +132,7 @@ fn collect_custom_derive(&mut self, item: &'a ast::Item, attr: &'a ast::Attribut
             }
         };
 
-        if !trait_ident.can_be_raw() {
+        if !trait_ident.name.can_be_raw() {
             self.handler.span_err(trait_attr.span,
                                   &format!("`{}` cannot be a name of derive macro", trait_ident));
         }
@@ -166,7 +166,7 @@ fn collect_custom_derive(&mut self, item: &'a ast::Item, attr: &'a ast::Attribut
                         return None;
                     }
                 };
-                if !ident.can_be_raw() {
+                if !ident.name.can_be_raw() {
                     self.handler.span_err(
                         attr.span,
                         &format!("`{}` cannot be a name of derive helper attribute", ident),
index cc05ecf8df5a6dc91f92f5a7d339178edd2b586f..00a420d3fa89922c9cd1758a5a99c9163e3700ac 100644 (file)
@@ -55,7 +55,7 @@ fn from_internal(((tree, is_joint), sess, stack): (TreeAndJoint, &ParseSess, &mu
         use syntax::parse::token::*;
 
         let joint = is_joint == Joint;
-        let (span, token) = match tree {
+        let Token { kind, span } = match tree {
             tokenstream::TokenTree::Delimited(span, delim, tts) => {
                 let delimiter = Delimiter::from_internal(delim);
                 return TokenTree::Group(Group {
@@ -64,7 +64,7 @@ fn from_internal(((tree, is_joint), sess, stack): (TreeAndJoint, &ParseSess, &mu
                     span,
                 });
             }
-            tokenstream::TokenTree::Token(span, token) => (span, token),
+            tokenstream::TokenTree::Token(token) => token,
         };
 
         macro_rules! tt {
@@ -93,7 +93,7 @@ macro_rules! op {
             }};
         }
 
-        match token {
+        match kind {
             Eq => op!('='),
             Lt => op!('<'),
             Le => op!('<', '='),
@@ -142,11 +142,10 @@ macro_rules! op {
             Question => op!('?'),
             SingleQuote => op!('\''),
 
-            Ident(ident, false) if ident.name == kw::DollarCrate =>
-                tt!(Ident::dollar_crate()),
-            Ident(ident, is_raw) => tt!(Ident::new(ident.name, is_raw)),
-            Lifetime(ident) => {
-                let ident = ident.without_first_quote();
+            Ident(name, false) if name == kw::DollarCrate => tt!(Ident::dollar_crate()),
+            Ident(name, is_raw) => tt!(Ident::new(name, is_raw)),
+            Lifetime(name) => {
+                let ident = ast::Ident::new(name, span).without_first_quote();
                 stack.push(tt!(Ident::new(ident.name, false)));
                 tt!(Punct::new('\'', true))
             }
@@ -159,12 +158,12 @@ macro_rules! op {
                     escaped.extend(ch.escape_debug());
                 }
                 let stream = vec![
-                    Ident(ast::Ident::new(sym::doc, span), false),
+                    Ident(sym::doc, false),
                     Eq,
-                    Token::lit(token::Str, Symbol::intern(&escaped), None),
+                    TokenKind::lit(token::Str, Symbol::intern(&escaped), None),
                 ]
                 .into_iter()
-                .map(|token| tokenstream::TokenTree::Token(span, token))
+                .map(|kind| tokenstream::TokenTree::token(kind, span))
                 .collect();
                 stack.push(TokenTree::Group(Group {
                     delimiter: Delimiter::Bracket,
@@ -211,8 +210,7 @@ fn to_internal(self) -> TokenStream {
                 .into();
             }
             TokenTree::Ident(self::Ident { sym, is_raw, span }) => {
-                let token = Ident(ast::Ident::new(sym, span), is_raw);
-                return tokenstream::TokenTree::Token(span, token).into();
+                return tokenstream::TokenTree::token(Ident(sym, is_raw), span).into();
             }
             TokenTree::Literal(self::Literal {
                 lit: token::Lit { kind: token::Integer, symbol, suffix },
@@ -220,9 +218,9 @@ fn to_internal(self) -> TokenStream {
             }) if symbol.as_str().starts_with("-") => {
                 let minus = BinOp(BinOpToken::Minus);
                 let symbol = Symbol::intern(&symbol.as_str()[1..]);
-                let integer = Token::lit(token::Integer, symbol, suffix);
-                let a = tokenstream::TokenTree::Token(span, minus);
-                let b = tokenstream::TokenTree::Token(span, integer);
+                let integer = TokenKind::lit(token::Integer, symbol, suffix);
+                let a = tokenstream::TokenTree::token(minus, span);
+                let b = tokenstream::TokenTree::token(integer, span);
                 return vec![a, b].into_iter().collect();
             }
             TokenTree::Literal(self::Literal {
@@ -231,17 +229,17 @@ fn to_internal(self) -> TokenStream {
             }) if symbol.as_str().starts_with("-") => {
                 let minus = BinOp(BinOpToken::Minus);
                 let symbol = Symbol::intern(&symbol.as_str()[1..]);
-                let float = Token::lit(token::Float, symbol, suffix);
-                let a = tokenstream::TokenTree::Token(span, minus);
-                let b = tokenstream::TokenTree::Token(span, float);
+                let float = TokenKind::lit(token::Float, symbol, suffix);
+                let a = tokenstream::TokenTree::token(minus, span);
+                let b = tokenstream::TokenTree::token(float, span);
                 return vec![a, b].into_iter().collect();
             }
             TokenTree::Literal(self::Literal { lit, span }) => {
-                return tokenstream::TokenTree::Token(span, Literal(lit)).into()
+                return tokenstream::TokenTree::token(Literal(lit), span).into()
             }
         };
 
-        let token = match ch {
+        let kind = match ch {
             '=' => Eq,
             '<' => Lt,
             '>' => Gt,
@@ -267,7 +265,7 @@ fn to_internal(self) -> TokenStream {
             _ => unreachable!(),
         };
 
-        let tree = tokenstream::TokenTree::Token(span, token);
+        let tree = tokenstream::TokenTree::token(kind, span);
         TokenStream::new(vec![(tree, if joint { Joint } else { NonJoint })])
     }
 }
@@ -338,7 +336,8 @@ fn new(sym: Symbol, is_raw: bool, span: Span) -> Ident {
         if !Self::is_valid(&string) {
             panic!("`{:?}` is not a valid identifier", string)
         }
-        if is_raw && !ast::Ident::from_interned_str(sym.as_interned_str()).can_be_raw() {
+        // Get rid of gensyms to conservatively check rawness on the string contents only.
+        if is_raw && !sym.as_interned_str().as_symbol().can_be_raw() {
             panic!("`{}` cannot be a raw identifier", string);
         }
         Ident { sym, is_raw, span }
index 61ef94560ccb95c6b718dd3bcdf3ebc56cc55f4f..512513e9b414c8d1aa2cdff8811755f55be18e3a 100644 (file)
@@ -16,11 +16,11 @@ pub fn expand_trace_macros(cx: &mut ExtCtxt<'_>,
                                        feature_gate::EXPLAIN_TRACE_MACROS);
     }
 
-    match (tt.len(), tt.first()) {
-        (1, Some(&TokenTree::Token(_, ref tok))) if tok.is_keyword(kw::True) => {
+    match tt {
+        [TokenTree::Token(token)] if token.is_keyword(kw::True) => {
             cx.set_trace_macros(true);
         }
-        (1, Some(&TokenTree::Token(_, ref tok))) if tok.is_keyword(kw::False) => {
+        [TokenTree::Token(token)] if token.is_keyword(kw::False) => {
             cx.set_trace_macros(false);
         }
         _ => cx.span_err(sp, "trace_macros! accepts only `true` or `false`"),
index 24aa82184ced58d9e4c7900a9c444b35ccd8545c..e7158372762dd99dba0c5f8c4594c5ce0505d4cc 100644 (file)
@@ -8,10 +8,10 @@
 
 #![deny(rust_2018_idioms)]
 #![deny(internal)]
+#![deny(unused_lifetimes)]
 
 #![feature(const_fn)]
 #![feature(crate_visibility_modifier)]
-#![feature(custom_attribute)]
 #![feature(nll)]
 #![feature(non_exhaustive)]
 #![feature(optin_builtin_traits)]
@@ -505,10 +505,10 @@ pub fn until(self, end: Span) -> Span {
         )
     }
 
-    pub fn from_inner_byte_pos(self, start: usize, end: usize) -> Span {
+    pub fn from_inner(self, inner: InnerSpan) -> Span {
         let span = self.data();
-        Span::new(span.lo + BytePos::from_usize(start),
-                  span.lo + BytePos::from_usize(end),
+        Span::new(span.lo + BytePos::from_usize(inner.start),
+                  span.lo + BytePos::from_usize(inner.end),
                   span.ctxt)
     }
 
@@ -1396,6 +1396,18 @@ pub struct MalformedSourceMapPositions {
     pub end_pos: BytePos
 }
 
+#[derive(Copy, Clone, PartialEq, Eq, Debug)]
+pub struct InnerSpan {
+    pub start: usize,
+    pub end: usize,
+}
+
+impl InnerSpan {
+    pub fn new(start: usize, end: usize) -> InnerSpan {
+        InnerSpan { start, end }
+    }
+}
+
 // Given a slice of line start positions and a position, returns the index of
 // the line the position is on. Returns -1 if the position is located before
 // the first line.
index 4e080d115d2a8eed7860fecc2513a2cd3e48fd57..029685bcc2429e620058b30d6f043f4e8f89c25a 100644 (file)
         conservative_impl_trait,
         console,
         const_compare_raw_pointers,
+        const_constructor,
         const_fn,
         const_fn_union,
         const_generics,
         core_intrinsics,
         crate_id,
         crate_in_paths,
+        crate_local,
         crate_name,
         crate_type,
         crate_visibility_modifier,
         deref,
         deref_mut,
         derive,
+        direct,
         doc,
         doc_alias,
         doc_cfg,
         format_args_nl,
         from,
         From,
+        from_desugaring,
         from_error,
         from_generator,
+        from_method,
         from_ok,
         from_usize,
         fundamental,
         panic_impl,
         panic_implementation,
         panic_runtime,
+        parent_trait,
         partial_cmp,
         PartialOrd,
         passes,
         rust_2018_preview,
         rust_begin_unwind,
         rustc,
+        rustc_allocator,
         rustc_allocator_nounwind,
         rustc_allow_const_fn_ptr,
         rustc_args_required_const,
         rustc_diagnostic_macros,
         rustc_dirty,
         rustc_doc_only_macro,
+        rustc_dummy,
         rustc_dump_env_program_clauses,
         rustc_dump_program_clauses,
         rustc_dump_user_substs,
         __rust_unstable_column,
         rvalue_static_promotion,
         sanitizer_runtime,
+        _Self,
         self_in_typedefs,
         self_struct_ctor,
         Send,
         trait_alias,
         transmute,
         transparent,
+        transparent_enums,
+        transparent_unions,
         trivial_bounds,
         Try,
         try_blocks,
@@ -921,10 +932,9 @@ pub struct Interner {
 
 impl Interner {
     fn prefill(init: &[&'static str]) -> Self {
-        let symbols = (0 .. init.len() as u32).map(Symbol::new);
         Interner {
-            strings: init.to_vec(),
-            names: init.iter().copied().zip(symbols).collect(),
+            strings: init.into(),
+            names: init.iter().copied().zip((0..).map(Symbol::new)).collect(),
             ..Default::default()
         }
     }
@@ -1019,6 +1029,21 @@ fn is_unused_keyword_2018(self) -> bool {
     pub fn is_doc_keyword(self) -> bool {
         self <= kw::Union
     }
+
+    /// A keyword or reserved identifier that can be used as a path segment.
+    pub fn is_path_segment_keyword(self) -> bool {
+        self == kw::Super ||
+        self == kw::SelfLower ||
+        self == kw::SelfUpper ||
+        self == kw::Crate ||
+        self == kw::PathRoot ||
+        self == kw::DollarCrate
+    }
+
+    /// This symbol can be a raw identifier.
+    pub fn can_be_raw(self) -> bool {
+        self != kw::Invalid && self != kw::Underscore && !self.is_path_segment_keyword()
+    }
 }
 
 impl Ident {
@@ -1049,24 +1074,13 @@ pub fn is_reserved(self) -> bool {
 
     /// A keyword or reserved identifier that can be used as a path segment.
     pub fn is_path_segment_keyword(self) -> bool {
-        self.name == kw::Super ||
-        self.name == kw::SelfLower ||
-        self.name == kw::SelfUpper ||
-        self.name == kw::Crate ||
-        self.name == kw::PathRoot ||
-        self.name == kw::DollarCrate
-    }
-
-    /// This identifier can be a raw identifier.
-    pub fn can_be_raw(self) -> bool {
-        self.name != kw::Invalid && self.name != kw::Underscore &&
-        !self.is_path_segment_keyword()
+        self.name.is_path_segment_keyword()
     }
 
     /// We see this identifier in a normal identifier position, like variable name or a type.
     /// How was it written originally? Did it use the raw form? Let's try to guess.
     pub fn is_raw_guess(self) -> bool {
-        self.can_be_raw() && self.is_reserved()
+        self.name.can_be_raw() && self.is_reserved()
     }
 }
 
index 711716d9b926c10c11a50eca2437571fe57ebe75..3b5ac7baf20bd29b3c0d6213ee86045d4b9fa3cb 100644 (file)
@@ -38,9 +38,6 @@
 #![deny(rust_2018_idioms)]
 
 #![cfg_attr(windows, feature(libc))]
-// Handle rustfmt skips
-#![feature(custom_attribute)]
-#![allow(unused_attributes)]
 
 use std::io::prelude::*;
 use std::io::{self, Stdout, Stderr};
index 2251c54229ee0227e615dfc7c1010da041698466..4241fcea8047dd68f7ecb08f8082946c12eb2d0e 100644 (file)
@@ -3,7 +3,6 @@
 // min-llvm-version 7.0
 
 #![crate_type = "lib"]
-#![feature(repr_align_enum)]
 
 #[repr(align(64))]
 pub enum Align64 {
diff --git a/src/test/codegen/exact_div.rs b/src/test/codegen/exact_div.rs
deleted file mode 100644 (file)
index 6a55b49..0000000
+++ /dev/null
@@ -1,20 +0,0 @@
-// compile-flags: -C no-prepopulate-passes
-
-#![crate_type = "lib"]
-#![feature(core_intrinsics)]
-
-use std::intrinsics::exact_div;
-
-// CHECK-LABEL: @exact_sdiv
-#[no_mangle]
-pub unsafe fn exact_sdiv(x: i32, y: i32) -> i32 {
-// CHECK: sdiv exact
-    exact_div(x, y)
-}
-
-// CHECK-LABEL: @exact_udiv
-#[no_mangle]
-pub unsafe fn exact_udiv(x: u32, y: u32) -> u32 {
-// CHECK: udiv exact
-    exact_div(x, y)
-}
index c2d697fd046bfd1fe92f57d1f5bf54504cb1ce14..bd121ef24adae1db555cffc70af3c327d564c2b0 100644 (file)
@@ -2,7 +2,7 @@
 // ignore-tidy-linelength
 
 #![crate_type = "lib"]
-#![feature(custom_attribute)]
+#![feature(rustc_attrs)]
 
 pub struct S {
   _field: [i32; 8],
@@ -146,7 +146,7 @@ pub fn enum_id_2(x: Option<u8>) -> Option<u8> {
 
 // CHECK: noalias i8* @allocator()
 #[no_mangle]
-#[allocator]
+#[rustc_allocator]
 pub fn allocator() -> *const i8 {
   std::ptr::null()
 }
diff --git a/src/test/codegen/intrinsics/exact_div.rs b/src/test/codegen/intrinsics/exact_div.rs
new file mode 100644 (file)
index 0000000..68eaa39
--- /dev/null
@@ -0,0 +1,20 @@
+// compile-flags: -C no-prepopulate-passes
+
+#![crate_type = "lib"]
+#![feature(core_intrinsics)]
+
+use std::intrinsics::exact_div;
+
+// CHECK-LABEL: @exact_sdiv
+#[no_mangle]
+pub unsafe fn exact_sdiv(x: i32, y: i32) -> i32 {
+    // CHECK: sdiv exact
+    exact_div(x, y)
+}
+
+// CHECK-LABEL: @exact_udiv
+#[no_mangle]
+pub unsafe fn exact_udiv(x: u32, y: u32) -> u32 {
+    // CHECK: udiv exact
+    exact_div(x, y)
+}
diff --git a/src/test/codegen/intrinsics/likely.rs b/src/test/codegen/intrinsics/likely.rs
new file mode 100644 (file)
index 0000000..c5a0185
--- /dev/null
@@ -0,0 +1,30 @@
+// compile-flags: -C no-prepopulate-passes
+
+#![crate_type = "lib"]
+#![feature(core_intrinsics)]
+
+use std::intrinsics::{likely,unlikely};
+
+#[no_mangle]
+pub fn check_likely(x: i32, y: i32) -> Option<i32> {
+    unsafe {
+        // CHECK: call i1 @llvm.expect.i1(i1 %{{.*}}, i1 true)
+        if likely(x == y) {
+            None
+        } else {
+            Some(x + y)
+        }
+    }
+}
+
+#[no_mangle]
+pub fn check_unlikely(x: i32, y: i32) -> Option<i32> {
+    unsafe {
+        // CHECK: call i1 @llvm.expect.i1(i1 %{{.*}}, i1 false)
+        if unlikely(x == y) {
+            None
+        } else {
+            Some(x + y)
+        }
+    }
+}
diff --git a/src/test/codegen/intrinsics/move-val-init.rs b/src/test/codegen/intrinsics/move-val-init.rs
new file mode 100644 (file)
index 0000000..6222536
--- /dev/null
@@ -0,0 +1,19 @@
+// compile-flags: -C no-prepopulate-passes
+
+#![feature(core_intrinsics)]
+#![crate_type = "lib"]
+
+// test that `move_val_init` actually avoids big allocas
+
+use std::intrinsics::move_val_init;
+
+pub struct Big {
+    pub data: [u8; 65536]
+}
+
+// CHECK-LABEL: @test_mvi
+#[no_mangle]
+pub unsafe fn test_mvi(target: *mut Big, make_big: fn() -> Big) {
+    // CHECK: call void %make_big(%Big*{{[^%]*}} %target)
+    move_val_init(target, make_big());
+}
diff --git a/src/test/codegen/intrinsics/nontemporal.rs b/src/test/codegen/intrinsics/nontemporal.rs
new file mode 100644 (file)
index 0000000..3a41fb4
--- /dev/null
@@ -0,0 +1,13 @@
+// compile-flags: -O
+
+#![feature(core_intrinsics)]
+#![crate_type = "lib"]
+
+#[no_mangle]
+pub fn a(a: &mut u32, b: u32) {
+    // CHECK-LABEL: define void @a
+    // CHECK: store i32 %b, i32* %a, align 4, !nontemporal
+    unsafe {
+        std::intrinsics::nontemporal_store(a, b);
+    }
+}
diff --git a/src/test/codegen/intrinsics/prefetch.rs b/src/test/codegen/intrinsics/prefetch.rs
new file mode 100644 (file)
index 0000000..4cd38e1
--- /dev/null
@@ -0,0 +1,63 @@
+// compile-flags: -C no-prepopulate-passes
+
+#![crate_type = "lib"]
+#![feature(core_intrinsics)]
+
+use std::intrinsics::{prefetch_read_data, prefetch_write_data,
+                      prefetch_read_instruction, prefetch_write_instruction};
+
+#[no_mangle]
+pub fn check_prefetch_read_data(data: &[i8]) {
+    unsafe {
+        // CHECK: call void @llvm.prefetch(i8* %{{.*}}, i32 0, i32 0, i32 1)
+        prefetch_read_data(data.as_ptr(), 0);
+        // CHECK: call void @llvm.prefetch(i8* %{{.*}}, i32 0, i32 1, i32 1)
+        prefetch_read_data(data.as_ptr(), 1);
+        // CHECK: call void @llvm.prefetch(i8* %{{.*}}, i32 0, i32 2, i32 1)
+        prefetch_read_data(data.as_ptr(), 2);
+        // CHECK: call void @llvm.prefetch(i8* %{{.*}}, i32 0, i32 3, i32 1)
+        prefetch_read_data(data.as_ptr(), 3);
+    }
+}
+
+#[no_mangle]
+pub fn check_prefetch_write_data(data: &[i8]) {
+    unsafe {
+        // CHECK: call void @llvm.prefetch(i8* %{{.*}}, i32 1, i32 0, i32 1)
+        prefetch_write_data(data.as_ptr(), 0);
+        // CHECK: call void @llvm.prefetch(i8* %{{.*}}, i32 1, i32 1, i32 1)
+        prefetch_write_data(data.as_ptr(), 1);
+        // CHECK: call void @llvm.prefetch(i8* %{{.*}}, i32 1, i32 2, i32 1)
+        prefetch_write_data(data.as_ptr(), 2);
+        // CHECK: call void @llvm.prefetch(i8* %{{.*}}, i32 1, i32 3, i32 1)
+        prefetch_write_data(data.as_ptr(), 3);
+    }
+}
+
+#[no_mangle]
+pub fn check_prefetch_read_instruction(data: &[i8]) {
+    unsafe {
+        // CHECK: call void @llvm.prefetch(i8* %{{.*}}, i32 0, i32 0, i32 0)
+        prefetch_read_instruction(data.as_ptr(), 0);
+        // CHECK: call void @llvm.prefetch(i8* %{{.*}}, i32 0, i32 1, i32 0)
+        prefetch_read_instruction(data.as_ptr(), 1);
+        // CHECK: call void @llvm.prefetch(i8* %{{.*}}, i32 0, i32 2, i32 0)
+        prefetch_read_instruction(data.as_ptr(), 2);
+        // CHECK: call void @llvm.prefetch(i8* %{{.*}}, i32 0, i32 3, i32 0)
+        prefetch_read_instruction(data.as_ptr(), 3);
+    }
+}
+
+#[no_mangle]
+pub fn check_prefetch_write_instruction(data: &[i8]) {
+    unsafe {
+        // CHECK: call void @llvm.prefetch(i8* %{{.*}}, i32 1, i32 0, i32 0)
+        prefetch_write_instruction(data.as_ptr(), 0);
+        // CHECK: call void @llvm.prefetch(i8* %{{.*}}, i32 1, i32 1, i32 0)
+        prefetch_write_instruction(data.as_ptr(), 1);
+        // CHECK: call void @llvm.prefetch(i8* %{{.*}}, i32 1, i32 2, i32 0)
+        prefetch_write_instruction(data.as_ptr(), 2);
+        // CHECK: call void @llvm.prefetch(i8* %{{.*}}, i32 1, i32 3, i32 0)
+        prefetch_write_instruction(data.as_ptr(), 3);
+    }
+}
diff --git a/src/test/codegen/intrinsics/unchecked_math.rs b/src/test/codegen/intrinsics/unchecked_math.rs
new file mode 100644 (file)
index 0000000..419c120
--- /dev/null
@@ -0,0 +1,46 @@
+#![crate_type = "lib"]
+#![feature(core_intrinsics)]
+
+use std::intrinsics::*;
+
+// CHECK-LABEL: @unchecked_add_signed
+#[no_mangle]
+pub unsafe fn unchecked_add_signed(a: i32, b: i32) -> i32 {
+    // CHECK: add nsw
+    unchecked_add(a, b)
+}
+
+// CHECK-LABEL: @unchecked_add_unsigned
+#[no_mangle]
+pub unsafe fn unchecked_add_unsigned(a: u32, b: u32) -> u32 {
+    // CHECK: add nuw
+    unchecked_add(a, b)
+}
+
+// CHECK-LABEL: @unchecked_sub_signed
+#[no_mangle]
+pub unsafe fn unchecked_sub_signed(a: i32, b: i32) -> i32 {
+    // CHECK: sub nsw
+    unchecked_sub(a, b)
+}
+
+// CHECK-LABEL: @unchecked_sub_unsigned
+#[no_mangle]
+pub unsafe fn unchecked_sub_unsigned(a: u32, b: u32) -> u32 {
+    // CHECK: sub nuw
+    unchecked_sub(a, b)
+}
+
+// CHECK-LABEL: @unchecked_mul_signed
+#[no_mangle]
+pub unsafe fn unchecked_mul_signed(a: i32, b: i32) -> i32 {
+    // CHECK: mul nsw
+    unchecked_mul(a, b)
+}
+
+// CHECK-LABEL: @unchecked_mul_unsigned
+#[no_mangle]
+pub unsafe fn unchecked_mul_unsigned(a: u32, b: u32) -> u32 {
+    // CHECK: mul nuw
+    unchecked_mul(a, b)
+}
diff --git a/src/test/codegen/likely.rs b/src/test/codegen/likely.rs
deleted file mode 100644 (file)
index c5a0185..0000000
+++ /dev/null
@@ -1,30 +0,0 @@
-// compile-flags: -C no-prepopulate-passes
-
-#![crate_type = "lib"]
-#![feature(core_intrinsics)]
-
-use std::intrinsics::{likely,unlikely};
-
-#[no_mangle]
-pub fn check_likely(x: i32, y: i32) -> Option<i32> {
-    unsafe {
-        // CHECK: call i1 @llvm.expect.i1(i1 %{{.*}}, i1 true)
-        if likely(x == y) {
-            None
-        } else {
-            Some(x + y)
-        }
-    }
-}
-
-#[no_mangle]
-pub fn check_unlikely(x: i32, y: i32) -> Option<i32> {
-    unsafe {
-        // CHECK: call i1 @llvm.expect.i1(i1 %{{.*}}, i1 false)
-        if unlikely(x == y) {
-            None
-        } else {
-            Some(x + y)
-        }
-    }
-}
diff --git a/src/test/codegen/move-val-init.rs b/src/test/codegen/move-val-init.rs
deleted file mode 100644 (file)
index 6222536..0000000
+++ /dev/null
@@ -1,19 +0,0 @@
-// compile-flags: -C no-prepopulate-passes
-
-#![feature(core_intrinsics)]
-#![crate_type = "lib"]
-
-// test that `move_val_init` actually avoids big allocas
-
-use std::intrinsics::move_val_init;
-
-pub struct Big {
-    pub data: [u8; 65536]
-}
-
-// CHECK-LABEL: @test_mvi
-#[no_mangle]
-pub unsafe fn test_mvi(target: *mut Big, make_big: fn() -> Big) {
-    // CHECK: call void %make_big(%Big*{{[^%]*}} %target)
-    move_val_init(target, make_big());
-}
diff --git a/src/test/codegen/nontemporal.rs b/src/test/codegen/nontemporal.rs
deleted file mode 100644 (file)
index 3a41fb4..0000000
+++ /dev/null
@@ -1,13 +0,0 @@
-// compile-flags: -O
-
-#![feature(core_intrinsics)]
-#![crate_type = "lib"]
-
-#[no_mangle]
-pub fn a(a: &mut u32, b: u32) {
-    // CHECK-LABEL: define void @a
-    // CHECK: store i32 %b, i32* %a, align 4, !nontemporal
-    unsafe {
-        std::intrinsics::nontemporal_store(a, b);
-    }
-}
diff --git a/src/test/codegen/prefetch.rs b/src/test/codegen/prefetch.rs
deleted file mode 100644 (file)
index 4cd38e1..0000000
+++ /dev/null
@@ -1,63 +0,0 @@
-// compile-flags: -C no-prepopulate-passes
-
-#![crate_type = "lib"]
-#![feature(core_intrinsics)]
-
-use std::intrinsics::{prefetch_read_data, prefetch_write_data,
-                      prefetch_read_instruction, prefetch_write_instruction};
-
-#[no_mangle]
-pub fn check_prefetch_read_data(data: &[i8]) {
-    unsafe {
-        // CHECK: call void @llvm.prefetch(i8* %{{.*}}, i32 0, i32 0, i32 1)
-        prefetch_read_data(data.as_ptr(), 0);
-        // CHECK: call void @llvm.prefetch(i8* %{{.*}}, i32 0, i32 1, i32 1)
-        prefetch_read_data(data.as_ptr(), 1);
-        // CHECK: call void @llvm.prefetch(i8* %{{.*}}, i32 0, i32 2, i32 1)
-        prefetch_read_data(data.as_ptr(), 2);
-        // CHECK: call void @llvm.prefetch(i8* %{{.*}}, i32 0, i32 3, i32 1)
-        prefetch_read_data(data.as_ptr(), 3);
-    }
-}
-
-#[no_mangle]
-pub fn check_prefetch_write_data(data: &[i8]) {
-    unsafe {
-        // CHECK: call void @llvm.prefetch(i8* %{{.*}}, i32 1, i32 0, i32 1)
-        prefetch_write_data(data.as_ptr(), 0);
-        // CHECK: call void @llvm.prefetch(i8* %{{.*}}, i32 1, i32 1, i32 1)
-        prefetch_write_data(data.as_ptr(), 1);
-        // CHECK: call void @llvm.prefetch(i8* %{{.*}}, i32 1, i32 2, i32 1)
-        prefetch_write_data(data.as_ptr(), 2);
-        // CHECK: call void @llvm.prefetch(i8* %{{.*}}, i32 1, i32 3, i32 1)
-        prefetch_write_data(data.as_ptr(), 3);
-    }
-}
-
-#[no_mangle]
-pub fn check_prefetch_read_instruction(data: &[i8]) {
-    unsafe {
-        // CHECK: call void @llvm.prefetch(i8* %{{.*}}, i32 0, i32 0, i32 0)
-        prefetch_read_instruction(data.as_ptr(), 0);
-        // CHECK: call void @llvm.prefetch(i8* %{{.*}}, i32 0, i32 1, i32 0)
-        prefetch_read_instruction(data.as_ptr(), 1);
-        // CHECK: call void @llvm.prefetch(i8* %{{.*}}, i32 0, i32 2, i32 0)
-        prefetch_read_instruction(data.as_ptr(), 2);
-        // CHECK: call void @llvm.prefetch(i8* %{{.*}}, i32 0, i32 3, i32 0)
-        prefetch_read_instruction(data.as_ptr(), 3);
-    }
-}
-
-#[no_mangle]
-pub fn check_prefetch_write_instruction(data: &[i8]) {
-    unsafe {
-        // CHECK: call void @llvm.prefetch(i8* %{{.*}}, i32 1, i32 0, i32 0)
-        prefetch_write_instruction(data.as_ptr(), 0);
-        // CHECK: call void @llvm.prefetch(i8* %{{.*}}, i32 1, i32 1, i32 0)
-        prefetch_write_instruction(data.as_ptr(), 1);
-        // CHECK: call void @llvm.prefetch(i8* %{{.*}}, i32 1, i32 2, i32 0)
-        prefetch_write_instruction(data.as_ptr(), 2);
-        // CHECK: call void @llvm.prefetch(i8* %{{.*}}, i32 1, i32 3, i32 0)
-        prefetch_write_instruction(data.as_ptr(), 3);
-    }
-}
index fb88f2a69ca159a4ac9d95dd8b9e1e2ad5a36d1b..e7c4b6193bc0989ac09dc2dd8be43c56c31b3328 100644 (file)
@@ -1,4 +1,5 @@
 // compile-flags: -C no-prepopulate-passes
+// ignore-tidy-linelength
 
 // ignore-arm
 // ignore-mips
@@ -7,36 +8,76 @@
 // ignore-powerpc64
 // See repr-transparent.rs
 
+#![feature(transparent_enums, transparent_unions)]
+
 #![crate_type="lib"]
 
 
+#[derive(Clone, Copy)]
 #[repr(C)]
-pub struct Big([u32; 16]);
+pub struct BigS([u32; 16]);
+
+#[repr(transparent)]
+pub struct TsBigS(BigS);
+
+#[repr(transparent)]
+pub union TuBigS {
+    field: BigS,
+}
 
 #[repr(transparent)]
-pub struct BigW(Big);
+pub enum TeBigS {
+    Variant(BigS),
+}
+
+// CHECK: define void @test_BigS(%BigS* [[BIGS_RET_ATTRS:.*]], %BigS* [[BIGS_ARG_ATTRS:.*]])
+#[no_mangle]
+pub extern fn test_BigS(_: BigS) -> BigS { loop {} }
+
+// CHECK: define void @test_TsBigS(%TsBigS* [[BIGS_RET_ATTRS]], %TsBigS* [[BIGS_ARG_ATTRS]])
+#[no_mangle]
+pub extern fn test_TsBigS(_: TsBigS) -> TsBigS { loop {} }
 
-// CHECK: define void @test_Big(%Big* [[BIG_RET_ATTRS:.*]], %Big* [[BIG_ARG_ATTRS:.*]])
+// CHECK: define void @test_TuBigS(%TuBigS* [[BIGS_RET_ATTRS]], %TuBigS* [[BIGS_ARG_ATTRS]])
 #[no_mangle]
-pub extern fn test_Big(_: Big) -> Big { loop {} }
+pub extern fn test_TuBigS(_: TuBigS) -> TuBigS { loop {} }
 
-// CHECK: define void @test_BigW(%BigW* [[BIG_RET_ATTRS]], %BigW* [[BIG_ARG_ATTRS]])
+// CHECK: define void @test_TeBigS(%"TeBigS::Variant"* [[BIGS_RET_ATTRS]], %"TeBigS::Variant"* [[BIGS_ARG_ATTRS]])
 #[no_mangle]
-pub extern fn test_BigW(_: BigW) -> BigW { loop {} }
+pub extern fn test_TeBigS(_: TeBigS) -> TeBigS { loop {} }
 
 
+#[derive(Clone, Copy)]
 #[repr(C)]
 pub union BigU {
     foo: [u32; 16],
 }
 
 #[repr(transparent)]
-pub struct BigUw(BigU);
+pub struct TsBigU(BigU);
+
+#[repr(transparent)]
+pub union TuBigU {
+    field: BigU,
+}
+
+#[repr(transparent)]
+pub enum TeBigU {
+    Variant(BigU),
+}
 
 // CHECK: define void @test_BigU(%BigU* [[BIGU_RET_ATTRS:.*]], %BigU* [[BIGU_ARG_ATTRS:.*]])
 #[no_mangle]
 pub extern fn test_BigU(_: BigU) -> BigU { loop {} }
 
-// CHECK: define void @test_BigUw(%BigUw* [[BIGU_RET_ATTRS]], %BigUw* [[BIGU_ARG_ATTRS]])
+// CHECK: define void @test_TsBigU(%TsBigU* [[BIGU_RET_ATTRS:.*]], %TsBigU* [[BIGU_ARG_ATTRS]])
+#[no_mangle]
+pub extern fn test_TsBigU(_: TsBigU) -> TsBigU { loop {} }
+
+// CHECK: define void @test_TuBigU(%TuBigU* [[BIGU_RET_ATTRS]], %TuBigU* [[BIGU_ARG_ATTRS]])
+#[no_mangle]
+pub extern fn test_TuBigU(_: TuBigU) -> TuBigU { loop {} }
+
+// CHECK: define void @test_TeBigU(%"TeBigU::Variant"* [[BIGU_RET_ATTRS]], %"TeBigU::Variant"* [[BIGU_ARG_ATTRS]])
 #[no_mangle]
-pub extern fn test_BigUw(_: BigUw) -> BigUw { loop {} }
+pub extern fn test_TeBigU(_: TeBigU) -> TeBigU { loop {} }
index 6c628ac035fd90ef67350066c5a4a7caa30afca2..5521c3c849f5363a661ec24f58235e503cfae22f 100644 (file)
 // ignore-x86_64
 // See repr-transparent.rs
 
+#![feature(transparent_enums, transparent_unions)]
+
 #![crate_type="lib"]
 
 
+#[derive(Clone, Copy)]
 #[repr(C)]
-pub struct Big([u32; 16]);
+pub struct BigS([u32; 16]);
+
+#[repr(transparent)]
+pub struct TsBigS(BigS);
+
+#[repr(transparent)]
+pub union TuBigS {
+    field: BigS,
+}
 
 #[repr(transparent)]
-pub struct BigW(Big);
+pub enum TeBigS {
+    Variant(BigS),
+}
+
+// CHECK: define void @test_BigS(%BigS* [[BIGS_RET_ATTRS:.*]], [16 x i32]
+#[no_mangle]
+pub extern fn test_BigS(_: BigS) -> BigS { loop {} }
+
+// CHECK: define void @test_TsBigS(%TsBigS* [[BIGS_RET_ATTRS]], [16 x i32]
+#[no_mangle]
+pub extern fn test_TsBigS(_: TsBigS) -> TsBigS { loop {} }
 
-// CHECK: define void @test_Big(%Big* [[BIG_RET_ATTRS:.*]], [16 x i32]
+// CHECK: define void @test_TuBigS(%TuBigS* [[BIGS_RET_ATTRS]], [16 x i32]
 #[no_mangle]
-pub extern fn test_Big(_: Big) -> Big { loop {} }
+pub extern fn test_TuBigS(_: TuBigS) -> TuBigS { loop {} }
 
-// CHECK: define void @test_BigW(%BigW* [[BIG_RET_ATTRS]], [16 x i32]
+// CHECK: define void @test_TeBigS(%"TeBigS::Variant"* [[BIGS_RET_ATTRS]], [16 x i32]
 #[no_mangle]
-pub extern fn test_BigW(_: BigW) -> BigW { loop {} }
+pub extern fn test_TeBigS(_: TeBigS) -> TeBigS { loop {} }
 
 
+#[derive(Clone, Copy)]
 #[repr(C)]
 pub union BigU {
     foo: [u32; 16],
 }
 
 #[repr(transparent)]
-pub struct BigUw(BigU);
+pub struct TsBigU(BigU);
+
+#[repr(transparent)]
+pub union TuBigU {
+    field: BigU,
+}
+
+#[repr(transparent)]
+pub enum TeBigU {
+    Variant(BigU),
+}
 
 // CHECK: define void @test_BigU(%BigU* [[BIGU_RET_ATTRS:.*]], [16 x i32]
 #[no_mangle]
 pub extern fn test_BigU(_: BigU) -> BigU { loop {} }
 
-// CHECK: define void @test_BigUw(%BigUw* [[BIGU_RET_ATTRS]], [16 x i32]
+// CHECK: define void @test_TsBigU(%TsBigU* [[BIGU_RET_ATTRS:.*]], [16 x i32]
+#[no_mangle]
+pub extern fn test_TsBigU(_: TsBigU) -> TsBigU { loop {} }
+
+// CHECK: define void @test_TuBigU(%TuBigU* [[BIGU_RET_ATTRS]], [16 x i32]
+#[no_mangle]
+pub extern fn test_TuBigU(_: TuBigU) -> TuBigU { loop {} }
+
+// CHECK: define void @test_TeBigU(%"TeBigU::Variant"* [[BIGU_RET_ATTRS]], [16 x i32]
 #[no_mangle]
-pub extern fn test_BigUw(_: BigUw) -> BigUw { loop {} }
+pub extern fn test_TeBigU(_: TeBigU) -> TeBigU { loop {} }
index cd740dc9b823324fb426f99fd4fd903443a1ef03..1a59c9b48b9765a5c7acf4cb4a6024ef6def04f4 100644 (file)
@@ -3,36 +3,76 @@
 // only-mips64
 // See repr-transparent.rs
 
+#![feature(transparent_enums, transparent_unions)]
+
 #![crate_type="lib"]
 
 
+#[derive(Clone, Copy)]
 #[repr(C)]
-pub struct Big([u32; 16]);
+pub struct BigS([u32; 16]);
+
+#[repr(transparent)]
+pub struct TsBigS(BigS);
+
+#[repr(transparent)]
+pub union TuBigS {
+    field: BigS,
+}
 
 #[repr(transparent)]
-pub struct BigW(Big);
+pub enum TeBigS {
+    Variant(BigS),
+}
+
+// CHECK: define void @test_BigS(%BigS* [[BIGS_RET_ATTRS:.*]], [8 x i64]
+#[no_mangle]
+pub extern fn test_BigS(_: BigS) -> BigS { loop {} }
+
+// CHECK: define void @test_TsBigS(%TsBigS* [[BIGS_RET_ATTRS]], [8 x i64]
+#[no_mangle]
+pub extern fn test_TsBigS(_: TsBigS) -> TsBigS { loop {} }
 
-// CHECK: define void @test_Big(%Big* [[BIG_RET_ATTRS:.*]], [8 x i64]
+// CHECK: define void @test_TuBigS(%TuBigS* [[BIGS_RET_ATTRS]], [8 x i64]
 #[no_mangle]
-pub extern fn test_Big(_: Big) -> Big { loop {} }
+pub extern fn test_TuBigS(_: TuBigS) -> TuBigS { loop {} }
 
-// CHECK: define void @test_BigW(%BigW* [[BIG_RET_ATTRS]], [8 x i64]
+// CHECK: define void @test_TeBigS(%"TeBigS::Variant"* [[BIGS_RET_ATTRS]], [8 x i64]
 #[no_mangle]
-pub extern fn test_BigW(_: BigW) -> BigW { loop {} }
+pub extern fn test_TeBigS(_: TeBigS) -> TeBigS { loop {} }
 
 
+#[derive(Clone, Copy)]
 #[repr(C)]
 pub union BigU {
     foo: [u32; 16],
 }
 
 #[repr(transparent)]
-pub struct BigUw(BigU);
+pub struct TsBigU(BigU);
+
+#[repr(transparent)]
+pub union TuBigU {
+    field: BigU,
+}
+
+#[repr(transparent)]
+pub enum TeBigU {
+    Variant(BigU),
+}
 
 // CHECK: define void @test_BigU(%BigU* [[BIGU_RET_ATTRS:.*]], [8 x i64]
 #[no_mangle]
 pub extern fn test_BigU(_: BigU) -> BigU { loop {} }
 
-// CHECK: define void @test_BigUw(%BigUw* [[BIGU_RET_ATTRS]], [8 x i64]
+// CHECK: define void @test_TsBigU(%TsBigU* [[BIGU_RET_ATTRS:.*]], [8 x i64]
+#[no_mangle]
+pub extern fn test_TsBigU(_: TsBigU) -> TsBigU { loop {} }
+
+// CHECK: define void @test_TuBigU(%TuBigU* [[BIGU_RET_ATTRS]], [8 x i64]
+#[no_mangle]
+pub extern fn test_TuBigU(_: TuBigU) -> TuBigU { loop {} }
+
+// CHECK: define void @test_TeBigU(%"TeBigU::Variant"* [[BIGU_RET_ATTRS]], [8 x i64]
 #[no_mangle]
-pub extern fn test_BigUw(_: BigUw) -> BigUw { loop {} }
+pub extern fn test_TeBigU(_: TeBigU) -> TeBigU { loop {} }
index fd655261ab85252f7758279392f9bff6475438cd..c9f3837565808707ef1c7fb0ffa19b043e4cca90 100644 (file)
@@ -1,13 +1,16 @@
 // compile-flags: -C no-prepopulate-passes
 
 #![crate_type="lib"]
-#![feature(repr_simd)]
+#![feature(repr_simd, transparent_enums, transparent_unions)]
 
 use std::marker::PhantomData;
 
+#[derive(Copy, Clone)]
 pub struct Zst1;
+#[derive(Copy, Clone)]
 pub struct Zst2(());
 
+#[derive(Copy, Clone)]
 #[repr(transparent)]
 pub struct F32(f32);
 
@@ -112,6 +115,44 @@ pub struct UnitPhantom<T, U> { val: T, unit: PhantomData<U> }
 #[no_mangle]
 pub extern fn test_Projection(_: StructWithProjection) -> StructWithProjection { loop {} }
 
+#[repr(transparent)]
+pub enum EnumF32 {
+    Variant(F32)
+}
+
+// CHECK: define float @test_EnumF32(float %arg0)
+#[no_mangle]
+pub extern fn test_EnumF32(_: EnumF32) -> EnumF32 { loop {} }
+
+#[repr(transparent)]
+pub enum EnumF32WithZsts {
+    Variant(Zst1, F32, Zst2)
+}
+
+// CHECK: define float @test_EnumF32WithZsts(float %arg0)
+#[no_mangle]
+pub extern fn test_EnumF32WithZsts(_: EnumF32WithZsts) -> EnumF32WithZsts { loop {} }
+
+#[repr(transparent)]
+pub union UnionF32 {
+    field: F32,
+}
+
+// CHECK: define float @test_UnionF32(float %arg0)
+#[no_mangle]
+pub extern fn test_UnionF32(_: UnionF32) -> UnionF32 { loop {} }
+
+#[repr(transparent)]
+pub union UnionF32WithZsts {
+    zst1: Zst1,
+    field: F32,
+    zst2: Zst2,
+}
+
+// CHECK: define float @test_UnionF32WithZsts(float %arg0)
+#[no_mangle]
+pub extern fn test_UnionF32WithZsts(_: UnionF32WithZsts) -> UnionF32WithZsts { loop {} }
+
 
 // All that remains to be tested are aggregates. They are tested in separate files called repr-
 // transparent-*.rs  with `only-*` or `ignore-*` directives, because the expected LLVM IR
diff --git a/src/test/codegen/unchecked_math.rs b/src/test/codegen/unchecked_math.rs
deleted file mode 100644 (file)
index 419c120..0000000
+++ /dev/null
@@ -1,46 +0,0 @@
-#![crate_type = "lib"]
-#![feature(core_intrinsics)]
-
-use std::intrinsics::*;
-
-// CHECK-LABEL: @unchecked_add_signed
-#[no_mangle]
-pub unsafe fn unchecked_add_signed(a: i32, b: i32) -> i32 {
-    // CHECK: add nsw
-    unchecked_add(a, b)
-}
-
-// CHECK-LABEL: @unchecked_add_unsigned
-#[no_mangle]
-pub unsafe fn unchecked_add_unsigned(a: u32, b: u32) -> u32 {
-    // CHECK: add nuw
-    unchecked_add(a, b)
-}
-
-// CHECK-LABEL: @unchecked_sub_signed
-#[no_mangle]
-pub unsafe fn unchecked_sub_signed(a: i32, b: i32) -> i32 {
-    // CHECK: sub nsw
-    unchecked_sub(a, b)
-}
-
-// CHECK-LABEL: @unchecked_sub_unsigned
-#[no_mangle]
-pub unsafe fn unchecked_sub_unsigned(a: u32, b: u32) -> u32 {
-    // CHECK: sub nuw
-    unchecked_sub(a, b)
-}
-
-// CHECK-LABEL: @unchecked_mul_signed
-#[no_mangle]
-pub unsafe fn unchecked_mul_signed(a: i32, b: i32) -> i32 {
-    // CHECK: mul nsw
-    unchecked_mul(a, b)
-}
-
-// CHECK-LABEL: @unchecked_mul_unsigned
-#[no_mangle]
-pub unsafe fn unchecked_mul_unsigned(a: u32, b: u32) -> u32 {
-    // CHECK: mul nuw
-    unchecked_mul(a, b)
-}
diff --git a/src/test/mir-opt/const_prop/ref_deref.rs b/src/test/mir-opt/const_prop/ref_deref.rs
new file mode 100644 (file)
index 0000000..2d04822
--- /dev/null
@@ -0,0 +1,21 @@
+fn main() {
+    *(&4);
+}
+
+// END RUST SOURCE
+// START rustc.main.ConstProp.before.mir
+// bb0: {
+//     ...
+//     _2 = &(promoted[0]: i32);
+//     _1 = (*_2);
+//     ...
+//}
+// END rustc.main.ConstProp.before.mir
+// START rustc.main.ConstProp.after.mir
+// bb0: {
+//     ...
+//     _2 = const Scalar(AllocId(0).0x0) : &i32;
+//     _1 = const 4i32;
+//     ...
+// }
+// END rustc.main.ConstProp.after.mir
diff --git a/src/test/mir-opt/const_prop/reify_fn_ptr.rs b/src/test/mir-opt/const_prop/reify_fn_ptr.rs
new file mode 100644 (file)
index 0000000..809eb19
--- /dev/null
@@ -0,0 +1,25 @@
+fn main() {
+    let _ = main as usize as *const fn();
+}
+
+// END RUST SOURCE
+// START rustc.main.ConstProp.before.mir
+//  bb0: {
+//      ...
+//      _3 = const main as fn() (Pointer(ReifyFnPointer));
+//      _2 = move _3 as usize (Misc);
+//      ...
+//      _1 = move _2 as *const fn() (Misc);
+//      ...
+//  }
+// END rustc.main.ConstProp.before.mir
+// START rustc.main.ConstProp.after.mir
+//  bb0: {
+//      ...
+//      _3 = const Scalar(AllocId(1).0x0) : fn();
+//      _2 = move _3 as usize (Misc);
+//      ...
+//      _1 = const Scalar(AllocId(1).0x0) : *const fn();
+//      ...
+//  }
+// END rustc.main.ConstProp.after.mir
index 3435ca07f4cd8414c8c6894be6619bd9e1a5a3fb..5babeb195a826f0fbf7e2b5ebfb36a4a58d77d60 100644 (file)
@@ -1,22 +1,22 @@
-fn test() -> &'static [u32] {
-    &[1, 2]
-}
-
 fn main() {
-    let x = test()[0];
+    (&[1u32, 2, 3] as &[u32])[1];
 }
 
 // END RUST SOURCE
 // START rustc.main.ConstProp.before.mir
-//  bb1: {
+//  bb0: {
 //      ...
-//      _3 = const 0usize;
-//      _4 = Len((*_2));
-//      _5 = Lt(_3, _4);
-//      assert(move _5, "index out of bounds: the len is move _4 but the index is _3") -> bb2;
+//      _4 = &(promoted[0]: [u32; 3]);
+//      _3 = _4;
+//      _2 = move _3 as &[u32] (Pointer(Unsize));
+//      ...
+//      _6 = const 1usize;
+//      _7 = Len((*_2));
+//      _8 = Lt(_6, _7);
+//      assert(move _8, "index out of bounds: the len is move _7 but the index is _6") -> bb1;
 //  }
-//  bb2: {
-//      _1 = (*_2)[_3];
+//  bb1: {
+//      _1 = (*_2)[_6];
 //      ...
 //      return;
 //  }
@@ -24,13 +24,17 @@ fn main() {
 // START rustc.main.ConstProp.after.mir
 //  bb0: {
 //      ...
-//      _3 = const 0usize;
-//      _4 = Len((*_2));
-//      _5 = Lt(_3, _4);
-//      assert(move _5, "index out of bounds: the len is move _4 but the index is _3") -> bb2;
+//      _4 = const Scalar(AllocId(0).0x0) : &[u32; 3];
+//      _3 = const Scalar(AllocId(0).0x0) : &[u32; 3];
+//      _2 = move _3 as &[u32] (Pointer(Unsize));
+//      ...
+//      _6 = const 1usize;
+//      _7 = const 3usize;
+//      _8 = const true;
+//      assert(const true, "index out of bounds: the len is move _7 but the index is _6") -> bb1;
 //  }
-//  bb2: {
-//      _1 = (*_2)[_3];
+//  bb1: {
+//      _1 = (*_2)[_6];
 //      ...
 //      return;
 //  }
index 67a55101d829f7a99a69c1deb6d7742bec2bfc7f..f4d848dfc7ad12084dcf1ddee976683794cc7a8f 100644 (file)
@@ -78,7 +78,8 @@ fn main() {
 //     let mut _0: Test;
 //
 //     bb0: {
-//         _0 = Test::X(move _1,);
+//         ((_0 as X).0: usize) = move _1;
+//         discriminant(_0) = 0;
 //         return;
 //     }
 // }
index f13339e334c6ec4dfa40d075a93c6ee8c6e3b1d5..0a745e7d34fc1bcab326efa5a9dc39a9ef0e4b76 100644 (file)
@@ -1,15 +1,16 @@
-// pp-exact
 // Testing that both the inner item and next outer item are
 // preserved, and that the first outer item parsed in main is not
 // accidentally carried over to each inner function
 
-#![feature(custom_attribute)]
+// pp-exact
+
+#![feature(rustc_attrs)]
 
 fn main() {
-    #![inner_attr]
-    #[outer_attr]
+    #![rustc_dummy]
+    #[rustc_dummy]
     fn f() { }
 
-    #[outer_attr]
+    #[rustc_dummy]
     fn g() { }
 }
index 355f3d5a3cfb8e5005df30c703c30c43362c70bf..44d2c5db3e668e67fad55215387a9710a4aeb250 100644 (file)
@@ -1,13 +1,14 @@
-// pp-exact
 // Tests literals in attributes.
 
-#![feature(custom_attribute)]
+// pp-exact
+
+#![feature(rustc_attrs)]
 
 fn main() {
-    #![hello("hi", 1, 2, 1.012, pi = 3.14, bye, name("John"))]
-    #[align = 8]
+    #![rustc_dummy("hi", 1, 2, 1.012, pi = 3.14, bye, name("John"))]
+    #[rustc_dummy = 8]
     fn f() { }
 
-    #[vector(1, 2, 3)]
+    #[rustc_dummy(1, 2, 3)]
     fn g() { }
 }
index eb1768683e6e9409987affe0c744629305bc0dac..d81485b555fa6c502a699f52e2ca5308d0d46be4 100644 (file)
@@ -1,20 +1,20 @@
 // pp-exact
 
-#![feature(custom_attribute)]
 #![feature(box_syntax)]
+#![feature(rustc_attrs)]
 #![feature(stmt_expr_attributes)]
 
 fn main() { }
 
 fn _0() {
 
-    #[attr]
+    #[rustc_dummy]
     foo();
 }
 
 fn _1() {
 
-    #[attr]
+    #[rustc_dummy]
     unsafe {
         // code
     }
@@ -22,11 +22,11 @@ fn _1() {
 
 fn _2() {
 
-    #[attr]
+    #[rustc_dummy]
     { foo(); }
 
     {
-        #![attr]
+        #![rustc_dummy]
 
         foo()
     }
@@ -34,51 +34,51 @@ fn _2() {
 
 fn _3() {
 
-    #[attr]
+    #[rustc_dummy]
     match () { _ => { } }
 }
 
 fn _4() {
 
-    #[attr]
+    #[rustc_dummy]
     match () {
-        #![attr]
+        #![rustc_dummy]
         _ => (),
     }
 
     let _ =
-        #[attr] match () {
-                    #![attr]
-                    () => (),
-                };
+        #[rustc_dummy] match () {
+                           #![rustc_dummy]
+                           () => (),
+                       };
 }
 
 fn _5() {
 
-    #[attr]
+    #[rustc_dummy]
     let x = 1;
 
-    let x = #[attr] 1;
+    let x = #[rustc_dummy] 1;
 
     let y = ();
     let z = ();
 
-    foo3(x, #[attr] y, z);
+    foo3(x, #[rustc_dummy] y, z);
 
-    qux(3 + #[attr] 2);
+    qux(3 + #[rustc_dummy] 2);
 }
 
 fn _6() {
 
-    #[attr]
-    [#![attr] 1, 2, 3];
+    #[rustc_dummy]
+    [#![rustc_dummy] 1, 2, 3];
 
-    let _ = #[attr] [#![attr] 1, 2, 3];
+    let _ = #[rustc_dummy] [#![rustc_dummy] 1, 2, 3];
 
-    #[attr]
-    [#![attr] 1; 4];
+    #[rustc_dummy]
+    [#![rustc_dummy] 1; 4];
 
-    let _ = #[attr] [#![attr] 1; 4];
+    let _ = #[rustc_dummy] [#![rustc_dummy] 1; 4];
 }
 
 struct Foo {
@@ -89,45 +89,41 @@ struct Foo {
 
 fn _7() {
 
-    #[attr]
-    Foo{#![attr] data: (),};
+    #[rustc_dummy]
+    Foo{#![rustc_dummy] data: (),};
 
-    let _ = #[attr] Foo{#![attr] data: (),};
+    let _ = #[rustc_dummy] Foo{#![rustc_dummy] data: (),};
 }
 
 fn _8() {
 
-    #[attr]
-    (#![attr] );
+    #[rustc_dummy]
+    (#![rustc_dummy] );
 
-    #[attr]
-    (#![attr] 0);
+    #[rustc_dummy]
+    (#![rustc_dummy] 0);
 
-    #[attr]
-    (#![attr] 0,);
+    #[rustc_dummy]
+    (#![rustc_dummy] 0,);
 
-    #[attr]
-    (#![attr] 0, 1);
+    #[rustc_dummy]
+    (#![rustc_dummy] 0, 1);
 }
 
 fn _9() {
     macro_rules! stmt_mac((  ) => { let _ = (  ) ; });
 
-    #[attr]
+    #[rustc_dummy]
     stmt_mac!();
 
-    /*
-    // pre existing pp bug: delimiter styles gets lost:
-
-    #[attr]
+    #[rustc_dummy]
     stmt_mac!{ };
 
-    #[attr]
+    #[rustc_dummy]
     stmt_mac![];
 
-    #[attr]
-    stmt_mac!{ } // pre-existing pp bug: compiler ICEs with a None unwrap
-    */
+    #[rustc_dummy]
+    stmt_mac!{ }
 
     let _ = ();
 }
@@ -135,138 +131,131 @@ fn _9() {
 macro_rules! expr_mac((  ) => { (  ) });
 
 fn _10() {
-
-    let _ = #[attr] expr_mac!();
-
-    /*
-    // pre existing pp bug: delimiter styles gets lost:
-    let _ = #[attr] expr_mac![];
-    let _ = #[attr] expr_mac!{};
-    */
+    let _ = #[rustc_dummy] expr_mac!();
+    let _ = #[rustc_dummy] expr_mac![];
+    let _ = #[rustc_dummy] expr_mac!{ };
 }
 
 fn _11() {
-    let _ = #[attr] box 0;
-    let _: [(); 0] = #[attr] [#![attr] ];
-    let _ = #[attr] [#![attr] 0, 0];
-    let _ = #[attr] [#![attr] 0; 0];
-    let _ = #[attr] foo();
-    let _ = #[attr] 1i32.clone();
-    let _ = #[attr] (#![attr] );
-    let _ = #[attr] (#![attr] 0);
-    let _ = #[attr] (#![attr] 0,);
-    let _ = #[attr] (#![attr] 0, 0);
-    let _ = #[attr] 0 + #[attr] 0;
-    let _ = #[attr] !0;
-    let _ = #[attr] -0i32;
-    let _ = #[attr] false;
-    let _ = #[attr] 'c';
-    let _ = #[attr] 0;
-    let _ = #[attr] 0 as usize;
+    let _ = #[rustc_dummy] box 0;
+    let _: [(); 0] = #[rustc_dummy] [#![rustc_dummy] ];
+    let _ = #[rustc_dummy] [#![rustc_dummy] 0, 0];
+    let _ = #[rustc_dummy] [#![rustc_dummy] 0; 0];
+    let _ = #[rustc_dummy] foo();
+    let _ = #[rustc_dummy] 1i32.clone();
+    let _ = #[rustc_dummy] (#![rustc_dummy] );
+    let _ = #[rustc_dummy] (#![rustc_dummy] 0);
+    let _ = #[rustc_dummy] (#![rustc_dummy] 0,);
+    let _ = #[rustc_dummy] (#![rustc_dummy] 0, 0);
+    let _ = #[rustc_dummy] 0 + #[rustc_dummy] 0;
+    let _ = #[rustc_dummy] !0;
+    let _ = #[rustc_dummy] -0i32;
+    let _ = #[rustc_dummy] false;
+    let _ = #[rustc_dummy] 'c';
+    let _ = #[rustc_dummy] 0;
+    let _ = #[rustc_dummy] 0 as usize;
     let _ =
-        #[attr] while false {
-                    #![attr]
-                };
+        #[rustc_dummy] while false {
+                           #![rustc_dummy]
+                       };
     let _ =
-        #[attr] while let None = Some(()) {
-                    #![attr]
-                };
+        #[rustc_dummy] while let None = Some(()) {
+                           #![rustc_dummy]
+                       };
     let _ =
-        #[attr] for _ in 0..0 {
-                    #![attr]
-                };
+        #[rustc_dummy] for _ in 0..0 {
+                           #![rustc_dummy]
+                       };
     // FIXME: pp bug, two spaces after the loop
     let _ =
-        #[attr] loop  {
-                    #![attr]
-                };
+        #[rustc_dummy] loop  {
+                           #![rustc_dummy]
+                       };
     let _ =
-        #[attr] match false {
-                    #![attr]
-                    _ => (),
-                };
-    let _ = #[attr] || #[attr] ();
-    let _ = #[attr] move || #[attr] ();
+        #[rustc_dummy] match false {
+                           #![rustc_dummy]
+                           _ => (),
+                       };
+    let _ = #[rustc_dummy] || #[rustc_dummy] ();
+    let _ = #[rustc_dummy] move || #[rustc_dummy] ();
     let _ =
-        #[attr] ||
-                    {
-                        #![attr]
-                        #[attr]
-                        ()
-                    };
+        #[rustc_dummy] ||
+                           {
+                               #![rustc_dummy]
+                               #[rustc_dummy]
+                               ()
+                           };
     let _ =
-        #[attr] move ||
-                    {
-                        #![attr]
-                        #[attr]
-                        ()
-                    };
+        #[rustc_dummy] move ||
+                           {
+                               #![rustc_dummy]
+                               #[rustc_dummy]
+                               ()
+                           };
     let _ =
-        #[attr] {
-                    #![attr]
-                };
+        #[rustc_dummy] {
+                           #![rustc_dummy]
+                       };
     let _ =
-        #[attr] {
-                    #![attr]
-                    let _ = ();
-                };
+        #[rustc_dummy] {
+                           #![rustc_dummy]
+                           let _ = ();
+                       };
     let _ =
-        #[attr] {
-                    #![attr]
-                    let _ = ();
-                    ()
-                };
+        #[rustc_dummy] {
+                           #![rustc_dummy]
+                           let _ = ();
+                           ()
+                       };
     let mut x = 0;
-    let _ = #[attr] x = 15;
-    let _ = #[attr] x += 15;
+    let _ = #[rustc_dummy] x = 15;
+    let _ = #[rustc_dummy] x += 15;
     let s = Foo{data: (),};
-    let _ = #[attr] s.data;
-    let _ = (#[attr] s).data;
+    let _ = #[rustc_dummy] s.data;
+    let _ = (#[rustc_dummy] s).data;
     let t = Bar(());
-    let _ = #[attr] t.0;
-    let _ = (#[attr] t).0;
+    let _ = #[rustc_dummy] t.0;
+    let _ = (#[rustc_dummy] t).0;
     let v = vec!(0);
-    let _ = #[attr] v[0];
-    let _ = (#[attr] v)[0];
-    let _ = #[attr] 0..#[attr] 0;
-    let _ = #[attr] 0..;
-    let _ = #[attr] (0..0);
-    let _ = #[attr] (0..);
-    let _ = #[attr] (..0);
-    let _ = #[attr] (..);
-    let _: fn(&u32) -> u32 = #[attr] std::clone::Clone::clone;
-    let _ = #[attr] &0;
-    let _ = #[attr] &mut 0;
-    let _ = #[attr] &#[attr] 0;
-    let _ = #[attr] &mut #[attr] 0;
+    let _ = #[rustc_dummy] v[0];
+    let _ = (#[rustc_dummy] v)[0];
+    let _ = #[rustc_dummy] 0..#[rustc_dummy] 0;
+    let _ = #[rustc_dummy] 0..;
+    let _ = #[rustc_dummy] (0..0);
+    let _ = #[rustc_dummy] (0..);
+    let _ = #[rustc_dummy] (..0);
+    let _ = #[rustc_dummy] (..);
+    let _: fn(&u32) -> u32 = #[rustc_dummy] std::clone::Clone::clone;
+    let _ = #[rustc_dummy] &0;
+    let _ = #[rustc_dummy] &mut 0;
+    let _ = #[rustc_dummy] &#[rustc_dummy] 0;
+    let _ = #[rustc_dummy] &mut #[rustc_dummy] 0;
     // FIXME: pp bug, extra space after keyword?
-    while false { let _ = #[attr] continue ; }
-    while true { let _ = #[attr] break ; }
-    || #[attr] return;
-    let _ = #[attr] expr_mac!();
-    /* FIXME: pp bug, losing delimiter styles
-    let _ = #[attr] expr_mac![];
-    let _ = #[attr] expr_mac!{};
-    */
-    let _ = #[attr] Foo{#![attr] data: (),};
-    let _ = #[attr] Foo{#![attr] ..s};
-    let _ = #[attr] Foo{#![attr] data: (), ..s};
-    let _ = #[attr] (#![attr] 0);
+    while false { let _ = #[rustc_dummy] continue ; }
+    while true { let _ = #[rustc_dummy] break ; }
+    || #[rustc_dummy] return;
+    let _ = #[rustc_dummy] expr_mac!();
+    let _ = #[rustc_dummy] expr_mac![];
+    let _ = #[rustc_dummy] expr_mac!{ };
+    let _ = #[rustc_dummy] Foo{#![rustc_dummy] data: (),};
+    let _ = #[rustc_dummy] Foo{#![rustc_dummy] ..s};
+    let _ = #[rustc_dummy] Foo{#![rustc_dummy] data: (), ..s};
+    let _ = #[rustc_dummy] (#![rustc_dummy] 0);
 }
 
 fn _12() {
-    #[attr]
+    #[rustc_dummy]
     let _ = 0;
 
-    #[attr]
+    #[rustc_dummy]
     0;
 
-    #[attr]
+    #[rustc_dummy]
     expr_mac!();
 
-    #[attr]
+    #[rustc_dummy]
     {
-        #![attr]
+        #![rustc_dummy]
     }
 }
 
diff --git a/src/test/run-pass-fulldeps/auxiliary/custom-derive-partial-eq.rs b/src/test/run-pass-fulldeps/auxiliary/custom-derive-partial-eq.rs
deleted file mode 100644 (file)
index 4d6ff47..0000000
+++ /dev/null
@@ -1,71 +0,0 @@
-// force-host
-
-#![feature(plugin_registrar, rustc_private)]
-
-extern crate syntax;
-extern crate syntax_ext;
-extern crate rustc_plugin;
-
-use syntax_ext::deriving;
-use deriving::generic::*;
-use deriving::generic::ty::*;
-
-use rustc_plugin::Registry;
-use syntax::ast::*;
-use syntax::source_map::Span;
-use syntax::ext::base::*;
-use syntax::ext::build::AstBuilder;
-use syntax::symbol::Symbol;
-use syntax::ptr::P;
-
-#[plugin_registrar]
-pub fn plugin_registrar(reg: &mut Registry) {
-    reg.register_syntax_extension(Symbol::intern("derive_CustomPartialEq"),
-                                  MultiDecorator(Box::new(expand_deriving_partial_eq)));
-}
-
-fn expand_deriving_partial_eq(cx: &mut ExtCtxt, span: Span, mitem: &MetaItem, item: &Annotatable,
-                              push: &mut FnMut(Annotatable)) {
-    // structures are equal if all fields are equal, and non equal, if
-    // any fields are not equal or if the enum variants are different
-    fn cs_eq(cx: &mut ExtCtxt, span: Span, substr: &Substructure) -> P<Expr> {
-        cs_fold(true,
-                |cx, span, subexpr, self_f, other_fs| {
-                    let other_f = (other_fs.len(), other_fs.get(0)).1.unwrap();
-                    let eq = cx.expr_binary(span, BinOpKind::Eq, self_f, other_f.clone());
-                    cx.expr_binary(span, BinOpKind::And, subexpr, eq)
-                },
-                cx.expr_bool(span, true),
-                Box::new(|cx, span, _, _| cx.expr_bool(span, false)),
-                cx,
-                span,
-                substr)
-    }
-
-    let inline = cx.meta_word(span, Symbol::intern("inline"));
-    let attrs = vec![cx.attribute(span, inline)];
-    let methods = vec![MethodDef {
-        name: "eq",
-        generics: LifetimeBounds::empty(),
-        explicit_self: borrowed_explicit_self(),
-        args: vec![(borrowed_self(), "other")],
-        ret_ty: Literal(deriving::generic::ty::Path::new_local("bool")),
-        attributes: attrs,
-        is_unsafe: false,
-        unify_fieldless_variants: true,
-        combine_substructure: combine_substructure(Box::new(cs_eq)),
-    }];
-
-    let trait_def = TraitDef {
-        span: span,
-        attributes: Vec::new(),
-        path: deriving::generic::ty::Path::new(vec!["cmp", "PartialEq"]),
-        additional_bounds: Vec::new(),
-        generics: LifetimeBounds::empty(),
-        is_unsafe: false,
-        supports_unions: false,
-        methods: methods,
-        associated_types: Vec::new(),
-    };
-    trait_def.expand(cx, mitem, item, push)
-}
diff --git a/src/test/run-pass-fulldeps/auxiliary/custom-derive-plugin-attr.rs b/src/test/run-pass-fulldeps/auxiliary/custom-derive-plugin-attr.rs
deleted file mode 100644 (file)
index c6b33fb..0000000
+++ /dev/null
@@ -1,84 +0,0 @@
-// force-host
-
-#![feature(plugin_registrar)]
-#![feature(box_syntax)]
-#![feature(rustc_private)]
-
-extern crate syntax;
-extern crate syntax_ext;
-extern crate syntax_pos;
-extern crate rustc;
-extern crate rustc_plugin;
-
-use syntax::ast;
-use syntax::attr;
-use syntax::ext::base::{MultiDecorator, ExtCtxt, Annotatable};
-use syntax::ext::build::AstBuilder;
-use syntax::symbol::{Symbol, sym};
-use syntax::ptr::P;
-use syntax_ext::deriving::generic::{TraitDef, MethodDef, combine_substructure};
-use syntax_ext::deriving::generic::{Substructure, Struct, EnumMatching};
-use syntax_ext::deriving::generic::ty::{Literal, LifetimeBounds, Path, borrowed_explicit_self};
-use syntax_pos::Span;
-use rustc_plugin::Registry;
-
-#[plugin_registrar]
-pub fn plugin_registrar(reg: &mut Registry) {
-    reg.register_syntax_extension(
-        Symbol::intern("rustc_derive_TotalSum"),
-        MultiDecorator(box expand));
-}
-
-fn expand(cx: &mut ExtCtxt,
-          span: Span,
-          mitem: &ast::MetaItem,
-          item: &Annotatable,
-          push: &mut FnMut(Annotatable)) {
-    let trait_def = TraitDef {
-        span: span,
-        attributes: vec![],
-        path: Path::new_local("TotalSum"),
-        additional_bounds: vec![],
-        generics: LifetimeBounds::empty(),
-        associated_types: vec![],
-        is_unsafe: false,
-        supports_unions: false,
-        methods: vec![
-            MethodDef {
-                name: "total_sum",
-                generics: LifetimeBounds::empty(),
-                explicit_self: borrowed_explicit_self(),
-                args: vec![],
-                ret_ty: Literal(Path::new_local("isize")),
-                attributes: vec![],
-                is_unsafe: false,
-                unify_fieldless_variants: true,
-                combine_substructure: combine_substructure(Box::new(totalsum_substructure)),
-            },
-        ],
-    };
-
-    trait_def.expand(cx, mitem, item, push)
-}
-
-// Mostly copied from syntax::ext::deriving::hash
-/// Defines how the implementation for `trace()` is to be generated
-fn totalsum_substructure(cx: &mut ExtCtxt, trait_span: Span,
-                         substr: &Substructure) -> P<ast::Expr> {
-    let fields = match *substr.fields {
-        Struct(_, ref fs) | EnumMatching(.., ref fs) => fs,
-        _ => cx.span_bug(trait_span, "impossible substructure")
-    };
-
-    fields.iter().fold(cx.expr_isize(trait_span, 0), |acc, ref item| {
-        if attr::contains_name(&item.attrs, sym::ignore) {
-            acc
-        } else {
-            cx.expr_binary(item.span, ast::BinOpKind::Add, acc,
-                           cx.expr_method_call(item.span,
-                                               item.self_.clone(),
-                                               substr.method_ident,
-                                               Vec::new()))
-        }
-    })
-}
diff --git a/src/test/run-pass-fulldeps/auxiliary/custom-derive-plugin.rs b/src/test/run-pass-fulldeps/auxiliary/custom-derive-plugin.rs
deleted file mode 100644 (file)
index 874a0ec..0000000
+++ /dev/null
@@ -1,76 +0,0 @@
-// force-host
-
-#![feature(plugin_registrar)]
-#![feature(box_syntax)]
-#![feature(rustc_private)]
-
-extern crate syntax;
-extern crate syntax_ext;
-extern crate syntax_pos;
-extern crate rustc;
-extern crate rustc_plugin;
-
-use syntax::ast;
-use syntax::ext::base::{MultiDecorator, ExtCtxt, Annotatable};
-use syntax::ext::build::AstBuilder;
-use syntax::symbol::Symbol;
-use syntax_ext::deriving::generic::{cs_fold, TraitDef, MethodDef, combine_substructure};
-use syntax_ext::deriving::generic::ty::{Literal, LifetimeBounds, Path, borrowed_explicit_self};
-use syntax_pos::Span;
-use rustc_plugin::Registry;
-
-#[plugin_registrar]
-pub fn plugin_registrar(reg: &mut Registry) {
-    reg.register_syntax_extension(
-        Symbol::intern("derive_TotalSum"),
-        MultiDecorator(box expand));
-
-    reg.register_syntax_extension(
-        Symbol::intern("derive_Nothing"),
-        MultiDecorator(box noop));
-}
-
-fn noop(_: &mut ExtCtxt, _: Span, _: &ast::MetaItem, _: &Annotatable, _: &mut FnMut(Annotatable)) {}
-
-fn expand(cx: &mut ExtCtxt,
-          span: Span,
-          mitem: &ast::MetaItem,
-          item: &Annotatable,
-          push: &mut FnMut(Annotatable)) {
-    let trait_def = TraitDef {
-        span: span,
-        attributes: vec![],
-        path: Path::new_local("TotalSum"),
-        additional_bounds: vec![],
-        generics: LifetimeBounds::empty(),
-        associated_types: vec![],
-        is_unsafe: false,
-        supports_unions: false,
-        methods: vec![
-            MethodDef {
-                name: "total_sum",
-                generics: LifetimeBounds::empty(),
-                explicit_self: borrowed_explicit_self(),
-                args: vec![],
-                ret_ty: Literal(Path::new_local("isize")),
-                attributes: vec![],
-                is_unsafe: false,
-                unify_fieldless_variants: true,
-                combine_substructure: combine_substructure(box |cx, span, substr| {
-                    let zero = cx.expr_isize(span, 0);
-                    cs_fold(false,
-                            |cx, span, subexpr, field, _| {
-                                cx.expr_binary(span, ast::BinOpKind::Add, subexpr,
-                                    cx.expr_method_call(span, field,
-                                        ast::Ident::from_str("total_sum"), vec![]))
-                            },
-                            zero,
-                            box |cx, span, _, _| { cx.span_bug(span, "wtf??"); },
-                            cx, span, substr)
-                }),
-            },
-        ],
-    };
-
-    trait_def.expand(cx, mitem, item, push)
-}
index 096701bd9b3ed4b4a56157d833626f608fb75d9a..330459fc08f551cfdb9b099c46009898458cd1fd 100644 (file)
 
 use std::borrow::ToOwned;
 use syntax::ast;
-use syntax::ext::hygiene;
 use syntax::ext::build::AstBuilder;
-use syntax::ext::base::{TTMacroExpander, ExtCtxt, MacResult, MacEager, NormalTT};
+use syntax::ext::base::{SyntaxExtension, TTMacroExpander, ExtCtxt, MacResult, MacEager};
+use syntax::ext::hygiene::Transparency;
 use syntax::print::pprust;
-use syntax::ptr::P;
 use syntax::symbol::Symbol;
 use syntax_pos::Span;
 use syntax::tokenstream::TokenStream;
@@ -29,7 +28,7 @@ fn expand<'cx>(&self,
                    ecx: &'cx mut ExtCtxt,
                    sp: Span,
                    _: TokenStream,
-                   _: Option<Span>) -> Box<MacResult+'cx> {
+                   _: Option<Span>) -> Box<dyn MacResult+'cx> {
         let args = self.args.iter().map(|i| pprust::meta_list_item_to_string(i))
             .collect::<Vec<_>>().join(", ");
         MacEager::expr(ecx.expr_str(sp, Symbol::intern(&args)))
@@ -40,9 +39,10 @@ fn expand<'cx>(&self,
 pub fn plugin_registrar(reg: &mut Registry) {
     let args = reg.args().to_owned();
     reg.register_syntax_extension(Symbol::intern("plugin_args"),
-        NormalTT {
+        SyntaxExtension::LegacyBang {
             expander: Box::new(Expander { args: args, }),
             def_info: None,
+            transparency: Transparency::SemiTransparent,
             allow_internal_unstable: None,
             allow_internal_unsafe: false,
             local_inner_macros: false,
index 216c81ca34ce5bc170bcfc3c458a518df466ae53..4d9e0129e54db2d4d3a6b4e49113742271298a87 100644 (file)
@@ -1,3 +1,9 @@
+// WARNING WARNING WARNING WARNING WARNING
+// =======================================
+//
+// This code also appears in src/doc/unstable-book/src/language-features/plugin.md.
+// Please keep the two copies in sync!  FIXME: have rustdoc read this file
+
 // force-host
 
 #![crate_type="dylib"]
 extern crate rustc;
 extern crate rustc_plugin;
 
-use syntax::parse::token;
+use syntax::parse::token::{self, Token};
 use syntax::tokenstream::TokenTree;
 use syntax::ext::base::{ExtCtxt, MacResult, DummyResult, MacEager};
-use syntax::ext::build::AstBuilder;  // trait for expr_usize
+use syntax::ext::build::AstBuilder;  // A trait for expr_usize.
 use syntax_pos::Span;
 use rustc_plugin::Registry;
 
-// WARNING WARNING WARNING WARNING WARNING
-// =======================================
-//
-// This code also appears in src/doc/unstable-book/src/language-features/plugin.md.
-// Please keep the two copies in sync!  FIXME: have rustdoc read this file
-
 fn expand_rn(cx: &mut ExtCtxt, sp: Span, args: &[TokenTree])
-        -> Box<MacResult + 'static> {
+        -> Box<dyn MacResult + 'static> {
 
     static NUMERALS: &'static [(&'static str, usize)] = &[
         ("M", 1000), ("CM", 900), ("D", 500), ("CD", 400),
@@ -38,7 +38,7 @@ fn expand_rn(cx: &mut ExtCtxt, sp: Span, args: &[TokenTree])
     }
 
     let text = match args[0] {
-        TokenTree::Token(_, token::Ident(s, _)) => s.to_string(),
+        TokenTree::Token(Token { kind: token::Ident(s, _), .. }) => s.to_string(),
         _ => {
             cx.span_err(sp, "argument should be a single identifier");
             return DummyResult::any(sp);
diff --git a/src/test/run-pass-fulldeps/custom-derive-partial-eq.rs b/src/test/run-pass-fulldeps/custom-derive-partial-eq.rs
deleted file mode 100644 (file)
index ac8fff4..0000000
+++ /dev/null
@@ -1,10 +0,0 @@
-// aux-build:custom-derive-partial-eq.rs
-// ignore-stage1
-#![feature(plugin)]
-#![plugin(custom_derive_partial_eq)]
-#![allow(unused)]
-
-#[derive_CustomPartialEq] // Check that this is not a stability error.
-enum E { V1, V2 }
-
-fn main() {}
diff --git a/src/test/run-pass-fulldeps/derive-totalsum-attr.rs b/src/test/run-pass-fulldeps/derive-totalsum-attr.rs
deleted file mode 100644 (file)
index 38eaa71..0000000
+++ /dev/null
@@ -1,64 +0,0 @@
-// aux-build:custom-derive-plugin-attr.rs
-// ignore-stage1
-
-#![feature(plugin, rustc_attrs)]
-#![plugin(custom_derive_plugin_attr)]
-
-trait TotalSum {
-    fn total_sum(&self) -> isize;
-}
-
-impl TotalSum for isize {
-    fn total_sum(&self) -> isize {
-        *self
-    }
-}
-
-struct Seven;
-
-impl TotalSum for Seven {
-    fn total_sum(&self) -> isize {
-        7
-    }
-}
-
-#[rustc_derive_TotalSum]
-struct Foo {
-    seven: Seven,
-    bar: Bar,
-    baz: isize,
-    #[ignore]
-    nan: NaN,
-}
-
-#[rustc_derive_TotalSum]
-struct Bar {
-    quux: isize,
-    bleh: isize,
-    #[ignore]
-    nan: NaN2
-}
-
-struct NaN;
-
-impl TotalSum for NaN {
-    fn total_sum(&self) -> isize {
-        panic!();
-    }
-}
-
-struct NaN2;
-
-pub fn main() {
-    let v = Foo {
-        seven: Seven,
-        bar: Bar {
-            quux: 9,
-            bleh: 3,
-            nan: NaN2
-        },
-        baz: 80,
-        nan: NaN
-    };
-    assert_eq!(v.total_sum(), 99);
-}
diff --git a/src/test/run-pass-fulldeps/derive-totalsum.rs b/src/test/run-pass-fulldeps/derive-totalsum.rs
deleted file mode 100644 (file)
index 2b0bb51..0000000
+++ /dev/null
@@ -1,49 +0,0 @@
-// aux-build:custom-derive-plugin.rs
-// ignore-stage1
-
-#![feature(plugin)]
-#![plugin(custom_derive_plugin)]
-
-trait TotalSum {
-    fn total_sum(&self) -> isize;
-}
-
-impl TotalSum for isize {
-    fn total_sum(&self) -> isize {
-        *self
-    }
-}
-
-struct Seven;
-
-impl TotalSum for Seven {
-    fn total_sum(&self) -> isize {
-        7
-    }
-}
-
-#[derive_TotalSum]
-struct Foo {
-    seven: Seven,
-    bar: Bar,
-    baz: isize,
-}
-
-#[derive_TotalSum]
-struct Bar {
-    quux: isize,
-    bleh: isize,
-}
-
-
-pub fn main() {
-    let v = Foo {
-        seven: Seven,
-        bar: Bar {
-            quux: 9,
-            bleh: 3,
-        },
-        baz: 80,
-    };
-    assert_eq!(v.total_sum(), 99);
-}
diff --git a/src/test/run-pass-fulldeps/issue-40663.rs b/src/test/run-pass-fulldeps/issue-40663.rs
deleted file mode 100644 (file)
index 133f630..0000000
+++ /dev/null
@@ -1,13 +0,0 @@
-#![allow(dead_code)]
-// aux-build:custom-derive-plugin.rs
-// ignore-stage1
-
-#![feature(plugin)]
-#![plugin(custom_derive_plugin)]
-
-#[derive_Nothing]
-#[derive_Nothing]
-#[derive_Nothing]
-struct S;
-
-fn main() {}
diff --git a/src/test/run-pass/async-fn-size.rs b/src/test/run-pass/async-fn-size.rs
new file mode 100644 (file)
index 0000000..05afd6d
--- /dev/null
@@ -0,0 +1,106 @@
+// edition:2018
+// aux-build:arc_wake.rs
+
+#![feature(async_await, await_macro)]
+
+extern crate arc_wake;
+
+use std::pin::Pin;
+use std::future::Future;
+use std::sync::{
+    Arc,
+    atomic::{self, AtomicUsize},
+};
+use std::task::{Context, Poll};
+use arc_wake::ArcWake;
+
+struct Counter {
+    wakes: AtomicUsize,
+}
+
+impl ArcWake for Counter {
+    fn wake(self: Arc<Self>) {
+        Self::wake_by_ref(&self)
+    }
+    fn wake_by_ref(arc_self: &Arc<Self>) {
+        arc_self.wakes.fetch_add(1, atomic::Ordering::SeqCst);
+    }
+}
+
+struct WakeOnceThenComplete(bool, u8);
+
+impl Future for WakeOnceThenComplete {
+    type Output = u8;
+    fn poll(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<u8> {
+        if self.0 {
+            Poll::Ready(self.1)
+        } else {
+            cx.waker().wake_by_ref();
+            self.0 = true;
+            Poll::Pending
+        }
+    }
+}
+
+fn wait(fut: impl Future<Output = u8>) -> u8 {
+    let mut fut = Box::pin(fut);
+    let counter = Arc::new(Counter { wakes: AtomicUsize::new(0) });
+    let waker = ArcWake::into_waker(counter.clone());
+    let mut cx = Context::from_waker(&waker);
+    loop {
+        match fut.as_mut().poll(&mut cx) {
+            Poll::Ready(out) => return out,
+            Poll::Pending => (),
+        }
+    }
+}
+
+fn base() -> WakeOnceThenComplete { WakeOnceThenComplete(false, 1) }
+
+async fn await1_level1() -> u8 {
+    await!(base())
+}
+
+async fn await2_level1() -> u8 {
+    await!(base()) + await!(base())
+}
+
+async fn await3_level1() -> u8 {
+    await!(base()) + await!(base()) + await!(base())
+}
+
+async fn await3_level2() -> u8 {
+    await!(await3_level1()) + await!(await3_level1()) + await!(await3_level1())
+}
+
+async fn await3_level3() -> u8 {
+    await!(await3_level2()) + await!(await3_level2()) + await!(await3_level2())
+}
+
+async fn await3_level4() -> u8 {
+    await!(await3_level3()) + await!(await3_level3()) + await!(await3_level3())
+}
+
+async fn await3_level5() -> u8 {
+    await!(await3_level4()) + await!(await3_level4()) + await!(await3_level4())
+}
+
+fn main() {
+    assert_eq!(2, std::mem::size_of_val(&base()));
+    assert_eq!(8, std::mem::size_of_val(&await1_level1()));
+    assert_eq!(12, std::mem::size_of_val(&await2_level1()));
+    assert_eq!(12, std::mem::size_of_val(&await3_level1()));
+    assert_eq!(20, std::mem::size_of_val(&await3_level2()));
+    assert_eq!(28, std::mem::size_of_val(&await3_level3()));
+    assert_eq!(36, std::mem::size_of_val(&await3_level4()));
+    assert_eq!(44, std::mem::size_of_val(&await3_level5()));
+
+    assert_eq!(1,   wait(base()));
+    assert_eq!(1,   wait(await1_level1()));
+    assert_eq!(2,   wait(await2_level1()));
+    assert_eq!(3,   wait(await3_level1()));
+    assert_eq!(9,   wait(await3_level2()));
+    assert_eq!(27,  wait(await3_level3()));
+    assert_eq!(81,  wait(await3_level4()));
+    assert_eq!(243, wait(await3_level5()));
+}
diff --git a/src/test/run-pass/attr-before-view-item.rs b/src/test/run-pass/attr-before-view-item.rs
deleted file mode 100644 (file)
index 14b4189..0000000
+++ /dev/null
@@ -1,11 +0,0 @@
-#![allow(unused_attributes)]
-
-// pretty-expanded FIXME #23616
-
-#![feature(custom_attribute, test)]
-
-#[foo = "bar"]
-extern crate test;
-
-pub fn main() {
-}
diff --git a/src/test/run-pass/attr-before-view-item2.rs b/src/test/run-pass/attr-before-view-item2.rs
deleted file mode 100644 (file)
index 6fc1e35..0000000
+++ /dev/null
@@ -1,13 +0,0 @@
-#![allow(unused_attributes)]
-
-// pretty-expanded FIXME #23616
-
-#![feature(custom_attribute, test)]
-
-mod m {
-    #[foo = "bar"]
-    extern crate test;
-}
-
-pub fn main() {
-}
diff --git a/src/test/run-pass/attr-mix-new.rs b/src/test/run-pass/attr-mix-new.rs
deleted file mode 100644 (file)
index 223a434..0000000
+++ /dev/null
@@ -1,14 +0,0 @@
-#![allow(unused_attributes)]
-#![allow(unknown_lints)]
-
-// pretty-expanded FIXME #23616
-
-#![allow(unused_attribute)]
-#![feature(custom_attribute)]
-
-#[foo(bar)]
-mod foo {
-  #![feature(globs)]
-}
-
-pub fn main() {}
diff --git a/src/test/run-pass/auxiliary/allocator-dummy.rs b/src/test/run-pass/auxiliary/allocator-dummy.rs
deleted file mode 100644 (file)
index bedf302..0000000
+++ /dev/null
@@ -1,59 +0,0 @@
-// no-prefer-dynamic
-
-#![feature(allocator, core_intrinsics, panic_unwind)]
-#![allocator]
-#![crate_type = "rlib"]
-#![no_std]
-
-extern crate unwind;
-
-pub static mut HITS: usize = 0;
-
-type size_t = usize;
-
-extern {
-    fn malloc(size: usize) -> *mut u8;
-    fn free(ptr: *mut u8);
-    fn calloc(size: usize, amt: usize) -> *mut u8;
-    fn realloc(ptr: *mut u8, size: usize) -> *mut u8;
-}
-
-#[no_mangle]
-pub extern fn __rust_allocate(size: usize, align: usize) -> *mut u8 {
-    unsafe {
-        HITS += 1;
-        malloc(size as size_t) as *mut u8
-    }
-}
-
-#[no_mangle]
-pub extern fn __rust_allocate_zeroed(size: usize, _align: usize) -> *mut u8 {
-    unsafe { calloc(size as size_t, 1) as *mut u8 }
-}
-
-#[no_mangle]
-pub extern fn __rust_deallocate(ptr: *mut u8, old_size: usize, align: usize) {
-    unsafe {
-        HITS += 1;
-        free(ptr as *mut _)
-    }
-}
-
-#[no_mangle]
-pub extern fn __rust_reallocate(ptr: *mut u8, old_size: usize, size: usize,
-                                align: usize) -> *mut u8 {
-    unsafe {
-        realloc(ptr as *mut _, size as size_t) as *mut u8
-    }
-}
-
-#[no_mangle]
-pub extern fn __rust_reallocate_inplace(ptr: *mut u8, old_size: usize,
-                                        size: usize, align: usize) -> usize {
-    unsafe { core::intrinsics::abort() }
-}
-
-#[no_mangle]
-pub extern fn __rust_usable_size(size: usize, align: usize) -> usize {
-    unsafe { core::intrinsics::abort() }
-}
index c423bf666e5fb010b7d3f8dac32b6bc577c7e697..361f8a1d3406ec142914ba2ccb147bf736938361 100644 (file)
@@ -6,7 +6,7 @@
 
 // pretty-expanded FIXME #23616
 
-#![feature(custom_attribute, rustc_private)]
+#![feature(rustc_private)]
 
 extern crate check_static_recursion_foreign_helper;
 extern crate libc;
index 3d3240d434292933365912ac5d50a6fe6857e27e..1d3123d216ebfb53ae63ebe190c6961c5f30c6de 100644 (file)
@@ -8,16 +8,12 @@
 const TO_LE_BYTES: [u8; 4] = 0x12_34_56_78_i32.to_le_bytes();
 const TO_NE_BYTES: [u8; 4] = i32::min_value().to_be().to_ne_bytes();
 
-fn ident<T>(ident: T) -> T {
-    ident
-}
-
 fn main() {
-    assert_eq!(REVERSE, ident(0x1e6a2c48));
-    assert_eq!(FROM_BE_BYTES, ident(0x12_34_56_78));
-    assert_eq!(FROM_LE_BYTES, ident(0x78_56_34_12));
-    assert_eq!(FROM_NE_BYTES, ident(i32::min_value()));
-    assert_eq!(TO_BE_BYTES, ident([0x12, 0x34, 0x56, 0x78]));
-    assert_eq!(TO_LE_BYTES, ident([0x78, 0x56, 0x34, 0x12]));
-    assert_eq!(TO_NE_BYTES, ident([0x80, 0, 0, 0]));
+    assert_eq!(REVERSE, 0x1e6a2c48);
+    assert_eq!(FROM_BE_BYTES, 0x12_34_56_78);
+    assert_eq!(FROM_LE_BYTES, 0x78_56_34_12);
+    assert_eq!(FROM_NE_BYTES, i32::min_value());
+    assert_eq!(TO_BE_BYTES, [0x12, 0x34, 0x56, 0x78]);
+    assert_eq!(TO_LE_BYTES, [0x78, 0x56, 0x34, 0x12]);
+    assert_eq!(TO_NE_BYTES, [0x80, 0, 0, 0]);
 }
index 82057868b73bbad56fb9f05d825c4cddbf1f3d66..9597393df72d27940010170a42fc6edcdbf61584 100644 (file)
 const NEG_A: (u32, bool) = 0u32.overflowing_neg();
 const NEG_B: (u32, bool) = core::u32::MAX.overflowing_neg();
 
-fn ident<T>(ident: T) -> T {
-    ident
-}
-
 fn main() {
-    assert_eq!(ADD_A, ident((7, false)));
-    assert_eq!(ADD_B, ident((0, true)));
+    assert_eq!(ADD_A, (7, false));
+    assert_eq!(ADD_B, (0, true));
 
-    assert_eq!(SUB_A, ident((3, false)));
-    assert_eq!(SUB_B, ident((u32::max_value(), true)));
+    assert_eq!(SUB_A, (3, false));
+    assert_eq!(SUB_B, (u32::max_value(), true));
 
-    assert_eq!(MUL_A, ident((10, false)));
-    assert_eq!(MUL_B, ident((1410065408, true)));
+    assert_eq!(MUL_A, (10, false));
+    assert_eq!(MUL_B, (1410065408, true));
 
-    assert_eq!(SHL_A, ident((0x10, false)));
-    assert_eq!(SHL_B, ident((0x10, true)));
+    assert_eq!(SHL_A, (0x10, false));
+    assert_eq!(SHL_B, (0x10, true));
 
-    assert_eq!(SHR_A, ident((0x1, false)));
-    assert_eq!(SHR_B, ident((0x1, true)));
+    assert_eq!(SHR_A, (0x1, false));
+    assert_eq!(SHR_B, (0x1, true));
 
-    assert_eq!(NEG_A, ident((0, false)));
-    assert_eq!(NEG_B, ident((1, true)));
+    assert_eq!(NEG_A, (0, false));
+    assert_eq!(NEG_B, (1, true));
 }
index 965f317c42466646a119df95c19daabd4e599e18..16946eadd632aeb7c6dd1d652c729ab47b8fd9cc 100644 (file)
 const MULTIPLE_ROTATE_LEFT: i32 = 0b0010_0001i32.rotate_left(128);
 const MULTIPLE_ROTATE_RIGHT: i32 = 0b0010_0001i32.rotate_right(128);
 
-fn ident<T>(ident: T) -> T {
-    ident
-}
-
 fn main() {
-    assert_eq!(LEFT, ident(0xb301));
-    assert_eq!(RIGHT, ident(0x0100_00b3));
+    assert_eq!(LEFT, 0xb301);
+    assert_eq!(RIGHT, 0x0100_00b3);
 
-    assert_eq!(LEFT_OVERFLOW, ident(0));
-    assert_eq!(RIGHT_OVERFLOW, ident(0));
-    assert_eq!(ONE_LEFT_OVERFLOW, ident(0b0001_0000_0000_0000));
-    assert_eq!(ONE_RIGHT_OVERFLOW, ident(0b0001_0000));
+    assert_eq!(LEFT_OVERFLOW, 0);
+    assert_eq!(RIGHT_OVERFLOW, 0);
+    assert_eq!(ONE_LEFT_OVERFLOW, 0b0001_0000_0000_0000);
+    assert_eq!(ONE_RIGHT_OVERFLOW, 0b0001_0000);
 
-    assert_eq!(NON_ZERO_LEFT_OVERFLOW, ident(0b0010_0000_0000_0000));
-    assert_eq!(NON_ZERO_RIGHT_OVERFLOW, ident(0b0000_0000_0010_0000));
+    assert_eq!(NON_ZERO_LEFT_OVERFLOW, 0b0010_0000_0000_0000);
+    assert_eq!(NON_ZERO_RIGHT_OVERFLOW, 0b0000_0000_0010_0000);
 
-    assert_eq!(ZERO_ROTATE_LEFT, ident(0b0010_0001));
-    assert_eq!(ZERO_ROTATE_RIGHT, ident(0b0111_1001));
+    assert_eq!(ZERO_ROTATE_LEFT, 0b0010_0001);
+    assert_eq!(ZERO_ROTATE_RIGHT, 0b0111_1001);
 
-    assert_eq!(MULTIPLE_ROTATE_LEFT, ident(0b0010_0001));
-    assert_eq!(MULTIPLE_ROTATE_RIGHT, ident(0b0010_0001));
+    assert_eq!(MULTIPLE_ROTATE_LEFT, 0b0010_0001);
+    assert_eq!(MULTIPLE_ROTATE_RIGHT, 0b0010_0001);
 }
index 9d656a020306921d4cc851fa592a32cd39b051fd..fcd3ef4ea025b805c7b8f20ab7261861bbaa47e4 100644 (file)
@@ -1,11 +1,21 @@
+#![feature(const_int_sign)]
+
 const NEGATIVE_A: bool = (-10i32).is_negative();
 const NEGATIVE_B: bool = 10i32.is_negative();
-const POSITIVE_A: bool= (-10i32).is_positive();
-const POSITIVE_B: bool= 10i32.is_positive();
+const POSITIVE_A: bool = (-10i32).is_positive();
+const POSITIVE_B: bool = 10i32.is_positive();
+
+const SIGNUM_POS: i32 = 10i32.signum();
+const SIGNUM_NIL: i32 = 0i32.signum();
+const SIGNUM_NEG: i32 = (-42i32).signum();
 
 fn main() {
     assert!(NEGATIVE_A);
     assert!(!NEGATIVE_B);
     assert!(!POSITIVE_A);
     assert!(POSITIVE_B);
+
+    assert_eq!(SIGNUM_POS, 1);
+    assert_eq!(SIGNUM_NIL, 0);
+    assert_eq!(SIGNUM_NEG, -1);
 }
index 140fd57ecb8026b537b5ad095b34d7a7c402e744..db86c25194f08585d3e6824242a0dd807531461e 100644 (file)
 const NEG_A: u32 = 5u32.wrapping_neg();
 const NEG_B: u32 = 1234567890u32.wrapping_neg();
 
-fn ident<T>(ident: T) -> T {
-    ident
-}
-
 fn main() {
-    assert_eq!(ADD_A, ident(255));
-    assert_eq!(ADD_B, ident(199));
+    assert_eq!(ADD_A, 255);
+    assert_eq!(ADD_B, 199);
 
-    assert_eq!(SUB_A, ident(0));
-    assert_eq!(SUB_B, ident(101));
+    assert_eq!(SUB_A, 0);
+    assert_eq!(SUB_B, 101);
 
-    assert_eq!(MUL_A, ident(120));
-    assert_eq!(MUL_B, ident(44));
+    assert_eq!(MUL_A, 120);
+    assert_eq!(MUL_B, 44);
 
-    assert_eq!(SHL_A, ident(128));
-    assert_eq!(SHL_B, ident(1));
+    assert_eq!(SHL_A, 128);
+    assert_eq!(SHL_B, 1);
 
-    assert_eq!(SHR_A, ident(1));
-    assert_eq!(SHR_B, ident(128));
+    assert_eq!(SHR_A, 1);
+    assert_eq!(SHR_B, 128);
 
-    assert_eq!(NEG_A, ident(4294967291));
-    assert_eq!(NEG_B, ident(3060399406));
+    assert_eq!(NEG_A, 4294967291);
+    assert_eq!(NEG_B, 3060399406);
 }
index cbe6d864c9c3a7eb35be33cb0679a3a2759e9546..936f31954d3dd446b8b000f43def12caf4e93df8 100644 (file)
@@ -2,7 +2,7 @@
 #![feature(test)]
 
 extern crate test;
-use test::black_box as b;
+use test::black_box as b; // prevent promotion of the argument and const-propagation of the result
 
 const BE_U32: u32 = 55u32.to_be();
 const LE_U32: u32 = 55u32.to_le();
index c5b9d837b47a896085dc83fc23e38137388394cd..67d52ad08246a38f4f84c5b726ecab0516613bb9 100644 (file)
@@ -1,15 +1,17 @@
 // run-pass
 
+#![feature(ptr_internals, test)]
+
+extern crate test;
+use test::black_box as b; // prevent promotion of the argument and const-propagation of the result
+
 use std::ptr::NonNull;
 
 const DANGLING: NonNull<u32> = NonNull::dangling();
 const CASTED: NonNull<u32> = NonNull::cast(NonNull::<i32>::dangling());
 
-fn ident<T>(ident: T) -> T {
-    ident
-}
-
 pub fn main() {
-    assert_eq!(DANGLING, ident(NonNull::dangling()));
-    assert_eq!(CASTED, ident(NonNull::dangling()));
+    // Be super-extra paranoid and cast the fn items to fn pointers before blackboxing them.
+    assert_eq!(DANGLING, b::<fn() -> _>(NonNull::dangling)());
+    assert_eq!(CASTED, b::<fn() -> _>(NonNull::dangling)());
 }
index eb371ab184166491e039136ed2d4ae954be90c8d..e8735e1a32c2ce3b1b111530617630ed97234d22 100644 (file)
@@ -1,15 +1,16 @@
 // run-pass
 
-#![feature(ptr_internals)]
+#![feature(ptr_internals, test)]
+
+extern crate test;
+use test::black_box as b; // prevent promotion of the argument and const-propagation of the result
 
 use std::ptr::Unique;
 
-const PTR: *mut u32 = Unique::empty().as_ptr();
 
-fn ident<T>(ident: T) -> T {
-    ident
-}
+const PTR: *mut u32 = Unique::empty().as_ptr();
 
 pub fn main() {
-    assert_eq!(PTR, ident(Unique::<u32>::empty().as_ptr()));
+    // Be super-extra paranoid and cast the fn items to fn pointers before blackboxing them.
+    assert_eq!(PTR, b::<fn() -> _>(Unique::<u32>::empty)().as_ptr());
 }
diff --git a/src/test/run-pass/generator/overlap-locals.rs b/src/test/run-pass/generator/overlap-locals.rs
new file mode 100644 (file)
index 0000000..704484a
--- /dev/null
@@ -0,0 +1,27 @@
+#![feature(generators)]
+
+fn main() {
+    let a = || {
+        {
+            let w: i32 = 4;
+            yield;
+            println!("{:?}", w);
+        }
+        {
+            let x: i32 = 5;
+            yield;
+            println!("{:?}", x);
+        }
+        {
+            let y: i32 = 6;
+            yield;
+            println!("{:?}", y);
+        }
+        {
+            let z: i32 = 7;
+            yield;
+            println!("{:?}", z);
+        }
+    };
+    assert_eq!(8, std::mem::size_of_val(&a));
+}
diff --git a/src/test/run-pass/item-attributes.rs b/src/test/run-pass/item-attributes.rs
deleted file mode 100644 (file)
index e3ed350..0000000
+++ /dev/null
@@ -1,184 +0,0 @@
-#![allow(non_camel_case_types)]
-#![allow(non_upper_case_globals)]
-#![allow(unused_attributes)]
-#![allow(dead_code)]
-#![allow(unknown_lints)]
-// These are attributes of the implicit crate. Really this just needs to parse
-// for completeness since .rs files linked from .rc files support this
-// notation to specify their module's attributes
-
-#![feature(custom_attribute)]
-#![allow(unused_attribute)]
-#![attr1 = "val"]
-#![attr2 = "val"]
-#![attr3]
-#![attr4(attr5)]
-
-#![crate_id="foobar#0.1"]
-
-// These are attributes of the following mod
-#[attr1 = "val"]
-#[attr2 = "val"]
-mod test_first_item_in_file_mod {}
-
-mod test_single_attr_outer {
-    #[attr = "val"]
-    pub static x: isize = 10;
-
-    #[attr = "val"]
-    pub fn f() { }
-
-    #[attr = "val"]
-    pub mod mod1 {}
-
-    pub mod rustrt {
-        #[attr = "val"]
-        extern {}
-    }
-}
-
-mod test_multi_attr_outer {
-    #[attr1 = "val"]
-    #[attr2 = "val"]
-    pub static x: isize = 10;
-
-    #[attr1 = "val"]
-    #[attr2 = "val"]
-    pub fn f() { }
-
-    #[attr1 = "val"]
-    #[attr2 = "val"]
-    pub mod mod1 {}
-
-    pub mod rustrt {
-        #[attr1 = "val"]
-        #[attr2 = "val"]
-        extern {}
-    }
-
-    #[attr1 = "val"]
-    #[attr2 = "val"]
-    struct t {x: isize}
-}
-
-mod test_stmt_single_attr_outer {
-    pub fn f() {
-        #[attr = "val"]
-        static x: isize = 10;
-
-        #[attr = "val"]
-        fn f() { }
-
-        #[attr = "val"]
-        mod mod1 {
-        }
-
-        mod rustrt {
-            #[attr = "val"]
-            extern {
-            }
-        }
-    }
-}
-
-mod test_stmt_multi_attr_outer {
-    pub fn f() {
-
-        #[attr1 = "val"]
-        #[attr2 = "val"]
-        static x: isize = 10;
-
-        #[attr1 = "val"]
-        #[attr2 = "val"]
-        fn f() { }
-
-        #[attr1 = "val"]
-        #[attr2 = "val"]
-        mod mod1 {
-        }
-
-        mod rustrt {
-            #[attr1 = "val"]
-            #[attr2 = "val"]
-            extern {
-            }
-        }
-    }
-}
-
-mod test_attr_inner {
-    pub mod m {
-        // This is an attribute of mod m
-        #![attr = "val"]
-    }
-}
-
-mod test_attr_inner_then_outer {
-    pub mod m {
-        // This is an attribute of mod m
-        #![attr = "val"]
-        // This is an attribute of fn f
-        #[attr = "val"]
-        fn f() { }
-    }
-}
-
-mod test_attr_inner_then_outer_multi {
-    pub mod m {
-        // This is an attribute of mod m
-        #![attr1 = "val"]
-        #![attr2 = "val"]
-        // This is an attribute of fn f
-        #[attr1 = "val"]
-        #[attr2 = "val"]
-        fn f() { }
-    }
-}
-
-mod test_distinguish_syntax_ext {
-    pub fn f() {
-        format!("test{}", "s");
-        #[attr = "val"]
-        fn g() { }
-    }
-}
-
-mod test_other_forms {
-    #[attr]
-    #[attr(word)]
-    #[attr(attr(word))]
-    #[attr(key1 = "val", key2 = "val", attr)]
-    pub fn f() { }
-}
-
-mod test_foreign_items {
-    pub mod rustrt {
-        extern {
-            #![attr]
-
-            #[attr]
-            fn rust_get_test_int() -> u32;
-        }
-    }
-}
-
-
-// FIXME(#623): - these aren't supported yet
-/*mod test_literals {
-    #![str = "s"]
-    #![char = 'c']
-    #![isize = 100]
-    #![usize = 100_usize]
-    #![mach_int = 100u32]
-    #![float = 1.0]
-    #![mach_float = 1.0f32]
-    #![nil = ()]
-    #![bool = true]
-    mod m {}
-}*/
-
-fn test_fn_inner() {
-    #![inner_fn_attr]
-}
-
-pub fn main() { }
index 126cab67c1c04d636d80c04ada67a420d4b765e2..f9d1b17b8dd823a2c5debdeea4b62425b8db5b8e 100644 (file)
@@ -30,6 +30,9 @@ fn main() {
     let s = r"string\r
 literal";\r
     assert_eq!(s, "string\nliteral");\r
+    let s = br"byte string\r
+literal";\r
+    assert_eq!(s, "byte string\nliteral".as_bytes());\r
 \r
     // validate that our source file has CRLF endings\r
     let source = include_str!("lexer-crlf-line-endings-string-literal-doc-comment.rs");\r
diff --git a/src/test/run-pass/macros/macro-at-most-once-rep-2015.rs b/src/test/run-pass/macros/macro-at-most-once-rep-2015.rs
new file mode 100644 (file)
index 0000000..66597c0
--- /dev/null
@@ -0,0 +1,50 @@
+// run-pass
+
+#![allow(unused_mut)]
+
+// Check that when `?` is followed by what looks like a Kleene operator (?, +, and *)
+// then that `?` is not interpreted as a separator. In other words, `$(pat)?+` matches `pat +`
+// or `+` but does not match `pat` or `pat ? pat`.
+
+// edition:2015
+
+macro_rules! foo {
+    // Check for `?`.
+    ($($a:ident)? ? $num:expr) => {
+        foo!($($a)? ; $num);
+    };
+    // Check for `+`.
+    ($($a:ident)? + $num:expr) => {
+        foo!($($a)? ; $num);
+    };
+    // Check for `*`.
+    ($($a:ident)? * $num:expr) => {
+        foo!($($a)? ; $num);
+    };
+    // Check for `;`, not a Kleene operator.
+    ($($a:ident)? ; $num:expr) => {
+        let mut x = 0;
+
+        $(
+            x += $a;
+        )?
+
+        assert_eq!(x, $num);
+    };
+}
+
+pub fn main() {
+    let a = 1;
+
+    // Accept 0 repetitions.
+    foo!( ; 0);
+    foo!( + 0);
+    foo!( * 0);
+    foo!( ? 0);
+
+    // Accept 1 repetition.
+    foo!(a ; 1);
+    foo!(a + 1);
+    foo!(a * 1);
+    foo!(a ? 1);
+}
diff --git a/src/test/run-pass/macros/macro-at-most-once-rep-2018.rs b/src/test/run-pass/macros/macro-at-most-once-rep-2018.rs
new file mode 100644 (file)
index 0000000..b37f385
--- /dev/null
@@ -0,0 +1,50 @@
+// run-pass
+
+#![allow(unused_mut)]
+
+// Check that when `?` is followed by what looks like a Kleene operator (?, +, and *)
+// then that `?` is not interpreted as a separator. In other words, `$(pat)?+` matches `pat +`
+// or `+` but does not match `pat` or `pat ? pat`.
+
+// edition:2018
+
+macro_rules! foo {
+    // Check for `?`.
+    ($($a:ident)? ? $num:expr) => {
+        foo!($($a)? ; $num);
+    };
+    // Check for `+`.
+    ($($a:ident)? + $num:expr) => {
+        foo!($($a)? ; $num);
+    };
+    // Check for `*`.
+    ($($a:ident)? * $num:expr) => {
+        foo!($($a)? ; $num);
+    };
+    // Check for `;`, not a Kleene operator.
+    ($($a:ident)? ; $num:expr) => {
+        let mut x = 0;
+
+        $(
+            x += $a;
+        )?
+
+        assert_eq!(x, $num);
+    };
+}
+
+pub fn main() {
+    let a = 1;
+
+    // Accept 0 repetitions.
+    foo!( ; 0);
+    foo!( + 0);
+    foo!( * 0);
+    foo!( ? 0);
+
+    // Accept 1 repetition.
+    foo!(a ; 1);
+    foo!(a + 1);
+    foo!(a * 1);
+    foo!(a ? 1);
+}
diff --git a/src/test/run-pass/macros/macro-at-most-once-rep.rs b/src/test/run-pass/macros/macro-at-most-once-rep.rs
deleted file mode 100644 (file)
index 582ef08..0000000
+++ /dev/null
@@ -1,33 +0,0 @@
-// run-pass
-#![allow(unused_mut)]
-// The logic for parsing Kleene operators in macros has a special case to disambiguate `?`.
-// Specifically, `$(pat)?` is the ZeroOrOne operator whereas `$(pat)?+` or `$(pat)?*` are the
-// ZeroOrMore and OneOrMore operators using `?` as a separator. These tests are intended to
-// exercise that logic in the macro parser.
-//
-// Moreover, we also throw in some tests for using a separator with `?`, which is meaningless but
-// included for consistency with `+` and `*`.
-//
-// This test focuses on non-error cases and making sure the correct number of repetitions happen.
-
-// edition:2018
-
-macro_rules! foo {
-    ($($a:ident)? ; $num:expr) => { {
-        let mut x = 0;
-
-        $(
-            x += $a;
-         )?
-
-        assert_eq!(x, $num);
-    } }
-}
-
-pub fn main() {
-    let a = 1;
-
-    // accept 0 or 1 repetitions
-    foo!( ; 0);
-    foo!(a ; 1);
-}
index 953f6be53c5d4300fcd8dabe88ff9e070474d9e9..d382e8b719713dacd439b9e95abeb3a4b492b5dd 100644 (file)
@@ -1,5 +1,4 @@
 // run-pass
-#![feature(custom_attribute)]
 
 macro_rules! compiles_fine {
     (#[$at:meta]) => {
diff --git a/src/test/run-pass/methods/method-attributes.rs b/src/test/run-pass/methods/method-attributes.rs
deleted file mode 100644 (file)
index c7d8b3b..0000000
+++ /dev/null
@@ -1,31 +0,0 @@
-// run-pass
-#![allow(unused_attributes)]
-#![allow(non_camel_case_types)]
-
-// pp-exact - Make sure we print all the attributes
-// pretty-expanded FIXME #23616
-
-#![feature(custom_attribute)]
-
-#[frobable]
-trait frobable {
-    #[frob_attr]
-    fn frob(&self);
-    #[defrob_attr]
-    fn defrob(&self);
-}
-
-#[int_frobable]
-impl frobable for isize {
-    #[frob_attr1]
-    fn frob(&self) {
-        #![frob_attr2]
-    }
-
-    #[defrob_attr1]
-    fn defrob(&self) {
-        #![defrob_attr2]
-    }
-}
-
-pub fn main() { }
index f275c67fdf01fca9836ade65930ca4d2d488719c..ea3ad7aed492666b10b7e68985b215089e6fb926 100644 (file)
@@ -1,5 +1,4 @@
 // run-pass
-#![feature(as_cell)]
 
 use std::cell::Cell;
 
index 8d72b1f6f0d24b99b1307706e6307a623b6e7499..fa872caa3b47edf003d5d94369072b4be858e5f5 100644 (file)
@@ -1,6 +1,5 @@
 // run-pass
 #![allow(dead_code)]
-#![feature(repr_align_enum)]
 
 use std::mem;
 
diff --git a/src/test/run-pass/structs-enums/class-attributes-1.rs b/src/test/run-pass/structs-enums/class-attributes-1.rs
deleted file mode 100644 (file)
index 11ea29e..0000000
+++ /dev/null
@@ -1,21 +0,0 @@
-// run-pass
-#![allow(unused_attributes)]
-#![allow(non_camel_case_types)]
-
-// pp-exact - Make sure we actually print the attributes
-#![feature(custom_attribute)]
-
-struct cat {
-    name: String,
-}
-
-impl Drop for cat {
-    #[cat_dropper]
-    fn drop(&mut self) { println!("{} landed on hir feet" , self . name); }
-}
-
-
-#[cat_maker]
-fn cat(name: String) -> cat { cat{name: name,} }
-
-pub fn main() { let _kitty = cat("Spotty".to_string()); }
diff --git a/src/test/run-pass/structs-enums/class-attributes-2.rs b/src/test/run-pass/structs-enums/class-attributes-2.rs
deleted file mode 100644 (file)
index d6cf63e..0000000
+++ /dev/null
@@ -1,33 +0,0 @@
-// run-pass
-#![allow(unused_attributes)]
-#![allow(non_camel_case_types)]
-
-#![feature(custom_attribute)]
-
-struct cat {
-  name: String,
-}
-
-impl Drop for cat {
-    #[cat_dropper]
-    /**
-       Actually, cats don't always land on their feet when you drop them.
-    */
-    fn drop(&mut self) {
-        println!("{} landed on hir feet", self.name);
-    }
-}
-
-#[cat_maker]
-/**
-Maybe it should technically be a kitten_maker.
-*/
-fn cat(name: String) -> cat {
-    cat {
-        name: name
-    }
-}
-
-pub fn main() {
-  let _kitty = cat("Spotty".to_string());
-}
index 87629665bc2bd6dc015fb558f3b0eb8599c9dd31..f871c218558603175ec75150d27bf8f54ecea348 100644 (file)
@@ -1,4 +1,6 @@
 // run-pass
+#![feature(transparent_unions)]
+
 use std::mem::size_of;
 use std::num::NonZeroUsize;
 use std::ptr::NonNull;
@@ -10,6 +12,11 @@ trait Trait { fn dummy(&self) { } }
 impl<T> Mirror for T { type Image = T; }
 struct ParamTypeStruct<T>(T);
 struct AssocTypeStruct<T>(<T as Mirror>::Image);
+#[repr(transparent)]
+union MaybeUninitUnion<T: Copy> {
+    _value: T,
+    _uninit: (),
+}
 
 fn main() {
     // Functions
@@ -29,9 +36,12 @@ fn main() {
     // Pointers - Box<T>
     assert_eq!(size_of::<Box<isize>>(), size_of::<Option<Box<isize>>>());
 
-    // The optimization can't apply to raw pointers
+    // The optimization can't apply to raw pointers or unions with a ZST field.
     assert!(size_of::<Option<*const isize>>() != size_of::<*const isize>());
     assert!(Some(0 as *const isize).is_some()); // Can't collapse None to null
+    assert_ne!(size_of::<fn(isize)>(), size_of::<Option<MaybeUninitUnion<fn(isize)>>>());
+    assert_ne!(size_of::<&str>(), size_of::<Option<MaybeUninitUnion<&str>>>());
+    assert_ne!(size_of::<NonNull<isize>>(), size_of::<Option<MaybeUninitUnion<NonNull<isize>>>>());
 
     struct Foo {
         _a: Box<isize>
diff --git a/src/test/run-pass/variant-attributes.rs b/src/test/run-pass/variant-attributes.rs
deleted file mode 100644 (file)
index 19de3ff..0000000
+++ /dev/null
@@ -1,38 +0,0 @@
-#![allow(unused_attributes)]
-#![allow(non_camel_case_types)]
-#![allow(dead_code)]
-// pp-exact - Make sure we actually print the attributes
-// pretty-expanded FIXME #23616
-
-#![feature(custom_attribute)]
-
-enum crew_of_enterprise_d {
-
-    #[captain]
-    jean_luc_picard,
-
-    #[oldcommander]
-    william_t_riker,
-
-    #[chief_medical_officer]
-    beverly_crusher,
-
-    #[ships_councellor]
-    deanna_troi,
-
-    #[lieutenant_oldcommander]
-    data,
-
-    #[chief_of_security]
-    worf,
-
-    #[chief_engineer]
-    geordi_la_forge,
-}
-
-fn boldly_go(_crew_member: crew_of_enterprise_d, _where: String) { }
-
-pub fn main() {
-    boldly_go(crew_of_enterprise_d::worf,
-              "where no one has gone before".to_string());
-}
index 6b00293b6e52ad46a5f078c579590e6595375ddb..02901dbf3aa11bdf092a806af26a93352c1ece0a 100644 (file)
@@ -1,3 +1,5 @@
+#![feature(generators)]
+
 #![allow(non_camel_case_types)]
 #![allow(dead_code)]
 #![allow(unreachable_code)]
@@ -141,6 +143,12 @@ fn r#match() {
     assert_eq!(val, ());
 }
 
+fn i_yield() {
+    static || {
+        yield yield yield yield yield yield yield yield yield;
+    };
+}
+
 pub fn main() {
     strange();
     funny();
@@ -157,4 +165,5 @@ pub fn main() {
     special_characters();
     punch_card();
     r#match();
+    i_yield();
 }
diff --git a/src/test/rustdoc/const-generics/const-generic-slice.rs b/src/test/rustdoc/const-generics/const-generic-slice.rs
new file mode 100644 (file)
index 0000000..60d9677
--- /dev/null
@@ -0,0 +1,12 @@
+#![crate_name = "foo"]
+#![feature(const_generics)]
+
+pub trait Array {
+    type Item;
+}
+
+// @has foo/trait.Array.html
+// @has - '//h3[@class="impl"]' 'impl<T, const N: usize> Array for [T; N]'
+impl <T, const N: usize> Array for [T; N] {
+    type Item = T;
+}
index 424d7ebb37444a49120f747197782a8df2e68534..349e3f4a836a0bbf5cf1bb67a7f9072333590cb4 100644 (file)
@@ -6,7 +6,7 @@ LL |         match *s { S(v) => v }
    |               |      |
    |               |      data moved here
    |               |      move occurs because `v` has type `std::vec::Vec<isize>`, which does not implement the `Copy` trait
-   |               help: consider removing the `*`: `s`
+   |               help: consider borrowing here: `&*s`
 
 error: aborting due to previous error
 
diff --git a/src/test/ui/async-await/issue-61452.rs b/src/test/ui/async-await/issue-61452.rs
new file mode 100644 (file)
index 0000000..20b9b64
--- /dev/null
@@ -0,0 +1,14 @@
+// edition:2018
+#![feature(async_await)]
+
+pub async fn f(x: Option<usize>) {
+    x.take();
+    //~^ ERROR cannot borrow `x` as mutable, as it is not declared as mutable [E0596]
+}
+
+pub async fn g(x: usize) {
+    x += 1;
+    //~^ ERROR cannot assign twice to immutable variable `x` [E0384]
+}
+
+fn main() {}
diff --git a/src/test/ui/async-await/issue-61452.stderr b/src/test/ui/async-await/issue-61452.stderr
new file mode 100644 (file)
index 0000000..742490d
--- /dev/null
@@ -0,0 +1,23 @@
+error[E0596]: cannot borrow `x` as mutable, as it is not declared as mutable
+  --> $DIR/issue-61452.rs:5:5
+   |
+LL | pub async fn f(x: Option<usize>) {
+   |                - help: consider changing this to be mutable: `mut x`
+LL |     x.take();
+   |     ^ cannot borrow as mutable
+
+error[E0384]: cannot assign twice to immutable variable `x`
+  --> $DIR/issue-61452.rs:10:5
+   |
+LL | pub async fn g(x: usize) {
+   |                -
+   |                |
+   |                first assignment to `x`
+   |                help: make this binding mutable: `mut x`
+LL |     x += 1;
+   |     ^^^^^^ cannot assign twice to immutable variable
+
+error: aborting due to 2 previous errors
+
+Some errors have detailed explanations: E0384, E0596.
+For more information about an error, try `rustc --explain E0384`.
index 6aacb9d572aea670dcefebda3d0e89fb11125e25..c301492b9e21a52bae33c540475f00f0aba2921f 100644 (file)
@@ -1,4 +1,2 @@
-#![feature(custom_attribute)]
-
 #[my_attr = !] //~ ERROR unexpected token: `!`
 fn main() {}
index 571779dfa1ae72f740d58e14cdc87170ff622aa1..bb37c2e0cc4734b1d499bbc4416a2e5cc9a1365d 100644 (file)
@@ -1,5 +1,5 @@
 error: unexpected token: `!`
-  --> $DIR/attr-eq-token-tree.rs:3:13
+  --> $DIR/attr-eq-token-tree.rs:1:13
    |
 LL | #[my_attr = !]
    |             ^
index 1df2947cbe2ddd565ec4fd9c814fd241b65e1314..a0b82375e777d02edc058e849374602bef6d8568 100644 (file)
@@ -1,7 +1,6 @@
 #![feature(repr_simd)]
-#![feature(repr_align_enum)]
 
-#[repr(C)] //~ ERROR: attribute should be applied to struct, enum or union
+#[repr(C)] //~ ERROR: attribute should be applied to struct, enum, or union
 fn f() {}
 
 #[repr(C)]
index f8ad7eec454c4f93bfabe690439b30c3a4b402db..82d80d8d0b1738a23a86677d2ce2417ee0d0dcd1 100644 (file)
@@ -1,13 +1,13 @@
-error[E0517]: attribute should be applied to struct, enum or union
-  --> $DIR/attr-usage-repr.rs:4:8
+error[E0517]: attribute should be applied to struct, enum, or union
+  --> $DIR/attr-usage-repr.rs:3:8
    |
 LL | #[repr(C)]
    |        ^
 LL | fn f() {}
-   | --------- not a struct, enum or union
+   | --------- not a struct, enum, or union
 
 error[E0517]: attribute should be applied to enum
-  --> $DIR/attr-usage-repr.rs:16:8
+  --> $DIR/attr-usage-repr.rs:15:8
    |
 LL | #[repr(i8)]
    |        ^^
@@ -15,7 +15,7 @@ LL | struct SInt(f64, f64);
    | ---------------------- not an enum
 
 error[E0517]: attribute should be applied to struct or union
-  --> $DIR/attr-usage-repr.rs:25:8
+  --> $DIR/attr-usage-repr.rs:24:8
    |
 LL | #[repr(packed)]
    |        ^^^^^^
@@ -23,7 +23,7 @@ LL | enum EPacked { A, B }
    | --------------------- not a struct or union
 
 error[E0517]: attribute should be applied to struct
-  --> $DIR/attr-usage-repr.rs:28:8
+  --> $DIR/attr-usage-repr.rs:27:8
    |
 LL | #[repr(simd)]
    |        ^^^^
diff --git a/src/test/ui/attributes/attr-before-view-item.rs b/src/test/ui/attributes/attr-before-view-item.rs
new file mode 100644 (file)
index 0000000..fc040bd
--- /dev/null
@@ -0,0 +1,10 @@
+// compile-pass
+// pretty-expanded FIXME #23616
+
+#![feature(rustc_attrs)]
+#![feature(test)]
+
+#[rustc_dummy = "bar"]
+extern crate test;
+
+fn main() {}
diff --git a/src/test/ui/attributes/attr-before-view-item2.rs b/src/test/ui/attributes/attr-before-view-item2.rs
new file mode 100644 (file)
index 0000000..c7fad38
--- /dev/null
@@ -0,0 +1,12 @@
+// compile-pass
+// pretty-expanded FIXME #23616
+
+#![feature(rustc_attrs)]
+#![feature(test)]
+
+mod m {
+    #[rustc_dummy = "bar"]
+    extern crate test;
+}
+
+fn main() {}
diff --git a/src/test/ui/attributes/attr-mix-new.rs b/src/test/ui/attributes/attr-mix-new.rs
new file mode 100644 (file)
index 0000000..d9cb551
--- /dev/null
@@ -0,0 +1,11 @@
+// compile-pass
+// pretty-expanded FIXME #23616
+
+#![feature(rustc_attrs)]
+
+#[rustc_dummy(bar)]
+mod foo {
+  #![feature(globs)]
+}
+
+fn main() {}
diff --git a/src/test/ui/attributes/attrs-with-no-formal-in-generics-1.rs b/src/test/ui/attributes/attrs-with-no-formal-in-generics-1.rs
new file mode 100644 (file)
index 0000000..ca5fdd9
--- /dev/null
@@ -0,0 +1,15 @@
+// This test checks variations on `<#[attr] 'a, #[oops]>`, where
+// `#[oops]` is left dangling (that is, it is unattached, with no
+// formal binding following it).
+
+#![feature(rustc_attrs)]
+
+struct RefIntPair<'a, 'b>(&'a u32, &'b u32);
+
+impl<#[rustc_1] 'a, 'b, #[oops]> RefIntPair<'a, 'b> {
+    //~^ ERROR trailing attribute after generic parameter
+}
+
+fn main() {
+
+}
diff --git a/src/test/ui/attributes/attrs-with-no-formal-in-generics-1.stderr b/src/test/ui/attributes/attrs-with-no-formal-in-generics-1.stderr
new file mode 100644 (file)
index 0000000..55e7a98
--- /dev/null
@@ -0,0 +1,8 @@
+error: trailing attribute after generic parameter
+  --> $DIR/attrs-with-no-formal-in-generics-1.rs:9:25
+   |
+LL | impl<#[rustc_1] 'a, 'b, #[oops]> RefIntPair<'a, 'b> {
+   |                         ^^^^^^^ attributes must go before parameters
+
+error: aborting due to previous error
+
diff --git a/src/test/ui/attributes/attrs-with-no-formal-in-generics-2.rs b/src/test/ui/attributes/attrs-with-no-formal-in-generics-2.rs
new file mode 100644 (file)
index 0000000..c795612
--- /dev/null
@@ -0,0 +1,12 @@
+// This test checks variations on `<#[attr] 'a, #[oops]>`, where
+// `#[oops]` is left dangling (that is, it is unattached, with no
+// formal binding following it).
+
+#![feature(rustc_attrs)]
+
+struct RefAny<'a, T>(&'a T);
+
+impl<#[rustc_1] 'a, #[rustc_2] T, #[oops]> RefAny<'a, T> {}
+//~^ ERROR trailing attribute after generic parameter
+
+fn main() {}
diff --git a/src/test/ui/attributes/attrs-with-no-formal-in-generics-2.stderr b/src/test/ui/attributes/attrs-with-no-formal-in-generics-2.stderr
new file mode 100644 (file)
index 0000000..acd0ae3
--- /dev/null
@@ -0,0 +1,8 @@
+error: trailing attribute after generic parameter
+  --> $DIR/attrs-with-no-formal-in-generics-2.rs:9:35
+   |
+LL | impl<#[rustc_1] 'a, #[rustc_2] T, #[oops]> RefAny<'a, T> {}
+   |                                   ^^^^^^^ attributes must go before parameters
+
+error: aborting due to previous error
+
diff --git a/src/test/ui/attributes/attrs-with-no-formal-in-generics-3.rs b/src/test/ui/attributes/attrs-with-no-formal-in-generics-3.rs
new file mode 100644 (file)
index 0000000..3cfc70b
--- /dev/null
@@ -0,0 +1,12 @@
+// This test checks variations on `<#[attr] 'a, #[oops]>`, where
+// `#[oops]` is left dangling (that is, it is unattached, with no
+// formal binding following it).
+
+struct RefIntPair<'a, 'b>(&'a u32, &'b u32);
+
+fn hof_lt<Q>(_: Q)
+    where Q: for <#[allow(unused)] 'a, 'b, #[oops]> Fn(RefIntPair<'a,'b>) -> &'b u32
+    //~^ ERROR trailing attribute after generic parameter
+{}
+
+fn main() {}
diff --git a/src/test/ui/attributes/attrs-with-no-formal-in-generics-3.stderr b/src/test/ui/attributes/attrs-with-no-formal-in-generics-3.stderr
new file mode 100644 (file)
index 0000000..b9ca009
--- /dev/null
@@ -0,0 +1,8 @@
+error: trailing attribute after generic parameter
+  --> $DIR/attrs-with-no-formal-in-generics-3.rs:8:44
+   |
+LL |     where Q: for <#[allow(unused)] 'a, 'b, #[oops]> Fn(RefIntPair<'a,'b>) -> &'b u32
+   |                                            ^^^^^^^ attributes must go before parameters
+
+error: aborting due to previous error
+
diff --git a/src/test/ui/attributes/class-attributes-1.rs b/src/test/ui/attributes/class-attributes-1.rs
new file mode 100644 (file)
index 0000000..7808367
--- /dev/null
@@ -0,0 +1,19 @@
+// compile-pass
+// pp-exact - Make sure we actually print the attributes
+
+#![feature(rustc_attrs)]
+
+struct Cat {
+    name: String,
+}
+
+impl Drop for Cat {
+    #[rustc_dummy]
+    fn drop(&mut self) { println!("{} landed on hir feet" , self . name); }
+}
+
+
+#[rustc_dummy]
+fn cat(name: String) -> Cat { Cat{name: name,} }
+
+fn main() { let _kitty = cat("Spotty".to_string()); }
diff --git a/src/test/ui/attributes/class-attributes-2.rs b/src/test/ui/attributes/class-attributes-2.rs
new file mode 100644 (file)
index 0000000..348c70f
--- /dev/null
@@ -0,0 +1,31 @@
+// compile-pass
+
+#![feature(rustc_attrs)]
+
+struct Cat {
+    name: String,
+}
+
+impl Drop for Cat {
+    #[rustc_dummy]
+    /**
+       Actually, cats don't always land on their feet when you drop them.
+    */
+    fn drop(&mut self) {
+        println!("{} landed on hir feet", self.name);
+    }
+}
+
+#[rustc_dummy]
+/**
+Maybe it should technically be a kitten_maker.
+*/
+fn cat(name: String) -> Cat {
+    Cat {
+        name: name
+    }
+}
+
+fn main() {
+    let _kitty = cat("Spotty".to_string());
+}
diff --git a/src/test/ui/attributes/item-attributes.rs b/src/test/ui/attributes/item-attributes.rs
new file mode 100644 (file)
index 0000000..72c9a35
--- /dev/null
@@ -0,0 +1,181 @@
+// These are attributes of the implicit crate. Really this just needs to parse
+// for completeness since .rs files linked from .rc files support this
+// notation to specify their module's attributes
+
+// compile-pass
+
+#![feature(rustc_attrs)]
+
+#![rustc_dummy = "val"]
+#![rustc_dummy = "val"]
+#![rustc_dummy]
+#![rustc_dummy(attr5)]
+
+#![crate_id="foobar#0.1"]
+
+// These are attributes of the following mod
+#[rustc_dummy = "val"]
+#[rustc_dummy = "val"]
+mod test_first_item_in_file_mod {}
+
+mod test_single_attr_outer {
+    #[rustc_dummy = "val"]
+    pub static X: isize = 10;
+
+    #[rustc_dummy = "val"]
+    pub fn f() { }
+
+    #[rustc_dummy = "val"]
+    pub mod mod1 {}
+
+    pub mod rustrt {
+        #[rustc_dummy = "val"]
+        extern {}
+    }
+}
+
+mod test_multi_attr_outer {
+    #[rustc_dummy = "val"]
+    #[rustc_dummy = "val"]
+    pub static X: isize = 10;
+
+    #[rustc_dummy = "val"]
+    #[rustc_dummy = "val"]
+    pub fn f() { }
+
+    #[rustc_dummy = "val"]
+    #[rustc_dummy = "val"]
+    pub mod mod1 {}
+
+    pub mod rustrt {
+        #[rustc_dummy = "val"]
+        #[rustc_dummy = "val"]
+        extern {}
+    }
+
+    #[rustc_dummy = "val"]
+    #[rustc_dummy = "val"]
+    struct T {x: isize}
+}
+
+mod test_stmt_single_attr_outer {
+    pub fn f() {
+        #[rustc_dummy = "val"]
+        static X: isize = 10;
+
+        #[rustc_dummy = "val"]
+        fn f() { }
+
+        #[rustc_dummy = "val"]
+        mod mod1 {
+        }
+
+        mod rustrt {
+            #[rustc_dummy = "val"]
+            extern {
+            }
+        }
+    }
+}
+
+mod test_stmt_multi_attr_outer {
+    pub fn f() {
+
+        #[rustc_dummy = "val"]
+        #[rustc_dummy = "val"]
+        static X: isize = 10;
+
+        #[rustc_dummy = "val"]
+        #[rustc_dummy = "val"]
+        fn f() { }
+
+        #[rustc_dummy = "val"]
+        #[rustc_dummy = "val"]
+        mod mod1 {
+        }
+
+        mod rustrt {
+            #[rustc_dummy = "val"]
+            #[rustc_dummy = "val"]
+            extern {
+            }
+        }
+    }
+}
+
+mod test_attr_inner {
+    pub mod m {
+        // This is an attribute of mod m
+        #![rustc_dummy = "val"]
+    }
+}
+
+mod test_attr_inner_then_outer {
+    pub mod m {
+        // This is an attribute of mod m
+        #![rustc_dummy = "val"]
+        // This is an attribute of fn f
+        #[rustc_dummy = "val"]
+        fn f() { }
+    }
+}
+
+mod test_attr_inner_then_outer_multi {
+    pub mod m {
+        // This is an attribute of mod m
+        #![rustc_dummy = "val"]
+        #![rustc_dummy = "val"]
+        // This is an attribute of fn f
+        #[rustc_dummy = "val"]
+        #[rustc_dummy = "val"]
+        fn f() { }
+    }
+}
+
+mod test_distinguish_syntax_ext {
+    pub fn f() {
+        format!("test{}", "s");
+        #[rustc_dummy = "val"]
+        fn g() { }
+    }
+}
+
+mod test_other_forms {
+    #[rustc_dummy]
+    #[rustc_dummy(word)]
+    #[rustc_dummy(attr(word))]
+    #[rustc_dummy(key1 = "val", key2 = "val", attr)]
+    pub fn f() { }
+}
+
+mod test_foreign_items {
+    pub mod rustrt {
+        extern {
+            #![rustc_dummy]
+
+            #[rustc_dummy]
+            fn rust_get_test_int() -> u32;
+        }
+    }
+}
+
+
+// FIXME(#623): - these aren't supported yet
+/*mod test_literals {
+    #![str = "s"]
+    #![char = 'c']
+    #![isize = 100]
+    #![usize = 100_usize]
+    #![mach_int = 100u32]
+    #![float = 1.0]
+    #![mach_float = 1.0f32]
+    #![nil = ()]
+    #![bool = true]
+    mod m {}
+}*/
+
+fn test_fn_inner() {
+    #![rustc_dummy]
+}
+
+fn main() {}
diff --git a/src/test/ui/attributes/method-attributes.rs b/src/test/ui/attributes/method-attributes.rs
new file mode 100644 (file)
index 0000000..2d608ac
--- /dev/null
@@ -0,0 +1,28 @@
+// compile-pass
+// pp-exact - Make sure we print all the attributes
+// pretty-expanded FIXME #23616
+
+#![feature(rustc_attrs)]
+
+#[rustc_dummy]
+trait Frobable {
+    #[rustc_dummy]
+    fn frob(&self);
+    #[rustc_dummy]
+    fn defrob(&self);
+}
+
+#[rustc_dummy]
+impl Frobable for isize {
+    #[rustc_dummy]
+    fn frob(&self) {
+        #![rustc_dummy]
+    }
+
+    #[rustc_dummy]
+    fn defrob(&self) {
+        #![rustc_dummy]
+    }
+}
+
+fn main() {}
diff --git a/src/test/ui/attributes/obsolete-attr.rs b/src/test/ui/attributes/obsolete-attr.rs
new file mode 100644 (file)
index 0000000..89e2ad2
--- /dev/null
@@ -0,0 +1,7 @@
+// Obsolete attributes fall back to feature gated custom attributes.
+
+#[ab_isize="stdcall"] extern {} //~ ERROR attribute `ab_isize` is currently unknown
+
+#[fixed_stack_segment] fn f() {} //~ ERROR attribute `fixed_stack_segment` is currently unknown
+
+fn main() {}
diff --git a/src/test/ui/attributes/obsolete-attr.stderr b/src/test/ui/attributes/obsolete-attr.stderr
new file mode 100644 (file)
index 0000000..2ed7f87
--- /dev/null
@@ -0,0 +1,21 @@
+error[E0658]: The attribute `fixed_stack_segment` is currently unknown to the compiler and may have meaning added to it in the future
+  --> $DIR/obsolete-attr.rs:5:3
+   |
+LL | #[fixed_stack_segment] fn f() {}
+   |   ^^^^^^^^^^^^^^^^^^^
+   |
+   = note: for more information, see https://github.com/rust-lang/rust/issues/29642
+   = help: add #![feature(custom_attribute)] to the crate attributes to enable
+
+error[E0658]: The attribute `ab_isize` is currently unknown to the compiler and may have meaning added to it in the future
+  --> $DIR/obsolete-attr.rs:3:3
+   |
+LL | #[ab_isize="stdcall"] extern {}
+   |   ^^^^^^^^
+   |
+   = note: for more information, see https://github.com/rust-lang/rust/issues/29642
+   = help: add #![feature(custom_attribute)] to the crate attributes to enable
+
+error: aborting due to 2 previous errors
+
+For more information about this error, try `rustc --explain E0658`.
diff --git a/src/test/ui/attributes/unknown-attr.rs b/src/test/ui/attributes/unknown-attr.rs
new file mode 100644 (file)
index 0000000..e2a4f32
--- /dev/null
@@ -0,0 +1,9 @@
+// Unknown attributes fall back to feature gated custom attributes.
+
+#![feature(custom_inner_attributes)]
+
+#![mutable_doc] //~ ERROR attribute `mutable_doc` is currently unknown
+
+#[dance] mod a {} //~ ERROR attribute `dance` is currently unknown
+
+#[dance] fn main() {} //~ ERROR attribute `dance` is currently unknown
diff --git a/src/test/ui/attributes/unknown-attr.stderr b/src/test/ui/attributes/unknown-attr.stderr
new file mode 100644 (file)
index 0000000..d0ac581
--- /dev/null
@@ -0,0 +1,30 @@
+error[E0658]: The attribute `mutable_doc` is currently unknown to the compiler and may have meaning added to it in the future
+  --> $DIR/unknown-attr.rs:5:4
+   |
+LL | #![mutable_doc]
+   |    ^^^^^^^^^^^
+   |
+   = note: for more information, see https://github.com/rust-lang/rust/issues/29642
+   = help: add #![feature(custom_attribute)] to the crate attributes to enable
+
+error[E0658]: The attribute `dance` is currently unknown to the compiler and may have meaning added to it in the future
+  --> $DIR/unknown-attr.rs:7:3
+   |
+LL | #[dance] mod a {}
+   |   ^^^^^
+   |
+   = note: for more information, see https://github.com/rust-lang/rust/issues/29642
+   = help: add #![feature(custom_attribute)] to the crate attributes to enable
+
+error[E0658]: The attribute `dance` is currently unknown to the compiler and may have meaning added to it in the future
+  --> $DIR/unknown-attr.rs:9:3
+   |
+LL | #[dance] fn main() {}
+   |   ^^^^^
+   |
+   = note: for more information, see https://github.com/rust-lang/rust/issues/29642
+   = help: add #![feature(custom_attribute)] to the crate attributes to enable
+
+error: aborting due to 3 previous errors
+
+For more information about this error, try `rustc --explain E0658`.
diff --git a/src/test/ui/attributes/variant-attributes.rs b/src/test/ui/attributes/variant-attributes.rs
new file mode 100644 (file)
index 0000000..a910340
--- /dev/null
@@ -0,0 +1,37 @@
+// compile-pass
+// pp-exact - Make sure we actually print the attributes
+// pretty-expanded FIXME #23616
+
+#![allow(non_camel_case_types)]
+#![feature(rustc_attrs)]
+
+enum crew_of_enterprise_d {
+
+    #[rustc_dummy]
+    jean_luc_picard,
+
+    #[rustc_dummy]
+    william_t_riker,
+
+    #[rustc_dummy]
+    beverly_crusher,
+
+    #[rustc_dummy]
+    deanna_troi,
+
+    #[rustc_dummy]
+    data,
+
+    #[rustc_dummy]
+    worf,
+
+    #[rustc_dummy]
+    geordi_la_forge,
+}
+
+fn boldly_go(_crew_member: crew_of_enterprise_d, _where: String) { }
+
+fn main() {
+    boldly_go(crew_of_enterprise_d::worf,
+              "where no one has gone before".to_string());
+}
diff --git a/src/test/ui/attrs-with-no-formal-in-generics/attrs-with-no-formal-in-generics-1.rs b/src/test/ui/attrs-with-no-formal-in-generics/attrs-with-no-formal-in-generics-1.rs
deleted file mode 100644 (file)
index ca5fdd9..0000000
+++ /dev/null
@@ -1,15 +0,0 @@
-// This test checks variations on `<#[attr] 'a, #[oops]>`, where
-// `#[oops]` is left dangling (that is, it is unattached, with no
-// formal binding following it).
-
-#![feature(rustc_attrs)]
-
-struct RefIntPair<'a, 'b>(&'a u32, &'b u32);
-
-impl<#[rustc_1] 'a, 'b, #[oops]> RefIntPair<'a, 'b> {
-    //~^ ERROR trailing attribute after generic parameter
-}
-
-fn main() {
-
-}
diff --git a/src/test/ui/attrs-with-no-formal-in-generics/attrs-with-no-formal-in-generics-1.stderr b/src/test/ui/attrs-with-no-formal-in-generics/attrs-with-no-formal-in-generics-1.stderr
deleted file mode 100644 (file)
index 55e7a98..0000000
+++ /dev/null
@@ -1,8 +0,0 @@
-error: trailing attribute after generic parameter
-  --> $DIR/attrs-with-no-formal-in-generics-1.rs:9:25
-   |
-LL | impl<#[rustc_1] 'a, 'b, #[oops]> RefIntPair<'a, 'b> {
-   |                         ^^^^^^^ attributes must go before parameters
-
-error: aborting due to previous error
-
diff --git a/src/test/ui/attrs-with-no-formal-in-generics/attrs-with-no-formal-in-generics-2.rs b/src/test/ui/attrs-with-no-formal-in-generics/attrs-with-no-formal-in-generics-2.rs
deleted file mode 100644 (file)
index c795612..0000000
+++ /dev/null
@@ -1,12 +0,0 @@
-// This test checks variations on `<#[attr] 'a, #[oops]>`, where
-// `#[oops]` is left dangling (that is, it is unattached, with no
-// formal binding following it).
-
-#![feature(rustc_attrs)]
-
-struct RefAny<'a, T>(&'a T);
-
-impl<#[rustc_1] 'a, #[rustc_2] T, #[oops]> RefAny<'a, T> {}
-//~^ ERROR trailing attribute after generic parameter
-
-fn main() {}
diff --git a/src/test/ui/attrs-with-no-formal-in-generics/attrs-with-no-formal-in-generics-2.stderr b/src/test/ui/attrs-with-no-formal-in-generics/attrs-with-no-formal-in-generics-2.stderr
deleted file mode 100644 (file)
index acd0ae3..0000000
+++ /dev/null
@@ -1,8 +0,0 @@
-error: trailing attribute after generic parameter
-  --> $DIR/attrs-with-no-formal-in-generics-2.rs:9:35
-   |
-LL | impl<#[rustc_1] 'a, #[rustc_2] T, #[oops]> RefAny<'a, T> {}
-   |                                   ^^^^^^^ attributes must go before parameters
-
-error: aborting due to previous error
-
diff --git a/src/test/ui/attrs-with-no-formal-in-generics/attrs-with-no-formal-in-generics-3.rs b/src/test/ui/attrs-with-no-formal-in-generics/attrs-with-no-formal-in-generics-3.rs
deleted file mode 100644 (file)
index 3cfc70b..0000000
+++ /dev/null
@@ -1,12 +0,0 @@
-// This test checks variations on `<#[attr] 'a, #[oops]>`, where
-// `#[oops]` is left dangling (that is, it is unattached, with no
-// formal binding following it).
-
-struct RefIntPair<'a, 'b>(&'a u32, &'b u32);
-
-fn hof_lt<Q>(_: Q)
-    where Q: for <#[allow(unused)] 'a, 'b, #[oops]> Fn(RefIntPair<'a,'b>) -> &'b u32
-    //~^ ERROR trailing attribute after generic parameter
-{}
-
-fn main() {}
diff --git a/src/test/ui/attrs-with-no-formal-in-generics/attrs-with-no-formal-in-generics-3.stderr b/src/test/ui/attrs-with-no-formal-in-generics/attrs-with-no-formal-in-generics-3.stderr
deleted file mode 100644 (file)
index b9ca009..0000000
+++ /dev/null
@@ -1,8 +0,0 @@
-error: trailing attribute after generic parameter
-  --> $DIR/attrs-with-no-formal-in-generics-3.rs:8:44
-   |
-LL |     where Q: for <#[allow(unused)] 'a, 'b, #[oops]> Fn(RefIntPair<'a,'b>) -> &'b u32
-   |                                            ^^^^^^^ attributes must go before parameters
-
-error: aborting due to previous error
-
index 908b2c0ff5ee8ef6e4900998f4d0b2211be0ecc0..5880a1abb818ca384749f2d98c81c6e97e371606 100644 (file)
@@ -5,7 +5,7 @@ LL |         let _b = *y;
    |                  ^^
    |                  |
    |                  move occurs because `*y` has type `std::boxed::Box<i32>`, which does not implement the `Copy` trait
-   |                  help: consider removing the `*`: `y`
+   |                  help: consider borrowing here: `&*y`
 
 error: aborting due to previous error
 
index 13d3faab6508a4c3fac647e5c7de5e847cd45a69..d56b9f562c932dfc5c51feb38a4916298a01ca1a 100644 (file)
@@ -2,7 +2,7 @@ error[E0507]: cannot move out of `f.0` which is behind a shared reference
   --> $DIR/borrowck-move-error-with-note.rs:11:11
    |
 LL |     match *f {
-   |           ^^ help: consider removing the `*`: `f`
+   |           ^^ help: consider borrowing here: `&*f`
 LL |         Foo::Foo1(num1,
    |                   ---- data moved here
 LL |                   num2) => (),
index 6c806e0896b37edd2367ef0b38dad0eba6e1ecb3..7dfae33920e1cd146e420df19fefc1d7ce986745 100644 (file)
@@ -5,7 +5,7 @@ LL |     let y = *x;
    |             ^^
    |             |
    |             move occurs because `*x` has type `std::boxed::Box<isize>`, which does not implement the `Copy` trait
-   |             help: consider removing the `*`: `x`
+   |             help: consider borrowing here: `&*x`
 
 error: aborting due to previous error
 
index da3e5c54b75f74f3d27f1d5b2b44ab3ed2a7e0ed..1501644fac758e1ef9024d09056ae258fc872dac 100644 (file)
@@ -5,7 +5,7 @@ LL |     let _x = *Rc::new("hi".to_string());
    |              ^^^^^^^^^^^^^^^^^^^^^^^^^^
    |              |
    |              move occurs because value has type `std::string::String`, which does not implement the `Copy` trait
-   |              help: consider removing the `*`: `Rc::new("hi".to_string())`
+   |              help: consider borrowing here: `&*Rc::new("hi".to_string())`
 
 error: aborting due to previous error
 
index 6ba801b9714bf1ea00ed3cc97a44e45c0d403666..78d44f320619973ec26a63cde793d65224c31a2a 100644 (file)
@@ -5,7 +5,7 @@ LL |             *array
    |             ^^^^^^
    |             |
    |             move occurs because `*array` has type `std::vec::Vec<Value>`, which does not implement the `Copy` trait
-   |             help: consider removing the `*`: `array`
+   |             help: consider borrowing here: `&*array`
 
 error: aborting due to previous error
 
diff --git a/src/test/ui/const-generics/array-wrapper-struct-ctor.rs b/src/test/ui/const-generics/array-wrapper-struct-ctor.rs
new file mode 100644 (file)
index 0000000..d83846f
--- /dev/null
@@ -0,0 +1,15 @@
+// run-pass
+
+#![feature(const_generics)]
+//~^ WARN the feature `const_generics` is incomplete and may cause the compiler to crash
+
+struct ArrayStruct<T, const N: usize> {
+    data: [T; N],
+}
+
+struct ArrayTuple<T, const N: usize>([T; N]);
+
+fn main() {
+    let _ = ArrayStruct { data: [0u32; 8] };
+    let _ = ArrayTuple([0u32; 8]);
+}
diff --git a/src/test/ui/const-generics/array-wrapper-struct-ctor.stderr b/src/test/ui/const-generics/array-wrapper-struct-ctor.stderr
new file mode 100644 (file)
index 0000000..bd18264
--- /dev/null
@@ -0,0 +1,6 @@
+warning: the feature `const_generics` is incomplete and may cause the compiler to crash
+  --> $DIR/array-wrapper-struct-ctor.rs:3:12
+   |
+LL | #![feature(const_generics)]
+   |            ^^^^^^^^^^^^^^
+
diff --git a/src/test/ui/const-generics/derive-debug-array-wrapper.rs b/src/test/ui/const-generics/derive-debug-array-wrapper.rs
new file mode 100644 (file)
index 0000000..a29cb90
--- /dev/null
@@ -0,0 +1,9 @@
+#![feature(const_generics)]
+//~^ WARN the feature `const_generics` is incomplete and may cause the compiler to crash
+
+#[derive(Debug)]
+struct X<const N: usize> {
+    a: [u32; N], //~ ERROR `[u32; _]` doesn't implement `std::fmt::Debug`
+}
+
+fn main() {}
diff --git a/src/test/ui/const-generics/derive-debug-array-wrapper.stderr b/src/test/ui/const-generics/derive-debug-array-wrapper.stderr
new file mode 100644 (file)
index 0000000..5bab1d1
--- /dev/null
@@ -0,0 +1,19 @@
+warning: the feature `const_generics` is incomplete and may cause the compiler to crash
+  --> $DIR/derive-debug-array-wrapper.rs:1:12
+   |
+LL | #![feature(const_generics)]
+   |            ^^^^^^^^^^^^^^
+
+error[E0277]: `[u32; _]` doesn't implement `std::fmt::Debug`
+  --> $DIR/derive-debug-array-wrapper.rs:6:5
+   |
+LL |     a: [u32; N],
+   |     ^^^^^^^^^^^ `[u32; _]` cannot be formatted using `{:?}` because it doesn't implement `std::fmt::Debug`
+   |
+   = help: the trait `std::fmt::Debug` is not implemented for `[u32; _]`
+   = note: required because of the requirements on the impl of `std::fmt::Debug` for `&[u32; _]`
+   = note: required for the cast to the object type `dyn std::fmt::Debug`
+
+error: aborting due to previous error
+
+For more information about this error, try `rustc --explain E0277`.
diff --git a/src/test/ui/const-generics/issue-61336-1.rs b/src/test/ui/const-generics/issue-61336-1.rs
new file mode 100644 (file)
index 0000000..5b5e431
--- /dev/null
@@ -0,0 +1,12 @@
+#![feature(const_generics)]
+//~^ WARN the feature `const_generics` is incomplete and may cause the compiler to crash
+
+fn f<T: Copy, const N: usize>(x: T) -> [T; N] {
+    [x; N]
+    //~^ ERROR array lengths can't depend on generic parameters
+}
+
+fn main() {
+    let x: [u32; 5] = f::<u32, 5>(3);
+    assert_eq!(x, [3u32; 5]);
+}
diff --git a/src/test/ui/const-generics/issue-61336-1.stderr b/src/test/ui/const-generics/issue-61336-1.stderr
new file mode 100644 (file)
index 0000000..1a5bb9f
--- /dev/null
@@ -0,0 +1,14 @@
+warning: the feature `const_generics` is incomplete and may cause the compiler to crash
+  --> $DIR/issue-61336-1.rs:1:12
+   |
+LL | #![feature(const_generics)]
+   |            ^^^^^^^^^^^^^^
+
+error: array lengths can't depend on generic parameters
+  --> $DIR/issue-61336-1.rs:5:9
+   |
+LL |     [x; N]
+   |         ^
+
+error: aborting due to previous error
+
diff --git a/src/test/ui/const-generics/issue-61336.rs b/src/test/ui/const-generics/issue-61336.rs
new file mode 100644 (file)
index 0000000..9593037
--- /dev/null
@@ -0,0 +1,16 @@
+#![feature(const_generics)]
+//~^ WARN the feature `const_generics` is incomplete and may cause the compiler to crash
+
+fn f<T: Copy, const N: usize>(x: T) -> [T; N] {
+    [x; N]
+}
+
+fn g<T, const N: usize>(x: T) -> [T; N] {
+    [x; N]
+    //~^ ERROR the trait bound `T: std::marker::Copy` is not satisfied [E0277]
+}
+
+fn main() {
+    let x: [u32; 5] = f::<u32, 5>(3);
+    assert_eq!(x, [3u32; 5]);
+}
diff --git a/src/test/ui/const-generics/issue-61336.stderr b/src/test/ui/const-generics/issue-61336.stderr
new file mode 100644 (file)
index 0000000..9939a59
--- /dev/null
@@ -0,0 +1,18 @@
+warning: the feature `const_generics` is incomplete and may cause the compiler to crash
+  --> $DIR/issue-61336.rs:1:12
+   |
+LL | #![feature(const_generics)]
+   |            ^^^^^^^^^^^^^^
+
+error[E0277]: the trait bound `T: std::marker::Copy` is not satisfied
+  --> $DIR/issue-61336.rs:9:5
+   |
+LL |     [x; N]
+   |     ^^^^^^ the trait `std::marker::Copy` is not implemented for `T`
+   |
+   = help: consider adding a `where T: std::marker::Copy` bound
+   = note: the `Copy` trait is required because the repeated element will be copied
+
+error: aborting due to previous error
+
+For more information about this error, try `rustc --explain E0277`.
index c9d5ede61ade47c89e8fe420755173b9c80c3ea4..c9f3a7659f9cd216615164ca711aef5c1a8c83bc 100644 (file)
@@ -16,6 +16,12 @@ warning: attempt to divide by zero
 LL |     println!("{}", 1/(1-1));
    |                    ^^^^^^^
 
+warning: this expression will panic at runtime
+  --> $DIR/promoted_errors.rs:9:20
+   |
+LL |     println!("{}", 1/(1-1));
+   |                    ^^^^^^^ attempt to divide by zero
+
 warning: attempt to divide by zero
   --> $DIR/promoted_errors.rs:11:14
    |
@@ -34,6 +40,12 @@ warning: attempt to divide by zero
 LL |     println!("{}", 1/(false as u32));
    |                    ^^^^^^^^^^^^^^^^
 
+warning: this expression will panic at runtime
+  --> $DIR/promoted_errors.rs:14:20
+   |
+LL |     println!("{}", 1/(false as u32));
+   |                    ^^^^^^^^^^^^^^^^ attempt to divide by zero
+
 warning: attempt to divide by zero
   --> $DIR/promoted_errors.rs:16:14
    |
diff --git a/src/test/ui/consts/const_constructor/const-construct-call.rs b/src/test/ui/consts/const_constructor/const-construct-call.rs
new file mode 100644 (file)
index 0000000..f2d2bda
--- /dev/null
@@ -0,0 +1,116 @@
+// Test that constructors are considered to be const fns with the required feature.
+
+// run-pass
+
+// revisions: min_const_fn const_fn
+
+#![cfg_attr(const_fn, feature(const_fn))]
+
+#![feature(const_constructor)]
+
+// Ctor(..) is transformed to Ctor { 0: ... } in HAIR lowering, so directly
+// calling constructors doesn't require them to be const.
+
+type ExternalType = std::panic::AssertUnwindSafe<(Option<i32>, Result<i32, bool>)>;
+
+const fn call_external_constructors_in_local_vars() -> ExternalType {
+    let f = Some;
+    let g = Err;
+    let h = std::panic::AssertUnwindSafe;
+    let x = f(5);
+    let y = g(false);
+    let z = h((x, y));
+    z
+}
+
+const CALL_EXTERNAL_CONSTRUCTORS_IN_LOCAL_VARS: ExternalType = {
+    let f = Some;
+    let g = Err;
+    let h = std::panic::AssertUnwindSafe;
+    let x = f(5);
+    let y = g(false);
+    let z = h((x, y));
+    z
+};
+
+const fn call_external_constructors_in_temps() -> ExternalType {
+    let x = { Some }(5);
+    let y = (*&Err)(false);
+    let z = [std::panic::AssertUnwindSafe][0]((x, y));
+    z
+}
+
+const CALL_EXTERNAL_CONSTRUCTORS_IN_TEMPS: ExternalType = {
+    let x = { Some }(5);
+    let y = (*&Err)(false);
+    let z = [std::panic::AssertUnwindSafe][0]((x, y));
+    z
+};
+
+#[derive(Debug, PartialEq)]
+enum LocalOption<T> {
+    Some(T),
+    _None,
+}
+
+#[derive(Debug, PartialEq)]
+enum LocalResult<T, E> {
+    _Ok(T),
+    Err(E),
+}
+
+#[derive(Debug, PartialEq)]
+struct LocalAssertUnwindSafe<T>(T);
+
+type LocalType = LocalAssertUnwindSafe<(LocalOption<i32>, LocalResult<i32, bool>)>;
+
+const fn call_local_constructors_in_local_vars() -> LocalType {
+    let f = LocalOption::Some;
+    let g = LocalResult::Err;
+    let h = LocalAssertUnwindSafe;
+    let x = f(5);
+    let y = g(false);
+    let z = h((x, y));
+    z
+}
+
+const CALL_LOCAL_CONSTRUCTORS_IN_LOCAL_VARS: LocalType = {
+    let f = LocalOption::Some;
+    let g = LocalResult::Err;
+    let h = LocalAssertUnwindSafe;
+    let x = f(5);
+    let y = g(false);
+    let z = h((x, y));
+    z
+};
+
+const fn call_local_constructors_in_temps() -> LocalType {
+    let x = { LocalOption::Some }(5);
+    let y = (*&LocalResult::Err)(false);
+    let z = [LocalAssertUnwindSafe][0]((x, y));
+    z
+}
+
+const CALL_LOCAL_CONSTRUCTORS_IN_TEMPS: LocalType = {
+    let x = { LocalOption::Some }(5);
+    let y = (*&LocalResult::Err)(false);
+    let z = [LocalAssertUnwindSafe][0]((x, y));
+    z
+};
+
+fn main() {
+    assert_eq!(
+        (
+            call_external_constructors_in_local_vars().0,
+            call_external_constructors_in_temps().0,
+            call_local_constructors_in_local_vars(),
+            call_local_constructors_in_temps(),
+        ),
+        (
+            CALL_EXTERNAL_CONSTRUCTORS_IN_LOCAL_VARS.0,
+            CALL_EXTERNAL_CONSTRUCTORS_IN_TEMPS.0,
+            CALL_LOCAL_CONSTRUCTORS_IN_LOCAL_VARS,
+            CALL_LOCAL_CONSTRUCTORS_IN_TEMPS,
+        )
+    );
+}
diff --git a/src/test/ui/consts/const_constructor/feature-gate-const_constructor.const_fn.stderr b/src/test/ui/consts/const_constructor/feature-gate-const_constructor.const_fn.stderr
new file mode 100644 (file)
index 0000000..fa4f83e
--- /dev/null
@@ -0,0 +1,34 @@
+error: `std::prelude::v1::Some` is not yet stable as a const fn
+  --> $DIR/feature-gate-const_constructor.rs:9:37
+   |
+LL | const EXTERNAL_CONST: Option<i32> = {Some}(1);
+   |                                     ^^^^^^^^^
+   |
+   = help: add `#![feature(const_constructor)]` to the crate attributes to enable
+
+error: `E::V` is not yet stable as a const fn
+  --> $DIR/feature-gate-const_constructor.rs:12:24
+   |
+LL | const LOCAL_CONST: E = {E::V}(1);
+   |                        ^^^^^^^^^
+   |
+   = help: add `#![feature(const_constructor)]` to the crate attributes to enable
+
+error: `std::prelude::v1::Some` is not yet stable as a const fn
+  --> $DIR/feature-gate-const_constructor.rs:17:13
+   |
+LL |     let _ = {Some}(1);
+   |             ^^^^^^^^^
+   |
+   = help: add `#![feature(const_constructor)]` to the crate attributes to enable
+
+error: `E::V` is not yet stable as a const fn
+  --> $DIR/feature-gate-const_constructor.rs:23:13
+   |
+LL |     let _ = {E::V}(1);
+   |             ^^^^^^^^^
+   |
+   = help: add `#![feature(const_constructor)]` to the crate attributes to enable
+
+error: aborting due to 4 previous errors
+
diff --git a/src/test/ui/consts/const_constructor/feature-gate-const_constructor.min_const_fn.stderr b/src/test/ui/consts/const_constructor/feature-gate-const_constructor.min_const_fn.stderr
new file mode 100644 (file)
index 0000000..fa4f83e
--- /dev/null
@@ -0,0 +1,34 @@
+error: `std::prelude::v1::Some` is not yet stable as a const fn
+  --> $DIR/feature-gate-const_constructor.rs:9:37
+   |
+LL | const EXTERNAL_CONST: Option<i32> = {Some}(1);
+   |                                     ^^^^^^^^^
+   |
+   = help: add `#![feature(const_constructor)]` to the crate attributes to enable
+
+error: `E::V` is not yet stable as a const fn
+  --> $DIR/feature-gate-const_constructor.rs:12:24
+   |
+LL | const LOCAL_CONST: E = {E::V}(1);
+   |                        ^^^^^^^^^
+   |
+   = help: add `#![feature(const_constructor)]` to the crate attributes to enable
+
+error: `std::prelude::v1::Some` is not yet stable as a const fn
+  --> $DIR/feature-gate-const_constructor.rs:17:13
+   |
+LL |     let _ = {Some}(1);
+   |             ^^^^^^^^^
+   |
+   = help: add `#![feature(const_constructor)]` to the crate attributes to enable
+
+error: `E::V` is not yet stable as a const fn
+  --> $DIR/feature-gate-const_constructor.rs:23:13
+   |
+LL |     let _ = {E::V}(1);
+   |             ^^^^^^^^^
+   |
+   = help: add `#![feature(const_constructor)]` to the crate attributes to enable
+
+error: aborting due to 4 previous errors
+
diff --git a/src/test/ui/consts/const_constructor/feature-gate-const_constructor.rs b/src/test/ui/consts/const_constructor/feature-gate-const_constructor.rs
new file mode 100644 (file)
index 0000000..b37fd2f
--- /dev/null
@@ -0,0 +1,28 @@
+// revisions: min_const_fn const_fn
+
+#![cfg_attr(const_fn, feature(const_fn))]
+
+enum E {
+    V(i32),
+}
+
+const EXTERNAL_CONST: Option<i32> = {Some}(1);
+//[min_const_fn]~^ ERROR is not yet stable as a const fn
+//[const_fn]~^^ ERROR is not yet stable as a const fn
+const LOCAL_CONST: E = {E::V}(1);
+//[min_const_fn]~^ ERROR is not yet stable as a const fn
+//[const_fn]~^^ ERROR is not yet stable as a const fn
+
+const fn external_fn() {
+    let _ = {Some}(1);
+    //[min_const_fn]~^ ERROR is not yet stable as a const fn
+    //[const_fn]~^^ ERROR is not yet stable as a const fn
+}
+
+const fn local_fn() {
+    let _ = {E::V}(1);
+    //[min_const_fn]~^ ERROR is not yet stable as a const fn
+    //[const_fn]~^^ ERROR is not yet stable as a const fn
+}
+
+fn main() {}
index 7a10c469c51a0c2dbc985eea14af9c65638ee19d..abbdb4ab632dc2d7a2956d00be7ef452f82e2982 100644 (file)
@@ -160,7 +160,7 @@ LL | const fn foo30_2_with_unsafe(x: *mut u32) -> usize { unsafe { x as usize }
    = note: for more information, see issue https://github.com/rust-lang/rust/issues/57563
    = help: add #![feature(const_fn)] to the crate attributes to enable
 
-error[E0723]: `if`, `match`, `&&` and `||` are not stable in const fn
+error[E0723]: loops and conditional expressions are not stable in const fn
   --> $DIR/min_const_fn.rs:100:38
    |
 LL | const fn foo30_4(b: bool) -> usize { if b { 1 } else { 42 } }
@@ -169,7 +169,7 @@ LL | const fn foo30_4(b: bool) -> usize { if b { 1 } else { 42 } }
    = note: for more information, see issue https://github.com/rust-lang/rust/issues/57563
    = help: add #![feature(const_fn)] to the crate attributes to enable
 
-error[E0723]: `if`, `match`, `&&` and `||` are not stable in const fn
+error[E0723]: loops and conditional expressions are not stable in const fn
   --> $DIR/min_const_fn.rs:102:29
    |
 LL | const fn foo30_5(b: bool) { while b { } }
@@ -178,7 +178,7 @@ LL | const fn foo30_5(b: bool) { while b { } }
    = note: for more information, see issue https://github.com/rust-lang/rust/issues/57563
    = help: add #![feature(const_fn)] to the crate attributes to enable
 
-error[E0723]: `if`, `match`, `&&` and `||` are not stable in const fn
+error[E0723]: loops and conditional expressions are not stable in const fn
   --> $DIR/min_const_fn.rs:104:44
    |
 LL | const fn foo36(a: bool, b: bool) -> bool { a && b }
@@ -187,7 +187,7 @@ LL | const fn foo36(a: bool, b: bool) -> bool { a && b }
    = note: for more information, see issue https://github.com/rust-lang/rust/issues/57563
    = help: add #![feature(const_fn)] to the crate attributes to enable
 
-error[E0723]: `if`, `match`, `&&` and `||` are not stable in const fn
+error[E0723]: loops and conditional expressions are not stable in const fn
   --> $DIR/min_const_fn.rs:106:44
    |
 LL | const fn foo37(a: bool, b: bool) -> bool { a || b }
index 96b6057c8fd2d571f3e7e144961a9c503774e866..40e7107e4a15e1654309a068a612819d59d0e576 100644 (file)
@@ -98,13 +98,13 @@ const fn foo30_2(x: *mut u32) -> usize { x as usize }
 const fn foo30_2_with_unsafe(x: *mut u32) -> usize { unsafe { x as usize } }
 //~^ ERROR casting pointers to ints is unstable
 const fn foo30_4(b: bool) -> usize { if b { 1 } else { 42 } }
-//~^ ERROR `if`, `match`, `&&` and `||` are not stable in const fn
+//~^ ERROR loops and conditional expressions are not stable in const fn
 const fn foo30_5(b: bool) { while b { } } //~ ERROR not stable in const fn
 const fn foo30_6() -> bool { let x = true; x }
 const fn foo36(a: bool, b: bool) -> bool { a && b }
-//~^ ERROR `if`, `match`, `&&` and `||` are not stable in const fn
+//~^ ERROR loops and conditional expressions are not stable in const fn
 const fn foo37(a: bool, b: bool) -> bool { a || b }
-//~^ ERROR `if`, `match`, `&&` and `||` are not stable in const fn
+//~^ ERROR loops and conditional expressions are not stable in const fn
 const fn inc(x: &mut i32) { *x += 1 }
 //~^ ERROR mutable references in const fn are unstable
 
index e388b443d23445d95f3e40a1de8278649d894c64..28a5ffb2015945ecefd3c7aea04d668b614c004e 100644 (file)
@@ -160,7 +160,7 @@ LL | const fn foo30_2_with_unsafe(x: *mut u32) -> usize { unsafe { x as usize }
    = note: for more information, see issue https://github.com/rust-lang/rust/issues/57563
    = help: add #![feature(const_fn)] to the crate attributes to enable
 
-error[E0723]: `if`, `match`, `&&` and `||` are not stable in const fn
+error[E0723]: loops and conditional expressions are not stable in const fn
   --> $DIR/min_const_fn.rs:100:38
    |
 LL | const fn foo30_4(b: bool) -> usize { if b { 1 } else { 42 } }
@@ -169,7 +169,7 @@ LL | const fn foo30_4(b: bool) -> usize { if b { 1 } else { 42 } }
    = note: for more information, see issue https://github.com/rust-lang/rust/issues/57563
    = help: add #![feature(const_fn)] to the crate attributes to enable
 
-error[E0723]: `if`, `match`, `&&` and `||` are not stable in const fn
+error[E0723]: loops and conditional expressions are not stable in const fn
   --> $DIR/min_const_fn.rs:102:29
    |
 LL | const fn foo30_5(b: bool) { while b { } }
@@ -178,7 +178,7 @@ LL | const fn foo30_5(b: bool) { while b { } }
    = note: for more information, see issue https://github.com/rust-lang/rust/issues/57563
    = help: add #![feature(const_fn)] to the crate attributes to enable
 
-error[E0723]: `if`, `match`, `&&` and `||` are not stable in const fn
+error[E0723]: loops and conditional expressions are not stable in const fn
   --> $DIR/min_const_fn.rs:104:44
    |
 LL | const fn foo36(a: bool, b: bool) -> bool { a && b }
@@ -187,7 +187,7 @@ LL | const fn foo36(a: bool, b: bool) -> bool { a && b }
    = note: for more information, see issue https://github.com/rust-lang/rust/issues/57563
    = help: add #![feature(const_fn)] to the crate attributes to enable
 
-error[E0723]: `if`, `match`, `&&` and `||` are not stable in const fn
+error[E0723]: loops and conditional expressions are not stable in const fn
   --> $DIR/min_const_fn.rs:106:44
    |
 LL | const fn foo37(a: bool, b: bool) -> bool { a || b }
index 79dde3c18e8fa995ed58414fb85beb8dec9a39b1..6002506689e12c8cb3a0a55e57d1637bd7db0643 100644 (file)
@@ -15,7 +15,7 @@ pub const fn as_val(&self) -> u8 {
         use self::Foo::*;
 
         match *self {
-            Prob => 0x1, //~ ERROR `if`, `match`, `&&` and `||` are not stable in const fn
+            Prob => 0x1, //~ ERROR loops and conditional expressions are not stable in const fn
         }
     }
 }
index b8ad775f1c34fb2c6025e82fd351439d1b045637..1e092c8af99674bdc8ddaa7468ead797dd74a0af 100644 (file)
@@ -10,7 +10,7 @@ error[E0019]: constant contains unimplemented expression type
 LL |     x => 42,
    |     ^
 
-error[E0723]: `if`, `match`, `&&` and `||` are not stable in const fn
+error[E0723]: loops and conditional expressions are not stable in const fn
   --> $DIR/single_variant_match_ice.rs:18:13
    |
 LL |             Prob => 0x1,
index 95cefe53938fb0b0c35a6a96b5c10f3732d5cb68..2434921390245d13248e18dea565fc31ab5bc82f 100644 (file)
@@ -1,7 +1,5 @@
 // Unresolved multi-segment attributes are not treated as custom.
 
-#![feature(custom_attribute)]
-
 mod existent {}
 
 #[existent::nonexistent] //~ ERROR failed to resolve: could not find `nonexistent` in `existent`
index 9ba9c00e55bc88283bf2b1d7ba8002dfa6ac2264..57eca211ed10eb5e34f04338c24259c79fd4d40e 100644 (file)
@@ -1,5 +1,5 @@
 error[E0433]: failed to resolve: could not find `nonexistent` in `existent`
-  --> $DIR/custom-attribute-multisegment.rs:7:13
+  --> $DIR/custom-attribute-multisegment.rs:5:13
    |
 LL | #[existent::nonexistent]
    |             ^^^^^^^^^^^ could not find `nonexistent` in `existent`
index e256c07de2672d5d621b04e8d9d4cb95205e218c..2cfca1724c81bd15b31739f11aa1a5baecb40bc8 100644 (file)
@@ -1,10 +1,10 @@
-error[E0517]: attribute should be applied to struct, enum or union
+error[E0517]: attribute should be applied to struct, enum, or union
   --> $DIR/E0517.rs:1:8
    |
 LL | #[repr(C)]
    |        ^
 LL | type Foo = u8;
-   | -------------- not a struct, enum or union
+   | -------------- not a struct, enum, or union
 
 error[E0517]: attribute should be applied to struct or union
   --> $DIR/E0517.rs:4:8
@@ -22,14 +22,14 @@ LL | #[repr(u8)]
 LL | struct Foo3 {bar: bool, baz: bool}
    | ---------------------------------- not an enum
 
-error[E0517]: attribute should be applied to struct, enum or union
+error[E0517]: attribute should be applied to struct, enum, or union
   --> $DIR/E0517.rs:10:8
    |
 LL |   #[repr(C)]
    |          ^
 LL | / impl Foo3 {
 LL | | }
-   | |_- not a struct, enum or union
+   | |_- not a struct, enum, or union
 
 error: aborting due to 4 previous errors
 
index fbae093f2ac2d696180d83fd667d63c517db96c2..c98fd7ffea8bee3c461dbe87915a956a257b5724 100644 (file)
@@ -1,6 +1,6 @@
 // skip-codegen
 // compile-pass
-#![feature(custom_attribute)]
+
 macro_rules! mac {
     {} => {
         #[cfg(attr)]
index bd151c8a4e4e7717edc7115b8eaad6ff8d96ed69..c3975f6b9255e657943b25ab39cd8c3a1720a1a5 100644 (file)
@@ -12,4 +12,9 @@ mod nonexistent_env {
     //~^ ERROR environment variable `NON_EXISTENT` not defined
 }
 
+mod erroneous_literal {
+    include!(concat!("NON_EXISTENT"suffix, "/data.rs"));
+    //~^ ERROR suffixes on a string literal are invalid
+}
+
 fn main() {}
index 9f6570ab2a0f3b92fdc7010e0e793b37939fd0d7..9d68131beabd7a7fe72f2406ec89a8edf69bbb35 100644 (file)
@@ -4,6 +4,12 @@ error: environment variable `NON_EXISTENT` not defined
 LL |     include!(concat!(env!("NON_EXISTENT"), "/data.rs"));
    |                      ^^^^^^^^^^^^^^^^^^^^
 
+error: suffixes on a string literal are invalid
+  --> $DIR/issue-55897.rs:16:22
+   |
+LL |     include!(concat!("NON_EXISTENT"suffix, "/data.rs"));
+   |                      ^^^^^^^^^^^^^^^^^^^^ invalid suffix `suffix`
+
 error[E0432]: unresolved import `prelude`
   --> $DIR/issue-55897.rs:1:5
    |
@@ -21,6 +27,6 @@ LL |     include!(concat!(env!("NON_EXISTENT"), "/data.rs"));
    |
    = note: import resolution is stuck, try simplifying macro imports
 
-error: aborting due to 3 previous errors
+error: aborting due to 4 previous errors
 
 For more information about this error, try `rustc --explain E0432`.
diff --git a/src/test/ui/feature-gates/feature-gate-repr_align_enum.rs b/src/test/ui/feature-gates/feature-gate-repr_align_enum.rs
deleted file mode 100644 (file)
index 8b68caa..0000000
+++ /dev/null
@@ -1,10 +0,0 @@
-#[repr(align(16))]
-struct Foo(u64);
-
-#[repr(align(8))] //~ ERROR `#[repr(align(x))]` on enums is experimental
-enum Bar {
-    Foo { foo: Foo },
-    Baz,
-}
-
-fn main() { }
diff --git a/src/test/ui/feature-gates/feature-gate-repr_align_enum.stderr b/src/test/ui/feature-gates/feature-gate-repr_align_enum.stderr
deleted file mode 100644 (file)
index 36924f4..0000000
+++ /dev/null
@@ -1,12 +0,0 @@
-error[E0658]: `#[repr(align(x))]` on enums is experimental
-  --> $DIR/feature-gate-repr_align_enum.rs:4:1
-   |
-LL | #[repr(align(8))]
-   | ^^^^^^^^^^^^^^^^^
-   |
-   = note: for more information, see https://github.com/rust-lang/rust/issues/57996
-   = help: add #![feature(repr_align_enum)] to the crate attributes to enable
-
-error: aborting due to previous error
-
-For more information about this error, try `rustc --explain E0658`.
diff --git a/src/test/ui/feature-gates/feature-gate-transparent_enums.rs b/src/test/ui/feature-gates/feature-gate-transparent_enums.rs
new file mode 100644 (file)
index 0000000..0a7a73a
--- /dev/null
@@ -0,0 +1,6 @@
+#[repr(transparent)]
+enum OkButUnstableEnum { //~ ERROR transparent enums are unstable
+    Foo((), String, ()),
+}
+
+fn main() {}
diff --git a/src/test/ui/feature-gates/feature-gate-transparent_enums.stderr b/src/test/ui/feature-gates/feature-gate-transparent_enums.stderr
new file mode 100644 (file)
index 0000000..4b22654
--- /dev/null
@@ -0,0 +1,14 @@
+error[E0658]: transparent enums are unstable
+  --> $DIR/feature-gate-transparent_enums.rs:2:1
+   |
+LL | / enum OkButUnstableEnum {
+LL | |     Foo((), String, ()),
+LL | | }
+   | |_^
+   |
+   = note: for more information, see https://github.com/rust-lang/rust/issues/60405
+   = help: add #![feature(transparent_enums)] to the crate attributes to enable
+
+error: aborting due to previous error
+
+For more information about this error, try `rustc --explain E0658`.
diff --git a/src/test/ui/feature-gates/feature-gate-transparent_unions.rs b/src/test/ui/feature-gates/feature-gate-transparent_unions.rs
new file mode 100644 (file)
index 0000000..73cac0a
--- /dev/null
@@ -0,0 +1,7 @@
+#[repr(transparent)]
+union OkButUnstableUnion { //~ ERROR transparent unions are unstable
+    field: u8,
+    zst: (),
+}
+
+fn main() {}
diff --git a/src/test/ui/feature-gates/feature-gate-transparent_unions.stderr b/src/test/ui/feature-gates/feature-gate-transparent_unions.stderr
new file mode 100644 (file)
index 0000000..933b227
--- /dev/null
@@ -0,0 +1,15 @@
+error[E0658]: transparent unions are unstable
+  --> $DIR/feature-gate-transparent_unions.rs:2:1
+   |
+LL | / union OkButUnstableUnion {
+LL | |     field: u8,
+LL | |     zst: (),
+LL | | }
+   | |_^
+   |
+   = note: for more information, see https://github.com/rust-lang/rust/issues/60405
+   = help: add #![feature(transparent_unions)] to the crate attributes to enable
+
+error: aborting due to previous error
+
+For more information about this error, try `rustc --explain E0658`.
index 4376876ecd6948d37d16258efd0608fff8d3bea4..e0491093a722aad3ca9a2af0ed7c541a8edec61b 100644 (file)
@@ -9,7 +9,7 @@ note: lint level defined here
    |
 LL | #![deny(improper_ctypes)]
    |         ^^^^^^^^^^^^^^^
-   = help: consider adding a #[repr(C)] or #[repr(transparent)] attribute to this struct
+   = help: consider adding a `#[repr(C)]` or `#[repr(transparent)]` attribute to this struct
 note: type defined here
   --> $DIR/issue-14309.rs:4:1
    |
@@ -24,7 +24,7 @@ error: `extern` block uses type `A` which is not FFI-safe: this struct has unspe
 LL |     fn bar(x: B);
    |               ^
    |
-   = help: consider adding a #[repr(C)] or #[repr(transparent)] attribute to this struct
+   = help: consider adding a `#[repr(C)]` or `#[repr(transparent)]` attribute to this struct
 note: type defined here
   --> $DIR/issue-14309.rs:4:1
    |
@@ -39,7 +39,7 @@ error: `extern` block uses type `A` which is not FFI-safe: this struct has unspe
 LL |     fn qux(x: A2);
    |               ^^
    |
-   = help: consider adding a #[repr(C)] or #[repr(transparent)] attribute to this struct
+   = help: consider adding a `#[repr(C)]` or `#[repr(transparent)]` attribute to this struct
 note: type defined here
   --> $DIR/issue-14309.rs:4:1
    |
@@ -54,7 +54,7 @@ error: `extern` block uses type `A` which is not FFI-safe: this struct has unspe
 LL |     fn quux(x: B2);
    |                ^^
    |
-   = help: consider adding a #[repr(C)] or #[repr(transparent)] attribute to this struct
+   = help: consider adding a `#[repr(C)]` or `#[repr(transparent)]` attribute to this struct
 note: type defined here
   --> $DIR/issue-14309.rs:4:1
    |
@@ -69,7 +69,7 @@ error: `extern` block uses type `A` which is not FFI-safe: this struct has unspe
 LL |     fn fred(x: D);
    |                ^
    |
-   = help: consider adding a #[repr(C)] or #[repr(transparent)] attribute to this struct
+   = help: consider adding a `#[repr(C)]` or `#[repr(transparent)]` attribute to this struct
 note: type defined here
   --> $DIR/issue-14309.rs:4:1
    |
index a8ff2548b73dcfcecfe7317be3c1aba1669ecf33..142d8e21532d82b2ccb8942e82a8b08d119e51dc 100644 (file)
@@ -10,7 +10,7 @@ note: lint level defined here
 LL | #![deny(warnings)]
    |         ^^^^^^^^
    = note: #[deny(improper_ctypes)] implied by #[deny(warnings)]
-   = help: consider adding a #[repr(C)] or #[repr(transparent)] attribute to this struct
+   = help: consider adding a `#[repr(C)]` or `#[repr(transparent)]` attribute to this struct
 note: type defined here
   --> $DIR/issue-16250.rs:3:1
    |
index 1bbd874a448022fba141ceb608e6fad16fa2e5fa..d276231dc0c967103b894dff7784ed953f5daa48 100644 (file)
@@ -5,7 +5,7 @@ LL |     let a = unsafe { *mut_ref() };
    |                      ^^^^^^^^^^
    |                      |
    |                      move occurs because value has type `T`, which does not implement the `Copy` trait
-   |                      help: consider removing the `*`: `mut_ref()`
+   |                      help: consider borrowing here: `&*mut_ref()`
 
 error[E0507]: cannot move out of a shared reference
   --> $DIR/issue-20801.rs:29:22
@@ -14,7 +14,7 @@ LL |     let b = unsafe { *imm_ref() };
    |                      ^^^^^^^^^^
    |                      |
    |                      move occurs because value has type `T`, which does not implement the `Copy` trait
-   |                      help: consider removing the `*`: `imm_ref()`
+   |                      help: consider borrowing here: `&*imm_ref()`
 
 error[E0507]: cannot move out of a raw pointer
   --> $DIR/issue-20801.rs:32:22
@@ -23,7 +23,7 @@ LL |     let c = unsafe { *mut_ptr() };
    |                      ^^^^^^^^^^
    |                      |
    |                      move occurs because value has type `T`, which does not implement the `Copy` trait
-   |                      help: consider removing the `*`: `mut_ptr()`
+   |                      help: consider borrowing here: `&*mut_ptr()`
 
 error[E0507]: cannot move out of a raw pointer
   --> $DIR/issue-20801.rs:35:22
@@ -32,7 +32,7 @@ LL |     let d = unsafe { *const_ptr() };
    |                      ^^^^^^^^^^^^
    |                      |
    |                      move occurs because value has type `T`, which does not implement the `Copy` trait
-   |                      help: consider removing the `*`: `const_ptr()`
+   |                      help: consider borrowing here: `&*const_ptr()`
 
 error: aborting due to 4 previous errors
 
index 7b270ceb688148a8d3a076609021dfd4133b68be..2424a1c92cd644a80981c1f7f204f7a379babf85 100644 (file)
@@ -1,8 +1,7 @@
 // compile-pass
-#![allow(unused_attributes)]
 // compile-flags:--cfg set1
 
-#![cfg_attr(set1, feature(custom_attribute))]
+#![cfg_attr(set1, feature(rustc_attrs))]
+#![rustc_dummy]
 
-#![foobar]
 fn main() {}
index 794c1d19893885c299c32f9ee7e4a310a4c359ba..45eb5e4008089c2ea6206109ac4d59159c466a48 100644 (file)
@@ -1,4 +1,4 @@
 fn main() {
     #[inline] struct Foo;  //~ ERROR attribute should be applied to function or closure
-    #[repr(C)] fn foo() {} //~ ERROR attribute should be applied to struct, enum or union
+    #[repr(C)] fn foo() {} //~ ERROR attribute should be applied to struct, enum, or union
 }
index 51d1f51d1c90903dbd31dc4164cda6260310b9eb..20534e1ae82c57a918a69dd844efd57160d26868 100644 (file)
@@ -4,11 +4,11 @@ error[E0518]: attribute should be applied to function or closure
 LL |     #[inline] struct Foo;
    |     ^^^^^^^^^ ----------- not a function or closure
 
-error[E0517]: attribute should be applied to struct, enum or union
+error[E0517]: attribute should be applied to struct, enum, or union
   --> $DIR/issue-31769.rs:3:12
    |
 LL |     #[repr(C)] fn foo() {}
-   |            ^   ----------- not a struct, enum or union
+   |            ^   ----------- not a struct, enum, or union
 
 error: aborting due to 2 previous errors
 
index e5b1cd93614bb572cd586c255f2969302aa9d4bc..a8e31a648c9ed4e67a41319d43afeecdc75b17fb 100644 (file)
@@ -1,7 +1,7 @@
 #![allow(unused_macros)]
 
 macro_rules! assign {
-    (($($a:tt)*) = ($($b:tt))*) => { //~ ERROR expected `*` or `+`
+    (($($a:tt)*) = ($($b:tt))*) => { //~ ERROR expected one of: `*`, `+`, or `?`
         $($a)* = $($b)*
     }
 }
index e04e16e2a037562a11f194214b2aa9aa328c96b0..62e7dff547692ead4c1aab1a46b6eb54a92d7aff 100644 (file)
@@ -1,4 +1,4 @@
-error: expected `*` or `+`
+error: expected one of: `*`, `+`, or `?`
   --> $DIR/issue-39388.rs:4:22
    |
 LL |     (($($a:tt)*) = ($($b:tt))*) => {
index c2f0cc6f0ffed2a6b2e5d4f1a015436490351039..339c1a3b8f617f9e7e69f5b6d45401ee40dc7739 100644 (file)
@@ -32,7 +32,7 @@ error[E0517]: attribute should not be applied to a statement
 LL |     #[repr(nothing)]
    |     ^^^^^^^^^^^^^^^^
 LL |     let _x = 0;
-   |     ----------- not a struct, enum or union
+   |     ----------- not a struct, enum, or union
 
 error[E0517]: attribute should not be applied to an expression
   --> $DIR/issue-43988.rs:18:5
@@ -42,7 +42,7 @@ LL |       #[repr(something_not_real)]
 LL | /     loop {
 LL | |         ()
 LL | |     };
-   | |_____- not defining a struct, enum or union
+   | |_____- not defining a struct, enum, or union
 
 error[E0517]: attribute should not be applied to a statement
   --> $DIR/issue-43988.rs:24:5
@@ -50,7 +50,7 @@ error[E0517]: attribute should not be applied to a statement
 LL |     #[repr]
    |     ^^^^^^^
 LL |     let _y = "123";
-   |     --------------- not a struct, enum or union
+   |     --------------- not a struct, enum, or union
 
 error[E0518]: attribute should be applied to function or closure
   --> $DIR/issue-43988.rs:31:5
@@ -64,7 +64,7 @@ error[E0517]: attribute should not be applied to an expression
   --> $DIR/issue-43988.rs:35:14
    |
 LL |     let _z = #[repr] 1;
-   |              ^^^^^^^ - not defining a struct, enum or union
+   |              ^^^^^^^ - not defining a struct, enum, or union
 
 error: aborting due to 9 previous errors
 
index d3e11d2f7ed6478ef826d3f72dec65f0746396ce..45eeffff7a6ac392116befb82460348b61270999 100644 (file)
@@ -1,3 +1,4 @@
+#![feature(transparent_enums, transparent_unions)]
 #![deny(improper_ctypes)]
 #![allow(dead_code)]
 
@@ -18,7 +19,17 @@ enum U8 { A, B, C }
 enum Isize { A, B, C }
 
 #[repr(transparent)]
-struct Transparent<T>(T, std::marker::PhantomData<Z>);
+struct TransparentStruct<T>(T, std::marker::PhantomData<Z>);
+
+#[repr(transparent)]
+enum TransparentEnum<T> {
+   Variant(T, std::marker::PhantomData<Z>),
+}
+
+#[repr(transparent)]
+union TransparentUnion<T: Copy> {
+   field: T,
+}
 
 struct Rust<T>(T);
 
@@ -47,7 +58,10 @@ enum Isize { A, B, C }
    fn nonzero_i128(x: Option<num::NonZeroI128>);
    //~^ ERROR 128-bit integers don't currently have a known stable ABI
    fn nonzero_isize(x: Option<num::NonZeroIsize>);
-   fn repr_transparent(x: Option<Transparent<num::NonZeroU8>>);
+   fn transparent_struct(x: Option<TransparentStruct<num::NonZeroU8>>);
+   fn transparent_enum(x: Option<TransparentEnum<num::NonZeroU8>>);
+   fn transparent_union(x: Option<TransparentUnion<num::NonZeroU8>>);
+   //~^ ERROR enum has no representation hint
    fn repr_rust(x: Option<Rust<num::NonZeroU8>>); //~ ERROR enum has no representation hint
    fn no_result(x: Result<(), num::NonZeroI32>); //~ ERROR enum has no representation hint
 }
index 6b807f48aaa82576ff092c38d8e555889404cdc1..2a60cd12d993652595388e9ee76f08c64949c9e9 100644 (file)
@@ -1,74 +1,82 @@
 error: `extern` block uses type `U` which is not FFI-safe: enum has no representation hint
-  --> $DIR/lint-ctypes-enum.rs:27:13
+  --> $DIR/lint-ctypes-enum.rs:38:13
    |
 LL |    fn uf(x: U);
    |             ^
    |
 note: lint level defined here
-  --> $DIR/lint-ctypes-enum.rs:1:9
+  --> $DIR/lint-ctypes-enum.rs:2:9
    |
 LL | #![deny(improper_ctypes)]
    |         ^^^^^^^^^^^^^^^
-   = help: consider adding a #[repr(...)] attribute to this enum
+   = help: consider adding a `#[repr(C)]`, `#[repr(transparent)]`, or integer `#[repr(...)]` attribute to this enum
 note: type defined here
-  --> $DIR/lint-ctypes-enum.rs:7:1
+  --> $DIR/lint-ctypes-enum.rs:8:1
    |
 LL | enum U { A }
    | ^^^^^^^^^^^^
 
 error: `extern` block uses type `B` which is not FFI-safe: enum has no representation hint
-  --> $DIR/lint-ctypes-enum.rs:28:13
+  --> $DIR/lint-ctypes-enum.rs:39:13
    |
 LL |    fn bf(x: B);
    |             ^
    |
-   = help: consider adding a #[repr(...)] attribute to this enum
+   = help: consider adding a `#[repr(C)]`, `#[repr(transparent)]`, or integer `#[repr(...)]` attribute to this enum
 note: type defined here
-  --> $DIR/lint-ctypes-enum.rs:8:1
+  --> $DIR/lint-ctypes-enum.rs:9:1
    |
 LL | enum B { C, D }
    | ^^^^^^^^^^^^^^^
 
 error: `extern` block uses type `T` which is not FFI-safe: enum has no representation hint
-  --> $DIR/lint-ctypes-enum.rs:29:13
+  --> $DIR/lint-ctypes-enum.rs:40:13
    |
 LL |    fn tf(x: T);
    |             ^
    |
-   = help: consider adding a #[repr(...)] attribute to this enum
+   = help: consider adding a `#[repr(C)]`, `#[repr(transparent)]`, or integer `#[repr(...)]` attribute to this enum
 note: type defined here
-  --> $DIR/lint-ctypes-enum.rs:9:1
+  --> $DIR/lint-ctypes-enum.rs:10:1
    |
 LL | enum T { E, F, G }
    | ^^^^^^^^^^^^^^^^^^
 
 error: `extern` block uses type `u128` which is not FFI-safe: 128-bit integers don't currently have a known stable ABI
-  --> $DIR/lint-ctypes-enum.rs:40:23
+  --> $DIR/lint-ctypes-enum.rs:51:23
    |
 LL |    fn nonzero_u128(x: Option<num::NonZeroU128>);
    |                       ^^^^^^^^^^^^^^^^^^^^^^^^
 
 error: `extern` block uses type `i128` which is not FFI-safe: 128-bit integers don't currently have a known stable ABI
-  --> $DIR/lint-ctypes-enum.rs:47:23
+  --> $DIR/lint-ctypes-enum.rs:58:23
    |
 LL |    fn nonzero_i128(x: Option<num::NonZeroI128>);
    |                       ^^^^^^^^^^^^^^^^^^^^^^^^
 
+error: `extern` block uses type `std::option::Option<TransparentUnion<std::num::NonZeroU8>>` which is not FFI-safe: enum has no representation hint
+  --> $DIR/lint-ctypes-enum.rs:63:28
+   |
+LL |    fn transparent_union(x: Option<TransparentUnion<num::NonZeroU8>>);
+   |                            ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+   |
+   = help: consider adding a `#[repr(C)]`, `#[repr(transparent)]`, or integer `#[repr(...)]` attribute to this enum
+
 error: `extern` block uses type `std::option::Option<Rust<std::num::NonZeroU8>>` which is not FFI-safe: enum has no representation hint
-  --> $DIR/lint-ctypes-enum.rs:51:20
+  --> $DIR/lint-ctypes-enum.rs:65:20
    |
 LL |    fn repr_rust(x: Option<Rust<num::NonZeroU8>>);
    |                    ^^^^^^^^^^^^^^^^^^^^^^^^^^^^
    |
-   = help: consider adding a #[repr(...)] attribute to this enum
+   = help: consider adding a `#[repr(C)]`, `#[repr(transparent)]`, or integer `#[repr(...)]` attribute to this enum
 
 error: `extern` block uses type `std::result::Result<(), std::num::NonZeroI32>` which is not FFI-safe: enum has no representation hint
-  --> $DIR/lint-ctypes-enum.rs:52:20
+  --> $DIR/lint-ctypes-enum.rs:66:20
    |
 LL |    fn no_result(x: Result<(), num::NonZeroI32>);
    |                    ^^^^^^^^^^^^^^^^^^^^^^^^^^^
    |
-   = help: consider adding a #[repr(...)] attribute to this enum
+   = help: consider adding a `#[repr(C)]`, `#[repr(transparent)]`, or integer `#[repr(...)]` attribute to this enum
 
-error: aborting due to 7 previous errors
+error: aborting due to 8 previous errors
 
index 03c18e4530b82dea2d588784fd916caefa5cb931..c78463beb6559492ecc45c9bc016946bcab3ae7b 100644 (file)
@@ -9,7 +9,7 @@ note: lint level defined here
    |
 LL | #![deny(improper_ctypes)]
    |         ^^^^^^^^^^^^^^^
-   = help: consider adding a #[repr(C)] or #[repr(transparent)] attribute to this struct
+   = help: consider adding a `#[repr(C)]` or `#[repr(transparent)]` attribute to this struct
 note: type defined here
   --> $DIR/lint-ctypes.rs:24:1
    |
@@ -22,7 +22,7 @@ error: `extern` block uses type `Foo` which is not FFI-safe: this struct has uns
 LL |     pub fn ptr_type2(size: *const Foo);
    |                            ^^^^^^^^^^
    |
-   = help: consider adding a #[repr(C)] or #[repr(transparent)] attribute to this struct
+   = help: consider adding a `#[repr(C)]` or `#[repr(transparent)]` attribute to this struct
 note: type defined here
   --> $DIR/lint-ctypes.rs:24:1
    |
@@ -51,7 +51,7 @@ error: `extern` block uses type `std::boxed::Box<u32>` which is not FFI-safe: th
 LL |     pub fn box_type(p: Box<u32>);
    |                        ^^^^^^^^
    |
-   = help: consider adding a #[repr(C)] or #[repr(transparent)] attribute to this struct
+   = help: consider adding a `#[repr(C)]` or `#[repr(transparent)]` attribute to this struct
 
 error: `extern` block uses type `char` which is not FFI-safe: the `char` type has no C equivalent
   --> $DIR/lint-ctypes.rs:51:25
@@ -142,7 +142,7 @@ error: `extern` block uses type `std::boxed::Box<u32>` which is not FFI-safe: th
 LL |     pub fn fn_contained(p: RustBadRet);
    |                            ^^^^^^^^^^
    |
-   = help: consider adding a #[repr(C)] or #[repr(transparent)] attribute to this struct
+   = help: consider adding a `#[repr(C)]` or `#[repr(transparent)]` attribute to this struct
 
 error: `extern` block uses type `i128` which is not FFI-safe: 128-bit integers don't currently have a known stable ABI
   --> $DIR/lint-ctypes.rs:64:32
@@ -164,7 +164,7 @@ error: `extern` block uses type `std::boxed::Box<u32>` which is not FFI-safe: th
 LL |     pub fn transparent_fn(p: TransparentBadFn);
    |                              ^^^^^^^^^^^^^^^^
    |
-   = help: consider adding a #[repr(C)] or #[repr(transparent)] attribute to this struct
+   = help: consider adding a `#[repr(C)]` or `#[repr(transparent)]` attribute to this struct
 
 error: aborting due to 20 previous errors
 
diff --git a/src/test/ui/lint/lint-obsolete-attr.rs b/src/test/ui/lint/lint-obsolete-attr.rs
deleted file mode 100644 (file)
index 149948b..0000000
+++ /dev/null
@@ -1,12 +0,0 @@
-// When denying at the crate level, be sure to not get random warnings from the
-// injected intrinsics by the compiler.
-
-#![deny(unused_attributes)]
-#![allow(dead_code)]
-#![feature(custom_attribute)]
-
-#[ab_isize="stdcall"] extern {} //~ ERROR unused attribute
-
-#[fixed_stack_segment] fn f() {} //~ ERROR unused attribute
-
-fn main() {}
diff --git a/src/test/ui/lint/lint-obsolete-attr.stderr b/src/test/ui/lint/lint-obsolete-attr.stderr
deleted file mode 100644 (file)
index c06bd26..0000000
+++ /dev/null
@@ -1,20 +0,0 @@
-error: unused attribute
-  --> $DIR/lint-obsolete-attr.rs:8:1
-   |
-LL | #[ab_isize="stdcall"] extern {}
-   | ^^^^^^^^^^^^^^^^^^^^^
-   |
-note: lint level defined here
-  --> $DIR/lint-obsolete-attr.rs:4:9
-   |
-LL | #![deny(unused_attributes)]
-   |         ^^^^^^^^^^^^^^^^^
-
-error: unused attribute
-  --> $DIR/lint-obsolete-attr.rs:10:1
-   |
-LL | #[fixed_stack_segment] fn f() {}
-   | ^^^^^^^^^^^^^^^^^^^^^^
-
-error: aborting due to 2 previous errors
-
diff --git a/src/test/ui/lint/lint-unknown-attr.rs b/src/test/ui/lint/lint-unknown-attr.rs
deleted file mode 100644 (file)
index 828b869..0000000
+++ /dev/null
@@ -1,11 +0,0 @@
-// When denying at the crate level, be sure to not get random warnings from the
-// injected intrinsics by the compiler.
-
-#![feature(custom_attribute)]
-#![deny(unused_attributes)]
-
-#![mutable_doc] //~ ERROR unused attribute
-
-#[dance] mod a {} //~ ERROR unused attribute
-
-#[dance] fn main() {} //~ ERROR unused attribute
diff --git a/src/test/ui/lint/lint-unknown-attr.stderr b/src/test/ui/lint/lint-unknown-attr.stderr
deleted file mode 100644 (file)
index 9817760..0000000
+++ /dev/null
@@ -1,26 +0,0 @@
-error: unused attribute
-  --> $DIR/lint-unknown-attr.rs:9:1
-   |
-LL | #[dance] mod a {}
-   | ^^^^^^^^
-   |
-note: lint level defined here
-  --> $DIR/lint-unknown-attr.rs:5:9
-   |
-LL | #![deny(unused_attributes)]
-   |         ^^^^^^^^^^^^^^^^^
-
-error: unused attribute
-  --> $DIR/lint-unknown-attr.rs:11:1
-   |
-LL | #[dance] fn main() {}
-   | ^^^^^^^^
-
-error: unused attribute
-  --> $DIR/lint-unknown-attr.rs:7:1
-   |
-LL | #![mutable_doc]
-   | ^^^^^^^^^^^^^^^
-
-error: aborting due to 3 previous errors
-
diff --git a/src/test/ui/macros/macro-at-most-once-rep-2015-ques-rep.rs b/src/test/ui/macros/macro-at-most-once-rep-2015-ques-rep.rs
deleted file mode 100644 (file)
index 2d8d2ec..0000000
+++ /dev/null
@@ -1,13 +0,0 @@
-// Test behavior of `?` macro _kleene op_ under the 2015 edition. Namely, it doesn't exist.
-
-// edition:2015
-
-macro_rules! bar {
-    ($(a)?) => {} //~ERROR expected `*` or `+`
-}
-
-macro_rules! baz {
-    ($(a),?) => {} //~ERROR expected `*` or `+`
-}
-
-fn main() {}
diff --git a/src/test/ui/macros/macro-at-most-once-rep-2015-ques-rep.stderr b/src/test/ui/macros/macro-at-most-once-rep-2015-ques-rep.stderr
deleted file mode 100644 (file)
index e78f283..0000000
+++ /dev/null
@@ -1,18 +0,0 @@
-error: expected `*` or `+`
-  --> $DIR/macro-at-most-once-rep-2015-ques-rep.rs:6:10
-   |
-LL |     ($(a)?) => {}
-   |          ^
-   |
-   = note: `?` is not a macro repetition operator in the 2015 edition, but is accepted in the 2018 edition
-
-error: expected `*` or `+`
-  --> $DIR/macro-at-most-once-rep-2015-ques-rep.rs:10:11
-   |
-LL |     ($(a),?) => {}
-   |           ^
-   |
-   = note: `?` is not a macro repetition operator in the 2015 edition, but is accepted in the 2018 edition
-
-error: aborting due to 2 previous errors
-
diff --git a/src/test/ui/macros/macro-at-most-once-rep-2015-ques-sep.rs b/src/test/ui/macros/macro-at-most-once-rep-2015-ques-sep.rs
deleted file mode 100644 (file)
index c8c920f..0000000
+++ /dev/null
@@ -1,28 +0,0 @@
-// Test behavior of `?` macro _separator_ under the 2015 edition. Namely, `?` can be used as a
-// separator, but you get a migration warning for the edition.
-
-// edition:2015
-// compile-pass
-
-#![warn(rust_2018_compatibility)]
-
-macro_rules! bar {
-    ($(a)?*) => {} //~WARN using `?` as a separator
-    //~^WARN this was previously accepted
-}
-
-macro_rules! baz {
-    ($(a)?+) => {} //~WARN using `?` as a separator
-    //~^WARN this was previously accepted
-}
-
-fn main() {
-    bar!();
-    bar!(a);
-    bar!(a?a);
-    bar!(a?a?a?a?a);
-
-    baz!(a);
-    baz!(a?a);
-    baz!(a?a?a?a?a);
-}
diff --git a/src/test/ui/macros/macro-at-most-once-rep-2015-ques-sep.stderr b/src/test/ui/macros/macro-at-most-once-rep-2015-ques-sep.stderr
deleted file mode 100644 (file)
index bf1861a..0000000
+++ /dev/null
@@ -1,24 +0,0 @@
-warning: using `?` as a separator is deprecated and will be a hard error in an upcoming edition
-  --> $DIR/macro-at-most-once-rep-2015-ques-sep.rs:10:10
-   |
-LL |     ($(a)?*) => {}
-   |          ^
-   |
-note: lint level defined here
-  --> $DIR/macro-at-most-once-rep-2015-ques-sep.rs:7:9
-   |
-LL | #![warn(rust_2018_compatibility)]
-   |         ^^^^^^^^^^^^^^^^^^^^^^^
-   = note: #[warn(question_mark_macro_sep)] implied by #[warn(rust_2018_compatibility)]
-   = warning: this was previously accepted by the compiler but is being phased out; it will become a hard error in the 2018 edition!
-   = note: for more information, see issue #48075 <https://github.com/rust-lang/rust/issues/48075>
-
-warning: using `?` as a separator is deprecated and will be a hard error in an upcoming edition
-  --> $DIR/macro-at-most-once-rep-2015-ques-sep.rs:15:10
-   |
-LL |     ($(a)?+) => {}
-   |          ^
-   |
-   = warning: this was previously accepted by the compiler but is being phased out; it will become a hard error in the 2018 edition!
-   = note: for more information, see issue #48075 <https://github.com/rust-lang/rust/issues/48075>
-
diff --git a/src/test/ui/macros/macro-at-most-once-rep-2015.rs b/src/test/ui/macros/macro-at-most-once-rep-2015.rs
new file mode 100644 (file)
index 0000000..f68100d
--- /dev/null
@@ -0,0 +1,42 @@
+// Tests that `?` is a Kleene op and not a macro separator in the 2015 edition.
+
+// edition:2015
+
+macro_rules! foo {
+    ($(a)?) => {};
+}
+
+// The Kleene op `?` does not admit a separator before it.
+macro_rules! baz {
+    ($(a),?) => {}; //~ERROR the `?` macro repetition operator
+}
+
+macro_rules! barplus {
+    ($(a)?+) => {}; // ok. matches "a+" and "+"
+}
+
+macro_rules! barstar {
+    ($(a)?*) => {}; // ok. matches "a*" and "*"
+}
+
+pub fn main() {
+    foo!();
+    foo!(a);
+    foo!(a?); //~ ERROR no rules expected the token `?`
+    foo!(a?a); //~ ERROR no rules expected the token `?`
+    foo!(a?a?a); //~ ERROR no rules expected the token `?`
+
+    barplus!(); //~ERROR unexpected end of macro invocation
+    barplus!(a); //~ERROR unexpected end of macro invocation
+    barplus!(a?); //~ ERROR no rules expected the token `?`
+    barplus!(a?a); //~ ERROR no rules expected the token `?`
+    barplus!(a+);
+    barplus!(+);
+
+    barstar!(); //~ERROR unexpected end of macro invocation
+    barstar!(a); //~ERROR unexpected end of macro invocation
+    barstar!(a?); //~ ERROR no rules expected the token `?`
+    barstar!(a?a); //~ ERROR no rules expected the token `?`
+    barstar!(a*);
+    barstar!(*);
+}
diff --git a/src/test/ui/macros/macro-at-most-once-rep-2015.stderr b/src/test/ui/macros/macro-at-most-once-rep-2015.stderr
new file mode 100644 (file)
index 0000000..f9871ab
--- /dev/null
@@ -0,0 +1,107 @@
+error: the `?` macro repetition operator does not take a separator
+  --> $DIR/macro-at-most-once-rep-2015.rs:11:10
+   |
+LL |     ($(a),?) => {};
+   |          ^
+
+error: no rules expected the token `?`
+  --> $DIR/macro-at-most-once-rep-2015.rs:25:11
+   |
+LL | macro_rules! foo {
+   | ---------------- when calling this macro
+...
+LL |     foo!(a?);
+   |           ^ no rules expected this token in macro call
+
+error: no rules expected the token `?`
+  --> $DIR/macro-at-most-once-rep-2015.rs:26:11
+   |
+LL | macro_rules! foo {
+   | ---------------- when calling this macro
+...
+LL |     foo!(a?a);
+   |           ^ no rules expected this token in macro call
+
+error: no rules expected the token `?`
+  --> $DIR/macro-at-most-once-rep-2015.rs:27:11
+   |
+LL | macro_rules! foo {
+   | ---------------- when calling this macro
+...
+LL |     foo!(a?a?a);
+   |           ^ no rules expected this token in macro call
+
+error: unexpected end of macro invocation
+  --> $DIR/macro-at-most-once-rep-2015.rs:29:5
+   |
+LL | macro_rules! barplus {
+   | -------------------- when calling this macro
+...
+LL |     barplus!();
+   |     ^^^^^^^^^^^ missing tokens in macro arguments
+
+error: unexpected end of macro invocation
+  --> $DIR/macro-at-most-once-rep-2015.rs:30:15
+   |
+LL | macro_rules! barplus {
+   | -------------------- when calling this macro
+...
+LL |     barplus!(a);
+   |               ^ missing tokens in macro arguments
+
+error: no rules expected the token `?`
+  --> $DIR/macro-at-most-once-rep-2015.rs:31:15
+   |
+LL | macro_rules! barplus {
+   | -------------------- when calling this macro
+...
+LL |     barplus!(a?);
+   |               ^ no rules expected this token in macro call
+
+error: no rules expected the token `?`
+  --> $DIR/macro-at-most-once-rep-2015.rs:32:15
+   |
+LL | macro_rules! barplus {
+   | -------------------- when calling this macro
+...
+LL |     barplus!(a?a);
+   |               ^ no rules expected this token in macro call
+
+error: unexpected end of macro invocation
+  --> $DIR/macro-at-most-once-rep-2015.rs:36:5
+   |
+LL | macro_rules! barstar {
+   | -------------------- when calling this macro
+...
+LL |     barstar!();
+   |     ^^^^^^^^^^^ missing tokens in macro arguments
+
+error: unexpected end of macro invocation
+  --> $DIR/macro-at-most-once-rep-2015.rs:37:15
+   |
+LL | macro_rules! barstar {
+   | -------------------- when calling this macro
+...
+LL |     barstar!(a);
+   |               ^ missing tokens in macro arguments
+
+error: no rules expected the token `?`
+  --> $DIR/macro-at-most-once-rep-2015.rs:38:15
+   |
+LL | macro_rules! barstar {
+   | -------------------- when calling this macro
+...
+LL |     barstar!(a?);
+   |               ^ no rules expected this token in macro call
+
+error: no rules expected the token `?`
+  --> $DIR/macro-at-most-once-rep-2015.rs:39:15
+   |
+LL | macro_rules! barstar {
+   | -------------------- when calling this macro
+...
+LL |     barstar!(a?a);
+   |               ^ no rules expected this token in macro call
+
+error: aborting due to 12 previous errors
+
index da072adec1516c2c2e12e78a681712bec8c95919..886a25bbcbcb20cc5143b2e8d4e55e39106e0d75 100644 (file)
@@ -6,6 +6,7 @@ macro_rules! foo {
     ($(a)?) => {};
 }
 
+// The Kleene op `?` does not admit a separator before it.
 macro_rules! baz {
     ($(a),?) => {}; //~ERROR the `?` macro repetition operator
 }
index f285c7cc7c2797f86142f408d89ab0c2971e9fa4..bfe5883b03fa04e56c0f211b924c3a6fcd3d1531 100644 (file)
@@ -1,11 +1,11 @@
 error: the `?` macro repetition operator does not take a separator
-  --> $DIR/macro-at-most-once-rep-2018.rs:10:10
+  --> $DIR/macro-at-most-once-rep-2018.rs:11:10
    |
 LL |     ($(a),?) => {};
    |          ^
 
 error: no rules expected the token `?`
-  --> $DIR/macro-at-most-once-rep-2018.rs:24:11
+  --> $DIR/macro-at-most-once-rep-2018.rs:25:11
    |
 LL | macro_rules! foo {
    | ---------------- when calling this macro
@@ -14,7 +14,7 @@ LL |     foo!(a?);
    |           ^ no rules expected this token in macro call
 
 error: no rules expected the token `?`
-  --> $DIR/macro-at-most-once-rep-2018.rs:25:11
+  --> $DIR/macro-at-most-once-rep-2018.rs:26:11
    |
 LL | macro_rules! foo {
    | ---------------- when calling this macro
@@ -23,7 +23,7 @@ LL |     foo!(a?a);
    |           ^ no rules expected this token in macro call
 
 error: no rules expected the token `?`
-  --> $DIR/macro-at-most-once-rep-2018.rs:26:11
+  --> $DIR/macro-at-most-once-rep-2018.rs:27:11
    |
 LL | macro_rules! foo {
    | ---------------- when calling this macro
@@ -32,7 +32,7 @@ LL |     foo!(a?a?a);
    |           ^ no rules expected this token in macro call
 
 error: unexpected end of macro invocation
-  --> $DIR/macro-at-most-once-rep-2018.rs:28:5
+  --> $DIR/macro-at-most-once-rep-2018.rs:29:5
    |
 LL | macro_rules! barplus {
    | -------------------- when calling this macro
@@ -41,7 +41,7 @@ LL |     barplus!();
    |     ^^^^^^^^^^^ missing tokens in macro arguments
 
 error: unexpected end of macro invocation
-  --> $DIR/macro-at-most-once-rep-2018.rs:29:15
+  --> $DIR/macro-at-most-once-rep-2018.rs:30:15
    |
 LL | macro_rules! barplus {
    | -------------------- when calling this macro
@@ -50,7 +50,7 @@ LL |     barplus!(a);
    |               ^ missing tokens in macro arguments
 
 error: no rules expected the token `?`
-  --> $DIR/macro-at-most-once-rep-2018.rs:30:15
+  --> $DIR/macro-at-most-once-rep-2018.rs:31:15
    |
 LL | macro_rules! barplus {
    | -------------------- when calling this macro
@@ -59,7 +59,7 @@ LL |     barplus!(a?);
    |               ^ no rules expected this token in macro call
 
 error: no rules expected the token `?`
-  --> $DIR/macro-at-most-once-rep-2018.rs:31:15
+  --> $DIR/macro-at-most-once-rep-2018.rs:32:15
    |
 LL | macro_rules! barplus {
    | -------------------- when calling this macro
@@ -68,7 +68,7 @@ LL |     barplus!(a?a);
    |               ^ no rules expected this token in macro call
 
 error: unexpected end of macro invocation
-  --> $DIR/macro-at-most-once-rep-2018.rs:35:5
+  --> $DIR/macro-at-most-once-rep-2018.rs:36:5
    |
 LL | macro_rules! barstar {
    | -------------------- when calling this macro
@@ -77,7 +77,7 @@ LL |     barstar!();
    |     ^^^^^^^^^^^ missing tokens in macro arguments
 
 error: unexpected end of macro invocation
-  --> $DIR/macro-at-most-once-rep-2018.rs:36:15
+  --> $DIR/macro-at-most-once-rep-2018.rs:37:15
    |
 LL | macro_rules! barstar {
    | -------------------- when calling this macro
@@ -86,7 +86,7 @@ LL |     barstar!(a);
    |               ^ missing tokens in macro arguments
 
 error: no rules expected the token `?`
-  --> $DIR/macro-at-most-once-rep-2018.rs:37:15
+  --> $DIR/macro-at-most-once-rep-2018.rs:38:15
    |
 LL | macro_rules! barstar {
    | -------------------- when calling this macro
@@ -95,7 +95,7 @@ LL |     barstar!(a?);
    |               ^ no rules expected this token in macro call
 
 error: no rules expected the token `?`
-  --> $DIR/macro-at-most-once-rep-2018.rs:38:15
+  --> $DIR/macro-at-most-once-rep-2018.rs:39:15
    |
 LL | macro_rules! barstar {
    | -------------------- when calling this macro
index 268ddda1b3c2529061e3b9c8986b28bcbf78d4ea..56a902315661293e21971ce57cf9ea0003e2ce8d 100644 (file)
@@ -1,4 +1,4 @@
-#![feature(custom_attribute)]
+#![feature(rustc_attrs)]
 
 macro_rules! test { ($nm:ident,
                      #[$a:meta],
@@ -12,7 +12,7 @@ macro_rules! test { ($nm:ident,
       #[cfg(not(qux))],
       pub fn bar() { });
 
-#[qux]
+#[rustc_dummy]
 fn main() {
     a::bar();
     //~^ ERROR failed to resolve: use of undeclared type or module `a`
index a35f6283afb2e7029e4945094d26924f536465cd..542486927dfd115f2bb7d9b33c5369ca2a83cc3b 100644 (file)
@@ -55,10 +55,10 @@ LL |     ($($a:ty, $b:ty)* -) => ();
    = note: allowed there are: `{`, `[`, `=>`, `,`, `>`, `=`, `:`, `;`, `|`, `as` or `where`
 
 error: `$ty:ty` is followed by `-`, which is not allowed for `ty` fragments
-  --> $DIR/macro-input-future-proofing.rs:18:7
+  --> $DIR/macro-input-future-proofing.rs:18:15
    |
 LL |     ($($ty:ty)-+) => ();
-   |       ^^^^^^^^ not allowed after `ty` fragments
+   |               ^ not allowed after `ty` fragments
    |
    = note: allowed there are: `{`, `[`, `=>`, `,`, `>`, `=`, `:`, `;`, `|`, `as` or `where`
 
index aa70060425f651aa9a03c5b6fd2654485c07c403..0752f7e3153c19e5b7ac870bf4185b4fa6235a12 100644 (file)
@@ -1,4 +1,4 @@
-#![feature(custom_attribute)]
+#![feature(rustc_attrs)]
 
 macro_rules! test { ($nm:ident,
                      #[$a:meta],
@@ -13,7 +13,7 @@ macro_rules! test { ($nm:ident,
       pub fn bar() { });
 
 // test1!(#[bar])
-#[qux]
+#[rustc_dummy]
 fn main() {
     a::bar(); //~ ERROR cannot find function `bar` in module `a`
     b::bar();
index 5fba8002e1c488ca4bba2616eff19fb21c47efbc..93cc97d45830b2a9ae9a651c535eb272fb4be198 100644 (file)
@@ -1,6 +1,9 @@
 error: no rules expected the token `enum E { }`
   --> $DIR/nonterminal-matching.rs:19:10
    |
+LL |     macro n(a $nt_item b) {
+   |     --------------------- when calling this macro
+...
 LL |     n!(a $nt_item b);
    |          ^^^^^^^^ no rules expected this token in macro call
 ...
index 7c4ca3c017e7b33ad8634147566bf4c234fe64b6..5101b5caeea09c31df69bf6a56586e7979b02bf2 100644 (file)
@@ -1,9 +1,9 @@
-#![feature(custom_attribute)]
+#![feature(rustc_attrs)]
 
 macro_rules! check {
     ($expr: expr) => (
-        #[my_attr = $expr] //~ ERROR unexpected token: `-0`
-                           //~| ERROR unexpected token: `0 + 0`
+        #[rustc_dummy = $expr] //~ ERROR unexpected token: `-0`
+                               //~| ERROR unexpected token: `0 + 0`
         use main as _;
     );
 }
index e805416172bab8d58133ea77e026cf0546341cc8..bcd2ef545d815ed93bbb93eca31d21059563a167 100644 (file)
@@ -7,19 +7,19 @@ LL | check!(0u8);
    = help: instead of using a suffixed literal (1u8, 1.0f32, etc.), use an unsuffixed version (1, 1.0, etc.).
 
 error: unexpected token: `-0`
-  --> $DIR/malformed-interpolated.rs:5:21
+  --> $DIR/malformed-interpolated.rs:5:25
    |
-LL |         #[my_attr = $expr]
-   |                     ^^^^^
+LL |         #[rustc_dummy = $expr]
+   |                         ^^^^^
 ...
 LL | check!(-0); // ERROR, see above
    | ----------- in this macro invocation
 
 error: unexpected token: `0 + 0`
-  --> $DIR/malformed-interpolated.rs:5:21
+  --> $DIR/malformed-interpolated.rs:5:25
    |
-LL |         #[my_attr = $expr]
-   |                     ^^^^^
+LL |         #[rustc_dummy = $expr]
+   |                         ^^^^^
 ...
 LL | check!(0 + 0); // ERROR, see above
    | -------------- in this macro invocation
index 4a9635b060db36e924278bb36abfa8edd0178a0c..7db5d731acd17baa49079f5113f688cf7e14c142 100644 (file)
@@ -5,7 +5,7 @@ LL |     let x = { *r };
    |               ^^
    |               |
    |               move occurs because `*r` has type `std::string::String`, which does not implement the `Copy` trait
-   |               help: consider removing the `*`: `r`
+   |               help: consider borrowing here: `&*r`
 
 error[E0507]: cannot move out of `*r` which is behind a shared reference
   --> $DIR/cannot-move-block-spans.rs:6:22
@@ -14,7 +14,7 @@ LL |     let y = unsafe { *r };
    |                      ^^
    |                      |
    |                      move occurs because `*r` has type `std::string::String`, which does not implement the `Copy` trait
-   |                      help: consider removing the `*`: `r`
+   |                      help: consider borrowing here: `&*r`
 
 error[E0507]: cannot move out of `*r` which is behind a shared reference
   --> $DIR/cannot-move-block-spans.rs:7:26
@@ -23,7 +23,7 @@ LL |     let z = loop { break *r; };
    |                          ^^
    |                          |
    |                          move occurs because `*r` has type `std::string::String`, which does not implement the `Copy` trait
-   |                          help: consider removing the `*`: `r`
+   |                          help: consider borrowing here: `&*r`
 
 error[E0508]: cannot move out of type `[std::string::String; 2]`, a non-copy array
   --> $DIR/cannot-move-block-spans.rs:11:15
@@ -62,7 +62,7 @@ LL |     let x = { let mut u = 0; u += 1; *r };
    |                                      ^^
    |                                      |
    |                                      move occurs because `*r` has type `std::string::String`, which does not implement the `Copy` trait
-   |                                      help: consider removing the `*`: `r`
+   |                                      help: consider borrowing here: `&*r`
 
 error[E0507]: cannot move out of `*r` which is behind a shared reference
   --> $DIR/cannot-move-block-spans.rs:18:45
@@ -71,7 +71,7 @@ LL |     let y = unsafe { let mut u = 0; u += 1; *r };
    |                                             ^^
    |                                             |
    |                                             move occurs because `*r` has type `std::string::String`, which does not implement the `Copy` trait
-   |                                             help: consider removing the `*`: `r`
+   |                                             help: consider borrowing here: `&*r`
 
 error[E0507]: cannot move out of `*r` which is behind a shared reference
   --> $DIR/cannot-move-block-spans.rs:19:49
@@ -80,7 +80,7 @@ LL |     let z = loop { let mut u = 0; u += 1; break *r; u += 2; };
    |                                                 ^^
    |                                                 |
    |                                                 move occurs because `*r` has type `std::string::String`, which does not implement the `Copy` trait
-   |                                                 help: consider removing the `*`: `r`
+   |                                                 help: consider borrowing here: `&*r`
 
 error: aborting due to 9 previous errors
 
index 086f7bcdc4f27c3c88824ae9427b3c98907dda22..7139617a97a4fe8e0c3cf312ce67a0b035e1addd 100644 (file)
@@ -5,7 +5,7 @@ LL |     let b = *a;
    |             ^^
    |             |
    |             move occurs because `*a` has type `A`, which does not implement the `Copy` trait
-   |             help: consider removing the `*`: `a`
+   |             help: consider borrowing here: `&*a`
 
 error[E0508]: cannot move out of type `[A; 1]`, a non-copy array
   --> $DIR/move-errors.rs:12:13
@@ -24,7 +24,7 @@ LL |     let s = **r;
    |             ^^^
    |             |
    |             move occurs because `**r` has type `A`, which does not implement the `Copy` trait
-   |             help: consider removing the `*`: `*r`
+   |             help: consider borrowing here: `&**r`
 
 error[E0507]: cannot move out of an `Rc`
   --> $DIR/move-errors.rs:27:13
@@ -33,7 +33,7 @@ LL |     let s = *r;
    |             ^^
    |             |
    |             move occurs because value has type `A`, which does not implement the `Copy` trait
-   |             help: consider removing the `*`: `r`
+   |             help: consider borrowing here: `&*r`
 
 error[E0508]: cannot move out of type `[A; 1]`, a non-copy array
   --> $DIR/move-errors.rs:32:13
@@ -49,7 +49,7 @@ error[E0507]: cannot move out of `a.0` which is behind a shared reference
   --> $DIR/move-errors.rs:38:16
    |
 LL |     let A(s) = *a;
-   |           -    ^^ help: consider removing the `*`: `a`
+   |           -    ^^ help: consider borrowing here: `&*a`
    |           |
    |           data moved here
    |           move occurs because `s` has type `std::string::String`, which does not implement the `Copy` trait
@@ -148,7 +148,7 @@ error[E0507]: cannot move out of `x.0` which is behind a shared reference
   --> $DIR/move-errors.rs:110:11
    |
 LL |     match *x {
-   |           ^^ help: consider removing the `*`: `x`
+   |           ^^ help: consider borrowing here: `&*x`
 LL |
 LL |         Ok(s) | Err(s) => (),
    |            -
diff --git a/src/test/ui/nll/user-annotations/adt-tuple-struct-calls.rs b/src/test/ui/nll/user-annotations/adt-tuple-struct-calls.rs
new file mode 100644 (file)
index 0000000..1165832
--- /dev/null
@@ -0,0 +1,71 @@
+// Unit test for the "user substitutions" that are annotated on each
+// node.
+
+struct SomeStruct<T>(T);
+
+fn no_annot() {
+    let c = 66;
+    let f = SomeStruct;
+    f(&c);
+}
+
+fn annot_underscore() {
+    let c = 66;
+    let f = SomeStruct::<_>;
+    f(&c);
+}
+
+fn annot_reference_any_lifetime() {
+    let c = 66;
+    let f = SomeStruct::<&u32>;
+    f(&c);
+}
+
+fn annot_reference_static_lifetime() {
+    let c = 66;
+    let f = SomeStruct::<&'static u32>;
+    f(&c); //~ ERROR
+}
+
+fn annot_reference_named_lifetime<'a>(_d: &'a u32) {
+    let c = 66;
+    let f = SomeStruct::<&'a u32>;
+    f(&c); //~ ERROR
+}
+
+fn annot_reference_named_lifetime_ok<'a>(c: &'a u32) {
+    let f = SomeStruct::<&'a u32>;
+    f(c);
+}
+
+fn annot_reference_named_lifetime_in_closure<'a>(_: &'a u32) {
+    let _closure = || {
+        let c = 66;
+        let f = SomeStruct::<&'a u32>;
+        f(&c); //~ ERROR
+    };
+}
+
+fn annot_reference_named_lifetime_across_closure<'a>(_: &'a u32) {
+    let f = SomeStruct::<&'a u32>;
+    let _closure = || {
+        let c = 66;
+        f(&c); //~ ERROR
+    };
+}
+
+fn annot_reference_named_lifetime_in_closure_ok<'a>(c: &'a u32) {
+    let _closure = || {
+        let f = SomeStruct::<&'a u32>;
+        f(c);
+    };
+}
+
+fn annot_reference_named_lifetime_across_closure_ok<'a>(c: &'a u32) {
+    let f = SomeStruct::<&'a u32>;
+    let _closure = || {
+        f(c);
+    };
+}
+
+fn main() { }
diff --git a/src/test/ui/nll/user-annotations/adt-tuple-struct-calls.stderr b/src/test/ui/nll/user-annotations/adt-tuple-struct-calls.stderr
new file mode 100644 (file)
index 0000000..9664fb9
--- /dev/null
@@ -0,0 +1,56 @@
+error[E0597]: `c` does not live long enough
+  --> $DIR/adt-tuple-struct-calls.rs:27:7
+   |
+LL |     f(&c);
+   |     --^^-
+   |     | |
+   |     | borrowed value does not live long enough
+   |     argument requires that `c` is borrowed for `'static`
+LL | }
+   | - `c` dropped here while still borrowed
+
+error[E0597]: `c` does not live long enough
+  --> $DIR/adt-tuple-struct-calls.rs:33:7
+   |
+LL | fn annot_reference_named_lifetime<'a>(_d: &'a u32) {
+   |                                   -- lifetime `'a` defined here
+...
+LL |     f(&c);
+   |     --^^-
+   |     | |
+   |     | borrowed value does not live long enough
+   |     argument requires that `c` is borrowed for `'a`
+LL | }
+   | - `c` dropped here while still borrowed
+
+error[E0597]: `c` does not live long enough
+  --> $DIR/adt-tuple-struct-calls.rs:45:11
+   |
+LL | fn annot_reference_named_lifetime_in_closure<'a>(_: &'a u32) {
+   |                                              -- lifetime `'a` defined here
+...
+LL |         f(&c);
+   |         --^^-
+   |         | |
+   |         | borrowed value does not live long enough
+   |         argument requires that `c` is borrowed for `'a`
+LL |     };
+   |     - `c` dropped here while still borrowed
+
+error[E0597]: `c` does not live long enough
+  --> $DIR/adt-tuple-struct-calls.rs:53:11
+   |
+LL |     let f = SomeStruct::<&'a u32>;
+   |         - lifetime `'1` appears in the type of `f`
+...
+LL |         f(&c);
+   |         --^^-
+   |         | |
+   |         | borrowed value does not live long enough
+   |         argument requires that `c` is borrowed for `'1`
+LL |     };
+   |     - `c` dropped here while still borrowed
+
+error: aborting due to 4 previous errors
+
+For more information about this error, try `rustc --explain E0597`.
index 7d944569ca9c49980b2fbd0c0e33fbff9b21ac74..b0fe4b6acd48443a7d939b0900b69c3f7ed9edf8 100644 (file)
@@ -28,11 +28,11 @@ error: bare CR not allowed in string, use \r instead
 LL |     let _s = "foo\rbar";
    |                  ^
 
-error: bare CR not allowed in raw string, use \r instead
-  --> $DIR/lex-bare-cr-string-literal-doc-comment.rs:24:14
+error: bare CR not allowed in raw string
+  --> $DIR/lex-bare-cr-string-literal-doc-comment.rs:24:19
    |
 LL |     let _s = r"bar\rfoo";
-   |              ^^^^^
+   |                   ^
 
 error: unknown character escape: \r
   --> $DIR/lex-bare-cr-string-literal-doc-comment.rs:27:19
index 83f8a562a03ad4a2ae5b206c1672fb1155de8d58..9ed53519ceb3156a82eb00d9ddb3454e4f7688e2 100644 (file)
@@ -1,7 +1,7 @@
 macro_rules! foo {
     { $+ } => { //~ ERROR expected identifier, found `+`
                 //~^ ERROR missing fragment specifier
-        $(x)(y) //~ ERROR expected `*` or `+`
+        $(x)(y) //~ ERROR expected one of: `*`, `+`, or `?`
     }
 }
 
index 8ba72fc88862a77a54c207f28984977de3588b18..b4d38d3ce4806ca8c5a6d9bf50241a3c01bdb684 100644 (file)
@@ -4,7 +4,7 @@ error: expected identifier, found `+`
 LL |     { $+ } => {
    |        ^
 
-error: expected `*` or `+`
+error: expected one of: `*`, `+`, or `?`
   --> $DIR/issue-33569.rs:4:13
    |
 LL |         $(x)(y)
index 3b50fb8036ada911eab0393b8aeaa06ef8ce4851..534afabdf777e30023b6ebae60771942822d6763 100644 (file)
@@ -1,4 +1,7 @@
+// ignore-tidy-cr
+// compile-flags: -Z continue-parse-after-error
 pub fn main() {
+    br"a\r"; //~ ERROR bare CR not allowed in raw string
     br"é";  //~ ERROR raw byte string must be ASCII
     br##~"a"~##;  //~ ERROR only `#` is allowed in raw string delimitation
 }
index 671ed97d1b52a2e658e0eaa430d1a7c6f9ca24b1..4880d1fdbe8a730b9d673174acc711047038b0f1 100644 (file)
@@ -1,14 +1,20 @@
-error: raw byte string must be ASCII: \u{e9}
-  --> $DIR/raw-byte-string-literals.rs:2:8
+error: bare CR not allowed in raw string
+  --> $DIR/raw-byte-string-literals.rs:4:9
+   |
+LL |     br"a\r";
+   |         ^
+
+error: raw byte string must be ASCII
+  --> $DIR/raw-byte-string-literals.rs:5:8
    |
 LL |     br"é";
    |        ^
 
 error: found invalid character; only `#` is allowed in raw string delimitation: ~
-  --> $DIR/raw-byte-string-literals.rs:3:6
+  --> $DIR/raw-byte-string-literals.rs:6:6
    |
 LL |     br##~"a"~##;
    |      ^^^
 
-error: aborting due to 2 previous errors
+error: aborting due to 3 previous errors
 
index 82607136f0b6f2d87ee4ff3dc3f6d162ab3ceacc..1298c08df846df8994685f19c1ee2f433bc77947 100644 (file)
@@ -2,8 +2,6 @@
 // aux-build:derive-clona.rs
 // aux-build:test-macros.rs
 
-#![feature(custom_attribute)]
-
 #[macro_use]
 extern crate derive_foo;
 #[macro_use]
@@ -25,10 +23,12 @@ macro_rules! attr_proc_mac {
 //~^ ERROR cannot find
 struct Foo;
 
-#[attr_proc_macra] // OK, interpreted as a custom attribute
+// Interpreted as a feature gated custom attribute
+#[attr_proc_macra] //~ ERROR attribute `attr_proc_macra` is currently unknown
 struct Bar;
 
-#[FooWithLongNan]  // OK, interpreted as a custom attribute
+// Interpreted as a feature gated custom attribute
+#[FooWithLongNan] //~ ERROR attribute `FooWithLongNan` is currently unknown
 struct Asdf;
 
 #[derive(Dlone)]
index 705ef6006a049929573175538a1497428fab5fd4..f9f116c15dcc7fff9905a6018b560837dc99f5ed 100644 (file)
@@ -1,5 +1,23 @@
+error[E0658]: The attribute `attr_proc_macra` is currently unknown to the compiler and may have meaning added to it in the future
+  --> $DIR/resolve-error.rs:27:3
+   |
+LL | #[attr_proc_macra]
+   |   ^^^^^^^^^^^^^^^
+   |
+   = note: for more information, see https://github.com/rust-lang/rust/issues/29642
+   = help: add #![feature(custom_attribute)] to the crate attributes to enable
+
+error[E0658]: The attribute `FooWithLongNan` is currently unknown to the compiler and may have meaning added to it in the future
+  --> $DIR/resolve-error.rs:31:3
+   |
+LL | #[FooWithLongNan]
+   |   ^^^^^^^^^^^^^^
+   |
+   = note: for more information, see https://github.com/rust-lang/rust/issues/29642
+   = help: add #![feature(custom_attribute)] to the crate attributes to enable
+
 error: cannot find derive macro `FooWithLongNan` in this scope
-  --> $DIR/resolve-error.rs:24:10
+  --> $DIR/resolve-error.rs:22:10
    |
 LL | #[derive(FooWithLongNan)]
    |          ^^^^^^^^^^^^^^ help: try: `FooWithLongName`
@@ -46,5 +64,6 @@ error: cannot find macro `bang_proc_macrp!` in this scope
 LL |     bang_proc_macrp!();
    |     ^^^^^^^^^^^^^^^ help: you could try the macro: `bang_proc_macro`
 
-error: aborting due to 8 previous errors
+error: aborting due to 10 previous errors
 
+For more information about this error, try `rustc --explain E0658`.
index 9ce89e82ca2256c4ac5fc54a92551f05ce57d2aa..bc6a9fe562a6790289f279a36c01d2f628bef3c7 100644 (file)
@@ -1,19 +1,27 @@
-#![feature(repr_align_enum)]
 #![allow(dead_code)]
 
 #[repr(align(16.0))] //~ ERROR: invalid `repr(align)` attribute: not an unsuffixed integer
-struct A(i32);
+struct S0(i32);
 
 #[repr(align(15))] //~ ERROR: invalid `repr(align)` attribute: not a power of two
-struct B(i32);
+struct S1(i32);
 
 #[repr(align(4294967296))] //~ ERROR: invalid `repr(align)` attribute: larger than 2^29
-struct C(i32);
+struct S2(i32);
 
 #[repr(align(536870912))] // ok: this is the largest accepted alignment
-struct D(i32);
+struct S3(i32);
+
+#[repr(align(16.0))] //~ ERROR: invalid `repr(align)` attribute: not an unsuffixed integer
+enum E0 { A, B }
 
 #[repr(align(15))] //~ ERROR: invalid `repr(align)` attribute: not a power of two
-enum E { Left, Right }
+enum E1 { A, B }
+
+#[repr(align(4294967296))] //~ ERROR: invalid `repr(align)` attribute: larger than 2^29
+enum E2 { A, B }
+
+#[repr(align(536870912))] // ok: this is the largest accepted alignment
+enum E3 { A, B }
 
 fn main() {}
index 641f117a71710afdc0e146504185391bc2ffa471..280cab2b4a144fed2ff097ec02ac54bbb4a09d50 100644 (file)
@@ -1,27 +1,39 @@
 error[E0589]: invalid `repr(align)` attribute: not an unsuffixed integer
-  --> $DIR/repr-align.rs:4:8
+  --> $DIR/repr-align.rs:3:8
    |
 LL | #[repr(align(16.0))]
    |        ^^^^^^^^^^^
 
 error[E0589]: invalid `repr(align)` attribute: not a power of two
-  --> $DIR/repr-align.rs:7:8
+  --> $DIR/repr-align.rs:6:8
    |
 LL | #[repr(align(15))]
    |        ^^^^^^^^^
 
 error[E0589]: invalid `repr(align)` attribute: larger than 2^29
-  --> $DIR/repr-align.rs:10:8
+  --> $DIR/repr-align.rs:9:8
    |
 LL | #[repr(align(4294967296))]
    |        ^^^^^^^^^^^^^^^^^
 
+error[E0589]: invalid `repr(align)` attribute: not an unsuffixed integer
+  --> $DIR/repr-align.rs:15:8
+   |
+LL | #[repr(align(16.0))]
+   |        ^^^^^^^^^^^
+
 error[E0589]: invalid `repr(align)` attribute: not a power of two
-  --> $DIR/repr-align.rs:16:8
+  --> $DIR/repr-align.rs:18:8
    |
 LL | #[repr(align(15))]
    |        ^^^^^^^^^
 
-error: aborting due to 4 previous errors
+error[E0589]: invalid `repr(align)` attribute: larger than 2^29
+  --> $DIR/repr-align.rs:21:8
+   |
+LL | #[repr(align(4294967296))]
+   |        ^^^^^^^^^^^^^^^^^
+
+error: aborting due to 6 previous errors
 
 For more information about this error, try `rustc --explain E0589`.
index 392e7c9de4d09fda712c5f979f1ca7947fea88a0..c3d772f6266c056bff520e730b13254c4e5fdeca 100644 (file)
@@ -1,26 +1,5 @@
 // See also repr-transparent.rs
 
-#[repr(transparent)] //~ ERROR unsupported representation for zero-variant enum
-enum Void {}         //~| ERROR should be applied to struct
-
-#[repr(transparent)] //~ ERROR should be applied to struct
-enum FieldlessEnum {
-    Foo,
-    Bar,
-}
-
-#[repr(transparent)] //~ ERROR should be applied to struct
-enum Enum {
-    Foo(String),
-    Bar(u32),
-}
-
-#[repr(transparent)] //~ ERROR should be applied to struct
-union Foo {
-    u: u32,
-    s: i32
-}
-
 #[repr(transparent)] //~ ERROR should be applied to struct
 fn cant_repr_this() {}
 
index 24fa309a2fb69a3740742f0aa2318468430d0bc6..03df3569b42bcc30ae10c229529f18d6f9928ed8 100644 (file)
@@ -1,69 +1,19 @@
-error[E0517]: attribute should be applied to struct
+error[E0517]: attribute should be applied to struct, enum, or union
   --> $DIR/repr-transparent-other-items.rs:3:8
    |
 LL | #[repr(transparent)]
    |        ^^^^^^^^^^^
-LL | enum Void {}
-   | ------------ not a struct
-
-error[E0517]: attribute should be applied to struct
-  --> $DIR/repr-transparent-other-items.rs:6:8
-   |
-LL |   #[repr(transparent)]
-   |          ^^^^^^^^^^^
-LL | / enum FieldlessEnum {
-LL | |     Foo,
-LL | |     Bar,
-LL | | }
-   | |_- not a struct
-
-error[E0517]: attribute should be applied to struct
-  --> $DIR/repr-transparent-other-items.rs:12:8
-   |
-LL |   #[repr(transparent)]
-   |          ^^^^^^^^^^^
-LL | / enum Enum {
-LL | |     Foo(String),
-LL | |     Bar(u32),
-LL | | }
-   | |_- not a struct
-
-error[E0517]: attribute should be applied to struct
-  --> $DIR/repr-transparent-other-items.rs:18:8
-   |
-LL |   #[repr(transparent)]
-   |          ^^^^^^^^^^^
-LL | / union Foo {
-LL | |     u: u32,
-LL | |     s: i32
-LL | | }
-   | |_- not a struct
-
-error[E0517]: attribute should be applied to struct
-  --> $DIR/repr-transparent-other-items.rs:24:8
-   |
-LL | #[repr(transparent)]
-   |        ^^^^^^^^^^^
 LL | fn cant_repr_this() {}
-   | ---------------------- not a struct
+   | ---------------------- not a struct, enum, or union
 
-error[E0517]: attribute should be applied to struct
-  --> $DIR/repr-transparent-other-items.rs:27:8
+error[E0517]: attribute should be applied to struct, enum, or union
+  --> $DIR/repr-transparent-other-items.rs:6:8
    |
 LL | #[repr(transparent)]
    |        ^^^^^^^^^^^
 LL | static CANT_REPR_THIS: u32 = 0;
-   | ------------------------------- not a struct
-
-error[E0084]: unsupported representation for zero-variant enum
-  --> $DIR/repr-transparent-other-items.rs:3:1
-   |
-LL | #[repr(transparent)]
-   | ^^^^^^^^^^^^^^^^^^^^
-LL | enum Void {}
-   | ------------ zero-variant enum
+   | ------------------------------- not a struct, enum, or union
 
-error: aborting due to 7 previous errors
+error: aborting due to 2 previous errors
 
-Some errors have detailed explanations: E0084, E0517.
-For more information about an error, try `rustc --explain E0084`.
+For more information about this error, try `rustc --explain E0517`.
index 66d39ff9bb593ecbfb9c329aa9d1c53c8dfb7a98..730d428ff500b6163918f25973302f93885733dc 100644 (file)
@@ -3,7 +3,7 @@
 // - repr-transparent-other-reprs.rs
 // - repr-transparent-other-items.rs
 
-#![feature(repr_align)]
+#![feature(repr_align, transparent_enums, transparent_unions)]
 
 use std::marker::PhantomData;
 
 #[repr(transparent)]
 struct GenericAlign<T>(ZstAlign32<T>, u32); //~ ERROR alignment larger than 1
 
+#[repr(transparent)] //~ ERROR unsupported representation for zero-variant enum
+enum Void {}
+//~^ ERROR transparent enum needs exactly one variant, but has 0
+
+#[repr(transparent)]
+enum FieldlessEnum { //~ ERROR transparent enum needs exactly one non-zero-sized field, but has 0
+    Foo,
+}
+
+#[repr(transparent)]
+enum TooManyFieldsEnum {
+    Foo(u32, String),
+}
+//~^^^ ERROR transparent enum needs exactly one non-zero-sized field, but has 2
+
+#[repr(transparent)]
+enum TooManyVariants { //~ ERROR transparent enum needs exactly one variant, but has 2
+    Foo(String),
+    Bar,
+}
+
+#[repr(transparent)]
+union UnitUnion { //~ ERROR transparent union needs exactly one non-zero-sized field, but has 0
+    u: (),
+}
+
+#[repr(transparent)]
+union TooManyFields { //~ ERROR transparent union needs exactly one non-zero-sized field, but has 2
+    u: u32,
+    s: i32
+}
+
 fn main() {}
index 2542a842fe8a920089c7c69c168252fa7cc3c1bb..ea16bdf53783d2c51d5827da594aed7b9bd80ed0 100644 (file)
@@ -3,32 +3,24 @@ error[E0690]: transparent struct needs exactly one non-zero-sized field, but has
    |
 LL | struct NoFields;
    | ^^^^^^^^^^^^^^^^
-   |
-   = note: non-zero-sized field
 
 error[E0690]: transparent struct needs exactly one non-zero-sized field, but has 0
   --> $DIR/repr-transparent.rs:14:1
    |
 LL | struct ContainsOnlyZst(());
    | ^^^^^^^^^^^^^^^^^^^^^^^^^^^
-   |
-   = note: non-zero-sized field
 
 error[E0690]: transparent struct needs exactly one non-zero-sized field, but has 0
   --> $DIR/repr-transparent.rs:17:1
    |
 LL | struct ContainsOnlyZstArray([bool; 0]);
    | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-   |
-   = note: non-zero-sized field
 
 error[E0690]: transparent struct needs exactly one non-zero-sized field, but has 0
   --> $DIR/repr-transparent.rs:20:1
    |
 LL | struct ContainsMultipleZst(PhantomData<*const i32>, NoFields);
    | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-   |
-   = note: non-zero-sized field
 
 error[E0690]: transparent struct needs exactly one non-zero-sized field, but has 2
   --> $DIR/repr-transparent.rs:24:1
@@ -36,7 +28,7 @@ error[E0690]: transparent struct needs exactly one non-zero-sized field, but has
 LL | struct MultipleNonZst(u8, u8);
    | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
    |
-note: non-zero-sized field
+note: the following non-zero-sized fields exist on `MultipleNonZst`:
   --> $DIR/repr-transparent.rs:24:23
    |
 LL | struct MultipleNonZst(u8, u8);
@@ -48,7 +40,7 @@ error[E0690]: transparent struct needs exactly one non-zero-sized field, but has
 LL | pub struct StructWithProjection(f32, <f32 as Mirror>::It);
    | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
    |
-note: non-zero-sized field
+note: the following non-zero-sized fields exist on `StructWithProjection`:
   --> $DIR/repr-transparent.rs:30:33
    |
 LL | pub struct StructWithProjection(f32, <f32 as Mirror>::It);
@@ -66,7 +58,85 @@ error[E0691]: zero-sized field in transparent struct has alignment larger than 1
 LL | struct GenericAlign<T>(ZstAlign32<T>, u32);
    |                        ^^^^^^^^^^^^^
 
-error: aborting due to 8 previous errors
+error[E0084]: unsupported representation for zero-variant enum
+  --> $DIR/repr-transparent.rs:42:1
+   |
+LL | #[repr(transparent)]
+   | ^^^^^^^^^^^^^^^^^^^^
+LL | enum Void {}
+   | ------------ zero-variant enum
+
+error[E0731]: transparent enum needs exactly one variant, but has 0
+  --> $DIR/repr-transparent.rs:43:1
+   |
+LL | enum Void {}
+   | ^^^^^^^^^^^^
+
+error[E0690]: the variant of a transparent enum needs exactly one non-zero-sized field, but has 0
+  --> $DIR/repr-transparent.rs:47:1
+   |
+LL | / enum FieldlessEnum {
+LL | |     Foo,
+LL | | }
+   | |_^
+
+error[E0690]: the variant of a transparent enum needs exactly one non-zero-sized field, but has 2
+  --> $DIR/repr-transparent.rs:52:1
+   |
+LL | / enum TooManyFieldsEnum {
+LL | |     Foo(u32, String),
+LL | | }
+   | |_^
+   |
+note: the following non-zero-sized fields exist on `TooManyFieldsEnum`:
+  --> $DIR/repr-transparent.rs:53:9
+   |
+LL |     Foo(u32, String),
+   |         ^^^  ^^^^^^
+
+error[E0731]: transparent enum needs exactly one variant, but has 2
+  --> $DIR/repr-transparent.rs:58:1
+   |
+LL | / enum TooManyVariants {
+LL | |     Foo(String),
+LL | |     Bar,
+LL | | }
+   | |_^
+   |
+note: the following variants exist on `TooManyVariants`
+  --> $DIR/repr-transparent.rs:59:5
+   |
+LL |     Foo(String),
+   |     ^^^^^^^^^^^
+LL |     Bar,
+   |     ^^^
+
+error[E0690]: transparent union needs exactly one non-zero-sized field, but has 0
+  --> $DIR/repr-transparent.rs:64:1
+   |
+LL | / union UnitUnion {
+LL | |     u: (),
+LL | | }
+   | |_^
+
+error[E0690]: transparent union needs exactly one non-zero-sized field, but has 2
+  --> $DIR/repr-transparent.rs:69:1
+   |
+LL | / union TooManyFields {
+LL | |     u: u32,
+LL | |     s: i32
+LL | | }
+   | |_^
+   |
+note: the following non-zero-sized fields exist on `TooManyFields`:
+  --> $DIR/repr-transparent.rs:70:5
+   |
+LL |     u: u32,
+   |     ^^^^^^
+LL |     s: i32
+   |     ^^^^^^
+
+error: aborting due to 15 previous errors
 
-Some errors have detailed explanations: E0690, E0691.
-For more information about an error, try `rustc --explain E0690`.
+Some errors have detailed explanations: E0084, E0690, E0691, E0731.
+For more information about an error, try `rustc --explain E0084`.
diff --git a/src/test/ui/save-analysis/emit-notifications.nll.stderr b/src/test/ui/save-analysis/emit-notifications.nll.stderr
new file mode 100644 (file)
index 0000000..60734ce
--- /dev/null
@@ -0,0 +1,2 @@
+{"artifact":"$TEST_BUILD_DIR/save-analysis/emit-notifications.nll/save-analysis/libemit_notifications.json","emit":"save-analysis"}
+{"artifact":"$TEST_BUILD_DIR/save-analysis/emit-notifications.nll/libemit_notifications.rlib","emit":"link"}
diff --git a/src/test/ui/save-analysis/emit-notifications.rs b/src/test/ui/save-analysis/emit-notifications.rs
new file mode 100644 (file)
index 0000000..411acbb
--- /dev/null
@@ -0,0 +1,4 @@
+// compile-pass
+// compile-flags: -Zsave-analysis -Zemit-artifact-notifications
+// compile-flags: --crate-type rlib --error-format=json
+pub fn foo() {}
diff --git a/src/test/ui/save-analysis/emit-notifications.stderr b/src/test/ui/save-analysis/emit-notifications.stderr
new file mode 100644 (file)
index 0000000..e16f60f
--- /dev/null
@@ -0,0 +1,2 @@
+{"artifact":"$TEST_BUILD_DIR/save-analysis/emit-notifications/save-analysis/libemit_notifications.json","emit":"save-analysis"}
+{"artifact":"$TEST_BUILD_DIR/save-analysis/emit-notifications/libemit_notifications.rlib","emit":"link"}
index 189a27db382cda69f4344f0d105f9b08db3a6fbc..9db9fcf40f82c0d34bddd0eb2d2322eff055090f 100644 (file)
@@ -5,7 +5,7 @@ LL |     let x = *&x;
    |             ^^^
    |             |
    |             move occurs because value has type `std::sync::atomic::AtomicBool`, which does not implement the `Copy` trait
-   |             help: consider removing the `*`: `&x`
+   |             help: consider borrowing here: `&*&x`
 
 error[E0507]: cannot move out of a shared reference
   --> $DIR/std-uncopyable-atomics.rs:11:13
@@ -14,7 +14,7 @@ LL |     let x = *&x;
    |             ^^^
    |             |
    |             move occurs because value has type `std::sync::atomic::AtomicIsize`, which does not implement the `Copy` trait
-   |             help: consider removing the `*`: `&x`
+   |             help: consider borrowing here: `&*&x`
 
 error[E0507]: cannot move out of a shared reference
   --> $DIR/std-uncopyable-atomics.rs:13:13
@@ -23,7 +23,7 @@ LL |     let x = *&x;
    |             ^^^
    |             |
    |             move occurs because value has type `std::sync::atomic::AtomicUsize`, which does not implement the `Copy` trait
-   |             help: consider removing the `*`: `&x`
+   |             help: consider borrowing here: `&*&x`
 
 error[E0507]: cannot move out of a shared reference
   --> $DIR/std-uncopyable-atomics.rs:15:13
@@ -32,7 +32,7 @@ LL |     let x = *&x;
    |             ^^^
    |             |
    |             move occurs because value has type `std::sync::atomic::AtomicPtr<usize>`, which does not implement the `Copy` trait
-   |             help: consider removing the `*`: `&x`
+   |             help: consider borrowing here: `&*&x`
 
 error: aborting due to 4 previous errors
 
index 8952175e4258a86db4c2f06e290da95a118b059c..674a5ed18cee3ab5033e6d7842dc94d8afb1f5da 100644 (file)
@@ -1,4 +1,4 @@
-#![feature(custom_attribute)]
+#![feature(rustc_attrs)]
 
 macro_rules! stmt_mac {
     () => {
@@ -7,18 +7,19 @@ fn b() {}
 }
 
 fn main() {
-    #[attr]
+    #[rustc_dummy]
     fn a() {}
 
-    #[attr] //~ ERROR attributes on expressions are experimental
+    // Bug: built-in attrs like `rustc_dummy` are not gated on blocks, but other attrs are.
+    #[rustfmt::skip] //~ ERROR attributes on expressions are experimental
     {
 
     }
 
-    #[attr]
+    #[rustc_dummy]
     5;
 
-    #[attr]
+    #[rustc_dummy]
     stmt_mac!();
 }
 
@@ -26,25 +27,25 @@ fn a() {}
 
 #[cfg(unset)]
 fn c() {
-    #[attr]
+    #[rustc_dummy]
     5;
 }
 
 #[cfg(not(unset))]
 fn j() {
-    #[attr]
+    #[rustc_dummy]
     5;
 }
 
 #[cfg_attr(not(unset), cfg(unset))]
 fn d() {
-    #[attr]
+    #[rustc_dummy]
     8;
 }
 
 #[cfg_attr(not(unset), cfg(not(unset)))]
 fn i() {
-    #[attr]
+    #[rustc_dummy]
     8;
 }
 
@@ -53,30 +54,30 @@ fn i() {
 macro_rules! item_mac {
     ($e:ident) => {
         fn $e() {
-            #[attr]
+            #[rustc_dummy]
             42;
 
             #[cfg(unset)]
             fn f() {
-                #[attr]
+                #[rustc_dummy]
                 5;
             }
 
             #[cfg(not(unset))]
             fn k() {
-                #[attr]
+                #[rustc_dummy]
                 5;
             }
 
             #[cfg_attr(not(unset), cfg(unset))]
             fn g() {
-                #[attr]
+                #[rustc_dummy]
                 8;
             }
 
             #[cfg_attr(not(unset), cfg(not(unset)))]
             fn h() {
-                #[attr]
+                #[rustc_dummy]
                 8;
             }
 
@@ -90,51 +91,51 @@ fn h() {
 
 extern {
     #[cfg(unset)]
-    fn x(a: [u8; #[attr] 5]);
-    fn y(a: [u8; #[attr] 5]); //~ ERROR attributes on expressions are experimental
+    fn x(a: [u8; #[rustc_dummy] 5]);
+    fn y(a: [u8; #[rustc_dummy] 5]); //~ ERROR attributes on expressions are experimental
 }
 
 struct Foo;
 impl Foo {
     #[cfg(unset)]
-    const X: u8 = #[attr] 5;
-    const Y: u8 = #[attr] 5; //~ ERROR attributes on expressions are experimental
+    const X: u8 = #[rustc_dummy] 5;
+    const Y: u8 = #[rustc_dummy] 5; //~ ERROR attributes on expressions are experimental
 }
 
 trait Bar {
     #[cfg(unset)]
-    const X: [u8; #[attr] 5];
-    const Y: [u8; #[attr] 5]; //~ ERROR attributes on expressions are experimental
+    const X: [u8; #[rustc_dummy] 5];
+    const Y: [u8; #[rustc_dummy] 5]; //~ ERROR attributes on expressions are experimental
 }
 
 struct Joyce {
     #[cfg(unset)]
-    field: [u8; #[attr] 5],
-    field2: [u8; #[attr] 5] //~ ERROR attributes on expressions are experimental
+    field: [u8; #[rustc_dummy] 5],
+    field2: [u8; #[rustc_dummy] 5] //~ ERROR attributes on expressions are experimental
 }
 
 struct Walky(
-    #[cfg(unset)] [u8; #[attr] 5],
-    [u8; #[attr] 5] //~ ERROR attributes on expressions are experimental
+    #[cfg(unset)] [u8; #[rustc_dummy] 5],
+    [u8; #[rustc_dummy] 5] //~ ERROR attributes on expressions are experimental
 );
 
 enum Mike {
     Happy(
-        #[cfg(unset)] [u8; #[attr] 5],
-        [u8; #[attr] 5] //~ ERROR attributes on expressions are experimental
+        #[cfg(unset)] [u8; #[rustc_dummy] 5],
+        [u8; #[rustc_dummy] 5] //~ ERROR attributes on expressions are experimental
     ),
     Angry {
         #[cfg(unset)]
-        field: [u8; #[attr] 5],
-        field2: [u8; #[attr] 5] //~ ERROR attributes on expressions are experimental
+        field: [u8; #[rustc_dummy] 5],
+        field2: [u8; #[rustc_dummy] 5] //~ ERROR attributes on expressions are experimental
     }
 }
 
 fn pat() {
     match 5 {
         #[cfg(unset)]
-        5 => #[attr] (),
-        6 => #[attr] (), //~ ERROR attributes on expressions are experimental
+        5 => #[rustc_dummy] (),
+        6 => #[rustc_dummy] (), //~ ERROR attributes on expressions are experimental
         _ => (),
     }
 }
index 1b5e989af7b19140f4782af58b8a0b14f7a5b08b..01372cc164b62f63884aa2b31e31e2abcad7df28 100644 (file)
@@ -1,80 +1,80 @@
 error[E0658]: attributes on expressions are experimental
-  --> $DIR/stmt_expr_attrs_no_feature.rs:13:5
+  --> $DIR/stmt_expr_attrs_no_feature.rs:14:5
    |
-LL |     #[attr]
-   |     ^^^^^^^
+LL |     #[rustfmt::skip]
+   |     ^^^^^^^^^^^^^^^^
    |
    = note: for more information, see https://github.com/rust-lang/rust/issues/15701
    = help: add #![feature(stmt_expr_attributes)] to the crate attributes to enable
 
 error[E0658]: attributes on expressions are experimental
-  --> $DIR/stmt_expr_attrs_no_feature.rs:94:18
+  --> $DIR/stmt_expr_attrs_no_feature.rs:95:18
    |
-LL |     fn y(a: [u8; #[attr] 5]);
-   |                  ^^^^^^^
+LL |     fn y(a: [u8; #[rustc_dummy] 5]);
+   |                  ^^^^^^^^^^^^^^
    |
    = note: for more information, see https://github.com/rust-lang/rust/issues/15701
    = help: add #![feature(stmt_expr_attributes)] to the crate attributes to enable
 
 error[E0658]: attributes on expressions are experimental
-  --> $DIR/stmt_expr_attrs_no_feature.rs:101:19
+  --> $DIR/stmt_expr_attrs_no_feature.rs:102:19
    |
-LL |     const Y: u8 = #[attr] 5;
-   |                   ^^^^^^^
+LL |     const Y: u8 = #[rustc_dummy] 5;
+   |                   ^^^^^^^^^^^^^^
    |
    = note: for more information, see https://github.com/rust-lang/rust/issues/15701
    = help: add #![feature(stmt_expr_attributes)] to the crate attributes to enable
 
 error[E0658]: attributes on expressions are experimental
-  --> $DIR/stmt_expr_attrs_no_feature.rs:107:19
+  --> $DIR/stmt_expr_attrs_no_feature.rs:108:19
    |
-LL |     const Y: [u8; #[attr] 5];
-   |                   ^^^^^^^
+LL |     const Y: [u8; #[rustc_dummy] 5];
+   |                   ^^^^^^^^^^^^^^
    |
    = note: for more information, see https://github.com/rust-lang/rust/issues/15701
    = help: add #![feature(stmt_expr_attributes)] to the crate attributes to enable
 
 error[E0658]: attributes on expressions are experimental
-  --> $DIR/stmt_expr_attrs_no_feature.rs:113:18
+  --> $DIR/stmt_expr_attrs_no_feature.rs:114:18
    |
-LL |     field2: [u8; #[attr] 5]
-   |                  ^^^^^^^
+LL |     field2: [u8; #[rustc_dummy] 5]
+   |                  ^^^^^^^^^^^^^^
    |
    = note: for more information, see https://github.com/rust-lang/rust/issues/15701
    = help: add #![feature(stmt_expr_attributes)] to the crate attributes to enable
 
 error[E0658]: attributes on expressions are experimental
-  --> $DIR/stmt_expr_attrs_no_feature.rs:118:10
+  --> $DIR/stmt_expr_attrs_no_feature.rs:119:10
    |
-LL |     [u8; #[attr] 5]
-   |          ^^^^^^^
+LL |     [u8; #[rustc_dummy] 5]
+   |          ^^^^^^^^^^^^^^
    |
    = note: for more information, see https://github.com/rust-lang/rust/issues/15701
    = help: add #![feature(stmt_expr_attributes)] to the crate attributes to enable
 
 error[E0658]: attributes on expressions are experimental
-  --> $DIR/stmt_expr_attrs_no_feature.rs:124:14
+  --> $DIR/stmt_expr_attrs_no_feature.rs:125:14
    |
-LL |         [u8; #[attr] 5]
-   |              ^^^^^^^
+LL |         [u8; #[rustc_dummy] 5]
+   |              ^^^^^^^^^^^^^^
    |
    = note: for more information, see https://github.com/rust-lang/rust/issues/15701
    = help: add #![feature(stmt_expr_attributes)] to the crate attributes to enable
 
 error[E0658]: attributes on expressions are experimental
-  --> $DIR/stmt_expr_attrs_no_feature.rs:129:22
+  --> $DIR/stmt_expr_attrs_no_feature.rs:130:22
    |
-LL |         field2: [u8; #[attr] 5]
-   |                      ^^^^^^^
+LL |         field2: [u8; #[rustc_dummy] 5]
+   |                      ^^^^^^^^^^^^^^
    |
    = note: for more information, see https://github.com/rust-lang/rust/issues/15701
    = help: add #![feature(stmt_expr_attributes)] to the crate attributes to enable
 
 error[E0658]: attributes on expressions are experimental
-  --> $DIR/stmt_expr_attrs_no_feature.rs:137:14
+  --> $DIR/stmt_expr_attrs_no_feature.rs:138:14
    |
-LL |         6 => #[attr] (),
-   |              ^^^^^^^
+LL |         6 => #[rustc_dummy] (),
+   |              ^^^^^^^^^^^^^^
    |
    = note: for more information, see https://github.com/rust-lang/rust/issues/15701
    = help: add #![feature(stmt_expr_attributes)] to the crate attributes to enable
index bd2d6623d910435d1f6093ee552a4f22b73e8b31..a6531490c015914091f13b6423d65a64611090a5 100644 (file)
@@ -1,15 +1,15 @@
-#![feature(custom_attribute)]
+#![feature(rustc_attrs)]
 
-#[my_attr = 1usize] //~ ERROR: suffixed literals are not allowed in attributes
-#[my_attr = 1u8] //~ ERROR: suffixed literals are not allowed in attributes
-#[my_attr = 1u16] //~ ERROR: suffixed literals are not allowed in attributes
-#[my_attr = 1u32] //~ ERROR: suffixed literals are not allowed in attributes
-#[my_attr = 1u64] //~ ERROR: suffixed literals are not allowed in attributes
-#[my_attr = 1isize] //~ ERROR: suffixed literals are not allowed in attributes
-#[my_attr = 1i8] //~ ERROR: suffixed literals are not allowed in attributes
-#[my_attr = 1i16] //~ ERROR: suffixed literals are not allowed in attributes
-#[my_attr = 1i32] //~ ERROR: suffixed literals are not allowed in attributes
-#[my_attr = 1i64] //~ ERROR: suffixed literals are not allowed in attributes
-#[my_attr = 1.0f32] //~ ERROR: suffixed literals are not allowed in attributes
-#[my_attr = 1.0f64] //~ ERROR: suffixed literals are not allowed in attributes
-fn main() { }
+#[rustc_dummy = 1usize] //~ ERROR: suffixed literals are not allowed in attributes
+#[rustc_dummy = 1u8] //~ ERROR: suffixed literals are not allowed in attributes
+#[rustc_dummy = 1u16] //~ ERROR: suffixed literals are not allowed in attributes
+#[rustc_dummy = 1u32] //~ ERROR: suffixed literals are not allowed in attributes
+#[rustc_dummy = 1u64] //~ ERROR: suffixed literals are not allowed in attributes
+#[rustc_dummy = 1isize] //~ ERROR: suffixed literals are not allowed in attributes
+#[rustc_dummy = 1i8] //~ ERROR: suffixed literals are not allowed in attributes
+#[rustc_dummy = 1i16] //~ ERROR: suffixed literals are not allowed in attributes
+#[rustc_dummy = 1i32] //~ ERROR: suffixed literals are not allowed in attributes
+#[rustc_dummy = 1i64] //~ ERROR: suffixed literals are not allowed in attributes
+#[rustc_dummy = 1.0f32] //~ ERROR: suffixed literals are not allowed in attributes
+#[rustc_dummy = 1.0f64] //~ ERROR: suffixed literals are not allowed in attributes
+fn main() {}
index 495404af3e8a5498630d99575f4cb85afc1673fd..83de173b1a703a4e553c0b160022108436947e73 100644 (file)
@@ -1,96 +1,96 @@
 error: suffixed literals are not allowed in attributes
-  --> $DIR/suffixed-literal-meta.rs:3:13
+  --> $DIR/suffixed-literal-meta.rs:3:17
    |
-LL | #[my_attr = 1usize]
-   |             ^^^^^^
+LL | #[rustc_dummy = 1usize]
+   |                 ^^^^^^
    |
    = help: instead of using a suffixed literal (1u8, 1.0f32, etc.), use an unsuffixed version (1, 1.0, etc.).
 
 error: suffixed literals are not allowed in attributes
-  --> $DIR/suffixed-literal-meta.rs:4:13
+  --> $DIR/suffixed-literal-meta.rs:4:17
    |
-LL | #[my_attr = 1u8]
-   |             ^^^
+LL | #[rustc_dummy = 1u8]
+   |                 ^^^
    |
    = help: instead of using a suffixed literal (1u8, 1.0f32, etc.), use an unsuffixed version (1, 1.0, etc.).
 
 error: suffixed literals are not allowed in attributes
-  --> $DIR/suffixed-literal-meta.rs:5:13
+  --> $DIR/suffixed-literal-meta.rs:5:17
    |
-LL | #[my_attr = 1u16]
-   |             ^^^^
+LL | #[rustc_dummy = 1u16]
+   |                 ^^^^
    |
    = help: instead of using a suffixed literal (1u8, 1.0f32, etc.), use an unsuffixed version (1, 1.0, etc.).
 
 error: suffixed literals are not allowed in attributes
-  --> $DIR/suffixed-literal-meta.rs:6:13
+  --> $DIR/suffixed-literal-meta.rs:6:17
    |
-LL | #[my_attr = 1u32]
-   |             ^^^^
+LL | #[rustc_dummy = 1u32]
+   |                 ^^^^
    |
    = help: instead of using a suffixed literal (1u8, 1.0f32, etc.), use an unsuffixed version (1, 1.0, etc.).
 
 error: suffixed literals are not allowed in attributes
-  --> $DIR/suffixed-literal-meta.rs:7:13
+  --> $DIR/suffixed-literal-meta.rs:7:17
    |
-LL | #[my_attr = 1u64]
-   |             ^^^^
+LL | #[rustc_dummy = 1u64]
+   |                 ^^^^
    |
    = help: instead of using a suffixed literal (1u8, 1.0f32, etc.), use an unsuffixed version (1, 1.0, etc.).
 
 error: suffixed literals are not allowed in attributes
-  --> $DIR/suffixed-literal-meta.rs:8:13
+  --> $DIR/suffixed-literal-meta.rs:8:17
    |
-LL | #[my_attr = 1isize]
-   |             ^^^^^^
+LL | #[rustc_dummy = 1isize]
+   |                 ^^^^^^
    |
    = help: instead of using a suffixed literal (1u8, 1.0f32, etc.), use an unsuffixed version (1, 1.0, etc.).
 
 error: suffixed literals are not allowed in attributes
-  --> $DIR/suffixed-literal-meta.rs:9:13
+  --> $DIR/suffixed-literal-meta.rs:9:17
    |
-LL | #[my_attr = 1i8]
-   |             ^^^
+LL | #[rustc_dummy = 1i8]
+   |                 ^^^
    |
    = help: instead of using a suffixed literal (1u8, 1.0f32, etc.), use an unsuffixed version (1, 1.0, etc.).
 
 error: suffixed literals are not allowed in attributes
-  --> $DIR/suffixed-literal-meta.rs:10:13
+  --> $DIR/suffixed-literal-meta.rs:10:17
    |
-LL | #[my_attr = 1i16]
-   |             ^^^^
+LL | #[rustc_dummy = 1i16]
+   |                 ^^^^
    |
    = help: instead of using a suffixed literal (1u8, 1.0f32, etc.), use an unsuffixed version (1, 1.0, etc.).
 
 error: suffixed literals are not allowed in attributes
-  --> $DIR/suffixed-literal-meta.rs:11:13
+  --> $DIR/suffixed-literal-meta.rs:11:17
    |
-LL | #[my_attr = 1i32]
-   |             ^^^^
+LL | #[rustc_dummy = 1i32]
+   |                 ^^^^
    |
    = help: instead of using a suffixed literal (1u8, 1.0f32, etc.), use an unsuffixed version (1, 1.0, etc.).
 
 error: suffixed literals are not allowed in attributes
-  --> $DIR/suffixed-literal-meta.rs:12:13
+  --> $DIR/suffixed-literal-meta.rs:12:17
    |
-LL | #[my_attr = 1i64]
-   |             ^^^^
+LL | #[rustc_dummy = 1i64]
+   |                 ^^^^
    |
    = help: instead of using a suffixed literal (1u8, 1.0f32, etc.), use an unsuffixed version (1, 1.0, etc.).
 
 error: suffixed literals are not allowed in attributes
-  --> $DIR/suffixed-literal-meta.rs:13:13
+  --> $DIR/suffixed-literal-meta.rs:13:17
    |
-LL | #[my_attr = 1.0f32]
-   |             ^^^^^^
+LL | #[rustc_dummy = 1.0f32]
+   |                 ^^^^^^
    |
    = help: instead of using a suffixed literal (1u8, 1.0f32, etc.), use an unsuffixed version (1, 1.0, etc.).
 
 error: suffixed literals are not allowed in attributes
-  --> $DIR/suffixed-literal-meta.rs:14:13
+  --> $DIR/suffixed-literal-meta.rs:14:17
    |
-LL | #[my_attr = 1.0f64]
-   |             ^^^^^^
+LL | #[rustc_dummy = 1.0f64]
+   |                 ^^^^^^
    |
    = help: instead of using a suffixed literal (1u8, 1.0f32, etc.), use an unsuffixed version (1, 1.0, etc.).
 
index 31ab1a6639acc7e95294754638864141bae0621b..69b303a66237ebf8184d8737a78551c2a1990340 100644 (file)
@@ -37,26 +37,26 @@ pub fn main() {
 
     let X(_t) = *s;
     //~^ ERROR cannot move
-    //~| HELP consider removing the `*`
+    //~| HELP consider borrowing here
     //~| SUGGESTION s
     if let Either::One(_t) = *r { }
     //~^ ERROR cannot move
-    //~| HELP consider removing the `*`
+    //~| HELP consider borrowing here
     //~| SUGGESTION r
     while let Either::One(_t) = *r { }
     //~^ ERROR cannot move
-    //~| HELP consider removing the `*`
+    //~| HELP consider borrowing here
     //~| SUGGESTION r
     match *r {
         //~^ ERROR cannot move
-        //~| HELP consider removing the `*`
+        //~| HELP consider borrowing here
         //~| SUGGESTION r
         Either::One(_t)
         | Either::Two(_t) => (),
     }
     match *r {
         //~^ ERROR cannot move
-        //~| HELP consider removing the `*`
+        //~| HELP consider borrowing here
         //~| SUGGESTION r
         Either::One(_t) => (),
         Either::Two(ref _t) => (),
@@ -65,26 +65,26 @@ pub fn main() {
 
     let X(_t) = *sm;
     //~^ ERROR cannot move
-    //~| HELP consider removing the `*`
+    //~| HELP consider borrowing here
     //~| SUGGESTION sm
     if let Either::One(_t) = *rm { }
     //~^ ERROR cannot move
-    //~| HELP consider removing the `*`
+    //~| HELP consider borrowing here
     //~| SUGGESTION rm
     while let Either::One(_t) = *rm { }
     //~^ ERROR cannot move
-    //~| HELP consider removing the `*`
+    //~| HELP consider borrowing here
     //~| SUGGESTION rm
     match *rm {
         //~^ ERROR cannot move
-        //~| HELP consider removing the `*`
+        //~| HELP consider borrowing here
         //~| SUGGESTION rm
         Either::One(_t)
         | Either::Two(_t) => (),
     }
     match *rm {
         //~^ ERROR cannot move
-        //~| HELP consider removing the `*`
+        //~| HELP consider borrowing here
         //~| SUGGESTION rm
         Either::One(_t) => (),
         Either::Two(ref _t) => (),
@@ -92,7 +92,7 @@ pub fn main() {
     }
     match *rm {
         //~^ ERROR cannot move
-        //~| HELP consider removing the `*`
+        //~| HELP consider borrowing here
         //~| SUGGESTION rm
         Either::One(_t) => (),
         Either::Two(ref mut _t) => (),
index bde3afa3840a25a0f23c10d28bdae4be92c3550c..cb3ce5991aeeeeca5e103c6c846e935994d1a544 100644 (file)
@@ -2,7 +2,7 @@ error[E0507]: cannot move out of `s.0` which is behind a shared reference
   --> $DIR/simple.rs:38:17
    |
 LL |     let X(_t) = *s;
-   |           --    ^^ help: consider removing the `*`: `s`
+   |           --    ^^ help: consider borrowing here: `&*s`
    |           |
    |           data moved here
    |           move occurs because `_t` has type `Y`, which does not implement the `Copy` trait
@@ -11,7 +11,7 @@ error[E0507]: cannot move out of `r.0` which is behind a shared reference
   --> $DIR/simple.rs:42:30
    |
 LL |     if let Either::One(_t) = *r { }
-   |                        --    ^^ help: consider removing the `*`: `r`
+   |                        --    ^^ help: consider borrowing here: `&*r`
    |                        |
    |                        data moved here
    |                        move occurs because `_t` has type `X`, which does not implement the `Copy` trait
@@ -20,7 +20,7 @@ error[E0507]: cannot move out of `r.0` which is behind a shared reference
   --> $DIR/simple.rs:46:33
    |
 LL |     while let Either::One(_t) = *r { }
-   |                           --    ^^ help: consider removing the `*`: `r`
+   |                           --    ^^ help: consider borrowing here: `&*r`
    |                           |
    |                           data moved here
    |                           move occurs because `_t` has type `X`, which does not implement the `Copy` trait
@@ -29,7 +29,7 @@ error[E0507]: cannot move out of `r.0` which is behind a shared reference
   --> $DIR/simple.rs:50:11
    |
 LL |     match *r {
-   |           ^^ help: consider removing the `*`: `r`
+   |           ^^ help: consider borrowing here: `&*r`
 ...
 LL |         Either::One(_t)
    |                     --
@@ -41,7 +41,7 @@ error[E0507]: cannot move out of `r.0` which is behind a shared reference
   --> $DIR/simple.rs:57:11
    |
 LL |     match *r {
-   |           ^^ help: consider removing the `*`: `r`
+   |           ^^ help: consider borrowing here: `&*r`
 ...
 LL |         Either::One(_t) => (),
    |                     --
@@ -53,7 +53,7 @@ error[E0507]: cannot move out of `sm.0` which is behind a mutable reference
   --> $DIR/simple.rs:66:17
    |
 LL |     let X(_t) = *sm;
-   |           --    ^^^ help: consider removing the `*`: `sm`
+   |           --    ^^^ help: consider borrowing here: `&*sm`
    |           |
    |           data moved here
    |           move occurs because `_t` has type `Y`, which does not implement the `Copy` trait
@@ -62,7 +62,7 @@ error[E0507]: cannot move out of `rm.0` which is behind a mutable reference
   --> $DIR/simple.rs:70:30
    |
 LL |     if let Either::One(_t) = *rm { }
-   |                        --    ^^^ help: consider removing the `*`: `rm`
+   |                        --    ^^^ help: consider borrowing here: `&*rm`
    |                        |
    |                        data moved here
    |                        move occurs because `_t` has type `X`, which does not implement the `Copy` trait
@@ -71,7 +71,7 @@ error[E0507]: cannot move out of `rm.0` which is behind a mutable reference
   --> $DIR/simple.rs:74:33
    |
 LL |     while let Either::One(_t) = *rm { }
-   |                           --    ^^^ help: consider removing the `*`: `rm`
+   |                           --    ^^^ help: consider borrowing here: `&*rm`
    |                           |
    |                           data moved here
    |                           move occurs because `_t` has type `X`, which does not implement the `Copy` trait
@@ -80,7 +80,7 @@ error[E0507]: cannot move out of `rm.0` which is behind a mutable reference
   --> $DIR/simple.rs:78:11
    |
 LL |     match *rm {
-   |           ^^^ help: consider removing the `*`: `rm`
+   |           ^^^ help: consider borrowing here: `&*rm`
 ...
 LL |         Either::One(_t)
    |                     --
@@ -92,7 +92,7 @@ error[E0507]: cannot move out of `rm.0` which is behind a mutable reference
   --> $DIR/simple.rs:85:11
    |
 LL |     match *rm {
-   |           ^^^ help: consider removing the `*`: `rm`
+   |           ^^^ help: consider borrowing here: `&*rm`
 ...
 LL |         Either::One(_t) => (),
    |                     --
@@ -104,7 +104,7 @@ error[E0507]: cannot move out of `rm.0` which is behind a mutable reference
   --> $DIR/simple.rs:93:11
    |
 LL |     match *rm {
-   |           ^^^ help: consider removing the `*`: `rm`
+   |           ^^^ help: consider borrowing here: `&*rm`
 ...
 LL |         Either::One(_t) => (),
    |                     --
index 33cffcefd898a239d9f95b7da9499e47ddb66c88..ce902b7e7d28a8d9fb7513114f1c1e985b58b130 100644 (file)
@@ -1,12 +1,11 @@
-#![feature(custom_attribute)]
-
 type A = rustfmt; //~ ERROR expected type, found tool module `rustfmt`
 type B = rustfmt::skip; //~ ERROR expected type, found tool attribute `rustfmt::skip`
 
 #[derive(rustfmt)] //~ ERROR cannot find derive macro `rustfmt` in this scope
 struct S;
 
-#[rustfmt] // OK, interpreted as a custom attribute
+// Interpreted as a feature gated custom attribute
+#[rustfmt] //~ ERROR attribute `rustfmt` is currently unknown
 fn check() {}
 
 #[rustfmt::skip] // OK
index 1c799b41c5a6ca9a3a46957cefbecbae3e2efaa8..1df9821f2444092a0f05e9a917b1d0502cb60977 100644 (file)
@@ -1,39 +1,49 @@
+error[E0658]: The attribute `rustfmt` is currently unknown to the compiler and may have meaning added to it in the future
+  --> $DIR/tool-attributes-misplaced-1.rs:8:3
+   |
+LL | #[rustfmt]
+   |   ^^^^^^^
+   |
+   = note: for more information, see https://github.com/rust-lang/rust/issues/29642
+   = help: add #![feature(custom_attribute)] to the crate attributes to enable
+
 error: cannot find derive macro `rustfmt` in this scope
-  --> $DIR/tool-attributes-misplaced-1.rs:6:10
+  --> $DIR/tool-attributes-misplaced-1.rs:4:10
    |
 LL | #[derive(rustfmt)]
    |          ^^^^^^^
 
 error: cannot find macro `rustfmt!` in this scope
-  --> $DIR/tool-attributes-misplaced-1.rs:15:5
+  --> $DIR/tool-attributes-misplaced-1.rs:14:5
    |
 LL |     rustfmt!();
    |     ^^^^^^^
 
 error[E0573]: expected type, found tool module `rustfmt`
-  --> $DIR/tool-attributes-misplaced-1.rs:3:10
+  --> $DIR/tool-attributes-misplaced-1.rs:1:10
    |
 LL | type A = rustfmt;
    |          ^^^^^^^ not a type
 
 error[E0573]: expected type, found tool attribute `rustfmt::skip`
-  --> $DIR/tool-attributes-misplaced-1.rs:4:10
+  --> $DIR/tool-attributes-misplaced-1.rs:2:10
    |
 LL | type B = rustfmt::skip;
    |          ^^^^^^^^^^^^^ not a type
 
 error[E0423]: expected value, found tool module `rustfmt`
-  --> $DIR/tool-attributes-misplaced-1.rs:14:5
+  --> $DIR/tool-attributes-misplaced-1.rs:13:5
    |
 LL |     rustfmt;
    |     ^^^^^^^ not a value
 
 error[E0423]: expected value, found tool attribute `rustfmt::skip`
-  --> $DIR/tool-attributes-misplaced-1.rs:17:5
+  --> $DIR/tool-attributes-misplaced-1.rs:16:5
    |
 LL |     rustfmt::skip;
    |     ^^^^^^^^^^^^^ not a value
 
-error: aborting due to 6 previous errors
+error: aborting due to 7 previous errors
 
-For more information about this error, try `rustc --explain E0423`.
+Some errors have detailed explanations: E0423, E0658.
+For more information about an error, try `rustc --explain E0423`.
diff --git a/src/test/ui/traits/trait-with-dst.rs b/src/test/ui/traits/trait-with-dst.rs
new file mode 100644 (file)
index 0000000..86d6585
--- /dev/null
+++ b/src/test/ui/traits/trait-with-dst.rs
@@ -0,0 +1,22 @@
+// compile-pass
+// #55266
+
+struct VTable<DST: ?Sized> {
+    _to_dst_ptr: fn(*mut ()) -> *mut DST,
+}
+
+trait HasVTableFor<DST: ?Sized + 'static> {
+    const VTABLE: &'static VTable<DST>;
+}
+
+impl<T, DST: ?Sized + 'static> HasVTableFor<DST> for T {
+    const VTABLE: &'static VTable<DST> = &VTable {
+        _to_dst_ptr: |_: *mut ()| unsafe { std::mem::zeroed() },
+    };
+}
+
+pub fn push<DST: ?Sized + 'static, T>() {
+    <T as HasVTableFor<DST>>::VTABLE;
+}
+
+fn main() {}
index 40d9a50f1fa4f0993b2c1039383cf4522dc4e70f..c60817a849a3bb2912ad82c3ce3804998a9caae9 100644 (file)
@@ -9,7 +9,7 @@ note: lint level defined here
    |
 LL | #![deny(improper_ctypes)]
    |         ^^^^^^^^^^^^^^^
-   = help: consider adding a #[repr(C)] attribute to this union
+   = help: consider adding a `#[repr(C)]` or `#[repr(transparent)]` attribute to this union
 note: type defined here
   --> $DIR/union-repr-c.rs:9:1
    |
index 4798f7b396cd6fda36c58d38aeff5381053f251c..b07ab96bce13fe55ce2a4d58c9926c0c7f456a3c 100644 (file)
@@ -1,8 +1,8 @@
 // compile-pass
 
-#![feature(custom_attribute)]
+#![feature(rustc_attrs)]
 
-#[my_attr(a b c d)]
-#[my_attr[a b c d]]
-#[my_attr{a b c d}]
+#[rustc_dummy(a b c d)]
+#[rustc_dummy[a b c d]]
+#[rustc_dummy{a b c d}]
 fn main() {}
index 810732a977578e917b09bd4bad1d421355fe85fe..cb8ac0e6a05c0837433fae4b7ff3df756421c165 100644 (file)
@@ -1,49 +1,48 @@
 #![deny(unused_attributes)]
-#![allow(dead_code, unused_imports, unused_extern_crates)]
-#![feature(custom_attribute)]
+#![feature(rustc_attrs)]
 
-#![foo] //~ ERROR unused attribute
+#![rustc_dummy] //~ ERROR unused attribute
 
-#[foo] //~ ERROR unused attribute
+#[rustc_dummy] //~ ERROR unused attribute
 extern crate core;
 
-#[foo] //~ ERROR unused attribute
+#[rustc_dummy] //~ ERROR unused attribute
 use std::collections;
 
-#[foo] //~ ERROR unused attribute
+#[rustc_dummy] //~ ERROR unused attribute
 extern "C" {
-    #[foo] //~ ERROR unused attribute
+    #[rustc_dummy] //~ ERROR unused attribute
     fn foo();
 }
 
-#[foo] //~ ERROR unused attribute
+#[rustc_dummy] //~ ERROR unused attribute
 mod foo {
-    #[foo] //~ ERROR unused attribute
+    #[rustc_dummy] //~ ERROR unused attribute
     pub enum Foo {
-        #[foo] //~ ERROR unused attribute
+        #[rustc_dummy] //~ ERROR unused attribute
         Bar,
     }
 }
 
-#[foo] //~ ERROR unused attribute
+#[rustc_dummy] //~ ERROR unused attribute
 fn bar(f: foo::Foo) {
     match f {
-        #[foo] //~ ERROR unused attribute
+        #[rustc_dummy] //~ ERROR unused attribute
         foo::Foo::Bar => {}
     }
 }
 
-#[foo] //~ ERROR unused attribute
+#[rustc_dummy] //~ ERROR unused attribute
 struct Foo {
-    #[foo] //~ ERROR unused attribute
+    #[rustc_dummy] //~ ERROR unused attribute
     a: isize
 }
 
-#[foo] //~ ERROR unused attribute
+#[rustc_dummy] //~ ERROR unused attribute
 trait Baz {
-    #[foo] //~ ERROR unused attribute
+    #[rustc_dummy] //~ ERROR unused attribute
     fn blah(&self);
-    #[foo] //~ ERROR unused attribute
+    #[rustc_dummy] //~ ERROR unused attribute
     fn blah2(&self) {}
 }
 
index 6f096d741444f90430fcfbe0ac5e94cd143e0aaa..956b870715eb2fcd6280c154596b2f7b402cf3b7 100644 (file)
@@ -1,8 +1,8 @@
 error: unused attribute
-  --> $DIR/unused-attr.rs:7:1
+  --> $DIR/unused-attr.rs:6:1
    |
-LL | #[foo]
-   | ^^^^^^
+LL | #[rustc_dummy]
+   | ^^^^^^^^^^^^^^
    |
 note: lint level defined here
   --> $DIR/unused-attr.rs:1:9
@@ -11,88 +11,88 @@ LL | #![deny(unused_attributes)]
    |         ^^^^^^^^^^^^^^^^^
 
 error: unused attribute
-  --> $DIR/unused-attr.rs:10:1
+  --> $DIR/unused-attr.rs:9:1
    |
-LL | #[foo]
-   | ^^^^^^
+LL | #[rustc_dummy]
+   | ^^^^^^^^^^^^^^
 
 error: unused attribute
-  --> $DIR/unused-attr.rs:15:5
+  --> $DIR/unused-attr.rs:14:5
    |
-LL |     #[foo]
-   |     ^^^^^^
+LL |     #[rustc_dummy]
+   |     ^^^^^^^^^^^^^^
 
 error: unused attribute
-  --> $DIR/unused-attr.rs:13:1
+  --> $DIR/unused-attr.rs:12:1
    |
-LL | #[foo]
-   | ^^^^^^
+LL | #[rustc_dummy]
+   | ^^^^^^^^^^^^^^
 
 error: unused attribute
-  --> $DIR/unused-attr.rs:23:9
+  --> $DIR/unused-attr.rs:22:9
    |
-LL |         #[foo]
-   |         ^^^^^^
+LL |         #[rustc_dummy]
+   |         ^^^^^^^^^^^^^^
 
 error: unused attribute
-  --> $DIR/unused-attr.rs:21:5
+  --> $DIR/unused-attr.rs:20:5
    |
-LL |     #[foo]
-   |     ^^^^^^
+LL |     #[rustc_dummy]
+   |     ^^^^^^^^^^^^^^
 
 error: unused attribute
-  --> $DIR/unused-attr.rs:19:1
+  --> $DIR/unused-attr.rs:18:1
    |
-LL | #[foo]
-   | ^^^^^^
+LL | #[rustc_dummy]
+   | ^^^^^^^^^^^^^^
 
 error: unused attribute
-  --> $DIR/unused-attr.rs:31:9
+  --> $DIR/unused-attr.rs:30:9
    |
-LL |         #[foo]
-   |         ^^^^^^
+LL |         #[rustc_dummy]
+   |         ^^^^^^^^^^^^^^
 
 error: unused attribute
-  --> $DIR/unused-attr.rs:28:1
+  --> $DIR/unused-attr.rs:27:1
    |
-LL | #[foo]
-   | ^^^^^^
+LL | #[rustc_dummy]
+   | ^^^^^^^^^^^^^^
 
 error: unused attribute
-  --> $DIR/unused-attr.rs:38:5
+  --> $DIR/unused-attr.rs:37:5
    |
-LL |     #[foo]
-   |     ^^^^^^
+LL |     #[rustc_dummy]
+   |     ^^^^^^^^^^^^^^
 
 error: unused attribute
-  --> $DIR/unused-attr.rs:36:1
+  --> $DIR/unused-attr.rs:35:1
    |
-LL | #[foo]
-   | ^^^^^^
+LL | #[rustc_dummy]
+   | ^^^^^^^^^^^^^^
 
 error: unused attribute
-  --> $DIR/unused-attr.rs:44:5
+  --> $DIR/unused-attr.rs:43:5
    |
-LL |     #[foo]
-   |     ^^^^^^
+LL |     #[rustc_dummy]
+   |     ^^^^^^^^^^^^^^
 
 error: unused attribute
-  --> $DIR/unused-attr.rs:46:5
+  --> $DIR/unused-attr.rs:45:5
    |
-LL |     #[foo]
-   |     ^^^^^^
+LL |     #[rustc_dummy]
+   |     ^^^^^^^^^^^^^^
 
 error: unused attribute
-  --> $DIR/unused-attr.rs:42:1
+  --> $DIR/unused-attr.rs:41:1
    |
-LL | #[foo]
-   | ^^^^^^
+LL | #[rustc_dummy]
+   | ^^^^^^^^^^^^^^
 
 error: unused attribute
-  --> $DIR/unused-attr.rs:5:1
+  --> $DIR/unused-attr.rs:4:1
    |
-LL | #![foo]
-   | ^^^^^^^
+LL | #![rustc_dummy]
+   | ^^^^^^^^^^^^^^^
 
 error: aborting due to 15 previous errors
 
index 71be6f62fa920c0bd10cdf3a29aeb8c6719a8075..c0dbd34ba99a949ece25c297a4a377685eb89c7c 160000 (submodule)
@@ -1 +1 @@
-Subproject commit 71be6f62fa920c0bd10cdf3a29aeb8c6719a8075
+Subproject commit c0dbd34ba99a949ece25c297a4a377685eb89c7c
index 9692ca8fd82a8f96a4113dc4b88c1fb1d79c1c60..483dcbc73f9923e98c71ec9df11ee3d0d5cfb467 160000 (submodule)
@@ -1 +1 @@
-Subproject commit 9692ca8fd82a8f96a4113dc4b88c1fb1d79c1c60
+Subproject commit 483dcbc73f9923e98c71ec9df11ee3d0d5cfb467