git.lizzy.rs Git - rust.git/commitdiff
Rollup merge of #106886 - dtolnay:fastinstall, r=Mark-Simulacrum
author Yuki Okushi <jtitor@2k36.org>
Mon, 23 Jan 2023 10:29:59 +0000 (19:29 +0900)
committer GitHub <noreply@github.com>
Mon, 23 Jan 2023 10:29:59 +0000 (19:29 +0900)
Make stage2 rustdoc and proc-macro-srv disableable in x.py install

Rustdoc will still be built if `[build] tools = ["rustdoc"]` is set, and rust-analyzer-proc-macro-srv will still be built if `[build] tools = ["rust-analyzer"]` is set; leaving them out of the `tools` list is what skips them.

On my machine, skipping these tools speeds up `x.py install` from 7m15s to 6m08s (0m43s for rustdoc and 0m24s for rust-analyzer-proc-macro-srv). This is a significant speedup, since I never use rust-analyzer-proc-macro-srv and practically never need a custom build of rustdoc.
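
For reference, a minimal sketch of a `config.toml` that skips both tools during `x.py install`, written in the style of the shell examples in the README below; the other tool names kept in the list and the `[install] prefix` value are illustrative assumptions, not part of this change:

```sh
# Illustrative sketch (assumptions, not part of this commit): write a
# config.toml whose [build] tools list omits "rustdoc" and "rust-analyzer",
# so `x.py install` skips stage2 rustdoc and rust-analyzer-proc-macro-srv
# as described above.
cat > config.toml <<'EOF'
[build]
tools = ["cargo", "clippy", "rustfmt"]

[install]
prefix = "/usr/local"
EOF

./x.py install
```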

904 files changed:
.github/workflows/ci.yml
.mailmap
Cargo.lock
README.md
compiler/rustc_abi/src/lib.rs
compiler/rustc_ast/src/ast.rs
compiler/rustc_ast/src/util/parser.rs
compiler/rustc_ast_lowering/src/index.rs
compiler/rustc_ast_lowering/src/item.rs
compiler/rustc_ast_lowering/src/lib.rs
compiler/rustc_ast_passes/src/ast_validation.rs
compiler/rustc_ast_passes/src/errors.rs
compiler/rustc_ast_pretty/src/pprust/state/expr.rs
compiler/rustc_borrowck/src/diagnostics/conflict_errors.rs
compiler/rustc_borrowck/src/diagnostics/region_errors.rs
compiler/rustc_borrowck/src/lib.rs
compiler/rustc_borrowck/src/member_constraints.rs
compiler/rustc_borrowck/src/nll.rs
compiler/rustc_borrowck/src/place_ext.rs
compiler/rustc_borrowck/src/places_conflict.rs
compiler/rustc_borrowck/src/region_infer/mod.rs
compiler/rustc_borrowck/src/region_infer/opaque_types.rs
compiler/rustc_borrowck/src/session_diagnostics.rs
compiler/rustc_borrowck/src/type_check/canonical.rs
compiler/rustc_borrowck/src/type_check/constraint_conversion.rs
compiler/rustc_borrowck/src/type_check/free_region_relations.rs
compiler/rustc_borrowck/src/type_check/liveness/trace.rs
compiler/rustc_borrowck/src/type_check/mod.rs
compiler/rustc_borrowck/src/universal_regions.rs
compiler/rustc_builtin_macros/src/deriving/clone.rs
compiler/rustc_builtin_macros/src/deriving/cmp/eq.rs
compiler/rustc_builtin_macros/src/deriving/cmp/ord.rs
compiler/rustc_builtin_macros/src/deriving/cmp/partial_eq.rs
compiler/rustc_builtin_macros/src/deriving/cmp/partial_ord.rs
compiler/rustc_builtin_macros/src/deriving/debug.rs
compiler/rustc_builtin_macros/src/deriving/decodable.rs
compiler/rustc_builtin_macros/src/deriving/default.rs
compiler/rustc_builtin_macros/src/deriving/encodable.rs
compiler/rustc_builtin_macros/src/deriving/generic/mod.rs
compiler/rustc_builtin_macros/src/deriving/hash.rs
compiler/rustc_builtin_macros/src/env.rs
compiler/rustc_builtin_macros/src/format.rs
compiler/rustc_builtin_macros/src/format_foreign.rs
compiler/rustc_codegen_cranelift/src/constant.rs
compiler/rustc_codegen_cranelift/src/debuginfo/mod.rs
compiler/rustc_codegen_llvm/src/abi.rs
compiler/rustc_codegen_llvm/src/asm.rs
compiler/rustc_codegen_llvm/src/back/archive.rs
compiler/rustc_codegen_llvm/src/back/write.rs
compiler/rustc_codegen_llvm/src/builder.rs
compiler/rustc_codegen_llvm/src/callee.rs
compiler/rustc_codegen_llvm/src/consts.rs
compiler/rustc_codegen_llvm/src/coverageinfo/mapgen.rs
compiler/rustc_codegen_llvm/src/debuginfo/metadata.rs
compiler/rustc_codegen_llvm/src/intrinsic.rs
compiler/rustc_codegen_llvm/src/type_of.rs
compiler/rustc_codegen_ssa/src/back/link.rs
compiler/rustc_codegen_ssa/src/back/linker.rs
compiler/rustc_codegen_ssa/src/back/symbol_export.rs
compiler/rustc_codegen_ssa/src/back/write.rs
compiler/rustc_codegen_ssa/src/base.rs
compiler/rustc_codegen_ssa/src/codegen_attrs.rs
compiler/rustc_codegen_ssa/src/mir/debuginfo.rs
compiler/rustc_const_eval/src/const_eval/error.rs
compiler/rustc_const_eval/src/const_eval/machine.rs
compiler/rustc_const_eval/src/interpret/cast.rs
compiler/rustc_const_eval/src/interpret/eval_context.rs
compiler/rustc_const_eval/src/interpret/intern.rs
compiler/rustc_const_eval/src/interpret/intrinsics.rs
compiler/rustc_const_eval/src/interpret/machine.rs
compiler/rustc_const_eval/src/interpret/memory.rs
compiler/rustc_const_eval/src/interpret/operand.rs
compiler/rustc_const_eval/src/interpret/place.rs
compiler/rustc_const_eval/src/interpret/step.rs
compiler/rustc_const_eval/src/interpret/terminator.rs
compiler/rustc_const_eval/src/interpret/util.rs
compiler/rustc_const_eval/src/interpret/validity.rs
compiler/rustc_const_eval/src/interpret/visitor.rs
compiler/rustc_const_eval/src/lib.rs
compiler/rustc_const_eval/src/transform/check_consts/check.rs
compiler/rustc_const_eval/src/transform/promote_consts.rs
compiler/rustc_const_eval/src/transform/validate.rs
compiler/rustc_data_structures/src/frozen.rs
compiler/rustc_data_structures/src/fx.rs
compiler/rustc_data_structures/src/graph/dominators/mod.rs
compiler/rustc_data_structures/src/graph/implementation/tests.rs
compiler/rustc_data_structures/src/graph/iterate/mod.rs
compiler/rustc_data_structures/src/graph/scc/tests.rs
compiler/rustc_data_structures/src/lib.rs
compiler/rustc_data_structures/src/sorted_map.rs
compiler/rustc_data_structures/src/sorted_map/index_map.rs
compiler/rustc_data_structures/src/sorted_map/tests.rs
compiler/rustc_data_structures/src/tiny_list.rs
compiler/rustc_data_structures/src/tiny_list/tests.rs
compiler/rustc_data_structures/src/transitive_relation.rs
compiler/rustc_data_structures/src/unord.rs
compiler/rustc_driver/README.md
compiler/rustc_driver/src/lib.rs
compiler/rustc_driver/src/pretty.rs
compiler/rustc_error_codes/src/error_codes.rs
compiler/rustc_error_codes/src/error_codes/E0208.md
compiler/rustc_error_codes/src/error_codes/E0387.md
compiler/rustc_error_codes/src/error_codes/E0713.md
compiler/rustc_error_codes/src/error_codes/E0714.md
compiler/rustc_error_codes/src/error_codes/E0792.md [new file with mode: 0644]
compiler/rustc_error_messages/locales/en-US/ast_passes.ftl
compiler/rustc_error_messages/locales/en-US/borrowck.ftl
compiler/rustc_error_messages/locales/en-US/infer.ftl
compiler/rustc_error_messages/locales/en-US/mir_build.ftl
compiler/rustc_error_messages/locales/en-US/ty_utils.ftl
compiler/rustc_errors/src/diagnostic.rs
compiler/rustc_errors/src/emitter.rs
compiler/rustc_expand/src/base.rs
compiler/rustc_expand/src/config.rs
compiler/rustc_expand/src/expand.rs
compiler/rustc_expand/src/mbe/macro_check.rs
compiler/rustc_expand/src/mbe/macro_rules.rs
compiler/rustc_expand/src/mbe/quoted.rs
compiler/rustc_expand/src/mbe/transcribe.rs
compiler/rustc_expand/src/parse/tests.rs
compiler/rustc_expand/src/proc_macro_server.rs
compiler/rustc_hir/src/def.rs
compiler/rustc_hir/src/hir.rs
compiler/rustc_hir/src/lang_items.rs
compiler/rustc_hir_analysis/src/astconv/errors.rs
compiler/rustc_hir_analysis/src/astconv/generics.rs
compiler/rustc_hir_analysis/src/astconv/mod.rs
compiler/rustc_hir_analysis/src/check/check.rs
compiler/rustc_hir_analysis/src/check/compare_impl_item.rs
compiler/rustc_hir_analysis/src/check/dropck.rs
compiler/rustc_hir_analysis/src/check/intrinsicck.rs
compiler/rustc_hir_analysis/src/check/mod.rs
compiler/rustc_hir_analysis/src/check/region.rs
compiler/rustc_hir_analysis/src/check/wfcheck.rs
compiler/rustc_hir_analysis/src/check_unused.rs
compiler/rustc_hir_analysis/src/coherence/builtin.rs
compiler/rustc_hir_analysis/src/coherence/inherent_impls.rs
compiler/rustc_hir_analysis/src/coherence/mod.rs
compiler/rustc_hir_analysis/src/coherence/orphan.rs
compiler/rustc_hir_analysis/src/coherence/unsafety.rs
compiler/rustc_hir_analysis/src/collect.rs
compiler/rustc_hir_analysis/src/collect/generics_of.rs
compiler/rustc_hir_analysis/src/collect/item_bounds.rs
compiler/rustc_hir_analysis/src/collect/lifetimes.rs
compiler/rustc_hir_analysis/src/collect/predicates_of.rs
compiler/rustc_hir_analysis/src/collect/type_of.rs
compiler/rustc_hir_analysis/src/constrained_generic_params.rs
compiler/rustc_hir_analysis/src/hir_wf_check.rs
compiler/rustc_hir_analysis/src/lib.rs
compiler/rustc_hir_analysis/src/outlives/implicit_infer.rs
compiler/rustc_hir_analysis/src/outlives/utils.rs
compiler/rustc_hir_analysis/src/structured_errors/wrong_number_of_generic_args.rs
compiler/rustc_hir_analysis/src/variance/mod.rs
compiler/rustc_hir_analysis/src/variance/solve.rs
compiler/rustc_hir_analysis/src/variance/test.rs
compiler/rustc_hir_typeck/src/callee.rs
compiler/rustc_hir_typeck/src/cast.rs
compiler/rustc_hir_typeck/src/closure.rs
compiler/rustc_hir_typeck/src/coercion.rs
compiler/rustc_hir_typeck/src/demand.rs
compiler/rustc_hir_typeck/src/expr.rs
compiler/rustc_hir_typeck/src/expr_use_visitor.rs
compiler/rustc_hir_typeck/src/fallback.rs
compiler/rustc_hir_typeck/src/fn_ctxt/checks.rs
compiler/rustc_hir_typeck/src/fn_ctxt/suggestions.rs
compiler/rustc_hir_typeck/src/generator_interior/drop_ranges/cfg_build.rs
compiler/rustc_hir_typeck/src/generator_interior/drop_ranges/cfg_visualize.rs
compiler/rustc_hir_typeck/src/generator_interior/drop_ranges/record_consumed_borrow.rs
compiler/rustc_hir_typeck/src/generator_interior/mod.rs
compiler/rustc_hir_typeck/src/inherited.rs
compiler/rustc_hir_typeck/src/intrinsicck.rs
compiler/rustc_hir_typeck/src/mem_categorization.rs
compiler/rustc_hir_typeck/src/method/confirm.rs
compiler/rustc_hir_typeck/src/method/mod.rs
compiler/rustc_hir_typeck/src/method/probe.rs
compiler/rustc_hir_typeck/src/method/suggest.rs
compiler/rustc_hir_typeck/src/writeback.rs
compiler/rustc_incremental/src/assert_dep_graph.rs
compiler/rustc_incremental/src/persist/dirty_clean.rs
compiler/rustc_index/src/vec.rs
compiler/rustc_infer/src/errors/mod.rs
compiler/rustc_infer/src/infer/combine.rs
compiler/rustc_infer/src/infer/error_reporting/mod.rs
compiler/rustc_infer/src/infer/error_reporting/nice_region_error/placeholder_error.rs
compiler/rustc_infer/src/infer/error_reporting/nice_region_error/static_impl_trait.rs
compiler/rustc_infer/src/infer/error_reporting/note.rs
compiler/rustc_infer/src/infer/lattice.rs
compiler/rustc_infer/src/infer/lexical_region_resolve/mod.rs
compiler/rustc_infer/src/infer/mod.rs
compiler/rustc_infer/src/infer/nll_relate/mod.rs
compiler/rustc_infer/src/infer/opaque_types.rs
compiler/rustc_infer/src/infer/outlives/components.rs
compiler/rustc_infer/src/infer/outlives/env.rs
compiler/rustc_infer/src/infer/outlives/obligations.rs
compiler/rustc_infer/src/infer/outlives/verify.rs
compiler/rustc_infer/src/infer/region_constraints/mod.rs
compiler/rustc_infer/src/infer/resolve.rs
compiler/rustc_infer/src/infer/type_variable.rs
compiler/rustc_infer/src/traits/engine.rs
compiler/rustc_infer/src/traits/util.rs
compiler/rustc_interface/src/interface.rs
compiler/rustc_interface/src/lib.rs
compiler/rustc_interface/src/passes.rs
compiler/rustc_interface/src/queries.rs
compiler/rustc_interface/src/tests.rs
compiler/rustc_interface/src/util.rs
compiler/rustc_lexer/src/lib.rs
compiler/rustc_lexer/src/unescape.rs
compiler/rustc_lint/src/builtin.rs
compiler/rustc_lint/src/context.rs
compiler/rustc_lint/src/early.rs
compiler/rustc_lint/src/internal.rs
compiler/rustc_lint/src/levels.rs
compiler/rustc_lint/src/lib.rs
compiler/rustc_lint/src/lints.rs
compiler/rustc_lint/src/pass_by_value.rs
compiler/rustc_lint/src/passes.rs
compiler/rustc_lint/src/types.rs
compiler/rustc_lint/src/unused.rs
compiler/rustc_lint_defs/src/builtin.rs
compiler/rustc_lint_defs/src/lib.rs
compiler/rustc_llvm/llvm-wrapper/PassWrapper.cpp
compiler/rustc_llvm/llvm-wrapper/RustWrapper.cpp
compiler/rustc_macros/src/type_visitable.rs
compiler/rustc_metadata/src/fs.rs
compiler/rustc_metadata/src/locator.rs
compiler/rustc_metadata/src/native_libs.rs
compiler/rustc_metadata/src/rmeta/decoder/cstore_impl.rs
compiler/rustc_metadata/src/rmeta/encoder.rs
compiler/rustc_metadata/src/rmeta/mod.rs
compiler/rustc_middle/src/hir/map/mod.rs
compiler/rustc_middle/src/hir/mod.rs
compiler/rustc_middle/src/infer/canonical.rs
compiler/rustc_middle/src/macros.rs
compiler/rustc_middle/src/mir/basic_blocks.rs
compiler/rustc_middle/src/mir/graph_cyclic_cache.rs [deleted file]
compiler/rustc_middle/src/mir/interpret/mod.rs
compiler/rustc_middle/src/mir/mod.rs
compiler/rustc_middle/src/mir/predecessors.rs [deleted file]
compiler/rustc_middle/src/mir/switch_sources.rs [deleted file]
compiler/rustc_middle/src/mir/syntax.rs
compiler/rustc_middle/src/mir/terminator.rs
compiler/rustc_middle/src/mir/traversal.rs
compiler/rustc_middle/src/mir/type_visitable.rs
compiler/rustc_middle/src/query/mod.rs
compiler/rustc_middle/src/traits/chalk.rs
compiler/rustc_middle/src/traits/query.rs
compiler/rustc_middle/src/ty/assoc.rs
compiler/rustc_middle/src/ty/context.rs
compiler/rustc_middle/src/ty/diagnostics.rs
compiler/rustc_middle/src/ty/fold.rs
compiler/rustc_middle/src/ty/generics.rs
compiler/rustc_middle/src/ty/instance.rs
compiler/rustc_middle/src/ty/layout.rs
compiler/rustc_middle/src/ty/mod.rs
compiler/rustc_middle/src/ty/parameterized.rs
compiler/rustc_middle/src/ty/print/pretty.rs
compiler/rustc_middle/src/ty/structural_impls.rs
compiler/rustc_middle/src/ty/sty.rs
compiler/rustc_middle/src/ty/subst.rs
compiler/rustc_middle/src/ty/typeck_results.rs
compiler/rustc_middle/src/ty/util.rs
compiler/rustc_middle/src/ty/visit.rs
compiler/rustc_mir_build/src/build/custom/mod.rs
compiler/rustc_mir_build/src/build/custom/parse/instruction.rs
compiler/rustc_mir_build/src/build/expr/as_operand.rs
compiler/rustc_mir_build/src/build/expr/stmt.rs
compiler/rustc_mir_build/src/build/matches/mod.rs
compiler/rustc_mir_build/src/build/matches/test.rs
compiler/rustc_mir_build/src/build/scope.rs
compiler/rustc_mir_build/src/errors.rs
compiler/rustc_mir_build/src/lib.rs
compiler/rustc_mir_build/src/lints.rs
compiler/rustc_mir_build/src/thir/pattern/check_match.rs
compiler/rustc_mir_build/src/thir/pattern/deconstruct_pat.rs
compiler/rustc_mir_dataflow/src/impls/mod.rs
compiler/rustc_mir_transform/src/add_retag.rs
compiler/rustc_mir_transform/src/coverage/spans.rs
compiler/rustc_mir_transform/src/generator.rs
compiler/rustc_mir_transform/src/inline.rs
compiler/rustc_mir_transform/src/lib.rs
compiler/rustc_mir_transform/src/sroa.rs
compiler/rustc_monomorphize/src/lib.rs
compiler/rustc_monomorphize/src/polymorphize.rs
compiler/rustc_parse/src/lexer/mod.rs
compiler/rustc_parse/src/lexer/unicode_chars.rs
compiler/rustc_parse/src/parser/expr.rs
compiler/rustc_parse/src/parser/mod.rs
compiler/rustc_parse/src/parser/pat.rs
compiler/rustc_parse/src/parser/ty.rs
compiler/rustc_parse_format/src/lib.rs
compiler/rustc_passes/src/entry.rs
compiler/rustc_passes/src/lib_features.rs
compiler/rustc_passes/src/liveness.rs
compiler/rustc_passes/src/stability.rs
compiler/rustc_privacy/src/lib.rs
compiler/rustc_query_system/src/dep_graph/graph.rs
compiler/rustc_query_system/src/dep_graph/serialized.rs
compiler/rustc_query_system/src/query/caches.rs
compiler/rustc_resolve/src/check_unused.rs
compiler/rustc_resolve/src/diagnostics.rs
compiler/rustc_resolve/src/ident.rs
compiler/rustc_resolve/src/late.rs
compiler/rustc_resolve/src/late/diagnostics.rs
compiler/rustc_resolve/src/lib.rs
compiler/rustc_save_analysis/src/dump_visitor.rs
compiler/rustc_save_analysis/src/span_utils.rs
compiler/rustc_session/src/config.rs
compiler/rustc_session/src/cstore.rs
compiler/rustc_session/src/filesearch.rs
compiler/rustc_session/src/output.rs
compiler/rustc_session/src/session.rs
compiler/rustc_span/src/hygiene.rs
compiler/rustc_span/src/source_map.rs
compiler/rustc_span/src/source_map/tests.rs
compiler/rustc_span/src/symbol.rs
compiler/rustc_target/src/asm/mod.rs
compiler/rustc_target/src/spec/aarch64_apple_darwin.rs
compiler/rustc_target/src/spec/bpf_base.rs
compiler/rustc_target/src/spec/i686_apple_darwin.rs
compiler/rustc_target/src/spec/illumos_base.rs
compiler/rustc_target/src/spec/mod.rs
compiler/rustc_target/src/spec/s390x_unknown_linux_gnu.rs
compiler/rustc_target/src/spec/s390x_unknown_linux_musl.rs
compiler/rustc_target/src/spec/sparcv9_sun_solaris.rs
compiler/rustc_target/src/spec/x86_64_apple_darwin.rs
compiler/rustc_trait_selection/Cargo.toml
compiler/rustc_trait_selection/src/lib.rs
compiler/rustc_trait_selection/src/solve/assembly.rs
compiler/rustc_trait_selection/src/solve/cache.rs [deleted file]
compiler/rustc_trait_selection/src/solve/fulfill.rs
compiler/rustc_trait_selection/src/solve/infcx_ext.rs
compiler/rustc_trait_selection/src/solve/mod.rs
compiler/rustc_trait_selection/src/solve/overflow.rs [deleted file]
compiler/rustc_trait_selection/src/solve/project_goals.rs
compiler/rustc_trait_selection/src/solve/search_graph/cache.rs [new file with mode: 0644]
compiler/rustc_trait_selection/src/solve/search_graph/mod.rs [new file with mode: 0644]
compiler/rustc_trait_selection/src/solve/search_graph/overflow.rs [new file with mode: 0644]
compiler/rustc_trait_selection/src/solve/trait_goals.rs
compiler/rustc_trait_selection/src/solve/trait_goals/structural_traits.rs [new file with mode: 0644]
compiler/rustc_trait_selection/src/traits/chalk_fulfill.rs
compiler/rustc_trait_selection/src/traits/coherence.rs
compiler/rustc_trait_selection/src/traits/const_evaluatable.rs
compiler/rustc_trait_selection/src/traits/error_reporting/ambiguity.rs
compiler/rustc_trait_selection/src/traits/error_reporting/mod.rs
compiler/rustc_trait_selection/src/traits/error_reporting/suggestions.rs
compiler/rustc_trait_selection/src/traits/fulfill.rs
compiler/rustc_trait_selection/src/traits/misc.rs
compiler/rustc_trait_selection/src/traits/mod.rs
compiler/rustc_trait_selection/src/traits/object_safety.rs
compiler/rustc_trait_selection/src/traits/project.rs
compiler/rustc_trait_selection/src/traits/query/normalize.rs
compiler/rustc_trait_selection/src/traits/relationships.rs [deleted file]
compiler/rustc_trait_selection/src/traits/select/candidate_assembly.rs
compiler/rustc_trait_selection/src/traits/select/confirmation.rs
compiler/rustc_trait_selection/src/traits/select/mod.rs
compiler/rustc_trait_selection/src/traits/specialize/specialization_graph.rs
compiler/rustc_trait_selection/src/traits/structural_match.rs
compiler/rustc_trait_selection/src/traits/vtable.rs
compiler/rustc_trait_selection/src/traits/wf.rs
compiler/rustc_traits/src/chalk/db.rs
compiler/rustc_traits/src/codegen.rs
compiler/rustc_traits/src/implied_outlives_bounds.rs
compiler/rustc_traits/src/type_op.rs
compiler/rustc_transmute/src/lib.rs
compiler/rustc_ty_utils/src/abi.rs
compiler/rustc_ty_utils/src/consts.rs
compiler/rustc_ty_utils/src/lib.rs
compiler/rustc_type_ir/src/sty.rs
library/alloc/src/alloc.rs
library/alloc/src/collections/binary_heap/mod.rs
library/alloc/src/collections/binary_heap/tests.rs
library/alloc/src/lib.rs
library/alloc/src/rc.rs
library/alloc/src/slice.rs
library/alloc/src/str.rs
library/alloc/src/sync.rs
library/alloc/src/sync/tests.rs
library/alloc/src/vec/drain.rs
library/alloc/src/vec/into_iter.rs
library/alloc/src/vec/is_zero.rs
library/alloc/src/vec/mod.rs
library/alloc/src/vec/splice.rs
library/alloc/tests/lib.rs
library/alloc/tests/vec.rs
library/core/src/any.rs
library/core/src/array/iter.rs
library/core/src/fmt/mod.rs
library/core/src/future/mod.rs
library/core/src/hint.rs
library/core/src/intrinsics/mir.rs
library/core/src/iter/range.rs
library/core/src/iter/sources/from_generator.rs
library/core/src/num/dec2flt/fpu.rs
library/core/src/num/int_macros.rs
library/core/src/pin.rs
library/core/src/ptr/const_ptr.rs
library/core/src/ptr/mod.rs
library/core/src/ptr/mut_ptr.rs
library/core/src/ptr/non_null.rs
library/core/src/slice/iter.rs
library/core/src/slice/iter/macros.rs
library/core/src/slice/mod.rs
library/core/src/slice/sort.rs
library/core/src/task/wake.rs
library/core/tests/num/dec2flt/mod.rs
library/core/tests/slice.rs
library/portable-simd/crates/core_simd/examples/spectral_norm.rs
library/std/src/fs.rs
library/std/src/io/buffered/tests.rs
library/std/src/io/error/repr_bitpacked.rs
library/std/src/io/error/tests.rs
library/std/src/lib.rs
library/std/src/net/ip_addr/tests.rs
library/std/src/net/socket_addr/tests.rs
library/std/src/os/fd/mod.rs
library/std/src/os/fd/owned.rs
library/std/src/os/net/linux_ext/addr.rs
library/std/src/os/unix/net/addr.rs
library/std/src/panicking.rs
library/std/src/path.rs
library/std/src/sys/hermit/thread.rs
library/std/src/sys/hermit/thread_local_dtor.rs
library/std/src/sys/itron/thread.rs
library/std/src/sys/solid/thread_local_dtor.rs
library/std/src/sys/unix/fs.rs
library/std/src/sys/unix/kernel_copy.rs
library/std/src/sys/unix/process/process_fuchsia.rs
library/std/src/sys/unix/process/process_unix.rs
library/std/src/sys/unix/process/process_unix/tests.rs
library/std/src/sys/unix/process/process_vxworks.rs
library/std/src/sys/unix/thread.rs
library/std/src/sys/unix/thread_local_dtor.rs
library/std/src/sys/windows/os.rs
library/std/src/sys/windows/thread.rs
library/std/src/sys_common/thread_local_dtor.rs
library/std/src/thread/local.rs
library/test/src/lib.rs
library/test/src/term/terminfo/searcher.rs
src/bootstrap/cc_detect.rs
src/bootstrap/dist.rs
src/bootstrap/native.rs
src/ci/docker/host-x86_64/arm-android/Dockerfile
src/ci/docker/host-x86_64/dist-android/Dockerfile
src/ci/docker/host-x86_64/dist-s390x-linux/Dockerfile
src/ci/docker/host-x86_64/mingw-check-tidy/Dockerfile
src/ci/docker/host-x86_64/mingw-check/Dockerfile
src/ci/docker/host-x86_64/mingw-check/reuse-requirements.in
src/ci/docker/host-x86_64/mingw-check/reuse-requirements.txt
src/ci/docker/host-x86_64/x86_64-gnu-llvm-14/Dockerfile [new file with mode: 0644]
src/ci/docker/host-x86_64/x86_64-gnu-llvm-15/Dockerfile [new file with mode: 0644]
src/ci/docker/scripts/android-ndk.sh
src/ci/github-actions/ci.yml
src/doc/book
src/doc/nomicon
src/doc/reference
src/doc/rust-by-example
src/doc/rustc-dev-guide
src/doc/rustc/src/instrument-coverage.md
src/librustdoc/clean/auto_trait.rs
src/librustdoc/clean/inline.rs
src/librustdoc/clean/mod.rs
src/librustdoc/clean/simplify.rs
src/librustdoc/core.rs
src/librustdoc/doctest.rs
src/librustdoc/formats/cache.rs
src/librustdoc/html/format.rs
src/librustdoc/html/markdown.rs
src/librustdoc/html/render/context.rs
src/librustdoc/html/render/mod.rs
src/librustdoc/html/render/print_item.rs
src/librustdoc/html/render/search_index.rs
src/librustdoc/html/render/write_shared.rs
src/librustdoc/html/static/css/rustdoc.css
src/librustdoc/html/static/css/settings.css
src/librustdoc/html/static/css/themes/ayu.css
src/librustdoc/html/static/css/themes/dark.css
src/librustdoc/html/static/css/themes/light.css
src/librustdoc/html/static/js/main.js
src/librustdoc/html/static/js/search.js
src/librustdoc/html/static/js/settings.js
src/librustdoc/json/import_finder.rs
src/librustdoc/json/mod.rs
src/librustdoc/passes/collect_trait_impls.rs
src/librustdoc/visit_ast.rs
src/librustdoc/visit_lib.rs
src/llvm-project
src/tools/cargo
src/tools/clippy/clippy_lints/src/inherent_impl.rs
src/tools/clippy/clippy_lints/src/instant_subtraction.rs
src/tools/clippy/clippy_lints/src/len_zero.rs
src/tools/clippy/clippy_lints/src/loops/while_immutable_condition.rs
src/tools/clippy/clippy_lints/src/missing_trait_methods.rs
src/tools/clippy/clippy_lints/src/needless_pass_by_value.rs
src/tools/clippy/clippy_lints/src/pass_by_ref_or_value.rs
src/tools/clippy/clippy_utils/src/ty.rs
src/tools/compiletest/src/common.rs
src/tools/compiletest/src/header.rs
src/tools/compiletest/src/main.rs
src/tools/compiletest/src/read2.rs
src/tools/compiletest/src/runtest.rs
src/tools/compiletest/src/util.rs
src/tools/miri/src/bin/miri.rs
src/tools/miri/tests/pass-dep/shims/libc-fs-with-isolation.rs
src/tools/miri/tests/pass/vec.rs
src/tools/tidy/Cargo.toml
src/tools/tidy/src/error_codes.rs
src/tools/tidy/src/lib.rs
src/tools/tidy/src/main.rs
src/tools/tidy/src/rustdoc_gui_tests.rs [new file with mode: 0644]
src/tools/tidy/src/style.rs
src/tools/tidy/src/x_version.rs [new file with mode: 0644]
src/version
tests/assembly/is_aligned.rs
tests/codegen/abi-sysv64.rs
tests/codegen/abi-x86-interrupt.rs
tests/codegen/adjustments.rs
tests/codegen/box-maybe-uninit-llvm14.rs
tests/codegen/box-maybe-uninit.rs
tests/codegen/c-variadic.rs
tests/codegen/call-llvm-intrinsics.rs
tests/codegen/comparison-operators-newtype.rs
tests/codegen/dllimports/main.rs
tests/codegen/enum-match.rs
tests/codegen/fastcall-inreg.rs
tests/codegen/fewer-names.rs
tests/codegen/frame-pointer.rs
tests/codegen/function-arguments.rs
tests/codegen/intrinsics/const_eval_select.rs
tests/codegen/intrinsics/mask.rs
tests/codegen/issue-32031.rs
tests/codegen/issue-45964-bounds-check-slice-pos.rs
tests/codegen/issue-58881.rs
tests/codegen/issue-96497-slice-size-nowrap.rs
tests/codegen/iter-repeat-n-trivial-drop.rs
tests/codegen/loads.rs
tests/codegen/naked-functions.rs
tests/codegen/pic-relocation-model.rs
tests/codegen/pie-relocation-model.rs
tests/codegen/refs.rs
tests/codegen/repr-transparent.rs
tests/codegen/riscv-abi/riscv64-lp64-lp64f-lp64d-abi.rs
tests/codegen/sanitizer-cfi-emit-type-checks.rs
tests/codegen/sanitizer-kcfi-emit-kcfi-operand-bundle-itanium-cxx-abi.rs
tests/codegen/sanitizer-recover.rs
tests/codegen/scalar-pair-bool.rs
tests/codegen/some-abis-do-extend-params-to-32-bits.rs
tests/codegen/static-relocation-model-msvc.rs
tests/codegen/transmute-scalar.rs
tests/codegen/tuple-layout-opt.rs
tests/codegen/var-names.rs
tests/codegen/vec-calloc.rs
tests/codegen/zst-offset.rs
tests/mir-opt/building/async_await.a-{closure#0}.generator_resume.0.mir [new file with mode: 0644]
tests/mir-opt/building/async_await.b-{closure#0}.generator_resume.0.mir [new file with mode: 0644]
tests/mir-opt/building/async_await.rs [new file with mode: 0644]
tests/mir-opt/building/custom/simple_assign.rs
tests/mir-opt/building/custom/simple_assign.simple.built.after.mir
tests/mir-opt/inline/inline_into_box_place.main.Inline.diff
tests/mir-opt/inline/issue_106141.outer.Inline.diff [new file with mode: 0644]
tests/mir-opt/inline/issue_106141.rs [new file with mode: 0644]
tests/mir-opt/nll/region_subtyping_basic.main.nll.0.32bit.mir
tests/mir-opt/nll/region_subtyping_basic.main.nll.0.64bit.mir
tests/run-make-fulldeps/issue-19371/foo.rs
tests/rustdoc-gui/basic-code.goml
tests/rustdoc-gui/code-tags.goml
tests/rustdoc-gui/font-weight.goml
tests/rustdoc-gui/list_code_block.goml
tests/rustdoc-gui/scrape-examples-button-focus.goml
tests/rustdoc-gui/scrape-examples-color.goml
tests/rustdoc-gui/settings.goml
tests/rustdoc-js-std/macro-print.js
tests/rustdoc-js-std/typed-query.js
tests/rustdoc-js-std/vec-new.js
tests/rustdoc-js/search-short-types.js
tests/rustdoc-ui/infinite-recursive-type-impl-trait-return.rs
tests/rustdoc-ui/infinite-recursive-type-impl-trait-return.stderr [deleted file]
tests/rustdoc-ui/infinite-recursive-type-impl-trait.rs
tests/rustdoc-ui/infinite-recursive-type-impl-trait.stderr [deleted file]
tests/rustdoc/array-links.rs
tests/rustdoc/assoc-consts.rs
tests/rustdoc/assoc-item-cast.rs
tests/rustdoc/assoc-types.rs
tests/rustdoc/async-fn.rs
tests/rustdoc/attributes.rs
tests/rustdoc/auxiliary/issue-85454.rs
tests/rustdoc/const-fn.rs
tests/rustdoc/const-generics/add-impl.rs
tests/rustdoc/const-generics/const-generic-defaults.rs
tests/rustdoc/const-generics/const-generics-docs.rs
tests/rustdoc/const-generics/const-impl.rs
tests/rustdoc/const-generics/generic_const_exprs.rs
tests/rustdoc/const-generics/type-alias.rs
tests/rustdoc/const-intrinsic.rs
tests/rustdoc/doc-notable_trait-slice.bare_fn_matches.html
tests/rustdoc/doc-notable_trait.bare-fn.html
tests/rustdoc/doc-notable_trait.some-struct-new.html
tests/rustdoc/doc-notable_trait.wrap-me.html
tests/rustdoc/fn-pointer-arg-name.rs
tests/rustdoc/hide-complex-unevaluated-const-arguments.rs
tests/rustdoc/impl-in-const-block.rs [deleted file]
tests/rustdoc/inline-default-methods.rs
tests/rustdoc/inline_cross/auxiliary/cross-glob.rs
tests/rustdoc/inline_cross/cross-glob.rs
tests/rustdoc/inline_cross/dyn_trait.rs
tests/rustdoc/inline_cross/impl_trait.rs
tests/rustdoc/issue-20646.rs
tests/rustdoc/issue-20727-2.rs
tests/rustdoc/issue-20727-3.rs
tests/rustdoc/issue-20727-4.rs
tests/rustdoc/issue-20727.rs
tests/rustdoc/issue-22038.rs
tests/rustdoc/issue-33302.rs
tests/rustdoc/issue-85454.rs
tests/rustdoc/issue-98697.rs
tests/rustdoc/legacy-const-generic.rs
tests/rustdoc/lifetime-name.rs
tests/rustdoc/mut-params.rs
tests/rustdoc/normalize-assoc-item.rs
tests/rustdoc/pub-method.rs
tests/rustdoc/range-arg-pattern.rs
tests/rustdoc/reexports-priv.rs
tests/rustdoc/reexports.rs
tests/rustdoc/rfc-2632-const-trait-impl.rs
tests/rustdoc/safe-intrinsic.rs
tests/rustdoc/slice-links.rs
tests/rustdoc/spotlight-from-dependency.odd.html
tests/rustdoc/struct-arg-pattern.rs
tests/rustdoc/test-parens.rs
tests/rustdoc/toggle-item-contents.rs
tests/rustdoc/tuple-struct-fields-doc.rs
tests/rustdoc/tuples.rs
tests/rustdoc/unit-return.rs
tests/rustdoc/where-sized.rs
tests/rustdoc/where.SWhere_Simd_item-decl.html
tests/rustdoc/where.SWhere_TraitWhere_item-decl.html
tests/rustdoc/where.rs
tests/rustdoc/whitespace-after-where-clause.enum.html
tests/rustdoc/whitespace-after-where-clause.enum2.html
tests/rustdoc/whitespace-after-where-clause.struct.html
tests/rustdoc/whitespace-after-where-clause.struct2.html
tests/rustdoc/whitespace-after-where-clause.trait.html
tests/rustdoc/whitespace-after-where-clause.trait2.html
tests/rustdoc/whitespace-after-where-clause.union.html
tests/rustdoc/whitespace-after-where-clause.union2.html
tests/rustdoc/wrapping.rs
tests/ui-fulldeps/mod_dir_path_canonicalized.rs
tests/ui/anonymous-higher-ranked-lifetime.stderr
tests/ui/async-await/auxiliary/issue-107036.rs [new file with mode: 0644]
tests/ui/async-await/await-sequence.rs [new file with mode: 0644]
tests/ui/async-await/in-trait/missing-send-bound.rs [new file with mode: 0644]
tests/ui/async-await/in-trait/missing-send-bound.stderr [new file with mode: 0644]
tests/ui/async-await/issue-107036.rs [new file with mode: 0644]
tests/ui/borrowck/issue-92157.rs [new file with mode: 0644]
tests/ui/borrowck/issue-92157.stderr [new file with mode: 0644]
tests/ui/cast/cast-as-bool.rs
tests/ui/cast/cast-as-bool.stderr
tests/ui/cast/issue-106883-is-empty.rs [new file with mode: 0644]
tests/ui/cast/issue-106883-is-empty.stderr [new file with mode: 0644]
tests/ui/chalkify/bugs/async.rs
tests/ui/chalkify/bugs/async.stderr
tests/ui/closures/multiple-fn-bounds.stderr
tests/ui/coherence/coherence-with-generator.rs
tests/ui/coherence/coherence-with-generator.stderr [deleted file]
tests/ui/coherence/coherence-with-generator.stock.stderr [new file with mode: 0644]
tests/ui/const-generics/const-param-type-depends-on-const-param.min.stderr
tests/ui/const-generics/const-param-type-depends-on-const-param.rs
tests/ui/const-generics/dont-evaluate-array-len-on-err-1.stderr
tests/ui/const-generics/generic_const_exprs/array-size-in-generic-struct-param.full.stderr
tests/ui/const-generics/generic_const_exprs/auxiliary/anon_const_non_local.rs [new file with mode: 0644]
tests/ui/const-generics/generic_const_exprs/const-block-is-poly.rs [new file with mode: 0644]
tests/ui/const-generics/generic_const_exprs/const-block-is-poly.stderr [new file with mode: 0644]
tests/ui/const-generics/generic_const_exprs/issue-62504.min.stderr
tests/ui/const-generics/generic_const_exprs/issue-79518-default_trait_method_normalization.stderr
tests/ui/const-generics/generic_const_exprs/let-bindings.stderr
tests/ui/const-generics/generic_const_exprs/non_local_anon_const_diagnostics.rs [new file with mode: 0644]
tests/ui/const-generics/generic_const_exprs/non_local_anon_const_diagnostics.stderr [new file with mode: 0644]
tests/ui/const-generics/generic_const_exprs/unused_expr.stderr
tests/ui/const-generics/issue-106419-struct-with-multiple-const-params.rs [new file with mode: 0644]
tests/ui/const-generics/issues/issue-62878.min.stderr
tests/ui/const-generics/issues/issue-62878.rs
tests/ui/const-generics/issues/issue-67945-2.full.stderr
tests/ui/const-generics/issues/issue-67945-3.full.stderr
tests/ui/const-generics/issues/issue-67945-4.full.stderr
tests/ui/const-generics/issues/issue-71169.min.stderr
tests/ui/const-generics/issues/issue-71169.rs
tests/ui/const-generics/issues/issue-73491.min.stderr
tests/ui/const-generics/issues/issue-73491.rs
tests/ui/const-generics/issues/issue-74101.min.stderr
tests/ui/const-generics/issues/issue-74101.rs
tests/ui/const-generics/issues/issue-75047.min.stderr
tests/ui/const-generics/issues/issue-75047.rs
tests/ui/const-generics/issues/issue-77357.stderr
tests/ui/const-generics/issues/issue-85031-2.rs
tests/ui/const-generics/issues/issue-85031-2.stderr
tests/ui/const-generics/issues/issue-90318.rs
tests/ui/const-generics/issues/issue-90318.stderr
tests/ui/const-generics/nested-type.min.stderr
tests/ui/const-generics/unused_braces.fixed
tests/ui/const-generics/unused_braces.rs
tests/ui/const-generics/unused_braces.stderr
tests/ui/consts/auxiliary/closure-in-foreign-crate.rs [new file with mode: 0644]
tests/ui/consts/closure-in-foreign-crate.rs [new file with mode: 0644]
tests/ui/consts/const-match-check.eval1.stderr
tests/ui/consts/const-match-check.eval2.stderr
tests/ui/consts/const-match-check.matchck.stderr
tests/ui/consts/const-size_of-cycle.stderr
tests/ui/consts/const_cmp_type_id.rs [new file with mode: 0644]
tests/ui/consts/issue-44415.stderr
tests/ui/consts/issue-73976-monomorphic.rs
tests/ui/consts/too_generic_eval_ice.rs
tests/ui/consts/too_generic_eval_ice.stderr
tests/ui/debuginfo/debuginfo-type-name-layout-ice-94961-1.stderr
tests/ui/debuginfo/debuginfo-type-name-layout-ice-94961-2.stderr
tests/ui/dep-graph/dep-graph-dump.rs [new file with mode: 0644]
tests/ui/dep-graph/dep-graph-dump.stderr [new file with mode: 0644]
tests/ui/deriving/deriving-all-codegen.stdout
tests/ui/empty/empty-macro-use.stderr
tests/ui/error-codes/E0208.rs [new file with mode: 0644]
tests/ui/error-codes/E0208.stderr [new file with mode: 0644]
tests/ui/error-codes/E0606.rs
tests/ui/error-codes/E0606.stderr
tests/ui/error-festival.stderr
tests/ui/errors/auxiliary/remapped_dep.rs [new file with mode: 0644]
tests/ui/errors/remap-path-prefix-reverse.local-self.stderr [new file with mode: 0644]
tests/ui/errors/remap-path-prefix-reverse.remapped-self.stderr [new file with mode: 0644]
tests/ui/errors/remap-path-prefix-reverse.rs [new file with mode: 0644]
tests/ui/errors/remap-path-prefix.rs [new file with mode: 0644]
tests/ui/errors/remap-path-prefix.stderr [new file with mode: 0644]
tests/ui/extenv/issue-55897.stderr
tests/ui/fmt/auxiliary/format-string-proc-macro.rs
tests/ui/fmt/indoc-issue-106408.rs [new file with mode: 0644]
tests/ui/fmt/respanned-literal-issue-106191.rs
tests/ui/fmt/respanned-literal-issue-106191.stderr
tests/ui/generic-associated-types/bugs/hrtb-implied-1.rs
tests/ui/generic-associated-types/bugs/hrtb-implied-1.stderr
tests/ui/generic-associated-types/bugs/hrtb-implied-2.rs
tests/ui/generic-associated-types/bugs/issue-100013.rs
tests/ui/generic-associated-types/bugs/issue-100013.stderr
tests/ui/generic-associated-types/bugs/issue-91762.rs
tests/ui/generic-associated-types/collectivity-regression.stderr
tests/ui/generic-associated-types/issue-88360.fixed [new file with mode: 0644]
tests/ui/generic-associated-types/issue-88360.rs
tests/ui/generic-associated-types/issue-88360.stderr
tests/ui/generics/issue-106694.rs [new file with mode: 0644]
tests/ui/generics/issue-106694.stderr [new file with mode: 0644]
tests/ui/higher-rank-trait-bounds/issue-42114.rs [new file with mode: 0644]
tests/ui/hygiene/globs.stderr
tests/ui/hygiene/no_implicit_prelude-2018.stderr
tests/ui/impl-trait/issues/issue-105826.rs [new file with mode: 0644]
tests/ui/impl-trait/recursive-generator.rs [new file with mode: 0644]
tests/ui/impl-trait/recursive-generator.stderr [new file with mode: 0644]
tests/ui/impl-trait/recursive-impl-trait-type-indirect.stderr
tests/ui/imports/bad-import-in-nested.stderr
tests/ui/imports/bad-import-with-rename.stderr
tests/ui/imports/issue-56125.stderr
tests/ui/imports/issue-57015.stderr
tests/ui/imports/issue-99695-b.fixed [new file with mode: 0644]
tests/ui/imports/issue-99695-b.rs [new file with mode: 0644]
tests/ui/imports/issue-99695-b.stderr [new file with mode: 0644]
tests/ui/imports/issue-99695.fixed [new file with mode: 0644]
tests/ui/imports/issue-99695.rs [new file with mode: 0644]
tests/ui/imports/issue-99695.stderr [new file with mode: 0644]
tests/ui/inference/cannot-infer-partial-try-return.stderr
tests/ui/inference/issue-107090.rs [new file with mode: 0644]
tests/ui/inference/issue-107090.stderr [new file with mode: 0644]
tests/ui/inference/issue-83606.rs
tests/ui/inference/issue-83606.stderr
tests/ui/inference/question-mark-type-infer.stderr
tests/ui/issues/issue-19100.fixed
tests/ui/issues/issue-19100.rs
tests/ui/issues/issue-19100.stderr
tests/ui/issues/issue-65634-raw-ident-suggestion.edition2015.stderr [new file with mode: 0644]
tests/ui/issues/issue-65634-raw-ident-suggestion.edition2018.stderr [new file with mode: 0644]
tests/ui/issues/issue-65634-raw-ident-suggestion.rs
tests/ui/issues/issue-65634-raw-ident-suggestion.stderr [deleted file]
tests/ui/issues/issue-69455.stderr
tests/ui/lifetimes/issue-105507.fixed [new file with mode: 0644]
tests/ui/lifetimes/issue-105507.rs [new file with mode: 0644]
tests/ui/lifetimes/issue-105507.stderr [new file with mode: 0644]
tests/ui/limits/issue-15919-32.stderr
tests/ui/limits/issue-15919-64.stderr
tests/ui/limits/issue-17913.rs
tests/ui/limits/issue-17913.stderr
tests/ui/limits/issue-55878.stderr
tests/ui/limits/issue-69485-var-size-diffs-too-large.stderr
tests/ui/limits/issue-75158-64.stderr
tests/ui/lint/issue-30302.rs
tests/ui/lint/issue-30302.stderr
tests/ui/lint/lint-uppercase-variables.rs
tests/ui/lint/lint-uppercase-variables.stderr
tests/ui/lint/semicolon-in-expressions-from-macros/semicolon-in-expressions-from-macros.stderr
tests/ui/lint/semicolon-in-expressions-from-macros/warn-semicolon-in-expressions-from-macros.stderr
tests/ui/lint/unused/issue-105061-array-lint.rs [new file with mode: 0644]
tests/ui/lint/unused/issue-105061-array-lint.stderr [new file with mode: 0644]
tests/ui/lint/unused/issue-105061-should-lint.rs [new file with mode: 0644]
tests/ui/lint/unused/issue-105061-should-lint.stderr [new file with mode: 0644]
tests/ui/lint/unused/issue-105061.rs [new file with mode: 0644]
tests/ui/lint/unused/issue-105061.stderr [new file with mode: 0644]
tests/ui/lint/unused_braces.fixed
tests/ui/lint/unused_braces.rs
tests/ui/lint/unused_braces.stderr
tests/ui/macros/issue-84195-lint-anon-const.stderr
tests/ui/macros/issue-88228.rs
tests/ui/macros/issue-88228.stderr
tests/ui/macros/lint-trailing-macro-call.stderr
tests/ui/macros/macro-context.stderr
tests/ui/macros/macro-in-expression-context.stderr
tests/ui/macros/macro-use-wrong-name.stderr
tests/ui/methods/method-not-found-but-doc-alias.rs [new file with mode: 0644]
tests/ui/methods/method-not-found-but-doc-alias.stderr [new file with mode: 0644]
tests/ui/mismatched_types/cast-rfc0401.stderr
tests/ui/mismatched_types/closure-arg-type-mismatch-issue-45727.fixed [new file with mode: 0644]
tests/ui/mismatched_types/closure-arg-type-mismatch-issue-45727.rs [new file with mode: 0644]
tests/ui/mismatched_types/closure-arg-type-mismatch-issue-45727.stderr [new file with mode: 0644]
tests/ui/mismatched_types/closure-arg-type-mismatch.stderr
tests/ui/mismatched_types/issue-36053-2.stderr
tests/ui/missing/missing-macro-use.stderr
tests/ui/parser/recover-unticked-labels.fixed [new file with mode: 0644]
tests/ui/parser/recover-unticked-labels.rs [new file with mode: 0644]
tests/ui/parser/recover-unticked-labels.stderr [new file with mode: 0644]
tests/ui/parser/unicode-chars.rs
tests/ui/parser/unicode-chars.stderr
tests/ui/pattern/issue-106552.rs [new file with mode: 0644]
tests/ui/pattern/issue-106552.stderr [new file with mode: 0644]
tests/ui/pattern/issue-14221.rs
tests/ui/pattern/issue-14221.stderr
tests/ui/pattern/issue-67776-match-same-name-enum-variant-refs.rs
tests/ui/pattern/issue-67776-match-same-name-enum-variant-refs.stderr
tests/ui/proc-macro/derive-helper-shadowing.stderr
tests/ui/proc-macro/expand-expr.rs
tests/ui/proc-macro/expand-expr.stderr
tests/ui/proc-macro/generate-mod.rs
tests/ui/proc-macro/generate-mod.stderr
tests/ui/proc-macro/pretty-print-hack-show.remapped.stderr
tests/ui/proc-macro/pretty-print-hack-show.remapped.stdout
tests/ui/proc-macro/pretty-print-hack-show.rs
tests/ui/remap-path-prefix.rs [deleted file]
tests/ui/remap-path-prefix.stderr [deleted file]
tests/ui/rfc-2126-extern-absolute-paths/not-allowed.stderr
tests/ui/simd/portable-intrinsics-arent-exposed.stderr
tests/ui/single-use-lifetime/issue-104440.rs [new file with mode: 0644]
tests/ui/single-use-lifetime/issue-104440.stderr [new file with mode: 0644]
tests/ui/stability-attribute/issue-106589.rs [new file with mode: 0644]
tests/ui/stability-attribute/issue-106589.stderr [new file with mode: 0644]
tests/ui/suggestions/call-on-unimplemented-with-autoderef.rs [new file with mode: 0644]
tests/ui/suggestions/call-on-unimplemented-with-autoderef.stderr [new file with mode: 0644]
tests/ui/suggestions/constrain-suggest-ice.stderr
tests/ui/suggestions/issue-88730.rs
tests/ui/suggestions/issue-88730.stderr
tests/ui/suggestions/suggest-remove-deref.fixed [new file with mode: 0644]
tests/ui/suggestions/suggest-remove-deref.rs [new file with mode: 0644]
tests/ui/suggestions/suggest-remove-deref.stderr [new file with mode: 0644]
tests/ui/suggestions/type-mismatch-byte-literal.rs [new file with mode: 0644]
tests/ui/suggestions/type-mismatch-byte-literal.stderr [new file with mode: 0644]
tests/ui/symbol-names/impl2.rs
tests/ui/symbol-names/impl2.stderr
tests/ui/test-attrs/inaccessible-test-modules.stderr
tests/ui/traits/copy-impl-cannot-normalize.stderr
tests/ui/traits/copy-is-not-modulo-regions.not_static.stderr [new file with mode: 0644]
tests/ui/traits/copy-is-not-modulo-regions.rs [new file with mode: 0644]
tests/ui/traits/copy-requires-self-wf.rs [new file with mode: 0644]
tests/ui/traits/issue-106072.rs [new file with mode: 0644]
tests/ui/traits/issue-106072.stderr [new file with mode: 0644]
tests/ui/traits/issue-50480.rs
tests/ui/traits/issue-50480.stderr
tests/ui/traits/new-solver/fn-trait-closure.rs [new file with mode: 0644]
tests/ui/traits/new-solver/fn-trait.rs [new file with mode: 0644]
tests/ui/traits/new-solver/pointer-sized.rs [new file with mode: 0644]
tests/ui/traits/new-solver/pointer-sized.stderr [new file with mode: 0644]
tests/ui/type-alias-impl-trait/bound_reduction2.rs
tests/ui/type-alias-impl-trait/bound_reduction2.stderr
tests/ui/type-alias-impl-trait/generic_nondefining_use.rs
tests/ui/type-alias-impl-trait/generic_nondefining_use.stderr
tests/ui/type-alias-impl-trait/generic_type_does_not_live_long_enough.rs
tests/ui/type-alias-impl-trait/generic_type_does_not_live_long_enough.stderr
tests/ui/type-alias-impl-trait/issue-104817.rs [new file with mode: 0644]
tests/ui/type-alias-impl-trait/issue-104817.stock.stderr [new file with mode: 0644]
tests/ui/type-alias-impl-trait/issue-53092-2.rs
tests/ui/type-alias-impl-trait/issue-53092-2.stderr
tests/ui/type-alias-impl-trait/issue-60564.rs
tests/ui/type-alias-impl-trait/issue-60564.stderr
tests/ui/type-alias-impl-trait/issue-68368-non-defining-use-2.rs
tests/ui/type-alias-impl-trait/issue-68368-non-defining-use-2.stderr
tests/ui/type-alias-impl-trait/issue-68368-non-defining-use.rs
tests/ui/type-alias-impl-trait/issue-68368-non-defining-use.stderr
tests/ui/type-alias-impl-trait/issue-69136-inner-lifetime-resolve-error.rs
tests/ui/type-alias-impl-trait/issue-69136-inner-lifetime-resolve-error.stderr
tests/ui/type-alias-impl-trait/no_inferrable_concrete_type.rs
tests/ui/type-alias-impl-trait/no_inferrable_concrete_type.stderr
tests/ui/type-alias-impl-trait/outlives-bound-var.rs [new file with mode: 0644]
tests/ui/type/type-check/cannot_infer_local_or_vec_in_tuples.stderr
tests/ui/typeck/issue-104513-ice.stderr
tests/ui/unresolved/unresolved-candidates.stderr
tests/ui/variance/variance-associated-consts.stderr
tests/ui/variance/variance-associated-types.stderr
tests/ui/variance/variance-object-types.stderr
tests/ui/variance/variance-regions-direct.stderr
tests/ui/variance/variance-regions-indirect.stderr
tests/ui/variance/variance-trait-bounds.stderr
tests/ui/variance/variance-trait-object-bound.stderr
tests/ui/variance/variance-types-bounds.stderr
tests/ui/variance/variance-types.stderr
triagebot.toml

index 82048f800d0e889e3318e59f48043e6e515f529f..5f77656e5c189957be0dca0cb1047f2195795a4a 100644 (file)
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -291,6 +291,14 @@ jobs:
           - name: x86_64-gnu-distcheck
             os: ubuntu-20.04-xl
             env: {}
+          - name: x86_64-gnu-llvm-15
+            env:
+              RUST_BACKTRACE: 1
+            os: ubuntu-20.04-xl
+          - name: x86_64-gnu-llvm-14
+            env:
+              RUST_BACKTRACE: 1
+            os: ubuntu-20.04-xl
           - name: x86_64-gnu-llvm-13
             env:
               RUST_BACKTRACE: 1
index 022cdd0fd50c161b9c3f0c2756fa5a9cd608818f..8ed692989ccbf73baf3dbe06012380237c5b3a56 100644 (file)
--- a/.mailmap
+++ b/.mailmap
@@ -15,6 +15,7 @@ Adrien Tétar <adri-from-59@hotmail.fr>
 Ahmed Charles <ahmedcharles@gmail.com> <acharles@outlook.com>
 Alan Egerton <eggyal@gmail.com>
 Alan Stoate <alan.stoate@gmail.com>
+Albert Larsan <albert.larsan@gmail.com> Albert Larsan <74931857+albertlarsan68@users.noreply.github.com>
 Alessandro Decina <alessandro.d@gmail.com>
 Alex Burka <durka42+github@gmail.com> Alex Burka <aburka@seas.upenn.edu>
 Alex Hansen <ahansen2@trinity.edu>
@@ -324,6 +325,7 @@ Lennart Kudling <github@kudling.de>
 Léo Lanteri Thauvin <leseulartichaut@gmail.com>
 Léo Lanteri Thauvin <leseulartichaut@gmail.com> <38361244+LeSeulArtichaut@users.noreply.github.com>
 Léo Testard <leo.testard@gmail.com>
+León Orell Valerian Liehr <me@fmease.dev> <liehr.exchange@gmx.net>
 Leonardo Yvens <leoyvens@gmail.com>
 Liigo Zhuang <liigo@qq.com>
 Lily Ballard <lily@ballards.net> <kevin@sb.org>
index 5511d301775590f8f9cec20e81a88d0b4afdae2c..cc1b5dcf6a76e5571d869c6363c1732d3233b1f9 100644 (file)
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -351,7 +351,7 @@ dependencies = [
  "cargo-test-macro",
  "cargo-test-support",
  "cargo-util",
- "clap 4.0.32",
+ "clap 4.1.1",
  "crates-io",
  "curl",
  "curl-sys",
@@ -551,9 +551,9 @@ version = "0.1.0"
 
 [[package]]
 name = "cc"
-version = "1.0.76"
+version = "1.0.77"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "76a284da2e6fe2092f2353e51713435363112dfd60030e22add80be333fb928f"
+checksum = "e9f73505338f7d905b19d18738976aae232eb46b8efc15554ffc56deb5d9ebe4"
 dependencies = [
  "jobserver",
 ]
@@ -655,12 +655,12 @@ dependencies = [
 
 [[package]]
 name = "clap"
-version = "4.0.32"
+version = "4.1.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a7db700bc935f9e43e88d00b0850dae18a63773cfbec6d8e070fccf7fef89a39"
+checksum = "4ec7a4128863c188deefe750ac1d1dfe66c236909f845af04beed823638dc1b2"
 dependencies = [
  "bitflags",
- "clap_derive 4.0.21",
+ "clap_derive 4.1.0",
  "clap_lex 0.3.0",
  "is-terminal",
  "once_cell",
@@ -675,7 +675,7 @@ version = "4.0.7"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "10861370d2ba66b0f5989f83ebf35db6421713fd92351790e7fdd6c36774c56b"
 dependencies = [
- "clap 4.0.32",
+ "clap 4.1.1",
 ]
 
 [[package]]
@@ -693,9 +693,9 @@ dependencies = [
 
 [[package]]
 name = "clap_derive"
-version = "4.0.21"
+version = "4.1.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "0177313f9f02afc995627906bbd8967e2be069f5261954222dac78290c2b9014"
+checksum = "684a277d672e91966334af371f1a7b5833f9aa00b07c84e92fbce95e00208ce8"
 dependencies = [
  "heck",
  "proc-macro-error",
@@ -1799,9 +1799,9 @@ dependencies = [
 
 [[package]]
 name = "git2"
-version = "0.16.0"
+version = "0.16.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "be36bc9e0546df253c0cc41fd0af34f5e92845ad8509462ec76672fac6997f5b"
+checksum = "ccf7f68c2995f392c49fffb4f95ae2c873297830eb25c6bc4c114ce8f4562acc"
 dependencies = [
  "bitflags",
  "libc",
@@ -2294,7 +2294,7 @@ name = "jsondoclint"
 version = "0.1.0"
 dependencies = [
  "anyhow",
- "clap 4.0.32",
+ "clap 4.1.1",
  "fs-err",
  "rustdoc-json-types",
  "serde",
@@ -2365,9 +2365,9 @@ dependencies = [
 
 [[package]]
 name = "libgit2-sys"
-version = "0.14.1+1.5.0"
+version = "0.14.2+1.5.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4a07fb2692bc3593bda59de45a502bb3071659f2c515e28c71e728306b038e17"
+checksum = "7f3d95f6b51075fe9810a7ae22c7095f12b98005ab364d8544797a825ce946a4"
 dependencies = [
  "cc",
  "libc",
@@ -2557,7 +2557,7 @@ dependencies = [
  "ammonia",
  "anyhow",
  "chrono",
- "clap 4.0.32",
+ "clap 4.1.1",
  "clap_complete",
  "elasticlunr-rs",
  "env_logger 0.10.0",
@@ -3528,7 +3528,7 @@ dependencies = [
 name = "rustbook"
 version = "0.1.0"
 dependencies = [
- "clap 4.0.32",
+ "clap 4.1.1",
  "env_logger 0.7.1",
  "mdbook",
 ]
@@ -4783,6 +4783,7 @@ dependencies = [
  "rustc_middle",
  "rustc_parse_format",
  "rustc_query_system",
+ "rustc_serialize",
  "rustc_session",
  "rustc_span",
  "rustc_target",
@@ -5607,6 +5608,7 @@ dependencies = [
  "lazy_static",
  "miropt-test-tools",
  "regex",
+ "semver",
  "termcolor",
  "walkdir",
 ]
index ac39435a8c7fb9338a0f278452e2d8ff8afa525d..0eb7c4b266a9f3a529e1b3c555ac2bf1dee974c1 100644 (file)
--- a/README.md
+++ b/README.md
@@ -3,10 +3,11 @@
 This is the main source code repository for [Rust]. It contains the compiler,
 standard library, and documentation.
 
-[Rust]: https://www.rust-lang.org
+[Rust]: https://www.rust-lang.org/
 
 **Note: this README is for _users_ rather than _contributors_.**
-If you wish to _contribute_ to the compiler, you should read [CONTRIBUTING.md](CONTRIBUTING.md) instead.
+If you wish to _contribute_ to the compiler, you should read
+[CONTRIBUTING.md](CONTRIBUTING.md) instead.
 
 ## Quick Start
 
@@ -20,13 +21,15 @@ Read ["Installation"] from [The Book].
 The Rust build system uses a Python script called `x.py` to build the compiler,
 which manages the bootstrapping process. It lives at the root of the project.
 
-The `x.py` command can be run directly on most Unix systems in the following format:
+The `x.py` command can be run directly on most Unix systems in the following
+format:
 
 ```sh
 ./x.py <subcommand> [flags]
 ```
 
-This is how the documentation and examples assume you are running `x.py`. Some alternative ways are:
+This is how the documentation and examples assume you are running `x.py`.
+Some alternative ways are:
 
 ```sh
 # On a Unix shell if you don't have the necessary `python3` command
@@ -39,8 +42,8 @@ x.py <subcommand> [flags]
 python x.py <subcommand> [flags]
 ```
 
-More information about `x.py` can be found
-by running it with the `--help` flag or reading the [rustc dev guide][rustcguidebuild].
+More information about `x.py` can be found by running it with the `--help` flag
+or reading the [rustc dev guide][rustcguidebuild].
 
 [gettingstarted]: https://rustc-dev-guide.rust-lang.org/getting-started.html
 [rustcguidebuild]: https://rustc-dev-guide.rust-lang.org/building/how-to-build-and-run.html
@@ -49,24 +52,29 @@ by running it with the `--help` flag or reading the [rustc dev guide][rustcguide
 
 Make sure you have installed the dependencies:
 
-   * `python` 3 or 2.7
-   * `git`
-   * A C compiler (when building for the host, `cc` is enough; cross-compiling may need additional compilers)
-   * `curl` (not needed on Windows)
-   * `pkg-config` if you are compiling on Linux and targeting Linux
-   * `libiconv` (already included with glibc on Debian-based distros)
+* `python` 3 or 2.7
+* `git`
+* A C compiler (when building for the host, `cc` is enough; cross-compiling may
+  need additional compilers)
+* `curl` (not needed on Windows)
+* `pkg-config` if you are compiling on Linux and targeting Linux
+* `libiconv` (already included with glibc on Debian-based distros)
 
-To build cargo, you'll also need OpenSSL (`libssl-dev` or `openssl-devel` on most Unix distros).
+To build Cargo, you'll also need OpenSSL (`libssl-dev` or `openssl-devel` on
+most Unix distros).
 
 If building LLVM from source, you'll need additional tools:
 
 * `g++`, `clang++`, or MSVC with versions listed on
   [LLVM's documentation](https://llvm.org/docs/GettingStarted.html#host-c-toolchain-both-compiler-and-standard-library)
-* `ninja`, or GNU `make` 3.81 or later (ninja is recommended, especially on Windows)
+* `ninja`, or GNU `make` 3.81 or later (Ninja is recommended, especially on
+  Windows)
 * `cmake` 3.13.4 or later
-* `libstdc++-static` may be required on some Linux distributions such as Fedora and Ubuntu
+* `libstdc++-static` may be required on some Linux distributions such as Fedora
+  and Ubuntu
 
-On tier 1 or tier 2 with host tools platforms, you can also choose to download LLVM by setting `llvm.download-ci-llvm = true`.
+On tier 1 or tier 2 with host tools platforms, you can also choose to download
+LLVM by setting `llvm.download-ci-llvm = true`.
 Otherwise, you'll need LLVM installed and `llvm-config` in your path.
 See [the rustc-dev-guide for more info][sysllvm].
 
@@ -86,34 +94,37 @@ See [the rustc-dev-guide for more info][sysllvm].
 
 2. Configure the build settings:
 
-    The Rust build system uses a file named `config.toml` in the root of the
-    source tree to determine various configuration settings for the build.
-    Set up the defaults intended for distros to get started. You can see a full list of options
-    in `config.toml.example`.
+   The Rust build system uses a file named `config.toml` in the root of the
+   source tree to determine various configuration settings for the build.
+   Set up the defaults intended for distros to get started. You can see a full
+   list of options in `config.toml.example`.
 
-    ```sh
-    printf 'profile = "user" \nchangelog-seen = 2 \n' > config.toml
-    ```
+   ```sh
+   printf 'profile = "user" \nchangelog-seen = 2 \n' > config.toml
+   ```
 
-    If you plan to use `x.py install` to create an installation, it is recommended
-    that you set the `prefix` value in the `[install]` section to a directory.
+   If you plan to use `x.py install` to create an installation, it is
+   recommended that you set the `prefix` value in the `[install]` section to a
+   directory.
 
 3. Build and install:
 
-    ```sh
-    ./x.py build && ./x.py install
-    ```
+   ```sh
+   ./x.py build && ./x.py install
+   ```
 
-    When complete, `./x.py install` will place several programs into
-    `$PREFIX/bin`: `rustc`, the Rust compiler, and `rustdoc`, the
-    API-documentation tool. If you've set `profile = "user"` or `build.extended = true`, it will
-    also include [Cargo], Rust's package manager.
+   When complete, `./x.py install` will place several programs into
+   `$PREFIX/bin`: `rustc`, the Rust compiler, and `rustdoc`, the
+   API-documentation tool. If you've set `profile = "user"` or
+   `build.extended = true`, it will also include [Cargo], Rust's package
+   manager.
 
 [Cargo]: https://github.com/rust-lang/cargo
 
 ### Building on Windows
 
-On Windows, we suggest using [winget] to install dependencies by running the following in a terminal:
+On Windows, we suggest using [winget] to install dependencies by running the
+following in a terminal:
 
 ```powershell
 winget install -e Python.Python.3
@@ -121,17 +132,19 @@ winget install -e Kitware.CMake
 winget install -e Git.Git
 ```
 
-Then edit your system's `PATH` variable and add: `C:\Program Files\CMake\bin`. See
-[this guide on editing the system `PATH`](https://www.java.com/en/download/help/path.html) from the
-Java documentation.
+Then edit your system's `PATH` variable and add: `C:\Program Files\CMake\bin`.
+See
+[this guide on editing the system `PATH`](https://www.java.com/en/download/help/path.html)
+from the Java documentation.
 
 [winget]: https://github.com/microsoft/winget-cli
 
 There are two prominent ABIs in use on Windows: the native (MSVC) ABI used by
 Visual Studio and the GNU ABI used by the GCC toolchain. Which version of Rust
 you need depends largely on what C/C++ libraries you want to interoperate with.
-Use the MSVC build of Rust to interop with software produced by Visual Studio and
-the GNU build to interop with GNU software built using the MinGW/MSYS2 toolchain.
+Use the MSVC build of Rust to interop with software produced by Visual Studio
+and the GNU build to interop with GNU software built using the MinGW/MSYS2
+toolchain.
 
 #### MinGW
 
@@ -144,7 +157,7 @@ the GNU build to interop with GNU software built using the MinGW/MSYS2 toolchain
 2. Run `mingw32_shell.bat` or `mingw64_shell.bat` from the MSYS2 installation
    directory (e.g. `C:\msys64`), depending on whether you want 32-bit or 64-bit
    Rust. (As of the latest version of MSYS2 you have to run `msys2_shell.cmd
-   -mingw32` or `msys2_shell.cmd -mingw64` from the command line instead)
+   -mingw32` or `msys2_shell.cmd -mingw64` from the command line instead.)
 
 3. From this terminal, install the required tools:
 
@@ -153,11 +166,11 @@ the GNU build to interop with GNU software built using the MinGW/MSYS2 toolchain
    pacman -Sy pacman-mirrors
 
    # Install build tools needed for Rust. If you're building a 32-bit compiler,
-   # then replace "x86_64" below with "i686". If you've already got git, python,
-   # or CMake installed and in PATH you can remove them from this list. Note
-   # that it is important that you do **not** use the 'python2', 'cmake' and 'ninja'
-   # packages from the 'msys2' subsystem. The build has historically been known
-   # to fail with these packages.
+   # then replace "x86_64" below with "i686". If you've already got Git, Python,
+   # or CMake installed and in PATH you can remove them from this list.
+   # Note that it is important that you do **not** use the 'python2', 'cmake',
+   # and 'ninja' packages from the 'msys2' subsystem.
+   # The build has historically been known to fail with these packages.
    pacman -S git \
                make \
                diffutils \
@@ -178,12 +191,12 @@ the GNU build to interop with GNU software built using the MinGW/MSYS2 toolchain
 
 MSVC builds of Rust additionally require an installation of Visual Studio 2017
 (or later) so `rustc` can use its linker.  The simplest way is to get
-[Visual Studio], check the “C++ build tools” and “Windows 10 SDK” workload.
+[Visual Studio], check the "C++ build tools" and "Windows 10 SDK" workload.
 
 [Visual Studio]: https://visualstudio.microsoft.com/downloads/
 
-(If you're installing cmake yourself, be careful that “C++ CMake tools for
-Windows” doesn't get included under “Individual components”.)
+(If you're installing CMake yourself, be careful that "C++ CMake tools for
+Windows" doesn't get included under "Individual components".)
 
 With these dependencies installed, you can build the compiler in a `cmd.exe`
 shell with:
@@ -192,10 +205,11 @@ shell with:
 python x.py build
 ```
 
-Right now, building Rust only works with some known versions of Visual Studio. If
-you have a more recent version installed and the build system doesn't understand,
-you may need to force rustbuild to use an older version. This can be done
-by manually calling the appropriate vcvars file before running the bootstrap.
+Right now, building Rust only works with some known versions of Visual Studio.
+If you have a more recent version installed and the build system doesn't
+understand, you may need to force rustbuild to use an older version.
+This can be done by manually calling the appropriate vcvars file before running
+the bootstrap.
 
 ```batch
 CALL "C:\Program Files (x86)\Microsoft Visual Studio\2019\Community\VC\Auxiliary\Build\vcvars64.bat"
@@ -215,9 +229,9 @@ Windows build triples are:
     - `x86_64-pc-windows-msvc`
 
 The build triple can be specified by either specifying `--build=<triple>` when
-invoking `x.py` commands, or by creating a `config.toml` file (as described
-in [Installing From Source](#installing-from-source)), and modifying the
-`build` option under the `[build]` section.
+invoking `x.py` commands, or by creating a `config.toml` file (as described in
+[Installing from Source](#installing-from-source)), and modifying the `build`
+option under the `[build]` section.
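
To make the two options concrete, here is a small sketch; the triple is only an
example, and the config snippet is shown as file contents in a comment.

```sh
# Option 1: pass the build triple on the command line.
python x.py build --build=x86_64-pc-windows-msvc

# Option 2: record it once in config.toml and invoke x.py normally.
#   [build]
#   build = "x86_64-pc-windows-msvc"
python x.py build
```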
 
 ### Configure and Make
 
@@ -229,33 +243,35 @@ configure script and makefile (the latter of which just invokes `x.py`).
 make && sudo make install
 ```
 
-`configure` generates a `config.toml` which can also be used with normal `x.py` invocations.
+`configure` generates a `config.toml` which can also be used with normal `x.py`
+invocations.
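
Because `configure` only writes a `config.toml`, the two workflows can be
mixed; a minimal sketch:

```sh
./configure                      # writes config.toml in the source root
make && sudo make install        # the makefile just invokes x.py

# ...or keep the generated config.toml and drive the build with x.py directly:
./x.py build && ./x.py install
```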
 
 ## Building Documentation
 
-If you’d like to build the documentation, it’s almost the same:
+If you'd like to build the documentation, it's almost the same:
 
 ```sh
 ./x.py doc
 ```
 
 The generated documentation will appear under `doc` in the `build` directory for
-the ABI used. I.e., if the ABI was `x86_64-pc-windows-msvc`, the directory will be
-`build\x86_64-pc-windows-msvc\doc`.
+the ABI used. That is, if the ABI was `x86_64-pc-windows-msvc`, the directory
+will be `build\x86_64-pc-windows-msvc\doc`.
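
For example, on a Linux host the rendered documentation can be opened straight
from the build tree; the triple and the `index.html` entry point below are
illustrative assumptions rather than guaranteed paths.

```sh
./x.py doc
# Docs land under build/<triple>/doc; the exact entry page may vary.
xdg-open build/x86_64-unknown-linux-gnu/doc/index.html
```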
 
 ## Notes
 
-Since the Rust compiler is written in Rust, it must be built by a
-precompiled "snapshot" version of itself (made in an earlier stage of
-development). As such, source builds require an Internet connection to
-fetch snapshots, and an OS that can execute the available snapshot binaries.
+Since the Rust compiler is written in Rust, it must be built by a precompiled
+"snapshot" version of itself (made in an earlier stage of development).
+As such, source builds require an Internet connection to fetch snapshots, and an
+OS that can execute the available snapshot binaries.
 
-See https://doc.rust-lang.org/nightly/rustc/platform-support.html for a list of supported platforms.
-Only "host tools" platforms have a pre-compiled snapshot binary available; to compile for a platform
-without host tools you must cross-compile.
+See https://doc.rust-lang.org/nightly/rustc/platform-support.html for a list of
+supported platforms.
+Only "host tools" platforms have a pre-compiled snapshot binary available; to
+compile for a platform without host tools you must cross-compile.
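
As a sketch of cross-compiling for a platform without host tools (the
`--target` flag and the triple below are assumptions for illustration; check
`./x.py build --help` for the exact options):

```sh
# Build the standard library for a target that has no snapshot binaries.
# The triple is only an example.
./x.py build --target=aarch64-unknown-none library
```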
 
-You may find that other platforms work, but these are our officially
-supported build environments that are most likely to work.
+You may find that other platforms work, but these are our officially supported
+build environments that are most likely to work.
 
 ## Getting Help
 
@@ -267,9 +283,9 @@ See [CONTRIBUTING.md](CONTRIBUTING.md).
 
 ## License
 
-Rust is primarily distributed under the terms of both the MIT license
-and the Apache License (Version 2.0), with portions covered by various
-BSD-like licenses.
+Rust is primarily distributed under the terms of both the MIT license and the
+Apache License (Version 2.0), with portions covered by various BSD-like
+licenses.
 
 See [LICENSE-APACHE](LICENSE-APACHE), [LICENSE-MIT](LICENSE-MIT), and
 [COPYRIGHT](COPYRIGHT) for details.
@@ -277,13 +293,14 @@ See [LICENSE-APACHE](LICENSE-APACHE), [LICENSE-MIT](LICENSE-MIT), and
 ## Trademark
 
 [The Rust Foundation][rust-foundation] owns and protects the Rust and Cargo
-trademarks and logos (the “Rust Trademarks”).
+trademarks and logos (the "Rust Trademarks").
 
-If you want to use these names or brands, please read the [media guide][media-guide].
+If you want to use these names or brands, please read the
+[media guide][media-guide].
 
 Third-party logos may be subject to third-party copyrights and trademarks. See
 [Licenses][policies-licenses] for details.
 
 [rust-foundation]: https://foundation.rust-lang.org/
-[media-guide]: https://www.rust-lang.org/policies/media-guide
+[media-guide]: https://foundation.rust-lang.org/policies/logo-policy-and-media-guide/
 [policies-licenses]: https://www.rust-lang.org/policies/licenses
index 4582d3c6badf99491254930d6e7c0f3ee6d7cff8..f4cb459f32fddee1bcc78c465ae6916b8a92736e 100644 (file)
@@ -1100,7 +1100,7 @@ pub enum FieldsShape {
         /// named `inverse_memory_index`.
         ///
         // FIXME(eddyb) build a better abstraction for permutations, if possible.
-        // FIXME(camlorn) also consider small vector  optimization here.
+        // FIXME(camlorn) also consider small vector optimization here.
         memory_index: Vec<u32>,
     },
 }
@@ -1263,8 +1263,8 @@ pub enum Variants<V: Idx> {
 
     /// Enum-likes with more than one inhabited variant: each variant comes with
     /// a *discriminant* (usually the same as the variant index but the user can
-    /// assign explicit discriminant values).  That discriminant is encoded
-    /// as a *tag* on the machine.  The layout of each variant is
+    /// assign explicit discriminant values). That discriminant is encoded
+    /// as a *tag* on the machine. The layout of each variant is
     /// a struct, and they all have space reserved for the tag.
     /// For enums, the tag is the sole field of the layout.
     Multiple {
index 7de594719ddc44568872ea3ee2516da728a96ada..9317579f70dd5967cb5e80382c8cab541c58ecea 100644 (file)
@@ -2032,7 +2032,8 @@ fn clone(&self) -> Self {
 impl Ty {
     pub fn peel_refs(&self) -> &Self {
         let mut final_ty = self;
-        while let TyKind::Ref(_, MutTy { ty, .. }) = &final_ty.kind {
+        while let TyKind::Ref(_, MutTy { ty, .. }) | TyKind::Ptr(MutTy { ty, .. }) = &final_ty.kind
+        {
             final_ty = ty;
         }
         final_ty
index 819f1884a06922ca5861679de032dde2b2db47a7..4f7099c7be8a6561492a22b210643768b9db4a2a 100644 (file)
@@ -304,7 +304,7 @@ pub fn order(self) -> i8 {
             | ExprPrecedence::Yeet => PREC_JUMP,
 
             // `Range` claims to have higher precedence than `Assign`, but `x .. x = x` fails to
-            // parse, instead of parsing as `(x .. x) = x`.  Giving `Range` a lower precedence
+            // parse, instead of parsing as `(x .. x) = x`. Giving `Range` a lower precedence
             // ensures that `pprust` will add parentheses in the right places to get the desired
             // parse.
             ExprPrecedence::Range => PREC_RANGE,
index fe0bd43815d7f4393d71c2ec91c033f19557964b..63033085bec674de5bbc4159cf980c8f5a5ed289 100644 (file)
@@ -38,7 +38,7 @@ pub(super) fn index_hir<'hir>(
 ) -> (IndexVec<ItemLocalId, Option<ParentedNode<'hir>>>, FxHashMap<LocalDefId, ItemLocalId>) {
     let mut nodes = IndexVec::new();
     // This node's parent should never be accessed: the owner's parent is computed by the
-    // hir_owner_parent query.  Make it invalid (= ItemLocalId::MAX) to force an ICE whenever it is
+    // hir_owner_parent query. Make it invalid (= ItemLocalId::MAX) to force an ICE whenever it is
     // used.
     nodes.push(Some(ParentedNode { parent: ItemLocalId::INVALID, node: item.into() }));
     let mut collector = NodeCollector {
index 065779d0670c687b86454b1e90fea8a30f237f2c..5d2589cb2b2f7817a8603a1319eed39a727fd597 100644 (file)
@@ -523,7 +523,7 @@ fn lower_use_tree(
                 //
                 // The first two are produced by recursively invoking
                 // `lower_use_tree` (and indeed there may be things
-                // like `use foo::{a::{b, c}}` and so forth).  They
+                // like `use foo::{a::{b, c}}` and so forth). They
                 // wind up being directly added to
                 // `self.items`. However, the structure of this
                 // function also requires us to return one item, and
index 41d4a5679f1a0b2822396af1256c20e5497350a0..bc6d2cf12c78aaaeb80e5a50aa936f64cf49d8dd 100644 (file)
@@ -662,7 +662,7 @@ fn make_owner_info(&mut self, node: hir::OwnerNode<'hir>) -> &'hir hir::OwnerInf
         self.arena.alloc(hir::OwnerInfo { nodes, parenting, attrs, trait_map })
     }
 
-    /// Hash the HIR node twice, one deep and one shallow hash.  This allows to differentiate
+    /// Hash the HIR node twice, one deep and one shallow hash. This allows to differentiate
     /// queries which depend on the full HIR tree and those which only depend on the item signature.
     fn hash_owner(
         &mut self,
@@ -1193,7 +1193,7 @@ fn lower_path_ty(
         itctx: &ImplTraitContext,
     ) -> hir::Ty<'hir> {
         // Check whether we should interpret this as a bare trait object.
-        // This check mirrors the one in late resolution.  We only introduce this special case in
+        // This check mirrors the one in late resolution. We only introduce this special case in
         // the rare occurrence we need to lower `Fresh` anonymous lifetimes.
         // The other cases when a qpath should be opportunistically made a trait object are handled
         // by `ty_path`.
@@ -1918,7 +1918,7 @@ fn lower_async_fn_ret_ty(
             this.with_remapping(new_remapping, |this| {
                 // We have to be careful to get elision right here. The
                 // idea is that we create a lifetime parameter for each
-                // lifetime in the return type.  So, given a return type
+                // lifetime in the return type. So, given a return type
                 // like `async fn foo(..) -> &[&u32]`, we lower to `impl
                 // Future<Output = &'1 [ &'2 u32 ]>`.
                 //
@@ -2012,7 +2012,7 @@ fn lower_async_fn_ret_ty(
 
         // Create the `Foo<...>` reference itself. Note that the `type
         // Foo = impl Trait` is, internally, created as a child of the
-        // async fn, so the *type parameters* are inherited.  It's
+        // async fn, so the *type parameters* are inherited. It's
         // only the lifetime parameters that we must supply.
         let opaque_ty_ref = hir::TyKind::OpaqueDef(
             hir::ItemId { owner_id: hir::OwnerId { def_id: opaque_ty_def_id } },
index 55ea12d25ea2ceead70b4c4e6c4de49548a84852..902b4b1a1ecfefb5fa53fbbfd499f2a2b0329dfb 100644 (file)
@@ -1100,16 +1100,17 @@ fn visit_item(&mut self, item: &'a Item) {
                         replace_span: self.ending_semi_or_hi(item.span),
                         extern_block_suggestion: match sig.header.ext {
                             Extern::None => None,
-                            Extern::Implicit(start_span) => Some(ExternBlockSuggestion {
+                            Extern::Implicit(start_span) => Some(ExternBlockSuggestion::Implicit {
                                 start_span,
                                 end_span: item.span.shrink_to_hi(),
-                                abi: None,
-                            }),
-                            Extern::Explicit(abi, start_span) => Some(ExternBlockSuggestion {
-                                start_span,
-                                end_span: item.span.shrink_to_hi(),
-                                abi: Some(abi.symbol_unescaped),
                             }),
+                            Extern::Explicit(abi, start_span) => {
+                                Some(ExternBlockSuggestion::Explicit {
+                                    start_span,
+                                    end_span: item.span.shrink_to_hi(),
+                                    abi: abi.symbol_unescaped,
+                                })
+                            }
                         },
                     });
                 }
index 59f582f10d989be097f5f44eb6ad86827dfd8100..09e262452b11d5819d0f743fa53b0ca1f264f2cd 100644 (file)
@@ -1,6 +1,5 @@
 //! Errors emitted by ast_passes.
 
-use rustc_errors::{fluent, AddToDiagnostic, Applicability, Diagnostic, SubdiagnosticMessage};
 use rustc_macros::{Diagnostic, Subdiagnostic};
 use rustc_span::{Span, Symbol};
 
@@ -207,28 +206,21 @@ pub struct FnWithoutBody {
     pub extern_block_suggestion: Option<ExternBlockSuggestion>,
 }
 
-pub struct ExternBlockSuggestion {
-    pub start_span: Span,
-    pub end_span: Span,
-    pub abi: Option<Symbol>,
-}
-
-impl AddToDiagnostic for ExternBlockSuggestion {
-    fn add_to_diagnostic_with<F>(self, diag: &mut Diagnostic, _: F)
-    where
-        F: Fn(&mut Diagnostic, SubdiagnosticMessage) -> SubdiagnosticMessage,
-    {
-        let start_suggestion = if let Some(abi) = self.abi {
-            format!("extern \"{}\" {{", abi)
-        } else {
-            "extern {".to_owned()
-        };
-        let end_suggestion = " }".to_owned();
-
-        diag.multipart_suggestion(
-            fluent::extern_block_suggestion,
-            vec![(self.start_span, start_suggestion), (self.end_span, end_suggestion)],
-            Applicability::MaybeIncorrect,
-        );
-    }
+#[derive(Subdiagnostic)]
+pub enum ExternBlockSuggestion {
+    #[multipart_suggestion(ast_passes_extern_block_suggestion, applicability = "maybe-incorrect")]
+    Implicit {
+        #[suggestion_part(code = "extern {{")]
+        start_span: Span,
+        #[suggestion_part(code = " }}")]
+        end_span: Span,
+    },
+    #[multipart_suggestion(ast_passes_extern_block_suggestion, applicability = "maybe-incorrect")]
+    Explicit {
+        #[suggestion_part(code = "extern \"{abi}\" {{")]
+        start_span: Span,
+        #[suggestion_part(code = " }}")]
+        end_span: Span,
+        abi: Symbol,
+    },
 }
index b125c6407d05040bb2ed2868332fc90415b1a5bf..2a18e5164a309bbb360ccae43d46565b4e83f9ff 100644 (file)
@@ -473,10 +473,10 @@ pub(super) fn print_expr_outer_attr_style(&mut self, expr: &ast::Expr, is_inline
                 self.word("]");
             }
             ast::ExprKind::Range(start, end, limits) => {
-                // Special case for `Range`.  `AssocOp` claims that `Range` has higher precedence
+                // Special case for `Range`. `AssocOp` claims that `Range` has higher precedence
                 // than `Assign`, but `x .. x = x` gives a parse error instead of `x .. (x = x)`.
                 // Here we use a fake precedence value so that any child with lower precedence than
-                // a "normal" binop gets parenthesized.  (`LOr` is the lowest-precedence binop.)
+                // a "normal" binop gets parenthesized. (`LOr` is the lowest-precedence binop.)
                 let fake_prec = AssocOp::LOr.precedence() as i8;
                 if let Some(e) = start {
                     self.print_expr_maybe_paren(e, fake_prec);
index 968c1f49b95c00ccec072fc253421936dc258f90..e5a36259fa495c0abdff6188e8dc5862499dbdbe 100644 (file)
@@ -673,40 +673,34 @@ fn suggest_borrow_fn_like(
         let tcx = self.infcx.tcx;
 
         // Find out if the predicates show that the type is a Fn or FnMut
-        let find_fn_kind_from_did = |predicates: ty::EarlyBinder<
-            &[(ty::Predicate<'tcx>, Span)],
-        >,
-                                     substs| {
-            predicates.0.iter().find_map(|(pred, _)| {
-                    let pred = if let Some(substs) = substs {
-                        predicates.rebind(*pred).subst(tcx, substs).kind().skip_binder()
-                    } else {
-                        pred.kind().skip_binder()
-                    };
-                    if let ty::PredicateKind::Clause(ty::Clause::Trait(pred)) = pred && pred.self_ty() == ty {
-                    if Some(pred.def_id()) == tcx.lang_items().fn_trait() {
-                        return Some(hir::Mutability::Not);
-                    } else if Some(pred.def_id()) == tcx.lang_items().fn_mut_trait() {
-                        return Some(hir::Mutability::Mut);
-                    }
+        let find_fn_kind_from_did = |(pred, _): (ty::Predicate<'tcx>, _)| {
+            if let ty::PredicateKind::Clause(ty::Clause::Trait(pred)) = pred.kind().skip_binder()
+                && pred.self_ty() == ty
+            {
+                if Some(pred.def_id()) == tcx.lang_items().fn_trait() {
+                    return Some(hir::Mutability::Not);
+                } else if Some(pred.def_id()) == tcx.lang_items().fn_mut_trait() {
+                    return Some(hir::Mutability::Mut);
                 }
-                    None
-                })
+            }
+            None
         };
 
         // If the type is opaque/param/closure, and it is Fn or FnMut, let's suggest (mutably)
         // borrowing the type, since `&mut F: FnMut` iff `F: FnMut` and similarly for `Fn`.
         // These types seem reasonably opaque enough that they could be substituted with their
         // borrowed variants in a function body when we see a move error.
-        let borrow_level = match ty.kind() {
-            ty::Param(_) => find_fn_kind_from_did(
-                tcx.bound_explicit_predicates_of(self.mir_def_id().to_def_id())
-                    .map_bound(|p| p.predicates),
-                None,
-            ),
-            ty::Alias(ty::Opaque, ty::AliasTy { def_id, substs, .. }) => {
-                find_fn_kind_from_did(tcx.bound_explicit_item_bounds(*def_id), Some(*substs))
-            }
+        let borrow_level = match *ty.kind() {
+            ty::Param(_) => tcx
+                .explicit_predicates_of(self.mir_def_id().to_def_id())
+                .predicates
+                .iter()
+                .copied()
+                .find_map(find_fn_kind_from_did),
+            ty::Alias(ty::Opaque, ty::AliasTy { def_id, substs, .. }) => tcx
+                .bound_explicit_item_bounds(def_id)
+                .subst_iter_copied(tcx, substs)
+                .find_map(find_fn_kind_from_did),
             ty::Closure(_, substs) => match substs.as_closure().kind() {
                 ty::ClosureKind::Fn => Some(hir::Mutability::Not),
                 ty::ClosureKind::FnMut => Some(hir::Mutability::Mut),
@@ -2199,7 +2193,7 @@ fn predecessor_locations<'tcx, 'a>(
         let mut back_edge_stack = Vec::new();
 
         predecessor_locations(self.body, location).for_each(|predecessor| {
-            if location.dominates(predecessor, &self.dominators) {
+            if location.dominates(predecessor, self.dominators()) {
                 back_edge_stack.push(predecessor)
             } else {
                 stack.push(predecessor);
@@ -2311,7 +2305,7 @@ fn predecessor_locations<'tcx, 'a>(
 
             let mut has_predecessor = false;
             predecessor_locations(self.body, location).for_each(|predecessor| {
-                if location.dominates(predecessor, &self.dominators) {
+                if location.dominates(predecessor, self.dominators()) {
                     back_edge_stack.push(predecessor)
                 } else {
                     stack.push(predecessor);
index f3050a6ef3f07f7964cf58a23bc3a858dd05b717..187861ba127bd93e36af2707722146eee40ff574 100644 (file)
@@ -5,8 +5,13 @@
 use rustc_data_structures::fx::FxIndexSet;
 use rustc_errors::{Applicability, Diagnostic, DiagnosticBuilder, ErrorGuaranteed, MultiSpan};
 use rustc_hir as hir;
+use rustc_hir::def::Res::Def;
 use rustc_hir::def_id::DefId;
 use rustc_hir::intravisit::Visitor;
+use rustc_hir::GenericBound::Trait;
+use rustc_hir::QPath::Resolved;
+use rustc_hir::WherePredicate::BoundPredicate;
+use rustc_hir::{PolyTraitRef, TyKind, WhereBoundPredicate};
 use rustc_infer::infer::{
     error_reporting::nice_region_error::{
         self, find_anon_type, find_param_with_region, suggest_adding_lifetime_params,
@@ -186,6 +191,101 @@ fn is_closure_fn_mut(&self, fr: RegionVid) -> bool {
         false
     }
 
+    // For generic associated types (GATs) for which a `'static` requirement is
+    // implied by higher-ranked trait bounds (HRTB), try to locate the span of
+    // the trait and the span of the bound on that trait, so we can suggest
+    // adding a `'static` lifetime.
+    fn suggest_static_lifetime_for_gat_from_hrtb(
+        &self,
+        diag: &mut DiagnosticBuilder<'_, ErrorGuaranteed>,
+        lower_bound: RegionVid,
+    ) {
+        let mut suggestions = vec![];
+        let hir = self.infcx.tcx.hir();
+
+        // find generic associated types in the given region 'lower_bound'
+        let gat_id_and_generics = self
+            .regioncx
+            .placeholders_contained_in(lower_bound)
+            .map(|placeholder| {
+                if let Some(id) = placeholder.name.get_id()
+                    && let Some(placeholder_id) = id.as_local()
+                    && let gat_hir_id = hir.local_def_id_to_hir_id(placeholder_id)
+                    && let Some(generics_impl) = hir.get_parent(gat_hir_id).generics()
+                {
+                    Some((gat_hir_id, generics_impl))
+                } else {
+                    None
+                }
+            })
+            .collect::<Vec<_>>();
+        debug!(?gat_id_and_generics);
+
+        // find higher-ranked trait bounds bounded to the generic associated types
+        let mut hrtb_bounds = vec![];
+        gat_id_and_generics.iter().flatten().for_each(|(gat_hir_id, generics)| {
+            for pred in generics.predicates {
+                let BoundPredicate(
+                        WhereBoundPredicate {
+                            bound_generic_params,
+                            bounds,
+                            ..
+                        }) = pred else { continue; };
+                if bound_generic_params
+                    .iter()
+                    .rfind(|bgp| hir.local_def_id_to_hir_id(bgp.def_id) == *gat_hir_id)
+                    .is_some()
+                {
+                    for bound in *bounds {
+                        hrtb_bounds.push(bound);
+                    }
+                }
+            }
+        });
+        debug!(?hrtb_bounds);
+
+        hrtb_bounds.iter().for_each(|bound| {
+            let Trait(PolyTraitRef { trait_ref, span: trait_span, .. }, _) = bound else { return; };
+            diag.span_note(
+                *trait_span,
+                format!("due to current limitations in the borrow checker, this implies a `'static` lifetime")
+            );
+            let Some(generics_fn) = hir.get_generics(self.body.source.def_id().expect_local()) else { return; };
+            let Def(_, trait_res_defid) = trait_ref.path.res else { return; };
+            debug!(?generics_fn);
+            generics_fn.predicates.iter().for_each(|predicate| {
+                let BoundPredicate(
+                    WhereBoundPredicate {
+                        span: bounded_span,
+                        bounded_ty,
+                        bounds,
+                        ..
+                    }
+                ) = predicate else { return; };
+                bounds.iter().for_each(|bd| {
+                    if let Trait(PolyTraitRef { trait_ref: tr_ref, .. }, _) = bd
+                        && let Def(_, res_defid) = tr_ref.path.res
+                        && res_defid == trait_res_defid // trait id matches
+                        && let TyKind::Path(Resolved(_, path)) = bounded_ty.kind
+                        && let Def(_, defid) = path.res
+                        && generics_fn.params
+                            .iter()
+                            .rfind(|param| param.def_id.to_def_id() == defid)
+                            .is_some() {
+                            suggestions.push((bounded_span.shrink_to_hi(), format!(" + 'static")));
+                        }
+                });
+            });
+        });
+        if suggestions.len() > 0 {
+            suggestions.dedup();
+            diag.multipart_suggestion_verbose(
+                format!("consider restricting the type parameter to the `'static` lifetime"),
+                suggestions,
+                Applicability::MaybeIncorrect,
+            );
+        }
+    }
+
     /// Produces nice borrowck error diagnostics for all the errors collected in `nll_errors`.
     pub(crate) fn report_region_errors(&mut self, nll_errors: RegionErrors<'tcx>) {
         // Iterate through all the errors, producing a diagnostic for each one. The diagnostics are
@@ -223,12 +323,21 @@ pub(crate) fn report_region_errors(&mut self, nll_errors: RegionErrors<'tcx>) {
                         // to report it; we could probably handle it by
                         // iterating over the universal regions and reporting
                         // an error that multiple bounds are required.
-                        self.buffer_error(self.infcx.tcx.sess.create_err(
-                            GenericDoesNotLiveLongEnough {
+                        let mut diag =
+                            self.infcx.tcx.sess.create_err(GenericDoesNotLiveLongEnough {
                                 kind: type_test.generic_kind.to_string(),
                                 span: type_test_span,
-                            },
-                        ));
+                            });
+
+                        // Add notes and suggestions for the case where a `'static`
+                        // lifetime is implied but not specified and the generic
+                        // associated types come from higher-ranked trait bounds.
+                        self.suggest_static_lifetime_for_gat_from_hrtb(
+                            &mut diag,
+                            type_test.lower_bound,
+                        );
+
+                        self.buffer_error(diag);
                     }
                 }
 
index 278ffed07477b5b3bdf8224a7b3c79c89b2b9ace..73ea7314b75cc202c6a9091ed39ad2b7d4dae464 100644 (file)
@@ -5,6 +5,7 @@
 #![feature(let_chains)]
 #![feature(min_specialization)]
 #![feature(never_type)]
+#![feature(once_cell)]
 #![feature(rustc_attrs)]
 #![feature(stmt_expr_attributes)]
 #![feature(trusted_step)]
@@ -39,6 +40,7 @@
 
 use either::Either;
 use smallvec::SmallVec;
+use std::cell::OnceCell;
 use std::cell::RefCell;
 use std::collections::BTreeMap;
 use std::rc::Rc;
@@ -333,7 +335,7 @@ fn do_mir_borrowck<'tcx>(
                 used_mut: Default::default(),
                 used_mut_upvars: SmallVec::new(),
                 borrow_set: Rc::clone(&borrow_set),
-                dominators: Dominators::dummy(), // not used
+                dominators: Default::default(),
                 upvars: Vec::new(),
                 local_names: IndexVec::from_elem(None, &promoted_body.local_decls),
                 region_names: RefCell::default(),
@@ -346,8 +348,6 @@ fn do_mir_borrowck<'tcx>(
         };
     }
 
-    let dominators = body.basic_blocks.dominators();
-
     let mut mbcx = MirBorrowckCtxt {
         infcx,
         param_env,
@@ -364,7 +364,7 @@ fn do_mir_borrowck<'tcx>(
         used_mut: Default::default(),
         used_mut_upvars: SmallVec::new(),
         borrow_set: Rc::clone(&borrow_set),
-        dominators,
+        dominators: Default::default(),
         upvars,
         local_names,
         region_names: RefCell::default(),
@@ -534,7 +534,7 @@ struct MirBorrowckCtxt<'cx, 'tcx> {
     borrow_set: Rc<BorrowSet<'tcx>>,
 
     /// Dominators for MIR
-    dominators: Dominators<BasicBlock>,
+    dominators: OnceCell<Dominators<BasicBlock>>,
 
     /// Information about upvars not necessarily preserved in types or MIR
     upvars: Vec<Upvar<'tcx>>,
@@ -1051,7 +1051,7 @@ fn check_access_for_conflict(
 
                 (Read(kind), BorrowKind::Unique | BorrowKind::Mut { .. }) => {
                     // Reading from mere reservations of mutable-borrows is OK.
-                    if !is_active(&this.dominators, borrow, location) {
+                    if !is_active(this.dominators(), borrow, location) {
                         assert!(allow_two_phase_borrow(borrow.kind));
                         return Control::Continue;
                     }
@@ -2173,7 +2173,7 @@ fn is_mutable(
                                     // `self.foo` -- we want to double
                                     // check that the location `*self`
                                     // is mutable (i.e., this is not a
-                                    // `Fn` closure).  But if that
+                                    // `Fn` closure). But if that
                                     // check succeeds, we want to
                                     // *blame* the mutability on
                                     // `place` (that is,
@@ -2219,6 +2219,10 @@ fn is_mutable(
     fn is_upvar_field_projection(&self, place_ref: PlaceRef<'tcx>) -> Option<Field> {
         path_utils::is_upvar_field_projection(self.infcx.tcx, &self.upvars, place_ref, self.body())
     }
+
+    fn dominators(&self) -> &Dominators<BasicBlock> {
+        self.dominators.get_or_init(|| self.body.basic_blocks.dominators())
+    }
 }
 
 mod error {
index b63e286676ff48fc2129344547850f320da554d0..4af324f740aef686c98d68bd329660555744bae2 100644 (file)
@@ -109,7 +109,7 @@ impl<'tcx, R1> MemberConstraintSet<'tcx, R1>
     R1: Copy + Hash + Eq,
 {
     /// Remap the "member region" key using `map_fn`, producing a new
-    /// member constraint set.  This is used in the NLL code to map from
+    /// member constraint set. This is used in the NLL code to map from
     /// the original `RegionVid` to an scc index. In some cases, we
     /// may have multiple `R1` values mapping to the same `R2` key -- that
     /// is ok, the two sets will be merged.
@@ -158,7 +158,7 @@ pub(crate) fn all_indices(
     }
 
     /// Iterate down the constraint indices associated with a given
-    /// peek-region.  You can then use `choice_regions` and other
+    /// peek-region. You can then use `choice_regions` and other
     /// methods to access data.
     pub(crate) fn indices(
         &self,
index e379e6470623759eabab4567b6d8de519713ba92..b2d92d0dba7a4de1cb38292dc7e9cb545f880d86 100644 (file)
@@ -385,7 +385,7 @@ pub(super) fn dump_annotation<'tcx>(
 
     // When the enclosing function is tagged with `#[rustc_regions]`,
     // we dump out various bits of state as warnings. This is useful
-    // for verifying that the compiler is behaving as expected.  These
+    // for verifying that the compiler is behaving as expected. These
     // warnings focus on the closure region requirements -- for
     // viewing the intraprocedural state, the -Zdump-mir output is
     // better.
index 9f6b1fdfcb54085853bb49c34a070b95c3358ec7..85d207b2fc9a7530afd0724e001c6881d6389eab 100644 (file)
@@ -63,7 +63,7 @@ fn ignore_borrow(
                     ty::RawPtr(..) | ty::Ref(_, _, hir::Mutability::Not) => {
                         // For both derefs of raw pointers and `&T`
                         // references, the original path is `Copy` and
-                        // therefore not significant.  In particular,
+                        // therefore not significant. In particular,
                         // there is nothing the user can do to the
                         // original path that would invalidate the
                         // newly created reference -- and if there
index 89ac0dfa4d6f51c2e61225b7d27b8fda4895bb57..918fb2d69237e4cb48b125a85daab2a60f8635dc 100644 (file)
@@ -209,7 +209,7 @@ fn place_components_conflict<'tcx>(
             match (elem, &base_ty.kind(), access) {
                 (_, _, Shallow(Some(ArtificialField::ArrayLength)))
                 | (_, _, Shallow(Some(ArtificialField::ShallowBorrow))) => {
-                    // The array length is like  additional fields on the
+                    // The array length is like additional fields on the
                     // type; it does not overlap any existing data there.
                     // Furthermore, if cannot actually be a prefix of any
                     // borrowed place (at least in MIR as it is currently.)
index 308f6e19a73e86b2277985a092a5748a77827bd2..238172ea3992f5a08b2cac2f0f00b1659d67ae02 100644 (file)
@@ -527,6 +527,14 @@ pub(crate) fn region_value_str(&self, r: RegionVid) -> String {
         self.scc_values.region_value_str(scc)
     }
 
+    pub(crate) fn placeholders_contained_in<'a>(
+        &'a self,
+        r: RegionVid,
+    ) -> impl Iterator<Item = ty::PlaceholderRegion> + 'a {
+        let scc = self.constraint_sccs.scc(r.to_region_vid());
+        self.scc_values.placeholders_contained_in(scc)
+    }
+
     /// Returns access to the value of `r` for debugging purposes.
     pub(crate) fn region_universe(&self, r: RegionVid) -> ty::UniverseIndex {
         let scc = self.constraint_sccs.scc(r.to_region_vid());
@@ -680,7 +688,7 @@ fn compute_value_for_scc(&mut self, scc_a: ConstraintSccIndex) {
     /// enforce the constraint).
     ///
     /// The current value of `scc` at the time the method is invoked
-    /// is considered a *lower bound*.  If possible, we will modify
+    /// is considered a *lower bound*. If possible, we will modify
     /// the constraint to set it equal to one of the option regions.
     /// If we make any changes, returns true, else false.
     #[instrument(skip(self, member_constraint_index), level = "debug")]
@@ -959,7 +967,7 @@ fn try_promote_type_test(
             //
             // This is needed because -- particularly in the case
             // where `ur` is a local bound -- we are sometimes in a
-            // position to prove things that our caller cannot.  See
+            // position to prove things that our caller cannot. See
             // #53570 for an example.
             if self.eval_verify_bound(infcx, param_env, generic_ty, ur, &type_test.verify_bound) {
                 continue;
@@ -2035,7 +2043,7 @@ pub(crate) fn best_blame_constraint(
         //    '5: '6 ('6 is the target)
         //
         // Some of those regions are unified with `'6` (in the same
-        // SCC).  We want to screen those out. After that point, the
+        // SCC). We want to screen those out. After that point, the
         // "closest" constraint we have to the end is going to be the
         // most likely to be the point where the value escapes -- but
         // we still want to screen for an "interesting" point to
index 767f9fe39c68b7162092c6b53e2cb00383373a84..db5a67a8b442d97e13678b8262a7f2cdc197580d 100644 (file)
@@ -12,6 +12,8 @@
 use rustc_trait_selection::traits::error_reporting::TypeErrCtxtExt as _;
 use rustc_trait_selection::traits::ObligationCtxt;
 
+use crate::session_diagnostics::NonGenericOpaqueTypeParam;
+
 use super::RegionInferenceContext;
 
 impl<'tcx> RegionInferenceContext<'tcx> {
@@ -235,7 +237,7 @@ impl<'tcx> InferCtxtExt<'tcx> for InferCtxt<'tcx> {
     /// # Parameters
     ///
     /// - `def_id`, the `impl Trait` type
-    /// - `substs`, the substs  used to instantiate this opaque type
+    /// - `substs`, the substs used to instantiate this opaque type
     /// - `instantiated_ty`, the inferred type C1 -- fully resolved, lifted version of
     ///   `opaque_defn.concrete_ty`
     #[instrument(level = "debug", skip(self))]
@@ -250,7 +252,7 @@ fn infer_opaque_definition_from_instantiation(
         }
 
         let definition_ty = instantiated_ty
-            .remap_generic_params_to_declaration_params(opaque_type_key, self.tcx, false, origin)
+            .remap_generic_params_to_declaration_params(opaque_type_key, self.tcx, false)
             .ty;
 
         if !check_opaque_type_parameter_valid(
@@ -389,17 +391,13 @@ fn check_opaque_type_parameter_valid(
         } else {
             // Prevent `fn foo() -> Foo<u32>` from being defining.
             let opaque_param = opaque_generics.param_at(i, tcx);
-            tcx.sess
-                .struct_span_err(span, "non-defining opaque type use in defining scope")
-                .span_note(
-                    tcx.def_span(opaque_param.def_id),
-                    &format!(
-                        "used non-generic {} `{}` for generic parameter",
-                        opaque_param.kind.descr(),
-                        arg,
-                    ),
-                )
-                .emit();
+            let kind = opaque_param.kind.descr();
+            tcx.sess.emit_err(NonGenericOpaqueTypeParam {
+                ty: arg,
+                kind,
+                span,
+                param_span: tcx.def_span(opaque_param.def_id),
+            });
             return false;
         }
     }
index 577332c0744b84dd8447af7ab3519a33ef18abe0..23acf159240fa7bf509b44ee9683a6bac6acd87c 100644 (file)
@@ -1,6 +1,6 @@
 use rustc_errors::{IntoDiagnosticArg, MultiSpan};
 use rustc_macros::{Diagnostic, LintDiagnostic, Subdiagnostic};
-use rustc_middle::ty::Ty;
+use rustc_middle::ty::{GenericArg, Ty};
 use rustc_span::Span;
 
 use crate::diagnostics::RegionName;
@@ -240,3 +240,14 @@ pub(crate) struct MoveBorrow<'a> {
     #[label]
     pub borrow_span: Span,
 }
+
+#[derive(Diagnostic)]
+#[diag(borrowck_opaque_type_non_generic_param, code = "E0792")]
+pub(crate) struct NonGenericOpaqueTypeParam<'a, 'tcx> {
+    pub ty: GenericArg<'tcx>,
+    pub kind: &'a str,
+    #[primary_span]
+    pub span: Span,
+    #[label]
+    pub param_span: Span,
+}
index 02222c0a03cb350f9dcfdebc2b963db3f1edfcd5..11729e2c83f0b9d098788e495a6db5ea5b271ce5 100644 (file)
@@ -107,11 +107,7 @@ pub(super) fn normalize_and_prove_instantiated_predicates(
         instantiated_predicates: ty::InstantiatedPredicates<'tcx>,
         locations: Locations,
     ) {
-        for (predicate, span) in instantiated_predicates
-            .predicates
-            .into_iter()
-            .zip(instantiated_predicates.spans.into_iter())
-        {
+        for (predicate, span) in instantiated_predicates {
             debug!(?predicate);
             let category = ConstraintCategory::Predicate(span);
             let predicate = self.normalize_with_category(predicate, locations, category);
index ce7f857e27310aa64b01f6d2529c08cd96ff00de..e15d1b99ad205229e2d7b901e101da1bcc5614b5 100644 (file)
@@ -107,7 +107,7 @@ pub fn apply_closure_requirements(
         closure_substs: ty::SubstsRef<'tcx>,
     ) {
         // Extract the values of the free regions in `closure_substs`
-        // into a vector.  These are the regions that we will be
+        // into a vector. These are the regions that we will be
         // relating to one another.
         let closure_mapping = &UniversalRegions::closure_mapping(
             self.tcx,
index 09cf870bcf35af780f724bdb8a4ea55a6fca3ccd..82ff862479e81824e1f47c6606e6b54cca6e6c86 100644 (file)
@@ -98,7 +98,7 @@ pub(crate) fn non_local_upper_bound(&self, fr: RegionVid) -> RegionVid {
         let upper_bounds = self.non_local_upper_bounds(fr);
 
         // In case we find more than one, reduce to one for
-        // convenience.  This is to prevent us from generating more
+        // convenience. This is to prevent us from generating more
         // complex constraints, but it will cause spurious errors.
         let post_dom = self.inverse_outlives.mutual_immediate_postdominator(upper_bounds);
 
@@ -128,7 +128,7 @@ pub(crate) fn non_local_lower_bound(&self, fr: RegionVid) -> Option<RegionVid> {
         let lower_bounds = self.non_local_bounds(&self.outlives, fr);
 
         // In case we find more than one, reduce to one for
-        // convenience.  This is to prevent us from generating more
+        // convenience. This is to prevent us from generating more
         // complex constraints, but it will cause spurious errors.
         let post_dom = self.outlives.mutual_immediate_postdominator(lower_bounds);
 
@@ -359,14 +359,9 @@ fn add_outlives_bounds<I>(&mut self, outlives_bounds: I)
                         .insert(ty::OutlivesPredicate(GenericKind::Param(param_b), r_a));
                 }
 
-                OutlivesBound::RegionSubProjection(r_a, projection_b) => {
+                OutlivesBound::RegionSubAlias(r_a, alias_b) => {
                     self.region_bound_pairs
-                        .insert(ty::OutlivesPredicate(GenericKind::Projection(projection_b), r_a));
-                }
-
-                OutlivesBound::RegionSubOpaque(r_a, def_id, substs) => {
-                    self.region_bound_pairs
-                        .insert(ty::OutlivesPredicate(GenericKind::Opaque(def_id, substs), r_a));
+                        .insert(ty::OutlivesPredicate(GenericKind::Alias(alias_b), r_a));
                 }
             }
         }
index 42b577175e43757b77357854e6a6047cf0a7f5d7..3ff5d188a3d35233cf381cfb460655ee6b62c3ca 100644 (file)
@@ -328,7 +328,7 @@ fn compute_drop_live_points_for_block(&mut self, mpi: MovePathIndex, term_point:
         debug_assert!(self.drop_live_at.contains(term_point));
 
         // Otherwise, scan backwards through the statements in the
-        // block.  One of them may be either a definition or use
+        // block. One of them may be either a definition or use
         // live point.
         let term_location = self.cx.elements.to_location(term_point);
         debug_assert_eq!(self.cx.body.terminator_loc(term_location.block), term_location,);
index 7a3db191f0c66eef0436ae8844e9ca6e5e33c3cb..81bd4c2a783e906b30a4d1999bbea53b62522998 100644 (file)
@@ -1665,7 +1665,7 @@ fn check_local(&mut self, body: &Body<'tcx>, local: Local, local_decl: &LocalDec
     fn ensure_place_sized(&mut self, ty: Ty<'tcx>, span: Span) {
         let tcx = self.tcx();
 
-        // Erase the regions from `ty` to get a global type.  The
+        // Erase the regions from `ty` to get a global type. The
         // `Sized` bound in no way depends on precise regions, so this
         // shouldn't affect `is_sized`.
         let erased_ty = tcx.erase_regions(ty);
index a4a0c5b90fed32d2a20372aa908806b81aa9a2fe..8bff66f8d5cca02c3c6b15e525e98347cad0d1e7 100644 (file)
@@ -162,7 +162,7 @@ struct UniversalRegionIndices<'tcx> {
     /// `ty::Region` to the internal `RegionVid` we are using. This is
     /// used because trait matching and type-checking will feed us
     /// region constraints that reference those regions and we need to
-    /// be able to map them our internal `RegionVid`. This is
+    /// be able to map them to our internal `RegionVid`. This is
     /// basically equivalent to an `InternalSubsts`, except that it also
     /// contains an entry for `ReStatic` -- it might be nice to just
     /// use a substs, and then handle `ReStatic` another way.
@@ -637,7 +637,7 @@ fn compute_inputs_and_output(
                 let closure_ty = tcx.closure_env_ty(def_id, substs, env_region).unwrap();
 
                 // The "inputs" of the closure in the
-                // signature appear as a tuple.  The MIR side
+                // signature appear as a tuple. The MIR side
                 // flattens this tuple.
                 let (&output, tuplized_inputs) =
                     inputs_and_output.skip_binder().split_last().unwrap();
index d59b3b8c86d35ef6303bb8a3adbd5e9aecf2d1f1..ef5a75f428d4e979c8582c5708c79a8c9854180e 100644 (file)
@@ -20,7 +20,7 @@ pub fn expand_deriving_clone(
     // some additional `AssertParamIsClone` assertions.
     //
     // We can use the simple form if either of the following are true.
-    // - The type derives Copy and there are no generic parameters.  (If we
+    // - The type derives Copy and there are no generic parameters. (If we
     //   used the simple form with generics, we'd have to bound the generics
     //   with Clone + Copy, and then there'd be no Clone impl at all if the
     //   user fills in something that is Clone but not Copy. After
@@ -82,7 +82,7 @@ pub fn expand_deriving_clone(
             nonself_args: Vec::new(),
             ret_ty: Self_,
             attributes: attrs,
-            unify_fieldless_variants: false,
+            fieldless_variants_strategy: FieldlessVariantsStrategy::Default,
             combine_substructure: substructure,
         }],
         associated_types: Vec::new(),
@@ -177,7 +177,9 @@ fn cs_clone(
             all_fields = af;
             vdata = &variant.data;
         }
-        EnumTag(..) => cx.span_bug(trait_span, &format!("enum tags in `derive({})`", name,)),
+        EnumTag(..) | AllFieldlessEnum(..) => {
+            cx.span_bug(trait_span, &format!("enum tags in `derive({})`", name,))
+        }
         StaticEnum(..) | StaticStruct(..) => {
             cx.span_bug(trait_span, &format!("associated function in `derive({})`", name))
         }
index f861d47ed408e45fc02ecc6e7c75f76357d47a83..3e994f037ad7aa7e2843b0a271257b679bbab3e4 100644 (file)
@@ -36,7 +36,7 @@ pub fn expand_deriving_eq(
             nonself_args: vec![],
             ret_ty: Unit,
             attributes: attrs,
-            unify_fieldless_variants: true,
+            fieldless_variants_strategy: FieldlessVariantsStrategy::Unify,
             combine_substructure: combine_substructure(Box::new(|a, b, c| {
                 cs_total_eq_assert(a, b, c)
             })),
index 96d18c7afb924c56a6fb6d949a414cbfead310ae..a926fca4e65f8ac21fe7eabd06a1c260d5c0b474 100644 (file)
@@ -29,7 +29,7 @@ pub fn expand_deriving_ord(
             nonself_args: vec![(self_ref(), sym::other)],
             ret_ty: Path(path_std!(cmp::Ordering)),
             attributes: attrs,
-            unify_fieldless_variants: true,
+            fieldless_variants_strategy: FieldlessVariantsStrategy::Unify,
             combine_substructure: combine_substructure(Box::new(|a, b, c| cs_cmp(a, b, c))),
         }],
         associated_types: Vec::new(),
index 7f95551fc483a6bd69c4386a30191617581ec77e..9051fe0b28abec750146a6435e0b50c021e99d6f 100644 (file)
@@ -76,7 +76,7 @@ fn cs_eq(cx: &mut ExtCtxt<'_>, span: Span, substr: &Substructure<'_>) -> BlockOr
         nonself_args: vec![(self_ref(), sym::other)],
         ret_ty: Path(path_local!(bool)),
         attributes: attrs,
-        unify_fieldless_variants: true,
+        fieldless_variants_strategy: FieldlessVariantsStrategy::Unify,
         combine_substructure: combine_substructure(Box::new(|a, b, c| cs_eq(a, b, c))),
     }];
 
index 5c4e5b7f8167500c1fd16a51b43d2a099e6548f5..c9dc89212622d70d7d6ddc2bdbfac0093a7bf263 100644 (file)
@@ -28,7 +28,7 @@ pub fn expand_deriving_partial_ord(
         nonself_args: vec![(self_ref(), sym::other)],
         ret_ty,
         attributes: attrs,
-        unify_fieldless_variants: true,
+        fieldless_variants_strategy: FieldlessVariantsStrategy::Unify,
         combine_substructure: combine_substructure(Box::new(|cx, span, substr| {
             cs_partial_cmp(cx, span, substr)
         })),
index 5b1b7e6804c86d7235d0597e074901dba8fa0b37..e0f487e864898d72d530efd0d1b1b0815d01894d 100644 (file)
@@ -2,6 +2,7 @@
 use crate::deriving::generic::*;
 use crate::deriving::path_std;
 
+use ast::EnumDef;
 use rustc_ast::{self as ast, MetaItem};
 use rustc_expand::base::{Annotatable, ExtCtxt};
 use rustc_span::symbol::{sym, Ident, Symbol};
@@ -31,7 +32,8 @@ pub fn expand_deriving_debug(
             nonself_args: vec![(fmtr, sym::f)],
             ret_ty: Path(path_std!(fmt::Result)),
             attributes: ast::AttrVec::new(),
-            unify_fieldless_variants: false,
+            fieldless_variants_strategy:
+                FieldlessVariantsStrategy::SpecializeIfAllVariantsFieldless,
             combine_substructure: combine_substructure(Box::new(|a, b, c| {
                 show_substructure(a, b, c)
             })),
@@ -43,16 +45,18 @@ pub fn expand_deriving_debug(
 }
 
 fn show_substructure(cx: &mut ExtCtxt<'_>, span: Span, substr: &Substructure<'_>) -> BlockOrExpr {
+    // We want to make sure we have the ctxt set so that we can use unstable methods
+    let span = cx.with_def_site_ctxt(span);
+
     let (ident, vdata, fields) = match substr.fields {
         Struct(vdata, fields) => (substr.type_ident, *vdata, fields),
         EnumMatching(_, _, v, fields) => (v.ident, &v.data, fields),
+        AllFieldlessEnum(enum_def) => return show_fieldless_enum(cx, span, enum_def, substr),
         EnumTag(..) | StaticStruct(..) | StaticEnum(..) => {
             cx.span_bug(span, "nonsensical .fields in `#[derive(Debug)]`")
         }
     };
 
-    // We want to make sure we have the ctxt set so that we can use unstable methods
-    let span = cx.with_def_site_ctxt(span);
     let name = cx.expr_str(span, ident.name);
     let fmt = substr.nonselflike_args[0].clone();
 
@@ -173,3 +177,47 @@ fn show_substructure(cx: &mut ExtCtxt<'_>, span: Span, substr: &Substructure<'_>
         BlockOrExpr::new_mixed(stmts, Some(expr))
     }
 }
+
+/// Special case for enums with no fields. Builds:
+/// ```text
+/// impl ::core::fmt::Debug for A {
+///     fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result {
+///          ::core::fmt::Formatter::write_str(f,
+///             match self {
+///                 A::A => "A",
+///                 A::B() => "B",
+///                 A::C {} => "C",
+///             })
+///     }
+/// }
+/// ```
+fn show_fieldless_enum(
+    cx: &mut ExtCtxt<'_>,
+    span: Span,
+    def: &EnumDef,
+    substr: &Substructure<'_>,
+) -> BlockOrExpr {
+    let fmt = substr.nonselflike_args[0].clone();
+    let arms = def
+        .variants
+        .iter()
+        .map(|v| {
+            let variant_path = cx.path(span, vec![substr.type_ident, v.ident]);
+            let pat = match &v.data {
+                ast::VariantData::Tuple(fields, _) => {
+                    debug_assert!(fields.is_empty());
+                    cx.pat_tuple_struct(span, variant_path, vec![])
+                }
+                ast::VariantData::Struct(fields, _) => {
+                    debug_assert!(fields.is_empty());
+                    cx.pat_struct(span, variant_path, vec![])
+                }
+                ast::VariantData::Unit(_) => cx.pat_path(span, variant_path),
+            };
+            cx.arm(span, pat, cx.expr_str(span, v.ident.name))
+        })
+        .collect::<Vec<_>>();
+    let name = cx.expr_match(span, cx.expr_self(span), arms);
+    let fn_path_write_str = cx.std_path(&[sym::fmt, sym::Formatter, sym::write_str]);
+    BlockOrExpr::new_expr(cx.expr_call_global(span, fn_path_write_str, vec![fmt, name]))
+}
index 62af02c2bb4b2d35304cf4eafd42036fba31d5ef..5f9519dad1b25e91213e145a4ec768cc376f5d01 100644 (file)
@@ -49,7 +49,7 @@ pub fn expand_deriving_rustc_decodable(
                 PathKind::Std,
             )),
             attributes: ast::AttrVec::new(),
-            unify_fieldless_variants: false,
+            fieldless_variants_strategy: FieldlessVariantsStrategy::Default,
             combine_substructure: combine_substructure(Box::new(|a, b, c| {
                 decodable_substructure(a, b, c, krate)
             })),
index eb66c4a69a69bc25bab4126e1649d5f3097a940f..18270747296b8f7c7d8762e141f7c8df88da5f36 100644 (file)
@@ -34,7 +34,7 @@ pub fn expand_deriving_default(
             nonself_args: Vec::new(),
             ret_ty: Self_,
             attributes: attrs,
-            unify_fieldless_variants: false,
+            fieldless_variants_strategy: FieldlessVariantsStrategy::Default,
             combine_substructure: combine_substructure(Box::new(|cx, trait_span, substr| {
                 match substr.fields {
                     StaticStruct(_, fields) => {
index 68bc0ff2ec0b41c553339586a0874ef5fcb6a1f1..2afeed927ac2c7664d8e941f211f7c3f3bccff74 100644 (file)
@@ -133,7 +133,7 @@ pub fn expand_deriving_rustc_encodable(
                 PathKind::Std,
             )),
             attributes: AttrVec::new(),
-            unify_fieldless_variants: false,
+            fieldless_variants_strategy: FieldlessVariantsStrategy::Default,
             combine_substructure: combine_substructure(Box::new(|a, b, c| {
                 encodable_substructure(a, b, c, krate)
             })),
index beac591bfc879065a2c6fd411be0673ba59a3383..17b7ac0eba1209de55bddbee1ba6087945b409ed 100644 (file)
@@ -222,14 +222,27 @@ pub struct MethodDef<'a> {
 
     pub attributes: ast::AttrVec,
 
-    /// Can we combine fieldless variants for enums into a single match arm?
-    /// If true, indicates that the trait operation uses the enum tag in some
-    /// way.
-    pub unify_fieldless_variants: bool,
+    pub fieldless_variants_strategy: FieldlessVariantsStrategy,
 
     pub combine_substructure: RefCell<CombineSubstructureFunc<'a>>,
 }
 
+/// How to handle fieldless enum variants.
+#[derive(PartialEq)]
+pub enum FieldlessVariantsStrategy {
+    /// Combine fieldless variants into a single match arm.
+    /// This assumes that relevant information has been handled
+    /// by looking at the enum's discriminant.
+    Unify,
+    /// Don't do anything special about fieldless variants. They are
+    /// handled like any other variant.
+    Default,
+    /// If all variants of the enum are fieldless, expand the special
+    /// `AllFieldlessEnum` substructure, so that the entire enum can be handled
+    /// at once.
+    SpecializeIfAllVariantsFieldless,
+}
+
 /// All the data about the data structure/method being derived upon.
 pub struct Substructure<'a> {
     /// ident of self
@@ -264,9 +277,14 @@ pub enum StaticFields {
 
 /// A summary of the possible sets of fields.
 pub enum SubstructureFields<'a> {
-    /// A non-static method with `Self` is a struct.
+    /// A non-static method where `Self` is a struct.
     Struct(&'a ast::VariantData, Vec<FieldInfo>),
 
+    /// A non-static method handling the entire enum at once
+    /// (after it has been determined that none of the enum
+    /// variants has any fields).
+    AllFieldlessEnum(&'a ast::EnumDef),
+
     /// Matching variants of the enum: variant index, variant count, ast::Variant,
     /// fields: the field name is only non-`None` in the case of a struct
     /// variant.
@@ -1086,8 +1104,8 @@ fn expand_static_struct_method_body(
     /// ```
     /// Creates a tag check combined with a match for a tuple of all
     /// `selflike_args`, with an arm for each variant with fields, possibly an
-    /// arm for each fieldless variant (if `!unify_fieldless_variants` is not
-    /// true), and possibly a default arm.
+    /// arm for each fieldless variant (if `unify_fieldless_variants` is not
+    /// `Unify`), and possibly a default arm.
     fn expand_enum_method_body<'b>(
         &self,
         cx: &mut ExtCtxt<'_>,
@@ -1101,7 +1119,8 @@ fn expand_enum_method_body<'b>(
         let variants = &enum_def.variants;
 
         // Traits that unify fieldless variants always use the tag(s).
-        let uses_tags = self.unify_fieldless_variants;
+        let unify_fieldless_variants =
+            self.fieldless_variants_strategy == FieldlessVariantsStrategy::Unify;
 
         // There is no sensible code to be generated for *any* deriving on a
         // zero-variant enum. So we just generate a failing expression.
@@ -1161,23 +1180,35 @@ fn expand_enum_method_body<'b>(
         // match is necessary.
         let all_fieldless = variants.iter().all(|v| v.data.fields().is_empty());
         if all_fieldless {
-            if uses_tags && variants.len() > 1 {
-                // If the type is fieldless and the trait uses the tag and
-                // there are multiple variants, we need just an operation on
-                // the tag(s).
-                let (tag_field, mut tag_let_stmts) = get_tag_pieces(cx);
-                let mut tag_check = self.call_substructure_method(
-                    cx,
-                    trait_,
-                    type_ident,
-                    nonselflike_args,
-                    &EnumTag(tag_field, None),
-                );
-                tag_let_stmts.append(&mut tag_check.0);
-                return BlockOrExpr(tag_let_stmts, tag_check.1);
-            }
-
-            if variants.len() == 1 {
+            if variants.len() > 1 {
+                match self.fieldless_variants_strategy {
+                    FieldlessVariantsStrategy::Unify => {
+                        // If the type is fieldless and the trait uses the tag and
+                        // there are multiple variants, we need just an operation on
+                        // the tag(s).
+                        let (tag_field, mut tag_let_stmts) = get_tag_pieces(cx);
+                        let mut tag_check = self.call_substructure_method(
+                            cx,
+                            trait_,
+                            type_ident,
+                            nonselflike_args,
+                            &EnumTag(tag_field, None),
+                        );
+                        tag_let_stmts.append(&mut tag_check.0);
+                        return BlockOrExpr(tag_let_stmts, tag_check.1);
+                    }
+                    FieldlessVariantsStrategy::SpecializeIfAllVariantsFieldless => {
+                        return self.call_substructure_method(
+                            cx,
+                            trait_,
+                            type_ident,
+                            nonselflike_args,
+                            &AllFieldlessEnum(enum_def),
+                        );
+                    }
+                    FieldlessVariantsStrategy::Default => (),
+                }
+            } else if variants.len() == 1 {
                 // If there is a single variant, we don't need an operation on
                 // the tag(s). Just use the most degenerate result.
                 return self.call_substructure_method(
@@ -1187,7 +1218,7 @@ fn expand_enum_method_body<'b>(
                     nonselflike_args,
                     &EnumMatching(0, 1, &variants[0], Vec::new()),
                 );
-            };
+            }
         }
 
         // These arms are of the form:
@@ -1198,7 +1229,7 @@ fn expand_enum_method_body<'b>(
         let mut match_arms: Vec<ast::Arm> = variants
             .iter()
             .enumerate()
-            .filter(|&(_, v)| !(self.unify_fieldless_variants && v.data.fields().is_empty()))
+            .filter(|&(_, v)| !(unify_fieldless_variants && v.data.fields().is_empty()))
             .map(|(index, variant)| {
                 // A single arm has form (&VariantK, &VariantK, ...) => BodyK
                 // (see "Final wrinkle" note below for why.)
@@ -1249,7 +1280,7 @@ fn expand_enum_method_body<'b>(
         // Add a default arm to the match, if necessary.
         let first_fieldless = variants.iter().find(|v| v.data.fields().is_empty());
         let default = match first_fieldless {
-            Some(v) if self.unify_fieldless_variants => {
+            Some(v) if unify_fieldless_variants => {
                 // We need a default case that handles all the fieldless
                 // variants. The index and actual variant aren't meaningful in
                 // this case, so just use dummy values.
@@ -1296,7 +1327,7 @@ fn expand_enum_method_body<'b>(
         // If the trait uses the tag and there are multiple variants, we need
         // to add a tag check operation before the match. Otherwise, the match
         // is enough.
-        if uses_tags && variants.len() > 1 {
+        if unify_fieldless_variants && variants.len() > 1 {
             let (tag_field, mut tag_let_stmts) = get_tag_pieces(cx);
 
             // Combine a tag check with the match.
@@ -1580,5 +1611,6 @@ pub fn cs_fold<F>(
             }
         }
         StaticEnum(..) | StaticStruct(..) => cx.span_bug(trait_span, "static function in `derive`"),
+        AllFieldlessEnum(..) => cx.span_bug(trait_span, "fieldless enum in `derive`"),
     }
 }
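
To make the new control flow in `expand_enum_method_body` easier to follow, here is a standalone mirror of the all-fieldless dispatch added above. `Strategy`, `plan`, and the returned strings are simplified stand-ins, not rustc code:

    enum Strategy { Unify, Default, SpecializeIfAllVariantsFieldless }

    fn plan(strategy: Strategy, all_fieldless: bool, variant_count: usize) -> &'static str {
        if all_fieldless {
            if variant_count > 1 {
                match strategy {
                    // Operate on the tag(s) only; no per-variant match needed.
                    Strategy::Unify => return "tag check only",
                    // Hand the whole enum to the substructure in one call.
                    Strategy::SpecializeIfAllVariantsFieldless => return "AllFieldlessEnum",
                    // Fall through to the ordinary per-variant match below.
                    Strategy::Default => {}
                }
            } else if variant_count == 1 {
                // Single variant: the most degenerate result is enough.
                return "EnumMatching on the only variant";
            }
        }
        "one match arm per variant (plus a default arm when unifying)"
    }
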
index c136bb7141ab9eef3f4b0896e20fc08a47816503..f8570d8f86a08dd691fe82dbd23c26518afcde24 100644 (file)
@@ -33,7 +33,7 @@ pub fn expand_deriving_hash(
             nonself_args: vec![(Ref(Box::new(Path(arg)), Mutability::Mut), sym::state)],
             ret_ty: Unit,
             attributes: AttrVec::new(),
-            unify_fieldless_variants: true,
+            fieldless_variants_strategy: FieldlessVariantsStrategy::Unify,
             combine_substructure: combine_substructure(Box::new(|a, b, c| {
                 hash_substructure(a, b, c)
             })),
index 84d06b69a9d976c3d7c260dfe723693139dfe80b..e5a5e606930f00e213c7bca7c893889955598892 100644 (file)
@@ -1,4 +1,4 @@
-// The compiler code necessary to support the env! extension.  Eventually this
+// The compiler code necessary to support the env! extension. Eventually this
 // should all get sucked into either the compiler syntax extension plugin
 // interface.
 //
index b2b7b9d75bd37163e9ac20a305ba9e19ac9366ef..9f4bbbc62c819e516273c409c89eeec3fc72877a 100644 (file)
@@ -583,7 +583,7 @@ fn report_missing_placeholders(
     if detect_foreign_fmt {
         use super::format_foreign as foreign;
 
-        // The set of foreign substitutions we've explained.  This prevents spamming the user
+        // The set of foreign substitutions we've explained. This prevents spamming the user
         // with `%d should be written as {}` over and over again.
         let mut explained = FxHashSet::default();
 
index 6f7fc3a95ba640e21311aca2d37b403f0f25a2c4..bd9e903b6ba297cdda9ce8de7efa4404e6c4325d 100644 (file)
@@ -253,7 +253,7 @@ pub fn translate(&self) -> Result<String, Option<String>> {
     #[derive(Copy, Clone, PartialEq, Debug)]
     pub enum Num {
         // The range of these values is technically bounded by `NL_ARGMAX`... but, at least for GNU
-        // libc, it apparently has no real fixed limit.  A `u16` is used here on the basis that it
+        // libc, it apparently has no real fixed limit. A `u16` is used here on the basis that it
         // is *vanishingly* unlikely that *anyone* is going to try formatting something wider, or
         // with more precision, than 32 thousand positions which is so wide it couldn't possibly fit
         // on a screen.
index dee6fb5b5130d1f27abaf1fda1581605a82973b5..51450897bfc11278afe11293a8c2e92de48ef52e 100644 (file)
@@ -304,7 +304,7 @@ fn data_id_for_static(
 
         // Comment copied from https://github.com/rust-lang/rust/blob/45060c2a66dfd667f88bd8b94261b28a58d85bd5/src/librustc_codegen_llvm/consts.rs#L141
         // Declare an internal global `extern_with_linkage_foo` which
-        // is initialized with the address of `foo`.  If `foo` is
+        // is initialized with the address of `foo`. If `foo` is
         // discarded during linking (for example, if `foo` has weak
         // linkage and there are no definitions), then
         // `extern_with_linkage_foo` will instead be initialized to
index 2ba012a77b0a908788f0272705f82f7c875cb1a7..28fbcb15b2b5893ab9ea6912d5047a05a6f293a7 100644 (file)
@@ -68,7 +68,7 @@ pub(crate) fn new(tcx: TyCtxt<'_>, isa: &dyn TargetIsa) -> Self {
             .working_dir
             .to_string_lossy(FileNameDisplayPreference::Remapped)
             .into_owned();
-        let (name, file_info) = match tcx.sess.local_crate_source_file.clone() {
+        let (name, file_info) = match tcx.sess.local_crate_source_file() {
             Some(path) => {
                 let name = path.to_string_lossy().into_owned();
                 (name, None)
index 546540dfd76232ad2b6d50887e1b9304d1f9e63d..28be6d033f8bf3d82c3731d60f7837e7d8760df1 100644 (file)
@@ -221,7 +221,7 @@ fn store(
                 bx.store(val, cast_dst, self.layout.align.abi);
             } else {
                 // The actual return type is a struct, but the ABI
-                // adaptation code has cast it into some scalar type.  The
+                // adaptation code has cast it into some scalar type. The
                 // code that follows is the only reliable way I have
                 // found to do a transform like i64 -> {i32,i32}.
                 // Basically we dump the data onto the stack then memcpy it.
index 606f710641fc0e25a17fd6f2b598058328a1e6b4..52c8b51796c0bf7712e38ddfb44bca2f64c4bdb5 100644 (file)
@@ -445,7 +445,7 @@ pub(crate) fn inline_asm_call<'ll>(
             };
 
             // Store mark in a metadata node so we can map LLVM errors
-            // back to source locations.  See #17552.
+            // back to source locations. See #17552.
             let key = "srcloc";
             let kind = llvm::LLVMGetMDKindIDInContext(
                 bx.llcx,
index 36aba5bb740bd6120030c0cbf0e5d6654fa0afc8..426f57c0608009dd437c508b1944f3a2305fc518 100644 (file)
@@ -145,7 +145,7 @@ fn create_dll_import_lib(
             // The binutils linker used on -windows-gnu targets cannot read the import
             // libraries generated by LLVM: in our attempts, the linker produced an .EXE
             // that loaded but crashed with an AV upon calling one of the imported
-            // functions.  Therefore, use binutils to create the import library instead,
+            // functions. Therefore, use binutils to create the import library instead,
             // by writing a .DEF file to the temp dir and calling binutils's dlltool.
             let def_file_path =
                 tmpdir.join(format!("{}{}", lib_name, name_suffix)).with_extension("def");
@@ -219,7 +219,7 @@ fn create_dll_import_lib(
 
             // All import names are Rust identifiers and therefore cannot contain \0 characters.
             // FIXME: when support for #[link_name] is implemented, ensure that the import names
-            // still don't contain any \0 characters.  Also need to check that the names don't
+            // still don't contain any \0 characters. Also need to check that the names don't
             // contain substrings like " @" or "NONAME" that are keywords or otherwise reserved
             // in definition files.
             let cstring_import_name_and_ordinal_vector: Vec<(CString, Option<u16>)> =
@@ -433,7 +433,7 @@ fn find_binutils_dlltool(sess: &Session) -> OsString {
     }
 
     // The user didn't specify the location of the dlltool binary, and we weren't able
-    // to find the appropriate one on the PATH.  Just return the name of the tool
+    // to find the appropriate one on the PATH. Just return the name of the tool
     // and let the invocation fail with a hopefully useful error message.
     tool_name
 }
index e23c88b62c14b4bb92ff0be5078120a0e16ecc79..b2af9f31e4494175aad3d4140b32b7dd232247a5 100644 (file)
@@ -909,7 +909,7 @@ unsafe fn embed_bitcode(
 
 // Create a `__imp_<symbol> = &symbol` global for every public static `symbol`.
 // This is required to satisfy `dllimport` references to static data in .rlibs
-// when using MSVC linker.  We do this only for data, as linker can fix up
+// when using MSVC linker. We do this only for data, as linker can fix up
 // code references on its own.
 // See #26591, #27438
 fn create_msvc_imps(
index 5bf45a81e4347cd7de59a42a80fdaaa796131bd6..5e98deae48aa2dae049de2e28fad05c4df7a256e 100644 (file)
@@ -501,7 +501,7 @@ fn scalar_load_metadata<'a, 'll, 'tcx>(
             layout: TyAndLayout<'tcx>,
             offset: Size,
         ) {
-            if !scalar.is_always_valid(bx) {
+            if !scalar.is_uninit_valid() {
                 bx.noundef_metadata(load);
             }
 
index 70ff5c9617b7a8c7c3ecdf008467d18eb924fb0d..f1d01a4602a5e313e06ff3ef816f42c2423a42a2 100644 (file)
@@ -49,8 +49,8 @@ pub fn get_fn<'ll, 'tcx>(cx: &CodegenCx<'ll, 'tcx>, instance: Instance<'tcx>) ->
         let llptrty = fn_abi.ptr_to_llvm_type(cx);
 
         // This is subtle and surprising, but sometimes we have to bitcast
-        // the resulting fn pointer.  The reason has to do with external
-        // functions.  If you have two crates that both bind the same C
+        // the resulting fn pointer. The reason has to do with external
+        // functions. If you have two crates that both bind the same C
         // library, they may not use precisely the same types: for
         // example, they will probably each declare their own structs,
         // which are distinct types from LLVM's point of view (nominal
index 3626aa901c0ef45b02dfa23fe54487d491436f11..16467b614feafd3015d192b3734244dee1a6058a 100644 (file)
@@ -140,7 +140,7 @@ pub fn codegen_static_initializer<'ll, 'tcx>(
 fn set_global_alignment<'ll>(cx: &CodegenCx<'ll, '_>, gv: &'ll Value, mut align: Align) {
     // The target may require greater alignment for globals than the type does.
     // Note: GCC and Clang also allow `__attribute__((aligned))` on variables,
-    // which can force it to be smaller.  Rust doesn't support this yet.
+    // which can force it to be smaller. Rust doesn't support this yet.
     if let Some(min) = cx.sess().target.min_global_align {
         match Align::from_bits(min) {
             Ok(min) => align = align.max(min),
@@ -171,7 +171,7 @@ fn check_and_apply_linkage<'ll, 'tcx>(
             llvm::LLVMRustSetLinkage(g1, base::linkage_to_llvm(linkage));
 
             // Declare an internal global `extern_with_linkage_foo` which
-            // is initialized with the address of `foo`.  If `foo` is
+            // is initialized with the address of `foo`. If `foo` is
             // discarded during linking (for example, if `foo` has weak
             // linkage and there are no definitions), then
             // `extern_with_linkage_foo` will instead be initialized to
index 393bf30e9f83411d5063ba17d76c7657aeb020ee..22c61248b7d53275fa3e8a662c93ad7f7ae83775 100644 (file)
@@ -8,7 +8,7 @@
 use rustc_codegen_ssa::traits::{ConstMethods, CoverageInfoMethods};
 use rustc_data_structures::fx::FxIndexSet;
 use rustc_hir::def::DefKind;
-use rustc_hir::def_id::DefIdSet;
+use rustc_hir::def_id::DefId;
 use rustc_llvm::RustString;
 use rustc_middle::bug;
 use rustc_middle::middle::codegen_fn_attrs::CodegenFnAttrFlags;
@@ -291,7 +291,7 @@ fn add_unused_functions(cx: &CodegenCx<'_, '_>) {
 
     let ignore_unused_generics = tcx.sess.instrument_coverage_except_unused_generics();
 
-    let eligible_def_ids: DefIdSet = tcx
+    let eligible_def_ids: Vec<DefId> = tcx
         .mir_keys(())
         .iter()
         .filter_map(|local_def_id| {
@@ -317,7 +317,9 @@ fn add_unused_functions(cx: &CodegenCx<'_, '_>) {
 
     let codegenned_def_ids = tcx.codegened_and_inlined_items(());
 
-    for &non_codegenned_def_id in eligible_def_ids.difference(codegenned_def_ids) {
+    for non_codegenned_def_id in
+        eligible_def_ids.into_iter().filter(|id| !codegenned_def_ids.contains(id))
+    {
         let codegen_fn_attrs = tcx.codegen_fn_attrs(non_codegenned_def_id);
 
         // If a function is marked `#[no_coverage]`, then skip generating a
index 48e3a812e4f20c839a60a7adcb61969201e82065..b6eb5ee183fa397e6487e75983d27cbe393bcdfc 100644 (file)
@@ -782,10 +782,10 @@ pub fn build_compile_unit_di_node<'ll, 'tcx>(
     codegen_unit_name: &str,
     debug_context: &CodegenUnitDebugContext<'ll, 'tcx>,
 ) -> &'ll DIDescriptor {
-    let mut name_in_debuginfo = match tcx.sess.local_crate_source_file {
-        Some(ref path) => path.clone(),
-        None => PathBuf::from(tcx.crate_name(LOCAL_CRATE).as_str()),
-    };
+    let mut name_in_debuginfo = tcx
+        .sess
+        .local_crate_source_file()
+        .unwrap_or_else(|| PathBuf::from(tcx.crate_name(LOCAL_CRATE).as_str()));
 
     // To avoid breaking split DWARF, we need to ensure that each codegen unit
     // has a unique `DW_AT_name`. This is because there's a remote chance that
index 680d810f78eb9b4121e98955f6cbad23629f52a3..a6a75eff9a36d24e0c331aa3e171f09b3f839ebc 100644 (file)
@@ -654,7 +654,7 @@ fn codegen_gnu_try<'ll>(
         // Type indicator for the exception being thrown.
         //
         // The first value in this tuple is a pointer to the exception object
-        // being thrown.  The second value is a "selector" indicating which of
+        // being thrown. The second value is a "selector" indicating which of
         // the landing pad clauses the exception's type had been matched to.
         // rust_try ignores the selector.
         bx.switch_to_block(catch);
@@ -718,7 +718,7 @@ fn codegen_emcc_try<'ll>(
         // Type indicator for the exception being thrown.
         //
         // The first value in this tuple is a pointer to the exception object
-        // being thrown.  The second value is a "selector" indicating which of
+        // being thrown. The second value is a "selector" indicating which of
         // the landing pad clauses the exception's type had been matched to.
         bx.switch_to_block(catch);
         let tydesc = bx.eh_catch_typeinfo();
index 182adf81785716fe5abea7b91e0571dc6bda7a8b..75cd5df972316366807d06a8d5cfa4d6a399ff0a 100644 (file)
@@ -352,10 +352,10 @@ fn scalar_pair_element_llvm_type<'a>(
         let scalar = [a, b][index];
 
         // Make sure to return the same type `immediate_llvm_type` would when
-        // dealing with an immediate pair.  This means that `(bool, bool)` is
+        // dealing with an immediate pair. This means that `(bool, bool)` is
         // effectively represented as `{i8, i8}` in memory and two `i1`s as an
         // immediate, just like `bool` is typically `i8` in memory and only `i1`
-        // when immediate.  We need to load/store `bool` as `i8` to avoid
+        // when immediate. We need to load/store `bool` as `i8` to avoid
         // crippling LLVM optimizations or triggering other LLVM bugs with `i1`.
         if immediate && scalar.is_bool() {
             return cx.type_i1();
index 342abf81f6a7c0d6b465ce23375dcecd80868297..b148e4185a68a795ce5f2c02e8e164dbcee8cada 100644 (file)
@@ -445,7 +445,7 @@ fn link_rlib<'a>(
 /// Extract all symbols defined in raw-dylib libraries, collated by library name.
 ///
 /// If we have multiple extern blocks that specify symbols defined in the same raw-dylib library,
-/// then the CodegenResults value contains one NativeLib instance for each block.  However, the
+/// then the CodegenResults value contains one NativeLib instance for each block. However, the
 /// linker appears to expect only a single import library for each library used, so we need to
 /// collate the symbols together by library name before generating the import libraries.
 fn collate_raw_dylibs<'a, 'b>(
@@ -1197,7 +1197,7 @@ fn infer_from(
                         if cfg!(any(target_os = "solaris", target_os = "illumos")) {
                             // On historical Solaris systems, "cc" may have
                             // been Sun Studio, which is not flag-compatible
-                            // with "gcc".  This history casts a long shadow,
+                            // with "gcc". This history casts a long shadow,
                             // and many modern illumos distributions today
                             // ship GCC as "gcc" without also making it
                             // available as "cc".
index 0268659d3b9a13d8515eace64f9d4d2851945bec..eaf1e9817c2038a7934948e9b5fffb50fafa48ce 100644 (file)
@@ -544,7 +544,7 @@ fn gc_sections(&mut self, keep_metadata: bool) {
         // link times negatively.
         //
         // -dead_strip can't be part of the pre_link_args because it's also used
-        // for partial linking when using multiple codegen units (-r).  So we
+        // for partial linking when using multiple codegen units (-r). So we
         // insert it here.
         if self.sess.target.is_like_osx {
             self.linker_arg("-dead_strip");
index 8cb7d74b90d4b40dc6393cd63b5925b7a3260c9c..57a99e74c21ade04464f985e4623fa635e1fdddf 100644 (file)
@@ -173,11 +173,15 @@ fn exported_symbols_provider_local(
         return &[];
     }
 
-    let mut symbols: Vec<_> = tcx
-        .reachable_non_generics(LOCAL_CRATE)
-        .iter()
-        .map(|(&def_id, &info)| (ExportedSymbol::NonGeneric(def_id), info))
-        .collect();
+    // FIXME: Sorting this is unnecessary since we are sorting later anyway.
+    //        Can we skip the later sorting?
+    let mut symbols: Vec<_> = tcx.with_stable_hashing_context(|hcx| {
+        tcx.reachable_non_generics(LOCAL_CRATE)
+            .to_sorted(&hcx, true)
+            .into_iter()
+            .map(|(&def_id, &info)| (ExportedSymbol::NonGeneric(def_id), info))
+            .collect()
+    });
 
     if tcx.entry_fn(()).is_some() {
         let exported_symbol =
index 25dc88c535da9c69c110a27dbf2143e7c7af3f56..9f1614af7b16c9a232546c56ab9f5d5016c22472 100644 (file)
@@ -105,7 +105,7 @@ pub struct ModuleConfig {
     pub emit_thin_lto: bool,
     pub bc_cmdline: String,
 
-    // Miscellaneous flags.  These are mostly copied from command-line
+    // Miscellaneous flags. These are mostly copied from command-line
     // options.
     pub verify_llvm_ir: bool,
     pub no_prepopulate_passes: bool,
@@ -538,7 +538,7 @@ fn produce_final_output_artifacts(
 
     let copy_if_one_unit = |output_type: OutputType, keep_numbered: bool| {
         if compiled_modules.modules.len() == 1 {
-            // 1) Only one codegen unit.  In this case it's no difficulty
+            // 1) Only one codegen unit. In this case it's no difficulty
             //    to copy `foo.0.x` to `foo.x`.
             let module_name = Some(&compiled_modules.modules[0].name[..]);
             let path = crate_output.temp_path(output_type, module_name);
@@ -557,15 +557,15 @@ fn produce_final_output_artifacts(
                 .to_owned();
 
             if crate_output.outputs.contains_key(&output_type) {
-                // 2) Multiple codegen units, with `--emit foo=some_name`.  We have
+                // 2) Multiple codegen units, with `--emit foo=some_name`. We have
                 //    no good solution for this case, so warn the user.
                 sess.emit_warning(errors::IgnoringEmitPath { extension });
             } else if crate_output.single_output_file.is_some() {
-                // 3) Multiple codegen units, with `-o some_name`.  We have
+                // 3) Multiple codegen units, with `-o some_name`. We have
                 //    no good solution for this case, so warn the user.
                 sess.emit_warning(errors::IgnoringOutput { extension });
             } else {
-                // 4) Multiple codegen units, but no explicit name.  We
+                // 4) Multiple codegen units, but no explicit name. We
                 //    just leave the `foo.0.x` files in place.
                 // (We don't have to do any work in this case.)
             }
@@ -579,7 +579,7 @@ fn produce_final_output_artifacts(
         match *output_type {
             OutputType::Bitcode => {
                 user_wants_bitcode = true;
-                // Copy to .bc, but always keep the .0.bc.  There is a later
+                // Copy to .bc, but always keep the .0.bc. There is a later
                 // check to figure out if we should delete .0.bc files, or keep
                 // them for making an rlib.
                 copy_if_one_unit(OutputType::Bitcode, true);
@@ -611,7 +611,7 @@ fn produce_final_output_artifacts(
     // `-C save-temps` or `--emit=` flags).
 
     if !sess.opts.cg.save_temps {
-        // Remove the temporary .#module-name#.o objects.  If the user didn't
+        // Remove the temporary .#module-name#.o objects. If the user didn't
         // explicitly request bitcode (with --emit=bc), and the bitcode is not
         // needed for building an rlib, then we must remove .#module-name#.bc as
         // well.
index f7312f6fcdafdc6d0d90539fa70042a9fbd7f7c8..32d3cfe6fc650a2f76a19e9594bd7870ba5a8eeb 100644 (file)
@@ -964,16 +964,19 @@ pub fn provide(providers: &mut Providers) {
         };
 
         let (defids, _) = tcx.collect_and_partition_mono_items(cratenum);
-        for id in &*defids {
+
+        let any_for_speed = defids.items().any(|id| {
             let CodegenFnAttrs { optimize, .. } = tcx.codegen_fn_attrs(*id);
             match optimize {
-                attr::OptimizeAttr::None => continue,
-                attr::OptimizeAttr::Size => continue,
-                attr::OptimizeAttr::Speed => {
-                    return for_speed;
-                }
+                attr::OptimizeAttr::None | attr::OptimizeAttr::Size => false,
+                attr::OptimizeAttr::Speed => true,
             }
+        });
+
+        if any_for_speed {
+            return for_speed;
         }
+
         tcx.sess.opts.optimize
     };
 }
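
The rewrite above replaces an explicit loop with early `continue`/`return` by `Iterator::any`. A self-contained sketch of the same transformation, with a local `OptLevel` standing in for `attr::OptimizeAttr`:

    enum OptLevel { None, Size, Speed }

    // Loop form: skip uninteresting items, return as soon as one asks for speed.
    fn wants_speed_loop(levels: &[OptLevel]) -> bool {
        for level in levels {
            match level {
                OptLevel::None | OptLevel::Size => continue,
                OptLevel::Speed => return true,
            }
        }
        false
    }

    // Iterator form, as in the hunk above: `any` short-circuits on the first hit.
    fn wants_speed(levels: &[OptLevel]) -> bool {
        levels.iter().any(|level| matches!(level, OptLevel::Speed))
    }
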
index b0fa7745667360d642a4fbd8c89ea0a71c6ef8d5..8808ad2dcd135ab3c009656beb2bd6f9cf9f67d9 100644 (file)
@@ -658,13 +658,13 @@ fn check_link_ordinal(tcx: TyCtxt<'_>, attr: &ast::Attribute) -> Option<u16> {
         sole_meta_list
     {
         // According to the table at https://docs.microsoft.com/en-us/windows/win32/debug/pe-format#import-header,
-        // the ordinal must fit into 16 bits.  Similarly, the Ordinal field in COFFShortExport (defined
+        // the ordinal must fit into 16 bits. Similarly, the Ordinal field in COFFShortExport (defined
         // in llvm/include/llvm/Object/COFFImportFile.h), which we use to communicate import information
         // to LLVM for `#[link(kind = "raw-dylib"_])`, is also defined to be uint16_t.
         //
         // FIXME: should we allow an ordinal of 0?  The MSVC toolchain has inconsistent support for this:
         // both LINK.EXE and LIB.EXE signal errors and abort when given a .DEF file that specifies
-        // a zero ordinal.  However, llvm-dlltool is perfectly happy to generate an import library
+        // a zero ordinal. However, llvm-dlltool is perfectly happy to generate an import library
         // for such a .DEF file, and MSVC's LINK.EXE is also perfectly happy to consume an import
         // library produced by LLVM with an ordinal of 0, and it generates an .EXE.  (I don't know yet
         // if the resulting EXE runs, as I haven't yet built the necessary DLL -- see earlier comment
index b7982b633f57fa8b38b93983d078358152820d68..e9bc40c33107706b6167373a5ace60f8b8ac6a2b 100644 (file)
@@ -57,9 +57,9 @@ pub struct DebugScope<S, L> {
 }
 
 impl<'tcx, S: Copy, L: Copy> DebugScope<S, L> {
-    /// DILocations inherit source file name from the parent DIScope.  Due to macro expansions
+    /// DILocations inherit source file name from the parent DIScope. Due to macro expansions
     /// it may so happen that the current span belongs to a different file than the DIScope
-    /// corresponding to span's containing source scope.  If so, we need to create a DIScope
+    /// corresponding to span's containing source scope. If so, we need to create a DIScope
     /// "extension" into that file.
     pub fn adjust_dbg_scope_for_span<Cx: CodegenMethods<'tcx, DIScope = S, DILocation = L>>(
         &self,
index 13472cc2bfa0a5c4b8ed7ba81baa6eb3114ac9f9..0579f7815352772807a19789d6928440256383b2 100644 (file)
@@ -36,16 +36,16 @@ fn into(self) -> InterpErrorInfo<'tcx> {
 impl fmt::Display for ConstEvalErrKind {
     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
         use self::ConstEvalErrKind::*;
-        match *self {
+        match self {
             ConstAccessesStatic => write!(f, "constant accesses static"),
             ModifiedGlobal => {
                 write!(f, "modifying a static's initial value from another static's initializer")
             }
-            AssertFailure(ref msg) => write!(f, "{:?}", msg),
+            AssertFailure(msg) => write!(f, "{:?}", msg),
             Panic { msg, line, col, file } => {
                 write!(f, "the evaluated program panicked at '{}', {}:{}:{}", msg, file, line, col)
             }
-            Abort(ref msg) => write!(f, "{}", msg),
+            Abort(msg) => write!(f, "{}", msg),
         }
     }
 }
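
The `match *self` to `match self` change above leans on default binding modes ("match ergonomics"), so the explicit `ref` patterns can go. A standalone illustration with a made-up `Msg` type:

    enum Msg { Quit, Text(String) }

    impl Msg {
        // Old style: dereference the receiver and write `ref` by hand.
        fn describe_old(&self) -> String {
            match *self {
                Msg::Quit => String::from("quit"),
                Msg::Text(ref s) => format!("text: {s}"),
            }
        }

        // New style, as in the hunk above: match on `&self` directly and the
        // bindings become references automatically.
        fn describe(&self) -> String {
            match self {
                Msg::Quit => String::from("quit"),
                Msg::Text(s) => format!("text: {s}"),
            }
        }
    }
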
index e006a62feeabd12bc568da7c5c42e76bdde1cc2f..4709514c82e85ab70a99727065a9c2008f26dcf0 100644 (file)
@@ -225,7 +225,7 @@ fn hook_special_const_fn(
     /// `align_offset(ptr, target_align)` needs special handling in const eval, because the pointer
     /// may not have an address.
     ///
-    /// If `ptr` does have a known address, then we return `CONTINUE` and the function call should
+    /// If `ptr` does have a known address, then we return `Continue(())` and the function call should
     /// proceed as normal.
     ///
     /// If `ptr` doesn't have an address, but its underlying allocation's alignment is at most
@@ -273,18 +273,18 @@ fn align_offset(
                         ret,
                         StackPopUnwind::NotAllowed,
                     )?;
-                    Ok(ControlFlow::BREAK)
+                    Ok(ControlFlow::Break(()))
                 } else {
                     // Not alignable in const, return `usize::MAX`.
                     let usize_max = Scalar::from_machine_usize(self.machine_usize_max(), self);
                     self.write_scalar(usize_max, dest)?;
                     self.return_to_block(ret)?;
-                    Ok(ControlFlow::BREAK)
+                    Ok(ControlFlow::Break(()))
                 }
             }
             Err(_addr) => {
                 // The pointer has an address, continue with function call.
-                Ok(ControlFlow::CONTINUE)
+                Ok(ControlFlow::Continue(()))
             }
         }
     }
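
`ControlFlow::BREAK` and `ControlFlow::CONTINUE` were unstable associated constants behind the `control_flow_enum` feature (a gate this diff also drops from a crate-level `#![feature]` list further down); spelling out the variants works on stable and reads the same. A small self-contained example, with `first_negative` made up for illustration:

    use std::ops::ControlFlow;

    // Stop at the first negative element, otherwise keep scanning.
    fn first_negative(xs: &[i32]) -> ControlFlow<i32, ()> {
        for &x in xs {
            if x < 0 {
                return ControlFlow::Break(x);
            }
        }
        ControlFlow::Continue(())
    }

    fn main() {
        assert_eq!(first_negative(&[3, 1, -2, 5]), ControlFlow::Break(-2));
        assert_eq!(first_negative(&[3, 1, 2]), ControlFlow::Continue(()));
    }
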
@@ -408,7 +408,7 @@ fn find_mir_or_eval_fn(
         // Only check non-glue functions
         if let ty::InstanceDef::Item(def) = instance.def {
             // Execution might have wandered off into other crates, so we cannot do a stability-
-            // sensitive check here.  But we can at least rule out functions that are not const
+            // sensitive check here. But we can at least rule out functions that are not const
             // at all.
             if !ecx.tcx.is_const_fn_raw(def.did) {
                 // allow calling functions inside a trait marked with #[const_trait].
@@ -533,7 +533,7 @@ fn assert_panic(
         let eval_to_int =
             |op| ecx.read_immediate(&ecx.eval_operand(op, None)?).map(|x| x.to_const_int());
         let err = match msg {
-            BoundsCheck { ref len, ref index } => {
+            BoundsCheck { len, index } => {
                 let len = eval_to_int(len)?;
                 let index = eval_to_int(index)?;
                 BoundsCheck { len, index }
index 986b6d655300168046dad275e318667b1adff014..b2c847d3fd8dd6fca2403ecd5d9216ae4c5e63f4 100644 (file)
@@ -347,7 +347,7 @@ fn unsize_into_ptr(
                 let new_vptr = self.get_vtable_ptr(ty, data_b.principal())?;
                 self.write_immediate(Immediate::new_dyn_trait(old_data, new_vptr, self), dest)
             }
-            (_, &ty::Dynamic(ref data, _, ty::Dyn)) => {
+            (_, &ty::Dynamic(data, _, ty::Dyn)) => {
                 // Initial cast from sized to dyn trait
                 let vtable = self.get_vtable_ptr(src_pointee_ty, data.principal())?;
                 let ptr = self.read_scalar(src)?;
index f551b5c29114d9984fcf108ceb1b9e12628db580..d13fed7a9c2631fa366200f8686433831c9d558b 100644 (file)
@@ -196,7 +196,7 @@ pub fn access(&self) -> InterpResult<'tcx, &Operand<Prov>> {
         }
     }
 
-    /// Overwrite the local.  If the local can be overwritten in place, return a reference
+    /// Overwrite the local. If the local can be overwritten in place, return a reference
     /// to do so; otherwise return the `MemPlace` to consult instead.
     ///
     /// Note: This may only be invoked from the `Machine::access_local_mut` hook and not from
@@ -592,7 +592,7 @@ pub(super) fn size_and_align_of(
                 );
 
                 // Recurse to get the size of the dynamically sized field (must be
-                // the last field).  Can't have foreign types here, how would we
+                // the last field). Can't have foreign types here, how would we
                 // adjust alignment and size for them?
                 let field = layout.field(self, layout.fields.count() - 1);
                 let Some((unsized_size, mut unsized_align)) = self.size_and_align_of(metadata, &field)? else {
index 458cc6180d53e47281bf97a9754125159963cb96..54528b1dbf4a0e252fe90929b9c1945ae43ca3a5 100644 (file)
@@ -59,7 +59,7 @@ struct InternVisitor<'rt, 'mir, 'tcx, M: CompileTimeMachine<'mir, 'tcx, const_ev
 
 #[derive(Copy, Clone, Debug, PartialEq, Hash, Eq)]
 enum InternMode {
-    /// A static and its current mutability.  Below shared references inside a `static mut`,
+    /// A static and its current mutability. Below shared references inside a `static mut`,
     /// this is *immutable*, and below mutable references inside an `UnsafeCell`, this
     /// is *mutable*.
     Static(hir::Mutability),
@@ -296,7 +296,7 @@ fn visit_value(&mut self, mplace: &MPlaceTy<'tcx>) -> InterpResult<'tcx> {
                         }
                     }
                     InternMode::Const => {
-                        // Ignore `UnsafeCell`, everything is immutable.  Validity does some sanity
+                        // Ignore `UnsafeCell`, everything is immutable. Validity does some sanity
                         // checking for mutable references that we encounter -- they must all be
                         // ZST.
                         InternMode::Const
@@ -330,7 +330,7 @@ pub enum InternKind {
 
 /// Intern `ret` and everything it references.
 ///
-/// This *cannot raise an interpreter error*.  Doing so is left to validation, which
+/// This *cannot raise an interpreter error*. Doing so is left to validation, which
 /// tracks where in the value we are and thus can show much better error messages.
 #[instrument(level = "debug", skip(ecx))]
 pub fn intern_const_alloc_recursive<
@@ -379,7 +379,7 @@ pub fn intern_const_alloc_recursive<
             inside_unsafe_cell: false,
         }
         .visit_value(&mplace);
-        // We deliberately *ignore* interpreter errors here.  When there is a problem, the remaining
+        // We deliberately *ignore* interpreter errors here. When there is a problem, the remaining
         // references are "leftover"-interned, and later validation will show a proper error
         // and point at the right part of the value causing the problem.
         match res {
@@ -454,7 +454,7 @@ pub fn intern_const_alloc_recursive<
             return Err(reported);
         } else if ecx.tcx.try_get_global_alloc(alloc_id).is_none() {
             // We have hit an `AllocId` that is neither in local or global memory and isn't
-            // marked as dangling by local memory.  That should be impossible.
+            // marked as dangling by local memory. That should be impossible.
             span_bug!(ecx.tcx.span, "encountered unknown alloc id {:?}", alloc_id);
         }
     }
index 666fcbd6f80488cc2dff1abc141252a50b72aafc..cc7b6c91b607426e57cd6ff9b65b597eb8b4d54e 100644 (file)
@@ -79,9 +79,7 @@ pub(crate) fn eval_nullary_intrinsic<'tcx>(
         }
         sym::variant_count => match tp_ty.kind() {
             // Correctly handles non-monomorphic calls, so there is no need for ensure_monomorphic_enough.
-            ty::Adt(ref adt, _) => {
-                ConstValue::from_machine_usize(adt.variants().len() as u64, &tcx)
-            }
+            ty::Adt(adt, _) => ConstValue::from_machine_usize(adt.variants().len() as u64, &tcx),
             ty::Alias(..) | ty::Param(_) | ty::Placeholder(_) | ty::Infer(_) => {
                 throw_inval!(TooGeneric)
             }
index 1d4ef20d0651f7bd11d7cb466f0290d375ff27d8..248953de8672828a7889da54697268517eddd90e 100644 (file)
@@ -180,7 +180,7 @@ fn find_mir_or_eval_fn(
         unwind: StackPopUnwind,
     ) -> InterpResult<'tcx, Option<(&'mir mir::Body<'tcx>, ty::Instance<'tcx>)>>;
 
-    /// Execute `fn_val`.  It is the hook's responsibility to advance the instruction
+    /// Execute `fn_val`. It is the hook's responsibility to advance the instruction
     /// pointer as appropriate.
     fn call_extra_fn(
         ecx: &mut InterpCx<'mir, 'tcx, Self>,
@@ -439,7 +439,7 @@ fn after_stack_pop(
 }
 
 /// A lot of the flexibility above is just needed for `Miri`, but all "compile-time" machines
-/// (CTFE and ConstProp) use the same instance.  Here, we share that code.
+/// (CTFE and ConstProp) use the same instance. Here, we share that code.
 pub macro compile_time_machine(<$mir: lifetime, $tcx: lifetime>) {
     type Provenance = AllocId;
     type ProvenanceExtra = ();
index 5b1ac6b2f65e29f7bc7a726d3044a6fd5cb95654..291bfb2b55896dd73412d453533eab6223c56a6f 100644 (file)
@@ -146,7 +146,7 @@ pub fn alloc_map(&self) -> &M::MemoryMap {
 
 impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
     /// Call this to turn untagged "global" pointers (obtained via `tcx`) into
-    /// the machine pointer to the allocation.  Must never be used
+    /// the machine pointer to the allocation. Must never be used
     /// for any other pointers, nor for TLS statics.
     ///
     /// Using the resulting pointer represents a *direct* access to that memory
@@ -536,7 +536,7 @@ fn get_alloc_raw(
         &self,
         id: AllocId,
     ) -> InterpResult<'tcx, &Allocation<M::Provenance, M::AllocExtra>> {
-        // The error type of the inner closure here is somewhat funny.  We have two
+        // The error type of the inner closure here is somewhat funny. We have two
         // ways of "erroring": An actual error, or because we got a reference from
         // `get_global_alloc` that we can actually use directly without inserting anything anywhere.
         // So the error type is `InterpResult<'tcx, &Allocation<M::Provenance>>`.
@@ -863,7 +863,7 @@ fn write_allocation_track_relocs<'tcx, Prov: Provenance, Extra>(
 
             write!(fmt, "{id:?}")?;
             match self.ecx.memory.alloc_map.get(id) {
-                Some(&(kind, ref alloc)) => {
+                Some((kind, alloc)) => {
                     // normal alloc
                     write!(fmt, " ({}, ", kind)?;
                     write_allocation_track_relocs(
index fcc6f8ea85282673c6f790d08ab13af0a85520f0..befc0928f3debd253efecb036e95d991b02be214 100644 (file)
@@ -488,7 +488,7 @@ pub fn place_to_op(
         Ok(OpTy { op, layout: place.layout, align: Some(place.align) })
     }
 
-    /// Evaluate a place with the goal of reading from it.  This lets us sometimes
+    /// Evaluate a place with the goal of reading from it. This lets us sometimes
     /// avoid allocations.
     pub fn eval_place_to_op(
         &self,
@@ -533,11 +533,11 @@ pub fn eval_operand(
         layout: Option<TyAndLayout<'tcx>>,
     ) -> InterpResult<'tcx, OpTy<'tcx, M::Provenance>> {
         use rustc_middle::mir::Operand::*;
-        let op = match *mir_op {
+        let op = match mir_op {
             // FIXME: do some more logic on `move` to invalidate the old location
-            Copy(place) | Move(place) => self.eval_place_to_op(place, layout)?,
+            &Copy(place) | &Move(place) => self.eval_place_to_op(place, layout)?,
 
-            Constant(ref constant) => {
+            Constant(constant) => {
                 let c =
                     self.subst_from_current_frame_and_normalize_erasing_regions(constant.literal)?;
 
index 97a73e98abcbca9d563027620fc8a74630014606..274af61ee7c1d5899b1d2d8b3a2700c912d9a0fa 100644 (file)
@@ -233,7 +233,7 @@ pub(crate) fn len(&self, cx: &impl HasDataLayout) -> InterpResult<'tcx, u64> {
                 _ => bug!("len not supported on unsized type {:?}", self.layout.ty),
             }
         } else {
-            // Go through the layout.  There are lots of types that support a length,
+            // Go through the layout. There are lots of types that support a length,
             // e.g., SIMD types. (But not all repr(simd) types even have FieldsShape::Array!)
             match self.layout.fields {
                 abi::FieldsShape::Array { count, .. } => Ok(count),
@@ -294,7 +294,7 @@ impl<'mir, 'tcx: 'mir, Prov, M> InterpCx<'mir, 'tcx, M>
     M: Machine<'mir, 'tcx, Provenance = Prov>,
 {
     /// Take a value, which represents a (thin or wide) reference, and make it a place.
-    /// Alignment is just based on the type.  This is the inverse of `MemPlace::to_ref()`.
+    /// Alignment is just based on the type. This is the inverse of `MemPlace::to_ref()`.
     ///
     /// Only call this if you are sure the place is "valid" (aligned and inbounds), or do not
     /// want to ever use the place for memory access!
@@ -703,7 +703,7 @@ pub fn force_allocation(
                     &mut Operand::Immediate(local_val) => {
                         // We need to make an allocation.
 
-                        // We need the layout of the local.  We can NOT use the layout we got,
+                        // We need the layout of the local. We can NOT use the layout we got,
                         // that might e.g., be an inner field of a struct with `Scalar` layout,
                         // that has different alignment than the outer field.
                         let local_layout =
index 81b44a49484d0a72e213d8a32b510e784b7938b1..fad4cb06cd6fe4805f2f9dacfb3feac0bf075e22 100644 (file)
@@ -111,7 +111,7 @@ pub fn statement(&mut self, stmt: &mir::Statement<'tcx>) -> InterpResult<'tcx> {
                 M::retag_place_contents(self, *kind, &dest)?;
             }
 
-            Intrinsic(box ref intrinsic) => self.emulate_nondiverging_intrinsic(intrinsic)?,
+            Intrinsic(box intrinsic) => self.emulate_nondiverging_intrinsic(intrinsic)?,
 
             // Statements we do not track.
             AscribeUserType(..) => {}
@@ -163,8 +163,8 @@ pub fn eval_rvalue_into_place(
                 self.copy_op(&op, &dest, /*allow_transmute*/ false)?;
             }
 
-            CopyForDeref(ref place) => {
-                let op = self.eval_place_to_op(*place, Some(dest.layout))?;
+            CopyForDeref(place) => {
+                let op = self.eval_place_to_op(place, Some(dest.layout))?;
                 self.copy_op(&op, &dest, /* allow_transmute*/ false)?;
             }
 
index 550c7a44c4199e6cf7043d4dd60f6e49c7f4b436..da320cd1cd5f0c6444b1ce8a8d56fffe2e776c78 100644 (file)
@@ -446,7 +446,7 @@ pub(crate) fn eval_fn_call(
                     // they go to.
 
                     // For where they come from: If the ABI is RustCall, we untuple the
-                    // last incoming argument.  These two iterators do not have the same type,
+                    // last incoming argument. These two iterators do not have the same type,
                     // so to keep the code paths uniform we accept an allocation
                     // (for RustCall ABI only).
                     let caller_args: Cow<'_, [OpTy<'tcx, M::Provenance>]> =
@@ -481,7 +481,7 @@ pub(crate) fn eval_fn_call(
                         .filter(|arg_and_abi| !matches!(arg_and_abi.1.mode, PassMode::Ignore));
 
                     // Now we have to spread them out across the callee's locals,
-                    // taking into account the `spread_arg`.  If we could write
+                    // taking into account the `spread_arg`. If we could write
                     // this is a single iterator (that handles `spread_arg`), then
                     // `pass_argument` would be the loop body. It takes care to
                     // not advance `caller_iter` for ZSTs.
@@ -648,8 +648,8 @@ fn drop_in_place(
         unwind: Option<mir::BasicBlock>,
     ) -> InterpResult<'tcx> {
         trace!("drop_in_place: {:?},\n  {:?}, {:?}", *place, place.layout.ty, instance);
-        // We take the address of the object.  This may well be unaligned, which is fine
-        // for us here.  However, unaligned accesses will probably make the actual drop
+        // We take the address of the object. This may well be unaligned, which is fine
+        // for us here. However, unaligned accesses will probably make the actual drop
         // implementation fail -- a problem shared by rustc.
         let place = self.force_allocation(place)?;
 
index a61d3ab40a5ca1c0efa4c9d1339ef95bf38be99b..cabc65e2c077e674d356938d939341dbb8bafb54 100644 (file)
@@ -26,7 +26,7 @@ impl<'tcx> TypeVisitor<'tcx> for UsedParamsNeedSubstVisitor<'tcx> {
 
         fn visit_ty(&mut self, ty: Ty<'tcx>) -> ControlFlow<Self::BreakTy> {
             if !ty.needs_subst() {
-                return ControlFlow::CONTINUE;
+                return ControlFlow::Continue(());
             }
 
             match *ty.kind() {
@@ -48,7 +48,7 @@ fn visit_ty(&mut self, ty: Ty<'tcx>) -> ControlFlow<Self::BreakTy> {
                             return subst.visit_with(self);
                         }
                     }
-                    ControlFlow::CONTINUE
+                    ControlFlow::Continue(())
                 }
                 _ => ty.super_visit_with(self),
             }
index f905d3fb479a0b7e343c7dd187239f94f3543705..19e359986a12e15762ad60cd06dc945a2f0eb046 100644 (file)
@@ -175,7 +175,7 @@ fn write_path(out: &mut String, path: &[PathElem]) {
             TupleElem(idx) => write!(out, ".{}", idx),
             ArrayElem(idx) => write!(out, "[{}]", idx),
             // `.<deref>` does not match Rust syntax, but it is more readable for long paths -- and
-            // some of the other items here also are not Rust syntax.  Actually we can't
+            // some of the other items here also are not Rust syntax. Actually we can't
             // even use the usual syntax because we are just showing the projections,
             // not the root.
             Deref => write!(out, ".<deref>"),
@@ -419,7 +419,7 @@ fn check_safe_pointer(
             )
         }
         // Recursive checking
-        if let Some(ref mut ref_tracking) = self.ref_tracking {
+        if let Some(ref_tracking) = self.ref_tracking.as_deref_mut() {
             // Proceed recursively even for ZST, no reason to skip them!
             // `!` is a ZST and we want to validate it.
             if let Ok((alloc_id, _offset, _prov)) = self.ecx.ptr_try_get_alloc_id(place.ptr) {
@@ -484,7 +484,7 @@ fn check_safe_pointer(
     }
 
     /// Check if this is a value of primitive type, and if yes check the validity of the value
-    /// at that type.  Return `true` if the type is indeed primitive.
+    /// at that type. Return `true` if the type is indeed primitive.
     fn try_visit_primitive(
         &mut self,
         value: &OpTy<'tcx, M::Provenance>,
@@ -623,7 +623,7 @@ fn visit_scalar(
                 // Can only happen during CTFE.
                 // We support 2 kinds of ranges here: full range, and excluding zero.
                 if start == 1 && end == max_value {
-                    // Only null is the niche.  So make sure the ptr is NOT null.
+                    // Only null is the niche. So make sure the ptr is NOT null.
                     if self.ecx.scalar_may_be_null(scalar)? {
                         throw_validation_failure!(self.path,
                             { "a potentially null pointer" }
@@ -759,7 +759,7 @@ fn visit_value(&mut self, op: &OpTy<'tcx, M::Provenance>) -> InterpResult<'tcx>
         // Recursively walk the value at its type.
         self.walk_value(op)?;
 
-        // *After* all of this, check the ABI.  We need to check the ABI to handle
+        // *After* all of this, check the ABI. We need to check the ABI to handle
         // types like `NonNull` where the `Scalar` info is more restrictive than what
         // the fields say (`rustc_layout_scalar_valid_range_start`).
         // But in most cases, this will just propagate what the fields say,
@@ -857,10 +857,10 @@ fn visit_aggregate(
                 // Optimization: we just check the entire range at once.
                 // NOTE: Keep this in sync with the handling of integer and float
                 // types above, in `visit_primitive`.
-                // In run-time mode, we accept pointers in here.  This is actually more
+                // In run-time mode, we accept pointers in here. This is actually more
                 // permissive than a per-element check would be, e.g., we accept
                 // a &[u8] that contains a pointer even though bytewise checking would
-                // reject it.  However, that's good: We don't inherently want
+                // reject it. However, that's good: We don't inherently want
                 // to reject those pointers, we just do not have the machinery to
                 // talk about parts of a pointer.
                 // We also accept uninit, for consistency with the slow path.
index 1a10851a9f9013d92923c9347fac038fd14a7e17..f9efc2418dbbcd0ed5ccb8919da86b8bec11a391 100644 (file)
@@ -481,12 +481,12 @@ fn walk_value(&mut self, v: &Self::V) -> InterpResult<'tcx>
                 };
 
                 // Visit the fields of this value.
-                match v.layout().fields {
+                match &v.layout().fields {
                     FieldsShape::Primitive => {}
-                    FieldsShape::Union(fields) => {
+                    &FieldsShape::Union(fields) => {
                         self.visit_union(v, fields)?;
                     }
-                    FieldsShape::Arbitrary { ref offsets, .. } => {
+                    FieldsShape::Arbitrary { offsets, .. } => {
                         // FIXME: We collect in a vec because otherwise there are lifetime
                         // errors: Projecting to a field needs access to `ecx`.
                         let fields: Vec<InterpResult<'tcx, Self::V>> =
index 46e7b09a55e109c61f24eabb844437a13dcd1d1d..57b91df2d0708f8f06e35438518c51b6f148df6c 100644 (file)
@@ -6,7 +6,6 @@
 
 #![feature(assert_matches)]
 #![feature(box_patterns)]
-#![feature(control_flow_enum)]
 #![feature(decl_macro)]
 #![feature(exact_size_is_empty)]
 #![feature(let_chains)]
index d4c75cd55ce1e4fe89170c96bffe7fcc8b7d88b8..79f1737e32b21fcabe767b8999d753b5210aaa30 100644 (file)
@@ -442,7 +442,7 @@ fn visit_rvalue(&mut self, rvalue: &Rvalue<'tcx>, location: Location) {
 
         self.super_rvalue(rvalue, location);
 
-        match *rvalue {
+        match rvalue {
             Rvalue::ThreadLocalRef(_) => self.check_op(ops::ThreadLocalAccess),
 
             Rvalue::Use(_)
@@ -451,18 +451,15 @@ fn visit_rvalue(&mut self, rvalue: &Rvalue<'tcx>, location: Location) {
             | Rvalue::Discriminant(..)
             | Rvalue::Len(_) => {}
 
-            Rvalue::Aggregate(ref kind, ..) => {
-                if let AggregateKind::Generator(def_id, ..) = kind.as_ref() {
-                    if let Some(generator_kind) = self.tcx.generator_kind(def_id.to_def_id()) {
-                        if matches!(generator_kind, hir::GeneratorKind::Async(..)) {
-                            self.check_op(ops::Generator(generator_kind));
-                        }
-                    }
+            Rvalue::Aggregate(kind, ..) => {
+                if let AggregateKind::Generator(def_id, ..) = kind.as_ref()
+                    && let Some(generator_kind @ hir::GeneratorKind::Async(..)) = self.tcx.generator_kind(def_id.to_def_id())
+                {
+                    self.check_op(ops::Generator(generator_kind));
                 }
             }
 
-            Rvalue::Ref(_, kind @ BorrowKind::Mut { .. }, ref place)
-            | Rvalue::Ref(_, kind @ BorrowKind::Unique, ref place) => {
+            Rvalue::Ref(_, kind @ (BorrowKind::Mut { .. } | BorrowKind::Unique), place) => {
                 let ty = place.ty(self.body, self.tcx).ty;
                 let is_allowed = match ty.kind() {
                     // Inside a `static mut`, `&mut [...]` is allowed.
@@ -491,12 +488,12 @@ fn visit_rvalue(&mut self, rvalue: &Rvalue<'tcx>, location: Location) {
                 }
             }
 
-            Rvalue::AddressOf(Mutability::Mut, ref place) => {
+            Rvalue::AddressOf(Mutability::Mut, place) => {
                 self.check_mut_borrow(place.local, hir::BorrowKind::Raw)
             }
 
-            Rvalue::Ref(_, BorrowKind::Shared | BorrowKind::Shallow, ref place)
-            | Rvalue::AddressOf(Mutability::Not, ref place) => {
+            Rvalue::Ref(_, BorrowKind::Shared | BorrowKind::Shallow, place)
+            | Rvalue::AddressOf(Mutability::Not, place) => {
                 let borrowed_place_has_mut_interior = qualifs::in_place::<HasMutInterior, _>(
                     &self.ccx,
                     &mut |local| self.qualifs.has_mut_interior(self.ccx, local, location),
@@ -564,7 +561,7 @@ fn visit_rvalue(&mut self, rvalue: &Rvalue<'tcx>, location: Location) {
             Rvalue::NullaryOp(NullOp::SizeOf | NullOp::AlignOf, _) => {}
             Rvalue::ShallowInitBox(_, _) => {}
 
-            Rvalue::UnaryOp(_, ref operand) => {
+            Rvalue::UnaryOp(_, operand) => {
                 let ty = operand.ty(self.body, self.tcx);
                 if is_int_bool_or_char(ty) {
                     // Int, bool, and char operations are fine.
@@ -575,8 +572,8 @@ fn visit_rvalue(&mut self, rvalue: &Rvalue<'tcx>, location: Location) {
                 }
             }
 
-            Rvalue::BinaryOp(op, box (ref lhs, ref rhs))
-            | Rvalue::CheckedBinaryOp(op, box (ref lhs, ref rhs)) => {
+            Rvalue::BinaryOp(op, box (lhs, rhs))
+            | Rvalue::CheckedBinaryOp(op, box (lhs, rhs)) => {
                 let lhs_ty = lhs.ty(self.body, self.tcx);
                 let rhs_ty = rhs.ty(self.body, self.tcx);
 
@@ -585,13 +582,16 @@ fn visit_rvalue(&mut self, rvalue: &Rvalue<'tcx>, location: Location) {
                 } else if lhs_ty.is_fn_ptr() || lhs_ty.is_unsafe_ptr() {
                     assert_eq!(lhs_ty, rhs_ty);
                     assert!(
-                        op == BinOp::Eq
-                            || op == BinOp::Ne
-                            || op == BinOp::Le
-                            || op == BinOp::Lt
-                            || op == BinOp::Ge
-                            || op == BinOp::Gt
-                            || op == BinOp::Offset
+                        matches!(
+                            op,
+                            BinOp::Eq
+                            | BinOp::Ne
+                            | BinOp::Le
+                            | BinOp::Lt
+                            | BinOp::Ge
+                            | BinOp::Gt
+                            | BinOp::Offset
+                        )
                     );
 
                     self.check_op(ops::RawPtrComparison);
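
The assertion rewrite above trades a chain of `==` comparisons for a single `matches!` pattern. A standalone equivalent, with a local `BinOp` re-declared for illustration rather than the MIR type:

    #[derive(PartialEq)]
    enum BinOp { Add, Eq, Ne, Le, Lt, Ge, Gt, Offset }

    // Comparison chain: needs `PartialEq` and repeats the operand each time.
    fn allowed_on_ptrs_eq(op: &BinOp) -> bool {
        *op == BinOp::Eq || *op == BinOp::Ne || *op == BinOp::Le
            || *op == BinOp::Lt || *op == BinOp::Ge || *op == BinOp::Gt
            || *op == BinOp::Offset
    }

    // `matches!` form: one pattern, no `PartialEq` required, reads as a unit.
    fn allowed_on_ptrs(op: &BinOp) -> bool {
        matches!(op, BinOp::Eq | BinOp::Ne | BinOp::Le | BinOp::Lt
            | BinOp::Ge | BinOp::Gt | BinOp::Offset)
    }
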
index 04ce701452b90de0c346efd4b8c8250f0e6d76f9..fae6117f8f05224616eb95d0ed345b606b785890 100644 (file)
@@ -133,7 +133,7 @@ fn visit_local(&mut self, index: Local, context: PlaceContext, location: Locatio
                 }
                 _ => { /* mark as unpromotable below */ }
             }
-        } else if let TempState::Defined { ref mut uses, .. } = *temp {
+        } else if let TempState::Defined { uses, .. } = temp {
             // We always allow borrows, even mutable ones, as we need
             // to promote mutable borrows of some ZSTs e.g., `&mut []`.
             let allowed_use = match context {
@@ -748,7 +748,7 @@ fn promote_temp(&mut self, temp: Local) -> Local {
         if loc.statement_index < num_stmts {
             let (mut rvalue, source_info) = {
                 let statement = &mut self.source[loc.block].statements[loc.statement_index];
-                let StatementKind::Assign(box (_, ref mut rhs)) = statement.kind else {
+                let StatementKind::Assign(box (_, rhs)) = &mut statement.kind else {
                     span_bug!(
                         statement.source_info.span,
                         "{:?} is not an assignment",
@@ -778,9 +778,9 @@ fn promote_temp(&mut self, temp: Local) -> Local {
                 self.source[loc.block].terminator().clone()
             } else {
                 let terminator = self.source[loc.block].terminator_mut();
-                let target = match terminator.kind {
-                    TerminatorKind::Call { target: Some(target), .. } => target,
-                    ref kind => {
+                let target = match &terminator.kind {
+                    TerminatorKind::Call { target: Some(target), .. } => *target,
+                    kind => {
                         span_bug!(terminator.source_info.span, "{:?} not promotable", kind);
                     }
                 };
@@ -814,7 +814,7 @@ fn promote_temp(&mut self, temp: Local) -> Local {
                         ..terminator
                     };
                 }
-                ref kind => {
+                kind => {
                     span_bug!(terminator.source_info.span, "{:?} not promotable", kind);
                 }
             };
@@ -847,54 +847,50 @@ fn promote_candidate(mut self, candidate: Candidate, next_promoted_id: usize) ->
             let local_decls = &mut self.source.local_decls;
             let loc = candidate.location;
             let statement = &mut blocks[loc.block].statements[loc.statement_index];
-            match statement.kind {
-                StatementKind::Assign(box (
-                    _,
-                    Rvalue::Ref(ref mut region, borrow_kind, ref mut place),
-                )) => {
-                    // Use the underlying local for this (necessarily interior) borrow.
-                    let ty = local_decls[place.local].ty;
-                    let span = statement.source_info.span;
-
-                    let ref_ty = tcx.mk_ref(
-                        tcx.lifetimes.re_erased,
-                        ty::TypeAndMut { ty, mutbl: borrow_kind.to_mutbl_lossy() },
-                    );
+            let StatementKind::Assign(box (_, Rvalue::Ref(region, borrow_kind, place))) = &mut statement.kind else {
+                bug!()
+            };
 
-                    *region = tcx.lifetimes.re_erased;
-
-                    let mut projection = vec![PlaceElem::Deref];
-                    projection.extend(place.projection);
-                    place.projection = tcx.intern_place_elems(&projection);
-
-                    // Create a temp to hold the promoted reference.
-                    // This is because `*r` requires `r` to be a local,
-                    // otherwise we would use the `promoted` directly.
-                    let mut promoted_ref = LocalDecl::new(ref_ty, span);
-                    promoted_ref.source_info = statement.source_info;
-                    let promoted_ref = local_decls.push(promoted_ref);
-                    assert_eq!(self.temps.push(TempState::Unpromotable), promoted_ref);
-
-                    let promoted_ref_statement = Statement {
-                        source_info: statement.source_info,
-                        kind: StatementKind::Assign(Box::new((
-                            Place::from(promoted_ref),
-                            Rvalue::Use(promoted_operand(ref_ty, span)),
-                        ))),
-                    };
-                    self.extra_statements.push((loc, promoted_ref_statement));
-
-                    Rvalue::Ref(
-                        tcx.lifetimes.re_erased,
-                        borrow_kind,
-                        Place {
-                            local: mem::replace(&mut place.local, promoted_ref),
-                            projection: List::empty(),
-                        },
-                    )
-                }
-                _ => bug!(),
-            }
+            // Use the underlying local for this (necessarily interior) borrow.
+            let ty = local_decls[place.local].ty;
+            let span = statement.source_info.span;
+
+            let ref_ty = tcx.mk_ref(
+                tcx.lifetimes.re_erased,
+                ty::TypeAndMut { ty, mutbl: borrow_kind.to_mutbl_lossy() },
+            );
+
+            *region = tcx.lifetimes.re_erased;
+
+            let mut projection = vec![PlaceElem::Deref];
+            projection.extend(place.projection);
+            place.projection = tcx.intern_place_elems(&projection);
+
+            // Create a temp to hold the promoted reference.
+            // This is because `*r` requires `r` to be a local,
+            // otherwise we would use the `promoted` directly.
+            let mut promoted_ref = LocalDecl::new(ref_ty, span);
+            promoted_ref.source_info = statement.source_info;
+            let promoted_ref = local_decls.push(promoted_ref);
+            assert_eq!(self.temps.push(TempState::Unpromotable), promoted_ref);
+
+            let promoted_ref_statement = Statement {
+                source_info: statement.source_info,
+                kind: StatementKind::Assign(Box::new((
+                    Place::from(promoted_ref),
+                    Rvalue::Use(promoted_operand(ref_ty, span)),
+                ))),
+            };
+            self.extra_statements.push((loc, promoted_ref_statement));
+
+            Rvalue::Ref(
+                tcx.lifetimes.re_erased,
+                *borrow_kind,
+                Place {
+                    local: mem::replace(&mut place.local, promoted_ref),
+                    projection: List::empty(),
+                },
+            )
         };
 
         assert_eq!(self.new_block(), START_BLOCK);
index 94e1b95a0eb3c9501a33f4d3bafecd82c632bd9f..dd168a9ac3cd3e94078cc20e24fb4c18c4b1b35c 100644 (file)
@@ -1,7 +1,8 @@
 //! Validates the MIR to ensure that invariants are upheld.
 
-use rustc_data_structures::fx::FxHashSet;
+use rustc_data_structures::fx::{FxHashMap, FxHashSet};
 use rustc_index::bit_set::BitSet;
+use rustc_index::vec::IndexVec;
 use rustc_infer::traits::Reveal;
 use rustc_middle::mir::interpret::Scalar;
 use rustc_middle::mir::visit::NonUseContext::VarDebugInfo;
@@ -18,7 +19,7 @@
 use rustc_mir_dataflow::{Analysis, ResultsCursor};
 use rustc_target::abi::{Size, VariantIdx};
 
-#[derive(Copy, Clone, Debug)]
+#[derive(Copy, Clone, Debug, PartialEq, Eq)]
 enum EdgeKind {
     Unwind,
     Normal,
@@ -57,18 +58,20 @@ fn run_pass(&self, tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) {
             .iterate_to_fixpoint()
             .into_results_cursor(body);
 
-        TypeChecker {
+        let mut checker = TypeChecker {
             when: &self.when,
             body,
             tcx,
             param_env,
             mir_phase,
+            unwind_edge_count: 0,
             reachable_blocks: traversal::reachable_as_bitset(body),
             storage_liveness,
             place_cache: Vec::new(),
             value_cache: Vec::new(),
-        }
-        .visit_body(body);
+        };
+        checker.visit_body(body);
+        checker.check_cleanup_control_flow();
     }
 }
 
@@ -78,6 +81,7 @@ struct TypeChecker<'a, 'tcx> {
     tcx: TyCtxt<'tcx>,
     param_env: ParamEnv<'tcx>,
     mir_phase: MirPhase,
+    unwind_edge_count: usize,
     reachable_blocks: BitSet<BasicBlock>,
     storage_liveness: ResultsCursor<'a, 'tcx, MaybeStorageLive<'static>>,
     place_cache: Vec<PlaceRef<'tcx>>,
@@ -102,7 +106,7 @@ fn fail(&self, location: Location, msg: impl AsRef<str>) {
         );
     }
 
-    fn check_edge(&self, location: Location, bb: BasicBlock, edge_kind: EdgeKind) {
+    fn check_edge(&mut self, location: Location, bb: BasicBlock, edge_kind: EdgeKind) {
         if bb == START_BLOCK {
             self.fail(location, "start block must not have predecessors")
         }
@@ -111,10 +115,12 @@ fn check_edge(&self, location: Location, bb: BasicBlock, edge_kind: EdgeKind) {
             match (src.is_cleanup, bb.is_cleanup, edge_kind) {
                 // Non-cleanup blocks can jump to non-cleanup blocks along non-unwind edges
                 (false, false, EdgeKind::Normal)
-                // Non-cleanup blocks can jump to cleanup blocks along unwind edges
-                | (false, true, EdgeKind::Unwind)
                 // Cleanup blocks can jump to cleanup blocks along non-unwind edges
                 | (true, true, EdgeKind::Normal) => {}
+                // Non-cleanup blocks can jump to cleanup blocks along unwind edges
+                (false, true, EdgeKind::Unwind) => {
+                    self.unwind_edge_count += 1;
+                }
                 // All other jumps are invalid
                 _ => {
                     self.fail(
@@ -134,6 +140,88 @@ fn check_edge(&self, location: Location, bb: BasicBlock, edge_kind: EdgeKind) {
         }
     }
 
+    fn check_cleanup_control_flow(&self) {
+        if self.unwind_edge_count <= 1 {
+            return;
+        }
+        let doms = self.body.basic_blocks.dominators();
+        let mut post_contract_node = FxHashMap::default();
+        // Reusing the allocation across invocations of the closure
+        let mut dom_path = vec![];
+        let mut get_post_contract_node = |mut bb| {
+            let root = loop {
+                if let Some(root) = post_contract_node.get(&bb) {
+                    break *root;
+                }
+                let parent = doms.immediate_dominator(bb);
+                dom_path.push(bb);
+                if !self.body.basic_blocks[parent].is_cleanup {
+                    break bb;
+                }
+                bb = parent;
+            };
+            for bb in dom_path.drain(..) {
+                post_contract_node.insert(bb, root);
+            }
+            root
+        };
+
+        let mut parent = IndexVec::from_elem(None, &self.body.basic_blocks);
+        for (bb, bb_data) in self.body.basic_blocks.iter_enumerated() {
+            if !bb_data.is_cleanup || !self.reachable_blocks.contains(bb) {
+                continue;
+            }
+            let bb = get_post_contract_node(bb);
+            for s in bb_data.terminator().successors() {
+                let s = get_post_contract_node(s);
+                if s == bb {
+                    continue;
+                }
+                let parent = &mut parent[bb];
+                match parent {
+                    None => {
+                        *parent = Some(s);
+                    }
+                    Some(e) if *e == s => (),
+                    Some(e) => self.fail(
+                        Location { block: bb, statement_index: 0 },
+                        format!(
+                            "Cleanup control flow violation: The blocks dominated by {:?} have edges to both {:?} and {:?}",
+                            bb,
+                            s,
+                            *e
+                        )
+                    ),
+                }
+            }
+        }
+
+        // Check for cycles
+        let mut stack = FxHashSet::default();
+        for i in 0..parent.len() {
+            let mut bb = BasicBlock::from_usize(i);
+            stack.clear();
+            stack.insert(bb);
+            loop {
+                let Some(parent) = parent[bb].take() else {
+                    break
+                };
+                let no_cycle = stack.insert(parent);
+                if !no_cycle {
+                    self.fail(
+                        Location { block: bb, statement_index: 0 },
+                        format!(
+                            "Cleanup control flow violation: Cycle involving edge {:?} -> {:?}",
+                            bb, parent,
+                        ),
+                    );
+                    break;
+                }
+                bb = parent;
+            }
+        }
+    }
+
     /// Check if src can be assigned into dest.
     /// This is not precise, it will accept some incorrect assignments.
     fn mir_assign_valid_types(&self, src: Ty<'tcx>, dest: Ty<'tcx>) -> bool {
index c81e1b124f0e5f02eb4ddb904e7d97a41856226d..73190574667f32135a490a3f717a777c72037793 100644 (file)
@@ -36,7 +36,7 @@
 //! ```
 //!
 //! `Frozen` impls `Deref`, so we can ergonomically call methods on `Bar`, but it doesn't `impl
-//! DerefMut`.  Now calling `foo.compute.mutate()` will result in a compile-time error stating that
+//! DerefMut`. Now calling `foo.compute.mutate()` will result in a compile-time error stating that
 //! `mutate` requires a mutable reference but we don't have one.
 //!
 //! # Caveats
index 0d0c51b6819460f58ddfd8f8faec421d451f6829..9fce0e1e65cc90f416fe8fa228a861796a51a5d9 100644 (file)
@@ -11,8 +11,8 @@
 #[macro_export]
 macro_rules! define_id_collections {
     ($map_name:ident, $set_name:ident, $entry_name:ident, $key:ty) => {
-        pub type $map_name<T> = $crate::fx::FxHashMap<$key, T>;
-        pub type $set_name = $crate::fx::FxHashSet<$key>;
+        pub type $map_name<T> = $crate::unord::UnordMap<$key, T>;
+        pub type $set_name = $crate::unord::UnordSet<$key>;
         pub type $entry_name<'a, T> = $crate::fx::StdEntry<'a, $key, T>;
     };
 }
index ea2a4388b92f00296193d9403601669f9b5379cc..471457f61b2d7d105d6fde503c42e29e909ad788 100644 (file)
@@ -135,7 +135,10 @@ pub fn dominators<G: ControlFlowGraph>(graph: G) -> Dominators<G::Node> {
         // This loop computes the semi[w] for w.
         semi[w] = w;
         for v in graph.predecessors(pre_order_to_real[w]) {
-            let v = real_to_pre_order[v].unwrap();
+            // Reachable vertices may have unreachable predecessors, so ignore any of them
+            let Some(v) = real_to_pre_order[v] else {
+                continue
+            };
 
             // eval returns a vertex x from which semi[x] is minimum among
             // vertices semi[v] +> x *> v.
@@ -268,10 +271,6 @@ pub struct Dominators<N: Idx> {
 }
 
 impl<Node: Idx> Dominators<Node> {
-    pub fn dummy() -> Self {
-        Self { post_order_rank: IndexVec::new(), immediate_dominators: IndexVec::new() }
-    }
-
     pub fn is_reachable(&self, node: Node) -> bool {
         self.immediate_dominators[node].is_some()
     }
@@ -296,7 +295,7 @@ pub fn is_dominated_by(&self, node: Node, dom: Node) -> bool {
     /// of two unrelated nodes will also be consistent, but otherwise the order has no
     /// meaning.) This method cannot be used to determine if either Node dominates the other.
     pub fn rank_partial_cmp(&self, lhs: Node, rhs: Node) -> Option<Ordering> {
-        self.post_order_rank[lhs].partial_cmp(&self.post_order_rank[rhs])
+        self.post_order_rank[rhs].partial_cmp(&self.post_order_rank[lhs])
     }
 }
 
index e4e4d0d44babaa231055d95c5702d94ca9bf2429..dc1ce1747bfa0dc1d6def1306db79460d3e907f9 100644 (file)
@@ -70,8 +70,8 @@ fn test_adjacent_edges<N: PartialEq + Debug, E: PartialEq + Debug>(
             "counter={:?} expected={:?} edge_index={:?} edge={:?}",
             counter, expected_incoming[counter], edge_index, edge
         );
-        match expected_incoming[counter] {
-            (ref e, ref n) => {
+        match &expected_incoming[counter] {
+            (e, n) => {
                 assert!(e == &edge.data);
                 assert!(n == graph.node_data(edge.source()));
                 assert!(start_index == edge.target);
@@ -88,8 +88,8 @@ fn test_adjacent_edges<N: PartialEq + Debug, E: PartialEq + Debug>(
             "counter={:?} expected={:?} edge_index={:?} edge={:?}",
             counter, expected_outgoing[counter], edge_index, edge
         );
-        match expected_outgoing[counter] {
-            (ref e, ref n) => {
+        match &expected_outgoing[counter] {
+            (e, n) => {
                 assert!(e == &edge.data);
                 assert!(start_index == edge.source);
                 assert!(n == graph.node_data(edge.target));
index 57007611a76c3b3d12a4fe43142eb2e3c775d284..8a9af300c066ef2ba1cfed4e9ef73e8606c77f9a 100644 (file)
@@ -317,12 +317,12 @@ fn node_examined(
         _node: G::Node,
         _prior_status: Option<NodeStatus>,
     ) -> ControlFlow<Self::BreakVal> {
-        ControlFlow::CONTINUE
+        ControlFlow::Continue(())
     }
 
     /// Called after all nodes reachable from this one have been examined.
     fn node_settled(&mut self, _node: G::Node) -> ControlFlow<Self::BreakVal> {
-        ControlFlow::CONTINUE
+        ControlFlow::Continue(())
     }
 
     /// Behave as if no edges exist from `source` to `target`.
@@ -346,8 +346,8 @@ fn node_examined(
         prior_status: Option<NodeStatus>,
     ) -> ControlFlow<Self::BreakVal> {
         match prior_status {
-            Some(NodeStatus::Visited) => ControlFlow::BREAK,
-            _ => ControlFlow::CONTINUE,
+            Some(NodeStatus::Visited) => ControlFlow::Break(()),
+            _ => ControlFlow::Continue(()),
         }
     }
 }
index 9940fee60d7d8209a56a988a0bb52278d434302b..820a70fc8e446c8094763ee5ad25db9f3e9c6cc9 100644 (file)
@@ -84,7 +84,7 @@ fn test_find_state_2() {
     // 0 -> 1 -> 2 -> 1
     //
     // and at this point detect a cycle. The state of 2 will thus be
-    // `InCycleWith { 1 }`.  We will then visit the 1 -> 3 edge, which
+    // `InCycleWith { 1 }`. We will then visit the 1 -> 3 edge, which
     // will attempt to visit 0 as well, thus going to the state
     // `InCycleWith { 0 }`. Finally, node 1 will complete; the lowest
     // depth of any successor was 3 which had depth 0, and thus it
index 3a2000233c5d10fac7150e1956b2e33f7a9a7042..954e84c303b83d031cc4b2be76c56b4e90e1cea1 100644 (file)
@@ -11,7 +11,6 @@
 #![feature(associated_type_bounds)]
 #![feature(auto_traits)]
 #![feature(cell_leak)]
-#![feature(control_flow_enum)]
 #![feature(extend_one)]
 #![feature(hash_raw_entry)]
 #![feature(hasher_prefixfree_extras)]
index c63caa06818f26e2f3f263d778c140ae5fc588aa..9409057d4847e419b8960e0a389d6341f7a6d192 100644 (file)
@@ -1,6 +1,5 @@
 use crate::stable_hasher::{HashStable, StableHasher, StableOrd};
 use std::borrow::Borrow;
-use std::cmp::Ordering;
 use std::fmt::Debug;
 use std::mem;
 use std::ops::{Bound, Index, IndexMut, RangeBounds};
@@ -171,7 +170,7 @@ pub fn offset_keys<F>(&mut self, f: F)
     where
         F: Fn(&mut K),
     {
-        self.data.iter_mut().map(|&mut (ref mut k, _)| k).for_each(f);
+        self.data.iter_mut().map(|(k, _)| k).for_each(f);
     }
 
     /// Inserts a presorted range of elements into the map. If the range can be
@@ -232,10 +231,10 @@ fn range_slice_indices<R>(&self, range: R) -> (usize, usize)
         R: RangeBounds<K>,
     {
         let start = match range.start_bound() {
-            Bound::Included(ref k) => match self.lookup_index_for(k) {
+            Bound::Included(k) => match self.lookup_index_for(k) {
                 Ok(index) | Err(index) => index,
             },
-            Bound::Excluded(ref k) => match self.lookup_index_for(k) {
+            Bound::Excluded(k) => match self.lookup_index_for(k) {
                 Ok(index) => index + 1,
                 Err(index) => index,
             },
@@ -243,11 +242,11 @@ fn range_slice_indices<R>(&self, range: R) -> (usize, usize)
         };
 
         let end = match range.end_bound() {
-            Bound::Included(ref k) => match self.lookup_index_for(k) {
+            Bound::Included(k) => match self.lookup_index_for(k) {
                 Ok(index) => index + 1,
                 Err(index) => index,
             },
-            Bound::Excluded(ref k) => match self.lookup_index_for(k) {
+            Bound::Excluded(k) => match self.lookup_index_for(k) {
                 Ok(index) | Err(index) => index,
             },
             Bound::Unbounded => self.data.len(),
@@ -302,7 +301,7 @@ fn from_iter<T: IntoIterator<Item = (K, V)>>(iter: T) -> Self {
         let mut data: Vec<(K, V)> = iter.into_iter().collect();
 
         data.sort_unstable_by(|(k1, _), (k2, _)| k1.cmp(k2));
-        data.dedup_by(|&mut (ref k1, _), &mut (ref k2, _)| k1.cmp(k2) == Ordering::Equal);
+        data.dedup_by(|(k1, _), (k2, _)| k1 == k2);
 
         SortedMap { data }
     }
index 7af5c14942adf8c30ac0c0bc898655b97251a708..814e7c7fb9ba6751aa9dbe368319bd5525df3c17 100644 (file)
@@ -63,13 +63,13 @@ pub fn into_iter_enumerated(self) -> impl DoubleEndedIterator<Item = (I, (K, V))
     /// Returns an iterator over the items in the map in insertion order.
     #[inline]
     pub fn iter(&self) -> impl '_ + DoubleEndedIterator<Item = (&K, &V)> {
-        self.items.iter().map(|(ref k, ref v)| (k, v))
+        self.items.iter().map(|(k, v)| (k, v))
     }
 
     /// Returns an iterator over the items in the map in insertion order along with their indices.
     #[inline]
     pub fn iter_enumerated(&self) -> impl '_ + DoubleEndedIterator<Item = (I, (&K, &V))> {
-        self.items.iter_enumerated().map(|(i, (ref k, ref v))| (i, (k, v)))
+        self.items.iter_enumerated().map(|(i, (k, v))| (i, (k, v)))
     }
 
     /// Returns the item in the map with the given index.
index 1e977d709f1cdae35699717160e0c29b97c973da..3cc250862df42bb39b28688d564a929d400a7cc7 100644 (file)
@@ -6,7 +6,7 @@ fn test_sorted_index_multi_map() {
     let set: SortedIndexMultiMap<usize, _, _> = entries.iter().copied().collect();
 
     // Insertion order is preserved.
-    assert!(entries.iter().map(|(ref k, ref v)| (k, v)).eq(set.iter()));
+    assert!(entries.iter().map(|(k, v)| (k, v)).eq(set.iter()));
 
     // Indexing
     for (i, expect) in entries.iter().enumerate() {
index 9b07f86846eb32089b26b5834dcdb72ab0b29e9e..11a408f216a1465ca4335fa7879a342a82ff61ae 100644 (file)
@@ -37,9 +37,9 @@ pub fn insert(&mut self, data: T) {
 
     #[inline]
     pub fn remove(&mut self, data: &T) -> bool {
-        self.head = match self.head {
-            Some(ref mut head) if head.data == *data => head.next.take().map(|x| *x),
-            Some(ref mut head) => return head.remove_next(data),
+        self.head = match &mut self.head {
+            Some(head) if head.data == *data => head.next.take().map(|x| *x),
+            Some(head) => return head.remove_next(data),
             None => return false,
         };
         true
@@ -48,7 +48,7 @@ pub fn remove(&mut self, data: &T) -> bool {
     #[inline]
     pub fn contains(&self, data: &T) -> bool {
         let mut elem = self.head.as_ref();
-        while let Some(ref e) = elem {
+        while let Some(e) = elem {
             if &e.data == data {
                 return true;
             }
@@ -65,15 +65,14 @@ struct Element<T> {
 }
 
 impl<T: PartialEq> Element<T> {
-    fn remove_next(&mut self, data: &T) -> bool {
-        let mut n = self;
+    fn remove_next(mut self: &mut Self, data: &T) -> bool {
         loop {
-            match n.next {
+            match self.next {
                 Some(ref mut next) if next.data == *data => {
-                    n.next = next.next.take();
+                    self.next = next.next.take();
                     return true;
                 }
-                Some(ref mut next) => n = next,
+                Some(ref mut next) => self = next,
                 None => return false,
             }
         }
index c0334d2e23e5544d62f6790733c6d95b91c1853d..4b95e62bef02b1391af37c41a4e9823bec5b3983 100644 (file)
@@ -6,7 +6,7 @@
 impl<T> TinyList<T> {
     fn len(&self) -> usize {
         let (mut elem, mut count) = (self.head.as_ref(), 0);
-        while let Some(ref e) = elem {
+        while let Some(e) = elem {
             count += 1;
             elem = e.next.as_deref();
         }
index 1ff0d58df140907129ac3d97d39a81ee10a3e630..cd391fe357a6f0b76f9975649bd6898fe547bf43 100644 (file)
@@ -250,7 +250,7 @@ pub fn minimal_upper_bounds(&self, a: T, b: T) -> Vec<T> {
             // values. So here is what we do:
             //
             // 1. Find the vector `[X | a < X && b < X]` of all values
-            //    `X` where `a < X` and `b < X`.  In terms of the
+            //    `X` where `a < X` and `b < X`. In terms of the
             //    graph, this means all values reachable from both `a`
             //    and `b`. Note that this vector is also a set, but we
             //    use the term vector because the order matters
index 14257e4d5c60b8c1ccbd5c40b89267b0162d19eb..f35f18e51cb4e5339dfd8a4b88b9c291ce773256 100644 (file)
@@ -6,13 +6,15 @@
 use smallvec::SmallVec;
 use std::{
     borrow::Borrow,
+    collections::hash_map::Entry,
     hash::Hash,
     iter::{Product, Sum},
+    ops::Index,
 };
 
 use crate::{
     fingerprint::Fingerprint,
-    stable_hasher::{HashStable, StableHasher, ToStableHashKey},
+    stable_hasher::{HashStable, StableHasher, StableOrd, ToStableHashKey},
 };
 
 /// `UnordItems` is the order-less version of `Iterator`. It only contains methods
@@ -38,17 +40,17 @@ pub fn map<U, F: Fn(T) -> U>(self, f: F) -> UnordItems<U, impl Iterator<Item = U
     }
 
     #[inline]
-    pub fn all<U, F: Fn(T) -> bool>(mut self, f: F) -> bool {
+    pub fn all<F: Fn(T) -> bool>(mut self, f: F) -> bool {
         self.0.all(f)
     }
 
     #[inline]
-    pub fn any<U, F: Fn(T) -> bool>(mut self, f: F) -> bool {
+    pub fn any<F: Fn(T) -> bool>(mut self, f: F) -> bool {
         self.0.any(f)
     }
 
     #[inline]
-    pub fn filter<U, F: Fn(&T) -> bool>(self, f: F) -> UnordItems<T, impl Iterator<Item = T>> {
+    pub fn filter<F: Fn(&T) -> bool>(self, f: F) -> UnordItems<T, impl Iterator<Item = T>> {
         UnordItems(self.0.filter(f))
     }
 
@@ -96,6 +98,15 @@ pub fn product<S>(self) -> S
     pub fn count(self) -> usize {
         self.0.count()
     }
+
+    #[inline]
+    pub fn flat_map<U, F, O>(self, f: F) -> UnordItems<O, impl Iterator<Item = O>>
+    where
+        U: IntoIterator<Item = O>,
+        F: Fn(T) -> U,
+    {
+        UnordItems(self.0.flat_map(f))
+    }
 }
 
 impl<'a, T: Clone + 'a, I: Iterator<Item = &'a T>> UnordItems<&'a T, I> {
@@ -147,6 +158,7 @@ pub struct UnordSet<V: Eq + Hash> {
 }
 
 impl<V: Eq + Hash> Default for UnordSet<V> {
+    #[inline]
     fn default() -> Self {
         Self { inner: FxHashSet::default() }
     }
@@ -178,7 +190,16 @@ pub fn contains<Q: ?Sized>(&self, v: &Q) -> bool
     }
 
     #[inline]
-    pub fn items(&self) -> UnordItems<&V, impl Iterator<Item = &V>> {
+    pub fn remove<Q: ?Sized>(&mut self, k: &Q) -> bool
+    where
+        V: Borrow<Q>,
+        Q: Hash + Eq,
+    {
+        self.inner.remove(k)
+    }
+
+    #[inline]
+    pub fn items<'a>(&'a self) -> UnordItems<&'a V, impl Iterator<Item = &'a V>> {
         UnordItems(self.inner.iter())
     }
 
@@ -187,20 +208,75 @@ pub fn into_items(self) -> UnordItems<V, impl Iterator<Item = V>> {
         UnordItems(self.inner.into_iter())
     }
 
+    /// Returns the items of this set in stable sort order (as defined by `ToStableHashKey`).
+    ///
+    /// The `cache_sort_key` parameter controls if [slice::sort_by_cached_key] or
+    /// [slice::sort_unstable_by_key] will be used for sorting the vec. Use
+    /// `cache_sort_key` when the [ToStableHashKey::to_stable_hash_key] implementation
+    /// for `V` is expensive (e.g. a `DefId -> DefPathHash` lookup).
+    #[inline]
+    pub fn to_sorted<HCX>(&self, hcx: &HCX, cache_sort_key: bool) -> Vec<&V>
+    where
+        V: ToStableHashKey<HCX>,
+    {
+        to_sorted_vec(hcx, self.inner.iter(), cache_sort_key, |&x| x)
+    }
+
+    /// Returns the items of this set in stable sort order (as defined by
+    /// `StableOrd`). This method is much more efficient than
+    /// `into_sorted` because it does not need to transform keys to their
+    /// `ToStableHashKey` equivalent.
+    #[inline]
+    pub fn to_sorted_stable_ord(&self) -> Vec<V>
+    where
+        V: Ord + StableOrd + Copy,
+    {
+        let mut items: Vec<V> = self.inner.iter().copied().collect();
+        items.sort_unstable();
+        items
+    }
+
+    /// Returns the items of this set in stable sort order (as defined by `ToStableHashKey`).
+    ///
+    /// The `cache_sort_key` parameter controls if [slice::sort_by_cached_key] or
+    /// [slice::sort_unstable_by_key] will be used for sorting the vec. Use
+    /// `cache_sort_key` when the [ToStableHashKey::to_stable_hash_key] implementation
+    /// for `V` is expensive (e.g. a `DefId -> DefPathHash` lookup).
+    #[inline]
+    pub fn into_sorted<HCX>(self, hcx: &HCX, cache_sort_key: bool) -> Vec<V>
+    where
+        V: ToStableHashKey<HCX>,
+    {
+        to_sorted_vec(hcx, self.inner.into_iter(), cache_sort_key, |x| x)
+    }
+
     // We can safely extend this UnordSet from a set of unordered values because that
     // won't expose the internal ordering anywhere.
     #[inline]
     pub fn extend<I: Iterator<Item = V>>(&mut self, items: UnordItems<V, I>) {
         self.inner.extend(items.0)
     }
+
+    #[inline]
+    pub fn clear(&mut self) {
+        self.inner.clear();
+    }
 }
 
 impl<V: Hash + Eq> Extend<V> for UnordSet<V> {
+    #[inline]
     fn extend<T: IntoIterator<Item = V>>(&mut self, iter: T) {
         self.inner.extend(iter)
     }
 }
 
+impl<V: Hash + Eq> FromIterator<V> for UnordSet<V> {
+    #[inline]
+    fn from_iter<T: IntoIterator<Item = V>>(iter: T) -> Self {
+        UnordSet { inner: FxHashSet::from_iter(iter) }
+    }
+}
+
 impl<HCX, V: Hash + Eq + HashStable<HCX>> HashStable<HCX> for UnordSet<V> {
     #[inline]
     fn hash_stable(&self, hcx: &mut HCX, hasher: &mut StableHasher) {
@@ -223,17 +299,33 @@ pub struct UnordMap<K: Eq + Hash, V> {
 }
 
 impl<K: Eq + Hash, V> Default for UnordMap<K, V> {
+    #[inline]
     fn default() -> Self {
         Self { inner: FxHashMap::default() }
     }
 }
 
 impl<K: Hash + Eq, V> Extend<(K, V)> for UnordMap<K, V> {
+    #[inline]
     fn extend<T: IntoIterator<Item = (K, V)>>(&mut self, iter: T) {
         self.inner.extend(iter)
     }
 }
 
+impl<K: Hash + Eq, V> FromIterator<(K, V)> for UnordMap<K, V> {
+    #[inline]
+    fn from_iter<T: IntoIterator<Item = (K, V)>>(iter: T) -> Self {
+        UnordMap { inner: FxHashMap::from_iter(iter) }
+    }
+}
+
+impl<K: Hash + Eq, V, I: Iterator<Item = (K, V)>> From<UnordItems<(K, V), I>> for UnordMap<K, V> {
+    #[inline]
+    fn from(items: UnordItems<(K, V), I>) -> Self {
+        UnordMap { inner: FxHashMap::from_iter(items.0) }
+    }
+}
+
 impl<K: Eq + Hash, V> UnordMap<K, V> {
     #[inline]
     pub fn len(&self) -> usize {
@@ -255,7 +347,44 @@ pub fn contains_key<Q: ?Sized>(&self, k: &Q) -> bool
     }
 
     #[inline]
-    pub fn items(&self) -> UnordItems<(&K, &V), impl Iterator<Item = (&K, &V)>> {
+    pub fn is_empty(&self) -> bool {
+        self.inner.is_empty()
+    }
+
+    #[inline]
+    pub fn entry(&mut self, key: K) -> Entry<'_, K, V> {
+        self.inner.entry(key)
+    }
+
+    #[inline]
+    pub fn get<Q: ?Sized>(&self, k: &Q) -> Option<&V>
+    where
+        K: Borrow<Q>,
+        Q: Hash + Eq,
+    {
+        self.inner.get(k)
+    }
+
+    #[inline]
+    pub fn get_mut<Q: ?Sized>(&mut self, k: &Q) -> Option<&mut V>
+    where
+        K: Borrow<Q>,
+        Q: Hash + Eq,
+    {
+        self.inner.get_mut(k)
+    }
+
+    #[inline]
+    pub fn remove<Q: ?Sized>(&mut self, k: &Q) -> Option<V>
+    where
+        K: Borrow<Q>,
+        Q: Hash + Eq,
+    {
+        self.inner.remove(k)
+    }
+
+    #[inline]
+    pub fn items<'a>(&'a self) -> UnordItems<(&'a K, &'a V), impl Iterator<Item = (&'a K, &'a V)>> {
         UnordItems(self.inner.iter())
     }
 
@@ -270,6 +399,77 @@ pub fn into_items(self) -> UnordItems<(K, V), impl Iterator<Item = (K, V)>> {
     pub fn extend<I: Iterator<Item = (K, V)>>(&mut self, items: UnordItems<(K, V), I>) {
         self.inner.extend(items.0)
     }
+
+    /// Returns the entries of this map in stable sort order (as defined by `ToStableHashKey`).
+    ///
+    /// The `cache_sort_key` parameter controls if [slice::sort_by_cached_key] or
+    /// [slice::sort_unstable_by_key] will be used for sorting the vec. Use
+    /// `cache_sort_key` when the [ToStableHashKey::to_stable_hash_key] implementation
+    /// for `K` is expensive (e.g. a `DefId -> DefPathHash` lookup).
+    #[inline]
+    pub fn to_sorted<HCX>(&self, hcx: &HCX, cache_sort_key: bool) -> Vec<(&K, &V)>
+    where
+        K: ToStableHashKey<HCX>,
+    {
+        to_sorted_vec(hcx, self.inner.iter(), cache_sort_key, |&(k, _)| k)
+    }
+
+    /// Returns the entries of this map in stable sort order (as defined by `StableOrd`).
+    /// This method can be much more efficient than `into_sorted` because it does not need
+    /// to transform keys to their `ToStableHashKey` equivalent.
+    #[inline]
+    pub fn to_sorted_stable_ord(&self) -> Vec<(K, &V)>
+    where
+        K: Ord + StableOrd + Copy,
+    {
+        let mut items: Vec<(K, &V)> = self.inner.iter().map(|(&k, v)| (k, v)).collect();
+        items.sort_unstable_by_key(|&(k, _)| k);
+        items
+    }
+
+    /// Returns the entries of this map in stable sort order (as defined by `ToStableHashKey`).
+    ///
+    /// The `cache_sort_key` parameter controls if [slice::sort_by_cached_key] or
+    /// [slice::sort_unstable_by_key] will be used for sorting the vec. Use
+    /// `cache_sort_key` when the [ToStableHashKey::to_stable_hash_key] implementation
+    /// for `K` is expensive (e.g. a `DefId -> DefPathHash` lookup).
+    #[inline]
+    pub fn into_sorted<HCX>(self, hcx: &HCX, cache_sort_key: bool) -> Vec<(K, V)>
+    where
+        K: ToStableHashKey<HCX>,
+    {
+        to_sorted_vec(hcx, self.inner.into_iter(), cache_sort_key, |(k, _)| k)
+    }
+
+    /// Returns the values of this map in stable sort order (as defined by K's
+    /// `ToStableHashKey` implementation).
+    ///
+    /// The `cache_sort_key` parameter controls if [slice::sort_by_cached_key] or
+    /// [slice::sort_unstable_by_key] will be used for sorting the vec. Use
+    /// `cache_sort_key` when the [ToStableHashKey::to_stable_hash_key] implementation
+    /// for `K` is expensive (e.g. a `DefId -> DefPathHash` lookup).
+    #[inline]
+    pub fn values_sorted<HCX>(&self, hcx: &HCX, cache_sort_key: bool) -> impl Iterator<Item = &V>
+    where
+        K: ToStableHashKey<HCX>,
+    {
+        to_sorted_vec(hcx, self.inner.iter(), cache_sort_key, |&(k, _)| k)
+            .into_iter()
+            .map(|(_, v)| v)
+    }
+}
+
+impl<K, Q: ?Sized, V> Index<&Q> for UnordMap<K, V>
+where
+    K: Eq + Hash + Borrow<Q>,
+    Q: Eq + Hash,
+{
+    type Output = V;
+
+    #[inline]
+    fn index(&self, key: &Q) -> &V {
+        &self.inner[key]
+    }
 }
 
 impl<HCX, K: Hash + Eq + HashStable<HCX>, V: HashStable<HCX>> HashStable<HCX> for UnordMap<K, V> {
@@ -334,6 +534,12 @@ fn extend<I: IntoIterator<Item = T>>(&mut self, iter: I) {
     }
 }
 
+impl<T, I: Iterator<Item = T>> From<UnordItems<T, I>> for UnordBag<T> {
+    fn from(value: UnordItems<T, I>) -> Self {
+        UnordBag { inner: Vec::from_iter(value.0) }
+    }
+}
+
 impl<HCX, V: Hash + Eq + HashStable<HCX>> HashStable<HCX> for UnordBag<V> {
     #[inline]
     fn hash_stable(&self, hcx: &mut HCX, hasher: &mut StableHasher) {
@@ -341,6 +547,27 @@ fn hash_stable(&self, hcx: &mut HCX, hasher: &mut StableHasher) {
     }
 }
 
+#[inline]
+fn to_sorted_vec<HCX, T, K, I>(
+    hcx: &HCX,
+    iter: I,
+    cache_sort_key: bool,
+    extract_key: fn(&T) -> &K,
+) -> Vec<T>
+where
+    I: Iterator<Item = T>,
+    K: ToStableHashKey<HCX>,
+{
+    let mut items: Vec<T> = iter.collect();
+    if cache_sort_key {
+        items.sort_by_cached_key(|x| extract_key(x).to_stable_hash_key(hcx));
+    } else {
+        items.sort_unstable_by_key(|x| extract_key(x).to_stable_hash_key(hcx));
+    }
+
+    items
+}
+
 fn hash_iter_order_independent<
     HCX,
     T: HashStable<HCX>,
index 37dc7f6ba5fe4b06d1654ee4e686ecb7ce60e577..6d7fba36fb3d071f182ab20a105e485a0f2efc47 100644 (file)
@@ -1,5 +1,5 @@
 The `driver` crate is effectively the "main" function for the rust
-compiler.  It orchestrates the compilation process and "knits together"
+compiler. It orchestrates the compilation process and "knits together"
 the code from the other crates within rustc. This crate itself does
 not contain any of the "main logic" of the compiler (though it does
 have some code related to pretty printing or other minor compiler
index a62e5dec4b8643aba7ebc69ff4ffc5c6e67e1db5..f50ad0137b88aacff80bed38c6e5caeccf54cc11 100644 (file)
@@ -219,7 +219,6 @@ fn run_compiler(
         crate_cfg: cfg,
         crate_check_cfg: check_cfg,
         input: Input::File(PathBuf::new()),
-        input_path: None,
         output_file: ofile,
         output_dir: odir,
         file_loader,
@@ -237,9 +236,8 @@ fn run_compiler(
 
     match make_input(config.opts.error_format, &matches.free) {
         Err(reported) => return Err(reported),
-        Ok(Some((input, input_file_path))) => {
+        Ok(Some(input)) => {
             config.input = input;
-            config.input_path = input_file_path;
 
             callbacks.config(&mut config);
         }
@@ -261,14 +259,8 @@ fn run_compiler(
                         describe_lints(compiler.session(), &lint_store, registered_lints);
                         return;
                     }
-                    let should_stop = print_crate_info(
-                        &***compiler.codegen_backend(),
-                        compiler.session(),
-                        None,
-                        compiler.output_dir(),
-                        compiler.output_file(),
-                        compiler.temps_dir(),
-                    );
+                    let should_stop =
+                        print_crate_info(&***compiler.codegen_backend(), compiler.session(), false);
 
                     if should_stop == Compilation::Stop {
                         return;
@@ -290,18 +282,9 @@ fn run_compiler(
 
     interface::run_compiler(config, |compiler| {
         let sess = compiler.session();
-        let should_stop = print_crate_info(
-            &***compiler.codegen_backend(),
-            sess,
-            Some(compiler.input()),
-            compiler.output_dir(),
-            compiler.output_file(),
-            compiler.temps_dir(),
-        )
-        .and_then(|| {
-            list_metadata(sess, &*compiler.codegen_backend().metadata_loader(), compiler.input())
-        })
-        .and_then(|| try_process_rlink(sess, compiler));
+        let should_stop = print_crate_info(&***compiler.codegen_backend(), sess, true)
+            .and_then(|| list_metadata(sess, &*compiler.codegen_backend().metadata_loader()))
+            .and_then(|| try_process_rlink(sess, compiler));
 
         if should_stop == Compilation::Stop {
             return sess.compile_status();
@@ -315,24 +298,12 @@ fn run_compiler(
                 if ppm.needs_ast_map() {
                     let expanded_crate = queries.expansion()?.borrow().0.clone();
                     queries.global_ctxt()?.enter(|tcx| {
-                        pretty::print_after_hir_lowering(
-                            tcx,
-                            compiler.input(),
-                            &*expanded_crate,
-                            *ppm,
-                            compiler.output_file().as_deref(),
-                        );
+                        pretty::print_after_hir_lowering(tcx, &*expanded_crate, *ppm);
                         Ok(())
                     })?;
                 } else {
                     let krate = queries.parse()?.steal();
-                    pretty::print_after_parsing(
-                        sess,
-                        compiler.input(),
-                        &krate,
-                        *ppm,
-                        compiler.output_file().as_deref(),
-                    );
+                    pretty::print_after_parsing(sess, &krate, *ppm);
                 }
                 trace!("finished pretty-printing");
                 return early_exit();
@@ -357,21 +328,17 @@ fn run_compiler(
                 }
             }
 
-            queries.expansion()?;
+            queries.global_ctxt()?;
             if callbacks.after_expansion(compiler, queries) == Compilation::Stop {
                 return early_exit();
             }
 
-            queries.prepare_outputs()?;
-
             if sess.opts.output_types.contains_key(&OutputType::DepInfo)
                 && sess.opts.output_types.len() == 1
             {
                 return early_exit();
             }
 
-            queries.global_ctxt()?;
-
             if sess.opts.unstable_opts.no_analysis {
                 return early_exit();
             }
@@ -384,9 +351,9 @@ fn run_compiler(
                         save::process_crate(
                             tcx,
                             crate_name,
-                            compiler.input(),
+                            &sess.io.input,
                             None,
-                            DumpHandler::new(compiler.output_dir().as_deref(), crate_name),
+                            DumpHandler::new(sess.io.output_dir.as_deref(), crate_name),
                         )
                     });
                 }
@@ -439,7 +406,7 @@ fn make_output(matches: &getopts::Matches) -> (Option<PathBuf>, Option<PathBuf>)
 fn make_input(
     error_format: ErrorOutputType,
     free_matches: &[String],
-) -> Result<Option<(Input, Option<PathBuf>)>, ErrorGuaranteed> {
+) -> Result<Option<Input>, ErrorGuaranteed> {
     if free_matches.len() == 1 {
         let ifile = &free_matches[0];
         if ifile == "-" {
@@ -461,12 +428,12 @@ fn make_input(
                 let line = isize::from_str_radix(&line, 10)
                     .expect("UNSTABLE_RUSTDOC_TEST_LINE needs to be an number");
                 let file_name = FileName::doc_test_source_code(PathBuf::from(path), line);
-                Ok(Some((Input::Str { name: file_name, input: src }, None)))
+                Ok(Some(Input::Str { name: file_name, input: src }))
             } else {
-                Ok(Some((Input::Str { name: FileName::anon_source_code(&src), input: src }, None)))
+                Ok(Some(Input::Str { name: FileName::anon_source_code(&src), input: src }))
             }
         } else {
-            Ok(Some((Input::File(PathBuf::from(ifile)), Some(PathBuf::from(ifile)))))
+            Ok(Some(Input::File(PathBuf::from(ifile))))
         }
     } else {
         Ok(None)
@@ -560,7 +527,7 @@ fn show_content_with_pager(content: &str) {
 
 pub fn try_process_rlink(sess: &Session, compiler: &interface::Compiler) -> Compilation {
     if sess.opts.unstable_opts.link_only {
-        if let Input::File(file) = compiler.input() {
+        if let Input::File(file) = &sess.io.input {
             // FIXME: #![crate_type] and #![crate_name] support not implemented yet
             sess.init_crate_types(collect_crate_types(sess, &[]));
             let outputs = compiler.build_output_filenames(sess, &[]);
@@ -601,13 +568,9 @@ pub fn try_process_rlink(sess: &Session, compiler: &interface::Compiler) -> Comp
     }
 }
 
-pub fn list_metadata(
-    sess: &Session,
-    metadata_loader: &dyn MetadataLoader,
-    input: &Input,
-) -> Compilation {
+pub fn list_metadata(sess: &Session, metadata_loader: &dyn MetadataLoader) -> Compilation {
     if sess.opts.unstable_opts.ls {
-        match *input {
+        match sess.io.input {
             Input::File(ref ifile) => {
                 let path = &(*ifile);
                 let mut v = Vec::new();
@@ -627,10 +590,7 @@ pub fn list_metadata(
 fn print_crate_info(
     codegen_backend: &dyn CodegenBackend,
     sess: &Session,
-    input: Option<&Input>,
-    odir: &Option<PathBuf>,
-    ofile: &Option<PathBuf>,
-    temps_dir: &Option<PathBuf>,
+    parse_attrs: bool,
 ) -> Compilation {
     use rustc_session::config::PrintRequest::*;
     // NativeStaticLibs and LinkArgs are special - printed during linking
@@ -639,18 +599,17 @@ fn print_crate_info(
         return Compilation::Continue;
     }
 
-    let attrs = match input {
-        None => None,
-        Some(input) => {
-            let result = parse_crate_attrs(sess, input);
-            match result {
-                Ok(attrs) => Some(attrs),
-                Err(mut parse_error) => {
-                    parse_error.emit();
-                    return Compilation::Stop;
-                }
+    let attrs = if parse_attrs {
+        let result = parse_crate_attrs(sess);
+        match result {
+            Ok(attrs) => Some(attrs),
+            Err(mut parse_error) => {
+                parse_error.emit();
+                return Compilation::Stop;
             }
         }
+    } else {
+        None
     };
     for req in &sess.opts.prints {
         match *req {
@@ -665,14 +624,9 @@ fn print_crate_info(
                 println!("{}", serde_json::to_string_pretty(&sess.target.to_json()).unwrap());
             }
             FileNames | CrateName => {
-                let input = input.unwrap_or_else(|| {
-                    early_error(ErrorOutputType::default(), "no input file provided")
-                });
                 let attrs = attrs.as_ref().unwrap();
-                let t_outputs = rustc_interface::util::build_output_filenames(
-                    input, odir, ofile, temps_dir, attrs, sess,
-                );
-                let id = rustc_session::output::find_crate_name(sess, attrs, input);
+                let t_outputs = rustc_interface::util::build_output_filenames(attrs, sess);
+                let id = rustc_session::output::find_crate_name(sess, attrs);
                 if *req == PrintRequest::CrateName {
                     println!("{id}");
                     continue;
@@ -1108,8 +1062,8 @@ pub fn handle_options(args: &[String]) -> Option<getopts::Matches> {
     Some(matches)
 }
 
-fn parse_crate_attrs<'a>(sess: &'a Session, input: &Input) -> PResult<'a, ast::AttrVec> {
-    match input {
+fn parse_crate_attrs<'a>(sess: &'a Session) -> PResult<'a, ast::AttrVec> {
+    match &sess.io.input {
         Input::File(ifile) => rustc_parse::parse_crate_attrs_from_file(ifile, &sess.parse_sess),
         Input::Str { name, input } => rustc_parse::parse_crate_attrs_from_source_str(
             name.clone(),
index b2451bc730f79f7eb1ddea06cf54708d1dc24f52..ae3ac8625b1862ca109c1a216329268abc6a6557 100644 (file)
@@ -9,14 +9,13 @@
 use rustc_middle::hir::map as hir_map;
 use rustc_middle::mir::{write_mir_graphviz, write_mir_pretty};
 use rustc_middle::ty::{self, TyCtxt};
-use rustc_session::config::{Input, PpAstTreeMode, PpHirMode, PpMode, PpSourceMode};
+use rustc_session::config::{PpAstTreeMode, PpHirMode, PpMode, PpSourceMode};
 use rustc_session::Session;
 use rustc_span::symbol::Ident;
 use rustc_span::FileName;
 
 use std::cell::Cell;
 use std::fmt::Write;
-use std::path::Path;
 
 pub use self::PpMode::*;
 pub use self::PpSourceMode::*;
@@ -345,8 +344,8 @@ fn post(&self, s: &mut pprust_hir::State<'_>, node: pprust_hir::AnnNode<'_>) {
     }
 }
 
-fn get_source(input: &Input, sess: &Session) -> (String, FileName) {
-    let src_name = input.source_name();
+fn get_source(sess: &Session) -> (String, FileName) {
+    let src_name = sess.io.input.source_name();
     let src = String::clone(
         sess.source_map()
             .get_source_file(&src_name)
@@ -358,8 +357,8 @@ fn get_source(input: &Input, sess: &Session) -> (String, FileName) {
     (src, src_name)
 }
 
-fn write_or_print(out: &str, ofile: Option<&Path>, sess: &Session) {
-    match ofile {
+fn write_or_print(out: &str, sess: &Session) {
+    match &sess.io.output_file {
         None => print!("{out}"),
         Some(p) => {
             if let Err(e) = std::fs::write(p, out) {
@@ -372,14 +371,8 @@ fn write_or_print(out: &str, ofile: Option<&Path>, sess: &Session) {
     }
 }
 
-pub fn print_after_parsing(
-    sess: &Session,
-    input: &Input,
-    krate: &ast::Crate,
-    ppm: PpMode,
-    ofile: Option<&Path>,
-) {
-    let (src, src_name) = get_source(input, sess);
+pub fn print_after_parsing(sess: &Session, krate: &ast::Crate, ppm: PpMode) {
+    let (src, src_name) = get_source(sess);
 
     let out = match ppm {
         Source(s) => {
@@ -407,22 +400,16 @@ pub fn print_after_parsing(
         _ => unreachable!(),
     };
 
-    write_or_print(&out, ofile, sess);
+    write_or_print(&out, sess);
 }
 
-pub fn print_after_hir_lowering<'tcx>(
-    tcx: TyCtxt<'tcx>,
-    input: &Input,
-    krate: &ast::Crate,
-    ppm: PpMode,
-    ofile: Option<&Path>,
-) {
+pub fn print_after_hir_lowering<'tcx>(tcx: TyCtxt<'tcx>, krate: &ast::Crate, ppm: PpMode) {
     if ppm.needs_analysis() {
-        abort_on_err(print_with_analysis(tcx, ppm, ofile), tcx.sess);
+        abort_on_err(print_with_analysis(tcx, ppm), tcx.sess);
         return;
     }
 
-    let (src, src_name) = get_source(input, tcx.sess);
+    let (src, src_name) = get_source(tcx.sess);
 
     let out = match ppm {
         Source(s) => {
@@ -474,18 +461,14 @@ pub fn print_after_hir_lowering<'tcx>(
         _ => unreachable!(),
     };
 
-    write_or_print(&out, ofile, tcx.sess);
+    write_or_print(&out, tcx.sess);
 }
 
 // In an ideal world, this would be a public function called by the driver after
 // analysis is performed. However, we want to call `phase_3_run_analysis_passes`
 // with a different callback than the standard driver, so that isn't easy.
 // Instead, we call that function ourselves.
-fn print_with_analysis(
-    tcx: TyCtxt<'_>,
-    ppm: PpMode,
-    ofile: Option<&Path>,
-) -> Result<(), ErrorGuaranteed> {
+fn print_with_analysis(tcx: TyCtxt<'_>, ppm: PpMode) -> Result<(), ErrorGuaranteed> {
     tcx.analysis(())?;
     let out = match ppm {
         Mir => {
@@ -518,7 +501,7 @@ fn print_with_analysis(
         _ => unreachable!(),
     };
 
-    write_or_print(&out, ofile, tcx.sess);
+    write_or_print(&out, tcx.sess);
 
     Ok(())
 }
index 24258974bb97c67e05cabf9d649757f5d5d4ebf6..9d5f4ad752051a41b0f23435c36c16252e9c5ea0 100644 (file)
@@ -1,5 +1,5 @@
-// Error messages for EXXXX errors.  Each message should start and end with a
-// new line, and be wrapped to 80 characters.  In vim you can `:set tw=80` and
+// Error messages for EXXXX errors. Each message should start and end with a
+// new line, and be wrapped to 80 characters. In vim you can `:set tw=80` and
 // use `gq` to wrap paragraphs. Use `:set tw=0` to disable.
 //
 // /!\ IMPORTANT /!\
 E0788: include_str!("./error_codes/E0788.md"),
 E0790: include_str!("./error_codes/E0790.md"),
 E0791: include_str!("./error_codes/E0791.md"),
+E0792: include_str!("./error_codes/E0792.md"),
 ;
 //  E0006, // merged with E0005
 //  E0008, // cannot bind by-move into a pattern guard
index 7edd93e56a945e4e9096efee47c07e9f14b1a410..1ae01106f2014c4362d29d3a4a306dde7f091bd0 100644 (file)
@@ -1 +1,46 @@
 #### This error code is internal to the compiler and will not be emitted with normal Rust code.
+#### Note: this error code is no longer emitted by the compiler.
+
+This error code shows the variance of a type's generic parameters.
+
+Erroneous code example:
+
+```compile_fail
+// NOTE: this feature is perma-unstable and should *only* be used for
+//       testing purposes.
+#![feature(rustc_attrs)]
+
+#[rustc_variance]
+struct Foo<'a, T> { // error: deliberate error to display type's variance
+    t: &'a mut T,
+}
+```
+
+which produces the following error:
+
+```text
+error: [-, o]
+ --> <anon>:4:1
+  |
+4 | struct Foo<'a, T> {
+  | ^^^^^^^^^^^^^^^^^
+```
+
+*Note that while `#[rustc_variance]` still exists and is used within the*
+*compiler, it is no longer marked as `E0208` and instead has no error code.*
+
+This error is deliberately triggered with the `#[rustc_variance]` attribute
+(`#![feature(rustc_attrs)]` must be enabled) and helps to show you the variance
+of the type's generic parameters. You can read more about variance and
+subtyping in [this section of the Rustonomicon]. For a more in-depth look at
+variance (including a more complete list of common variances) see
+[this section of the Reference]. For information on how variance is implemented
+in the compiler, see [this section of `rustc-dev-guide`].
+
+This error can be easily fixed by removing the `#[rustc_variance]` attribute;
+the compiler's suggestion to comment it out can be applied automatically with
+`rustfix`.
+
+[this section of the Rustonomicon]: https://doc.rust-lang.org/nomicon/subtyping.html
+[this section of the Reference]: https://doc.rust-lang.org/reference/subtyping.html#variance
+[this section of `rustc-dev-guide`]: https://rustc-dev-guide.rust-lang.org/variance.html
index 38ad19bd6aa9a37c7b6030e2524a4975fd8402ed..1c62d410efe4799503ce55a0087140ed15a1ed59 100644 (file)
@@ -17,7 +17,7 @@ fn mutable() {
     foo(|| x = 2);
 }
 
-// Attempts to take a mutable reference to closed-over data.  Error message
+// Attempts to take a mutable reference to closed-over data. Error message
 // reads: `cannot borrow data mutably in a captured outer variable...`
 fn mut_addr() {
     let mut x = 0u32;
index 9b1b77f3bc70652751f198928e76cc1dceff8686..a7b9bbeb122f70fcbc03c15dd4f33ad4b4d52ab2 100644 (file)
@@ -22,7 +22,7 @@ gets called when they go out of scope. This destructor gets exclusive
 access to the fields of the struct when it runs.
 
 This means that when `s` reaches the end of `demo`, its destructor
-gets exclusive access to its `&mut`-borrowed string data.  allowing
+gets exclusive access to its `&mut`-borrowed string data. allowing
 another borrow of that string data (`p`), to exist across the drop of
 `s` would be a violation of the principle that `&mut`-borrows have
 exclusive, unaliased access to their referenced data.
index 45d1cafa690624a0e49bf753cabfe4695d86e94d..b75735d602e053677ebcdc2efb7c6338a1d87910 100644 (file)
@@ -15,5 +15,5 @@ fn main() {}
 ```
 
 The items of marker traits cannot be overridden, so there's no need to have them
-when they cannot be changed per-type anyway.  If you wanted them for ergonomic
+when they cannot be changed per-type anyway. If you wanted them for ergonomic
 reasons, consider making an extension trait instead.
diff --git a/compiler/rustc_error_codes/src/error_codes/E0792.md b/compiler/rustc_error_codes/src/error_codes/E0792.md
new file mode 100644 (file)
index 0000000..bad2b5a
--- /dev/null
@@ -0,0 +1,60 @@
+A type alias impl trait can only have its hidden type assigned
+when used fully generically (and within its defining scope).
+This means
+
+```compile_fail,E0792
+#![feature(type_alias_impl_trait)]
+
+type Foo<T> = impl std::fmt::Debug;
+
+fn foo() -> Foo<u32> {
+    5u32
+}
+```
+
+is not accepted. If it were accepted, one could create unsound situations like
+
+```compile_fail,E0792
+#![feature(type_alias_impl_trait)]
+
+type Foo<T> = impl Default;
+
+fn foo() -> Foo<u32> {
+    5u32
+}
+
+fn main() {
+    let x = Foo::<&'static mut String>::default();
+}
+```
+
+
+Instead you need to make the function generic:
+
+```
+#![feature(type_alias_impl_trait)]
+
+type Foo<T> = impl std::fmt::Debug;
+
+fn foo<U>() -> Foo<U> {
+    5u32
+}
+```
+
+This means that no matter the generic parameter to `foo`,
+the hidden type will always be `u32`.
+If you want to link the generic parameter to the hidden type,
+you can do that, too:
+
+
+```
+#![feature(type_alias_impl_trait)]
+
+use std::fmt::Debug;
+
+type Foo<T: Debug> = impl Debug;
+
+fn foo<U: Debug>() -> Foo<U> {
+    Vec::<U>::new()
+}
+```
index e5cd1142b20c8c8738055ac3d52d845fb801b24d..5f28839f136d6276fd3fb465dd749b8edb7d5ad2 100644 (file)
@@ -88,4 +88,5 @@ ast_passes_ty_alias_without_body =
 ast_passes_fn_without_body =
     free function without a body
     .suggestion = provide a definition for the function
-    .extern_block_suggestion = if you meant to declare an externally defined function, use an `extern` block
+
+ast_passes_extern_block_suggestion = if you meant to declare an externally defined function, use an `extern` block
index 2cd4733220e829ce4d68d0dc4bb3aee962277608..9e4332c428386ff068dd0a2f49ba859cd3b758d4 100644 (file)
@@ -120,3 +120,7 @@ borrowck_cannot_move_when_borrowed =
         [value] value
         *[other] {$value_place}
     } occurs here
+
+borrowck_opaque_type_non_generic_param =
+    expected generic {$kind} parameter, found `{$ty}`
+    .label = this generic parameter must be used with a generic {$kind} parameter
index ae0091b03736f3b6c78343ae5d7487ab7b12606a..164d6d26d230d656f0896e69cf8269b9d8845aaa 100644 (file)
@@ -193,7 +193,7 @@ infer_actual_impl_expl_expected_signature_any = {$leading_ellipsis ->
 infer_actual_impl_expl_expected_signature_some = {$leading_ellipsis ->
     [true] ...
     *[false] {""}
-}closure with signature `{$ty_or_sig}` must implement `{$trait_path}`, for some specific lifetime `'{lifetime_1}`...
+}closure with signature `{$ty_or_sig}` must implement `{$trait_path}`, for some specific lifetime `'{$lifetime_1}`...
 infer_actual_impl_expl_expected_signature_nothing = {$leading_ellipsis ->
     [true] ...
     *[false] {""}
@@ -209,7 +209,7 @@ infer_actual_impl_expl_expected_passive_any = {$leading_ellipsis ->
 infer_actual_impl_expl_expected_passive_some = {$leading_ellipsis ->
     [true] ...
     *[false] {""}
-}`{$trait_path}` would have to be implemented for the type `{$ty_or_sig}`, for some specific lifetime `'{lifetime_1}`...
+}`{$trait_path}` would have to be implemented for the type `{$ty_or_sig}`, for some specific lifetime `'{$lifetime_1}`...
 infer_actual_impl_expl_expected_passive_nothing = {$leading_ellipsis ->
     [true] ...
     *[false] {""}
@@ -225,7 +225,7 @@ infer_actual_impl_expl_expected_other_any = {$leading_ellipsis ->
 infer_actual_impl_expl_expected_other_some = {$leading_ellipsis ->
     [true] ...
     *[false] {""}
-}`{$ty_or_sig}` must implement `{$trait_path}`, for some specific lifetime `'{lifetime_1}`...
+}`{$ty_or_sig}` must implement `{$trait_path}`, for some specific lifetime `'{$lifetime_1}`...
 infer_actual_impl_expl_expected_other_nothing = {$leading_ellipsis ->
     [true] ...
     *[false] {""}
@@ -268,28 +268,28 @@ infer_but_calling_introduces = {$has_param_name ->
     [true] `{$param_name}`
     *[false] `fn` parameter
 } has {$lifetime_kind ->
-    [named] lifetime `{lifetime}`
-    *[anon] an anonymous lifetime `'_`
-} but calling `{assoc_item}` introduces an implicit `'static` lifetime requirement
+    [true] lifetime `{$lifetime}`
+    *[false] an anonymous lifetime `'_`
+} but calling `{$assoc_item}` introduces an implicit `'static` lifetime requirement
     .label1 = {$has_lifetime ->
-        [named] lifetime `{lifetime}`
-        *[anon] an anonymous lifetime `'_`
+        [true] lifetime `{$lifetime}`
+        *[false] an anonymous lifetime `'_`
     }
     .label2 = ...is used and required to live as long as `'static` here because of an implicit lifetime bound on the {$has_impl_path ->
-        [named] `impl` of `{$impl_path}`
-        *[anon] inherent `impl`
+        [true] `impl` of `{$impl_path}`
+        *[false] inherent `impl`
     }
 
 infer_but_needs_to_satisfy = {$has_param_name ->
     [true] `{$param_name}`
     *[false] `fn` parameter
 } has {$has_lifetime ->
-    [named] lifetime `{lifetime}`
-    *[anon] an anonymous lifetime `'_`
+    [true] lifetime `{$lifetime}`
+    *[false] an anonymous lifetime `'_`
 } but it needs to satisfy a `'static` lifetime requirement
     .influencer = this data with {$has_lifetime ->
-        [named] lifetime `{lifetime}`
-        *[anon] an anonymous lifetime `'_`
+        [true] lifetime `{$lifetime}`
+        *[false] an anonymous lifetime `'_`
     }...
     .require = {$spans_empty ->
         *[true] ...is used and required to live as long as `'static` here
@@ -302,8 +302,8 @@ infer_more_targeted = {$has_param_name ->
     [true] `{$param_name}`
     *[false] `fn` parameter
 } has {$has_lifetime ->
-    [named] lifetime `{lifetime}`
-    *[anon] an anonymous lifetime `'_`
+    [true] lifetime `{$lifetime}`
+    *[false] an anonymous lifetime `'_`
 } but calling `{$ident}` introduces an implicit `'static` lifetime requirement
 
 infer_ril_introduced_here = `'static` requirement introduced here
index a082c0b61fa7ed293b873b9e054f7f9e1481957e..224855fff8b56f35a7e02214a87749dd8cc89890 100644 (file)
@@ -364,3 +364,5 @@ mir_build_suggest_let_else = you might want to use `let else` to handle the {$co
         [one] variant that isn't
         *[other] variants that aren't
     } matched
+
+mir_build_suggest_attempted_int_lit = alternatively, you could prepend the pattern with an underscore to define a new named variable; identifiers cannot begin with digits
index 1040ee1c97d814b8a04c97bcb2a88fcfa68c30c6..abe65a0e3fef06415599478dad5d702316a7a6df 100644 (file)
@@ -10,17 +10,17 @@ ty_utils_address_and_deref_not_supported = dereferencing or taking the address i
 
 ty_utils_array_not_supported = array construction is not supported in generic constants
 
-ty_utils_block_not_supported = blocks are not supported in generic constant
+ty_utils_block_not_supported = blocks are not supported in generic constants
 
-ty_utils_never_to_any_not_supported = converting nevers to any is not supported in generic constant
+ty_utils_never_to_any_not_supported = converting nevers to any is not supported in generic constants
 
 ty_utils_tuple_not_supported = tuple construction is not supported in generic constants
 
-ty_utils_index_not_supported = indexing is not supported in generic constant
+ty_utils_index_not_supported = indexing is not supported in generic constants
 
-ty_utils_field_not_supported = field access is not supported in generic constant
+ty_utils_field_not_supported = field access is not supported in generic constants
 
-ty_utils_const_block_not_supported = const blocks are not supported in generic constant
+ty_utils_const_block_not_supported = const blocks are not supported in generic constants
 
 ty_utils_adt_not_supported = struct/enum construction is not supported in generic constants
 
@@ -44,4 +44,4 @@ ty_utils_control_flow_not_supported = control flow is not supported in generic c
 
 ty_utils_inline_asm_not_supported = assembly is not supported in generic constants
 
-ty_utils_operation_not_supported = unsupported operation in generic constant
+ty_utils_operation_not_supported = unsupported operation in generic constants
index e19a6fe0ee9bff2d5a0a5c28a7ace6c976c2fa9a..51b2ff6a003812c01fb5f7e3762539cfa5d10c63 100644 (file)
@@ -114,9 +114,9 @@ pub struct Diagnostic {
     pub suggestions: Result<Vec<CodeSuggestion>, SuggestionsDisabled>,
     args: FxHashMap<DiagnosticArgName<'static>, DiagnosticArgValue<'static>>,
 
-    /// This is not used for highlighting or rendering any error message.  Rather, it can be used
-    /// as a sort key to sort a buffer of diagnostics.  By default, it is the primary span of
-    /// `span` if there is one.  Otherwise, it is `DUMMY_SP`.
+    /// This is not used for highlighting or rendering any error message. Rather, it can be used
+    /// as a sort key to sort a buffer of diagnostics. By default, it is the primary span of
+    /// `span` if there is one. Otherwise, it is `DUMMY_SP`.
     pub sort_span: Span,
 
     /// If diagnostic is from Lint, custom hash function ignores notes
index 7f01df321010bd39682d7378b980e6ea29a0e285..628e199992152937392392452603f05f8662ff62 100644 (file)
@@ -1791,7 +1791,7 @@ fn emit_suggestion_default(
 
             if let Some(span) = span.primary_span() {
                 // Compare the primary span of the diagnostic with the span of the suggestion
-                // being emitted.  If they belong to the same file, we don't *need* to show the
+                // being emitted. If they belong to the same file, we don't *need* to show the
                 // file name, saving in verbosity, but if it *isn't* we do need it, otherwise we're
                 // telling users to make a change but not clarifying *where*.
                 let loc = sm.lookup_char_pos(parts[0].span.lo());
@@ -2529,11 +2529,11 @@ fn emit_to_destination(
     //
     // On Unix systems, we write into a buffered terminal rather than directly to a terminal. When
     // the .flush() is called we take the buffer created from the buffered writes and write it at
-    // one shot.  Because the Unix systems use ANSI for the colors, which is a text-based styling
+    // one shot. Because the Unix systems use ANSI for the colors, which is a text-based styling
     // scheme, this buffered approach works and maintains the styling.
     //
     // On Windows, styling happens through calls to a terminal API. This prevents us from using the
-    // same buffering approach.  Instead, we use a global Windows mutex, which we acquire long
+    // same buffering approach. Instead, we use a global Windows mutex, which we acquire long
     // enough to output the full error message, then we release.
     let _buffer_lock = lock::acquire_global_lock("rustc_errors");
     for (pos, line) in rendered_buffer.iter().enumerate() {
index ffde8480c02117283c3ac7c47d96f4c48dcabe58..951d59246785d08bff71641690fe154308428a34 100644 (file)
@@ -63,21 +63,21 @@ pub enum Annotatable {
 
 impl Annotatable {
     pub fn span(&self) -> Span {
-        match *self {
-            Annotatable::Item(ref item) => item.span,
-            Annotatable::TraitItem(ref trait_item) => trait_item.span,
-            Annotatable::ImplItem(ref impl_item) => impl_item.span,
-            Annotatable::ForeignItem(ref foreign_item) => foreign_item.span,
-            Annotatable::Stmt(ref stmt) => stmt.span,
-            Annotatable::Expr(ref expr) => expr.span,
-            Annotatable::Arm(ref arm) => arm.span,
-            Annotatable::ExprField(ref field) => field.span,
-            Annotatable::PatField(ref fp) => fp.pat.span,
-            Annotatable::GenericParam(ref gp) => gp.ident.span,
-            Annotatable::Param(ref p) => p.span,
-            Annotatable::FieldDef(ref sf) => sf.span,
-            Annotatable::Variant(ref v) => v.span,
-            Annotatable::Crate(ref c) => c.spans.inner_span,
+        match self {
+            Annotatable::Item(item) => item.span,
+            Annotatable::TraitItem(trait_item) => trait_item.span,
+            Annotatable::ImplItem(impl_item) => impl_item.span,
+            Annotatable::ForeignItem(foreign_item) => foreign_item.span,
+            Annotatable::Stmt(stmt) => stmt.span,
+            Annotatable::Expr(expr) => expr.span,
+            Annotatable::Arm(arm) => arm.span,
+            Annotatable::ExprField(field) => field.span,
+            Annotatable::PatField(fp) => fp.pat.span,
+            Annotatable::GenericParam(gp) => gp.ident.span,
+            Annotatable::Param(p) => p.span,
+            Annotatable::FieldDef(sf) => sf.span,
+            Annotatable::Variant(v) => v.span,
+            Annotatable::Crate(c) => c.spans.inner_span,
         }
     }
 
index f4c6f3386ade23ce244e629ce715ce72a37d83e7..1fcbdfd9be5ce14353b7f1c572e2af7d6333f42b 100644 (file)
@@ -298,7 +298,7 @@ fn can_skip(stream: &AttrTokenStream) -> bool {
                     Some(AttrTokenTree::Delimited(sp, delim, inner))
                         .into_iter()
                 }
-                AttrTokenTree::Token(ref token, _) if let TokenKind::Interpolated(ref nt) = token.kind => {
+                AttrTokenTree::Token(ref token, _) if let TokenKind::Interpolated(nt) = &token.kind => {
                     panic!(
                         "Nonterminal should have been flattened at {:?}: {:?}",
                         token.span, nt
index 5d47c1ed363fbf0463ddde0118bba718d9ca10ec..79d058d9c97360d0aec61b0128d8ad140e14593d 100644 (file)
@@ -144,12 +144,12 @@ pub fn mut_visit_with<F: MutVisitor>(&mut self, vis: &mut F) {
             }
 
             pub fn visit_with<'a, V: Visitor<'a>>(&'a self, visitor: &mut V) {
-                match *self {
-                    AstFragment::OptExpr(Some(ref expr)) => visitor.visit_expr(expr),
+                match self {
+                    AstFragment::OptExpr(Some(expr)) => visitor.visit_expr(expr),
                     AstFragment::OptExpr(None) => {}
-                    AstFragment::MethodReceiverExpr(ref expr) => visitor.visit_method_receiver_expr(expr),
-                    $($(AstFragment::$Kind(ref ast) => visitor.$visit_ast(ast),)?)*
-                    $($(AstFragment::$Kind(ref ast) => for ast_elt in &ast[..] {
+                    AstFragment::MethodReceiverExpr(expr) => visitor.visit_method_receiver_expr(expr),
+                    $($(AstFragment::$Kind(ast) => visitor.$visit_ast(ast),)?)*
+                    $($(AstFragment::$Kind(ast) => for ast_elt in &ast[..] {
                         visitor.$visit_ast_elt(ast_elt, $($args)*);
                     })?)*
                 }
@@ -592,7 +592,7 @@ fn collect_invocations(
                     let expn_id = invoc.expansion_data.id;
                     let parent_def = self.cx.resolver.invocation_parent(expn_id);
                     let span = match &mut invoc.kind {
-                        InvocationKind::Bang { ref mut span, .. } => span,
+                        InvocationKind::Bang { span, .. } => span,
                         InvocationKind::Attr { attr, .. } => &mut attr.span,
                         InvocationKind::Derive { path, .. } => &mut path.span,
                     };
@@ -945,8 +945,8 @@ pub fn ensure_complete_parse<'a>(
         let def_site_span = parser.token.span.with_ctxt(SyntaxContext::root());
 
         let semi_span = parser.sess.source_map().next_point(span);
-        let add_semicolon = match parser.sess.source_map().span_to_snippet(semi_span) {
-            Ok(ref snippet) if &snippet[..] != ";" && kind_name == "expression" => {
+        let add_semicolon = match &parser.sess.source_map().span_to_snippet(semi_span) {
+            Ok(snippet) if &snippet[..] != ";" && kind_name == "expression" => {
                 Some(span.shrink_to_hi())
             }
             _ => None,
index 0b8847f827df1aa6ad9b8a056ccadba7d03ed9aa..5be134f4e664c80c189cefab7d41baa380b41045 100644 (file)
@@ -151,9 +151,9 @@ impl<'a, T> Iterator for &'a Stack<'a, T> {
 
     // Iterates from top to bottom of the stack.
     fn next(&mut self) -> Option<&'a T> {
-        match *self {
+        match self {
             Stack::Empty => None,
-            Stack::Push { ref top, ref prev } => {
+            Stack::Push { top, prev } => {
                 *self = prev;
                 Some(top)
             }
@@ -437,8 +437,8 @@ fn check_nested_occurrences(
                 // We check that the meta-variable is correctly used.
                 check_occurrences(sess, node_id, tt, macros, binders, ops, valid);
             }
-            (NestedMacroState::MacroRulesNotName, &TokenTree::Delimited(_, ref del))
-            | (NestedMacroState::MacroName, &TokenTree::Delimited(_, ref del))
+            (NestedMacroState::MacroRulesNotName, TokenTree::Delimited(_, del))
+            | (NestedMacroState::MacroName, TokenTree::Delimited(_, del))
                 if del.delim == Delimiter::Brace =>
             {
                 let macro_rules = state == NestedMacroState::MacroRulesNotName;
@@ -497,7 +497,7 @@ fn check_nested_occurrences(
                     valid,
                 );
             }
-            (_, ref tt) => {
+            (_, tt) => {
                 state = NestedMacroState::Empty;
                 check_occurrences(sess, node_id, tt, macros, binders, ops, valid);
             }
index c0489f686336b2747c9252500425e451561a83b6..4ebd75f0185604b0038a3ea8d8c70ac489344af7 100644 (file)
@@ -486,11 +486,11 @@ pub fn compile_declarative_macro(
     let mut valid = true;
 
     // Extract the arguments:
-    let lhses = match argument_map[&MacroRulesNormalizedIdent::new(lhs_nm)] {
-        MatchedSeq(ref s) => s
+    let lhses = match &argument_map[&MacroRulesNormalizedIdent::new(lhs_nm)] {
+        MatchedSeq(s) => s
             .iter()
             .map(|m| {
-                if let MatchedTokenTree(ref tt) = *m {
+                if let MatchedTokenTree(tt) = m {
                     let tt = mbe::quoted::parse(
                         TokenStream::new(vec![tt.clone()]),
                         true,
@@ -510,11 +510,11 @@ pub fn compile_declarative_macro(
         _ => sess.parse_sess.span_diagnostic.span_bug(def.span, "wrong-structured lhs"),
     };
 
-    let rhses = match argument_map[&MacroRulesNormalizedIdent::new(rhs_nm)] {
-        MatchedSeq(ref s) => s
+    let rhses = match &argument_map[&MacroRulesNormalizedIdent::new(rhs_nm)] {
+        MatchedSeq(s) => s
             .iter()
             .map(|m| {
-                if let MatchedTokenTree(ref tt) = *m {
+                if let MatchedTokenTree(tt) = m {
                     return mbe::quoted::parse(
                         TokenStream::new(vec![tt.clone()]),
                         false,
@@ -624,21 +624,21 @@ fn check_lhs_nt_follows(sess: &ParseSess, def: &ast::Item, lhs: &mbe::TokenTree)
 fn check_lhs_no_empty_seq(sess: &ParseSess, tts: &[mbe::TokenTree]) -> bool {
     use mbe::TokenTree;
     for tt in tts {
-        match *tt {
+        match tt {
             TokenTree::Token(..)
             | TokenTree::MetaVar(..)
             | TokenTree::MetaVarDecl(..)
             | TokenTree::MetaVarExpr(..) => (),
-            TokenTree::Delimited(_, ref del) => {
+            TokenTree::Delimited(_, del) => {
                 if !check_lhs_no_empty_seq(sess, &del.tts) {
                     return false;
                 }
             }
-            TokenTree::Sequence(span, ref seq) => {
+            TokenTree::Sequence(span, seq) => {
                 if seq.separator.is_none()
-                    && seq.tts.iter().all(|seq_tt| match *seq_tt {
+                    && seq.tts.iter().all(|seq_tt| match seq_tt {
                         TokenTree::MetaVarDecl(_, _, Some(NonterminalKind::Vis)) => true,
-                        TokenTree::Sequence(_, ref sub_seq) => {
+                        TokenTree::Sequence(_, sub_seq) => {
                             sub_seq.kleene.op == mbe::KleeneOp::ZeroOrMore
                                 || sub_seq.kleene.op == mbe::KleeneOp::ZeroOrOne
                         }
@@ -736,21 +736,21 @@ fn new(tts: &'tt [mbe::TokenTree]) -> FirstSets<'tt> {
         fn build_recur<'tt>(sets: &mut FirstSets<'tt>, tts: &'tt [TokenTree]) -> TokenSet<'tt> {
             let mut first = TokenSet::empty();
             for tt in tts.iter().rev() {
-                match *tt {
+                match tt {
                     TokenTree::Token(..)
                     | TokenTree::MetaVar(..)
                     | TokenTree::MetaVarDecl(..)
                     | TokenTree::MetaVarExpr(..) => {
                         first.replace_with(TtHandle::TtRef(tt));
                     }
-                    TokenTree::Delimited(span, ref delimited) => {
+                    TokenTree::Delimited(span, delimited) => {
                         build_recur(sets, &delimited.tts);
                         first.replace_with(TtHandle::from_token_kind(
                             token::OpenDelim(delimited.delim),
                             span.open,
                         ));
                     }
-                    TokenTree::Sequence(sp, ref seq_rep) => {
+                    TokenTree::Sequence(sp, seq_rep) => {
                         let subfirst = build_recur(sets, &seq_rep.tts);
 
                         match sets.first.entry(sp.entire()) {
@@ -804,7 +804,7 @@ fn first(&self, tts: &'tt [mbe::TokenTree]) -> TokenSet<'tt> {
         let mut first = TokenSet::empty();
         for tt in tts.iter() {
             assert!(first.maybe_empty);
-            match *tt {
+            match tt {
                 TokenTree::Token(..)
                 | TokenTree::MetaVar(..)
                 | TokenTree::MetaVarDecl(..)
@@ -812,14 +812,14 @@ fn first(&self, tts: &'tt [mbe::TokenTree]) -> TokenSet<'tt> {
                     first.add_one(TtHandle::TtRef(tt));
                     return first;
                 }
-                TokenTree::Delimited(span, ref delimited) => {
+                TokenTree::Delimited(span, delimited) => {
                     first.add_one(TtHandle::from_token_kind(
                         token::OpenDelim(delimited.delim),
                         span.open,
                     ));
                     return first;
                 }
-                TokenTree::Sequence(sp, ref seq_rep) => {
+                TokenTree::Sequence(sp, seq_rep) => {
                     let subfirst_owned;
                     let subfirst = match self.first.get(&sp.entire()) {
                         Some(Some(subfirst)) => subfirst,
@@ -1041,7 +1041,7 @@ fn check_matcher_core<'tt>(
 
         // First, update `last` so that it corresponds to the set
         // of NT tokens that might end the sequence `... token`.
-        match *token {
+        match token {
             TokenTree::Token(..)
             | TokenTree::MetaVar(..)
             | TokenTree::MetaVarDecl(..)
@@ -1057,7 +1057,7 @@ fn check_matcher_core<'tt>(
                     suffix_first = build_suffix_first();
                 }
             }
-            TokenTree::Delimited(span, ref d) => {
+            TokenTree::Delimited(span, d) => {
                 let my_suffix = TokenSet::singleton(TtHandle::from_token_kind(
                     token::CloseDelim(d.delim),
                     span.close,
@@ -1070,7 +1070,7 @@ fn check_matcher_core<'tt>(
                 // against SUFFIX
                 continue 'each_token;
             }
-            TokenTree::Sequence(_, ref seq_rep) => {
+            TokenTree::Sequence(_, seq_rep) => {
                 suffix_first = build_suffix_first();
                 // The trick here: when we check the interior, we want
                 // to include the separator (if any) as a potential
@@ -1372,8 +1372,8 @@ fn is_in_follow(tok: &mbe::TokenTree, kind: NonterminalKind) -> IsInFollow {
 }
 
 fn quoted_tt_to_string(tt: &mbe::TokenTree) -> String {
-    match *tt {
-        mbe::TokenTree::Token(ref token) => pprust::token_to_string(&token).into(),
+    match tt {
+        mbe::TokenTree::Token(token) => pprust::token_to_string(&token).into(),
         mbe::TokenTree::MetaVar(_, name) => format!("${}", name),
         mbe::TokenTree::MetaVarDecl(_, name, Some(kind)) => format!("${}:{}", name, kind),
         mbe::TokenTree::MetaVarDecl(_, name, None) => format!("${}:", name),
index 878284f5928de6cc4fffbe77a8bf94f4a20b3891..bc298b0ad2b1e37f8d78faa0d3433edbe7bea88a 100644 (file)
@@ -171,7 +171,7 @@ fn parse_tree(
                     } else {
                         match delim {
                             Delimiter::Brace => {
-                                // The delimiter is `{`.  This indicates the beginning
+                                // The delimiter is `{`. This indicates the beginning
                                 // of a meta-variable expression (e.g. `${count(ident)}`).
                                 // Try to parse the meta-variable expression.
                                 match MetaVarExpr::parse(&tts, delim_span.entire(), sess) {
@@ -200,7 +200,7 @@ fn parse_tree(
                         }
                     }
                     // If we didn't find a metavar expression above, then we must have a
-                    // repetition sequence in the macro (e.g. `$(pat)*`).  Parse the
+                    // repetition sequence in the macro (e.g. `$(pat)*`). Parse the
                     // contents of the sequence itself
                     let sequence = parse(tts, parsing_patterns, sess, node_id, features, edition);
                     // Get the Kleene operator and optional separator
index bec6d1a2df7d873301f1d9ebe3568fb8143f574e..b79835be73a7ec1dfd04438ea4257c8cc326bda7 100644 (file)
@@ -47,8 +47,7 @@ impl<'a> Iterator for Frame<'a> {
 
     fn next(&mut self) -> Option<&'a mbe::TokenTree> {
         match self {
-            Frame::Delimited { tts, ref mut idx, .. }
-            | Frame::Sequence { tts, ref mut idx, .. } => {
+            Frame::Delimited { tts, idx, .. } | Frame::Sequence { tts, idx, .. } => {
                 let res = tts.get(*idx);
                 *idx += 1;
                 res
@@ -220,13 +219,13 @@ pub(super) fn transcribe<'a>(
                 let ident = MacroRulesNormalizedIdent::new(original_ident);
                 if let Some(cur_matched) = lookup_cur_matched(ident, interp, &repeats) {
                     match cur_matched {
-                        MatchedTokenTree(ref tt) => {
+                        MatchedTokenTree(tt) => {
                             // `tt`s are emitted into the output stream directly as "raw tokens",
                             // without wrapping them into groups.
                             let token = tt.clone();
                             result.push(token);
                         }
-                        MatchedNonterminal(ref nt) => {
+                        MatchedNonterminal(nt) => {
                             // Other variables are emitted into the output stream as groups with
                             // `Delimiter::Invisible` to maintain parsing priorities.
                             // `Interpolated` is currently used for such groups in rustc parser.
@@ -299,12 +298,11 @@ fn lookup_cur_matched<'a>(
     interpolations: &'a FxHashMap<MacroRulesNormalizedIdent, NamedMatch>,
     repeats: &[(usize, usize)],
 ) -> Option<&'a NamedMatch> {
-    interpolations.get(&ident).map(|matched| {
-        let mut matched = matched;
+    interpolations.get(&ident).map(|mut matched| {
         for &(idx, _) in repeats {
             match matched {
                 MatchedTokenTree(_) | MatchedNonterminal(_) => break,
-                MatchedSeq(ref ads) => matched = ads.get(idx).unwrap(),
+                MatchedSeq(ads) => matched = ads.get(idx).unwrap(),
             }
         }
 
@@ -339,7 +337,7 @@ fn with(self, other: LockstepIterSize) -> LockstepIterSize {
         match self {
             LockstepIterSize::Unconstrained => other,
             LockstepIterSize::Contradiction(_) => self,
-            LockstepIterSize::Constraint(l_len, ref l_id) => match other {
+            LockstepIterSize::Constraint(l_len, l_id) => match other {
                 LockstepIterSize::Unconstrained => self,
                 LockstepIterSize::Contradiction(_) => other,
                 LockstepIterSize::Constraint(r_len, _) if l_len == r_len => self,
@@ -378,33 +376,33 @@ fn lockstep_iter_size(
     repeats: &[(usize, usize)],
 ) -> LockstepIterSize {
     use mbe::TokenTree;
-    match *tree {
-        TokenTree::Delimited(_, ref delimited) => {
+    match tree {
+        TokenTree::Delimited(_, delimited) => {
             delimited.tts.iter().fold(LockstepIterSize::Unconstrained, |size, tt| {
                 size.with(lockstep_iter_size(tt, interpolations, repeats))
             })
         }
-        TokenTree::Sequence(_, ref seq) => {
+        TokenTree::Sequence(_, seq) => {
             seq.tts.iter().fold(LockstepIterSize::Unconstrained, |size, tt| {
                 size.with(lockstep_iter_size(tt, interpolations, repeats))
             })
         }
         TokenTree::MetaVar(_, name) | TokenTree::MetaVarDecl(_, name, _) => {
-            let name = MacroRulesNormalizedIdent::new(name);
+            let name = MacroRulesNormalizedIdent::new(*name);
             match lookup_cur_matched(name, interpolations, repeats) {
                 Some(matched) => match matched {
                     MatchedTokenTree(_) | MatchedNonterminal(_) => LockstepIterSize::Unconstrained,
-                    MatchedSeq(ref ads) => LockstepIterSize::Constraint(ads.len(), name),
+                    MatchedSeq(ads) => LockstepIterSize::Constraint(ads.len(), name),
                 },
                 _ => LockstepIterSize::Unconstrained,
             }
         }
-        TokenTree::MetaVarExpr(_, ref expr) => {
+        TokenTree::MetaVarExpr(_, expr) => {
             let default_rslt = LockstepIterSize::Unconstrained;
             let Some(ident) = expr.ident() else { return default_rslt; };
             let name = MacroRulesNormalizedIdent::new(ident);
             match lookup_cur_matched(name, interpolations, repeats) {
-                Some(MatchedSeq(ref ads)) => {
+                Some(MatchedSeq(ads)) => {
                     default_rslt.with(LockstepIterSize::Constraint(ads.len(), name))
                 }
                 _ => default_rslt,
@@ -449,7 +447,7 @@ fn count<'a>(
                     Some(_) => Err(out_of_bounds_err(cx, declared_lhs_depth, sp.entire(), "count")),
                 }
             }
-            MatchedSeq(ref named_matches) => {
+            MatchedSeq(named_matches) => {
                 let new_declared_lhs_depth = declared_lhs_depth + 1;
                 match depth_opt {
                     None => named_matches
@@ -472,7 +470,7 @@ fn count<'a>(
     // before we start counting. `matched` contains the various levels of the
     // tree as we descend, and its final value is the subtree we are currently at.
     for &(idx, _) in repeats {
-        if let MatchedSeq(ref ads) = matched {
+        if let MatchedSeq(ads) = matched {
             matched = &ads[idx];
         }
     }
index e49f112bf20a69a2ec01c6d29f346ae9504d3d47..0726d922c84a3e494ccbec353c20e6df333ccd73 100644 (file)
@@ -176,9 +176,9 @@ struct PatIdentVisitor {
     }
     impl<'a> visit::Visitor<'a> for PatIdentVisitor {
         fn visit_pat(&mut self, p: &'a ast::Pat) {
-            match p.kind {
-                PatKind::Ident(_, ref ident, _) => {
-                    self.spans.push(ident.span.clone());
+            match &p.kind {
+                PatKind::Ident(_, ident, _) => {
+                    self.spans.push(ident.span);
                 }
                 _ => {
                     visit::walk_pat(self, p);
@@ -290,10 +290,8 @@ fn parse_expr_from_source_str(
         )
         .unwrap();
 
-        let tts: Vec<_> = match expr.kind {
-            ast::ExprKind::MacCall(ref mac) => mac.args.tokens.clone().into_trees().collect(),
-            _ => panic!("not a macro"),
-        };
+        let ast::ExprKind::MacCall(mac) = &expr.kind else { panic!("not a macro") };
+        let tts: Vec<_> = mac.args.tokens.clone().into_trees().collect();
 
         let span = tts.iter().rev().next().unwrap().span();
 
@@ -318,11 +316,8 @@ fn out_of_line_mod() {
         .unwrap()
         .unwrap();
 
-        if let ast::ItemKind::Mod(_, ref mod_kind) = item.kind {
-            assert!(matches!(mod_kind, ast::ModKind::Loaded(items, ..) if items.len() == 2));
-        } else {
-            panic!();
-        }
+        let ast::ItemKind::Mod(_, mod_kind) = &item.kind else { panic!() };
+        assert!(matches!(mod_kind, ast::ModKind::Loaded(items, ..) if items.len() == 2));
     });
 }
 
index 768bdab8a54199a10aec996bc8c9a6bfa897f17c..341ae18541b3796a5c58c2a89cfe9c1051fc181b 100644 (file)
@@ -230,7 +230,7 @@ fn from_internal((stream, rustc): (TokenStream, &mut Rustc<'_, '_>)) -> Self {
                     let stream = TokenStream::from_nonterminal_ast(&nt);
                     // A hack used to pass AST fragments to attribute and derive
                     // macros as a single nonterminal token instead of a token
-                    // stream.  Such token needs to be "unwrapped" and not
+                    // stream. Such a token needs to be "unwrapped" and not
                     // represented as a delimited group.
                     // FIXME: It needs to be removed, but there are some
                     // compatibility issues (see #73345).
@@ -597,8 +597,8 @@ fn eq(&mut self, file1: &Self::SourceFile, file2: &Self::SourceFile) -> bool {
     }
 
     fn path(&mut self, file: &Self::SourceFile) -> String {
-        match file.name {
-            FileName::Real(ref name) => name
+        match &file.name {
+            FileName::Real(name) => name
                 .local_path()
                 .expect("attempting to get a file path in an imported file in `proc_macro::SourceFile::path`")
                 .to_str()
index 921039797869feca60ee20aee3677e578998b757..cca5ead0f83951c19594d3704afb0264164245c6 100644 (file)
@@ -751,7 +751,7 @@ pub enum LifetimeRes {
         binder: NodeId,
     },
     /// This variant is used for anonymous lifetimes that we did not resolve during
-    /// late resolution.  Those lifetimes will be inferred by typechecking.
+    /// late resolution. Those lifetimes will be inferred by typechecking.
     Infer,
     /// Explicit `'static` lifetime.
     Static,
index 60f5b79de1033d13eb4fcfb56ba78324709eaddd..d6566860f8170a96d25bb96f3d2e08a56c88b6fc 100644 (file)
@@ -94,7 +94,7 @@ pub enum LifetimeName {
     /// Implicit lifetime in a context like `dyn Foo`. This is
     /// distinguished from implicit lifetimes elsewhere because the
     /// lifetime that they default to must appear elsewhere within the
-    /// enclosing type.  This means that, in an `impl Trait` context, we
+    /// enclosing type. This means that, in an `impl Trait` context, we
     /// don't have to create a parameter for them. That is, `impl
     /// Trait<Item = &u32>` expands to an opaque type like `type
     /// Foo<'a> = impl Trait<Item = &'a u32>`, but `impl Trait<item =
@@ -826,7 +826,7 @@ pub struct OwnerNodes<'tcx> {
     pub hash_without_bodies: Fingerprint,
     /// Full HIR for the current owner.
     // The zeroth node's parent should never be accessed: the owner's parent is computed by the
-    // hir_owner_parent query.  It is set to `ItemLocalId::INVALID` to force an ICE if accidentally
+    // hir_owner_parent query. It is set to `ItemLocalId::INVALID` to force an ICE if accidentally
     // used.
     pub nodes: IndexVec<ItemLocalId, Option<ParentedNode<'tcx>>>,
     /// Content of local bodies.
@@ -1787,6 +1787,14 @@ pub fn peel_blocks(&self) -> &Self {
         expr
     }
 
+    pub fn peel_borrows(&self) -> &Self {
+        let mut expr = self;
+        while let ExprKind::AddrOf(.., inner) = &expr.kind {
+            expr = inner;
+        }
+        expr
+    }
+
     pub fn can_have_side_effects(&self) -> bool {
         match self.peel_drop_temps().kind {
             ExprKind::Path(_) | ExprKind::Lit(_) => false,
index 3474fab34f00b1e70bb777ac1f43adaf783369ad..54fa5702fbca4b49298e49472722991f023a9751 100644 (file)
@@ -291,6 +291,7 @@ pub fn extract(attrs: &[ast::Attribute]) -> Option<(Symbol, Span)> {
     IdentityFuture,          sym::identity_future,     identity_future_fn,         Target::Fn,             GenericRequirement::None;
     GetContext,              sym::get_context,         get_context_fn,             Target::Fn,             GenericRequirement::None;
 
+    Context,                 sym::Context,             context,                    Target::Struct,         GenericRequirement::None;
     FuturePoll,              sym::poll,                future_poll_fn,             Target::Method(MethodKind::Trait { body: false }), GenericRequirement::None;
 
     FromFrom,                sym::from,                from_fn,                    Target::Method(MethodKind::Trait { body: false }), GenericRequirement::None;
index 5368dc0735bc1994e6e4135284ef9616dbb50820..232ef2079d6bb9eb3c37e95a0547ed2772c4ada9 100644 (file)
@@ -331,7 +331,7 @@ pub(crate) fn complain_about_missing_associated_types(
             }
             if potential_assoc_types.len() == assoc_items.len() {
                 // When the amount of missing associated types equals the number of
-                // extra type arguments present.  A suggesting to replace the generic args with
+                // extra type arguments present, a suggestion to replace the generic args with
                 // associated types is already emitted.
                 already_has_generics_args_suggestion = true;
             } else if let (Ok(snippet), false) =
index ce3682a8f2d5cda48084a4566eb84e76ccb4e9c4..7a499327dbf240f1f08e9874a417a24e3e31ee82 100644 (file)
@@ -337,13 +337,13 @@ pub fn create_substs_for_generic_args<'tcx, 'a>(
                     // We should never be able to reach this point with well-formed input.
                     // There are three situations in which we can encounter this issue.
                     //
-                    //  1.  The number of arguments is incorrect. In this case, an error
-                    //      will already have been emitted, and we can ignore it.
-                    //  2.  There are late-bound lifetime parameters present, yet the
-                    //      lifetime arguments have also been explicitly specified by the
-                    //      user.
-                    //  3.  We've inferred some lifetimes, which have been provided later (i.e.
-                    //      after a type or const). We want to throw an error in this case.
+                    //  1. The number of arguments is incorrect. In this case, an error
+                    //     will already have been emitted, and we can ignore it.
+                    //  2. There are late-bound lifetime parameters present, yet the
+                    //     lifetime arguments have also been explicitly specified by the
+                    //     user.
+                    //  3. We've inferred some lifetimes, which have been provided later (i.e.
+                    //     after a type or const). We want to throw an error in this case.
 
                     if arg_count.correct.is_ok()
                         && arg_count.explicit_late_bound == ExplicitLateBound::No
index 8d2cc70c05f16b0e319a6cf15786c783ec6b01ed..6435b05cef8a8bac5041a2fe8db5207c5d9b86d9 100644 (file)
@@ -569,17 +569,17 @@ fn create_assoc_bindings_for_generic_args<'a>(
             .bindings
             .iter()
             .map(|binding| {
-                let kind = match binding.kind {
-                    hir::TypeBindingKind::Equality { ref term } => match term {
-                        hir::Term::Ty(ref ty) => {
+                let kind = match &binding.kind {
+                    hir::TypeBindingKind::Equality { term } => match term {
+                        hir::Term::Ty(ty) => {
                             ConvertedBindingKind::Equality(self.ast_ty_to_ty(ty).into())
                         }
-                        hir::Term::Const(ref c) => {
+                        hir::Term::Const(c) => {
                             let c = Const::from_anon_const(self.tcx(), c.def_id);
                             ConvertedBindingKind::Equality(c.into())
                         }
                     },
-                    hir::TypeBindingKind::Constraint { ref bounds } => {
+                    hir::TypeBindingKind::Constraint { bounds } => {
                         ConvertedBindingKind::Constraint(bounds)
                     }
                 };
@@ -992,7 +992,7 @@ pub(crate) fn add_bounds<'hir, I: Iterator<Item = &'hir hir::GenericBound<'hir>>
     /// ```
     ///
     /// The `sized_by_default` parameter indicates if, in this context, the `param_ty` should be
-    /// considered `Sized` unless there is an explicit `?Sized` bound.  This would be true in the
+    /// considered `Sized` unless there is an explicit `?Sized` bound. This would be true in the
     /// example above, but is not true in supertrait listings like `trait Foo: Bar + Baz`.
     ///
     /// `span` should be the declaration size of the parameter.
@@ -1497,7 +1497,7 @@ trait here instead: `trait NewTrait: {} {{}}`",
             i.trait_ref().map_bound(|trait_ref: ty::TraitRef<'tcx>| {
                 assert_eq!(trait_ref.self_ty(), dummy_self);
 
-                // Verify that `dummy_self` did not leak inside default type parameters.  This
+                // Verify that `dummy_self` did not leak inside default type parameters. This
                 // could not be done at path creation, since we need to see through trait aliases.
                 let mut missing_type_params = vec![];
                 let mut references_self = false;
@@ -1928,7 +1928,7 @@ pub fn associated_path_to_ty(
     ) -> Result<(Ty<'tcx>, DefKind, DefId), ErrorGuaranteed> {
         let tcx = self.tcx();
         let assoc_ident = assoc_segment.ident;
-        let qself_res = if let hir::TyKind::Path(hir::QPath::Resolved(_, ref path)) = qself.kind {
+        let qself_res = if let hir::TyKind::Path(hir::QPath::Resolved(_, path)) = &qself.kind {
             path.res
         } else {
             Res::Err
@@ -1971,8 +1971,8 @@ pub fn associated_path_to_ty(
                                 return;
                             };
                             let (qself_sugg_span, is_self) = if let hir::TyKind::Path(
-                                hir::QPath::Resolved(_, ref path)
-                            ) = qself.kind {
+                                hir::QPath::Resolved(_, path)
+                            ) = &qself.kind {
                                 // If the path segment already has type params, we want to overwrite
                                 // them.
                                 match &path.segments[..] {
@@ -2602,7 +2602,7 @@ pub fn res_to_ty(
         match path.res {
             Res::Def(DefKind::OpaqueTy | DefKind::ImplTraitPlaceholder, did) => {
                 // Check for desugared `impl Trait`.
-                assert!(ty::is_impl_trait_defn(tcx, did).is_none());
+                assert!(tcx.is_type_alias_impl_trait(did));
                 let item_segment = path.segments.split_last().unwrap();
                 self.prohibit_generics(item_segment.1.iter(), |err| {
                     err.note("`impl Trait` types can't have type parameters");
@@ -2760,7 +2760,7 @@ pub fn res_to_ty(
                         "generic `Self` types are currently not permitted in anonymous constants",
                     );
                     if let Some(hir::Node::Item(&hir::Item {
-                        kind: hir::ItemKind::Impl(ref impl_),
+                        kind: hir::ItemKind::Impl(impl_),
                         ..
                     })) = tcx.hir().get_if_local(def_id)
                     {
@@ -2832,7 +2832,7 @@ pub fn ast_ty_to_ty(&self, ast_ty: &hir::Ty<'_>) -> Ty<'tcx> {
     }
 
     /// Parses the programmer's textual representation of a type into our
-    /// internal notion of a type.  This is meant to be used within a path.
+    /// internal notion of a type. This is meant to be used within a path.
     pub fn ast_ty_to_ty_in_path(&self, ast_ty: &hir::Ty<'_>) -> Ty<'tcx> {
         self.ast_ty_to_ty_inner(ast_ty, false, true)
     }
@@ -2843,12 +2843,12 @@ pub fn ast_ty_to_ty_in_path(&self, ast_ty: &hir::Ty<'_>) -> Ty<'tcx> {
     fn ast_ty_to_ty_inner(&self, ast_ty: &hir::Ty<'_>, borrowed: bool, in_path: bool) -> Ty<'tcx> {
         let tcx = self.tcx();
 
-        let result_ty = match ast_ty.kind {
-            hir::TyKind::Slice(ref ty) => tcx.mk_slice(self.ast_ty_to_ty(ty)),
-            hir::TyKind::Ptr(ref mt) => {
+        let result_ty = match &ast_ty.kind {
+            hir::TyKind::Slice(ty) => tcx.mk_slice(self.ast_ty_to_ty(ty)),
+            hir::TyKind::Ptr(mt) => {
                 tcx.mk_ptr(ty::TypeAndMut { ty: self.ast_ty_to_ty(mt.ty), mutbl: mt.mutbl })
             }
-            hir::TyKind::Ref(ref region, ref mt) => {
+            hir::TyKind::Ref(region, mt) => {
                 let r = self.ast_region_to_region(region, None);
                 debug!(?r);
                 let t = self.ast_ty_to_ty_inner(mt.ty, true, false);
@@ -2868,7 +2868,7 @@ fn ast_ty_to_ty_inner(&self, ast_ty: &hir::Ty<'_>, borrowed: bool, in_path: bool
                     Some(ast_ty),
                 ))
             }
-            hir::TyKind::TraitObject(bounds, ref lifetime, repr) => {
+            hir::TyKind::TraitObject(bounds, lifetime, repr) => {
                 self.maybe_lint_bare_trait(ast_ty, in_path);
                 let repr = match repr {
                     TraitObjectSyntax::Dyn | TraitObjectSyntax::None => ty::Dyn,
@@ -2876,12 +2876,12 @@ fn ast_ty_to_ty_inner(&self, ast_ty: &hir::Ty<'_>, borrowed: bool, in_path: bool
                 };
                 self.conv_object_ty_poly_trait_ref(ast_ty.span, bounds, lifetime, borrowed, repr)
             }
-            hir::TyKind::Path(hir::QPath::Resolved(ref maybe_qself, ref path)) => {
+            hir::TyKind::Path(hir::QPath::Resolved(maybe_qself, path)) => {
                 debug!(?maybe_qself, ?path);
                 let opt_self_ty = maybe_qself.as_ref().map(|qself| self.ast_ty_to_ty(qself));
                 self.res_to_ty(opt_self_ty, path, false)
             }
-            hir::TyKind::OpaqueDef(item_id, lifetimes, in_trait) => {
+            &hir::TyKind::OpaqueDef(item_id, lifetimes, in_trait) => {
                 let opaque_ty = tcx.hir().item(item_id);
                 let def_id = item_id.owner_id.to_def_id();
 
@@ -2892,14 +2892,14 @@ fn ast_ty_to_ty_inner(&self, ast_ty: &hir::Ty<'_>, borrowed: bool, in_path: bool
                     ref i => bug!("`impl Trait` pointed to non-opaque type?? {:#?}", i),
                 }
             }
-            hir::TyKind::Path(hir::QPath::TypeRelative(ref qself, ref segment)) => {
+            hir::TyKind::Path(hir::QPath::TypeRelative(qself, segment)) => {
                 debug!(?qself, ?segment);
                 let ty = self.ast_ty_to_ty_inner(qself, false, true);
                 self.associated_path_to_ty(ast_ty.hir_id, ast_ty.span, ty, qself, segment, false)
                     .map(|(ty, _, _)| ty)
                     .unwrap_or_else(|_| tcx.ty_error())
             }
-            hir::TyKind::Path(hir::QPath::LangItem(lang_item, span, _)) => {
+            &hir::TyKind::Path(hir::QPath::LangItem(lang_item, span, _)) => {
                 let def_id = tcx.require_lang_item(lang_item, Some(span));
                 let (substs, _) = self.create_substs_for_ast_path(
                     span,
@@ -2913,7 +2913,7 @@ fn ast_ty_to_ty_inner(&self, ast_ty: &hir::Ty<'_>, borrowed: bool, in_path: bool
                 );
                 EarlyBinder(tcx.at(span).type_of(def_id)).subst(tcx, substs)
             }
-            hir::TyKind::Array(ref ty, ref length) => {
+            hir::TyKind::Array(ty, length) => {
                 let length = match length {
                     &hir::ArrayLen::Infer(_, span) => self.ct_infer(tcx.types.usize, None, span),
                     hir::ArrayLen::Body(constant) => {
@@ -2923,7 +2923,7 @@ fn ast_ty_to_ty_inner(&self, ast_ty: &hir::Ty<'_>, borrowed: bool, in_path: bool
 
                 tcx.mk_ty(ty::Array(self.ast_ty_to_ty(ty), length))
             }
-            hir::TyKind::Typeof(ref e) => {
+            hir::TyKind::Typeof(e) => {
                 let ty_erased = tcx.type_of(e.def_id);
                 let ty = tcx.fold_regions(ty_erased, |r, _| {
                     if r.is_erased() { tcx.lifetimes.re_static } else { r }
@@ -3305,7 +3305,13 @@ fn maybe_lint_bare_trait(&self, self_ty: &hir::Ty<'_>, in_path: bool) {
                 let label = "add `dyn` keyword before this trait";
                 let mut diag =
                     rustc_errors::struct_span_err!(tcx.sess, self_ty.span, E0782, "{}", msg);
-                diag.multipart_suggestion_verbose(label, sugg, Applicability::MachineApplicable);
+                if self_ty.span.can_be_used_for_suggestions() {
+                    diag.multipart_suggestion_verbose(
+                        label,
+                        sugg,
+                        Applicability::MachineApplicable,
+                    );
+                }
                 // check if the impl trait that we are considering is a impl of a local trait
                 self.maybe_lint_blanket_trait_impl(&self_ty, &mut diag);
                 diag.emit();
index e58669433e2189c501620fd0af3f826f2accd333..abc1c2d7b8d1754ade0e1b977b8032a7c331c787 100644 (file)
@@ -267,7 +267,7 @@ impl<'tcx> ty::visit::TypeVisitor<'tcx> for ProhibitOpaqueVisitor<'tcx> {
         fn visit_ty(&mut self, t: Ty<'tcx>) -> ControlFlow<Self::BreakTy> {
             debug!(?t, "root_visit_ty");
             if t == self.opaque_identity_ty {
-                ControlFlow::CONTINUE
+                ControlFlow::Continue(())
             } else {
                 t.visit_with(&mut ConstrainOpaqueTypeRegionVisitor {
                     tcx: self.tcx,
@@ -282,7 +282,7 @@ fn visit_ty(&mut self, t: Ty<'tcx>) -> ControlFlow<Self::BreakTy> {
                 if self.references_parent_regions {
                     ControlFlow::Break(t)
                 } else {
-                    ControlFlow::CONTINUE
+                    ControlFlow::Continue(())
                 }
             }
         }
@@ -531,9 +531,7 @@ fn check_item_type(tcx: TyCtxt<'_>, id: hir::ItemId) {
         DefKind::Fn => {} // entirely within check_item_body
         DefKind::Impl => {
             let it = tcx.hir().item(id);
-            let hir::ItemKind::Impl(ref impl_) = it.kind else {
-                return;
-            };
+            let hir::ItemKind::Impl(impl_) = it.kind else { return };
             debug!("ItemKind::Impl {} with id {:?}", it.ident, it.owner_id);
             if let Some(impl_trait_ref) = tcx.impl_trait_ref(it.owner_id) {
                 check_impl_items_against_trait(
@@ -548,15 +546,15 @@ fn check_item_type(tcx: TyCtxt<'_>, id: hir::ItemId) {
         }
         DefKind::Trait => {
             let it = tcx.hir().item(id);
-            let hir::ItemKind::Trait(_, _, _, _, ref items) = it.kind else {
+            let hir::ItemKind::Trait(_, _, _, _, items) = it.kind else {
                 return;
             };
             check_on_unimplemented(tcx, it);
 
             for item in items.iter() {
                 let item = tcx.hir().trait_item(item.id);
-                match item.kind {
-                    hir::TraitItemKind::Fn(ref sig, _) => {
+                match &item.kind {
+                    hir::TraitItemKind::Fn(sig, _) => {
                         let abi = sig.header.abi;
                         fn_maybe_err(tcx, item.ident.span, abi);
                     }
@@ -652,8 +650,8 @@ fn check_item_type(tcx: TyCtxt<'_>, id: hir::ItemId) {
                     }
 
                     let item = tcx.hir().foreign_item(item.id);
-                    match item.kind {
-                        hir::ForeignItemKind::Fn(ref fn_decl, _, _) => {
+                    match &item.kind {
+                        hir::ForeignItemKind::Fn(fn_decl, _, _) => {
                             require_c_abi_if_c_variadic(tcx, fn_decl, abi, item.span);
                         }
                         hir::ForeignItemKind::Static(..) => {
@@ -1393,11 +1391,15 @@ fn async_opaque_type_cycle_error(tcx: TyCtxt<'_>, span: Span) -> ErrorGuaranteed
 ///
 /// If all the return expressions evaluate to `!`, then we explain that the error will go away
 /// after changing it. This can happen when a user uses `panic!()` or similar as a placeholder.
-fn opaque_type_cycle_error(tcx: TyCtxt<'_>, def_id: LocalDefId, span: Span) -> ErrorGuaranteed {
+fn opaque_type_cycle_error(
+    tcx: TyCtxt<'_>,
+    opaque_def_id: LocalDefId,
+    span: Span,
+) -> ErrorGuaranteed {
     let mut err = struct_span_err!(tcx.sess, span, E0720, "cannot resolve opaque type");
 
     let mut label = false;
-    if let Some((def_id, visitor)) = get_owner_return_paths(tcx, def_id) {
+    if let Some((def_id, visitor)) = get_owner_return_paths(tcx, opaque_def_id) {
         let typeck_results = tcx.typeck(def_id);
         if visitor
             .returns
@@ -1433,21 +1435,30 @@ fn opaque_type_cycle_error(tcx: TyCtxt<'_>, def_id: LocalDefId, span: Span) -> E
                 .filter_map(|e| typeck_results.node_type_opt(e.hir_id).map(|t| (e.span, t)))
                 .filter(|(_, ty)| !matches!(ty.kind(), ty::Never))
             {
-                struct OpaqueTypeCollector(Vec<DefId>);
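+                /// Collects the opaque types and the closures/generators mentioned by the returned type.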
+                #[derive(Default)]
+                struct OpaqueTypeCollector {
+                    opaques: Vec<DefId>,
+                    closures: Vec<DefId>,
+                }
                 impl<'tcx> ty::visit::TypeVisitor<'tcx> for OpaqueTypeCollector {
                     fn visit_ty(&mut self, t: Ty<'tcx>) -> ControlFlow<Self::BreakTy> {
                         match *t.kind() {
                             ty::Alias(ty::Opaque, ty::AliasTy { def_id: def, .. }) => {
-                                self.0.push(def);
-                                ControlFlow::CONTINUE
+                                self.opaques.push(def);
+                                ControlFlow::Continue(())
+                            }
+                            ty::Closure(def_id, ..) | ty::Generator(def_id, ..) => {
+                                self.closures.push(def_id);
+                                t.super_visit_with(self)
                             }
                             _ => t.super_visit_with(self),
                         }
                     }
                 }
-                let mut visitor = OpaqueTypeCollector(vec![]);
+
+                let mut visitor = OpaqueTypeCollector::default();
                 ty.visit_with(&mut visitor);
-                for def_id in visitor.0 {
+                for def_id in visitor.opaques {
                     let ty_span = tcx.def_span(def_id);
                     if !seen.contains(&ty_span) {
                         err.span_label(ty_span, &format!("returning this opaque type `{ty}`"));
@@ -1455,6 +1466,40 @@ fn visit_ty(&mut self, t: Ty<'tcx>) -> ControlFlow<Self::BreakTy> {
                     }
                     err.span_label(sp, &format!("returning here with type `{ty}`"));
                 }
+
+                for closure_def_id in visitor.closures {
+                    let Some(closure_local_did) = closure_def_id.as_local() else { continue; };
+                    let typeck_results = tcx.typeck(closure_local_did);
+
+                    let mut label_match = |ty: Ty<'_>, span| {
+                        for arg in ty.walk() {
+                            if let ty::GenericArgKind::Type(ty) = arg.unpack()
+                                && let ty::Alias(ty::Opaque, ty::AliasTy { def_id: captured_def_id, .. }) = *ty.kind()
+                                && captured_def_id == opaque_def_id.to_def_id()
+                            {
+                                err.span_label(
+                                    span,
+                                    format!(
+                                        "{} captures itself here",
+                                        tcx.def_kind(closure_def_id).descr(closure_def_id)
+                                    ),
+                                );
+                            }
+                        }
+                    };
+
+                    // Label any closure upvars that capture the opaque
+                    for capture in typeck_results.closure_min_captures_flattened(closure_local_did)
+                    {
+                        label_match(capture.place.ty(), capture.get_path_span(tcx));
+                    }
+                    // Label any generator locals that capture the opaque
+                    for interior_ty in
+                        typeck_results.generator_interior_types.as_ref().skip_binder()
+                    {
+                        label_match(interior_ty.ty, interior_ty.span);
+                    }
+                }
             }
         }
     }
index 770d7b6f927e4a4179752e85ca412ee7924e07d0..cfebcceef3cdb1e1096907e76fd6df256d5dbdd5 100644 (file)
@@ -47,42 +47,22 @@ pub(super) fn compare_impl_method<'tcx>(
 
     let impl_m_span = tcx.def_span(impl_m.def_id);
 
-    if let Err(_) = compare_self_type(tcx, impl_m, impl_m_span, trait_m, impl_trait_ref) {
-        return;
-    }
-
-    if let Err(_) = compare_number_of_generics(tcx, impl_m, trait_m, trait_item_span, false) {
-        return;
-    }
-
-    if let Err(_) = compare_generic_param_kinds(tcx, impl_m, trait_m, false) {
-        return;
-    }
-
-    if let Err(_) =
-        compare_number_of_method_arguments(tcx, impl_m, impl_m_span, trait_m, trait_item_span)
-    {
-        return;
-    }
-
-    if let Err(_) = compare_synthetic_generics(tcx, impl_m, trait_m) {
-        return;
-    }
-
-    if let Err(_) = compare_asyncness(tcx, impl_m, impl_m_span, trait_m, trait_item_span) {
-        return;
-    }
-
-    if let Err(_) = compare_method_predicate_entailment(
-        tcx,
-        impl_m,
-        impl_m_span,
-        trait_m,
-        impl_trait_ref,
-        CheckImpliedWfMode::Check,
-    ) {
-        return;
-    }
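+    // The `?` operator propagates the first `Err`, so the remaining checks are skipped once one fails.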
+    let _: Result<_, ErrorGuaranteed> = try {
+        compare_self_type(tcx, impl_m, impl_m_span, trait_m, impl_trait_ref)?;
+        compare_number_of_generics(tcx, impl_m, trait_m, trait_item_span, false)?;
+        compare_generic_param_kinds(tcx, impl_m, trait_m, false)?;
+        compare_number_of_method_arguments(tcx, impl_m, impl_m_span, trait_m, trait_item_span)?;
+        compare_synthetic_generics(tcx, impl_m, trait_m)?;
+        compare_asyncness(tcx, impl_m, impl_m_span, trait_m, trait_item_span)?;
+        compare_method_predicate_entailment(
+            tcx,
+            impl_m,
+            impl_m_span,
+            trait_m,
+            impl_trait_ref,
+            CheckImpliedWfMode::Check,
+        )?;
+    };
 }
 
 /// This function is best explained by example. Consider a trait:
@@ -138,7 +118,7 @@ pub(super) fn compare_impl_method<'tcx>(
 ///     <'a> fn(t: &'i0 U0, m: &'a) -> Foo
 ///
 /// This type is also the same but the name of the bound region (`'a`
-/// vs `'b`).  However, the normal subtyping rules on fn types handle
+/// vs `'b`). However, the normal subtyping rules on fn types handle
 /// this kind of equivalency just fine.
 ///
 /// We now use these substitutions to ensure that all declared bounds are
@@ -209,9 +189,11 @@ fn compare_method_predicate_entailment<'tcx>(
     //
     // We then register the obligations from the impl_m and check to see
     // if all constraints hold.
-    hybrid_preds
-        .predicates
-        .extend(trait_m_predicates.instantiate_own(tcx, trait_to_placeholder_substs).predicates);
+    hybrid_preds.predicates.extend(
+        trait_m_predicates
+            .instantiate_own(tcx, trait_to_placeholder_substs)
+            .map(|(predicate, _)| predicate),
+    );
 
     // Construct trait parameter environment and then shift it into the placeholder viewpoint.
     // The key step here is to update the caller_bounds's predicates to be
@@ -230,7 +212,7 @@ fn compare_method_predicate_entailment<'tcx>(
     debug!("compare_impl_method: caller_bounds={:?}", param_env.caller_bounds());
 
     let impl_m_own_bounds = impl_m_predicates.instantiate_own(tcx, impl_to_placeholder_substs);
-    for (predicate, span) in iter::zip(impl_m_own_bounds.predicates, impl_m_own_bounds.spans) {
+    for (predicate, span) in impl_m_own_bounds {
         let normalize_cause = traits::ObligationCause::misc(span, impl_m_hir_id);
         let predicate = ocx.normalize(&normalize_cause, param_env, predicate);
 
@@ -762,7 +744,7 @@ pub(super) fn collect_return_position_impl_trait_in_trait_tys<'tcx>(
         match infcx.fully_resolve(ty) {
             Ok(ty) => {
                 // `ty` contains free regions that we created earlier while liberating the
-                // trait fn signature.  However, projection normalization expects `ty` to
+                // trait fn signature. However, projection normalization expects `ty` to
                 // contains `def_id`'s early-bound regions.
                 let id_substs = InternalSubsts::identity_for_item(tcx, def_id);
                 debug!(?id_substs, ?substs);
@@ -934,16 +916,14 @@ fn report_trait_method_mismatch<'tcx>(
             // When the `impl` receiver is an arbitrary self type, like `self: Box<Self>`, the
             // span points only at the type `Box<Self`>, but we want to cover the whole
             // argument pattern and type.
-            let span = match tcx.hir().expect_impl_item(impl_m.def_id.expect_local()).kind {
-                ImplItemKind::Fn(ref sig, body) => tcx
-                    .hir()
-                    .body_param_names(body)
-                    .zip(sig.decl.inputs.iter())
-                    .map(|(param, ty)| param.span.to(ty.span))
-                    .next()
-                    .unwrap_or(impl_err_span),
-                _ => bug!("{:?} is not a method", impl_m),
-            };
+            let ImplItemKind::Fn(ref sig, body) = tcx.hir().expect_impl_item(impl_m.def_id.expect_local()).kind else { bug!("{impl_m:?} is not a method") };
+            let span = tcx
+                .hir()
+                .body_param_names(body)
+                .zip(sig.decl.inputs.iter())
+                .map(|(param, ty)| param.span.to(ty.span))
+                .next()
+                .unwrap_or(impl_err_span);
 
             diag.span_suggestion(
                 span,
@@ -956,22 +936,21 @@ fn report_trait_method_mismatch<'tcx>(
             if trait_sig.inputs().len() == *i {
                 // Suggestion to change output type. We do not suggest in `async` functions
                 // to avoid complex logic or incorrect output.
-                match tcx.hir().expect_impl_item(impl_m.def_id.expect_local()).kind {
-                    ImplItemKind::Fn(ref sig, _) if !sig.header.asyncness.is_async() => {
-                        let msg = "change the output type to match the trait";
-                        let ap = Applicability::MachineApplicable;
-                        match sig.decl.output {
-                            hir::FnRetTy::DefaultReturn(sp) => {
-                                let sugg = format!("-> {} ", trait_sig.output());
-                                diag.span_suggestion_verbose(sp, msg, sugg, ap);
-                            }
-                            hir::FnRetTy::Return(hir_ty) => {
-                                let sugg = trait_sig.output();
-                                diag.span_suggestion(hir_ty.span, msg, sugg, ap);
-                            }
-                        };
-                    }
-                    _ => {}
+                if let ImplItemKind::Fn(sig, _) = &tcx.hir().expect_impl_item(impl_m.def_id.expect_local()).kind
+                    && !sig.header.asyncness.is_async()
+                {
+                    let msg = "change the output type to match the trait";
+                    let ap = Applicability::MachineApplicable;
+                    match sig.decl.output {
+                        hir::FnRetTy::DefaultReturn(sp) => {
+                            let sugg = format!("-> {} ", trait_sig.output());
+                            diag.span_suggestion_verbose(sp, msg, sugg, ap);
+                        }
+                        hir::FnRetTy::Return(hir_ty) => {
+                            let sugg = trait_sig.output();
+                            diag.span_suggestion(hir_ty.span, msg, sugg, ap);
+                        }
+                    };
                 };
             } else if let Some(trait_ty) = trait_sig.inputs().get(*i) {
                 diag.span_suggestion(
@@ -1020,7 +999,7 @@ fn check_region_bounds_on_impl_item<'tcx>(
 
     // Must have same number of early-bound lifetime parameters.
     // Unfortunately, if the user screws up the bounds, then this
-    // will change classification between early and late.  E.g.,
+    // will change classification between early and late. E.g.,
     // if in trait we have `<'a,'b:'a>`, and in impl we just have
     // `<'a,'b>`, then we have 2 early-bound lifetime parameters
     // in trait but 0 in the impl. But if we report "expected 2
@@ -1098,25 +1077,18 @@ fn extract_spans_for_error_reporting<'tcx>(
     trait_m: &ty::AssocItem,
 ) -> (Span, Option<Span>) {
     let tcx = infcx.tcx;
-    let mut impl_args = match tcx.hir().expect_impl_item(impl_m.def_id.expect_local()).kind {
-        ImplItemKind::Fn(ref sig, _) => {
-            sig.decl.inputs.iter().map(|t| t.span).chain(iter::once(sig.decl.output.span()))
-        }
-        _ => bug!("{:?} is not a method", impl_m),
+    let mut impl_args = {
+        let ImplItemKind::Fn(sig, _) = &tcx.hir().expect_impl_item(impl_m.def_id.expect_local()).kind else { bug!("{:?} is not a method", impl_m) };
+        sig.decl.inputs.iter().map(|t| t.span).chain(iter::once(sig.decl.output.span()))
     };
-    let trait_args =
-        trait_m.def_id.as_local().map(|def_id| match tcx.hir().expect_trait_item(def_id).kind {
-            TraitItemKind::Fn(ref sig, _) => {
-                sig.decl.inputs.iter().map(|t| t.span).chain(iter::once(sig.decl.output.span()))
-            }
-            _ => bug!("{:?} is not a TraitItemKind::Fn", trait_m),
-        });
+
+    let trait_args = trait_m.def_id.as_local().map(|def_id| {
+        let TraitItemKind::Fn(sig, _) = &tcx.hir().expect_trait_item(def_id).kind else { bug!("{:?} is not a TraitItemKind::Fn", trait_m) };
+        sig.decl.inputs.iter().map(|t| t.span).chain(iter::once(sig.decl.output.span()))
+    });
 
     match terr {
-        TypeError::ArgumentMutability(i) => {
-            (impl_args.nth(i).unwrap(), trait_args.and_then(|mut args| args.nth(i)))
-        }
-        TypeError::ArgumentSorts(ExpectedFound { .. }, i) => {
+        TypeError::ArgumentMutability(i) | TypeError::ArgumentSorts(ExpectedFound { .. }, i) => {
             (impl_args.nth(i).unwrap(), trait_args.and_then(|mut args| args.nth(i)))
         }
         _ => (cause.span(), tcx.hir().span_if_local(trait_m.def_id)),
@@ -1131,9 +1103,9 @@ fn compare_self_type<'tcx>(
     impl_trait_ref: ty::TraitRef<'tcx>,
 ) -> Result<(), ErrorGuaranteed> {
     // Try to give more informative error messages about self typing
-    // mismatches.  Note that any mismatch will also be detected
+    // mismatches. Note that any mismatch will also be detected
     // below, where we construct a canonical function type that
-    // includes the self parameter as a normal parameter.  It's just
+    // includes the self parameter as a normal parameter. It's just
     // that the error messages you get out of this code are a bit more
     // inscrutable, particularly for cases where one method has no
     // self.
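A tiny compiling example (illustration only, not from this diff) of the note above that the later check treats `self` as an ordinary first parameter of the canonical function type:

trait Area {
    fn area(&self) -> u32;
}

struct Square(u32);

impl Area for Square {
    fn area(&self) -> u32 { self.0 * self.0 }
}

fn main() {
    let s = Square(3);
    // Method-call syntax and the fully qualified form, with `self` passed as a
    // normal first argument, are equivalent.
    assert_eq!(s.area(), Area::area(&s));
}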
@@ -1176,8 +1148,7 @@ fn compare_self_type<'tcx>(
             } else {
                 err.note_trait_signature(trait_m.name, trait_m.signature(tcx));
             }
-            let reported = err.emit();
-            return Err(reported);
+            return Err(err.emit());
         }
 
         (true, false) => {
@@ -1196,8 +1167,8 @@ fn compare_self_type<'tcx>(
             } else {
                 err.note_trait_signature(trait_m.name, trait_m.signature(tcx));
             }
-            let reported = err.emit();
-            return Err(reported);
+
+            return Err(err.emit());
         }
     }
 
@@ -1379,41 +1350,39 @@ fn compare_number_of_method_arguments<'tcx>(
     let trait_m_fty = tcx.fn_sig(trait_m.def_id);
     let trait_number_args = trait_m_fty.inputs().skip_binder().len();
     let impl_number_args = impl_m_fty.inputs().skip_binder().len();
+
     if trait_number_args != impl_number_args {
-        let trait_span = if let Some(def_id) = trait_m.def_id.as_local() {
-            match tcx.hir().expect_trait_item(def_id).kind {
-                TraitItemKind::Fn(ref trait_m_sig, _) => {
-                    let pos = if trait_number_args > 0 { trait_number_args - 1 } else { 0 };
-                    if let Some(arg) = trait_m_sig.decl.inputs.get(pos) {
-                        Some(if pos == 0 {
-                            arg.span
-                        } else {
-                            arg.span.with_lo(trait_m_sig.decl.inputs[0].span.lo())
-                        })
-                    } else {
-                        trait_item_span
-                    }
-                }
-                _ => bug!("{:?} is not a method", impl_m),
-            }
-        } else {
-            trait_item_span
-        };
-        let impl_span = match tcx.hir().expect_impl_item(impl_m.def_id.expect_local()).kind {
-            ImplItemKind::Fn(ref impl_m_sig, _) => {
-                let pos = if impl_number_args > 0 { impl_number_args - 1 } else { 0 };
-                if let Some(arg) = impl_m_sig.decl.inputs.get(pos) {
+        let trait_span = trait_m
+            .def_id
+            .as_local()
+            .and_then(|def_id| {
+                let TraitItemKind::Fn(trait_m_sig, _) = &tcx.hir().expect_trait_item(def_id).kind else { bug!("{:?} is not a method", impl_m) };
+                let pos = trait_number_args.saturating_sub(1);
+                trait_m_sig.decl.inputs.get(pos).map(|arg| {
                     if pos == 0 {
                         arg.span
                     } else {
-                        arg.span.with_lo(impl_m_sig.decl.inputs[0].span.lo())
+                        arg.span.with_lo(trait_m_sig.decl.inputs[0].span.lo())
                     }
+                })
+            })
+            .or(trait_item_span);
+
+        let ImplItemKind::Fn(impl_m_sig, _) = &tcx.hir().expect_impl_item(impl_m.def_id.expect_local()).kind else { bug!("{:?} is not a method", impl_m) };
+        let pos = impl_number_args.saturating_sub(1);
+        let impl_span = impl_m_sig
+            .decl
+            .inputs
+            .get(pos)
+            .map(|arg| {
+                if pos == 0 {
+                    arg.span
                 } else {
-                    impl_m_span
+                    arg.span.with_lo(impl_m_sig.decl.inputs[0].span.lo())
                 }
-            }
-            _ => bug!("{:?} is not a method", impl_m),
-        };
+            })
+            .unwrap_or(impl_m_span);
+
         let mut err = struct_span_err!(
             tcx.sess,
             impl_span,
@@ -1424,6 +1393,7 @@ fn compare_number_of_method_arguments<'tcx>(
             tcx.def_path_str(trait_m.def_id),
             trait_number_args
         );
+
         if let Some(trait_span) = trait_span {
             err.span_label(
                 trait_span,
@@ -1435,6 +1405,7 @@ fn compare_number_of_method_arguments<'tcx>(
         } else {
             err.note_trait_signature(trait_m.name, trait_m.signature(tcx));
         }
+
         err.span_label(
             impl_span,
             format!(
@@ -1443,8 +1414,8 @@ fn compare_number_of_method_arguments<'tcx>(
                 impl_number_args
             ),
         );
-        let reported = err.emit();
-        return Err(reported);
+
+        return Err(err.emit());
     }
 
     Ok(())
@@ -1491,7 +1462,7 @@ fn compare_synthetic_generics<'tcx>(
                 // explicit generics
                 (true, false) => {
                     err.span_label(impl_span, "expected generic parameter, found `impl Trait`");
-                    (|| {
+                    let _: Option<_> = try {
                         // try taking the name from the trait impl
                         // FIXME: this is obviously suboptimal since the name can already be used
                         // as another generic argument
@@ -1524,26 +1495,23 @@ fn compare_synthetic_generics<'tcx>(
                             ],
                             Applicability::MaybeIncorrect,
                         );
-                        Some(())
-                    })();
+                    };
                 }
                 // The case where the trait method uses `impl Trait`, but the impl method uses
                 // explicit generics.
                 (false, true) => {
                     err.span_label(impl_span, "expected `impl Trait`, found generic parameter");
-                    (|| {
+                    let _: Option<_> = try {
                         let impl_m = impl_m.def_id.as_local()?;
                         let impl_m = tcx.hir().expect_impl_item(impl_m);
-                        let input_tys = match impl_m.kind {
-                            hir::ImplItemKind::Fn(ref sig, _) => sig.decl.inputs,
-                            _ => unreachable!(),
-                        };
+                        let hir::ImplItemKind::Fn(sig, _) = &impl_m.kind else { unreachable!() };
+                        let input_tys = sig.decl.inputs;
+
                         struct Visitor(Option<Span>, hir::def_id::LocalDefId);
                         impl<'v> intravisit::Visitor<'v> for Visitor {
                             fn visit_ty(&mut self, ty: &'v hir::Ty<'v>) {
                                 intravisit::walk_ty(self, ty);
-                                if let hir::TyKind::Path(hir::QPath::Resolved(None, ref path)) =
-                                    ty.kind
+                                if let hir::TyKind::Path(hir::QPath::Resolved(None, path)) = ty.kind
                                     && let Res::Def(DefKind::TyParam, def_id) = path.res
                                     && def_id == self.1.to_def_id()
                                 {
@@ -1551,6 +1519,7 @@ fn visit_ty(&mut self, ty: &'v hir::Ty<'v>) {
                                 }
                             }
                         }
+
                         let mut visitor = Visitor(None, impl_def_id);
                         for ty in input_tys {
                             intravisit::Visitor::visit_ty(&mut visitor, ty);
@@ -1571,13 +1540,11 @@ fn visit_ty(&mut self, ty: &'v hir::Ty<'v>) {
                             ],
                             Applicability::MaybeIncorrect,
                         );
-                        Some(())
-                    })();
+                    };
                 }
                 _ => unreachable!(),
             }
-            let reported = err.emit();
-            error_found = Some(reported);
+            error_found = Some(err.emit());
         }
     }
     if let Some(reported) = error_found { Err(reported) } else { Ok(()) }
@@ -1737,10 +1704,8 @@ pub(super) fn compare_impl_const_raw(
         );
 
         // Locate the Span containing just the type of the offending impl
-        match tcx.hir().expect_impl_item(impl_const_item_def).kind {
-            ImplItemKind::Const(ref ty, _) => cause.span = ty.span,
-            _ => bug!("{:?} is not a impl const", impl_const_item),
-        }
+        let ImplItemKind::Const(ty, _) = tcx.hir().expect_impl_item(impl_const_item_def).kind else { bug!("{impl_const_item:?} is not a impl const") };
+        cause.span = ty.span;
 
         let mut diag = struct_span_err!(
             tcx.sess,
@@ -1752,10 +1717,8 @@ pub(super) fn compare_impl_const_raw(
 
         let trait_c_span = trait_const_item_def.as_local().map(|trait_c_def_id| {
             // Add a label to the Span containing just the type of the const
-            match tcx.hir().expect_trait_item(trait_c_def_id).kind {
-                TraitItemKind::Const(ref ty, _) => ty.span,
-                _ => bug!("{:?} is not a trait const", trait_const_item),
-            }
+            let TraitItemKind::Const(ty, _) = tcx.hir().expect_trait_item(trait_c_def_id).kind else { bug!("{trait_const_item:?} is not a trait const") };
+            ty.span
         });
 
         infcx.err_ctxt().note_type_err(
@@ -1797,7 +1760,7 @@ pub(super) fn compare_impl_ty<'tcx>(
 ) {
     debug!("compare_impl_type(impl_trait_ref={:?})", impl_trait_ref);
 
-    let _: Result<(), ErrorGuaranteed> = (|| {
+    let _: Result<(), ErrorGuaranteed> = try {
         compare_number_of_generics(tcx, impl_ty, trait_ty, trait_item_span, false)?;
 
         compare_generic_param_kinds(tcx, impl_ty, trait_ty, false)?;
@@ -1805,8 +1768,8 @@ pub(super) fn compare_impl_ty<'tcx>(
         let sp = tcx.def_span(impl_ty.def_id);
         compare_type_predicate_entailment(tcx, impl_ty, sp, trait_ty, impl_trait_ref)?;
 
-        check_type_bounds(tcx, trait_ty, impl_ty, impl_ty_span, impl_trait_ref)
-    })();
+        check_type_bounds(tcx, trait_ty, impl_ty, impl_ty_span, impl_trait_ref)?;
+    };
 }
 
 /// The equivalent of [compare_method_predicate_entailment], but for associated types
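Several hunks in this file replace immediately-invoked closures and early-return chains with `try` blocks; a self-contained sketch of that pattern (nightly-only, using the `try_blocks` feature and a hypothetical `step` helper) looks like this:

#![feature(try_blocks)]

#[derive(Debug)]
struct ErrorGuaranteed;

fn step(ok: bool) -> Result<(), ErrorGuaranteed> {
    if ok { Ok(()) } else { Err(ErrorGuaranteed) }
}

fn check_all() {
    // The overall Result is deliberately discarded: each `step` is assumed to
    // have reported its own error, much like the compare_* helpers above.
    let _: Result<(), ErrorGuaranteed> = try {
        step(true)?;
        step(false)?; // short-circuits here; the remaining steps are skipped
        step(true)?;
    };
}

fn main() {
    check_all();
}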
@@ -1828,8 +1791,7 @@ fn compare_type_predicate_entailment<'tcx>(
     check_region_bounds_on_impl_item(tcx, impl_ty, trait_ty, false)?;
 
     let impl_ty_own_bounds = impl_ty_predicates.instantiate_own(tcx, impl_substs);
-
-    if impl_ty_own_bounds.is_empty() {
+    if impl_ty_own_bounds.len() == 0 {
         // Nothing to check.
         return Ok(());
     }
@@ -1844,9 +1806,11 @@ fn compare_type_predicate_entailment<'tcx>(
     // associated type in the trait are assumed.
     let impl_predicates = tcx.predicates_of(impl_ty_predicates.parent.unwrap());
     let mut hybrid_preds = impl_predicates.instantiate_identity(tcx);
-    hybrid_preds
-        .predicates
-        .extend(trait_ty_predicates.instantiate_own(tcx, trait_to_impl_substs).predicates);
+    hybrid_preds.predicates.extend(
+        trait_ty_predicates
+            .instantiate_own(tcx, trait_to_impl_substs)
+            .map(|(predicate, _)| predicate),
+    );
 
     debug!("compare_type_predicate_entailment: bounds={:?}", hybrid_preds);
 
@@ -1862,9 +1826,7 @@ fn compare_type_predicate_entailment<'tcx>(
 
     debug!("compare_type_predicate_entailment: caller_bounds={:?}", param_env.caller_bounds());
 
-    assert_eq!(impl_ty_own_bounds.predicates.len(), impl_ty_own_bounds.spans.len());
-    for (span, predicate) in std::iter::zip(impl_ty_own_bounds.spans, impl_ty_own_bounds.predicates)
-    {
+    for (predicate, span) in impl_ty_own_bounds {
         let cause = ObligationCause::misc(span, impl_ty_hir_id);
         let predicate = ocx.normalize(&cause, param_env, predicate);
 
index d6e3ddb0a613964238574c6fbc46261d8a00f620..64fd61c1359b599edbd2ae97bb5baae89e6e41b0 100644 (file)
@@ -46,7 +46,7 @@ pub fn check_drop_impl(tcx: TyCtxt<'_>, drop_impl_did: DefId) -> Result<(), Erro
             )
         }
         _ => {
-            // Destructors only work on nominal types.  This was
+            // Destructors only work on nominal types. This was
             // already checked by coherence, but compilation may
             // not have been terminated.
             let span = tcx.def_span(drop_impl_did);
index 17c4d0d482f2ab0b2a72e58c27aad8717f3decff..82030d82f57a0536f90667df5ba79183aeed8f02 100644 (file)
@@ -351,7 +351,7 @@ pub fn check_asm(&self, asm: &hir::InlineAsm<'tcx>, enclosing_id: hir::HirId) {
             }
 
             match *op {
-                hir::InlineAsmOperand::In { reg, ref expr } => {
+                hir::InlineAsmOperand::In { reg, expr } => {
                     self.check_asm_operand_type(
                         idx,
                         reg,
@@ -362,7 +362,7 @@ pub fn check_asm(&self, asm: &hir::InlineAsm<'tcx>, enclosing_id: hir::HirId) {
                         &target_features,
                     );
                 }
-                hir::InlineAsmOperand::Out { reg, late: _, ref expr } => {
+                hir::InlineAsmOperand::Out { reg, late: _, expr } => {
                     if let Some(expr) = expr {
                         self.check_asm_operand_type(
                             idx,
@@ -375,7 +375,7 @@ pub fn check_asm(&self, asm: &hir::InlineAsm<'tcx>, enclosing_id: hir::HirId) {
                         );
                     }
                 }
-                hir::InlineAsmOperand::InOut { reg, late: _, ref expr } => {
+                hir::InlineAsmOperand::InOut { reg, late: _, expr } => {
                     self.check_asm_operand_type(
                         idx,
                         reg,
@@ -386,7 +386,7 @@ pub fn check_asm(&self, asm: &hir::InlineAsm<'tcx>, enclosing_id: hir::HirId) {
                         &target_features,
                     );
                 }
-                hir::InlineAsmOperand::SplitInOut { reg, late: _, ref in_expr, ref out_expr } => {
+                hir::InlineAsmOperand::SplitInOut { reg, late: _, in_expr, out_expr } => {
                     let in_ty = self.check_asm_operand_type(
                         idx,
                         reg,
index 382c3f5294511a40c364060f7b050773bcad737f..14bca34b77bea005d5057b464da32dbeb7606ef4 100644 (file)
 
 - main: the main pass does the lion's share of the work: it
   determines the types of all expressions, resolves
-  methods, checks for most invalid conditions, and so forth.  In
+  methods, checks for most invalid conditions, and so forth. In
   some cases, where a type is unknown, it may create a type or region
   variable and use that as the type of an expression.
 
   In the process of checking, various constraints will be placed on
   these type variables through the subtyping relationships requested
-  through the `demand` module.  The `infer` module is in charge
+  through the `demand` module. The `infer` module is in charge
   of resolving those constraints.
 
 - regionck: after main is complete, the regionck pass goes over all
   types looking for regions and making sure that they did not escape
-  into places where they are not in scope.  This may also influence the
+  into places where they are not in scope. This may also influence the
   final assignments of the various region variables if there is some
   flexibility.
 
 - writeback: writes the final types within a function body, replacing
-  type variables with their final inferred types.  These final types
+  type variables with their final inferred types. These final types
   are written into the `tcx.node_types` table, which should *never* contain
   any reference to a type variable.
 
@@ -38,8 +38,8 @@
 
 While type checking a function, the intermediate types for the
 expressions, blocks, and so forth contained within the function are
-stored in `fcx.node_types` and `fcx.node_substs`.  These types
-may contain unresolved type variables.  After type checking is
+stored in `fcx.node_types` and `fcx.node_substs`. These types
+may contain unresolved type variables. After type checking is
 complete, the functions in the writeback module are used to take the
 types from this table, resolve them, and then write them into their
 permanent home in the type context `tcx`.
 The types of top-level items, which never contain unbound type
 variables, are stored directly into the `tcx` typeck_results.
 
-N.B., a type variable is not the same thing as a type parameter.  A
+N.B., a type variable is not the same thing as a type parameter. A
 type variable is an instance of a type parameter. That is,
 given a generic function `fn foo<T>(t: T)`, while checking the
 function `foo`, the type `ty_param(0)` refers to the type `T`, which
 is treated in abstract. However, when `foo()` is called, `T` will be
-substituted for a fresh type variable `N`.  This variable will
+substituted for a fresh type variable `N`. This variable will
 eventually be resolved to some concrete type (which might itself be
 a type parameter).
 
@@ -441,7 +441,7 @@ fn suggestion_signature(assoc: &ty::AssocItem, tcx: TyCtxt<'_>) -> String {
         ty::AssocKind::Fn => {
             // We skip the binder here because the binder would deanonymize all
             // late-bound regions, and we don't want method signatures to show up
-            // `as for<'r> fn(&'r MyType)`.  Pretty-printing handles late-bound
+            // `as for<'r> fn(&'r MyType)`. Pretty-printing handles late-bound
             // regions just fine, showing `fn(&MyType)`.
             fn_sig_suggestion(
                 tcx,
index b315ebad4686c37d4bac86575a991a744e2c27f0..b28bfb1d54b6ca7d1bc256a16b564ffe954a720f 100644 (file)
@@ -180,7 +180,7 @@ fn resolve_arm<'tcx>(visitor: &mut RegionResolutionVisitor<'tcx>, arm: &'tcx hir
 
     visitor.terminating_scopes.insert(arm.body.hir_id.local_id);
 
-    if let Some(hir::Guard::If(ref expr)) = arm.guard {
+    if let Some(hir::Guard::If(expr)) = arm.guard {
         visitor.terminating_scopes.insert(expr.hir_id.local_id);
     }
 
@@ -242,8 +242,8 @@ fn resolve_expr<'tcx>(visitor: &mut RegionResolutionVisitor<'tcx>, expr: &'tcx h
             // This ensures fixed size stacks.
             hir::ExprKind::Binary(
                 source_map::Spanned { node: hir::BinOpKind::And | hir::BinOpKind::Or, .. },
-                ref l,
-                ref r,
+                l,
+                r,
             ) => {
                 // expr is a short circuiting operator (|| or &&). As its
                 // functionality can't be overridden by traits, it always
@@ -288,20 +288,20 @@ fn resolve_expr<'tcx>(visitor: &mut RegionResolutionVisitor<'tcx>, expr: &'tcx h
                     terminating(r.hir_id.local_id);
                 }
             }
-            hir::ExprKind::If(_, ref then, Some(ref otherwise)) => {
+            hir::ExprKind::If(_, then, Some(otherwise)) => {
                 terminating(then.hir_id.local_id);
                 terminating(otherwise.hir_id.local_id);
             }
 
-            hir::ExprKind::If(_, ref then, None) => {
+            hir::ExprKind::If(_, then, None) => {
                 terminating(then.hir_id.local_id);
             }
 
-            hir::ExprKind::Loop(ref body, _, _, _) => {
+            hir::ExprKind::Loop(body, _, _, _) => {
                 terminating(body.hir_id.local_id);
             }
 
-            hir::ExprKind::DropTemps(ref expr) => {
+            hir::ExprKind::DropTemps(expr) => {
                 // `DropTemps(expr)` does not denote a conditional scope.
                 // Rather, we want to achieve the same behavior as `{ let _t = expr; _t }`.
                 terminating(expr.hir_id.local_id);
@@ -325,7 +325,7 @@ fn resolve_expr<'tcx>(visitor: &mut RegionResolutionVisitor<'tcx>, expr: &'tcx h
                 // The idea is that call.callee_id represents *the time when
                 // the invoked function is actually running* and call.id
                 // represents *the time to prepare the arguments and make the
-                // call*.  See the section "Borrows in Calls" borrowck/README.md
+                // call*. See the section "Borrows in Calls" borrowck/README.md
                 // for an extended explanation of why this distinction is
                 // important.
                 //
@@ -396,7 +396,7 @@ fn resolve_expr<'tcx>(visitor: &mut RegionResolutionVisitor<'tcx>, expr: &'tcx h
             let body = visitor.tcx.hir().body(body);
             visitor.visit_body(body);
         }
-        hir::ExprKind::AssignOp(_, ref left_expr, ref right_expr) => {
+        hir::ExprKind::AssignOp(_, left_expr, right_expr) => {
             debug!(
                 "resolve_expr - enabling pessimistic_yield, was previously {}",
                 prev_pessimistic
@@ -447,7 +447,7 @@ fn resolve_expr<'tcx>(visitor: &mut RegionResolutionVisitor<'tcx>, expr: &'tcx h
             }
         }
 
-        hir::ExprKind::If(ref cond, ref then, Some(ref otherwise)) => {
+        hir::ExprKind::If(cond, then, Some(otherwise)) => {
             let expr_cx = visitor.cx;
             visitor.enter_scope(Scope { id: then.hir_id.local_id, data: ScopeData::IfThen });
             visitor.cx.var_parent = visitor.cx.parent;
@@ -457,7 +457,7 @@ fn resolve_expr<'tcx>(visitor: &mut RegionResolutionVisitor<'tcx>, expr: &'tcx h
             visitor.visit_expr(otherwise);
         }
 
-        hir::ExprKind::If(ref cond, ref then, None) => {
+        hir::ExprKind::If(cond, then, None) => {
             let expr_cx = visitor.cx;
             visitor.enter_scope(Scope { id: then.hir_id.local_id, data: ScopeData::IfThen });
             visitor.cx.var_parent = visitor.cx.parent;
@@ -641,21 +641,21 @@ fn is_binding_pat(pat: &hir::Pat<'_>) -> bool {
         match pat.kind {
             PatKind::Binding(hir::BindingAnnotation(hir::ByRef::Yes, _), ..) => true,
 
-            PatKind::Struct(_, ref field_pats, _) => {
+            PatKind::Struct(_, field_pats, _) => {
                 field_pats.iter().any(|fp| is_binding_pat(&fp.pat))
             }
 
-            PatKind::Slice(ref pats1, ref pats2, ref pats3) => {
+            PatKind::Slice(pats1, pats2, pats3) => {
                 pats1.iter().any(|p| is_binding_pat(&p))
                     || pats2.iter().any(|p| is_binding_pat(&p))
                     || pats3.iter().any(|p| is_binding_pat(&p))
             }
 
-            PatKind::Or(ref subpats)
-            | PatKind::TupleStruct(_, ref subpats, _)
-            | PatKind::Tuple(ref subpats, _) => subpats.iter().any(|p| is_binding_pat(&p)),
+            PatKind::Or(subpats)
+            | PatKind::TupleStruct(_, subpats, _)
+            | PatKind::Tuple(subpats, _) => subpats.iter().any(|p| is_binding_pat(&p)),
 
-            PatKind::Box(ref subpat) => is_binding_pat(&subpat),
+            PatKind::Box(subpat) => is_binding_pat(&subpat),
 
             PatKind::Ref(_, _)
             | PatKind::Binding(hir::BindingAnnotation(hir::ByRef::No, _), ..)
@@ -704,11 +704,11 @@ fn record_rvalue_scope_if_borrow_expr<'tcx>(
                     record_rvalue_scope_if_borrow_expr(visitor, &subexpr, blk_id);
                 }
             }
-            hir::ExprKind::Cast(ref subexpr, _) => {
+            hir::ExprKind::Cast(subexpr, _) => {
                 record_rvalue_scope_if_borrow_expr(visitor, &subexpr, blk_id)
             }
-            hir::ExprKind::Block(ref block, _) => {
-                if let Some(ref subexpr) = block.expr {
+            hir::ExprKind::Block(block, _) => {
+                if let Some(subexpr) = block.expr {
                     record_rvalue_scope_if_borrow_expr(visitor, &subexpr, blk_id);
                 }
             }
index 912e0ec560b49e9768e482ec31ffa98a8740ecd3..11237afe8a0e36f50b7153370acbe3c20e51b81b 100644 (file)
@@ -32,7 +32,6 @@
 };
 
 use std::cell::LazyCell;
-use std::iter;
 use std::ops::{ControlFlow, Deref};
 
 pub(super) struct WfCheckingCtxt<'a, 'tcx> {
@@ -179,7 +178,7 @@ fn check_item<'tcx>(tcx: TyCtxt<'tcx>, item: &'tcx hir::Item<'tcx>) {
         //
         // won't be allowed unless there's an *explicit* implementation of `Send`
         // for `T`
-        hir::ItemKind::Impl(ref impl_) => {
+        hir::ItemKind::Impl(impl_) => {
             let is_auto = tcx
                 .impl_trait_ref(def_id)
                 .map_or(false, |trait_ref| tcx.trait_is_auto(trait_ref.skip_binder().def_id));
@@ -225,15 +224,15 @@ fn check_item<'tcx>(tcx: TyCtxt<'tcx>, item: &'tcx hir::Item<'tcx>) {
         hir::ItemKind::Const(ty, ..) => {
             check_item_type(tcx, def_id, ty.span, false);
         }
-        hir::ItemKind::Struct(_, ref ast_generics) => {
+        hir::ItemKind::Struct(_, ast_generics) => {
             check_type_defn(tcx, item, false);
             check_variances_for_type_defn(tcx, item, ast_generics);
         }
-        hir::ItemKind::Union(_, ref ast_generics) => {
+        hir::ItemKind::Union(_, ast_generics) => {
             check_type_defn(tcx, item, true);
             check_variances_for_type_defn(tcx, item, ast_generics);
         }
-        hir::ItemKind::Enum(_, ref ast_generics) => {
+        hir::ItemKind::Enum(_, ast_generics) => {
             check_type_defn(tcx, item, true);
             check_variances_for_type_defn(tcx, item, ast_generics);
         }
@@ -1248,8 +1247,8 @@ fn check_impl<'tcx>(
     constness: hir::Constness,
 ) {
     enter_wf_checking_ctxt(tcx, item.span, item.owner_id.def_id, |wfcx| {
-        match *ast_trait_ref {
-            Some(ref ast_trait_ref) => {
+        match ast_trait_ref {
+            Some(ast_trait_ref) => {
                 // `#[rustc_reservation_impl]` impls are not real impls and
                 // therefore don't need to be WF (the trait's `Self: Trait` predicate
                 // won't hold).
@@ -1310,7 +1309,7 @@ fn check_where_clauses<'tcx>(wfcx: &WfCheckingCtxt<'_, 'tcx>, span: Span, def_id
     let infcx = wfcx.infcx;
     let tcx = wfcx.tcx();
 
-    let predicates = tcx.bound_predicates_of(def_id.to_def_id());
+    let predicates = tcx.predicates_of(def_id.to_def_id());
     let generics = tcx.generics_of(def_id);
 
     let is_our_default = |def: &ty::GenericParamDef| match def.kind {
@@ -1411,7 +1410,6 @@ fn check_where_clauses<'tcx>(wfcx: &WfCheckingCtxt<'_, 'tcx>, span: Span, def_id
 
     // Now we build the substituted predicates.
     let default_obligations = predicates
-        .0
         .predicates
         .iter()
         .flat_map(|&(pred, sp)| {
@@ -1430,7 +1428,7 @@ fn visit_ty(&mut self, t: Ty<'tcx>) -> ControlFlow<Self::BreakTy> {
                 }
 
                 fn visit_region(&mut self, _: ty::Region<'tcx>) -> ControlFlow<Self::BreakTy> {
-                    ControlFlow::BREAK
+                    ControlFlow::Break(())
                 }
 
                 fn visit_const(&mut self, c: ty::Const<'tcx>) -> ControlFlow<Self::BreakTy> {
@@ -1442,13 +1440,13 @@ fn visit_const(&mut self, c: ty::Const<'tcx>) -> ControlFlow<Self::BreakTy> {
             }
             let mut param_count = CountParams::default();
             let has_region = pred.visit_with(&mut param_count).is_break();
-            let substituted_pred = predicates.rebind(pred).subst(tcx, substs);
+            let substituted_pred = ty::EarlyBinder(pred).subst(tcx, substs);
             // Don't check non-defaulted params, dependent defaults (including lifetimes)
             // or preds with multiple params.
             if substituted_pred.has_non_region_param() || param_count.params.len() > 1 || has_region
             {
                 None
-            } else if predicates.0.predicates.iter().any(|&(p, _)| p == substituted_pred) {
+            } else if predicates.predicates.iter().any(|&(p, _)| p == substituted_pred) {
                 // Avoid duplication of predicates that contain no parameters, for example.
                 None
             } else {
@@ -1474,22 +1472,21 @@ fn visit_const(&mut self, c: ty::Const<'tcx>) -> ControlFlow<Self::BreakTy> {
             traits::Obligation::new(tcx, cause, wfcx.param_env, pred)
         });
 
-    let predicates = predicates.0.instantiate_identity(tcx);
+    let predicates = predicates.instantiate_identity(tcx);
 
     let predicates = wfcx.normalize(span, None, predicates);
 
     debug!(?predicates.predicates);
     assert_eq!(predicates.predicates.len(), predicates.spans.len());
-    let wf_obligations =
-        iter::zip(&predicates.predicates, &predicates.spans).flat_map(|(&p, &sp)| {
-            traits::wf::predicate_obligations(
-                infcx,
-                wfcx.param_env.without_const(),
-                wfcx.body_id,
-                p,
-                sp,
-            )
-        });
+    let wf_obligations = predicates.into_iter().flat_map(|(p, sp)| {
+        traits::wf::predicate_obligations(
+            infcx,
+            wfcx.param_env.without_const(),
+            wfcx.body_id,
+            p,
+            sp,
+        )
+    });
 
     let obligations: Vec<_> = wf_obligations.chain(default_obligations).collect();
     wfcx.register_obligations(obligations);
index 5749b04783ce4ff8c7608e83771a3754471fffe4..ebb78213a63a13dd052f0b3b4e7c48f5c5acc451 100644 (file)
@@ -50,7 +50,7 @@ pub fn check_crate(tcx: TyCtxt<'_>) {
 fn unused_crates_lint(tcx: TyCtxt<'_>) {
     let lint = lint::builtin::UNUSED_EXTERN_CRATES;
 
-    // Collect first the crates that are completely unused.  These we
+    // Collect first the crates that are completely unused. These we
     // can always suggest removing (no matter which edition we are
     // in).
     let unused_extern_crates: FxHashMap<LocalDefId, Span> = tcx
index 5bdd18fcd637c7eb9b214e588f5a5e053081832d..28c04087868a7d35e4677488297a5ebe865eed6f 100644 (file)
@@ -7,13 +7,15 @@
 use rustc_hir::def_id::{DefId, LocalDefId};
 use rustc_hir::lang_items::LangItem;
 use rustc_hir::ItemKind;
-use rustc_infer::infer;
 use rustc_infer::infer::outlives::env::OutlivesEnvironment;
 use rustc_infer::infer::TyCtxtInferExt;
+use rustc_infer::infer::{self, RegionResolutionError};
 use rustc_middle::ty::adjustment::CoerceUnsizedInfo;
 use rustc_middle::ty::{self, suggest_constraining_type_params, Ty, TyCtxt, TypeVisitable};
 use rustc_trait_selection::traits::error_reporting::TypeErrCtxtExt;
-use rustc_trait_selection::traits::misc::{can_type_implement_copy, CopyImplementationError};
+use rustc_trait_selection::traits::misc::{
+    type_allowed_to_implement_copy, CopyImplementationError, InfringingFieldsReason,
+};
 use rustc_trait_selection::traits::predicate_for_trait_def;
 use rustc_trait_selection::traits::{self, ObligationCause};
 use std::collections::BTreeMap;
@@ -54,12 +56,9 @@ fn visit_implementation_of_drop(tcx: TyCtxt<'_>, impl_did: LocalDefId) {
         _ => {}
     }
 
-    let sp = match tcx.hir().expect_item(impl_did).kind {
-        ItemKind::Impl(ref impl_) => impl_.self_ty.span,
-        _ => bug!("expected Drop impl item"),
-    };
+    let ItemKind::Impl(impl_) = tcx.hir().expect_item(impl_did).kind else { bug!("expected Drop impl item") };
 
-    tcx.sess.emit_err(DropImplOnWrongItem { span: sp });
+    tcx.sess.emit_err(DropImplOnWrongItem { span: impl_.self_ty.span });
 }
 
 fn visit_implementation_of_copy(tcx: TyCtxt<'_>, impl_did: LocalDefId) {
@@ -82,7 +81,7 @@ fn visit_implementation_of_copy(tcx: TyCtxt<'_>, impl_did: LocalDefId) {
     };
 
     let cause = traits::ObligationCause::misc(span, impl_hir_id);
-    match can_type_implement_copy(tcx, param_env, self_type, cause) {
+    match type_allowed_to_implement_copy(tcx, param_env, self_type, cause) {
         Ok(()) => {}
         Err(CopyImplementationError::InfrigingFields(fields)) => {
             let mut err = struct_span_err!(
@@ -97,50 +96,70 @@ fn visit_implementation_of_copy(tcx: TyCtxt<'_>, impl_did: LocalDefId) {
             let mut errors: BTreeMap<_, Vec<_>> = Default::default();
             let mut bounds = vec![];
 
-            for (field, ty) in fields {
+            for (field, ty, reason) in fields {
                 let field_span = tcx.def_span(field.did);
-                let field_ty_span = match tcx.hir().get_if_local(field.did) {
-                    Some(hir::Node::Field(field_def)) => field_def.ty.span,
-                    _ => field_span,
-                };
                 err.span_label(field_span, "this field does not implement `Copy`");
-                // Spin up a new FulfillmentContext, so we can get the _precise_ reason
-                // why this field does not implement Copy. This is useful because sometimes
-                // it is not immediately clear why Copy is not implemented for a field, since
-                // all we point at is the field itself.
-                let infcx = tcx.infer_ctxt().ignoring_regions().build();
-                for error in traits::fully_solve_bound(
-                    &infcx,
-                    traits::ObligationCause::dummy_with_span(field_ty_span),
-                    param_env,
-                    ty,
-                    tcx.require_lang_item(LangItem::Copy, Some(span)),
-                ) {
-                    let error_predicate = error.obligation.predicate;
-                    // Only note if it's not the root obligation, otherwise it's trivial and
-                    // should be self-explanatory (i.e. a field literally doesn't implement Copy).
-
-                    // FIXME: This error could be more descriptive, especially if the error_predicate
-                    // contains a foreign type or if it's a deeply nested type...
-                    if error_predicate != error.root_obligation.predicate {
-                        errors
-                            .entry((ty.to_string(), error_predicate.to_string()))
-                            .or_default()
-                            .push(error.obligation.cause.span);
+
+                match reason {
+                    InfringingFieldsReason::Fulfill(fulfillment_errors) => {
+                        for error in fulfillment_errors {
+                            let error_predicate = error.obligation.predicate;
+                            // Only note if it's not the root obligation, otherwise it's trivial and
+                            // should be self-explanatory (i.e. a field literally doesn't implement Copy).
+
+                            // FIXME: This error could be more descriptive, especially if the error_predicate
+                            // contains a foreign type or if it's a deeply nested type...
+                            if error_predicate != error.root_obligation.predicate {
+                                errors
+                                    .entry((ty.to_string(), error_predicate.to_string()))
+                                    .or_default()
+                                    .push(error.obligation.cause.span);
+                            }
+                            if let ty::PredicateKind::Clause(ty::Clause::Trait(
+                                ty::TraitPredicate {
+                                    trait_ref,
+                                    polarity: ty::ImplPolarity::Positive,
+                                    ..
+                                },
+                            )) = error_predicate.kind().skip_binder()
+                            {
+                                let ty = trait_ref.self_ty();
+                                if let ty::Param(_) = ty.kind() {
+                                    bounds.push((
+                                        format!("{ty}"),
+                                        trait_ref.print_only_trait_path().to_string(),
+                                        Some(trait_ref.def_id),
+                                    ));
+                                }
+                            }
+                        }
                     }
-                    if let ty::PredicateKind::Clause(ty::Clause::Trait(ty::TraitPredicate {
-                        trait_ref,
-                        polarity: ty::ImplPolarity::Positive,
-                        ..
-                    })) = error_predicate.kind().skip_binder()
-                    {
-                        let ty = trait_ref.self_ty();
-                        if let ty::Param(_) = ty.kind() {
-                            bounds.push((
-                                format!("{ty}"),
-                                trait_ref.print_only_trait_path().to_string(),
-                                Some(trait_ref.def_id),
-                            ));
+                    InfringingFieldsReason::Regions(region_errors) => {
+                        for error in region_errors {
+                            let ty = ty.to_string();
+                            match error {
+                                RegionResolutionError::ConcreteFailure(origin, a, b) => {
+                                    let predicate = format!("{b}: {a}");
+                                    errors
+                                        .entry((ty.clone(), predicate.clone()))
+                                        .or_default()
+                                        .push(origin.span());
+                                    if let ty::RegionKind::ReEarlyBound(ebr) = *b && ebr.has_name() {
+                                        bounds.push((b.to_string(), a.to_string(), None));
+                                    }
+                                }
+                                RegionResolutionError::GenericBoundFailure(origin, a, b) => {
+                                    let predicate = format!("{a}: {b}");
+                                    errors
+                                        .entry((ty.clone(), predicate.clone()))
+                                        .or_default()
+                                        .push(origin.span());
+                                    if let infer::region_constraints::GenericKind::Param(_) = a {
+                                        bounds.push((a.to_string(), b.to_string(), None));
+                                    }
+                                }
+                                _ => continue,
+                            }
                         }
                     }
                 }
@@ -438,7 +457,7 @@ pub fn coerce_unsized_info<'tcx>(tcx: TyCtxt<'tcx>, impl_did: DefId) -> CoerceUn
             // when this coercion occurs, we would be changing the
             // field `ptr` from a thin pointer of type `*mut [i32;
             // 3]` to a fat pointer of type `*mut [i32]` (with
-            // extra data `3`).  **The purpose of this check is to
+            // extra data `3`). **The purpose of this check is to
             // make sure that we know how to do this conversion.**
             //
             // To check if this impl is legal, we would walk down
@@ -505,12 +524,11 @@ pub fn coerce_unsized_info<'tcx>(tcx: TyCtxt<'tcx>, impl_did: DefId) -> CoerceUn
                 return err_info;
             } else if diff_fields.len() > 1 {
                 let item = tcx.hir().expect_item(impl_did);
-                let span =
-                    if let ItemKind::Impl(hir::Impl { of_trait: Some(ref t), .. }) = item.kind {
-                        t.path.span
-                    } else {
-                        tcx.def_span(impl_did)
-                    };
+                let span = if let ItemKind::Impl(hir::Impl { of_trait: Some(t), .. }) = &item.kind {
+                    t.path.span
+                } else {
+                    tcx.def_span(impl_did)
+                };
 
                 struct_span_err!(
                     tcx.sess,
index 6469f389bf91b313141ce01452fe74cb68b72f98..dfb9824094346949c74b14f117fdd45dbaf6b375 100644 (file)
@@ -182,7 +182,7 @@ fn check_item(&mut self, id: hir::ItemId) {
         }
 
         let item = self.tcx.hir().item(id);
-        let hir::ItemKind::Impl(hir::Impl { of_trait: None, self_ty: ty, ref items, .. }) = item.kind else {
+        let hir::ItemKind::Impl(hir::Impl { of_trait: None, self_ty: ty, items, .. }) = item.kind else {
             return;
         };
 
index 2e9cd2fca01c139d366a0f1f43a06d1179c30f31..d3b5778ba3b7d61130b41fde472f4d08deec7689 100644 (file)
@@ -171,7 +171,7 @@ fn check_object_overlap<'tcx>(
         for component_def_id in component_def_ids {
             if !tcx.is_object_safe(component_def_id) {
                 // Without the 'object_safe_for_dispatch' feature this is an error
-                // which will be reported by wfcheck.  Ignore it here.
+                // which will be reported by wfcheck. Ignore it here.
                 // This is tested by `coherence-impl-trait-for-trait-object-safe.rs`.
                 // With the feature enabled, the trait is not implemented automatically,
                 // so this is valid.
index 0aadc9f311b033e4ec3983d86fd8b072c4b85f2b..95b03eb8263fda2031504dd4ef6a22319b31b23a 100644 (file)
@@ -40,7 +40,7 @@ fn do_orphan_check_impl<'tcx>(
     let trait_def_id = trait_ref.def_id;
 
     let item = tcx.hir().expect_item(def_id);
-    let hir::ItemKind::Impl(ref impl_) = item.kind else {
+    let hir::ItemKind::Impl(impl_) = item.kind else {
         bug!("{:?} is not an impl: {:?}", def_id, item);
     };
     let sp = tcx.def_span(def_id);
@@ -416,13 +416,13 @@ fn visit_ty(&mut self, t: Ty<'tcx>) -> ControlFlow<Self::BreakTy> {
             if t != self.self_ty_root {
                 for impl_def_id in tcx.non_blanket_impls_for_ty(self.trait_def_id, t) {
                     match tcx.impl_polarity(impl_def_id) {
-                        ImplPolarity::Negative => return ControlFlow::BREAK,
+                        ImplPolarity::Negative => return ControlFlow::Break(()),
                         ImplPolarity::Reservation => {}
                         // FIXME(@lcnr): That's probably not good enough, idk
                         //
                         // We might just want to take the rustdoc code and somehow avoid
                         // explicit impls for `Self`.
-                        ImplPolarity::Positive => return ControlFlow::CONTINUE,
+                        ImplPolarity::Positive => return ControlFlow::Continue(()),
                     }
                 }
             }
@@ -440,7 +440,7 @@ fn visit_ty(&mut self, t: Ty<'tcx>) -> ControlFlow<Self::BreakTy> {
                         }
                     }
 
-                    ControlFlow::CONTINUE
+                    ControlFlow::Continue(())
                 }
                 _ => t.super_visit_with(self),
             }
index a485768e37b832c551740a167bc64c20efb0df4a..fe6119dce873573baf819591798c33e3ecc19b92 100644 (file)
@@ -11,7 +11,7 @@
 pub(super) fn check_item(tcx: TyCtxt<'_>, def_id: LocalDefId) {
     debug_assert!(matches!(tcx.def_kind(def_id), DefKind::Impl));
     let item = tcx.hir().expect_item(def_id);
-    let hir::ItemKind::Impl(ref impl_) = item.kind else { bug!() };
+    let hir::ItemKind::Impl(impl_) = item.kind else { bug!() };
 
     if let Some(trait_ref) = tcx.impl_trait_ref(item.owner_id) {
         let trait_ref = trait_ref.subst_identity();
index 35f47dfc1a5e289b038e15041e93ff421ddd5149..c17778ce8bc090e934b82b8399a5d05aa501ac22 100644 (file)
@@ -76,6 +76,7 @@ pub fn provide(providers: &mut Providers) {
         is_foreign_item,
         generator_kind,
         collect_mod_item_types,
+        is_type_alias_impl_trait,
         ..*providers
     };
 }
@@ -560,7 +561,7 @@ fn convert_item(tcx: TyCtxt<'_>, item_id: hir::ItemId) {
     debug!("convert: item {} with id {}", it.ident, it.hir_id());
     let def_id = item_id.owner_id.def_id;
 
-    match it.kind {
+    match &it.kind {
         // These don't define types.
         hir::ItemKind::ExternCrate(_)
         | hir::ItemKind::Use(..)
@@ -568,7 +569,7 @@ fn convert_item(tcx: TyCtxt<'_>, item_id: hir::ItemId) {
         | hir::ItemKind::Mod(_)
         | hir::ItemKind::GlobalAsm(_) => {}
         hir::ItemKind::ForeignMod { items, .. } => {
-            for item in items {
+            for item in *items {
                 let item = tcx.hir().foreign_item(item.id);
                 tcx.ensure().generics_of(item.owner_id);
                 tcx.ensure().type_of(item.owner_id);
@@ -618,7 +619,7 @@ fn convert_item(tcx: TyCtxt<'_>, item_id: hir::ItemId) {
             tcx.at(it.span).super_predicates_of(def_id);
             tcx.ensure().predicates_of(def_id);
         }
-        hir::ItemKind::Struct(ref struct_def, _) | hir::ItemKind::Union(ref struct_def, _) => {
+        hir::ItemKind::Struct(struct_def, _) | hir::ItemKind::Union(struct_def, _) => {
             tcx.ensure().generics_of(def_id);
             tcx.ensure().type_of(def_id);
             tcx.ensure().predicates_of(def_id);
@@ -853,14 +854,14 @@ fn adt_def(tcx: TyCtxt<'_>, def_id: DefId) -> ty::AdtDef<'_> {
     };
 
     let repr = tcx.repr_options_of_def(def_id.to_def_id());
-    let (kind, variants) = match item.kind {
-        ItemKind::Enum(ref def, _) => {
+    let (kind, variants) = match &item.kind {
+        ItemKind::Enum(def, _) => {
             let mut distance_from_explicit = 0;
             let variants = def
                 .variants
                 .iter()
                 .map(|v| {
-                    let discr = if let Some(ref e) = v.disr_expr {
+                    let discr = if let Some(e) = &v.disr_expr {
                         distance_from_explicit = 0;
                         ty::VariantDiscr::Explicit(e.def_id.to_def_id())
                     } else {
@@ -882,7 +883,7 @@ fn adt_def(tcx: TyCtxt<'_>, def_id: DefId) -> ty::AdtDef<'_> {
 
             (AdtKind::Enum, variants)
         }
-        ItemKind::Struct(ref def, _) | ItemKind::Union(ref def, _) => {
+        ItemKind::Struct(def, _) | ItemKind::Union(def, _) => {
             let adt_kind = match item.kind {
                 ItemKind::Struct(..) => AdtKind::Struct,
                 _ => AdtKind::Union,
@@ -1342,21 +1343,19 @@ fn suggest_impl_trait<'tcx>(
 fn impl_trait_ref(tcx: TyCtxt<'_>, def_id: DefId) -> Option<ty::EarlyBinder<ty::TraitRef<'_>>> {
     let icx = ItemCtxt::new(tcx, def_id);
     let item = tcx.hir().expect_item(def_id.expect_local());
-    match item.kind {
-        hir::ItemKind::Impl(ref impl_) => impl_
-            .of_trait
-            .as_ref()
-            .map(|ast_trait_ref| {
-                let selfty = tcx.type_of(def_id);
-                icx.astconv().instantiate_mono_trait_ref(
-                    ast_trait_ref,
-                    selfty,
-                    check_impl_constness(tcx, impl_.constness, ast_trait_ref),
-                )
-            })
-            .map(ty::EarlyBinder),
-        _ => bug!(),
-    }
+    let hir::ItemKind::Impl(impl_) = item.kind else { bug!() };
+    impl_
+        .of_trait
+        .as_ref()
+        .map(|ast_trait_ref| {
+            let selfty = tcx.type_of(def_id);
+            icx.astconv().instantiate_mono_trait_ref(
+                ast_trait_ref,
+                selfty,
+                check_impl_constness(tcx, impl_.constness, ast_trait_ref),
+            )
+        })
+        .map(ty::EarlyBinder)
 }
 
 fn check_impl_constness(
@@ -1511,7 +1510,7 @@ fn compute_sig_of_foreign_fn_decl<'tcx>(
         for (input, ty) in iter::zip(decl.inputs, fty.inputs().skip_binder()) {
             check(input, *ty)
         }
-        if let hir::FnRetTy::Return(ref ty) = decl.output {
+        if let hir::FnRetTy::Return(ty) = decl.output {
             check(ty, fty.output().skip_binder())
         }
     }
@@ -1537,3 +1536,13 @@ fn generator_kind(tcx: TyCtxt<'_>, def_id: DefId) -> Option<hir::GeneratorKind>
         _ => bug!("generator_kind applied to non-local def-id {:?}", def_id),
     }
 }
+
+fn is_type_alias_impl_trait<'tcx>(tcx: TyCtxt<'tcx>, def_id: DefId) -> bool {
+    match tcx.hir().get_if_local(def_id) {
+        Some(Node::Item(hir::Item { kind: hir::ItemKind::OpaqueTy(opaque), .. })) => {
+            matches!(opaque.origin, hir::OpaqueTyOrigin::TyAlias)
+        }
+        Some(_) => bug!("tried getting opaque_ty_origin for non-opaque: {:?}", def_id),
+        _ => bug!("tried getting opaque_ty_origin for non-local def-id {:?}", def_id),
+    }
+}
index 9a5f447c260f54d8a835ac20fe791f244eb1624e..014ee9fcc207b425b06bac78729f606d2fbc6ebd 100644 (file)
@@ -110,12 +110,12 @@ pub(super) fn generics_of(tcx: TyCtxt<'_>, def_id: DefId) -> ty::Generics {
                     // expressions' count (i.e. `N` in `[x; N]`), and explicit
                     // `enum` discriminants (i.e. `D` in `enum Foo { Bar = D }`),
                     // as they shouldn't be able to cause query cycle errors.
-                    Node::Expr(&Expr { kind: ExprKind::Repeat(_, ref constant), .. })
+                    Node::Expr(Expr { kind: ExprKind::Repeat(_, constant), .. })
                         if constant.hir_id() == hir_id =>
                     {
                         Some(parent_def_id.to_def_id())
                     }
-                    Node::Variant(Variant { disr_expr: Some(ref constant), .. })
+                    Node::Variant(Variant { disr_expr: Some(constant), .. })
                         if constant.hir_id == hir_id =>
                     {
                         Some(parent_def_id.to_def_id())
@@ -259,7 +259,7 @@ enum Defaults {
 
     params.extend(ast_generics.params.iter().filter_map(|param| match param.kind {
         GenericParamKind::Lifetime { .. } => None,
-        GenericParamKind::Type { ref default, synthetic, .. } => {
+        GenericParamKind::Type { default, synthetic, .. } => {
             if default.is_some() {
                 match allow_defaults {
                     Defaults::Allowed => {}
@@ -426,26 +426,22 @@ fn has_late_bound_regions<'tcx>(
     }
 
     match node {
-        Node::TraitItem(item) => match item.kind {
-            hir::TraitItemKind::Fn(ref sig, _) => {
-                has_late_bound_regions(tcx, &item.generics, sig.decl)
-            }
+        Node::TraitItem(item) => match &item.kind {
+            hir::TraitItemKind::Fn(sig, _) => has_late_bound_regions(tcx, &item.generics, sig.decl),
             _ => None,
         },
-        Node::ImplItem(item) => match item.kind {
-            hir::ImplItemKind::Fn(ref sig, _) => {
-                has_late_bound_regions(tcx, &item.generics, sig.decl)
-            }
+        Node::ImplItem(item) => match &item.kind {
+            hir::ImplItemKind::Fn(sig, _) => has_late_bound_regions(tcx, &item.generics, sig.decl),
             _ => None,
         },
         Node::ForeignItem(item) => match item.kind {
-            hir::ForeignItemKind::Fn(fn_decl, _, ref generics) => {
+            hir::ForeignItemKind::Fn(fn_decl, _, generics) => {
                 has_late_bound_regions(tcx, generics, fn_decl)
             }
             _ => None,
         },
-        Node::Item(item) => match item.kind {
-            hir::ItemKind::Fn(ref sig, .., ref generics, _) => {
+        Node::Item(item) => match &item.kind {
+            hir::ItemKind::Fn(sig, .., generics, _) => {
                 has_late_bound_regions(tcx, generics, sig.decl)
             }
             _ => None,
index 62eef710ba48f0542079996933b500f5b5643304..8d479f1c3e335f37e6b6d8dab2d5fdf083055d8a 100644 (file)
@@ -99,12 +99,16 @@ pub(super) fn explicit_item_bounds(
     }
 }
 
-pub(super) fn item_bounds(tcx: TyCtxt<'_>, def_id: DefId) -> &'_ ty::List<ty::Predicate<'_>> {
-    tcx.mk_predicates(
+pub(super) fn item_bounds(
+    tcx: TyCtxt<'_>,
+    def_id: DefId,
+) -> ty::EarlyBinder<&'_ ty::List<ty::Predicate<'_>>> {
+    let bounds = tcx.mk_predicates(
         util::elaborate_predicates(
             tcx,
             tcx.explicit_item_bounds(def_id).iter().map(|&(bound, _span)| bound),
         )
         .map(|obligation| obligation.predicate),
-    )
+    );
+    ty::EarlyBinder(bounds)
 }
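
The hunk above changes `item_bounds` to return its predicate list wrapped in `ty::EarlyBinder`, signalling that the bounds still mention the item's own generic parameters and must be substituted before use. A toy sketch of that general pattern; the `EarlyBinder` below is a hypothetical stand-in newtype, not rustc's type:

```rust
// Stand-in for rustc's `ty::EarlyBinder`: a value that still refers to the
// item's generic parameters and must be instantiated before it is used.
struct EarlyBinder<T>(T);

impl<T> EarlyBinder<T> {
    // Hypothetical substitution step; the real compiler replaces generic
    // parameters with concrete arguments here.
    fn subst(self) -> T {
        self.0
    }
}

// Before: callers received the raw bounds and had to remember the caveat.
fn item_bounds_raw() -> Vec<&'static str> {
    vec!["Self: Sized", "Self::Item: Copy"]
}

// After: the binder makes "substitute before use" part of the signature.
fn item_bounds() -> EarlyBinder<Vec<&'static str>> {
    EarlyBinder(item_bounds_raw())
}

fn main() {
    let bounds = item_bounds().subst();
    assert_eq!(bounds.len(), 2);
}
```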
index 35f10dc873745f59d4355f1ebe675890f468f0b7..359122d4e16abd81c0f79463bcdad18cc21c4230 100644 (file)
@@ -1,9 +1,9 @@
 //! Resolution of early vs late bound lifetimes.
 //!
-//! Name resolution for lifetimes is performed on the AST and embedded into HIR.  From this
+//! Name resolution for lifetimes is performed on the AST and embedded into HIR. From this
 //! information, typechecking needs to transform the lifetime parameters into bound lifetimes.
-//! Lifetimes can be early-bound or late-bound.  Construction of typechecking terms needs to visit
-//! the types in HIR to identify late-bound lifetimes and assign their Debruijn indices.  This file
+//! Lifetimes can be early-bound or late-bound. Construction of typechecking terms needs to visit
+//! the types in HIR to identify late-bound lifetimes and assign their Debruijn indices. This file
 //! is also responsible for assigning their semantics to implicit lifetimes in trait objects.
 
 use rustc_ast::walk_list;
@@ -70,7 +70,7 @@ fn shifted(self, amount: u32) -> Region {
 /// that it corresponds to.
 ///
 /// FIXME. This struct gets converted to a `ResolveLifetimes` for
-/// actual use. It has the same data, but indexed by `LocalDefId`.  This
+/// actual use. It has the same data, but indexed by `LocalDefId`. This
 /// is silly.
 #[derive(Debug, Default)]
 struct NamedRegionMap {
@@ -428,7 +428,7 @@ fn visit_item(&mut self, item: &'tcx hir::Item<'tcx>) {
             _ => {}
         }
         match item.kind {
-            hir::ItemKind::Fn(_, ref generics, _) => {
+            hir::ItemKind::Fn(_, generics, _) => {
                 self.visit_early_late(item.hir_id(), generics, |this| {
                     intravisit::walk_item(this, item);
                 });
@@ -508,13 +508,13 @@ fn visit_item(&mut self, item: &'tcx hir::Item<'tcx>) {
                     this.with(scope, |this| intravisit::walk_item(this, item))
                 });
             }
-            hir::ItemKind::TyAlias(_, ref generics)
-            | hir::ItemKind::Enum(_, ref generics)
-            | hir::ItemKind::Struct(_, ref generics)
-            | hir::ItemKind::Union(_, ref generics)
-            | hir::ItemKind::Trait(_, _, ref generics, ..)
-            | hir::ItemKind::TraitAlias(ref generics, ..)
-            | hir::ItemKind::Impl(hir::Impl { ref generics, .. }) => {
+            hir::ItemKind::TyAlias(_, generics)
+            | hir::ItemKind::Enum(_, generics)
+            | hir::ItemKind::Struct(_, generics)
+            | hir::ItemKind::Union(_, generics)
+            | hir::ItemKind::Trait(_, _, generics, ..)
+            | hir::ItemKind::TraitAlias(generics, ..)
+            | hir::ItemKind::Impl(&hir::Impl { generics, .. }) => {
                 // These kinds of items have only early-bound lifetime parameters.
                 let lifetimes = generics
                     .params
@@ -544,7 +544,7 @@ fn visit_item(&mut self, item: &'tcx hir::Item<'tcx>) {
 
     fn visit_foreign_item(&mut self, item: &'tcx hir::ForeignItem<'tcx>) {
         match item.kind {
-            hir::ForeignItemKind::Fn(_, _, ref generics) => {
+            hir::ForeignItemKind::Fn(_, _, generics) => {
                 self.visit_early_late(item.hir_id(), generics, |this| {
                     intravisit::walk_foreign_item(this, item);
                 })
@@ -561,7 +561,7 @@ fn visit_foreign_item(&mut self, item: &'tcx hir::ForeignItem<'tcx>) {
     #[instrument(level = "debug", skip(self))]
     fn visit_ty(&mut self, ty: &'tcx hir::Ty<'tcx>) {
         match ty.kind {
-            hir::TyKind::BareFn(ref c) => {
+            hir::TyKind::BareFn(c) => {
                 let (lifetimes, binders): (FxIndexMap<LocalDefId, Region>, Vec<_>) = c
                     .generic_params
                     .iter()
@@ -587,7 +587,7 @@ fn visit_ty(&mut self, ty: &'tcx hir::Ty<'tcx>) {
                     intravisit::walk_ty(this, ty);
                 });
             }
-            hir::TyKind::TraitObject(bounds, ref lifetime, _) => {
+            hir::TyKind::TraitObject(bounds, lifetime, _) => {
                 debug!(?bounds, ?lifetime, "TraitObject");
                 let scope = Scope::TraitRefBoundary { s: self.scope };
                 self.with(scope, |this| {
@@ -617,7 +617,7 @@ fn visit_ty(&mut self, ty: &'tcx hir::Ty<'tcx>) {
                     LifetimeName::Error => {}
                 }
             }
-            hir::TyKind::Ref(ref lifetime_ref, ref mt) => {
+            hir::TyKind::Ref(lifetime_ref, ref mt) => {
                 self.visit_lifetime(lifetime_ref);
                 let scope = Scope::ObjectLifetimeDefault {
                     lifetime: self.map.defs.get(&lifetime_ref.hir_id).cloned(),
@@ -632,7 +632,7 @@ fn visit_ty(&mut self, ty: &'tcx hir::Ty<'tcx>) {
                 //                 ^                  ^ this gets resolved in the scope of
                 //                                      the opaque_ty generics
                 let opaque_ty = self.tcx.hir().item(item_id);
-                match opaque_ty.kind {
+                match &opaque_ty.kind {
                     hir::ItemKind::OpaqueTy(hir::OpaqueTy {
                         origin: hir::OpaqueTyOrigin::TyAlias,
                         ..
@@ -655,7 +655,7 @@ fn visit_ty(&mut self, ty: &'tcx hir::Ty<'tcx>) {
                         origin: hir::OpaqueTyOrigin::FnReturn(..) | hir::OpaqueTyOrigin::AsyncFn(..),
                         ..
                     }) => {}
-                    ref i => bug!("`impl Trait` pointed to non-opaque type?? {:#?}", i),
+                    i => bug!("`impl Trait` pointed to non-opaque type?? {:#?}", i),
                 };
 
                 // Resolve the lifetimes that are applied to the opaque type.
@@ -720,7 +720,7 @@ fn visit_trait_item(&mut self, trait_item: &'tcx hir::TraitItem<'tcx>) {
                     intravisit::walk_trait_item(this, trait_item)
                 });
             }
-            Type(bounds, ref ty) => {
+            Type(bounds, ty) => {
                 let generics = &trait_item.generics;
                 let lifetimes = generics
                     .params
@@ -766,7 +766,7 @@ fn visit_impl_item(&mut self, impl_item: &'tcx hir::ImplItem<'tcx>) {
             Fn(..) => self.visit_early_late(impl_item.hir_id(), &impl_item.generics, |this| {
                 intravisit::walk_impl_item(this, impl_item)
             }),
-            Type(ref ty) => {
+            Type(ty) => {
                 let generics = &impl_item.generics;
                 let lifetimes: FxIndexMap<LocalDefId, Region> = generics
                     .params
@@ -817,7 +817,7 @@ fn visit_lifetime(&mut self, lifetime_ref: &'tcx hir::Lifetime) {
     fn visit_path(&mut self, path: &hir::Path<'tcx>, _: hir::HirId) {
         for (i, segment) in path.segments.iter().enumerate() {
             let depth = path.segments.len() - i - 1;
-            if let Some(ref args) = segment.args {
+            if let Some(args) = segment.args {
                 self.visit_segment_args(path.res, depth, args);
             }
         }
@@ -833,7 +833,7 @@ fn visit_fn(
     ) {
         let output = match fd.output {
             hir::FnRetTy::DefaultReturn(_) => None,
-            hir::FnRetTy::Return(ref ty) => Some(&**ty),
+            hir::FnRetTy::Return(ty) => Some(ty),
         };
         self.visit_fn_like_elision(&fd.inputs, output, matches!(fk, intravisit::FnKind::Closure));
         intravisit::walk_fn_kind(self, fk);
@@ -846,13 +846,13 @@ fn visit_generics(&mut self, generics: &'tcx hir::Generics<'tcx>) {
             for param in generics.params {
                 match param.kind {
                     GenericParamKind::Lifetime { .. } => {}
-                    GenericParamKind::Type { ref default, .. } => {
-                        if let Some(ref ty) = default {
-                            this.visit_ty(&ty);
+                    GenericParamKind::Type { default, .. } => {
+                        if let Some(ty) = default {
+                            this.visit_ty(ty);
                         }
                     }
-                    GenericParamKind::Const { ref ty, default } => {
-                        this.visit_ty(&ty);
+                    GenericParamKind::Const { ty, default } => {
+                        this.visit_ty(ty);
                         if let Some(default) = default {
                             this.visit_body(this.tcx.hir().body(default.body));
                         }
@@ -863,9 +863,9 @@ fn visit_generics(&mut self, generics: &'tcx hir::Generics<'tcx>) {
                 match predicate {
                     &hir::WherePredicate::BoundPredicate(hir::WhereBoundPredicate {
                         hir_id,
-                        ref bounded_ty,
+                        bounded_ty,
                         bounds,
-                        ref bound_generic_params,
+                        bound_generic_params,
                         origin,
                         ..
                     }) => {
@@ -905,7 +905,7 @@ fn visit_generics(&mut self, generics: &'tcx hir::Generics<'tcx>) {
                         })
                     }
                     &hir::WherePredicate::RegionPredicate(hir::WhereRegionPredicate {
-                        ref lifetime,
+                        lifetime,
                         bounds,
                         ..
                     }) => {
@@ -914,7 +914,7 @@ fn visit_generics(&mut self, generics: &'tcx hir::Generics<'tcx>) {
 
                         if lifetime.res != hir::LifetimeName::Static {
                             for bound in bounds {
-                                let hir::GenericBound::Outlives(ref lt) = bound else {
+                                let hir::GenericBound::Outlives(lt) = bound else {
                                     continue;
                                 };
                                 if lt.res != hir::LifetimeName::Static {
@@ -939,8 +939,8 @@ fn visit_generics(&mut self, generics: &'tcx hir::Generics<'tcx>) {
                         }
                     }
                     &hir::WherePredicate::EqPredicate(hir::WhereEqPredicate {
-                        ref lhs_ty,
-                        ref rhs_ty,
+                        lhs_ty,
+                        rhs_ty,
                         ..
                     }) => {
                         this.visit_ty(lhs_ty);
@@ -1042,7 +1042,7 @@ fn object_lifetime_default(tcx: TyCtxt<'_>, param_def_id: DefId) -> ObjectLifeti
                 }
 
                 for bound in bound.bounds {
-                    if let hir::GenericBound::Outlives(ref lifetime) = *bound {
+                    if let hir::GenericBound::Outlives(lifetime) = bound {
                         set.insert(lifetime.res);
                     }
                 }
@@ -1283,7 +1283,7 @@ fn resolve_lifetime_ref(
 
         // We may fail to resolve higher-ranked lifetimes that are mentioned by APIT.
         // AST-based resolution does not care for impl-trait desugaring, which are the
-        // responsibility of lowering.  This may create a mismatch between the resolution
+        // responsibility of lowering. This may create a mismatch between the resolution
         // AST found (`region_def_id`) which points to HRTB, and what HIR allows.
         // ```
         // fn foo(x: impl for<'a> Trait<'a, Assoc = impl Copy + 'a>) {}
@@ -1434,7 +1434,7 @@ fn visit_segment_args(
                         DefKind::ConstParam => Some(ObjectLifetimeDefault::Empty),
                         DefKind::TyParam => Some(self.tcx.object_lifetime_default(param.def_id)),
                         // We may also get a `Trait` or `TraitAlias` because of how generics `Self` parameter
-                        // works.  Ignore it because it can't have a meaningful lifetime default.
+                        // works. Ignore it because it can't have a meaningful lifetime default.
                         DefKind::LifetimeParam | DefKind::Trait | DefKind::TraitAlias => None,
                         dk => bug!("unexpected def_kind {:?}", dk),
                     }
@@ -1828,7 +1828,7 @@ fn visit_ty(&mut self, ty: &'v hir::Ty<'v>) {
                     }
                 }
 
-                hir::TyKind::Path(hir::QPath::Resolved(None, ref path)) => {
+                hir::TyKind::Path(hir::QPath::Resolved(None, path)) => {
                     // consider only the lifetimes on the final
                     // segment; I am not sure it's even currently
                     // valid to have them elsewhere, but even if it
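
Most of the hunks in this file simply drop explicit `ref` binders. With match ergonomics (RFC 2005), matching a non-reference pattern against a reference applies a by-reference binding mode automatically, so `ref` is redundant. A small self-contained illustration with a toy enum (not the HIR type):

```rust
enum FnRetTy {
    DefaultReturn,
    Return(String),
}

fn describe(output: &FnRetTy) -> Option<&str> {
    // Old style: `if let FnRetTy::Return(ref ty) = *output { .. }`.
    //
    // With match ergonomics, matching on `&FnRetTy` binds `ty` as `&String`
    // without `ref` or an explicit dereference.
    if let FnRetTy::Return(ty) = output {
        Some(ty.as_str())
    } else {
        None
    }
}

fn main() {
    let ret = FnRetTy::Return("u32".to_owned());
    assert_eq!(describe(&ret), Some("u32"));
    assert_eq!(describe(&FnRetTy::DefaultReturn), None);
}
```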
index a7e6494c15adb4cf57198821423968c51c4b9c66..46b277d9803d78db081c53ea63bc3861aa209c7c 100644 (file)
@@ -85,30 +85,30 @@ fn gather_explicit_predicates_of(tcx: TyCtxt<'_>, def_id: DefId) -> ty::GenericP
         Node::ImplItem(item) => item.generics,
 
         Node::Item(item) => match item.kind {
-            ItemKind::Impl(ref impl_) => {
+            ItemKind::Impl(impl_) => {
                 if impl_.defaultness.is_default() {
                     is_default_impl_trait =
                         tcx.impl_trait_ref(def_id).map(|t| ty::Binder::dummy(t.subst_identity()));
                 }
-                &impl_.generics
+                impl_.generics
             }
-            ItemKind::Fn(.., ref generics, _)
-            | ItemKind::TyAlias(_, ref generics)
-            | ItemKind::Enum(_, ref generics)
-            | ItemKind::Struct(_, ref generics)
-            | ItemKind::Union(_, ref generics) => *generics,
+            ItemKind::Fn(.., generics, _)
+            | ItemKind::TyAlias(_, generics)
+            | ItemKind::Enum(_, generics)
+            | ItemKind::Struct(_, generics)
+            | ItemKind::Union(_, generics) => generics,
 
-            ItemKind::Trait(_, _, ref generics, ..) | ItemKind::TraitAlias(ref generics, _) => {
+            ItemKind::Trait(_, _, generics, ..) | ItemKind::TraitAlias(generics, _) => {
                 is_trait = Some(ty::TraitRef::identity(tcx, def_id));
-                *generics
+                generics
             }
-            ItemKind::OpaqueTy(OpaqueTy { ref generics, .. }) => generics,
+            ItemKind::OpaqueTy(OpaqueTy { generics, .. }) => generics,
             _ => NO_GENERICS,
         },
 
         Node::ForeignItem(item) => match item.kind {
             ForeignItemKind::Static(..) => NO_GENERICS,
-            ForeignItemKind::Fn(_, _, ref generics) => *generics,
+            ForeignItemKind::Fn(_, _, generics) => generics,
             ForeignItemKind::Type => NO_GENERICS,
         },
 
@@ -247,7 +247,7 @@ fn gather_explicit_predicates_of(tcx: TyCtxt<'_>, def_id: DefId) -> ty::GenericP
 
     // Subtle: before we store the predicates into the tcx, we
     // sort them so that predicates like `T: Foo<Item=U>` come
-    // before uses of `U`.  This avoids false ambiguity errors
+    // before uses of `U`. This avoids false ambiguity errors
     // in trait checking. See `setup_constraining_predicates`
     // for details.
     if let Node::Item(&Item { kind: ItemKind::Impl { .. }, .. }) = node {
@@ -350,7 +350,7 @@ fn visit_const_param_default(&mut self, _param: HirId, _ct: &'tcx hir::AnonConst
     let node = tcx.hir().get(hir_id);
 
     let mut collector = ConstCollector { tcx, preds: FxIndexSet::default() };
-    if let hir::Node::Item(item) = node && let hir::ItemKind::Impl(ref impl_) = item.kind {
+    if let hir::Node::Item(item) = node && let hir::ItemKind::Impl(impl_) = item.kind {
         if let Some(of_trait) = &impl_.of_trait {
             debug!("const_evaluatable_predicates_of({:?}): visit impl trait_ref", def_id);
             collector.visit_trait_ref(of_trait);
@@ -511,8 +511,8 @@ pub(super) fn super_predicates_that_define_assoc_type(
         };
 
         let (generics, bounds) = match item.kind {
-            hir::ItemKind::Trait(.., ref generics, ref supertraits, _) => (generics, supertraits),
-            hir::ItemKind::TraitAlias(ref generics, ref supertraits) => (generics, supertraits),
+            hir::ItemKind::Trait(.., generics, supertraits, _) => (generics, supertraits),
+            hir::ItemKind::TraitAlias(generics, supertraits) => (generics, supertraits),
             _ => span_bug!(item.span, "super_predicates invoked on non-trait"),
         };
 
@@ -612,18 +612,18 @@ pub(super) fn type_param_predicates(
 
         Node::Item(item) => {
             match item.kind {
-                ItemKind::Fn(.., ref generics, _)
-                | ItemKind::Impl(hir::Impl { ref generics, .. })
-                | ItemKind::TyAlias(_, ref generics)
+                ItemKind::Fn(.., generics, _)
+                | ItemKind::Impl(&hir::Impl { generics, .. })
+                | ItemKind::TyAlias(_, generics)
                 | ItemKind::OpaqueTy(OpaqueTy {
-                    ref generics,
+                    generics,
                     origin: hir::OpaqueTyOrigin::TyAlias,
                     ..
                 })
-                | ItemKind::Enum(_, ref generics)
-                | ItemKind::Struct(_, ref generics)
-                | ItemKind::Union(_, ref generics) => generics,
-                ItemKind::Trait(_, _, ref generics, ..) => {
+                | ItemKind::Enum(_, generics)
+                | ItemKind::Struct(_, generics)
+                | ItemKind::Union(_, generics) => generics,
+                ItemKind::Trait(_, _, generics, ..) => {
                     // Implied `Self: Trait` and supertrait bounds.
                     if param_id == item_hir_id {
                         let identity_trait_ref = ty::TraitRef::identity(tcx, item_def_id);
@@ -637,7 +637,7 @@ pub(super) fn type_param_predicates(
         }
 
         Node::ForeignItem(item) => match item.kind {
-            ForeignItemKind::Fn(_, _, ref generics) => generics,
+            ForeignItemKind::Fn(_, _, generics) => generics,
             _ => return result,
         },
 
@@ -681,8 +681,8 @@ fn type_parameter_bounds_in_generics(
         ast_generics
             .predicates
             .iter()
-            .filter_map(|wp| match *wp {
-                hir::WherePredicate::BoundPredicate(ref bp) => Some(bp),
+            .filter_map(|wp| match wp {
+                hir::WherePredicate::BoundPredicate(bp) => Some(bp),
                 _ => None,
             })
             .flat_map(|bp| {
index 1f9a9f80302e3590957b9ef5a7d466ddfd860c1e..5e388a2f2babb0a0a17a2196316b198748fffb0a 100644 (file)
@@ -379,7 +379,7 @@ pub(super) fn type_of(tcx: TyCtxt<'_>, def_id: DefId) -> Ty<'_> {
             ForeignItemKind::Type => tcx.mk_foreign(def_id.to_def_id()),
         },
 
-        Node::Ctor(&ref def) | Node::Variant(Variant { data: ref def, .. }) => match *def {
+        Node::Ctor(def) | Node::Variant(Variant { data: def, .. }) => match def {
             VariantData::Unit(..) | VariantData::Struct(..) => {
                 tcx.type_of(tcx.hir().get_parent_item(hir_id))
             }
@@ -404,17 +404,17 @@ pub(super) fn type_of(tcx: TyCtxt<'_>, def_id: DefId) -> Ty<'_> {
         Node::AnonConst(_) => {
             let parent_node = tcx.hir().get_parent(hir_id);
             match parent_node {
-                Node::Ty(&Ty { kind: TyKind::Array(_, ref constant), .. })
-                | Node::Expr(&Expr { kind: ExprKind::Repeat(_, ref constant), .. })
+                Node::Ty(Ty { kind: TyKind::Array(_, constant), .. })
+                | Node::Expr(Expr { kind: ExprKind::Repeat(_, constant), .. })
                     if constant.hir_id() == hir_id =>
                 {
                     tcx.types.usize
                 }
-                Node::Ty(&Ty { kind: TyKind::Typeof(ref e), .. }) if e.hir_id == hir_id => {
+                Node::Ty(Ty { kind: TyKind::Typeof(e), .. }) if e.hir_id == hir_id => {
                     tcx.typeck(def_id).node_type(e.hir_id)
                 }
 
-                Node::Expr(&Expr { kind: ExprKind::ConstBlock(ref anon_const), .. })
+                Node::Expr(Expr { kind: ExprKind::ConstBlock(anon_const), .. })
                     if anon_const.hir_id == hir_id =>
                 {
                     let substs = InternalSubsts::identity_for_item(tcx, def_id.to_def_id());
@@ -434,18 +434,19 @@ pub(super) fn type_of(tcx: TyCtxt<'_>, def_id: DefId) -> Ty<'_> {
                     tcx.typeck(def_id).node_type(hir_id)
                 }
 
-                Node::Variant(Variant { disr_expr: Some(ref e), .. }) if e.hir_id == hir_id => {
+                Node::Variant(Variant { disr_expr: Some(e), .. }) if e.hir_id == hir_id => {
                     tcx.adt_def(tcx.hir().get_parent_item(hir_id)).repr().discr_type().to_ty(tcx)
                 }
 
                 Node::TypeBinding(
-                    binding @ &TypeBinding {
+                    TypeBinding {
                         hir_id: binding_id,
-                        kind: TypeBindingKind::Equality { term: Term::Const(ref e) },
+                        kind: TypeBindingKind::Equality { term: Term::Const(e) },
+                        ident,
                         ..
                     },
                 ) if let Node::TraitRef(trait_ref) =
-                    tcx.hir().get_parent(binding_id)
+                    tcx.hir().get_parent(*binding_id)
                     && e.hir_id == hir_id =>
                 {
                     let Some(trait_def_id) = trait_ref.trait_def_id() else {
@@ -454,7 +455,7 @@ pub(super) fn type_of(tcx: TyCtxt<'_>, def_id: DefId) -> Ty<'_> {
                     let assoc_items = tcx.associated_items(trait_def_id);
                     let assoc_item = assoc_items.find_by_name_and_kind(
                         tcx,
-                        binding.ident,
+                        *ident,
                         ty::AssocKind::Const,
                         def_id.to_def_id(),
                     );
@@ -470,9 +471,9 @@ pub(super) fn type_of(tcx: TyCtxt<'_>, def_id: DefId) -> Ty<'_> {
                 }
 
                 Node::TypeBinding(
-                    binding @ &TypeBinding { hir_id: binding_id, gen_args, ref kind, .. },
+                    TypeBinding { hir_id: binding_id, gen_args, kind, ident, .. },
                 ) if let Node::TraitRef(trait_ref) =
-                    tcx.hir().get_parent(binding_id)
+                    tcx.hir().get_parent(*binding_id)
                     && let Some((idx, _)) =
                         gen_args.args.iter().enumerate().find(|(_, arg)| {
                             if let GenericArg::Const(ct) = arg {
@@ -488,7 +489,7 @@ pub(super) fn type_of(tcx: TyCtxt<'_>, def_id: DefId) -> Ty<'_> {
                     let assoc_items = tcx.associated_items(trait_def_id);
                     let assoc_item = assoc_items.find_by_name_and_kind(
                         tcx,
-                        binding.ident,
+                        *ident,
                         match kind {
                             // I think `<A: T>` type bindings requires that `A` is a type
                             TypeBindingKind::Constraint { .. }
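
The `Node::TypeBinding` hunks above stop capturing the whole node with a `binding @ &TypeBinding { .. }` pattern and instead bind the needed fields (`hir_id`, `ident`) directly, copying them with `*` where a value is required. A toy version of that before/after, using hypothetical types:

```rust
#[derive(Clone, Copy, PartialEq, Debug)]
struct Ident(u32);

struct TypeBinding {
    hir_id: u32,
    ident: Ident,
}

// Before: capture the whole node with `binding @ ...`, pull one field out of
// the pattern, and reach back through `binding` for the rest.
fn lookup_with_at_binding(node: &TypeBinding) -> (u32, Ident) {
    let binding @ &TypeBinding { hir_id, .. } = node;
    (hir_id, binding.ident)
}

// After: bind every needed field in the pattern. Matching on a reference
// makes the fields come out as references, hence the explicit `*` copies.
fn lookup_with_field_bindings(node: &TypeBinding) -> (u32, Ident) {
    let TypeBinding { hir_id, ident } = node;
    (*hir_id, *ident)
}

fn main() {
    let b = TypeBinding { hir_id: 7, ident: Ident(42) };
    assert_eq!(lookup_with_at_binding(&b), lookup_with_field_bindings(&b));
}
```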
index 95c971c0d7845eee2d63668b8527c419b67eb4e6..56cc1d8fadc00e7ac6e9611c18dd0f5bcdf3039a 100644 (file)
@@ -61,7 +61,7 @@ fn visit_ty(&mut self, t: Ty<'tcx>) -> ControlFlow<Self::BreakTy> {
         match *t.kind() {
             ty::Alias(ty::Projection, ..) if !self.include_nonconstraining => {
                 // projections are not injective
-                return ControlFlow::CONTINUE;
+                return ControlFlow::Continue(());
             }
             ty::Param(data) => {
                 self.parameters.push(Parameter::from(data));
@@ -76,7 +76,7 @@ fn visit_region(&mut self, r: ty::Region<'tcx>) -> ControlFlow<Self::BreakTy> {
         if let ty::ReEarlyBound(data) = *r {
             self.parameters.push(Parameter::from(data));
         }
-        ControlFlow::CONTINUE
+        ControlFlow::Continue(())
     }
 
     fn visit_const(&mut self, c: ty::Const<'tcx>) -> ControlFlow<Self::BreakTy> {
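
The hunks above replace the unstable `ControlFlow::CONTINUE` associated constant with the plain `Continue(())` variant. A standalone example of the same `std::ops::ControlFlow` API:

```rust
use std::ops::ControlFlow;

// Walk a slice and stop at the first negative value, short-circuiting the
// same way the compiler's type visitors do.
fn first_negative(values: &[i32]) -> ControlFlow<i32> {
    for &v in values {
        if v < 0 {
            return ControlFlow::Break(v);
        }
    }
    // Formerly spellable as the unstable shorthand `ControlFlow::CONTINUE`;
    // the stable spelling is the plain variant with a unit payload.
    ControlFlow::Continue(())
}

fn main() {
    assert_eq!(first_negative(&[1, 2, -3, 4]), ControlFlow::Break(-3));
    assert_eq!(first_negative(&[1, 2, 3]), ControlFlow::Continue(()));
}
```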
index 2dbfc1bc9a229c4a4a069ddf76ab79cbb26db179..17dbb126bd1b0f357ac0f0204d8a6d70614cc0ba 100644 (file)
@@ -128,7 +128,7 @@ fn visit_ty(&mut self, ty: &'tcx hir::Ty<'tcx>) {
             },
             hir::Node::Item(item) => match item.kind {
                 hir::ItemKind::Static(ty, _, _) | hir::ItemKind::Const(ty, _) => vec![ty],
-                hir::ItemKind::Impl(ref impl_) => match &impl_.of_trait {
+                hir::ItemKind::Impl(impl_) => match &impl_.of_trait {
                     Some(t) => t
                         .path
                         .segments
index ddc5b7668812668e9e6f923956ba85bd0a2cb484..02548ae893f28682a16ed0d5593c3f6e642ab524 100644 (file)
@@ -22,7 +22,7 @@
 4. Finally, the check phase then checks function bodies and so forth.
    Within the check phase, we check each function body one at a time
    (bodies of function expressions are checked as part of the
-   containing function).  Inference is used to supply types wherever
+   containing function). Inference is used to supply types wherever
    they are unknown. The actual checking of a function itself has
    several phases (check, regionck, writeback), as discussed in the
    documentation for the [`check`] module.
@@ -46,7 +46,7 @@
   local variables, type parameters, etc as necessary.
 
 - infer: finds the types to use for each type variable such that
-  all subtyping and assignment constraints are met.  In essence, the check
+  all subtyping and assignment constraints are met. In essence, the check
   module specifies the constraints, and the infer module solves them.
 
 ## Note
 use rustc_trait_selection::traits::{self, ObligationCause, ObligationCauseCode};
 
 use std::iter;
+use std::ops::Not;
 
 use astconv::AstConv;
 use bounds::Bounds;
@@ -203,12 +204,8 @@ fn main_fn_generics_params_span(tcx: TyCtxt<'_>, def_id: DefId) -> Option<Span>
         }
         let hir_id = tcx.hir().local_def_id_to_hir_id(def_id.expect_local());
         match tcx.hir().find(hir_id) {
-            Some(Node::Item(hir::Item { kind: hir::ItemKind::Fn(_, ref generics, _), .. })) => {
-                if !generics.params.is_empty() {
-                    Some(generics.span)
-                } else {
-                    None
-                }
+            Some(Node::Item(hir::Item { kind: hir::ItemKind::Fn(_, generics, _), .. })) => {
+                generics.params.is_empty().not().then(|| generics.span)
             }
             _ => {
                 span_bug!(tcx.def_span(def_id), "main has a non-function type");
@@ -222,7 +219,7 @@ fn main_fn_where_clauses_span(tcx: TyCtxt<'_>, def_id: DefId) -> Option<Span> {
         }
         let hir_id = tcx.hir().local_def_id_to_hir_id(def_id.expect_local());
         match tcx.hir().find(hir_id) {
-            Some(Node::Item(hir::Item { kind: hir::ItemKind::Fn(_, ref generics, _), .. })) => {
+            Some(Node::Item(hir::Item { kind: hir::ItemKind::Fn(_, generics, _), .. })) => {
                 Some(generics.where_clause_span)
             }
             _ => {
@@ -244,7 +241,7 @@ fn main_fn_return_type_span(tcx: TyCtxt<'_>, def_id: DefId) -> Option<Span> {
         }
         let hir_id = tcx.hir().local_def_id_to_hir_id(def_id.expect_local());
         match tcx.hir().find(hir_id) {
-            Some(Node::Item(hir::Item { kind: hir::ItemKind::Fn(ref fn_sig, _, _), .. })) => {
+            Some(Node::Item(hir::Item { kind: hir::ItemKind::Fn(fn_sig, _, _), .. })) => {
                 Some(fn_sig.decl.output.span())
             }
             _ => {
@@ -374,7 +371,7 @@ fn check_start_fn_ty(tcx: TyCtxt<'_>, start_def_id: DefId) {
     match start_t.kind() {
         ty::FnDef(..) => {
             if let Some(Node::Item(it)) = tcx.hir().find(start_id) {
-                if let hir::ItemKind::Fn(ref sig, ref generics, _) = it.kind {
+                if let hir::ItemKind::Fn(sig, generics, _) = &it.kind {
                     let mut error = false;
                     if !generics.params.is_empty() {
                         struct_span_err!(
@@ -542,7 +539,7 @@ pub fn check_crate(tcx: TyCtxt<'_>) -> Result<(), ErrorGuaranteed> {
 pub fn hir_ty_to_ty<'tcx>(tcx: TyCtxt<'tcx>, hir_ty: &hir::Ty<'_>) -> Ty<'tcx> {
     // In case there are any projections, etc., find the "environment"
     // def-ID that will be used to determine the traits/predicates in
-    // scope.  This is derived from the enclosing item-like thing.
+    // scope. This is derived from the enclosing item-like thing.
     let env_def_id = tcx.hir().get_parent_item(hir_ty.hir_id);
     let item_cx = self::collect::ItemCtxt::new(tcx, env_def_id.to_def_id());
     item_cx.astconv().ast_ty_to_ty(hir_ty)
@@ -555,7 +552,7 @@ pub fn hir_trait_to_predicates<'tcx>(
 ) -> Bounds<'tcx> {
     // In case there are any projections, etc., find the "environment"
     // def-ID that will be used to determine the traits/predicates in
-    // scope.  This is derived from the enclosing item-like thing.
+    // scope. This is derived from the enclosing item-like thing.
     let env_def_id = tcx.hir().get_parent_item(hir_trait.hir_ref_id);
     let item_cx = self::collect::ItemCtxt::new(tcx, env_def_id.to_def_id());
     let mut bounds = Bounds::default();
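
One hunk above rewrites an `if !x.is_empty() { Some(..) } else { None }` into `x.is_empty().not().then(|| ..)`, importing `std::ops::Not` so the negation reads left to right. The same transformation in isolation, with hypothetical names:

```rust
use std::ops::Not;

fn generics_span_if(params: &[&str], span: (u32, u32)) -> Option<(u32, u32)> {
    // Equivalent to:
    //     if !params.is_empty() { Some(span) } else { None }
    //
    // `bool::then` builds the `Option`, and calling `.not()` instead of
    // prefixing `!` keeps the whole expression reading left to right.
    params.is_empty().not().then(|| span)
}

fn main() {
    assert_eq!(generics_span_if(&["T"], (10, 20)), Some((10, 20)));
    assert_eq!(generics_span_if(&[], (10, 20)), None);
}
```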
index a46f2a94cd281a17006d5cb2a6be6332eaabb661..925042436dec150821c957051f7f13283069b84e 100644 (file)
@@ -139,7 +139,7 @@ fn insert_required_predicates_to_be_wf<'tcx>(
                 if let Some(unsubstituted_predicates) = global_inferred_outlives.get(&def.did()) {
                     for (unsubstituted_predicate, &span) in &unsubstituted_predicates.0 {
                         // `unsubstituted_predicate` is `U: 'b` in the
-                        // example above.  So apply the substitution to
+                        // example above. So apply the substitution to
                         // get `T: 'a` (or `predicate`):
                         let predicate = unsubstituted_predicates
                             .rebind(*unsubstituted_predicate)
index b51b740d08e2e7a2e87f7bb578205bbcc430237c..9459c5f54abbf7c0105f3666c1c347b39efb5964 100644 (file)
@@ -48,7 +48,7 @@ pub(crate) fn insert_outlives_predicate<'tcx>(
                         // ```
                         //
                         // Here `outlived_region = 'a` and `kind = &'b
-                        // u32`.  Decomposing `&'b u32` into
+                        // u32`. Decomposing `&'b u32` into
                         // components would yield `'b`, and we add the
                         // where clause that `'b: 'a`.
                         insert_outlives_predicate(
@@ -71,7 +71,7 @@ pub(crate) fn insert_outlives_predicate<'tcx>(
                         // ```
                         //
                         // Here `outlived_region = 'a` and `kind =
-                        // Vec<U>`.  Decomposing `Vec<U>` into
+                        // Vec<U>`. Decomposing `Vec<U>` into
                         // components would yield `U`, and we add the
                         // where clause that `U: 'a`.
                         let ty: Ty<'tcx> = param_ty.to_ty(tcx);
@@ -80,8 +80,8 @@ pub(crate) fn insert_outlives_predicate<'tcx>(
                             .or_insert(span);
                     }
 
-                    Component::Projection(proj_ty) => {
-                        // This would arise from something like:
+                    Component::Alias(alias_ty) => {
+                        // This would either arise from something like:
                         //
                         // ```
                         // struct Foo<'a, T: Iterator> {
@@ -89,15 +89,7 @@ pub(crate) fn insert_outlives_predicate<'tcx>(
                         // }
                         // ```
                         //
-                        // Here we want to add an explicit `where <T as Iterator>::Item: 'a`.
-                        let ty: Ty<'tcx> = tcx.mk_projection(proj_ty.def_id, proj_ty.substs);
-                        required_predicates
-                            .entry(ty::OutlivesPredicate(ty.into(), outlived_region))
-                            .or_insert(span);
-                    }
-
-                    Component::Opaque(def_id, substs) => {
-                        // This would arise from something like:
+                        // or:
                         //
                         // ```rust
                         // type Opaque<T> = impl Sized;
@@ -105,17 +97,17 @@ pub(crate) fn insert_outlives_predicate<'tcx>(
                         // struct Ss<'a, T>(&'a Opaque<T>);
                         // ```
                         //
-                        // Here we want to have an implied bound `Opaque<T>: 'a`
-
-                        let ty = tcx.mk_opaque(def_id, substs);
+                        // Here we want to add an explicit `where <T as Iterator>::Item: 'a`
+                        // or `Opaque<T>: 'a` depending on the alias kind.
+                        let ty = alias_ty.to_ty(tcx);
                         required_predicates
                             .entry(ty::OutlivesPredicate(ty.into(), outlived_region))
                             .or_insert(span);
                     }
 
-                    Component::EscapingProjection(_) => {
+                    Component::EscapingAlias(_) => {
                         // As above, but the projection involves
-                        // late-bound regions.  Therefore, the WF
+                        // late-bound regions. Therefore, the WF
                         // requirement is not checked in type definition
                         // but at fn call site, so ignore it.
                         //
@@ -175,7 +167,7 @@ fn is_free_region(region: Region<'_>) -> bool {
         //     }
         //
         // The type above might generate a `T: 'b` bound, but we can
-        // ignore it.  We can't put it on the struct header anyway.
+        // ignore it. We can't put it on the struct header anyway.
         ty::ReLateBound(..) => false,
 
         // These regions don't appear in types from type declarations:
index 574b1e8b485afad6ffc8d1ada76b48f564d73786..9133e6540d4450e07e96f9742c8115d8553e9722 100644 (file)
@@ -727,8 +727,8 @@ fn suggest_moving_args_from_assoc_fn_to_trait(&self, err: &mut Diagnostic) {
         if let Some(parent_node) = self.tcx.hir().opt_parent_id(self.path_segment.hir_id)
         && let Some(parent_node) = self.tcx.hir().find(parent_node)
         && let hir::Node::Expr(expr) = parent_node {
-            match expr.kind {
-                hir::ExprKind::Path(ref qpath) => {
+            match &expr.kind {
+                hir::ExprKind::Path(qpath) => {
                     self.suggest_moving_args_from_assoc_fn_to_trait_for_qualified_path(
                         err,
                         qpath,
index 24008f88814339d1f343fe38c0cf368165afe994..079070be27983a8c72f5d04120ffff76e660ed81 100644 (file)
@@ -92,7 +92,7 @@ fn visit_opaque(&mut self, def_id: DefId, substs: SubstsRef<'tcx>) -> ControlFlo
                         a.visit_with(self)?;
                     }
                 }
-                ControlFlow::CONTINUE
+                ControlFlow::Continue(())
             } else {
                 substs.visit_with(self)
             }
index 97aca621aa21745e78cfc319bd6acf089c5e9fd4..a17edb598ad5e5eb79d86072571bbc55fda8c2c3 100644 (file)
@@ -5,8 +5,7 @@
 //! optimal solution to the constraints. The final variance for each
 //! inferred is then written into the `variance_map` in the tcx.
 
-use rustc_data_structures::fx::FxHashMap;
-use rustc_hir::def_id::DefId;
+use rustc_hir::def_id::DefIdMap;
 use rustc_middle::ty;
 
 use super::constraints::*;
@@ -28,8 +27,8 @@ pub fn solve_constraints<'tcx>(
     let ConstraintContext { terms_cx, constraints, .. } = constraints_cx;
 
     let mut solutions = vec![ty::Bivariant; terms_cx.inferred_terms.len()];
-    for &(id, ref variances) in &terms_cx.lang_items {
-        let InferredIndex(start) = terms_cx.inferred_starts[&id];
+    for (id, variances) in &terms_cx.lang_items {
+        let InferredIndex(start) = terms_cx.inferred_starts[id];
         for (i, &variance) in variances.iter().enumerate() {
             solutions[start + i] = variance;
         }
@@ -44,7 +43,7 @@ pub fn solve_constraints<'tcx>(
 
 impl<'a, 'tcx> SolveContext<'a, 'tcx> {
     fn solve(&mut self) {
-        // Propagate constraints until a fixed point is reached.  Note
+        // Propagate constraints until a fixed point is reached. Note
         // that the maximum number of iterations is 2C where C is the
         // number of constraints (each variable can change values at most
         // twice). Since number of constraints is linear in size of the
@@ -89,14 +88,12 @@ fn enforce_const_invariance(&self, generics: &ty::Generics, variances: &mut [ty:
         }
     }
 
-    fn create_map(&self) -> FxHashMap<DefId, &'tcx [ty::Variance]> {
+    fn create_map(&self) -> DefIdMap<&'tcx [ty::Variance]> {
         let tcx = self.terms_cx.tcx;
 
         let solutions = &self.solutions;
-        self.terms_cx
-            .inferred_starts
-            .iter()
-            .map(|(&def_id, &InferredIndex(start))| {
+        DefIdMap::from(self.terms_cx.inferred_starts.items().map(
+            |(&def_id, &InferredIndex(start))| {
                 let generics = tcx.generics_of(def_id);
                 let count = generics.count();
 
@@ -115,8 +112,8 @@ fn create_map(&self) -> FxHashMap<DefId, &'tcx [ty::Variance]> {
                 }
 
                 (def_id.to_def_id(), &*variances)
-            })
-            .collect()
+            },
+        ))
     }
 
     fn evaluate(&self, term: VarianceTermPtr<'a>) -> ty::Variance {
index 83ed3e44b3d733ca28477c985a4674c1eb201041..5feeb92d3378251e9e551acf2a435c9c24134c62 100644 (file)
@@ -1,4 +1,3 @@
-use rustc_errors::struct_span_err;
 use rustc_middle::ty::TyCtxt;
 use rustc_span::symbol::sym;
 
@@ -8,8 +7,8 @@ pub fn test_variance(tcx: TyCtxt<'_>) {
     for id in tcx.hir().items() {
         if tcx.has_attr(id.owner_id.to_def_id(), sym::rustc_variance) {
             let variances_of = tcx.variances_of(id.owner_id);
-            struct_span_err!(tcx.sess, tcx.def_span(id.owner_id), E0208, "{:?}", variances_of)
-                .emit();
+
+            tcx.sess.struct_span_err(tcx.def_span(id.owner_id), format!("{variances_of:?}")).emit();
         }
     }
 }
index 8d417290407ed033049bbe2d48afb48b99d94c2a..b617821fbd6528bbf6feee0f9efb8ff867a34ae5 100644 (file)
@@ -375,14 +375,12 @@ fn confirm_builtin_call(
                 if self.tcx.has_attr(def_id, sym::rustc_evaluate_where_clauses) {
                     let predicates = self.tcx.predicates_of(def_id);
                     let predicates = predicates.instantiate(self.tcx, subst);
-                    for (predicate, predicate_span) in
-                        predicates.predicates.iter().zip(&predicates.spans)
-                    {
+                    for (predicate, predicate_span) in predicates {
                         let obligation = Obligation::new(
                             self.tcx,
                             ObligationCause::dummy_with_span(callee_expr.span),
                             self.param_env,
-                            *predicate,
+                            predicate,
                         );
                         let result = self.evaluate_obligation(&obligation);
                         self.tcx
@@ -391,7 +389,7 @@ fn confirm_builtin_call(
                                 callee_expr.span,
                                 &format!("evaluate({:?}) = {:?}", predicate, result),
                             )
-                            .span_label(*predicate_span, "predicate")
+                            .span_label(predicate_span, "predicate")
                             .emit();
                     }
                 }
@@ -659,8 +657,7 @@ fn report_invalid_callee(
         };
 
         if !self.maybe_suggest_bad_array_definition(&mut err, call_expr, callee_expr) {
-            if let Some((maybe_def, output_ty, _)) =
-                self.extract_callable_info(callee_expr, callee_ty)
+            if let Some((maybe_def, output_ty, _)) = self.extract_callable_info(callee_ty)
                 && !self.type_is_sized_modulo_regions(self.param_env, output_ty, callee_expr.span)
             {
                 let descr = match maybe_def {
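
The first hunk above stops zipping `predicates.predicates` with `predicates.spans` and instead iterates the instantiated predicates directly, yielding `(predicate, span)` pairs by value. A toy sketch of the underlying idea — giving a paired container its own `IntoIterator` — using a stand-in type rather than rustc's:

```rust
// Stand-in for an instantiated predicate list: two parallel sequences that
// are only ever consumed together.
struct InstantiatedPredicates {
    predicates: Vec<String>,
    spans: Vec<(u32, u32)>,
}

impl IntoIterator for InstantiatedPredicates {
    type Item = (String, (u32, u32));
    type IntoIter =
        std::iter::Zip<std::vec::IntoIter<String>, std::vec::IntoIter<(u32, u32)>>;

    fn into_iter(self) -> Self::IntoIter {
        // The zip lives inside the type, so callers can write
        // `for (predicate, span) in predicates { .. }` instead of zipping
        // (and dereferencing) the two fields at every use site.
        self.predicates.into_iter().zip(self.spans)
    }
}

fn main() {
    let preds = InstantiatedPredicates {
        predicates: vec!["T: Copy".to_owned(), "U: Clone".to_owned()],
        spans: vec![(1, 5), (6, 9)],
    };
    for (predicate, span) in preds {
        println!("evaluate({predicate:?}) at {span:?}");
    }
}
```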
index 042a50f2fd42eb89906167fde85ed9b0e4eb1622..712f9b87aed0ad623b3bcd77be096c07cb5ce5b1 100644 (file)
 use super::FnCtxt;
 
 use crate::type_error_struct;
-use rustc_errors::{struct_span_err, Applicability, DelayDm, DiagnosticBuilder, ErrorGuaranteed};
+use hir::ExprKind;
+use rustc_errors::{
+    struct_span_err, Applicability, DelayDm, Diagnostic, DiagnosticBuilder, ErrorGuaranteed,
+};
 use rustc_hir as hir;
 use rustc_macros::{TypeFoldable, TypeVisitable};
 use rustc_middle::mir::Mutability;
@@ -149,7 +152,7 @@ fn pointer_kind(
 
 #[derive(Copy, Clone)]
 pub enum CastError {
-    ErrorGuaranteed,
+    ErrorGuaranteed(ErrorGuaranteed),
 
     CastToBool,
     CastToChar,
@@ -174,8 +177,8 @@ pub enum CastError {
 }
 
 impl From<ErrorGuaranteed> for CastError {
-    fn from(_: ErrorGuaranteed) -> Self {
-        CastError::ErrorGuaranteed
+    fn from(err: ErrorGuaranteed) -> Self {
+        CastError::ErrorGuaranteed(err)
     }
 }
 
@@ -223,11 +226,10 @@ pub fn new(
 
     fn report_cast_error(&self, fcx: &FnCtxt<'a, 'tcx>, e: CastError) {
         match e {
-            CastError::ErrorGuaranteed => {
+            CastError::ErrorGuaranteed(_) => {
                 // an error has already been reported
             }
             CastError::NeedDeref => {
-                let error_span = self.span;
                 let mut err = make_invalid_casting_error(
                     fcx.tcx.sess,
                     self.span,
@@ -235,21 +237,25 @@ fn report_cast_error(&self, fcx: &FnCtxt<'a, 'tcx>, e: CastError) {
                     self.cast_ty,
                     fcx,
                 );
-                let cast_ty = fcx.ty_to_string(self.cast_ty);
-                err.span_label(
-                    error_span,
-                    format!("cannot cast `{}` as `{}`", fcx.ty_to_string(self.expr_ty), cast_ty),
-                );
-                if let Ok(snippet) = fcx.sess().source_map().span_to_snippet(self.expr_span) {
-                    err.span_suggestion(
-                        self.expr_span,
-                        "dereference the expression",
-                        format!("*{}", snippet),
-                        Applicability::MaybeIncorrect,
+
+                if matches!(self.expr.kind, ExprKind::AddrOf(..)) {
+                    // get just the borrow part of the expression
+                    let span = self.expr_span.with_hi(self.expr.peel_borrows().span.lo());
+                    err.span_suggestion_verbose(
+                        span,
+                        "remove the unneeded borrow",
+                        "",
+                        Applicability::MachineApplicable,
                     );
                 } else {
-                    err.span_help(self.expr_span, "dereference the expression with `*`");
+                    err.span_suggestion_verbose(
+                        self.expr_span.shrink_to_lo(),
+                        "dereference the expression",
+                        "*",
+                        Applicability::MachineApplicable,
+                    );
                 }
+
                 err.emit();
             }
             CastError::NeedViaThinPtr | CastError::NeedViaPtr => {
@@ -270,6 +276,9 @@ fn report_cast_error(&self, fcx: &FnCtxt<'a, 'tcx>, e: CastError) {
                         }
                     ));
                 }
+
+                self.try_suggest_collection_to_bool(fcx, &mut err);
+
                 err.emit();
             }
             CastError::NeedViaInt => {
@@ -517,6 +526,9 @@ fn report_cast_error(&self, fcx: &FnCtxt<'a, 'tcx>, e: CastError) {
                 } else {
                     err.span_label(self.span, "invalid cast");
                 }
+
+                self.try_suggest_collection_to_bool(fcx, &mut err);
+
                 err.emit();
             }
             CastError::SizedUnsizedCast => {
@@ -1080,4 +1092,40 @@ fn fuzzy_provenance_int2ptr_lint(&self, fcx: &FnCtxt<'a, 'tcx>) {
             },
         );
     }
+
+    /// Attempt to suggest using `.is_empty` when trying to cast from a
+    /// collection type to a boolean.
+    fn try_suggest_collection_to_bool(&self, fcx: &FnCtxt<'a, 'tcx>, err: &mut Diagnostic) {
+        if self.cast_ty.is_bool() {
+            let derefed = fcx
+                .autoderef(self.expr_span, self.expr_ty)
+                .silence_errors()
+                .find(|t| matches!(t.0.kind(), ty::Str | ty::Slice(..)));
+
+            if let Some((deref_ty, _)) = derefed {
+                // Give a note about what the expr derefs to.
+                if deref_ty != self.expr_ty.peel_refs() {
+                    err.span_note(
+                        self.expr_span,
+                        format!(
+                            "this expression `Deref`s to `{}` which implements `is_empty`",
+                            fcx.ty_to_string(deref_ty)
+                        ),
+                    );
+                }
+
+                // Create a multipart suggestion: add `!` and `.is_empty()` in
+                // place of the cast.
+                let suggestion = vec![
+                    (self.expr_span.shrink_to_lo(), "!".to_string()),
+                    (self.span.with_lo(self.expr_span.hi()), ".is_empty()".to_string()),
+                ];
+
+                err.multipart_suggestion_verbose(format!(
+                    "consider using the `is_empty` method on `{}` to determine if it contains anything",
+                    fcx.ty_to_string(self.expr_ty),
+                ),  suggestion, Applicability::MaybeIncorrect);
+            }
+        }
+    }
 }
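
The new `try_suggest_collection_to_bool` helper above targets code that attempts to cast a string or slice to `bool`; its multipart suggestion prefixes `!` and replaces the cast with `.is_empty()`. What that suggestion amounts to at the use site, as ordinary user code rather than compiler internals:

```rust
fn main() {
    let names: Vec<&str> = vec!["a", "b"];

    // Rejected by the compiler: a collection cannot be cast to `bool`.
    // This is the situation the new diagnostic path handles.
    // let has_names = names as bool;

    // The suggested replacement: prefix `!` and append `.is_empty()` in
    // place of the cast.
    let has_names = !names.is_empty();
    assert!(has_names);
}
```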
index 399702fd41abcdabbeb055e5629452d6ddc3f0e8..12a2abfa76a921d5402860d134f10c68ad8805ce 100644 (file)
@@ -236,7 +236,7 @@ impl<'tcx> TypeVisitor<'tcx> for MentionsTy<'tcx> {
 
                     fn visit_ty(&mut self, t: Ty<'tcx>) -> ControlFlow<Self::BreakTy> {
                         if t == self.expected_ty {
-                            ControlFlow::BREAK
+                            ControlFlow::Break(())
                         } else {
                             t.super_visit_with(self)
                         }
@@ -524,7 +524,7 @@ fn merge_supplied_sig_with_expectation(
 
         // FIXME(#45727): As discussed in [this comment][c1], naively
         // forcing equality here actually results in suboptimal error
-        // messages in some cases.  For now, if there would have been
+        // messages in some cases. For now, if there would have been
         // an obvious error, we fallback to declaring the type of the
         // closure to be the one the user gave, which allows other
         // error message code to trigger.
index 7e1c0faa453a26f08b87472f77d09cc51b237a2e..bbf7b81a2cc66ca82ccff36b50b0947c99710b77 100644 (file)
@@ -313,7 +313,7 @@ fn coerce_borrowed_pointer(
 
         // If we have a parameter of type `&M T_a` and the value
         // provided is `expr`, we will be adding an implicit borrow,
-        // meaning that we convert `f(expr)` to `f(&M *expr)`.  Therefore,
+        // meaning that we convert `f(expr)` to `f(&M *expr)`. Therefore,
         // to type check, we will construct the type that `&M*expr` would
         // yield.
 
@@ -340,7 +340,7 @@ fn coerce_borrowed_pointer(
                 continue;
             }
 
-            // At this point, we have deref'd `a` to `referent_ty`.  So
+            // At this point, we have deref'd `a` to `referent_ty`. So
             // imagine we are coercing from `&'a mut Vec<T>` to `&'b mut [T]`.
             // In the autoderef loop for `&'a mut Vec<T>`, we would get
             // three callbacks:
@@ -371,7 +371,7 @@ fn coerce_borrowed_pointer(
             // - if in sub mode, that means we want to use `'b` (the
             //   region from the target reference) for both
             //   pointers [2]. This is because sub mode (somewhat
-            //   arbitrarily) returns the subtype region.  In the case
+            //   arbitrarily) returns the subtype region. In the case
             //   where we are coercing to a target type, we know we
             //   want to use that target type region (`'b`) because --
             //   for the program to type-check -- it must be the
@@ -383,7 +383,7 @@ fn coerce_borrowed_pointer(
             //     annotate the region of a borrow), and regionck has
             //     code that adds edges from the region of a borrow
             //     (`'b`, here) into the regions in the borrowed
-            //     expression (`*x`, here).  (Search for "link".)
+            //     expression (`*x`, here). (Search for "link".)
             // - if in lub mode, things can get fairly complicated. The
             //   easiest thing is just to make a fresh
             //   region variable [4], which effectively means we defer
@@ -457,7 +457,7 @@ fn coerce_borrowed_pointer(
         if ty == a && mt_a.mutbl.is_not() && autoderef.step_count() == 1 {
             // As a special case, if we would produce `&'a *x`, that's
             // a total no-op. We end up with the type `&'a T` just as
-            // we started with.  In that case, just skip it
+            // we started with. In that case, just skip it
             // altogether. This is just an optimization.
             //
             // Note that for `&mut`, we DO want to reborrow --
@@ -1476,7 +1476,7 @@ pub(crate) fn coerce_inner<'a>(
             //     if let Some(x) = ... { }
             //
             // we wind up with a second match arm that is like `_ =>
-            // ()`.  That is the case we are considering here. We take
+            // ()`. That is the case we are considering here. We take
             // a different path to get the right "expected, found"
             // message and so forth (and because we know that
             // `expression_ty` will be unit).
index 665dc8b6a2f2a4ed18091fb4952e550ca82ba30c..bd1626dff79515908687a64fd667223ab24ca127 100644 (file)
@@ -1233,6 +1233,22 @@ pub fn check_ref(
                             sugg_sp = receiver.span;
                         }
                     }
+
+                    if let hir::ExprKind::Unary(hir::UnOp::Deref, ref inner) = expr.kind
+                        && let Some(1) = self.deref_steps(expected, checked_ty) {
+                        // We have `*&T`, check if what was expected was `&T`.
+                        // If so, we may want to suggest removing a `*`.
+                        sugg_sp = sugg_sp.with_hi(inner.span.lo());
+                        return Some((
+                            sugg_sp,
+                            "consider removing deref here".to_string(),
+                            "".to_string(),
+                            Applicability::MachineApplicable,
+                            true,
+                            false,
+                        ));
+                    }
+
                     if let Ok(src) = sm.span_to_snippet(sugg_sp) {
                         let needs_parens = match expr.kind {
                             // parenthesize if needed (Issue #46756)
index ba1a5a0cb03e114312a39f3ca23a70404707d781..bc7474cdfcf3de1023b1b2af40ab46d7a82cb52b 100644 (file)
@@ -459,9 +459,9 @@ fn check_expr_addr_of(
             }
             hir::BorrowKind::Ref => {
                 // Note: at this point, we cannot say what the best lifetime
-                // is to use for resulting pointer.  We want to use the
+                // is to use for resulting pointer. We want to use the
                 // shortest lifetime possible so as to avoid spurious borrowck
-                // errors.  Moreover, the longest lifetime will depend on the
+                // errors. Moreover, the longest lifetime will depend on the
                 // precise details of the value whose address is being taken
                 // (and how long it is valid), which we don't know yet until
                 // type inference is complete.
@@ -687,7 +687,7 @@ fn check_expr_break(
                 }
             } else {
                 // If `ctxt.coerce` is `None`, we can just ignore
-                // the type of the expression.  This is because
+                // the type of the expression. This is because
                 // either this was a break *without* a value, in
                 // which case it is always a legal type (`()`), or
                 // else an error would have been flagged by the
index 7774ffc9b9793ea8ac6a93a0e41b9ca196514a71..c8cda0dc90c6daf39095057859453ec164196609 100644 (file)
@@ -417,7 +417,7 @@ fn maybe_read_scrutinee<'t>(
                                 // Named constants have to be equated with the value
                                 // being matched, so that's a read of the value being matched.
                                 //
-                                // FIXME: We don't actually  reads for ZSTs.
+                                // FIXME: We don't actually reads for ZSTs.
                                 needs_to_be_read = true;
                             }
                             _ => {
index 2cc7b357c0a466afdce888e0f5998f8e532e0771..943dc9b9646fc63131cd4f4394b0b530f7bbd978 100644 (file)
@@ -42,7 +42,7 @@ pub(super) fn type_inference_fallback(&self) {
         // We now see if we can make progress. This might cause us to
         // unify inference variables for opaque types, since we may
         // have unified some other type variables during the first
-        // phase of fallback.  This means that we only replace
+        // phase of fallback. This means that we only replace
         // inference variables with their underlying opaque types as a
         // last resort.
         //
@@ -76,7 +76,7 @@ pub(super) fn type_inference_fallback(&self) {
     //   (and the setting of `#![feature(never_type_fallback)]`).
     //
     // Fallback becomes very dubious if we have encountered
-    // type-checking errors.  In that case, fallback to Error.
+    // type-checking errors. In that case, fallback to Error.
     //
     // Sets `FnCtxt::fallback_has_occurred` if fallback is performed
     // during this call.
@@ -136,7 +136,7 @@ fn fallback_if_possible(
     /// constrained to have some other type).
     ///
     /// However, the fallback used to be `()` (before the `!` type was
-    /// added).  Moreover, there are cases where the `!` type 'leaks
+    /// added). Moreover, there are cases where the `!` type 'leaks
     /// out' from dead code into type variables that affect live
     /// code. The most common case is something like this:
     ///
@@ -149,7 +149,7 @@ fn fallback_if_possible(
     /// ```
     ///
     /// Here, coercing the type `!` into `?M` will create a diverging
-    /// type variable `?X` where `?X <: ?M`.  We also have that `?D <:
+    /// type variable `?X` where `?X <: ?M`. We also have that `?D <:
     /// ?M`. If `?M` winds up unconstrained, then `?X` will
     /// fallback. If it falls back to `!`, then all the type variables
     /// will wind up equal to `!` -- this includes the type `?D`
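The doc comment's own example is elided by this hunk; a runnable reconstruction of that classic pattern (the body below is an assumption based on the surrounding text) shows why the fallback choice is visible to live code:

    fn foo() -> i32 { 4 }

    fn main() {
        // The `Default::default()` arm has inference type `?D`, the `return`
        // arm has type `!`, and the whole match has type `?M`. Nothing else
        // constrains `?M`, so fallback decides it: with `()` fallback we get
        // `?D = ()` and `(): Default` holds, so this compiles; with `!`
        // fallback the code would instead need `!: Default`.
        let _value = match foo() {
            22 => Default::default(),
            _ => return,
        };
    }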
@@ -185,7 +185,7 @@ fn fallback_if_possible(
     ///
     /// The algorithm we use:
     /// * Identify all variables that are coerced *into* by a
-    ///   diverging variable.  Do this by iterating over each
+    ///   diverging variable. Do this by iterating over each
     ///   diverging, unsolved variable and finding all variables
     ///   reachable from there. Call that set `D`.
     /// * Walk over all unsolved, non-diverging variables, and find
@@ -196,8 +196,6 @@ fn calculate_diverging_fallback(
     ) -> FxHashMap<Ty<'tcx>, Ty<'tcx>> {
         debug!("calculate_diverging_fallback({:?})", unsolved_variables);
 
-        let relationships = self.fulfillment_cx.borrow_mut().relationships().clone();
-
         // Construct a coercion graph where an edge `A -> B` indicates
         // a type variable is that is coerced
         let coercion_graph = self.create_coercion_graph();
@@ -281,9 +279,7 @@ fn calculate_diverging_fallback(
             roots_reachable_from_non_diverging,
         );
 
-        debug!("inherited: {:#?}", self.inh.fulfillment_cx.borrow_mut().pending_obligations());
         debug!("obligations: {:#?}", self.fulfillment_cx.borrow_mut().pending_obligations());
-        debug!("relationships: {:#?}", relationships);
 
         // For each diverging variable, figure out whether it can
         // reach a member of N. If so, it falls back to `()`. Else
@@ -297,16 +293,16 @@ fn calculate_diverging_fallback(
                 .depth_first_search(root_vid)
                 .any(|n| roots_reachable_from_non_diverging.visited(n));
 
-            let mut relationship = ty::FoundRelationships { self_in_trait: false, output: false };
+            let mut found_infer_var_info = ty::InferVarInfo { self_in_trait: false, output: false };
 
-            for (vid, rel) in relationships.iter() {
-                if self.root_var(*vid) == root_vid {
-                    relationship.self_in_trait |= rel.self_in_trait;
-                    relationship.output |= rel.output;
+            for (vid, info) in self.inh.infer_var_info.borrow().iter() {
+                if self.infcx.root_var(*vid) == root_vid {
+                    found_infer_var_info.self_in_trait |= info.self_in_trait;
+                    found_infer_var_info.output |= info.output;
                 }
             }
 
-            if relationship.self_in_trait && relationship.output {
+            if found_infer_var_info.self_in_trait && found_infer_var_info.output {
                 // This case falls back to () to ensure that the code pattern in
                 // tests/ui/never_type/fallback-closure-ret.rs continues to
                 // compile when never_type_fallback is enabled.
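A sketch of the pattern that test exercises, reconstructed from the comment (the trait and function names here are assumptions): the diverging closure-return variable is both the `Self` of a trait bound and a closure output, which is exactly the `self_in_trait && output` case checked above.

    trait Bar {}
    impl Bar for () {}

    fn takes<R: Bar>(_: impl Fn() -> R) {}

    fn main() {
        // `|| panic!()` has a diverging return-type variable; it appears as
        // the `Self` type of `R: Bar` and as the closure's output, so this
        // branch forces it to fall back to `()`, keeping `(): Bar` satisfied
        // even when `feature(never_type_fallback)` is enabled.
        takes(|| panic!());
    }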
index b9e13fd20092421554e9d9d3ce9899334f100d74..c9609e69439812477b27d84f3d037abe82167ea0 100644 (file)
@@ -2140,8 +2140,7 @@ fn label_fn_like(
                         // FIXME(compiler-errors): This could be problematic if something has two
                         // fn-like predicates with different args, but callable types really never
                         // do that, so it's OK.
-                        for (predicate, span) in
-                            std::iter::zip(instantiated.predicates, instantiated.spans)
+                        for (predicate, span) in instantiated
                         {
                             if let ty::PredicateKind::Clause(ty::Clause::Trait(pred)) = predicate.kind().skip_binder()
                                 && pred.self_ty().peel_refs() == callee_ty
index 005bd164065d80f612a6075c2f886957796a6bef..4d673ac91472f10f2351b4ba265eaed4cd6b8329 100644 (file)
@@ -11,7 +11,6 @@
     Expr, ExprKind, GenericBound, Node, Path, QPath, Stmt, StmtKind, TyKind, WherePredicate,
 };
 use rustc_hir_analysis::astconv::AstConv;
-use rustc_infer::infer;
 use rustc_infer::traits::{self, StatementAsExpression};
 use rustc_middle::lint::in_external_macro;
 use rustc_middle::ty::{
@@ -23,9 +22,9 @@
 use rustc_span::symbol::{sym, Ident};
 use rustc_span::{Span, Symbol};
 use rustc_trait_selection::infer::InferCtxtExt;
+use rustc_trait_selection::traits::error_reporting::suggestions::TypeErrCtxtExt;
 use rustc_trait_selection::traits::error_reporting::DefIdOrName;
 use rustc_trait_selection::traits::query::evaluate_obligation::InferCtxtExt as _;
-use rustc_trait_selection::traits::NormalizeExt;
 
 impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
     pub(crate) fn body_fn_sig(&self) -> Option<ty::FnSig<'tcx>> {
@@ -94,7 +93,7 @@ pub(crate) fn suggest_fn_call(
         found: Ty<'tcx>,
         can_satisfy: impl FnOnce(Ty<'tcx>) -> bool,
     ) -> bool {
-        let Some((def_id_or_name, output, inputs)) = self.extract_callable_info(expr, found)
+        let Some((def_id_or_name, output, inputs)) = self.extract_callable_info(found)
             else { return false; };
         if can_satisfy(output) {
             let (sugg_call, mut applicability) = match inputs.len() {
@@ -163,99 +162,9 @@ pub(crate) fn suggest_fn_call(
     /// because the callable type must also be well-formed to be called.
     pub(in super::super) fn extract_callable_info(
         &self,
-        expr: &Expr<'_>,
-        found: Ty<'tcx>,
+        ty: Ty<'tcx>,
     ) -> Option<(DefIdOrName, Ty<'tcx>, Vec<Ty<'tcx>>)> {
-        // Autoderef is useful here because sometimes we box callables, etc.
-        let Some((def_id_or_name, output, inputs)) = self.autoderef(expr.span, found).silence_errors().find_map(|(found, _)| {
-            match *found.kind() {
-                ty::FnPtr(fn_sig) =>
-                    Some((DefIdOrName::Name("function pointer"), fn_sig.output(), fn_sig.inputs())),
-                ty::FnDef(def_id, _) => {
-                    let fn_sig = found.fn_sig(self.tcx);
-                    Some((DefIdOrName::DefId(def_id), fn_sig.output(), fn_sig.inputs()))
-                }
-                ty::Closure(def_id, substs) => {
-                    let fn_sig = substs.as_closure().sig();
-                    Some((DefIdOrName::DefId(def_id), fn_sig.output(), fn_sig.inputs().map_bound(|inputs| &inputs[1..])))
-                }
-                ty::Alias(ty::Opaque, ty::AliasTy { def_id, substs, .. }) => {
-                    self.tcx.bound_item_bounds(def_id).subst(self.tcx, substs).iter().find_map(|pred| {
-                        if let ty::PredicateKind::Clause(ty::Clause::Projection(proj)) = pred.kind().skip_binder()
-                        && Some(proj.projection_ty.def_id) == self.tcx.lang_items().fn_once_output()
-                        // args tuple will always be substs[1]
-                        && let ty::Tuple(args) = proj.projection_ty.substs.type_at(1).kind()
-                        {
-                            Some((
-                                DefIdOrName::DefId(def_id),
-                                pred.kind().rebind(proj.term.ty().unwrap()),
-                                pred.kind().rebind(args.as_slice()),
-                            ))
-                        } else {
-                            None
-                        }
-                    })
-                }
-                ty::Dynamic(data, _, ty::Dyn) => {
-                    data.iter().find_map(|pred| {
-                        if let ty::ExistentialPredicate::Projection(proj) = pred.skip_binder()
-                        && Some(proj.def_id) == self.tcx.lang_items().fn_once_output()
-                        // for existential projection, substs are shifted over by 1
-                        && let ty::Tuple(args) = proj.substs.type_at(0).kind()
-                        {
-                            Some((
-                                DefIdOrName::Name("trait object"),
-                                pred.rebind(proj.term.ty().unwrap()),
-                                pred.rebind(args.as_slice()),
-                            ))
-                        } else {
-                            None
-                        }
-                    })
-                }
-                ty::Param(param) => {
-                    let def_id = self.tcx.generics_of(self.body_id.owner).type_param(&param, self.tcx).def_id;
-                    self.tcx.predicates_of(self.body_id.owner).predicates.iter().find_map(|(pred, _)| {
-                        if let ty::PredicateKind::Clause(ty::Clause::Projection(proj)) = pred.kind().skip_binder()
-                        && Some(proj.projection_ty.def_id) == self.tcx.lang_items().fn_once_output()
-                        && proj.projection_ty.self_ty() == found
-                        // args tuple will always be substs[1]
-                        && let ty::Tuple(args) = proj.projection_ty.substs.type_at(1).kind()
-                        {
-                            Some((
-                                DefIdOrName::DefId(def_id),
-                                pred.kind().rebind(proj.term.ty().unwrap()),
-                                pred.kind().rebind(args.as_slice()),
-                            ))
-                        } else {
-                            None
-                        }
-                    })
-                }
-                _ => None,
-            }
-        }) else { return None; };
-
-        let output = self.replace_bound_vars_with_fresh_vars(expr.span, infer::FnCall, output);
-        let inputs = inputs
-            .skip_binder()
-            .iter()
-            .map(|ty| {
-                self.replace_bound_vars_with_fresh_vars(
-                    expr.span,
-                    infer::FnCall,
-                    inputs.rebind(*ty),
-                )
-            })
-            .collect();
-
-        // We don't want to register any extra obligations, which should be
-        // implied by wf, but also because that would possibly result in
-        // erroneous errors later on.
-        let infer::InferOk { value: output, obligations: _ } =
-            self.at(&self.misc(expr.span), self.param_env).normalize(output);
-
-        if output.is_ty_var() { None } else { Some((def_id_or_name, output, inputs)) }
+        self.err_ctxt().extract_callable_info(self.body_id, self.param_env, ty)
     }
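For context, a small illustration of the user-facing suggestion this helper feeds (an assumed example, not part of the commit): when a callable is found where its output type was expected, `suggest_fn_call` can propose adding the call.

    fn three() -> i32 { 3 }

    fn main() {
        // error[E0308]: expected `i32`, found fn item. The extracted callable
        // info (no inputs, output `i32`) lets the compiler suggest `three()`.
        let _x: i32 = three;
    }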
 
     pub fn suggest_two_fn_call(
@@ -267,9 +176,9 @@ pub fn suggest_two_fn_call(
         rhs_ty: Ty<'tcx>,
         can_satisfy: impl FnOnce(Ty<'tcx>, Ty<'tcx>) -> bool,
     ) -> bool {
-        let Some((_, lhs_output_ty, lhs_inputs)) = self.extract_callable_info(lhs_expr, lhs_ty)
+        let Some((_, lhs_output_ty, lhs_inputs)) = self.extract_callable_info(lhs_ty)
             else { return false; };
-        let Some((_, rhs_output_ty, rhs_inputs)) = self.extract_callable_info(rhs_expr, rhs_ty)
+        let Some((_, rhs_output_ty, rhs_inputs)) = self.extract_callable_info(rhs_ty)
             else { return false; };
 
         if can_satisfy(lhs_output_ty, rhs_output_ty) {
@@ -452,7 +361,7 @@ pub fn suggest_deref_ref_or_into(
                             && method_call_list.contains(&conversion_method.name)
                             // If receiver is `.clone()` and found type has one of those methods,
                             // we guess that the user wants to convert from a slice type (`&[]` or `&str`)
-                            // to an owned type (`Vec` or `String`).  These conversions clone internally,
+                            // to an owned type (`Vec` or `String`). These conversions clone internally,
                             // so we remove the user's `clone` call.
                         {
                             vec![(
@@ -649,7 +558,7 @@ pub(in super::super) fn suggest_calling_boxed_future_when_appropriate(
                 }
             }
             ty::Adt(def, _) if def.is_box() && self.can_coerce(box_found, expected) => {
-                // Check if the parent expression is a call to Pin::new.  If it
+                // Check if the parent expression is a call to Pin::new. If it
                 // is and we were expecting a Box, ergo Pin<Box<expected>>, we
                 // can suggest Box::pin.
                 let parent = self.tcx.hir().parent_id(expr.hir_id);
index 16806fdba4fbc2113c766be2ed489c032b5f1fda..b3dd3031db2a98d75780d00288364133be6041f4 100644 (file)
@@ -304,8 +304,8 @@ fn visit_expr(&mut self, expr: &'tcx Expr<'tcx>) {
         let mut reinit = None;
         match expr.kind {
             ExprKind::Assign(lhs, rhs, _) => {
-                self.visit_expr(lhs);
                 self.visit_expr(rhs);
+                self.visit_expr(lhs);
 
                 reinit = Some(lhs);
             }
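The swap above mirrors Rust's evaluation order for `lhs = rhs`, where the right-hand side runs first. A hedged sketch (assumed example) of why that order matters for drop-range tracking across a suspension point:

    async fn example() {
        let mut slot = String::from("old");
        // In `slot = rhs`, the rhs -- including its await -- is evaluated
        // before the assignment drops the old `String`, so the old value is
        // still live (and usable) across the suspension point. Visiting rhs
        // before lhs records the drop ranges in that same order.
        slot = { async {}.await; format!("{slot}-new") };
        println!("{slot}");
    }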
@@ -433,7 +433,7 @@ fn visit_expr(&mut self, expr: &'tcx Expr<'tcx>) {
                     self.drop_ranges.add_control_edge(self.expr_index, *target)
                 }),
 
-            ExprKind::Break(destination, ..) => {
+            ExprKind::Break(destination, value) => {
                 // destination either points to an expression or to a block. We use
                 // find_target_expression_from_destination to use the last expression of the block
                 // if destination points to a block.
@@ -443,7 +443,11 @@ fn visit_expr(&mut self, expr: &'tcx Expr<'tcx>) {
                 // will refer to the end of the block due to the post order traversal.
                 self.find_target_expression_from_destination(destination).map_or((), |target| {
                     self.drop_ranges.add_control_edge_hir_id(self.expr_index, target)
-                })
+                });
+
+                if let Some(value) = value {
+                    self.visit_expr(value);
+                }
             }
 
             ExprKind::Call(f, args) => {
@@ -465,6 +469,12 @@ fn visit_expr(&mut self, expr: &'tcx Expr<'tcx>) {
 
             ExprKind::AddrOf(..)
             | ExprKind::Array(..)
+            // FIXME(eholk): We probably need special handling for AssignOps. The ScopeTree builder
+            // in region.rs runs both lhs then rhs and rhs then lhs and then sets all yields to be
+            // the latest they show up in either traversal. With the older scope-based
+            // approximation, this was fine, but it's probably not right now. What we probably want
+            // to do instead is still run both orders, but consider anything that showed up as a
+            // yield in either order.
             | ExprKind::AssignOp(..)
             | ExprKind::Binary(..)
             | ExprKind::Block(..)
@@ -502,6 +512,9 @@ fn visit_pat(&mut self, pat: &'tcx hir::Pat<'tcx>) {
 
         // Increment expr_count here to match what InteriorVisitor expects.
         self.expr_index = self.expr_index + 1;
+
+        // Save a node mapping to get better CFG visualization
+        self.drop_ranges.add_node_mapping(pat.hir_id, self.expr_index);
     }
 }
 
@@ -521,7 +534,7 @@ fn new(
                 }
             });
         }
-        debug!("hir_id_map: {:?}", tracked_value_map);
+        debug!("hir_id_map: {:#?}", tracked_value_map);
         let num_values = tracked_value_map.len();
         Self {
             tracked_value_map,
index c0a0bfe8e1c00a98cb53dd41b5d1e6e9bb7ba82c..e8d31be79d9c9fccff6d6722db6cadf044cbb7a8 100644 (file)
@@ -2,6 +2,7 @@
 //! flow graph when needed for debugging.
 
 use rustc_graphviz as dot;
+use rustc_hir::{Expr, ExprKind, Node};
 use rustc_middle::ty::TyCtxt;
 
 use super::{DropRangesBuilder, PostOrderId};
@@ -80,10 +81,14 @@ fn node_label(&'a self, n: &Self::Node) -> dot::LabelText<'a> {
                     .post_order_map
                     .iter()
                     .find(|(_hir_id, &post_order_id)| post_order_id == *n)
-                    .map_or("<unknown>".into(), |(hir_id, _)| self
-                        .tcx
-                        .hir()
-                        .node_to_string(*hir_id))
+                    .map_or("<unknown>".into(), |(hir_id, _)| format!(
+                        "{}{}",
+                        self.tcx.hir().node_to_string(*hir_id),
+                        match self.tcx.hir().find(*hir_id) {
+                            Some(Node::Expr(Expr { kind: ExprKind::Yield(..), .. })) => " (yield)",
+                            _ => "",
+                        }
+                    ))
             )
             .into(),
         )
index 472205be7b5e3e81a96f1438b97d6865a691786c..ed3d890315704cefd1f2932e62b3f3e30a6c47e8 100644 (file)
@@ -116,7 +116,7 @@ fn borrow_place(&mut self, place_with_id: &expr_use_visitor::PlaceWithHirId<'tcx
         // where the `identity(...)` (the rvalue) produces a return type
         // of `&'rv mut A`, where `'a: 'rv`. We then assign this result to
         // `'y`, resulting in (transitively) `'a: 'y` (i.e., while `y` is in use,
-        // `a` will be considered borrowed).  Other parts of the code will ensure
+        // `a` will be considered borrowed). Other parts of the code will ensure
         // that if `y` is live over a yield, `&'y mut A` appears in the generator
         // state. If `'y` is live, then any sound region analysis must conclude
         // that `'a` is also live. So if this causes a bug, blame some other
index 7990d95310be59489989d8d4753c74b39aaa7eb1..7af5260538568c0d873ad2767cdbb903e5a6517e 100644 (file)
@@ -71,10 +71,8 @@ fn record(
                                 yield_data.expr_and_pat_count, self.expr_count, source_span
                             );
 
-                            if self.fcx.sess().opts.unstable_opts.drop_tracking
-                                && self
-                                    .drop_ranges
-                                    .is_dropped_at(hir_id, yield_data.expr_and_pat_count)
+                            if self
+                                .is_dropped_at_yield_location(hir_id, yield_data.expr_and_pat_count)
                             {
                                 debug!("value is dropped at yield point; not recording");
                                 return false;
@@ -173,6 +171,18 @@ fn record(
             }
         }
     }
+
+    /// If drop tracking is enabled, consult drop_ranges to see if a value is
+    /// known to be dropped at a yield point and therefore can be omitted from
+    /// the generator witness.
+    fn is_dropped_at_yield_location(&self, value_hir_id: HirId, yield_location: usize) -> bool {
+        // short-circuit if drop tracking is not enabled.
+        if !self.fcx.sess().opts.unstable_opts.drop_tracking {
+            return false;
+        }
+
+        self.drop_ranges.is_dropped_at(value_hir_id, yield_location)
+    }
 }
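A small illustration of what the helper consults drop tracking for (assumed example): a value that is already dead at the yield point does not need to be part of the generator witness.

    async fn example() {
        let big = vec![0u8; 4096];
        drop(big);        // `big` is dropped before the suspension point...
        async {}.await;   // ...so with `-Zdrop-tracking` it can be omitted
                          // from the generator witness, shrinking the future.
    }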
 
 pub fn resolve_interior<'a, 'tcx>(
index b33e7b8d68cf927251a4e85a9c57a672fcabf9b5..ba34f299453ecff0473190c130ccf5643542d351 100644 (file)
@@ -1,6 +1,6 @@
 use super::callee::DeferredCallResolution;
 
-use rustc_data_structures::fx::FxHashSet;
+use rustc_data_structures::fx::{FxHashMap, FxHashSet};
 use rustc_hir as hir;
 use rustc_hir::def_id::LocalDefId;
 use rustc_hir::HirIdMap;
@@ -10,7 +10,8 @@
 use rustc_middle::ty::{self, Ty, TyCtxt};
 use rustc_span::def_id::LocalDefIdMap;
 use rustc_span::{self, Span};
-use rustc_trait_selection::traits::{self, TraitEngine, TraitEngineExt as _};
+use rustc_trait_selection::traits::query::evaluate_obligation::InferCtxtExt;
+use rustc_trait_selection::traits::{self, PredicateObligation, TraitEngine, TraitEngineExt as _};
 
 use std::cell::RefCell;
 use std::ops::Deref;
@@ -63,6 +64,8 @@ pub struct Inherited<'tcx> {
     /// we record that type variable here. This is later used to inform
     /// fallback. See the `fallback` module for details.
     pub(super) diverging_type_vars: RefCell<FxHashSet<Ty<'tcx>>>,
+
+    pub(super) infer_var_info: RefCell<FxHashMap<ty::TyVid, ty::InferVarInfo>>,
 }
 
 impl<'tcx> Deref for Inherited<'tcx> {
@@ -128,6 +131,7 @@ fn new(
             deferred_generator_interiors: RefCell::new(Vec::new()),
             diverging_type_vars: RefCell::new(Default::default()),
             body_id,
+            infer_var_info: RefCell::new(Default::default()),
         }
     }
 
@@ -136,6 +140,9 @@ pub(super) fn register_predicate(&self, obligation: traits::PredicateObligation<
         if obligation.has_escaping_bound_vars() {
             span_bug!(obligation.cause.span, "escaping bound vars in predicate {:?}", obligation);
         }
+
+        self.update_infer_var_info(&obligation);
+
         self.fulfillment_cx.borrow_mut().register_predicate_obligation(self, obligation);
     }
 
@@ -152,4 +159,43 @@ pub(super) fn register_infer_ok_obligations<T>(&self, infer_ok: InferOk<'tcx, T>
         self.register_predicates(infer_ok.obligations);
         infer_ok.value
     }
+
+    pub fn update_infer_var_info(&self, obligation: &PredicateObligation<'tcx>) {
+        let infer_var_info = &mut self.infer_var_info.borrow_mut();
+
+        // (*) binder skipped
+        if let ty::PredicateKind::Clause(ty::Clause::Trait(tpred)) = obligation.predicate.kind().skip_binder()
+            && let Some(ty) = self.shallow_resolve(tpred.self_ty()).ty_vid().map(|t| self.root_var(t))
+            && self.tcx.lang_items().sized_trait().map_or(false, |st| st != tpred.trait_ref.def_id)
+        {
+            let new_self_ty = self.tcx.types.unit;
+
+            // Then construct a new obligation with Self = () added
+            // to the ParamEnv, and see if it holds.
+            let o = obligation.with(self.tcx,
+                obligation
+                    .predicate
+                    .kind()
+                    .rebind(
+                        // (*) binder moved here
+                        ty::PredicateKind::Clause(ty::Clause::Trait(tpred.with_self_ty(self.tcx, new_self_ty)))
+                    ),
+            );
+            // Don't report overflow errors. Otherwise equivalent to may_hold.
+            if let Ok(result) = self.probe(|_| self.evaluate_obligation(&o)) && result.may_apply() {
+                infer_var_info.entry(ty).or_default().self_in_trait = true;
+            }
+        }
+
+        if let ty::PredicateKind::Clause(ty::Clause::Projection(predicate)) =
+            obligation.predicate.kind().skip_binder()
+        {
+            // If the projection predicate (Foo::Bar == X) has X as a non-TyVid,
+            // we need to make it into one.
+            if let Some(vid) = predicate.term.ty().and_then(|ty| ty.ty_vid()) {
+                debug!("infer_var_info: {:?}.output = true", vid);
+                infer_var_info.entry(vid).or_default().output = true;
+            }
+        }
+    }
 }
index c2dc14024655aeba39308b9efad08977356f5580..3c873024c924f3837c473312fb3c0395be8eeaa1 100644 (file)
@@ -105,6 +105,16 @@ pub fn check_transmute(&self, from: Ty<'tcx>, to: Ty<'tcx>, hir_id: HirId) {
         } else {
             err.note(&format!("source type: `{}` ({})", from, skeleton_string(from, sk_from)))
                 .note(&format!("target type: `{}` ({})", to, skeleton_string(to, sk_to)));
+            let mut should_delay_as_bug = false;
+            if let Err(LayoutError::Unknown(bad_from)) = sk_from && bad_from.references_error() {
+                should_delay_as_bug = true;
+            }
+            if let Err(LayoutError::Unknown(bad_to)) = sk_to && bad_to.references_error() {
+                should_delay_as_bug = true;
+            }
+            if should_delay_as_bug {
+                err.delay_as_bug();
+            }
         }
         err.emit();
     }
index 0b5dc946c1deffa589f20f036542cfacc7edd6a8..48c75cde9a5fc544948e231ea4077a86ad698f9d 100644 (file)
@@ -736,7 +736,7 @@ fn cat_pattern_<F>(
             }
 
             PatKind::Box(ref subpat) | PatKind::Ref(ref subpat, _) => {
-                // box p1, &p1, &mut p1.  we can ignore the mutability of
+                // box p1, &p1, &mut p1. we can ignore the mutability of
                 // PatKind::Ref since that information is already contained
                 // in the type.
                 let subplace = self.cat_deref(pat, place_with_id)?;
index 4a33a791e1b7f3b12db9c6e54ca541ab93b5c0fa..372ea30ebd08e4877df583a8672b7b94df1bf2ae 100644 (file)
@@ -19,7 +19,6 @@
 use rustc_span::{Span, DUMMY_SP};
 use rustc_trait_selection::traits;
 
-use std::iter;
 use std::ops::Deref;
 
 struct ConfirmContext<'a, 'tcx> {
@@ -101,7 +100,7 @@ fn confirm(
         let filler_substs = rcvr_substs
             .extend_to(self.tcx, pick.item.def_id, |def, _| self.tcx.mk_param_from_def(def));
         let illegal_sized_bound = self.predicates_require_illegal_sized_bound(
-            &self.tcx.predicates_of(pick.item.def_id).instantiate(self.tcx, filler_substs),
+            self.tcx.predicates_of(pick.item.def_id).instantiate(self.tcx, filler_substs),
         );
 
         // Unify the (adjusted) self type with what the method expects.
@@ -565,7 +564,7 @@ fn add_obligations(
 
     fn predicates_require_illegal_sized_bound(
         &self,
-        predicates: &ty::InstantiatedPredicates<'tcx>,
+        predicates: ty::InstantiatedPredicates<'tcx>,
     ) -> Option<Span> {
         let sized_def_id = self.tcx.lang_items().sized_trait()?;
 
@@ -575,10 +574,11 @@ fn predicates_require_illegal_sized_bound(
                 ty::PredicateKind::Clause(ty::Clause::Trait(trait_pred))
                     if trait_pred.def_id() == sized_def_id =>
                 {
-                    let span = iter::zip(&predicates.predicates, &predicates.spans)
+                    let span = predicates
+                        .iter()
                         .find_map(
                             |(p, span)| {
-                                if *p == obligation.predicate { Some(*span) } else { None }
+                                if p == obligation.predicate { Some(span) } else { None }
                             },
                         )
                         .unwrap_or(rustc_span::DUMMY_SP);
index 146d5e60c2f388c2bfd7257c66af6b91414ed12f..47396204b14e7c50314d32a35ef9ecbd8bc7bb01 100644 (file)
@@ -76,7 +76,7 @@ pub struct NoMatchData<'tcx> {
     pub unsatisfied_predicates:
         Vec<(ty::Predicate<'tcx>, Option<ty::Predicate<'tcx>>, Option<ObligationCause<'tcx>>)>,
     pub out_of_scope_traits: Vec<DefId>,
-    pub lev_candidate: Option<ty::AssocItem>,
+    pub similar_candidate: Option<ty::AssocItem>,
     pub mode: probe::Mode,
 }
 
@@ -413,7 +413,7 @@ fn construct_obligation_for_trait(
 
         // Register obligations for the parameters. This will include the
         // `Self` parameter, which in turn has a bound of the main trait,
-        // so this also effectively registers `obligation` as well.  (We
+        // so this also effectively registers `obligation` as well. (We
         // used to register `obligation` explicitly, but that resulted in
         // double error messages being reported.)
         //
index 15f6e11717768cfd11ec078f0d620cc451c20897..9c06a22315bcba12189377e2770e9884588d5e5f 100644 (file)
@@ -232,7 +232,7 @@ pub enum PickKind<'tcx> {
 pub enum Mode {
     // An expression of the form `receiver.method_name(...)`.
     // Autoderefs are performed on `receiver`, lookup is done based on the
-    // `self` argument  of the method, and static methods aren't considered.
+    // `self` argument of the method, and static methods aren't considered.
     MethodCall,
     // An expression of the form `Type::item` or `<T>::item`.
     // No autoderefs are performed, lookup is done based on the type each
@@ -461,7 +461,7 @@ fn probe_op<OP, R>(
                     static_candidates: Vec::new(),
                     unsatisfied_predicates: Vec::new(),
                     out_of_scope_traits: Vec::new(),
-                    lev_candidate: None,
+                    similar_candidate: None,
                     mode,
                 }));
             }
@@ -486,7 +486,7 @@ fn probe_op<OP, R>(
             probe_cx.assemble_inherent_candidates();
             match scope {
                 ProbeScope::TraitsInScope => {
-                    probe_cx.assemble_extension_candidates_for_traits_in_scope(scope_expr_id)
+                    probe_cx.assemble_extension_candidates_for_traits_in_scope()
                 }
                 ProbeScope::AllTraits => probe_cx.assemble_extension_candidates_for_all_traits(),
             };
@@ -889,9 +889,9 @@ fn elaborate_bounds<F>(
         }
     }
 
-    fn assemble_extension_candidates_for_traits_in_scope(&mut self, expr_hir_id: hir::HirId) {
+    fn assemble_extension_candidates_for_traits_in_scope(&mut self) {
         let mut duplicates = FxHashSet::default();
-        let opt_applicable_traits = self.tcx.in_scope_traits(expr_hir_id);
+        let opt_applicable_traits = self.tcx.in_scope_traits(self.scope_expr_id);
         if let Some(applicable_traits) = opt_applicable_traits {
             for trait_candidate in applicable_traits.iter() {
                 let trait_did = trait_candidate.def_id;
@@ -1076,13 +1076,13 @@ fn pick(mut self) -> PickResult<'tcx> {
         if let Some((kind, def_id)) = private_candidate {
             return Err(MethodError::PrivateMatch(kind, def_id, out_of_scope_traits));
         }
-        let lev_candidate = self.probe_for_lev_candidate()?;
+        let similar_candidate = self.probe_for_similar_candidate()?;
 
         Err(MethodError::NoMatch(NoMatchData {
             static_candidates,
             unsatisfied_predicates,
             out_of_scope_traits,
-            lev_candidate,
+            similar_candidate,
             mode: self.mode,
         }))
     }
@@ -1787,7 +1787,7 @@ fn collapse_candidates_to_trait_pick(
     /// Similarly to `probe_for_return_type`, this method attempts to find the best matching
     /// candidate method where the method name may have been misspelled. Similarly to other
     /// Levenshtein based suggestions, we provide at most one such suggestion.
-    fn probe_for_lev_candidate(&mut self) -> Result<Option<ty::AssocItem>, MethodError<'tcx>> {
+    fn probe_for_similar_candidate(&mut self) -> Result<Option<ty::AssocItem>, MethodError<'tcx>> {
         debug!("probing for method names similar to {:?}", self.method_name);
 
         let steps = self.steps.clone();
@@ -1831,6 +1831,12 @@ fn probe_for_lev_candidate(&mut self) -> Result<Option<ty::AssocItem>, MethodErr
                         None,
                     )
                 }
+                .or_else(|| {
+                    applicable_close_candidates
+                        .iter()
+                        .find(|cand| self.matches_by_doc_alias(cand.def_id))
+                        .map(|cand| cand.name)
+                })
                 .unwrap();
                 Ok(applicable_close_candidates.into_iter().find(|method| method.name == best_name))
             }
@@ -1981,6 +1987,38 @@ fn is_relevant_kind_for_mode(&self, kind: ty::AssocKind) -> bool {
         }
     }
 
+    /// Determine if the associated item with the given DefId matches
+    /// the desired name via a doc alias.
+    fn matches_by_doc_alias(&self, def_id: DefId) -> bool {
+        let Some(name) = self.method_name else { return false; };
+        let Some(local_def_id) = def_id.as_local() else { return false; };
+        let hir_id = self.fcx.tcx.hir().local_def_id_to_hir_id(local_def_id);
+        let attrs = self.fcx.tcx.hir().attrs(hir_id);
+        for attr in attrs {
+            let sym::doc = attr.name_or_empty() else { continue; };
+            let Some(values) = attr.meta_item_list() else { continue; };
+            for v in values {
+                if v.name_or_empty() != sym::alias {
+                    continue;
+                }
+                if let Some(nested) = v.meta_item_list() {
+                    // #[doc(alias("foo", "bar"))]
+                    for n in nested {
+                        if let Some(lit) = n.lit() && name.as_str() == lit.symbol.as_str() {
+                            return true;
+                        }
+                    }
+                } else if let Some(meta) = v.meta_item()
+                    && let Some(lit) = meta.name_value_literal()
+                    && name.as_str() == lit.symbol.as_str() {
+                        // #[doc(alias = "foo")]
+                        return true;
+                }
+            }
+        }
+        false
+    }
+
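A user-level sketch of what the new doc-alias matching enables (assumed example; the exact diagnostic wording may differ): a call through an alias can now surface the aliased item as a "similar name" candidate, in both attribute forms handled above.

    struct Set;

    impl Set {
        #[doc(alias = "push")]          // `#[doc(alias = "...")]` form
        #[doc(alias("append", "add"))]  // `#[doc(alias(...))]` list form
        fn insert(&self) {}
    }

    fn main() {
        let s = Set;
        // error: no method named `push` found for struct `Set`; with this
        // change the probe can suggest `insert` via its doc alias.
        s.push();
    }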
     /// Finds the method with the appropriate name (or return type, as the case may be). If
     /// `allow_similar_names` is set, find methods with close-matching names.
     // The length of the returned iterator is nearly always 0 or 1 and this
@@ -1996,6 +2034,9 @@ fn is_relevant_kind_for_mode(&self, kind: ty::AssocKind) -> bool {
                         if !self.is_relevant_kind_for_mode(x.kind) {
                             return false;
                         }
+                        if self.matches_by_doc_alias(x.def_id) {
+                            return true;
+                        }
                         match lev_distance_with_substrings(name.as_str(), x.name.as_str(), max_dist)
                         {
                             Some(d) => d > 0,
index 3c4eef781634fefdc339c36c432cf7ae3c3b6ae4..8c54e9bdb5fb3a3ae7670f01c5d8e3b66c346b98 100644 (file)
@@ -262,7 +262,7 @@ pub fn report_no_match_method_error(
         let ty_str = with_forced_trimmed_paths!(self.ty_to_string(rcvr_ty));
         let is_method = mode == Mode::MethodCall;
         let unsatisfied_predicates = &no_match_data.unsatisfied_predicates;
-        let lev_candidate = no_match_data.lev_candidate;
+        let similar_candidate = no_match_data.similar_candidate;
         let item_kind = if is_method {
             "method"
         } else if rcvr_ty.is_enum() {
@@ -937,7 +937,7 @@ trait bound{s}",
         // give a helping note that it has to be called as `(x.f)(...)`.
         if let SelfSource::MethodCall(expr) = source {
             if !self.suggest_calling_field_as_fn(span, rcvr_ty, expr, item_name, &mut err)
-                && lev_candidate.is_none()
+                && similar_candidate.is_none()
                 && !custom_span_label
             {
                 label_span_not_found(&mut err);
@@ -1015,20 +1015,20 @@ trait bound{s}",
             if fallback_span {
                 err.span_label(span, msg);
             }
-        } else if let Some(lev_candidate) = lev_candidate {
+        } else if let Some(similar_candidate) = similar_candidate {
             // Don't emit a suggestion if we found an actual method
             // that had unsatisfied trait bounds
             if unsatisfied_predicates.is_empty() {
-                let def_kind = lev_candidate.kind.as_def_kind();
+                let def_kind = similar_candidate.kind.as_def_kind();
                 // Methods are defined within the context of a struct and their first parameter is always self,
                 // which represents the instance of the struct the method is being called on
                 // Associated functions don’t take self as a parameter and
                 // they are not methods because they don’t have an instance of the struct to work with.
-                if def_kind == DefKind::AssocFn && lev_candidate.fn_has_self_parameter {
+                if def_kind == DefKind::AssocFn && similar_candidate.fn_has_self_parameter {
                     err.span_suggestion(
                         span,
                         "there is a method with a similar name",
-                        lev_candidate.name,
+                        similar_candidate.name,
                         Applicability::MaybeIncorrect,
                     );
                 } else {
@@ -1037,9 +1037,9 @@ trait bound{s}",
                         &format!(
                             "there is {} {} with a similar name",
                             def_kind.article(),
-                            def_kind.descr(lev_candidate.def_id),
+                            def_kind.descr(similar_candidate.def_id),
                         ),
-                        lev_candidate.name,
+                        similar_candidate.name,
                         Applicability::MaybeIncorrect,
                     );
                 }
@@ -2700,8 +2700,10 @@ pub(crate) fn suggest_else_fn_with_closure(
         found: Ty<'tcx>,
         expected: Ty<'tcx>,
     ) -> bool {
-        let Some((_def_id_or_name, output, _inputs)) = self.extract_callable_info(expr, found)
-        else { return false; };
+        let Some((_def_id_or_name, output, _inputs)) =
+            self.extract_callable_info(found) else {
+                return false;
+        };
 
         if !self.can_coerce(output, expected) {
             return false;
index 8c24b6006444a8dc26d21d57b009768b05a0ad82..250f4cd3f65fbf4c857e3b3366fa56148f699f54 100644 (file)
@@ -448,8 +448,11 @@ fn visit_closures(&mut self) {
         assert_eq!(fcx_typeck_results.hir_owner, self.typeck_results.hir_owner);
         let common_hir_owner = fcx_typeck_results.hir_owner;
 
-        for (id, origin) in fcx_typeck_results.closure_kind_origins().iter() {
-            let hir_id = hir::HirId { owner: common_hir_owner, local_id: *id };
+        let fcx_closure_kind_origins =
+            fcx_typeck_results.closure_kind_origins().items_in_stable_order();
+
+        for (local_id, origin) in fcx_closure_kind_origins {
+            let hir_id = hir::HirId { owner: common_hir_owner, local_id };
             let place_span = origin.0;
             let place = self.resolve(origin.1.clone(), &place_span);
             self.typeck_results.closure_kind_origins_mut().insert(hir_id, (place_span, place));
@@ -458,11 +461,12 @@ fn visit_closures(&mut self) {
 
     fn visit_coercion_casts(&mut self) {
         let fcx_typeck_results = self.fcx.typeck_results.borrow();
-        let fcx_coercion_casts = fcx_typeck_results.coercion_casts();
+
         assert_eq!(fcx_typeck_results.hir_owner, self.typeck_results.hir_owner);
 
+        let fcx_coercion_casts = fcx_typeck_results.coercion_casts().to_sorted_stable_ord();
         for local_id in fcx_coercion_casts {
-            self.typeck_results.set_coercion_cast(*local_id);
+            self.typeck_results.set_coercion_cast(local_id);
         }
     }
 
@@ -471,22 +475,15 @@ fn visit_user_provided_tys(&mut self) {
         assert_eq!(fcx_typeck_results.hir_owner, self.typeck_results.hir_owner);
         let common_hir_owner = fcx_typeck_results.hir_owner;
 
-        let mut errors_buffer = Vec::new();
-        for (&local_id, c_ty) in fcx_typeck_results.user_provided_types().iter() {
-            let hir_id = hir::HirId { owner: common_hir_owner, local_id };
-
-            if cfg!(debug_assertions) && c_ty.needs_infer() {
-                span_bug!(
-                    hir_id.to_span(self.fcx.tcx),
-                    "writeback: `{:?}` has inference variables",
-                    c_ty
-                );
-            };
+        if self.rustc_dump_user_substs {
+            let sorted_user_provided_types =
+                fcx_typeck_results.user_provided_types().items_in_stable_order();
 
-            self.typeck_results.user_provided_types_mut().insert(hir_id, *c_ty);
+            let mut errors_buffer = Vec::new();
+            for (local_id, c_ty) in sorted_user_provided_types {
+                let hir_id = hir::HirId { owner: common_hir_owner, local_id };
 
-            if let ty::UserType::TypeOf(_, user_substs) = c_ty.value {
-                if self.rustc_dump_user_substs {
+                if let ty::UserType::TypeOf(_, user_substs) = c_ty.value {
                     // This is a unit-testing mechanism.
                     let span = self.tcx().hir().span(hir_id);
                     // We need to buffer the errors in order to guarantee a consistent
@@ -498,31 +495,49 @@ fn visit_user_provided_tys(&mut self) {
                     err.buffer(&mut errors_buffer);
                 }
             }
-        }
 
-        if !errors_buffer.is_empty() {
-            errors_buffer.sort_by_key(|diag| diag.span.primary_span());
-            for mut diag in errors_buffer {
-                self.tcx().sess.diagnostic().emit_diagnostic(&mut diag);
+            if !errors_buffer.is_empty() {
+                errors_buffer.sort_by_key(|diag| diag.span.primary_span());
+                for mut diag in errors_buffer {
+                    self.tcx().sess.diagnostic().emit_diagnostic(&mut diag);
+                }
             }
         }
+
+        self.typeck_results.user_provided_types_mut().extend(
+            fcx_typeck_results.user_provided_types().items().map(|(local_id, c_ty)| {
+                let hir_id = hir::HirId { owner: common_hir_owner, local_id };
+
+                if cfg!(debug_assertions) && c_ty.needs_infer() {
+                    span_bug!(
+                        hir_id.to_span(self.fcx.tcx),
+                        "writeback: `{:?}` has inference variables",
+                        c_ty
+                    );
+                };
+
+                (hir_id, *c_ty)
+            }),
+        );
     }
 
     fn visit_user_provided_sigs(&mut self) {
         let fcx_typeck_results = self.fcx.typeck_results.borrow();
         assert_eq!(fcx_typeck_results.hir_owner, self.typeck_results.hir_owner);
 
-        for (&def_id, c_sig) in fcx_typeck_results.user_provided_sigs.iter() {
-            if cfg!(debug_assertions) && c_sig.needs_infer() {
-                span_bug!(
-                    self.fcx.tcx.def_span(def_id),
-                    "writeback: `{:?}` has inference variables",
-                    c_sig
-                );
-            };
-
-            self.typeck_results.user_provided_sigs.insert(def_id, *c_sig);
-        }
+        self.typeck_results.user_provided_sigs.extend(
+            fcx_typeck_results.user_provided_sigs.items().map(|(&def_id, c_sig)| {
+                if cfg!(debug_assertions) && c_sig.needs_infer() {
+                    span_bug!(
+                        self.fcx.tcx.def_span(def_id),
+                        "writeback: `{:?}` has inference variables",
+                        c_sig
+                    );
+                };
+
+                (def_id, *c_sig)
+            }),
+        );
     }
 
     fn visit_generator_interior_types(&mut self) {
@@ -564,7 +579,6 @@ fn visit_ty(&mut self, t: Ty<'tcx>) -> ControlFlow<Self::BreakTy> {
                 opaque_type_key,
                 self.fcx.infcx.tcx,
                 true,
-                decl.origin,
             );
 
             self.typeck_results.concrete_opaque_types.insert(opaque_type_key.def_id, hidden_type);
@@ -642,7 +656,9 @@ fn visit_liberated_fn_sigs(&mut self) {
         assert_eq!(fcx_typeck_results.hir_owner, self.typeck_results.hir_owner);
         let common_hir_owner = fcx_typeck_results.hir_owner;
 
-        for (&local_id, &fn_sig) in fcx_typeck_results.liberated_fn_sigs().iter() {
+        let fcx_liberated_fn_sigs = fcx_typeck_results.liberated_fn_sigs().items_in_stable_order();
+
+        for (local_id, &fn_sig) in fcx_liberated_fn_sigs {
             let hir_id = hir::HirId { owner: common_hir_owner, local_id };
             let fn_sig = self.resolve(fn_sig, &hir_id);
             self.typeck_results.liberated_fn_sigs_mut().insert(hir_id, fn_sig);
@@ -654,7 +670,9 @@ fn visit_fru_field_types(&mut self) {
         assert_eq!(fcx_typeck_results.hir_owner, self.typeck_results.hir_owner);
         let common_hir_owner = fcx_typeck_results.hir_owner;
 
-        for (&local_id, ftys) in fcx_typeck_results.fru_field_types().iter() {
+        let fcx_fru_field_types = fcx_typeck_results.fru_field_types().items_in_stable_order();
+
+        for (local_id, ftys) in fcx_fru_field_types {
             let hir_id = hir::HirId { owner: common_hir_owner, local_id };
             let ftys = self.resolve(ftys.clone(), &hir_id);
             self.typeck_results.fru_field_types_mut().insert(hir_id, ftys);
index 67b4d6d6959f602c042889c168d0047adf2fbd5b..33a9a0cabb9d5067266f1d9025e4d04bc8cf324a 100644 (file)
@@ -368,7 +368,7 @@ fn walk_between<'q>(
 ) -> FxHashSet<DepKind> {
     // This is a bit tricky. We want to include a node only if it is:
     // (a) reachable from a source and (b) will reach a target. And we
-    // have to be careful about cycles etc.  Luckily efficiency is not
+    // have to be careful about cycles etc. Luckily efficiency is not
     // a big concern!
 
     #[derive(Copy, Clone, PartialEq)]
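A generic sketch of the filter the comment describes (not rustc's implementation, just an assumed standalone version): a node is kept iff it is forward-reachable from a source and backward-reachable from a target, which copes with cycles for free.

    use std::collections::HashSet;

    /// All nodes reachable from `starts` in the graph given by adjacency lists.
    fn reachable(adj: &[Vec<usize>], starts: &[usize]) -> HashSet<usize> {
        let mut seen: HashSet<usize> = starts.iter().copied().collect();
        let mut stack: Vec<usize> = starts.to_vec();
        while let Some(n) = stack.pop() {
            for &m in &adj[n] {
                if seen.insert(m) {
                    stack.push(m);
                }
            }
        }
        seen
    }

    fn main() {
        // Edges: 0 -> 1, 1 -> 2, 2 -> 1 (a cycle), 2 -> 3.
        let adj = vec![vec![1], vec![2], vec![1, 3], vec![]];
        let radj = vec![vec![], vec![0, 2], vec![1], vec![2]]; // reversed edges
        let forward = reachable(&adj, &[0]);   // reachable from the source
        let backward = reachable(&radj, &[3]); // reaches the target
        let between: HashSet<usize> = forward.intersection(&backward).copied().collect();
        let expected: HashSet<usize> = [0, 1, 2, 3].into_iter().collect();
        assert_eq!(between, expected);
    }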
index d1d328128bc15a145f7b60835cdfe62f7733beae..ed7b272b13d178087c969aa3bda7f25da0bdec9a 100644 (file)
@@ -1,4 +1,4 @@
-//! Debugging code to test fingerprints computed for query results.  For each node marked with
+//! Debugging code to test fingerprints computed for query results. For each node marked with
 //! `#[rustc_clean]` we will compare the fingerprint from the current and from the previous
 //! compilation session as appropriate:
 //!
index c18a911b2fbcd4db06dfe417f9efbfee8579a4a2..68cdc6d7711d4eb840bb49ea3faef8a6c0f121f5 100644 (file)
@@ -207,7 +207,12 @@ pub fn drain_enumerated<'a, R: RangeBounds<usize>>(
         &'a mut self,
         range: R,
     ) -> impl Iterator<Item = (I, T)> + 'a {
-        self.raw.drain(range).enumerate().map(|(n, t)| (I::new(n), t))
+        let begin = match range.start_bound() {
+            std::ops::Bound::Included(i) => *i,
+            std::ops::Bound::Excluded(i) => i.checked_add(1).unwrap(),
+            std::ops::Bound::Unbounded => 0,
+        };
+        self.raw.drain(range).enumerate().map(move |(n, t)| (I::new(begin + n), t))
     }
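A plain-`Vec` illustration of the off-by-start issue this change fixes (using `Vec::drain` and `enumerate` directly rather than rustc's `IndexVec`): `enumerate` always restarts at 0, so the range's start bound has to be added back to recover the original indices.

    fn main() {
        let mut v = vec!["a", "b", "c", "d"];
        let begin = 2;
        let drained: Vec<(usize, &str)> = v
            .drain(begin..)
            .enumerate()
            .map(|(n, t)| (begin + n, t)) // without `begin +`, indices restart at 0
            .collect();
        assert_eq!(drained, vec![(2, "c"), (3, "d")]);
        assert_eq!(v, vec!["a", "b"]);
    }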
 
     #[inline]
index 033a1842edb25c57c95ea20d97c910be67a82c3e..8bf3a160abbb4aaf2f064fef5967b0f3b7b83e8b 100644 (file)
@@ -927,6 +927,8 @@ pub struct ButNeedsToSatisfy {
     #[subdiagnostic]
     pub req_introduces_loc: Option<ReqIntroducedLocations>,
 
+    pub has_param_name: bool,
+    pub param_name: String,
     pub spans_empty: bool,
     pub has_lifetime: bool,
     pub lifetime: String,
index 77e38e47fcfa0b5ee9eb1e0dfd676ac9dae4ecab..72676b718fabe61853a9820a3282e271b8aed203 100644 (file)
@@ -331,7 +331,7 @@ pub fn instantiate(
         debug_assert!(self.infcx.inner.borrow_mut().type_variables().probe(b_vid).is_unknown());
 
         // Generalize type of `a_ty` appropriately depending on the
-        // direction.  As an example, assume:
+        // direction. As an example, assume:
         //
         // - `a_ty == &'x ?1`, where `'x` is some free region and `?1` is an
         //   inference variable,
index 533a3c768eb1693144f6673fcb5175bb2ab68b95..28fd03b878b2b69b2da3ef46b59a09823f1ea5f3 100644 (file)
@@ -1923,6 +1923,22 @@ fn escape_literal(s: &str) -> String {
                         (ty::Tuple(fields), _) => {
                             self.emit_tuple_wrap_err(&mut err, span, found, fields)
                         }
+                        // If a byte was expected and the found expression is a char literal
+                        // containing a single ASCII character, perhaps the user meant to write `b'c'` to
+                        // specify a byte literal
+                        (ty::Uint(ty::UintTy::U8), ty::Char) => {
+                            if let Ok(code) = self.tcx.sess().source_map().span_to_snippet(span)
+                                && let Some(code) = code.strip_prefix('\'').and_then(|s| s.strip_suffix('\''))
+                                && code.chars().next().map_or(false, |c| c.is_ascii())
+                            {
+                                err.span_suggestion(
+                                    span,
+                                    "if you meant to write a byte literal, prefix with `b`",
+                                    format!("b'{}'", escape_literal(code)),
+                                    Applicability::MachineApplicable,
+                                );
+                            }
+                        }
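A tiny example of the new arm's target case (assumed snippet): a `char` literal holding a single ASCII character where a `u8` was expected.

    fn main() {
        // error[E0308]: expected `u8`, found `char`; the new arm adds the
        // machine-applicable suggestion to write a byte literal: `b'a'`.
        let _b: u8 = 'a';
    }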
                         // If a character was expected and the found expression is a string literal
                         // containing a single character, perhaps the user meant to write `'c'` to
                         // specify a character literal (issue #92479)
@@ -2256,10 +2272,10 @@ pub fn construct_generic_bound_failure(
 
         let labeled_user_string = match bound_kind {
             GenericKind::Param(ref p) => format!("the parameter type `{}`", p),
-            GenericKind::Projection(ref p) => format!("the associated type `{}`", p),
-            GenericKind::Opaque(def_id, substs) => {
-                format!("the opaque type `{}`", self.tcx.def_path_str_with_substs(def_id, substs))
-            }
+            GenericKind::Alias(ref p) => match p.kind(self.tcx) {
+                ty::AliasKind::Projection => format!("the associated type `{}`", p),
+                ty::AliasKind::Opaque => format!("the opaque type `{}`", p),
+            },
         };
 
         if let Some(SubregionOrigin::CompareImplItemObligation {
index 202f39521e967d7283fbfe8b747ccb94788878ed..99431567edac413f4aef86a04e49516ddf932599 100644 (file)
@@ -370,7 +370,7 @@ fn explain_actual_impl_that_was_found(
         //   in the types are about to print
         // - Meanwhile, the `maybe_highlighting_region` calls set up
         //   highlights so that, if they do appear, we will replace
-        //   them `'0` and whatever.  (This replacement takes place
+        //   them `'0` and whatever. (This replacement takes place
         //   inside the closure given to `maybe_highlighting_region`.)
         //
         // There is some duplication between the calls -- i.e., the
index fb0f09198ccc185330064cb2b7df525d1048f26c..6a463583dfb0ff42573d3cbf0c308ef67fe434a1 100644 (file)
@@ -98,6 +98,7 @@ pub(super) fn try_report_static_impl_trait(&self) -> Option<ErrorGuaranteed> {
         let sp = var_origin.span();
         let return_sp = sub_origin.span();
         let param = self.find_param_with_region(*sup_r, *sub_r)?;
+        let simple_ident = param.param.pat.simple_ident();
         let lifetime_name = if sup_r.has_name() { sup_r.to_string() } else { "'_".to_owned() };
 
         let (mention_influencer, influencer_point) =
@@ -187,7 +188,9 @@ pub(super) fn try_report_static_impl_trait(&self) -> Option<ErrorGuaranteed> {
             req_introduces_loc: subdiag,
 
             has_lifetime: sup_r.has_name(),
-            lifetime: sup_r.to_string(),
+            lifetime: lifetime_name.clone(),
+            has_param_name: simple_ident.is_some(),
+            param_name: simple_ident.map(|x| x.to_string()).unwrap_or_default(),
             spans_empty,
             bound,
         };
@@ -543,7 +546,7 @@ fn visit_ty(&mut self, t: Ty<'tcx>) -> ControlFlow<Self::BreakTy> {
                 if let Some(def_id) = preds.principal_def_id() {
                     self.0.insert(def_id);
                 }
-                ControlFlow::CONTINUE
+                ControlFlow::Continue(())
             }
             _ => t.super_visit_with(self),
         }
index 021e741ee2f710b871fb74f33e65cedb1f6152fc..b18cbd404d47f8c03895ef5db759d18f4f722077 100644 (file)
@@ -330,9 +330,8 @@ pub fn suggest_copy_trait_method_bounds(
 
         let Ok(trait_predicates) = self
             .tcx
-            .bound_explicit_predicates_of(trait_item_def_id)
-            .map_bound(|p| p.predicates)
-            .subst_iter_copied(self.tcx, trait_item_substs)
+            .explicit_predicates_of(trait_item_def_id)
+            .instantiate_own(self.tcx, trait_item_substs)
             .map(|(pred, _)| {
                 if pred.is_suggestable(self.tcx, false) {
                     Ok(pred.to_string())
index 0ebc6d55bcba952678c86e6ff3c1dba91d8a11bd..4dbb4b4d7b4da8b681d38180acc87270092f1fd6 100644 (file)
@@ -78,7 +78,7 @@ pub fn super_lattice_tys<'a, 'tcx: 'a, L>(
         //
         // Example: if the LHS is a type variable, and RHS is
         // `Box<i32>`, then we current compare `v` to the RHS first,
-        // which will instantiate `v` with `Box<i32>`.  Then when `v`
+        // which will instantiate `v` with `Box<i32>`. Then when `v`
         // is compared to the LHS, we instantiate LHS with `Box<i32>`.
         // But if we did in reverse order, we would create a `v <:
         // LHS` (or vice versa) constraint and then instantiate
index 897545046c33f3bfd2b6886e974ab1f1673409ea..ce8aec8044bae63422a1fba1131dcd8b6aaaf746 100644 (file)
@@ -52,7 +52,7 @@ pub struct LexicalRegionResolutions<'tcx> {
 
 #[derive(Copy, Clone, Debug)]
 pub(crate) enum VarValue<'tcx> {
-    /// Empty lifetime is for data that is never accessed.  We tag the
+    /// Empty lifetime is for data that is never accessed. We tag the
     /// empty lifetime with a universe -- the idea is that we don't
     /// want `exists<'a> { forall<'b> { 'b: 'a } }` to be satisfiable.
     /// Therefore, the `'empty` in a universe `U` is less than all
@@ -251,7 +251,7 @@ fn expansion(&self, var_values: &mut LexicalRegionResolutions<'tcx>) {
                     VarValue::Empty(a_universe) => {
                         let b_data = var_values.value_mut(b_vid);
 
-                        let changed = (|| match *b_data {
+                        let changed = match *b_data {
                             VarValue::Empty(b_universe) => {
                                 // Empty regions are ordered according to the universe
                                 // they are associated with.
@@ -280,20 +280,20 @@ fn expansion(&self, var_values: &mut LexicalRegionResolutions<'tcx>) {
                                 };
 
                                 if lub == cur_region {
-                                    return false;
+                                    false
+                                } else {
+                                    debug!(
+                                        "Expanding value of {:?} from {:?} to {:?}",
+                                        b_vid, cur_region, lub
+                                    );
+
+                                    *b_data = VarValue::Value(lub);
+                                    true
                                 }
-
-                                debug!(
-                                    "Expanding value of {:?} from {:?} to {:?}",
-                                    b_vid, cur_region, lub
-                                );
-
-                                *b_data = VarValue::Value(lub);
-                                true
                             }
 
                             VarValue::ErrorValue => false,
-                        })();
+                        };
 
                         if changed {
                             changes.push(b_vid);
@@ -510,7 +510,7 @@ fn sub_concrete_regions(&self, a: Region<'tcx>, b: Region<'tcx>) -> bool {
         }
 
         // If both `a` and `b` are free, consult the declared
-        // relationships.  Note that this can be more precise than the
+        // relationships. Note that this can be more precise than the
         // `lub` relationship defined below, since sometimes the "lub"
         // is actually the `postdom_upper_bound` (see
         // `TransitiveRelation` for more details).
@@ -665,7 +665,7 @@ fn collect_var_errors(
         // conflicting regions to report to the user. As we walk, we
         // trip the flags from false to true, and if we find that
         // we've already reported an error involving any particular
-        // node we just stop and don't report the current error.  The
+        // node we just stop and don't report the current error. The
         // idea is to report errors that derive from independent
         // regions of the graph, but not those that derive from
         // overlapping locations.
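
A minimal sketch (not rustc code) of the refactor in the expansion hunk above: an immediately-invoked closure `(|| ... )()`, used only so that `return` could bail out early, becomes a plain `match` expression whose arms evaluate directly to the `changed` flag. The enum and function names below are illustrative stand-ins.

    #[derive(Clone, Copy, PartialEq, Debug)]
    enum Value {
        Empty,
        Known(u32),
    }

    fn expand(current: &mut Value, lub: u32) -> bool {
        // Before: let changed = (|| match *current { ... return false; ... })();
        // After: the match itself evaluates to the flag.
        let changed = match *current {
            Value::Known(cur) if cur == lub => false,
            Value::Known(_) | Value::Empty => {
                *current = Value::Known(lub);
                true
            }
        };
        changed
    }

    fn main() {
        let mut v = Value::Empty;
        assert!(expand(&mut v, 3));  // value grows to Known(3)
        assert!(!expand(&mut v, 3)); // already at the lub, nothing changes
    }
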
index 4acd0d0edfec774546b1a73bda0ae049d8fe720e..f0e42c1fce49c3ea685f6301b22f1d09e702e774 100644 (file)
@@ -1105,7 +1105,7 @@ pub fn next_region_var_in_universe(
         self.tcx.mk_region(ty::ReVar(region_var))
     }
 
-    /// Return the universe that the region `r` was created in.  For
+    /// Return the universe that the region `r` was created in. For
     /// most regions (e.g., `'static`, named regions from the user,
     /// etc) this is the root universe U0. For inference variables or
     /// placeholders, however, it will return the universe which they
@@ -1361,7 +1361,7 @@ pub fn probe_ty_var(&self, vid: TyVid) -> Result<Ty<'tcx>, ty::UniverseIndex> {
     }
 
     /// Resolve any type variables found in `value` -- but only one
-    /// level.  So, if the variable `?X` is bound to some type
+    /// level. So, if the variable `?X` is bound to some type
     /// `Foo<?Y>`, then this would return `Foo<?Y>` (but `?Y` may
     /// itself be bound to a type).
     ///
@@ -1720,7 +1720,7 @@ pub fn resolve_regions_and_report_errors(
         if let None = self.tainted_by_errors() {
             // As a heuristic, just skip reporting region errors
             // altogether if other errors have been reported while
-            // this infcx was in use.  This is totally hokey but
+            // this infcx was in use. This is totally hokey but
             // otherwise we have a hard time separating legit region
             // errors from silly ones.
             self.report_region_errors(generic_param_scope, &errors);
index 1f9d86a78d6e50912f7cc3dba6e0b006391a33aa..f235cb5ab4503429be84df40c85338cf0f1b4b69 100644 (file)
@@ -439,7 +439,7 @@ trait VidValuePair<'tcx>: Debug {
     fn value_ty(&self) -> Ty<'tcx>;
 
     /// Extract the scopes that apply to whichever side of the tuple
-    /// the vid was found on.  See the comment where this is called
+    /// the vid was found on. See the comment where this is called
     /// for more details on why we want them.
     fn vid_scopes<'r, D: TypeRelatingDelegate<'tcx>>(
         &self,
@@ -831,7 +831,7 @@ fn const_equate_obligation(&mut self, _a: ty::Const<'tcx>, _b: ty::Const<'tcx>)
 /// (these are not explicitly present in the ty representation right
 /// now). This visitor handles that: it descends the type, tracking
 /// binder depth, and finds late-bound regions targeting the
-/// `for<..`>.  For each of those, it creates an entry in
+/// `for<..`>. For each of those, it creates an entry in
 /// `bound_region_scope`.
 struct ScopeInstantiator<'me, 'tcx> {
     next_region: &'me mut dyn FnMut(ty::BoundRegion) -> ty::Region<'tcx>,
@@ -849,7 +849,7 @@ fn visit_binder<T: TypeVisitable<'tcx>>(
         t.super_visit_with(self);
         self.target_index.shift_out(1);
 
-        ControlFlow::CONTINUE
+        ControlFlow::Continue(())
     }
 
     fn visit_region(&mut self, r: ty::Region<'tcx>) -> ControlFlow<Self::BreakTy> {
@@ -863,7 +863,7 @@ fn visit_region(&mut self, r: ty::Region<'tcx>) -> ControlFlow<Self::BreakTy> {
             _ => {}
         }
 
-        ControlFlow::CONTINUE
+        ControlFlow::Continue(())
     }
 }
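
The `ControlFlow::CONTINUE` changes in this and the following files swap the associated-constant spelling for the plain enum variant. A minimal sketch (not rustc code) of the same pattern, using only `std::ops::ControlFlow`:

    use std::ops::ControlFlow;

    fn visit(values: &[i32]) -> ControlFlow<i32> {
        for &v in values {
            if v < 0 {
                // Stop the walk and report the offending value.
                return ControlFlow::Break(v);
            }
        }
        // Keep going; previously spelled `ControlFlow::CONTINUE`.
        ControlFlow::Continue(())
    }

    fn main() {
        assert_eq!(visit(&[1, 2, 3]), ControlFlow::Continue(()));
        assert_eq!(visit(&[1, -2, 3]), ControlFlow::Break(-2));
    }
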
 
index 749e960bfd03090876186167b6b6247b5eb44efc..c54c66eab27998cdf0616e36d25404fdc417a143 100644 (file)
@@ -112,7 +112,7 @@ pub fn handle_opaque_type(
                     DefiningAnchor::Bind(_) => {
                         // Check that this `impl Trait` type is
                         // declared by `parent_def_id` -- i.e., one whose
-                        // value we are inferring.  At present, this is
+                        // value we are inferring. At present, this is
                         // always true during the first phase of
                         // type-check, but not always true later on during
                         // NLL. Once we support named opaque types more fully,
@@ -380,7 +380,7 @@ pub fn opaque_type_origin(&self, def_id: LocalDefId, span: Span) -> Option<Opaqu
         };
         let item_kind = &self.tcx.hir().expect_item(def_id).kind;
 
-        let hir::ItemKind::OpaqueTy(hir::OpaqueTy { origin, ..  }) = item_kind else {
+        let hir::ItemKind::OpaqueTy(hir::OpaqueTy { origin, .. }) = item_kind else {
             span_bug!(
                 span,
                 "weird opaque type: {:#?}, {:#?}",
@@ -440,16 +440,16 @@ fn visit_binder<T: TypeVisitable<'tcx>>(
         t: &ty::Binder<'tcx, T>,
     ) -> ControlFlow<Self::BreakTy> {
         t.super_visit_with(self);
-        ControlFlow::CONTINUE
+        ControlFlow::Continue(())
     }
 
     fn visit_region(&mut self, r: ty::Region<'tcx>) -> ControlFlow<Self::BreakTy> {
         match *r {
             // ignore bound regions, keep visiting
-            ty::ReLateBound(_, _) => ControlFlow::CONTINUE,
+            ty::ReLateBound(_, _) => ControlFlow::Continue(()),
             _ => {
                 (self.op)(r);
-                ControlFlow::CONTINUE
+                ControlFlow::Continue(())
             }
         }
     }
@@ -457,7 +457,7 @@ fn visit_region(&mut self, r: ty::Region<'tcx>) -> ControlFlow<Self::BreakTy> {
     fn visit_ty(&mut self, ty: Ty<'tcx>) -> ControlFlow<Self::BreakTy> {
         // We're only interested in types involving regions
         if !ty.flags().intersects(ty::TypeFlags::HAS_FREE_REGIONS) {
-            return ControlFlow::CONTINUE;
+            return ControlFlow::Continue(());
         }
 
         match ty.kind() {
@@ -479,7 +479,7 @@ fn visit_ty(&mut self, ty: Ty<'tcx>) -> ControlFlow<Self::BreakTy> {
             }
 
             ty::Alias(ty::Opaque, ty::AliasTy { def_id, ref substs, .. }) => {
-                // Skip lifetime paramters that are not captures.
+                // Skip lifetime parameters that are not captures.
                 let variances = self.tcx.variances_of(*def_id);
 
                 for (v, s) in std::iter::zip(variances, substs.iter()) {
@@ -492,7 +492,7 @@ fn visit_ty(&mut self, ty: Ty<'tcx>) -> ControlFlow<Self::BreakTy> {
             ty::Alias(ty::Projection, proj)
                 if self.tcx.def_kind(proj.def_id) == DefKind::ImplTraitPlaceholder =>
             {
-                // Skip lifetime paramters that are not captures.
+                // Skip lifetime parameters that are not captures.
                 let variances = self.tcx.variances_of(proj.def_id);
 
                 for (v, s) in std::iter::zip(variances, proj.substs.iter()) {
@@ -507,7 +507,7 @@ fn visit_ty(&mut self, ty: Ty<'tcx>) -> ControlFlow<Self::BreakTy> {
             }
         }
 
-        ControlFlow::CONTINUE
+        ControlFlow::Continue(())
     }
 }
 
index aa2b5d067d266d8742c322ea147ffe5bc4905442..3d86279b03cc621f6ba372c973b0c8b70ea8f013 100644 (file)
@@ -3,9 +3,8 @@
 // RFC for reference.
 
 use rustc_data_structures::sso::SsoHashSet;
-use rustc_hir::def_id::DefId;
 use rustc_middle::ty::subst::{GenericArg, GenericArgKind};
-use rustc_middle::ty::{self, SubstsRef, Ty, TyCtxt, TypeVisitable};
+use rustc_middle::ty::{self, Ty, TyCtxt, TypeVisitable};
 use smallvec::{smallvec, SmallVec};
 
 #[derive(Debug)]
@@ -23,7 +22,7 @@ pub enum Component<'tcx> {
     // is not in a position to judge which is the best technique, so
     // we just produce the projection as a component and leave it to
     // the consumer to decide (but see `EscapingProjection` below).
-    Projection(ty::AliasTy<'tcx>),
+    Alias(ty::AliasTy<'tcx>),
 
     // In the case where a projection has escaping regions -- meaning
     // regions bound within the type itself -- we always use
@@ -45,9 +44,7 @@ pub enum Component<'tcx> {
     // projection, so that implied bounds code can avoid relying on
     // them. This gives us room to improve the regionck reasoning in
     // the future without breaking backwards compat.
-    EscapingProjection(Vec<Component<'tcx>>),
-
-    Opaque(DefId, SubstsRef<'tcx>),
+    EscapingAlias(Vec<Component<'tcx>>),
 }
 
 /// Push onto `out` all the things that must outlive `'a` for the condition
@@ -123,17 +120,6 @@ fn compute_components<'tcx>(
                 out.push(Component::Param(p));
             }
 
-            // Ignore lifetimes found in opaque types. Opaque types can
-            // have lifetimes in their substs which their hidden type doesn't
-            // actually use. If we inferred that an opaque type is outlived by
-            // its parameter lifetimes, then we could prove that any lifetime
-            // outlives any other lifetime, which is unsound.
-            // See https://github.com/rust-lang/rust/issues/84305 for
-            // more details.
-            ty::Alias(ty::Opaque, ty::AliasTy { def_id, substs, .. }) => {
-                out.push(Component::Opaque(def_id, substs));
-            },
-
             // For projections, we prefer to generate an obligation like
             // `<P0 as Trait<P1...Pn>>::Foo: 'a`, because this gives the
             // regionck more ways to prove that it holds. However,
@@ -142,23 +128,23 @@ fn compute_components<'tcx>(
             // trait-ref. Therefore, if we see any higher-ranked regions,
             // we simply fallback to the most restrictive rule, which
             // requires that `Pi: 'a` for all `i`.
-            ty::Alias(ty::Projection, ref data) => {
-                if !data.has_escaping_bound_vars() {
+            ty::Alias(_, alias_ty) => {
+                if !alias_ty.has_escaping_bound_vars() {
                     // best case: no escaping regions, so push the
                     // projection and skip the subtree (thus generating no
                     // constraints for Pi). This defers the choice between
                     // the rules OutlivesProjectionEnv,
                     // OutlivesProjectionTraitDef, and
                     // OutlivesProjectionComponents to regionck.
-                    out.push(Component::Projection(*data));
+                    out.push(Component::Alias(alias_ty));
                 } else {
                     // fallback case: hard code
-                    // OutlivesProjectionComponents.  Continue walking
+                    // OutlivesProjectionComponents. Continue walking
                     // through and constrain Pi.
                     let mut subcomponents = smallvec![];
                     let mut subvisited = SsoHashSet::new();
                     compute_components_recursive(tcx, ty.into(), &mut subcomponents, &mut subvisited);
-                    out.push(Component::EscapingProjection(subcomponents.into_iter().collect()));
+                    out.push(Component::EscapingAlias(subcomponents.into_iter().collect()));
                 }
             }
 
@@ -195,7 +181,7 @@ fn compute_components<'tcx>(
             ty::Error(_) => {
                 // (*) Function pointers and trait objects are both binders.
                 // In the RFC, this means we would add the bound regions to
-                // the "bound regions list".  In our representation, no such
+                // the "bound regions list". In our representation, no such
                 // list is maintained explicitly, because bound regions
                 // themselves can be readily identified.
                 compute_components_recursive(tcx, ty.into(), out, visited);
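
In the components change above, the separate `Projection` and `Opaque` variants (and `EscapingProjection`) collapse into `Alias`/`EscapingAlias` carrying a `ty::AliasTy`. A minimal, self-contained sketch of the same consolidation, with stand-in types rather than the real rustc ones:

    #[derive(Debug, Clone, Copy, PartialEq)]
    enum AliasKind {
        Projection,
        Opaque,
    }

    #[derive(Debug, Clone, Copy, PartialEq)]
    struct AliasTy {
        kind: AliasKind,
        def_index: u32,
    }

    #[derive(Debug, PartialEq)]
    enum Component {
        // Before: Projection(AliasTy) and Opaque(DefId, Substs) were distinct variants.
        Alias(AliasTy),
        EscapingAlias(Vec<Component>),
    }

    fn classify(alias: AliasTy, has_escaping_bound_vars: bool) -> Component {
        if has_escaping_bound_vars {
            // Fallback: keep the subcomponents, mirroring compute_components above.
            Component::EscapingAlias(vec![Component::Alias(alias)])
        } else {
            Component::Alias(alias)
        }
    }

    fn main() {
        let opaque = AliasTy { kind: AliasKind::Opaque, def_index: 7 };
        assert_eq!(classify(opaque, false), Component::Alias(opaque));
    }
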
index 33543135ddb0ef68a3f3a92369ca3b629a6d948c..24e3c34dd94fc40d28b22d2ea7675f62b7618144 100644 (file)
@@ -138,13 +138,9 @@ fn add_outlives_bounds<I>(&mut self, infcx: Option<&InferCtxt<'tcx>>, outlives_b
                     self.region_bound_pairs
                         .insert(ty::OutlivesPredicate(GenericKind::Param(param_b), r_a));
                 }
-                OutlivesBound::RegionSubProjection(r_a, projection_b) => {
+                OutlivesBound::RegionSubAlias(r_a, alias_b) => {
                     self.region_bound_pairs
-                        .insert(ty::OutlivesPredicate(GenericKind::Projection(projection_b), r_a));
-                }
-                OutlivesBound::RegionSubOpaque(r_a, def_id, substs) => {
-                    self.region_bound_pairs
-                        .insert(ty::OutlivesPredicate(GenericKind::Opaque(def_id, substs), r_a));
+                        .insert(ty::OutlivesPredicate(GenericKind::Alias(alias_b), r_a));
                 }
                 OutlivesBound::RegionSubRegion(r_a, r_b) => {
                     if let (ReEarlyBound(_) | ReFree(_), ReVar(vid_b)) = (r_a.kind(), r_b.kind()) {
index a85e6a19b11b65b99fdc279a195f5822cd3a4220..0194549a8868d3c5c3506a7ddd839715fca57bfe 100644 (file)
@@ -67,7 +67,6 @@
 };
 use crate::traits::{ObligationCause, ObligationCauseCode};
 use rustc_data_structures::undo_log::UndoLogs;
-use rustc_hir::def_id::DefId;
 use rustc_middle::mir::ConstraintCategory;
 use rustc_middle::ty::subst::GenericArgKind;
 use rustc_middle::ty::{self, Region, SubstsRef, Ty, TyCtxt, TypeVisitable};
@@ -266,13 +265,8 @@ fn components_must_outlive(
                 Component::Param(param_ty) => {
                     self.param_ty_must_outlive(origin, region, *param_ty);
                 }
-                Component::Opaque(def_id, substs) => {
-                    self.opaque_must_outlive(*def_id, substs, origin, region)
-                }
-                Component::Projection(projection_ty) => {
-                    self.projection_must_outlive(origin, region, *projection_ty);
-                }
-                Component::EscapingProjection(subcomponents) => {
+                Component::Alias(alias_ty) => self.alias_ty_must_outlive(origin, region, *alias_ty),
+                Component::EscapingAlias(subcomponents) => {
                     self.components_must_outlive(origin, &subcomponents, region, category);
                 }
                 Component::UnresolvedInferenceVariable(v) => {
@@ -288,80 +282,26 @@ fn components_must_outlive(
         }
     }
 
+    #[instrument(level = "debug", skip(self))]
     fn param_ty_must_outlive(
         &mut self,
         origin: infer::SubregionOrigin<'tcx>,
         region: ty::Region<'tcx>,
         param_ty: ty::ParamTy,
     ) {
-        debug!(
-            "param_ty_must_outlive(region={:?}, param_ty={:?}, origin={:?})",
-            region, param_ty, origin
-        );
-
-        let generic = GenericKind::Param(param_ty);
         let verify_bound = self.verify_bound.param_bound(param_ty);
-        self.delegate.push_verify(origin, generic, region, verify_bound);
-    }
-
-    #[instrument(level = "debug", skip(self))]
-    fn opaque_must_outlive(
-        &mut self,
-        def_id: DefId,
-        substs: SubstsRef<'tcx>,
-        origin: infer::SubregionOrigin<'tcx>,
-        region: ty::Region<'tcx>,
-    ) {
-        self.generic_must_outlive(
-            origin,
-            region,
-            GenericKind::Opaque(def_id, substs),
-            def_id,
-            substs,
-            true,
-            |ty| match *ty.kind() {
-                ty::Alias(ty::Opaque, ty::AliasTy { def_id, substs, .. }) => (def_id, substs),
-                _ => bug!("expected only projection types from env, not {:?}", ty),
-            },
-        );
+        self.delegate.push_verify(origin, GenericKind::Param(param_ty), region, verify_bound);
     }
 
     #[instrument(level = "debug", skip(self))]
-    fn projection_must_outlive(
+    fn alias_ty_must_outlive(
         &mut self,
         origin: infer::SubregionOrigin<'tcx>,
         region: ty::Region<'tcx>,
-        projection_ty: ty::AliasTy<'tcx>,
-    ) {
-        self.generic_must_outlive(
-            origin,
-            region,
-            GenericKind::Projection(projection_ty),
-            projection_ty.def_id,
-            projection_ty.substs,
-            false,
-            |ty| match ty.kind() {
-                ty::Alias(ty::Projection, projection_ty) => {
-                    (projection_ty.def_id, projection_ty.substs)
-                }
-                _ => bug!("expected only projection types from env, not {:?}", ty),
-            },
-        );
-    }
-
-    #[instrument(level = "debug", skip(self, filter))]
-    fn generic_must_outlive(
-        &mut self,
-        origin: infer::SubregionOrigin<'tcx>,
-        region: ty::Region<'tcx>,
-        generic: GenericKind<'tcx>,
-        def_id: DefId,
-        substs: SubstsRef<'tcx>,
-        is_opaque: bool,
-        filter: impl Fn(Ty<'tcx>) -> (DefId, SubstsRef<'tcx>),
+        alias_ty: ty::AliasTy<'tcx>,
     ) {
         // An optimization for a common case with opaque types.
-        if substs.is_empty() {
+        if alias_ty.substs.is_empty() {
             return;
         }
 
@@ -371,7 +311,7 @@ fn generic_must_outlive(
         // particular). :) First off, we have to choose between using the
         // OutlivesProjectionEnv, OutlivesProjectionTraitDef, and
         // OutlivesProjectionComponent rules, any one of which is
-        // sufficient.  If there are no inference variables involved, it's
+        // sufficient. If there are no inference variables involved, it's
         // not hard to pick the right rule, but if there are, we're in a
         // bit of a catch 22: if we picked which rule we were going to
         // use, we could add constraints to the region inference graph
@@ -383,14 +323,14 @@ fn generic_must_outlive(
         // These are guaranteed to apply, no matter the inference
         // results.
         let trait_bounds: Vec<_> =
-            self.verify_bound.declared_region_bounds(def_id, substs).collect();
+            self.verify_bound.declared_bounds_from_definition(alias_ty).collect();
 
         debug!(?trait_bounds);
 
         // Compute the bounds we can derive from the environment. This
         // is an "approximate" match -- in some cases, these bounds
         // may not apply.
-        let mut approx_env_bounds = self.verify_bound.approx_declared_bounds_from_env(generic);
+        let mut approx_env_bounds = self.verify_bound.approx_declared_bounds_from_env(alias_ty);
         debug!(?approx_env_bounds);
 
         // Remove outlives bounds that we get from the environment but
@@ -405,8 +345,8 @@ fn generic_must_outlive(
             // If the declaration is `trait Trait<'b> { type Item: 'b; }`, then `projection_declared_bounds_from_trait`
             // will be invoked with `['b => ^1]` and so we will get `^1` returned.
             let bound = bound_outlives.skip_binder();
-            let (def_id, substs) = filter(bound.0);
-            self.verify_bound.declared_region_bounds(def_id, substs).all(|r| r != bound.1)
+            let ty::Alias(_, alias_ty) = bound.0.kind() else { bug!("expected AliasTy") };
+            self.verify_bound.declared_bounds_from_definition(*alias_ty).all(|r| r != bound.1)
         });
 
         // If declared bounds list is empty, the only applicable rule is
@@ -423,12 +363,12 @@ fn generic_must_outlive(
         // the problem is to add `T: 'r`, which isn't true. So, if there are no
         // inference variables, we use a verify constraint instead of adding
         // edges, which winds up enforcing the same condition.
-        let needs_infer = substs.needs_infer();
-        if approx_env_bounds.is_empty() && trait_bounds.is_empty() && (needs_infer || is_opaque) {
+        if approx_env_bounds.is_empty()
+            && trait_bounds.is_empty()
+            && (alias_ty.needs_infer() || alias_ty.kind(self.tcx) == ty::Opaque)
+        {
             debug!("no declared bounds");
-
-            self.substs_must_outlive(substs, origin, region);
-
+            self.substs_must_outlive(alias_ty.substs, origin, region);
             return;
         }
 
@@ -469,14 +409,9 @@ fn generic_must_outlive(
         // projection outlive; in some cases, this may add insufficient
         // edges into the inference graph, leading to inference failures
         // even though a satisfactory solution exists.
-        let verify_bound = self.verify_bound.projection_opaque_bounds(
-            generic,
-            def_id,
-            substs,
-            &mut Default::default(),
-        );
-        debug!("projection_must_outlive: pushing {:?}", verify_bound);
-        self.delegate.push_verify(origin, generic, region, verify_bound);
+        let verify_bound = self.verify_bound.alias_bound(alias_ty, &mut Default::default());
+        debug!("alias_must_outlive: pushing {:?}", verify_bound);
+        self.delegate.push_verify(origin, GenericKind::Alias(alias_ty), region, verify_bound);
     }
 
     fn substs_must_outlive(
index 40bbec8ddd091da396b17dcb866004b4b10cab20..94de9bc2d02283be28c98c6c576ff4f5c1499027 100644 (file)
@@ -1,11 +1,10 @@
 use crate::infer::outlives::components::{compute_components_recursive, Component};
 use crate::infer::outlives::env::RegionBoundPairs;
 use crate::infer::region_constraints::VerifyIfEq;
-use crate::infer::{GenericKind, VerifyBound};
+use crate::infer::VerifyBound;
 use rustc_data_structures::sso::SsoHashSet;
-use rustc_hir::def_id::DefId;
 use rustc_middle::ty::GenericArg;
-use rustc_middle::ty::{self, OutlivesPredicate, SubstsRef, Ty, TyCtxt};
+use rustc_middle::ty::{self, OutlivesPredicate, Ty, TyCtxt};
 
 use smallvec::smallvec;
 
@@ -94,29 +93,26 @@ pub fn param_bound(&self, param_ty: ty::ParamTy) -> VerifyBound<'tcx> {
     /// this list.
     pub fn approx_declared_bounds_from_env(
         &self,
-        generic: GenericKind<'tcx>,
+        alias_ty: ty::AliasTy<'tcx>,
     ) -> Vec<ty::Binder<'tcx, ty::OutlivesPredicate<Ty<'tcx>, ty::Region<'tcx>>>> {
-        let projection_ty = generic.to_ty(self.tcx);
-        let erased_projection_ty = self.tcx.erase_regions(projection_ty);
-        self.declared_generic_bounds_from_env_for_erased_ty(erased_projection_ty)
+        let erased_alias_ty = self.tcx.erase_regions(alias_ty.to_ty(self.tcx));
+        self.declared_generic_bounds_from_env_for_erased_ty(erased_alias_ty)
     }
 
     #[instrument(level = "debug", skip(self, visited))]
-    pub fn projection_opaque_bounds(
+    pub fn alias_bound(
         &self,
-        generic: GenericKind<'tcx>,
-        def_id: DefId,
-        substs: SubstsRef<'tcx>,
+        alias_ty: ty::AliasTy<'tcx>,
         visited: &mut SsoHashSet<GenericArg<'tcx>>,
     ) -> VerifyBound<'tcx> {
-        let generic_ty = generic.to_ty(self.tcx);
+        let alias_ty_as_ty = alias_ty.to_ty(self.tcx);
 
         // Search the env for where clauses like `P: 'a`.
-        let projection_opaque_bounds = self
-            .approx_declared_bounds_from_env(generic)
+        let env_bounds = self
+            .approx_declared_bounds_from_env(alias_ty)
             .into_iter()
             .map(|binder| {
-                if let Some(ty::OutlivesPredicate(ty, r)) = binder.no_bound_vars() && ty == generic_ty {
+                if let Some(ty::OutlivesPredicate(ty, r)) = binder.no_bound_vars() && ty == alias_ty_as_ty {
                     // Micro-optimize if this is an exact match (this
                     // occurs often when there are no region variables
                     // involved).
@@ -126,19 +122,19 @@ pub fn projection_opaque_bounds(
                     VerifyBound::IfEq(verify_if_eq_b)
                 }
             });
-        // Extend with bounds that we can find from the trait.
-        let trait_bounds =
-            self.declared_region_bounds(def_id, substs).map(|r| VerifyBound::OutlivedBy(r));
+
+        // Extend with bounds that we can find from the definition.
+        let definition_bounds =
+            self.declared_bounds_from_definition(alias_ty).map(|r| VerifyBound::OutlivedBy(r));
 
         // see the extensive comment in projection_must_outlive
         let recursive_bound = {
             let mut components = smallvec![];
-            compute_components_recursive(self.tcx, generic_ty.into(), &mut components, visited);
+            compute_components_recursive(self.tcx, alias_ty_as_ty.into(), &mut components, visited);
             self.bound_from_components(&components, visited)
         };
 
-        VerifyBound::AnyBound(projection_opaque_bounds.chain(trait_bounds).collect())
-            .or(recursive_bound)
+        VerifyBound::AnyBound(env_bounds.chain(definition_bounds).collect()).or(recursive_bound)
     }
 
     fn bound_from_components(
@@ -149,10 +145,8 @@ fn bound_from_components(
         let mut bounds = components
             .iter()
             .map(|component| self.bound_from_single_component(component, visited))
-            .filter(|bound| {
-                // Remove bounds that must hold, since they are not interesting.
-                !bound.must_hold()
-            });
+            // Remove bounds that must hold, since they are not interesting.
+            .filter(|bound| !bound.must_hold());
 
         match (bounds.next(), bounds.next()) {
             (Some(first), None) => first,
@@ -170,19 +164,8 @@ fn bound_from_single_component(
         match *component {
             Component::Region(lt) => VerifyBound::OutlivedBy(lt),
             Component::Param(param_ty) => self.param_bound(param_ty),
-            Component::Opaque(did, substs) => self.projection_opaque_bounds(
-                GenericKind::Opaque(did, substs),
-                did,
-                substs,
-                visited,
-            ),
-            Component::Projection(projection_ty) => self.projection_opaque_bounds(
-                GenericKind::Projection(projection_ty),
-                projection_ty.def_id,
-                projection_ty.substs,
-                visited,
-            ),
-            Component::EscapingProjection(ref components) => {
+            Component::Alias(alias_ty) => self.alias_bound(alias_ty, visited),
+            Component::EscapingAlias(ref components) => {
                 self.bound_from_components(components, visited)
             }
             Component::UnresolvedInferenceVariable(v) => {
@@ -298,16 +281,15 @@ fn declared_generic_bounds_from_env_for_erased_ty(
     ///
     /// This is for simplicity, and because we are not really smart
     /// enough to cope with such bounds anywhere.
-    pub fn declared_region_bounds(
+    pub fn declared_bounds_from_definition(
         &self,
-        def_id: DefId,
-        substs: SubstsRef<'tcx>,
+        alias_ty: ty::AliasTy<'tcx>,
     ) -> impl Iterator<Item = ty::Region<'tcx>> {
         let tcx = self.tcx;
-        let bounds = tcx.bound_item_bounds(def_id);
+        let bounds = tcx.item_bounds(alias_ty.def_id);
         trace!("{:#?}", bounds.0);
         bounds
-            .subst_iter(tcx, substs)
+            .subst_iter(tcx, alias_ty.substs)
             .filter_map(|p| p.to_opt_type_outlives())
             .filter_map(|p| p.no_bound_vars())
             .map(|OutlivesPredicate(_, r)| r)
index 9a427ceacd0a7c63737805a2cb370adfa6788d7c..0428481b7ff0282fff2ba59b311733e25c1fbd39 100644 (file)
 use rustc_data_structures::sync::Lrc;
 use rustc_data_structures::undo_log::UndoLogs;
 use rustc_data_structures::unify as ut;
-use rustc_hir::def_id::DefId;
 use rustc_index::vec::IndexVec;
 use rustc_middle::infer::unify_key::{RegionVidKey, UnifiedRegion};
-use rustc_middle::ty::subst::SubstsRef;
 use rustc_middle::ty::ReStatic;
 use rustc_middle::ty::{self, Ty, TyCtxt};
 use rustc_middle::ty::{ReLateBound, ReVar};
@@ -169,8 +167,7 @@ pub struct Verify<'tcx> {
 #[derive(Copy, Clone, PartialEq, Eq, Hash, TypeFoldable, TypeVisitable)]
 pub enum GenericKind<'tcx> {
     Param(ty::ParamTy),
-    Projection(ty::AliasTy<'tcx>),
-    Opaque(DefId, SubstsRef<'tcx>),
+    Alias(ty::AliasTy<'tcx>),
 }
 
 /// Describes the things that some `GenericKind` value `G` is known to
@@ -749,10 +746,7 @@ impl<'tcx> fmt::Debug for GenericKind<'tcx> {
     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
         match *self {
             GenericKind::Param(ref p) => write!(f, "{:?}", p),
-            GenericKind::Projection(ref p) => write!(f, "{:?}", p),
-            GenericKind::Opaque(def_id, substs) => ty::tls::with(|tcx| {
-                write!(f, "{}", tcx.def_path_str_with_substs(def_id, tcx.lift(substs).unwrap()))
-            }),
+            GenericKind::Alias(ref p) => write!(f, "{:?}", p),
         }
     }
 }
@@ -761,10 +755,7 @@ impl<'tcx> fmt::Display for GenericKind<'tcx> {
     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
         match *self {
             GenericKind::Param(ref p) => write!(f, "{}", p),
-            GenericKind::Projection(ref p) => write!(f, "{}", p),
-            GenericKind::Opaque(def_id, substs) => ty::tls::with(|tcx| {
-                write!(f, "{}", tcx.def_path_str_with_substs(def_id, tcx.lift(substs).unwrap()))
-            }),
+            GenericKind::Alias(ref p) => write!(f, "{}", p),
         }
     }
 }
@@ -773,8 +764,7 @@ impl<'tcx> GenericKind<'tcx> {
     pub fn to_ty(&self, tcx: TyCtxt<'tcx>) -> Ty<'tcx> {
         match *self {
             GenericKind::Param(ref p) => p.to_ty(tcx),
-            GenericKind::Projection(ref p) => tcx.mk_projection(p.def_id, p.substs),
-            GenericKind::Opaque(def_id, substs) => tcx.mk_opaque(def_id, substs),
+            GenericKind::Alias(ref p) => p.to_ty(tcx),
         }
     }
 }
index 8671f8d45a91721d597bb8aa72d514e4dd5027cf..65b90aa3d79d3e285365339e8ec6812c000e09b6 100644 (file)
@@ -147,7 +147,7 @@ fn visit_ty(&mut self, t: Ty<'tcx>) -> ControlFlow<Self::BreakTy> {
         } else if !t.has_non_region_infer() {
             // All const/type variables in inference types must already be resolved,
             // no need to visit the contents.
-            ControlFlow::CONTINUE
+            ControlFlow::Continue(())
         } else {
             // Otherwise, keep visiting.
             t.super_visit_with(self)
@@ -178,7 +178,7 @@ fn visit_const(&mut self, ct: ty::Const<'tcx>) -> ControlFlow<Self::BreakTy> {
         } else if !ct.has_non_region_infer() {
             // All const/type variables in inference types must already be resolved,
             // no need to visit the contents.
-            ControlFlow::CONTINUE
+            ControlFlow::Continue(())
         } else {
             // Otherwise, keep visiting.
             ct.super_visit_with(self)
index 7ff086452536bbfafa30d80f7a9050f9540eeed6..263c6a47dd2af79f791343f418ca911376d44a52 100644 (file)
@@ -433,7 +433,7 @@ impl<'tcx> ut::UnifyValue for TypeVariableValue<'tcx> {
     fn unify_values(value1: &Self, value2: &Self) -> Result<Self, ut::NoError> {
         match (value1, value2) {
             // We never equate two type variables, both of which
-            // have known types.  Instead, we recursively equate
+            // have known types. Instead, we recursively equate
             // those types.
             (&TypeVariableValue::Known { .. }, &TypeVariableValue::Known { .. }) => {
                 bug!("equating two type variables, both of which have known types")
index d3519f4b37b8287b254efd802ae4edeaa3b174e5..fcde00056cbf1c4cf4738e80e851497506a05dcf 100644 (file)
@@ -1,6 +1,5 @@
 use crate::infer::InferCtxt;
 use crate::traits::Obligation;
-use rustc_data_structures::fx::FxHashMap;
 use rustc_hir::def_id::DefId;
 use rustc_middle::ty::{self, ToPredicate, Ty};
 
@@ -42,8 +41,6 @@ fn register_predicate_obligation(
     fn select_where_possible(&mut self, infcx: &InferCtxt<'tcx>) -> Vec<FulfillmentError<'tcx>>;
 
     fn pending_obligations(&self) -> Vec<PredicateObligation<'tcx>>;
-
-    fn relationships(&mut self) -> &mut FxHashMap<ty::TyVid, ty::FoundRelationships>;
 }
 
 pub trait TraitEngineExt<'tcx> {
index 1817bbf92285476c0b0b3d9fde2353304033cbf8..cd5bde2a791309c6c0f6d5aa6d73a55e412e0e36 100644 (file)
@@ -261,23 +261,15 @@ fn elaborate(&mut self, obligation: &PredicateObligation<'tcx>) {
 
                             Component::UnresolvedInferenceVariable(_) => None,
 
-                            Component::Opaque(def_id, substs) => {
-                                let ty = tcx.mk_opaque(def_id, substs);
-                                Some(ty::PredicateKind::Clause(ty::Clause::TypeOutlives(
-                                    ty::OutlivesPredicate(ty, r_min),
-                                )))
-                            }
-
-                            Component::Projection(projection) => {
+                            Component::Alias(alias_ty) => {
                                 // We might end up here if we have `Foo<<Bar as Baz>::Assoc>: 'a`.
                                 // With this, we can deduce that `<Bar as Baz>::Assoc: 'a`.
-                                let ty = tcx.mk_projection(projection.def_id, projection.substs);
                                 Some(ty::PredicateKind::Clause(ty::Clause::TypeOutlives(
-                                    ty::OutlivesPredicate(ty, r_min),
+                                    ty::OutlivesPredicate(alias_ty.to_ty(tcx), r_min),
                                 )))
                             }
 
-                            Component::EscapingProjection(_) => {
+                            Component::EscapingAlias(_) => {
                                 // We might be able to do more here, but we don't
                                 // want to deal with escaping vars right now.
                                 None
@@ -345,7 +337,7 @@ pub fn transitive_bounds<'tcx>(
 /// A specialized variant of `elaborate_trait_refs` that only elaborates trait references that may
 /// define the given associated type `assoc_name`. It uses the
 /// `super_predicates_that_define_assoc_type` query to avoid enumerating super-predicates that
-/// aren't related to `assoc_item`.  This is used when resolving types like `Self::Item` or
+/// aren't related to `assoc_item`. This is used when resolving types like `Self::Item` or
 /// `T::Item` and helps to avoid cycle errors (see e.g. #35237).
 pub fn transitive_bounds_that_define_assoc_type<'tcx>(
     tcx: TyCtxt<'tcx>,
index 7f761b005edd0946ce8e406282fa529f899268a5..7a5e45ada3f6a8f9528778435c6496d78980dc65 100644 (file)
 use rustc_parse::maybe_new_parser_from_source_str;
 use rustc_query_impl::QueryCtxt;
 use rustc_session::config::{self, CheckCfg, ErrorOutputType, Input, OutputFilenames};
-use rustc_session::early_error;
 use rustc_session::lint;
 use rustc_session::parse::{CrateConfig, ParseSess};
 use rustc_session::Session;
+use rustc_session::{early_error, CompilerIO};
 use rustc_span::source_map::{FileLoader, FileName};
 use rustc_span::symbol::sym;
 use std::path::PathBuf;
 pub struct Compiler {
     pub(crate) sess: Lrc<Session>,
     codegen_backend: Lrc<Box<dyn CodegenBackend>>,
-    pub(crate) input: Input,
-    pub(crate) input_path: Option<PathBuf>,
-    pub(crate) output_dir: Option<PathBuf>,
-    pub(crate) output_file: Option<PathBuf>,
-    pub(crate) temps_dir: Option<PathBuf>,
     pub(crate) register_lints: Option<Box<dyn Fn(&Session, &mut LintStore) + Send + Sync>>,
     pub(crate) override_queries:
         Option<fn(&Session, &mut ty::query::Providers, &mut ty::query::ExternProviders)>,
@@ -52,18 +47,6 @@ pub fn session(&self) -> &Lrc<Session> {
     pub fn codegen_backend(&self) -> &Lrc<Box<dyn CodegenBackend>> {
         &self.codegen_backend
     }
-    pub fn input(&self) -> &Input {
-        &self.input
-    }
-    pub fn output_dir(&self) -> &Option<PathBuf> {
-        &self.output_dir
-    }
-    pub fn output_file(&self) -> &Option<PathBuf> {
-        &self.output_file
-    }
-    pub fn temps_dir(&self) -> &Option<PathBuf> {
-        &self.temps_dir
-    }
     pub fn register_lints(&self) -> &Option<Box<dyn Fn(&Session, &mut LintStore) + Send + Sync>> {
         &self.register_lints
     }
@@ -72,14 +55,7 @@ pub fn build_output_filenames(
         sess: &Session,
         attrs: &[ast::Attribute],
     ) -> OutputFilenames {
-        util::build_output_filenames(
-            &self.input,
-            &self.output_dir,
-            &self.output_file,
-            &self.temps_dir,
-            attrs,
-            sess,
-        )
+        util::build_output_filenames(attrs, sess)
     }
 }
 
@@ -244,7 +220,6 @@ pub struct Config {
     pub crate_check_cfg: CheckCfg,
 
     pub input: Input,
-    pub input_path: Option<PathBuf>,
     pub output_dir: Option<PathBuf>,
     pub output_file: Option<PathBuf>,
     pub file_loader: Option<Box<dyn FileLoader + Send + Sync>>,
@@ -287,12 +262,19 @@ pub fn run_compiler<R: Send>(config: Config, f: impl FnOnce(&Compiler) -> R + Se
             crate::callbacks::setup_callbacks();
 
             let registry = &config.registry;
+
+            let temps_dir = config.opts.unstable_opts.temps_dir.as_deref().map(PathBuf::from);
             let (mut sess, codegen_backend) = util::create_session(
                 config.opts,
                 config.crate_cfg,
                 config.crate_check_cfg,
                 config.file_loader,
-                config.input_path.clone(),
+                CompilerIO {
+                    input: config.input,
+                    output_dir: config.output_dir,
+                    output_file: config.output_file,
+                    temps_dir,
+                },
                 config.lint_caps,
                 config.make_codegen_backend,
                 registry.clone(),
@@ -302,16 +284,9 @@ pub fn run_compiler<R: Send>(config: Config, f: impl FnOnce(&Compiler) -> R + Se
                 parse_sess_created(&mut sess.parse_sess);
             }
 
-            let temps_dir = sess.opts.unstable_opts.temps_dir.as_deref().map(PathBuf::from);
-
             let compiler = Compiler {
                 sess: Lrc::new(sess),
                 codegen_backend: Lrc::new(codegen_backend),
-                input: config.input,
-                input_path: config.input_path,
-                output_dir: config.output_dir,
-                output_file: config.output_file,
-                temps_dir,
                 register_lints: config.register_lints,
                 override_queries: config.override_queries,
             };
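
A minimal sketch of the `CompilerIO` refactor above: the loose `input`/`output_dir`/`output_file`/`temps_dir` fields move off `Compiler` and travel together into session construction. The struct below is an illustrative stand-in, not the real `rustc_session::CompilerIO` definition:

    use std::path::PathBuf;

    struct CompilerIo {
        input: PathBuf,
        output_dir: Option<PathBuf>,
        output_file: Option<PathBuf>,
        temps_dir: Option<PathBuf>,
    }

    struct Session {
        io: CompilerIo,
    }

    fn build_session(io: CompilerIo) -> Session {
        // All per-invocation paths now arrive as one value.
        Session { io }
    }

    fn main() {
        let sess = build_session(CompilerIo {
            input: PathBuf::from("src/main.rs"),
            output_dir: None,
            output_file: None,
            temps_dir: None,
        });
        // Downstream code reads sess.io.* instead of fields on Compiler.
        assert_eq!(sess.io.input, PathBuf::from("src/main.rs"));
        assert!(sess.io.output_file.is_none());
    }
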
index 542b638bbd7a40dcb5596aca193092332b93a811..82bc4770b6b471dcd60caa00fea9ed733d7e9d03 100644 (file)
@@ -3,6 +3,7 @@
 #![feature(internal_output_capture)]
 #![feature(thread_spawn_unchecked)]
 #![feature(once_cell)]
+#![feature(try_blocks)]
 #![recursion_limit = "256"]
 #![allow(rustc::potential_query_instability)]
 #![deny(rustc::untranslatable_diagnostic)]
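
The `try_blocks` feature enabled here pairs with the `try { ... }` expression used in `write_out_deps` further down, replacing the `(|| -> io::Result<()> { ... })()` pattern. A minimal nightly-only sketch of that shape (file name and logic are placeholders):

    #![feature(try_blocks)]

    use std::fs;
    use std::io;

    fn main() {
        // The fallible section is an expression; `?` propagates into `result`
        // instead of returning from an ad-hoc closure.
        let result: io::Result<()> = try {
            let text = fs::read_to_string("Cargo.toml")?;
            println!("read {} bytes", text.len());
        };

        if let Err(e) = result {
            eprintln!("dep-info style write failed: {e}");
        }
    }
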
index 50c40206d8026f66626a3ac07b9c1e0c117dbc3d..379a76528f3bbba1d92aa19af21d0f15d8752d9d 100644 (file)
@@ -13,7 +13,6 @@
 use rustc_borrowck as mir_borrowck;
 use rustc_codegen_ssa::traits::CodegenBackend;
 use rustc_data_structures::parallel;
-use rustc_data_structures::steal::Steal;
 use rustc_data_structures::sync::{Lrc, OnceCell, WorkerLocal};
 use rustc_errors::{ErrorGuaranteed, PResult};
 use rustc_expand::base::{ExtCtxt, LintStoreExpand, ResolverExpand};
@@ -31,7 +30,7 @@
 use rustc_query_impl::{OnDiskCache, Queries as TcxQueries};
 use rustc_resolve::{Resolver, ResolverArenas};
 use rustc_session::config::{CrateType, Input, OutputFilenames, OutputType};
-use rustc_session::cstore::{MetadataLoader, MetadataLoaderDyn};
+use rustc_session::cstore::{MetadataLoader, MetadataLoaderDyn, Untracked};
 use rustc_session::output::filename_for_input;
 use rustc_session::search_paths::PathKind;
 use rustc_session::{Limit, Session};
@@ -51,8 +50,8 @@
 use std::sync::LazyLock;
 use std::{env, fs, iter};
 
-pub fn parse<'a>(sess: &'a Session, input: &Input) -> PResult<'a, ast::Crate> {
-    let krate = sess.time("parse_crate", || match input {
+pub fn parse<'a>(sess: &'a Session) -> PResult<'a, ast::Crate> {
+    let krate = sess.time("parse_crate", || match &sess.io.input {
         Input::File(file) => parse_crate_from_file(file, &sess.parse_sess),
         Input::Str { input, name } => {
             parse_crate_from_source_str(name.clone(), input.clone(), &sess.parse_sess)
@@ -559,7 +558,7 @@ fn write_out_deps(
     }
     let deps_filename = outputs.path(OutputType::DepInfo);
 
-    let result = (|| -> io::Result<()> {
+    let result: io::Result<()> = try {
         // Build a list of files used to compile the output and
         // write Makefile-compatible dependency rules
         let mut files: Vec<String> = sess
@@ -646,9 +645,7 @@ fn write_out_deps(
                 writeln!(file)?;
             }
         }
-
-        Ok(())
-    })();
+    };
 
     match result {
         Ok(_) => {
@@ -666,7 +663,6 @@ fn write_out_deps(
 
 pub fn prepare_outputs(
     sess: &Session,
-    compiler: &Compiler,
     krate: &ast::Crate,
     boxed_resolver: &RefCell<BoxedResolver>,
     crate_name: Symbol,
@@ -674,20 +670,13 @@ pub fn prepare_outputs(
     let _timer = sess.timer("prepare_outputs");
 
     // FIXME: rustdoc passes &[] instead of &krate.attrs here
-    let outputs = util::build_output_filenames(
-        &compiler.input,
-        &compiler.output_dir,
-        &compiler.output_file,
-        &compiler.temps_dir,
-        &krate.attrs,
-        sess,
-    );
+    let outputs = util::build_output_filenames(&krate.attrs, sess);
 
     let output_paths =
-        generated_output_paths(sess, &outputs, compiler.output_file.is_some(), crate_name);
+        generated_output_paths(sess, &outputs, sess.io.output_file.is_some(), crate_name);
 
     // Ensure the source file isn't accidentally overwritten during compilation.
-    if let Some(ref input_path) = compiler.input_path {
+    if let Some(ref input_path) = sess.io.input.opt_path() {
         if sess.opts.will_create_output_file() {
             if output_contains_path(&output_paths, input_path) {
                 let reported = sess.emit_err(InputFileWouldBeOverWritten { path: input_path });
@@ -701,7 +690,7 @@ pub fn prepare_outputs(
         }
     }
 
-    if let Some(ref dir) = compiler.temps_dir {
+    if let Some(ref dir) = sess.io.temps_dir {
         if fs::create_dir_all(dir).is_err() {
             let reported = sess.emit_err(TempsDirError);
             return Err(reported);
@@ -714,7 +703,7 @@ pub fn prepare_outputs(
         && sess.opts.output_types.len() == 1;
 
     if !only_dep_info {
-        if let Some(ref dir) = compiler.output_dir {
+        if let Some(ref dir) = sess.io.output_dir {
             if fs::create_dir_all(dir).is_err() {
                 let reported = sess.emit_err(OutDirError);
                 return Err(reported);
@@ -775,11 +764,8 @@ pub fn enter<F, R>(&mut self, f: F) -> R
 pub fn create_global_ctxt<'tcx>(
     compiler: &'tcx Compiler,
     lint_store: Lrc<LintStore>,
-    krate: Lrc<ast::Crate>,
     dep_graph: DepGraph,
-    resolver: Rc<RefCell<BoxedResolver>>,
-    outputs: OutputFilenames,
-    crate_name: Symbol,
+    untracked: Untracked,
     queries: &'tcx OnceCell<TcxQueries<'tcx>>,
     global_ctxt: &'tcx OnceCell<GlobalCtxt<'tcx>>,
     arena: &'tcx WorkerLocal<Arena<'tcx>>,
@@ -790,8 +776,6 @@ pub fn create_global_ctxt<'tcx>(
     // incr. comp. yet.
     dep_graph.assert_ignored();
 
-    let resolver_outputs = BoxedResolver::to_resolver_outputs(resolver);
-
     let sess = &compiler.session();
     let query_result_on_disk_cache = rustc_incremental::load_query_result_cache(sess);
 
@@ -810,12 +794,6 @@ pub fn create_global_ctxt<'tcx>(
         TcxQueries::new(local_providers, extern_providers, query_result_on_disk_cache)
     });
 
-    let ty::ResolverOutputs {
-        global_ctxt: untracked_resolutions,
-        ast_lowering: untracked_resolver_for_lowering,
-        untracked,
-    } = resolver_outputs;
-
     let gcx = sess.time("setup_global_ctxt", || {
         global_ctxt.get_or_init(move || {
             TyCtxt::create_global_ctxt(
@@ -832,19 +810,7 @@ pub fn create_global_ctxt<'tcx>(
         })
     });
 
-    let mut qcx = QueryContext { gcx };
-    qcx.enter(|tcx| {
-        let feed = tcx.feed_unit_query();
-        feed.resolver_for_lowering(
-            tcx.arena.alloc(Steal::new((untracked_resolver_for_lowering, krate))),
-        );
-        feed.resolutions(tcx.arena.alloc(untracked_resolutions));
-        feed.output_filenames(tcx.arena.alloc(std::sync::Arc::new(outputs)));
-        feed.features_query(sess.features_untracked());
-        let feed = tcx.feed_local_crate();
-        feed.crate_name(crate_name);
-    });
-    qcx
+    QueryContext { gcx }
 }
 
 /// Runs the resolution, type-checking, region checking and other
index 041bb9eb7a1cbcd58b9239fa432b9784334fbc99..d5a49dd75be6a06bf7a7efa310342a139c15335b 100644 (file)
@@ -13,7 +13,7 @@
 use rustc_lint::LintStore;
 use rustc_middle::arena::Arena;
 use rustc_middle::dep_graph::DepGraph;
-use rustc_middle::ty::{GlobalCtxt, TyCtxt};
+use rustc_middle::ty::{self, GlobalCtxt, TyCtxt};
 use rustc_query_impl::Queries as TcxQueries;
 use rustc_session::config::{self, OutputFilenames, OutputType};
 use rustc_session::{output::find_crate_name, Session};
@@ -90,7 +90,6 @@ pub struct Queries<'tcx> {
     register_plugins: Query<(ast::Crate, Lrc<LintStore>)>,
     expansion: Query<(Lrc<ast::Crate>, Rc<RefCell<BoxedResolver>>, Lrc<LintStore>)>,
     dep_graph: Query<DepGraph>,
-    prepare_outputs: Query<OutputFilenames>,
     global_ctxt: Query<QueryContext<'tcx>>,
     ongoing_codegen: Query<Box<dyn Any>>,
 }
@@ -109,7 +108,6 @@ pub fn new(compiler: &'tcx Compiler) -> Queries<'tcx> {
             register_plugins: Default::default(),
             expansion: Default::default(),
             dep_graph: Default::default(),
-            prepare_outputs: Default::default(),
             global_ctxt: Default::default(),
             ongoing_codegen: Default::default(),
         }
@@ -130,10 +128,8 @@ fn dep_graph_future(&self) -> Result<QueryResult<'_, Option<DepGraphFuture>>> {
     }
 
     pub fn parse(&self) -> Result<QueryResult<'_, ast::Crate>> {
-        self.parse.compute(|| {
-            passes::parse(self.session(), &self.compiler.input)
-                .map_err(|mut parse_error| parse_error.emit())
-        })
+        self.parse
+            .compute(|| passes::parse(self.session()).map_err(|mut parse_error| parse_error.emit()))
     }
 
     pub fn register_plugins(&self) -> Result<QueryResult<'_, (ast::Crate, Lrc<LintStore>)>> {
@@ -161,13 +157,13 @@ pub fn register_plugins(&self) -> Result<QueryResult<'_, (ast::Crate, Lrc<LintSt
         })
     }
 
-    pub fn crate_name(&self) -> Result<QueryResult<'_, Symbol>> {
+    fn crate_name(&self) -> Result<QueryResult<'_, Symbol>> {
         self.crate_name.compute(|| {
             Ok({
                 let parse_result = self.parse()?;
                 let krate = parse_result.borrow();
                 // parse `#[crate_name]` even if `--crate-name` was passed, to make sure it matches.
-                find_crate_name(self.session(), &krate.attrs, &self.compiler.input)
+                find_crate_name(self.session(), &krate.attrs)
             })
         })
     }
@@ -211,40 +207,42 @@ fn dep_graph(&self) -> Result<QueryResult<'_, DepGraph>> {
         })
     }
 
-    pub fn prepare_outputs(&self) -> Result<QueryResult<'_, OutputFilenames>> {
-        self.prepare_outputs.compute(|| {
-            let expansion = self.expansion()?;
-            let (krate, boxed_resolver, _) = &*expansion.borrow();
-            let crate_name = *self.crate_name()?.borrow();
-            passes::prepare_outputs(
-                self.session(),
-                self.compiler,
-                krate,
-                &*boxed_resolver,
-                crate_name,
-            )
-        })
-    }
-
     pub fn global_ctxt(&'tcx self) -> Result<QueryResult<'_, QueryContext<'tcx>>> {
         self.global_ctxt.compute(|| {
             let crate_name = *self.crate_name()?.borrow();
-            let outputs = self.prepare_outputs()?.steal();
-            let dep_graph = self.dep_graph()?.borrow().clone();
             let (krate, resolver, lint_store) = self.expansion()?.steal();
-            Ok(passes::create_global_ctxt(
+
+            let outputs = passes::prepare_outputs(self.session(), &krate, &resolver, crate_name)?;
+
+            let ty::ResolverOutputs {
+                untracked,
+                global_ctxt: untracked_resolutions,
+                ast_lowering: untracked_resolver_for_lowering,
+            } = BoxedResolver::to_resolver_outputs(resolver);
+
+            let mut qcx = passes::create_global_ctxt(
                 self.compiler,
                 lint_store,
-                krate,
-                dep_graph,
-                resolver,
-                outputs,
-                crate_name,
+                self.dep_graph()?.steal(),
+                untracked,
                 &self.queries,
                 &self.gcx,
                 &self.arena,
                 &self.hir_arena,
-            ))
+            );
+
+            qcx.enter(|tcx| {
+                let feed = tcx.feed_unit_query();
+                feed.resolver_for_lowering(
+                    tcx.arena.alloc(Steal::new((untracked_resolver_for_lowering, krate))),
+                );
+                feed.resolutions(tcx.arena.alloc(untracked_resolutions));
+                feed.output_filenames(tcx.arena.alloc(std::sync::Arc::new(outputs)));
+                feed.features_query(tcx.sess.features_untracked());
+                let feed = tcx.feed_local_crate();
+                feed.crate_name(crate_name);
+            });
+            Ok(qcx)
         })
     }
 
index 07b28cc86cee1c95601f320e5ead863001913ab7..f94bc4d4c66ac9d0fd872586a12308b6aa86731d 100644 (file)
@@ -4,6 +4,7 @@
 use rustc_data_structures::fx::FxHashSet;
 use rustc_errors::{emitter::HumanReadableErrorType, registry, ColorConfig};
 use rustc_session::config::rustc_optgroups;
+use rustc_session::config::Input;
 use rustc_session::config::TraitSolver;
 use rustc_session::config::{build_configuration, build_session_options, to_crate_config};
 use rustc_session::config::{
 use rustc_session::lint::Level;
 use rustc_session::search_paths::SearchPath;
 use rustc_session::utils::{CanonicalizedPath, NativeLib, NativeLibKind};
+use rustc_session::CompilerIO;
 use rustc_session::{build_session, getopts, Session};
 use rustc_span::edition::{Edition, DEFAULT_EDITION};
 use rustc_span::symbol::sym;
+use rustc_span::FileName;
 use rustc_span::SourceFileHashAlgorithm;
 use rustc_target::spec::{CodeModel, LinkerFlavorCli, MergeFunctions, PanicStrategy, RelocModel};
 use rustc_target::spec::{RelroLevel, SanitizerSet, SplitDebuginfo, StackProtector, TlsModel};
@@ -39,7 +42,14 @@ fn build_session_options_and_crate_config(matches: getopts::Matches) -> (Options
 fn mk_session(matches: getopts::Matches) -> (Session, CfgSpecs) {
     let registry = registry::Registry::new(&[]);
     let (sessopts, cfg) = build_session_options_and_crate_config(matches);
-    let sess = build_session(sessopts, None, None, registry, Default::default(), None, None);
+    let temps_dir = sessopts.unstable_opts.temps_dir.as_deref().map(PathBuf::from);
+    let io = CompilerIO {
+        input: Input::Str { name: FileName::Custom(String::new()), input: String::new() },
+        output_dir: None,
+        output_file: None,
+        temps_dir,
+    };
+    let sess = build_session(sessopts, io, None, registry, Default::default(), None, None);
     (sess, cfg)
 }
 
index 02a7756c8d4532077cde69573a8c3e49c1b6ba76..54363e07b971a2830c789a7c34336d362e480573 100644 (file)
@@ -8,7 +8,7 @@
 use rustc_session as session;
 use rustc_session::config::CheckCfg;
 use rustc_session::config::{self, CrateType};
-use rustc_session::config::{ErrorOutputType, Input, OutputFilenames};
+use rustc_session::config::{ErrorOutputType, OutputFilenames};
 use rustc_session::filesearch::sysroot_candidates;
 use rustc_session::lint::{self, BuiltinLintDiagnostics, LintBuffer};
 use rustc_session::parse::CrateConfig;
@@ -17,6 +17,7 @@
 use rustc_span::lev_distance::find_best_match_for_name;
 use rustc_span::source_map::FileLoader;
 use rustc_span::symbol::{sym, Symbol};
+use session::CompilerIO;
 use std::env;
 use std::env::consts::{DLL_PREFIX, DLL_SUFFIX};
 use std::mem;
@@ -58,7 +59,7 @@ pub fn create_session(
     cfg: FxHashSet<(String, Option<String>)>,
     check_cfg: CheckCfg,
     file_loader: Option<Box<dyn FileLoader + Send + Sync + 'static>>,
-    input_path: Option<PathBuf>,
+    io: CompilerIO,
     lint_caps: FxHashMap<lint::LintId, lint::Level>,
     make_codegen_backend: Option<
         Box<dyn FnOnce(&config::Options) -> Box<dyn CodegenBackend> + Send>,
@@ -89,7 +90,7 @@ pub fn create_session(
 
     let mut sess = session::build_session(
         sopts,
-        input_path,
+        io,
         bundle,
         descriptions,
         lint_caps,
@@ -486,20 +487,13 @@ pub fn collect_crate_types(session: &Session, attrs: &[ast::Attribute]) -> Vec<C
     base
 }
 
-pub fn build_output_filenames(
-    input: &Input,
-    odir: &Option<PathBuf>,
-    ofile: &Option<PathBuf>,
-    temps_dir: &Option<PathBuf>,
-    attrs: &[ast::Attribute],
-    sess: &Session,
-) -> OutputFilenames {
-    match *ofile {
+pub fn build_output_filenames(attrs: &[ast::Attribute], sess: &Session) -> OutputFilenames {
+    match sess.io.output_file {
         None => {
             // "-" as input file will cause the parser to read from stdin so we
             // have to make up a name
             // We want to toss everything after the final '.'
-            let dirpath = (*odir).as_ref().cloned().unwrap_or_default();
+            let dirpath = sess.io.output_dir.clone().unwrap_or_default();
 
             // If a crate name is present, we use it as the link name
             let stem = sess
@@ -507,13 +501,13 @@ pub fn build_output_filenames(
                 .crate_name
                 .clone()
                 .or_else(|| rustc_attr::find_crate_name(sess, attrs).map(|n| n.to_string()))
-                .unwrap_or_else(|| input.filestem().to_owned());
+                .unwrap_or_else(|| sess.io.input.filestem().to_owned());
 
             OutputFilenames::new(
                 dirpath,
                 stem,
                 None,
-                temps_dir.clone(),
+                sess.io.temps_dir.clone(),
                 sess.opts.cg.extra_filename.clone(),
                 sess.opts.output_types.clone(),
             )
@@ -534,7 +528,7 @@ pub fn build_output_filenames(
                 }
                 Some(out_file.clone())
             };
-            if *odir != None {
+            if sess.io.output_dir != None {
                 sess.warn("ignoring --out-dir flag due to -o flag");
             }
 
@@ -542,7 +536,7 @@ pub fn build_output_filenames(
                 out_file.parent().unwrap_or_else(|| Path::new("")).to_path_buf(),
                 out_file.file_stem().unwrap_or_default().to_str().unwrap().to_string(),
                 ofile,
-                temps_dir.clone(),
+                sess.io.temps_dir.clone(),
                 sess.opts.cg.extra_filename.clone(),
                 sess.opts.output_types.clone(),
             )
index 4c65fca29b89721542148dcfadf447bee9f7048b..6e815863d06ff9e226d6fa1f5bb06fd0152a6e5d 100644 (file)
@@ -2,7 +2,7 @@
 //!
 //! The idea with `rustc_lexer` is to make a reusable library,
 //! by separating out pure lexing and rustc-specific concerns, like spans,
-//! error reporting, and interning.  So, rustc_lexer operates directly on `&str`,
+//! error reporting, and interning. So, rustc_lexer operates directly on `&str`,
 //! produces simple tokens which are a pair of type-tag and a bit of original text,
 //! and does not report errors, instead storing them as flags on the token.
 //!
index 87c44638a8de1918883375cd0d6357dfebd8a16b..8507ca9d89ed776b77228fec41521ab835ce75e3 100644 (file)
@@ -299,7 +299,7 @@ fn skip_ascii_whitespace<F>(chars: &mut Chars<'_>, start: usize, callback: &mut
         let tail = &tail[first_non_space..];
         if let Some(c) = tail.chars().nth(0) {
             // For error reporting, we would like the span to contain the character that was not
-            // skipped.  The +1 is necessary to account for the leading \ that started the escape.
+            // skipped. The +1 is necessary to account for the leading \ that started the escape.
             let end = start + first_non_space + c.len_utf8() + 1;
             if c.is_whitespace() {
                 callback(start..end, Err(EscapeError::UnskippedWhitespaceWarning));
index 6f445426df70e7218c5b1a744a7ca34fdd73e66f..fe188162cf85bee0c694155f9abcc97057981e88 100644 (file)
@@ -72,7 +72,7 @@
 use rustc_span::{BytePos, InnerSpan, Span};
 use rustc_target::abi::{Abi, VariantIdx};
 use rustc_trait_selection::infer::{InferCtxtExt, TyCtxtInferExt};
-use rustc_trait_selection::traits::{self, misc::can_type_implement_copy, EvaluationResult};
+use rustc_trait_selection::traits::{self, misc::type_allowed_to_implement_copy};
 
 use crate::nonstandard_style::{method_context, MethodLateContext};
 
@@ -709,12 +709,14 @@ fn check_item(&mut self, cx: &LateContext<'_>, item: &hir::Item<'_>) {
 
         // We shouldn't recommend implementing `Copy` on stateful things,
         // such as iterators.
-        if let Some(iter_trait) = cx.tcx.get_diagnostic_item(sym::Iterator) {
-            if cx.tcx.infer_ctxt().build().type_implements_trait(iter_trait, [ty], param_env)
-                == EvaluationResult::EvaluatedToOk
-            {
-                return;
-            }
+        if let Some(iter_trait) = cx.tcx.get_diagnostic_item(sym::Iterator)
+            && cx.tcx
+                .infer_ctxt()
+                .build()
+                .type_implements_trait(iter_trait, [ty], param_env)
+                .must_apply_modulo_regions()
+        {
+            return;
         }
 
         // Default value of clippy::trivially_copy_pass_by_ref
@@ -726,7 +728,7 @@ fn check_item(&mut self, cx: &LateContext<'_>, item: &hir::Item<'_>) {
             }
         }
 
-        if can_type_implement_copy(
+        if type_allowed_to_implement_copy(
             cx.tcx,
             param_env,
             ty,
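
The hunk above folds the nested `if let` plus `EvaluationResult` comparison into a single let-chain guarded by `must_apply_modulo_regions()`. A minimal standalone sketch of the let-chain shape itself (this needs the nightly `let_chains` feature, which rustc's own sources build with; the function and data are made up for illustration):

```rust
// Sketch of the `if let ... && condition` let-chain pattern used in the hunk
// above. Nightly-only: the syntax is gated behind `let_chains`.
#![feature(let_chains)]

fn first_even_doubled(xs: &[i32]) -> Option<i32> {
    // Equivalent to `if let Some(&x) = xs.first() { if x % 2 == 0 { ... } }`,
    // but without the extra nesting.
    if let Some(&x) = xs.first()
        && x % 2 == 0
    {
        return Some(x * 2);
    }
    None
}

fn main() {
    assert_eq!(first_even_doubled(&[4, 5]), Some(8));
    assert_eq!(first_even_doubled(&[3, 4]), None);
}
```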
index c9b9a62257148c51a8614717ade5d77b03cf30d1..8046cc21cea583eb73f15cb45228593a05139af6 100644 (file)
@@ -825,21 +825,24 @@ fn lookup_with_diagnostics(
                     debug!(?param_span, ?use_span, ?deletion_span);
                     db.span_label(param_span, "this lifetime...");
                     db.span_label(use_span, "...is used only here");
-                    let msg = "elide the single-use lifetime";
-                    let (use_span, replace_lt) = if elide {
-                        let use_span = sess.source_map().span_extend_while(
-                            use_span,
-                            char::is_whitespace,
-                        ).unwrap_or(use_span);
-                        (use_span, String::new())
-                    } else {
-                        (use_span, "'_".to_owned())
-                    };
-                    db.multipart_suggestion(
-                        msg,
-                        vec![(deletion_span, String::new()), (use_span, replace_lt)],
-                        Applicability::MachineApplicable,
-                    );
+                    if let Some(deletion_span) = deletion_span {
+                        let msg = "elide the single-use lifetime";
+                        let (use_span, replace_lt) = if elide {
+                            let use_span = sess.source_map().span_extend_while(
+                                use_span,
+                                char::is_whitespace,
+                            ).unwrap_or(use_span);
+                            (use_span, String::new())
+                        } else {
+                            (use_span, "'_".to_owned())
+                        };
+                        debug!(?deletion_span, ?use_span);
+                        db.multipart_suggestion(
+                            msg,
+                            vec![(deletion_span, String::new()), (use_span, replace_lt)],
+                            Applicability::MachineApplicable,
+                        );
+                    }
                 },
                 BuiltinLintDiagnostics::SingleUseLifetime {
                     param_span: _,
@@ -847,12 +850,14 @@ fn lookup_with_diagnostics(
                     deletion_span,
                 } => {
                     debug!(?deletion_span);
-                    db.span_suggestion(
-                        deletion_span,
-                        "elide the unused lifetime",
-                        "",
-                        Applicability::MachineApplicable,
-                    );
+                    if let Some(deletion_span) = deletion_span {
+                        db.span_suggestion(
+                            deletion_span,
+                            "elide the unused lifetime",
+                            "",
+                            Applicability::MachineApplicable,
+                        );
+                    }
                 },
                 BuiltinLintDiagnostics::NamedArgumentUsedPositionally{ position_sp_to_replace, position_sp_for_msg, named_arg_sp, named_arg_name, is_formatting_arg} => {
                     db.span_label(named_arg_sp, "this named argument is referred to by position in formatting string");
index f9b2df49592244fa701928bfbe71eeac0357df41..337a19dd024d2fec195601c77de253036df847bf 100644 (file)
@@ -248,7 +248,9 @@ fn visit_generics(&mut self, g: &'a ast::Generics) {
     }
 
     fn visit_where_predicate(&mut self, p: &'a ast::WherePredicate) {
+        lint_callback!(self, enter_where_predicate, p);
         ast_visit::walk_where_predicate(self, p);
+        lint_callback!(self, exit_where_predicate, p);
     }
 
     fn visit_poly_trait_ref(&mut self, t: &'a ast::PolyTraitRef) {
index 5eb54cc0034279392c1eac3a4cdf8ad6b7d78b80..6cefaea2bc7da8af59508ff2cad32366ccce9d54 100644 (file)
@@ -187,9 +187,9 @@ fn check_ty(&mut self, cx: &LateContext<'_>, ty: &'tcx Ty<'tcx>) {
                         },
                         None => cx.emit_spanned_lint(USAGE_OF_TY_TYKIND, path.span, TykindDiag),
                     }
-                } else if !ty.span.from_expansion() && path.segments.len() > 1 && let Some(t) = is_ty_or_ty_ctxt(cx, &path) {
+                } else if !ty.span.from_expansion() && path.segments.len() > 1 && let Some(ty) = is_ty_or_ty_ctxt(cx, &path) {
                     cx.emit_spanned_lint(USAGE_OF_QUALIFIED_TY, path.span, TyQualified {
-                        ty: t.clone(),
+                        ty,
                         suggestion: path.span,
                     });
                 }
index 09dfb1022d857205b5699e6f9b46a2843d5efb29..cca36913dea113650269b7376a8bef5b31da5c31 100644 (file)
@@ -50,7 +50,7 @@ struct LintStackIndex {
     }
 }
 
-/// Specifications found at this position in the stack.  This map only represents the lints
+/// Specifications found at this position in the stack. This map only represents the lints
 /// found for one set of attributes (like `shallow_lint_levels_on` does).
 ///
 /// We store the level specifications as a linked list.
@@ -163,7 +163,7 @@ fn shallow_lint_levels_on(tcx: TyCtxt<'_>, owner: hir::OwnerId) -> ShallowLintLe
     match attrs.map.range(..) {
         // There is only something to do if there are attributes at all.
         [] => {}
-        // Most of the time, there is only one attribute.  Avoid fetching HIR in that case.
+        // Most of the time, there is only one attribute. Avoid fetching HIR in that case.
         [(local_id, _)] => levels.add_id(HirId { owner, local_id: *local_id }),
         // Otherwise, we need to visit the attributes in source code order, so we fetch HIR and do
         // a standard visit.
index 3d818154cb94ff48c1a7c33fd2a19d473b089588..d6be4da03286f75f1e4868e5bac454b345008a8b 100644 (file)
@@ -145,7 +145,7 @@ fn lint_mod(tcx: TyCtxt<'_>, module_def_id: LocalDefId) {
     [
         pub BuiltinCombinedEarlyLintPass,
         [
-            UnusedParens: UnusedParens,
+            UnusedParens: UnusedParens::new(),
             UnusedBraces: UnusedBraces,
             UnusedImportBraces: UnusedImportBraces,
             UnsafeCode: UnsafeCode,
index c3782a496891db3797bab13879bc32e54f024d02..c997d8945d16ebdf2d5ac2f4a3f9dbee0e41d8b3 100644 (file)
@@ -277,7 +277,7 @@ fn add_to_diagnostic_with<F>(self, diag: &mut rustc_errors::Diagnostic, _: F)
         ) -> rustc_errors::SubdiagnosticMessage,
     {
         // Access to associates types should use `<T as Bound>::Assoc`, which does not need a
-        // bound.  Let's see if this type does that.
+        // bound. Let's see if this type does that.
 
         // We use a HIR visitor to walk the type.
         use rustc_hir::intravisit::{self, Visitor};
index 57482a9edba880761fd97b38a0b4dc567aba419d..392e13f2fa94165d802ac24c0913934dd5d43d1b 100644 (file)
@@ -32,7 +32,7 @@ fn check_ty(&mut self, cx: &LateContext<'_>, ty: &'tcx hir::Ty<'tcx>) {
                     cx.emit_spanned_lint(
                         PASS_BY_VALUE,
                         ty.span,
-                        PassByValueDiag { ty: t.clone(), suggestion: ty.span },
+                        PassByValueDiag { ty: t, suggestion: ty.span },
                     );
                 }
             }
index 5558156a4b9ef040a07ff54271328b64f31e2ba4..0bf01c4e567814f1f720de66362f310cf5c0f934 100644 (file)
@@ -171,6 +171,9 @@ macro_rules! early_lint_methods {
 
             /// Counterpart to `enter_lint_attrs`.
             fn exit_lint_attrs(a: &[ast::Attribute]);
+
+            fn enter_where_predicate(a: &ast::WherePredicate);
+            fn exit_where_predicate(a: &ast::WherePredicate);
         ]);
     )
 }
index f2ab44ac97c838410338ee687e3a434f81c2eb43..be47a3e238c1c16ba4fcb46616264e75637718f0 100644 (file)
@@ -1147,7 +1147,7 @@ impl<'tcx> ty::visit::TypeVisitor<'tcx> for ProhibitOpaqueTypes {
 
             fn visit_ty(&mut self, ty: Ty<'tcx>) -> ControlFlow<Self::BreakTy> {
                 if !ty.has_opaque_types() {
-                    return ControlFlow::CONTINUE;
+                    return ControlFlow::Continue(());
                 }
 
                 if let ty::Alias(ty::Opaque, ..) = ty.kind() {
index ac2b32b44e6a1d42e16ab70e7f41872df229b829..4c9b3df2dbd33d706d04c3cc5e911ceeb9c70a2c 100644 (file)
@@ -824,7 +824,17 @@ fn check_item(&mut self, cx: &EarlyContext<'_>, item: &ast::Item) {
     "`if`, `match`, `while` and `return` do not need parentheses"
 }
 
-declare_lint_pass!(UnusedParens => [UNUSED_PARENS]);
+pub struct UnusedParens {
+    with_self_ty_parens: bool,
+}
+
+impl UnusedParens {
+    pub fn new() -> Self {
+        Self { with_self_ty_parens: false }
+    }
+}
+
+impl_lint_pass!(UnusedParens => [UNUSED_PARENS]);
 
 impl UnusedDelimLint for UnusedParens {
     const DELIM_STR: &'static str = "parentheses";
@@ -999,36 +1009,58 @@ fn check_arm(&mut self, cx: &EarlyContext<'_>, arm: &ast::Arm) {
     }
 
     fn check_ty(&mut self, cx: &EarlyContext<'_>, ty: &ast::Ty) {
-        if let ast::TyKind::Paren(r) = &ty.kind {
-            match &r.kind {
-                ast::TyKind::TraitObject(..) => {}
-                ast::TyKind::BareFn(b) if b.generic_params.len() > 0 => {}
-                ast::TyKind::ImplTrait(_, bounds) if bounds.len() > 1 => {}
-                ast::TyKind::Array(_, len) => {
-                    self.check_unused_delims_expr(
-                        cx,
-                        &len.value,
-                        UnusedDelimsCtx::ArrayLenExpr,
-                        false,
-                        None,
-                        None,
-                    );
-                }
-                _ => {
-                    let spans = if let Some(r) = r.span.find_ancestor_inside(ty.span) {
-                        Some((ty.span.with_hi(r.lo()), ty.span.with_lo(r.hi())))
-                    } else {
-                        None
-                    };
-                    self.emit_unused_delims(cx, ty.span, spans, "type", (false, false));
+        match &ty.kind {
+            ast::TyKind::Array(_, len) => {
+                self.check_unused_delims_expr(
+                    cx,
+                    &len.value,
+                    UnusedDelimsCtx::ArrayLenExpr,
+                    false,
+                    None,
+                    None,
+                );
+            }
+            ast::TyKind::Paren(r) => {
+                match &r.kind {
+                    ast::TyKind::TraitObject(..) => {}
+                    ast::TyKind::BareFn(b)
+                        if self.with_self_ty_parens && b.generic_params.len() > 0 => {}
+                    ast::TyKind::ImplTrait(_, bounds) if bounds.len() > 1 => {}
+                    _ => {
+                        let spans = if let Some(r) = r.span.find_ancestor_inside(ty.span) {
+                            Some((ty.span.with_hi(r.lo()), ty.span.with_lo(r.hi())))
+                        } else {
+                            None
+                        };
+                        self.emit_unused_delims(cx, ty.span, spans, "type", (false, false));
+                    }
                 }
+                self.with_self_ty_parens = false;
             }
+            _ => {}
         }
     }
 
     fn check_item(&mut self, cx: &EarlyContext<'_>, item: &ast::Item) {
         <Self as UnusedDelimLint>::check_item(self, cx, item)
     }
+
+    fn enter_where_predicate(&mut self, _: &EarlyContext<'_>, pred: &ast::WherePredicate) {
+        use rustc_ast::{WhereBoundPredicate, WherePredicate};
+        if let WherePredicate::BoundPredicate(WhereBoundPredicate {
+                bounded_ty,
+                bound_generic_params,
+                ..
+            }) = pred &&
+            let ast::TyKind::Paren(_) = &bounded_ty.kind &&
+            bound_generic_params.is_empty() {
+                self.with_self_ty_parens = true;
+        }
+    }
+
+    fn exit_where_predicate(&mut self, _: &EarlyContext<'_>, _: &ast::WherePredicate) {
+        assert!(!self.with_self_ty_parens);
+    }
 }
 
 declare_lint! {
@@ -1095,14 +1127,19 @@ fn check_unused_delims_expr(
                 //      ```
                 // - the block has no attribute and was not created inside a macro
                 // - if the block is an `anon_const`, the inner expr must be a literal
-                //      (do not lint `struct A<const N: usize>; let _: A<{ 2 + 3 }>;`)
-                //
+                //   not created by a macro, i.e. do not lint on:
+                //      ```
+                //      struct A<const N: usize>;
+                //      let _: A<{ 2 + 3 }>;
+                //      let _: A<{produces_literal!()}>;
+                //      ```
                 // FIXME(const_generics): handle paths when #67075 is fixed.
                 if let [stmt] = inner.stmts.as_slice() {
                     if let ast::StmtKind::Expr(ref expr) = stmt.kind {
                         if !Self::is_expr_delims_necessary(expr, followed_by_block, false)
                             && (ctx != UnusedDelimsCtx::AnonConst
-                                || matches!(expr.kind, ast::ExprKind::Lit(_)))
+                                || (matches!(expr.kind, ast::ExprKind::Lit(_))
+                                    && !expr.span.from_expansion()))
                             && !cx.sess().source_map().is_multiline(value.span)
                             && value.attrs.is_empty()
                             && !value.span.from_expansion()
index 6cdf50970836a99ab5a7c73033a5d68ff202c0bd..b6481d70bc8898fe2de912d9f382e90233fa28bd 100644 (file)
     ///
     /// ### Example
     ///
-    /// ```rust
+    /// ```rust,compile_fail
     /// pub enum Enum {
     ///     Foo,
     ///     Bar,
     /// [identifier pattern]: https://doc.rust-lang.org/reference/patterns.html#identifier-patterns
     /// [path pattern]: https://doc.rust-lang.org/reference/patterns.html#path-patterns
     pub BINDINGS_WITH_VARIANT_NAME,
-    Warn,
+    Deny,
     "detects pattern bindings with the same name as one of the matched variants"
 }
 
     };
 }
 
+declare_lint! {
+    /// The `proc_macro_derive_resolution_fallback` lint detects proc macro
+    /// derives using inaccessible names from parent modules.
+    ///
+    /// ### Example
+    ///
+    /// ```rust,ignore (proc-macro)
+    /// // foo.rs
+    /// #![crate_type = "proc-macro"]
+    ///
+    /// extern crate proc_macro;
+    ///
+    /// use proc_macro::*;
+    ///
+    /// #[proc_macro_derive(Foo)]
+    /// pub fn foo1(a: TokenStream) -> TokenStream {
+    ///     drop(a);
+    ///     "mod __bar { static mut BAR: Option<Something> = None; }".parse().unwrap()
+    /// }
+    /// ```
+    ///
+    /// ```rust,ignore (needs-dependency)
+    /// // bar.rs
+    /// #[macro_use]
+    /// extern crate foo;
+    ///
+    /// struct Something;
+    ///
+    /// #[derive(Foo)]
+    /// struct Another;
+    ///
+    /// fn main() {}
+    /// ```
+    ///
+    /// This will produce:
+    ///
+    /// ```text
+    /// warning: cannot find type `Something` in this scope
+    ///  --> src/main.rs:8:10
+    ///   |
+    /// 8 | #[derive(Foo)]
+    ///   |          ^^^ names from parent modules are not accessible without an explicit import
+    ///   |
+    ///   = note: `#[warn(proc_macro_derive_resolution_fallback)]` on by default
+    ///   = warning: this was previously accepted by the compiler but is being phased out; it will become a hard error in a future release!
+    ///   = note: for more information, see issue #50504 <https://github.com/rust-lang/rust/issues/50504>
+    /// ```
+    ///
+    /// ### Explanation
+    ///
+    /// If a proc-macro generates a module, the compiler unintentionally
+    /// allowed items in that module to refer to items in the crate root
+    /// without importing them. This is a [future-incompatible] lint to
+    /// transition this to a hard error in the future. See [issue #50504] for
+    /// more details.
+    ///
+    /// [issue #50504]: https://github.com/rust-lang/rust/issues/50504
+    /// [future-incompatible]: ../index.md#future-incompatible-lints
+    pub PROC_MACRO_DERIVE_RESOLUTION_FALLBACK,
+    Deny,
+    "detects proc macro derives using inaccessible names from parent modules",
+    @future_incompatible = FutureIncompatibleInfo {
+        reference: "issue #83583 <https://github.com/rust-lang/rust/issues/83583>",
+        reason: FutureIncompatibilityReason::FutureReleaseErrorReportNow,
+    };
+}
+
 declare_lint! {
     /// The `macro_use_extern_crate` lint detects the use of the
     /// [`macro_use` attribute].
     "trailing semicolon in macro body used as expression",
     @future_incompatible = FutureIncompatibleInfo {
         reference: "issue #79813 <https://github.com/rust-lang/rust/issues/79813>",
+        reason: FutureIncompatibilityReason::FutureReleaseErrorReportNow,
     };
 }
 
         UNSTABLE_NAME_COLLISIONS,
         IRREFUTABLE_LET_PATTERNS,
         WHERE_CLAUSES_OBJECT_SAFETY,
+        PROC_MACRO_DERIVE_RESOLUTION_FALLBACK,
         MACRO_USE_EXTERN_CRATE,
         MACRO_EXPANDED_MACRO_EXPORTS_ACCESSED_BY_ABSOLUTE_PATHS,
         ILL_FORMED_ATTRIBUTE_INPUT,
index f4b4c5168bfd6c392b2c6c0b4cf3f53fcf2e9d09..7054d1e9f105e4392e513f197f3dc91d0632fe2c 100644 (file)
@@ -6,8 +6,9 @@
 extern crate rustc_macros;
 
 pub use self::Level::*;
-use rustc_ast::node_id::{NodeId, NodeMap};
+use rustc_ast::node_id::NodeId;
 use rustc_ast::{AttrId, Attribute};
+use rustc_data_structures::fx::FxIndexMap;
 use rustc_data_structures::stable_hasher::{HashStable, StableHasher, ToStableHashKey};
 use rustc_error_messages::{DiagnosticMessage, MultiSpan};
 use rustc_hir::HashStableContext;
@@ -502,7 +503,7 @@ pub enum BuiltinLintDiagnostics {
         param_span: Span,
         /// Span of the code that should be removed when eliding this lifetime.
         /// This span should include leading or trailing comma.
-        deletion_span: Span,
+        deletion_span: Option<Span>,
         /// Span of the single use, or None if the lifetime is never used.
         /// If true, the lifetime will be fully elided.
         use_span: Option<(Span, bool)>,
@@ -544,7 +545,7 @@ pub struct BufferedEarlyLint {
 
 #[derive(Default)]
 pub struct LintBuffer {
-    pub map: NodeMap<Vec<BufferedEarlyLint>>,
+    pub map: FxIndexMap<NodeId, Vec<BufferedEarlyLint>>,
 }
 
 impl LintBuffer {
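
The `LintBuffer` change above swaps `NodeMap` (a hash map keyed by `NodeId`) for `FxIndexMap`, which additionally iterates in insertion order. A small stand-in illustration of that difference, using the `indexmap` crate in place of rustc's `FxIndexMap` (the ordering behaviour is the point of interest; the actual type lives in `rustc_data_structures`):

```rust
// Sketch only: `indexmap::IndexMap` stands in for rustc's `FxIndexMap`.
use indexmap::IndexMap;
use std::collections::HashMap;

fn main() {
    let mut hashed: HashMap<u32, &str> = HashMap::new();
    let mut indexed: IndexMap<u32, &str> = IndexMap::new();
    for (k, v) in [(30, "c"), (10, "a"), (20, "b")] {
        hashed.insert(k, v);
        indexed.insert(k, v);
    }
    // HashMap iteration order is unspecified; IndexMap yields insertion order,
    // so draining buffered entries visits them deterministically.
    let order: Vec<u32> = indexed.keys().copied().collect();
    assert_eq!(order, vec![30, 10, 20]);
}
```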
index 2865ea8927336143490193bddbeaec125dda6789..f728bff0e3b91a3a1665ae58eab72654474ed1c9 100644 (file)
@@ -461,7 +461,7 @@ extern "C" void LLVMRustAddLibraryInfo(LLVMPassManagerRef PMR, LLVMModuleRef M,
 
 extern "C" void LLVMRustSetLLVMOptions(int Argc, char **Argv) {
   // Initializing the command-line options more than once is not allowed. So,
-  // check if they've already been initialized.  (This could happen if we're
+  // check if they've already been initialized. (This could happen if we're
   // being called from rustpkg, for example). If the arguments change, then
   // that's just kinda unfortunate.
   static bool Initialized = false;
@@ -1428,7 +1428,7 @@ LLVMRustThinLTOBufferLen(const LLVMRustThinLTOBuffer *Buffer) {
 }
 
 // This is what we used to parse upstream bitcode for actual ThinLTO
-// processing.  We'll call this once per module optimized through ThinLTO, and
+// processing. We'll call this once per module optimized through ThinLTO, and
 // it'll be called concurrently on many threads.
 extern "C" LLVMModuleRef
 LLVMRustParseBitcodeForLTO(LLVMContextRef Context,
index 8f94e8a4ab2e1d2422d009ec6665eeb7e209ab88..87b0e1273eb7761ed06ab0390704af2ae8c2b645 100644 (file)
@@ -1349,18 +1349,16 @@ extern "C" LLVMTypeKind LLVMRustGetTypeKind(LLVMTypeRef Ty) {
     return LLVMBFloatTypeKind;
   case Type::X86_AMXTyID:
     return LLVMX86_AMXTypeKind;
-#if LLVM_VERSION_GE(15, 0) && LLVM_VERSION_LT(16, 0)
-  case Type::DXILPointerTyID:
-    report_fatal_error("Rust does not support DirectX typed pointers.");
-    break;
-#endif
-#if LLVM_VERSION_GE(16, 0)
-  case Type::TypedPointerTyID:
-    report_fatal_error("Rust does not support typed pointers.");
-    break;
-#endif
+  default:
+    {
+      std::string error;
+      llvm::raw_string_ostream stream(error);
+      stream << "Rust does not support the TypeID: " << unwrap(Ty)->getTypeID()
+             << " for the type: " << *unwrap(Ty);
+      stream.flush();
+      report_fatal_error(error.c_str());
+    }
   }
-  report_fatal_error("Unhandled TypeID.");
 }
 
 DEFINE_SIMPLE_CONVERSION_FUNCTIONS(SMDiagnostic, LLVMSMDiagnosticRef)
index 14e6aa6e0c17baec8b57e251f08a96980c71b11a..1f95661ce9d5f54bb770a764c902a835bec7afe1 100644 (file)
@@ -26,7 +26,7 @@ fn visit_with<__V: ::rustc_middle::ty::visit::TypeVisitor<'tcx>>(
                 __visitor: &mut __V
             ) -> ::std::ops::ControlFlow<__V::BreakTy> {
                 match *self { #body_visit }
-                ::std::ops::ControlFlow::CONTINUE
+                ::std::ops::ControlFlow::Continue(())
             }
         },
     )
index 7601f6bd3221efaae15ef9b76f4c54366cd9628c..f6431899731fffdeea6d97a3170f96e924ba984c 100644 (file)
@@ -90,7 +90,7 @@ enum MetadataKind {
     let _prof_timer = tcx.sess.prof.generic_activity("write_crate_metadata");
 
     // If the user requests metadata as output, rename `metadata_filename`
-    // to the expected output `out_filename`.  The match above should ensure
+    // to the expected output `out_filename`. The match above should ensure
     // this file always exists.
     let need_metadata_file = tcx.sess.opts.output_types.contains_key(&OutputType::Metadata);
     let (metadata_filename, metadata_tmpdir) = if need_metadata_file {
index 92dc5bd41cbab45ae3ac84031c10d63b2f0b01c9..0f5f74007c1060536912477a6b5815635a37a7d1 100644 (file)
@@ -591,7 +591,7 @@ fn extract_one(
                     Err(MetadataError::LoadFailure(err)) => {
                         info!("no metadata found: {}", err);
                         // The file was present and created by the same compiler version, but we
-                        // couldn't load it for some reason.  Give a hard error instead of silently
+                        // couldn't load it for some reason. Give a hard error instead of silently
                         // ignoring it, but only if we would have given an error anyway.
                         self.crate_rejections
                             .via_invalid
index 59869ee417377635ff55a3093aa64ebb3f2779ca..6f05c76e89de1adc89b5e7344f186dd3d565c8c4 100644 (file)
@@ -433,10 +433,10 @@ fn process_command_line(&mut self) {
         }
 
         // Update kind and, optionally, the name of all native libraries
-        // (there may be more than one) with the specified name.  If any
+        // (there may be more than one) with the specified name. If any
         // library is mentioned more than once, keep the latest mention
         // of it, so that any possible dependent libraries appear before
-        // it.  (This ensures that the linker is able to see symbols from
+        // it. (This ensures that the linker is able to see symbols from
         // all possible dependent libraries before linking in the library
         // in question.)
         for passed_lib in &self.tcx.sess.opts.libs {
index cb451931dfe179d51047b73036f80a9f16c14865..6fd5bd52abe284c008654aee3b8272600f4199ff 100644 (file)
@@ -223,6 +223,15 @@ fn into_args(self) -> (DefId, SimplifiedType) {
     generator_kind => { table }
     trait_def => { table }
     deduced_param_attrs => { table }
+    is_type_alias_impl_trait => {
+        debug_assert_eq!(tcx.def_kind(def_id), DefKind::OpaqueTy);
+        cdata
+            .root
+            .tables
+            .is_type_alias_impl_trait
+            .get(cdata, def_id.index)
+            .is_some()
+    }
     collect_return_position_impl_trait_in_trait_tys => {
         Ok(cdata
             .root
@@ -382,7 +391,7 @@ pub(in crate::rmeta) fn provide(providers: &mut Providers) {
             // keys from the former.
             // This is a rudimentary check that does not catch all cases,
             // just the easiest.
-            let mut fallback_map: DefIdMap<DefId> = Default::default();
+            let mut fallback_map: Vec<(DefId, DefId)> = Default::default();
 
             // Issue 46112: We want the map to prefer the shortest
             // paths when reporting the path to an item. Therefore we
@@ -412,12 +421,12 @@ pub(in crate::rmeta) fn provide(providers: &mut Providers) {
 
                 if let Some(def_id) = child.res.opt_def_id() {
                     if child.ident.name == kw::Underscore {
-                        fallback_map.insert(def_id, parent);
+                        fallback_map.push((def_id, parent));
                         return;
                     }
 
                     if ty::util::is_doc_hidden(tcx, parent) {
-                        fallback_map.insert(def_id, parent);
+                        fallback_map.push((def_id, parent));
                         return;
                     }
 
@@ -451,6 +460,7 @@ pub(in crate::rmeta) fn provide(providers: &mut Providers) {
             // Fill in any missing entries with the less preferable path.
             // If this path re-exports the child as `_`, we still use this
             // path in a diagnostic that suggests importing `::*`.
+
             for (child, parent) in fallback_map {
                 visible_parent_map.entry(child).or_insert(parent);
             }
index a8000aa3c8a831562cd1543c91d33033b285abb5..8f7a61b72f81a2cebb7408b11ab2904e17563adf 100644 (file)
@@ -172,7 +172,7 @@ fn encode(&self, s: &mut EncodeContext<'a, 'tcx>) {
 impl<'a, 'tcx> Encodable<EncodeContext<'a, 'tcx>> for ExpnId {
     fn encode(&self, s: &mut EncodeContext<'a, 'tcx>) {
         if self.krate == LOCAL_CRATE {
-            // We will only write details for local expansions.  Non-local expansions will fetch
+            // We will only write details for local expansions. Non-local expansions will fetch
             // data from the corresponding crate's metadata.
             // FIXME(#43047) FIXME(#74731) We may eventually want to avoid relying on external
             // metadata from proc-macro crates.
@@ -888,8 +888,8 @@ fn should_encode_mir(tcx: TyCtxt<'_>, def_id: LocalDefId) -> (bool, bool) {
         | DefKind::AssocConst
         | DefKind::Static(..)
         | DefKind::Const => (true, false),
-        // Full-fledged functions
-        DefKind::AssocFn | DefKind::Fn => {
+        // Full-fledged functions + closures
+        DefKind::AssocFn | DefKind::Fn | DefKind::Closure => {
             let generics = tcx.generics_of(def_id);
             let needs_inline = (generics.requires_monomorphization(tcx)
                 || tcx.codegen_fn_attrs(def_id).requests_inline())
@@ -900,15 +900,6 @@ fn should_encode_mir(tcx: TyCtxt<'_>, def_id: LocalDefId) -> (bool, bool) {
             let always_encode_mir = tcx.sess.opts.unstable_opts.always_encode_mir;
             (is_const_fn, needs_inline || always_encode_mir)
         }
-        // Closures can't be const fn.
-        DefKind::Closure => {
-            let generics = tcx.generics_of(def_id);
-            let needs_inline = (generics.requires_monomorphization(tcx)
-                || tcx.codegen_fn_attrs(def_id).requests_inline())
-                && tcx.sess.opts.output_types.should_codegen();
-            let always_encode_mir = tcx.sess.opts.unstable_opts.always_encode_mir;
-            (false, needs_inline || always_encode_mir)
-        }
         // Generators require optimized MIR to compute layout.
         DefKind::Generator => (false, true),
         // The others don't have MIR.
@@ -1196,8 +1187,11 @@ fn encode_def_ids(&mut self) {
                 record!(self.tables.trait_impl_trait_tys[def_id] <- table);
             }
         }
-        let inherent_impls = tcx.crate_inherent_impls(());
-        for (def_id, implementations) in inherent_impls.inherent_impls.iter() {
+        let inherent_impls = tcx.with_stable_hashing_context(|hcx| {
+            tcx.crate_inherent_impls(()).inherent_impls.to_sorted(&hcx, true)
+        });
+
+        for (def_id, implementations) in inherent_impls {
             if implementations.is_empty() {
                 continue;
             }
@@ -1521,8 +1515,11 @@ fn encode_info_for_item(&mut self, def_id: DefId, item: &'tcx hir::Item<'tcx>) {
             hir::ItemKind::Mod(ref m) => {
                 return self.encode_info_for_mod(item.owner_id.def_id, m);
             }
-            hir::ItemKind::OpaqueTy(..) => {
+            hir::ItemKind::OpaqueTy(ref opaque) => {
                 self.encode_explicit_item_bounds(def_id);
+                if matches!(opaque.origin, hir::OpaqueTyOrigin::TyAlias) {
+                    self.tables.is_type_alias_impl_trait.set(def_id.index, ());
+                }
             }
             hir::ItemKind::Enum(..) => {
                 let adt_def = self.tcx.adt_def(def_id);
index 5b7b096b4edf1a903beb400bb27b6e2de463d515..5066dbbb90f3a60168e3bd1a2829cc70d7bd1b16 100644 (file)
@@ -404,6 +404,8 @@ fn encode(&self, buf: &mut FileEncoder) -> LazyTables {
     proc_macro: Table<DefIndex, MacroKind>,
     module_reexports: Table<DefIndex, LazyArray<ModChild>>,
     deduced_param_attrs: Table<DefIndex, LazyArray<DeducedParamAttrs>>,
+    // Slot is full when opaque is TAIT.
+    is_type_alias_impl_trait: Table<DefIndex, ()>,
 
     trait_impl_trait_tys: Table<DefIndex, LazyValue<FxHashMap<DefId, Ty<'static>>>>,
 }
index 48bae7a2d4e1ff2a6a5ebdb36f082657fbe44718..9e63c2bd2216ff652f879dd787d551ce5a13471a 100644 (file)
@@ -582,10 +582,10 @@ pub fn walk_attributes(self, visitor: &mut impl Visitor<'hir>) {
 
     /// Visits all item-likes in the crate in some deterministic (but unspecified) order. If you
     /// need to process every item-like, and don't care about visiting nested items in a particular
-    /// order then this method is the best choice.  If you do care about this nesting, you should
+    /// order then this method is the best choice. If you do care about this nesting, you should
     /// use the `tcx.hir().walk_toplevel_module`.
     ///
-    /// Note that this function will access HIR for all the item-likes in the crate.  If you only
+    /// Note that this function will access HIR for all the item-likes in the crate. If you only
     /// need to access some of them, it is usually better to manually loop on the iterators
     /// provided by `tcx.hir_crate_items(())`.
     ///
index f567eaf967724909a88df86872a878956d2c2ff3..dedc65f4cbf45b27352e5b395d30a3c5f88a3c7d 100644 (file)
@@ -36,7 +36,7 @@ fn hash_stable(&self, hcx: &mut StableHashingContext<'a>, hasher: &mut StableHas
 }
 
 /// Gather the LocalDefId for each item-like within a module, including items contained within
-/// bodies.  The Ids are in visitor order.  This is used to partition a pass between modules.
+/// bodies. The Ids are in visitor order. This is used to partition a pass between modules.
 #[derive(Debug, HashStable, Encodable, Decodable)]
 pub struct ModuleItems {
     submodules: Box<[OwnerId]>,
index 614cf1a0051da8b423c96e536cb018646518ab97..43583b5723e698f763e7c03f5a35b19b41e25108 100644 (file)
@@ -68,6 +68,22 @@ pub struct CanonicalVarValues<'tcx> {
     pub var_values: IndexVec<BoundVar, GenericArg<'tcx>>,
 }
 
+impl CanonicalVarValues<'_> {
+    pub fn is_identity(&self) -> bool {
+        self.var_values.iter_enumerated().all(|(bv, arg)| match arg.unpack() {
+            ty::GenericArgKind::Lifetime(r) => {
+                matches!(*r, ty::ReLateBound(ty::INNERMOST, br) if br.var == bv)
+            }
+            ty::GenericArgKind::Type(ty) => {
+                matches!(*ty.kind(), ty::Bound(ty::INNERMOST, bt) if bt.var == bv)
+            }
+            ty::GenericArgKind::Const(ct) => {
+                matches!(ct.kind(), ty::ConstKind::Bound(ty::INNERMOST, bc) if bc == bv)
+            }
+        })
+    }
+}
+
 /// When we canonicalize a value to form a query, we wind up replacing
 /// various parts of it with canonical variables. This struct stores
 /// those replaced bits to remember for when we process the query
@@ -323,6 +339,12 @@ pub fn unchecked_rebind<W>(self, value: W) -> Canonical<'tcx, W> {
 }
 
 impl<'tcx> CanonicalVarValues<'tcx> {
+    /// Creates dummy var values which should not be used in a
+    /// canonical response.
+    pub fn dummy() -> CanonicalVarValues<'tcx> {
+        CanonicalVarValues { var_values: Default::default() }
+    }
+
     #[inline]
     pub fn len(&self) -> usize {
         self.var_values.len()
index 5ca4d260179ceff29f982033a0a3cb4e171e089d..250f3d0797eb52cdb2c503ab914967d26235a956 100644 (file)
@@ -93,7 +93,7 @@ fn visit_with<F: $crate::ty::visit::TypeVisitor<$tcx>>(
                     _: &mut F)
                     -> ::std::ops::ControlFlow<F::BreakTy>
                 {
-                    ::std::ops::ControlFlow::CONTINUE
+                    ::std::ops::ControlFlow::Continue(())
                 }
             }
         )+
@@ -219,7 +219,7 @@ fn visit_with<V: $crate::ty::visit::TypeVisitor<$tcx>>(
                         $($crate::ty::visit::TypeVisitable::visit_with(
                             $variant_arg, $visitor
                         )?;)*
-                        ::std::ops::ControlFlow::CONTINUE
+                        ::std::ops::ControlFlow::Continue(())
                     }
                     $($output)*
                 )
@@ -237,7 +237,7 @@ fn visit_with<V: $crate::ty::visit::TypeVisitor<$tcx>>(
                         $($crate::ty::visit::TypeVisitable::visit_with(
                             $variant_arg, $visitor
                         )?;)*
-                        ::std::ops::ControlFlow::CONTINUE
+                        ::std::ops::ControlFlow::Continue(())
                     }
                     $($output)*
                 )
@@ -251,7 +251,7 @@ fn visit_with<V: $crate::ty::visit::TypeVisitor<$tcx>>(
             @VisitVariants($this, $visitor)
                 input($($input)*)
                 output(
-                    $variant => { ::std::ops::ControlFlow::CONTINUE }
+                    $variant => { ::std::ops::ControlFlow::Continue(()) }
                     $($output)*
                 )
         )
index 752cbdeae6b25df2eb76ad53c4db05c3edbfb534..b3354e6e9d2a2e4c7712350e8b1a11b84113d58f 100644 (file)
@@ -1,38 +1,44 @@
-use crate::mir::graph_cyclic_cache::GraphIsCyclicCache;
-use crate::mir::predecessors::{PredecessorCache, Predecessors};
-use crate::mir::switch_sources::{SwitchSourceCache, SwitchSources};
-use crate::mir::traversal::PostorderCache;
-use crate::mir::{BasicBlock, BasicBlockData, Successors, START_BLOCK};
+use crate::mir::traversal::Postorder;
+use crate::mir::{BasicBlock, BasicBlockData, Successors, Terminator, TerminatorKind, START_BLOCK};
 
+use rustc_data_structures::fx::FxHashMap;
 use rustc_data_structures::graph;
 use rustc_data_structures::graph::dominators::{dominators, Dominators};
+use rustc_data_structures::stable_hasher::{HashStable, StableHasher};
+use rustc_data_structures::sync::OnceCell;
 use rustc_index::vec::IndexVec;
+use rustc_serialize::{Decodable, Decoder, Encodable, Encoder};
+use smallvec::SmallVec;
 
 #[derive(Clone, TyEncodable, TyDecodable, Debug, HashStable, TypeFoldable, TypeVisitable)]
 pub struct BasicBlocks<'tcx> {
     basic_blocks: IndexVec<BasicBlock, BasicBlockData<'tcx>>,
-    predecessor_cache: PredecessorCache,
-    switch_source_cache: SwitchSourceCache,
-    is_cyclic: GraphIsCyclicCache,
-    postorder_cache: PostorderCache,
+    cache: Cache,
+}
+
+// Typically 95%+ of basic blocks have 4 or fewer predecessors.
+pub type Predecessors = IndexVec<BasicBlock, SmallVec<[BasicBlock; 4]>>;
+
+pub type SwitchSources = FxHashMap<(BasicBlock, BasicBlock), SmallVec<[Option<u128>; 1]>>;
+
+#[derive(Clone, Default, Debug)]
+struct Cache {
+    predecessors: OnceCell<Predecessors>,
+    switch_sources: OnceCell<SwitchSources>,
+    is_cyclic: OnceCell<bool>,
+    postorder: OnceCell<Vec<BasicBlock>>,
 }
 
 impl<'tcx> BasicBlocks<'tcx> {
     #[inline]
     pub fn new(basic_blocks: IndexVec<BasicBlock, BasicBlockData<'tcx>>) -> Self {
-        BasicBlocks {
-            basic_blocks,
-            predecessor_cache: PredecessorCache::new(),
-            switch_source_cache: SwitchSourceCache::new(),
-            is_cyclic: GraphIsCyclicCache::new(),
-            postorder_cache: PostorderCache::new(),
-        }
+        BasicBlocks { basic_blocks, cache: Cache::default() }
     }
 
     /// Returns true if control-flow graph contains a cycle reachable from the `START_BLOCK`.
     #[inline]
     pub fn is_cfg_cyclic(&self) -> bool {
-        self.is_cyclic.is_cyclic(self)
+        *self.cache.is_cyclic.get_or_init(|| graph::is_cyclic(self))
     }
 
     #[inline]
@@ -43,20 +49,46 @@ pub fn dominators(&self) -> Dominators<BasicBlock> {
     /// Returns predecessors for each basic block.
     #[inline]
     pub fn predecessors(&self) -> &Predecessors {
-        self.predecessor_cache.compute(&self.basic_blocks)
+        self.cache.predecessors.get_or_init(|| {
+            let mut preds = IndexVec::from_elem(SmallVec::new(), &self.basic_blocks);
+            for (bb, data) in self.basic_blocks.iter_enumerated() {
+                if let Some(term) = &data.terminator {
+                    for succ in term.successors() {
+                        preds[succ].push(bb);
+                    }
+                }
+            }
+            preds
+        })
     }
 
     /// Returns basic blocks in a postorder.
     #[inline]
     pub fn postorder(&self) -> &[BasicBlock] {
-        self.postorder_cache.compute(&self.basic_blocks)
+        self.cache.postorder.get_or_init(|| {
+            Postorder::new(&self.basic_blocks, START_BLOCK).map(|(bb, _)| bb).collect()
+        })
     }
 
     /// `switch_sources()[&(target, switch)]` returns a list of switch
     /// values that lead to a `target` block from a `switch` block.
     #[inline]
     pub fn switch_sources(&self) -> &SwitchSources {
-        self.switch_source_cache.compute(&self.basic_blocks)
+        self.cache.switch_sources.get_or_init(|| {
+            let mut switch_sources: SwitchSources = FxHashMap::default();
+            for (bb, data) in self.basic_blocks.iter_enumerated() {
+                if let Some(Terminator {
+                    kind: TerminatorKind::SwitchInt { targets, .. }, ..
+                }) = &data.terminator
+                {
+                    for (value, target) in targets.iter() {
+                        switch_sources.entry((target, bb)).or_default().push(Some(value));
+                    }
+                    switch_sources.entry((targets.otherwise(), bb)).or_default().push(None);
+                }
+            }
+            switch_sources
+        })
     }
 
     /// Returns mutable reference to basic blocks. Invalidates CFG cache.
@@ -88,10 +120,7 @@ pub fn as_mut_preserves_cfg(&mut self) -> &mut IndexVec<BasicBlock, BasicBlockDa
     /// All other methods that allow you to mutate the basic blocks also call this method
     /// themselves, thereby avoiding any risk of accidentally cache invalidation.
     pub fn invalidate_cfg_cache(&mut self) {
-        self.predecessor_cache.invalidate();
-        self.switch_source_cache.invalidate();
-        self.is_cyclic.invalidate();
-        self.postorder_cache.invalidate();
+        self.cache = Cache::default();
     }
 }
 
@@ -145,3 +174,24 @@ fn predecessors(&self, node: Self::Node) -> <Self as graph::GraphPredecessors<'_
         self.predecessors()[node].iter().copied()
     }
 }
+
+TrivialTypeTraversalAndLiftImpls! {
+    Cache,
+}
+
+impl<S: Encoder> Encodable<S> for Cache {
+    #[inline]
+    fn encode(&self, _s: &mut S) {}
+}
+
+impl<D: Decoder> Decodable<D> for Cache {
+    #[inline]
+    fn decode(_: &mut D) -> Self {
+        Default::default()
+    }
+}
+
+impl<CTX> HashStable<CTX> for Cache {
+    #[inline]
+    fn hash_stable(&self, _: &mut CTX, _: &mut StableHasher) {}
+}
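
The refactor above collapses the four separate cache wrappers into one `Cache` of `OnceCell`s: each derived view is computed lazily with `get_or_init`, and `invalidate_cfg_cache` simply replaces the whole struct with its default. A minimal sketch of that pattern, with `once_cell::sync::OnceCell` standing in for `rustc_data_structures::sync::OnceCell`:

```rust
// Sketch of the lazy-cache pattern: compute on first access, reset by
// replacing the whole cache with its default.
use once_cell::sync::OnceCell;

#[derive(Default)]
struct Cache {
    doubled: OnceCell<Vec<u32>>,
}

struct Blocks {
    data: Vec<u32>,
    cache: Cache,
}

impl Blocks {
    fn doubled(&self) -> &Vec<u32> {
        // Computed at most once; later calls return the cached value.
        self.cache.doubled.get_or_init(|| self.data.iter().map(|x| x * 2).collect())
    }

    fn data_mut(&mut self) -> &mut Vec<u32> {
        // Mutation may change the derived data, so drop every cached value.
        self.cache = Cache::default();
        &mut self.data
    }
}

fn main() {
    let mut b = Blocks { data: vec![1, 2, 3], cache: Cache::default() };
    assert_eq!(b.doubled(), &vec![2, 4, 6]);
    b.data_mut().push(4);
    assert_eq!(b.doubled(), &vec![2, 4, 6, 8]);
}
```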
diff --git a/compiler/rustc_middle/src/mir/graph_cyclic_cache.rs b/compiler/rustc_middle/src/mir/graph_cyclic_cache.rs
deleted file mode 100644 (file)
index f97bf28..0000000
+++ /dev/null
@@ -1,63 +0,0 @@
-use rustc_data_structures::graph::{
-    self, DirectedGraph, WithNumNodes, WithStartNode, WithSuccessors,
-};
-use rustc_data_structures::stable_hasher::{HashStable, StableHasher};
-use rustc_data_structures::sync::OnceCell;
-use rustc_serialize::{Decodable, Decoder, Encodable, Encoder};
-
-/// Helper type to cache the result of `graph::is_cyclic`.
-#[derive(Clone, Debug)]
-pub(super) struct GraphIsCyclicCache {
-    cache: OnceCell<bool>,
-}
-
-impl GraphIsCyclicCache {
-    #[inline]
-    pub(super) fn new() -> Self {
-        GraphIsCyclicCache { cache: OnceCell::new() }
-    }
-
-    pub(super) fn is_cyclic<G>(&self, graph: &G) -> bool
-    where
-        G: ?Sized + DirectedGraph + WithStartNode + WithSuccessors + WithNumNodes,
-    {
-        *self.cache.get_or_init(|| graph::is_cyclic(graph))
-    }
-
-    /// Invalidates the cache.
-    #[inline]
-    pub(super) fn invalidate(&mut self) {
-        // Invalidating the cache requires mutating the MIR, which in turn requires a unique
-        // reference (`&mut`) to the `mir::Body`. Because of this, we can assume that all
-        // callers of `invalidate` have a unique reference to the MIR and thus to the
-        // cache. This means we never need to do synchronization when `invalidate` is called,
-        // we can simply reinitialize the `OnceCell`.
-        self.cache = OnceCell::new();
-    }
-}
-
-impl<S: Encoder> Encodable<S> for GraphIsCyclicCache {
-    #[inline]
-    fn encode(&self, s: &mut S) {
-        Encodable::encode(&(), s);
-    }
-}
-
-impl<D: Decoder> Decodable<D> for GraphIsCyclicCache {
-    #[inline]
-    fn decode(d: &mut D) -> Self {
-        let () = Decodable::decode(d);
-        Self::new()
-    }
-}
-
-impl<CTX> HashStable<CTX> for GraphIsCyclicCache {
-    #[inline]
-    fn hash_stable(&self, _: &mut CTX, _: &mut StableHasher) {
-        // do nothing
-    }
-}
-
-TrivialTypeTraversalAndLiftImpls! {
-    GraphIsCyclicCache,
-}
index 8fe349d9640dafd776814d0feb5f9b8175a7e92f..5f425a287687e60a562e446be171d156c959ebae 100644 (file)
@@ -509,7 +509,7 @@ pub fn create_static_alloc(self, static_id: DefId) -> AllocId {
         self.reserve_and_set_dedup(GlobalAlloc::Static(static_id))
     }
 
-    /// Generates an `AllocId` for a function.  Depending on the function type,
+    /// Generates an `AllocId` for a function. Depending on the function type,
     /// this might get deduplicated or assigned a new ID each time.
     pub fn create_fn_alloc(self, instance: Instance<'tcx>) -> AllocId {
         // Functions cannot be identified by pointers, as asm-equal functions can get deduplicated
@@ -518,7 +518,7 @@ pub fn create_fn_alloc(self, instance: Instance<'tcx>) -> AllocId {
         // We thus generate a new `AllocId` for every mention of a function. This means that
         // `main as fn() == main as fn()` is false, while `let x = main as fn(); x == x` is true.
         // However, formatting code relies on function identity (see #58320), so we only do
-        // this for generic functions.  Lifetime parameters are ignored.
+        // this for generic functions. Lifetime parameters are ignored.
         let is_generic = instance
             .substs
             .into_iter()
@@ -535,7 +535,7 @@ pub fn create_fn_alloc(self, instance: Instance<'tcx>) -> AllocId {
         }
     }
 
-    /// Generates an `AllocId` for a (symbolic, not-reified) vtable.  Will get deduplicated.
+    /// Generates an `AllocId` for a (symbolic, not-reified) vtable. Will get deduplicated.
     pub fn create_vtable_alloc(
         self,
         ty: Ty<'tcx>,
index 14bdff4568f5e65f8e1fd4c6355bdaf89a4ae4d2..d833286dc33f008ea17ec537dd4ae8de733712d8 100644 (file)
 pub mod coverage;
 mod generic_graph;
 pub mod generic_graphviz;
-mod graph_cyclic_cache;
 pub mod graphviz;
 pub mod interpret;
 pub mod mono;
 pub mod patch;
-mod predecessors;
 pub mod pretty;
 mod query;
 pub mod spanview;
 mod syntax;
 pub use syntax::*;
-mod switch_sources;
 pub mod tcx;
 pub mod terminator;
 pub use terminator::*;
@@ -2483,7 +2480,7 @@ fn from_opt_const_arg_anon_const(
 
         // FIXME(const_generics): We currently have to special case parameters because `min_const_generics`
         // does not provide the parents generics to anonymous constants. We still allow generic const
-        // parameters by themselves however, e.g. `N`.  These constants would cause an ICE if we were to
+        // parameters by themselves however, e.g. `N`. These constants would cause an ICE if we were to
         // ever try to substitute the generic parameters in their bodies.
         //
         // While this doesn't happen as these constants are always used as `ty::ConstKind::Param`, it does
diff --git a/compiler/rustc_middle/src/mir/predecessors.rs b/compiler/rustc_middle/src/mir/predecessors.rs
deleted file mode 100644 (file)
index 5f1fada..0000000
+++ /dev/null
@@ -1,78 +0,0 @@
-//! Lazily compute the reverse control-flow graph for the MIR.
-
-use rustc_data_structures::stable_hasher::{HashStable, StableHasher};
-use rustc_data_structures::sync::OnceCell;
-use rustc_index::vec::IndexVec;
-use rustc_serialize::{Decodable, Decoder, Encodable, Encoder};
-use smallvec::SmallVec;
-
-use crate::mir::{BasicBlock, BasicBlockData};
-
-// Typically 95%+ of basic blocks have 4 or fewer predecessors.
-pub type Predecessors = IndexVec<BasicBlock, SmallVec<[BasicBlock; 4]>>;
-
-#[derive(Clone, Debug)]
-pub(super) struct PredecessorCache {
-    cache: OnceCell<Predecessors>,
-}
-
-impl PredecessorCache {
-    #[inline]
-    pub(super) fn new() -> Self {
-        PredecessorCache { cache: OnceCell::new() }
-    }
-
-    /// Invalidates the predecessor cache.
-    #[inline]
-    pub(super) fn invalidate(&mut self) {
-        // Invalidating the predecessor cache requires mutating the MIR, which in turn requires a
-        // unique reference (`&mut`) to the `mir::Body`. Because of this, we can assume that all
-        // callers of `invalidate` have a unique reference to the MIR and thus to the predecessor
-        // cache. This means we never need to do synchronization when `invalidate` is called, we can
-        // simply reinitialize the `OnceCell`.
-        self.cache = OnceCell::new();
-    }
-
-    /// Returns the predecessor graph for this MIR.
-    #[inline]
-    pub(super) fn compute(
-        &self,
-        basic_blocks: &IndexVec<BasicBlock, BasicBlockData<'_>>,
-    ) -> &Predecessors {
-        self.cache.get_or_init(|| {
-            let mut preds = IndexVec::from_elem(SmallVec::new(), basic_blocks);
-            for (bb, data) in basic_blocks.iter_enumerated() {
-                if let Some(term) = &data.terminator {
-                    for succ in term.successors() {
-                        preds[succ].push(bb);
-                    }
-                }
-            }
-
-            preds
-        })
-    }
-}
-
-impl<S: Encoder> Encodable<S> for PredecessorCache {
-    #[inline]
-    fn encode(&self, _s: &mut S) {}
-}
-
-impl<D: Decoder> Decodable<D> for PredecessorCache {
-    #[inline]
-    fn decode(_: &mut D) -> Self {
-        Self::new()
-    }
-}
-
-impl<CTX> HashStable<CTX> for PredecessorCache {
-    #[inline]
-    fn hash_stable(&self, _: &mut CTX, _: &mut StableHasher) {
-        // do nothing
-    }
-}
-
-TrivialTypeTraversalAndLiftImpls! {
-    PredecessorCache,
-}
diff --git a/compiler/rustc_middle/src/mir/switch_sources.rs b/compiler/rustc_middle/src/mir/switch_sources.rs
deleted file mode 100644 (file)
index b91c0c2..0000000
+++ /dev/null
@@ -1,78 +0,0 @@
-//! Lazily compute the inverse of each `SwitchInt`'s switch targets. Modeled after
-//! `Predecessors`/`PredecessorCache`.
-
-use rustc_data_structures::fx::FxHashMap;
-use rustc_data_structures::stable_hasher::{HashStable, StableHasher};
-use rustc_data_structures::sync::OnceCell;
-use rustc_index::vec::IndexVec;
-use rustc_serialize::{Decodable, Decoder, Encodable, Encoder};
-use smallvec::SmallVec;
-
-use crate::mir::{BasicBlock, BasicBlockData, Terminator, TerminatorKind};
-
-pub type SwitchSources = FxHashMap<(BasicBlock, BasicBlock), SmallVec<[Option<u128>; 1]>>;
-
-#[derive(Clone, Debug)]
-pub(super) struct SwitchSourceCache {
-    cache: OnceCell<SwitchSources>,
-}
-
-impl SwitchSourceCache {
-    #[inline]
-    pub(super) fn new() -> Self {
-        SwitchSourceCache { cache: OnceCell::new() }
-    }
-
-    /// Invalidates the switch source cache.
-    #[inline]
-    pub(super) fn invalidate(&mut self) {
-        self.cache = OnceCell::new();
-    }
-
-    /// Returns the switch sources for this MIR.
-    #[inline]
-    pub(super) fn compute(
-        &self,
-        basic_blocks: &IndexVec<BasicBlock, BasicBlockData<'_>>,
-    ) -> &SwitchSources {
-        self.cache.get_or_init(|| {
-            let mut switch_sources: SwitchSources = FxHashMap::default();
-            for (bb, data) in basic_blocks.iter_enumerated() {
-                if let Some(Terminator {
-                    kind: TerminatorKind::SwitchInt { targets, .. }, ..
-                }) = &data.terminator
-                {
-                    for (value, target) in targets.iter() {
-                        switch_sources.entry((target, bb)).or_default().push(Some(value));
-                    }
-                    switch_sources.entry((targets.otherwise(), bb)).or_default().push(None);
-                }
-            }
-
-            switch_sources
-        })
-    }
-}
-
-impl<S: Encoder> Encodable<S> for SwitchSourceCache {
-    #[inline]
-    fn encode(&self, _s: &mut S) {}
-}
-
-impl<D: Decoder> Decodable<D> for SwitchSourceCache {
-    #[inline]
-    fn decode(_: &mut D) -> Self {
-        Self::new()
-    }
-}
-
-impl<CTX> HashStable<CTX> for SwitchSourceCache {
-    #[inline]
-    fn hash_stable(&self, _: &mut CTX, _: &mut StableHasher) {
-        // do nothing
-    }
-}
-
-TrivialTypeTraversalAndLiftImpls! {
-    SwitchSourceCache,
-}
index 6b4489026d3d379dee958125b73ced3eb3d40fb1..52c2b10cbbea97e286650d84ce8bf4a79be35a23 100644 (file)
@@ -512,6 +512,16 @@ pub struct CopyNonOverlapping<'tcx> {
 ///     must also be `cleanup`. This is a part of the type system and checked statically, so it is
 ///     still an error to have such an edge in the CFG even if it's known that it won't be taken at
 ///     runtime.
+///  4. The control flow between cleanup blocks must look like an upside down tree. Roughly
+///     speaking, this means that control flow that looks like a V is allowed, while control flow
+///     that looks like a W is not. This is necessary to ensure that landing pad information can be
+///     correctly codegened on MSVC. More precisely:
+///
+///     Begin with the standard control flow graph `G`. Modify `G` as follows: for any two cleanup
+///     vertices `u` and `v` such that `u` dominates `v`, contract `u` and `v` into a single vertex,
+///     deleting self edges and duplicate edges in the process. Now remove all vertices from `G`
+///     that are not cleanup vertices or are not reachable. The resulting graph must be an inverted
+///     tree, that is each vertex may have at most one successor and there may be no cycles.
 #[derive(Clone, TyEncodable, TyDecodable, Hash, HashStable, PartialEq, TypeFoldable, TypeVisitable)]
 pub enum TerminatorKind<'tcx> {
     /// Block has one successor; we continue execution there.
index 438f36373ca91b0682484f1052eefd7ad1aa889a..6e905224c1336b5d6e0d1839d078523058648834 100644 (file)
@@ -74,7 +74,7 @@ pub fn all_targets_mut(&mut self) -> &mut [BasicBlock] {
     }
 
     /// Finds the `BasicBlock` to which this `SwitchInt` will branch given the
-    /// specific value.  This cannot fail, as it'll return the `otherwise`
+    /// specific value. This cannot fail, as it'll return the `otherwise`
     /// branch if there's not a specific match for the value.
     pub fn target_for_value(&self, value: u128) -> BasicBlock {
         self.iter().find_map(|(v, t)| (v == value).then_some(t)).unwrap_or_else(|| self.otherwise())
index 0b461d1ce41c5f2559bda8ea0a82f3e06562431d..f37222cb29758cf827fe943b47d613b9be389bc9 100644 (file)
@@ -1,7 +1,4 @@
-use rustc_data_structures::stable_hasher::{HashStable, StableHasher};
-use rustc_data_structures::sync::OnceCell;
 use rustc_index::bit_set::BitSet;
-use rustc_serialize::{Decodable, Decoder, Encodable, Encoder};
 
 use super::*;
 
@@ -339,50 +336,3 @@ pub fn reverse_postorder<'a, 'tcx>(body: &'a Body<'tcx>) -> ReversePostorderIter
     let len = blocks.len();
     ReversePostorderIter { body, blocks, idx: len }
 }
-
-#[derive(Clone, Debug)]
-pub(super) struct PostorderCache {
-    cache: OnceCell<Vec<BasicBlock>>,
-}
-
-impl PostorderCache {
-    #[inline]
-    pub(super) fn new() -> Self {
-        PostorderCache { cache: OnceCell::new() }
-    }
-
-    /// Invalidates the postorder cache.
-    #[inline]
-    pub(super) fn invalidate(&mut self) {
-        self.cache = OnceCell::new();
-    }
-
-    /// Returns the `&[BasicBlocks]` represents the postorder graph for this MIR.
-    #[inline]
-    pub(super) fn compute(&self, body: &IndexVec<BasicBlock, BasicBlockData<'_>>) -> &[BasicBlock] {
-        self.cache.get_or_init(|| Postorder::new(body, START_BLOCK).map(|(bb, _)| bb).collect())
-    }
-}
-
-impl<S: Encoder> Encodable<S> for PostorderCache {
-    #[inline]
-    fn encode(&self, _s: &mut S) {}
-}
-
-impl<D: Decoder> Decodable<D> for PostorderCache {
-    #[inline]
-    fn decode(_: &mut D) -> Self {
-        Self::new()
-    }
-}
-
-impl<CTX> HashStable<CTX> for PostorderCache {
-    #[inline]
-    fn hash_stable(&self, _: &mut CTX, _: &mut StableHasher) {
-        // do nothing
-    }
-}
-
-TrivialTypeTraversalAndLiftImpls! {
-    PostorderCache,
-}
index e7cd497b206a9eacc500ea7f6bf76258e1b0c1a4..d44c6809bd8305da53c92e7b9c0878475781bec4 100644 (file)
@@ -4,6 +4,6 @@
 
 impl<'tcx, R: Idx, C: Idx> TypeVisitable<'tcx> for BitMatrix<R, C> {
     fn visit_with<V: TypeVisitor<'tcx>>(&self, _: &mut V) -> ControlFlow<V::BreakTy> {
-        ControlFlow::CONTINUE
+        ControlFlow::Continue(())
     }
 }
index b3acf815e0c10e931045db237c919ce7191d1acd..6bbf7fa3914e69ea746a5607cae04b6ad2db1f4e 100644 (file)
         separate_provide_extern
     }
 
+    query is_type_alias_impl_trait(key: DefId) -> bool
+    {
+        desc { "determine whether the opaque is a type-alias impl trait" }
+        separate_provide_extern
+    }
+
     query analysis(key: ()) -> Result<(), ErrorGuaranteed> {
         eval_always
         desc { "running analysis passes on this crate" }
     /// ```
     ///
     /// Bounds from the parent (e.g. with nested impl trait) are not included.
-    query item_bounds(key: DefId) -> &'tcx ty::List<ty::Predicate<'tcx>> {
+    query item_bounds(key: DefId) -> ty::EarlyBinder<&'tcx ty::List<ty::Predicate<'tcx>>> {
         desc { |tcx| "elaborating item bounds for `{}`", tcx.def_path_str(key) }
     }
 
index dd75b0d9ebc23e30de15728d67d877fb48e52a00..fcc8f457a8b7887296526a2d1093a5b3f86027a6 100644 (file)
@@ -159,18 +159,20 @@ fn debug_ty(ty: &chalk_ir::Ty<Self>, fmt: &mut fmt::Formatter<'_>) -> Option<fmt
             }
             chalk_ir::TyKind::Array(ty, len) => Some(write!(fmt, "[{:?}; {:?}]", ty, len)),
             chalk_ir::TyKind::Slice(ty) => Some(write!(fmt, "[{:?}]", ty)),
-            chalk_ir::TyKind::Tuple(len, substs) => Some((|| {
-                write!(fmt, "(")?;
-                for (idx, substitution) in substs.interned().iter().enumerate() {
-                    if idx == *len && *len != 1 {
-                        // Don't add a trailing comma if the tuple has more than one element
-                        write!(fmt, "{:?}", substitution)?;
-                    } else {
-                        write!(fmt, "{:?},", substitution)?;
+            chalk_ir::TyKind::Tuple(len, substs) => Some(
+                try {
+                    write!(fmt, "(")?;
+                    for (idx, substitution) in substs.interned().iter().enumerate() {
+                        if idx == *len && *len != 1 {
+                            // Don't add a trailing comma if the tuple has more than one element
+                            write!(fmt, "{:?}", substitution)?;
+                        } else {
+                            write!(fmt, "{:?},", substitution)?;
+                        }
                     }
-                }
-                write!(fmt, ")")
-            })()),
+                    write!(fmt, ")")?;
+                },
+            ),
             _ => None,
         }
     }
index 543f5b87e00bccd3a0c887f9c06f31ce3149f923..615154a55e586dee13ce18a7e4fbb0d87ea96310 100644 (file)
@@ -8,9 +8,8 @@
 use crate::error::DropCheckOverflow;
 use crate::infer::canonical::{Canonical, QueryResponse};
 use crate::ty::error::TypeError;
-use crate::ty::subst::{GenericArg, SubstsRef};
+use crate::ty::subst::GenericArg;
 use crate::ty::{self, Ty, TyCtxt};
-use rustc_hir::def_id::DefId;
 use rustc_span::source_map::Span;
 
 pub mod type_op {
@@ -214,6 +213,5 @@ pub struct NormalizationResult<'tcx> {
 pub enum OutlivesBound<'tcx> {
     RegionSubRegion(ty::Region<'tcx>, ty::Region<'tcx>),
     RegionSubParam(ty::Region<'tcx>, ty::ParamTy),
-    RegionSubProjection(ty::Region<'tcx>, ty::AliasTy<'tcx>),
-    RegionSubOpaque(ty::Region<'tcx>, DefId, SubstsRef<'tcx>),
+    RegionSubAlias(ty::Region<'tcx>, ty::AliasTy<'tcx>),
 }
index 55ee5bd2f810d0999641b9e490e27c780cdab700..bb7fba3ee7119ad9c00af8c9f4311bfd4d5c11b2 100644 (file)
@@ -37,6 +37,11 @@ pub fn ident(&self, tcx: TyCtxt<'_>) -> Ident {
         Ident::new(self.name, tcx.def_ident_span(self.def_id).unwrap())
     }
 
+    /// Gets the defaultness of the associated item.
+    /// To get the default associated type, use the [`type_of`] query on the
+    /// [`DefId`] of the type.
+    ///
+    /// [`type_of`]: crate::ty::TyCtxt::type_of
     pub fn defaultness(&self, tcx: TyCtxt<'_>) -> hir::Defaultness {
         tcx.impl_defaultness(self.def_id)
     }
@@ -72,7 +77,7 @@ pub fn signature(&self, tcx: TyCtxt<'_>) -> String {
             ty::AssocKind::Fn => {
                 // We skip the binder here because the binder would deanonymize all
                 // late-bound regions, and we don't want method signatures to show up
-                // `as for<'r> fn(&'r MyType)`.  Pretty-printing handles late-bound
+                // `as for<'r> fn(&'r MyType)`. Pretty-printing handles late-bound
                 // regions just fine, showing `fn(&MyType)`.
                 tcx.fn_sig(self.def_id).skip_binder().to_string()
             }
index 63f31e5a11f39e96e835a308d2e12653767d22f7..ce04d8d21f4cdf2e4255d13faccc98f781d61681 100644 (file)
@@ -874,7 +874,7 @@ pub fn def_path_table(self) -> &'tcx rustc_hir::definitions::DefPathTable {
         self.dep_graph.read_index(DepNodeIndex::FOREVER_RED_NODE);
 
         // Leak a read lock once we start iterating on definitions, to prevent adding new ones
-        // while iterating.  If some query needs to add definitions, it should be `ensure`d above.
+        // while iterating. If some query needs to add definitions, it should be `ensure`d above.
         let definitions = self.untracked.definitions.leak();
         definitions.def_path_table()
     }
@@ -886,7 +886,7 @@ pub fn def_path_hash_to_def_index_map(
         // definitions change.
         self.ensure().hir_crate(());
         // Leak a read lock once we start iterating on definitions, to prevent adding new ones
-        // while iterating.  If some query needs to add definitions, it should be `ensure`d above.
+        // while iterating. If some query needs to add definitions, it should be `ensure`d above.
         let definitions = self.untracked.definitions.leak();
         definitions.def_path_hash_to_def_index_map()
     }
@@ -1952,6 +1952,15 @@ pub fn mk_generator_witness(self, types: ty::Binder<'tcx, &'tcx List<Ty<'tcx>>>)
         self.mk_ty(GeneratorWitness(types))
     }
 
+    /// Creates a `&mut Context<'_>` [`Ty`] with erased lifetimes.
+    pub fn mk_task_context(self) -> Ty<'tcx> {
+        let context_did = self.require_lang_item(LangItem::Context, None);
+        let context_adt_ref = self.adt_def(context_did);
+        let context_substs = self.intern_substs(&[self.lifetimes.re_erased.into()]);
+        let context_ty = self.mk_adt(context_adt_ref, context_substs);
+        self.mk_mut_ref(self.lifetimes.re_erased, context_ty)
+    }
+
     #[inline]
     pub fn mk_ty_var(self, v: TyVid) -> Ty<'tcx> {
         self.mk_ty_infer(TyVar(v))
index 8c22df7395f1052669ad14f2e36cc7e187bf4b23..4b4518f61e8d39b444bb4716a0ff13cf1966b54d 100644 (file)
@@ -4,12 +4,13 @@
 
 use crate::ty::{
     visit::TypeVisitable, AliasTy, Const, ConstKind, DefIdTree, InferConst, InferTy, Opaque,
-    PolyTraitPredicate, Ty, TyCtxt, TypeSuperVisitable, TypeVisitor,
+    PolyTraitPredicate, Projection, Ty, TyCtxt, TypeSuperVisitable, TypeVisitor,
 };
 
 use rustc_data_structures::fx::FxHashMap;
 use rustc_errors::{Applicability, Diagnostic, DiagnosticArgValue, IntoDiagnosticArg};
 use rustc_hir as hir;
+use rustc_hir::def::DefKind;
 use rustc_hir::def_id::DefId;
 use rustc_hir::WherePredicate;
 use rustc_span::Span;
@@ -17,7 +18,7 @@
 
 impl<'tcx> IntoDiagnosticArg for Ty<'tcx> {
     fn into_diagnostic_arg(self) -> DiagnosticArgValue<'static> {
-        format!("{}", self).into_diagnostic_arg()
+        self.to_string().into_diagnostic_arg()
     }
 }
 
@@ -443,7 +444,7 @@ impl<'tcx> TypeVisitor<'tcx> for IsSuggestableVisitor<'tcx> {
     type BreakTy = ();
 
     fn visit_ty(&mut self, t: Ty<'tcx>) -> ControlFlow<Self::BreakTy> {
-        match t.kind() {
+        match *t.kind() {
             Infer(InferTy::TyVar(_)) if self.infer_suggestable => {}
 
             FnDef(..)
@@ -458,9 +459,9 @@ fn visit_ty(&mut self, t: Ty<'tcx>) -> ControlFlow<Self::BreakTy> {
             }
 
             Alias(Opaque, AliasTy { def_id, .. }) => {
-                let parent = self.tcx.parent(*def_id);
-                if let hir::def::DefKind::TyAlias | hir::def::DefKind::AssocTy = self.tcx.def_kind(parent)
-                    && let Alias(Opaque, AliasTy { def_id: parent_opaque_def_id, .. }) = self.tcx.type_of(parent).kind()
+                let parent = self.tcx.parent(def_id);
+                if let DefKind::TyAlias | DefKind::AssocTy = self.tcx.def_kind(parent)
+                    && let Alias(Opaque, AliasTy { def_id: parent_opaque_def_id, .. }) = *self.tcx.type_of(parent).kind()
                     && parent_opaque_def_id == def_id
                 {
                     // Okay
@@ -469,6 +470,12 @@ fn visit_ty(&mut self, t: Ty<'tcx>) -> ControlFlow<Self::BreakTy> {
                 }
             }
 
+            Alias(Projection, AliasTy { def_id, .. }) => {
+                if self.tcx.def_kind(def_id) != DefKind::AssocTy {
+                    return ControlFlow::Break(());
+                }
+            }
+
             Param(param) => {
                 // FIXME: It would be nice to make this not use string manipulation,
                 // but it's pretty hard to do this, since `ty::ParamTy` is missing
index 09fee0c3f7c30819138b4df81f49295aa0a6d031..6b9a37d848da2966870256959d7e1d10ecb672cc 100644 (file)
@@ -290,7 +290,7 @@ pub struct RegionFolder<'a, 'tcx> {
     tcx: TyCtxt<'tcx>,
 
     /// Stores the index of a binder *just outside* the stuff we have
-    /// visited.  So this begins as INNERMOST; when we pass through a
+    /// visited. So this begins as INNERMOST; when we pass through a
     /// binder, it is incremented (via `shift_in`).
     current_index: ty::DebruijnIndex,
 
index 8a5e765b9a30664adc3a20f7c240d7ea78d7c147..801ca6004456827daba7eaf84db464e153ac1387 100644 (file)
@@ -341,15 +341,9 @@ pub fn instantiate_own(
         &self,
         tcx: TyCtxt<'tcx>,
         substs: SubstsRef<'tcx>,
-    ) -> InstantiatedPredicates<'tcx> {
-        InstantiatedPredicates {
-            predicates: self
-                .predicates
-                .iter()
-                .map(|(p, _)| EarlyBinder(*p).subst(tcx, substs))
-                .collect(),
-            spans: self.predicates.iter().map(|(_, sp)| *sp).collect(),
-        }
+    ) -> impl Iterator<Item = (Predicate<'tcx>, Span)> + DoubleEndedIterator + ExactSizeIterator
+    {
+        EarlyBinder(self.predicates).subst_iter_copied(tcx, substs)
     }
 
     #[instrument(level = "debug", skip(self, tcx))]
index 4ee4d7caec1f3c86593240e6a3da72eddc30202b..6ac00d16c53de8f859a2395f1c0ff4979653e086 100644 (file)
@@ -756,14 +756,14 @@ fn needs_fn_once_adapter_shim(
             Ok(false)
         }
         (ty::ClosureKind::Fn, ty::ClosureKind::FnMut) => {
-            // The closure fn `llfn` is a `fn(&self, ...)`.  We want a
+            // The closure fn `llfn` is a `fn(&self, ...)`. We want a
             // `fn(&mut self, ...)`. In fact, at codegen time, these are
             // basically the same thing, so we can just return llfn.
             Ok(false)
         }
         (ty::ClosureKind::Fn | ty::ClosureKind::FnMut, ty::ClosureKind::FnOnce) => {
             // The closure fn `llfn` is a `fn(&self, ...)` or `fn(&mut
-            // self, ...)`.  We want a `fn(self, ...)`. We can produce
+            // self, ...)`. We want a `fn(self, ...)`. We can produce
             // this by doing something like:
             //
             //     fn call_once(self, ...) { call_mut(&self, ...) }
index 00f53afd6632854f824eb5df0d8256d61407ba18..dfd016569c27a51de3135d2dbcef1706291d9747 100644 (file)
@@ -879,7 +879,7 @@ fn ty_and_layout_pointee_info_at(
                     //
                     // If the niche is a pointer, it's either valid (according
                     // to its type), or null (which the niche field's scalar
-                    // validity range encodes).  This allows using
+                    // validity range encodes). This allows using
                     // `dereferenceable_or_null` for e.g., `Option<&T>`, and
                     // this will continue to work as long as we don't start
                     // using more niches than just null (e.g., the first page of
index e9bd0e9866fdcde9cfd70e4a775045faea35b8b5..f83bceca3b53ba27bdcab4885698a9ea0193c60d 100644 (file)
@@ -28,7 +28,6 @@
 pub use adt::*;
 pub use assoc::*;
 pub use generics::*;
-use hir::OpaqueTyOrigin;
 use rustc_ast as ast;
 use rustc_ast::node_id::NodeMap;
 use rustc_attr as attr;
@@ -39,7 +38,7 @@
 use rustc_data_structures::tagged_ptr::CopyTaggedPtr;
 use rustc_hir as hir;
 use rustc_hir::def::{CtorKind, CtorOf, DefKind, LifetimeRes, Res};
-use rustc_hir::def_id::{CrateNum, DefId, LocalDefId, LocalDefIdMap};
+use rustc_hir::def_id::{CrateNum, DefId, DefIdMap, LocalDefId, LocalDefIdMap};
 use rustc_hir::Node;
 use rustc_index::vec::IndexVec;
 use rustc_macros::HashStable;
@@ -437,7 +436,7 @@ pub struct CrateVariancesMap<'tcx> {
     /// For each item with generics, maps to a vector of the variance
     /// of its generics. If an item has no generics, it will have no
     /// entry.
-    pub variances: FxHashMap<DefId, &'tcx [ty::Variance]>,
+    pub variances: DefIdMap<&'tcx [ty::Variance]>,
 }
 
 // Contains information needed to resolve types and (in the future) look up
@@ -689,7 +688,7 @@ pub fn subst_supertrait(
         //
         // In terms of why this is sound, the idea is that whenever there
         // is an impl of `T:Foo<'a>`, it must show that `T:Bar<'a,'a>`
-        // holds.  So if there is an impl of `T:Foo<'a>` that applies to
+        // holds. So if there is an impl of `T:Foo<'a>` that applies to
         // all `'a`, then we must know that `T:Bar<'a,'a>` holds for all
         // `'a`.
         //
@@ -701,7 +700,7 @@ pub fn subst_supertrait(
         // Here, if we have `for<'x> T: Foo1<'x>`, then what do we know?
         // The answer is that we know `for<'x,'b> T: Bar1<'x,'b>`. The
         // reason is similar to the previous example: any impl of
-        // `T:Foo1<'x>` must show that `for<'b> T: Bar1<'x, 'b>`.  So
+        // `T:Foo1<'x>` must show that `for<'b> T: Bar1<'x, 'b>`. So
         // basically we would want to collapse the bound lifetimes from
         // the input (`trait_ref`) and the supertraits.
         //
@@ -1252,6 +1251,35 @@ pub fn empty() -> InstantiatedPredicates<'tcx> {
     pub fn is_empty(&self) -> bool {
         self.predicates.is_empty()
     }
+
+    pub fn iter(&self) -> <&Self as IntoIterator>::IntoIter {
+        (&self).into_iter()
+    }
+}
+
+impl<'tcx> IntoIterator for InstantiatedPredicates<'tcx> {
+    type Item = (Predicate<'tcx>, Span);
+
+    type IntoIter = std::iter::Zip<std::vec::IntoIter<Predicate<'tcx>>, std::vec::IntoIter<Span>>;
+
+    fn into_iter(self) -> Self::IntoIter {
+        debug_assert_eq!(self.predicates.len(), self.spans.len());
+        std::iter::zip(self.predicates, self.spans)
+    }
+}
+
+impl<'a, 'tcx> IntoIterator for &'a InstantiatedPredicates<'tcx> {
+    type Item = (Predicate<'tcx>, Span);
+
+    type IntoIter = std::iter::Zip<
+        std::iter::Copied<std::slice::Iter<'a, Predicate<'tcx>>>,
+        std::iter::Copied<std::slice::Iter<'a, Span>>,
+    >;
+
+    fn into_iter(self) -> Self::IntoIter {
+        debug_assert_eq!(self.predicates.len(), self.spans.len());
+        std::iter::zip(self.predicates.iter().copied(), self.spans.iter().copied())
+    }
 }
 
 #[derive(Copy, Clone, Debug, PartialEq, Eq, Hash, HashStable, TyEncodable, TyDecodable, Lift)]
@@ -1316,7 +1344,6 @@ pub fn remap_generic_params_to_declaration_params(
         tcx: TyCtxt<'tcx>,
         // typeck errors have subpar spans for opaque types, so delay error reporting until borrowck.
         ignore_errors: bool,
-        origin: OpaqueTyOrigin,
     ) -> Self {
         let OpaqueTypeKey { def_id, substs } = opaque_type_key;
 
@@ -1330,32 +1357,9 @@ pub fn remap_generic_params_to_declaration_params(
         debug!(?id_substs);
 
         // This zip may have several times the same lifetime in `substs` paired with a different
-        // lifetime from `id_substs`.  Simply `collect`ing the iterator is the correct behaviour:
+        // lifetime from `id_substs`. Simply `collect`ing the iterator is the correct behaviour:
         // it will pick the last one, which is the one we introduced in the impl-trait desugaring.
-        let map = substs.iter().zip(id_substs);
-
-        let map: FxHashMap<GenericArg<'tcx>, GenericArg<'tcx>> = match origin {
-            // HACK: The HIR lowering for async fn does not generate
-            // any `+ Captures<'x>` bounds for the `impl Future<...>`, so all async fns with lifetimes
-            // would now fail to compile. We should probably just make hir lowering fill this in properly.
-            OpaqueTyOrigin::AsyncFn(_) => map.collect(),
-            OpaqueTyOrigin::FnReturn(_) | OpaqueTyOrigin::TyAlias => {
-                // Opaque types may only use regions that are bound. So for
-                // ```rust
-                // type Foo<'a, 'b, 'c> = impl Trait<'a> + 'b;
-                // ```
-                // we may not use `'c` in the hidden type.
-                let variances = tcx.variances_of(def_id);
-                debug!(?variances);
-
-                map.filter(|(_, v)| {
-                    let ty::GenericArgKind::Lifetime(lt) = v.unpack() else { return true };
-                    let ty::ReEarlyBound(ebr) = lt.kind() else { bug!() };
-                    variances[ebr.index as usize] == ty::Variance::Invariant
-                })
-                .collect()
-            }
-        };
+        let map = substs.iter().zip(id_substs).collect();
         debug!("map = {:#?}", map);
 
         // Convert the type from the function into a type valid outside
@@ -2141,7 +2145,7 @@ pub fn opt_item_name(self, def_id: DefId) -> Option<Symbol> {
 
     /// Look up the name of a definition across crates. This does not look at HIR.
     ///
-    /// This method will ICE if the corresponding item does not have a name.  In these cases, use
+    /// This method will ICE if the corresponding item does not have a name. In these cases, use
     /// [`opt_item_name`] instead.
     ///
     /// [`opt_item_name`]: Self::opt_item_name
@@ -2353,6 +2357,11 @@ pub fn trait_is_auto(self, trait_def_id: DefId) -> bool {
         self.trait_def(trait_def_id).has_auto_impl
     }
 
+    /// Returns `true` if this is a trait alias.
+    pub fn trait_is_alias(self, trait_def_id: DefId) -> bool {
+        self.def_kind(trait_def_id) == DefKind::TraitAlias
+    }
+
     pub fn trait_is_coinductive(self, trait_def_id: DefId) -> bool {
         self.trait_is_auto(trait_def_id) || self.lang_items().sized_trait() == Some(trait_def_id)
     }
@@ -2610,7 +2619,7 @@ fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
 }
 
 #[derive(Debug, Default, Copy, Clone)]
-pub struct FoundRelationships {
+pub struct InferVarInfo {
     /// This is true if we identified that this Ty (`?T`) is found in a `?T: Foo`
     /// obligation, where:
     ///
index e32a7ee1c354569c7df399a82b524d88b1190151..24f3d1acff1882e7d42deb256bbf4227c06d6cef 100644 (file)
@@ -52,6 +52,7 @@ impl $crate::ty::ParameterizedOverTcx for $ty {
     usize,
     (),
     u32,
+    bool,
     std::string::String,
     crate::metadata::ModChild,
     crate::middle::codegen_fn_attrs::CodegenFnAttrs,
index a91e8de5f21ea5221f33fccaa8b8b4392b46d178..ae7c20fff0c341d0e8f8e737c75d5dce4cdb9d84 100644 (file)
@@ -393,7 +393,7 @@ fn try_print_trimmed_def_path(
         match self.tcx().trimmed_def_paths(()).get(&def_id) {
             None => Ok((self, false)),
             Some(symbol) => {
-                self.write_str(symbol.as_str())?;
+                write!(self, "{}", Ident::with_dummy_span(*symbol))?;
                 Ok((self, true))
             }
         }
@@ -854,24 +854,7 @@ fn pretty_print_type(mut self, ty: Ty<'tcx>) -> Result<Self::Type, Self::Error>
                 }
                 p!("]");
             }
-            ty::Array(ty, sz) => {
-                p!("[", print(ty), "; ");
-                if self.should_print_verbose() {
-                    p!(write("{:?}", sz));
-                } else if let ty::ConstKind::Unevaluated(..) = sz.kind() {
-                    // Do not try to evaluate unevaluated constants. If we are const evaluating an
-                    // array length anon const, rustc will (with debug assertions) print the
-                    // constant's path. Which will end up here again.
-                    p!("_");
-                } else if let Some(n) = sz.kind().try_to_bits(self.tcx().data_layout.pointer_size) {
-                    p!(write("{}", n));
-                } else if let ty::ConstKind::Param(param) = sz.kind() {
-                    p!(print(param));
-                } else {
-                    p!("_");
-                }
-                p!("]")
-            }
+            ty::Array(ty, sz) => p!("[", print(ty), "; ", print(sz), "]"),
             ty::Slice(ty) => p!("[", print(ty), "]"),
         }
 
@@ -1303,21 +1286,25 @@ macro_rules! print_underscore {
         match ct.kind() {
             ty::ConstKind::Unevaluated(ty::UnevaluatedConst { def, substs }) => {
                 match self.tcx().def_kind(def.did) {
-                    DefKind::Static(..) | DefKind::Const | DefKind::AssocConst => {
+                    DefKind::Const | DefKind::AssocConst => {
                         p!(print_value_path(def.did, substs))
                     }
-                    _ => {
-                        if def.is_local() {
-                            let span = self.tcx().def_span(def.did);
-                            if let Ok(snip) = self.tcx().sess.source_map().span_to_snippet(span) {
-                                p!(write("{}", snip))
-                            } else {
-                                print_underscore!()
-                            }
+                    DefKind::AnonConst => {
+                        if def.is_local()
+                            && let span = self.tcx().def_span(def.did)
+                            && let Ok(snip) = self.tcx().sess.source_map().span_to_snippet(span)
+                        {
+                            p!(write("{}", snip))
                         } else {
-                            print_underscore!()
+                            // Do not call `print_value_path` because, if a parent of this anon const is an impl, it will
+                            // attempt to print out the impl trait ref i.e. `<T as Trait>::{constant#0}`. This would
+                            // cause printing to enter an infinite recursion if the anon const is in the self type i.e.
+                            // `impl<T: Default> Default for [T; 32 - 1 - 1 - 1] {`
+                            // where we would try to print `<[T; /* print `constant#0` again */] as Default>::{constant#0}`
+                            p!(write("{}::{}", self.tcx().crate_name(def.did.krate), self.tcx().def_path(def.did).to_string_no_crate_verbose()))
                         }
                     }
+                    defkind => bug!("`{:?}` has unexpected defkind {:?}", ct, defkind),
                 }
             }
             ty::ConstKind::Infer(infer_ct) => {
@@ -1339,7 +1326,7 @@ macro_rules! print_underscore {
             ty::ConstKind::Placeholder(placeholder) => p!(write("Placeholder({:?})", placeholder)),
             // FIXME(generic_const_exprs):
             // write out some legible representation of an abstract const?
-            ty::ConstKind::Expr(_) => p!("[Const Expr]"),
+            ty::ConstKind::Expr(_) => p!("[const expr]"),
             ty::ConstKind::Error(_) => p!("[const error]"),
         };
         Ok(self)
@@ -2132,9 +2119,9 @@ pub fn pretty_print_region(mut self, region: ty::Region<'tcx>) -> Result<Self, f
 
         let identify_regions = self.tcx.sess.opts.unstable_opts.identify_regions;
 
-        // These printouts are concise.  They do not contain all the information
+        // These printouts are concise. They do not contain all the information
         // the user might want to diagnose an error, but there is basically no way
-        // to fit that into a short string.  Hence the recommendation to use
+        // to fit that into a short string. Hence the recommendation to use
         // `explain_region()` or `note_and_explain_region()`.
         match *region {
             ty::ReEarlyBound(ref data) => {
@@ -2481,7 +2468,7 @@ fn visit_ty(&mut self, ty: Ty<'tcx>) -> ControlFlow<Self::BreakTy> {
                 if not_previously_inserted {
                     ty.super_visit_with(self)
                 } else {
-                    ControlFlow::CONTINUE
+                    ControlFlow::Continue(())
                 }
             }
         }
index 30073b541ecbd0e5c09a32e0c91bcd028a6fa770..7d4d35b7fdf940fd23ae8924f2db2e9a9b9a376e 100644 (file)
@@ -7,7 +7,7 @@
 use crate::ty::fold::{FallibleTypeFolder, TypeFoldable, TypeSuperFoldable};
 use crate::ty::print::{with_no_trimmed_paths, FmtPrinter, Printer};
 use crate::ty::visit::{TypeSuperVisitable, TypeVisitable, TypeVisitor};
-use crate::ty::{self, InferConst, Lift, Term, TermKind, Ty, TyCtxt};
+use crate::ty::{self, AliasTy, InferConst, Lift, Term, TermKind, Ty, TyCtxt};
 use rustc_data_structures::functor::IdFunctor;
 use rustc_hir::def::Namespace;
 use rustc_index::vec::{Idx, IndexVec};
@@ -180,6 +180,15 @@ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
     }
 }
 
+impl<'tcx> fmt::Debug for AliasTy<'tcx> {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_struct("AliasTy")
+            .field("substs", &self.substs)
+            .field("def_id", &self.def_id)
+            .finish()
+    }
+}
+
 ///////////////////////////////////////////////////////////////////////////
 // Atomic structs
 //
@@ -227,6 +236,7 @@ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
     crate::ty::BoundRegionKind,
     crate::ty::AssocItem,
     crate::ty::AssocKind,
+    crate::ty::AliasKind,
     crate::ty::Placeholder<crate::ty::BoundRegionKind>,
     crate::ty::ClosureKind,
     crate::ty::FreeRegion,
@@ -357,7 +367,7 @@ fn try_fold_with<F: FallibleTypeFolder<'tcx>>(self, _folder: &mut F) -> Result<S
 
 impl<'tcx> TypeVisitable<'tcx> for ty::AdtDef<'tcx> {
     fn visit_with<V: TypeVisitor<'tcx>>(&self, _visitor: &mut V) -> ControlFlow<V::BreakTy> {
-        ControlFlow::CONTINUE
+        ControlFlow::Continue(())
     }
 }
 
@@ -455,7 +465,7 @@ fn try_fold_with<F: FallibleTypeFolder<'tcx>>(
             let slot = Rc::get_mut_unchecked(&mut unique);
 
             // Semantically move the contained type out from `unique`, fold
-            // it, then move the folded value back into `unique`.  Should
+            // it, then move the folded value back into `unique`. Should
             // folding fail, `ManuallyDrop` ensures that the "moved-out"
             // value is not re-dropped.
             let owned = ManuallyDrop::take(slot);
@@ -501,7 +511,7 @@ fn try_fold_with<F: FallibleTypeFolder<'tcx>>(
             let slot = Arc::get_mut_unchecked(&mut unique);
 
             // Semantically move the contained type out from `unique`, fold
-            // it, then move the folded value back into `unique`.  Should
+            // it, then move the folded value back into `unique`. Should
             // folding fail, `ManuallyDrop` ensures that the "moved-out"
             // value is not re-dropped.
             let owned = ManuallyDrop::take(slot);
@@ -704,7 +714,7 @@ fn super_visit_with<V: TypeVisitor<'tcx>>(&self, visitor: &mut V) -> ControlFlow
             | ty::Placeholder(..)
             | ty::Param(..)
             | ty::Never
-            | ty::Foreign(..) => ControlFlow::CONTINUE,
+            | ty::Foreign(..) => ControlFlow::Continue(()),
         }
     }
 }
@@ -732,7 +742,7 @@ fn try_super_fold_with<F: FallibleTypeFolder<'tcx>>(
 
 impl<'tcx> TypeSuperVisitable<'tcx> for ty::Region<'tcx> {
     fn super_visit_with<V: TypeVisitor<'tcx>>(&self, _visitor: &mut V) -> ControlFlow<V::BreakTy> {
-        ControlFlow::CONTINUE
+        ControlFlow::Continue(())
     }
 }
 
@@ -834,7 +844,7 @@ fn try_fold_with<F: FallibleTypeFolder<'tcx>>(self, _folder: &mut F) -> Result<S
 
 impl<'tcx> TypeVisitable<'tcx> for InferConst<'tcx> {
     fn visit_with<V: TypeVisitor<'tcx>>(&self, _visitor: &mut V) -> ControlFlow<V::BreakTy> {
-        ControlFlow::CONTINUE
+        ControlFlow::Continue(())
     }
 }
 
index bd5b04d5b2baac92b05047c271e24954d8817df8..6a7b23e40a7792edb3cb7f72000cbb2f6a7321f3 100644 (file)
@@ -7,8 +7,8 @@
 use crate::ty::visit::ValidateBoundVars;
 use crate::ty::InferTy::*;
 use crate::ty::{
-    self, AdtDef, DefIdTree, Discr, Term, Ty, TyCtxt, TypeFlags, TypeSuperVisitable, TypeVisitable,
-    TypeVisitor,
+    self, AdtDef, DefIdTree, Discr, FallibleTypeFolder, Term, Ty, TyCtxt, TypeFlags, TypeFoldable,
+    TypeSuperFoldable, TypeSuperVisitable, TypeVisitable, TypeVisitor,
 };
 use crate::ty::{List, ParamEnv};
 use hir::def::DefKind;
@@ -100,6 +100,13 @@ pub fn get_name(&self) -> Option<Symbol> {
 
         None
     }
+
+    pub fn get_id(&self) -> Option<DefId> {
+        match *self {
+            BoundRegionKind::BrNamed(id, _) => return Some(id),
+            _ => None,
+        }
+    }
 }
 
 pub trait Article {
@@ -205,7 +212,7 @@ fn article(&self) -> &'static str {
 ///
 /// ## Generators
 ///
-/// Generators are handled similarly in `GeneratorSubsts`.  The set of
+/// Generators are handled similarly in `GeneratorSubsts`. The set of
 /// type parameters is similar, but `CK` and `CS` are replaced by the
 /// following type parameters:
 ///
@@ -1135,12 +1142,87 @@ pub fn iter(self) -> impl Iterator<Item = ty::Binder<'tcx, T::Item>> {
     }
 }
 
+struct SkipBindersAt<'tcx> {
+    tcx: TyCtxt<'tcx>,
+    index: ty::DebruijnIndex,
+}
+
+impl<'tcx> FallibleTypeFolder<'tcx> for SkipBindersAt<'tcx> {
+    type Error = ();
+
+    fn tcx(&self) -> TyCtxt<'tcx> {
+        self.tcx
+    }
+
+    fn try_fold_binder<T>(&mut self, t: Binder<'tcx, T>) -> Result<Binder<'tcx, T>, Self::Error>
+    where
+        T: ty::TypeFoldable<'tcx>,
+    {
+        self.index.shift_in(1);
+        let value = t.try_map_bound(|t| t.try_fold_with(self));
+        self.index.shift_out(1);
+        value
+    }
+
+    fn try_fold_ty(&mut self, ty: Ty<'tcx>) -> Result<Ty<'tcx>, Self::Error> {
+        if !ty.has_escaping_bound_vars() {
+            Ok(ty)
+        } else if let ty::Bound(index, bv) = *ty.kind() {
+            if index == self.index {
+                Err(())
+            } else {
+                Ok(self.tcx().mk_ty(ty::Bound(index.shifted_out(1), bv)))
+            }
+        } else {
+            ty.try_super_fold_with(self)
+        }
+    }
+
+    fn try_fold_region(&mut self, r: ty::Region<'tcx>) -> Result<ty::Region<'tcx>, Self::Error> {
+        if !r.has_escaping_bound_vars() {
+            Ok(r)
+        } else if let ty::ReLateBound(index, bv) = r.kind() {
+            if index == self.index {
+                Err(())
+            } else {
+                Ok(self.tcx().mk_region(ty::ReLateBound(index.shifted_out(1), bv)))
+            }
+        } else {
+            r.try_super_fold_with(self)
+        }
+    }
+
+    fn try_fold_const(&mut self, ct: ty::Const<'tcx>) -> Result<ty::Const<'tcx>, Self::Error> {
+        if !ct.has_escaping_bound_vars() {
+            Ok(ct)
+        } else if let ty::ConstKind::Bound(index, bv) = ct.kind() {
+            if index == self.index {
+                Err(())
+            } else {
+                Ok(self.tcx().mk_const(
+                    ty::ConstKind::Bound(index.shifted_out(1), bv),
+                    ct.ty().try_fold_with(self)?,
+                ))
+            }
+        } else {
+            ct.try_super_fold_with(self)
+        }
+    }
+
+    fn try_fold_predicate(
+        &mut self,
+        p: ty::Predicate<'tcx>,
+    ) -> Result<ty::Predicate<'tcx>, Self::Error> {
+        if !p.has_escaping_bound_vars() { Ok(p) } else { p.try_super_fold_with(self) }
+    }
+}
+
 /// Represents the projection of an associated type.
 ///
 /// For a projection, this would be `<Ty as Trait<...>>::N`.
 ///
 /// For an opaque type, there is no explicit syntax.
-#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Debug, TyEncodable, TyDecodable)]
+#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, TyEncodable, TyDecodable)]
 #[derive(HashStable, TypeFoldable, TypeVisitable, Lift)]
 pub struct AliasTy<'tcx> {
     /// The parameters of the associated or opaque item.
@@ -1163,11 +1245,26 @@ pub struct AliasTy<'tcx> {
     /// aka. `tcx.parent(def_id)`.
     pub def_id: DefId,
 
-    /// This field exists to prevent the creation of `ProjectionTy` without using
+    /// This field exists to prevent the creation of `AliasTy` without using
     /// [TyCtxt::mk_alias_ty].
     pub(super) _use_mk_alias_ty_instead: (),
 }
 
+impl<'tcx> AliasTy<'tcx> {
+    pub fn kind(self, tcx: TyCtxt<'tcx>) -> ty::AliasKind {
+        match tcx.def_kind(self.def_id) {
+            DefKind::AssocTy | DefKind::ImplTraitPlaceholder => ty::Projection,
+            DefKind::OpaqueTy => ty::Opaque,
+            kind => bug!("unexpected DefKind in AliasTy: {kind:?}"),
+        }
+    }
+
+    pub fn to_ty(self, tcx: TyCtxt<'tcx>) -> Ty<'tcx> {
+        tcx.mk_ty(ty::Alias(self.kind(tcx), self))
+    }
+}
+
+/// The following methods work only with associated type projections.
 impl<'tcx> AliasTy<'tcx> {
     pub fn trait_def_id(self, tcx: TyCtxt<'tcx>) -> DefId {
         match tcx.def_kind(self.def_id) {
@@ -1175,7 +1272,7 @@ pub fn trait_def_id(self, tcx: TyCtxt<'tcx>) -> DefId {
             DefKind::ImplTraitPlaceholder => {
                 tcx.parent(tcx.impl_trait_in_trait_parent(self.def_id))
             }
-            kind => bug!("unexpected DefKind in ProjectionTy: {kind:?}"),
+            kind => bug!("expected a projection AliasTy; found {kind:?}"),
         }
     }
 
@@ -1929,7 +2026,7 @@ impl<'tcx> TypeVisitor<'tcx> for ContainsTyVisitor<'tcx> {
             type BreakTy = ();
 
             fn visit_ty(&mut self, t: Ty<'tcx>) -> ControlFlow<Self::BreakTy> {
-                if self.0 == t { ControlFlow::BREAK } else { t.super_visit_with(self) }
+                if self.0 == t { ControlFlow::Break(()) } else { t.super_visit_with(self) }
             }
         }
 
index 8f764011d0ac32c45d71125eb777f323d3b81d6d..a07582fc8ff1ec6e8a859d4642003d90806c8bd0 100644 (file)
@@ -7,6 +7,7 @@
 use crate::ty::{self, Lift, List, ParamConst, Ty, TyCtxt};
 
 use rustc_data_structures::intern::Interned;
+use rustc_errors::{DiagnosticArgValue, IntoDiagnosticArg};
 use rustc_hir::def_id::DefId;
 use rustc_macros::HashStable;
 use rustc_serialize::{self, Decodable, Encodable};
@@ -36,6 +37,12 @@ pub struct GenericArg<'tcx> {
     marker: PhantomData<(Ty<'tcx>, ty::Region<'tcx>, ty::Const<'tcx>)>,
 }
 
+impl<'tcx> IntoDiagnosticArg for GenericArg<'tcx> {
+    fn into_diagnostic_arg(self) -> DiagnosticArgValue<'static> {
+        self.to_string().into_diagnostic_arg()
+    }
+}
+
 const TAG_MASK: usize = 0b11;
 const TYPE_TAG: usize = 0b00;
 const REGION_TAG: usize = 0b01;
@@ -538,6 +545,9 @@ fn visit_with<V: TypeVisitor<'tcx>>(&self, visitor: &mut V) -> ControlFlow<V::Br
 /// Similar to [`super::Binder`] except that it tracks early bound generics, i.e. `struct Foo<T>(T)`
 /// needs `T` substituted immediately. This type primarily exists to avoid forgetting to call
 /// `subst`.
+///
+/// If you don't have anything to `subst`, you may be looking for
+/// [`subst_identity`](EarlyBinder::subst_identity) or [`skip_binder`](EarlyBinder::skip_binder).
 #[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Debug)]
 #[derive(Encodable, Decodable, HashStable)]
 pub struct EarlyBinder<T>(pub T);
@@ -578,6 +588,14 @@ pub fn rebind<U>(&self, value: U) -> EarlyBinder<U> {
         EarlyBinder(value)
     }
 
+    /// Skips the binder and returns the "bound" value.
+    /// This can be used to extract data that does not depend on generic parameters
+    /// (e.g., getting the `DefId` of the inner value or getting the number of
+    /// arguments of an `FnSig`). Otherwise, consider using
+    /// [`subst_identity`](EarlyBinder::subst_identity).
+    ///
+    /// See also [`Binder::skip_binder`](super::Binder::skip_binder), which is
+    /// the analogous operation on [`super::Binder`].
     pub fn skip_binder(self) -> T {
         self.0
     }
@@ -639,6 +657,13 @@ fn next_back(&mut self) -> Option<Self::Item> {
     }
 }
 
+impl<'tcx, I: IntoIterator> ExactSizeIterator for SubstIter<'_, 'tcx, I>
+where
+    I::IntoIter: ExactSizeIterator,
+    I::Item: TypeFoldable<'tcx>,
+{
+}
+
 impl<'tcx, 's, I: IntoIterator> EarlyBinder<I>
 where
     I::Item: Deref,
@@ -686,6 +711,14 @@ fn next_back(&mut self) -> Option<Self::Item> {
     }
 }
 
+impl<'tcx, I: IntoIterator> ExactSizeIterator for SubstIterCopied<'_, 'tcx, I>
+where
+    I::IntoIter: ExactSizeIterator,
+    I::Item: Deref,
+    <I::Item as Deref>::Target: Copy + TypeFoldable<'tcx>,
+{
+}
+
 pub struct EarlyBinderIter<T> {
     t: T,
 }
@@ -714,6 +747,14 @@ pub fn subst(self, tcx: TyCtxt<'tcx>, substs: &[GenericArg<'tcx>]) -> T {
         self.0.fold_with(&mut folder)
     }
 
+    /// Makes the identity substitution `T0 => T0, ..., TN => TN`.
+    /// Conceptually, this converts universally bound variables into placeholders
+    /// when inside of a given item.
+    ///
+    /// For example, consider `for<T> fn foo<T>(){ .. }`:
+    /// - Outside of `foo`, `T` is bound (represented by the presence of `EarlyBinder`).
+    /// - Inside of the body of `foo`, we treat `T` as a placeholder by calling
+    /// `subst_identity` to discharge the `EarlyBinder`.
     pub fn subst_identity(self) -> T {
         self.0
     }
index 028a03c0b2bdd78d525892e8c5923289af1149e6..2902c6dc556e4140594243d2507e011055bab13a 100644 (file)
@@ -6,7 +6,12 @@
         GenericArgKind, InternalSubsts, SubstsRef, Ty, UserSubsts,
     },
 };
-use rustc_data_structures::{fx::FxHashMap, sync::Lrc, unord::UnordSet, vec_map::VecMap};
+use rustc_data_structures::{
+    fx::FxHashMap,
+    sync::Lrc,
+    unord::{UnordItems, UnordSet},
+    vec_map::VecMap,
+};
 use rustc_errors::ErrorGuaranteed;
 use rustc_hir as hir;
 use rustc_hir::{
 use rustc_middle::mir::FakeReadCause;
 use rustc_session::Session;
 use rustc_span::Span;
-use std::{
-    collections::hash_map::{self, Entry},
-    hash::Hash,
-    iter,
-};
+use std::{collections::hash_map::Entry, hash::Hash, iter};
 
 use super::RvalueScopes;
 
@@ -193,7 +194,7 @@ pub struct TypeckResults<'tcx> {
     pub generator_interior_types: ty::Binder<'tcx, Vec<GeneratorInteriorTypeCause<'tcx>>>,
 
     /// We sometimes treat byte string literals (which are of type `&[u8; N]`)
-    /// as `&[u8]`, depending on the pattern  in which they are used.
+    /// as `&[u8]`, depending on the pattern in which they are used.
     /// This hashset records all instances where we behave
     /// like this to allow `const_to_pat` to reliably handle this situation.
     pub treat_byte_string_as_slice: ItemLocalSet,
@@ -397,10 +398,10 @@ pub fn pat_ty(&self, pat: &hir::Pat<'_>) -> Ty<'tcx> {
 
     /// Returns the type of an expression as a monotype.
     ///
-    /// NB (1): This is the PRE-ADJUSTMENT TYPE for the expression.  That is, in
+    /// NB (1): This is the PRE-ADJUSTMENT TYPE for the expression. That is, in
     /// some cases, we insert `Adjustment` annotations such as auto-deref or
-    /// auto-ref.  The type returned by this function does not consider such
-    /// adjustments.  See `expr_ty_adjusted()` instead.
+    /// auto-ref. The type returned by this function does not consider such
+    /// adjustments. See `expr_ty_adjusted()` instead.
     ///
     /// NB (2): This type doesn't provide type parameter substitutions; e.g., if you
     /// ask for the type of `id` in `id(3)`, it will return `fn(&isize) -> isize`
@@ -567,8 +568,15 @@ pub fn get(&self, id: hir::HirId) -> Option<&V> {
         self.data.get(&id.local_id)
     }
 
-    pub fn iter(&self) -> hash_map::Iter<'_, hir::ItemLocalId, V> {
-        self.data.iter()
+    pub fn items(
+        &'a self,
+    ) -> UnordItems<(hir::ItemLocalId, &'a V), impl Iterator<Item = (hir::ItemLocalId, &'a V)>>
+    {
+        self.data.items().map(|(id, value)| (*id, value))
+    }
+
+    pub fn items_in_stable_order(&self) -> Vec<(ItemLocalId, &'a V)> {
+        self.data.to_sorted_stable_ord()
     }
 }
 
@@ -605,6 +613,16 @@ pub fn remove(&mut self, id: hir::HirId) -> Option<V> {
         validate_hir_id_for_typeck_results(self.hir_owner, id);
         self.data.remove(&id.local_id)
     }
+
+    pub fn extend(
+        &mut self,
+        items: UnordItems<(hir::HirId, V), impl Iterator<Item = (hir::HirId, V)>>,
+    ) {
+        self.data.extend(items.map(|(id, value)| {
+            validate_hir_id_for_typeck_results(self.hir_owner, id);
+            (id.local_id, value)
+        }))
+    }
 }
 
 rustc_index::newtype_index! {
index 1286a5253c068618fcc4da0bee0515990a8eb515..d0d1dcc584f1b4f8a6222c6b9574b4e8ad7e13b4 100644 (file)
@@ -659,27 +659,6 @@ pub fn bound_explicit_item_bounds(
         ty::EarlyBinder(self.explicit_item_bounds(def_id))
     }
 
-    pub fn bound_item_bounds(
-        self,
-        def_id: DefId,
-    ) -> ty::EarlyBinder<&'tcx ty::List<ty::Predicate<'tcx>>> {
-        ty::EarlyBinder(self.item_bounds(def_id))
-    }
-
-    pub fn bound_predicates_of(
-        self,
-        def_id: DefId,
-    ) -> ty::EarlyBinder<ty::generics::GenericPredicates<'tcx>> {
-        ty::EarlyBinder(self.predicates_of(def_id))
-    }
-
-    pub fn bound_explicit_predicates_of(
-        self,
-        def_id: DefId,
-    ) -> ty::EarlyBinder<ty::generics::GenericPredicates<'tcx>> {
-        ty::EarlyBinder(self.explicit_predicates_of(def_id))
-    }
-
     pub fn bound_impl_subject(self, def_id: DefId) -> ty::EarlyBinder<ty::ImplSubject<'tcx>> {
         ty::EarlyBinder(self.impl_subject(def_id))
     }
index ca445558131380e364370ea518fb37a69a4c7bfa..bee3cc4d7cb9bf985a2b3b1583a472a135d51255 100644 (file)
@@ -294,13 +294,13 @@ fn visit_binder<T: TypeVisitable<'tcx>>(
             fn visit_region(&mut self, r: ty::Region<'tcx>) -> ControlFlow<Self::BreakTy> {
                 match *r {
                     ty::ReLateBound(debruijn, _) if debruijn < self.outer_index => {
-                        ControlFlow::CONTINUE
+                        ControlFlow::Continue(())
                     }
                     _ => {
                         if (self.callback)(r) {
-                            ControlFlow::BREAK
+                            ControlFlow::Break(())
                         } else {
-                            ControlFlow::CONTINUE
+                            ControlFlow::Continue(())
                         }
                     }
                 }
@@ -311,7 +311,7 @@ fn visit_ty(&mut self, ty: Ty<'tcx>) -> ControlFlow<Self::BreakTy> {
                 if ty.flags().intersects(TypeFlags::HAS_FREE_REGIONS) {
                     ty.super_visit_with(self)
                 } else {
-                    ControlFlow::CONTINUE
+                    ControlFlow::Continue(())
                 }
             }
         }
@@ -394,7 +394,7 @@ fn visit_ty(&mut self, t: Ty<'tcx>) -> ControlFlow<Self::BreakTy> {
         if t.outer_exclusive_binder() < self.binder_index
             || !self.visited.insert((self.binder_index, t))
         {
-            return ControlFlow::BREAK;
+            return ControlFlow::Break(());
         }
         match *t.kind() {
             ty::Bound(debruijn, bound_ty) if debruijn == self.binder_index => {
@@ -512,7 +512,7 @@ fn visit_ty(&mut self, t: Ty<'tcx>) -> ControlFlow<Self::BreakTy> {
         if t.outer_exclusive_binder() > self.outer_index {
             ControlFlow::Break(FoundEscapingVars)
         } else {
-            ControlFlow::CONTINUE
+            ControlFlow::Continue(())
         }
     }
 
@@ -524,7 +524,7 @@ fn visit_region(&mut self, r: ty::Region<'tcx>) -> ControlFlow<Self::BreakTy> {
         if r.bound_at_or_above_binder(self.outer_index) {
             ControlFlow::Break(FoundEscapingVars)
         } else {
-            ControlFlow::CONTINUE
+            ControlFlow::Continue(())
         }
     }
 
@@ -547,7 +547,7 @@ fn visit_predicate(&mut self, predicate: ty::Predicate<'tcx>) -> ControlFlow<Sel
         if predicate.outer_exclusive_binder() > self.outer_index {
             ControlFlow::Break(FoundEscapingVars)
         } else {
-            ControlFlow::CONTINUE
+            ControlFlow::Continue(())
         }
     }
 }
@@ -575,7 +575,7 @@ fn visit_ty(&mut self, t: Ty<'tcx>) -> ControlFlow<Self::BreakTy> {
         if flags.intersects(self.flags) {
             ControlFlow::Break(FoundFlags)
         } else {
-            ControlFlow::CONTINUE
+            ControlFlow::Continue(())
         }
     }
 
@@ -585,7 +585,7 @@ fn visit_region(&mut self, r: ty::Region<'tcx>) -> ControlFlow<Self::BreakTy> {
         if flags.intersects(self.flags) {
             ControlFlow::Break(FoundFlags)
         } else {
-            ControlFlow::CONTINUE
+            ControlFlow::Continue(())
         }
     }
 
@@ -596,7 +596,7 @@ fn visit_const(&mut self, c: ty::Const<'tcx>) -> ControlFlow<Self::BreakTy> {
         if flags.intersects(self.flags) {
             ControlFlow::Break(FoundFlags)
         } else {
-            ControlFlow::CONTINUE
+            ControlFlow::Continue(())
         }
     }
 
@@ -605,7 +605,7 @@ fn visit_predicate(&mut self, predicate: ty::Predicate<'tcx>) -> ControlFlow<Sel
         if predicate.flags().intersects(self.flags) {
             ControlFlow::Break(FoundFlags)
         } else {
-            ControlFlow::CONTINUE
+            ControlFlow::Continue(())
         }
     }
 }
@@ -653,7 +653,7 @@ fn visit_ty(&mut self, t: Ty<'tcx>) -> ControlFlow<Self::BreakTy> {
         // in the normalized form
         if self.just_constrained {
             if let ty::Alias(..) = t.kind() {
-                return ControlFlow::CONTINUE;
+                return ControlFlow::Continue(());
             }
         }
 
@@ -666,7 +666,7 @@ fn visit_const(&mut self, c: ty::Const<'tcx>) -> ControlFlow<Self::BreakTy> {
         // in the normalized form
         if self.just_constrained {
             if let ty::ConstKind::Unevaluated(..) = c.kind() {
-                return ControlFlow::CONTINUE;
+                return ControlFlow::Continue(());
             }
         }
 
@@ -679,7 +679,7 @@ fn visit_region(&mut self, r: ty::Region<'tcx>) -> ControlFlow<Self::BreakTy> {
                 self.regions.insert(br.kind);
             }
         }
-        ControlFlow::CONTINUE
+        ControlFlow::Continue(())
     }
 }
 
@@ -726,6 +726,6 @@ fn visit_region(&mut self, r: ty::Region<'tcx>) -> ControlFlow<Self::BreakTy> {
             );
         }
 
-        ControlFlow::CONTINUE
+        ControlFlow::Continue(())
     }
 }
index 34fefb99e09c25c1b034a2f431ce38dafbf3e39c..33fdc1901cd781b2832dbcdf832271ed706d5f8b 100644 (file)
@@ -86,10 +86,10 @@ pub(super) fn build_custom_mir<'tcx>(
         block_map: FxHashMap::default(),
     };
 
-    let res = (|| {
+    let res: PResult<_> = try {
         pctxt.parse_args(&params)?;
-        pctxt.parse_body(expr)
-    })();
+        pctxt.parse_body(expr)?;
+    };
     if let Err(err) = res {
         tcx.sess.diagnostic().span_fatal(
             err.span,
index dca4906c07de54bd2042edccd8ab65a5702e44fb..0bca02589bce131284365337c70dfa61bcae3596 100644 (file)
 impl<'tcx, 'body> ParseCtxt<'tcx, 'body> {
     pub fn parse_statement(&self, expr_id: ExprId) -> PResult<StatementKind<'tcx>> {
         parse_by_kind!(self, expr_id, _, "statement",
+            @call("mir_storage_live", args) => {
+                Ok(StatementKind::StorageLive(self.parse_local(args[0])?))
+            },
+            @call("mir_storage_dead", args) => {
+                Ok(StatementKind::StorageDead(self.parse_local(args[0])?))
+            },
             @call("mir_retag", args) => {
                 Ok(StatementKind::Retag(RetagKind::Default, Box::new(self.parse_place(args[0])?)))
             },
index dbcb0132c9f88f5993ca0c7920ba79ada3cb9d07..c621efb3b3a523e888da8f8670f926d19cb9d09c 100644 (file)
@@ -27,7 +27,7 @@ pub(crate) fn as_local_operand(
     /// suitable also to be passed as function arguments.
     ///
     /// The operand returned from this function will *not be valid* after an ExprKind::Scope is
-    /// passed, so please do *not* return it from functions to avoid bad miscompiles.  Returns an
+    /// passed, so please do *not* return it from functions to avoid bad miscompiles. Returns an
     /// operand suitable for use as a call argument. This is almost always equivalent to
     /// `as_operand`, except for the particular case of passing values of (potentially) unsized
     /// types "by value" (see details below).
index e9f327978aab1f0ac0617bafe6f3d85af4b31251..7808368519351b145df87a3b5f38b88f9a584f25 100644 (file)
@@ -57,7 +57,7 @@ pub(crate) fn stmt_expr(
                 // question raised here -- should we "freeze" the
                 // value of the lhs here?  I'm inclined to think not,
                 // since it seems closer to the semantics of the
-                // overloaded version, which takes `&mut self`.  This
+                // overloaded version, which takes `&mut self`. This
                 // only affects weird things like `x += {x += 1; x}`
                 // -- is that equal to `x + (x + 1)` or `2*(x+1)`?
 
@@ -113,7 +113,7 @@ pub(crate) fn stmt_expr(
                 //
                 // it is usually better to focus on `the_value` rather
                 // than the entirety of block(s) surrounding it.
-                let adjusted_span = (|| {
+                let adjusted_span =
                     if let ExprKind::Block { block } = expr.kind
                         && let Some(tail_ex) = this.thir[block].expr
                     {
@@ -135,10 +135,10 @@ pub(crate) fn stmt_expr(
                             tail_result_is_ignored: true,
                             span: expr.span,
                         });
-                        return Some(expr.span);
-                    }
-                    None
-                })();
+                        Some(expr.span)
+                    } else {
+                        None
+                    };
 
                 let temp =
                     unpack!(block = this.as_temp(block, statement_scope, expr, Mutability::Not));
index f90aba80bf3cf574f17fb3b0571cac22b5b4d6d2..0961ce11e2f9a0f710d78e7bce6f96ecd496655e 100644 (file)
@@ -1870,7 +1870,7 @@ fn bind_and_guard_matched_candidate<'pat>(
         // ```
         // let place = Foo::new();
         // match place { foo if inspect(foo)
-        //     => feed(foo), ...  }
+        //     => feed(foo), ... }
         // ```
         //
         // will be treated as if it were really something like:
@@ -1885,7 +1885,7 @@ fn bind_and_guard_matched_candidate<'pat>(
         // ```
         // let place = Foo::new();
         // match place { ref mut foo if inspect(foo)
-        //     => feed(foo), ...  }
+        //     => feed(foo), ... }
         // ```
         //
         // will be treated as if it were really something like:
index 46e14cc9ac3b10667b9b5a5c2b19e9f40af71655..ad7a568a231814fcf8c7dbe9aed308eb125716f6 100644 (file)
@@ -456,7 +456,7 @@ fn non_scalar_compare(
                     span: source_info.span,
 
                     // FIXME(#54571): This constant comes from user input (a
-                    // constant in a pattern).  Are there forms where users can add
+                    // constant in a pattern). Are there forms where users can add
                     // type annotations here?  For example, an associated constant?
                     // Need to experiment.
                     user_ty: None,
@@ -504,7 +504,7 @@ fn non_scalar_compare(
     /// This is used by the overall `match_candidates` algorithm to structure
     /// the match as a whole. See `match_candidates` for more details.
     ///
-    /// FIXME(#29623). In some cases, we have some tricky choices to make.  for
+    /// FIXME(#29623). In some cases, we have some tricky choices to make. for
     /// example, if we are testing that `x == 22`, but the candidate is `x @
     /// 13..55`, what should we do? In the event that the test is true, we know
     /// that the candidate applies, but in the event of false, we don't know
index c92634a609de0f762df392231779f6510bc5a72e..591b416337b3595c36380af7d0197cccf9562086 100644 (file)
@@ -53,7 +53,7 @@
 ```
 
 When processing the `let x`, we will add one drop to the scope for
-`x`.  The break will then insert a drop for `x`. When we process `let
+`x`. The break will then insert a drop for `x`. When we process `let
 y`, we will add another drop (in fact, to a subscope, but let's ignore
 that for now); any later drops would also drop `y`.
 
@@ -757,7 +757,7 @@ pub(crate) fn maybe_new_source_scope(
             if self.tcx.sess.opts.unstable_opts.maximal_hir_to_mir_coverage {
                 // Some consumers of rustc need to map MIR locations back to HIR nodes. Currently the
                 // the only part of rustc that tracks MIR -> HIR is the `SourceScopeLocalData::lint_root`
-                // field that tracks lint levels for MIR locations.  Normally the number of source scopes
+                // field that tracks lint levels for MIR locations. Normally the number of source scopes
                 // is limited to the set of nodes with lint annotations. The -Zmaximal-hir-to-mir-coverage
                 // flag changes this behavior to maximize the number of source scopes, increasing the
                 // granularity of the MIR->HIR mapping.
index 06523b0a1de84cb658fc3e410bb4d61837eeeec9..7f81aef1c73217d57f5013678b162a4bce27b82f 100644 (file)
@@ -770,6 +770,8 @@ pub(crate) struct PatternNotCovered<'s, 'tcx> {
     #[subdiagnostic]
     pub let_suggestion: Option<SuggestLet>,
     #[subdiagnostic]
+    pub misc_suggestion: Option<MiscPatternSuggestion>,
+    #[subdiagnostic]
     pub res_defined_here: Option<ResDefinedHere>,
 }
 
@@ -848,3 +850,16 @@ pub enum SuggestLet {
         count: usize,
     },
 }
+
+#[derive(Subdiagnostic)]
+pub enum MiscPatternSuggestion {
+    #[suggestion(
+        mir_build_suggest_attempted_int_lit,
+        code = "_",
+        applicability = "maybe-incorrect"
+    )]
+    AttemptedIntegerLiteral {
+        #[primary_span]
+        start_span: Span,
+    },
+}
index fb7ae6f1d242412e8e72197107f2214969a54e50..a428180a4fa8295bb5bac16f0030fd5de8c31406 100644 (file)
@@ -5,7 +5,6 @@
 #![feature(assert_matches)]
 #![feature(associated_type_bounds)]
 #![feature(box_patterns)]
-#![feature(control_flow_enum)]
 #![feature(if_let_guard)]
 #![feature(let_chains)]
 #![feature(min_specialization)]
index 8529c64cd5cca64213f3b1efedb38df29c0094b5..f67f24b43c4d77f680053a394e8885760a69aaf3 100644 (file)
@@ -60,7 +60,7 @@ impl<'mir, 'tcx> Search<'mir, 'tcx> {
     /// Returns `true` if `func` refers to the function we are searching in.
     fn is_recursive_call(&self, func: &Operand<'tcx>, args: &[Operand<'tcx>]) -> bool {
         let Search { tcx, body, trait_substs, .. } = *self;
-        // Resolving function type to a specific instance that is being called is expensive.  To
+        // Resolving function type to a specific instance that is being called is expensive. To
         // avoid the cost we check the number of arguments first, which is sufficient to reject
         // most of calls as non-recursive.
         if args.len() != body.arg_count {
@@ -118,7 +118,7 @@ fn node_examined(
             // A diverging InlineAsm is treated as non-recursing
             TerminatorKind::InlineAsm { destination, .. } => {
                 if destination.is_some() {
-                    ControlFlow::CONTINUE
+                    ControlFlow::Continue(())
                 } else {
                     ControlFlow::Break(NonRecursive)
                 }
@@ -132,7 +132,7 @@ fn node_examined(
             | TerminatorKind::FalseEdge { .. }
             | TerminatorKind::FalseUnwind { .. }
             | TerminatorKind::Goto { .. }
-            | TerminatorKind::SwitchInt { .. } => ControlFlow::CONTINUE,
+            | TerminatorKind::SwitchInt { .. } => ControlFlow::Continue(()),
         }
     }
 
@@ -145,7 +145,7 @@ fn node_settled(&mut self, bb: BasicBlock) -> ControlFlow<Self::BreakVal> {
             }
         }
 
-        ControlFlow::CONTINUE
+        ControlFlow::Continue(())
     }
 
     fn ignore_edge(&mut self, bb: BasicBlock, target: BasicBlock) -> bool {
index e13c0662ef85f17cb45edb3bd2f7dd26820e7256..34e637f594842687aaec93b1075bacb16eb8df0b 100644 (file)
@@ -6,8 +6,9 @@
 
 use crate::errors::*;
 
+use hir::{ExprKind, PatKind};
 use rustc_arena::TypedArena;
-use rustc_ast::Mutability;
+use rustc_ast::{LitKind, Mutability};
 use rustc_errors::{
     struct_span_err, Applicability, Diagnostic, DiagnosticBuilder, ErrorGuaranteed, MultiSpan,
 };
@@ -389,7 +390,7 @@ fn check_irrefutable(&self, pat: &'tcx Pat<'tcx>, origin: &str, sp: Option<Span>
             return;
         }
 
-        let (inform, interpreted_as_const, res_defined_here,let_suggestion) =
+        let (inform, interpreted_as_const, res_defined_here,let_suggestion, misc_suggestion) =
             if let hir::PatKind::Path(hir::QPath::Resolved(
                 None,
                 hir::Path {
@@ -413,6 +414,7 @@ fn check_irrefutable(&self, pat: &'tcx Pat<'tcx>, origin: &str, sp: Option<Span>
                         }
                     },
                     None,
+                    None,
                 )
             } else if let Some(span) = sp && self.tcx.sess.source_map().is_span_accessible(span) {
                 let mut bindings = vec![];
@@ -426,10 +428,19 @@ fn check_irrefutable(&self, pat: &'tcx Pat<'tcx>, origin: &str, sp: Option<Span>
                 let end_span = semi_span.shrink_to_lo();
                 let count = witnesses.len();
 
+                // If the pattern to match is an integer literal:
+                let int_suggestion = if
+                    let PatKind::Lit(expr) = &pat.kind
+                    && bindings.is_empty()
+                    && let ExprKind::Lit(Spanned { node: LitKind::Int(_, _), span }) = expr.kind {
+                    // Then give a suggestion; the user might've meant to create a binding instead.
+                    Some(MiscPatternSuggestion::AttemptedIntegerLiteral { start_span: span.shrink_to_lo() })
+                } else { None };
+
                 let let_suggestion = if bindings.is_empty() {SuggestLet::If{start_span, semi_span, count}} else{ SuggestLet::Else{end_span, count }};
-                (sp.map(|_|Inform), None, None, Some(let_suggestion))
+                (sp.map(|_|Inform), None, None, Some(let_suggestion), int_suggestion)
             } else{
-                (sp.map(|_|Inform), None, None,  None)
+                (sp.map(|_|Inform), None, None,  None, None)
             };
 
         let adt_defined_here = try {
@@ -453,6 +464,7 @@ fn check_irrefutable(&self, pat: &'tcx Pat<'tcx>, origin: &str, sp: Option<Span>
             _p: (),
             pattern_ty,
             let_suggestion,
+            misc_suggestion,
             res_defined_here,
             adt_defined_here,
         });
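
The `misc_suggestion` added above carries `MiscPatternSuggestion::AttemptedIntegerLiteral`, pointing at the start of an integer literal used as an irrefutable pattern with no bindings. Illustrative user code (not part of this diff) of the shape the new branch matches; it is still rejected, the change only sharpens the diagnostic:

    fn read_value() -> i32 {
        5
    }

    fn main() {
        let value = read_value();
        // An integer literal is a refutable pattern here, so this `let` is an
        // error; the new suggestion targets exactly this case, where the user
        // probably meant to introduce a binding such as `let x = value;`.
        let 5 = value;
    }
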
index 17b3c475f83c78a3d4711e65092859da5d5d147b..aba5429da435f84292cb698a9e62d7d6b53b61ba 100644 (file)
@@ -141,27 +141,22 @@ fn from_constant<'tcx>(
     ) -> Option<IntRange> {
         let ty = value.ty();
         if let Some((target_size, bias)) = Self::integral_size_and_signed_bias(tcx, ty) {
-            let val = (|| {
-                match value {
-                    mir::ConstantKind::Val(ConstValue::Scalar(scalar), _) => {
-                        // For this specific pattern we can skip a lot of effort and go
-                        // straight to the result, after doing a bit of checking. (We
-                        // could remove this branch and just fall through, which
-                        // is more general but much slower.)
-                        return scalar.to_bits_or_ptr_internal(target_size).unwrap().left();
-                    }
-                    mir::ConstantKind::Ty(c) => match c.kind() {
-                        ty::ConstKind::Value(_) => bug!(
-                            "encountered ConstValue in mir::ConstantKind::Ty, whereas this is expected to be in ConstantKind::Val"
-                        ),
-                        _ => {}
-                    },
-                    _ => {}
+            let val = if let mir::ConstantKind::Val(ConstValue::Scalar(scalar), _) = value {
+                // For this specific pattern we can skip a lot of effort and go
+                // straight to the result, after doing a bit of checking. (We
+                // could remove this branch and just fall through, which
+                // is more general but much slower.)
+                scalar.to_bits_or_ptr_internal(target_size).unwrap().left()?
+            } else {
+                if let mir::ConstantKind::Ty(c) = value
+                    && let ty::ConstKind::Value(_) = c.kind()
+                {
+                    bug!("encountered ConstValue in mir::ConstantKind::Ty, whereas this is expected to be in ConstantKind::Val");
                 }
 
                 // This is a more general form of the previous case.
-                value.try_eval_bits(tcx, param_env, ty)
-            })()?;
+                value.try_eval_bits(tcx, param_env, ty)?
+            };
             let val = val ^ bias;
             Some(IntRange { range: val..=val, bias })
         } else {
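
For context on the `val ^ bias` step above (a reading of the surrounding code, not something this hunk changes): for signed integer types the bias appears to be the sign bit, so the XOR maps signed values onto an unsigned range while preserving their order, letting `IntRange` store its endpoints uniformly as unsigned bits. A small self-contained check of that encoding for a hypothetical 8-bit case:

    fn main() {
        let bias: u8 = 1 << 7; // sign bit of an 8-bit signed type
        let encode = |x: i8| (x as u8) ^ bias;

        // Order is preserved after the XOR: -128 < -1 < 0 < 127.
        assert!(encode(-128) < encode(-1));
        assert!(encode(-1) < encode(0));
        assert!(encode(0) < encode(127));
        assert_eq!(encode(-128), u8::MIN);
        assert_eq!(encode(127), u8::MAX);
    }
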
index bc31ec42b8b6e3af8e6b3dc4f2c13e9b646a8d37..4b5324e203aa3725aad358ad587e2eb714b366b1 100644 (file)
@@ -750,7 +750,7 @@ fn visit_rvalue(&mut self, rvalue: &mir::Rvalue<'_>, location: Location) {
 
 /// Calls `f` for each mutable borrow or raw reference in the program.
 ///
-/// This DOES NOT call `f` for a shared borrow of a type with interior mutability.  That's okay for
+/// This DOES NOT call `f` for a shared borrow of a type with interior mutability. That's okay for
 /// initializedness, because we cannot move from an `UnsafeCell` (outside of `core::cell`), but
 /// other analyses will likely need to check for `!Freeze`.
 fn for_each_mut_borrow<'tcx>(
index 3d22035f0785ed57509c86c251355156d92d3665..7d2146214c6dc2128040b12ab248e32272d1b016 100644 (file)
@@ -120,7 +120,7 @@ fn run_pass(&self, tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) {
         // PART 3
         // Add retag after assignments where data "enters" this function: the RHS is behind a deref and the LHS is not.
         for block_data in basic_blocks {
-            // We want to insert statements as we iterate.  To this end, we
+            // We want to insert statements as we iterate. To this end, we
             // iterate backwards using indices.
             for i in (0..block_data.statements.len()).rev() {
                 let (retag_kind, place) = match block_data.statements[i].kind {
index 9f842c929dc2478034f6962b05a8c4b63ac05419..c54348404536ad10cf8e05e2e2f9cbd2a60b10ed 100644 (file)
@@ -341,11 +341,11 @@ fn mir_to_initial_sorted_coverage_spans(&self) -> Vec<CoverageSpan> {
                     if a.is_in_same_bcb(b) {
                         Some(Ordering::Equal)
                     } else {
-                        // Sort equal spans by dominator relationship, in reverse order (so
-                        // dominators always come after the dominated equal spans). When later
-                        // comparing two spans in order, the first will either dominate the second,
-                        // or they will have no dominator relationship.
-                        self.basic_coverage_blocks.dominators().rank_partial_cmp(b.bcb, a.bcb)
+                        // Sort equal spans by dominator relationship (so dominators always come
+                        // before the dominated equal spans). When later comparing two spans in
+                        // order, the first will either dominate the second, or they will have no
+                        // dominator relationship.
+                        self.basic_coverage_blocks.dominators().rank_partial_cmp(a.bcb, b.bcb)
                     }
                 } else {
                     // Sort hi() in reverse order so shorter spans are attempted after longer spans.
index c097af6161159a7fcec85b6b0bb69a3605fcb8e9..39c61a34afcbdab70fa189af8a82527e35993f7e 100644 (file)
@@ -460,6 +460,104 @@ fn replace_local<'tcx>(
     new_local
 }
 
+/// Transforms the `body` of the generator applying the following transforms:
+///
+/// - Eliminates all the `get_context` calls that async lowering created.
+/// - Replace all `Local` `ResumeTy` types with `&mut Context<'_>` (`context_mut_ref`).
+///
+/// The `Local`s that have their types replaced are:
+/// - The `resume` argument itself.
+/// - The argument to `get_context`.
+/// - The yielded value of a `yield`.
+///
+/// The `ResumeTy` hides a `&mut Context<'_>` behind an unsafe raw pointer, and the
+/// `get_context` function is being used to convert that back to a `&mut Context<'_>`.
+///
+/// Ideally the async lowering would not use the `ResumeTy`/`get_context` indirection,
+/// but rather directly use `&mut Context<'_>`; however, that would currently
+/// lead to higher-kinded lifetime errors.
+/// See <https://github.com/rust-lang/rust/issues/105501>.
+///
+/// The async lowering step and the type / lifetime inference / checking are
+/// still using the `ResumeTy` indirection for the time being, and that indirection
+/// is removed here. After this transform, the generator body only knows about `&mut Context<'_>`.
+fn transform_async_context<'tcx>(tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) {
+    let context_mut_ref = tcx.mk_task_context();
+
+    // replace the type of the `resume` argument
+    replace_resume_ty_local(tcx, body, Local::new(2), context_mut_ref);
+
+    let get_context_def_id = tcx.require_lang_item(LangItem::GetContext, None);
+
+    for bb in BasicBlock::new(0)..body.basic_blocks.next_index() {
+        let bb_data = &body[bb];
+        if bb_data.is_cleanup {
+            continue;
+        }
+
+        match &bb_data.terminator().kind {
+            TerminatorKind::Call { func, .. } => {
+                let func_ty = func.ty(body, tcx);
+                if let ty::FnDef(def_id, _) = *func_ty.kind() {
+                    if def_id == get_context_def_id {
+                        let local = eliminate_get_context_call(&mut body[bb]);
+                        replace_resume_ty_local(tcx, body, local, context_mut_ref);
+                    }
+                } else {
+                    continue;
+                }
+            }
+            TerminatorKind::Yield { resume_arg, .. } => {
+                replace_resume_ty_local(tcx, body, resume_arg.local, context_mut_ref);
+            }
+            _ => {}
+        }
+    }
+}
+
+fn eliminate_get_context_call<'tcx>(bb_data: &mut BasicBlockData<'tcx>) -> Local {
+    let terminator = bb_data.terminator.take().unwrap();
+    if let TerminatorKind::Call { mut args, destination, target, .. } = terminator.kind {
+        let arg = args.pop().unwrap();
+        let local = arg.place().unwrap().local;
+
+        let arg = Rvalue::Use(arg);
+        let assign = Statement {
+            source_info: terminator.source_info,
+            kind: StatementKind::Assign(Box::new((destination, arg))),
+        };
+        bb_data.statements.push(assign);
+        bb_data.terminator = Some(Terminator {
+            source_info: terminator.source_info,
+            kind: TerminatorKind::Goto { target: target.unwrap() },
+        });
+        local
+    } else {
+        bug!();
+    }
+}
+
+#[cfg_attr(not(debug_assertions), allow(unused))]
+fn replace_resume_ty_local<'tcx>(
+    tcx: TyCtxt<'tcx>,
+    body: &mut Body<'tcx>,
+    local: Local,
+    context_mut_ref: Ty<'tcx>,
+) {
+    let local_ty = std::mem::replace(&mut body.local_decls[local].ty, context_mut_ref);
+    // We have to replace the `ResumeTy` that is used for type and borrow checking
+    // with `&mut Context<'_>` in MIR.
+    #[cfg(debug_assertions)]
+    {
+        if let ty::Adt(resume_ty_adt, _) = local_ty.kind() {
+            let expected_adt = tcx.adt_def(tcx.require_lang_item(LangItem::ResumeTy, None));
+            assert_eq!(*resume_ty_adt, expected_adt);
+        } else {
+            panic!("expected `ResumeTy`, found `{:?}`", local_ty);
+        };
+    }
+}
+
 struct LivenessInfo {
     /// Which locals are live across any suspension point.
     saved_locals: GeneratorSavedLocals,
@@ -1283,13 +1381,13 @@ fn run_pass(&self, tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) {
             }
         };
 
-        let is_async_kind = body.generator_kind().unwrap() != GeneratorKind::Gen;
+        let is_async_kind = matches!(body.generator_kind(), Some(GeneratorKind::Async(_)));
         let (state_adt_ref, state_substs) = if is_async_kind {
             // Compute Poll<return_ty>
-            let state_did = tcx.require_lang_item(LangItem::Poll, None);
-            let state_adt_ref = tcx.adt_def(state_did);
-            let state_substs = tcx.intern_substs(&[body.return_ty().into()]);
-            (state_adt_ref, state_substs)
+            let poll_did = tcx.require_lang_item(LangItem::Poll, None);
+            let poll_adt_ref = tcx.adt_def(poll_did);
+            let poll_substs = tcx.intern_substs(&[body.return_ty().into()]);
+            (poll_adt_ref, poll_substs)
         } else {
             // Compute GeneratorState<yield_ty, return_ty>
             let state_did = tcx.require_lang_item(LangItem::GeneratorState, None);
@@ -1303,13 +1401,19 @@ fn run_pass(&self, tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) {
         // RETURN_PLACE then is a fresh unused local with type ret_ty.
         let new_ret_local = replace_local(RETURN_PLACE, ret_ty, body, tcx);
 
+        // Replace all occurrences of `ResumeTy` with `&mut Context<'_>` within async bodies.
+        if is_async_kind {
+            transform_async_context(tcx, body);
+        }
+
         // We also replace the resume argument and insert an `Assign`.
         // This is needed because the resume argument `_2` might be live across a `yield`, in which
         // case there is no `Assign` to it that the transform can turn into a store to the generator
         // state. After the yield the slot in the generator state would then be uninitialized.
         let resume_local = Local::new(2);
-        let new_resume_local =
-            replace_local(resume_local, body.local_decls[resume_local].ty, body, tcx);
+        let resume_ty =
+            if is_async_kind { tcx.mk_task_context() } else { body.local_decls[resume_local].ty };
+        let new_resume_local = replace_local(resume_local, resume_ty, body, tcx);
 
         // When first entering the generator, move the resume argument into its new local.
         let source_info = SourceInfo::outermost(body.span);
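
`transform_async_context` operates on compiler-internal MIR, so it cannot be demonstrated outside rustc; as a rough surface-level analogy (the `ResumeArg` type and the free `get_context` function below are illustrative stand-ins, not the real lang items), the indirection being removed amounts to wrapping a `&mut Context<'_>` in an opaque handle only to unwrap it again at every use. A compile-only sketch:

    #![allow(dead_code)]

    use std::task::Context;

    // Stand-in for the compiler-internal `ResumeTy`: an opaque wrapper around
    // the context (the real type hides it behind an unsafe raw pointer).
    struct ResumeArg<'a, 'b>(&'a mut Context<'b>);

    // Stand-in for the `get_context` lang item: recover the reference.
    fn get_context<'a, 'b>(arg: ResumeArg<'a, 'b>) -> &'a mut Context<'b> {
        arg.0
    }

    // After the transform the wrapper is gone: the generator body takes
    // `&mut Context<'_>` directly and the `get_context` calls are eliminated.
    fn poll_body(cx: &mut Context<'_>) {
        let _waker = cx.waker();
    }

    fn main() {}
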
index 4219e6280ebbca481424a6b7ee3bbef6ed25d41c..28c9080d38d7d38720b83e5f27e48ac8916fc803 100644 (file)
@@ -542,6 +542,21 @@ fn dest_needs_borrow(place: Place<'_>) -> bool {
                     destination
                 };
 
+                // Always create a local to hold the destination, as `RETURN_PLACE` may appear
+                // where a full `Place` is not allowed.
+                let (remap_destination, destination_local) = if let Some(d) = dest.as_local() {
+                    (false, d)
+                } else {
+                    (
+                        true,
+                        self.new_call_temp(
+                            caller_body,
+                            &callsite,
+                            destination.ty(caller_body, self.tcx).ty,
+                        ),
+                    )
+                };
+
                 // Copy the arguments if needed.
                 let args: Vec<_> = self.make_call_args(args, &callsite, caller_body, &callee_body);
 
@@ -560,7 +575,7 @@ fn dest_needs_borrow(place: Place<'_>) -> bool {
                     new_locals: Local::new(caller_body.local_decls.len())..,
                     new_scopes: SourceScope::new(caller_body.source_scopes.len())..,
                     new_blocks: BasicBlock::new(caller_body.basic_blocks.len())..,
-                    destination: dest,
+                    destination: destination_local,
                     callsite_scope: caller_body.source_scopes[callsite.source_info.scope].clone(),
                     callsite,
                     cleanup_block: cleanup,
@@ -591,6 +606,16 @@ fn dest_needs_borrow(place: Place<'_>) -> bool {
                     // To avoid repeated O(n) insert, push any new statements to the end and rotate
                     // the slice once.
                     let mut n = 0;
+                    if remap_destination {
+                        caller_body[block].statements.push(Statement {
+                            source_info: callsite.source_info,
+                            kind: StatementKind::Assign(Box::new((
+                                dest,
+                                Rvalue::Use(Operand::Move(destination_local.into())),
+                            ))),
+                        });
+                        n += 1;
+                    }
                     for local in callee_body.vars_and_temps_iter().rev() {
                         if !callee_body.local_decls[local].internal
                             && integrator.always_live_locals.contains(local)
@@ -959,7 +984,7 @@ struct Integrator<'a, 'tcx> {
     new_locals: RangeFrom<Local>,
     new_scopes: RangeFrom<SourceScope>,
     new_blocks: RangeFrom<BasicBlock>,
-    destination: Place<'tcx>,
+    destination: Local,
     callsite_scope: SourceScopeData<'tcx>,
     callsite: &'a CallSite<'tcx>,
     cleanup_block: Option<BasicBlock>,
@@ -972,7 +997,7 @@ struct Integrator<'a, 'tcx> {
 impl Integrator<'_, '_> {
     fn map_local(&self, local: Local) -> Local {
         let new = if local == RETURN_PLACE {
-            self.destination.local
+            self.destination
         } else {
             let idx = local.index() - 1;
             if idx < self.args.len() {
@@ -1053,27 +1078,6 @@ fn visit_span(&mut self, span: &mut Span) {
         *span = span.fresh_expansion(self.expn_data);
     }
 
-    fn visit_place(&mut self, place: &mut Place<'tcx>, context: PlaceContext, location: Location) {
-        for elem in place.projection {
-            // FIXME: Make sure that return place is not used in an indexing projection, since it
-            // won't be rebased as it is supposed to be.
-            assert_ne!(ProjectionElem::Index(RETURN_PLACE), elem);
-        }
-
-        // If this is the `RETURN_PLACE`, we need to rebase any projections onto it.
-        let dest_proj_len = self.destination.projection.len();
-        if place.local == RETURN_PLACE && dest_proj_len > 0 {
-            let mut projs = Vec::with_capacity(dest_proj_len + place.projection.len());
-            projs.extend(self.destination.projection);
-            projs.extend(place.projection);
-
-            place.projection = self.tcx.intern_place_elems(&*projs);
-        }
-        // Handles integrating any locals that occur in the base
-        // or projections
-        self.super_place(place, context, location)
-    }
-
     fn visit_basic_block_data(&mut self, block: BasicBlock, data: &mut BasicBlockData<'tcx>) {
         self.in_cleanup_block = data.is_cleanup;
         self.super_basic_block_data(block, data);
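
The removed `visit_place` hook and the new destination local above address the same problem from two sides: after inlining, the callee's `RETURN_PLACE` may need to denote a projected place in the caller (for example a field), but some MIR positions only accept a bare local. The pass therefore returns into a fresh temporary and copies it into the real destination afterwards. A loose source-level analogy of that rewrite (illustrative only; the pass works on MIR, not surface Rust):

    struct Wrapper {
        field: i32,
    }

    fn callee() -> i32 {
        42
    }

    fn caller(w: &mut Wrapper) {
        // Conceptually "before": the callee would have to return straight into
        // the projection `w.field`, which is not a bare local.
        // Shape "after": return into a temporary local, then assign it.
        let tmp = callee();
        w.field = tmp;
    }

    fn main() {
        let mut w = Wrapper { field: 0 };
        caller(&mut w);
        assert_eq!(w.field, 42);
    }
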
index 16b8a901f36512140a87c147910aeabcec5eadfe..20b7fdcfe6d4d44a5b98de025359fb1ed99b2974 100644 (file)
@@ -487,7 +487,6 @@ fn run_analysis_to_runtime_passes<'tcx>(tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>
 fn run_analysis_cleanup_passes<'tcx>(tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) {
     let passes: &[&dyn MirPass<'tcx>] = &[
         &cleanup_post_borrowck::CleanupPostBorrowck,
-        &simplify_branches::SimplifyConstCondition::new("initial"),
         &remove_noop_landing_pads::RemoveNoopLandingPads,
         &simplify::SimplifyCfg::new("early-opt"),
         &deref_separator::Derefer,
index 3a2bf051516554bc6f6acdf220f42a604a5de656..42124f5a4808d0a3ff074da042b6db40791be5e1 100644 (file)
@@ -215,7 +215,7 @@ struct ReplacementVisitor<'tcx, 'll> {
     replacements: ReplacementMap<'tcx>,
     /// This is used to check that we are not leaving references to replaced locals behind.
     all_dead_locals: BitSet<Local>,
-    /// Pre-computed list of all "new" locals for each "old" local.  This is used to expand storage
+    /// Pre-computed list of all "new" locals for each "old" local. This is used to expand storage
     /// and deinit statement and debuginfo.
     fragments: IndexVec<Local, Vec<(&'tcx [PlaceElem<'tcx>], Local)>>,
 }
index b616ed35d99d7f11c983d0ecebb02f2223e4b08d..f88155e4fc7928a89b6f7cd8faa9d90cf655d391 100644 (file)
@@ -1,5 +1,4 @@
 #![feature(array_windows)]
-#![feature(control_flow_enum)]
 #![recursion_limit = "256"]
 #![allow(rustc::potential_query_instability)]
 #![deny(rustc::untranslatable_diagnostic)]
index c8fc69eb856abdc5b12b202122e5fbde68851f75..cf13d4584a12429d5f05e07bf28119221e8c46b5 100644 (file)
@@ -300,20 +300,20 @@ impl<'a, 'tcx> TypeVisitor<'tcx> for MarkUsedGenericParams<'a, 'tcx> {
     #[instrument(level = "debug", skip(self))]
     fn visit_const(&mut self, c: Const<'tcx>) -> ControlFlow<Self::BreakTy> {
         if !c.has_non_region_param() {
-            return ControlFlow::CONTINUE;
+            return ControlFlow::Continue(());
         }
 
         match c.kind() {
             ty::ConstKind::Param(param) => {
                 debug!(?param);
                 self.unused_parameters.mark_used(param.index);
-                ControlFlow::CONTINUE
+                ControlFlow::Continue(())
             }
             ty::ConstKind::Unevaluated(ty::UnevaluatedConst { def, substs })
                 if matches!(self.tcx.def_kind(def.did), DefKind::AnonConst) =>
             {
                 self.visit_child_body(def.did, substs);
-                ControlFlow::CONTINUE
+                ControlFlow::Continue(())
             }
             _ => c.super_visit_with(self),
         }
@@ -322,7 +322,7 @@ fn visit_const(&mut self, c: Const<'tcx>) -> ControlFlow<Self::BreakTy> {
     #[instrument(level = "debug", skip(self))]
     fn visit_ty(&mut self, ty: Ty<'tcx>) -> ControlFlow<Self::BreakTy> {
         if !ty.has_non_region_param() {
-            return ControlFlow::CONTINUE;
+            return ControlFlow::Continue(());
         }
 
         match *ty.kind() {
@@ -330,18 +330,18 @@ fn visit_ty(&mut self, ty: Ty<'tcx>) -> ControlFlow<Self::BreakTy> {
                 debug!(?def_id);
                 // Avoid cycle errors with generators.
                 if def_id == self.def_id {
-                    return ControlFlow::CONTINUE;
+                    return ControlFlow::Continue(());
                 }
 
                 // Consider any generic parameters used by any closures/generators as used in the
                 // parent.
                 self.visit_child_body(def_id, substs);
-                ControlFlow::CONTINUE
+                ControlFlow::Continue(())
             }
             ty::Param(param) => {
                 debug!(?param);
                 self.unused_parameters.mark_used(param.index);
-                ControlFlow::CONTINUE
+                ControlFlow::Continue(())
             }
             _ => ty.super_visit_with(self),
         }
index 8761c23625b210063b1cdfbddf08e7f712cfc395..9fe8d9836ba60ca8ecfb16eea7d0383617931c0e 100644 (file)
@@ -52,8 +52,15 @@ pub(crate) fn parse_token_trees<'a>(
     }
 
     let cursor = Cursor::new(src);
-    let string_reader =
-        StringReader { sess, start_pos, pos: start_pos, src, cursor, override_span };
+    let string_reader = StringReader {
+        sess,
+        start_pos,
+        pos: start_pos,
+        src,
+        cursor,
+        override_span,
+        nbsp_is_whitespace: false,
+    };
     tokentrees::TokenTreesReader::parse_all_token_trees(string_reader)
 }
 
@@ -68,6 +75,10 @@ struct StringReader<'a> {
     /// Cursor for getting lexer tokens.
     cursor: Cursor<'a>,
     override_span: Option<Span>,
+    /// When an "unknown start of token: \u{a0}" has already been emitted earlier
+    /// in this file, it's safe to treat further occurrences of the non-breaking
+    /// space character as whitespace.
+    nbsp_is_whitespace: bool,
 }
 
 impl<'a> StringReader<'a> {
@@ -239,6 +250,16 @@ fn next_token(&mut self) -> (Token, bool) {
                     }
                     let mut it = self.str_from_to_end(start).chars();
                     let c = it.next().unwrap();
+                    if c == '\u{00a0}' {
+                        // If an error has already been reported on non-breaking
+                        // space characters earlier in the file, treat all
+                        // subsequent occurrences as whitespace.
+                        if self.nbsp_is_whitespace {
+                            preceded_by_whitespace = true;
+                            continue;
+                        }
+                        self.nbsp_is_whitespace = true;
+                    }
                     let repeats = it.take_while(|c1| *c1 == c).count();
                     let mut err =
                         self.struct_err_span_char(start, self.pos + Pos::from_usize(repeats * c.len_utf8()), "unknown start of token", c);
@@ -486,7 +507,7 @@ fn src_index(&self, pos: BytePos) -> usize {
 
     /// Slice of the source text from `start` up to but excluding `self.pos`,
     /// meaning the slice does not include the character `self.ch`.
-    fn str_from(&self, start: BytePos) -> &str {
+    fn str_from(&self, start: BytePos) -> &'a str {
         self.str_from_to(start, self.pos)
     }
 
@@ -497,12 +518,12 @@ fn symbol_from_to(&self, start: BytePos, end: BytePos) -> Symbol {
     }
 
     /// Slice of the source text spanning from `start` up to but excluding `end`.
-    fn str_from_to(&self, start: BytePos, end: BytePos) -> &str {
+    fn str_from_to(&self, start: BytePos, end: BytePos) -> &'a str {
         &self.src[self.src_index(start)..self.src_index(end)]
     }
 
     /// Slice of the source text spanning from `start` until the end
-    fn str_from_to_end(&self, start: BytePos) -> &str {
+    fn str_from_to_end(&self, start: BytePos) -> &'a str {
         &self.src[self.src_index(start)..]
     }
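
The new `nbsp_is_whitespace` flag means "unknown start of token: \u{a0}" is emitted once per file; any later no-break space is silently treated as whitespace (see the `next_token` hunk above). Illustrative input that exercises that path, with the U+00A0 characters written as escapes so they survive copy and paste; feeding the unescaped form to rustc is what triggers the diagnostic:

    fn main() {
        // A line of Rust source containing two no-break spaces. Lexing the raw
        // form reports "unknown start of token" only for the first one after
        // this change; the second is skipped as whitespace.
        let pasted_source = "let x\u{a0}=\u{a0}1;";
        println!("{pasted_source}");
    }
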
 
index 65479b341d7a8f5c2687a97b3e4ecd768962ea34..34d003ccfa7b4fbde64ca4b7cf6ba25f0341cc30 100644 (file)
 use rustc_span::{symbol::kw, BytePos, Pos, Span};
 
 #[rustfmt::skip] // for line breaks
-pub(crate) const UNICODE_ARRAY: &[(char, &str, char)] = &[
-    ('
', "Line Separator", ' '),
-    ('
', "Paragraph Separator", ' '),
-    (' ', "Ogham Space mark", ' '),
-    (' ', "En Quad", ' '),
-    (' ', "Em Quad", ' '),
-    (' ', "En Space", ' '),
-    (' ', "Em Space", ' '),
-    (' ', "Three-Per-Em Space", ' '),
-    (' ', "Four-Per-Em Space", ' '),
-    (' ', "Six-Per-Em Space", ' '),
-    (' ', "Punctuation Space", ' '),
-    (' ', "Thin Space", ' '),
-    (' ', "Hair Space", ' '),
-    (' ', "Medium Mathematical Space", ' '),
-    (' ', "No-Break Space", ' '),
-    (' ', "Figure Space", ' '),
-    (' ', "Narrow No-Break Space", ' '),
-    (' ', "Ideographic Space", ' '),
-
-    ('ߺ', "Nko Lajanyalan", '_'),
-    ('﹍', "Dashed Low Line", '_'),
-    ('﹎', "Centreline Low Line", '_'),
-    ('﹏', "Wavy Low Line", '_'),
-    ('_', "Fullwidth Low Line", '_'),
-
-    ('‐', "Hyphen", '-'),
-    ('‑', "Non-Breaking Hyphen", '-'),
-    ('‒', "Figure Dash", '-'),
-    ('–', "En Dash", '-'),
-    ('—', "Em Dash", '-'),
-    ('﹘', "Small Em Dash", '-'),
-    ('۔', "Arabic Full Stop", '-'),
-    ('⁃', "Hyphen Bullet", '-'),
-    ('˗', "Modifier Letter Minus Sign", '-'),
-    ('−', "Minus Sign", '-'),
-    ('➖', "Heavy Minus Sign", '-'),
-    ('Ⲻ', "Coptic Letter Dialect-P Ni", '-'),
-    ('ー', "Katakana-Hiragana Prolonged Sound Mark", '-'),
-    ('-', "Fullwidth Hyphen-Minus", '-'),
-    ('―', "Horizontal Bar", '-'),
-    ('─', "Box Drawings Light Horizontal", '-'),
-    ('━', "Box Drawings Heavy Horizontal", '-'),
-    ('㇐', "CJK Stroke H", '-'),
-    ('ꟷ', "Latin Epigraphic Letter Sideways I", '-'),
-    ('ᅳ', "Hangul Jungseong Eu", '-'),
-    ('ㅡ', "Hangul Letter Eu", '-'),
-    ('一', "CJK Unified Ideograph-4E00", '-'),
-    ('⼀', "Kangxi Radical One", '-'),
-
-    ('؍', "Arabic Date Separator", ','),
-    ('٫', "Arabic Decimal Separator", ','),
-    ('‚', "Single Low-9 Quotation Mark", ','),
-    ('¸', "Cedilla", ','),
-    ('ꓹ', "Lisu Letter Tone Na Po", ','),
-    (',', "Fullwidth Comma", ','),
-
-    (';', "Greek Question Mark", ';'),
-    (';', "Fullwidth Semicolon", ';'),
-    ('︔', "Presentation Form For Vertical Semicolon", ';'),
-
-    ('ः', "Devanagari Sign Visarga", ':'),
-    ('ઃ', "Gujarati Sign Visarga", ':'),
-    (':', "Fullwidth Colon", ':'),
-    ('։', "Armenian Full Stop", ':'),
-    ('܃', "Syriac Supralinear Colon", ':'),
-    ('܄', "Syriac Sublinear Colon", ':'),
-    ('᛬', "Runic Multiple Punctuation", ':'),
-    ('︰', "Presentation Form For Vertical Two Dot Leader", ':'),
-    ('᠃', "Mongolian Full Stop", ':'),
-    ('᠉', "Mongolian Manchu Full Stop", ':'),
-    ('⁚', "Two Dot Punctuation", ':'),
-    ('׃', "Hebrew Punctuation Sof Pasuq", ':'),
-    ('˸', "Modifier Letter Raised Colon", ':'),
-    ('꞉', "Modifier Letter Colon", ':'),
-    ('∶', "Ratio", ':'),
-    ('ː', "Modifier Letter Triangular Colon", ':'),
-    ('ꓽ', "Lisu Letter Tone Mya Jeu", ':'),
-    ('︓', "Presentation Form For Vertical Colon", ':'),
-
-    ('!', "Fullwidth Exclamation Mark", '!'),
-    ('ǃ', "Latin Letter Retroflex Click", '!'),
-    ('ⵑ', "Tifinagh Letter Tuareg Yang", '!'),
-    ('︕', "Presentation Form For Vertical Exclamation Mark", '!'),
-
-    ('ʔ', "Latin Letter Glottal Stop", '?'),
-    ('Ɂ', "Latin Capital Letter Glottal Stop", '?'),
-    ('ॽ', "Devanagari Letter Glottal Stop", '?'),
-    ('Ꭾ', "Cherokee Letter He", '?'),
-    ('ꛫ', "Bamum Letter Ntuu", '?'),
-    ('?', "Fullwidth Question Mark", '?'),
-    ('︖', "Presentation Form For Vertical Question Mark", '?'),
-
-    ('𝅭', "Musical Symbol Combining Augmentation Dot", '.'),
-    ('․', "One Dot Leader", '.'),
-    ('܁', "Syriac Supralinear Full Stop", '.'),
-    ('܂', "Syriac Sublinear Full Stop", '.'),
-    ('꘎', "Vai Full Stop", '.'),
-    ('𐩐', "Kharoshthi Punctuation Dot", '.'),
-    ('٠', "Arabic-Indic Digit Zero", '.'),
-    ('۰', "Extended Arabic-Indic Digit Zero", '.'),
-    ('ꓸ', "Lisu Letter Tone Mya Ti", '.'),
-    ('·', "Middle Dot", '.'),
-    ('・', "Katakana Middle Dot", '.'),
-    ('・', "Halfwidth Katakana Middle Dot", '.'),
-    ('᛫', "Runic Single Punctuation", '.'),
-    ('·', "Greek Ano Teleia", '.'),
-    ('⸱', "Word Separator Middle Dot", '.'),
-    ('𐄁', "Aegean Word Separator Dot", '.'),
-    ('•', "Bullet", '.'),
-    ('‧', "Hyphenation Point", '.'),
-    ('∙', "Bullet Operator", '.'),
-    ('⋅', "Dot Operator", '.'),
-    ('ꞏ', "Latin Letter Sinological Dot", '.'),
-    ('ᐧ', "Canadian Syllabics Final Middle Dot", '.'),
-    ('ᐧ', "Canadian Syllabics Final Middle Dot", '.'),
-    ('.', "Fullwidth Full Stop", '.'),
-    ('。', "Ideographic Full Stop", '.'),
-    ('︒', "Presentation Form For Vertical Ideographic Full Stop", '.'),
-
-    ('՝', "Armenian Comma", '\''),
-    (''', "Fullwidth Apostrophe", '\''),
-    ('‘', "Left Single Quotation Mark", '\''),
-    ('’', "Right Single Quotation Mark", '\''),
-    ('‛', "Single High-Reversed-9 Quotation Mark", '\''),
-    ('′', "Prime", '\''),
-    ('‵', "Reversed Prime", '\''),
-    ('՚', "Armenian Apostrophe", '\''),
-    ('׳', "Hebrew Punctuation Geresh", '\''),
-    ('`', "Grave Accent", '\''),
-    ('`', "Greek Varia", '\''),
-    ('`', "Fullwidth Grave Accent", '\''),
-    ('´', "Acute Accent", '\''),
-    ('΄', "Greek Tonos", '\''),
-    ('´', "Greek Oxia", '\''),
-    ('᾽', "Greek Koronis", '\''),
-    ('᾿', "Greek Psili", '\''),
-    ('῾', "Greek Dasia", '\''),
-    ('ʹ', "Modifier Letter Prime", '\''),
-    ('ʹ', "Greek Numeral Sign", '\''),
-    ('ˈ', "Modifier Letter Vertical Line", '\''),
-    ('ˊ', "Modifier Letter Acute Accent", '\''),
-    ('ˋ', "Modifier Letter Grave Accent", '\''),
-    ('˴', "Modifier Letter Middle Grave Accent", '\''),
-    ('ʻ', "Modifier Letter Turned Comma", '\''),
-    ('ʽ', "Modifier Letter Reversed Comma", '\''),
-    ('ʼ', "Modifier Letter Apostrophe", '\''),
-    ('ʾ', "Modifier Letter Right Half Ring", '\''),
-    ('ꞌ', "Latin Small Letter Saltillo", '\''),
-    ('י', "Hebrew Letter Yod", '\''),
-    ('ߴ', "Nko High Tone Apostrophe", '\''),
-    ('ߵ', "Nko Low Tone Apostrophe", '\''),
-    ('ᑊ', "Canadian Syllabics West-Cree P", '\''),
-    ('ᛌ', "Runic Letter Short-Twig-Sol S", '\''),
-    ('𖽑', "Miao Sign Aspiration", '\''),
-    ('𖽒', "Miao Sign Reformed Voicing", '\''),
-
-    ('᳓', "Vedic Sign Nihshvasa", '"'),
-    ('"', "Fullwidth Quotation Mark", '"'),
-    ('“', "Left Double Quotation Mark", '"'),
-    ('”', "Right Double Quotation Mark", '"'),
-    ('‟', "Double High-Reversed-9 Quotation Mark", '"'),
-    ('″', "Double Prime", '"'),
-    ('‶', "Reversed Double Prime", '"'),
-    ('〃', "Ditto Mark", '"'),
-    ('״', "Hebrew Punctuation Gershayim", '"'),
-    ('˝', "Double Acute Accent", '"'),
-    ('ʺ', "Modifier Letter Double Prime", '"'),
-    ('˶', "Modifier Letter Middle Double Acute Accent", '"'),
-    ('˵', "Modifier Letter Middle Double Grave Accent", '"'),
-    ('ˮ', "Modifier Letter Double Apostrophe", '"'),
-    ('ײ', "Hebrew Ligature Yiddish Double Yod", '"'),
-    ('❞', "Heavy Double Comma Quotation Mark Ornament", '"'),
-    ('❝', "Heavy Double Turned Comma Quotation Mark Ornament", '"'),
-
-    ('(', "Fullwidth Left Parenthesis", '('),
-    ('❨', "Medium Left Parenthesis Ornament", '('),
-    ('﴾', "Ornate Left Parenthesis", '('),
-
-    (')', "Fullwidth Right Parenthesis", ')'),
-    ('❩', "Medium Right Parenthesis Ornament", ')'),
-    ('﴿', "Ornate Right Parenthesis", ')'),
-
-    ('[', "Fullwidth Left Square Bracket", '['),
-    ('❲', "Light Left Tortoise Shell Bracket Ornament", '['),
-    ('「', "Left Corner Bracket", '['),
-    ('『', "Left White Corner Bracket", '['),
-    ('【', "Left Black Lenticular Bracket", '['),
-    ('〔', "Left Tortoise Shell Bracket", '['),
-    ('〖', "Left White Lenticular Bracket", '['),
-    ('〘', "Left White Tortoise Shell Bracket", '['),
-    ('〚', "Left White Square Bracket", '['),
-
-    (']', "Fullwidth Right Square Bracket", ']'),
-    ('❳', "Light Right Tortoise Shell Bracket Ornament", ']'),
-    ('」', "Right Corner Bracket", ']'),
-    ('』', "Right White Corner Bracket", ']'),
-    ('】', "Right Black Lenticular Bracket", ']'),
-    ('〕', "Right Tortoise Shell Bracket", ']'),
-    ('〗', "Right White Lenticular Bracket", ']'),
-    ('〙', "Right White Tortoise Shell Bracket", ']'),
-    ('〛', "Right White Square Bracket", ']'),
-
-    ('❴', "Medium Left Curly Bracket Ornament", '{'),
-    ('𝄔', "Musical Symbol Brace", '{'),
-    ('{', "Fullwidth Left Curly Bracket", '{'),
-
-    ('❵', "Medium Right Curly Bracket Ornament", '}'),
-    ('}', "Fullwidth Right Curly Bracket", '}'),
-
-    ('⁎', "Low Asterisk", '*'),
-    ('٭', "Arabic Five Pointed Star", '*'),
-    ('∗', "Asterisk Operator", '*'),
-    ('𐌟', "Old Italic Letter Ess", '*'),
-    ('*', "Fullwidth Asterisk", '*'),
-
-    ('᜵', "Philippine Single Punctuation", '/'),
-    ('⁁', "Caret Insertion Point", '/'),
-    ('∕', "Division Slash", '/'),
-    ('⁄', "Fraction Slash", '/'),
-    ('╱', "Box Drawings Light Diagonal Upper Right To Lower Left", '/'),
-    ('⟋', "Mathematical Rising Diagonal", '/'),
-    ('⧸', "Big Solidus", '/'),
-    ('𝈺', "Greek Instrumental Notation Symbol-47", '/'),
-    ('㇓', "CJK Stroke Sp", '/'),
-    ('〳', "Vertical Kana Repeat Mark Upper Half", '/'),
-    ('Ⳇ', "Coptic Capital Letter Old Coptic Esh", '/'),
-    ('ノ', "Katakana Letter No", '/'),
-    ('丿', "CJK Unified Ideograph-4E3F", '/'),
-    ('⼃', "Kangxi Radical Slash", '/'),
-    ('/', "Fullwidth Solidus", '/'),
-
-    ('\', "Fullwidth Reverse Solidus", '\\'),
-    ('﹨', "Small Reverse Solidus", '\\'),
-    ('∖', "Set Minus", '\\'),
-    ('⟍', "Mathematical Falling Diagonal", '\\'),
-    ('⧵', "Reverse Solidus Operator", '\\'),
-    ('⧹', "Big Reverse Solidus", '\\'),
-    ('⧹', "Greek Vocal Notation Symbol-16", '\\'),
-    ('⧹', "Greek Instrumental Symbol-48", '\\'),
-    ('㇔', "CJK Stroke D", '\\'),
-    ('丶', "CJK Unified Ideograph-4E36", '\\'),
-    ('⼂', "Kangxi Radical Dot", '\\'),
-    ('、', "Ideographic Comma", '\\'),
-    ('ヽ', "Katakana Iteration Mark", '\\'),
-
-    ('ꝸ', "Latin Small Letter Um", '&'),
-    ('&', "Fullwidth Ampersand", '&'),
-
-    ('᛭', "Runic Cross Punctuation", '+'),
-    ('➕', "Heavy Plus Sign", '+'),
-    ('𐊛', "Lycian Letter H", '+'),
-    ('﬩', "Hebrew Letter Alternative Plus Sign", '+'),
-    ('+', "Fullwidth Plus Sign", '+'),
-
-    ('‹', "Single Left-Pointing Angle Quotation Mark", '<'),
-    ('❮', "Heavy Left-Pointing Angle Quotation Mark Ornament", '<'),
-    ('˂', "Modifier Letter Left Arrowhead", '<'),
-    ('𝈶', "Greek Instrumental Symbol-40", '<'),
-    ('ᐸ', "Canadian Syllabics Pa", '<'),
-    ('ᚲ', "Runic Letter Kauna", '<'),
-    ('❬', "Medium Left-Pointing Angle Bracket Ornament", '<'),
-    ('⟨', "Mathematical Left Angle Bracket", '<'),
-    ('〈', "Left-Pointing Angle Bracket", '<'),
-    ('〈', "Left Angle Bracket", '<'),
-    ('㇛', "CJK Stroke Pd", '<'),
-    ('く', "Hiragana Letter Ku", '<'),
-    ('𡿨', "CJK Unified Ideograph-21FE8", '<'),
-    ('《', "Left Double Angle Bracket", '<'),
-    ('<', "Fullwidth Less-Than Sign", '<'),
-
-    ('᐀', "Canadian Syllabics Hyphen", '='),
-    ('⹀', "Double Hyphen", '='),
-    ('゠', "Katakana-Hiragana Double Hyphen", '='),
-    ('꓿', "Lisu Punctuation Full Stop", '='),
-    ('=', "Fullwidth Equals Sign", '='),
-
-    ('›', "Single Right-Pointing Angle Quotation Mark", '>'),
-    ('❯', "Heavy Right-Pointing Angle Quotation Mark Ornament", '>'),
-    ('˃', "Modifier Letter Right Arrowhead", '>'),
-    ('𝈷', "Greek Instrumental Symbol-42", '>'),
-    ('ᐳ', "Canadian Syllabics Po", '>'),
-    ('𖼿', "Miao Letter Archaic Zza", '>'),
-    ('❭', "Medium Right-Pointing Angle Bracket Ornament", '>'),
-    ('⟩', "Mathematical Right Angle Bracket", '>'),
-    ('〉', "Right-Pointing Angle Bracket", '>'),
-    ('〉', "Right Angle Bracket", '>'),
-    ('》', "Right Double Angle Bracket", '>'),
-    ('>', "Fullwidth Greater-Than Sign", '>'),
+pub(crate) const UNICODE_ARRAY: &[(char, &str, &str)] = &[
+    ('
', "Line Separator", " "),
+    ('
', "Paragraph Separator", " "),
+    (' ', "Ogham Space mark", " "),
+    (' ', "En Quad", " "),
+    (' ', "Em Quad", " "),
+    (' ', "En Space", " "),
+    (' ', "Em Space", " "),
+    (' ', "Three-Per-Em Space", " "),
+    (' ', "Four-Per-Em Space", " "),
+    (' ', "Six-Per-Em Space", " "),
+    (' ', "Punctuation Space", " "),
+    (' ', "Thin Space", " "),
+    (' ', "Hair Space", " "),
+    (' ', "Medium Mathematical Space", " "),
+    (' ', "No-Break Space", " "),
+    (' ', "Figure Space", " "),
+    (' ', "Narrow No-Break Space", " "),
+    (' ', "Ideographic Space", " "),
+
+    ('ߺ', "Nko Lajanyalan", "_"),
+    ('﹍', "Dashed Low Line", "_"),
+    ('﹎', "Centreline Low Line", "_"),
+    ('﹏', "Wavy Low Line", "_"),
+    ('_', "Fullwidth Low Line", "_"),
+
+    ('‐', "Hyphen", "-"),
+    ('‑', "Non-Breaking Hyphen", "-"),
+    ('‒', "Figure Dash", "-"),
+    ('–', "En Dash", "-"),
+    ('—', "Em Dash", "-"),
+    ('﹘', "Small Em Dash", "-"),
+    ('۔', "Arabic Full Stop", "-"),
+    ('⁃', "Hyphen Bullet", "-"),
+    ('˗', "Modifier Letter Minus Sign", "-"),
+    ('−', "Minus Sign", "-"),
+    ('➖', "Heavy Minus Sign", "-"),
+    ('Ⲻ', "Coptic Letter Dialect-P Ni", "-"),
+    ('ー', "Katakana-Hiragana Prolonged Sound Mark", "-"),
+    ('-', "Fullwidth Hyphen-Minus", "-"),
+    ('―', "Horizontal Bar", "-"),
+    ('─', "Box Drawings Light Horizontal", "-"),
+    ('━', "Box Drawings Heavy Horizontal", "-"),
+    ('㇐', "CJK Stroke H", "-"),
+    ('ꟷ', "Latin Epigraphic Letter Sideways I", "-"),
+    ('ᅳ', "Hangul Jungseong Eu", "-"),
+    ('ㅡ', "Hangul Letter Eu", "-"),
+    ('一', "CJK Unified Ideograph-4E00", "-"),
+    ('⼀', "Kangxi Radical One", "-"),
+
+    ('؍', "Arabic Date Separator", ","),
+    ('٫', "Arabic Decimal Separator", ","),
+    ('‚', "Single Low-9 Quotation Mark", ","),
+    ('¸', "Cedilla", ","),
+    ('ꓹ', "Lisu Letter Tone Na Po", ","),
+    (',', "Fullwidth Comma", ","),
+
+    (';', "Greek Question Mark", ";"),
+    (';', "Fullwidth Semicolon", ";"),
+    ('︔', "Presentation Form For Vertical Semicolon", ";"),
+
+    ('ः', "Devanagari Sign Visarga", ":"),
+    ('ઃ', "Gujarati Sign Visarga", ":"),
+    (':', "Fullwidth Colon", ":"),
+    ('։', "Armenian Full Stop", ":"),
+    ('܃', "Syriac Supralinear Colon", ":"),
+    ('܄', "Syriac Sublinear Colon", ":"),
+    ('᛬', "Runic Multiple Punctuation", ":"),
+    ('︰', "Presentation Form For Vertical Two Dot Leader", ":"),
+    ('᠃', "Mongolian Full Stop", ":"),
+    ('᠉', "Mongolian Manchu Full Stop", ":"),
+    ('⁚', "Two Dot Punctuation", ":"),
+    ('׃', "Hebrew Punctuation Sof Pasuq", ":"),
+    ('˸', "Modifier Letter Raised Colon", ":"),
+    ('꞉', "Modifier Letter Colon", ":"),
+    ('∶', "Ratio", ":"),
+    ('ː', "Modifier Letter Triangular Colon", ":"),
+    ('ꓽ', "Lisu Letter Tone Mya Jeu", ":"),
+    ('︓', "Presentation Form For Vertical Colon", ":"),
+
+    ('!', "Fullwidth Exclamation Mark", "!"),
+    ('ǃ', "Latin Letter Retroflex Click", "!"),
+    ('ⵑ', "Tifinagh Letter Tuareg Yang", "!"),
+    ('︕', "Presentation Form For Vertical Exclamation Mark", "!"),
+
+    ('ʔ', "Latin Letter Glottal Stop", "?"),
+    ('Ɂ', "Latin Capital Letter Glottal Stop", "?"),
+    ('ॽ', "Devanagari Letter Glottal Stop", "?"),
+    ('Ꭾ', "Cherokee Letter He", "?"),
+    ('ꛫ', "Bamum Letter Ntuu", "?"),
+    ('?', "Fullwidth Question Mark", "?"),
+    ('︖', "Presentation Form For Vertical Question Mark", "?"),
+
+    ('𝅭', "Musical Symbol Combining Augmentation Dot", "."),
+    ('․', "One Dot Leader", "."),
+    ('܁', "Syriac Supralinear Full Stop", "."),
+    ('܂', "Syriac Sublinear Full Stop", "."),
+    ('꘎', "Vai Full Stop", "."),
+    ('𐩐', "Kharoshthi Punctuation Dot", "."),
+    ('٠', "Arabic-Indic Digit Zero", "."),
+    ('۰', "Extended Arabic-Indic Digit Zero", "."),
+    ('ꓸ', "Lisu Letter Tone Mya Ti", "."),
+    ('·', "Middle Dot", "."),
+    ('・', "Katakana Middle Dot", "."),
+    ('・', "Halfwidth Katakana Middle Dot", "."),
+    ('᛫', "Runic Single Punctuation", "."),
+    ('·', "Greek Ano Teleia", "."),
+    ('⸱', "Word Separator Middle Dot", "."),
+    ('𐄁', "Aegean Word Separator Dot", "."),
+    ('•', "Bullet", "."),
+    ('‧', "Hyphenation Point", "."),
+    ('∙', "Bullet Operator", "."),
+    ('⋅', "Dot Operator", "."),
+    ('ꞏ', "Latin Letter Sinological Dot", "."),
+    ('ᐧ', "Canadian Syllabics Final Middle Dot", "."),
+    ('ᐧ', "Canadian Syllabics Final Middle Dot", "."),
+    ('.', "Fullwidth Full Stop", "."),
+    ('。', "Ideographic Full Stop", "."),
+    ('︒', "Presentation Form For Vertical Ideographic Full Stop", "."),
+
+    ('՝', "Armenian Comma", "\'"),
+    (''', "Fullwidth Apostrophe", "\'"),
+    ('‘', "Left Single Quotation Mark", "\'"),
+    ('’', "Right Single Quotation Mark", "\'"),
+    ('‛', "Single High-Reversed-9 Quotation Mark", "\'"),
+    ('′', "Prime", "\'"),
+    ('‵', "Reversed Prime", "\'"),
+    ('՚', "Armenian Apostrophe", "\'"),
+    ('׳', "Hebrew Punctuation Geresh", "\'"),
+    ('`', "Grave Accent", "\'"),
+    ('`', "Greek Varia", "\'"),
+    ('`', "Fullwidth Grave Accent", "\'"),
+    ('´', "Acute Accent", "\'"),
+    ('΄', "Greek Tonos", "\'"),
+    ('´', "Greek Oxia", "\'"),
+    ('᾽', "Greek Koronis", "\'"),
+    ('᾿', "Greek Psili", "\'"),
+    ('῾', "Greek Dasia", "\'"),
+    ('ʹ', "Modifier Letter Prime", "\'"),
+    ('ʹ', "Greek Numeral Sign", "\'"),
+    ('ˈ', "Modifier Letter Vertical Line", "\'"),
+    ('ˊ', "Modifier Letter Acute Accent", "\'"),
+    ('ˋ', "Modifier Letter Grave Accent", "\'"),
+    ('˴', "Modifier Letter Middle Grave Accent", "\'"),
+    ('ʻ', "Modifier Letter Turned Comma", "\'"),
+    ('ʽ', "Modifier Letter Reversed Comma", "\'"),
+    ('ʼ', "Modifier Letter Apostrophe", "\'"),
+    ('ʾ', "Modifier Letter Right Half Ring", "\'"),
+    ('ꞌ', "Latin Small Letter Saltillo", "\'"),
+    ('י', "Hebrew Letter Yod", "\'"),
+    ('ߴ', "Nko High Tone Apostrophe", "\'"),
+    ('ߵ', "Nko Low Tone Apostrophe", "\'"),
+    ('ᑊ', "Canadian Syllabics West-Cree P", "\'"),
+    ('ᛌ', "Runic Letter Short-Twig-Sol S", "\'"),
+    ('𖽑', "Miao Sign Aspiration", "\'"),
+    ('𖽒', "Miao Sign Reformed Voicing", "\'"),
+
+    ('᳓', "Vedic Sign Nihshvasa", "\""),
+    ('"', "Fullwidth Quotation Mark", "\""),
+    ('“', "Left Double Quotation Mark", "\""),
+    ('”', "Right Double Quotation Mark", "\""),
+    ('‟', "Double High-Reversed-9 Quotation Mark", "\""),
+    ('″', "Double Prime", "\""),
+    ('‶', "Reversed Double Prime", "\""),
+    ('〃', "Ditto Mark", "\""),
+    ('״', "Hebrew Punctuation Gershayim", "\""),
+    ('˝', "Double Acute Accent", "\""),
+    ('ʺ', "Modifier Letter Double Prime", "\""),
+    ('˶', "Modifier Letter Middle Double Acute Accent", "\""),
+    ('˵', "Modifier Letter Middle Double Grave Accent", "\""),
+    ('ˮ', "Modifier Letter Double Apostrophe", "\""),
+    ('ײ', "Hebrew Ligature Yiddish Double Yod", "\""),
+    ('❞', "Heavy Double Comma Quotation Mark Ornament", "\""),
+    ('❝', "Heavy Double Turned Comma Quotation Mark Ornament", "\""),
+
+    ('(', "Fullwidth Left Parenthesis", "("),
+    ('❨', "Medium Left Parenthesis Ornament", "("),
+    ('﴾', "Ornate Left Parenthesis", "("),
+
+    (')', "Fullwidth Right Parenthesis", ")"),
+    ('❩', "Medium Right Parenthesis Ornament", ")"),
+    ('﴿', "Ornate Right Parenthesis", ")"),
+
+    ('[', "Fullwidth Left Square Bracket", "["),
+    ('❲', "Light Left Tortoise Shell Bracket Ornament", "["),
+    ('「', "Left Corner Bracket", "["),
+    ('『', "Left White Corner Bracket", "["),
+    ('【', "Left Black Lenticular Bracket", "["),
+    ('〔', "Left Tortoise Shell Bracket", "["),
+    ('〖', "Left White Lenticular Bracket", "["),
+    ('〘', "Left White Tortoise Shell Bracket", "["),
+    ('〚', "Left White Square Bracket", "["),
+
+    (']', "Fullwidth Right Square Bracket", "]"),
+    ('❳', "Light Right Tortoise Shell Bracket Ornament", "]"),
+    ('」', "Right Corner Bracket", "]"),
+    ('』', "Right White Corner Bracket", "]"),
+    ('】', "Right Black Lenticular Bracket", "]"),
+    ('〕', "Right Tortoise Shell Bracket", "]"),
+    ('〗', "Right White Lenticular Bracket", "]"),
+    ('〙', "Right White Tortoise Shell Bracket", "]"),
+    ('〛', "Right White Square Bracket", "]"),
+
+    ('❴', "Medium Left Curly Bracket Ornament", "{"),
+    ('𝄔', "Musical Symbol Brace", "{"),
+    ('{', "Fullwidth Left Curly Bracket", "{"),
+
+    ('❵', "Medium Right Curly Bracket Ornament", "}"),
+    ('}', "Fullwidth Right Curly Bracket", "}"),
+
+    ('⁎', "Low Asterisk", "*"),
+    ('٭', "Arabic Five Pointed Star", "*"),
+    ('∗', "Asterisk Operator", "*"),
+    ('𐌟', "Old Italic Letter Ess", "*"),
+    ('*', "Fullwidth Asterisk", "*"),
+
+    ('᜵', "Philippine Single Punctuation", "/"),
+    ('⁁', "Caret Insertion Point", "/"),
+    ('∕', "Division Slash", "/"),
+    ('⁄', "Fraction Slash", "/"),
+    ('╱', "Box Drawings Light Diagonal Upper Right To Lower Left", "/"),
+    ('⟋', "Mathematical Rising Diagonal", "/"),
+    ('⧸', "Big Solidus", "/"),
+    ('𝈺', "Greek Instrumental Notation Symbol-47", "/"),
+    ('㇓', "CJK Stroke Sp", "/"),
+    ('〳', "Vertical Kana Repeat Mark Upper Half", "/"),
+    ('Ⳇ', "Coptic Capital Letter Old Coptic Esh", "/"),
+    ('ノ', "Katakana Letter No", "/"),
+    ('丿', "CJK Unified Ideograph-4E3F", "/"),
+    ('⼃', "Kangxi Radical Slash", "/"),
+    ('/', "Fullwidth Solidus", "/"),
+
+    ('\', "Fullwidth Reverse Solidus", "\\"),
+    ('﹨', "Small Reverse Solidus", "\\"),
+    ('∖', "Set Minus", "\\"),
+    ('⟍', "Mathematical Falling Diagonal", "\\"),
+    ('⧵', "Reverse Solidus Operator", "\\"),
+    ('⧹', "Big Reverse Solidus", "\\"),
+    ('⧹', "Greek Vocal Notation Symbol-16", "\\"),
+    ('⧹', "Greek Instrumental Symbol-48", "\\"),
+    ('㇔', "CJK Stroke D", "\\"),
+    ('丶', "CJK Unified Ideograph-4E36", "\\"),
+    ('⼂', "Kangxi Radical Dot", "\\"),
+    ('、', "Ideographic Comma", "\\"),
+    ('ヽ', "Katakana Iteration Mark", "\\"),
+
+    ('ꝸ', "Latin Small Letter Um", "&"),
+    ('&', "Fullwidth Ampersand", "&"),
+
+    ('᛭', "Runic Cross Punctuation", "+"),
+    ('➕', "Heavy Plus Sign", "+"),
+    ('𐊛', "Lycian Letter H", "+"),
+    ('﬩', "Hebrew Letter Alternative Plus Sign", "+"),
+    ('+', "Fullwidth Plus Sign", "+"),
+
+    ('‹', "Single Left-Pointing Angle Quotation Mark", "<"),
+    ('❮', "Heavy Left-Pointing Angle Quotation Mark Ornament", "<"),
+    ('˂', "Modifier Letter Left Arrowhead", "<"),
+    ('𝈶', "Greek Instrumental Symbol-40", "<"),
+    ('ᐸ', "Canadian Syllabics Pa", "<"),
+    ('ᚲ', "Runic Letter Kauna", "<"),
+    ('❬', "Medium Left-Pointing Angle Bracket Ornament", "<"),
+    ('⟨', "Mathematical Left Angle Bracket", "<"),
+    ('〈', "Left-Pointing Angle Bracket", "<"),
+    ('〈', "Left Angle Bracket", "<"),
+    ('㇛', "CJK Stroke Pd", "<"),
+    ('く', "Hiragana Letter Ku", "<"),
+    ('𡿨', "CJK Unified Ideograph-21FE8", "<"),
+    ('《', "Left Double Angle Bracket", "<"),
+    ('<', "Fullwidth Less-Than Sign", "<"),
+
+    ('᐀', "Canadian Syllabics Hyphen", "="),
+    ('⹀', "Double Hyphen", "="),
+    ('゠', "Katakana-Hiragana Double Hyphen", "="),
+    ('꓿', "Lisu Punctuation Full Stop", "="),
+    ('=', "Fullwidth Equals Sign", "="),
+
+    ('›', "Single Right-Pointing Angle Quotation Mark", ">"),
+    ('❯', "Heavy Right-Pointing Angle Quotation Mark Ornament", ">"),
+    ('˃', "Modifier Letter Right Arrowhead", ">"),
+    ('𝈷', "Greek Instrumental Symbol-42", ">"),
+    ('ᐳ', "Canadian Syllabics Po", ">"),
+    ('𖼿', "Miao Letter Archaic Zza", ">"),
+    ('❭', "Medium Right-Pointing Angle Bracket Ornament", ">"),
+    ('⟩', "Mathematical Right Angle Bracket", ">"),
+    ('〉', "Right-Pointing Angle Bracket", ">"),
+    ('〉', "Right Angle Bracket", ">"),
+    ('》', "Right Double Angle Bracket", ">"),
+    ('>', "Fullwidth Greater-Than Sign", ">"),
+    ('⩵', "Two Consecutive Equals Signs", "==")
 ];
 
 // FIXME: the lexer could be used to turn unicode homoglyphs into their ASCII version, instead of
 // keeping the substitution token in this table. Ideally, this should be inside `rustc_lexer`.
 // However, we should first remove compound tokens like `<<` from `rustc_lexer`, and then add
 // fancier error recovery to it, as there will be less overall work to do this way.
-const ASCII_ARRAY: &[(char, &str, Option<token::TokenKind>)] = &[
-    (' ', "Space", None),
-    ('_', "Underscore", Some(token::Ident(kw::Underscore, false))),
-    ('-', "Minus/Hyphen", Some(token::BinOp(token::Minus))),
-    (',', "Comma", Some(token::Comma)),
-    (';', "Semicolon", Some(token::Semi)),
-    (':', "Colon", Some(token::Colon)),
-    ('!', "Exclamation Mark", Some(token::Not)),
-    ('?', "Question Mark", Some(token::Question)),
-    ('.', "Period", Some(token::Dot)),
-    ('(', "Left Parenthesis", Some(token::OpenDelim(Delimiter::Parenthesis))),
-    (')', "Right Parenthesis", Some(token::CloseDelim(Delimiter::Parenthesis))),
-    ('[', "Left Square Bracket", Some(token::OpenDelim(Delimiter::Bracket))),
-    (']', "Right Square Bracket", Some(token::CloseDelim(Delimiter::Bracket))),
-    ('{', "Left Curly Brace", Some(token::OpenDelim(Delimiter::Brace))),
-    ('}', "Right Curly Brace", Some(token::CloseDelim(Delimiter::Brace))),
-    ('*', "Asterisk", Some(token::BinOp(token::Star))),
-    ('/', "Slash", Some(token::BinOp(token::Slash))),
-    ('\\', "Backslash", None),
-    ('&', "Ampersand", Some(token::BinOp(token::And))),
-    ('+', "Plus Sign", Some(token::BinOp(token::Plus))),
-    ('<', "Less-Than Sign", Some(token::Lt)),
-    ('=', "Equals Sign", Some(token::Eq)),
-    ('>', "Greater-Than Sign", Some(token::Gt)),
+const ASCII_ARRAY: &[(&str, &str, Option<token::TokenKind>)] = &[
+    (" ", "Space", None),
+    ("_", "Underscore", Some(token::Ident(kw::Underscore, false))),
+    ("-", "Minus/Hyphen", Some(token::BinOp(token::Minus))),
+    (",", "Comma", Some(token::Comma)),
+    (";", "Semicolon", Some(token::Semi)),
+    (":", "Colon", Some(token::Colon)),
+    ("!", "Exclamation Mark", Some(token::Not)),
+    ("?", "Question Mark", Some(token::Question)),
+    (".", "Period", Some(token::Dot)),
+    ("(", "Left Parenthesis", Some(token::OpenDelim(Delimiter::Parenthesis))),
+    (")", "Right Parenthesis", Some(token::CloseDelim(Delimiter::Parenthesis))),
+    ("[", "Left Square Bracket", Some(token::OpenDelim(Delimiter::Bracket))),
+    ("]", "Right Square Bracket", Some(token::CloseDelim(Delimiter::Bracket))),
+    ("{", "Left Curly Brace", Some(token::OpenDelim(Delimiter::Brace))),
+    ("}", "Right Curly Brace", Some(token::CloseDelim(Delimiter::Brace))),
+    ("*", "Asterisk", Some(token::BinOp(token::Star))),
+    ("/", "Slash", Some(token::BinOp(token::Slash))),
+    ("\\", "Backslash", None),
+    ("&", "Ampersand", Some(token::BinOp(token::And))),
+    ("+", "Plus Sign", Some(token::BinOp(token::Plus))),
+    ("<", "Less-Than Sign", Some(token::Lt)),
+    ("=", "Equals Sign", Some(token::Eq)),
+    ("==", "Double Equals Sign", Some(token::EqEq)),
+    (">", "Greater-Than Sign", Some(token::Gt)),
     // FIXME: Literals are already lexed by this point, so we can't recover gracefully just by
     // spitting the correct token out.
-    ('\'', "Single Quote", None),
-    ('"', "Quotation Mark", None),
+    ("\'", "Single Quote", None),
+    ("\"", "Quotation Mark", None),
 ];
 
 pub(super) fn check_for_substitution<'a>(
@@ -339,11 +341,11 @@ pub(super) fn check_for_substitution<'a>(
     err: &mut Diagnostic,
     count: usize,
 ) -> Option<token::TokenKind> {
-    let &(_u_char, u_name, ascii_char) = UNICODE_ARRAY.iter().find(|&&(c, _, _)| c == ch)?;
+    let &(_, u_name, ascii_str) = UNICODE_ARRAY.iter().find(|&&(c, _, _)| c == ch)?;
 
     let span = Span::with_root_ctxt(pos, pos + Pos::from_usize(ch.len_utf8() * count));
 
-    let Some((_ascii_char, ascii_name, token)) = ASCII_ARRAY.iter().find(|&&(c, _, _)| c == ascii_char) else {
+    let Some((_, ascii_name, token)) = ASCII_ARRAY.iter().find(|&&(s, _, _)| s == ascii_str) else {
         let msg = format!("substitution character not found for '{}'", ch);
         reader.sess.span_diagnostic.span_bug_no_panic(span, &msg);
         return None;
@@ -354,7 +356,7 @@ pub(super) fn check_for_substitution<'a>(
         let msg = format!(
             "Unicode characters '“' (Left Double Quotation Mark) and \
              '”' (Right Double Quotation Mark) look like '{}' ({}), but are not",
-            ascii_char, ascii_name
+            ascii_str, ascii_name
         );
         err.span_suggestion(
             Span::with_root_ctxt(
@@ -368,12 +370,12 @@ pub(super) fn check_for_substitution<'a>(
     } else {
         let msg = format!(
             "Unicode character '{}' ({}) looks like '{}' ({}), but it is not",
-            ch, u_name, ascii_char, ascii_name
+            ch, u_name, ascii_str, ascii_name
         );
         err.span_suggestion(
             span,
             &msg,
-            ascii_char.to_string().repeat(count),
+            ascii_str.to_string().repeat(count),
             Applicability::MaybeIncorrect,
         );
     }
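
Changing the third column of `UNICODE_ARRAY` (and the first column of `ASCII_ARRAY`) from `char` to `&str` is what allows one confusable character to map to a multi-character ASCII sequence, which the new '⩵' -> "==" entry relies on. Illustrative user code that would hit this path (the wording of the emitted note follows the format strings above):

    fn main() {
        let a = 1;
        // Written with U+2A75 "Two Consecutive Equals Signs" instead of `==`,
        // i.e. `if a ⩵ 1 { ... }`, the lexer now suggests replacing the
        // confusable character with the two-character token `==`:
        if a == 1 {
            println!("one");
        }
    }
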
index 645f8633941057ecd984751f340c768e6a783374..bf93a89f065557efc5535cc75ad9ad1233047d8b 100644 (file)
@@ -1353,9 +1353,6 @@ fn parse_bottom_expr(&mut self) -> PResult<'a, P<Expr>> {
                 err.span_label(sp, "while parsing this `loop` expression");
                 err
             })
-        } else if self.eat_keyword(kw::Continue) {
-            let kind = ExprKind::Continue(self.eat_label());
-            Ok(self.mk_expr(lo.to(self.prev_token.span), kind))
         } else if self.eat_keyword(kw::Match) {
             let match_sp = self.prev_token.span;
             self.parse_match_expr().map_err(|mut err| {
@@ -1379,6 +1376,8 @@ fn parse_bottom_expr(&mut self) -> PResult<'a, P<Expr>> {
             self.parse_try_block(lo)
         } else if self.eat_keyword(kw::Return) {
             self.parse_return_expr()
+        } else if self.eat_keyword(kw::Continue) {
+            self.parse_continue_expr(lo)
         } else if self.eat_keyword(kw::Break) {
             self.parse_break_expr()
         } else if self.eat_keyword(kw::Yield) {
@@ -1475,9 +1474,8 @@ fn parse_array_or_repeat_expr(&mut self, close_delim: Delimiter) -> PResult<'a,
             } else if self.eat(&token::Comma) {
                 // Vector with two or more elements.
                 let sep = SeqSep::trailing_allowed(token::Comma);
-                let (remaining_exprs, _) = self.parse_seq_to_end(close, sep, |p| p.parse_expr())?;
-                let mut exprs = vec![first_expr];
-                exprs.extend(remaining_exprs);
+                let (mut exprs, _) = self.parse_seq_to_end(close, sep, |p| p.parse_expr())?;
+                exprs.insert(0, first_expr);
                 ExprKind::Array(exprs)
             } else {
                 // Vector with one element
@@ -1716,10 +1714,10 @@ fn parse_yeet_expr(&mut self) -> PResult<'a, P<Expr>> {
     fn parse_break_expr(&mut self) -> PResult<'a, P<Expr>> {
         let lo = self.prev_token.span;
         let mut label = self.eat_label();
-        let kind = if label.is_some() && self.token == token::Colon {
+        let kind = if self.token == token::Colon && let Some(label) = label.take() {
             // The value expression can be a labeled loop, see issue #86948, e.g.:
             // `loop { break 'label: loop { break 'label 42; }; }`
-            let lexpr = self.parse_labeled_expr(label.take().unwrap(), true)?;
+            let lexpr = self.parse_labeled_expr(label, true)?;
             self.sess.emit_err(LabeledLoopInBreak {
                 span: lexpr.span,
                 sub: WrapExpressionInParentheses {
@@ -1731,8 +1729,8 @@ fn parse_break_expr(&mut self) -> PResult<'a, P<Expr>> {
         } else if self.token != token::OpenDelim(Delimiter::Brace)
             || !self.restrictions.contains(Restrictions::NO_STRUCT_LITERAL)
         {
-            let expr = self.parse_expr_opt()?;
-            if let Some(expr) = &expr {
+            let mut expr = self.parse_expr_opt()?;
+            if let Some(expr) = &mut expr {
                 if label.is_some()
                     && matches!(
                         expr.kind,
@@ -1750,7 +1748,19 @@ fn parse_break_expr(&mut self) -> PResult<'a, P<Expr>> {
                         BuiltinLintDiagnostics::BreakWithLabelAndLoop(expr.span),
                     );
                 }
+
+                // Recover `break label aaaaa`
+                if self.may_recover()
+                    && let ExprKind::Path(None, p) = &expr.kind
+                    && let [segment] = &*p.segments
+                    && let &ast::PathSegment { ident, args: None, .. } = segment
+                    && let Some(next) = self.parse_expr_opt()?
+                {
+                    label = Some(self.recover_ident_into_label(ident));
+                    *expr = next;
+                }
             }
+
             expr
         } else {
             None
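For context (not part of the patch), a minimal sketch of what the `break label value` recovery above targets. The label name `'outer` is illustrative; the recovery rewrites the tick-less form and suggests the labeled one, which compiles:

    fn main() {
        // Writing `break outer 1;` (no tick) used to produce an opaque parse error;
        // with the recovery above the parser treats `outer` as the label `'outer`,
        // emits "expected a label, found an identifier", and suggests adding the tick.
        // The suggested form:
        let n = 'outer: loop {
            break 'outer 1;
        };
        assert_eq!(n, 1);
    }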
@@ -1759,6 +1769,23 @@ fn parse_break_expr(&mut self) -> PResult<'a, P<Expr>> {
         self.maybe_recover_from_bad_qpath(expr)
     }
 
+    /// Parse `"continue" label?`.
+    fn parse_continue_expr(&mut self, lo: Span) -> PResult<'a, P<Expr>> {
+        let mut label = self.eat_label();
+
+        // Recover `continue label` -> `continue 'label`
+        if self.may_recover()
+            && label.is_none()
+            && let Some((ident, _)) = self.token.ident()
+        {
+            self.bump();
+            label = Some(self.recover_ident_into_label(ident));
+        }
+
+        let kind = ExprKind::Continue(label);
+        Ok(self.mk_expr(lo.to(self.prev_token.span), kind))
+    }
+
     /// Parse `"yield" expr?`.
     fn parse_yield_expr(&mut self) -> PResult<'a, P<Expr>> {
         let lo = self.prev_token.span;
@@ -3047,6 +3074,25 @@ fn recover_struct_field_dots(&mut self, close_delim: Delimiter) -> bool {
         false
     }
 
+    /// Converts an ident into 'label and emits an "expected a label, found an identifier" error.
+    fn recover_ident_into_label(&mut self, ident: Ident) -> Label {
+        // Convert `label` -> `'label`,
+        // so that nameres doesn't complain about a nonexistent label
+        let label = format!("'{}", ident.name);
+        let ident = Ident { name: Symbol::intern(&label), span: ident.span };
+
+        self.struct_span_err(ident.span, "expected a label, found an identifier")
+            .span_suggestion(
+                ident.span,
+                "labels start with a tick",
+                label,
+                Applicability::MachineApplicable,
+            )
+            .emit();
+
+        Label { ident }
+    }
+
     /// Parses `ident (COLON expr)?`.
     fn parse_expr_field(&mut self) -> PResult<'a, ExprField> {
         let attrs = self.parse_outer_attributes()?;
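Similarly, a hedged illustration of the `parse_continue_expr` recovery and the `recover_ident_into_label` helper added above: `continue outer;` is now recovered as `continue 'outer` with a machine-applicable "labels start with a tick" suggestion. The suggested form compiles:

    fn main() {
        let mut n = 0;
        // `continue outer;` (no tick) is the input the recovery rewrites; the fix
        // it suggests is the labeled form below.
        'outer: loop {
            n += 1;
            if n < 3 {
                continue 'outer;
            }
            break;
        }
        assert_eq!(n, 3);
    }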
index 2fd2a4e5154f3a98e0bd014881158a45191a7d7d..ffb23b50a160de4101fb021faff9b6af54166b4c 100644 (file)
@@ -542,9 +542,9 @@ pub fn expect(&mut self, t: &TokenKind) -> PResult<'a, bool /* recovered */> {
         }
     }
 
-    /// Expect next token to be edible or inedible token.  If edible,
+    /// Expect next token to be edible or inedible token. If edible,
     /// then consume it; if inedible, then return without consuming
-    /// anything.  Signal a fatal error if next token is unexpected.
+    /// anything. Signal a fatal error if next token is unexpected.
     pub fn expect_one_of(
         &mut self,
         edible: &[TokenKind],
index 0b057f2f577fe73e85489bf75063576cdf3c5d72..e73a17ced7deb2598f52a013189ee3392e379547 100644 (file)
@@ -469,7 +469,7 @@ fn recover_dotdotdot_rest_pat(&mut self, lo: Span) -> PatKind {
     /// Try to recover the more general form `intersect ::= $pat_lhs @ $pat_rhs`.
     ///
     /// Allowed binding patterns generated by `binding ::= ref? mut? $ident @ $pat_rhs`
-    /// should already have been parsed by now  at this point,
+    /// should already have been parsed at this point,
     /// if the next token is `@` then we can try to parse the more general form.
     ///
     /// Consult `parse_pat_ident` for the `binding` grammar.
index a6f702e5428694aa4077eeae5363c905674c030e..1766b0293de52a33c2eceb3254ecfe0b44e7f0af 100644 (file)
@@ -727,11 +727,13 @@ fn parse_generic_bounds_common(
         let mut bounds = Vec::new();
         let mut negative_bounds = Vec::new();
 
+        // In addition to looping while we find generic bounds, we continue even if we
+        // find a keyword. This is necessary for error recovery on, for example,
+        // `impl fn()`. The only keyword that can go after generic bounds is `where`,
+        // so stop when we see it.
+        // We also continue if we find types (not traits), again for error recovery.
         while self.can_begin_bound()
-            // Continue even if we find a keyword.
-            // This is necessary for error recover on, for example, `impl fn()`.
-            //
-            // The only keyword that can go after generic bounds is `where`, so stop if it's it.
+            || self.token.can_begin_type()
             || (self.token.is_reserved_ident() && !self.token.is_keyword(kw::Where))
         {
             if self.token.is_keyword(kw::Dyn) {
@@ -938,6 +940,36 @@ fn parse_generic_ty_bound(
             && self.look_ahead(1, |tok| tok.kind == TokenKind::OpenDelim(Delimiter::Parenthesis))
             && let Some(path) = self.recover_path_from_fn()
         {
+            path
+        } else if !self.token.is_path_start() && self.token.can_begin_type() {
+            let ty = self.parse_ty_no_plus()?;
+            // Instead of finding a path (a trait), we found a type.
+            let mut err = self.struct_span_err(ty.span, "expected a trait, found type");
+
+            // If we can recover, try to extract a path from the type. Note
+            // that we do not use the try operator when parsing the type because
+            // if it fails then we get a parser error which we don't want (we're trying
+            // to recover from errors, not make more).
+            let path = if self.may_recover()
+                && matches!(ty.kind, TyKind::Ptr(..) | TyKind::Ref(..))
+                && let TyKind::Path(_, path) = &ty.peel_refs().kind {
+                // Just get the indirection part of the type.
+                let span = ty.span.until(path.span);
+
+                err.span_suggestion_verbose(
+                    span,
+                    "consider removing the indirection",
+                    "",
+                    Applicability::MaybeIncorrect,
+                );
+
+                path.clone()
+            } else {
+                return Err(err);
+            };
+
+            err.emit();
+
             path
         } else {
             self.parse_path(PathStyle::Type)?
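A hedged example of the bound recovery added above (identifiers are illustrative): a reference or pointer type written where a trait is expected, such as `impl &Display`, now gets "expected a trait, found type" plus a "consider removing the indirection" suggestion. The suggested fix is the ordinary bound:

    use std::fmt::Display;

    // `fn describe(x: impl &Display)` is the erroneous form the parser now
    // recovers from; removing the indirection, as suggested, gives:
    fn describe(x: impl Display) -> String {
        format!("value: {x}")
    }

    fn main() {
        assert_eq!(describe(42), "value: 42");
    }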
index 1eb227503f24236971e95f3f978f19054b7d9796..7b016cadac320bcb9079fce5db9696b304377cbe 100644 (file)
@@ -20,7 +20,6 @@
 pub use Piece::*;
 pub use Position::*;
 
-use rustc_lexer::unescape;
 use std::iter;
 use std::str;
 use std::string;
@@ -314,11 +313,12 @@ pub fn new(
         append_newline: bool,
         mode: ParseMode,
     ) -> Parser<'a> {
-        let input_string_kind = find_width_map_from_snippet(s, snippet, style);
+        let input_string_kind = find_width_map_from_snippet(snippet, style);
         let (width_map, is_literal) = match input_string_kind {
             InputStringKind::Literal { width_mappings } => (width_mappings, true),
             InputStringKind::NotALiteral => (Vec::new(), false),
         };
+
         Parser {
             mode,
             input: s,
@@ -856,7 +856,6 @@ fn suggest_positional_arg_instead_of_captured_arg(&mut self, arg: Argument<'a>)
 /// written code (code snippet) and the `InternedString` that gets processed in the `Parser`
 /// in order to properly synthesise the intra-string `Span`s for error diagnostics.
 fn find_width_map_from_snippet(
-    input: &str,
     snippet: Option<string::String>,
     str_style: Option<usize>,
 ) -> InputStringKind {
@@ -869,27 +868,8 @@ fn find_width_map_from_snippet(
         return InputStringKind::Literal { width_mappings: Vec::new() };
     }
 
-    // Strip quotes.
     let snippet = &snippet[1..snippet.len() - 1];
 
-    // Macros like `println` add a newline at the end. That technically doens't make them "literals" anymore, but it's fine
-    // since we will never need to point our spans there, so we lie about it here by ignoring it.
-    // Since there might actually be newlines in the source code, we need to normalize away all trailing newlines.
-    // If we only trimmed it off the input, `format!("\n")` would cause a mismatch as here we they actually match up.
-    // Alternatively, we could just count the trailing newlines and only trim one from the input if they don't match up.
-    let input_no_nl = input.trim_end_matches('\n');
-    let Ok(unescaped) = unescape_string(snippet) else {
-        return InputStringKind::NotALiteral;
-    };
-
-    let unescaped_no_nl = unescaped.trim_end_matches('\n');
-
-    if unescaped_no_nl != input_no_nl {
-        // The source string that we're pointing at isn't our input, so spans pointing at it will be incorrect.
-        // This can for example happen with proc macros that respan generated literals.
-        return InputStringKind::NotALiteral;
-    }
-
     let mut s = snippet.char_indices();
     let mut width_mappings = vec![];
     while let Some((pos, c)) = s.next() {
@@ -972,19 +952,6 @@ fn find_width_map_from_snippet(
     InputStringKind::Literal { width_mappings }
 }
 
-fn unescape_string(string: &str) -> Result<string::String, unescape::EscapeError> {
-    let mut buf = string::String::new();
-    let mut error = Ok(());
-    unescape::unescape_literal(string, unescape::Mode::Str, &mut |_, unescaped_char| {
-        match unescaped_char {
-            Ok(c) => buf.push(c),
-            Err(err) => error = Err(err),
-        }
-    });
-
-    error.map(|_| buf)
-}
-
 // Assert a reasonable size for `Piece`
 #[cfg(all(target_arch = "x86_64", target_pointer_width = "64"))]
 rustc_data_structures::static_assert_size!(Piece<'_>, 16);
index 5885f45ae45db788642b311f7245a98e8d70fcd3..b327ba63330ba8044ddd15daa4bc110a3ab9e023 100644 (file)
@@ -195,7 +195,7 @@ fn no_main_err(tcx: TyCtxt<'_>, visitor: &EntryContext<'_>) {
 
     // There is no main function.
     let mut has_filename = true;
-    let filename = tcx.sess.local_crate_source_file.clone().unwrap_or_else(|| {
+    let filename = tcx.sess.local_crate_source_file().unwrap_or_else(|| {
         has_filename = false;
         Default::default()
     });
index b5843c0ae488b1f17c58d81c1ae91758baf4e8e7..4c6a9b23fdf12a9ed33c99f71337ce81c64364d0 100644 (file)
@@ -137,6 +137,12 @@ fn visit_attribute(&mut self, attr: &'tcx Attribute) {
 }
 
 fn lib_features(tcx: TyCtxt<'_>, (): ()) -> LibFeatures {
+    // If `staged_api` is not enabled then we aren't allowed to define lib
+    // features; there is no point collecting them.
+    if !tcx.features().staged_api {
+        return new_lib_features();
+    }
+
     let mut collector = LibFeatureCollector::new(tcx);
     tcx.hir().walk_attributes(&mut collector);
     collector.lib_features
index b49432b79962bd3c288ec770a85fe2d1d1e5483c..6afdcc37fe86ea9d15652bcd848aec739c6ad4b8 100644 (file)
@@ -191,9 +191,9 @@ pub fn provide(providers: &mut Providers) {
 // Creating ir_maps
 //
 // This is the first pass and the one that drives the main
-// computation.  It walks up and down the IR once.  On the way down,
+// computation. It walks up and down the IR once. On the way down,
 // we count for each function the number of variables as well as
-// liveness nodes.  A liveness node is basically an expression or
+// liveness nodes. A liveness node is basically an expression or
 // capture clause that does something of interest: either it has
 // interesting control flow or it uses/defines a local variable.
 //
@@ -203,11 +203,11 @@ pub fn provide(providers: &mut Providers) {
 // of live variables at each program point.
 //
 // Finally, we run back over the IR one last time and, using the
-// computed liveness, check various safety conditions.  For example,
+// computed liveness, check various safety conditions. For example,
 // there must be no live nodes at the definition site for a variable
-// unless it has an initializer.  Similarly, each non-mutable local
+// unless it has an initializer. Similarly, each non-mutable local
 // variable must not be assigned if there is some successor
-// assignment.  And so forth.
+// assignment. And so forth.
 
 struct CaptureInfo {
     ln: LiveNode,
@@ -417,7 +417,7 @@ fn visit_expr(&mut self, expr: &'tcx Expr<'tcx>) {
                 self.add_live_node_for_node(expr.hir_id, ExprNode(expr.span, expr.hir_id));
 
                 // Make a live_node for each mentioned variable, with the span
-                // being the location that the variable is used.  This results
+                // being the location that the variable is used. This results
                 // in better error messages than just pointing at the closure
                 // construction site.
                 let mut call_caps = Vec::new();
@@ -792,7 +792,7 @@ fn propagate_through_stmt(&mut self, stmt: &hir::Stmt<'_>, succ: LiveNode) -> Li
         match stmt.kind {
             hir::StmtKind::Local(ref local) => {
                 // Note: we mark the variable as defined regardless of whether
-                // there is an initializer.  Initially I had thought to only mark
+                // there is an initializer. Initially I had thought to only mark
                 // the live variable as defined if it was initialized, and then we
                 // could check for uninit variables just by scanning what is live
                 // at the start of the function. But that doesn't work so well for
@@ -1169,24 +1169,24 @@ fn propagate_through_place_components(&mut self, expr: &Expr<'_>, succ: LiveNode
         //
         // # Tracked places
         //
-        // A tracked place is a local variable/argument `x`.  In
+        // A tracked place is a local variable/argument `x`. In
         // these cases, the link_node where the write occurs is linked
-        // to node id of `x`.  The `write_place()` routine generates
-        // the contents of this node.  There are no subcomponents to
+        // to node id of `x`. The `write_place()` routine generates
+        // the contents of this node. There are no subcomponents to
         // consider.
         //
         // # Non-tracked places
         //
-        // These are places like `x[5]` or `x.f`.  In that case, we
+        // These are places like `x[5]` or `x.f`. In that case, we
         // basically ignore the value which is written to but generate
-        // reads for the components---`x` in these two examples.  The
+        // reads for the components---`x` in these two examples. The
         // components reads are generated by
         // `propagate_through_place_components()` (this fn).
         //
         // # Illegal places
         //
         // It is still possible to observe assignments to non-places;
-        // these errors are detected in the later pass borrowck.  We
+        // these errors are detected in the later pass borrowck. We
         // just ignore such cases and treat them as reads.
 
         match expr.kind {
@@ -1204,7 +1204,7 @@ fn write_place(&mut self, expr: &Expr<'_>, succ: LiveNode, acc: u32) -> LiveNode
             }
 
             // We do not track other places, so just propagate through
-            // to their subcomponents.  Also, it may happen that
+            // to their subcomponents. Also, it may happen that
             // non-places occur here, because those are detected in the
             // later pass borrowck.
             _ => succ,
index 96f7236de5cb12ab9209f1b2827319026419cfaf..34e1abb78b2d4fa33323c278cd85d36d24a70fce 100644 (file)
@@ -147,7 +147,7 @@ fn annotate<F>(
         }
 
         if !self.tcx.features().staged_api {
-            // Propagate unstability.  This can happen even for non-staged-api crates in case
+            // Propagate unstability. This can happen even for non-staged-api crates in case
             // -Zforce-unstable-if-unmarked is set.
             if let Some(stab) = self.parent_stab {
                 if inherit_deprecation.yes() && stab.is_unstable() {
index fb55bb4afaac3adadbdc8c993038c794df18b324..9a5d3cceb914e48643012dd998844f8dc9c20234 100644 (file)
@@ -1,6 +1,5 @@
 #![doc(html_root_url = "https://doc.rust-lang.org/nightly/nightly-rustc/")]
 #![feature(associated_type_defaults)]
-#![feature(control_flow_enum)]
 #![feature(rustc_private)]
 #![feature(try_blocks)]
 #![feature(let_chains)]
@@ -112,7 +111,11 @@ impl<'tcx, V> DefIdVisitorSkeleton<'_, 'tcx, V>
     fn visit_trait(&mut self, trait_ref: TraitRef<'tcx>) -> ControlFlow<V::BreakTy> {
         let TraitRef { def_id, substs, .. } = trait_ref;
         self.def_id_visitor.visit_def_id(def_id, "trait", &trait_ref.print_only_trait_path())?;
-        if self.def_id_visitor.shallow() { ControlFlow::CONTINUE } else { substs.visit_with(self) }
+        if self.def_id_visitor.shallow() {
+            ControlFlow::Continue(())
+        } else {
+            substs.visit_with(self)
+        }
     }
 
     fn visit_projection_ty(&mut self, projection: ty::AliasTy<'tcx>) -> ControlFlow<V::BreakTy> {
@@ -131,7 +134,7 @@ fn visit_projection_ty(&mut self, projection: ty::AliasTy<'tcx>) -> ControlFlow<
             };
         self.visit_trait(trait_ref)?;
         if self.def_id_visitor.shallow() {
-            ControlFlow::CONTINUE
+            ControlFlow::Continue(())
         } else {
             assoc_substs.iter().try_for_each(|subst| subst.visit_with(self))
         }
@@ -155,7 +158,7 @@ fn visit_predicate(&mut self, predicate: ty::Predicate<'tcx>) -> ControlFlow<V::
                 ty,
                 _region,
             ))) => ty.visit_with(self),
-            ty::PredicateKind::Clause(ty::Clause::RegionOutlives(..)) => ControlFlow::CONTINUE,
+            ty::PredicateKind::Clause(ty::Clause::RegionOutlives(..)) => ControlFlow::Continue(()),
             ty::PredicateKind::ConstEvaluatable(ct) => ct.visit_with(self),
             ty::PredicateKind::WellFormed(arg) => arg.visit_with(self),
             _ => bug!("unexpected predicate: {:?}", predicate),
@@ -189,7 +192,7 @@ fn visit_ty(&mut self, ty: Ty<'tcx>) -> ControlFlow<V::BreakTy> {
             | ty::Generator(def_id, ..) => {
                 self.def_id_visitor.visit_def_id(def_id, "type", &ty)?;
                 if self.def_id_visitor.shallow() {
-                    return ControlFlow::CONTINUE;
+                    return ControlFlow::Continue(());
                 }
                 // Default type visitor doesn't visit signatures of fn types.
                 // Something like `fn() -> Priv {my_func}` is considered a private type even if
@@ -214,7 +217,7 @@ fn visit_ty(&mut self, ty: Ty<'tcx>) -> ControlFlow<V::BreakTy> {
                     // as visible/reachable even if both `Type` and `Trait` are private.
                     // Ideally, associated types should be substituted in the same way as
                     // free type aliases, but this isn't done yet.
-                    return ControlFlow::CONTINUE;
+                    return ControlFlow::Continue(());
                 }
                 // This will also visit substs if necessary, so we don't need to recurse.
                 return self.visit_projection_ty(proj);
@@ -274,7 +277,7 @@ fn visit_ty(&mut self, ty: Ty<'tcx>) -> ControlFlow<V::BreakTy> {
         }
 
         if self.def_id_visitor.shallow() {
-            ControlFlow::CONTINUE
+            ControlFlow::Continue(())
         } else {
             ty.super_visit_with(self)
         }
@@ -319,7 +322,7 @@ fn visit_def_id(
         if let Some(def_id) = def_id.as_local() {
             self.min = VL::new_min(self, def_id);
         }
-        ControlFlow::CONTINUE
+        ControlFlow::Continue(())
     }
 }
 
@@ -881,7 +884,7 @@ fn visit_def_id(
                 self.ev.update(def_id, self.level);
             }
         }
-        ControlFlow::CONTINUE
+        ControlFlow::Continue(())
     }
 }
 
@@ -1368,9 +1371,9 @@ fn visit_def_id(
         descr: &dyn fmt::Display,
     ) -> ControlFlow<Self::BreakTy> {
         if self.check_def_id(def_id, kind, descr) {
-            ControlFlow::BREAK
+            ControlFlow::Break(())
         } else {
-            ControlFlow::CONTINUE
+            ControlFlow::Continue(())
         }
     }
 }
@@ -1865,9 +1868,9 @@ fn visit_def_id(
         descr: &dyn fmt::Display,
     ) -> ControlFlow<Self::BreakTy> {
         if self.check_def_id(def_id, kind, descr) {
-            ControlFlow::BREAK
+            ControlFlow::Break(())
         } else {
-            ControlFlow::CONTINUE
+            ControlFlow::Continue(())
         }
     }
 }
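For reference, the replacements above move from the unstable `ControlFlow::CONTINUE`/`BREAK` associated constants (gated behind the removed `control_flow_enum` feature) to the stable enum variants. A small stand-alone sketch of the stable API in the same style:

    use std::ops::ControlFlow;

    // Visitor-style search that stops early, mirroring the Continue(())/Break(())
    // pattern used by the privacy visitors above.
    fn find_even(xs: &[i32]) -> ControlFlow<i32> {
        for &x in xs {
            if x % 2 == 0 {
                return ControlFlow::Break(x);
            }
        }
        ControlFlow::Continue(())
    }

    fn main() {
        assert_eq!(find_even(&[1, 3, 4, 5]), ControlFlow::Break(4));
        assert_eq!(find_even(&[1, 3, 5]), ControlFlow::Continue(()));
    }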
index 53c9da15737183b0120bce2be2d1667e9bb364da..47b2fd8f8f47a4d31da672b7e76c5de3b69d5d2f 100644 (file)
@@ -490,8 +490,8 @@ pub fn read_index(&self, dep_node_index: DepNodeIndex) {
     /// This is used to remove cycles during type-checking const generic parameters.
     ///
     /// As usual in the query system, we consider the current state of the calling query
-    /// only depends on the list of dependencies up to now.  As a consequence, the value
-    /// that this query gives us can only depend on those dependencies too.  Therefore,
+    /// only depends on the list of dependencies up to now. As a consequence, the value
+    /// that this query gives us can only depend on those dependencies too. Therefore,
     /// it is sound to use the current dependency set for the created node.
     ///
     /// During replay, the order of the nodes is relevant in the dependency graph.
@@ -510,9 +510,9 @@ pub fn with_feed_task<Ctxt: DepContext<DepKind = K>, A: Debug, R: Debug>(
         hash_result: Option<fn(&mut StableHashingContext<'_>, &R) -> Fingerprint>,
     ) -> DepNodeIndex {
         if let Some(data) = self.data.as_ref() {
-            // The caller query has more dependencies than the node we are creating.  We may
+            // The caller query has more dependencies than the node we are creating. We may
             // encounter a case where this created node is marked as green, but the caller query is
-            // subsequently marked as red or recomputed.  In this case, we will end up feeding a
+            // subsequently marked as red or recomputed. In this case, we will end up feeding a
             // value to an existing node.
             //
             // For sanity, we still check that the loaded stable hash and the new one match.
@@ -980,7 +980,7 @@ struct EdgeIndex {}
 /// graph: they are only added.
 ///
 /// The nodes in it are identified by a `DepNodeIndex`. We avoid keeping the nodes
-/// in memory.  This is important, because these graph structures are some of the
+/// in memory. This is important, because these graph structures are some of the
 /// largest in the compiler.
 ///
 /// For this reason, we avoid storing `DepNode`s more than once as map
index dfc1344f85c70cabbeb86f452cbd8abc704203dc..a81595b2420c041e9c8e535483c8fc3f5f601dfd 100644 (file)
@@ -1,14 +1,14 @@
 //! The data that we will serialize and deserialize.
 //!
 //! The dep-graph is serialized as a sequence of NodeInfo, with the dependencies
-//! specified inline.  The total number of nodes and edges are stored as the last
+//! specified inline. The total number of nodes and edges are stored as the last
 //! 16 bytes of the file, so we can find them easily at decoding time.
 //!
 //! The serialisation is performed on-demand when each node is emitted. Using this
 //! scheme, we do not need to keep the current graph in memory.
 //!
 //! The deserialization is performed manually, in order to convert from the stored
-//! sequence of NodeInfos to the different arrays in SerializedDepGraph.  Since the
+//! sequence of NodeInfos to the different arrays in SerializedDepGraph. Since the
 //! node and edge count are stored at the end of the file, all the arrays can be
 //! pre-allocated with the right length.
 
index f65846fc77f6e5114bfd384ce37c013c61e10e62..77d0d0314fc17de7a36de449436e484118bec350 100644 (file)
@@ -116,7 +116,7 @@ fn complete(&self, key: K, value: V, index: DepNodeIndex) -> Self::Stored {
         let mut lock = self.cache.get_shard_by_value(&key).lock();
         #[cfg(not(parallel_compiler))]
         let mut lock = self.cache.lock();
-        // We may be overwriting another value.  This is all right, since the dep-graph
+        // We may be overwriting another value. This is all right, since the dep-graph
         // will check that the fingerprint matches.
         lock.insert(key, (value.clone(), index));
         value
@@ -203,7 +203,7 @@ fn complete(&self, key: K, value: V, index: DepNodeIndex) -> Self::Stored {
         let mut lock = self.cache.get_shard_by_value(&key).lock();
         #[cfg(not(parallel_compiler))]
         let mut lock = self.cache.lock();
-        // We may be overwriting another value.  This is all right, since the dep-graph
+        // We may be overwriting another value. This is all right, since the dep-graph
         // will check that the fingerprint matches.
         lock.insert(key, value);
         &value.0
index 32fb5e18276ab86fe721f533c56d88871cb2e44c..eae4c9992eb08d4a6c31118b0fb73abf8d5ce2ea 100644 (file)
@@ -28,9 +28,9 @@
 use crate::Resolver;
 
 use rustc_ast as ast;
-use rustc_ast::node_id::NodeMap;
 use rustc_ast::visit::{self, Visitor};
-use rustc_data_structures::fx::FxHashSet;
+use rustc_data_structures::fx::FxIndexMap;
+use rustc_data_structures::unord::UnordSet;
 use rustc_errors::{pluralize, MultiSpan};
 use rustc_session::lint::builtin::{MACRO_USE_EXTERN_CRATE, UNUSED_IMPORTS};
 use rustc_session::lint::BuiltinLintDiagnostics;
@@ -40,7 +40,7 @@ struct UnusedImport<'a> {
     use_tree: &'a ast::UseTree,
     use_tree_id: ast::NodeId,
     item_span: Span,
-    unused: FxHashSet<ast::NodeId>,
+    unused: UnordSet<ast::NodeId>,
 }
 
 impl<'a> UnusedImport<'a> {
@@ -52,7 +52,7 @@ fn add(&mut self, id: ast::NodeId) {
 struct UnusedImportCheckVisitor<'a, 'b> {
     r: &'a mut Resolver<'b>,
     /// All the (so far) unused imports, grouped path list
-    unused_imports: NodeMap<UnusedImport<'a>>,
+    unused_imports: FxIndexMap<ast::NodeId, UnusedImport<'a>>,
     base_use_tree: Option<&'a ast::UseTree>,
     base_id: ast::NodeId,
     item_span: Span,
@@ -89,7 +89,7 @@ fn unused_import(&mut self, id: ast::NodeId) -> &mut UnusedImport<'a> {
             use_tree,
             use_tree_id,
             item_span,
-            unused: FxHashSet::default(),
+            unused: Default::default(),
         })
     }
 }
index fb2aebbd18a3d8508e5d06fc0eeba2e58f479f7a..36608615255586a45f76d16394429b231285e7ef 100644 (file)
@@ -5,10 +5,10 @@
 use rustc_ast::{self as ast, Crate, ItemKind, ModKind, NodeId, Path, CRATE_NODE_ID};
 use rustc_ast_pretty::pprust;
 use rustc_data_structures::fx::FxHashSet;
-use rustc_errors::struct_span_err;
 use rustc_errors::{
     pluralize, Applicability, Diagnostic, DiagnosticBuilder, ErrorGuaranteed, MultiSpan,
 };
+use rustc_errors::{struct_span_err, SuggestionStyle};
 use rustc_feature::BUILTIN_ATTRIBUTES;
 use rustc_hir::def::Namespace::{self, *};
 use rustc_hir::def::{self, CtorKind, CtorOf, DefKind, NonMacroAttrKind, PerNS};
@@ -1033,7 +1033,7 @@ fn early_lookup_typo_candidate(
                     let root_module = this.resolve_crate_root(root_ident);
                     this.add_module_candidates(root_module, &mut suggestions, filter_fn, None);
                 }
-                Scope::Module(module) => {
+                Scope::Module(module, _) => {
                     this.add_module_candidates(module, &mut suggestions, filter_fn, None);
                 }
                 Scope::MacroUsePrelude => {
@@ -2125,9 +2125,15 @@ pub(crate) fn check_for_module_export_macro(
 
                 let source_map = self.r.session.source_map();
 
+                // Make sure this is actually crate-relative.
+                let is_definitely_crate = import
+                    .module_path
+                    .first()
+                    .map_or(false, |f| f.ident.name != kw::SelfLower && f.ident.name != kw::Super);
+
                 // Add the import to the start, with a `{` if required.
                 let start_point = source_map.start_point(after_crate_name);
-                if let Ok(start_snippet) = source_map.span_to_snippet(start_point) {
+                if is_definitely_crate && let Ok(start_snippet) = source_map.span_to_snippet(start_point) {
                     corrections.push((
                         start_point,
                         if has_nested {
@@ -2139,11 +2145,17 @@ pub(crate) fn check_for_module_export_macro(
                             format!("{{{}, {}", import_snippet, start_snippet)
                         },
                     ));
-                }
 
-                // Add a `};` to the end if nested, matching the `{` added at the start.
-                if !has_nested {
-                    corrections.push((source_map.end_point(after_crate_name), "};".to_string()));
+                    // Add a `};` to the end if nested, matching the `{` added at the start.
+                    if !has_nested {
+                        corrections.push((source_map.end_point(after_crate_name), "};".to_string()));
+                    }
+                } else {
+                    // If the root import is module-relative, add the import separately
+                    corrections.push((
+                        import.use_span.shrink_to_lo(),
+                        format!("use {module_name}::{import_snippet};\n"),
+                    ));
                 }
             }
 
@@ -2418,7 +2430,7 @@ fn show_candidates(
         }
 
         if let Some(span) = use_placement_span {
-            let add_use = match mode {
+            let (add_use, trailing) = match mode {
                 DiagnosticMode::Pattern => {
                     err.span_suggestions(
                         span,
@@ -2428,21 +2440,23 @@ fn show_candidates(
                     );
                     return;
                 }
-                DiagnosticMode::Import => "",
-                DiagnosticMode::Normal => "use ",
+                DiagnosticMode::Import => ("", ""),
+                DiagnosticMode::Normal => ("use ", ";\n"),
             };
             for candidate in &mut accessible_path_strings {
                 // produce an additional newline to separate the new use statement
                 // from the directly following item.
-                let additional_newline = if let FoundUse::Yes = found_use { "" } else { "\n" };
-                candidate.0 = format!("{add_use}{}{append};\n{additional_newline}", &candidate.0);
+                let additional_newline = if let FoundUse::No = found_use && let DiagnosticMode::Normal = mode { "\n" } else { "" };
+                candidate.0 =
+                    format!("{add_use}{}{append}{trailing}{additional_newline}", &candidate.0);
             }
 
-            err.span_suggestions(
+            err.span_suggestions_with_style(
                 span,
                 &msg,
                 accessible_path_strings.into_iter().map(|a| a.0),
                 Applicability::MaybeIncorrect,
+                SuggestionStyle::ShowAlways,
             );
             if let [first, .., last] = &path[..] {
                 let sp = first.ident.span.until(last.ident.span);
@@ -2463,7 +2477,7 @@ fn show_candidates(
                 msg.push_str(&candidate.0);
             }
 
-            err.note(&msg);
+            err.help(&msg);
         }
     } else if !matches!(mode, DiagnosticMode::Import) {
         assert!(!inaccessible_path_strings.is_empty());
index e41fe325b811cb42cf118565782a00e1f75df0eb..a84652a315dc2d5ef95b197acee8025c6e088ebb 100644 (file)
@@ -1,9 +1,11 @@
-use rustc_ast as ast;
+use rustc_ast::{self as ast, NodeId};
 use rustc_feature::is_builtin_attr_name;
 use rustc_hir::def::{DefKind, Namespace, NonMacroAttrKind, PartialRes, PerNS};
 use rustc_hir::PrimTy;
 use rustc_middle::bug;
 use rustc_middle::ty;
+use rustc_session::lint::builtin::PROC_MACRO_DERIVE_RESOLUTION_FALLBACK;
+use rustc_session::lint::BuiltinLintDiagnostics;
 use rustc_span::def_id::LocalDefId;
 use rustc_span::edition::Edition;
 use rustc_span::hygiene::{ExpnId, ExpnKind, LocalExpnId, MacroKind, SyntaxContext};
@@ -99,7 +101,7 @@ pub(crate) fn visit_scopes<T>(
         };
         let mut scope = match ns {
             _ if is_absolute_path => Scope::CrateRoot,
-            TypeNS | ValueNS => Scope::Module(module),
+            TypeNS | ValueNS => Scope::Module(module, None),
             MacroNS => Scope::DeriveHelpers(parent_scope.expansion),
         };
         let mut ctxt = ctxt.normalize_to_macros_2_0();
@@ -163,7 +165,7 @@ pub(crate) fn visit_scopes<T>(
                     MacroRulesScope::Invocation(invoc_id) => {
                         Scope::MacroRules(self.invocation_parent_scopes[&invoc_id].macro_rules)
                     }
-                    MacroRulesScope::Empty => Scope::Module(module),
+                    MacroRulesScope::Empty => Scope::Module(module, None),
                 },
                 Scope::CrateRoot => match ns {
                     TypeNS => {
@@ -172,10 +174,16 @@ pub(crate) fn visit_scopes<T>(
                     }
                     ValueNS | MacroNS => break,
                 },
-                Scope::Module(module) => {
+                Scope::Module(module, prev_lint_id) => {
                     use_prelude = !module.no_implicit_prelude;
-                    match self.hygienic_lexical_parent(module, &mut ctxt) {
-                        Some(parent_module) => Scope::Module(parent_module),
+                    let derive_fallback_lint_id = match scope_set {
+                        ScopeSet::Late(.., lint_id) => lint_id,
+                        _ => None,
+                    };
+                    match self.hygienic_lexical_parent(module, &mut ctxt, derive_fallback_lint_id) {
+                        Some((parent_module, lint_id)) => {
+                            Scope::Module(parent_module, lint_id.or(prev_lint_id))
+                        }
                         None => {
                             ctxt.adjust(ExpnId::root());
                             match ns {
@@ -207,13 +215,45 @@ fn hygienic_lexical_parent(
         &mut self,
         module: Module<'a>,
         ctxt: &mut SyntaxContext,
-    ) -> Option<Module<'a>> {
+        derive_fallback_lint_id: Option<NodeId>,
+    ) -> Option<(Module<'a>, Option<NodeId>)> {
         if !module.expansion.outer_expn_is_descendant_of(*ctxt) {
-            return Some(self.expn_def_scope(ctxt.remove_mark()));
+            return Some((self.expn_def_scope(ctxt.remove_mark()), None));
         }
 
         if let ModuleKind::Block = module.kind {
-            return Some(module.parent.unwrap().nearest_item_scope());
+            return Some((module.parent.unwrap().nearest_item_scope(), None));
+        }
+
+        // We need to support the next case under a deprecation warning
+        // ```
+        // struct MyStruct;
+        // ---- begin: this comes from a proc macro derive
+        // mod implementation_details {
+        //     // Note that `MyStruct` is not in scope here.
+        //     impl SomeTrait for MyStruct { ... }
+        // }
+        // ---- end
+        // ```
+        // So we have to fall back to the module's parent during lexical resolution in this case.
+        if derive_fallback_lint_id.is_some() {
+            if let Some(parent) = module.parent {
+                // Inner module is inside the macro, parent module is outside of the macro.
+                if module.expansion != parent.expansion
+                    && module.expansion.is_descendant_of(parent.expansion)
+                {
+                    // The macro is a proc macro derive
+                    if let Some(def_id) = module.expansion.expn_data().macro_def_id {
+                        let ext = self.get_macro_by_def_id(def_id).ext;
+                        if ext.builtin_name.is_none()
+                            && ext.macro_kind() == MacroKind::Derive
+                            && parent.expansion.outer_expn_is_descendant_of(*ctxt)
+                        {
+                            return Some((parent, derive_fallback_lint_id));
+                        }
+                    }
+                }
+            }
         }
 
         None
@@ -470,7 +510,7 @@ struct Flags: u8 {
                             Err((Determinacy::Determined, _)) => Err(Determinacy::Determined),
                         }
                     }
-                    Scope::Module(module) => {
+                    Scope::Module(module, derive_fallback_lint_id) => {
                         let adjusted_parent_scope = &ParentScope { module, ..*parent_scope };
                         let binding = this.resolve_ident_in_module_unadjusted_ext(
                             ModuleOrUniformRoot::Module(module),
@@ -483,6 +523,21 @@ struct Flags: u8 {
                         );
                         match binding {
                             Ok(binding) => {
+                                if let Some(lint_id) = derive_fallback_lint_id {
+                                    this.lint_buffer.buffer_lint_with_diagnostic(
+                                        PROC_MACRO_DERIVE_RESOLUTION_FALLBACK,
+                                        lint_id,
+                                        orig_ident.span,
+                                        &format!(
+                                            "cannot find {} `{}` in this scope",
+                                            ns.descr(),
+                                            ident
+                                        ),
+                                        BuiltinLintDiagnostics::ProcMacroDeriveResolutionFallback(
+                                            orig_ident.span,
+                                        ),
+                                    );
+                                }
                                 let misc_flags = if ptr::eq(module, this.graph_root) {
                                     Flags::MISC_SUGGEST_CRATE
                                 } else if module.is_normal() {
index ca43762aa214ec63c846c738a8a8aabe90d86e64..83932c089b311afba738f9e3fb9ff652befe1079 100644 (file)
@@ -668,7 +668,7 @@ fn visit_ty(&mut self, ty: &'ast Ty) {
                     && let Some(partial_res) = self.r.partial_res_map.get(&ty.id)
                     && let Some(Res::Def(DefKind::Trait | DefKind::TraitAlias, _)) = partial_res.full_res()
                 {
-                    // This path is actually a bare trait object.  In case of a bare `Fn`-trait
+                    // This path is actually a bare trait object. In case of a bare `Fn`-trait
                     // object with anonymous lifetimes, we need this rib to correctly place the
                     // synthetic lifetimes.
                     let span = ty.span.shrink_to_lo().to(path.span.shrink_to_lo());
@@ -1046,7 +1046,7 @@ fn visit_path_segment(&mut self, path_segment: &'ast PathSegment) {
                     // Probe the lifetime ribs to know how to behave.
                     for rib in self.lifetime_ribs.iter().rev() {
                         match rib.kind {
-                            // We are inside a `PolyTraitRef`.  The lifetimes are
+                            // We are inside a `PolyTraitRef`. The lifetimes are
                             // to be intoduced in that (maybe implicit) `for<>` binder.
                             LifetimeRibKind::Generics {
                                 binder,
@@ -1069,7 +1069,7 @@ fn visit_path_segment(&mut self, path_segment: &'ast PathSegment) {
                                 );
                                 break;
                             }
-                            // We have nowhere to introduce generics.  Code is malformed,
+                            // We have nowhere to introduce generics. Code is malformed,
                             // so use regular lifetime resolution to avoid spurious errors.
                             LifetimeRibKind::Item | LifetimeRibKind::Generics { .. } => {
                                 visit::walk_generic_args(self, args);
@@ -1775,7 +1775,7 @@ fn resolve_elided_lifetimes_in_path(
                         break;
                     }
                     // `LifetimeRes::Error`, which would usually be used in the case of
-                    // `ReportError`, is unsuitable here, as we don't emit an error yet.  Instead,
+                    // `ReportError`, is unsuitable here, as we don't emit an error yet. Instead,
                     // we simply resolve to an implicit lifetime, which will be checked later, at
                     // which point a suitable error will be emitted.
                     LifetimeRibKind::AnonymousReportError | LifetimeRibKind::Item => {
@@ -3373,7 +3373,7 @@ fn smart_resolve_path_fragment(
                         sugg.to_string(),
                         Applicability::MaybeIncorrect,
                     ))
-                } else if res.is_none() {
+                } else if res.is_none() && matches!(source, PathSource::Type) {
                     this.report_missing_type_error(path)
                 } else {
                     None
@@ -3647,7 +3647,7 @@ fn resolve_qpath(
         if let Some(qself) = qself {
             if qself.position == 0 {
                 // This is a case like `<T>::B`, where there is no
-                // trait to resolve.  In that case, we leave the `B`
+                // trait to resolve. In that case, we leave the `B`
                 // segment to be resolved by type-check.
                 return Ok(Some(PartialRes::with_unresolved_segments(
                     Res::Def(DefKind::Mod, CRATE_DEF_ID.to_def_id()),
@@ -3658,7 +3658,7 @@ fn resolve_qpath(
             // Make sure `A::B` in `<T as A::B>::C` is a trait item.
             //
             // Currently, `path` names the full item (`A::B::C`, in
-            // our example).  so we extract the prefix of that that is
+            // our example). So we extract the prefix of that which is
             // the trait (the slice upto and including
             // `qself.position`). And then we recursively resolve that,
             // but with `qself` set to `None`.
index d92b046d0b9f226fe3bb0f3111a09658acef8303..6d448433ee6dbc940bdcde726b3a9f25efbe8997 100644 (file)
@@ -2188,15 +2188,31 @@ pub(crate) fn maybe_report_lifetime_uses(
             let deletion_span = || {
                 if params.len() == 1 {
                     // if sole lifetime, remove the entire `<>` brackets
-                    generics_span
+                    Some(generics_span)
                 } else if param_index == 0 {
                     // if removing within `<>` brackets, we also want to
                     // delete a leading or trailing comma as appropriate
-                    param.span().to(params[param_index + 1].span().shrink_to_lo())
+                    match (
+                        param.span().find_ancestor_inside(generics_span),
+                        params[param_index + 1].span().find_ancestor_inside(generics_span),
+                    ) {
+                        (Some(param_span), Some(next_param_span)) => {
+                            Some(param_span.to(next_param_span.shrink_to_lo()))
+                        }
+                        _ => None,
+                    }
                 } else {
                     // if removing within `<>` brackets, we also want to
                     // delete a leading or trailing comma as appropriate
-                    params[param_index - 1].span().shrink_to_hi().to(param.span())
+                    match (
+                        param.span().find_ancestor_inside(generics_span),
+                        params[param_index - 1].span().find_ancestor_inside(generics_span),
+                    ) {
+                        (Some(param_span), Some(prev_param_span)) => {
+                            Some(prev_param_span.shrink_to_hi().to(param_span))
+                        }
+                        _ => None,
+                    }
                 }
             };
             match use_set {
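A hedged illustration of the lint this `deletion_span` feeds (names are made up): the `unused_lifetimes` lint reports a never-used lifetime parameter, and the span computed above tells the suggestion whether to delete the whole `<...>` (sole parameter) or the parameter plus an adjacent comma:

    #![warn(unused_lifetimes)]

    // warning: lifetime parameter `'a` never used; the suggested fix deletes the
    // entire `<'a>` here because it is the only generic parameter.
    fn id<'a>(x: u32) -> u32 {
        x
    }

    fn main() {
        assert_eq!(id(7), 7);
    }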
index f950e4a9bee65282e8ae37a9358f1d408191e00d..1b181b714005bd7177d2a1cd1ce29358301d6020 100644 (file)
@@ -105,7 +105,9 @@ enum Scope<'a> {
     DeriveHelpersCompat,
     MacroRules(MacroRulesScopeRef<'a>),
     CrateRoot,
-    Module(Module<'a>),
+    // The node ID is for reporting the `PROC_MACRO_DERIVE_RESOLUTION_FALLBACK`
+    // lint if it should be reported.
+    Module(Module<'a>, Option<NodeId>),
     MacroUsePrelude,
     BuiltinAttrs,
     ExternPrelude,
@@ -1591,7 +1593,7 @@ pub fn traits_in_scope(
 
         self.visit_scopes(ScopeSet::All(TypeNS, false), parent_scope, ctxt, |this, scope, _, _| {
             match scope {
-                Scope::Module(module) => {
+                Scope::Module(module, _) => {
                     this.traits_in_module(module, assoc_item, &mut found_traits);
                 }
                 Scope::StdLibPrelude => {
index 9ae07cb005bd41b0db70ee91e528cf9db13bd741..a5f09de1c401ba0c86fac6d1df3953b109de29fa 100644 (file)
@@ -112,9 +112,7 @@ fn lookup_def_id(&self, ref_id: hir::HirId) -> Option<DefId> {
     }
 
     pub fn dump_crate_info(&mut self, name: Symbol) {
-        let source_file = self.tcx.sess.local_crate_source_file.as_ref();
-        let crate_root = source_file.map(|source_file| {
-            let source_file = Path::new(source_file);
+        let crate_root = self.tcx.sess.local_crate_source_file().map(|source_file| {
             match source_file.file_name() {
                 Some(_) => source_file.parent().unwrap().display(),
                 None => source_file.display(),
@@ -157,10 +155,14 @@ pub fn dump_compilation_options(&mut self, input: &Input, crate_name: Symbol) {
                 .enumerate()
                 .filter(|(i, _)| !remap_arg_indices.contains(i))
                 .map(|(_, arg)| match input {
-                    Input::File(ref path) if path == Path::new(&arg) => {
-                        let mapped = &self.tcx.sess.local_crate_source_file;
-                        mapped.as_ref().unwrap().to_string_lossy().into()
-                    }
+                    Input::File(ref path) if path == Path::new(&arg) => self
+                        .tcx
+                        .sess
+                        .local_crate_source_file()
+                        .as_ref()
+                        .unwrap()
+                        .to_string_lossy()
+                        .into(),
                     _ => arg,
                 });
 
index 8d6758f40f96522d3cbcc9ec79fad3790df563aa..e65d57bb3db3e7737ffbc18b8670df244346be6f 100644 (file)
@@ -18,13 +18,7 @@ pub fn make_filename_string(&self, file: &SourceFile) -> String {
         match &file.name {
             FileName::Real(RealFileName::LocalPath(path)) => {
                 if path.is_absolute() {
-                    self.sess
-                        .source_map()
-                        .path_mapping()
-                        .map_prefix(path.into())
-                        .0
-                        .display()
-                        .to_string()
+                    self.sess.source_map().path_mapping().map_prefix(path).0.display().to_string()
                 } else {
                     self.sess
                         .opts
index 1ccfc59f7a9d61e86b5b9f245cf0f8303f465e6b..586454f76574c5b2ad62a7075674da3e9da39cd2 100644 (file)
@@ -591,6 +591,24 @@ pub fn source_name(&self) -> FileName {
             Input::Str { ref name, .. } => name.clone(),
         }
     }
+
+    pub fn opt_path(&self) -> Option<&Path> {
+        match self {
+            Input::File(file) => Some(file),
+            Input::Str { name, .. } => match name {
+                FileName::Real(real) => real.local_path(),
+                FileName::QuoteExpansion(_) => None,
+                FileName::Anon(_) => None,
+                FileName::MacroExpansion(_) => None,
+                FileName::ProcMacroSourceCode(_) => None,
+                FileName::CfgSpec(_) => None,
+                FileName::CliCrateAttr(_) => None,
+                FileName::Custom(_) => None,
+                FileName::DocTest(path, _) => Some(path),
+                FileName::InlineAsm(_) => None,
+            },
+        }
+    }
 }
 
 #[derive(Clone, Hash, Debug, HashStable_Generic)]
@@ -715,7 +733,7 @@ pub fn split_dwarf_path(
 pub fn host_triple() -> &'static str {
     // Get the host triple out of the build environment. This ensures that our
     // idea of the host triple is the same as for the set of libraries we've
-    // actually built.  We can't just take LLVM's host triple because they
+    // actually built. We can't just take LLVM's host triple because they
     // normalize all ix86 architectures to i386.
     //
     // Instead of grabbing the host triple (for the current host), we grab (at
@@ -1271,7 +1289,7 @@ pub fn unstable<F>(name: &'static str, f: F) -> RustcOptGroup
 
 // The `opt` local module holds wrappers around the `getopts` API that
 // adds extra rustc-specific metadata to each option; such metadata
-// is exposed by .  The public
+// is exposed by . The public
 // functions below ending with `_u` are the functions that return
 // *unstable* options, i.e., options that are only enabled when the
 // user also passes the `-Z unstable-options` debugging flag.
@@ -2091,7 +2109,7 @@ fn parse_libs(matches: &getopts::Matches, error_format: ErrorOutputType) -> Vec<
         .map(|s| {
             // Parse string of the form "[KIND[:MODIFIERS]=]lib[:new_name]",
             // where KIND is one of "dylib", "framework", "static", "link-arg" and
-            // where MODIFIERS are  a comma separated list of supported modifiers
+            // where MODIFIERS are a comma separated list of supported modifiers
             // (bundle, verbatim, whole-archive, as-needed). Each modifier is prefixed
             // with either + or - to indicate whether it is enabled or disabled.
             // The last value specified for a given modifier wins.
@@ -2459,6 +2477,11 @@ pub fn build_session_options(matches: &getopts::Matches) -> Options {
 
     let pretty = parse_pretty(&unstable_opts, error_format);
 
+    // query-dep-graph is required if dump-dep-graph is given (see issue #106736)
+    if unstable_opts.dump_dep_graph && !unstable_opts.query_dep_graph {
+        early_error(error_format, "can't dump dependency graph without `-Z query-dep-graph`");
+    }
+
     // Try to find a directory containing the Rust `src`, for more details see
     // the doc comment on the `real_rust_source_base_dir` field.
     let tmp_buf;
@@ -2491,12 +2514,12 @@ pub fn build_session_options(matches: &getopts::Matches) -> Options {
         early_error(error_format, &format!("Current directory is invalid: {e}"));
     });
 
-    let (path, remapped) =
-        FilePathMapping::new(remap_path_prefix.clone()).map_prefix(working_dir.clone());
+    let remap = FilePathMapping::new(remap_path_prefix.clone());
+    let (path, remapped) = remap.map_prefix(&working_dir);
     let working_dir = if remapped {
-        RealFileName::Remapped { local_path: Some(working_dir), virtual_name: path }
+        RealFileName::Remapped { virtual_name: path.into_owned(), local_path: Some(working_dir) }
     } else {
-        RealFileName::LocalPath(path)
+        RealFileName::LocalPath(path.into_owned())
     };
 
     Options {
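For context, a hedged, std-only sketch (the helper name is ours, not rustc's) of what `FilePathMapping::map_prefix` does for the working directory above under `--remap-path-prefix=FROM=TO`: replace a leading FROM with TO and report whether anything was remapped:

    use std::path::{Path, PathBuf};

    // Simplified model: a single FROM=TO mapping instead of rustc's list of mappings.
    fn map_prefix(path: &Path, from: &Path, to: &Path) -> (PathBuf, bool) {
        match path.strip_prefix(from) {
            Ok(rest) => (to.join(rest), true),
            Err(_) => (path.to_path_buf(), false),
        }
    }

    fn main() {
        let (mapped, remapped) =
            map_prefix(Path::new("/home/user/project"), Path::new("/home/user"), Path::new("/remapped"));
        assert!(remapped);
        assert_eq!(mapped, Path::new("/remapped/project"));
    }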
index 7f926f7d8bc4a2cb2cd25871384eff142e0324a8..4ae9a3fae474b3c9a993b97f77cf765de06ed3a7 100644 (file)
@@ -228,7 +228,7 @@ pub trait CrateStore: std::fmt::Debug {
     fn def_path_hash(&self, def: DefId) -> DefPathHash;
 
     // This information is safe to access, since it's hashed as part of the StableCrateId, which
-    // incr.  comp. uses to identify a CrateNum.
+    // incr. comp. uses to identify a CrateNum.
     fn crate_name(&self, cnum: CrateNum) -> Symbol;
     fn stable_crate_id(&self, cnum: CrateNum) -> StableCrateId;
     fn stable_crate_id_to_crate_num(&self, stable_crate_id: StableCrateId) -> CrateNum;
index 6f1b31ff9c3aec9cbce8cb0bf3b2fe053a566ba7..b6a328908ce085834d74cb534ba1503486cca6cf 100644 (file)
@@ -155,7 +155,7 @@ fn current_dll_path() -> Result<PathBuf, String> {
 /// This function checks if sysroot is found using env::args().next(), and if it
 /// is not found, finds sysroot from current rustc_driver dll.
 pub fn get_or_default_sysroot() -> Result<PathBuf, String> {
-    // Follow symlinks.  If the resolved path is relative, make it absolute.
+    // Follow symlinks. If the resolved path is relative, make it absolute.
     fn canonicalize(path: PathBuf) -> PathBuf {
         let path = fs::canonicalize(&path).unwrap_or(path);
         // See comments on this target function, but the gist is that
index 8ee3057de625ef3b08c9faad46c824ae9c685130..c3f0c4b58f57ad61eb04cbad7be79918950af8e3 100644 (file)
@@ -29,9 +29,9 @@ pub fn out_filename(
     out_filename
 }
 
-/// Make sure files are writeable.  Mac, FreeBSD, and Windows system linkers
+/// Make sure files are writeable. Mac, FreeBSD, and Windows system linkers
 /// check this already -- however, the Linux linker will happily overwrite a
-/// read-only file.  We should be consistent.
+/// read-only file. We should be consistent.
 pub fn check_file_is_writeable(file: &Path, sess: &Session) {
     if !is_writeable(file) {
         sess.emit_fatal(FileIsNotWriteable { file });
@@ -45,7 +45,7 @@ fn is_writeable(p: &Path) -> bool {
     }
 }
 
-pub fn find_crate_name(sess: &Session, attrs: &[ast::Attribute], input: &Input) -> Symbol {
+pub fn find_crate_name(sess: &Session, attrs: &[ast::Attribute]) -> Symbol {
     let validate = |s: Symbol, span: Option<Span>| {
         validate_crate_name(sess, s, span);
         s
@@ -71,7 +71,7 @@ pub fn find_crate_name(sess: &Session, attrs: &[ast::Attribute], input: &Input)
     if let Some((attr, s)) = attr_crate_name {
         return validate(s, Some(attr.span));
     }
-    if let Input::File(ref path) = *input {
+    if let Input::File(ref path) = sess.io.input {
         if let Some(s) = path.file_stem().and_then(|s| s.to_str()) {
             if s.starts_with('-') {
                 sess.emit_err(CrateNameInvalid { s });
index 1b2e8d9dc707bdd60673f4b2d859057899227588..95f199de6ff6ff0c14b781586d20f72c303819b7 100644 (file)
@@ -1,6 +1,7 @@
 use crate::cgu_reuse_tracker::CguReuseTracker;
 use crate::code_stats::CodeStats;
 pub use crate::code_stats::{DataTypeKind, FieldInfo, SizeKind, VariantInfo};
+use crate::config::Input;
 use crate::config::{self, CrateType, InstrumentCoverage, OptLevel, OutputType, SwitchWithOptPath};
 use crate::errors::{
     BranchProtectionRequiresAArch64, CannotEnableCrtStaticLinux, CannotMixAndMatchSanitizers,
@@ -137,6 +138,13 @@ pub struct Limits {
     pub const_eval_limit: Limit,
 }
 
+pub struct CompilerIO {
+    pub input: Input,
+    pub output_dir: Option<PathBuf>,
+    pub output_file: Option<PathBuf>,
+    pub temps_dir: Option<PathBuf>,
+}
+
 /// Represents the data associated with a compilation
 /// session for a single crate.
 pub struct Session {
@@ -147,9 +155,8 @@ pub struct Session {
     pub target_tlib_path: Lrc<SearchPath>,
     pub parse_sess: ParseSess,
     pub sysroot: PathBuf,
-    /// The name of the root source file of the crate, in the local file system.
-    /// `None` means that there is no source file.
-    pub local_crate_source_file: Option<PathBuf>,
+    /// The input and the output/temporary file paths for this compilation process.
+    pub io: CompilerIO,
 
     crate_types: OnceCell<Vec<CrateType>>,
     /// The `stable_crate_id` is constructed out of the crate name and all the
@@ -197,7 +204,7 @@ pub struct Session {
     pub ctfe_backtrace: Lock<CtfeBacktrace>,
 
     /// This tracks where `-Zunleash-the-miri-inside-of-you` was used to get around a
-    /// const check, optionally with the relevant feature gate.  We use this to
+    /// const check, optionally with the relevant feature gate. We use this to
     /// warn about unleashing, but with a single diagnostic instead of dozens that
     /// drown everything else in noise.
     miri_unleashed_features: Lock<Vec<(Span, Option<Symbol>)>>,
@@ -228,6 +235,11 @@ pub fn miri_unleashed_feature(&self, span: Span, feature_gate: Option<Symbol>) {
         self.miri_unleashed_features.lock().push((span, feature_gate));
     }
 
+    pub fn local_crate_source_file(&self) -> Option<PathBuf> {
+        let path = self.io.input.opt_path()?;
+        Some(self.opts.file_path_mapping().map_prefix(path).0.into_owned())
+    }
+
     fn check_miri_unleashed_features(&self) {
         let unleashed_features = self.miri_unleashed_features.lock();
         if !unleashed_features.is_empty() {
@@ -1298,7 +1310,7 @@ fn default_emitter(
 #[allow(rustc::bad_opt_access)]
 pub fn build_session(
     sopts: config::Options,
-    local_crate_source_file: Option<PathBuf>,
+    io: CompilerIO,
     bundle: Option<Lrc<rustc_errors::FluentBundle>>,
     registry: rustc_errors::registry::Registry,
     driver_lint_caps: FxHashMap<lint::LintId, lint::Level>,
@@ -1391,11 +1403,6 @@ pub fn build_session(
         Lrc::new(SearchPath::from_sysroot_and_triple(&sysroot, target_triple))
     };
 
-    let file_path_mapping = sopts.file_path_mapping();
-
-    let local_crate_source_file =
-        local_crate_source_file.map(|path| file_path_mapping.map_prefix(path).0);
-
     let optimization_fuel = Lock::new(OptimizationFuel {
         remaining: sopts.unstable_opts.fuel.as_ref().map_or(0, |&(_, i)| i),
         out_of_fuel: false,
@@ -1427,7 +1434,7 @@ pub fn build_session(
         target_tlib_path,
         parse_sess,
         sysroot,
-        local_crate_source_file,
+        io,
         crate_types: OnceCell::new(),
         stable_crate_id: OnceCell::new(),
         features: OnceCell::new(),
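
With the per-invocation paths grouped into one struct, code that previously received the input or the source-file path as separate arguments can reach everything through `sess.io`. The sketch below models that grouping and the new `local_crate_source_file` accessor in standalone form; the `Io` type, the `Option<PathBuf>` input, and the closure standing in for `file_path_mapping().map_prefix(...)` are illustrative assumptions, not the rustc types:

```rust
use std::path::{Path, PathBuf};

// Simplified stand-in for the `CompilerIO` grouping above.
struct Io {
    input: Option<PathBuf>,
    output_dir: Option<PathBuf>,
    output_file: Option<PathBuf>,
    temps_dir: Option<PathBuf>,
}

impl Io {
    // Analogue of the new `Session::local_crate_source_file`: take the input
    // path, if any, and run it through a prefix-mapping step.
    fn local_crate_source_file(&self, map_prefix: impl Fn(&Path) -> PathBuf) -> Option<PathBuf> {
        let path = self.input.as_deref()?;
        Some(map_prefix(path))
    }
}

fn main() {
    let io = Io {
        input: Some(PathBuf::from("/home/user/project/src/main.rs")),
        output_dir: None,
        output_file: None,
        temps_dir: None,
    };
    // A trivial "mapping" that strips a prefix, standing in for --remap-path-prefix.
    let mapped = io.local_crate_source_file(|p| {
        p.strip_prefix("/home/user").map(Path::to_path_buf).unwrap_or_else(|_| p.to_path_buf())
    });
    assert_eq!(mapped, Some(PathBuf::from("project/src/main.rs")));
}
```
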
index a9a9a3fbf9d8061c54dbd6fc05d8894d0a339da7..dee823eefde689b013e18644f0572b3bc9363678 100644 (file)
@@ -338,7 +338,7 @@ pub struct HygieneData {
     /// first and then resolved later), so we use an `Option` here.
     local_expn_data: IndexVec<LocalExpnId, Option<ExpnData>>,
     local_expn_hashes: IndexVec<LocalExpnId, ExpnHash>,
-    /// Data and hash information from external crates.  We may eventually want to remove these
+    /// Data and hash information from external crates. We may eventually want to remove these
     /// maps, and fetch the information directly from the other crate's metadata like DefIds do.
     foreign_expn_data: FxHashMap<ExpnId, ExpnData>,
     foreign_expn_hashes: FxHashMap<ExpnId, ExpnHash>,
index fa09b4faa441f3b83d50809c92b426e708784b89..2e339a9d2d2b00eca439bdbd4930e9072c1a0e7a 100644 (file)
@@ -17,7 +17,7 @@
 use rustc_data_structures::sync::{AtomicU32, Lrc, MappedReadGuard, ReadGuard, RwLock};
 use std::cmp;
 use std::hash::Hash;
-use std::path::{Path, PathBuf};
+use std::path::{self, Path, PathBuf};
 use std::sync::atomic::Ordering;
 
 use std::fs;
@@ -1071,12 +1071,24 @@ pub fn count_lines(&self) -> usize {
 
     pub fn ensure_source_file_source_present(&self, source_file: Lrc<SourceFile>) -> bool {
         source_file.add_external_src(|| {
-            match source_file.name {
-                FileName::Real(ref name) if let Some(local_path) = name.local_path() => {
-                    self.file_loader.read_file(local_path).ok()
+            let FileName::Real(ref name) = source_file.name else {
+                return None;
+            };
+
+            let local_path: Cow<'_, Path> = match name {
+                RealFileName::LocalPath(local_path) => local_path.into(),
+                RealFileName::Remapped { local_path: Some(local_path), .. } => local_path.into(),
+                RealFileName::Remapped { local_path: None, virtual_name } => {
+                    // The compiler produces better error messages if the sources of dependencies
+                    // are available. Attempt to undo any path mapping so we can find remapped
+                    // dependencies.
+                    // We can only use the heuristic because `add_external_src` checks the file
+                    // content hash.
+                    self.path_mapping.reverse_map_prefix_heuristically(virtual_name)?.into()
                 }
-                _ => None,
-            }
+            };
+
+            self.file_loader.read_file(&local_path).ok()
         })
     }
 
@@ -1138,7 +1150,8 @@ pub fn new(mapping: Vec<(PathBuf, PathBuf)>) -> FilePathMapping {
     /// Applies any path prefix substitution as defined by the mapping.
     /// The return value is the remapped path and a boolean indicating whether
     /// the path was affected by the mapping.
-    pub fn map_prefix(&self, path: PathBuf) -> (PathBuf, bool) {
+    pub fn map_prefix<'a>(&'a self, path: impl Into<Cow<'a, Path>>) -> (Cow<'a, Path>, bool) {
+        let path = path.into();
         if path.as_os_str().is_empty() {
             // Exit early if the path is empty and therefore there's nothing to remap.
             // This is mostly to reduce spam for `RUSTC_LOG=[remap_path_prefix]`.
@@ -1148,7 +1161,10 @@ pub fn map_prefix(&self, path: PathBuf) -> (PathBuf, bool) {
         return remap_path_prefix(&self.mapping, path);
 
         #[instrument(level = "debug", skip(mapping), ret)]
-        fn remap_path_prefix(mapping: &[(PathBuf, PathBuf)], path: PathBuf) -> (PathBuf, bool) {
+        fn remap_path_prefix<'a>(
+            mapping: &'a [(PathBuf, PathBuf)],
+            path: Cow<'a, Path>,
+        ) -> (Cow<'a, Path>, bool) {
             // NOTE: We are iterating over the mapping entries from last to first
             //       because entries specified later on the command line should
             //       take precedence.
@@ -1163,9 +1179,9 @@ fn remap_path_prefix(mapping: &[(PathBuf, PathBuf)], path: PathBuf) -> (PathBuf,
                         // in remapped paths down the line.
                         // So, if we have an exact match, we just return that without a call
                         // to `Path::join()`.
-                        to.clone()
+                        to.into()
                     } else {
-                        to.join(rest)
+                        to.join(rest).into()
                     };
                     debug!("Match - remapped");
 
@@ -1183,11 +1199,11 @@ fn remap_path_prefix(mapping: &[(PathBuf, PathBuf)], path: PathBuf) -> (PathBuf,
     fn map_filename_prefix(&self, file: &FileName) -> (FileName, bool) {
         match file {
             FileName::Real(realfile) if let RealFileName::LocalPath(local_path) = realfile => {
-                let (mapped_path, mapped) = self.map_prefix(local_path.to_path_buf());
+                let (mapped_path, mapped) = self.map_prefix(local_path);
                 let realfile = if mapped {
                     RealFileName::Remapped {
                         local_path: Some(local_path.clone()),
-                        virtual_name: mapped_path,
+                        virtual_name: mapped_path.into_owned(),
                     }
                 } else {
                     realfile.clone()
@@ -1228,14 +1244,17 @@ pub fn to_embeddable_absolute_path(
                 let (new_path, was_remapped) = self.map_prefix(unmapped_file_path);
                 if was_remapped {
                     // It was remapped, so don't modify further
-                    return RealFileName::Remapped { local_path: None, virtual_name: new_path };
+                    return RealFileName::Remapped {
+                        local_path: None,
+                        virtual_name: new_path.into_owned(),
+                    };
                 }
 
                 if new_path.is_absolute() {
                     // No remapping has applied to this path and it is absolute,
                     // so the working directory cannot influence it either, so
                     // we are done.
-                    return RealFileName::LocalPath(new_path);
+                    return RealFileName::LocalPath(new_path.into_owned());
                 }
 
                 debug_assert!(new_path.is_relative());
@@ -1253,12 +1272,12 @@ pub fn to_embeddable_absolute_path(
                             RealFileName::Remapped {
                                 // Erase the actual path
                                 local_path: None,
-                                virtual_name: file_path_abs,
+                                virtual_name: file_path_abs.into_owned(),
                             }
                         } else {
                             // No kind of remapping applied to this path, so
                             // we leave it as it is.
-                            RealFileName::LocalPath(file_path_abs)
+                            RealFileName::LocalPath(file_path_abs.into_owned())
                         }
                     }
                     RealFileName::Remapped {
@@ -1277,4 +1296,43 @@ pub fn to_embeddable_absolute_path(
             }
         }
     }
+
+    /// Attempts to (heuristically) reverse a prefix mapping.
+    ///
+    /// Returns [`Some`] if there is exactly one mapping where the "to" part is
+    /// a prefix of `path` and has at least one non-empty
+    /// [`Normal`](path::Component::Normal) component. The component
+    /// restriction exists to avoid reverse mapping overly generic paths like
+    /// `/` or `.`.
+    ///
+    /// This is a heuristic and not guaranteed to return the actual original
+    /// path! Do not rely on the result unless you have other means to verify
+    /// that the mapping is correct (e.g. by checking the file content hash).
+    #[instrument(level = "debug", skip(self), ret)]
+    fn reverse_map_prefix_heuristically(&self, path: &Path) -> Option<PathBuf> {
+        let mut found = None;
+
+        for (from, to) in self.mapping.iter() {
+            let has_normal_component = to.components().any(|c| match c {
+                path::Component::Normal(s) => !s.is_empty(),
+                _ => false,
+            });
+
+            if !has_normal_component {
+                continue;
+            }
+
+            let Ok(rest) = path.strip_prefix(to) else {
+                continue;
+            };
+
+            if found.is_some() {
+                return None;
+            }
+
+            found = Some(from.join(rest));
+        }
+
+        found
+    }
 }
index 3cab59e8dbe6cf6166b90d65ad62889e852782a7..686b3b00d7047a7120717418ce5106c118599408 100644 (file)
@@ -344,6 +344,10 @@ fn map_path_prefix(mapping: &FilePathMapping, p: &str) -> String {
     mapping.map_prefix(path(p)).0.to_string_lossy().to_string()
 }
 
+fn reverse_map_prefix(mapping: &FilePathMapping, p: &str) -> Option<String> {
+    mapping.reverse_map_prefix_heuristically(&path(p)).map(|q| q.to_string_lossy().to_string())
+}
+
 #[test]
 fn path_prefix_remapping() {
     // Relative to relative
@@ -387,7 +391,7 @@ fn path_prefix_remapping_expand_to_absolute() {
     let working_directory = path("/foo");
     let working_directory = RealFileName::Remapped {
         local_path: Some(working_directory.clone()),
-        virtual_name: mapping.map_prefix(working_directory).0,
+        virtual_name: mapping.map_prefix(working_directory).0.into_owned(),
     };
 
     assert_eq!(working_directory.remapped_path_if_available(), path("FOO"));
@@ -480,6 +484,45 @@ fn path_prefix_remapping_expand_to_absolute() {
     );
 }
 
+#[test]
+fn path_prefix_remapping_reverse() {
+    // Ignores options without alphanumeric chars.
+    {
+        let mapping =
+            &FilePathMapping::new(vec![(path("abc"), path("/")), (path("def"), path("."))]);
+
+        assert_eq!(reverse_map_prefix(mapping, "/hello.rs"), None);
+        assert_eq!(reverse_map_prefix(mapping, "./hello.rs"), None);
+    }
+
+    // Returns `None` if multiple options match.
+    {
+        let mapping = &FilePathMapping::new(vec![
+            (path("abc"), path("/redacted")),
+            (path("def"), path("/redacted")),
+        ]);
+
+        assert_eq!(reverse_map_prefix(mapping, "/redacted/hello.rs"), None);
+    }
+
+    // Distinct reverse mappings.
+    {
+        let mapping = &FilePathMapping::new(vec![
+            (path("abc"), path("/redacted")),
+            (path("def/ghi"), path("/fake/dir")),
+        ]);
+
+        assert_eq!(
+            reverse_map_prefix(mapping, "/redacted/path/hello.rs"),
+            Some(path_str("abc/path/hello.rs"))
+        );
+        assert_eq!(
+            reverse_map_prefix(mapping, "/fake/dir/hello.rs"),
+            Some(path_str("def/ghi/hello.rs"))
+        );
+    }
+}
+
 #[test]
 fn test_next_point() {
     let sm = SourceMap::new(FilePathMapping::empty());
index 706002f79b1fb98163ff6e2abe678f0fdabe443c..7597b8d126a9ceb55375ecdb2f9f523cd756e093 100644 (file)
         Capture,
         Center,
         Clone,
+        Context,
         Continue,
         Copy,
         Count,
index 7f01f33d39c6cace7bfcb13e9fccd35963ae45f1..70cd883be09b3d7d57989931b1a58d8a101ab585 100644 (file)
@@ -462,7 +462,7 @@ pub fn suggest_class(self, arch: InlineAsmArch, ty: InlineAsmType) -> Option<Sel
     }
 
     /// Returns a suggested template modifier to use for this type and an
-    /// example of a  register named formatted with it.
+    /// example of a register name formatted with it.
     ///
     /// Such suggestions are useful if a type smaller than the full register
     /// size is used and a modifier can be used to point to the subregister of
index e72cab629ff19058cd332c33689fc851ee263cc4..b69ade7e4aa08eee89426153ea3d6b427e62af92 100644 (file)
@@ -12,7 +12,7 @@ pub fn target() -> Target {
 
     Target {
         // Clang automatically chooses a more specific target based on
-        // MACOSX_DEPLOYMENT_TARGET.  To enable cross-language LTO to work
+        // MACOSX_DEPLOYMENT_TARGET. To enable cross-language LTO to work
         // correctly, we do too.
         llvm_target: macos_llvm_target(arch).into(),
         pointer_width: 64,
index 2b00cda44b511372a33ba51c89c95eb721fcd249..4d03747d0165f698ae7ce3d7867f27110b3316d1 100644 (file)
@@ -6,7 +6,7 @@ pub fn opts(endian: Endian) -> TargetOptions {
         allow_asm: true,
         endian,
         linker_flavor: LinkerFlavor::Bpf,
-        atomic_cas: true,
+        atomic_cas: false,
         dynamic_linking: true,
         no_builtins: true,
         panic_strategy: PanicStrategy::Abort,
index ad22467ba9c8970ad4cdf13b804d648b307f6cfb..b5103d15db695d95de98d7ebc8b4936f832f3446 100644 (file)
@@ -12,7 +12,7 @@ pub fn target() -> Target {
 
     Target {
         // Clang automatically chooses a more specific target based on
-        // MACOSX_DEPLOYMENT_TARGET.  To enable cross-language LTO to work
+        // MACOSX_DEPLOYMENT_TARGET. To enable cross-language LTO to work
         // correctly, we do too.
         //
         // While ld64 doesn't understand i686, LLVM does.
index 8ac351584434848f258c452b37cafa6964ba8084..e63e789752bc74eed91ae7601bc287bfe8e0d024 100644 (file)
@@ -5,8 +5,8 @@ pub fn opts() -> TargetOptions {
         LinkerFlavor::Unix(Cc::Yes),
         &[
             // The illumos libc contains a stack unwinding implementation, as
-            // does libgcc_s.  The latter implementation includes several
-            // additional symbols that are not always in base libc.  To force
+            // does libgcc_s. The latter implementation includes several
+            // additional symbols that are not always in base libc. To force
             // the consistent use of just one unwinder, we ensure libc appears
             // after libgcc_s in the NEEDED list for the resultant binary by
             // ignoring any attempts to add it as a dynamic dependency until the
@@ -17,7 +17,7 @@ pub fn opts() -> TargetOptions {
             "-lc",
             // LLVM will insert calls to the stack protector functions
             // "__stack_chk_fail" and "__stack_chk_guard" into code in native
-            // object files.  Some platforms include these symbols directly in
+            // object files. Some platforms include these symbols directly in
             // libc, but at least historically these have been provided in
             // libssp.so on illumos and Solaris systems.
             "-lssp",
@@ -40,16 +40,16 @@ pub fn opts() -> TargetOptions {
         // cleanup handlers (in C, this would be something along the lines of:
         // void register_callback(void (*fn)(void *), void *arg);
         // (see src/libstd/sys/unix/fast_thread_local.rs) that is currently
-        // missing in illumos.  For now at least, we must fallback to using
+        // missing in illumos. For now at least, we must fallback to using
         // pthread_{get,set}specific.
         //has_thread_local: true,
 
         // FIXME: Currently, rust is invoking cc to link, which ends up
-        // causing these to get included twice.  We should eventually transition
+        // causing these to get included twice. We should eventually transition
         // to having rustc invoke ld directly, in which case these will need to
         // be uncommented.
         //
-        // We want XPG6 behavior from libc and libm.  See standards(5)
+        // We want XPG6 behavior from libc and libm. See standards(5)
         //pre_link_objects_exe: vec![
         //    "/usr/lib/amd64/values-Xc.o".into(),
         //    "/usr/lib/amd64/values-xpg6.o".into(),
index 1e80b8b759db4e008a3cf831dd1117eab2f6e2b6..a094c2c545269bede5d2eb6764e3eb2df611c51b 100644 (file)
@@ -2622,7 +2622,7 @@ pub fn expect_builtin(target_triple: &TargetTriple) -> Target {
     /// Search for a JSON file specifying the given target triple.
     ///
     /// If none is found in `$RUST_TARGET_PATH`, look for a file called `target.json` inside the
-    /// sysroot under the target-triple's `rustlib` directory.  Note that it could also just be a
+    /// sysroot under the target-triple's `rustlib` directory. Note that it could also just be a
     /// bare filename already, so also check for that. If one of the hardcoded targets we know
     /// about, just return it directly.
     ///
index cda88de0ea40693558af4084af4a49adcaa33ea4..f2c722b9a89da076e450e6bc8ceda7ce9fe3d67b 100644 (file)
@@ -1,5 +1,5 @@
 use crate::abi::Endian;
-use crate::spec::{StackProbeType, Target};
+use crate::spec::{SanitizerSet, StackProbeType, Target};
 
 pub fn target() -> Target {
     let mut base = super::linux_gnu_base::opts();
@@ -13,6 +13,8 @@ pub fn target() -> Target {
     base.max_atomic_width = Some(64);
     base.min_global_align = Some(16);
     base.stack_probes = StackProbeType::Inline;
+    base.supported_sanitizers =
+        SanitizerSet::ADDRESS | SanitizerSet::LEAK | SanitizerSet::MEMORY | SanitizerSet::THREAD;
 
     Target {
         llvm_target: "s390x-unknown-linux-gnu".into(),
index 91e63aee5e49020211443eec54f3c776b0be92e7..8fe9d023c527e824a7d8803f4abecfe8603328ac 100644 (file)
@@ -1,5 +1,5 @@
 use crate::abi::Endian;
-use crate::spec::{StackProbeType, Target};
+use crate::spec::{SanitizerSet, StackProbeType, Target};
 
 pub fn target() -> Target {
     let mut base = super::linux_musl_base::opts();
@@ -14,6 +14,8 @@ pub fn target() -> Target {
     base.min_global_align = Some(16);
     base.static_position_independent_executables = true;
     base.stack_probes = StackProbeType::Inline;
+    base.supported_sanitizers =
+        SanitizerSet::ADDRESS | SanitizerSet::LEAK | SanitizerSet::MEMORY | SanitizerSet::THREAD;
 
     Target {
         llvm_target: "s390x-unknown-linux-musl".into(),
index 440194ef216bf9f20232b02dbe0519c8dac22b40..4d2bc98ab7831fff14840eaf35281f25d1e6143c 100644 (file)
@@ -15,7 +15,7 @@ pub fn target() -> Target {
         pointer_width: 64,
         data_layout: "E-m:e-i64:64-n32:64-S128".into(),
         // Use "sparc64" instead of "sparcv9" here, since the former is already
-        // used widely in the source base.  If we ever needed ABI
+        // used widely in the source base. If we ever needed ABI
         // differentiation from the sparc64, we could, but that would probably
         // just be confusing.
         arch: "sparc64".into(),
index 9a3e7a8050025c01cc63e69238ad78756b736213..e90bda9c9a8718683ddc2cec4c0ed46fed7626bf 100644 (file)
@@ -14,7 +14,7 @@ pub fn target() -> Target {
 
     Target {
         // Clang automatically chooses a more specific target based on
-        // MACOSX_DEPLOYMENT_TARGET.  To enable cross-language LTO to work
+        // MACOSX_DEPLOYMENT_TARGET. To enable cross-language LTO to work
         // correctly, we do too.
         llvm_target: macos_llvm_target(arch).into(),
         pointer_width: 64,
index 67613e1a4ebc01e31aacb60aead8538012423dd0..90d879976c260cb33d84020b09a0ff33c9423d71 100644 (file)
@@ -19,6 +19,7 @@ rustc_infer = { path = "../rustc_infer" }
 rustc_lint_defs = { path = "../rustc_lint_defs" }
 rustc_macros = { path = "../rustc_macros" }
 rustc_query_system = { path = "../rustc_query_system" }
+rustc_serialize = { path = "../rustc_serialize" }
 rustc_session = { path = "../rustc_session" }
 rustc_span = { path = "../rustc_span" }
 rustc_target = { path = "../rustc_target" }
index 081ac966c696193252a769e5944aae4ac38c9090..6fa0941036390a675beafe5305269c0e9633c740 100644 (file)
@@ -21,6 +21,7 @@
 #![feature(never_type)]
 #![feature(result_option_inspect)]
 #![feature(type_alias_impl_trait)]
+#![feature(min_specialization)]
 #![recursion_limit = "512"] // For rustdoc
 
 #[macro_use]
index e9ddad11ff23e5707ad6297186d93846588e1e03..cdb72d49834f0101f401660171e102675bfa9e3b 100644 (file)
@@ -1,38 +1,85 @@
 //! Code shared by trait and projection goals for candidate assembly.
 
 use super::infcx_ext::InferCtxtExt;
-use super::{
-    fixme_instantiate_canonical_query_response, CanonicalGoal, CanonicalResponse, Certainty,
-    EvalCtxt, Goal,
-};
+use super::{CanonicalResponse, Certainty, EvalCtxt, Goal, QueryResult};
 use rustc_hir::def_id::DefId;
-use rustc_infer::infer::TyCtxtInferExt;
-use rustc_infer::infer::{
-    canonical::{CanonicalVarValues, OriginalQueryValues},
-    InferCtxt,
-};
 use rustc_infer::traits::query::NoSolution;
+use rustc_infer::traits::util::elaborate_predicates;
 use rustc_middle::ty::TypeFoldable;
 use rustc_middle::ty::{self, Ty, TyCtxt};
-use rustc_span::DUMMY_SP;
 use std::fmt::Debug;
 
 /// A candidate is a possible way to prove a goal.
 ///
 /// It consists of both the `source`, which describes how that goal would be proven,
 /// and the `result` when using the given `source`.
-///
-/// For the list of possible candidates, please look at the documentation of
-/// [super::trait_goals::CandidateSource] and [super::project_goals::CandidateSource].
 #[derive(Debug, Clone)]
-pub(super) struct Candidate<'tcx, G: GoalKind<'tcx>> {
-    pub(super) source: G::CandidateSource,
+pub(super) struct Candidate<'tcx> {
+    pub(super) source: CandidateSource,
     pub(super) result: CanonicalResponse<'tcx>,
 }
 
-pub(super) trait GoalKind<'tcx>: TypeFoldable<'tcx> + Copy {
-    type CandidateSource: Debug + Copy;
+/// Possible ways the given goal can be proven.
+#[derive(Debug, Clone, Copy)]
+pub(super) enum CandidateSource {
+    /// A user written impl.
+    ///
+    /// ## Examples
+    ///
+    /// ```rust
+    /// fn main() {
+    ///     let x: Vec<u32> = Vec::new();
+    ///     // This uses the impl from the standard library to prove `Vec<T>: Clone`.
+    ///     let y = x.clone();
+    /// }
+    /// ```
+    Impl(DefId),
+    /// A builtin impl generated by the compiler. When adding a new special
+    /// trait, try to use actual impls whenever possible. Builtin impls should
+    /// only be used in cases where the impl cannot be written manually.
+    ///
+    /// Notable examples are auto traits, `Sized`, and `DiscriminantKind`.
+    /// For a list of all traits with builtin impls, check out the
+    /// [`EvalCtxt::assemble_builtin_impl_candidates`] method.
+    BuiltinImpl,
+    /// An assumption from the environment.
+    ///
+    /// More precisely, we've used the `n-th` assumption in the `param_env`.
+    ///
+    /// ## Examples
+    ///
+    /// ```rust
+    /// fn is_clone<T: Clone>(x: T) -> (T, T) {
+    ///     // This uses the assumption `T: Clone` from the `where`-bounds
+    ///     // to prove `T: Clone`.
+    ///     (x.clone(), x)
+    /// }
+    /// ```
+    ParamEnv(usize),
+    /// If the self type is an alias type, e.g. an opaque type or a projection,
+    /// we know the bounds on that alias to hold even without knowing its concrete
+    /// underlying type.
+    ///
+    /// More precisely this candidate is using the `n-th` bound in the `item_bounds` of
+    /// the self type.
+    ///
+    /// ## Examples
+    ///
+    /// ```rust
+    /// trait Trait {
+    ///     type Assoc: Clone;
+    /// }
+    ///
+    /// fn foo<T: Trait>(x: <T as Trait>::Assoc) {
+    ///     // We prove `<T as Trait>::Assoc` by looking at the bounds on `Assoc` in
+    ///     // the trait definition.
+    ///     let _y = x.clone();
+    /// }
+    /// ```
+    AliasBound(usize),
+}
 
+pub(super) trait GoalKind<'tcx>: TypeFoldable<'tcx> + Copy + Eq {
     fn self_ty(self) -> Ty<'tcx>;
 
     fn with_self_ty(self, tcx: TyCtxt<'tcx>, self_ty: Ty<'tcx>) -> Self;
@@ -40,47 +87,86 @@ pub(super) trait GoalKind<'tcx>: TypeFoldable<'tcx> + Copy {
     fn trait_def_id(self, tcx: TyCtxt<'tcx>) -> DefId;
 
     fn consider_impl_candidate(
-        acx: &mut AssemblyCtxt<'_, 'tcx, Self>,
+        ecx: &mut EvalCtxt<'_, 'tcx>,
         goal: Goal<'tcx, Self>,
         impl_def_id: DefId,
-    );
-}
+    ) -> QueryResult<'tcx>;
 
-/// An abstraction which correctly deals with the canonical results for candidates.
-///
-/// It also deduplicates the behavior between trait and projection predicates.
-pub(super) struct AssemblyCtxt<'a, 'tcx, G: GoalKind<'tcx>> {
-    pub(super) cx: &'a mut EvalCtxt<'tcx>,
-    pub(super) infcx: &'a InferCtxt<'tcx>,
-    var_values: CanonicalVarValues<'tcx>,
-    candidates: Vec<Candidate<'tcx, G>>,
-}
+    fn consider_assumption(
+        ecx: &mut EvalCtxt<'_, 'tcx>,
+        goal: Goal<'tcx, Self>,
+        assumption: ty::Predicate<'tcx>,
+    ) -> QueryResult<'tcx>;
+
+    fn consider_auto_trait_candidate(
+        ecx: &mut EvalCtxt<'_, 'tcx>,
+        goal: Goal<'tcx, Self>,
+    ) -> QueryResult<'tcx>;
 
-impl<'a, 'tcx, G: GoalKind<'tcx>> AssemblyCtxt<'a, 'tcx, G> {
-    pub(super) fn assemble_and_evaluate_candidates(
-        cx: &'a mut EvalCtxt<'tcx>,
-        goal: CanonicalGoal<'tcx, G>,
-    ) -> Vec<Candidate<'tcx, G>> {
-        let (ref infcx, goal, var_values) =
-            cx.tcx.infer_ctxt().build_with_canonical(DUMMY_SP, &goal);
-        let mut acx = AssemblyCtxt { cx, infcx, var_values, candidates: Vec::new() };
+    fn consider_trait_alias_candidate(
+        ecx: &mut EvalCtxt<'_, 'tcx>,
+        goal: Goal<'tcx, Self>,
+    ) -> QueryResult<'tcx>;
 
-        acx.assemble_candidates_after_normalizing_self_ty(goal);
+    fn consider_builtin_sized_candidate(
+        ecx: &mut EvalCtxt<'_, 'tcx>,
+        goal: Goal<'tcx, Self>,
+    ) -> QueryResult<'tcx>;
 
-        acx.assemble_impl_candidates(goal);
+    fn consider_builtin_copy_clone_candidate(
+        ecx: &mut EvalCtxt<'_, 'tcx>,
+        goal: Goal<'tcx, Self>,
+    ) -> QueryResult<'tcx>;
 
-        acx.candidates
-    }
+    fn consider_builtin_pointer_sized_candidate(
+        ecx: &mut EvalCtxt<'_, 'tcx>,
+        goal: Goal<'tcx, Self>,
+    ) -> QueryResult<'tcx>;
+
+    fn consider_builtin_fn_trait_candidates(
+        ecx: &mut EvalCtxt<'_, 'tcx>,
+        goal: Goal<'tcx, Self>,
+        kind: ty::ClosureKind,
+    ) -> QueryResult<'tcx>;
+
+    fn consider_builtin_tuple_candidate(
+        ecx: &mut EvalCtxt<'_, 'tcx>,
+        goal: Goal<'tcx, Self>,
+    ) -> QueryResult<'tcx>;
+}
 
-    pub(super) fn try_insert_candidate(
+impl<'tcx> EvalCtxt<'_, 'tcx> {
+    pub(super) fn assemble_and_evaluate_candidates<G: GoalKind<'tcx>>(
         &mut self,
-        source: G::CandidateSource,
-        certainty: Certainty,
-    ) {
-        match self.infcx.make_canonical_response(self.var_values.clone(), certainty) {
-            Ok(result) => self.candidates.push(Candidate { source, result }),
-            Err(NoSolution) => debug!(?source, ?certainty, "failed leakcheck"),
+        goal: Goal<'tcx, G>,
+    ) -> Vec<Candidate<'tcx>> {
+        debug_assert_eq!(goal, self.infcx.resolve_vars_if_possible(goal));
+
+        // HACK: `_: Trait` is ambiguous, because it may be satisfied via a builtin rule,
+        // object bound, alias bound, etc. We are unable to determine this until we can at
+        // least structurally resolve the type one layer.
+        if goal.predicate.self_ty().is_ty_var() {
+            return vec![Candidate {
+                source: CandidateSource::BuiltinImpl,
+                result: self.make_canonical_response(Certainty::AMBIGUOUS).unwrap(),
+            }];
         }
+
+        let mut candidates = Vec::new();
+
+        self.assemble_candidates_after_normalizing_self_ty(goal, &mut candidates);
+
+        self.assemble_impl_candidates(goal, &mut candidates);
+
+        self.assemble_builtin_impl_candidates(goal, &mut candidates);
+
+        self.assemble_param_env_candidates(goal, &mut candidates);
+
+        self.assemble_alias_bound_candidates(goal, &mut candidates);
+
+        self.assemble_object_bound_candidates(goal, &mut candidates);
+
+        candidates
     }
 
     /// If the self type of a goal is a projection, computing the relevant candidates is difficult.
@@ -88,8 +174,12 @@ pub(super) fn try_insert_candidate(
     /// To deal with this, we first try to normalize the self type and add the candidates for the normalized
     /// self type to the list of candidates in case that succeeds. Note that we can't just eagerly return in
     /// this case as projections as self types add `
-    fn assemble_candidates_after_normalizing_self_ty(&mut self, goal: Goal<'tcx, G>) {
-        let tcx = self.cx.tcx;
+    fn assemble_candidates_after_normalizing_self_ty<G: GoalKind<'tcx>>(
+        &mut self,
+        goal: Goal<'tcx, G>,
+        candidates: &mut Vec<Candidate<'tcx>>,
+    ) {
+        let tcx = self.tcx();
         // FIXME: We also have to normalize opaque types, not sure where to best fit that in.
         let &ty::Alias(ty::Projection, projection_ty) = goal.predicate.self_ty().kind() else {
             return
@@ -103,48 +193,193 @@ fn assemble_candidates_after_normalizing_self_ty(&mut self, goal: Goal<'tcx, G>)
                     term: normalized_ty.into(),
                 }),
             );
-            let normalization_certainty =
-                match self.cx.evaluate_goal(&self.infcx, normalizes_to_goal) {
-                    Ok((_, certainty)) => certainty,
-                    Err(NoSolution) => return,
-                };
+            let normalization_certainty = match self.evaluate_goal(normalizes_to_goal) {
+                Ok((_, certainty)) => certainty,
+                Err(NoSolution) => return,
+            };
+            let normalized_ty = self.infcx.resolve_vars_if_possible(normalized_ty);
 
             // NOTE: Alternatively we could call `evaluate_goal` here and only have a `Normalized` candidate.
-            // This doesn't work as long as we use `CandidateSource` in both winnowing and to resolve associated items.
+            // This doesn't work as long as we use `CandidateSource` in winnowing.
             let goal = goal.with(tcx, goal.predicate.with_self_ty(tcx, normalized_ty));
-            let mut orig_values = OriginalQueryValues::default();
-            let goal = self.infcx.canonicalize_query(goal, &mut orig_values);
-            let normalized_candidates =
-                AssemblyCtxt::assemble_and_evaluate_candidates(self.cx, goal);
-
-            // Map each candidate from being canonical wrt the current inference context to being
-            // canonical wrt the caller.
-            for Candidate { source, result } in normalized_candidates {
-                self.infcx.probe(|_| {
-                    let candidate_certainty = fixme_instantiate_canonical_query_response(
-                        &self.infcx,
-                        &orig_values,
-                        result,
-                    );
-
-                    // FIXME: This is a bit scary if the `normalizes_to_goal` overflows.
-                    //
-                    // If we have an ambiguous candidate it hides that normalization
-                    // caused an overflow which may cause issues.
-                    self.try_insert_candidate(
-                        source,
-                        normalization_certainty.unify_and(candidate_certainty),
-                    )
-                })
+            // FIXME: This is broken if we care about the `usize` of `AliasBound` because the self type
+            // could be normalized to yet another projection with different item bounds.
+            let normalized_candidates = self.assemble_and_evaluate_candidates(goal);
+            for mut normalized_candidate in normalized_candidates {
+                normalized_candidate.result =
+                    normalized_candidate.result.unchecked_map(|mut response| {
+                        // FIXME: This currently hides overflow in the normalization step of the self type
+                        // which is probably wrong. Maybe `unify_and` should actually keep overflow as
+                        // we treat it as non-fatal anyways.
+                        response.certainty = response.certainty.unify_and(normalization_certainty);
+                        response
+                    });
+                candidates.push(normalized_candidate);
             }
         })
     }
 
-    fn assemble_impl_candidates(&mut self, goal: Goal<'tcx, G>) {
-        self.cx.tcx.for_each_relevant_impl(
-            goal.predicate.trait_def_id(self.cx.tcx),
+    fn assemble_impl_candidates<G: GoalKind<'tcx>>(
+        &mut self,
+        goal: Goal<'tcx, G>,
+        candidates: &mut Vec<Candidate<'tcx>>,
+    ) {
+        let tcx = self.tcx();
+        tcx.for_each_relevant_impl(
+            goal.predicate.trait_def_id(tcx),
             goal.predicate.self_ty(),
-            |impl_def_id| G::consider_impl_candidate(self, goal, impl_def_id),
+            |impl_def_id| match G::consider_impl_candidate(self, goal, impl_def_id) {
+                Ok(result) => candidates
+                    .push(Candidate { source: CandidateSource::Impl(impl_def_id), result }),
+                Err(NoSolution) => (),
+            },
         );
     }
+
+    fn assemble_builtin_impl_candidates<G: GoalKind<'tcx>>(
+        &mut self,
+        goal: Goal<'tcx, G>,
+        candidates: &mut Vec<Candidate<'tcx>>,
+    ) {
+        let lang_items = self.tcx().lang_items();
+        let trait_def_id = goal.predicate.trait_def_id(self.tcx());
+        let result = if self.tcx().trait_is_auto(trait_def_id) {
+            G::consider_auto_trait_candidate(self, goal)
+        } else if self.tcx().trait_is_alias(trait_def_id) {
+            G::consider_trait_alias_candidate(self, goal)
+        } else if lang_items.sized_trait() == Some(trait_def_id) {
+            G::consider_builtin_sized_candidate(self, goal)
+        } else if lang_items.copy_trait() == Some(trait_def_id)
+            || lang_items.clone_trait() == Some(trait_def_id)
+        {
+            G::consider_builtin_copy_clone_candidate(self, goal)
+        } else if lang_items.pointer_sized() == Some(trait_def_id) {
+            G::consider_builtin_pointer_sized_candidate(self, goal)
+        } else if let Some(kind) = self.tcx().fn_trait_kind_from_def_id(trait_def_id) {
+            G::consider_builtin_fn_trait_candidates(self, goal, kind)
+        } else if lang_items.tuple_trait() == Some(trait_def_id) {
+            G::consider_builtin_tuple_candidate(self, goal)
+        } else {
+            Err(NoSolution)
+        };
+
+        match result {
+            Ok(result) => {
+                candidates.push(Candidate { source: CandidateSource::BuiltinImpl, result })
+            }
+            Err(NoSolution) => (),
+        }
+    }
+
+    fn assemble_param_env_candidates<G: GoalKind<'tcx>>(
+        &mut self,
+        goal: Goal<'tcx, G>,
+        candidates: &mut Vec<Candidate<'tcx>>,
+    ) {
+        for (i, assumption) in goal.param_env.caller_bounds().iter().enumerate() {
+            match G::consider_assumption(self, goal, assumption) {
+                Ok(result) => {
+                    candidates.push(Candidate { source: CandidateSource::ParamEnv(i), result })
+                }
+                Err(NoSolution) => (),
+            }
+        }
+    }
+
+    fn assemble_alias_bound_candidates<G: GoalKind<'tcx>>(
+        &mut self,
+        goal: Goal<'tcx, G>,
+        candidates: &mut Vec<Candidate<'tcx>>,
+    ) {
+        let alias_ty = match goal.predicate.self_ty().kind() {
+            ty::Bool
+            | ty::Char
+            | ty::Int(_)
+            | ty::Uint(_)
+            | ty::Float(_)
+            | ty::Adt(_, _)
+            | ty::Foreign(_)
+            | ty::Str
+            | ty::Array(_, _)
+            | ty::Slice(_)
+            | ty::RawPtr(_)
+            | ty::Ref(_, _, _)
+            | ty::FnDef(_, _)
+            | ty::FnPtr(_)
+            | ty::Dynamic(..)
+            | ty::Closure(..)
+            | ty::Generator(..)
+            | ty::GeneratorWitness(_)
+            | ty::Never
+            | ty::Tuple(_)
+            | ty::Param(_)
+            | ty::Placeholder(..)
+            | ty::Infer(_)
+            | ty::Error(_) => return,
+            ty::Bound(..) => bug!("unexpected bound type: {goal:?}"),
+            ty::Alias(_, alias_ty) => alias_ty,
+        };
+
+        for (i, (assumption, _)) in self
+            .tcx()
+            .bound_explicit_item_bounds(alias_ty.def_id)
+            .subst_iter_copied(self.tcx(), alias_ty.substs)
+            .enumerate()
+        {
+            match G::consider_assumption(self, goal, assumption) {
+                Ok(result) => {
+                    candidates.push(Candidate { source: CandidateSource::AliasBound(i), result })
+                }
+                Err(NoSolution) => (),
+            }
+        }
+    }
+
+    fn assemble_object_bound_candidates<G: GoalKind<'tcx>>(
+        &mut self,
+        goal: Goal<'tcx, G>,
+        candidates: &mut Vec<Candidate<'tcx>>,
+    ) {
+        let self_ty = goal.predicate.self_ty();
+        let bounds = match *self_ty.kind() {
+            ty::Bool
+            | ty::Char
+            | ty::Int(_)
+            | ty::Uint(_)
+            | ty::Float(_)
+            | ty::Adt(_, _)
+            | ty::Foreign(_)
+            | ty::Str
+            | ty::Array(_, _)
+            | ty::Slice(_)
+            | ty::RawPtr(_)
+            | ty::Ref(_, _, _)
+            | ty::FnDef(_, _)
+            | ty::FnPtr(_)
+            | ty::Alias(..)
+            | ty::Closure(..)
+            | ty::Generator(..)
+            | ty::GeneratorWitness(_)
+            | ty::Never
+            | ty::Tuple(_)
+            | ty::Param(_)
+            | ty::Placeholder(..)
+            | ty::Infer(_)
+            | ty::Error(_) => return,
+            ty::Bound(..) => bug!("unexpected bound type: {goal:?}"),
+            ty::Dynamic(bounds, ..) => bounds,
+        };
+
+        let tcx = self.tcx();
+        for assumption in
+            elaborate_predicates(tcx, bounds.iter().map(|bound| bound.with_self_ty(tcx, self_ty)))
+        {
+            match G::consider_assumption(self, goal, assumption.predicate) {
+                Ok(result) => {
+                    candidates.push(Candidate { source: CandidateSource::BuiltinImpl, result })
+                }
+                Err(NoSolution) => (),
+            }
+        }
+    }
 }
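
Each `assemble_*` step above asks one kind of candidate source whether it applies and, on success, pushes a `Candidate` into a single flat list; winnowing between the collected candidates happens later. A compressed standalone sketch of that shape, using closures as stand-ins for the `consider_*` callbacks (the types and names are illustrative, not the solver's API):

```rust
#[derive(Debug, PartialEq)]
enum CandidateSource {
    Impl(usize),
    BuiltinImpl,
    ParamEnv(usize),
    AliasBound(usize),
}

// Try every source in turn and keep whatever succeeds, like
// `assemble_and_evaluate_candidates` above.
fn assemble(sources: Vec<Box<dyn Fn() -> Option<CandidateSource>>>) -> Vec<CandidateSource> {
    let mut candidates = Vec::new();
    for source in sources {
        // `None` plays the role of `Err(NoSolution)`: the source contributes nothing.
        if let Some(candidate) = source() {
            candidates.push(candidate);
        }
    }
    candidates
}

fn main() {
    let sources: Vec<Box<dyn Fn() -> Option<CandidateSource>>> = vec![
        Box::new(|| None),                               // an impl that does not apply
        Box::new(|| Some(CandidateSource::Impl(1))),     // an impl that applies
        Box::new(|| Some(CandidateSource::ParamEnv(0))), // a where-bound that applies
    ];
    let candidates = assemble(sources);
    assert_eq!(candidates, vec![CandidateSource::Impl(1), CandidateSource::ParamEnv(0)]);
}
```
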
diff --git a/compiler/rustc_trait_selection/src/solve/cache.rs b/compiler/rustc_trait_selection/src/solve/cache.rs
deleted file mode 100644 (file)
index 993b798..0000000
+++ /dev/null
@@ -1,257 +0,0 @@
-//! This module both handles the global cache which stores "finished" goals,
-//! and the provisional cache which contains partially computed goals.
-//!
-//! The provisional cache is necessary when dealing with coinductive cycles.
-//!
-//! For more information about the provisional cache and coinduction in general,
-//! check out the relevant section of the rustc-dev-guide.
-//!
-//! FIXME(@lcnr): Write that section, feel free to ping me if you need help here
-//! before then or if I still haven't done that before January 2023.
-use super::overflow::OverflowData;
-use super::CanonicalGoal;
-use super::{EvalCtxt, QueryResult};
-
-use rustc_data_structures::fx::FxHashMap;
-use rustc_middle::ty::TyCtxt;
-use std::{cmp::Ordering, collections::hash_map::Entry};
-
-#[derive(Debug, Clone)]
-struct ProvisionalEntry<'tcx> {
-    // In case we have a coinductive cycle, this is the
-    // the currently least restrictive result of this goal.
-    response: QueryResult<'tcx>,
-    // The lowest element on the stack on which this result
-    // relies on. Starts out as just being the depth at which
-    // we've proven this obligation, but gets lowered to the
-    // depth of another goal if we rely on it in a cycle.
-    depth: usize,
-}
-
-struct StackElem<'tcx> {
-    goal: CanonicalGoal<'tcx>,
-    has_been_used: bool,
-}
-
-/// The cache used for goals which are currently in progress or which depend
-/// on in progress results.
-///
-/// Once we're done with a goal we can store it in the global trait solver
-/// cache of the `TyCtxt`. For goals which we're currently proving, or which
-/// have only been proven via a coinductive cycle using a goal still on our stack
-/// we have to use this separate data structure.
-///
-/// The current data structure is not perfect, so there may still be room for
-/// improvement here. We have the following requirements:
-///
-/// ## Is there is a provisional entry for the given goal:
-///
-/// ```ignore (for syntax highlighting)
-/// self.entries.get(goal)
-/// ```
-///
-/// ## Get all goals on the stack involved in a cycle:
-///
-/// ```ignore (for syntax highlighting)
-/// let entry = self.entries.get(goal).unwrap();
-/// let involved_goals = self.stack.iter().skip(entry.depth);
-/// ```
-///
-/// ## Capping the depth of all entries
-///
-/// Needed whenever we encounter a cycle. The current implementation always
-/// iterates over all entries instead of only the ones with a larger depth.
-/// Changing this may result in notable performance improvements.
-///
-/// ```ignore (for syntax highlighting)
-/// let cycle_depth = self.entries.get(goal).unwrap().depth;
-/// for e in &mut self.entries {
-///     e.depth = e.depth.min(cycle_depth);
-/// }
-/// ```
-///
-/// ## Checking whether we have to rerun the current goal
-///
-/// A goal has to be rerun if its provisional result was used in a cycle
-/// and that result is different from its final result. We update
-/// [StackElem::has_been_used] for the deepest stack element involved in a cycle.
-///
-/// ## Moving all finished goals into the global cache
-///
-/// If `stack_elem.has_been_used` is true, iterate over all entries, moving the ones
-/// with equal depth. If not, simply move this single entry.
-pub(super) struct ProvisionalCache<'tcx> {
-    stack: Vec<StackElem<'tcx>>,
-    entries: FxHashMap<CanonicalGoal<'tcx>, ProvisionalEntry<'tcx>>,
-}
-
-impl<'tcx> ProvisionalCache<'tcx> {
-    pub(super) fn empty() -> ProvisionalCache<'tcx> {
-        ProvisionalCache { stack: Vec::new(), entries: Default::default() }
-    }
-
-    pub(super) fn current_depth(&self) -> usize {
-        self.stack.len()
-    }
-}
-
-impl<'tcx> EvalCtxt<'tcx> {
-    /// Tries putting the new goal on the stack, returning an error if it is already cached.
-    ///
-    /// This correctly updates the provisional cache if there is a cycle.
-    pub(super) fn try_push_stack(
-        &mut self,
-        goal: CanonicalGoal<'tcx>,
-    ) -> Result<(), QueryResult<'tcx>> {
-        // FIXME: start by checking the global cache
-
-        // Look at the provisional cache to check for cycles.
-        let cache = &mut self.provisional_cache;
-        match cache.entries.entry(goal) {
-            // No entry, simply push this goal on the stack after dealing with overflow.
-            Entry::Vacant(v) => {
-                if self.overflow_data.has_overflow(cache.stack.len()) {
-                    return Err(self.deal_with_overflow());
-                }
-
-                v.insert(ProvisionalEntry {
-                    response: fixme_response_yes_no_constraints(),
-                    depth: cache.stack.len(),
-                });
-                cache.stack.push(StackElem { goal, has_been_used: false });
-                Ok(())
-            }
-            // We have a nested goal which relies on a goal `root` deeper in the stack.
-            //
-            // We first store that we may have to rerun `evaluate_goal` for `root` in case the
-            // provisional response is not equal to the final response. We also update the depth
-            // of all goals which recursively depend on our current goal to depend on `root`
-            // instead.
-            //
-            // Finally we can return either the provisional response for that goal if we have a
-            // coinductive cycle or an ambiguous result if the cycle is inductive.
-            Entry::Occupied(entry) => {
-                // FIXME: `ProvisionalEntry` should be `Copy`.
-                let entry = entry.get().clone();
-                cache.stack[entry.depth].has_been_used = true;
-                for provisional_entry in cache.entries.values_mut() {
-                    provisional_entry.depth = provisional_entry.depth.min(entry.depth);
-                }
-
-                // NOTE: The goals on the stack aren't the only goals involved in this cycle.
-                // We can also depend on goals which aren't part of the stack but coinductively
-                // depend on the stack themselves. We already checked whether all the goals
-                // between these goals and their root on the stack. This means that as long as
-                // each goal in a cycle is checked for coinductivity by itself simply checking
-                // the stack is enough.
-                if cache.stack[entry.depth..]
-                    .iter()
-                    .all(|g| g.goal.value.predicate.is_coinductive(self.tcx))
-                {
-                    Err(entry.response)
-                } else {
-                    Err(fixme_response_maybe_no_constraints())
-                }
-            }
-        }
-    }
-
-    /// We cannot simply store the result of [EvalCtxt::compute_goal] as we have to deal with
-    /// coinductive cycles.
-    ///
-    /// When we encounter a coinductive cycle, we have to prove the final result of that cycle
-    /// while we are still computing that result. Because of this we continously recompute the
-    /// cycle until the result of the previous iteration is equal to the final result, at which
-    /// point we are done.
-    ///
-    /// This function returns `true` if we were able to finalize the goal and `false` if it has
-    /// updated the provisional cache and we have to recompute the current goal.
-    ///
-    /// FIXME: Refer to the rustc-dev-guide entry once it exists.
-    pub(super) fn try_finalize_goal(
-        &mut self,
-        actual_goal: CanonicalGoal<'tcx>,
-        response: QueryResult<'tcx>,
-    ) -> bool {
-        let cache = &mut self.provisional_cache;
-        let StackElem { goal, has_been_used } = cache.stack.pop().unwrap();
-        assert_eq!(goal, actual_goal);
-
-        let provisional_entry = cache.entries.get_mut(&goal).unwrap();
-        // Check whether the current stack entry is the root of a cycle.
-        //
-        // If so, we either move all participants of that cycle to the global cache
-        // or, in case the provisional response used in the cycle is not equal to the
-        // final response, have to recompute the goal after updating the provisional
-        // response to the final response of this iteration.
-        if has_been_used {
-            if provisional_entry.response == response {
-                // We simply drop all entries according to an immutable condition, so
-                // query instability is not a concern here.
-                #[allow(rustc::potential_query_instability)]
-                cache.entries.retain(|goal, entry| match entry.depth.cmp(&cache.stack.len()) {
-                    Ordering::Less => true,
-                    Ordering::Equal => {
-                        Self::try_move_finished_goal_to_global_cache(
-                            self.tcx,
-                            &mut self.overflow_data,
-                            &cache.stack,
-                            // FIXME: these should be `Copy` :(
-                            goal.clone(),
-                            entry.response.clone(),
-                        );
-                        false
-                    }
-                    Ordering::Greater => bug!("entry with greater depth than the current leaf"),
-                });
-
-                true
-            } else {
-                provisional_entry.response = response;
-                cache.stack.push(StackElem { goal, has_been_used: false });
-                false
-            }
-        } else {
-            Self::try_move_finished_goal_to_global_cache(
-                self.tcx,
-                &mut self.overflow_data,
-                &cache.stack,
-                goal,
-                response,
-            );
-            cache.entries.remove(&goal);
-            true
-        }
-    }
-
-    fn try_move_finished_goal_to_global_cache(
-        tcx: TyCtxt<'tcx>,
-        overflow_data: &mut OverflowData,
-        stack: &[StackElem<'tcx>],
-        goal: CanonicalGoal<'tcx>,
-        response: QueryResult<'tcx>,
-    ) {
-        // We move goals to the global cache if we either did not hit an overflow or if it's
-        // the root goal as that will now always hit the same overflow limit.
-        //
-        // NOTE: We cannot move any non-root goals to the global cache even if their final result
-        // isn't impacted by the overflow as that goal still has unstable query dependencies
-        // because it didn't go its full depth.
-        //
-        // FIXME(@lcnr): We could still cache subtrees which are not impacted by overflow though.
-        // Tracking that info correctly isn't trivial, so I haven't implemented it for now.
-        let should_cache_globally = !overflow_data.did_overflow() || stack.is_empty();
-        if should_cache_globally {
-            // FIXME: move the provisional entry to the global cache.
-            let _ = (tcx, goal, response);
-        }
-    }
-}
-
-fn fixme_response_yes_no_constraints<'tcx>() -> QueryResult<'tcx> {
-    unimplemented!()
-}
-
-fn fixme_response_maybe_no_constraints<'tcx>() -> QueryResult<'tcx> {
-    unimplemented!()
-}
index c014d682a9aaabcdb269168dd6e050f4dae2784c..40b9bedc84fd3afd8292c2c721bee97d1d8b62de 100644 (file)
@@ -1,6 +1,5 @@
 use std::mem;
 
-use rustc_data_structures::fx::FxHashMap;
 use rustc_infer::{
     infer::InferCtxt,
     traits::{
@@ -8,9 +7,8 @@
         SelectionError, TraitEngine,
     },
 };
-use rustc_middle::ty;
 
-use super::{Certainty, EvalCtxt};
+use super::{search_graph, Certainty, EvalCtxt};
 
 /// A trait engine using the new trait solver.
 ///
@@ -52,7 +50,7 @@ fn select_all_or_error(&mut self, infcx: &InferCtxt<'tcx>) -> Vec<FulfillmentErr
             .drain(..)
             .map(|obligation| FulfillmentError {
                 obligation: obligation.clone(),
-                code: FulfillmentErrorCode::CodeSelectionError(SelectionError::Unimplemented),
+                code: FulfillmentErrorCode::CodeAmbiguity,
                 root_obligation: obligation,
             })
             .collect()
@@ -62,19 +60,22 @@ fn select_where_possible(&mut self, infcx: &InferCtxt<'tcx>) -> Vec<FulfillmentE
         let mut errors = Vec::new();
         for i in 0.. {
             if !infcx.tcx.recursion_limit().value_within_limit(i) {
-                unimplemented!("overflow")
+                unimplemented!("overflowed on pending obligations: {:?}", self.obligations);
             }
 
             let mut has_changed = false;
             for obligation in mem::take(&mut self.obligations) {
-                let mut cx = EvalCtxt::new(infcx.tcx);
-                let (changed, certainty) = match cx.evaluate_goal(infcx, obligation.clone().into())
-                {
+                let goal = obligation.clone().into();
+                let search_graph = &mut search_graph::SearchGraph::new(infcx.tcx);
+                let mut ecx = EvalCtxt::new_outside_solver(infcx, search_graph);
+                let (changed, certainty) = match ecx.evaluate_goal(goal) {
                     Ok(result) => result,
                     Err(NoSolution) => {
                         errors.push(FulfillmentError {
                             obligation: obligation.clone(),
-                            code: FulfillmentErrorCode::CodeAmbiguity,
+                            code: FulfillmentErrorCode::CodeSelectionError(
+                                SelectionError::Unimplemented,
+                            ),
                             root_obligation: obligation,
                         });
                         continue;
@@ -99,8 +100,4 @@ fn select_where_possible(&mut self, infcx: &InferCtxt<'tcx>) -> Vec<FulfillmentE
     fn pending_obligations(&self) -> Vec<PredicateObligation<'tcx>> {
         self.obligations.clone()
     }
-
-    fn relationships(&mut self) -> &mut FxHashMap<ty::TyVid, ty::FoundRelationships> {
-        unimplemented!("Should be moved out of `TraitEngine`")
-    }
 }
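
`select_where_possible` above repeatedly drains and re-evaluates the pending obligations until a full pass makes no progress, bailing out with the "overflowed on pending obligations" message if the recursion limit is hit first. A minimal standalone model of that fixpoint loop; `Goal`, `evaluate`, and the step counter are illustrative stand-ins, not the solver's types:

```rust
#[derive(Debug)]
struct Goal {
    steps_left: u32,
}

// One evaluation of a goal; returns (made_progress, finished).
fn evaluate(goal: &mut Goal) -> (bool, bool) {
    if goal.steps_left == 0 {
        return (false, true);
    }
    goal.steps_left -= 1;
    (true, goal.steps_left == 0)
}

fn select_where_possible(mut pending: Vec<Goal>, limit: usize) -> Result<Vec<Goal>, String> {
    for pass in 0.. {
        // Analogue of the recursion-limit check in the hunk above.
        if pass > limit {
            return Err(format!("overflowed on pending obligations: {pending:?}"));
        }
        let mut changed = false;
        let mut still_pending = Vec::new();
        for mut goal in pending.drain(..) {
            let (progressed, finished) = evaluate(&mut goal);
            changed |= progressed;
            if !finished {
                still_pending.push(goal);
            }
        }
        pending = still_pending;
        // Fixpoint reached: no goal made progress in this pass.
        if !changed {
            break;
        }
    }
    Ok(pending)
}

fn main() {
    // A goal that needs three rounds of evaluation resolves with nothing left over.
    let leftover = select_where_possible(vec![Goal { steps_left: 3 }], 16).unwrap();
    assert!(leftover.is_empty());
}
```
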
index 436f4eea6625bcdf45b7b3e66bec6cba9d4bee80..42f597c781d257e0f97506d931d5121b5d5a83fd 100644 (file)
@@ -1,23 +1,35 @@
-use rustc_infer::infer::canonical::CanonicalVarValues;
+use rustc_infer::infer::at::ToTrace;
 use rustc_infer::infer::type_variable::{TypeVariableOrigin, TypeVariableOriginKind};
-use rustc_infer::infer::InferCtxt;
+use rustc_infer::infer::{InferCtxt, InferOk, LateBoundRegionConversionTime};
 use rustc_infer::traits::query::NoSolution;
-use rustc_middle::ty::Ty;
+use rustc_infer::traits::ObligationCause;
+use rustc_middle::infer::unify_key::{ConstVariableOrigin, ConstVariableOriginKind};
+use rustc_middle::ty::{self, Ty, TypeFoldable};
 use rustc_span::DUMMY_SP;
 
-use crate::solve::ExternalConstraints;
-
-use super::{Certainty, QueryResult, Response};
+use super::Goal;
 
 /// Methods used inside of the canonical queries of the solver.
+///
+/// Most notably these do not care about diagnostics information.
+/// If you find this while looking for methods to use outside of the
+/// solver, you may look at the implementation of these methods for
+/// help.
 pub(super) trait InferCtxtExt<'tcx> {
     fn next_ty_infer(&self) -> Ty<'tcx>;
+    fn next_const_infer(&self, ty: Ty<'tcx>) -> ty::Const<'tcx>;
+
+    fn eq<T: ToTrace<'tcx>>(
+        &self,
+        param_env: ty::ParamEnv<'tcx>,
+        lhs: T,
+        rhs: T,
+    ) -> Result<Vec<Goal<'tcx, ty::Predicate<'tcx>>>, NoSolution>;
 
-    fn make_canonical_response(
+    fn instantiate_bound_vars_with_infer<T: TypeFoldable<'tcx> + Copy>(
         &self,
-        var_values: CanonicalVarValues<'tcx>,
-        certainty: Certainty,
-    ) -> QueryResult<'tcx>;
+        value: ty::Binder<'tcx, T>,
+    ) -> T;
 }
 
 impl<'tcx> InferCtxtExt<'tcx> for InferCtxt<'tcx> {
@@ -27,29 +39,40 @@ fn next_ty_infer(&self) -> Ty<'tcx> {
             span: DUMMY_SP,
         })
     }
+    fn next_const_infer(&self, ty: Ty<'tcx>) -> ty::Const<'tcx> {
+        self.next_const_var(
+            ty,
+            ConstVariableOrigin { kind: ConstVariableOriginKind::MiscVariable, span: DUMMY_SP },
+        )
+    }
 
-    fn make_canonical_response(
+    #[instrument(level = "debug", skip(self, param_env), ret)]
+    fn eq<T: ToTrace<'tcx>>(
         &self,
-        var_values: CanonicalVarValues<'tcx>,
-        certainty: Certainty,
-    ) -> QueryResult<'tcx> {
-        let external_constraints = take_external_constraints(self)?;
-
-        Ok(self.canonicalize_response(Response { var_values, external_constraints, certainty }))
+        param_env: ty::ParamEnv<'tcx>,
+        lhs: T,
+        rhs: T,
+    ) -> Result<Vec<Goal<'tcx, ty::Predicate<'tcx>>>, NoSolution> {
+        self.at(&ObligationCause::dummy(), param_env)
+            .define_opaque_types(false)
+            .eq(lhs, rhs)
+            .map(|InferOk { value: (), obligations }| {
+                obligations.into_iter().map(|o| o.into()).collect()
+            })
+            .map_err(|e| {
+                debug!(?e, "failed to equate");
+                NoSolution
+            })
     }
-}
 
-#[instrument(level = "debug", skip(infcx), ret)]
-fn take_external_constraints<'tcx>(
-    infcx: &InferCtxt<'tcx>,
-) -> Result<ExternalConstraints<'tcx>, NoSolution> {
-    let region_obligations = infcx.take_registered_region_obligations();
-    let opaque_types = infcx.take_opaque_types_for_query_response();
-    Ok(ExternalConstraints {
-        // FIXME: Now that's definitely wrong :)
-        //
-        // Should also do the leak check here I think
-        regions: drop(region_obligations),
-        opaque_types,
-    })
+    fn instantiate_bound_vars_with_infer<T: TypeFoldable<'tcx> + Copy>(
+        &self,
+        value: ty::Binder<'tcx, T>,
+    ) -> T {
+        self.replace_bound_vars_with_fresh_vars(
+            DUMMY_SP,
+            LateBoundRegionConversionTime::HigherRankedType,
+            value,
+        )
+    }
 }
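
To make the contract of the new `eq` helper concrete: instead of registering obligations, it either fails with `NoSolution` or hands back the remaining sub-problems as nested goals. Below is a minimal, self-contained Rust sketch of that pattern; the `Term`, `Goal`, and `NoSolution` types are toy stand-ins invented for this example, not rustc's actual `InferCtxt` API.

```rust
#[derive(Debug, Clone)]
enum Term {
    Var(u32),                     // an inference variable
    App(&'static str, Vec<Term>), // a constructor applied to arguments
}

#[derive(Debug)]
struct NoSolution;

#[derive(Debug)]
struct Goal {
    lhs: Term,
    rhs: Term,
}

/// Try to equate `lhs` and `rhs`: mismatched constructors are a hard error,
/// anything involving an inference variable is deferred as a nested goal.
fn eq(lhs: &Term, rhs: &Term) -> Result<Vec<Goal>, NoSolution> {
    match (lhs, rhs) {
        (Term::App(f, xs), Term::App(g, ys)) => {
            if f != g || xs.len() != ys.len() {
                return Err(NoSolution);
            }
            let mut nested = Vec::new();
            for (x, y) in xs.iter().zip(ys) {
                nested.extend(eq(x, y)?);
            }
            Ok(nested)
        }
        // Defer variable cases instead of solving them eagerly.
        _ => Ok(vec![Goal { lhs: lhs.clone(), rhs: rhs.clone() }]),
    }
}

fn main() {
    let lhs = Term::App("Vec", vec![Term::Var(0)]);
    let rhs = Term::App("Vec", vec![Term::App("u32", vec![])]);
    // Succeeds with one nested goal: `?0 = u32`.
    println!("{:?}", eq(&lhs, &rhs));

    let bad = Term::App("Option", vec![Term::Var(0)]);
    // Fails outright: `Vec<_>` and `Option<_>` can never be equal.
    println!("{:?}", eq(&lhs, &bad));
}
```
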
index 7f5e3208f4e7c0fd03dba54521b106c640ac214b..da2a1a19957e128f491b8a1bbc3b3384da637581 100644 (file)
 
 use std::mem;
 
-use rustc_infer::infer::canonical::OriginalQueryValues;
-use rustc_infer::infer::{InferCtxt, TyCtxtInferExt};
+use rustc_hir::def_id::DefId;
+use rustc_infer::infer::canonical::{Canonical, CanonicalVarKind, CanonicalVarValues};
+use rustc_infer::infer::canonical::{OriginalQueryValues, QueryRegionConstraints, QueryResponse};
+use rustc_infer::infer::{InferCtxt, InferOk, TyCtxtInferExt};
 use rustc_infer::traits::query::NoSolution;
 use rustc_infer::traits::Obligation;
-use rustc_middle::infer::canonical::{Canonical, CanonicalVarValues};
+use rustc_middle::infer::canonical::Certainty as OldCertainty;
 use rustc_middle::ty::{self, Ty, TyCtxt};
-use rustc_middle::ty::{RegionOutlivesPredicate, ToPredicate, TypeOutlivesPredicate};
+use rustc_middle::ty::{
+    CoercePredicate, RegionOutlivesPredicate, SubtypePredicate, ToPredicate, TypeOutlivesPredicate,
+};
 use rustc_span::DUMMY_SP;
 
-use self::infcx_ext::InferCtxtExt;
+use crate::traits::ObligationCause;
 
 mod assembly;
-mod cache;
 mod fulfill;
 mod infcx_ext;
-mod overflow;
 mod project_goals;
+mod search_graph;
 mod trait_goals;
 
 pub use fulfill::FulfillmentCtxt;
@@ -87,6 +90,8 @@ pub enum Certainty {
 }
 
 impl Certainty {
+    pub const AMBIGUOUS: Certainty = Certainty::Maybe(MaybeCause::Ambiguity);
+
     /// When proving multiple goals using **AND**, e.g. nested obligations for an impl,
     /// use this function to unify the certainty of these goals
     pub fn unify_and(self, other: Certainty) -> Certainty {
@@ -119,7 +124,7 @@ pub enum MaybeCause {
 }
 
 /// Additional constraints returned on success.
-#[derive(Debug, PartialEq, Eq, Clone, Hash, TypeFoldable, TypeVisitable)]
+#[derive(Debug, PartialEq, Eq, Clone, Hash, TypeFoldable, TypeVisitable, Default)]
 pub struct ExternalConstraints<'tcx> {
     // FIXME: implement this.
     regions: (),
@@ -142,45 +147,42 @@ pub trait TyCtxtExt<'tcx> {
 
 impl<'tcx> TyCtxtExt<'tcx> for TyCtxt<'tcx> {
     fn evaluate_goal(self, goal: CanonicalGoal<'tcx>) -> QueryResult<'tcx> {
-        let mut cx = EvalCtxt::new(self);
-        cx.evaluate_canonical_goal(goal)
+        let mut search_graph = search_graph::SearchGraph::new(self);
+        EvalCtxt::evaluate_canonical_goal(self, &mut search_graph, goal)
     }
 }
 
-struct EvalCtxt<'tcx> {
-    tcx: TyCtxt<'tcx>,
+struct EvalCtxt<'a, 'tcx> {
+    infcx: &'a InferCtxt<'tcx>,
+    var_values: CanonicalVarValues<'tcx>,
 
-    provisional_cache: cache::ProvisionalCache<'tcx>,
-    overflow_data: overflow::OverflowData,
+    search_graph: &'a mut search_graph::SearchGraph<'tcx>,
 }
 
-impl<'tcx> EvalCtxt<'tcx> {
-    fn new(tcx: TyCtxt<'tcx>) -> EvalCtxt<'tcx> {
-        EvalCtxt {
-            tcx,
-            provisional_cache: cache::ProvisionalCache::empty(),
-            overflow_data: overflow::OverflowData::new(tcx),
-        }
+impl<'a, 'tcx> EvalCtxt<'a, 'tcx> {
+    fn tcx(&self) -> TyCtxt<'tcx> {
+        self.infcx.tcx
     }
 
-    /// Recursively evaluates `goal`, returning whether any inference vars have
-    /// been constrained and the certainty of the result.
-    fn evaluate_goal(
-        &mut self,
-        infcx: &InferCtxt<'tcx>,
-        goal: Goal<'tcx, ty::Predicate<'tcx>>,
-    ) -> Result<(bool, Certainty), NoSolution> {
-        let mut orig_values = OriginalQueryValues::default();
-        let canonical_goal = infcx.canonicalize_query(goal, &mut orig_values);
-        let canonical_response = self.evaluate_canonical_goal(canonical_goal)?;
-        Ok((
-            true, // FIXME: check whether `var_values` are an identity substitution.
-            fixme_instantiate_canonical_query_response(infcx, &orig_values, canonical_response),
-        ))
+    /// Creates a new evaluation context outside of the trait solver.
+    ///
+    /// With this solver, making a canonical response doesn't make much sense.
+    /// The `search_graph` for this solver has to be completely empty.
+    fn new_outside_solver(
+        infcx: &'a InferCtxt<'tcx>,
+        search_graph: &'a mut search_graph::SearchGraph<'tcx>,
+    ) -> EvalCtxt<'a, 'tcx> {
+        assert!(search_graph.is_empty());
+        EvalCtxt { infcx, var_values: CanonicalVarValues::dummy(), search_graph }
     }
 
-    fn evaluate_canonical_goal(&mut self, goal: CanonicalGoal<'tcx>) -> QueryResult<'tcx> {
-        match self.try_push_stack(goal) {
+    #[instrument(level = "debug", skip(tcx, search_graph), ret)]
+    fn evaluate_canonical_goal(
+        tcx: TyCtxt<'tcx>,
+        search_graph: &'a mut search_graph::SearchGraph<'tcx>,
+        canonical_goal: CanonicalGoal<'tcx>,
+    ) -> QueryResult<'tcx> {
+        match search_graph.try_push_stack(tcx, canonical_goal) {
             Ok(()) => {}
             // Our goal is already on the stack, eager return.
             Err(response) => return response,
@@ -191,87 +193,169 @@ fn evaluate_canonical_goal(&mut self, goal: CanonicalGoal<'tcx>) -> QueryResult<
         //
         // FIXME: Similar to `evaluate_all`, this has to check for overflow.
         loop {
-            let result = self.compute_goal(goal);
+            let (ref infcx, goal, var_values) =
+                tcx.infer_ctxt().build_with_canonical(DUMMY_SP, &canonical_goal);
+            let mut ecx = EvalCtxt { infcx, var_values, search_graph };
+            let result = ecx.compute_goal(goal);
 
             // FIXME: `Response` should be `Copy`
-            if self.try_finalize_goal(goal, result.clone()) {
+            if search_graph.try_finalize_goal(tcx, canonical_goal, result.clone()) {
                 return result;
             }
         }
     }
 
-    fn compute_goal(&mut self, canonical_goal: CanonicalGoal<'tcx>) -> QueryResult<'tcx> {
-        // WARNING: We're looking at a canonical value without instantiating it here.
-        //
-        // We have to be incredibly careful to not change the order of bound variables or
-        // remove any. As we go from `Goal<'tcx, Predicate>` to `Goal` with the variants
-        // of `PredicateKind` this is the case and it is and faster than instantiating and
-        // recanonicalizing.
-        let Goal { param_env, predicate } = canonical_goal.value;
-        if let Some(kind) = predicate.kind().no_bound_vars() {
+    fn make_canonical_response(&self, certainty: Certainty) -> QueryResult<'tcx> {
+        let external_constraints = take_external_constraints(self.infcx)?;
+
+        Ok(self.infcx.canonicalize_response(Response {
+            var_values: self.var_values.clone(),
+            external_constraints,
+            certainty,
+        }))
+    }
+
+    /// Recursively evaluates `goal`, returning whether any inference vars have
+    /// been constrained and the certainty of the result.
+    fn evaluate_goal(
+        &mut self,
+        goal: Goal<'tcx, ty::Predicate<'tcx>>,
+    ) -> Result<(bool, Certainty), NoSolution> {
+        let mut orig_values = OriginalQueryValues::default();
+        let canonical_goal = self.infcx.canonicalize_query(goal, &mut orig_values);
+        let canonical_response =
+            EvalCtxt::evaluate_canonical_goal(self.tcx(), self.search_graph, canonical_goal)?;
+        Ok((
+            !canonical_response.value.var_values.is_identity(),
+            instantiate_canonical_query_response(self.infcx, &orig_values, canonical_response),
+        ))
+    }
+
+    fn compute_goal(&mut self, goal: Goal<'tcx, ty::Predicate<'tcx>>) -> QueryResult<'tcx> {
+        let Goal { param_env, predicate } = goal;
+        let kind = predicate.kind();
+        if let Some(kind) = kind.no_bound_vars() {
             match kind {
-                ty::PredicateKind::Clause(ty::Clause::Trait(predicate)) => self.compute_trait_goal(
-                    canonical_goal.unchecked_rebind(Goal { param_env, predicate }),
-                ),
-                ty::PredicateKind::Clause(ty::Clause::Projection(predicate)) => self
-                    .compute_projection_goal(
-                        canonical_goal.unchecked_rebind(Goal { param_env, predicate }),
-                    ),
-                ty::PredicateKind::Clause(ty::Clause::TypeOutlives(predicate)) => self
-                    .compute_type_outlives_goal(
-                        canonical_goal.unchecked_rebind(Goal { param_env, predicate }),
-                    ),
-                ty::PredicateKind::Clause(ty::Clause::RegionOutlives(predicate)) => self
-                    .compute_region_outlives_goal(
-                        canonical_goal.unchecked_rebind(Goal { param_env, predicate }),
-                    ),
+                ty::PredicateKind::Clause(ty::Clause::Trait(predicate)) => {
+                    self.compute_trait_goal(Goal { param_env, predicate })
+                }
+                ty::PredicateKind::Clause(ty::Clause::Projection(predicate)) => {
+                    self.compute_projection_goal(Goal { param_env, predicate })
+                }
+                ty::PredicateKind::Clause(ty::Clause::TypeOutlives(predicate)) => {
+                    self.compute_type_outlives_goal(Goal { param_env, predicate })
+                }
+                ty::PredicateKind::Clause(ty::Clause::RegionOutlives(predicate)) => {
+                    self.compute_region_outlives_goal(Goal { param_env, predicate })
+                }
+                ty::PredicateKind::Subtype(predicate) => {
+                    self.compute_subtype_goal(Goal { param_env, predicate })
+                }
+                ty::PredicateKind::Coerce(predicate) => {
+                    self.compute_coerce_goal(Goal { param_env, predicate })
+                }
+                ty::PredicateKind::ClosureKind(def_id, substs, kind) => self
+                    .compute_closure_kind_goal(Goal {
+                        param_env,
+                        predicate: (def_id, substs, kind),
+                    }),
+                ty::PredicateKind::Ambiguous => self.make_canonical_response(Certainty::AMBIGUOUS),
                 // FIXME: implement these predicates :)
                 ty::PredicateKind::WellFormed(_)
                 | ty::PredicateKind::ObjectSafe(_)
-                | ty::PredicateKind::ClosureKind(_, _, _)
-                | ty::PredicateKind::Subtype(_)
-                | ty::PredicateKind::Coerce(_)
                 | ty::PredicateKind::ConstEvaluatable(_)
-                | ty::PredicateKind::ConstEquate(_, _)
-                | ty::PredicateKind::TypeWellFormedFromEnv(_)
-                | ty::PredicateKind::Ambiguous => unimplemented!(),
+                | ty::PredicateKind::ConstEquate(_, _) => {
+                    self.make_canonical_response(Certainty::Yes)
+                }
+                ty::PredicateKind::TypeWellFormedFromEnv(..) => {
+                    bug!("TypeWellFormedFromEnv is only used for Chalk")
+                }
             }
         } else {
-            let (infcx, goal, var_values) =
-                self.tcx.infer_ctxt().build_with_canonical(DUMMY_SP, &canonical_goal);
-            let kind = infcx.replace_bound_vars_with_placeholders(goal.predicate.kind());
-            let goal = goal.with(self.tcx, ty::Binder::dummy(kind));
-            let (_, certainty) = self.evaluate_goal(&infcx, goal)?;
-            infcx.make_canonical_response(var_values, certainty)
+            let kind = self.infcx.replace_bound_vars_with_placeholders(kind);
+            let goal = goal.with(self.tcx(), ty::Binder::dummy(kind));
+            let (_, certainty) = self.evaluate_goal(goal)?;
+            self.make_canonical_response(certainty)
         }
     }
 
     fn compute_type_outlives_goal(
         &mut self,
-        _goal: CanonicalGoal<'tcx, TypeOutlivesPredicate<'tcx>>,
+        _goal: Goal<'tcx, TypeOutlivesPredicate<'tcx>>,
     ) -> QueryResult<'tcx> {
-        todo!()
+        self.make_canonical_response(Certainty::Yes)
     }
 
     fn compute_region_outlives_goal(
         &mut self,
-        _goal: CanonicalGoal<'tcx, RegionOutlivesPredicate<'tcx>>,
+        _goal: Goal<'tcx, RegionOutlivesPredicate<'tcx>>,
+    ) -> QueryResult<'tcx> {
+        self.make_canonical_response(Certainty::Yes)
+    }
+
+    fn compute_coerce_goal(
+        &mut self,
+        goal: Goal<'tcx, CoercePredicate<'tcx>>,
+    ) -> QueryResult<'tcx> {
+        self.compute_subtype_goal(Goal {
+            param_env: goal.param_env,
+            predicate: SubtypePredicate {
+                a_is_expected: false,
+                a: goal.predicate.a,
+                b: goal.predicate.b,
+            },
+        })
+    }
+
+    fn compute_subtype_goal(
+        &mut self,
+        goal: Goal<'tcx, SubtypePredicate<'tcx>>,
+    ) -> QueryResult<'tcx> {
+        if goal.predicate.a.is_ty_var() && goal.predicate.b.is_ty_var() {
+            // FIXME: Do we want to register a subtype relation between these vars?
+            // That won't actually reflect in the query response, so it seems moot.
+            self.make_canonical_response(Certainty::AMBIGUOUS)
+        } else {
+            self.infcx.probe(|_| {
+                let InferOk { value: (), obligations } = self
+                    .infcx
+                    .at(&ObligationCause::dummy(), goal.param_env)
+                    .sub(goal.predicate.a, goal.predicate.b)?;
+                self.evaluate_all_and_make_canonical_response(
+                    obligations.into_iter().map(|pred| pred.into()).collect(),
+                )
+            })
+        }
+    }
+
+    fn compute_closure_kind_goal(
+        &mut self,
+        goal: Goal<'tcx, (DefId, ty::SubstsRef<'tcx>, ty::ClosureKind)>,
     ) -> QueryResult<'tcx> {
-        todo!()
+        let (_, substs, expected_kind) = goal.predicate;
+        let found_kind = substs.as_closure().kind_ty().to_opt_closure_kind();
+
+        let Some(found_kind) = found_kind else {
+            return self.make_canonical_response(Certainty::AMBIGUOUS);
+        };
+        if found_kind.extends(expected_kind) {
+            self.make_canonical_response(Certainty::Yes)
+        } else {
+            Err(NoSolution)
+        }
     }
 }
 
-impl<'tcx> EvalCtxt<'tcx> {
+impl<'tcx> EvalCtxt<'_, 'tcx> {
     fn evaluate_all(
         &mut self,
-        infcx: &InferCtxt<'tcx>,
         mut goals: Vec<Goal<'tcx, ty::Predicate<'tcx>>>,
     ) -> Result<Certainty, NoSolution> {
         let mut new_goals = Vec::new();
         self.repeat_while_none(|this| {
             let mut has_changed = Err(Certainty::Yes);
             for goal in goals.drain(..) {
-                let (changed, certainty) = match this.evaluate_goal(infcx, goal) {
+                let (changed, certainty) = match this.evaluate_goal(goal) {
                     Ok(result) => result,
                     Err(NoSolution) => return Some(Err(NoSolution)),
                 };
@@ -298,12 +382,88 @@ fn evaluate_all(
             }
         })
     }
+
+    fn evaluate_all_and_make_canonical_response(
+        &mut self,
+        goals: Vec<Goal<'tcx, ty::Predicate<'tcx>>>,
+    ) -> QueryResult<'tcx> {
+        self.evaluate_all(goals).and_then(|certainty| self.make_canonical_response(certainty))
+    }
 }
 
-fn fixme_instantiate_canonical_query_response<'tcx>(
-    _: &InferCtxt<'tcx>,
-    _: &OriginalQueryValues<'tcx>,
-    _: CanonicalResponse<'tcx>,
+#[instrument(level = "debug", skip(infcx), ret)]
+fn take_external_constraints<'tcx>(
+    infcx: &InferCtxt<'tcx>,
+) -> Result<ExternalConstraints<'tcx>, NoSolution> {
+    let region_obligations = infcx.take_registered_region_obligations();
+    let opaque_types = infcx.take_opaque_types_for_query_response();
+    Ok(ExternalConstraints {
+        // FIXME: Now that's definitely wrong :)
+        //
+        // Should also do the leak check here I think
+        regions: drop(region_obligations),
+        opaque_types,
+    })
+}
+
+fn instantiate_canonical_query_response<'tcx>(
+    infcx: &InferCtxt<'tcx>,
+    original_values: &OriginalQueryValues<'tcx>,
+    response: CanonicalResponse<'tcx>,
 ) -> Certainty {
-    unimplemented!()
+    let Ok(InferOk { value, obligations }) = infcx
+        .instantiate_query_response_and_region_obligations(
+            &ObligationCause::dummy(),
+            ty::ParamEnv::empty(),
+            original_values,
+            &response.unchecked_map(|resp| QueryResponse {
+                var_values: resp.var_values,
+                region_constraints: QueryRegionConstraints::default(),
+                certainty: match resp.certainty {
+                    Certainty::Yes => OldCertainty::Proven,
+                    Certainty::Maybe(_) => OldCertainty::Ambiguous,
+                },
+                opaque_types: resp.external_constraints.opaque_types,
+                value: resp.certainty,
+            }),
+        ) else { bug!(); };
+    assert!(obligations.is_empty());
+    value
+}
+
+pub(super) fn response_no_constraints<'tcx>(
+    tcx: TyCtxt<'tcx>,
+    goal: Canonical<'tcx, impl Sized>,
+    certainty: Certainty,
+) -> QueryResult<'tcx> {
+    let var_values = goal
+        .variables
+        .iter()
+        .enumerate()
+        .map(|(i, info)| match info.kind {
+            CanonicalVarKind::Ty(_) | CanonicalVarKind::PlaceholderTy(_) => {
+                tcx.mk_ty(ty::Bound(ty::INNERMOST, ty::BoundVar::from_usize(i).into())).into()
+            }
+            CanonicalVarKind::Region(_) | CanonicalVarKind::PlaceholderRegion(_) => {
+                let br = ty::BoundRegion {
+                    var: ty::BoundVar::from_usize(i),
+                    kind: ty::BrAnon(i as u32, None),
+                };
+                tcx.mk_region(ty::ReLateBound(ty::INNERMOST, br)).into()
+            }
+            CanonicalVarKind::Const(_, ty) | CanonicalVarKind::PlaceholderConst(_, ty) => tcx
+                .mk_const(ty::ConstKind::Bound(ty::INNERMOST, ty::BoundVar::from_usize(i)), ty)
+                .into(),
+        })
+        .collect();
+
+    Ok(Canonical {
+        max_universe: goal.max_universe,
+        variables: goal.variables,
+        value: Response {
+            var_values: CanonicalVarValues { var_values },
+            external_constraints: Default::default(),
+            certainty,
+        },
+    })
 }
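
The `Certainty::AMBIGUOUS` constant and `unify_and` added above combine the certainty of several nested goals that all have to hold. Here is a small runnable sketch of that lattice; the precedence between the two `Maybe` causes is an assumption made for the example and is not taken from this diff.

```rust
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
enum MaybeCause {
    Ambiguity,
    Overflow,
}

#[derive(Debug, Clone, Copy, PartialEq, Eq)]
enum Certainty {
    Yes,
    Maybe(MaybeCause),
}

impl Certainty {
    const AMBIGUOUS: Certainty = Certainty::Maybe(MaybeCause::Ambiguity);

    /// Combine the certainty of two goals that both have to hold (**AND**).
    fn unify_and(self, other: Certainty) -> Certainty {
        match (self, other) {
            (Certainty::Yes, Certainty::Yes) => Certainty::Yes,
            // Assumed precedence for this sketch: overflow on either side dominates.
            (Certainty::Maybe(MaybeCause::Overflow), _)
            | (_, Certainty::Maybe(MaybeCause::Overflow)) => {
                Certainty::Maybe(MaybeCause::Overflow)
            }
            // Any remaining uncertainty is plain ambiguity.
            _ => Certainty::AMBIGUOUS,
        }
    }
}

fn main() {
    let nested_goals = [Certainty::Yes, Certainty::AMBIGUOUS, Certainty::Yes];
    let combined = nested_goals.into_iter().fold(Certainty::Yes, Certainty::unify_and);
    // A single ambiguous nested goal makes the whole conjunction ambiguous.
    assert_eq!(combined, Certainty::AMBIGUOUS);
    println!("{combined:?}");
}
```
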
diff --git a/compiler/rustc_trait_selection/src/solve/overflow.rs b/compiler/rustc_trait_selection/src/solve/overflow.rs
deleted file mode 100644 (file)
index fdd6adb..0000000
+++ /dev/null
@@ -1,80 +0,0 @@
-use rustc_infer::traits::query::NoSolution;
-use rustc_middle::ty::TyCtxt;
-use rustc_session::Limit;
-
-use super::{Certainty, EvalCtxt, MaybeCause, QueryResult};
-
-/// When detecting a solver overflow, we return ambiguity. Overflow can be
-/// *hidden* by either a fatal error in an **AND** or a trivial success in an **OR**.
-///
-/// This is in issue in case of exponential blowup, e.g. if each goal on the stack
-/// has multiple nested (overflowing) candidates. To deal with this, we reduce the limit
-/// used by the solver when hitting the default limit for the first time.
-///
-/// FIXME: Get tests where always using the `default_limit` results in a hang and refer
-/// to them here. We can also improve the overflow strategy if necessary.
-pub(super) struct OverflowData {
-    default_limit: Limit,
-    current_limit: Limit,
-    /// When proving an **AND** we have to repeatedly iterate over the yet unproven goals.
-    ///
-    /// Because of this each iteration also increases the depth in addition to the stack
-    /// depth.
-    additional_depth: usize,
-}
-
-impl OverflowData {
-    pub(super) fn new(tcx: TyCtxt<'_>) -> OverflowData {
-        let default_limit = tcx.recursion_limit();
-        OverflowData { default_limit, current_limit: default_limit, additional_depth: 0 }
-    }
-
-    #[inline]
-    pub(super) fn did_overflow(&self) -> bool {
-        self.default_limit.0 != self.current_limit.0
-    }
-
-    #[inline]
-    pub(super) fn has_overflow(&self, depth: usize) -> bool {
-        !self.current_limit.value_within_limit(depth + self.additional_depth)
-    }
-
-    /// Updating the current limit when hitting overflow.
-    fn deal_with_overflow(&mut self) {
-        // When first hitting overflow we reduce the overflow limit
-        // for all future goals to prevent hangs if there's an exponental
-        // blowup.
-        self.current_limit.0 = self.default_limit.0 / 8;
-    }
-}
-
-impl<'tcx> EvalCtxt<'tcx> {
-    pub(super) fn deal_with_overflow(&mut self) -> QueryResult<'tcx> {
-        self.overflow_data.deal_with_overflow();
-        fixme_response_overflow_no_constraints()
-    }
-
-    /// A `while`-loop which tracks overflow.
-    pub(super) fn repeat_while_none(
-        &mut self,
-        mut loop_body: impl FnMut(&mut Self) -> Option<Result<Certainty, NoSolution>>,
-    ) -> Result<Certainty, NoSolution> {
-        let start_depth = self.overflow_data.additional_depth;
-        let depth = self.provisional_cache.current_depth();
-        while !self.overflow_data.has_overflow(depth) {
-            if let Some(result) = loop_body(self) {
-                self.overflow_data.additional_depth = start_depth;
-                return result;
-            }
-
-            self.overflow_data.additional_depth += 1;
-        }
-        self.overflow_data.additional_depth = start_depth;
-        self.overflow_data.deal_with_overflow();
-        Ok(Certainty::Maybe(MaybeCause::Overflow))
-    }
-}
-
-fn fixme_response_overflow_no_constraints<'tcx>() -> QueryResult<'tcx> {
-    unimplemented!()
-}
index e9140507192e1d296ec0a1671f383a62004ba7e4..32e15f03998b3598431b58e417d87938e5cab02d 100644 (file)
 use crate::traits::{specialization_graph, translate_substs};
 
-use super::assembly::{self, AssemblyCtxt};
-use super::{CanonicalGoal, EvalCtxt, Goal, QueryResult};
+use super::assembly::{self, Candidate, CandidateSource};
+use super::infcx_ext::InferCtxtExt;
+use super::trait_goals::structural_traits;
+use super::{Certainty, EvalCtxt, Goal, QueryResult};
 use rustc_errors::ErrorGuaranteed;
 use rustc_hir::def::DefKind;
 use rustc_hir::def_id::DefId;
-use rustc_infer::infer::{InferCtxt, InferOk};
+use rustc_infer::infer::InferCtxt;
 use rustc_infer::traits::query::NoSolution;
 use rustc_infer::traits::specialization_graph::LeafDef;
-use rustc_infer::traits::{ObligationCause, Reveal};
+use rustc_infer::traits::Reveal;
 use rustc_middle::ty::fast_reject::{DeepRejectCtxt, TreatParams};
-use rustc_middle::ty::ProjectionPredicate;
-use rustc_middle::ty::TypeVisitable;
 use rustc_middle::ty::{self, Ty, TyCtxt};
+use rustc_middle::ty::{ProjectionPredicate, TypeSuperVisitable, TypeVisitor};
+use rustc_middle::ty::{ToPredicate, TypeVisitable};
 use rustc_span::DUMMY_SP;
 use std::iter;
+use std::ops::ControlFlow;
 
-#[allow(dead_code)] // FIXME: implement and use all variants.
-#[derive(Debug, Clone, Copy)]
-pub(super) enum CandidateSource {
-    Impl(DefId),
-    ParamEnv(usize),
-    Builtin,
-}
-
-type Candidate<'tcx> = assembly::Candidate<'tcx, ProjectionPredicate<'tcx>>;
-
-impl<'tcx> EvalCtxt<'tcx> {
+impl<'tcx> EvalCtxt<'_, 'tcx> {
     pub(super) fn compute_projection_goal(
         &mut self,
-        goal: CanonicalGoal<'tcx, ProjectionPredicate<'tcx>>,
+        goal: Goal<'tcx, ProjectionPredicate<'tcx>>,
     ) -> QueryResult<'tcx> {
-        let candidates = AssemblyCtxt::assemble_and_evaluate_candidates(self, goal);
-        self.merge_project_candidates(candidates)
+        // To only compute normalization once for each projection we only
+        // normalize if the expected term is an unconstrained inference variable.
+        //
+        // E.g. for `<T as Trait>::Assoc = u32` we recursively compute the goal
+        // `exists<U> <T as Trait>::Assoc = U` and then take the resulting type for
+        // `U` and equate it with `u32`. This means that we don't need a separate
+        // projection cache in the solver.
+        if self.term_is_fully_unconstrained(goal) {
+            let candidates = self.assemble_and_evaluate_candidates(goal);
+            self.merge_project_candidates(candidates)
+        } else {
+            let predicate = goal.predicate;
+            let unconstrained_rhs = match predicate.term.unpack() {
+                ty::TermKind::Ty(_) => self.infcx.next_ty_infer().into(),
+                ty::TermKind::Const(ct) => self.infcx.next_const_infer(ct.ty()).into(),
+            };
+            let unconstrained_predicate = ty::Clause::Projection(ProjectionPredicate {
+                projection_ty: goal.predicate.projection_ty,
+                term: unconstrained_rhs,
+            });
+            let (_has_changed, normalize_certainty) =
+                self.evaluate_goal(goal.with(self.tcx(), unconstrained_predicate))?;
+
+            let nested_eq_goals =
+                self.infcx.eq(goal.param_env, unconstrained_rhs, predicate.term)?;
+            let eval_certainty = self.evaluate_all(nested_eq_goals)?;
+            self.make_canonical_response(normalize_certainty.unify_and(eval_certainty))
+        }
+    }
+
+    /// Is the projection predicate of the form `exists<T> <Ty as Trait>::Assoc = T`.
+    ///
+    /// This is the case if the `term` is an inference variable in the innermost universe
+    /// and does not occur in any other part of the predicate.
+    fn term_is_fully_unconstrained(&self, goal: Goal<'tcx, ProjectionPredicate<'tcx>>) -> bool {
+        let infcx = self.infcx;
+        let term_is_infer = match goal.predicate.term.unpack() {
+            ty::TermKind::Ty(ty) => {
+                if let &ty::Infer(ty::TyVar(vid)) = ty.kind() {
+                    match infcx.probe_ty_var(vid) {
+                        Ok(value) => bug!("resolved var in query: {goal:?} {value:?}"),
+                        Err(universe) => universe == infcx.universe(),
+                    }
+                } else {
+                    false
+                }
+            }
+            ty::TermKind::Const(ct) => {
+                if let ty::ConstKind::Infer(ty::InferConst::Var(vid)) = ct.kind() {
+                    match self.infcx.probe_const_var(vid) {
+                        Ok(value) => bug!("resolved var in query: {goal:?} {value:?}"),
+                        Err(universe) => universe == infcx.universe(),
+                    }
+                } else {
+                    false
+                }
+            }
+        };
+
+        // Guard against `<T as Trait<?0>>::Assoc = ?0`.
+        struct ContainsTerm<'tcx> {
+            term: ty::Term<'tcx>,
+        }
+        impl<'tcx> TypeVisitor<'tcx> for ContainsTerm<'tcx> {
+            type BreakTy = ();
+            fn visit_ty(&mut self, t: Ty<'tcx>) -> ControlFlow<Self::BreakTy> {
+                if t.needs_infer() {
+                    if ty::Term::from(t) == self.term {
+                        ControlFlow::BREAK
+                    } else {
+                        t.super_visit_with(self)
+                    }
+                } else {
+                    ControlFlow::CONTINUE
+                }
+            }
+
+            fn visit_const(&mut self, c: ty::Const<'tcx>) -> ControlFlow<Self::BreakTy> {
+                if c.needs_infer() {
+                    if ty::Term::from(c) == self.term {
+                        ControlFlow::BREAK
+                    } else {
+                        c.super_visit_with(self)
+                    }
+                } else {
+                    ControlFlow::CONTINUE
+                }
+            }
+        }
+
+        let mut visitor = ContainsTerm { term: goal.predicate.term };
+
+        term_is_infer
+            && goal.predicate.projection_ty.visit_with(&mut visitor).is_continue()
+            && goal.param_env.visit_with(&mut visitor).is_continue()
     }
 
     fn merge_project_candidates(
@@ -83,14 +169,13 @@ fn project_candidate_should_be_dropped_in_favor_of(
         match (candidate.source, other.source) {
             (CandidateSource::Impl(_), _)
             | (CandidateSource::ParamEnv(_), _)
-            | (CandidateSource::Builtin, _) => unimplemented!(),
+            | (CandidateSource::BuiltinImpl, _)
+            | (CandidateSource::AliasBound(_), _) => unimplemented!(),
         }
     }
 }
 
 impl<'tcx> assembly::GoalKind<'tcx> for ProjectionPredicate<'tcx> {
-    type CandidateSource = CandidateSource;
-
     fn self_ty(self) -> Ty<'tcx> {
         self.self_ty()
     }
@@ -104,33 +189,26 @@ fn trait_def_id(self, tcx: TyCtxt<'tcx>) -> DefId {
     }
 
     fn consider_impl_candidate(
-        acx: &mut AssemblyCtxt<'_, 'tcx, ProjectionPredicate<'tcx>>,
+        ecx: &mut EvalCtxt<'_, 'tcx>,
         goal: Goal<'tcx, ProjectionPredicate<'tcx>>,
         impl_def_id: DefId,
-    ) {
-        let tcx = acx.cx.tcx;
+    ) -> QueryResult<'tcx> {
+        let tcx = ecx.tcx();
+
         let goal_trait_ref = goal.predicate.projection_ty.trait_ref(tcx);
         let impl_trait_ref = tcx.impl_trait_ref(impl_def_id).unwrap();
         let drcx = DeepRejectCtxt { treat_obligation_params: TreatParams::AsPlaceholder };
         if iter::zip(goal_trait_ref.substs, impl_trait_ref.skip_binder().substs)
             .any(|(goal, imp)| !drcx.generic_args_may_unify(goal, imp))
         {
-            return;
+            return Err(NoSolution);
         }
 
-        acx.infcx.probe(|_| {
-            let impl_substs = acx.infcx.fresh_substs_for_item(DUMMY_SP, impl_def_id);
+        ecx.infcx.probe(|_| {
+            let impl_substs = ecx.infcx.fresh_substs_for_item(DUMMY_SP, impl_def_id);
             let impl_trait_ref = impl_trait_ref.subst(tcx, impl_substs);
 
-            let Ok(InferOk { obligations, .. }) = acx
-                .infcx
-                .at(&ObligationCause::dummy(), goal.param_env)
-                .define_opaque_types(false)
-                .eq(goal_trait_ref, impl_trait_ref)
-                .map_err(|e| debug!("failed to equate trait refs: {e:?}"))
-            else {
-                return
-            };
+            let mut nested_goals = ecx.infcx.eq(goal.param_env, goal_trait_ref, impl_trait_ref)?;
             let where_clause_bounds = tcx
                 .predicates_of(impl_def_id)
                 .instantiate(tcx, impl_substs)
@@ -138,17 +216,20 @@ fn consider_impl_candidate(
                 .into_iter()
                 .map(|pred| goal.with(tcx, pred));
 
-            let nested_goals = obligations.into_iter().map(|o| o.into()).chain(where_clause_bounds).collect();
-            let Ok(trait_ref_certainty) = acx.cx.evaluate_all(acx.infcx, nested_goals) else { return };
+            nested_goals.extend(where_clause_bounds);
+            let trait_ref_certainty = ecx.evaluate_all(nested_goals)?;
 
+            // In case the associated item is hidden due to specialization, we have to
+            // return ambiguity, as this would otherwise be incomplete, resulting in
+            // unsoundness during coherence (#105782).
             let Some(assoc_def) = fetch_eligible_assoc_item_def(
-                acx.infcx,
+                ecx.infcx,
                 goal.param_env,
                 goal_trait_ref,
                 goal.predicate.def_id(),
                 impl_def_id
-            ) else {
-                return
+            )? else {
+                return ecx.make_canonical_response(trait_ref_certainty.unify_and(Certainty::AMBIGUOUS));
             };
 
             if !assoc_def.item.defaultness(tcx).has_value() {
@@ -171,10 +252,10 @@ fn consider_impl_candidate(
             let impl_substs_with_gat = goal.predicate.projection_ty.substs.rebase_onto(
                 tcx,
                 goal_trait_ref.def_id,
-                impl_trait_ref.substs,
+                impl_substs,
             );
             let substs = translate_substs(
-                acx.infcx,
+                ecx.infcx,
                 goal.param_env,
                 impl_def_id,
                 impl_substs_with_gat,
@@ -185,7 +266,8 @@ fn consider_impl_candidate(
             let is_const = matches!(tcx.def_kind(assoc_def.item.def_id), DefKind::AssocConst);
             let ty = tcx.bound_type_of(assoc_def.item.def_id);
             let term: ty::EarlyBinder<ty::Term<'tcx>> = if is_const {
-                let identity_substs = ty::InternalSubsts::identity_for_item(tcx, assoc_def.item.def_id);
+                let identity_substs =
+                    ty::InternalSubsts::identity_for_item(tcx, assoc_def.item.def_id);
                 let did = ty::WithOptConstParam::unknown(assoc_def.item.def_id);
                 let kind =
                     ty::ConstKind::Unevaluated(ty::UnevaluatedConst::new(did, identity_substs));
@@ -194,23 +276,121 @@ fn consider_impl_candidate(
                 ty.map_bound(|ty| ty.into())
             };
 
-            let Ok(InferOk { obligations, .. }) = acx
+            // The term of our goal should be fully unconstrained, so this should never fail.
+            //
+            // It can however be ambiguous when the resolved type is a projection.
+            let nested_goals = ecx
                 .infcx
-                .at(&ObligationCause::dummy(), goal.param_env)
-                .define_opaque_types(false)
-                .eq(goal.predicate.term,  term.subst(tcx, substs))
-                .map_err(|e| debug!("failed to equate trait refs: {e:?}"))
-            else {
-                return
-            };
-
-            let nested_goals = obligations.into_iter().map(|o| o.into()).collect();
-            let Ok(rhs_certainty) = acx.cx.evaluate_all(acx.infcx, nested_goals) else { return };
+                .eq(goal.param_env, goal.predicate.term, term.subst(tcx, substs))
+                .expect("failed to unify with unconstrained term");
+            let rhs_certainty =
+                ecx.evaluate_all(nested_goals).expect("failed to unify with unconstrained term");
 
-            let certainty = trait_ref_certainty.unify_and(rhs_certainty);
-            acx.try_insert_candidate(CandidateSource::Impl(impl_def_id), certainty);
+            ecx.make_canonical_response(trait_ref_certainty.unify_and(rhs_certainty))
         })
     }
+
+    fn consider_assumption(
+        ecx: &mut EvalCtxt<'_, 'tcx>,
+        goal: Goal<'tcx, Self>,
+        assumption: ty::Predicate<'tcx>,
+    ) -> QueryResult<'tcx> {
+        if let Some(poly_projection_pred) = assumption.to_opt_poly_projection_pred() {
+            ecx.infcx.probe(|_| {
+                let assumption_projection_pred =
+                    ecx.infcx.instantiate_bound_vars_with_infer(poly_projection_pred);
+                let nested_goals = ecx.infcx.eq(
+                    goal.param_env,
+                    goal.predicate.projection_ty,
+                    assumption_projection_pred.projection_ty,
+                )?;
+                let subst_certainty = ecx.evaluate_all(nested_goals)?;
+
+                // The term of our goal should be fully unconstrained, so this should never fail.
+                //
+                // It can however be ambiguous when the resolved type is a projection.
+                let nested_goals = ecx
+                    .infcx
+                    .eq(goal.param_env, goal.predicate.term, assumption_projection_pred.term)
+                    .expect("failed to unify with unconstrained term");
+                let rhs_certainty = ecx
+                    .evaluate_all(nested_goals)
+                    .expect("failed to unify with unconstrained term");
+
+                ecx.make_canonical_response(subst_certainty.unify_and(rhs_certainty))
+            })
+        } else {
+            Err(NoSolution)
+        }
+    }
+
+    fn consider_auto_trait_candidate(
+        _ecx: &mut EvalCtxt<'_, 'tcx>,
+        goal: Goal<'tcx, Self>,
+    ) -> QueryResult<'tcx> {
+        bug!("auto traits do not have associated types: {:?}", goal);
+    }
+
+    fn consider_trait_alias_candidate(
+        _ecx: &mut EvalCtxt<'_, 'tcx>,
+        goal: Goal<'tcx, Self>,
+    ) -> QueryResult<'tcx> {
+        bug!("trait aliases do not have associated types: {:?}", goal);
+    }
+
+    fn consider_builtin_sized_candidate(
+        _ecx: &mut EvalCtxt<'_, 'tcx>,
+        goal: Goal<'tcx, Self>,
+    ) -> QueryResult<'tcx> {
+        bug!("`Sized` does not have an associated type: {:?}", goal);
+    }
+
+    fn consider_builtin_copy_clone_candidate(
+        _ecx: &mut EvalCtxt<'_, 'tcx>,
+        goal: Goal<'tcx, Self>,
+    ) -> QueryResult<'tcx> {
+        bug!("`Copy`/`Clone` does not have an associated type: {:?}", goal);
+    }
+
+    fn consider_builtin_pointer_sized_candidate(
+        _ecx: &mut EvalCtxt<'_, 'tcx>,
+        goal: Goal<'tcx, Self>,
+    ) -> QueryResult<'tcx> {
+        bug!("`PointerSized` does not have an associated type: {:?}", goal);
+    }
+
+    fn consider_builtin_fn_trait_candidates(
+        ecx: &mut EvalCtxt<'_, 'tcx>,
+        goal: Goal<'tcx, Self>,
+        goal_kind: ty::ClosureKind,
+    ) -> QueryResult<'tcx> {
+        if let Some(tupled_inputs_and_output) =
+            structural_traits::extract_tupled_inputs_and_output_from_callable(
+                ecx.tcx(),
+                goal.predicate.self_ty(),
+                goal_kind,
+            )?
+        {
+            let pred = tupled_inputs_and_output
+                .map_bound(|(inputs, output)| ty::ProjectionPredicate {
+                    projection_ty: ecx
+                        .tcx()
+                        .mk_alias_ty(goal.predicate.def_id(), [goal.predicate.self_ty(), inputs]),
+                    term: output.into(),
+                })
+                .to_predicate(ecx.tcx());
+            Self::consider_assumption(ecx, goal, pred)
+        } else {
+            ecx.make_canonical_response(Certainty::AMBIGUOUS)
+        }
+    }
+
+    fn consider_builtin_tuple_candidate(
+        _ecx: &mut EvalCtxt<'_, 'tcx>,
+        goal: Goal<'tcx, Self>,
+    ) -> QueryResult<'tcx> {
+        bug!("`Tuple` does not have an associated type: {:?}", goal);
+    }
 }
 
 /// This behavior is also implemented in `rustc_ty_utils` and in the old `project` code.
@@ -224,10 +404,9 @@ fn fetch_eligible_assoc_item_def<'tcx>(
     goal_trait_ref: ty::TraitRef<'tcx>,
     trait_assoc_def_id: DefId,
     impl_def_id: DefId,
-) -> Option<LeafDef> {
+) -> Result<Option<LeafDef>, NoSolution> {
     let node_item = specialization_graph::assoc_def(infcx.tcx, impl_def_id, trait_assoc_def_id)
-        .map_err(|ErrorGuaranteed { .. }| ())
-        .ok()?;
+        .map_err(|ErrorGuaranteed { .. }| NoSolution)?;
 
     let eligible = if node_item.is_final() {
         // Non-specializable items are always projectable.
@@ -246,5 +425,5 @@ fn fetch_eligible_assoc_item_def<'tcx>(
         }
     };
 
-    if eligible { Some(node_item) } else { None }
+    if eligible { Ok(Some(node_item)) } else { Ok(None) }
 }
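
The guard implemented by `term_is_fully_unconstrained` above is essentially an occurs check: before normalizing `<T as Trait>::Assoc = ?X`, the solver makes sure `?X` does not also appear elsewhere in the predicate. The sketch below is a hypothetical, self-contained illustration of that idea; the `Term` type and helper names are invented for the example and do not mirror rustc's visitor API.

```rust
#[derive(Debug, PartialEq)]
enum Term {
    Infer(u32),
    App(&'static str, Vec<Term>),
}

/// Does inference variable `var` occur anywhere inside `term`?
fn occurs_in(var: u32, term: &Term) -> bool {
    match term {
        Term::Infer(v) => *v == var,
        Term::App(_, args) => args.iter().any(|arg| occurs_in(var, arg)),
    }
}

/// The projection term is "fully unconstrained" if it is a bare inference
/// variable that does not show up in the projection's own arguments.
fn term_is_fully_unconstrained(projection_args: &[Term], term: &Term) -> bool {
    match term {
        Term::Infer(var) => !projection_args.iter().any(|arg| occurs_in(*var, arg)),
        _ => false,
    }
}

fn main() {
    // `<Vec<?0> as Trait>::Assoc = ?1` -- fine, ?1 is unconstrained.
    let args = [Term::App("Vec", vec![Term::Infer(0)])];
    assert!(term_is_fully_unconstrained(&args, &Term::Infer(1)));

    // `<Vec<?0> as Trait>::Assoc = ?0` -- the guard has to reject this.
    assert!(!term_is_fully_unconstrained(&args, &Term::Infer(0)));
    println!("occurs-check guard behaves as expected");
}
```
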
diff --git a/compiler/rustc_trait_selection/src/solve/search_graph/cache.rs b/compiler/rustc_trait_selection/src/solve/search_graph/cache.rs
new file mode 100644 (file)
index 0000000..730a8e6
--- /dev/null
@@ -0,0 +1,123 @@
+//! This module both handles the global cache which stores "finished" goals,
+//! and the provisional cache which contains partially computed goals.
+//!
+//! The provisional cache is necessary when dealing with coinductive cycles.
+//!
+//! For more information about the provisional cache and coinduction in general,
+//! check out the relevant section of the rustc-dev-guide.
+//!
+//! FIXME(@lcnr): Write that section, feel free to ping me if you need help here
+//! before then or if I still haven't done that before January 2023.
+use super::overflow::OverflowData;
+use super::StackDepth;
+use crate::solve::{CanonicalGoal, QueryResult};
+use rustc_data_structures::fx::FxHashMap;
+use rustc_index::vec::IndexVec;
+use rustc_middle::ty::TyCtxt;
+
+rustc_index::newtype_index! {
+    pub struct EntryIndex {}
+}
+
+#[derive(Debug, Clone)]
+pub(super) struct ProvisionalEntry<'tcx> {
+    // In case we have a coinductive cycle, this is the
+    // currently least restrictive result of this goal.
+    pub(super) response: QueryResult<'tcx>,
+    // In case of a cycle, the position of the deepest stack entry involved
+    // in that cycle. This is monotonically decreasing in the stack as all
+    // elements between the current stack element and the deepest stack entry
+    // involved have to also be involved in that cycle.
+    //
+    // We can only move entries to the global cache once we're completely done
+    // with the cycle. If this entry has not been involved in a cycle,
+    // this is just its own depth.
+    pub(super) depth: StackDepth,
+
+    // The goal for this entry. Should always be equal to the corresponding goal
+    // in the lookup table.
+    pub(super) goal: CanonicalGoal<'tcx>,
+}
+
+pub(super) struct ProvisionalCache<'tcx> {
+    pub(super) entries: IndexVec<EntryIndex, ProvisionalEntry<'tcx>>,
+    // FIXME: This is only used to quickly check whether a given goal
+    // is in the cache. We should experiment with using something like
+    // `SsoHashSet` here because in most cases there are only a few entries.
+    pub(super) lookup_table: FxHashMap<CanonicalGoal<'tcx>, EntryIndex>,
+}
+
+impl<'tcx> ProvisionalCache<'tcx> {
+    pub(super) fn empty() -> ProvisionalCache<'tcx> {
+        ProvisionalCache { entries: Default::default(), lookup_table: Default::default() }
+    }
+
+    pub(super) fn is_empty(&self) -> bool {
+        self.entries.is_empty() && self.lookup_table.is_empty()
+    }
+
+    /// Adds a dependency from the current leaf to `target` in the cache
+    /// to prevent us from moving any goals which depend on the current leaf
+    /// to the global cache while we're still computing `target`.
+    ///
+    /// It's important to note that `target` may already be part of a different cycle.
+    /// In this case we have to ensure that we also depend on all other goals
+    /// in the existing cycle in addition to the potentially direct cycle with `target`.
+    pub(super) fn add_dependency_of_leaf_on(&mut self, target: EntryIndex) {
+        let depth = self.entries[target].depth;
+        for provisional_entry in &mut self.entries.raw[target.index()..] {
+            // The depth of `target` is the position of the deepest goal in the stack
+            // on which `target` depends. That goal is the `root` of this cycle.
+            //
+            // Any entry which was added after `target` is either on the stack itself
+            // at which point its depth is definitely at least as high as the depth of
+            // `root`. If it's not on the stack itself it has to depend on a goal
+            // between `root` and `leaf`. If it were to depend on a goal deeper in the
+            // stack than `root`, then `root` would also depend on that goal, at which
+            // point `root` wouldn't be the root anymore.
+            debug_assert!(provisional_entry.depth >= depth);
+            provisional_entry.depth = depth;
+        }
+
+        // We only update entries which were added after `target` as no other
+        // entry should have a higher depth.
+        //
+        // Any entry which previously had a higher depth than target has to
+        // be between `target` and `root`. Because of this we would have updated
+        // its depth when calling `add_dependency_of_leaf_on(root)` for `target`.
+        if cfg!(debug_assertions) {
+            self.entries.iter().all(|e| e.depth <= depth);
+        }
+    }
+
+    pub(super) fn depth(&self, entry_index: EntryIndex) -> StackDepth {
+        self.entries[entry_index].depth
+    }
+
+    pub(super) fn provisional_result(&self, entry_index: EntryIndex) -> QueryResult<'tcx> {
+        self.entries[entry_index].response.clone()
+    }
+}
+
+pub(super) fn try_move_finished_goal_to_global_cache<'tcx>(
+    tcx: TyCtxt<'tcx>,
+    overflow_data: &mut OverflowData,
+    stack: &IndexVec<super::StackDepth, super::StackElem<'tcx>>,
+    goal: CanonicalGoal<'tcx>,
+    response: QueryResult<'tcx>,
+) {
+    // We move goals to the global cache if we either did not hit an overflow or if it's
+    // the root goal as that will now always hit the same overflow limit.
+    //
+    // NOTE: We cannot move any non-root goals to the global cache even if their final result
+    // isn't impacted by the overflow as that goal still has unstable query dependencies
+    // because it didn't go its full depth.
+    //
+    // FIXME(@lcnr): We could still cache subtrees which are not impacted by overflow though.
+    // Tracking that info correctly isn't trivial, so I haven't implemented it for now.
+    let should_cache_globally = !overflow_data.did_overflow() || stack.is_empty();
+    if should_cache_globally {
+        // FIXME: move the provisional entry to the global cache.
+        let _ = (tcx, goal, response);
+    }
+}
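
To see what the provisional cache and `add_dependency_of_leaf_on` are doing, here is a simplified, self-contained model: each in-progress goal gets an entry recording the deepest stack depth it depends on, and discovering a cycle lowers the depth of every entry added after the cycle root. The string-based `goal` and the struct names are placeholders for this sketch only.

```rust
use std::collections::HashMap;

#[derive(Debug)]
struct ProvisionalEntry {
    goal: &'static str,
    depth: usize,
}

#[derive(Default, Debug)]
struct ProvisionalCache {
    entries: Vec<ProvisionalEntry>,
    lookup_table: HashMap<&'static str, usize>,
}

impl ProvisionalCache {
    fn insert(&mut self, goal: &'static str, depth: usize) -> usize {
        let index = self.entries.len();
        self.entries.push(ProvisionalEntry { goal, depth });
        self.lookup_table.insert(goal, index);
        index
    }

    /// Mirror of `add_dependency_of_leaf_on`: everything pushed after `target`
    /// now transitively depends on `target`'s root, so lower its depth.
    fn add_dependency_of_leaf_on(&mut self, target: usize) {
        let depth = self.entries[target].depth;
        for entry in &mut self.entries[target..] {
            debug_assert!(entry.depth >= depth);
            entry.depth = depth;
        }
    }
}

fn main() {
    let mut cache = ProvisionalCache::default();
    let root = cache.insert("A: Trait", 0);
    cache.insert("B: Trait", 1);
    cache.insert("C: Trait", 2);

    // Evaluating `C: Trait` turned out to depend on `A: Trait` again.
    cache.add_dependency_of_leaf_on(root);
    assert!(cache.entries.iter().all(|e| e.depth == 0));
    println!("{cache:?}");
}
```
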
diff --git a/compiler/rustc_trait_selection/src/solve/search_graph/mod.rs b/compiler/rustc_trait_selection/src/solve/search_graph/mod.rs
new file mode 100644 (file)
index 0000000..0030e9a
--- /dev/null
@@ -0,0 +1,178 @@
+mod cache;
+mod overflow;
+
+use self::cache::ProvisionalEntry;
+use super::{CanonicalGoal, Certainty, MaybeCause, QueryResult};
+use cache::ProvisionalCache;
+use overflow::OverflowData;
+use rustc_index::vec::IndexVec;
+use rustc_middle::ty::TyCtxt;
+use std::collections::hash_map::Entry;
+
+rustc_index::newtype_index! {
+    pub struct StackDepth {}
+}
+
+struct StackElem<'tcx> {
+    goal: CanonicalGoal<'tcx>,
+    has_been_used: bool,
+}
+
+pub(super) struct SearchGraph<'tcx> {
+    /// The stack of goals currently being computed.
+    ///
+    /// An element is *deeper* in the stack if its index is *lower*.
+    stack: IndexVec<StackDepth, StackElem<'tcx>>,
+    overflow_data: OverflowData,
+    provisional_cache: ProvisionalCache<'tcx>,
+}
+
+impl<'tcx> SearchGraph<'tcx> {
+    pub(super) fn new(tcx: TyCtxt<'tcx>) -> SearchGraph<'tcx> {
+        Self {
+            stack: Default::default(),
+            overflow_data: OverflowData::new(tcx),
+            provisional_cache: ProvisionalCache::empty(),
+        }
+    }
+
+    pub(super) fn is_empty(&self) -> bool {
+        self.stack.is_empty()
+            && self.provisional_cache.is_empty()
+            && !self.overflow_data.did_overflow()
+    }
+
+    /// Tries putting the new goal on the stack, returning an error if it is already cached.
+    ///
+    /// This correctly updates the provisional cache if there is a cycle.
+    pub(super) fn try_push_stack(
+        &mut self,
+        tcx: TyCtxt<'tcx>,
+        goal: CanonicalGoal<'tcx>,
+    ) -> Result<(), QueryResult<'tcx>> {
+        // FIXME: start by checking the global cache
+
+        // Look at the provisional cache to check for cycles.
+        let cache = &mut self.provisional_cache;
+        match cache.lookup_table.entry(goal) {
+            // No entry, simply push this goal on the stack after dealing with overflow.
+            Entry::Vacant(v) => {
+                if self.overflow_data.has_overflow(self.stack.len()) {
+                    return Err(self.deal_with_overflow(tcx, goal));
+                }
+
+                let depth = self.stack.push(StackElem { goal, has_been_used: false });
+                let response = super::response_no_constraints(tcx, goal, Certainty::Yes);
+                let entry_index = cache.entries.push(ProvisionalEntry { response, depth, goal });
+                v.insert(entry_index);
+                Ok(())
+            }
+            // We have a nested goal which relies on a goal `root` deeper in the stack.
+            //
+            // We first store that we may have to rerun `evaluate_goal` for `root` in case the
+            // provisional response is not equal to the final response. We also update the depth
+            // of all goals which recursively depend on our current goal to depend on `root`
+            // instead.
+            //
+            // Finally we can return either the provisional response for that goal if we have a
+            // coinductive cycle or an ambiguous result if the cycle is inductive.
+            Entry::Occupied(entry_index) => {
+                let entry_index = *entry_index.get();
+
+                cache.add_dependency_of_leaf_on(entry_index);
+                let stack_depth = cache.depth(entry_index);
+
+                self.stack[stack_depth].has_been_used = true;
+                // NOTE: The goals on the stack aren't the only goals involved in this cycle.
+                // We can also depend on goals which aren't part of the stack but coinductively
+                // depend on the stack themselves. We already checked whether all the goals
+                // between these goals and their root on the stack are coinductive. This
+                // means that as long as each goal in a cycle is checked for coinductivity
+                // by itself, simply checking the stack is enough.
+                if self.stack.raw[stack_depth.index()..]
+                    .iter()
+                    .all(|g| g.goal.value.predicate.is_coinductive(tcx))
+                {
+                    Err(cache.provisional_result(entry_index))
+                } else {
+                    Err(super::response_no_constraints(
+                        tcx,
+                        goal,
+                        Certainty::Maybe(MaybeCause::Overflow),
+                    ))
+                }
+            }
+        }
+    }
+
+    /// We cannot simply store the result of [super::EvalCtxt::compute_goal] as we have to deal with
+    /// coinductive cycles.
+    ///
+    /// When we encounter a coinductive cycle, we have to prove the final result of that cycle
+    /// while we are still computing that result. Because of this we continously recompute the
+    /// cycle until the result of the previous iteration is equal to the final result, at which
+    /// point we are done.
+    ///
+    /// This function returns `true` if we were able to finalize the goal and `false` if it has
+    /// updated the provisional cache and we have to recompute the current goal.
+    ///
+    /// FIXME: Refer to the rustc-dev-guide entry once it exists.
+    pub(super) fn try_finalize_goal(
+        &mut self,
+        tcx: TyCtxt<'tcx>,
+        actual_goal: CanonicalGoal<'tcx>,
+        response: QueryResult<'tcx>,
+    ) -> bool {
+        let StackElem { goal, has_been_used } = self.stack.pop().unwrap();
+        assert_eq!(goal, actual_goal);
+
+        let cache = &mut self.provisional_cache;
+        let provisional_entry_index = *cache.lookup_table.get(&goal).unwrap();
+        let provisional_entry = &mut cache.entries[provisional_entry_index];
+        let depth = provisional_entry.depth;
+        // Was the current goal the root of a cycle and was the provisional response
+        // different from the final one.
+        if has_been_used && provisional_entry.response != response {
+            // If so, update the provisional response for this goal...
+            provisional_entry.response = response;
+            // ...remove all entries whose result depends on this goal
+            // from the provisional cache...
+            //
+            // That's not completely correct, as a nested goal can also
+            // depend on a goal which is lower in the stack so it doesn't
+            // actually depend on the current goal. This should be fairly
+            // rare and is hopefully not relevant for performance.
+            #[allow(rustc::potential_query_instability)]
+            cache.lookup_table.retain(|_key, index| *index <= provisional_entry_index);
+            cache.entries.truncate(provisional_entry_index.index() + 1);
+
+            // ...and finally push our goal back on the stack and reevaluate it.
+            self.stack.push(StackElem { goal, has_been_used: false });
+            false
+        } else {
+            // If not, we're done with this goal.
+            //
+            // Check whether this goal depends on a goal deeper on the stack
+            // and, if not, move it and all nested goals to the global cache.
+            //
+            // Note that if any nested goal were to depend on something deeper on the stack,
+            // this would have also updated the depth of the current goal.
+            if depth == self.stack.next_index() {
+                for (i, entry) in cache.entries.drain_enumerated(provisional_entry_index.index()..)
+                {
+                    let actual_index = cache.lookup_table.remove(&entry.goal);
+                    debug_assert_eq!(Some(i), actual_index);
+                    debug_assert!(entry.depth == depth);
+                    cache::try_move_finished_goal_to_global_cache(
+                        tcx,
+                        &mut self.overflow_data,
+                        &self.stack,
+                        entry.goal,
+                        entry.response,
+                    );
+                }
+            }
+            true
+        }
+    }
+}
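
The interaction of `try_push_stack` and `try_finalize_goal` above is a fixpoint loop: a cyclic goal starts with an optimistic provisional result, the cycle is re-evaluated using that result, and evaluation only finishes once an iteration reproduces the result it started from. The runnable sketch below captures just that control flow; the "goal" is deliberately a trivial saturating counter rather than a trait goal, so every name in it is illustrative only.

```rust
/// Evaluate the cycle once, given the provisional result for its root.
/// In the real solver this corresponds to re-running `compute_goal` with the
/// cached provisional response.
fn evaluate_cycle_once(provisional: u32) -> u32 {
    // Each iteration may strengthen the result, but only up to a fixed point.
    (provisional + 1).min(3)
}

fn evaluate_with_cycle() -> u32 {
    // Most optimistic starting point, like `response_no_constraints(.., Certainty::Yes)`.
    let mut provisional = 0;
    loop {
        let result = evaluate_cycle_once(provisional);
        if result == provisional {
            // `try_finalize_goal` succeeded: provisional and final results agree.
            return result;
        }
        // Otherwise update the provisional entry and re-run the goal.
        provisional = result;
    }
}

fn main() {
    assert_eq!(evaluate_with_cycle(), 3);
    println!("cycle converged to {}", evaluate_with_cycle());
}
```
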
diff --git a/compiler/rustc_trait_selection/src/solve/search_graph/overflow.rs b/compiler/rustc_trait_selection/src/solve/search_graph/overflow.rs
new file mode 100644 (file)
index 0000000..1dd3894
--- /dev/null
@@ -0,0 +1,84 @@
+use rustc_infer::infer::canonical::Canonical;
+use rustc_infer::traits::query::NoSolution;
+use rustc_middle::ty::TyCtxt;
+use rustc_session::Limit;
+
+use super::SearchGraph;
+use crate::solve::{response_no_constraints, Certainty, EvalCtxt, MaybeCause, QueryResult};
+
+/// When detecting a solver overflow, we return ambiguity. Overflow can be
+/// *hidden* by either a fatal error in an **AND** or a trivial success in an **OR**.
+///
+/// This is an issue in case of exponential blowup, e.g. if each goal on the stack
+/// has multiple nested (overflowing) candidates. To deal with this, we reduce the limit
+/// used by the solver when hitting the default limit for the first time.
+///
+/// FIXME: Get tests where always using the `default_limit` results in a hang and refer
+/// to them here. We can also improve the overflow strategy if necessary.
+pub(super) struct OverflowData {
+    default_limit: Limit,
+    current_limit: Limit,
+    /// When proving an **AND** we have to repeatedly iterate over the yet unproven goals.
+    ///
+    /// Because of this each iteration also increases the depth in addition to the stack
+    /// depth.
+    additional_depth: usize,
+}
+
+impl OverflowData {
+    pub(super) fn new(tcx: TyCtxt<'_>) -> OverflowData {
+        let default_limit = tcx.recursion_limit();
+        OverflowData { default_limit, current_limit: default_limit, additional_depth: 0 }
+    }
+
+    #[inline]
+    pub(super) fn did_overflow(&self) -> bool {
+        self.default_limit.0 != self.current_limit.0
+    }
+
+    #[inline]
+    pub(super) fn has_overflow(&self, depth: usize) -> bool {
+        !self.current_limit.value_within_limit(depth + self.additional_depth)
+    }
+
+    /// Updating the current limit when hitting overflow.
+    fn deal_with_overflow(&mut self) {
+        // When first hitting overflow we reduce the overflow limit
+        // for all future goals to prevent hangs if there's an exponential
+        // blowup.
+        self.current_limit.0 = self.default_limit.0 / 8;
+    }
+}
+
+impl<'tcx> SearchGraph<'tcx> {
+    pub fn deal_with_overflow(
+        &mut self,
+        tcx: TyCtxt<'tcx>,
+        goal: Canonical<'tcx, impl Sized>,
+    ) -> QueryResult<'tcx> {
+        self.overflow_data.deal_with_overflow();
+        response_no_constraints(tcx, goal, Certainty::Maybe(MaybeCause::Overflow))
+    }
+}
+
+impl<'tcx> EvalCtxt<'_, 'tcx> {
+    /// A `while`-loop which tracks overflow.
+    pub fn repeat_while_none(
+        &mut self,
+        mut loop_body: impl FnMut(&mut Self) -> Option<Result<Certainty, NoSolution>>,
+    ) -> Result<Certainty, NoSolution> {
+        let start_depth = self.search_graph.overflow_data.additional_depth;
+        let depth = self.search_graph.stack.len();
+        while !self.search_graph.overflow_data.has_overflow(depth) {
+            if let Some(result) = loop_body(self) {
+                self.search_graph.overflow_data.additional_depth = start_depth;
+                return result;
+            }
+
+            self.search_graph.overflow_data.additional_depth += 1;
+        }
+        self.search_graph.overflow_data.additional_depth = start_depth;
+        self.search_graph.overflow_data.deal_with_overflow();
+        Ok(Certainty::Maybe(MaybeCause::Overflow))
+    }
+}
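
For illustration only (not part of this commit): the limit-reduction strategy described in the `OverflowData` doc comment above, restated as a self-contained sketch in plain Rust, with a bare `usize` standing in for rustc's `Limit` type and all names hypothetical.

    // Sketch: on the first overflow the limit drops to an eighth of the default,
    // so later goals in an exponentially blowing-up proof tree fail fast
    // instead of hanging.
    struct OverflowSketch {
        default_limit: usize,
        current_limit: usize,
        additional_depth: usize,
    }

    impl OverflowSketch {
        fn new(default_limit: usize) -> Self {
            OverflowSketch { default_limit, current_limit: default_limit, additional_depth: 0 }
        }

        fn has_overflow(&self, stack_depth: usize) -> bool {
            stack_depth + self.additional_depth > self.current_limit
        }

        fn deal_with_overflow(&mut self) {
            self.current_limit = self.default_limit / 8;
        }
    }
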
index a43fef5cdb0c875f26e0976766cf9faedc0febf1..4b6d673c999c96fd7dab198e99a3aec327ceba70 100644 (file)
@@ -2,58 +2,20 @@
 
 use std::iter;
 
-use super::assembly::{self, AssemblyCtxt};
-use super::{CanonicalGoal, EvalCtxt, Goal, QueryResult};
+use super::assembly::{self, Candidate, CandidateSource};
+use super::infcx_ext::InferCtxtExt;
+use super::{Certainty, EvalCtxt, Goal, QueryResult};
 use rustc_hir::def_id::DefId;
-use rustc_infer::infer::InferOk;
+use rustc_infer::infer::InferCtxt;
 use rustc_infer::traits::query::NoSolution;
-use rustc_infer::traits::ObligationCause;
 use rustc_middle::ty::fast_reject::{DeepRejectCtxt, TreatParams};
-use rustc_middle::ty::TraitPredicate;
-use rustc_middle::ty::{self, Ty, TyCtxt};
+use rustc_middle::ty::{self, ToPredicate, Ty, TyCtxt};
+use rustc_middle::ty::{TraitPredicate, TypeVisitable};
 use rustc_span::DUMMY_SP;
 
-#[allow(dead_code)] // FIXME: implement and use all variants.
-#[derive(Debug, Clone, Copy)]
-pub(super) enum CandidateSource {
-    /// Some user-defined impl with the given `DefId`.
-    Impl(DefId),
-    /// The n-th caller bound in the `param_env` of our goal.
-    ///
-    /// This is pretty much always a bound from the `where`-clauses of the
-    /// currently checked item.
-    ParamEnv(usize),
-    /// A bound on the `self_ty` in case it is a projection or an opaque type.
-    ///
-    /// # Examples
-    ///
-    /// ```ignore (for syntax highlighting)
-    /// trait Trait {
-    ///     type Assoc: OtherTrait;
-    /// }
-    /// ```
-    ///
-    /// We know that `<Whatever as Trait>::Assoc: OtherTrait` holds by looking at
-    /// the bounds on `Trait::Assoc`.
-    AliasBound(usize),
-    /// A builtin implementation for some specific traits, used in cases
-    /// where we cannot rely an ordinary library implementations.
-    ///
-    /// The most notable examples are `Sized`, `Copy` and `Clone`. This is also
-    /// used for the `DiscriminantKind` and `Pointee` trait, both of which have
-    /// an associated type.
-    Builtin,
-    /// An automatic impl for an auto trait, e.g. `Send`. These impls recursively look
-    /// at the constituent types of the `self_ty` to check whether the auto trait
-    /// is implemented for those.
-    AutoImpl,
-}
-
-type Candidate<'tcx> = assembly::Candidate<'tcx, TraitPredicate<'tcx>>;
+pub mod structural_traits;
 
 impl<'tcx> assembly::GoalKind<'tcx> for TraitPredicate<'tcx> {
-    type CandidateSource = CandidateSource;
-
     fn self_ty(self) -> Ty<'tcx> {
         self.self_ty()
     }
@@ -67,55 +29,194 @@ fn trait_def_id(self, _: TyCtxt<'tcx>) -> DefId {
     }
 
     fn consider_impl_candidate(
-        acx: &mut AssemblyCtxt<'_, 'tcx, Self>,
+        ecx: &mut EvalCtxt<'_, 'tcx>,
         goal: Goal<'tcx, TraitPredicate<'tcx>>,
         impl_def_id: DefId,
-    ) {
-        let tcx = acx.cx.tcx;
+    ) -> QueryResult<'tcx> {
+        let tcx = ecx.tcx();
 
         let impl_trait_ref = tcx.impl_trait_ref(impl_def_id).unwrap();
         let drcx = DeepRejectCtxt { treat_obligation_params: TreatParams::AsPlaceholder };
         if iter::zip(goal.predicate.trait_ref.substs, impl_trait_ref.skip_binder().substs)
             .any(|(goal, imp)| !drcx.generic_args_may_unify(goal, imp))
         {
-            return;
+            return Err(NoSolution);
         }
 
-        acx.infcx.probe(|_| {
-            let impl_substs = acx.infcx.fresh_substs_for_item(DUMMY_SP, impl_def_id);
+        ecx.infcx.probe(|_| {
+            let impl_substs = ecx.infcx.fresh_substs_for_item(DUMMY_SP, impl_def_id);
             let impl_trait_ref = impl_trait_ref.subst(tcx, impl_substs);
 
-            let Ok(InferOk { obligations, .. }) = acx
-                .infcx
-                .at(&ObligationCause::dummy(), goal.param_env)
-                .define_opaque_types(false)
-                .eq(goal.predicate.trait_ref, impl_trait_ref)
-                .map_err(|e| debug!("failed to equate trait refs: {e:?}"))
-            else {
-                return
-            };
+            let mut nested_goals =
+                ecx.infcx.eq(goal.param_env, goal.predicate.trait_ref, impl_trait_ref)?;
             let where_clause_bounds = tcx
                 .predicates_of(impl_def_id)
                 .instantiate(tcx, impl_substs)
                 .predicates
                 .into_iter()
                 .map(|pred| goal.with(tcx, pred));
+            nested_goals.extend(where_clause_bounds);
+            ecx.evaluate_all_and_make_canonical_response(nested_goals)
+        })
+    }
+
+    fn consider_assumption(
+        ecx: &mut EvalCtxt<'_, 'tcx>,
+        goal: Goal<'tcx, Self>,
+        assumption: ty::Predicate<'tcx>,
+    ) -> QueryResult<'tcx> {
+        if let Some(poly_trait_pred) = assumption.to_opt_poly_trait_pred() {
+            // FIXME: Constness and polarity
+            ecx.infcx.probe(|_| {
+                let assumption_trait_pred =
+                    ecx.infcx.instantiate_bound_vars_with_infer(poly_trait_pred);
+                let nested_goals = ecx.infcx.eq(
+                    goal.param_env,
+                    goal.predicate.trait_ref,
+                    assumption_trait_pred.trait_ref,
+                )?;
+                ecx.evaluate_all_and_make_canonical_response(nested_goals)
+            })
+        } else {
+            Err(NoSolution)
+        }
+    }
+
+    fn consider_auto_trait_candidate(
+        ecx: &mut EvalCtxt<'_, 'tcx>,
+        goal: Goal<'tcx, Self>,
+    ) -> QueryResult<'tcx> {
+        ecx.probe_and_evaluate_goal_for_constituent_tys(
+            goal,
+            structural_traits::instantiate_constituent_tys_for_auto_trait,
+        )
+    }
 
-            let nested_goals =
-                obligations.into_iter().map(|o| o.into()).chain(where_clause_bounds).collect();
+    fn consider_trait_alias_candidate(
+        ecx: &mut EvalCtxt<'_, 'tcx>,
+        goal: Goal<'tcx, Self>,
+    ) -> QueryResult<'tcx> {
+        let tcx = ecx.tcx();
 
-            let Ok(certainty) = acx.cx.evaluate_all(acx.infcx, nested_goals) else { return };
-            acx.try_insert_candidate(CandidateSource::Impl(impl_def_id), certainty);
+        ecx.infcx.probe(|_| {
+            let nested_obligations = tcx
+                .predicates_of(goal.predicate.def_id())
+                .instantiate(tcx, goal.predicate.trait_ref.substs);
+            ecx.evaluate_all_and_make_canonical_response(
+                nested_obligations.predicates.into_iter().map(|p| goal.with(tcx, p)).collect(),
+            )
         })
     }
+
+    fn consider_builtin_sized_candidate(
+        ecx: &mut EvalCtxt<'_, 'tcx>,
+        goal: Goal<'tcx, Self>,
+    ) -> QueryResult<'tcx> {
+        ecx.probe_and_evaluate_goal_for_constituent_tys(
+            goal,
+            structural_traits::instantiate_constituent_tys_for_sized_trait,
+        )
+    }
+
+    fn consider_builtin_copy_clone_candidate(
+        ecx: &mut EvalCtxt<'_, 'tcx>,
+        goal: Goal<'tcx, Self>,
+    ) -> QueryResult<'tcx> {
+        ecx.probe_and_evaluate_goal_for_constituent_tys(
+            goal,
+            structural_traits::instantiate_constituent_tys_for_copy_clone_trait,
+        )
+    }
+
+    fn consider_builtin_pointer_sized_candidate(
+        ecx: &mut EvalCtxt<'_, 'tcx>,
+        goal: Goal<'tcx, Self>,
+    ) -> QueryResult<'tcx> {
+        if goal.predicate.self_ty().has_non_region_infer() {
+            return ecx.make_canonical_response(Certainty::AMBIGUOUS);
+        }
+
+        let tcx = ecx.tcx();
+        let self_ty = tcx.erase_regions(goal.predicate.self_ty());
+
+        if let Ok(layout) = tcx.layout_of(goal.param_env.and(self_ty))
+            && let usize_layout = tcx.layout_of(ty::ParamEnv::empty().and(tcx.types.usize)).unwrap().layout
+            && layout.layout.size() == usize_layout.size()
+            && layout.layout.align().abi == usize_layout.align().abi
+        {
+            // FIXME: We could make this faster by making a no-constraints response
+            ecx.make_canonical_response(Certainty::Yes)
+        } else {
+            Err(NoSolution)
+        }
+    }
+
+    fn consider_builtin_fn_trait_candidates(
+        ecx: &mut EvalCtxt<'_, 'tcx>,
+        goal: Goal<'tcx, Self>,
+        goal_kind: ty::ClosureKind,
+    ) -> QueryResult<'tcx> {
+        if let Some(tupled_inputs_and_output) =
+            structural_traits::extract_tupled_inputs_and_output_from_callable(
+                ecx.tcx(),
+                goal.predicate.self_ty(),
+                goal_kind,
+            )?
+        {
+            let pred = tupled_inputs_and_output
+                .map_bound(|(inputs, _)| {
+                    ecx.tcx()
+                        .mk_trait_ref(goal.predicate.def_id(), [goal.predicate.self_ty(), inputs])
+                })
+                .to_predicate(ecx.tcx());
+            Self::consider_assumption(ecx, goal, pred)
+        } else {
+            ecx.make_canonical_response(Certainty::AMBIGUOUS)
+        }
+    }
+
+    fn consider_builtin_tuple_candidate(
+        ecx: &mut EvalCtxt<'_, 'tcx>,
+        goal: Goal<'tcx, Self>,
+    ) -> QueryResult<'tcx> {
+        if let ty::Tuple(..) = goal.predicate.self_ty().kind() {
+            ecx.make_canonical_response(Certainty::Yes)
+        } else {
+            Err(NoSolution)
+        }
+    }
 }
 
-impl<'tcx> EvalCtxt<'tcx> {
+impl<'tcx> EvalCtxt<'_, 'tcx> {
+    /// Convenience function for traits that are structural, i.e. whose nested
+    /// subgoals only change the self type. Unlike other
+    /// evaluate-like helpers, this does a probe, so it doesn't need to be
+    /// wrapped in one.
+    fn probe_and_evaluate_goal_for_constituent_tys(
+        &mut self,
+        goal: Goal<'tcx, TraitPredicate<'tcx>>,
+        constituent_tys: impl Fn(&InferCtxt<'tcx>, Ty<'tcx>) -> Result<Vec<Ty<'tcx>>, NoSolution>,
+    ) -> QueryResult<'tcx> {
+        self.infcx.probe(|_| {
+            self.evaluate_all_and_make_canonical_response(
+                constituent_tys(self.infcx, goal.predicate.self_ty())?
+                    .into_iter()
+                    .map(|ty| {
+                        goal.with(
+                            self.tcx(),
+                            ty::Binder::dummy(goal.predicate.with_self_ty(self.tcx(), ty)),
+                        )
+                    })
+                    .collect(),
+            )
+        })
+    }
+
     pub(super) fn compute_trait_goal(
         &mut self,
-        goal: CanonicalGoal<'tcx, TraitPredicate<'tcx>>,
+        goal: Goal<'tcx, TraitPredicate<'tcx>>,
     ) -> QueryResult<'tcx> {
-        let candidates = AssemblyCtxt::assemble_and_evaluate_candidates(self, goal);
+        let candidates = self.assemble_and_evaluate_candidates(goal);
         self.merge_trait_candidates_discard_reservation_impls(candidates)
     }
 
@@ -169,14 +270,13 @@ fn trait_candidate_should_be_dropped_in_favor_of(
             (CandidateSource::Impl(_), _)
             | (CandidateSource::ParamEnv(_), _)
             | (CandidateSource::AliasBound(_), _)
-            | (CandidateSource::Builtin, _)
-            | (CandidateSource::AutoImpl, _) => unimplemented!(),
+            | (CandidateSource::BuiltinImpl, _) => unimplemented!(),
         }
     }
 
     fn discard_reservation_impl(&self, candidate: Candidate<'tcx>) -> Candidate<'tcx> {
         if let CandidateSource::Impl(def_id) = candidate.source {
-            if let ty::ImplPolarity::Reservation = self.tcx.impl_polarity(def_id) {
+            if let ty::ImplPolarity::Reservation = self.tcx().impl_polarity(def_id) {
                 debug!("Selected reservation impl");
                 // FIXME: reduce candidate to ambiguous
                 // FIXME: replace `var_values` with identity, yeet external constraints.
diff --git a/compiler/rustc_trait_selection/src/solve/trait_goals/structural_traits.rs b/compiler/rustc_trait_selection/src/solve/trait_goals/structural_traits.rs
new file mode 100644 (file)
index 0000000..a11cd13
--- /dev/null
@@ -0,0 +1,223 @@
+use rustc_hir::{Movability, Mutability};
+use rustc_infer::{infer::InferCtxt, traits::query::NoSolution};
+use rustc_middle::ty::{self, Ty, TyCtxt};
+
+// Calculates the constituent types of a type for `auto trait` purposes.
+//
+// For types with an "existential" binder, i.e. generator witnesses, we also
+// instantiate the binder with placeholders eagerly.
+pub(super) fn instantiate_constituent_tys_for_auto_trait<'tcx>(
+    infcx: &InferCtxt<'tcx>,
+    ty: Ty<'tcx>,
+) -> Result<Vec<Ty<'tcx>>, NoSolution> {
+    let tcx = infcx.tcx;
+    match *ty.kind() {
+        ty::Uint(_)
+        | ty::Int(_)
+        | ty::Bool
+        | ty::Float(_)
+        | ty::FnDef(..)
+        | ty::FnPtr(_)
+        | ty::Str
+        | ty::Error(_)
+        | ty::Infer(ty::IntVar(_) | ty::FloatVar(_))
+        | ty::Never
+        | ty::Char => Ok(vec![]),
+
+        ty::Placeholder(..)
+        | ty::Dynamic(..)
+        | ty::Param(..)
+        | ty::Foreign(..)
+        | ty::Alias(ty::Projection, ..)
+        | ty::Bound(..)
+        | ty::Infer(ty::TyVar(_)) => Err(NoSolution),
+
+        ty::Infer(ty::FreshTy(_) | ty::FreshIntTy(_) | ty::FreshFloatTy(_)) => bug!(),
+
+        ty::RawPtr(ty::TypeAndMut { ty: element_ty, .. }) | ty::Ref(_, element_ty, _) => {
+            Ok(vec![element_ty])
+        }
+
+        ty::Array(element_ty, _) | ty::Slice(element_ty) => Ok(vec![element_ty]),
+
+        ty::Tuple(ref tys) => {
+            // (T1, ..., Tn) -- meets any bound that all of T1...Tn meet
+            Ok(tys.iter().collect())
+        }
+
+        ty::Closure(_, ref substs) => Ok(vec![substs.as_closure().tupled_upvars_ty()]),
+
+        ty::Generator(_, ref substs, _) => {
+            let generator_substs = substs.as_generator();
+            Ok(vec![generator_substs.tupled_upvars_ty(), generator_substs.witness()])
+        }
+
+        ty::GeneratorWitness(types) => {
+            Ok(infcx.replace_bound_vars_with_placeholders(types).to_vec())
+        }
+
+        // For `PhantomData<T>`, we pass `T`.
+        ty::Adt(def, substs) if def.is_phantom_data() => Ok(vec![substs.type_at(0)]),
+
+        ty::Adt(def, substs) => Ok(def.all_fields().map(|f| f.ty(tcx, substs)).collect()),
+
+        ty::Alias(ty::Opaque, ty::AliasTy { def_id, substs, .. }) => {
+            // We can resolve the `impl Trait` to its concrete type,
+            // which enforces a DAG between the functions requiring
+            // the auto trait bounds in question.
+            Ok(vec![tcx.bound_type_of(def_id).subst(tcx, substs)])
+        }
+    }
+}
+
+pub(super) fn instantiate_constituent_tys_for_sized_trait<'tcx>(
+    infcx: &InferCtxt<'tcx>,
+    ty: Ty<'tcx>,
+) -> Result<Vec<Ty<'tcx>>, NoSolution> {
+    match *ty.kind() {
+        ty::Infer(ty::IntVar(_) | ty::FloatVar(_))
+        | ty::Uint(_)
+        | ty::Int(_)
+        | ty::Bool
+        | ty::Float(_)
+        | ty::FnDef(..)
+        | ty::FnPtr(_)
+        | ty::RawPtr(..)
+        | ty::Char
+        | ty::Ref(..)
+        | ty::Generator(..)
+        | ty::GeneratorWitness(..)
+        | ty::Array(..)
+        | ty::Closure(..)
+        | ty::Never
+        | ty::Dynamic(_, _, ty::DynStar)
+        | ty::Error(_) => Ok(vec![]),
+
+        ty::Str
+        | ty::Slice(_)
+        | ty::Dynamic(..)
+        | ty::Foreign(..)
+        | ty::Alias(..)
+        | ty::Param(_)
+        | ty::Infer(ty::TyVar(_)) => Err(NoSolution),
+
+        ty::Placeholder(..)
+        | ty::Bound(..)
+        | ty::Infer(ty::FreshTy(_) | ty::FreshIntTy(_) | ty::FreshFloatTy(_)) => bug!(),
+
+        ty::Tuple(tys) => Ok(tys.to_vec()),
+
+        ty::Adt(def, substs) => {
+            let sized_crit = def.sized_constraint(infcx.tcx);
+            Ok(sized_crit
+                .0
+                .iter()
+                .map(|ty| sized_crit.rebind(*ty).subst(infcx.tcx, substs))
+                .collect())
+        }
+    }
+}
+
+pub(super) fn instantiate_constituent_tys_for_copy_clone_trait<'tcx>(
+    infcx: &InferCtxt<'tcx>,
+    ty: Ty<'tcx>,
+) -> Result<Vec<Ty<'tcx>>, NoSolution> {
+    match *ty.kind() {
+        ty::Infer(ty::IntVar(_) | ty::FloatVar(_))
+        | ty::FnDef(..)
+        | ty::FnPtr(_)
+        | ty::Error(_) => Ok(vec![]),
+
+        // Implementations are provided in core
+        ty::Uint(_)
+        | ty::Int(_)
+        | ty::Bool
+        | ty::Float(_)
+        | ty::Char
+        | ty::RawPtr(..)
+        | ty::Never
+        | ty::Ref(_, _, Mutability::Not)
+        | ty::Array(..) => Err(NoSolution),
+
+        ty::Dynamic(..)
+        | ty::Str
+        | ty::Slice(_)
+        | ty::Generator(_, _, Movability::Static)
+        | ty::Foreign(..)
+        | ty::Ref(_, _, Mutability::Mut)
+        | ty::Adt(_, _)
+        | ty::Alias(_, _)
+        | ty::Param(_)
+        | ty::Infer(ty::TyVar(_)) => Err(NoSolution),
+
+        ty::Placeholder(..)
+        | ty::Bound(..)
+        | ty::Infer(ty::FreshTy(_) | ty::FreshIntTy(_) | ty::FreshFloatTy(_)) => bug!(),
+
+        ty::Tuple(tys) => Ok(tys.to_vec()),
+
+        ty::Closure(_, substs) => Ok(vec![substs.as_closure().tupled_upvars_ty()]),
+
+        ty::Generator(_, substs, Movability::Movable) => {
+            if infcx.tcx.features().generator_clone {
+                let generator = substs.as_generator();
+                Ok(vec![generator.tupled_upvars_ty(), generator.witness()])
+            } else {
+                Err(NoSolution)
+            }
+        }
+
+        ty::GeneratorWitness(types) => {
+            Ok(infcx.replace_bound_vars_with_placeholders(types).to_vec())
+        }
+    }
+}
+
+pub(crate) fn extract_tupled_inputs_and_output_from_callable<'tcx>(
+    tcx: TyCtxt<'tcx>,
+    self_ty: Ty<'tcx>,
+    goal_kind: ty::ClosureKind,
+) -> Result<Option<ty::Binder<'tcx, (Ty<'tcx>, Ty<'tcx>)>>, NoSolution> {
+    match *self_ty.kind() {
+        ty::FnDef(def_id, substs) => Ok(Some(
+            tcx.bound_fn_sig(def_id)
+                .subst(tcx, substs)
+                .map_bound(|sig| (tcx.mk_tup(sig.inputs().iter()), sig.output())),
+        )),
+        ty::FnPtr(sig) => {
+            Ok(Some(sig.map_bound(|sig| (tcx.mk_tup(sig.inputs().iter()), sig.output()))))
+        }
+        ty::Closure(_, substs) => {
+            let closure_substs = substs.as_closure();
+            match closure_substs.kind_ty().to_opt_closure_kind() {
+                Some(closure_kind) if closure_kind.extends(goal_kind) => {}
+                None => return Ok(None),
+                _ => return Err(NoSolution),
+            }
+            Ok(Some(closure_substs.sig().map_bound(|sig| (sig.inputs()[0], sig.output()))))
+        }
+        ty::Bool
+        | ty::Char
+        | ty::Int(_)
+        | ty::Uint(_)
+        | ty::Float(_)
+        | ty::Adt(_, _)
+        | ty::Foreign(_)
+        | ty::Str
+        | ty::Array(_, _)
+        | ty::Slice(_)
+        | ty::RawPtr(_)
+        | ty::Ref(_, _, _)
+        | ty::Dynamic(_, _, _)
+        | ty::Generator(_, _, _)
+        | ty::GeneratorWitness(_)
+        | ty::Never
+        | ty::Tuple(_)
+        | ty::Alias(_, _)
+        | ty::Param(_)
+        | ty::Placeholder(_)
+        | ty::Bound(_, _)
+        | ty::Infer(_)
+        | ty::Error(_) => Err(NoSolution),
+    }
+}
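
For illustration only (not part of this commit): the "constituent types" decomposition implemented above mirrors the structural impls one would write by hand for an auto-trait-like bound. `AutoLike` below is a made-up stand-in, not a compiler trait.

    // A composite type meets the bound iff all of its constituents do,
    // matching the arms of instantiate_constituent_tys_for_auto_trait.
    trait AutoLike {}

    impl AutoLike for u32 {}
    impl AutoLike for String {}
    // Tuples: every element must meet the bound.
    impl<A: AutoLike, B: AutoLike> AutoLike for (A, B) {}
    // References: the referent must meet the bound.
    impl<T: AutoLike + ?Sized> AutoLike for &T {}

    fn assert_auto<T: AutoLike>() {}

    fn _uses() {
        assert_auto::<(u32, String)>();
        assert_auto::<&u32>();
    }
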
index e88950523537f23ba2494ba122e1a9e10447c120..61d09189798ea2d82eda6eff65546dde89c3adb2 100644 (file)
@@ -7,24 +7,18 @@
     ChalkEnvironmentAndGoal, FulfillmentError, FulfillmentErrorCode, PredicateObligation,
     SelectionError, TraitEngine,
 };
-use rustc_data_structures::fx::{FxHashMap, FxIndexSet};
-use rustc_middle::ty::{self, TypeVisitable};
+use rustc_data_structures::fx::FxIndexSet;
+use rustc_middle::ty::TypeVisitable;
 
 pub struct FulfillmentContext<'tcx> {
     obligations: FxIndexSet<PredicateObligation<'tcx>>,
 
-    relationships: FxHashMap<ty::TyVid, ty::FoundRelationships>,
-
     usable_in_snapshot: bool,
 }
 
 impl FulfillmentContext<'_> {
     pub(super) fn new() -> Self {
-        FulfillmentContext {
-            obligations: FxIndexSet::default(),
-            relationships: FxHashMap::default(),
-            usable_in_snapshot: false,
-        }
+        FulfillmentContext { obligations: FxIndexSet::default(), usable_in_snapshot: false }
     }
 
     pub(crate) fn new_in_snapshot() -> Self {
@@ -43,8 +37,6 @@ fn register_predicate_obligation(
         }
         let obligation = infcx.resolve_vars_if_possible(obligation);
 
-        super::relationships::update(self, infcx, &obligation);
-
         self.obligations.insert(obligation);
     }
 
@@ -154,8 +146,4 @@ fn select_where_possible(&mut self, infcx: &InferCtxt<'tcx>) -> Vec<FulfillmentE
     fn pending_obligations(&self) -> Vec<PredicateObligation<'tcx>> {
         self.obligations.iter().cloned().collect()
     }
-
-    fn relationships(&mut self) -> &mut FxHashMap<ty::TyVid, ty::FoundRelationships> {
-        &mut self.relationships
-    }
 }
index 0edae34190c300f2a42a616db12251dab87a175e..225c1050c7c952fc41230b7bdde07365d65ad419 100644 (file)
@@ -401,12 +401,12 @@ fn resolve_negative_obligation<'tcx>(
     infcx.resolve_regions(&outlives_env).is_empty()
 }
 
+#[instrument(level = "debug", skip(tcx), ret)]
 pub fn trait_ref_is_knowable<'tcx>(
     tcx: TyCtxt<'tcx>,
     trait_ref: ty::TraitRef<'tcx>,
 ) -> Result<(), Conflict> {
-    debug!("trait_ref_is_knowable(trait_ref={:?})", trait_ref);
-    if orphan_check_trait_ref(tcx, trait_ref, InCrate::Remote).is_ok() {
+    if orphan_check_trait_ref(trait_ref, InCrate::Remote).is_ok() {
         // A downstream or cousin crate is allowed to implement some
         // substitution of this trait-ref.
         return Err(Conflict::Downstream);
@@ -429,11 +429,9 @@ pub fn trait_ref_is_knowable<'tcx>(
     // and if we are an intermediate owner, then we don't care
     // about future-compatibility, which means that we're OK if
     // we are an owner.
-    if orphan_check_trait_ref(tcx, trait_ref, InCrate::Local).is_ok() {
-        debug!("trait_ref_is_knowable: orphan check passed");
+    if orphan_check_trait_ref(trait_ref, InCrate::Local).is_ok() {
         Ok(())
     } else {
-        debug!("trait_ref_is_knowable: nonlocal, nonfundamental, unowned");
         Err(Conflict::Upstream)
     }
 }
@@ -445,6 +443,7 @@ pub fn trait_ref_is_local_or_fundamental<'tcx>(
     trait_ref.def_id.krate == LOCAL_CRATE || tcx.has_attr(trait_ref.def_id, sym::fundamental)
 }
 
+#[derive(Debug)]
 pub enum OrphanCheckErr<'tcx> {
     NonLocalInputType(Vec<(Ty<'tcx>, bool /* Is this the first input type? */)>),
     UncoveredTy(Ty<'tcx>, Option<Ty<'tcx>>),
@@ -456,13 +455,12 @@ pub enum OrphanCheckErr<'tcx> {
 ///
 /// 1. All type parameters in `Self` must be "covered" by some local type constructor.
 /// 2. Some local type must appear in `Self`.
+#[instrument(level = "debug", skip(tcx), ret)]
 pub fn orphan_check(tcx: TyCtxt<'_>, impl_def_id: DefId) -> Result<(), OrphanCheckErr<'_>> {
-    debug!("orphan_check({:?})", impl_def_id);
-
     // We only expect this routine to be invoked on implementations
     // of a trait, not inherent implementations.
     let trait_ref = tcx.impl_trait_ref(impl_def_id).unwrap().subst_identity();
-    debug!("orphan_check: trait_ref={:?}", trait_ref);
+    debug!(?trait_ref);
 
     // If the *trait* is local to the crate, ok.
     if trait_ref.def_id.is_local() {
@@ -470,7 +468,7 @@ pub fn orphan_check(tcx: TyCtxt<'_>, impl_def_id: DefId) -> Result<(), OrphanChe
         return Ok(());
     }
 
-    orphan_check_trait_ref(tcx, trait_ref, InCrate::Local)
+    orphan_check_trait_ref(trait_ref, InCrate::Local)
 }
 
 /// Checks whether a trait-ref is potentially implementable by a crate.
@@ -559,13 +557,11 @@ pub fn orphan_check(tcx: TyCtxt<'_>, impl_def_id: DefId) -> Result<(), OrphanChe
 ///
 /// Note that this function is never called for types that have both type
 /// parameters and inference variables.
+#[instrument(level = "trace", ret)]
 fn orphan_check_trait_ref<'tcx>(
-    tcx: TyCtxt<'tcx>,
     trait_ref: ty::TraitRef<'tcx>,
     in_crate: InCrate,
 ) -> Result<(), OrphanCheckErr<'tcx>> {
-    debug!("orphan_check_trait_ref(trait_ref={:?}, in_crate={:?})", trait_ref, in_crate);
-
     if trait_ref.needs_infer() && trait_ref.needs_subst() {
         bug!(
             "can't orphan check a trait ref with both params and inference variables {:?}",
@@ -573,7 +569,7 @@ fn orphan_check_trait_ref<'tcx>(
         );
     }
 
-    let mut checker = OrphanChecker::new(tcx, in_crate);
+    let mut checker = OrphanChecker::new(in_crate);
     match trait_ref.visit_with(&mut checker) {
         ControlFlow::Continue(()) => Err(OrphanCheckErr::NonLocalInputType(checker.non_local_tys)),
         ControlFlow::Break(OrphanCheckEarlyExit::ParamTy(ty)) => {
@@ -592,7 +588,6 @@ fn orphan_check_trait_ref<'tcx>(
 }
 
 struct OrphanChecker<'tcx> {
-    tcx: TyCtxt<'tcx>,
     in_crate: InCrate,
     in_self_ty: bool,
     /// Ignore orphan check failures and exclusively search for the first
@@ -602,9 +597,8 @@ struct OrphanChecker<'tcx> {
 }
 
 impl<'tcx> OrphanChecker<'tcx> {
-    fn new(tcx: TyCtxt<'tcx>, in_crate: InCrate) -> Self {
+    fn new(in_crate: InCrate) -> Self {
         OrphanChecker {
-            tcx,
             in_crate,
             in_self_ty: true,
             search_first_local_ty: false,
@@ -614,12 +608,12 @@ fn new(tcx: TyCtxt<'tcx>, in_crate: InCrate) -> Self {
 
     fn found_non_local_ty(&mut self, t: Ty<'tcx>) -> ControlFlow<OrphanCheckEarlyExit<'tcx>> {
         self.non_local_tys.push((t, self.in_self_ty));
-        ControlFlow::CONTINUE
+        ControlFlow::Continue(())
     }
 
     fn found_param_ty(&mut self, t: Ty<'tcx>) -> ControlFlow<OrphanCheckEarlyExit<'tcx>> {
         if self.search_first_local_ty {
-            ControlFlow::CONTINUE
+            ControlFlow::Continue(())
         } else {
             ControlFlow::Break(OrphanCheckEarlyExit::ParamTy(t))
         }
@@ -641,7 +635,7 @@ enum OrphanCheckEarlyExit<'tcx> {
 impl<'tcx> TypeVisitor<'tcx> for OrphanChecker<'tcx> {
     type BreakTy = OrphanCheckEarlyExit<'tcx>;
     fn visit_region(&mut self, _r: ty::Region<'tcx>) -> ControlFlow<Self::BreakTy> {
-        ControlFlow::CONTINUE
+        ControlFlow::Continue(())
     }
 
     fn visit_ty(&mut self, ty: Ty<'tcx>) -> ControlFlow<Self::BreakTy> {
@@ -697,13 +691,17 @@ fn visit_ty(&mut self, ty: Ty<'tcx>) -> ControlFlow<Self::BreakTy> {
                 }
             }
             ty::Error(_) => ControlFlow::Break(OrphanCheckEarlyExit::LocalTy(ty)),
-            ty::Closure(..) | ty::Generator(..) | ty::GeneratorWitness(..) => {
-                self.tcx.sess.delay_span_bug(
-                    DUMMY_SP,
-                    format!("ty_is_local invoked on closure or generator: {:?}", ty),
-                );
-                ControlFlow::Break(OrphanCheckEarlyExit::LocalTy(ty))
+            ty::Closure(did, ..) | ty::Generator(did, ..) => {
+                if self.def_id_is_local(did) {
+                    ControlFlow::Break(OrphanCheckEarlyExit::LocalTy(ty))
+                } else {
+                    self.found_non_local_ty(ty)
+                }
             }
+            // This should only be created when checking whether we even need to
+            // consider some auto trait impl. There will never be multiple impls,
+            // so we can just act as if it were a local type here.
+            ty::GeneratorWitness(_) => ControlFlow::Break(OrphanCheckEarlyExit::LocalTy(ty)),
             ty::Alias(ty::Opaque, ..) => {
                 // This merits some explanation.
                 // Normally, opaque types are not involved when performing
@@ -756,6 +754,6 @@ fn visit_ty(&mut self, ty: Ty<'tcx>) -> ControlFlow<Self::BreakTy> {
     /// parameters, allowing uncovered const parameters in impls seems more useful
     /// than allowing `impl<T> Trait<local_fn_ptr, T> for i32` to compile.
     fn visit_const(&mut self, _c: ty::Const<'tcx>) -> ControlFlow<Self::BreakTy> {
-        ControlFlow::CONTINUE
+        ControlFlow::Continue(())
     }
 }
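
For illustration only (not part of this commit): the two orphan-rule conditions quoted in the doc comment earlier in this file correspond to the familiar coherence errors. A hypothetical downstream crate:

    struct Local;

    // Accepted: the impl's self type is local to the crate.
    impl std::fmt::Display for Local {
        fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
            write!(f, "local")
        }
    }

    // Rejected by the orphan check (E0117): neither the trait nor any
    // input type of the impl is local to this crate.
    // impl std::fmt::Display for Vec<u8> { /* ... */ }
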
index 71fb6058cd2c54e722a0be82f736dd295f7b9ceb..f779d9dd8d93560302988bd9d03d981eda133ae8 100644 (file)
@@ -198,7 +198,7 @@ fn visit_const(&mut self, c: ty::Const<'tcx>) -> ControlFlow<Self::BreakTy> {
                 // If we start allowing directly writing `ConstKind::Expr` without an intermediate anon const
                 // this will be incorrect. It might be worth investigating making `predicates_of` elaborate
                 // all of the `ConstEvaluatable` bounds rather than having a visitor here.
-                ControlFlow::CONTINUE
+                ControlFlow::Continue(())
             }
         }
     }
index df57c0f60fa6d760675b5ece4064a98d0d41ebe8..0419bb3f724f9ed86454dc73c60dc5db20502ada 100644 (file)
@@ -82,9 +82,7 @@ pub fn recompute_applicable_impls<'tcx>(
 
     let predicates =
         tcx.predicates_of(obligation.cause.body_id.owner.to_def_id()).instantiate_identity(tcx);
-    for obligation in
-        elaborate_predicates_with_span(tcx, std::iter::zip(predicates.predicates, predicates.spans))
-    {
+    for obligation in elaborate_predicates_with_span(tcx, predicates.into_iter()) {
         let kind = obligation.predicate.kind();
         if let ty::PredicateKind::Clause(ty::Clause::Trait(trait_pred)) = kind.skip_binder()
             && param_env_candidate_may_apply(kind.rebind(trait_pred))
index b23b3d34d515aa854cf451766f161dd064fdb61c..52971486c553e361fe1d91cf55947ca15de59b45 100644 (file)
@@ -454,9 +454,11 @@ struct ErrorDescriptor<'tcx> {
             }
         }
 
-        for (error, suppressed) in iter::zip(errors, is_suppressed) {
-            if !suppressed {
-                self.report_fulfillment_error(error, body_id);
+        for from_expansion in [false, true] {
+            for (error, suppressed) in iter::zip(errors, &is_suppressed) {
+                if !suppressed && error.obligation.cause.span.from_expansion() == from_expansion {
+                    self.report_fulfillment_error(error, body_id);
+                }
             }
         }
 
@@ -1348,6 +1350,7 @@ fn report_selection_error(
                         expected_trait_ref,
                         obligation.cause.code(),
                         found_node,
+                        obligation.param_env,
                     )
                 } else {
                     let (closure_span, closure_arg_span, found) = found_did
@@ -2216,7 +2219,7 @@ fn maybe_report_ambiguity(
                 // This is kind of a hack: it frequently happens that some earlier
                 // error prevents types from being fully inferred, and then we get
                 // a bunch of uninteresting errors saying something like "<generic
-                // #0> doesn't implement Sized".  It may even be true that we
+                // #0> doesn't implement Sized". It may even be true that we
                 // could just skip over all checks where the self-ty is an
                 // inference variable, but I was afraid that there might be an
                 // inference variable created, registered as an obligation, and
@@ -2930,11 +2933,12 @@ fn visit_ty(&mut self, ty: Ty<'tcx>) -> ControlFlow<Self::BreakTy> {
         if matches!(ty.kind(), ty::Infer(ty::FloatVar(_) | ty::IntVar(_))) {
             ControlFlow::Break(())
         } else {
-            ControlFlow::CONTINUE
+            ControlFlow::Continue(())
         }
     }
 }
 
+#[derive(Copy, Clone)]
 pub enum DefIdOrName {
     DefId(DefId),
     Name(&'static str),
index 1b98ead29f851d96e076703361ea7053be52d9d0..39e50b2accf17af05b1f4acb4f0c8d2451267e81 100644 (file)
@@ -212,6 +212,13 @@ fn suggest_add_clone_to_arg(
         trait_pred: ty::PolyTraitPredicate<'tcx>,
     ) -> bool;
 
+    fn extract_callable_info(
+        &self,
+        hir_id: HirId,
+        param_env: ty::ParamEnv<'tcx>,
+        found: Ty<'tcx>,
+    ) -> Option<(DefIdOrName, Ty<'tcx>, Vec<Ty<'tcx>>)>;
+
     fn suggest_add_reference_to_arg(
         &self,
         obligation: &PredicateObligation<'tcx>,
@@ -276,6 +283,7 @@ fn report_closure_arg_mismatch(
         expected: ty::PolyTraitRef<'tcx>,
         cause: &ObligationCauseCode<'tcx>,
         found_node: Option<Node<'_>>,
+        param_env: ty::ParamEnv<'tcx>,
     ) -> DiagnosticBuilder<'tcx, ErrorGuaranteed>;
 
     fn note_conflicting_closure_bounds(
@@ -878,6 +886,12 @@ fn suggest_fn_call(
         err: &mut Diagnostic,
         trait_pred: ty::PolyTraitPredicate<'tcx>,
     ) -> bool {
+        // It doesn't make sense to make this suggestion outside of typeck...
+        // (also autoderef will ICE...)
+        if self.typeck_results.is_none() {
+            return false;
+        }
+
         if let ty::PredicateKind::Clause(ty::Clause::Trait(trait_pred)) = obligation.predicate.kind().skip_binder()
             && Some(trait_pred.def_id()) == self.tcx.lang_items().sized_trait()
         {
@@ -885,92 +899,17 @@ fn suggest_fn_call(
             return false;
         }
 
-        // This is duplicated from `extract_callable_info` in typeck, which
-        // relies on autoderef, so we can't use it here.
-        let found = trait_pred.self_ty().skip_binder().peel_refs();
-        let Some((def_id_or_name, output, inputs)) = (match *found.kind()
-        {
-            ty::FnPtr(fn_sig) => {
-                Some((DefIdOrName::Name("function pointer"), fn_sig.output(), fn_sig.inputs()))
-            }
-            ty::FnDef(def_id, _) => {
-                let fn_sig = found.fn_sig(self.tcx);
-                Some((DefIdOrName::DefId(def_id), fn_sig.output(), fn_sig.inputs()))
-            }
-            ty::Closure(def_id, substs) => {
-                let fn_sig = substs.as_closure().sig();
-                Some((
-                    DefIdOrName::DefId(def_id),
-                    fn_sig.output(),
-                    fn_sig.inputs().map_bound(|inputs| &inputs[1..]),
-                ))
-            }
-            ty::Alias(ty::Opaque, ty::AliasTy { def_id, substs, .. }) => {
-                self.tcx.bound_item_bounds(def_id).subst(self.tcx, substs).iter().find_map(|pred| {
-                    if let ty::PredicateKind::Clause(ty::Clause::Projection(proj)) = pred.kind().skip_binder()
-                    && Some(proj.projection_ty.def_id) == self.tcx.lang_items().fn_once_output()
-                    // args tuple will always be substs[1]
-                    && let ty::Tuple(args) = proj.projection_ty.substs.type_at(1).kind()
-                    {
-                        Some((
-                            DefIdOrName::DefId(def_id),
-                            pred.kind().rebind(proj.term.ty().unwrap()),
-                            pred.kind().rebind(args.as_slice()),
-                        ))
-                    } else {
-                        None
-                    }
-                })
-            }
-            ty::Dynamic(data, _, ty::Dyn) => {
-                data.iter().find_map(|pred| {
-                    if let ty::ExistentialPredicate::Projection(proj) = pred.skip_binder()
-                    && Some(proj.def_id) == self.tcx.lang_items().fn_once_output()
-                    // for existential projection, substs are shifted over by 1
-                    && let ty::Tuple(args) = proj.substs.type_at(0).kind()
-                    {
-                        Some((
-                            DefIdOrName::Name("trait object"),
-                            pred.rebind(proj.term.ty().unwrap()),
-                            pred.rebind(args.as_slice()),
-                        ))
-                    } else {
-                        None
-                    }
-                })
-            }
-            ty::Param(_) => {
-                obligation.param_env.caller_bounds().iter().find_map(|pred| {
-                    if let ty::PredicateKind::Clause(ty::Clause::Projection(proj)) = pred.kind().skip_binder()
-                    && Some(proj.projection_ty.def_id) == self.tcx.lang_items().fn_once_output()
-                    && proj.projection_ty.self_ty() == found
-                    // args tuple will always be substs[1]
-                    && let ty::Tuple(args) = proj.projection_ty.substs.type_at(1).kind()
-                    {
-                        Some((
-                            DefIdOrName::Name("type parameter"),
-                            pred.kind().rebind(proj.term.ty().unwrap()),
-                            pred.kind().rebind(args.as_slice()),
-                        ))
-                    } else {
-                        None
-                    }
-                })
-            }
-            _ => None,
-        }) else { return false; };
-        let output = self.replace_bound_vars_with_fresh_vars(
-            obligation.cause.span,
+        let self_ty = self.replace_bound_vars_with_fresh_vars(
+            DUMMY_SP,
             LateBoundRegionConversionTime::FnCall,
-            output,
+            trait_pred.self_ty(),
         );
-        let inputs = inputs.skip_binder().iter().map(|ty| {
-            self.replace_bound_vars_with_fresh_vars(
-                obligation.cause.span,
-                LateBoundRegionConversionTime::FnCall,
-                inputs.rebind(*ty),
-            )
-        });
+
+        let Some((def_id_or_name, output, inputs)) = self.extract_callable_info(
+            obligation.cause.body_id,
+            obligation.param_env,
+            self_ty,
+        ) else { return false; };
 
         // Remapping bound vars here
         let trait_pred_and_self = trait_pred.map_bound(|trait_pred| (trait_pred, output));
@@ -998,6 +937,7 @@ fn suggest_fn_call(
         };
 
         let args = inputs
+            .into_iter()
             .map(|ty| {
                 if ty.is_suggestable(self.tcx, false) {
                     format!("/* {ty} */")
@@ -1161,6 +1101,120 @@ fn suggest_add_clone_to_arg(
         false
     }
 
+    /// Extracts information about a callable type for diagnostics. This is a
+    /// heuristic -- it doesn't necessarily mean that a type is always callable,
+    /// because the callable type must also be well-formed to be called.
+    fn extract_callable_info(
+        &self,
+        hir_id: HirId,
+        param_env: ty::ParamEnv<'tcx>,
+        found: Ty<'tcx>,
+    ) -> Option<(DefIdOrName, Ty<'tcx>, Vec<Ty<'tcx>>)> {
+        // Autoderef is useful here because sometimes we box callables, etc.
+        let Some((def_id_or_name, output, inputs)) = (self.autoderef_steps)(found).into_iter().find_map(|(found, _)| {
+            match *found.kind() {
+                ty::FnPtr(fn_sig) =>
+                    Some((DefIdOrName::Name("function pointer"), fn_sig.output(), fn_sig.inputs())),
+                ty::FnDef(def_id, _) => {
+                    let fn_sig = found.fn_sig(self.tcx);
+                    Some((DefIdOrName::DefId(def_id), fn_sig.output(), fn_sig.inputs()))
+                }
+                ty::Closure(def_id, substs) => {
+                    let fn_sig = substs.as_closure().sig();
+                    Some((DefIdOrName::DefId(def_id), fn_sig.output(), fn_sig.inputs().map_bound(|inputs| &inputs[1..])))
+                }
+                ty::Alias(ty::Opaque, ty::AliasTy { def_id, substs, .. }) => {
+                    self.tcx.item_bounds(def_id).subst(self.tcx, substs).iter().find_map(|pred| {
+                        if let ty::PredicateKind::Clause(ty::Clause::Projection(proj)) = pred.kind().skip_binder()
+                        && Some(proj.projection_ty.def_id) == self.tcx.lang_items().fn_once_output()
+                        // args tuple will always be substs[1]
+                        && let ty::Tuple(args) = proj.projection_ty.substs.type_at(1).kind()
+                        {
+                            Some((
+                                DefIdOrName::DefId(def_id),
+                                pred.kind().rebind(proj.term.ty().unwrap()),
+                                pred.kind().rebind(args.as_slice()),
+                            ))
+                        } else {
+                            None
+                        }
+                    })
+                }
+                ty::Dynamic(data, _, ty::Dyn) => {
+                    data.iter().find_map(|pred| {
+                        if let ty::ExistentialPredicate::Projection(proj) = pred.skip_binder()
+                        && Some(proj.def_id) == self.tcx.lang_items().fn_once_output()
+                        // for existential projection, substs are shifted over by 1
+                        && let ty::Tuple(args) = proj.substs.type_at(0).kind()
+                        {
+                            Some((
+                                DefIdOrName::Name("trait object"),
+                                pred.rebind(proj.term.ty().unwrap()),
+                                pred.rebind(args.as_slice()),
+                            ))
+                        } else {
+                            None
+                        }
+                    })
+                }
+                ty::Param(param) => {
+                    let generics = self.tcx.generics_of(hir_id.owner.to_def_id());
+                    let name = if generics.count() > param.index as usize
+                        && let def = generics.param_at(param.index as usize, self.tcx)
+                        && matches!(def.kind, ty::GenericParamDefKind::Type { .. })
+                        && def.name == param.name
+                    {
+                        DefIdOrName::DefId(def.def_id)
+                    } else {
+                        DefIdOrName::Name("type parameter")
+                    };
+                    param_env.caller_bounds().iter().find_map(|pred| {
+                        if let ty::PredicateKind::Clause(ty::Clause::Projection(proj)) = pred.kind().skip_binder()
+                        && Some(proj.projection_ty.def_id) == self.tcx.lang_items().fn_once_output()
+                        && proj.projection_ty.self_ty() == found
+                        // args tuple will always be substs[1]
+                        && let ty::Tuple(args) = proj.projection_ty.substs.type_at(1).kind()
+                        {
+                            Some((
+                                name,
+                                pred.kind().rebind(proj.term.ty().unwrap()),
+                                pred.kind().rebind(args.as_slice()),
+                            ))
+                        } else {
+                            None
+                        }
+                    })
+                }
+                _ => None,
+            }
+        }) else { return None; };
+
+        let output = self.replace_bound_vars_with_fresh_vars(
+            DUMMY_SP,
+            LateBoundRegionConversionTime::FnCall,
+            output,
+        );
+        let inputs = inputs
+            .skip_binder()
+            .iter()
+            .map(|ty| {
+                self.replace_bound_vars_with_fresh_vars(
+                    DUMMY_SP,
+                    LateBoundRegionConversionTime::FnCall,
+                    inputs.rebind(*ty),
+                )
+            })
+            .collect();
+
+        // We don't want to register any extra obligations here: they should already
+        // be implied by well-formedness, and registering them could also result in
+        // erroneous errors later on.
+        let InferOk { value: output, obligations: _ } =
+            self.at(&ObligationCause::dummy(), param_env).normalize(output);
+
+        if output.is_ty_var() { None } else { Some((def_id_or_name, output, inputs)) }
+    }
+
     fn suggest_add_reference_to_arg(
         &self,
         obligation: &PredicateObligation<'tcx>,
@@ -1925,6 +1979,7 @@ fn report_closure_arg_mismatch(
         expected: ty::PolyTraitRef<'tcx>,
         cause: &ObligationCauseCode<'tcx>,
         found_node: Option<Node<'_>>,
+        param_env: ty::ParamEnv<'tcx>,
     ) -> DiagnosticBuilder<'tcx, ErrorGuaranteed> {
         pub(crate) fn build_fn_sig_ty<'tcx>(
             infcx: &InferCtxt<'tcx>,
@@ -1987,7 +2042,7 @@ pub(crate) fn build_fn_sig_ty<'tcx>(
         self.note_conflicting_closure_bounds(cause, &mut err);
 
         if let Some(found_node) = found_node {
-            hint_missing_borrow(span, found, expected, found_node, &mut err);
+            hint_missing_borrow(self, param_env, span, found, expected, found_node, &mut err);
         }
 
         err
@@ -2017,7 +2072,7 @@ fn note_conflicting_closure_bounds(
 
             // Find another predicate whose self-type is equal to the expected self type,
             // but whose substs don't match.
-            let other_pred = std::iter::zip(&predicates.predicates, &predicates.spans)
+            let other_pred = predicates.into_iter()
                 .enumerate()
                 .find(|(other_idx, (pred, _))| match pred.kind().skip_binder() {
                     ty::PredicateKind::Clause(ty::Clause::Trait(trait_pred))
@@ -2042,7 +2097,7 @@ fn note_conflicting_closure_bounds(
             // If we found one, then it's very likely the cause of the error.
             if let Some((_, (_, other_pred_span))) = other_pred {
                 err.span_note(
-                    *other_pred_span,
+                    other_pred_span,
                     "closure inferred to have a different signature due to this bound",
                 );
             }
@@ -2259,7 +2314,7 @@ fn maybe_note_obligation_cause_for_async_await(
             // generator interior are not generally known, so we
             // want to erase them when comparing (and anyway,
             // `Send` and other bounds are generally unaffected by
-            // the choice of region).  When erasing regions, we
+            // the choice of region). When erasing regions, we
             // also have to erase late-bound regions. This is
             // because the types that appear in the generator
             // interior generally contain "bound regions" to
@@ -2275,7 +2330,7 @@ fn maybe_note_obligation_cause_for_async_await(
         };
 
         // Get the typeck results from the infcx if the generator is the function we are currently
-        // type-checking; otherwise, get them by performing a query.  This is needed to avoid
+        // type-checking; otherwise, get them by performing a query. This is needed to avoid
         // cycles. If we can't use resolved types because the generator comes from another crate,
         // we still provide a targeted error but without all the relevant spans.
         let generator_data = match &self.typeck_results {
@@ -3694,6 +3749,8 @@ fn probe_assoc_types_at_expr(
 
 /// Add a hint to add a missing borrow or remove an unnecessary one.
 fn hint_missing_borrow<'tcx>(
+    infcx: &InferCtxt<'tcx>,
+    param_env: ty::ParamEnv<'tcx>,
     span: Span,
     found: Ty<'tcx>,
     expected: Ty<'tcx>,
@@ -3716,7 +3773,7 @@ fn hint_missing_borrow<'tcx>(
     // This could be a variant constructor, for example.
     let Some(fn_decl) = found_node.fn_decl() else { return; };
 
-    let arg_spans = fn_decl.inputs.iter().map(|ty| ty.span);
+    let args = fn_decl.inputs.iter();
 
     fn get_deref_type_and_refs(mut ty: Ty<'_>) -> (Ty<'_>, usize) {
         let mut refs = 0;
@@ -3732,21 +3789,34 @@ fn get_deref_type_and_refs(mut ty: Ty<'_>) -> (Ty<'_>, usize) {
     let mut to_borrow = Vec::new();
     let mut remove_borrow = Vec::new();
 
-    for ((found_arg, expected_arg), arg_span) in found_args.zip(expected_args).zip(arg_spans) {
+    for ((found_arg, expected_arg), arg) in found_args.zip(expected_args).zip(args) {
         let (found_ty, found_refs) = get_deref_type_and_refs(*found_arg);
         let (expected_ty, expected_refs) = get_deref_type_and_refs(*expected_arg);
 
-        if found_ty == expected_ty {
+        if infcx.can_eq(param_env, found_ty, expected_ty).is_ok() {
             if found_refs < expected_refs {
-                to_borrow.push((arg_span, expected_arg.to_string()));
+                to_borrow.push((arg.span.shrink_to_lo(), "&".repeat(expected_refs - found_refs)));
             } else if found_refs > expected_refs {
-                remove_borrow.push((arg_span, expected_arg.to_string()));
+                let mut span = arg.span.shrink_to_lo();
+                let mut left = found_refs - expected_refs;
+                let mut ty = arg;
+                while let hir::TyKind::Ref(_, mut_ty) = &ty.kind && left > 0 {
+                    span = span.with_hi(mut_ty.ty.span.lo());
+                    ty = mut_ty.ty;
+                    left -= 1;
+                }
+                let sugg = if left == 0 {
+                    (span, String::new())
+                } else {
+                    (arg.span, expected_arg.to_string())
+                };
+                remove_borrow.push(sugg);
             }
         }
     }
 
     if !to_borrow.is_empty() {
-        err.multipart_suggestion(
+        err.multipart_suggestion_verbose(
             "consider borrowing the argument",
             to_borrow,
             Applicability::MaybeIncorrect,
@@ -3754,7 +3824,7 @@ fn get_deref_type_and_refs(mut ty: Ty<'_>) -> (Ty<'_>, usize) {
     }
 
     if !remove_borrow.is_empty() {
-        err.multipart_suggestion(
+        err.multipart_suggestion_verbose(
             "do not borrow the argument",
             remove_borrow,
             Applicability::MaybeIncorrect,
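
For illustration only (not part of this commit): the kind of user code the reworked `hint_missing_borrow` suggestion targets. The suggestion now inserts or removes just the `&`s on the closure's parameter types instead of replacing the whole type.

    fn takes_ref_fn(f: impl Fn(&i32) -> i32) -> i32 {
        f(&1)
    }

    fn _demo() {
        // Signature mismatch: the closure takes `i32` by value while the bound
        // expects `&i32`; the suggested fix is to borrow, i.e. `|x: &i32| *x + 1`.
        // takes_ref_fn(|x: i32| x + 1);
        takes_ref_fn(|x: &i32| *x + 1);
    }
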
index 76a755ed9e09d4a1da7070064fba845301c5d12a..5a58d37e18362694dc1b5ec00ec16c22c20a15c7 100644 (file)
@@ -1,5 +1,4 @@
 use crate::infer::{InferCtxt, TyOrConstInferVar};
-use rustc_data_structures::fx::FxHashMap;
 use rustc_data_structures::obligation_forest::ProcessResult;
 use rustc_data_structures::obligation_forest::{Error, ForestObligation, Outcome};
 use rustc_data_structures::obligation_forest::{ObligationForest, ObligationProcessor};
@@ -54,8 +53,6 @@ pub struct FulfillmentContext<'tcx> {
     // fulfillment context.
     predicates: ObligationForest<PendingPredicateObligation<'tcx>>,
 
-    relationships: FxHashMap<ty::TyVid, ty::FoundRelationships>,
-
     // Is it OK to register obligations into this infcx inside
     // an infcx snapshot?
     //
@@ -85,19 +82,11 @@ pub struct PendingPredicateObligation<'tcx> {
 impl<'a, 'tcx> FulfillmentContext<'tcx> {
     /// Creates a new fulfillment context.
     pub(super) fn new() -> FulfillmentContext<'tcx> {
-        FulfillmentContext {
-            predicates: ObligationForest::new(),
-            relationships: FxHashMap::default(),
-            usable_in_snapshot: false,
-        }
+        FulfillmentContext { predicates: ObligationForest::new(), usable_in_snapshot: false }
     }
 
     pub(super) fn new_in_snapshot() -> FulfillmentContext<'tcx> {
-        FulfillmentContext {
-            predicates: ObligationForest::new(),
-            relationships: FxHashMap::default(),
-            usable_in_snapshot: true,
-        }
+        FulfillmentContext { predicates: ObligationForest::new(), usable_in_snapshot: true }
     }
 
     /// Attempts to select obligations using `selcx`.
@@ -139,8 +128,6 @@ fn register_predicate_obligation(
 
         assert!(!infcx.is_in_snapshot() || self.usable_in_snapshot);
 
-        super::relationships::update(self, infcx, &obligation);
-
         self.predicates
             .register_obligation(PendingPredicateObligation { obligation, stalled_on: vec![] });
     }
@@ -164,10 +151,6 @@ fn select_where_possible(&mut self, infcx: &InferCtxt<'tcx>) -> Vec<FulfillmentE
     fn pending_obligations(&self) -> Vec<PredicateObligation<'tcx>> {
         self.predicates.map_pending_obligations(|o| o.obligation.clone())
     }
-
-    fn relationships(&mut self) -> &mut FxHashMap<ty::TyVid, ty::FoundRelationships> {
-        &mut self.relationships
-    }
 }
 
 struct FulfillProcessor<'a, 'tcx> {
index b6ded4ce5a3962e0d252dd81b0b226c72017174e..a41a601f2db076a700bdb32b787a202268f2a914 100644 (file)
@@ -1,29 +1,36 @@
 //! Miscellaneous type-system utilities that are too small to deserve their own modules.
 
-use crate::infer::InferCtxtExt as _;
 use crate::traits::{self, ObligationCause};
 
+use rustc_data_structures::fx::FxIndexSet;
 use rustc_hir as hir;
-use rustc_infer::infer::TyCtxtInferExt;
+use rustc_infer::infer::{RegionResolutionError, TyCtxtInferExt};
+use rustc_infer::{infer::outlives::env::OutlivesEnvironment, traits::FulfillmentError};
 use rustc_middle::ty::{self, Ty, TyCtxt, TypeVisitable};
 
-use crate::traits::error_reporting::TypeErrCtxtExt;
+use super::outlives_bounds::InferCtxtExt;
 
-#[derive(Clone)]
 pub enum CopyImplementationError<'tcx> {
-    InfrigingFields(Vec<(&'tcx ty::FieldDef, Ty<'tcx>)>),
+    InfrigingFields(Vec<(&'tcx ty::FieldDef, Ty<'tcx>, InfringingFieldsReason<'tcx>)>),
     NotAnAdt,
     HasDestructor,
 }
 
-pub fn can_type_implement_copy<'tcx>(
+pub enum InfringingFieldsReason<'tcx> {
+    Fulfill(Vec<FulfillmentError<'tcx>>),
+    Regions(Vec<RegionResolutionError<'tcx>>),
+}
+
+/// Checks that the fields of the type (an ADT) all implement `Copy`.
+///
+/// If any field doesn't implement `Copy`, returns an error containing a list of
+/// those violating fields. If it's not an ADT, returns `Err(NotAnAdt)`.
+pub fn type_allowed_to_implement_copy<'tcx>(
     tcx: TyCtxt<'tcx>,
     param_env: ty::ParamEnv<'tcx>,
     self_type: Ty<'tcx>,
     parent_cause: ObligationCause<'tcx>,
 ) -> Result<(), CopyImplementationError<'tcx>> {
-    // FIXME: (@jroesch) float this code up
-    let infcx = tcx.infer_ctxt().build();
     let (adt, substs) = match self_type.kind() {
         // These types used to have a builtin impl.
         // Now libcore provides that impl.
@@ -42,42 +49,82 @@ pub fn can_type_implement_copy<'tcx>(
         _ => return Err(CopyImplementationError::NotAnAdt),
     };
 
+    let copy_def_id = tcx.require_lang_item(hir::LangItem::Copy, Some(parent_cause.span));
+
     let mut infringing = Vec::new();
     for variant in adt.variants() {
         for field in &variant.fields {
-            let ty = field.ty(tcx, substs);
-            if ty.references_error() {
+            // Do this per-field to get better error messages.
+            let infcx = tcx.infer_ctxt().build();
+            let ocx = traits::ObligationCtxt::new(&infcx);
+
+            let unnormalized_ty = field.ty(tcx, substs);
+            if unnormalized_ty.references_error() {
                 continue;
             }
-            let span = tcx.def_span(field.did);
+
+            let field_span = tcx.def_span(field.did);
+            let field_ty_span = match tcx.hir().get_if_local(field.did) {
+                Some(hir::Node::Field(field_def)) => field_def.ty.span,
+                _ => field_span,
+            };
+
             // FIXME(compiler-errors): This gives us better spans for bad
             // projection types like in issue-50480.
             // If the ADT has substs, point to the cause we are given.
             // If it does not, then this field probably doesn't normalize
             // to begin with, and point to the bad field's span instead.
-            let cause = if field
+            let normalization_cause = if field
                 .ty(tcx, traits::InternalSubsts::identity_for_item(tcx, adt.did()))
                 .has_non_region_param()
             {
                 parent_cause.clone()
             } else {
-                ObligationCause::dummy_with_span(span)
-            };
-            match traits::fully_normalize(&infcx, cause, param_env, ty) {
-                Ok(ty) => {
-                    if !infcx.type_is_copy_modulo_regions(param_env, ty, span) {
-                        infringing.push((field, ty));
-                    }
-                }
-                Err(errors) => {
-                    infcx.err_ctxt().report_fulfillment_errors(&errors, None);
-                }
+                ObligationCause::dummy_with_span(field_ty_span)
             };
+            let ty = ocx.normalize(&normalization_cause, param_env, unnormalized_ty);
+            let normalization_errors = ocx.select_where_possible();
+            if !normalization_errors.is_empty() {
+                tcx.sess.delay_span_bug(field_span, format!("couldn't normalize struct field `{unnormalized_ty}` when checking Copy implementation"));
+                continue;
+            }
+
+            ocx.register_bound(
+                ObligationCause::dummy_with_span(field_ty_span),
+                param_env,
+                ty,
+                copy_def_id,
+            );
+            let errors = ocx.select_all_or_error();
+            if !errors.is_empty() {
+                infringing.push((field, ty, InfringingFieldsReason::Fulfill(errors)));
+            }
+
+            // Check regions assuming the self type of the impl is WF
+            let outlives_env = OutlivesEnvironment::with_bounds(
+                param_env,
+                Some(&infcx),
+                infcx.implied_bounds_tys(
+                    param_env,
+                    parent_cause.body_id,
+                    FxIndexSet::from_iter([self_type]),
+                ),
+            );
+            infcx.process_registered_region_obligations(
+                outlives_env.region_bound_pairs(),
+                param_env,
+            );
+            let errors = infcx.resolve_regions(&outlives_env);
+            if !errors.is_empty() {
+                infringing.push((field, ty, InfringingFieldsReason::Regions(errors)));
+            }
         }
     }
+
     if !infringing.is_empty() {
         return Err(CopyImplementationError::InfrigingFields(infringing));
     }
+
     if adt.has_dtor(tcx) {
         return Err(CopyImplementationError::HasDestructor);
     }
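As context for the per-field check above (not part of this commit): a minimal, hedged sketch of the kind of user code it accepts and rejects. The struct names are invented for illustration.

```rust
// Illustrative only: the per-field Copy check above walks every field of the ADT.
#[derive(Clone, Copy)]
struct Point {
    x: f64, // every field is `Copy`, so a `Copy` impl is allowed
    y: f64,
}

#[derive(Clone)]
struct Named {
    name: String, // `String` is not `Copy`
}

// Uncommenting this impl would be rejected: the loop above records `name` as an
// infringing field and returns `CopyImplementationError::InfrigingFields`,
// pointing at the field's type span.
// impl Copy for Named {}

fn main() {
    let p = Point { x: 1.0, y: 2.0 };
    let q = p; // a plain copy; `p` remains usable
    println!("{} {} {}", p.x, q.y, Named { name: "n".into() }.name);
}
```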
index 241ae0275cec849d9bd31f27dbb1f8c225a5da9c..3c640cdc503ceba9010ffe62f5119be36868ff2f 100644 (file)
@@ -14,7 +14,6 @@
 pub mod outlives_bounds;
 mod project;
 pub mod query;
-pub(crate) mod relationships;
 mod select;
 mod specialize;
 mod structural_match;
@@ -115,14 +114,12 @@ pub fn predicates_for_generics<'tcx>(
     param_env: ty::ParamEnv<'tcx>,
     generic_bounds: ty::InstantiatedPredicates<'tcx>,
 ) -> impl Iterator<Item = PredicateObligation<'tcx>> {
-    std::iter::zip(generic_bounds.predicates, generic_bounds.spans).enumerate().map(
-        move |(idx, (predicate, span))| Obligation {
-            cause: cause(idx, span),
-            recursion_depth: 0,
-            param_env,
-            predicate,
-        },
-    )
+    generic_bounds.into_iter().enumerate().map(move |(idx, (predicate, span))| Obligation {
+        cause: cause(idx, span),
+        recursion_depth: 0,
+        param_env,
+        predicate,
+    })
 }
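The same shape of change recurs throughout this commit: `InstantiatedPredicates` and `instantiate_own` now appear to yield `(predicate, span)` pairs directly, instead of exposing two parallel `Vec`s that every caller had to `zip`. A self-contained sketch of that refactor pattern, using invented names rather than the compiler's types:

```rust
// Before: callers zip two parallel vectors themselves.
// After: the container implements IntoIterator over pairs.
struct Bounds {
    predicates: Vec<String>,
    spans: Vec<u32>,
}

impl IntoIterator for Bounds {
    type Item = (String, u32);
    type IntoIter = std::iter::Zip<std::vec::IntoIter<String>, std::vec::IntoIter<u32>>;
    fn into_iter(self) -> Self::IntoIter {
        self.predicates.into_iter().zip(self.spans)
    }
}

fn main() {
    let bounds = Bounds {
        predicates: vec!["T: Copy".into(), "T: Send".into()],
        spans: vec![10, 20],
    };
    // Call sites no longer spell out std::iter::zip(bounds.predicates, bounds.spans).
    for (pred, span) in bounds {
        println!("{pred} @ {span}");
    }
}
```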
 
 /// Determines whether the type `ty` is known to meet `bound` and
@@ -308,7 +305,7 @@ pub fn normalize_param_env_or_error<'tcx>(
     // the `TypeOutlives` predicates first inside the unnormalized parameter environment, and
     // then we normalize the `TypeOutlives` bounds inside the normalized parameter environment.
     //
-    // This works fairly well because trait matching  does not actually care about param-env
+    // This works fairly well because trait matching does not actually care about param-env
     // TypeOutlives predicates - these are normally used by regionck.
     let outlives_predicates: Vec<_> = predicates
         .drain_filter(|predicate| {
@@ -495,7 +492,7 @@ fn visit_ty(&mut self, t: Ty<'tcx>) -> ControlFlow<Self::BreakTy> {
                 && let param_def_id = self.generics.type_param(param, self.tcx).def_id
                 && self.tcx.parent(param_def_id) == self.trait_item_def_id
             {
-                return ControlFlow::BREAK;
+                return ControlFlow::Break(());
             }
             t.super_visit_with(self)
         }
@@ -504,7 +501,7 @@ fn visit_region(&mut self, r: ty::Region<'tcx>) -> ControlFlow<Self::BreakTy> {
                 && let param_def_id = self.generics.region_param(&param, self.tcx).def_id
                 && self.tcx.parent(param_def_id) == self.trait_item_def_id
             {
-                return ControlFlow::BREAK;
+                return ControlFlow::Break(());
             }
             r.super_visit_with(self)
         }
@@ -513,7 +510,7 @@ fn visit_const(&mut self, ct: ty::Const<'tcx>) -> ControlFlow<Self::BreakTy> {
                 && let param_def_id = self.generics.const_param(&param, self.tcx).def_id
                 && self.tcx.parent(param_def_id) == self.trait_item_def_id
             {
-                return ControlFlow::BREAK;
+                return ControlFlow::Break(());
             }
             ct.super_visit_with(self)
         }
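The `ControlFlow::BREAK`/`ControlFlow::CONTINUE` associated constants (from the unstable `control_flow_enum` feature, whose `#![feature]` attributes are also removed later in this commit) are replaced by the plain `Break(())`/`Continue(())` variants in these visitors. A standalone sketch of the same pattern outside the compiler:

```rust
use std::ops::ControlFlow;

// A tiny visitor in the same style as the TypeVisitor impls above:
// stop as soon as a negative value is seen.
fn find_negative(values: &[i32]) -> ControlFlow<()> {
    for &v in values {
        if v < 0 {
            return ControlFlow::Break(()); // previously written as ControlFlow::BREAK
        }
    }
    ControlFlow::Continue(()) // previously written as ControlFlow::CONTINUE
}

fn main() {
    assert_eq!(find_negative(&[1, 2, 3]), ControlFlow::Continue(()));
    assert_eq!(find_negative(&[1, -2, 3]), ControlFlow::Break(()));
}
```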
index 8b1ced78f4e8a06fe4a69ed1d0a4f49c8e043935..c9121212cd8f16b9b115ab1ac703b7895d793790 100644 (file)
@@ -783,16 +783,16 @@ fn visit_ty(&mut self, t: Ty<'tcx>) -> ControlFlow<Self::BreakTy> {
             match t.kind() {
                 ty::Param(_) => {
                     if t == self.tcx.types.self_param {
-                        ControlFlow::BREAK
+                        ControlFlow::Break(())
                     } else {
-                        ControlFlow::CONTINUE
+                        ControlFlow::Continue(())
                     }
                 }
                 ty::Alias(ty::Projection, ref data)
                     if self.tcx.def_kind(data.def_id) == DefKind::ImplTraitPlaceholder =>
                 {
                     // We'll deny these later in their own pass
-                    ControlFlow::CONTINUE
+                    ControlFlow::Continue(())
                 }
                 ty::Alias(ty::Projection, ref data) => {
                     // This is a projected type `<Foo as SomeTrait>::X`.
@@ -809,7 +809,7 @@ fn visit_ty(&mut self, t: Ty<'tcx>) -> ControlFlow<Self::BreakTy> {
                     // SomeTrait` is in fact a supertrait of the
                     // current trait. In that case, this type is
                     // legal, because the type `X` will be specified
-                    // in the object type.  Note that we can just use
+                    // in the object type. Note that we can just use
                     // direct equality here because all of these types
                     // are part of the formal parameter listing, and
                     // hence there should be no inference variables.
@@ -820,7 +820,7 @@ fn visit_ty(&mut self, t: Ty<'tcx>) -> ControlFlow<Self::BreakTy> {
                         .contains(&data.trait_ref(self.tcx).def_id);
 
                     if is_supertrait_of_current_trait {
-                        ControlFlow::CONTINUE // do not walk contained types, do not report error, do collect $200
+                        ControlFlow::Continue(()) // do not walk contained types, do not report error, do collect $200
                     } else {
                         t.super_visit_with(self) // DO walk contained types, POSSIBLY reporting an error
                     }
index 81966f3fcb231791b5cc098dab611e9b81fde698..fbc7eccedc88392d11f036ff43459322537836f6 100644 (file)
@@ -148,7 +148,7 @@ fn push_candidate(&mut self, candidate: ProjectionCandidate<'tcx>) -> bool {
                 }
 
                 // Prefer where-clauses. As in select, if there are multiple
-                // candidates, we prefer where-clause candidates over impls.  This
+                // candidates, we prefer where-clause candidates over impls. This
                 // may seem a bit surprising, since impls are the source of
                 // "truth" in some sense, but in fact some of the impls that SEEM
                 // applicable are not, because of nested obligations. Where
@@ -1034,7 +1034,7 @@ fn opt_normalize_projection_type<'a, 'b, 'tcx>(
         }
         Err(ProjectionCacheEntry::InProgress) => {
             // Under lazy normalization, this can arise when
-            // bootstrapping.  That is, imagine an environment with a
+            // bootstrapping. That is, imagine an environment with a
             // where-clause like `A::B == u32`. Now, if we are asked
             // to normalize `A::B`, we will want to check the
             // where-clauses in scope. So we will try to unify `A::B`
@@ -1375,7 +1375,7 @@ fn assemble_candidates_from_trait_def<'cx, 'tcx>(
     // Check whether the self-type is itself a projection.
     // If so, extract what we know from the trait and try to come up with a good answer.
     let bounds = match *obligation.predicate.self_ty().kind() {
-        ty::Alias(_, ref data) => tcx.bound_item_bounds(data.def_id).subst(tcx, data.substs),
+        ty::Alias(_, ref data) => tcx.item_bounds(data.def_id).subst(tcx, data.substs),
         ty::Infer(ty::TyVar(_)) => {
             // If the self-type is an inference variable, then it MAY wind up
             // being a projected type, so induce an ambiguity.
@@ -2259,25 +2259,23 @@ fn confirm_impl_trait_in_trait_candidate<'tcx>(
         tcx.predicates_of(impl_fn_def_id).instantiate(tcx, impl_fn_substs),
         &mut obligations,
     );
-    obligations.extend(std::iter::zip(predicates.predicates, predicates.spans).map(
-        |(pred, span)| {
-            Obligation::with_depth(
-                tcx,
-                ObligationCause::new(
-                    obligation.cause.span,
-                    obligation.cause.body_id,
-                    if span.is_dummy() {
-                        super::ItemObligation(impl_fn_def_id)
-                    } else {
-                        super::BindingObligation(impl_fn_def_id, span)
-                    },
-                ),
-                obligation.recursion_depth + 1,
-                obligation.param_env,
-                pred,
-            )
-        },
-    ));
+    obligations.extend(predicates.into_iter().map(|(pred, span)| {
+        Obligation::with_depth(
+            tcx,
+            ObligationCause::new(
+                obligation.cause.span,
+                obligation.cause.body_id,
+                if span.is_dummy() {
+                    super::ItemObligation(impl_fn_def_id)
+                } else {
+                    super::BindingObligation(impl_fn_def_id, span)
+                },
+            ),
+            obligation.recursion_depth + 1,
+            obligation.param_env,
+            pred,
+        )
+    }));
 
     let ty = normalize_with_depth_to(
         selcx,
@@ -2303,10 +2301,10 @@ fn assoc_ty_own_obligations<'cx, 'tcx>(
     nested: &mut Vec<PredicateObligation<'tcx>>,
 ) {
     let tcx = selcx.tcx();
-    let own = tcx
+    let predicates = tcx
         .predicates_of(obligation.predicate.def_id)
         .instantiate_own(tcx, obligation.predicate.substs);
-    for (predicate, span) in std::iter::zip(own.predicates, own.spans) {
+    for (predicate, span) in predicates {
         let normalized = normalize_with_depth_to(
             selcx,
             obligation.param_env,
index c6ef13e185b2d7526e6870397a3932e451e6a49c..27247271d1f4d3f6aa0f989352ec3e38ed75e51e 100644 (file)
@@ -133,7 +133,7 @@ fn visit_ty(&mut self, t: Ty<'tcx>) -> ControlFlow<Self::BreakTy> {
                 .escaping
                 .max(t.outer_exclusive_binder().as_usize() - self.outer_index.as_usize());
         }
-        ControlFlow::CONTINUE
+        ControlFlow::Continue(())
     }
 
     #[inline]
@@ -145,7 +145,7 @@ fn visit_region(&mut self, r: ty::Region<'tcx>) -> ControlFlow<Self::BreakTy> {
             }
             _ => {}
         }
-        ControlFlow::CONTINUE
+        ControlFlow::Continue(())
     }
 
     fn visit_const(&mut self, ct: ty::Const<'tcx>) -> ControlFlow<Self::BreakTy> {
@@ -153,7 +153,7 @@ fn visit_const(&mut self, ct: ty::Const<'tcx>) -> ControlFlow<Self::BreakTy> {
             ty::ConstKind::Bound(debruijn, _) if debruijn >= self.outer_index => {
                 self.escaping =
                     self.escaping.max(debruijn.as_usize() - self.outer_index.as_usize());
-                ControlFlow::CONTINUE
+                ControlFlow::Continue(())
             }
             _ => ct.super_visit_with(self),
         }
@@ -201,7 +201,7 @@ fn try_fold_ty(&mut self, ty: Ty<'tcx>) -> Result<Ty<'tcx>, Self::Error> {
         // wait to fold the substs.
 
         // Wrap this in a closure so we don't accidentally return from the outer function
-        let res = (|| match *ty.kind() {
+        let res = match *ty.kind() {
             // This is really important. While we *can* handle this, this has
             // severe performance implications for large opaque types with
             // late-bound regions. See `issue-88862` benchmark.
@@ -210,7 +210,7 @@ fn try_fold_ty(&mut self, ty: Ty<'tcx>) -> Result<Ty<'tcx>, Self::Error> {
             {
                 // Only normalize `impl Trait` outside of type inference, usually in codegen.
                 match self.param_env.reveal() {
-                    Reveal::UserFacing => ty.try_super_fold_with(self),
+                    Reveal::UserFacing => ty.try_super_fold_with(self)?,
 
                     Reveal::All => {
                         let substs = substs.try_fold_with(self)?;
@@ -239,7 +239,7 @@ fn try_fold_ty(&mut self, ty: Ty<'tcx>) -> Result<Ty<'tcx>, Self::Error> {
                         }
                         let folded_ty = ensure_sufficient_stack(|| self.try_fold_ty(concrete_ty));
                         self.anon_depth -= 1;
-                        folded_ty
+                        folded_ty?
                     }
                 }
             }
@@ -287,9 +287,9 @@ fn try_fold_ty(&mut self, ty: Ty<'tcx>) -> Result<Ty<'tcx>, Self::Error> {
                 // `tcx.normalize_projection_ty` may normalize to a type that still has
                 // unevaluated consts, so keep normalizing here if that's the case.
                 if res != ty && res.has_type_flags(ty::TypeFlags::HAS_CT_PROJECTION) {
-                    Ok(res.try_super_fold_with(self)?)
+                    res.try_super_fold_with(self)?
                 } else {
-                    Ok(res)
+                    res
                 }
             }
 
@@ -344,14 +344,14 @@ fn try_fold_ty(&mut self, ty: Ty<'tcx>) -> Result<Ty<'tcx>, Self::Error> {
                 // `tcx.normalize_projection_ty` may normalize to a type that still has
                 // unevaluated consts, so keep normalizing here if that's the case.
                 if res != ty && res.has_type_flags(ty::TypeFlags::HAS_CT_PROJECTION) {
-                    Ok(res.try_super_fold_with(self)?)
+                    res.try_super_fold_with(self)?
                 } else {
-                    Ok(res)
+                    res
                 }
             }
 
-            _ => ty.try_super_fold_with(self),
-        })()?;
+            _ => ty.try_super_fold_with(self)?,
+        };
 
         self.cache.insert(ty, res);
         Ok(res)
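The normalization folder above previously wrapped its large `match` in an immediately-invoked closure purely so that `?` would not return from the outer function; the change applies `?` per arm and wraps the result in `Ok` once at the end. A self-contained sketch of that restructuring, with invented names:

```rust
#[derive(Clone, Copy)]
enum Shape {
    Square(f64),
    Rect(f64, f64),
}

fn side(x: f64) -> Result<f64, String> {
    if x >= 0.0 { Ok(x) } else { Err("negative length".to_string()) }
}

fn area(shape: Shape) -> Result<f64, String> {
    // Before: the whole match was wrapped as `let res = (|| match shape { ... })()?;`.
    // After: use `?` inside each arm, then return Ok(res) once.
    let res = match shape {
        Shape::Square(s) => side(s)? * side(s)?,
        Shape::Rect(w, h) => side(w)? * side(h)?,
    };
    Ok(res)
}

fn main() {
    assert_eq!(area(Shape::Square(3.0)), Ok(9.0));
    assert!(area(Shape::Rect(-1.0, 2.0)).is_err());
}
```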
diff --git a/compiler/rustc_trait_selection/src/traits/relationships.rs b/compiler/rustc_trait_selection/src/traits/relationships.rs
deleted file mode 100644 (file)
index 34b5fc4..0000000
+++ /dev/null
@@ -1,48 +0,0 @@
-use crate::infer::InferCtxt;
-use crate::traits::query::evaluate_obligation::InferCtxtExt;
-use crate::traits::PredicateObligation;
-use rustc_infer::traits::TraitEngine;
-use rustc_middle::ty;
-
-pub(crate) fn update<'tcx, T>(
-    engine: &mut T,
-    infcx: &InferCtxt<'tcx>,
-    obligation: &PredicateObligation<'tcx>,
-) where
-    T: TraitEngine<'tcx>,
-{
-    // (*) binder skipped
-    if let ty::PredicateKind::Clause(ty::Clause::Trait(tpred)) = obligation.predicate.kind().skip_binder()
-        && let Some(ty) = infcx.shallow_resolve(tpred.self_ty()).ty_vid().map(|t| infcx.root_var(t))
-        && infcx.tcx.lang_items().sized_trait().map_or(false, |st| st != tpred.trait_ref.def_id)
-    {
-        let new_self_ty = infcx.tcx.types.unit;
-
-        // Then construct a new obligation with Self = () added
-        // to the ParamEnv, and see if it holds.
-        let o = obligation.with(infcx.tcx,
-            obligation
-                .predicate
-                .kind()
-                .rebind(
-                    // (*) binder moved here
-                    ty::PredicateKind::Clause(ty::Clause::Trait(tpred.with_self_ty(infcx.tcx, new_self_ty)))
-                ),
-        );
-        // Don't report overflow errors. Otherwise equivalent to may_hold.
-        if let Ok(result) = infcx.probe(|_| infcx.evaluate_obligation(&o)) && result.may_apply() {
-            engine.relationships().entry(ty).or_default().self_in_trait = true;
-        }
-    }
-
-    if let ty::PredicateKind::Clause(ty::Clause::Projection(predicate)) =
-        obligation.predicate.kind().skip_binder()
-    {
-        // If the projection predicate (Foo::Bar == X) has X as a non-TyVid,
-        // we need to make it into one.
-        if let Some(vid) = predicate.term.ty().and_then(|ty| ty.ty_vid()) {
-            debug!("relationship: {:?}.output = true", vid);
-            engine.relationships().entry(vid).or_default().output = true;
-        }
-    }
-}
index e29ad30d5f2ec7cb5a9e7aa4c0382912e45c6e85..2733d9643fd77ad8c53d120322b6989138d1ecaf 100644 (file)
@@ -398,7 +398,7 @@ fn assemble_candidates_from_auto_impls(
                 }
                 ty::Param(..) | ty::Alias(ty::Projection, ..) => {
                     // In these cases, we don't know what the actual
-                    // type is.  Therefore, we cannot break it down
+                    // type is. Therefore, we cannot break it down
                     // into its constituent types. So we don't
                     // consider the `..` impl but instead just add no
                     // candidates: this means that typeck will only
index a41d10f104358ea57f8f6e4cb641e38455b63ad7..82a59831be30aec5ff9791487cec5b72f6df7ca6 100644 (file)
@@ -2,7 +2,7 @@
 //!
 //! Confirmation unifies the output type parameters of the trait
 //! with the values found in the obligation, possibly yielding a
-//! type error.  See the [rustc dev guide] for more details.
+//! type error. See the [rustc dev guide] for more details.
 //!
 //! [rustc dev guide]:
 //! https://rustc-dev-guide.rust-lang.org/traits/resolution.html#confirmation
@@ -160,8 +160,7 @@ fn confirm_projection_candidate(
             _ => bug!("projection candidate for unexpected type: {:?}", placeholder_self_ty),
         };
 
-        let candidate_predicate =
-            tcx.bound_item_bounds(def_id).map_bound(|i| i[idx]).subst(tcx, substs);
+        let candidate_predicate = tcx.item_bounds(def_id).map_bound(|i| i[idx]).subst(tcx, substs);
         let candidate = candidate_predicate
             .to_opt_poly_trait_pred()
             .expect("projection candidate is not a trait predicate")
@@ -185,9 +184,8 @@ fn confirm_projection_candidate(
         })?);
 
         if let ty::Alias(ty::Projection, ..) = placeholder_self_ty.kind() {
-            let predicates = tcx.predicates_of(def_id).instantiate_own(tcx, substs).predicates;
-            debug!(?predicates, "projection predicates");
-            for predicate in predicates {
+            let predicates = tcx.predicates_of(def_id).instantiate_own(tcx, substs);
+            for (predicate, _) in predicates {
                 let normalized = normalize_with_depth_to(
                     self,
                     obligation.param_env,
@@ -357,8 +355,8 @@ fn vtable_auto_impl(
                 nested,
             );
 
-            // Adds the predicates from the trait.  Note that this contains a `Self: Trait`
-            // predicate as usual.  It won't have any effect since auto traits are coinductive.
+            // Adds the predicates from the trait. Note that this contains a `Self: Trait`
+            // predicate as usual. It won't have any effect since auto traits are coinductive.
             obligations.extend(trait_obligations);
 
             debug!(?obligations, "vtable_auto_impl");
@@ -511,7 +509,7 @@ fn confirm_object_candidate(
             // This maybe belongs in wf, but that can't (doesn't) handle
             // higher-ranked things.
             // Prevent, e.g., `dyn Iterator<Item = str>`.
-            for bound in self.tcx().bound_item_bounds(assoc_type).transpose_iter() {
+            for bound in self.tcx().item_bounds(assoc_type).transpose_iter() {
                 let subst_bound =
                     if defs.count() == 0 {
                         bound.subst(tcx, trait_predicate.trait_ref.substs)
index ba4e668f52ddd93b30abdecc98402aa5171efef0..f90da95d51668b56cfbb582cbae2231ef6db371f 100644 (file)
@@ -430,7 +430,7 @@ fn candidate_from_obligation_no_cache<'o>(
         //     impl<T:Clone> Vec<T> { fn push_clone(...) { ... } }
         //
         // and we were to see some code `foo.push_clone()` where `boo`
-        // is a `Vec<Bar>` and `Bar` does not implement `Clone`.  If
+        // is a `Vec<Bar>` and `Bar` does not implement `Clone`. If
         // we were to winnow, we'd wind up with zero candidates.
         // Instead, we select the right impl now but report "`Bar` does
         // not implement `Clone`".
@@ -1604,7 +1604,7 @@ fn match_projection_obligation_against_definition_bounds(
                 );
             }
         };
-        let bounds = tcx.bound_item_bounds(def_id).subst(tcx, substs);
+        let bounds = tcx.item_bounds(def_id).subst(tcx, substs);
 
         // The bounds returned by `item_bounds` may contain duplicates after
         // normalization, so try to deduplicate when possible to avoid
@@ -2324,7 +2324,7 @@ fn collect_predicates_for_types(
     // Matching
     //
     // Matching is a common path used for both evaluation and
-    // confirmation.  It basically unifies types that appear in impls
+    // confirmation. It basically unifies types that appear in impls
     // and traits. This does affect the surrounding environment;
     // therefore, when used during evaluation, match routines must be
     // run inside of a `probe()` so that their side-effects are
@@ -2558,12 +2558,11 @@ fn impl_or_trait_obligations(
         // obligation will normalize to `<$0 as Iterator>::Item = $1` and
         // `$1: Copy`, so we must ensure the obligations are emitted in
         // that order.
-        let predicates = tcx.bound_predicates_of(def_id);
-        debug!(?predicates);
-        assert_eq!(predicates.0.parent, None);
-        let mut obligations = Vec::with_capacity(predicates.0.predicates.len());
-        for (predicate, span) in predicates.0.predicates {
-            let span = *span;
+        let predicates = tcx.predicates_of(def_id);
+        assert_eq!(predicates.parent, None);
+        let predicates = predicates.instantiate_own(tcx, substs);
+        let mut obligations = Vec::with_capacity(predicates.len());
+        for (predicate, span) in predicates {
             let cause = cause.clone().derived_cause(parent_trait_pred, |derived| {
                 ImplDerivedObligation(Box::new(ImplDerivedObligationCause {
                     derived,
@@ -2576,7 +2575,7 @@ fn impl_or_trait_obligations(
                 param_env,
                 cause.clone(),
                 recursion_depth,
-                predicates.rebind(*predicate).subst(tcx, substs),
+                predicate,
                 &mut obligations,
             );
             obligations.push(Obligation { cause, recursion_depth, param_env, predicate });
@@ -2644,7 +2643,7 @@ fn update_reached_depth(&self, reached_depth: usize) {
 /// In Issue #60010, we found a bug in rustc where it would cache
 /// these intermediate results. This was fixed in #60444 by disabling
 /// *all* caching for things involved in a cycle -- in our example,
-/// that would mean we don't cache that `Bar<T>: Send`.  But this led
+/// that would mean we don't cache that `Bar<T>: Send`. But this led
 /// to large slowdowns.
 ///
 /// Specifically, imagine this scenario, where proving `Baz<T>: Send`
@@ -2670,7 +2669,7 @@ fn update_reached_depth(&self, reached_depth: usize) {
 /// a result at `reached_depth`, so it marks the *current* solution as
 /// provisional as well. If an error is encountered, we toss out any
 /// provisional results added from the subtree that encountered the
-/// error.  When we pop the node at `reached_depth` from the stack, we
+/// error. When we pop the node at `reached_depth` from the stack, we
 /// can commit all the things that remain in the provisional cache.
 struct ProvisionalEvaluationCache<'tcx> {
     /// next "depth first number" to issue -- just a counter
@@ -2781,7 +2780,7 @@ fn insert_provisional(
     }
 
     /// Invoked when the node with dfn `dfn` does not get a successful
-    /// result.  This will clear out any provisional cache entries
+    /// result. This will clear out any provisional cache entries
     /// that were added since `dfn` was created. This is because the
     /// provisional entries are things which must assume that the
     /// things on the stack at the time of their creation succeeded --
index 6411206a5a40c276b188a337270ade6dbc2cae6f..0f9196de4fb197f5974d6b38b437352878ccabf9 100644 (file)
@@ -418,7 +418,7 @@ pub(crate) fn assoc_def(
     } else {
         // This is saying that neither the trait nor
         // the impl contain a definition for this
-        // associated type.  Normally this situation
+        // associated type. Normally this situation
         // could only arise through a compiler bug --
         // if the user wrote a bad item name, it
         // should have failed in astconv.
index 892a7afd799c73c64ff0a5c6ba5bbf156b68fa9e..f398fb06c187a0bb52ce0367e20244bc89cebe48 100644 (file)
@@ -107,25 +107,25 @@ fn visit_ty(&mut self, ty: Ty<'tcx>) -> ControlFlow<Self::BreakTy> {
             ty::FnDef(..) => {
                 // Types of formals and return in `fn(_) -> _` are also irrelevant;
                 // so we do not recur into them via `super_visit_with`
-                return ControlFlow::CONTINUE;
+                return ControlFlow::Continue(());
             }
             ty::Array(_, n)
                 if { n.try_eval_usize(self.tcx, ty::ParamEnv::reveal_all()) == Some(0) } =>
             {
                 // rust-lang/rust#62336: ignore type of contents
                 // for empty array.
-                return ControlFlow::CONTINUE;
+                return ControlFlow::Continue(());
             }
             ty::Bool | ty::Char | ty::Int(_) | ty::Uint(_) | ty::Str | ty::Never => {
                 // These primitive types are always structural match.
                 //
                 // `Never` is kind of special here, but as it is not inhabitable, this should be fine.
-                return ControlFlow::CONTINUE;
+                return ControlFlow::Continue(());
             }
 
             ty::FnPtr(..) => {
                 if !self.adt_const_param {
-                    return ControlFlow::CONTINUE;
+                    return ControlFlow::Continue(());
                 } else {
                     return ControlFlow::Break(ty);
                 }
@@ -147,7 +147,7 @@ fn visit_ty(&mut self, ty: Ty<'tcx>) -> ControlFlow<Self::BreakTy> {
                     // Even though `NonStructural` does not implement `PartialEq`,
                     // structural equality on `T` does not recur into the raw
                     // pointer. Therefore, one can still use `C` in a pattern.
-                    return ControlFlow::CONTINUE;
+                    return ControlFlow::Continue(());
                 } else {
                     return ControlFlow::Break(ty);
                 }
@@ -155,7 +155,7 @@ fn visit_ty(&mut self, ty: Ty<'tcx>) -> ControlFlow<Self::BreakTy> {
 
             ty::Float(_) => {
                 if !self.adt_const_param {
-                    return ControlFlow::CONTINUE;
+                    return ControlFlow::Continue(());
                 } else {
                     return ControlFlow::Break(ty);
                 }
@@ -172,13 +172,13 @@ fn visit_ty(&mut self, ty: Ty<'tcx>) -> ControlFlow<Self::BreakTy> {
                 self.tcx.sess.delay_span_bug(self.span, "ty::Error in structural-match check");
                 // We still want to check other types after encountering an error,
                 // as this may still emit relevant errors.
-                return ControlFlow::CONTINUE;
+                return ControlFlow::Continue(());
             }
         };
 
         if !self.seen.insert(adt_def.did()) {
             debug!("Search already seen adt_def: {:?}", adt_def);
-            return ControlFlow::CONTINUE;
+            return ControlFlow::Continue(());
         }
 
         if !self.type_marked_structural(ty) {
index 5ec9c2a24cd448db3534fa5f3a8798cba24a1c64..64daca714c32d1e8b20d6ed3ed8a300727a0bc69 100644 (file)
@@ -261,7 +261,10 @@ fn vtable_entries<'tcx>(
                     // Note that this method could then never be called, so we
                     // do not want to try and codegen it, in that case (see #23435).
                     let predicates = tcx.predicates_of(def_id).instantiate_own(tcx, substs);
-                    if impossible_predicates(tcx, predicates.predicates) {
+                    if impossible_predicates(
+                        tcx,
+                        predicates.map(|(predicate, _)| predicate).collect(),
+                    ) {
                         debug!("vtable_entries: predicates do not hold");
                         return VtblEntry::Vacant;
                     }
index fec4047ff49ba4d4a675b7cd97c4bd8d31c1085a..12d4cb4fc6920a0d07438637415bff6980d058cb 100644 (file)
@@ -76,7 +76,7 @@ pub fn obligations<'tcx>(
 }
 
 /// Returns the obligations that make this trait reference
-/// well-formed.  For example, if there is a trait `Set` defined like
+/// well-formed. For example, if there is a trait `Set` defined like
 /// `trait Set<K:Eq>`, then the trait reference `Foo: Set<Bar>` is WF
 /// if `Bar: Eq`.
 pub fn trait_obligations<'tcx>(
@@ -654,7 +654,7 @@ fn compute(&mut self, arg: GenericArg<'tcx>) {
                     // All of the requirements on type parameters
                     // have already been checked for `impl Trait` in
                     // return position. We do need to check type-alias-impl-trait though.
-                    if ty::is_impl_trait_defn(self.tcx, def_id).is_none() {
+                    if self.tcx.is_type_alias_impl_trait(def_id) {
                         let obligations = self.nominal_obligations(def_id, substs);
                         self.out.extend(obligations);
                     }
@@ -736,7 +736,7 @@ fn nominal_obligations_inner(
         trace!("{:#?}", predicates);
         debug_assert_eq!(predicates.predicates.len(), origins.len());
 
-        iter::zip(iter::zip(predicates.predicates, predicates.spans), origins.into_iter().rev())
+        iter::zip(predicates, origins.into_iter().rev())
             .map(|((mut pred, span), origin_def_id)| {
                 let code = if span.is_dummy() {
                     traits::ItemObligation(origin_def_id)
index 7c0cae1e7bdc2e6658a8267d575562368a7fca2e..f146de3966ba107865068631c9328f88fe2bd885 100644 (file)
@@ -7,7 +7,7 @@
 //! `crate::chalk::lowering` (to lower rustc types into Chalk types).
 
 use rustc_middle::traits::ChalkRustInterner as RustInterner;
-use rustc_middle::ty::{self, AssocKind, EarlyBinder, Ty, TyCtxt, TypeFoldable, TypeSuperFoldable};
+use rustc_middle::ty::{self, AssocKind, Ty, TyCtxt, TypeFoldable, TypeSuperFoldable};
 use rustc_middle::ty::{InternalSubsts, SubstsRef};
 use rustc_target::abi::{Integer, IntegerType};
 
@@ -38,13 +38,12 @@ fn where_clauses_for(
         def_id: DefId,
         bound_vars: SubstsRef<'tcx>,
     ) -> Vec<chalk_ir::QuantifiedWhereClause<RustInterner<'tcx>>> {
-        let predicates = self.interner.tcx.predicates_defined_on(def_id).predicates;
-        predicates
-            .iter()
-            .map(|(wc, _)| EarlyBinder(*wc).subst(self.interner.tcx, bound_vars))
-            .filter_map(|wc| LowerInto::<
-                    Option<chalk_ir::QuantifiedWhereClause<RustInterner<'tcx>>>
-                    >::lower_into(wc, self.interner)).collect()
+        self.interner
+            .tcx
+            .predicates_defined_on(def_id)
+            .instantiate_own(self.interner.tcx, bound_vars)
+            .filter_map(|(wc, _)| LowerInto::lower_into(wc, self.interner))
+            .collect()
     }
 
     fn bounds_for<T>(&self, def_id: DefId, bound_vars: SubstsRef<'tcx>) -> Vec<T>
index f127ef8343f91e5a262beed9bd805e6a582a237b..c0da8a8169e5bbde54cf52783300c2f5cd50a9f4 100644 (file)
@@ -1,5 +1,5 @@
 // This file contains various trait resolution methods used by codegen.
-// They all assume regions can be erased and monomorphic types.  It
+// They all assume regions can be erased and monomorphic types. It
 // seems likely that they should eventually be merged into more
 // general routines.
 
index 010233d7718c222372d87b976f7d29e0321c6d9d..7d2d8433c932d8d9d40b5656c1a78c99af9694de 100644 (file)
@@ -154,11 +154,8 @@ fn implied_bounds_from_components<'tcx>(
             match component {
                 Component::Region(r) => Some(OutlivesBound::RegionSubRegion(sub_region, r)),
                 Component::Param(p) => Some(OutlivesBound::RegionSubParam(sub_region, p)),
-                Component::Projection(p) => Some(OutlivesBound::RegionSubProjection(sub_region, p)),
-                Component::Opaque(def_id, substs) => {
-                    Some(OutlivesBound::RegionSubOpaque(sub_region, def_id, substs))
-                }
-                Component::EscapingProjection(_) =>
+                Component::Alias(p) => Some(OutlivesBound::RegionSubAlias(sub_region, p)),
+                Component::EscapingAlias(_) =>
                 // If the projection has escaping regions, don't
                 // try to infer any implied bounds even for its
                 // free components. This is conservative, because
index aa5c83ac2e6557cd98e40f7d29cbc18db5a99853..f35c5e44882df38db20a578bc83cdfcd9c2f50ae 100644 (file)
@@ -17,7 +17,6 @@
 use rustc_trait_selection::traits::query::{Fallible, NoSolution};
 use rustc_trait_selection::traits::{Normalized, Obligation, ObligationCause, ObligationCtxt};
 use std::fmt;
-use std::iter::zip;
 
 pub(crate) fn provide(p: &mut Providers) {
     *p = Providers {
@@ -108,9 +107,7 @@ fn relate_mir_and_user_substs<'tcx>(
     let instantiated_predicates = tcx.predicates_of(def_id).instantiate(tcx, substs);
 
     debug!(?instantiated_predicates);
-    for (instantiated_predicate, predicate_span) in
-        zip(instantiated_predicates.predicates, instantiated_predicates.spans)
-    {
+    for (instantiated_predicate, predicate_span) in instantiated_predicates {
         let span = if span == DUMMY_SP { predicate_span } else { span };
         let cause = ObligationCause::new(
             span,
index 384d03106b1e8c1cc67760bbfdf3345a741bb859..b3b9a67b26e3d23fcfa2ab9153847dc787145560 100644 (file)
@@ -1,4 +1,4 @@
-#![feature(alloc_layout_extra, control_flow_enum, decl_macro, iterator_try_reduce, never_type)]
+#![feature(alloc_layout_extra, decl_macro, iterator_try_reduce, never_type)]
 #![allow(dead_code, unused_variables)]
 #![deny(rustc::untranslatable_diagnostic)]
 #![deny(rustc::diagnostic_outside_of_impl)]
index dc1dd1bfaf8e7f30ddd14374c59375957472ed3c..91a505a72fae7a6be74b31e2b2ff0909af29b877 100644 (file)
@@ -108,21 +108,41 @@ fn fn_sig_for_fn_abi<'tcx>(
             // `Generator::resume(...) -> GeneratorState` function in case we
             // have an ordinary generator, or the `Future::poll(...) -> Poll`
             // function in case this is a special generator backing an async construct.
-            let ret_ty = if tcx.generator_is_async(did) {
-                let state_did = tcx.require_lang_item(LangItem::Poll, None);
-                let state_adt_ref = tcx.adt_def(state_did);
-                let state_substs = tcx.intern_substs(&[sig.return_ty.into()]);
-                tcx.mk_adt(state_adt_ref, state_substs)
+            let (resume_ty, ret_ty) = if tcx.generator_is_async(did) {
+                // The signature should be `Future::poll(_, &mut Context<'_>) -> Poll<Output>`
+                let poll_did = tcx.require_lang_item(LangItem::Poll, None);
+                let poll_adt_ref = tcx.adt_def(poll_did);
+                let poll_substs = tcx.intern_substs(&[sig.return_ty.into()]);
+                let ret_ty = tcx.mk_adt(poll_adt_ref, poll_substs);
+
+                // We have to replace the `ResumeTy` that is used for type and borrow checking
+                // with `&mut Context<'_>` which is used in codegen.
+                #[cfg(debug_assertions)]
+                {
+                    if let ty::Adt(resume_ty_adt, _) = sig.resume_ty.kind() {
+                        let expected_adt =
+                            tcx.adt_def(tcx.require_lang_item(LangItem::ResumeTy, None));
+                        assert_eq!(*resume_ty_adt, expected_adt);
+                    } else {
+                        panic!("expected `ResumeTy`, found `{:?}`", sig.resume_ty);
+                    };
+                }
+                let context_mut_ref = tcx.mk_task_context();
+
+                (context_mut_ref, ret_ty)
             } else {
+                // The signature should be `Generator::resume(_, Resume) -> GeneratorState<Yield, Return>`
                 let state_did = tcx.require_lang_item(LangItem::GeneratorState, None);
                 let state_adt_ref = tcx.adt_def(state_did);
                 let state_substs = tcx.intern_substs(&[sig.yield_ty.into(), sig.return_ty.into()]);
-                tcx.mk_adt(state_adt_ref, state_substs)
+                let ret_ty = tcx.mk_adt(state_adt_ref, state_substs);
+
+                (sig.resume_ty, ret_ty)
             };
 
             ty::Binder::bind_with_vars(
                 tcx.mk_fn_sig(
-                    [env_ty, sig.resume_ty].iter(),
+                    [env_ty, resume_ty].iter(),
                     &ret_ty,
                     false,
                     hir::Unsafety::Normal,
@@ -219,8 +239,7 @@ fn adjust_for_rust_scalar<'tcx>(
         return;
     }
 
-    // Scalars which have invalid values cannot be undef.
-    if !scalar.is_always_valid(&cx) {
+    if !scalar.is_uninit_valid() {
         attrs.set(ArgAttribute::NoUndef);
     }
 
@@ -246,11 +265,6 @@ fn adjust_for_rust_scalar<'tcx>(
                 PointerKind::SharedMutable | PointerKind::UniqueOwned => Size::ZERO,
             };
 
-            // `Box`, `&T`, and `&mut T` cannot be undef.
-            // Note that this only applies to the value of the pointer itself;
-            // this attribute doesn't make it UB for the pointed-to data to be undef.
-            attrs.set(ArgAttribute::NoUndef);
-
             // The aliasing rules for `Box<T>` are still not decided, but currently we emit
             // `noalias` for it. This can be turned off using an unstable flag.
             // See https://github.com/rust-lang/unsafe-code-guidelines/issues/326
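For orientation on the signatures constructed above, here is a hedged sketch using the public library types rather than the compiler's internal construction: an async-backed generator is lowered to the `Future::poll` shape, taking `&mut Context<'_>` (not the type-checking `ResumeTy`) and returning `Poll<Output>`, while an ordinary generator keeps `resume(_, Resume) -> GeneratorState<Yield, Return>`. The `Ready` type below is invented for illustration.

```rust
use std::future::Future;
use std::pin::Pin;
use std::task::{Context, Poll};

// Library-level counterpart of the async case above: the lowered callable
// receives `&mut Context<'_>` and returns `Poll<Output>`.
struct Ready(i32);

impl Future for Ready {
    type Output = i32;

    fn poll(self: Pin<&mut Self>, _cx: &mut Context<'_>) -> Poll<i32> {
        Poll::Ready(self.0)
    }
}

fn main() {
    // Demonstrate only that the type has the expected output type;
    // actually driving it requires an executor, which is out of scope here.
    let _fut: Pin<Box<dyn Future<Output = i32>>> = Box::pin(Ready(7));
}
```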
index a9b4e1420ea0db1b5b9cbe5670e8df8c3f1c84d3..a9fbad55dac5540b6230dd669fdee5926cd06302 100644 (file)
@@ -302,13 +302,53 @@ fn expr_is_poly(&mut self, expr: &thir::Expr<'tcx>) -> bool {
         }
 
         match expr.kind {
-            thir::ExprKind::NamedConst { substs, .. } => substs.has_non_region_param(),
+            thir::ExprKind::NamedConst { substs, .. }
+            | thir::ExprKind::ConstBlock { substs, .. } => substs.has_non_region_param(),
             thir::ExprKind::ConstParam { .. } => true,
             thir::ExprKind::Repeat { value, count } => {
                 self.visit_expr(&self.thir()[value]);
                 count.has_non_region_param()
             }
-            _ => false,
+            thir::ExprKind::Scope { .. }
+            | thir::ExprKind::Box { .. }
+            | thir::ExprKind::If { .. }
+            | thir::ExprKind::Call { .. }
+            | thir::ExprKind::Deref { .. }
+            | thir::ExprKind::Binary { .. }
+            | thir::ExprKind::LogicalOp { .. }
+            | thir::ExprKind::Unary { .. }
+            | thir::ExprKind::Cast { .. }
+            | thir::ExprKind::Use { .. }
+            | thir::ExprKind::NeverToAny { .. }
+            | thir::ExprKind::Pointer { .. }
+            | thir::ExprKind::Loop { .. }
+            | thir::ExprKind::Let { .. }
+            | thir::ExprKind::Match { .. }
+            | thir::ExprKind::Block { .. }
+            | thir::ExprKind::Assign { .. }
+            | thir::ExprKind::AssignOp { .. }
+            | thir::ExprKind::Field { .. }
+            | thir::ExprKind::Index { .. }
+            | thir::ExprKind::VarRef { .. }
+            | thir::ExprKind::UpvarRef { .. }
+            | thir::ExprKind::Borrow { .. }
+            | thir::ExprKind::AddressOf { .. }
+            | thir::ExprKind::Break { .. }
+            | thir::ExprKind::Continue { .. }
+            | thir::ExprKind::Return { .. }
+            | thir::ExprKind::Array { .. }
+            | thir::ExprKind::Tuple { .. }
+            | thir::ExprKind::Adt(_)
+            | thir::ExprKind::PlaceTypeAscription { .. }
+            | thir::ExprKind::ValueTypeAscription { .. }
+            | thir::ExprKind::Closure(_)
+            | thir::ExprKind::Literal { .. }
+            | thir::ExprKind::NonHirLiteral { .. }
+            | thir::ExprKind::ZstLiteral { .. }
+            | thir::ExprKind::StaticRef { .. }
+            | thir::ExprKind::InlineAsm(_)
+            | thir::ExprKind::ThreadLocalRef(_)
+            | thir::ExprKind::Yield { .. } => false,
         }
     }
     fn pat_is_poly(&mut self, pat: &thir::Pat<'tcx>) -> bool {
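The wildcard `_ => false` in `expr_is_poly` is replaced above by an exhaustive listing of every remaining `ExprKind` (and `ConstBlock` is now handled like `NamedConst`), so adding a new expression kind forces this match to be revisited. A small sketch of why the exhaustive form is preferred, using an invented enum:

```rust
// Illustrative only: with a wildcard arm, adding Event::Drag later would silently
// fall into `false`; spelling the variants out turns that into a compile error
// until the new variant is classified.
enum Event {
    Click,
    Scroll,
    KeyPress(char),
}

fn is_keyboard(event: &Event) -> bool {
    match event {
        Event::KeyPress(_) => true,
        // Previously this would have been `_ => false`.
        Event::Click | Event::Scroll => false,
    }
}

fn main() {
    assert!(is_keyboard(&Event::KeyPress('a')));
    assert!(!is_keyboard(&Event::Scroll));
}
```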
index 7ad5cbc01ccf25293c116f05025bca179b33d0e3..0853de601b04072100213a4f2b9d114242abd44f 100644 (file)
@@ -6,7 +6,6 @@
 
 #![doc(html_root_url = "https://doc.rust-lang.org/nightly/nightly-rustc/")]
 #![feature(let_chains)]
-#![feature(control_flow_enum)]
 #![feature(never_type)]
 #![feature(box_patterns)]
 #![recursion_limit = "256"]
index b944cbd698d13598de404d3f6291718cb7259dfe..5f29588ae4d2602b4e36c868c540ada95d0b5548 100644 (file)
@@ -807,7 +807,7 @@ fn hash_stable(
 ///
 /// Note that inference variables and bound regions are not included
 /// in this diagram. In the case of inference variables, they should
-/// be inferred to some other region from the diagram.  In the case of
+/// be inferred to some other region from the diagram. In the case of
 /// bound regions, they are excluded because they don't make sense to
 /// include -- the diagram indicates the relationship between free
 /// regions.
index fe6de1cf879b2e2395173f1d4e5d3c80ea2a1fe9..3a797bd5ecaa8ea84e8d90f8a47dab4f5b6c94b9 100644 (file)
@@ -20,7 +20,7 @@
 mod tests;
 
 extern "Rust" {
-    // These are the magic symbols to call the global allocator.  rustc generates
+    // These are the magic symbols to call the global allocator. rustc generates
     // them to call `__rg_alloc` etc. if there is a `#[global_allocator]` attribute
     // (the code expanding that attribute macro generates those functions), or to call
     // the default implementations in std (`__rdl_alloc` etc. in `library/std/src/alloc.rs`)
@@ -353,7 +353,7 @@ unsafe fn exchange_malloc(size: usize, align: usize) -> *mut u8 {
 
 #[cfg(not(no_global_oom_handling))]
 extern "Rust" {
-    // This is the magic symbol to call the global alloc error handler.  rustc generates
+    // This is the magic symbol to call the global alloc error handler. rustc generates
     // it to call `__rg_oom` if there is a `#[alloc_error_handler]`, or to call the
     // default implementations below (`__rdl_oom`) otherwise.
     fn __rust_alloc_error_handler(size: usize, align: usize) -> !;
index 4583bc9a158efc02c059e239c40962faf529f9ce..0b73b1af4eb35abad64b5d6c812293d1a6ad18b1 100644 (file)
 use core::fmt;
 use core::iter::{FromIterator, FusedIterator, InPlaceIterable, SourceIter, TrustedLen};
 use core::mem::{self, swap, ManuallyDrop};
+use core::num::NonZeroUsize;
 use core::ops::{Deref, DerefMut};
 use core::ptr;
 
 /// It is a logic error for an item to be modified in such a way that the
 /// item's ordering relative to any other item, as determined by the [`Ord`]
 /// trait, changes while it is in the heap. This is normally only possible
-/// through [`Cell`], [`RefCell`], global state, I/O, or unsafe code. The
+/// through interior mutability, global state, I/O, or unsafe code. The
 /// behavior resulting from such a logic error is not specified, but will
 /// be encapsulated to the `BinaryHeap` that observed the logic error and not
 /// result in undefined behavior. This could include panics, incorrect results,
 /// aborts, memory leaks, and non-termination.
 ///
+/// As long as no elements change their relative order while being in the heap
+/// as described above, the API of `BinaryHeap` guarantees that the heap
+/// invariant remains intact, i.e. its methods all behave as documented. For
+/// example, if a method is documented as iterating in sorted order, that is
+/// guaranteed to work as long as elements in the heap have not changed order,
+/// even in the presence of closures being unwound out of, iterators getting
+/// leaked, and similar foolishness.
+///
 /// # Examples
 ///
 /// ```
@@ -279,7 +288,9 @@ pub struct BinaryHeap<T> {
 #[stable(feature = "binary_heap_peek_mut", since = "1.12.0")]
 pub struct PeekMut<'a, T: 'a + Ord> {
     heap: &'a mut BinaryHeap<T>,
-    sift: bool,
+    // If a set_len + sift_down is required, this is Some. If a &mut T has not
+    // yet been exposed to peek_mut()'s caller, it's None.
+    original_len: Option<NonZeroUsize>,
 }
 
 #[stable(feature = "collection_debug", since = "1.17.0")]
@@ -292,7 +303,14 @@ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
 #[stable(feature = "binary_heap_peek_mut", since = "1.12.0")]
 impl<T: Ord> Drop for PeekMut<'_, T> {
     fn drop(&mut self) {
-        if self.sift {
+        if let Some(original_len) = self.original_len {
+            // SAFETY: That's how many elements were in the Vec at the time of
+            // the PeekMut::deref_mut call, and therefore also at the time of
+            // the BinaryHeap::peek_mut call. Since the PeekMut did not end up
+            // getting leaked, we are now undoing the leak amplification that
+            // the DerefMut prepared for.
+            unsafe { self.heap.data.set_len(original_len.get()) };
+
             // SAFETY: PeekMut is only instantiated for non-empty heaps.
             unsafe { self.heap.sift_down(0) };
         }
@@ -313,7 +331,26 @@ fn deref(&self) -> &T {
 impl<T: Ord> DerefMut for PeekMut<'_, T> {
     fn deref_mut(&mut self) -> &mut T {
         debug_assert!(!self.heap.is_empty());
-        self.sift = true;
+
+        let len = self.heap.len();
+        if len > 1 {
+            // Here we preemptively leak all the rest of the underlying vector
+            // after the currently max element. If the caller mutates the &mut T
+            // we're about to give them, and then leaks the PeekMut, all these
+            // elements will remain leaked. If they don't leak the PeekMut, then
+            // either Drop or PeekMut::pop will un-leak the vector elements.
+            //
+            // This technique is described throughout several other places in
+            // the standard library as "leak amplification".
+            unsafe {
+                // SAFETY: len > 1 so len != 0.
+                self.original_len = Some(NonZeroUsize::new_unchecked(len));
+                // SAFETY: len > 1 so all this does for now is leak elements,
+                // which is safe.
+                self.heap.data.set_len(1);
+            }
+        }
+
         // SAFE: PeekMut is only instantiated for non-empty heaps
         unsafe { self.heap.data.get_unchecked_mut(0) }
     }
@@ -323,9 +360,16 @@ impl<'a, T: Ord> PeekMut<'a, T> {
     /// Removes the peeked value from the heap and returns it.
     #[stable(feature = "binary_heap_peek_mut_pop", since = "1.18.0")]
     pub fn pop(mut this: PeekMut<'a, T>) -> T {
-        let value = this.heap.pop().unwrap();
-        this.sift = false;
-        value
+        if let Some(original_len) = this.original_len.take() {
+            // SAFETY: This is how many elements were in the Vec at the time of
+            // the BinaryHeap::peek_mut call.
+            unsafe { this.heap.data.set_len(original_len.get()) };
+
+            // Unlike in Drop, here we don't also need to do a sift_down even if
+            // the caller could've mutated the element. It is removed from the
+            // heap on the next line and pop() is not sensitive to its value.
+        }
+        this.heap.pop().unwrap()
     }
 }
 
@@ -398,8 +442,9 @@ pub fn with_capacity(capacity: usize) -> BinaryHeap<T> {
     /// Returns a mutable reference to the greatest item in the binary heap, or
     /// `None` if it is empty.
     ///
-    /// Note: If the `PeekMut` value is leaked, the heap may be in an
-    /// inconsistent state.
+    /// Note: If the `PeekMut` value is leaked, some heap elements might get
+    /// leaked along with it, but the remaining elements will remain a valid
+    /// heap.
     ///
     /// # Examples
     ///
@@ -426,7 +471,7 @@ pub fn with_capacity(capacity: usize) -> BinaryHeap<T> {
     /// otherwise it's *O*(1).
     #[stable(feature = "binary_heap_peek_mut", since = "1.12.0")]
     pub fn peek_mut(&mut self) -> Option<PeekMut<'_, T>> {
-        if self.is_empty() { None } else { Some(PeekMut { heap: self, sift: false }) }
+        if self.is_empty() { None } else { Some(PeekMut { heap: self, original_len: None }) }
     }
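A short usage sketch (not from this commit) of the API whose internals change above: mutating through `PeekMut` repairs the heap invariant when the guard drops, and `PeekMut::pop` removes the peeked element.

```rust
use std::collections::binary_heap::PeekMut;
use std::collections::BinaryHeap;

fn main() {
    let mut heap = BinaryHeap::from(vec![1, 5, 3]);

    // Mutate the current maximum through PeekMut; when the guard drops,
    // Drop (now: restore the length, then sift_down) repairs the heap.
    if let Some(mut max) = heap.peek_mut() {
        *max = 0;
    }
    assert_eq!(heap.peek(), Some(&3));

    // PeekMut::pop removes the peeked element without a second sift.
    if let Some(max) = heap.peek_mut() {
        assert_eq!(PeekMut::pop(max), 3);
    }
    assert_eq!(heap.into_sorted_vec(), vec![0, 1]);
}
```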
 
     /// Removes the greatest item from the binary heap and returns it, or `None` if it
index 59c516374c0e2790d6c2357b6ed1566afa7c2344..ffbb6c80ac01847999294b6dee8af0b81fa0dc4f 100644 (file)
@@ -1,6 +1,7 @@
 use super::*;
 use crate::boxed::Box;
 use crate::testing::crash_test::{CrashTestDummy, Panic};
+use core::mem;
 use std::iter::TrustedLen;
 use std::panic::{catch_unwind, AssertUnwindSafe};
 
@@ -146,6 +147,24 @@ fn test_peek_mut() {
     assert_eq!(heap.peek(), Some(&9));
 }
 
+#[test]
+fn test_peek_mut_leek() {
+    let data = vec![4, 2, 7];
+    let mut heap = BinaryHeap::from(data);
+    let mut max = heap.peek_mut().unwrap();
+    *max = -1;
+
+    // The PeekMut object's Drop impl would have been responsible for moving the
+    // -1 out of the max position of the BinaryHeap, but we don't run it.
+    mem::forget(max);
+
+    // Absent some mitigation like leak amplification, the -1 would incorrectly
+    // end up in the last position of the returned Vec, with the rest of the
+    // heap's original contents in front of it in sorted order.
+    let sorted_vec = heap.into_sorted_vec();
+    assert!(sorted_vec.is_sorted(), "{:?}", sorted_vec);
+}
+
 #[test]
 fn test_peek_mut_pop() {
     let data = vec![2, 4, 6, 2, 1, 8, 10, 3, 5, 7, 0, 9, 1];
index 4e812529c2cc846d7934672a05202a0eb9db7218..ca75c3895f41f61547be83491e0c042102a4a927 100644 (file)
 #![feature(const_size_of_val)]
 #![feature(const_align_of_val)]
 #![feature(const_ptr_read)]
+#![feature(const_maybe_uninit_zeroed)]
 #![feature(const_maybe_uninit_write)]
 #![feature(const_maybe_uninit_as_mut_ptr)]
 #![feature(const_refs_to_cell)]
 #![feature(hasher_prefixfree_extras)]
 #![feature(inline_const)]
 #![feature(inplace_iteration)]
+#![cfg_attr(test, feature(is_sorted))]
 #![feature(iter_advance_by)]
 #![feature(iter_next_chunk)]
 #![feature(iter_repeat_n)]
index c1d853ed652160149e6911c5577e8cc93891f1a9..c9aa23fc4af1f275e98ff6352e4e34480d6044cb 100644 (file)
@@ -2179,7 +2179,7 @@ pub struct Weak<T: ?Sized> {
     // This is a `NonNull` to allow optimizing the size of this type in enums,
     // but it is not necessarily a valid pointer.
     // `Weak::new` sets this to `usize::MAX` so that it doesn’t need
-    // to allocate space on the heap.  That's not a value a real pointer
+    // to allocate space on the heap. That's not a value a real pointer
     // will ever have because RcBox has alignment at least 2.
     // This is only possible when `T: Sized`; unsized `T` never dangle.
     ptr: NonNull<RcBox<T>>,
index e9886fc5717990ec4a93feafa5901aa4235ca9fd..fecacc2bb639508836b4090fa22280a368051a67 100644 (file)
 use core::mem::{self, SizedTypeProperties};
 #[cfg(not(no_global_oom_handling))]
 use core::ptr;
+#[cfg(not(no_global_oom_handling))]
+use core::slice::sort;
 
 use crate::alloc::Allocator;
 #[cfg(not(no_global_oom_handling))]
-use crate::alloc::Global;
+use crate::alloc::{self, Global};
 #[cfg(not(no_global_oom_handling))]
 use crate::borrow::ToOwned;
 use crate::boxed::Box;
@@ -206,7 +208,7 @@ pub fn sort(&mut self)
     where
         T: Ord,
     {
-        merge_sort(self, T::lt);
+        stable_sort(self, T::lt);
     }
 
     /// Sorts the slice with a comparator function.
@@ -262,7 +264,7 @@ pub fn sort_by<F>(&mut self, mut compare: F)
     where
         F: FnMut(&T, &T) -> Ordering,
     {
-        merge_sort(self, |a, b| compare(a, b) == Less);
+        stable_sort(self, |a, b| compare(a, b) == Less);
     }
 
     /// Sorts the slice with a key extraction function.
@@ -305,7 +307,7 @@ pub fn sort_by_key<K, F>(&mut self, mut f: F)
         F: FnMut(&T) -> K,
         K: Ord,
     {
-        merge_sort(self, |a, b| f(a).lt(&f(b)));
+        stable_sort(self, |a, b| f(a).lt(&f(b)));
     }
 
     /// Sorts the slice with a key extraction function.
@@ -812,324 +814,52 @@ fn clone_into(&self, target: &mut Vec<T>) {
 // Sorting
 ////////////////////////////////////////////////////////////////////////////////
 
-/// Inserts `v[0]` into pre-sorted sequence `v[1..]` so that whole `v[..]` becomes sorted.
-///
-/// This is the integral subroutine of insertion sort.
-#[cfg(not(no_global_oom_handling))]
-fn insert_head<T, F>(v: &mut [T], is_less: &mut F)
-where
-    F: FnMut(&T, &T) -> bool,
-{
-    if v.len() >= 2 && is_less(&v[1], &v[0]) {
-        unsafe {
-            // There are three ways to implement insertion here:
-            //
-            // 1. Swap adjacent elements until the first one gets to its final destination.
-            //    However, this way we copy data around more than is necessary. If elements are big
-            //    structures (costly to copy), this method will be slow.
-            //
-            // 2. Iterate until the right place for the first element is found. Then shift the
-            //    elements succeeding it to make room for it and finally place it into the
-            //    remaining hole. This is a good method.
-            //
-            // 3. Copy the first element into a temporary variable. Iterate until the right place
-            //    for it is found. As we go along, copy every traversed element into the slot
-            //    preceding it. Finally, copy data from the temporary variable into the remaining
-            //    hole. This method is very good. Benchmarks demonstrated slightly better
-            //    performance than with the 2nd method.
-            //
-            // All methods were benchmarked, and the 3rd showed best results. So we chose that one.
-            let tmp = mem::ManuallyDrop::new(ptr::read(&v[0]));
-
-            // Intermediate state of the insertion process is always tracked by `hole`, which
-            // serves two purposes:
-            // 1. Protects integrity of `v` from panics in `is_less`.
-            // 2. Fills the remaining hole in `v` in the end.
-            //
-            // Panic safety:
-            //
-            // If `is_less` panics at any point during the process, `hole` will get dropped and
-            // fill the hole in `v` with `tmp`, thus ensuring that `v` still holds every object it
-            // initially held exactly once.
-            let mut hole = InsertionHole { src: &*tmp, dest: &mut v[1] };
-            ptr::copy_nonoverlapping(&v[1], &mut v[0], 1);
-
-            for i in 2..v.len() {
-                if !is_less(&v[i], &*tmp) {
-                    break;
-                }
-                ptr::copy_nonoverlapping(&v[i], &mut v[i - 1], 1);
-                hole.dest = &mut v[i];
-            }
-            // `hole` gets dropped and thus copies `tmp` into the remaining hole in `v`.
-        }
-    }
-
-    // When dropped, copies from `src` into `dest`.
-    struct InsertionHole<T> {
-        src: *const T,
-        dest: *mut T,
-    }
-
-    impl<T> Drop for InsertionHole<T> {
-        fn drop(&mut self) {
-            unsafe {
-                ptr::copy_nonoverlapping(self.src, self.dest, 1);
-            }
-        }
-    }
-}
-
-/// Merges non-decreasing runs `v[..mid]` and `v[mid..]` using `buf` as temporary storage, and
-/// stores the result into `v[..]`.
-///
-/// # Safety
-///
-/// The two slices must be non-empty and `mid` must be in bounds. Buffer `buf` must be long enough
-/// to hold a copy of the shorter slice. Also, `T` must not be a zero-sized type.
-#[cfg(not(no_global_oom_handling))]
-unsafe fn merge<T, F>(v: &mut [T], mid: usize, buf: *mut T, is_less: &mut F)
-where
-    F: FnMut(&T, &T) -> bool,
-{
-    let len = v.len();
-    let v = v.as_mut_ptr();
-    let (v_mid, v_end) = unsafe { (v.add(mid), v.add(len)) };
-
-    // The merge process first copies the shorter run into `buf`. Then it traces the newly copied
-    // run and the longer run forwards (or backwards), comparing their next unconsumed elements and
-    // copying the lesser (or greater) one into `v`.
-    //
-    // As soon as the shorter run is fully consumed, the process is done. If the longer run gets
-    // consumed first, then we must copy whatever is left of the shorter run into the remaining
-    // hole in `v`.
-    //
-    // Intermediate state of the process is always tracked by `hole`, which serves two purposes:
-    // 1. Protects integrity of `v` from panics in `is_less`.
-    // 2. Fills the remaining hole in `v` if the longer run gets consumed first.
-    //
-    // Panic safety:
-    //
-    // If `is_less` panics at any point during the process, `hole` will get dropped and fill the
-    // hole in `v` with the unconsumed range in `buf`, thus ensuring that `v` still holds every
-    // object it initially held exactly once.
-    let mut hole;
-
-    if mid <= len - mid {
-        // The left run is shorter.
-        unsafe {
-            ptr::copy_nonoverlapping(v, buf, mid);
-            hole = MergeHole { start: buf, end: buf.add(mid), dest: v };
-        }
-
-        // Initially, these pointers point to the beginnings of their arrays.
-        let left = &mut hole.start;
-        let mut right = v_mid;
-        let out = &mut hole.dest;
-
-        while *left < hole.end && right < v_end {
-            // Consume the lesser side.
-            // If equal, prefer the left run to maintain stability.
-            unsafe {
-                let to_copy = if is_less(&*right, &**left) {
-                    get_and_increment(&mut right)
-                } else {
-                    get_and_increment(left)
-                };
-                ptr::copy_nonoverlapping(to_copy, get_and_increment(out), 1);
-            }
-        }
-    } else {
-        // The right run is shorter.
-        unsafe {
-            ptr::copy_nonoverlapping(v_mid, buf, len - mid);
-            hole = MergeHole { start: buf, end: buf.add(len - mid), dest: v_mid };
-        }
-
-        // Initially, these pointers point past the ends of their arrays.
-        let left = &mut hole.dest;
-        let right = &mut hole.end;
-        let mut out = v_end;
-
-        while v < *left && buf < *right {
-            // Consume the greater side.
-            // If equal, prefer the right run to maintain stability.
-            unsafe {
-                let to_copy = if is_less(&*right.sub(1), &*left.sub(1)) {
-                    decrement_and_get(left)
-                } else {
-                    decrement_and_get(right)
-                };
-                ptr::copy_nonoverlapping(to_copy, decrement_and_get(&mut out), 1);
-            }
-        }
-    }
-    // Finally, `hole` gets dropped. If the shorter run was not fully consumed, whatever remains of
-    // it will now be copied into the hole in `v`.
-
-    unsafe fn get_and_increment<T>(ptr: &mut *mut T) -> *mut T {
-        let old = *ptr;
-        *ptr = unsafe { ptr.add(1) };
-        old
-    }
-
-    unsafe fn decrement_and_get<T>(ptr: &mut *mut T) -> *mut T {
-        *ptr = unsafe { ptr.sub(1) };
-        *ptr
-    }
-
-    // When dropped, copies the range `start..end` into `dest..`.
-    struct MergeHole<T> {
-        start: *mut T,
-        end: *mut T,
-        dest: *mut T,
-    }
-
-    impl<T> Drop for MergeHole<T> {
-        fn drop(&mut self) {
-            // `T` is not a zero-sized type, and these are pointers into a slice's elements.
-            unsafe {
-                let len = self.end.sub_ptr(self.start);
-                ptr::copy_nonoverlapping(self.start, self.dest, len);
-            }
-        }
-    }
-}
-
-/// This merge sort borrows some (but not all) ideas from TimSort, which is described in detail
-/// [here](https://github.com/python/cpython/blob/main/Objects/listsort.txt).
-///
-/// The algorithm identifies strictly descending and non-descending subsequences, which are called
-/// natural runs. There is a stack of pending runs yet to be merged. Each newly found run is pushed
-/// onto the stack, and then some pairs of adjacent runs are merged until these two invariants are
-/// satisfied:
-///
-/// 1. for every `i` in `1..runs.len()`: `runs[i - 1].len > runs[i].len`
-/// 2. for every `i` in `2..runs.len()`: `runs[i - 2].len > runs[i - 1].len + runs[i].len`
-///
-/// The invariants ensure that the total running time is *O*(*n* \* log(*n*)) worst-case.
+#[inline]
 #[cfg(not(no_global_oom_handling))]
-fn merge_sort<T, F>(v: &mut [T], mut is_less: F)
+fn stable_sort<T, F>(v: &mut [T], mut is_less: F)
 where
     F: FnMut(&T, &T) -> bool,
 {
-    // Slices of up to this length get sorted using insertion sort.
-    const MAX_INSERTION: usize = 20;
-    // Very short runs are extended using insertion sort to span at least this many elements.
-    const MIN_RUN: usize = 10;
-
-    // Sorting has no meaningful behavior on zero-sized types.
     if T::IS_ZST {
+        // Sorting has no meaningful behavior on zero-sized types. Do nothing.
         return;
     }
 
-    let len = v.len();
-
-    // Short arrays get sorted in-place via insertion sort to avoid allocations.
-    if len <= MAX_INSERTION {
-        if len >= 2 {
-            for i in (0..len - 1).rev() {
-                insert_head(&mut v[i..], &mut is_less);
-            }
-        }
-        return;
-    }
-
-    // Allocate a buffer to use as scratch memory. We keep the length 0 so we can keep in it
-    // shallow copies of the contents of `v` without risking the dtors running on copies if
-    // `is_less` panics. When merging two sorted runs, this buffer holds a copy of the shorter run,
-    // which will always have length at most `len / 2`.
-    let mut buf = Vec::with_capacity(len / 2);
+    let elem_alloc_fn = |len: usize| -> *mut T {
+        // SAFETY: Creating the layout is safe as long as merge_sort never calls this with len >
+        // v.len(). The allocation is only used as a 'shadow region' to store temporary swap
+        // elements.
+        unsafe { alloc::alloc(alloc::Layout::array::<T>(len).unwrap_unchecked()) as *mut T }
+    };
 
-    // In order to identify natural runs in `v`, we traverse it backwards. That might seem like a
-    // strange decision, but consider the fact that merges more often go in the opposite direction
-    // (forwards). According to benchmarks, merging forwards is slightly faster than merging
-    // backwards. To conclude, identifying runs by traversing backwards improves performance.
-    let mut runs = vec![];
-    let mut end = len;
-    while end > 0 {
-        // Find the next natural run, and reverse it if it's strictly descending.
-        let mut start = end - 1;
-        if start > 0 {
-            start -= 1;
-            unsafe {
-                if is_less(v.get_unchecked(start + 1), v.get_unchecked(start)) {
-                    while start > 0 && is_less(v.get_unchecked(start), v.get_unchecked(start - 1)) {
-                        start -= 1;
-                    }
-                    v[start..end].reverse();
-                } else {
-                    while start > 0 && !is_less(v.get_unchecked(start), v.get_unchecked(start - 1))
-                    {
-                        start -= 1;
-                    }
-                }
-            }
-        }
-
-        // Insert some more elements into the run if it's too short. Insertion sort is faster than
-        // merge sort on short sequences, so this significantly improves performance.
-        while start > 0 && end - start < MIN_RUN {
-            start -= 1;
-            insert_head(&mut v[start..end], &mut is_less);
+    let elem_dealloc_fn = |buf_ptr: *mut T, len: usize| {
+        // SAFETY: Creating the layout is safe as long as merge_sort never calls this with len >
+        // v.len(). The caller must ensure that buf_ptr was created by elem_alloc_fn with the same
+        // len.
+        unsafe {
+            alloc::dealloc(buf_ptr as *mut u8, alloc::Layout::array::<T>(len).unwrap_unchecked());
         }
+    };
 
-        // Push this run onto the stack.
-        runs.push(Run { start, len: end - start });
-        end = start;
-
-        // Merge some pairs of adjacent runs to satisfy the invariants.
-        while let Some(r) = collapse(&runs) {
-            let left = runs[r + 1];
-            let right = runs[r];
-            unsafe {
-                merge(
-                    &mut v[left.start..right.start + right.len],
-                    left.len,
-                    buf.as_mut_ptr(),
-                    &mut is_less,
-                );
-            }
-            runs[r] = Run { start: left.start, len: left.len + right.len };
-            runs.remove(r + 1);
+    let run_alloc_fn = |len: usize| -> *mut sort::TimSortRun {
+        // SAFETY: Creating the layout is safe as long as merge_sort never calls this with an
+        // obscene length or 0.
+        unsafe {
+            alloc::alloc(alloc::Layout::array::<sort::TimSortRun>(len).unwrap_unchecked())
+                as *mut sort::TimSortRun
         }
-    }
-
-    // Finally, exactly one run must remain in the stack.
-    debug_assert!(runs.len() == 1 && runs[0].start == 0 && runs[0].len == len);
+    };
 
-    // Examines the stack of runs and identifies the next pair of runs to merge. More specifically,
-    // if `Some(r)` is returned, that means `runs[r]` and `runs[r + 1]` must be merged next. If the
-    // algorithm should continue building a new run instead, `None` is returned.
-    //
-    // TimSort is infamous for its buggy implementations, as described here:
-    // http://envisage-project.eu/timsort-specification-and-verification/
-    //
-    // The gist of the story is: we must enforce the invariants on the top four runs on the stack.
-    // Enforcing them on just top three is not sufficient to ensure that the invariants will still
-    // hold for *all* runs in the stack.
-    //
-    // This function correctly checks invariants for the top four runs. Additionally, if the top
-    // run starts at index 0, it will always demand a merge operation until the stack is fully
-    // collapsed, in order to complete the sort.
-    #[inline]
-    fn collapse(runs: &[Run]) -> Option<usize> {
-        let n = runs.len();
-        if n >= 2
-            && (runs[n - 1].start == 0
-                || runs[n - 2].len <= runs[n - 1].len
-                || (n >= 3 && runs[n - 3].len <= runs[n - 2].len + runs[n - 1].len)
-                || (n >= 4 && runs[n - 4].len <= runs[n - 3].len + runs[n - 2].len))
-        {
-            if n >= 3 && runs[n - 3].len < runs[n - 1].len { Some(n - 3) } else { Some(n - 2) }
-        } else {
-            None
+    let run_dealloc_fn = |buf_ptr: *mut sort::TimSortRun, len: usize| {
+        // SAFETY: The caller must ensure that buf_ptr was created by run_alloc_fn with the same
+        // len.
+        unsafe {
+            alloc::dealloc(
+                buf_ptr as *mut u8,
+                alloc::Layout::array::<sort::TimSortRun>(len).unwrap_unchecked(),
+            );
         }
-    }
+    };
 
-    #[derive(Clone, Copy)]
-    struct Run {
-        start: usize,
-        len: usize,
-    }
+    sort::merge_sort(v, &mut is_less, elem_alloc_fn, elem_dealloc_fn, run_alloc_fn, run_dealloc_fn);
 }
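
A minimal, self-contained sketch of the allocate/deallocate pattern the closures above hand to `sort::merge_sort` (the `with_scratch` helper is hypothetical, for illustration only, not part of this patch):

    use std::alloc::{alloc, dealloc, Layout};

    // Hypothetical helper: allocate scratch space for `len` elements of `T`,
    // run `f` with the raw buffer, then free it with the same layout -- the
    // same Layout::array + alloc/dealloc pairing used by elem_alloc_fn and
    // elem_dealloc_fn above.
    fn with_scratch<T>(len: usize, f: impl FnOnce(*mut T)) {
        let layout = Layout::array::<T>(len).expect("layout overflow");
        unsafe {
            let buf = alloc(layout) as *mut T;
            assert!(!buf.is_null(), "allocation failure");
            f(buf);
            dealloc(buf as *mut u8, layout);
        }
    }

    fn main() {
        // Scratch space for 8 u32s; the sort would fill it with shallow copies.
        with_scratch(8, |_buf: *mut u32| {});
    }
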
index b28d20cda179ec32054515d56dad13d303e387ee..afbe5cfaf8ef9dbe0cbe87436cc50e249c1d7ffa 100644 (file)
@@ -559,10 +559,9 @@ pub fn repeat(&self, n: usize) -> String {
     #[stable(feature = "ascii_methods_on_intrinsics", since = "1.23.0")]
     #[inline]
     pub fn to_ascii_uppercase(&self) -> String {
-        let mut bytes = self.as_bytes().to_vec();
-        bytes.make_ascii_uppercase();
-        // make_ascii_uppercase() preserves the UTF-8 invariant.
-        unsafe { String::from_utf8_unchecked(bytes) }
+        let mut s = self.to_owned();
+        s.make_ascii_uppercase();
+        s
     }
 
     /// Returns a copy of this string where each character is mapped to its
@@ -592,10 +591,9 @@ pub fn to_ascii_uppercase(&self) -> String {
     #[stable(feature = "ascii_methods_on_intrinsics", since = "1.23.0")]
     #[inline]
     pub fn to_ascii_lowercase(&self) -> String {
-        let mut bytes = self.as_bytes().to_vec();
-        bytes.make_ascii_lowercase();
-        // make_ascii_lowercase() preserves the UTF-8 invariant.
-        unsafe { String::from_utf8_unchecked(bytes) }
+        let mut s = self.to_owned();
+        s.make_ascii_lowercase();
+        s
     }
 }
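
A small usage sketch of the refactored methods; behavior is unchanged by the rewrite above (only ASCII letters are mapped, other characters pass through):

    fn main() {
        assert_eq!("grüße".to_ascii_uppercase(), "GRüßE");
        assert_eq!("Crab".to_ascii_lowercase(), "crab");
    }
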
 
index d833d4d1dfbd327a5f53b606498e5e717cbcae69..9bc9182f7b53c3b170d0634ed959164f5b4204ea 100644 (file)
@@ -295,7 +295,7 @@ pub struct Weak<T: ?Sized> {
     // This is a `NonNull` to allow optimizing the size of this type in enums,
     // but it is not necessarily a valid pointer.
     // `Weak::new` sets this to `usize::MAX` so that it doesn’t need
-    // to allocate space on the heap.  That's not a value a real pointer
+    // to allocate space on the heap. That's not a value a real pointer
     // will ever have because RcBox has alignment at least 2.
     // This is only possible when `T: Sized`; unsized `T` never dangle.
     ptr: NonNull<ArcInner<T>>,
@@ -654,6 +654,20 @@ pub fn try_new_zeroed() -> Result<Arc<mem::MaybeUninit<T>>, AllocError> {
     ///
     /// This will succeed even if there are outstanding weak references.
     ///
+    // FIXME: when `Arc::into_inner` is stabilized, add this paragraph:
+    /*
+    /// It is strongly recommended to use [`Arc::into_inner`] instead if you don't
+    /// want to keep the `Arc` in the [`Err`] case.
+    /// Immediately dropping the [`Err`] payload, like in the expression
+    /// `Arc::try_unwrap(this).ok()`, can still cause the strong count to
+    /// drop to zero and the inner value of the `Arc` to be dropped:
+    /// For instance if two threads execute this expression in parallel, then
+    /// there is a race condition. The threads could first both check whether they
+    /// have the last clone of their `Arc` via `Arc::try_unwrap`, and then
+    /// both drop their `Arc` in the call to [`ok`][`Result::ok`],
+    /// taking the strong count from two down to zero.
+    ///
+     */
     /// # Examples
     ///
     /// ```
@@ -685,6 +699,137 @@ pub fn try_unwrap(this: Self) -> Result<T, Self> {
             Ok(elem)
         }
     }
+
+    /// Returns the inner value, if the `Arc` has exactly one strong reference.
+    ///
+    /// Otherwise, [`None`] is returned and the `Arc` is dropped.
+    ///
+    /// This will succeed even if there are outstanding weak references.
+    ///
+    /// If `Arc::into_inner` is called on every clone of this `Arc`,
+    /// it is guaranteed that exactly one of the calls returns the inner value.
+    /// This means in particular that the inner value is not dropped.
+    ///
+    /// The similar expression `Arc::try_unwrap(this).ok()` does not
+    /// offer such a guarantee. See the last example below.
+    //
+    // FIXME: when `Arc::into_inner` is stabilized, add this to end
+    // of the previous sentence:
+    /*
+    /// and the documentation of [`Arc::try_unwrap`].
+     */
+    ///
+    /// # Examples
+    ///
+    /// Minimal example demonstrating the guarantee that `Arc::into_inner` gives.
+    /// ```
+    /// #![feature(arc_into_inner)]
+    ///
+    /// use std::sync::Arc;
+    ///
+    /// let x = Arc::new(3);
+    /// let y = Arc::clone(&x);
+    ///
+    /// // Two threads calling `Arc::into_inner` on both clones of an `Arc`:
+    /// let x_thread = std::thread::spawn(|| Arc::into_inner(x));
+    /// let y_thread = std::thread::spawn(|| Arc::into_inner(y));
+    ///
+    /// let x_inner_value = x_thread.join().unwrap();
+    /// let y_inner_value = y_thread.join().unwrap();
+    ///
+    /// // One of the threads is guaranteed to receive the inner value:
+    /// assert!(matches!(
+    ///     (x_inner_value, y_inner_value),
+    ///     (None, Some(3)) | (Some(3), None)
+    /// ));
+    /// // The result could also be `(None, None)` if the threads called
+    /// // `Arc::try_unwrap(x).ok()` and `Arc::try_unwrap(y).ok()` instead.
+    /// ```
+    ///
+    /// A more practical example demonstrating the need for `Arc::into_inner`:
+    /// ```
+    /// #![feature(arc_into_inner)]
+    ///
+    /// use std::sync::Arc;
+    ///
+    /// // Definition of a simple singly linked list using `Arc`:
+    /// #[derive(Clone)]
+    /// struct LinkedList<T>(Option<Arc<Node<T>>>);
+    /// struct Node<T>(T, Option<Arc<Node<T>>>);
+    ///
+    /// // Dropping a long `LinkedList<T>` relying on the destructor of `Arc`
+    /// // can cause a stack overflow. To prevent this, we can provide a
+    /// // manual `Drop` implementation that does the destruction in a loop:
+    /// impl<T> Drop for LinkedList<T> {
+    ///     fn drop(&mut self) {
+    ///         let mut link = self.0.take();
+    ///         while let Some(arc_node) = link.take() {
+    ///             if let Some(Node(_value, next)) = Arc::into_inner(arc_node) {
+    ///                 link = next;
+    ///             }
+    ///         }
+    ///     }
+    /// }
+    ///
+    /// // Implementation of `new` and `push` omitted
+    /// impl<T> LinkedList<T> {
+    ///     /* ... */
+    /// #   fn new() -> Self {
+    /// #       LinkedList(None)
+    /// #   }
+    /// #   fn push(&mut self, x: T) {
+    /// #       self.0 = Some(Arc::new(Node(x, self.0.take())));
+    /// #   }
+    /// }
+    ///
+    /// // The following code could have still caused a stack overflow
+    /// // despite the manual `Drop` impl if that `Drop` impl had used
+    /// // `Arc::try_unwrap(arc).ok()` instead of `Arc::into_inner(arc)`.
+    ///
+    /// // Create a long list and clone it
+    /// let mut x = LinkedList::new();
+    /// for i in 0..100000 {
+    ///     x.push(i); // Adds i to the front of x
+    /// }
+    /// let y = x.clone();
+    ///
+    /// // Drop the clones in parallel
+    /// let x_thread = std::thread::spawn(|| drop(x));
+    /// let y_thread = std::thread::spawn(|| drop(y));
+    /// x_thread.join().unwrap();
+    /// y_thread.join().unwrap();
+    /// ```
+
+    // FIXME: when `Arc::into_inner` is stabilized, adjust above documentation
+    // and the documentation of `Arc::try_unwrap` according to the `FIXME`s. Also
+    // open an issue on rust-lang/rust-clippy, asking for a lint against
+    // `Arc::try_unwrap(...).ok()`.
+    #[inline]
+    #[unstable(feature = "arc_into_inner", issue = "106894")]
+    pub fn into_inner(this: Self) -> Option<T> {
+        // Make sure that the ordinary `Drop` implementation isn’t called as well
+        let mut this = mem::ManuallyDrop::new(this);
+
+        // Following the implementation of `drop` and `drop_slow`
+        if this.inner().strong.fetch_sub(1, Release) != 1 {
+            return None;
+        }
+
+        acquire!(this.inner().strong);
+
+        // SAFETY: This mirrors the line
+        //
+        //     unsafe { ptr::drop_in_place(Self::get_mut_unchecked(self)) };
+        //
+        // in `drop_slow`. Instead of dropping the value behind the pointer,
+        // it is read and eventually returned; `ptr::read` has the same
+        // safety conditions as `ptr::drop_in_place`.
+        let inner = unsafe { ptr::read(Self::get_mut_unchecked(&mut this)) };
+
+        drop(Weak { ptr: this.ptr });
+
+        Some(inner)
+    }
 }
 
 impl<T> Arc<[T]> {
@@ -1656,7 +1801,7 @@ fn is_unique(&mut self) -> bool {
         //
         // The acquire label here ensures a happens-before relationship with any
         // writes to `strong` (in particular in `Weak::upgrade`) prior to decrements
-        // of the `weak` count (via `Weak::drop`, which uses release).  If the upgraded
+        // of the `weak` count (via `Weak::drop`, which uses release). If the upgraded
         // weak ref was never dropped, the CAS here will fail so we do not care to synchronize.
         if self.inner().weak.compare_exchange(1, usize::MAX, Acquire, Relaxed).is_ok() {
             // This needs to be an `Acquire` to synchronize with the decrement of the `strong`
@@ -1712,7 +1857,7 @@ fn drop(&mut self) {
         }
 
         // This fence is needed to prevent reordering of use of the data and
-        // deletion of the data.  Because it is marked `Release`, the decreasing
+        // deletion of the data. Because it is marked `Release`, the decreasing
         // of the reference count synchronizes with this `Acquire` fence. This
         // means that use of the data happens before decreasing the reference
         // count, which happens before this fence, which happens before the
@@ -2172,7 +2317,7 @@ fn clone(&self) -> Weak<T> {
         } else {
             return Weak { ptr: self.ptr };
         };
-        // See comments in Arc::clone() for why this is relaxed.  This can use a
+        // See comments in Arc::clone() for why this is relaxed. This can use a
         // fetch_add (ignoring the lock) because the weak count is only locked
         // when there are *no other* weak pointers in existence. (So we can't be
         // running this code in that case).
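
A minimal sketch, using plain atomics rather than `Arc`'s internals, of the release-decrement / acquire-fence pattern the comments above describe (`release_ref` is an illustrative stand-in, not the real reference-counting code):

    use std::sync::atomic::{fence, AtomicUsize, Ordering};

    // Returns true if the caller just released the last reference and may
    // now free the shared data; the Acquire fence pairs with every earlier
    // Release decrement, so all prior uses of the data happen-before the free.
    fn release_ref(count: &AtomicUsize) -> bool {
        if count.fetch_sub(1, Ordering::Release) == 1 {
            fence(Ordering::Acquire);
            true
        } else {
            false
        }
    }

    fn main() {
        let count = AtomicUsize::new(1);
        assert!(release_ref(&count));
    }
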
index 0fae8953aa2c771532d10f90c79b795eb2ef1450..863d58bdf4d9cdad35f341f1aeb519a67f9bb63f 100644 (file)
@@ -101,6 +101,38 @@ fn try_unwrap() {
     assert_eq!(Arc::try_unwrap(x), Ok(5));
 }
 
+#[test]
+fn into_inner() {
+    for _ in 0..100
+    // ^ Increase chances of hitting potential race conditions
+    {
+        let x = Arc::new(3);
+        let y = Arc::clone(&x);
+        let r_thread = std::thread::spawn(|| Arc::into_inner(x));
+        let s_thread = std::thread::spawn(|| Arc::into_inner(y));
+        let r = r_thread.join().expect("r_thread panicked");
+        let s = s_thread.join().expect("s_thread panicked");
+        assert!(
+            matches!((r, s), (None, Some(3)) | (Some(3), None)),
+            "assertion failed: unexpected result `{:?}`\
+            \n  expected `(None, Some(3))` or `(Some(3), None)`",
+            (r, s),
+        );
+    }
+
+    let x = Arc::new(3);
+    assert_eq!(Arc::into_inner(x), Some(3));
+
+    let x = Arc::new(4);
+    let y = Arc::clone(&x);
+    assert_eq!(Arc::into_inner(x), None);
+    assert_eq!(Arc::into_inner(y), Some(4));
+
+    let x = Arc::new(5);
+    let _w = Arc::downgrade(&x);
+    assert_eq!(Arc::into_inner(x), Some(5));
+}
+
 #[test]
 fn into_from_raw() {
     let x = Arc::new(Box::new("hello"));
index 541f99bcfaba4582ad801dc81de5075c5830aed3..2b1a787cc549929231be0dfe33f1b8b57d1e9441 100644 (file)
@@ -223,9 +223,9 @@ fn drop(&mut self) {
         }
 
         // as_slice() must only be called when iter.len() is > 0 because
-        // vec::Splice modifies vec::Drain fields and may grow the vec which would invalidate
-        // the iterator's internal pointers. Creating a reference to deallocated memory
-        // is invalid even when it is zero-length
+        // it also gets touched by vec::Splice, which may turn it into a dangling pointer.
+        // That would make it and the vec pointer point to different allocations, which would
+        // lead to invalid pointer arithmetic below.
         let drop_ptr = iter.as_slice().as_ptr();
 
         unsafe {
index b207b3210f1aa22ae6edcf989ab68b7f5b14bff1..37966007eb7e43eb1fb661d8925567b303f905ba 100644 (file)
@@ -40,7 +40,7 @@ pub struct IntoIter<
     // to avoid dropping the allocator twice we need to wrap it into ManuallyDrop
     pub(super) alloc: ManuallyDrop<A>,
     pub(super) ptr: *const T,
-    pub(super) end: *const T, // If T is a ZST, this is actually ptr+len.  This encoding is picked so that
+    pub(super) end: *const T, // If T is a ZST, this is actually ptr+len. This encoding is picked so that
                               // ptr == end is a quick test for the Iterator being empty, that works
                               // for both ZST and non-ZST.
 }
@@ -146,9 +146,9 @@ pub(crate) fn into_vecdeque(self) -> VecDeque<T, A> {
         let mut this = ManuallyDrop::new(self);
 
         // SAFETY: This allocation originally came from a `Vec`, so it passes
-        // all those checks.  We have `this.buf` ≤ `this.ptr` ≤ `this.end`,
+        // all those checks. We have `this.buf` ≤ `this.ptr` ≤ `this.end`,
         // so the `sub_ptr`s below cannot wrap, and will produce a well-formed
-        // range.  `end` ≤ `buf + cap`, so the range will be in-bounds.
+        // range. `end` ≤ `buf + cap`, so the range will be in-bounds.
         // Taking `alloc` is ok because nothing else is going to look at it,
         // since our `Drop` impl isn't going to run so there's no more code.
         unsafe {
index 8e652d676dc010a740a44f60a92934b99bd0c11e..cb9adf05c25b0efe8351608e7293452975bd08b9 100644 (file)
@@ -4,7 +4,8 @@
 
 #[rustc_specialization_trait]
 pub(super) unsafe trait IsZero {
-    /// Whether this value's representation is all zeros
+    /// Whether this value's representation is all zeros,
+    /// or can be represented with all zeros.
     fn is_zero(&self) -> bool;
 }
 
@@ -57,7 +58,7 @@ fn is_zero(&self) -> bool {
     #[inline]
     fn is_zero(&self) -> bool {
         // Because this is generated as a runtime check, it's not obvious that
-        // it's worth doing if the array is really long.  The threshold here
+        // it's worth doing if the array is really long. The threshold here
         // is largely arbitrary, but was picked because as of 2022-07-01 LLVM
         // fails to const-fold the check in `vec![[1; 32]; n]`
         // See https://github.com/rust-lang/rust/pull/97581#issuecomment-1166628022
@@ -147,6 +148,23 @@ fn is_zero(&self) -> bool {
     NonZeroIsize,
 );
 
+macro_rules! impl_is_zero_option_of_num {
+    ($($t:ty,)+) => {$(
+        unsafe impl IsZero for Option<$t> {
+            #[inline]
+            fn is_zero(&self) -> bool {
+                const {
+                    let none: Self = unsafe { core::mem::MaybeUninit::zeroed().assume_init() };
+                    assert!(none.is_none());
+                }
+                self.is_none()
+            }
+        }
+    )+};
+}
+
+impl_is_zero_option_of_num!(u8, u16, u32, u64, u128, i8, i16, i32, i64, i128, usize, isize,);
+
 unsafe impl<T: IsZero> IsZero for Wrapping<T> {
     #[inline]
     fn is_zero(&self) -> bool {
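
Assuming the niche layout that the `const` assertion above verifies, a `Vec` of `None` values of these `Option<integer>` types can be served from zeroed memory; a small usage sketch:

    fn main() {
        // With the IsZero specialization above, this allocation can be
        // satisfied by zeroed memory instead of writing each element.
        let v: Vec<Option<u32>> = vec![None; 1024];
        assert!(v.iter().all(Option::is_none));
    }
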
index 36cfac8ee9e17da0dde403e785c0a09efd324898..36b0b3c9e7cc072aca6f39d2f165d277d52cbdc3 100644 (file)
@@ -2429,7 +2429,7 @@ pub fn extend_from_within<R>(&mut self, src: R)
         self.reserve(range.len());
 
         // SAFETY:
-        // - `slice::range` guarantees  that the given range is valid for indexing self
+        // - `slice::range` guarantees that the given range is valid for indexing self
         unsafe {
             self.spec_extend_from_within(range);
         }
@@ -2686,7 +2686,7 @@ fn clone(&self) -> Self {
 
     // HACK(japaric): with cfg(test) the inherent `[T]::to_vec` method, which is
     // required for this method definition, is not available. Instead use the
-    // `slice::to_vec`  function which is only available with cfg(test)
+    // `slice::to_vec` function which is only available with cfg(test)
     // NB see the slice::hack module in slice.rs for more information
     #[cfg(test)]
     fn clone(&self) -> Self {
index bad765c7f51fab944b3de89a3a956de2dd42f158..1861147fe72fb676920d5a8ac8847288253e6797 100644 (file)
@@ -54,6 +54,12 @@ impl<I: Iterator, A: Allocator> ExactSizeIterator for Splice<'_, I, A> {}
 impl<I: Iterator, A: Allocator> Drop for Splice<'_, I, A> {
     fn drop(&mut self) {
         self.drain.by_ref().for_each(drop);
+        // At this point draining is done and the only remaining tasks are splicing
+        // and moving things into the final place.
+        // This means we can replace the slice::Iter with pointers that won't point to deallocated
+        // memory, so that Drain::drop can still call iter.len() without breaking the
+        // ptr.sub_ptr contract.
+        self.drain.iter = (&[]).iter();
 
         unsafe {
             if self.drain.tail_len == 0 {
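
For context, a small usage sketch of `Vec::splice`, whose guard's `Drop` impl is what the assignment above protects:

    fn main() {
        let mut v = vec![1, 2, 3, 4, 5];
        // The returned Splice yields the removed range; its Drop impl splices
        // in the replacement even if the iterator is dropped early.
        let removed: Vec<_> = v.splice(1..3, [10, 20, 30]).collect();
        assert_eq!(removed, [2, 3]);
        assert_eq!(v, [1, 10, 20, 30, 4, 5]);
    }
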
index d6d2b055b2395e8563543ed0628189b5b975823c..2a93a242d51744e02e5fa196579898a67f9d682d 100644 (file)
@@ -1,7 +1,6 @@
 #![feature(allocator_api)]
 #![feature(alloc_layout_extra)]
 #![feature(assert_matches)]
-#![feature(box_syntax)]
 #![feature(btree_drain_filter)]
 #![feature(cow_is_borrowed)]
 #![feature(const_box)]
index 87adcead8f62d69ba7dbcdcb4c5f331aa276b414..2f07c2911a50264aee57ab1a67b966a869045ef3 100644 (file)
@@ -1849,7 +1849,7 @@ fn next_then_drop<I: Iterator>(mut i: I) {
     }
 
     // Test that, if we reserved enough space, adding and removing elements does not
-    // invalidate references into the vector (such as `v0`).  This test also
+    // invalidate references into the vector (such as `v0`). This test also
     // runs in Miri, which would detect such problems.
     // Note that this test does *not* constitute a stable guarantee that all these functions do not
     // reallocate! Only what is explicitly documented at
index 9ca4947ed8f8bb7978870a8a6c01880f7b1d8bdc..c0fb0d993c3ed939ad5087a1dbb04f555463115e 100644 (file)
@@ -662,7 +662,8 @@ pub unsafe fn downcast_mut_unchecked<T: Any>(&mut self) -> &mut T {
 /// While `TypeId` implements `Hash`, `PartialOrd`, and `Ord`, it is worth
 /// noting that the hashes and ordering will vary between Rust releases. Beware
 /// of relying on them inside of your code!
-#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Debug, Hash)]
+#[derive(Clone, Copy, Debug, Hash, Eq)]
+#[derive_const(PartialEq, PartialOrd, Ord)]
 #[stable(feature = "rust1", since = "1.0.0")]
 pub struct TypeId {
     t: u64,
index b91c630183d4fe707231b70e483f4b202d5dafde..8259c087d22e4ff13cce9566ed8a828a169f0977 100644 (file)
@@ -109,8 +109,8 @@ impl<T, const N: usize> IntoIter<T, N> {
     /// use std::array::IntoIter;
     /// use std::mem::MaybeUninit;
     ///
-    /// # // Hi!  Thanks for reading the code.  This is restricted to `Copy` because
-    /// # // otherwise it could leak.  A fully-general version this would need a drop
+    /// # // Hi! Thanks for reading the code. This is restricted to `Copy` because
+    /// # // otherwise it could leak. A fully-general version of this would need a drop
     /// # // guard to handle panics from the iterator, but this works for an example.
     /// fn next_chunk<T: Copy, const N: usize>(
     ///     it: &mut impl Iterator<Item = T>,
@@ -211,7 +211,7 @@ pub const fn empty() -> Self {
         let initialized = 0..0;
 
         // SAFETY: We're telling it that none of the elements are initialized,
-        // which is trivially true.  And ∀N: usize, 0 <= N.
+        // which is trivially true. And ∀N: usize, 0 <= N.
         unsafe { Self::new_unchecked(buffer, initialized) }
     }
 
index fa5073e3304d72e93dff7a042804e043b234caa8..2a7ec544f9e2e138804cf0c7720b70091d2d2927 100644 (file)
@@ -1355,11 +1355,11 @@ fn wrap_buf<'b, 'c, F>(&'b mut self, wrap: F) -> Formatter<'c>
     ///     }
     /// }
     ///
-    /// assert_eq!(&format!("{}", Foo::new(2)), "2");
-    /// assert_eq!(&format!("{}", Foo::new(-1)), "-1");
-    /// assert_eq!(&format!("{}", Foo::new(0)), "0");
-    /// assert_eq!(&format!("{:#}", Foo::new(-1)), "-Foo 1");
-    /// assert_eq!(&format!("{:0>#8}", Foo::new(-1)), "00-Foo 1");
+    /// assert_eq!(format!("{}", Foo::new(2)), "2");
+    /// assert_eq!(format!("{}", Foo::new(-1)), "-1");
+    /// assert_eq!(format!("{}", Foo::new(0)), "0");
+    /// assert_eq!(format!("{:#}", Foo::new(-1)), "-Foo 1");
+    /// assert_eq!(format!("{:0>#8}", Foo::new(-1)), "00-Foo 1");
     /// ```
     #[stable(feature = "rust1", since = "1.0.0")]
     pub fn pad_integral(&mut self, is_nonnegative: bool, prefix: &str, buf: &str) -> Result {
@@ -1452,8 +1452,8 @@ fn write_prefix(f: &mut Formatter<'_>, sign: Option<char>, prefix: Option<&str>)
     ///     }
     /// }
     ///
-    /// assert_eq!(&format!("{Foo:<4}"), "Foo ");
-    /// assert_eq!(&format!("{Foo:0>4}"), "0Foo");
+    /// assert_eq!(format!("{Foo:<4}"), "Foo ");
+    /// assert_eq!(format!("{Foo:0>4}"), "0Foo");
     /// ```
     #[stable(feature = "rust1", since = "1.0.0")]
     pub fn pad(&mut self, s: &str) -> Result {
@@ -1636,8 +1636,8 @@ fn write_bytes(buf: &mut dyn Write, s: &[u8]) -> Result {
     ///     }
     /// }
     ///
-    /// assert_eq!(&format!("{Foo}"), "Foo");
-    /// assert_eq!(&format!("{Foo:0>8}"), "Foo");
+    /// assert_eq!(format!("{Foo}"), "Foo");
+    /// assert_eq!(format!("{Foo:0>8}"), "Foo");
     /// ```
     #[stable(feature = "rust1", since = "1.0.0")]
     pub fn write_str(&mut self, data: &str) -> Result {
@@ -1659,8 +1659,8 @@ pub fn write_str(&mut self, data: &str) -> Result {
     ///     }
     /// }
     ///
-    /// assert_eq!(&format!("{}", Foo(-1)), "Foo -1");
-    /// assert_eq!(&format!("{:0>8}", Foo(2)), "Foo 2");
+    /// assert_eq!(format!("{}", Foo(-1)), "Foo -1");
+    /// assert_eq!(format!("{:0>8}", Foo(2)), "Foo 2");
     /// ```
     #[stable(feature = "rust1", since = "1.0.0")]
     pub fn write_fmt(&mut self, fmt: Arguments<'_>) -> Result {
@@ -1703,8 +1703,8 @@ pub fn flags(&self) -> u32 {
     /// }
     ///
     /// // We set alignment to the right with ">".
-    /// assert_eq!(&format!("{Foo:G>3}"), "GGG");
-    /// assert_eq!(&format!("{Foo:t>6}"), "tttttt");
+    /// assert_eq!(format!("{Foo:G>3}"), "GGG");
+    /// assert_eq!(format!("{Foo:t>6}"), "tttttt");
     /// ```
     #[must_use]
     #[stable(feature = "fmt_flags", since = "1.5.0")]
@@ -1738,10 +1738,10 @@ pub fn fill(&self) -> char {
     ///     }
     /// }
     ///
-    /// assert_eq!(&format!("{Foo:<}"), "left");
-    /// assert_eq!(&format!("{Foo:>}"), "right");
-    /// assert_eq!(&format!("{Foo:^}"), "center");
-    /// assert_eq!(&format!("{Foo}"), "into the void");
+    /// assert_eq!(format!("{Foo:<}"), "left");
+    /// assert_eq!(format!("{Foo:>}"), "right");
+    /// assert_eq!(format!("{Foo:^}"), "center");
+    /// assert_eq!(format!("{Foo}"), "into the void");
     /// ```
     #[must_use]
     #[stable(feature = "fmt_flags_align", since = "1.28.0")]
@@ -1767,7 +1767,7 @@ pub fn align(&self) -> Option<Alignment> {
     ///     fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
     ///         if let Some(width) = formatter.width() {
     ///             // If we received a width, we use it
-    ///             write!(formatter, "{:width$}", &format!("Foo({})", self.0), width = width)
+    ///             write!(formatter, "{:width$}", format!("Foo({})", self.0), width = width)
     ///         } else {
     ///             // Otherwise we do nothing special
     ///             write!(formatter, "Foo({})", self.0)
@@ -1775,8 +1775,8 @@ pub fn align(&self) -> Option<Alignment> {
     ///     }
     /// }
     ///
-    /// assert_eq!(&format!("{:10}", Foo(23)), "Foo(23)   ");
-    /// assert_eq!(&format!("{}", Foo(23)), "Foo(23)");
+    /// assert_eq!(format!("{:10}", Foo(23)), "Foo(23)   ");
+    /// assert_eq!(format!("{}", Foo(23)), "Foo(23)");
     /// ```
     #[must_use]
     #[stable(feature = "fmt_flags", since = "1.5.0")]
@@ -1806,8 +1806,8 @@ pub fn width(&self) -> Option<usize> {
     ///     }
     /// }
     ///
-    /// assert_eq!(&format!("{:.4}", Foo(23.2)), "Foo(23.2000)");
-    /// assert_eq!(&format!("{}", Foo(23.2)), "Foo(23.20)");
+    /// assert_eq!(format!("{:.4}", Foo(23.2)), "Foo(23.2000)");
+    /// assert_eq!(format!("{}", Foo(23.2)), "Foo(23.20)");
     /// ```
     #[must_use]
     #[stable(feature = "fmt_flags", since = "1.5.0")]
@@ -1837,9 +1837,9 @@ pub fn precision(&self) -> Option<usize> {
     ///     }
     /// }
     ///
-    /// assert_eq!(&format!("{:+}", Foo(23)), "Foo(+23)");
-    /// assert_eq!(&format!("{:+}", Foo(-23)), "Foo(-23)");
-    /// assert_eq!(&format!("{}", Foo(23)), "Foo(23)");
+    /// assert_eq!(format!("{:+}", Foo(23)), "Foo(+23)");
+    /// assert_eq!(format!("{:+}", Foo(-23)), "Foo(-23)");
+    /// assert_eq!(format!("{}", Foo(23)), "Foo(23)");
     /// ```
     #[must_use]
     #[stable(feature = "fmt_flags", since = "1.5.0")]
@@ -1867,8 +1867,8 @@ pub fn sign_plus(&self) -> bool {
     ///     }
     /// }
     ///
-    /// assert_eq!(&format!("{:-}", Foo(23)), "-Foo(23)");
-    /// assert_eq!(&format!("{}", Foo(23)), "Foo(23)");
+    /// assert_eq!(format!("{:-}", Foo(23)), "-Foo(23)");
+    /// assert_eq!(format!("{}", Foo(23)), "Foo(23)");
     /// ```
     #[must_use]
     #[stable(feature = "fmt_flags", since = "1.5.0")]
@@ -1895,8 +1895,8 @@ pub fn sign_minus(&self) -> bool {
     ///     }
     /// }
     ///
-    /// assert_eq!(&format!("{:#}", Foo(23)), "Foo(23)");
-    /// assert_eq!(&format!("{}", Foo(23)), "23");
+    /// assert_eq!(format!("{:#}", Foo(23)), "Foo(23)");
+    /// assert_eq!(format!("{}", Foo(23)), "23");
     /// ```
     #[must_use]
     #[stable(feature = "fmt_flags", since = "1.5.0")]
@@ -1922,7 +1922,7 @@ pub fn alternate(&self) -> bool {
     ///     }
     /// }
     ///
-    /// assert_eq!(&format!("{:04}", Foo(23)), "23");
+    /// assert_eq!(format!("{:04}", Foo(23)), "23");
     /// ```
     #[must_use]
     #[stable(feature = "fmt_flags", since = "1.5.0")]
index 5bfe001de46e3775f456a9c42c026b0b4230e253..c4fb362094664196a354070a1a8a246062734959 100644 (file)
@@ -112,6 +112,10 @@ pub unsafe fn get_context<'a, 'b>(cx: ResumeTy) -> &'a mut Context<'b> {
     unsafe { &mut *cx.0.as_ptr().cast() }
 }
 
+// FIXME(swatinem): This fn is currently needed to work around shortcomings
+// in type and lifetime inference.
+// See the comment at the bottom of `LoweringContext::make_async_expr` and
+// <https://github.com/rust-lang/rust/issues/104826>.
 #[doc(hidden)]
 #[unstable(feature = "gen_future", issue = "50547")]
 #[inline]
index e8d724ab1ef4eed662ac5ab2810878e6ee8e2904..5a76e866923366e8716afeb1ddeb3ecc1e470ac7 100644 (file)
@@ -219,6 +219,75 @@ pub fn spin_loop() {
 /// backend used. Programs cannot rely on `black_box` for *correctness* in any way.
 ///
 /// [`std::convert::identity`]: crate::convert::identity
+///
+/// # When is this useful?
+///
+/// First and foremost: `black_box` does _not_ guarantee any exact behavior and, in some cases, may
+/// do nothing at all. As such, it **must not be relied upon to control critical program behavior.**
+/// This _immediately_ precludes any direct use of this function for cryptographic or security
+/// purposes.
+///
+/// While not suitable in those mission-critical cases, `black_box`'s functionality can generally be
+/// relied upon for benchmarking, and should be used there. It will try to ensure that the
+/// compiler doesn't optimize away part of the intended test code based on context. For
+/// example:
+///
+/// ```
+/// fn contains(haystack: &[&str], needle: &str) -> bool {
+///     haystack.iter().any(|x| x == &needle)
+/// }
+///
+/// pub fn benchmark() {
+///     let haystack = vec!["abc", "def", "ghi", "jkl", "mno"];
+///     let needle = "ghi";
+///     for _ in 0..10 {
+///         contains(&haystack, needle);
+///     }
+/// }
+/// ```
+///
+/// The compiler could theoretically make optimizations like the following:
+///
+/// - `needle` and `haystack` are always the same, move the call to `contains` outside the loop and
+///   delete the loop
+/// - Inline `contains`
+/// - `needle` and `haystack` have values known at compile time, `contains` is always true. Remove
+///   the call and replace with `true`
+/// - Nothing is done with the result of `contains`: delete this function call entirely
+/// - `benchmark` now has no purpose: delete this function
+///
+/// It is not likely that all of the above happens, but the compiler is definitely able to make some
+/// optimizations that could result in a very inaccurate benchmark. This is where `black_box` comes
+/// in:
+///
+/// ```
+/// use std::hint::black_box;
+///
+/// // Same `contains` function
+/// fn contains(haystack: &[&str], needle: &str) -> bool {
+///     haystack.iter().any(|x| x == &needle)
+/// }
+///
+/// pub fn benchmark() {
+///     let haystack = vec!["abc", "def", "ghi", "jkl", "mno"];
+///     let needle = "ghi";
+///     for _ in 0..10 {
+///         // Adjust our benchmark loop contents
+///         black_box(contains(black_box(&haystack), black_box(needle)));
+///     }
+/// }
+/// ```
+///
+/// This essentially tells the compiler to block optimizations across any calls to `black_box`. So,
+/// it now:
+///
+/// - Treats both arguments to `contains` as unpredictable: the body of `contains` can no longer be
+///   optimized based on argument values
+/// - Treats the call to `contains` and its result as volatile: the body of `benchmark` cannot
+///   optimize this away
+///
+/// This makes our benchmark much more representative of how the function would be used in situ, where
+/// arguments are usually not known at compile time and the result is used in some way.
 #[inline]
 #[stable(feature = "bench_black_box", since = "1.66.0")]
 #[rustc_const_unstable(feature = "const_black_box", issue = "none")]
index 399d54f18c5b3803b7b4d3fc69da33dccbf8e692..e3157b66902eb9dbfbfafbc505bdb786c88cbb2b 100644 (file)
@@ -259,6 +259,8 @@ pub fn $($sig)* { panic!() }
 define!("mir_drop", fn Drop<T>(place: T, goto: BasicBlock));
 define!("mir_drop_and_replace", fn DropAndReplace<T>(place: T, value: T, goto: BasicBlock));
 define!("mir_call", fn Call<T>(place: T, goto: BasicBlock, call: T));
+define!("mir_storage_live", fn StorageLive<T>(local: T));
+define!("mir_storage_dead", fn StorageDead<T>(local: T));
 define!("mir_retag", fn Retag<T>(place: T));
 define!("mir_move", fn Move<T>(place: T) -> T);
 define!("mir_static", fn Static<T>(s: T) -> &'static T);
index ac7b389b15b4d5e9e00e99ef2a4740a7c51b2971..b5739f2f3c0b057d286ecff2095d80f861c5ca18 100644 (file)
@@ -756,7 +756,7 @@ unsafe fn __iterator_get_unchecked(&mut self, idx: usize) -> Self::Item
     where
         Self: TrustedRandomAccessNoCoerce,
     {
-        // SAFETY: The TrustedRandomAccess contract requires that callers only  pass an index
+        // SAFETY: The TrustedRandomAccess contract requires that callers only pass an index
         // that is in bounds.
         // Additionally Self: TrustedRandomAccess is only implemented for Copy types
         // which means even repeated reads of the same index would be safe.
index 8e7cbd34a4f664b6c1496d6fa65591ca11ce8537..4cbe731b222f920131e999b0bd409747aa387a88 100644 (file)
@@ -1,3 +1,4 @@
+use crate::fmt;
 use crate::ops::{Generator, GeneratorState};
 use crate::pin::Pin;
 
 /// ```
 #[inline]
 #[unstable(feature = "iter_from_generator", issue = "43122", reason = "generators are unstable")]
-pub fn from_generator<G: Generator<Return = ()> + Unpin>(
-    generator: G,
-) -> impl Iterator<Item = G::Yield> {
+pub fn from_generator<G: Generator<Return = ()> + Unpin>(generator: G) -> FromGenerator<G> {
     FromGenerator(generator)
 }
 
-struct FromGenerator<G>(G);
+/// An iterator over the values yielded by an underlying generator.
+///
+/// This `struct` is created by the [`iter::from_generator()`] function. See its documentation for
+/// more.
+///
+/// [`iter::from_generator()`]: from_generator
+#[unstable(feature = "iter_from_generator", issue = "43122", reason = "generators are unstable")]
+#[derive(Clone)]
+pub struct FromGenerator<G>(G);
 
+#[unstable(feature = "iter_from_generator", issue = "43122", reason = "generators are unstable")]
 impl<G: Generator<Return = ()> + Unpin> Iterator for FromGenerator<G> {
     type Item = G::Yield;
 
@@ -41,3 +49,10 @@ fn next(&mut self) -> Option<Self::Item> {
         }
     }
 }
+
+#[unstable(feature = "iter_from_generator", issue = "43122", reason = "generators are unstable")]
+impl<G> fmt::Debug for FromGenerator<G> {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_struct("FromGenerator").finish()
+    }
+}
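
A usage sketch of the now-nameable return type (nightly-only, gated on the `generators` and `iter_from_generator` features); here we rely only on it being an `Iterator`:

    #![feature(generators, iter_from_generator)]

    fn main() {
        // `from_generator` now returns the named `FromGenerator` type made
        // public above, which is also Clone and Debug.
        let it = std::iter::from_generator(|| {
            yield 1;
            yield 2;
        });
        assert_eq!(it.collect::<Vec<_>>(), [1, 2]);
    }
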
index ec5fa45fdaddd72489d612d87617c14ed7ce8012..3806977f70ee4598d6a78aa88483b10086460b36 100644 (file)
@@ -26,7 +26,7 @@ mod fpu_precision {
     /// Developer's Manual (Volume 1).
     ///
     /// The only field which is relevant for the following code is PC, Precision Control. This
-    /// field determines the precision of the operations performed by the  FPU. It can be set to:
+    /// field determines the precision of the operations performed by the FPU. It can be set to:
     ///  - 0b00, single precision i.e., 32-bits
     ///  - 0b10, double precision i.e., 64-bits
     ///  - 0b11, double extended precision i.e., 80-bits (default state)
index 21518a3f551807356dbbf2dc484f4af1387490fc..2cae98b8e494334e640b3c936181cb1c6db32555 100644 (file)
@@ -1538,7 +1538,7 @@ pub const fn overflowing_add(self, rhs: Self) -> (Self, bool) {
         ///
         /// ```
         /// #![feature(bigint_helper_methods)]
-        /// // Only the  most significant word is signed.
+        /// // Only the most significant word is signed.
         /// //
         #[doc = concat!("//   10  MAX    (a = 10 × 2^", stringify!($BITS), " + 2^", stringify!($BITS), " - 1)")]
         #[doc = concat!("// + -5    9    (b = -5 × 2^", stringify!($BITS), " + 9)")]
@@ -1625,7 +1625,7 @@ pub const fn overflowing_sub(self, rhs: Self) -> (Self, bool) {
         /// overflow.
         ///
         /// Performs "ternary subtraction" by subtracting both an integer
-        /// operandand a borrow-in bit from `self`, and returns a tuple of the
+        /// operand and a borrow-in bit from `self`, and returns a tuple of the
         /// difference along with a boolean indicating whether an arithmetic
         /// overflow would occur. On overflow, the wrapped value is returned.
         ///
@@ -1646,7 +1646,7 @@ pub const fn overflowing_sub(self, rhs: Self) -> (Self, bool) {
         ///
         /// ```
         /// #![feature(bigint_helper_methods)]
-        /// // Only the  most significant word is signed.
+        /// // Only the most significant word is signed.
         /// //
         #[doc = concat!("//    6    8    (a = 6 × 2^", stringify!($BITS), " + 8)")]
         #[doc = concat!("// - -5    9    (b = -5 × 2^", stringify!($BITS), " + 9)")]
index 2eb29d4f9c57410081309ed539b6c6f2fb34d1ac..ec0c9984841e68089536f9836cfe70e5bb7dcd81 100644 (file)
@@ -753,7 +753,7 @@ pub fn set(&mut self, value: P::Target)
 impl<'a, T: ?Sized> Pin<&'a T> {
     /// Constructs a new pin by mapping the interior value.
     ///
-    /// For example, if you  wanted to get a `Pin` of a field of something,
+    /// For example, if you wanted to get a `Pin` of a field of something,
     /// you could use this to get access to that field in one line of code.
     /// However, there are several gotchas with these "pinning projections";
     /// see the [`pin` module] documentation for further details on that topic.
@@ -856,7 +856,7 @@ pub const fn get_mut(self) -> &'a mut T
 
     /// Construct a new pin by mapping the interior value.
     ///
-    /// For example, if you  wanted to get a `Pin` of a field of something,
+    /// For example, if you wanted to get a `Pin` of a field of something,
     /// you could use this to get access to that field in one line of code.
     /// However, there are several gotchas with these "pinning projections";
     /// see the [`pin` module] documentation for further details on that topic.
index 0a74c03d70f3a6cf17c95e9a059d49f2960f0945..7b1cb5488bcac650871b9ee38de84fb536bef4e7 100644 (file)
@@ -202,14 +202,11 @@ pub fn from_bits(bits: usize) -> Self
     #[must_use]
     #[inline(always)]
     #[unstable(feature = "strict_provenance", issue = "95228")]
-    pub fn addr(self) -> usize
-    where
-        T: Sized,
-    {
+    pub fn addr(self) -> usize {
         // FIXME(strict_provenance_magic): I am magic and should be a compiler intrinsic.
         // SAFETY: Pointer-to-integer transmutes are valid (if you are okay with losing the
         // provenance).
-        unsafe { mem::transmute(self) }
+        unsafe { mem::transmute(self.cast::<()>()) }
     }
 
     /// Gets the "address" portion of the pointer, and 'exposes' the "provenance" part for future
@@ -239,12 +236,9 @@ pub fn addr(self) -> usize
     #[must_use]
     #[inline(always)]
     #[unstable(feature = "strict_provenance", issue = "95228")]
-    pub fn expose_addr(self) -> usize
-    where
-        T: Sized,
-    {
+    pub fn expose_addr(self) -> usize {
         // FIXME(strict_provenance_magic): I am magic and should be a compiler intrinsic.
-        self as usize
+        self.cast::<()>() as usize
     }
 
     /// Creates a new pointer with the given address.
@@ -262,10 +256,7 @@ pub fn expose_addr(self) -> usize
     #[must_use]
     #[inline]
     #[unstable(feature = "strict_provenance", issue = "95228")]
-    pub fn with_addr(self, addr: usize) -> Self
-    where
-        T: Sized,
-    {
+    pub fn with_addr(self, addr: usize) -> Self {
         // FIXME(strict_provenance_magic): I am magic and should be a compiler intrinsic.
         //
         // In the mean-time, this operation is defined to be "as if" it was
@@ -288,10 +279,7 @@ pub fn with_addr(self, addr: usize) -> Self
     #[must_use]
     #[inline]
     #[unstable(feature = "strict_provenance", issue = "95228")]
-    pub fn map_addr(self, f: impl FnOnce(usize) -> usize) -> Self
-    where
-        T: Sized,
-    {
+    pub fn map_addr(self, f: impl FnOnce(usize) -> usize) -> Self {
         self.with_addr(f(self.addr()))
     }
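
A sketch of the `cast::<()>()` idea the changes above use to drop the `T: Sized` bounds: casting to a thin unit pointer discards any pointer metadata, leaving just the address.

    fn main() {
        let s: &[u8] = &[1, 2, 3];
        let fat: *const [u8] = s;
        // Casting the (fat) slice pointer to a thin `*const ()` keeps only the
        // data address, so address-taking no longer needs `T: Sized`.
        let thin: *const () = fat.cast::<()>();
        assert_eq!(thin as usize, s.as_ptr() as usize);
    }
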
 
index 5f30029eaa07d5c8d9be41bd6a6f221e13b0b43e..1ad9af1549a47fcdf12e46b391e12891c2fba0bd 100644 (file)
@@ -1701,7 +1701,7 @@ pub unsafe fn write_volatile<T>(dst: *mut T, src: T) {
         // offset is not a multiple of `stride`, the input pointer was misaligned and no pointer
         // offset will be able to produce a `p` aligned to the specified `a`.
         //
-        // The naive `-p (mod a)` equation  inhibits LLVM's ability to select instructions
+        // The naive `-p (mod a)` equation inhibits LLVM's ability to select instructions
         // like `lea`. We compute `(round_up_to_next_alignment(p, a) - p)` instead. This
         // redistributes operations around the load-bearing, but pessimizing `and` instruction
         // sufficiently for LLVM to be able to utilize the various optimizations it knows about.
index d70fb70c79fa4dc6dacc84297e67a062235bd8d8..ed1e3bd48122761607f22a10f0bfb11608c0d892 100644 (file)
@@ -208,14 +208,11 @@ pub fn from_bits(bits: usize) -> Self
     #[must_use]
     #[inline(always)]
     #[unstable(feature = "strict_provenance", issue = "95228")]
-    pub fn addr(self) -> usize
-    where
-        T: Sized,
-    {
+    pub fn addr(self) -> usize {
         // FIXME(strict_provenance_magic): I am magic and should be a compiler intrinsic.
         // SAFETY: Pointer-to-integer transmutes are valid (if you are okay with losing the
         // provenance).
-        unsafe { mem::transmute(self) }
+        unsafe { mem::transmute(self.cast::<()>()) }
     }
 
     /// Gets the "address" portion of the pointer, and 'exposes' the "provenance" part for future
@@ -245,12 +242,9 @@ pub fn addr(self) -> usize
     #[must_use]
     #[inline(always)]
     #[unstable(feature = "strict_provenance", issue = "95228")]
-    pub fn expose_addr(self) -> usize
-    where
-        T: Sized,
-    {
+    pub fn expose_addr(self) -> usize {
         // FIXME(strict_provenance_magic): I am magic and should be a compiler intrinsic.
-        self as usize
+        self.cast::<()>() as usize
     }
 
     /// Creates a new pointer with the given address.
@@ -268,10 +262,7 @@ pub fn expose_addr(self) -> usize
     #[must_use]
     #[inline]
     #[unstable(feature = "strict_provenance", issue = "95228")]
-    pub fn with_addr(self, addr: usize) -> Self
-    where
-        T: Sized,
-    {
+    pub fn with_addr(self, addr: usize) -> Self {
         // FIXME(strict_provenance_magic): I am magic and should be a compiler intrinsic.
         //
         // In the mean-time, this operation is defined to be "as if" it was
@@ -294,10 +285,7 @@ pub fn with_addr(self, addr: usize) -> Self
     #[must_use]
     #[inline]
     #[unstable(feature = "strict_provenance", issue = "95228")]
-    pub fn map_addr(self, f: impl FnOnce(usize) -> usize) -> Self
-    where
-        T: Sized,
-    {
+    pub fn map_addr(self, f: impl FnOnce(usize) -> usize) -> Self {
         self.with_addr(f(self.addr()))
     }
 
index af79d4bbd836c027e8037ed1808afd6e762b07d6..8c1a648860555bacacf1d8b9d7cce1e621f08cc0 100644 (file)
@@ -268,10 +268,7 @@ pub const fn to_raw_parts(self) -> (NonNull<()>, <T as super::Pointee>::Metadata
     #[must_use]
     #[inline]
     #[unstable(feature = "strict_provenance", issue = "95228")]
-    pub fn addr(self) -> NonZeroUsize
-    where
-        T: Sized,
-    {
+    pub fn addr(self) -> NonZeroUsize {
         // SAFETY: The pointer is guaranteed by the type to be non-null,
         // meaning that the address will be non-zero.
         unsafe { NonZeroUsize::new_unchecked(self.pointer.addr()) }
@@ -286,10 +283,7 @@ pub fn addr(self) -> NonZeroUsize
     #[must_use]
     #[inline]
     #[unstable(feature = "strict_provenance", issue = "95228")]
-    pub fn with_addr(self, addr: NonZeroUsize) -> Self
-    where
-        T: Sized,
-    {
+    pub fn with_addr(self, addr: NonZeroUsize) -> Self {
         // SAFETY: The result of `ptr::from::with_addr` is non-null because `addr` is guaranteed to be non-zero.
         unsafe { NonNull::new_unchecked(self.pointer.with_addr(addr.get()) as *mut _) }
     }
@@ -303,10 +297,7 @@ pub fn with_addr(self, addr: NonZeroUsize) -> Self
     #[must_use]
     #[inline]
     #[unstable(feature = "strict_provenance", issue = "95228")]
-    pub fn map_addr(self, f: impl FnOnce(NonZeroUsize) -> NonZeroUsize) -> Self
-    where
-        T: Sized,
-    {
+    pub fn map_addr(self, f: impl FnOnce(NonZeroUsize) -> NonZeroUsize) -> Self {
         self.with_addr(f(self.addr()))
     }
 
index 06228976719f52f8e25c59010c4fab554b7ae4f5..90ab43d1289f0d22b3b2dcfecc81ccb5e71bdc88 100644 (file)
@@ -6,7 +6,7 @@
 use crate::cmp;
 use crate::cmp::Ordering;
 use crate::fmt;
-use crate::intrinsics::{assume, exact_div, unchecked_sub};
+use crate::intrinsics::assume;
 use crate::iter::{FusedIterator, TrustedLen, TrustedRandomAccess, TrustedRandomAccessNoCoerce};
 use crate::marker::{PhantomData, Send, Sized, Sync};
 use crate::mem::{self, SizedTypeProperties};
@@ -35,12 +35,6 @@ fn into_iter(self) -> IterMut<'a, T> {
     }
 }
 
-// Macro helper functions
-#[inline(always)]
-fn size_from_ptr<T>(_: *const T) -> usize {
-    mem::size_of::<T>()
-}
-
 /// Immutable slice iterator
 ///
 /// This struct is created by the [`iter`] method on [slices].
@@ -65,7 +59,7 @@ fn size_from_ptr<T>(_: *const T) -> usize {
 #[must_use = "iterators are lazy and do nothing unless consumed"]
 pub struct Iter<'a, T: 'a> {
     ptr: NonNull<T>,
-    end: *const T, // If T is a ZST, this is actually ptr+len.  This encoding is picked so that
+    end: *const T, // If T is a ZST, this is actually ptr+len. This encoding is picked so that
     // ptr == end is a quick test for the Iterator being empty, that works
     // for both ZST and non-ZST.
     _marker: PhantomData<&'a T>,
@@ -186,7 +180,7 @@ fn as_ref(&self) -> &[T] {
 #[must_use = "iterators are lazy and do nothing unless consumed"]
 pub struct IterMut<'a, T: 'a> {
     ptr: NonNull<T>,
-    end: *mut T, // If T is a ZST, this is actually ptr+len.  This encoding is picked so that
+    end: *mut T, // If T is a ZST, this is actually ptr+len. This encoding is picked so that
     // ptr == end is a quick test for the Iterator being empty, that works
     // for both ZST and non-ZST.
     _marker: PhantomData<&'a mut T>,
index ce51d48e3e551901457c3af8d9783f9c544e2af5..0fd57b197aa97daa8838687de60165d6d3518372 100644 (file)
@@ -9,30 +9,20 @@ macro_rules! is_empty {
     };
 }
 
-// To get rid of some bounds checks (see `position`), we compute the length in a somewhat
-// unexpected way. (Tested by `codegen/slice-position-bounds-check`.)
 macro_rules! len {
     ($self: ident) => {{
         #![allow(unused_unsafe)] // we're sometimes used within an unsafe block
 
         let start = $self.ptr;
-        let size = size_from_ptr(start.as_ptr());
-        if size == 0 {
-            // This _cannot_ use `unchecked_sub` because we depend on wrapping
+        if T::IS_ZST {
+            // This _cannot_ use `sub_ptr` because we depend on wrapping
             // to represent the length of long ZST slice iterators.
             $self.end.addr().wrapping_sub(start.as_ptr().addr())
         } else {
-            // We know that `start <= end`, so can do better than `offset_from`,
-            // which needs to deal in signed.  By setting appropriate flags here
-            // we can tell LLVM this, which helps it remove bounds checks.
-            // SAFETY: By the type invariant, `start <= end`
-            let diff = unsafe { unchecked_sub($self.end.addr(), start.as_ptr().addr()) };
-            // By also telling LLVM that the pointers are apart by an exact
-            // multiple of the type size, it can optimize `len() == 0` down to
-            // `start == end` instead of `(end - start) < size`.
-            // SAFETY: By the type invariant, the pointers are aligned so the
-            //         distance between them must be a multiple of pointee size
-            unsafe { exact_div(diff, size) }
+            // To get rid of some bounds checks (see `position`), we use ptr_sub instead of
+            // offset_from (Tested by `codegen/slice-position-bounds-check`.)
+            // SAFETY: by the type invariant pointers are aligned and `start <= end`
+            unsafe { $self.end.sub_ptr(start.as_ptr()) }
         }
     }};
 }
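The macro above distinguishes ZST and non-ZST element types: for a ZST the `end` pointer encodes `ptr + len`, so the length is a wrapping address difference, while for sized types the pointer distance is divided exactly by the element size. A rough standalone sketch of the same computation, using the stable `offset_from` instead of the unstable `sub_ptr` (the names here are illustrative, not the std macro):

```
fn remaining_len<T>(start: *const T, end: *const T) -> usize {
    if std::mem::size_of::<T>() == 0 {
        // For ZSTs, `end` encodes `ptr + len` as an address, so the length is
        // simply the (wrapping) address difference.
        (end as usize).wrapping_sub(start as usize)
    } else {
        // For sized T we rely on the iterator invariant that `start <= end` and
        // both point into the same allocation, so `offset_from` is non-negative
        // and in bounds.
        unsafe { end.offset_from(start) as usize }
    }
}
```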
index 2995cf0c6443f8a03eb37ae1201a0ecffeb1b272..d93a3a57ecd27fdcdb94a4aa479a5e8ceb02be3c 100644 (file)
 /// Pure rust memchr implementation, taken from rust-memchr
 pub mod memchr;
 
+#[unstable(
+    feature = "slice_internals",
+    issue = "none",
+    reason = "exposed from core to be reused in std"
+)]
+pub mod sort;
+
 mod ascii;
 mod cmp;
 mod index;
 mod iter;
 mod raw;
 mod rotate;
-mod sort;
 mod specialize;
 
 #[stable(feature = "rust1", since = "1.0.0")]
@@ -703,7 +709,7 @@ const fn revswap<T>(a: &mut [T], b: &mut [T], n: usize) {
 
             // Because this function is first compiled in isolation,
             // this check tells LLVM that the indexing below is
-            // in-bounds.  Then after inlining -- once the actual
+            // in-bounds. Then after inlining -- once the actual
             // lengths of the slices are known -- it's removed.
             let (a, b) = (&mut a[..n], &mut b[..n]);
 
@@ -781,6 +787,22 @@ pub fn iter_mut(&mut self) -> IterMut<'_, T> {
     /// let mut iter = slice.windows(4);
     /// assert!(iter.next().is_none());
     /// ```
+    ///
+    /// There's no `windows_mut`, as its existence would let safe code violate the
+    /// "only one `&mut` at a time to the same thing" rule. However, you can sometimes
+    /// use [`Cell::as_slice_of_cells`](crate::cell::Cell::as_slice_of_cells) in
+    /// conjunction with `windows` to accomplish something similar:
+    /// ```
+    /// use std::cell::Cell;
+    ///
+    /// let mut array = ['R', 'u', 's', 't', ' ', '2', '0', '1', '5'];
+    /// let slice = &mut array[..];
+    /// let slice_of_cells: &[Cell<char>] = Cell::from_mut(slice).as_slice_of_cells();
+    /// for w in slice_of_cells.windows(3) {
+    ///     Cell::swap(&w[0], &w[2]);
+    /// }
+    /// assert_eq!(array, ['s', 't', ' ', '2', '0', '1', '5', 'u', 'R']);
+    /// ```
     #[stable(feature = "rust1", since = "1.0.0")]
     #[inline]
     pub fn windows(&self, size: usize) -> Windows<'_, T> {
@@ -1248,7 +1270,7 @@ pub fn array_chunks_mut<const N: usize>(&mut self) -> ArrayChunksMut<'_, T, N> {
         ArrayChunksMut::new(self)
     }
 
-    /// Returns an iterator over overlapping windows of `N` elements of  a slice,
+    /// Returns an iterator over overlapping windows of `N` elements of a slice,
     /// starting at the beginning of the slice.
     ///
     /// This is the const generic equivalent of [`windows`].
@@ -2476,7 +2498,7 @@ pub fn binary_search_by<'a, F>(&'a self, mut f: F) -> Result<usize, usize>
             let mid = left + size / 2;
 
             // SAFETY: the while condition means `size` is strictly positive, so
-            // `size/2 < size`.  Thus `left + size/2 < left + size`, which
+            // `size/2 < size`. Thus `left + size/2 < left + size`, which
             // coupled with the `left + size <= self.len()` invariant means
             // we have `left + size/2 < self.len()`, and this is in-bounds.
             let cmp = f(unsafe { self.get_unchecked(mid) });
index b8c0c3fd9493285b10117fb8ede4d341ee733607..2181f9a811855f7ac2a39352b481f0f865e193d5 100644 (file)
@@ -5,6 +5,9 @@
 //!
 //! Unstable sorting is compatible with core because it doesn't allocate memory, unlike our
 //! stable sorting implementation.
+//!
+//! In addition it also contains the core logic of the stable sort used by `slice::sort` based on
+//! TimSort.
 
 use crate::cmp;
 use crate::mem::{self, MaybeUninit, SizedTypeProperties};
@@ -18,9 +21,9 @@ struct CopyOnDrop<T> {
 
 impl<T> Drop for CopyOnDrop<T> {
     fn drop(&mut self) {
-        // SAFETY:  This is a helper class.
-        //          Please refer to its usage for correctness.
-        //          Namely, one must be sure that `src` and `dst` does not overlap as required by `ptr::copy_nonoverlapping`.
+        // SAFETY: This is a helper class.
+        //         Please refer to its usage for correctness.
+        //         Namely, one must be sure that `src` and `dest` do not overlap as required by `ptr::copy_nonoverlapping`.
         unsafe {
             ptr::copy_nonoverlapping(self.src, self.dest, 1);
         }
@@ -831,6 +834,15 @@ fn partition_at_index_loop<'a, T, F>(
 ) where
     F: FnMut(&T, &T) -> bool,
 {
+    // Limit the number of iterations and fall back to heapsort, similarly to `slice::sort_unstable`.
+    // This lowers the worst case running time from O(n^2) to O(n log n).
+    // FIXME: Investigate whether it would be better to use something like Median of Medians
+    // or Fast Deterministic Selection to guarantee O(n) worst case.
+    let mut limit = usize::BITS - v.len().leading_zeros();
+
+    // True if the last partitioning was reasonably balanced.
+    let mut was_balanced = true;
+
     loop {
         // For slices of up to this length it's probably faster to simply sort them.
         const MAX_INSERTION: usize = 10;
@@ -839,6 +851,18 @@ fn partition_at_index_loop<'a, T, F>(
             return;
         }
 
+        if limit == 0 {
+            heapsort(v, is_less);
+            return;
+        }
+
+        // If the last partitioning was imbalanced, try breaking patterns in the slice by shuffling
+        // some elements around. Hopefully we'll choose a better pivot this time.
+        if !was_balanced {
+            break_patterns(v);
+            limit -= 1;
+        }
+
         // Choose a pivot
         let (pivot, _) = choose_pivot(v, is_less);
 
@@ -863,6 +887,7 @@ fn partition_at_index_loop<'a, T, F>(
         }
 
         let (mid, _) = partition(v, pivot, is_less);
+        was_balanced = cmp::min(mid, v.len() - mid) >= v.len() / 8;
 
         // Split the slice into `left`, `pivot`, and `right`.
         let (left, right) = v.split_at_mut(mid);
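The `limit` computed above is `usize::BITS - len.leading_zeros()`, i.e. roughly `log2(len) + 1`, the number of partitioning steps a perfectly balanced selection of this size would need; the code spends that budget only on imbalanced partitions and falls back to heapsort when it reaches zero, which is what bounds the worst case at O(n log n). A small self-contained check of the formula (illustrative only, not library code):

```
fn depth_limit(len: usize) -> u32 {
    usize::BITS - len.leading_zeros()
}

fn main() {
    // floor(log2(len)) + 1 for a few sizes, independent of pointer width.
    assert_eq!(depth_limit(1), 1);
    assert_eq!(depth_limit(8), 4);
    assert_eq!(depth_limit(1_000), 10);
    assert_eq!(depth_limit(1 << 20), 21);
}
```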
@@ -883,6 +908,7 @@ fn partition_at_index_loop<'a, T, F>(
     }
 }
 
+/// Reorder the slice such that the element at `index` is at its final sorted position.
 pub fn partition_at_index<T, F>(
     v: &mut [T],
     index: usize,
@@ -927,3 +953,513 @@ pub fn partition_at_index<T, F>(
     let pivot = &mut pivot[0];
     (left, pivot, right)
 }
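For context, `partition_at_index` backs the public `select_nth_unstable` family on slices; a short usage sketch of that stable API (the internals above are not observable through it):

```
fn main() {
    let mut v = [5, 1, 4, 2, 3];
    // After the call, v[2] holds the element that would sit at index 2 if the
    // slice were sorted; everything before it is <= and everything after is >=.
    let (lesser, median, greater) = v.select_nth_unstable(2);
    assert_eq!(*median, 3);
    assert!(lesser.iter().all(|&x| x <= 3));
    assert!(greater.iter().all(|&x| x >= 3));
}
```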
+
+/// Inserts `v[0]` into the pre-sorted sequence `v[1..]` so that the whole `v[..]` becomes sorted.
+///
+/// This is the integral subroutine of insertion sort.
+fn insert_head<T, F>(v: &mut [T], is_less: &mut F)
+where
+    F: FnMut(&T, &T) -> bool,
+{
+    if v.len() >= 2 && is_less(&v[1], &v[0]) {
+        // SAFETY: Copy `tmp` back even if `is_less` panics, and ensure unique observation.
+        unsafe {
+            // There are three ways to implement insertion here:
+            //
+            // 1. Swap adjacent elements until the first one gets to its final destination.
+            //    However, this way we copy data around more than is necessary. If elements are big
+            //    structures (costly to copy), this method will be slow.
+            //
+            // 2. Iterate until the right place for the first element is found. Then shift the
+            //    elements succeeding it to make room for it and finally place it into the
+            //    remaining hole. This is a good method.
+            //
+            // 3. Copy the first element into a temporary variable. Iterate until the right place
+            //    for it is found. As we go along, copy every traversed element into the slot
+            //    preceding it. Finally, copy data from the temporary variable into the remaining
+            //    hole. This method is very good. Benchmarks demonstrated slightly better
+            //    performance than with the 2nd method.
+            //
+            // All methods were benchmarked, and the 3rd showed best results. So we chose that one.
+            let tmp = mem::ManuallyDrop::new(ptr::read(&v[0]));
+
+            // Intermediate state of the insertion process is always tracked by `hole`, which
+            // serves two purposes:
+            // 1. Protects integrity of `v` from panics in `is_less`.
+            // 2. Fills the remaining hole in `v` in the end.
+            //
+            // Panic safety:
+            //
+            // If `is_less` panics at any point during the process, `hole` will get dropped and
+            // fill the hole in `v` with `tmp`, thus ensuring that `v` still holds every object it
+            // initially held exactly once.
+            let mut hole = InsertionHole { src: &*tmp, dest: &mut v[1] };
+            ptr::copy_nonoverlapping(&v[1], &mut v[0], 1);
+
+            for i in 2..v.len() {
+                if !is_less(&v[i], &*tmp) {
+                    break;
+                }
+                ptr::copy_nonoverlapping(&v[i], &mut v[i - 1], 1);
+                hole.dest = &mut v[i];
+            }
+            // `hole` gets dropped and thus copies `tmp` into the remaining hole in `v`.
+        }
+    }
+
+    // When dropped, copies from `src` into `dest`.
+    struct InsertionHole<T> {
+        src: *const T,
+        dest: *mut T,
+    }
+
+    impl<T> Drop for InsertionHole<T> {
+        fn drop(&mut self) {
+            // SAFETY: The caller must ensure that src and dest are correctly set.
+            unsafe {
+                ptr::copy_nonoverlapping(self.src, self.dest, 1);
+            }
+        }
+    }
+}
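The comment above settles on method 3 (copy the head out, shift the smaller elements, then drop the copy into the remaining hole). As a safe-Rust illustration of the same operation, without the manual pointer copies or the panic guard, one could write something like the following; this is not the std implementation, it uses `partition_point` and a rotation instead:

```
fn insert_head_safe<T: Ord>(v: &mut [T]) {
    if v.len() < 2 {
        return;
    }
    // Find where v[0] belongs within the already sorted tail v[1..]...
    let pos = v[1..].partition_point(|x| x < &v[0]);
    // ...and rotate it into place, shifting the smaller elements left by one.
    v[..=pos].rotate_left(1);
}

fn main() {
    let mut v = [5, 1, 2, 4, 8];
    insert_head_safe(&mut v);
    assert_eq!(v, [1, 2, 4, 5, 8]);
}
```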
+
+/// Merges non-decreasing runs `v[..mid]` and `v[mid..]` using `buf` as temporary storage, and
+/// stores the result into `v[..]`.
+///
+/// # Safety
+///
+/// The two slices must be non-empty and `mid` must be in bounds. Buffer `buf` must be long enough
+/// to hold a copy of the shorter slice. Also, `T` must not be a zero-sized type.
+unsafe fn merge<T, F>(v: &mut [T], mid: usize, buf: *mut T, is_less: &mut F)
+where
+    F: FnMut(&T, &T) -> bool,
+{
+    let len = v.len();
+    let v = v.as_mut_ptr();
+
+    // SAFETY: mid and len must be in-bounds of v.
+    let (v_mid, v_end) = unsafe { (v.add(mid), v.add(len)) };
+
+    // The merge process first copies the shorter run into `buf`. Then it traces the newly copied
+    // run and the longer run forwards (or backwards), comparing their next unconsumed elements and
+    // copying the lesser (or greater) one into `v`.
+    //
+    // As soon as the shorter run is fully consumed, the process is done. If the longer run gets
+    // consumed first, then we must copy whatever is left of the shorter run into the remaining
+    // hole in `v`.
+    //
+    // Intermediate state of the process is always tracked by `hole`, which serves two purposes:
+    // 1. Protects integrity of `v` from panics in `is_less`.
+    // 2. Fills the remaining hole in `v` if the longer run gets consumed first.
+    //
+    // Panic safety:
+    //
+    // If `is_less` panics at any point during the process, `hole` will get dropped and fill the
+    // hole in `v` with the unconsumed range in `buf`, thus ensuring that `v` still holds every
+    // object it initially held exactly once.
+    let mut hole;
+
+    if mid <= len - mid {
+        // The left run is shorter.
+
+        // SAFETY: buf must have enough capacity for `v[..mid]`.
+        unsafe {
+            ptr::copy_nonoverlapping(v, buf, mid);
+            hole = MergeHole { start: buf, end: buf.add(mid), dest: v };
+        }
+
+        // Initially, these pointers point to the beginnings of their arrays.
+        let left = &mut hole.start;
+        let mut right = v_mid;
+        let out = &mut hole.dest;
+
+        while *left < hole.end && right < v_end {
+            // Consume the lesser side.
+            // If equal, prefer the left run to maintain stability.
+
+            // SAFETY: left and right must be valid and part of v; the same holds for out.
+            unsafe {
+                let to_copy = if is_less(&*right, &**left) {
+                    get_and_increment(&mut right)
+                } else {
+                    get_and_increment(left)
+                };
+                ptr::copy_nonoverlapping(to_copy, get_and_increment(out), 1);
+            }
+        }
+    } else {
+        // The right run is shorter.
+
+        // SAFETY: buf must have enough capacity for `v[mid..]`.
+        unsafe {
+            ptr::copy_nonoverlapping(v_mid, buf, len - mid);
+            hole = MergeHole { start: buf, end: buf.add(len - mid), dest: v_mid };
+        }
+
+        // Initially, these pointers point past the ends of their arrays.
+        let left = &mut hole.dest;
+        let right = &mut hole.end;
+        let mut out = v_end;
+
+        while v < *left && buf < *right {
+            // Consume the greater side.
+            // If equal, prefer the right run to maintain stability.
+
+            // SAFETY: left and right must be valid and part of v; the same holds for out.
+            unsafe {
+                let to_copy = if is_less(&*right.sub(1), &*left.sub(1)) {
+                    decrement_and_get(left)
+                } else {
+                    decrement_and_get(right)
+                };
+                ptr::copy_nonoverlapping(to_copy, decrement_and_get(&mut out), 1);
+            }
+        }
+    }
+    // Finally, `hole` gets dropped. If the shorter run was not fully consumed, whatever remains of
+    // it will now be copied into the hole in `v`.
+
+    unsafe fn get_and_increment<T>(ptr: &mut *mut T) -> *mut T {
+        let old = *ptr;
+
+        // SAFETY: ptr.add(1) must still be a valid pointer and part of `v`.
+        *ptr = unsafe { ptr.add(1) };
+        old
+    }
+
+    unsafe fn decrement_and_get<T>(ptr: &mut *mut T) -> *mut T {
+        // SAFETY: ptr.sub(1) must still be a valid pointer and part of `v`.
+        *ptr = unsafe { ptr.sub(1) };
+        *ptr
+    }
+
+    // When dropped, copies the range `start..end` into `dest..`.
+    struct MergeHole<T> {
+        start: *mut T,
+        end: *mut T,
+        dest: *mut T,
+    }
+
+    impl<T> Drop for MergeHole<T> {
+        fn drop(&mut self) {
+            // SAFETY: `T` is not a zero-sized type, and these are pointers into a slice's elements.
+            unsafe {
+                let len = self.end.sub_ptr(self.start);
+                ptr::copy_nonoverlapping(self.start, self.dest, len);
+            }
+        }
+    }
+}
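The merge above works in place, buffering only the shorter run and relying on `MergeHole` for panic safety. A much-simplified, allocation-based sketch of the same stable merge follows (it copies both runs instead of only the shorter one and ignores panic safety; names are illustrative):

```
fn merge_safe<T: Ord + Copy>(v: &mut [T], mid: usize) {
    let (left, right) = (v[..mid].to_vec(), v[mid..].to_vec());
    let (mut i, mut j) = (0, 0);
    for slot in v.iter_mut() {
        // Prefer the left run on ties so that equal elements keep their order.
        if j >= right.len() || (i < left.len() && left[i] <= right[j]) {
            *slot = left[i];
            i += 1;
        } else {
            *slot = right[j];
            j += 1;
        }
    }
}

fn main() {
    let mut v = [1, 4, 7, 2, 3, 9];
    merge_safe(&mut v, 3);
    assert_eq!(v, [1, 2, 3, 4, 7, 9]);
}
```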
+
+/// This merge sort borrows some (but not all) ideas from TimSort, which used to be described in
+/// detail [here](https://github.com/python/cpython/blob/main/Objects/listsort.txt). However, Python
+/// has since switched to a Powersort-based implementation.
+///
+/// The algorithm identifies strictly descending and non-descending subsequences, which are called
+/// natural runs. There is a stack of pending runs yet to be merged. Each newly found run is pushed
+/// onto the stack, and then some pairs of adjacent runs are merged until these two invariants are
+/// satisfied:
+///
+/// 1. for every `i` in `1..runs.len()`: `runs[i - 1].len > runs[i].len`
+/// 2. for every `i` in `2..runs.len()`: `runs[i - 2].len > runs[i - 1].len + runs[i].len`
+///
+/// The invariants ensure that the total running time is *O*(*n* \* log(*n*)) worst-case.
+pub fn merge_sort<T, CmpF, ElemAllocF, ElemDeallocF, RunAllocF, RunDeallocF>(
+    v: &mut [T],
+    is_less: &mut CmpF,
+    elem_alloc_fn: ElemAllocF,
+    elem_dealloc_fn: ElemDeallocF,
+    run_alloc_fn: RunAllocF,
+    run_dealloc_fn: RunDeallocF,
+) where
+    CmpF: FnMut(&T, &T) -> bool,
+    ElemAllocF: Fn(usize) -> *mut T,
+    ElemDeallocF: Fn(*mut T, usize),
+    RunAllocF: Fn(usize) -> *mut TimSortRun,
+    RunDeallocF: Fn(*mut TimSortRun, usize),
+{
+    // Slices of up to this length get sorted using insertion sort.
+    const MAX_INSERTION: usize = 20;
+    // Very short runs are extended using insertion sort to span at least this many elements.
+    const MIN_RUN: usize = 10;
+
+    // The caller should have already checked that `T` is not a ZST.
+    debug_assert!(!T::IS_ZST);
+
+    let len = v.len();
+
+    // Short arrays get sorted in-place via insertion sort to avoid allocations.
+    if len <= MAX_INSERTION {
+        if len >= 2 {
+            for i in (0..len - 1).rev() {
+                insert_head(&mut v[i..], is_less);
+            }
+        }
+        return;
+    }
+
+    // Allocate a buffer to use as scratch memory. We only ever store shallow copies of the
+    // contents of `v` in it, so no destructors run on those copies if
+    // `is_less` panics. When merging two sorted runs, this buffer holds a copy of the shorter run,
+    // which will always have length at most `len / 2`.
+    let buf = BufGuard::new(len / 2, elem_alloc_fn, elem_dealloc_fn);
+    let buf_ptr = buf.buf_ptr;
+
+    let mut runs = RunVec::new(run_alloc_fn, run_dealloc_fn);
+
+    // In order to identify natural runs in `v`, we traverse it backwards. That might seem like a
+    // strange decision, but consider the fact that merges more often go in the opposite direction
+    // (forwards). According to benchmarks, merging forwards is slightly faster than merging
+    // backwards. To conclude, identifying runs by traversing backwards improves performance.
+    let mut end = len;
+    while end > 0 {
+        // Find the next natural run, and reverse it if it's strictly descending.
+        let mut start = end - 1;
+        if start > 0 {
+            start -= 1;
+
+            // SAFETY: `v.get_unchecked` must be fed correct in-bounds indices.
+            unsafe {
+                if is_less(v.get_unchecked(start + 1), v.get_unchecked(start)) {
+                    while start > 0 && is_less(v.get_unchecked(start), v.get_unchecked(start - 1)) {
+                        start -= 1;
+                    }
+                    v[start..end].reverse();
+                } else {
+                    while start > 0 && !is_less(v.get_unchecked(start), v.get_unchecked(start - 1))
+                    {
+                        start -= 1;
+                    }
+                }
+            }
+        }
+
+        // Insert some more elements into the run if it's too short. Insertion sort is faster than
+        // merge sort on short sequences, so this significantly improves performance.
+        while start > 0 && end - start < MIN_RUN {
+            start -= 1;
+            insert_head(&mut v[start..end], is_less);
+        }
+
+        // Push this run onto the stack.
+        runs.push(TimSortRun { start, len: end - start });
+        end = start;
+
+        // Merge some pairs of adjacent runs to satisfy the invariants.
+        while let Some(r) = collapse(runs.as_slice()) {
+            let left = runs[r + 1];
+            let right = runs[r];
+            // SAFETY: `buf_ptr` must hold enough capacity for the shorter of the two sides, and
+            // neither side may have length 0.
+            unsafe {
+                merge(&mut v[left.start..right.start + right.len], left.len, buf_ptr, is_less);
+            }
+            runs[r] = TimSortRun { start: left.start, len: left.len + right.len };
+            runs.remove(r + 1);
+        }
+    }
+
+    // Finally, exactly one run must remain in the stack.
+    debug_assert!(runs.len() == 1 && runs[0].start == 0 && runs[0].len == len);
+
+    // Examines the stack of runs and identifies the next pair of runs to merge. More specifically,
+    // if `Some(r)` is returned, that means `runs[r]` and `runs[r + 1]` must be merged next. If the
+    // algorithm should continue building a new run instead, `None` is returned.
+    //
+    // TimSort is infamous for its buggy implementations, as described here:
+    // http://envisage-project.eu/timsort-specification-and-verification/
+    //
+    // The gist of the story is: we must enforce the invariants on the top four runs on the stack.
+    // Enforcing them on just top three is not sufficient to ensure that the invariants will still
+    // hold for *all* runs in the stack.
+    //
+    // This function correctly checks invariants for the top four runs. Additionally, if the top
+    // run starts at index 0, it will always demand a merge operation until the stack is fully
+    // collapsed, in order to complete the sort.
+    #[inline]
+    fn collapse(runs: &[TimSortRun]) -> Option<usize> {
+        let n = runs.len();
+        if n >= 2
+            && (runs[n - 1].start == 0
+                || runs[n - 2].len <= runs[n - 1].len
+                || (n >= 3 && runs[n - 3].len <= runs[n - 2].len + runs[n - 1].len)
+                || (n >= 4 && runs[n - 4].len <= runs[n - 3].len + runs[n - 2].len))
+        {
+            if n >= 3 && runs[n - 3].len < runs[n - 1].len { Some(n - 3) } else { Some(n - 2) }
+        } else {
+            None
+        }
+    }
+
+    // Extremely basic versions of Vec.
+    // Their use is very limited, and keeping the code here allows reuse between the sort
+    // implementations.
+    struct BufGuard<T, ElemDeallocF>
+    where
+        ElemDeallocF: Fn(*mut T, usize),
+    {
+        buf_ptr: *mut T,
+        capacity: usize,
+        elem_dealloc_fn: ElemDeallocF,
+    }
+
+    impl<T, ElemDeallocF> BufGuard<T, ElemDeallocF>
+    where
+        ElemDeallocF: Fn(*mut T, usize),
+    {
+        fn new<ElemAllocF>(
+            len: usize,
+            elem_alloc_fn: ElemAllocF,
+            elem_dealloc_fn: ElemDeallocF,
+        ) -> Self
+        where
+            ElemAllocF: Fn(usize) -> *mut T,
+        {
+            Self { buf_ptr: elem_alloc_fn(len), capacity: len, elem_dealloc_fn }
+        }
+    }
+
+    impl<T, ElemDeallocF> Drop for BufGuard<T, ElemDeallocF>
+    where
+        ElemDeallocF: Fn(*mut T, usize),
+    {
+        fn drop(&mut self) {
+            (self.elem_dealloc_fn)(self.buf_ptr, self.capacity);
+        }
+    }
+
+    struct RunVec<RunAllocF, RunDeallocF>
+    where
+        RunAllocF: Fn(usize) -> *mut TimSortRun,
+        RunDeallocF: Fn(*mut TimSortRun, usize),
+    {
+        buf_ptr: *mut TimSortRun,
+        capacity: usize,
+        len: usize,
+        run_alloc_fn: RunAllocF,
+        run_dealloc_fn: RunDeallocF,
+    }
+
+    impl<RunAllocF, RunDeallocF> RunVec<RunAllocF, RunDeallocF>
+    where
+        RunAllocF: Fn(usize) -> *mut TimSortRun,
+        RunDeallocF: Fn(*mut TimSortRun, usize),
+    {
+        fn new(run_alloc_fn: RunAllocF, run_dealloc_fn: RunDeallocF) -> Self {
+            // Most slices can be sorted with at most 16 runs in-flight.
+            const START_RUN_CAPACITY: usize = 16;
+
+            Self {
+                buf_ptr: run_alloc_fn(START_RUN_CAPACITY),
+                capacity: START_RUN_CAPACITY,
+                len: 0,
+                run_alloc_fn,
+                run_dealloc_fn,
+            }
+        }
+
+        fn push(&mut self, val: TimSortRun) {
+            if self.len == self.capacity {
+                let old_capacity = self.capacity;
+                let old_buf_ptr = self.buf_ptr;
+
+                self.capacity = self.capacity * 2;
+                self.buf_ptr = (self.run_alloc_fn)(self.capacity);
+
+                // SAFETY: the new and old buf_ptr were correctly allocated, and old_buf_ptr has
+                // old_capacity valid elements.
+                unsafe {
+                    ptr::copy_nonoverlapping(old_buf_ptr, self.buf_ptr, old_capacity);
+                }
+
+                (self.run_dealloc_fn)(old_buf_ptr, old_capacity);
+            }
+
+            // SAFETY: The invariant was just checked.
+            unsafe {
+                self.buf_ptr.add(self.len).write(val);
+            }
+            self.len += 1;
+        }
+
+        fn remove(&mut self, index: usize) {
+            if index >= self.len {
+                panic!("Index out of bounds");
+            }
+
+            // SAFETY: buf_ptr needs to be valid and len invariant upheld.
+            unsafe {
+                // the place we are taking from.
+                let ptr = self.buf_ptr.add(index);
+
+                // Shift everything down to fill in that spot.
+                ptr::copy(ptr.add(1), ptr, self.len - index - 1);
+            }
+            self.len -= 1;
+        }
+
+        fn as_slice(&self) -> &[TimSortRun] {
+            // SAFETY: Safe as long as buf_ptr is valid and len invariant was upheld.
+            unsafe { &*ptr::slice_from_raw_parts(self.buf_ptr, self.len) }
+        }
+
+        fn len(&self) -> usize {
+            self.len
+        }
+    }
+
+    impl<RunAllocF, RunDeallocF> core::ops::Index<usize> for RunVec<RunAllocF, RunDeallocF>
+    where
+        RunAllocF: Fn(usize) -> *mut TimSortRun,
+        RunDeallocF: Fn(*mut TimSortRun, usize),
+    {
+        type Output = TimSortRun;
+
+        fn index(&self, index: usize) -> &Self::Output {
+            if index < self.len {
+                // SAFETY: buf_ptr and len invariant must be upheld.
+                unsafe {
+                    return &*(self.buf_ptr.add(index));
+                }
+            }
+
+            panic!("Index out of bounds");
+        }
+    }
+
+    impl<RunAllocF, RunDeallocF> core::ops::IndexMut<usize> for RunVec<RunAllocF, RunDeallocF>
+    where
+        RunAllocF: Fn(usize) -> *mut TimSortRun,
+        RunDeallocF: Fn(*mut TimSortRun, usize),
+    {
+        fn index_mut(&mut self, index: usize) -> &mut Self::Output {
+            if index < self.len {
+                // SAFETY: buf_ptr and len invariant must be upheld.
+                unsafe {
+                    return &mut *(self.buf_ptr.add(index));
+                }
+            }
+
+            panic!("Index out of bounds");
+        }
+    }
+
+    impl<RunAllocF, RunDeallocF> Drop for RunVec<RunAllocF, RunDeallocF>
+    where
+        RunAllocF: Fn(usize) -> *mut TimSortRun,
+        RunDeallocF: Fn(*mut TimSortRun, usize),
+    {
+        fn drop(&mut self) {
+            // As long as TimSortRun is Copy, we don't need to drop the runs individually; freeing
+            // the whole allocation is enough.
+            (self.run_dealloc_fn)(self.buf_ptr, self.capacity);
+        }
+    }
+}
+
+/// Internal type used by merge_sort.
+#[derive(Clone, Copy, Debug)]
+pub struct TimSortRun {
+    len: usize,
+    start: usize,
+}
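The two run-length invariants in the `merge_sort` documentation above are what keep the run stack small: the minimal pending run lengths that satisfy them grow at least as fast as the Fibonacci numbers, so only O(log n) runs can ever be pending at once. A rough numeric illustration of that bound (not library code):

```
fn main() {
    // Minimal run lengths satisfying `runs[i - 2].len > runs[i - 1].len + runs[i].len`,
    // starting from the smallest possible runs at the top of the stack.
    let mut min_runs: Vec<u64> = vec![1, 2];
    while *min_runs.last().unwrap() < u64::from(u32::MAX) {
        let n = min_runs.len();
        min_runs.push(min_runs[n - 1] + min_runs[n - 2] + 1);
    }
    // Even for slices of roughly 4 billion elements, fewer than 50 runs can be pending.
    assert!(min_runs.len() < 50);
}
```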
index a4425fd234a4e22ad79ee3dfd4418464d80723ae..89adfccd90135233ef2ac66b533571bbbc3eeae5 100644 (file)
@@ -174,6 +174,7 @@ pub const fn new(
 /// Currently, `Context` only serves to provide access to a [`&Waker`](Waker)
 /// which can be used to wake the current task.
 #[stable(feature = "futures_api", since = "1.36.0")]
+#[cfg_attr(not(bootstrap), lang = "Context")]
 pub struct Context<'a> {
     waker: &'a Waker,
     // Ensure we future-proof against variance changes by forcing
index c4e105cba600d008c76b3f8042ec91a0ee566c0f..a2b9bb551e677c57672721373a2866f624a307e5 100644 (file)
@@ -15,7 +15,7 @@ macro_rules! test_literal {
         for input in inputs {
             assert_eq!(input.parse(), Ok(x64));
             assert_eq!(input.parse(), Ok(x32));
-            let neg_input = &format!("-{input}");
+            let neg_input = format!("-{input}");
             assert_eq!(neg_input.parse(), Ok(-x64));
             assert_eq!(neg_input.parse(), Ok(-x32));
         }
index fd35d96c3fef814bd7952fb8b13afbcd7f31354f..39559cdbb5ea9c275b9b3db5b1db08291ec90c64 100644 (file)
@@ -1488,7 +1488,7 @@ macro_rules! panic_cases {
                 // optional:
                 //
                 // one or more similar inputs for which data[input] succeeds,
-                // and the corresponding output as an array.  This helps validate
+                // and the corresponding output as an array. This helps validate
                 // "critical points" where an input range straddles the boundary
                 // between valid and invalid.
                 // (such as the input `len..len`, which is just barely valid)
index 012182e090b9f1b2ab3bef92a654a6bfd7ec0837..d576bd0ccee03b5d4ea32070dd410f94a1ac131c 100644 (file)
@@ -69,7 +69,7 @@ fn dot(x: &[f64], y: &[f64]) -> f64 {
 #[cfg(test)]
 #[test]
 fn test() {
-    assert_eq!(&format!("{:.9}", spectral_norm(100)), "1.274219991");
+    assert_eq!(format!("{:.9}", spectral_norm(100)), "1.274219991");
 }
 
 fn main() {
index 5c5ef0b1125a0d712a5ef787ce99ab5f0ec801f0..286ad68fd13e8fdf70efab2fb31cb122ec23286d 100644 (file)
@@ -1512,7 +1512,7 @@ pub fn is_dir(&self) -> bool {
     }
 
     /// Tests whether this file type represents a regular file.
-    /// The result is  mutually exclusive to the results of
+    /// The result is mutually exclusive to the results of
     /// [`is_dir`] and [`is_symlink`]; only zero or one of these
     /// tests may pass.
     ///
index f4e688eb926cc7bff8962f1f79ecca455d7c27bc..4c1b7d57684ddcf01b890b8742bc4732588248e8 100644 (file)
@@ -288,8 +288,8 @@ fn seek(&mut self, _: SeekFrom) -> io::Result<u64> {
     let mut reader = BufReader::with_capacity(5, ErrAfterFirstSeekReader { first_seek: true });
     assert_eq!(reader.fill_buf().ok(), Some(&[0, 0, 0, 0, 0][..]));
 
-    // The following seek will require two underlying seeks.  The first will
-    // succeed but the second will fail.  This should still invalidate the
+    // The following seek will require two underlying seeks. The first will
+    // succeed but the second will fail. This should still invalidate the
     // buffer.
     assert!(reader.seek(SeekFrom::Current(i64::MIN)).is_err());
     assert_eq!(reader.buffer().len(), 0);
index 601c01c2128c816757c103d3a04f3c4c0dd8646c..3581484050dd1a4e506574b46d99ce609c5ed996 100644 (file)
@@ -374,10 +374,10 @@ macro_rules! static_assert {
 static_assert!(align_of::<SimpleMessage>() >= TAG_MASK + 1);
 static_assert!(align_of::<Custom>() >= TAG_MASK + 1);
 
-static_assert!(@usize_eq: (TAG_MASK & TAG_SIMPLE_MESSAGE), TAG_SIMPLE_MESSAGE);
-static_assert!(@usize_eq: (TAG_MASK & TAG_CUSTOM), TAG_CUSTOM);
-static_assert!(@usize_eq: (TAG_MASK & TAG_OS), TAG_OS);
-static_assert!(@usize_eq: (TAG_MASK & TAG_SIMPLE), TAG_SIMPLE);
+static_assert!(@usize_eq: TAG_MASK & TAG_SIMPLE_MESSAGE, TAG_SIMPLE_MESSAGE);
+static_assert!(@usize_eq: TAG_MASK & TAG_CUSTOM, TAG_CUSTOM);
+static_assert!(@usize_eq: TAG_MASK & TAG_OS, TAG_OS);
+static_assert!(@usize_eq: TAG_MASK & TAG_SIMPLE, TAG_SIMPLE);
 
 // This is obviously true (`TAG_CUSTOM` is `0b01`), but in `Repr::new_custom` we
 // offset a pointer by this value, and expect it to both be within the same
index 16c634e9afd50110519875b191aa035c3bc6e13e..9aea62a5b940c553cfb011ecb322f7ff7a5a9af0 100644 (file)
@@ -190,5 +190,5 @@ fn test_std_io_error_downcast() {
     let io_error = io_error.downcast::<E>().unwrap_err();
 
     assert_eq!(SIMPLE_MESSAGE.kind, io_error.kind());
-    assert_eq!(SIMPLE_MESSAGE.message, &*format!("{io_error}"));
+    assert_eq!(SIMPLE_MESSAGE.message, format!("{io_error}"));
 }
index a7e13f5b866b53058f0f995f5818e739c47c09de..99cc01863104850f21f1aba37334021cc8cba5f6 100644 (file)
 #![feature(allocator_internals)]
 #![feature(allow_internal_unsafe)]
 #![feature(allow_internal_unstable)]
-#![feature(box_syntax)]
 #![feature(c_unwind)]
 #![feature(cfg_target_thread_local)]
 #![feature(concat_idents)]
index 7c3430b2b217c5b838e22a63bf3511e846bc0f56..0eb59d45de727b6603bd32b1988b55fcb509e9ac 100644 (file)
@@ -125,8 +125,8 @@ fn ipv4_addr_to_string() {
     assert_eq!(Ipv4Addr::new(127, 127, 127, 127).to_string(), "127.127.127.127");
 
     // Test padding
-    assert_eq!(&format!("{:16}", Ipv4Addr::new(1, 1, 1, 1)), "1.1.1.1         ");
-    assert_eq!(&format!("{:>16}", Ipv4Addr::new(1, 1, 1, 1)), "         1.1.1.1");
+    assert_eq!(format!("{:16}", Ipv4Addr::new(1, 1, 1, 1)), "1.1.1.1         ");
+    assert_eq!(format!("{:>16}", Ipv4Addr::new(1, 1, 1, 1)), "         1.1.1.1");
 }
 
 #[test]
@@ -148,8 +148,8 @@ fn ipv6_addr_to_string() {
         "1111:2222:3333:4444:5555:6666:7777:8888"
     );
     // padding
-    assert_eq!(&format!("{:20}", Ipv6Addr::new(1, 2, 3, 4, 5, 6, 7, 8)), "1:2:3:4:5:6:7:8     ");
-    assert_eq!(&format!("{:>20}", Ipv6Addr::new(1, 2, 3, 4, 5, 6, 7, 8)), "     1:2:3:4:5:6:7:8");
+    assert_eq!(format!("{:20}", Ipv6Addr::new(1, 2, 3, 4, 5, 6, 7, 8)), "1:2:3:4:5:6:7:8     ");
+    assert_eq!(format!("{:>20}", Ipv6Addr::new(1, 2, 3, 4, 5, 6, 7, 8)), "     1:2:3:4:5:6:7:8");
 
     // reduce a single run of zeros
     assert_eq!(
index 15211f81981ba1f6b5d1e24a06276c3a43eaa65e..dfc6dabbed1edeace9365cd0ed3bcdb89ea4e932 100644 (file)
@@ -64,11 +64,11 @@ fn ipv4_socket_addr_to_string() {
 
     // Test padding.
     assert_eq!(
-        &format!("{:16}", SocketAddrV4::new(Ipv4Addr::new(1, 1, 1, 1), 53)),
+        format!("{:16}", SocketAddrV4::new(Ipv4Addr::new(1, 1, 1, 1), 53)),
         "1.1.1.1:53      "
     );
     assert_eq!(
-        &format!("{:>16}", SocketAddrV4::new(Ipv4Addr::new(1, 1, 1, 1), 53)),
+        format!("{:>16}", SocketAddrV4::new(Ipv4Addr::new(1, 1, 1, 1), 53)),
         "      1.1.1.1:53"
     );
 }
@@ -111,11 +111,11 @@ fn ipv6_socket_addr_to_string() {
 
     // Test padding.
     assert_eq!(
-        &format!("{:22}", SocketAddrV6::new(Ipv6Addr::new(1, 2, 3, 4, 5, 6, 7, 8), 9, 0, 0)),
+        format!("{:22}", SocketAddrV6::new(Ipv6Addr::new(1, 2, 3, 4, 5, 6, 7, 8), 9, 0, 0)),
         "[1:2:3:4:5:6:7:8]:9   "
     );
     assert_eq!(
-        &format!("{:>22}", SocketAddrV6::new(Ipv6Addr::new(1, 2, 3, 4, 5, 6, 7, 8), 9, 0, 0)),
+        format!("{:>22}", SocketAddrV6::new(Ipv6Addr::new(1, 2, 3, 4, 5, 6, 7, 8), 9, 0, 0)),
         "   [1:2:3:4:5:6:7:8]:9"
     );
 }
index c6aa7c77dbc41d74ff1fb776de1d82c13be3a8bc..35de4860fe24925b215e84578bbba65c6a151fe5 100644 (file)
@@ -3,7 +3,7 @@
 //! This module is supported on Unix platforms and WASI, which both use a
 //! similar file descriptor system for referencing OS resources.
 
-#![stable(feature = "io_safety", since = "1.63.0")]
+#![stable(feature = "os_fd", since = "1.66.0")]
 #![deny(unsafe_op_in_unsafe_fn)]
 
 // `RawFd`, `AsRawFd`, etc.
@@ -19,7 +19,7 @@
 mod tests;
 
 // Export the types and traits for the public API.
-#[unstable(feature = "os_fd", issue = "98699")]
+#[stable(feature = "os_fd", since = "1.66.0")]
 pub use owned::*;
-#[unstable(feature = "os_fd", issue = "98699")]
+#[stable(feature = "os_fd", since = "1.66.0")]
 pub use raw::*;
index c16518577f7c466794127c9db77be3136e3d4c46..c41e093a7e5c6442f7022cd09d7f587f680af185 100644 (file)
@@ -100,7 +100,7 @@ pub fn try_clone_to_owned(&self) -> crate::io::Result<OwnedFd> {
 
         // For ESP-IDF, F_DUPFD is used instead, because the CLOEXEC semantics
         // will never be supported, as this is a bare metal framework with
-        // no capabilities for multi-process execution.  While F_DUPFD is also
+        // no capabilities for multi-process execution. While F_DUPFD is also
         // not supported yet, it might be (currently it returns ENOSYS).
         #[cfg(target_os = "espidf")]
         let cmd = libc::F_DUPFD;
index df3fc8e6a3b660cfa61f85ccc38fff7501a95f9c..85065984fbbb19872e7c2935e683969b4e8feef2 100644 (file)
@@ -38,7 +38,7 @@ pub trait SocketAddrExt: Sealed {
     ///     Ok(())
     /// }
     /// ```
-    fn from_abstract_name<N>(name: &N) -> crate::io::Result<SocketAddr>
+    fn from_abstract_name<N>(name: N) -> crate::io::Result<SocketAddr>
     where
         N: AsRef<[u8]>;
 
index 81ac829d21bc81164006a02f55a81a32b0ad35c7..ece2b33bddf364e16c1421b68f5a37e362ee5793 100644 (file)
@@ -256,7 +256,7 @@ fn as_abstract_name(&self) -> Option<&[u8]> {
         if let AddressKind::Abstract(name) = self.address() { Some(name) } else { None }
     }
 
-    fn from_abstract_name<N>(name: &N) -> crate::io::Result<Self>
+    fn from_abstract_name<N>(name: N) -> crate::io::Result<Self>
     where
         N: AsRef<[u8]>,
     {
index b30dd8eecd84c210d709d998cd15a95e77b5d2dc..b0db3112e22fd34470d06fa910b73ef4d118f4a7 100644 (file)
@@ -306,11 +306,11 @@ pub mod panic_count {
     // and after increase and decrease, but not necessarily during their execution.
     //
     // Additionally, the top bit of GLOBAL_PANIC_COUNT (GLOBAL_ALWAYS_ABORT_FLAG)
-    // records whether panic::always_abort() has been called.  This can only be
+    // records whether panic::always_abort() has been called. This can only be
     // set, never cleared.
     // panic::always_abort() is usually called to prevent memory allocations done by
     // the panic handling in the child created by `libc::fork`.
-    // Memory allocations performed in  a child created with `libc::fork` are undefined
+    // Memory allocations performed in a child created with `libc::fork` are undefined
     // behavior in most operating systems.
     // Accessing LOCAL_PANIC_COUNT in a child created by `libc::fork` would lead to a memory
     // allocation. Only GLOBAL_PANIC_COUNT can be accessed in this situation. This is
index a2dcee0e2bdb26ef252f04cd8feee8d217b27683..2f53cf83936910e1df1dc217f96a8884c6f53302 100644 (file)
@@ -607,7 +607,7 @@ pub struct Components<'a> {
 
     // true if path *physically* has a root separator; for most Windows
     // prefixes, it may have a "logical" root separator for the purposes of
-    // normalization, e.g.,  \\server\share == \\server\share\.
+    // normalization, e.g., \\server\share == \\server\share\.
     has_physical_root: bool,
 
     // The iterator is double-ended, and these two states keep track of what has
@@ -2531,6 +2531,8 @@ pub fn extension(&self) -> Option<&OsStr> {
 
     /// Creates an owned [`PathBuf`] with `path` adjoined to `self`.
     ///
+    /// If `path` is absolute, it replaces the current path.
+    ///
     /// See [`PathBuf::push`] for more details on what it means to adjoin a path.
     ///
     /// # Examples
@@ -2539,6 +2541,7 @@ pub fn extension(&self) -> Option<&OsStr> {
     /// use std::path::{Path, PathBuf};
     ///
     /// assert_eq!(Path::new("/etc").join("passwd"), PathBuf::from("/etc/passwd"));
+    /// assert_eq!(Path::new("/etc").join("/bin/sh"), PathBuf::from("/bin/sh"));
     /// ```
     #[stable(feature = "rust1", since = "1.0.0")]
     #[must_use]
index 8f65544a9e894de0a21f897911fea2d688cead06..2507f70695173396f81d8356b97ad1e24ec8f1f2 100644 (file)
@@ -27,10 +27,10 @@ pub unsafe fn new_with_coreid(
         p: Box<dyn FnOnce()>,
         core_id: isize,
     ) -> io::Result<Thread> {
-        let p = Box::into_raw(box p);
+        let p = Box::into_raw(Box::new(p));
         let tid = abi::spawn2(
             thread_start,
-            p as usize,
+            p.expose_addr(),
             abi::Priority::into(abi::NORMAL_PRIO),
             stack,
             core_id,
index 9b683fce157488df8d26ee08184e278578e5c17b..613266b9530a800d47560ebb3d5f018b17732387 100644 (file)
@@ -5,32 +5,23 @@
 // This solution works like the macOS implementation and
 // doesn't need additional OS support
 
-use crate::cell::Cell;
-use crate::ptr;
+use crate::mem;
 
 #[thread_local]
-static DTORS: Cell<*mut List> = Cell::new(ptr::null_mut());
-
-type List = Vec<(*mut u8, unsafe extern "C" fn(*mut u8))>;
+static mut DTORS: Vec<(*mut u8, unsafe extern "C" fn(*mut u8))> = Vec::new();
 
 pub unsafe fn register_dtor(t: *mut u8, dtor: unsafe extern "C" fn(*mut u8)) {
-    if DTORS.get().is_null() {
-        let v: Box<List> = box Vec::new();
-        DTORS.set(Box::into_raw(v));
-    }
-
-    let list: &mut List = &mut *DTORS.get();
+    let list = &mut DTORS;
     list.push((t, dtor));
 }
 
 // every thread calls this function to run through all possible destructors
 pub unsafe fn run_dtors() {
-    let mut ptr = DTORS.replace(ptr::null_mut());
-    while !ptr.is_null() {
-        let list = Box::from_raw(ptr);
-        for (ptr, dtor) in list.into_iter() {
+    let mut list = mem::take(&mut DTORS);
+    while !list.is_empty() {
+        for (ptr, dtor) in list {
             dtor(ptr);
         }
-        ptr = DTORS.replace(ptr::null_mut());
+        list = mem::take(&mut DTORS);
     }
 }
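This rewrite (like the similar one for the SOLID target further below) drains the destructor list with `mem::take` in a loop because a destructor may register further destructors while it runs. A minimal, purely illustrative analogue using safe thread-local storage (the names are hypothetical, not the platform code):

```
use std::cell::RefCell;
use std::mem;

thread_local! {
    static DTORS: RefCell<Vec<Box<dyn FnOnce()>>> = RefCell::new(Vec::new());
}

fn register(f: impl FnOnce() + 'static) {
    DTORS.with(|d| d.borrow_mut().push(Box::new(f)));
}

fn run_dtors() {
    let mut list = DTORS.with(|d| mem::take(&mut *d.borrow_mut()));
    while !list.is_empty() {
        for dtor in list {
            dtor(); // may call `register` and add more destructors
        }
        // Pick up anything registered by the destructors that just ran.
        list = DTORS.with(|d| mem::take(&mut *d.borrow_mut()));
    }
}

fn main() {
    register(|| register(|| println!("registered from inside a destructor")));
    run_dtors(); // runs both closures
}
```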
index 535703be33f06474724a37802df9b6d6fb4f7353..19350b83fab884d8cf3ebd0efdabb7b9de30d4a3 100644 (file)
@@ -294,7 +294,7 @@ fn drop(&mut self) {
                 // Terminate and delete the task
                 // Safety: `self.task` still represents a task we own (because
                 //         this method or `join_inner` is called only once for
-                //         each `Thread`). The task  indicated that it's safe to
+                //         each `Thread`). The task indicated that it's safe to
                 //         delete by entering the `FINISHED` state.
                 unsafe { terminate_and_delete_task(self.task) };
 
index 97356457057761014502ea74885732e75b42b248..bad14bb37f720e50707898944ca5e88212e358c9 100644 (file)
@@ -5,43 +5,35 @@
 
 use super::{abi, itron::task};
 use crate::cell::Cell;
-use crate::ptr;
+use crate::mem;
 
 #[thread_local]
-static DTORS: Cell<*mut List> = Cell::new(ptr::null_mut());
+static REGISTERED: Cell<bool> = Cell::new(false);
 
-type List = Vec<(*mut u8, unsafe extern "C" fn(*mut u8))>;
+#[thread_local]
+static mut DTORS: Vec<(*mut u8, unsafe extern "C" fn(*mut u8))> = Vec::new();
 
 pub unsafe fn register_dtor(t: *mut u8, dtor: unsafe extern "C" fn(*mut u8)) {
-    if DTORS.get().is_null() {
+    if !REGISTERED.get() {
         let tid = task::current_task_id_aborting();
-        let v: Box<List> = box Vec::new();
-        DTORS.set(Box::into_raw(v));
-
         // Register `tls_dtor` to make sure the TLS destructors are called
         // for tasks created by other means than `std::thread`
         unsafe { abi::SOLID_TLS_AddDestructor(tid as i32, tls_dtor) };
+        REGISTERED.set(true);
     }
 
-    let list: &mut List = unsafe { &mut *DTORS.get() };
+    let list = unsafe { &mut DTORS };
     list.push((t, dtor));
 }
 
 pub unsafe fn run_dtors() {
-    let ptr = DTORS.get();
-    if !ptr.is_null() {
-        // Swap the destructor list, call all registered destructors,
-        // and repeat this until the list becomes permanently empty.
-        while let Some(list) = Some(crate::mem::replace(unsafe { &mut *ptr }, Vec::new()))
-            .filter(|list| !list.is_empty())
-        {
-            for (ptr, dtor) in list.into_iter() {
-                unsafe { dtor(ptr) };
-            }
+    let mut list = mem::take(unsafe { &mut DTORS });
+    while !list.is_empty() {
+        for (ptr, dtor) in list {
+            unsafe { dtor(ptr) };
         }
 
-        // Drop the destructor list
-        unsafe { Box::from_raw(DTORS.replace(ptr::null_mut())) };
+        list = mem::take(unsafe { &mut DTORS });
     }
 }
 
index aea0c26ee8b60e8a2045208b6228a0dc5812d7b5..8e1f35d6cc92011cac5704c196d5384390e3de3c 100644 (file)
@@ -149,12 +149,13 @@ unsafe fn try_statx(
     ) -> Option<io::Result<FileAttr>> {
         use crate::sync::atomic::{AtomicU8, Ordering};
 
-        // Linux kernel prior to 4.11 or glibc prior to glibc 2.28 don't support `statx`
-        // We store the availability in global to avoid unnecessary syscalls.
-        // 0: Unknown
-        // 1: Not available
-        // 2: Available
-        static STATX_STATE: AtomicU8 = AtomicU8::new(0);
+        // Linux kernel prior to 4.11 or glibc prior to glibc 2.28 don't support `statx`.
+        // We check for it on first failure and remember availability to avoid having to
+        // do it again.
+        #[repr(u8)]
+        enum STATX_STATE { Unknown = 0, Present, Unavailable }
+        static STATX_SAVED_STATE: AtomicU8 = AtomicU8::new(STATX_STATE::Unknown as u8);
+
         syscall! {
             fn statx(
                 fd: c_int,
@@ -165,31 +166,44 @@ fn statx(
             ) -> c_int
         }
 
-        match STATX_STATE.load(Ordering::Relaxed) {
-            0 => {
-                // It is a trick to call `statx` with null pointers to check if the syscall
-                // is available. According to the manual, it is expected to fail with EFAULT.
-                // We do this mainly for performance, since it is nearly hundreds times
-                // faster than a normal successful call.
-                let err = cvt(statx(0, ptr::null(), 0, libc::STATX_ALL, ptr::null_mut()))
-                    .err()
-                    .and_then(|e| e.raw_os_error());
-                // We don't check `err == Some(libc::ENOSYS)` because the syscall may be limited
-                // and returns `EPERM`. Listing all possible errors seems not a good idea.
-                // See: https://github.com/rust-lang/rust/issues/65662
-                if err != Some(libc::EFAULT) {
-                    STATX_STATE.store(1, Ordering::Relaxed);
-                    return None;
-                }
-                STATX_STATE.store(2, Ordering::Relaxed);
-            }
-            1 => return None,
-            _ => {}
+        if STATX_SAVED_STATE.load(Ordering::Relaxed) == STATX_STATE::Unavailable as u8 {
+            return None;
         }
 
         let mut buf: libc::statx = mem::zeroed();
         if let Err(err) = cvt(statx(fd, path, flags, mask, &mut buf)) {
-            return Some(Err(err));
+            if STATX_SAVED_STATE.load(Ordering::Relaxed) == STATX_STATE::Present as u8 {
+                return Some(Err(err));
+            }
+
+            // Availability not checked yet.
+            //
+            // First try the cheap way.
+            if err.raw_os_error() == Some(libc::ENOSYS) {
+                STATX_SAVED_STATE.store(STATX_STATE::Unavailable as u8, Ordering::Relaxed);
+                return None;
+            }
+
+            // Error other than `ENOSYS` is not a good enough indicator -- it is
+            // known that `EPERM` can be returned as a result of using seccomp to
+            // block the syscall.
+            // Availability is checked by performing a call which expects `EFAULT`
+            // if the syscall is usable.
+            // See: https://github.com/rust-lang/rust/issues/65662
+            // FIXME: this could probably just do the call if `EPERM` was received, but the
+            // previous iteration of the code checked for all errors, so for now that
+            // behavior is retained.
+            // FIXME: what about transient conditions like `ENOMEM`?
+            let err2 = cvt(statx(0, ptr::null(), 0, libc::STATX_ALL, ptr::null_mut()))
+                .err()
+                .and_then(|e| e.raw_os_error());
+            if err2 == Some(libc::EFAULT) {
+                STATX_SAVED_STATE.store(STATX_STATE::Present as u8, Ordering::Relaxed);
+                return Some(Err(err));
+            } else {
+                STATX_SAVED_STATE.store(STATX_STATE::Unavailable as u8, Ordering::Relaxed);
+                return None;
+            }
         }
 
         // We cannot fill `stat64` exhaustively because of private padding fields.
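The rewritten logic above only probes for `statx` support after a failure: `ENOSYS` marks it unavailable immediately, other errors trigger the null-pointer `EFAULT` probe, and the outcome is cached in a process-wide atomic. A generic sketch of that caching pattern follows (a hypothetical helper, not the std code, and coarser than the real errno handling):

```
use std::sync::atomic::{AtomicU8, Ordering};

const UNKNOWN: u8 = 0;
const PRESENT: u8 = 1;
const UNAVAILABLE: u8 = 2;

static STATE: AtomicU8 = AtomicU8::new(UNKNOWN);

fn call_if_available<T>(call: impl Fn() -> Option<T>) -> Option<T> {
    if STATE.load(Ordering::Relaxed) == UNAVAILABLE {
        return None; // remembered from an earlier probe; skip the syscall
    }
    match call() {
        Some(v) => {
            STATE.store(PRESENT, Ordering::Relaxed);
            Some(v)
        }
        None => {
            // The real code only marks "unavailable" for specific outcomes
            // (ENOSYS, or a failed EFAULT probe), not for every error.
            STATE.store(UNAVAILABLE, Ordering::Relaxed);
            None
        }
    }
}
```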
@@ -600,13 +614,13 @@ fn next(&mut self) -> Option<io::Result<DirEntry>> {
             loop {
                 // As of POSIX.1-2017, readdir() is not required to be thread safe; only
                 // readdir_r() is. However, readdir_r() cannot correctly handle platforms
-                // with unlimited or variable NAME_MAX.  Many modern platforms guarantee
+                // with unlimited or variable NAME_MAX. Many modern platforms guarantee
                 // thread safety for readdir() as long an individual DIR* is not accessed
                 // concurrently, which is sufficient for Rust.
                 super::os::set_errno(0);
                 let entry_ptr = readdir64(self.inner.dirp.0);
                 if entry_ptr.is_null() {
-                    // We either encountered an error, or reached the end.  Either way,
+                    // We either encountered an error, or reached the end. Either way,
                     // the next call to next() should return None.
                     self.end_of_stream = true;
 
index 0f7107122b7e86ecbba33e56b46efcd52ef2fcbc..73b9bef7e2ac9b68b3b8480d27a2e288a7c4cd24 100644 (file)
@@ -587,7 +587,7 @@ fn copy_file_range(
                         // - copy_file_range file is immutable or syscall is blocked by seccomp¹ (EPERM)
                         // - copy_file_range cannot be used with pipes or device nodes (EINVAL)
                         // - the writer fd was opened with O_APPEND (EBADF²)
-                        // and no bytes were written successfully yet.  (All these errnos should
+                        // and no bytes were written successfully yet. (All these errnos should
                         // not be returned if something was already written, but they happen in
                         // the wild, see #91152.)
                         //
index 4c99d758c93a3156e1ccfafe6252443c4f4468b5..d4c7e58b34d2ef85c2d5864962928745d6eebd14 100644 (file)
@@ -262,7 +262,7 @@ pub fn signal(&self) -> Option<i32> {
     // available on Fuchsia.
     //
     // It does not appear that Fuchsia is Unix-like enough to implement ExitStatus (or indeed many
-    // other things from std::os::unix) properly.  This veneer is always going to be a bodge.  So
+    // other things from std::os::unix) properly. This veneer is always going to be a bodge. So
     // while I don't know if these implementations are actually correct, I think they will do for
     // now at least.
     pub fn core_dumped(&self) -> bool {
@@ -277,9 +277,9 @@ pub fn continued(&self) -> bool {
 
     pub fn into_raw(&self) -> c_int {
         // We don't know what someone who calls into_raw() will do with this value, but it should
-        // have the conventional Unix representation.  Despite the fact that this is not
+        // have the conventional Unix representation. Despite the fact that this is not
         // standardised in SuS or POSIX, all Unix systems encode the signal and exit status the
-        // same way.  (Ie the WIFEXITED, WEXITSTATUS etc. macros have identical behaviour on every
+        // same way. (Ie the WIFEXITED, WEXITSTATUS etc. macros have identical behaviour on every
         // Unix.)
         //
         // The caller of `std::os::unix::into_raw` is probably wanting a Unix exit status, and may
@@ -287,14 +287,14 @@ pub fn into_raw(&self) -> c_int {
         // different Unix variant.
         //
         // The other view would be to say that the caller on Fuchsia ought to know that `into_raw`
-        // will give a raw Fuchsia status (whatever that is - I don't know, personally).  That is
+        // will give a raw Fuchsia status (whatever that is - I don't know, personally). That is
         // not possible here because we must return a c_int because that's what Unix (including
         // SuS and POSIX) say a wait status is, but Fuchsia apparently uses a u64, so it won't
         // necessarily fit.
         //
         // It seems to me that the right answer would be to provide std::os::fuchsia with its
         // own ExitStatusExt, rather that trying to provide a not very convincing imitation of
-        // Unix.  Ie, std::os::unix::process:ExitStatusExt ought not to exist on Fuchsia.  But
+        // Unix. Ie, std::os::unix::process:ExitStatusExt ought not to exist on Fuchsia. But
         // fixing this up that is beyond the scope of my efforts now.
         let exit_status_as_if_unix: u8 = self.0.try_into().expect("Fuchsia process return code bigger than 8 bits, but std::os::unix::ExitStatusExt::into_raw() was called to try to convert the value into a traditional Unix-style wait status, which cannot represent values greater than 255.");
         let wait_status_as_if_unix = (exit_status_as_if_unix as c_int) << 8;
index 39d1c8b1d8ebc75d0da500c882e4683bd167ba40..3bc17b7754d85b7405c7eb33095cb8372e681f41 100644 (file)
@@ -666,11 +666,11 @@ fn exited(&self) -> bool {
     }
 
     pub fn exit_ok(&self) -> Result<(), ExitStatusError> {
-        // This assumes that WIFEXITED(status) && WEXITSTATUS==0 corresponds to status==0.  This is
+        // This assumes that WIFEXITED(status) && WEXITSTATUS==0 corresponds to status==0. This is
         // true on all actual versions of Unix, is widely assumed, and is specified in SuS
-        // https://pubs.opengroup.org/onlinepubs/9699919799/functions/wait.html .  If it is not
+        // https://pubs.opengroup.org/onlinepubs/9699919799/functions/wait.html. If it is not
         // true for a platform pretending to be Unix, the tests (our doctests, and also
-        // procsss_unix/tests.rs) will spot it.  `ExitStatusError::code` assumes this too.
+        // process_unix/tests.rs) will spot it. `ExitStatusError::code` assumes this too.
         match NonZero_c_int::try_from(self.0) {
             /* was nonzero */ Ok(failure) => Err(ExitStatusError(failure)),
             /* was zero, couldn't convert */ Err(_) => Ok(()),
@@ -746,6 +746,8 @@ fn signal_string(signal: i32) -> &'static str {
         libc::SIGWINCH => " (SIGWINCH)",
         #[cfg(not(target_os = "haiku"))]
         libc::SIGIO => " (SIGIO)",
+        #[cfg(target_os = "haiku")]
+        libc::SIGPOLL => " (SIGPOLL)",
         libc::SIGSYS => " (SIGSYS)",
         // For information on Linux signals, run `man 7 signal`
         #[cfg(all(
index 4c87f633a260919fb958b204c0cdf6c1a2ff9aaf..e5e1f956bc351e43c16878ea2d37b6b65d876025 100644 (file)
@@ -19,17 +19,17 @@ fn exitstatus_display_tests() {
     t(0x00000, "exit status: 0");
     t(0x0ff00, "exit status: 255");
 
-    // On MacOS, 0x0137f is WIFCONTINUED, not WIFSTOPPED.  Probably *BSD is similar.
+    // On MacOS, 0x0137f is WIFCONTINUED, not WIFSTOPPED. Probably *BSD is similar.
     //   https://github.com/rust-lang/rust/pull/82749#issuecomment-790525956
     // The purpose of this test is to test our string formatting, not our understanding of the wait
-    // status magic numbers.  So restrict these to Linux.
+    // status magic numbers. So restrict these to Linux.
     if cfg!(target_os = "linux") {
         t(0x0137f, "stopped (not terminated) by signal: 19 (SIGSTOP)");
         t(0x0ffff, "continued (WIFCONTINUED)");
     }
 
     // Testing "unrecognised wait status" is hard because the wait.h macros typically
-    // assume that the value came from wait and isn't mad.  With the glibc I have here
+    // assume that the value came from wait and isn't mad. With the glibc I have here
     // this works:
     if cfg!(all(target_os = "linux", target_env = "gnu")) {
         t(0x000ff, "unrecognised wait status: 255 0xff");
index f549d37c301165fad9019ca014f5770398cdcf5c..569a4b149125d618bd8847691b7b4434d8ab0cb7 100644 (file)
@@ -195,11 +195,11 @@ fn exited(&self) -> bool {
     }
 
     pub fn exit_ok(&self) -> Result<(), ExitStatusError> {
-        // This assumes that WIFEXITED(status) && WEXITSTATUS==0 corresponds to status==0.  This is
+        // This assumes that WIFEXITED(status) && WEXITSTATUS==0 corresponds to status==0. This is
         // true on all actual versions of Unix, is widely assumed, and is specified in SuS
-        // https://pubs.opengroup.org/onlinepubs/9699919799/functions/wait.html .  If it is not
+        // https://pubs.opengroup.org/onlinepubs/9699919799/functions/wait.html. If it is not
         // true for a platform pretending to be Unix, the tests (our doctests, and also
-        // procsss_unix/tests.rs) will spot it.  `ExitStatusError::code` assumes this too.
+        // process_unix/tests.rs) will spot it. `ExitStatusError::code` assumes this too.
         match NonZero_c_int::try_from(self.0) {
             Ok(failure) => Err(ExitStatusError(failure)),
             Err(_) => Ok(()),
index b251949bda207e1bf0fcfa99750e7311f4412dce..cc0e5929569729ef17de83334958ca55eddad79f 100644 (file)
@@ -49,7 +49,7 @@ unsafe impl Sync for Thread {}
 impl Thread {
     // unsafe: see thread::Builder::spawn_unchecked for safety requirements
     pub unsafe fn new(stack: usize, p: Box<dyn FnOnce()>) -> io::Result<Thread> {
-        let p = Box::into_raw(box p);
+        let p = Box::into_raw(Box::new(p));
         let mut native: libc::pthread_t = mem::zeroed();
         let mut attr: libc::pthread_attr_t = mem::zeroed();
         assert_eq!(libc::pthread_attr_init(&mut attr), 0);
@@ -73,7 +73,7 @@ pub unsafe fn new(stack: usize, p: Box<dyn FnOnce()>) -> io::Result<Thread> {
                 n => {
                     assert_eq!(n, libc::EINVAL);
                     // EINVAL means |stack_size| is either too small or not a
-                    // multiple of the system page size.  Because it's definitely
+                    // multiple of the system page size. Because it's definitely
                     // >= PTHREAD_STACK_MIN, it must be an alignment issue.
                     // Round up to the nearest page and try again.
                     let page_size = os::page_size();
@@ -755,10 +755,10 @@ pub unsafe fn init() -> Option<Guard> {
         if cfg!(all(target_os = "linux", not(target_env = "musl"))) {
             // Linux doesn't allocate the whole stack right away, and
             // the kernel has its own stack-guard mechanism to fault
-            // when growing too close to an existing mapping.  If we map
+            // when growing too close to an existing mapping. If we map
             // our own guard, then the kernel starts enforcing a rather
             // large gap above that, rendering much of the possible
-            // stack space useless.  See #43052.
+            // stack space useless. See #43052.
             //
             // Instead, we'll just note where we expect rlimit to start
             // faulting, so our handler can report "stack overflow", and
@@ -774,14 +774,14 @@ pub unsafe fn init() -> Option<Guard> {
             None
         } else if cfg!(target_os = "freebsd") {
             // FreeBSD's stack autogrows, and optionally includes a guard page
-            // at the bottom.  If we try to remap the bottom of the stack
-            // ourselves, FreeBSD's guard page moves upwards.  So we'll just use
+            // at the bottom. If we try to remap the bottom of the stack
+            // ourselves, FreeBSD's guard page moves upwards. So we'll just use
             // the builtin guard page.
             let stackptr = get_stack_start_aligned()?;
             let guardaddr = stackptr.addr();
             // Technically the number of guard pages is tunable and controlled
             // by the security.bsd.stack_guard_page sysctl, but there are
-            // few reasons to change it from the default.  The default value has
+            // few reasons to change it from the default. The default value has
             // been 1 ever since FreeBSD 11.1 and 10.4.
             const GUARD_PAGES: usize = 1;
             let guard = guardaddr..guardaddr + GUARD_PAGES * page_size;
@@ -877,9 +877,9 @@ pub unsafe fn current() -> Option<Guard> {
             } else if cfg!(all(target_os = "linux", any(target_env = "gnu", target_env = "uclibc")))
             {
                 // glibc used to include the guard area within the stack, as noted in the BUGS
-                // section of `man pthread_attr_getguardsize`.  This has been corrected starting
+                // section of `man pthread_attr_getguardsize`. This has been corrected starting
                 // with glibc 2.27, and in some distro backports, so the guard is now placed at the
-                // end (below) the stack.  There's no easy way for us to know which we have at
+                // end (below) the stack. There's no easy way for us to know which we have at
                 // runtime, so we'll just match any fault in the range right above or below the
                 // stack base to call that fault a stack overflow.
                 Some(stackaddr - guardsize..stackaddr + guardsize)
index d7fd2130f7ccec5f84ff93187651e12d3550afc0..c31fb3a48dabb474bda286a79d59a9fcfe9cc119 100644 (file)
@@ -57,39 +57,34 @@ pub unsafe fn register_dtor(t: *mut u8, dtor: unsafe extern "C" fn(*mut u8)) {
 #[cfg(target_os = "macos")]
 pub unsafe fn register_dtor(t: *mut u8, dtor: unsafe extern "C" fn(*mut u8)) {
     use crate::cell::Cell;
+    use crate::mem;
     use crate::ptr;
 
     #[thread_local]
     static REGISTERED: Cell<bool> = Cell::new(false);
+
+    #[thread_local]
+    static mut DTORS: Vec<(*mut u8, unsafe extern "C" fn(*mut u8))> = Vec::new();
+
     if !REGISTERED.get() {
         _tlv_atexit(run_dtors, ptr::null_mut());
         REGISTERED.set(true);
     }
 
-    type List = Vec<(*mut u8, unsafe extern "C" fn(*mut u8))>;
-
-    #[thread_local]
-    static DTORS: Cell<*mut List> = Cell::new(ptr::null_mut());
-    if DTORS.get().is_null() {
-        let v: Box<List> = box Vec::new();
-        DTORS.set(Box::into_raw(v));
-    }
-
     extern "C" {
         fn _tlv_atexit(dtor: unsafe extern "C" fn(*mut u8), arg: *mut u8);
     }
 
-    let list: &mut List = &mut *DTORS.get();
+    let list = &mut DTORS;
     list.push((t, dtor));
 
     unsafe extern "C" fn run_dtors(_: *mut u8) {
-        let mut ptr = DTORS.replace(ptr::null_mut());
-        while !ptr.is_null() {
-            let list = Box::from_raw(ptr);
-            for (ptr, dtor) in list.into_iter() {
+        let mut list = mem::take(&mut DTORS);
+        while !list.is_empty() {
+            for (ptr, dtor) in list {
                 dtor(ptr);
             }
-            ptr = DTORS.replace(ptr::null_mut());
+            list = mem::take(&mut DTORS);
         }
     }
 }
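
The rewrite above drops the heap-allocated destructor list in favour of a `#[thread_local] static mut Vec`, and drains it with `mem::take` in a loop because a running destructor may register further destructors. A safe-Rust analogue of that drain pattern (a sketch, using std's `thread_local!` with `RefCell` instead of the `#[thread_local]` attribute):

```rust
use std::cell::RefCell;
use std::mem;

thread_local! {
    // Stand-in for the #[thread_local] static mut DTORS above.
    static DTORS: RefCell<Vec<Box<dyn FnOnce()>>> = RefCell::new(Vec::new());
}

fn register(dtor: Box<dyn FnOnce()>) {
    DTORS.with(|d| d.borrow_mut().push(dtor));
}

fn run_dtors() {
    // Destructors may register new destructors while running, so keep
    // taking the list until it stays empty. No borrow is held while the
    // destructors execute, so re-entrant register() calls are fine.
    let mut list = DTORS.with(|d| mem::take(&mut *d.borrow_mut()));
    while !list.is_empty() {
        for dtor in list {
            dtor();
        }
        list = DTORS.with(|d| mem::take(&mut *d.borrow_mut()));
    }
}

fn main() {
    register(Box::new(|| {
        println!("first dtor runs and registers another");
        register(Box::new(|| println!("second dtor, registered during the drain")));
    }));
    run_dtors();
}
```
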
index 352337ba322371d77b89aedc5d1a7cc756351f0d..d7adeb266ed93f30918a8fab50137fec6852be98 100644 (file)
@@ -157,7 +157,7 @@ fn next(&mut self) -> Option<PathBuf> {
         // Double quotes are used as a way of introducing literal semicolons
         // (since c:\some;dir is a valid Windows path). Double quotes are not
         // themselves permitted in path names, so there is no way to escape a
-        // double quote.  Quoted regions can appear in arbitrary locations, so
+        // double quote. Quoted regions can appear in arbitrary locations, so
         //
         //   c:\foo;c:\som"e;di"r;c:\bar
         //
index c5c9e97e646fb94fa6960c1da38ba09f1265f04b..ed58c47e0907bb5aab534b92f2e6a6d4e6ca4733 100644 (file)
@@ -22,11 +22,11 @@ pub struct Thread {
 impl Thread {
     // unsafe: see thread::Builder::spawn_unchecked for safety requirements
     pub unsafe fn new(stack: usize, p: Box<dyn FnOnce()>) -> io::Result<Thread> {
-        let p = Box::into_raw(box p);
+        let p = Box::into_raw(Box::new(p));
 
         // FIXME On UNIX, we guard against stack sizes that are too small but
         // that's because pthreads enforces that stacks are at least
-        // PTHREAD_STACK_MIN bytes big.  Windows has no such lower limit, it's
+        // PTHREAD_STACK_MIN bytes big. Windows has no such lower limit, it's
         // just that below a certain threshold you can't do anything useful.
         // That threshold is application and architecture-specific, however.
         let ret = c::CreateThread(
index 1d13a7171b0355f5734dbbdc7120e6fbf58d7781..844946eda031f792603f0e363f597a4006047948 100644 (file)
@@ -30,7 +30,7 @@ pub unsafe fn register_dtor_fallback(t: *mut u8, dtor: unsafe extern "C" fn(*mut
     static DTORS: StaticKey = StaticKey::new(Some(run_dtors));
     type List = Vec<(*mut u8, unsafe extern "C" fn(*mut u8))>;
     if DTORS.get().is_null() {
-        let v: Box<List> = box Vec::new();
+        let v: Box<List> = Box::new(Vec::new());
         DTORS.set(Box::into_raw(v) as *mut u8);
     }
     let list: &mut List = &mut *(DTORS.get() as *mut List);
index b30bb7b77efb25bdc1e7e43358eaa3c702a8779b..cf7c2e05a2e9df94c60d536b97ba27e5fa48d7f9 100644 (file)
@@ -1110,8 +1110,7 @@ unsafe fn try_initialize(&'static self, init: impl FnOnce() -> T) -> Option<&'st
             let ptr = if ptr.is_null() {
                 // If the lookup returned null, we haven't initialized our own
                 // local copy, so do that now.
-                let ptr: Box<Value<T>> = box Value { inner: LazyKeyInner::new(), key: self };
-                let ptr = Box::into_raw(ptr);
+                let ptr = Box::into_raw(Box::new(Value { inner: LazyKeyInner::new(), key: self }));
                 // SAFETY: At this point we are sure there is no value inside
                 // ptr so setting it will not affect anyone else.
                 unsafe {
index 30dc4ff855315d2c0f0bca55669d580e50d7f92e..69fb529d7f563b56e310bb85cf6b7a1f8282b6f6 100644 (file)
@@ -116,7 +116,7 @@ pub fn test_main(args: &[String], tests: Vec<TestDescAndFn>, options: Option<Opt
     } else {
         if !opts.nocapture {
             // If we encounter a non-unwinding panic, flush any captured output from the current test,
-            // and stop  capturing output to ensure that the non-unwinding panic message is visible.
+            // and stop capturing output to ensure that the non-unwinding panic message is visible.
             // We also acquire the locks for both output streams to prevent output from other threads
             // from interleaving with the panic message or appearing after it.
             let builtin_panic_hook = panic::take_hook();
index 87b91f34498a37085fd81b98fbbe8e60f798e54f..3e8ccc91ab0517fd8ef676174a3e1d56c5ef9d59 100644 (file)
@@ -30,7 +30,7 @@ pub(crate) fn get_dbpath_for_term(term: &str) -> Option<PathBuf> {
         }
     } else {
         // Found nothing in TERMINFO_DIRS, use the default paths:
-        // According to  /etc/terminfo/README, after looking at
+        // According to /etc/terminfo/README, after looking at
         // ~/.terminfo, ncurses will search /etc/terminfo, then
         // /lib/terminfo, and eventually /usr/share/terminfo.
         // On Haiku the database can be found at /boot/system/data/terminfo
index 7128d542acfe9d0609c165c3c7b074bd904c2d0c..65c882fb801e5b736e8958cc7b5fcf381a67a0b5 100644 (file)
@@ -47,6 +47,8 @@ fn cc2ar(cc: &Path, target: TargetSelection) -> Option<PathBuf> {
         Some(PathBuf::from("ar"))
     } else if target.contains("vxworks") {
         Some(PathBuf::from("wr-ar"))
+    } else if target.contains("android") {
+        Some(cc.parent().unwrap().join(PathBuf::from("llvm-ar")))
     } else {
         let parent = cc.parent().unwrap();
         let file = cc.file_name().unwrap().to_str().unwrap();
@@ -219,12 +221,22 @@ fn set_compiler(
 }
 
 pub(crate) fn ndk_compiler(compiler: Language, triple: &str, ndk: &Path) -> PathBuf {
-    let triple_translated = triple
-        .replace("armv7neon", "arm")
-        .replace("armv7", "arm")
-        .replace("thumbv7neon", "arm")
-        .replace("thumbv7", "arm");
-    let compiler = format!("{}-{}", triple_translated, compiler.clang());
+    let mut triple_iter = triple.split("-");
+    let triple_translated = if let Some(arch) = triple_iter.next() {
+        let arch_new = match arch {
+            "arm" | "armv7" | "armv7neon" | "thumbv7" | "thumbv7neon" => "armv7a",
+            other => other,
+        };
+        std::iter::once(arch_new).chain(triple_iter).collect::<Vec<&str>>().join("-")
+    } else {
+        triple.to_string()
+    };
+
+    // API 19 is the earliest API level supported by NDK r25b but AArch64 and x86_64 support
+    // begins at API level 21.
+    let api_level =
+        if triple.contains("aarch64") || triple.contains("x86_64") { "21" } else { "19" };
+    let compiler = format!("{}{}-{}", triple_translated, api_level, compiler.clang());
     ndk.join("bin").join(compiler)
 }
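
A hedged, self-contained re-creation of the triple translation above, showing the compiler wrapper names it should produce for the NDK r25 prebuilt toolchain (illustrative only, for the C compiler; the real function also joins the NDK's `bin` directory and handles C++):

```rust
// Sketch of the arch translation plus API-level suffix described in the hunk above.
fn ndk_clang_name(triple: &str) -> String {
    let mut parts = triple.split('-');
    let arch = match parts.next().unwrap_or("") {
        "arm" | "armv7" | "armv7neon" | "thumbv7" | "thumbv7neon" => "armv7a",
        other => other,
    };
    let translated = std::iter::once(arch).chain(parts).collect::<Vec<_>>().join("-");
    // AArch64 and x86_64 support starts at API level 21; others use 19.
    let api = if triple.contains("aarch64") || triple.contains("x86_64") { "21" } else { "19" };
    format!("{translated}{api}-clang")
}

fn main() {
    assert_eq!(ndk_clang_name("arm-linux-androideabi"), "armv7a-linux-androideabi19-clang");
    assert_eq!(ndk_clang_name("aarch64-linux-android"), "aarch64-linux-android21-clang");
}
```
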
 
index 2bff0f21b53701dc2d6bcefaffd8457bc6f76364..2d86ff1d2baeafff219b642ca5e5338bfd84cf01 100644 (file)
@@ -1140,12 +1140,6 @@ fn run(self, builder: &Builder<'_>) -> Option<GeneratedTarball> {
         let compiler = self.compiler;
         let target = self.target;
 
-        if target.contains("riscv64") {
-            // riscv64 currently has an LLVM bug that makes rust-analyzer unable
-            // to build. See #74813 for details.
-            return None;
-        }
-
         let rust_analyzer = builder
             .ensure(tool::RustAnalyzer { compiler, target })
             .expect("rust-analyzer always builds");
index e0d1504c9c780a85ae83d32f81ae8fc0c7086375..cb5706ca0a6516f838cc56581d580bfdf34207f8 100644 (file)
@@ -1105,6 +1105,12 @@ fn supported_sanitizers(
         "x86_64-unknown-linux-musl" => {
             common_libs("linux", "x86_64", &["asan", "lsan", "msan", "tsan"])
         }
+        "s390x-unknown-linux-gnu" => {
+            common_libs("linux", "s390x", &["asan", "lsan", "msan", "tsan"])
+        }
+        "s390x-unknown-linux-musl" => {
+            common_libs("linux", "s390x", &["asan", "lsan", "msan", "tsan"])
+        }
         _ => Vec::new(),
     }
 }
index 7a875c960e13312d77d11983c0f4bab304b4c554..b6b4fdc67a94905a2d6e28f2b56d01c44ceeb6dc 100644 (file)
@@ -1,4 +1,4 @@
-FROM ubuntu:22.04
+FROM ubuntu:22.10
 
 ARG DEBIAN_FRONTEND=noninteractive
 COPY scripts/android-base-apt-get.sh /scripts/
@@ -6,7 +6,7 @@ RUN sh /scripts/android-base-apt-get.sh
 
 COPY scripts/android-ndk.sh /scripts/
 RUN . /scripts/android-ndk.sh && \
-    download_and_make_toolchain android-ndk-r15c-linux-x86_64.zip arm 14
+    download_ndk android-ndk-r25b-linux.zip
 
 RUN dpkg --add-architecture i386 && \
     apt-get update && \
@@ -30,7 +30,7 @@ ENV PATH=$PATH:/android/sdk/platform-tools
 
 ENV TARGETS=arm-linux-androideabi
 
-ENV RUST_CONFIGURE_ARGS --arm-linux-androideabi-ndk=/android/ndk/arm-14
+ENV RUST_CONFIGURE_ARGS --arm-linux-androideabi-ndk=/android/ndk/toolchains/llvm/prebuilt/linux-x86_64/
 
 ENV SCRIPT python3 ../x.py --stage 2 test --host='' --target $TARGETS
 
index 2328db4ab8b1d126dd774a67a744afb990bde1e0..9c6f648896b5127559decfa7d37c229650ca7173 100644 (file)
@@ -1,4 +1,4 @@
-FROM ubuntu:22.04
+FROM ubuntu:22.10
 
 COPY scripts/android-base-apt-get.sh /scripts/
 RUN sh /scripts/android-base-apt-get.sh
@@ -6,14 +6,7 @@ RUN sh /scripts/android-base-apt-get.sh
 # ndk
 COPY scripts/android-ndk.sh /scripts/
 RUN . /scripts/android-ndk.sh && \
-    download_ndk android-ndk-r15c-linux-x86_64.zip && \
-    make_standalone_toolchain arm 14 && \
-    make_standalone_toolchain x86 14 && \
-    make_standalone_toolchain arm 21 && \
-    make_standalone_toolchain x86 21 && \
-    make_standalone_toolchain arm64 21 && \
-    make_standalone_toolchain x86_64 21 && \
-    remove_ndk
+    download_ndk android-ndk-r25b-linux.zip
 
 # env
 ENV TARGETS=arm-linux-androideabi
@@ -26,12 +19,12 @@ ENV TARGETS=$TARGETS,x86_64-linux-android
 ENV RUST_CONFIGURE_ARGS \
       --enable-extended \
       --enable-profiler \
-      --arm-linux-androideabi-ndk=/android/ndk/arm-14 \
-      --armv7-linux-androideabi-ndk=/android/ndk/arm-14 \
-      --thumbv7neon-linux-androideabi-ndk=/android/ndk/arm-14 \
-      --i686-linux-android-ndk=/android/ndk/x86-14 \
-      --aarch64-linux-android-ndk=/android/ndk/arm64-21 \
-      --x86_64-linux-android-ndk=/android/ndk/x86_64-21 \
+      --arm-linux-androideabi-ndk=/android/ndk/toolchains/llvm/prebuilt/linux-x86_64/ \
+      --armv7-linux-androideabi-ndk=/android/ndk/toolchains/llvm/prebuilt/linux-x86_64/ \
+      --thumbv7neon-linux-androideabi-ndk=/android/ndk/toolchains/llvm/prebuilt/linux-x86_64/ \
+      --i686-linux-android-ndk=/android/ndk/toolchains/llvm/prebuilt/linux-x86_64/ \
+      --aarch64-linux-android-ndk=/android/ndk/toolchains/llvm/prebuilt/linux-x86_64/ \
+      --x86_64-linux-android-ndk=/android/ndk/toolchains/llvm/prebuilt/linux-x86_64/ \
       --disable-docs
 
 ENV SCRIPT python3 ../x.py dist --host='' --target $TARGETS
index 43a449b3a1926baa307e1908dbdd1a6a1c2260c8..adb98d7ebb54541e83fbd61d1c877a169119a1cd 100644 (file)
@@ -28,5 +28,5 @@ ENV \
 
 ENV HOSTS=s390x-unknown-linux-gnu
 
-ENV RUST_CONFIGURE_ARGS --enable-extended --enable-lld --enable-profiler --disable-docs
+ENV RUST_CONFIGURE_ARGS --enable-extended --enable-lld --enable-sanitizers --enable-profiler --disable-docs
 ENV SCRIPT python3 ../x.py dist --host $HOSTS --target $HOSTS
index dc0e591cad6f6f9aec51a1ab2eec129f01e9988b..889a586b351d6f30f700d433461b5951fd80f92a 100644 (file)
@@ -1,8 +1,6 @@
-FROM ubuntu:18.04
-# FIXME: when bumping the version, remove the Python 3.6-specific changes in
-# the reuse-requirements.in file, regenerate reuse-requirements.txt and remove
-# this comment.
+FROM ubuntu:22.04
 
+ARG DEBIAN_FRONTEND=noninteractive
 RUN apt-get update && apt-get install -y --no-install-recommends \
   g++ \
   make \
index d5bc76eeb23daee6779672485caf84fcf53ac877..4cc5d9f8a0dafd08bb59e6b92feb618fba17ad02 100644 (file)
@@ -1,8 +1,6 @@
-FROM ubuntu:18.04
-# FIXME: when bumping the version, remove the Python 3.6-specific changes in
-# the reuse-requirements.in file, regenerate reuse-requirements.txt and remove
-# this comment.
+FROM ubuntu:22.04
 
+ARG DEBIAN_FRONTEND=noninteractive
 RUN apt-get update && apt-get install -y --no-install-recommends \
   g++ \
   make \
index 4964f40aa39adaf30ae30d846d981655df2635e5..c7b3376e2f1fb97d06a6153719677f8db7365467 100644 (file)
 #
 
 reuse
-
-# Some packages dropped support for Python 3.6, which is the version used in
-# this builder (due to Ubuntu 18.04). This should be removed once we bump the
-# Ubuntu version of the builder.
-jinja2 < 3.1
-markupsafe < 2.1
-requests < 2.28
-setuptools < 59.7
index 10a5f73879082ce111af651a779a5c0c1bf2c2af..b0f598f77ea6f38bfebc810621affead94a7d82c 100644 (file)
@@ -1,6 +1,6 @@
 #
-# This file is autogenerated by pip-compile with python 3.10
-# To update, run:
+# This file is autogenerated by pip-compile with Python 3.10
+# by the following command:
 #
 #    pip-compile --allow-unsafe --generate-hashes reuse-requirements.in
 #
@@ -8,138 +8,77 @@ binaryornot==0.4.4 \
     --hash=sha256:359501dfc9d40632edc9fac890e19542db1a287bbcfa58175b66658392018061 \
     --hash=sha256:b8b71173c917bddcd2c16070412e369c3ed7f0528926f70cac18a6c97fd563e4
     # via reuse
-boolean-py==3.8 \
-    --hash=sha256:cc24e20f985d60cd4a3a5a1c0956dd12611159d32a75081dabd0c9ab981acaa4 \
-    --hash=sha256:d75da0fd0354425fa64f6bbc6cec6ae1485d0eec3447b73187ff8cbf9b572e26
+boolean-py==4.0 \
+    --hash=sha256:17b9a181630e43dde1851d42bef546d616d5d9b4480357514597e78b203d06e4 \
+    --hash=sha256:2876f2051d7d6394a531d82dc6eb407faa0b01a0a0b3083817ccd7323b8d96bd
     # via
     #   license-expression
     #   reuse
-certifi==2022.6.15 \
-    --hash=sha256:84c85a9078b11105f04f3036a9482ae10e4621616db313fe045dd24743a0820d \
-    --hash=sha256:fe86415d55e84719d75f8b69414f6438ac3547d2078ab91b67e779ef69378412
-    # via requests
-chardet==5.0.0 \
-    --hash=sha256:0368df2bfd78b5fc20572bb4e9bb7fb53e2c094f60ae9993339e8671d0afb8aa \
-    --hash=sha256:d3e64f022d254183001eccc5db4040520c0f23b1a3f33d6413e099eb7f126557
+chardet==5.1.0 \
+    --hash=sha256:0d62712b956bc154f85fb0a266e2a3c5913c2967e00348701b32411d6def31e5 \
+    --hash=sha256:362777fb014af596ad31334fde1e8c327dfdb076e1960d1694662d46a6917ab9
     # via
     #   binaryornot
     #   python-debian
-charset-normalizer==2.0.12 \
-    --hash=sha256:2857e29ff0d34db842cd7ca3230549d1a697f96ee6d3fb071cfa6c7393832597 \
-    --hash=sha256:6881edbebdb17b39b4eaaa821b438bf6eddffb4468cf344f09f89def34a8b1df
-    # via requests
-idna==3.3 \
-    --hash=sha256:84d9dd047ffa80596e0f246e2eab0b391788b0503584e8945f2368256d2735ff \
-    --hash=sha256:9d643ff0a55b762d5cdb124b8eaa99c66322e2157b69160bc32796e824360e6d
-    # via requests
-jinja2==3.0.3 \
-    --hash=sha256:077ce6014f7b40d03b47d1f1ca4b0fc8328a692bd284016f806ed0eaca390ad8 \
-    --hash=sha256:611bb273cd68f3b993fabdc4064fc858c5b47a973cb5aa7999ec1ba405c87cd7
-    # via
-    #   -r reuse-requirements.in
-    #   reuse
-license-expression==21.6.14 \
-    --hash=sha256:324246eed8e138b4139fefdc0e9dc4161d5075e3929e56983966d37298dca30e \
-    --hash=sha256:9de87a427c9a449eee7913472fb9ed03b63036295547369fdbf95f76a8b924b2
-    # via
-    #   -r reuse-requirements.in
-    #   reuse
-markupsafe==2.0.1 \
-    --hash=sha256:01a9b8ea66f1658938f65b93a85ebe8bc016e6769611be228d797c9d998dd298 \
-    --hash=sha256:023cb26ec21ece8dc3907c0e8320058b2e0cb3c55cf9564da612bc325bed5e64 \
-    --hash=sha256:0446679737af14f45767963a1a9ef7620189912317d095f2d9ffa183a4d25d2b \
-    --hash=sha256:04635854b943835a6ea959e948d19dcd311762c5c0c6e1f0e16ee57022669194 \
-    --hash=sha256:0717a7390a68be14b8c793ba258e075c6f4ca819f15edfc2a3a027c823718567 \
-    --hash=sha256:0955295dd5eec6cb6cc2fe1698f4c6d84af2e92de33fbcac4111913cd100a6ff \
-    --hash=sha256:0d4b31cc67ab36e3392bbf3862cfbadac3db12bdd8b02a2731f509ed5b829724 \
-    --hash=sha256:10f82115e21dc0dfec9ab5c0223652f7197feb168c940f3ef61563fc2d6beb74 \
-    --hash=sha256:168cd0a3642de83558a5153c8bd34f175a9a6e7f6dc6384b9655d2697312a646 \
-    --hash=sha256:1d609f577dc6e1aa17d746f8bd3c31aa4d258f4070d61b2aa5c4166c1539de35 \
-    --hash=sha256:1f2ade76b9903f39aa442b4aadd2177decb66525062db244b35d71d0ee8599b6 \
-    --hash=sha256:20dca64a3ef2d6e4d5d615a3fd418ad3bde77a47ec8a23d984a12b5b4c74491a \
-    --hash=sha256:2a7d351cbd8cfeb19ca00de495e224dea7e7d919659c2841bbb7f420ad03e2d6 \
-    --hash=sha256:2d7d807855b419fc2ed3e631034685db6079889a1f01d5d9dac950f764da3dad \
-    --hash=sha256:2ef54abee730b502252bcdf31b10dacb0a416229b72c18b19e24a4509f273d26 \
-    --hash=sha256:36bc903cbb393720fad60fc28c10de6acf10dc6cc883f3e24ee4012371399a38 \
-    --hash=sha256:37205cac2a79194e3750b0af2a5720d95f786a55ce7df90c3af697bfa100eaac \
-    --hash=sha256:3c112550557578c26af18a1ccc9e090bfe03832ae994343cfdacd287db6a6ae7 \
-    --hash=sha256:3dd007d54ee88b46be476e293f48c85048603f5f516008bee124ddd891398ed6 \
-    --hash=sha256:4296f2b1ce8c86a6aea78613c34bb1a672ea0e3de9c6ba08a960efe0b0a09047 \
-    --hash=sha256:47ab1e7b91c098ab893b828deafa1203de86d0bc6ab587b160f78fe6c4011f75 \
-    --hash=sha256:49e3ceeabbfb9d66c3aef5af3a60cc43b85c33df25ce03d0031a608b0a8b2e3f \
-    --hash=sha256:4dc8f9fb58f7364b63fd9f85013b780ef83c11857ae79f2feda41e270468dd9b \
-    --hash=sha256:4efca8f86c54b22348a5467704e3fec767b2db12fc39c6d963168ab1d3fc9135 \
-    --hash=sha256:53edb4da6925ad13c07b6d26c2a852bd81e364f95301c66e930ab2aef5b5ddd8 \
-    --hash=sha256:5855f8438a7d1d458206a2466bf82b0f104a3724bf96a1c781ab731e4201731a \
-    --hash=sha256:594c67807fb16238b30c44bdf74f36c02cdf22d1c8cda91ef8a0ed8dabf5620a \
-    --hash=sha256:5b6d930f030f8ed98e3e6c98ffa0652bdb82601e7a016ec2ab5d7ff23baa78d1 \
-    --hash=sha256:5bb28c636d87e840583ee3adeb78172efc47c8b26127267f54a9c0ec251d41a9 \
-    --hash=sha256:60bf42e36abfaf9aff1f50f52644b336d4f0a3fd6d8a60ca0d054ac9f713a864 \
-    --hash=sha256:611d1ad9a4288cf3e3c16014564df047fe08410e628f89805e475368bd304914 \
-    --hash=sha256:6300b8454aa6930a24b9618fbb54b5a68135092bc666f7b06901f897fa5c2fee \
-    --hash=sha256:63f3268ba69ace99cab4e3e3b5840b03340efed0948ab8f78d2fd87ee5442a4f \
-    --hash=sha256:6557b31b5e2c9ddf0de32a691f2312a32f77cd7681d8af66c2692efdbef84c18 \
-    --hash=sha256:693ce3f9e70a6cf7d2fb9e6c9d8b204b6b39897a2c4a1aa65728d5ac97dcc1d8 \
-    --hash=sha256:6a7fae0dd14cf60ad5ff42baa2e95727c3d81ded453457771d02b7d2b3f9c0c2 \
-    --hash=sha256:6c4ca60fa24e85fe25b912b01e62cb969d69a23a5d5867682dd3e80b5b02581d \
-    --hash=sha256:6fcf051089389abe060c9cd7caa212c707e58153afa2c649f00346ce6d260f1b \
-    --hash=sha256:7d91275b0245b1da4d4cfa07e0faedd5b0812efc15b702576d103293e252af1b \
-    --hash=sha256:89c687013cb1cd489a0f0ac24febe8c7a666e6e221b783e53ac50ebf68e45d86 \
-    --hash=sha256:8d206346619592c6200148b01a2142798c989edcb9c896f9ac9722a99d4e77e6 \
-    --hash=sha256:905fec760bd2fa1388bb5b489ee8ee5f7291d692638ea5f67982d968366bef9f \
-    --hash=sha256:97383d78eb34da7e1fa37dd273c20ad4320929af65d156e35a5e2d89566d9dfb \
-    --hash=sha256:984d76483eb32f1bcb536dc27e4ad56bba4baa70be32fa87152832cdd9db0833 \
-    --hash=sha256:99df47edb6bda1249d3e80fdabb1dab8c08ef3975f69aed437cb69d0a5de1e28 \
-    --hash=sha256:9f02365d4e99430a12647f09b6cc8bab61a6564363f313126f775eb4f6ef798e \
-    --hash=sha256:a30e67a65b53ea0a5e62fe23682cfe22712e01f453b95233b25502f7c61cb415 \
-    --hash=sha256:ab3ef638ace319fa26553db0624c4699e31a28bb2a835c5faca8f8acf6a5a902 \
-    --hash=sha256:aca6377c0cb8a8253e493c6b451565ac77e98c2951c45f913e0b52facdcff83f \
-    --hash=sha256:add36cb2dbb8b736611303cd3bfcee00afd96471b09cda130da3581cbdc56a6d \
-    --hash=sha256:b2f4bf27480f5e5e8ce285a8c8fd176c0b03e93dcc6646477d4630e83440c6a9 \
-    --hash=sha256:b7f2d075102dc8c794cbde1947378051c4e5180d52d276987b8d28a3bd58c17d \
-    --hash=sha256:baa1a4e8f868845af802979fcdbf0bb11f94f1cb7ced4c4b8a351bb60d108145 \
-    --hash=sha256:be98f628055368795d818ebf93da628541e10b75b41c559fdf36d104c5787066 \
-    --hash=sha256:bf5d821ffabf0ef3533c39c518f3357b171a1651c1ff6827325e4489b0e46c3c \
-    --hash=sha256:c47adbc92fc1bb2b3274c4b3a43ae0e4573d9fbff4f54cd484555edbf030baf1 \
-    --hash=sha256:cdfba22ea2f0029c9261a4bd07e830a8da012291fbe44dc794e488b6c9bb353a \
-    --hash=sha256:d6c7ebd4e944c85e2c3421e612a7057a2f48d478d79e61800d81468a8d842207 \
-    --hash=sha256:d7f9850398e85aba693bb640262d3611788b1f29a79f0c93c565694658f4071f \
-    --hash=sha256:d8446c54dc28c01e5a2dbac5a25f071f6653e6e40f3a8818e8b45d790fe6ef53 \
-    --hash=sha256:deb993cacb280823246a026e3b2d81c493c53de6acfd5e6bfe31ab3402bb37dd \
-    --hash=sha256:e0f138900af21926a02425cf736db95be9f4af72ba1bb21453432a07f6082134 \
-    --hash=sha256:e9936f0b261d4df76ad22f8fee3ae83b60d7c3e871292cd42f40b81b70afae85 \
-    --hash=sha256:f0567c4dc99f264f49fe27da5f735f414c4e7e7dd850cfd8e69f0862d7c74ea9 \
-    --hash=sha256:f5653a225f31e113b152e56f154ccbe59eeb1c7487b39b9d9f9cdb58e6c79dc5 \
-    --hash=sha256:f826e31d18b516f653fe296d967d700fddad5901ae07c622bb3705955e1faa94 \
-    --hash=sha256:f8ba0e8349a38d3001fae7eadded3f6606f0da5d748ee53cc1dab1d6527b9509 \
-    --hash=sha256:f9081981fe268bd86831e5c75f7de206ef275defcb82bc70740ae6dc507aee51 \
-    --hash=sha256:fa130dd50c57d53368c9d59395cb5526eda596d3ffe36666cd81a44d56e48872
-    # via
-    #   -r reuse-requirements.in
-    #   jinja2
-python-debian==0.1.44 \
-    --hash=sha256:11bd6f01c46da57982bdd66dd595e2d240feb32a85de3fd37c452102fd0337ab \
-    --hash=sha256:65592fe3b64f6c6c93d94e2d2599db5e0c22831d3bcff07cb7b96d3840b1333e
+jinja2==3.1.2 \
+    --hash=sha256:31351a702a408a9e7595a8fc6150fc3f43bb6bf7e319770cbc0db9df9437e852 \
+    --hash=sha256:6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61
     # via reuse
-requests==2.26.0 \
-    --hash=sha256:6c1246513ecd5ecd4528a0906f910e8f0f9c6b8ec72030dc9fd154dc1a6efd24 \
-    --hash=sha256:b8aa58f8cf793ffd8782d3d8cb19e66ef36f7aba4353eec859e74678b01b07a7
-    # via
-    #   -r reuse-requirements.in
-    #   reuse
-reuse==1.0.0 \
-    --hash=sha256:db3022be2d87f69c8f508b928023de3026f454ce17d01e22f770f7147ac1e8d4 \
-    --hash=sha256:e2605e796311c424465d741ea2a1e1ad03bbb90b921d74750119c331ca5af46e
+license-expression==30.0.0 \
+    --hash=sha256:ad638292aa8493f84354909b517922cb823582c2ce2c4d880e42544a86bea8dd \
+    --hash=sha256:e95325110110eb2b7539ee7773b97a0724d5371ec563cc718c8cac0e38cc40cc
+    # via reuse
+markupsafe==2.1.1 \
+    --hash=sha256:0212a68688482dc52b2d45013df70d169f542b7394fc744c02a57374a4207003 \
+    --hash=sha256:089cf3dbf0cd6c100f02945abeb18484bd1ee57a079aefd52cffd17fba910b88 \
+    --hash=sha256:10c1bfff05d95783da83491be968e8fe789263689c02724e0c691933c52994f5 \
+    --hash=sha256:33b74d289bd2f5e527beadcaa3f401e0df0a89927c1559c8566c066fa4248ab7 \
+    --hash=sha256:3799351e2336dc91ea70b034983ee71cf2f9533cdff7c14c90ea126bfd95d65a \
+    --hash=sha256:3ce11ee3f23f79dbd06fb3d63e2f6af7b12db1d46932fe7bd8afa259a5996603 \
+    --hash=sha256:421be9fbf0ffe9ffd7a378aafebbf6f4602d564d34be190fc19a193232fd12b1 \
+    --hash=sha256:43093fb83d8343aac0b1baa75516da6092f58f41200907ef92448ecab8825135 \
+    --hash=sha256:46d00d6cfecdde84d40e572d63735ef81423ad31184100411e6e3388d405e247 \
+    --hash=sha256:4a33dea2b688b3190ee12bd7cfa29d39c9ed176bda40bfa11099a3ce5d3a7ac6 \
+    --hash=sha256:4b9fe39a2ccc108a4accc2676e77da025ce383c108593d65cc909add5c3bd601 \
+    --hash=sha256:56442863ed2b06d19c37f94d999035e15ee982988920e12a5b4ba29b62ad1f77 \
+    --hash=sha256:671cd1187ed5e62818414afe79ed29da836dde67166a9fac6d435873c44fdd02 \
+    --hash=sha256:694deca8d702d5db21ec83983ce0bb4b26a578e71fbdbd4fdcd387daa90e4d5e \
+    --hash=sha256:6a074d34ee7a5ce3effbc526b7083ec9731bb3cbf921bbe1d3005d4d2bdb3a63 \
+    --hash=sha256:6d0072fea50feec76a4c418096652f2c3238eaa014b2f94aeb1d56a66b41403f \
+    --hash=sha256:6fbf47b5d3728c6aea2abb0589b5d30459e369baa772e0f37a0320185e87c980 \
+    --hash=sha256:7f91197cc9e48f989d12e4e6fbc46495c446636dfc81b9ccf50bb0ec74b91d4b \
+    --hash=sha256:86b1f75c4e7c2ac2ccdaec2b9022845dbb81880ca318bb7a0a01fbf7813e3812 \
+    --hash=sha256:8dc1c72a69aa7e082593c4a203dcf94ddb74bb5c8a731e4e1eb68d031e8498ff \
+    --hash=sha256:8e3dcf21f367459434c18e71b2a9532d96547aef8a871872a5bd69a715c15f96 \
+    --hash=sha256:8e576a51ad59e4bfaac456023a78f6b5e6e7651dcd383bcc3e18d06f9b55d6d1 \
+    --hash=sha256:96e37a3dc86e80bf81758c152fe66dbf60ed5eca3d26305edf01892257049925 \
+    --hash=sha256:97a68e6ada378df82bc9f16b800ab77cbf4b2fada0081794318520138c088e4a \
+    --hash=sha256:99a2a507ed3ac881b975a2976d59f38c19386d128e7a9a18b7df6fff1fd4c1d6 \
+    --hash=sha256:a49907dd8420c5685cfa064a1335b6754b74541bbb3706c259c02ed65b644b3e \
+    --hash=sha256:b09bf97215625a311f669476f44b8b318b075847b49316d3e28c08e41a7a573f \
+    --hash=sha256:b7bd98b796e2b6553da7225aeb61f447f80a1ca64f41d83612e6139ca5213aa4 \
+    --hash=sha256:b87db4360013327109564f0e591bd2a3b318547bcef31b468a92ee504d07ae4f \
+    --hash=sha256:bcb3ed405ed3222f9904899563d6fc492ff75cce56cba05e32eff40e6acbeaa3 \
+    --hash=sha256:d4306c36ca495956b6d568d276ac11fdd9c30a36f1b6eb928070dc5360b22e1c \
+    --hash=sha256:d5ee4f386140395a2c818d149221149c54849dfcfcb9f1debfe07a8b8bd63f9a \
+    --hash=sha256:dda30ba7e87fbbb7eab1ec9f58678558fd9a6b8b853530e176eabd064da81417 \
+    --hash=sha256:e04e26803c9c3851c931eac40c695602c6295b8d432cbe78609649ad9bd2da8a \
+    --hash=sha256:e1c0b87e09fa55a220f058d1d49d3fb8df88fbfab58558f1198e08c1e1de842a \
+    --hash=sha256:e72591e9ecd94d7feb70c1cbd7be7b3ebea3f548870aa91e2732960fa4d57a37 \
+    --hash=sha256:e8c843bbcda3a2f1e3c2ab25913c80a3c5376cd00c6e8c4a86a89a28c8dc5452 \
+    --hash=sha256:efc1913fd2ca4f334418481c7e595c00aad186563bbc1ec76067848c7ca0a933 \
+    --hash=sha256:f121a1420d4e173a5d96e47e9a0c0dcff965afdf1626d28de1460815f7c4ee7a \
+    --hash=sha256:fc7b548b17d238737688817ab67deebb30e8073c95749d55538ed473130ec0c7
+    # via jinja2
+python-debian==0.1.49 \
+    --hash=sha256:880f3bc52e31599f2a9b432bd7691844286825087fccdcf2f6ffd5cd79a26f9f \
+    --hash=sha256:8cf677a30dbcb4be7a99536c17e11308a827a4d22028dc59a67f6c6dd3f0f58c
+    # via reuse
+reuse==1.1.0 \
+    --hash=sha256:7a054f6e372ad02d0b1b07368030fc38746b50ed45f5422a81994e7a88b52f1f \
+    --hash=sha256:b0f3fb9091ff513af04b555d14a4c529ab05f6a575ab192dd9b68244f1e0721d
     # via -r reuse-requirements.in
-urllib3==1.26.10 \
-    --hash=sha256:8298d6d56d39be0e3bc13c1c97d133f9b45d797169a0e11cdd0e0489d786f7ec \
-    --hash=sha256:879ba4d1e89654d9769ce13121e0f94310ea32e8d2f8cf587b77c08bbcdb30d6
-    # via requests
-
-# The following packages are considered to be unsafe in a requirements file:
-setuptools==59.6.0 \
-    --hash=sha256:22c7348c6d2976a52632c67f7ab0cdf40147db7789f9aed18734643fe9cf3373 \
-    --hash=sha256:4ce92f1e1f8f01233ee9952c04f6b81d1e02939d6e1b488428154974a4d0783e
-    # via
-    #   -r reuse-requirements.in
-    #   reuse
+setuptools==66.0.0 \
+    --hash=sha256:a78d01d1e2c175c474884671dde039962c9d74c7223db7369771fcf6e29ceeab \
+    --hash=sha256:bd6eb2d6722568de6d14b87c44a96fac54b2a45ff5e940e639979a3d1792adb6
+    # via reuse
diff --git a/src/ci/docker/host-x86_64/x86_64-gnu-llvm-14/Dockerfile b/src/ci/docker/host-x86_64/x86_64-gnu-llvm-14/Dockerfile
new file mode 100644 (file)
index 0000000..db6032f
--- /dev/null
@@ -0,0 +1,67 @@
+FROM ubuntu:22.04
+
+ARG DEBIAN_FRONTEND=noninteractive
+
+# NOTE: intentionally installs both python2 and python3 so we can test support for both.
+RUN apt-get update && apt-get install -y --no-install-recommends \
+  g++ \
+  gcc-multilib \
+  make \
+  ninja-build \
+  file \
+  curl \
+  ca-certificates \
+  python2.7 \
+  python3 \
+  git \
+  cmake \
+  sudo \
+  gdb \
+  llvm-14-tools \
+  llvm-14-dev \
+  libedit-dev \
+  libssl-dev \
+  pkg-config \
+  zlib1g-dev \
+  xz-utils \
+  nodejs \
+  && rm -rf /var/lib/apt/lists/*
+
+# Install powershell (universal package) so we can test x.ps1 on Linux
+RUN curl -sL "https://github.com/PowerShell/PowerShell/releases/download/v7.3.1/powershell_7.3.1-1.deb_amd64.deb" > powershell.deb && \
+    dpkg -i powershell.deb && \
+    rm -f powershell.deb
+
+COPY scripts/sccache.sh /scripts/
+RUN sh /scripts/sccache.sh
+
+# We are disabling CI LLVM since this builder is intentionally using a host
+# LLVM, rather than the typical src/llvm-project LLVM.
+ENV NO_DOWNLOAD_CI_LLVM 1
+
+# Using llvm-link-shared due to libffi issues -- see #34486
+ENV RUST_CONFIGURE_ARGS \
+      --build=x86_64-unknown-linux-gnu \
+      --llvm-root=/usr/lib/llvm-14 \
+      --enable-llvm-link-shared \
+      --set rust.thin-lto-import-instr-limit=10
+
+# NOTE: intentionally uses all of `x.py`, `x`, and `x.ps1` to make sure they all work on Linux.
+ENV SCRIPT ../x.py --stage 2 test --exclude src/tools/tidy && \
+           # Run the `mir-opt` tests again but this time for a 32-bit target.
+           # This enforces that tests using `// EMIT_MIR_FOR_EACH_BIT_WIDTH` have
+           # both 32-bit and 64-bit outputs updated by the PR author, before
+           # the PR is approved and tested for merging.
+           # It will also detect tests lacking `// EMIT_MIR_FOR_EACH_BIT_WIDTH`,
+           # despite having different output on 32-bit vs 64-bit targets.
+           ../x --stage 2 test tests/mir-opt \
+                             --host='' --target=i686-unknown-linux-gnu && \
+           # Run the UI test suite again, but in `--pass=check` mode
+           #
+           # This is intended to make sure that `--pass=check` continues to
+           # work.
+           #
+           ../x.ps1 --stage 2 test tests/ui --pass=check \
+                             --host='' --target=i686-unknown-linux-gnu && \
+           # Run tidy at the very end, after all the other tests.
+           python2.7 ../x.py --stage 2 test src/tools/tidy
diff --git a/src/ci/docker/host-x86_64/x86_64-gnu-llvm-15/Dockerfile b/src/ci/docker/host-x86_64/x86_64-gnu-llvm-15/Dockerfile
new file mode 100644 (file)
index 0000000..5219247
--- /dev/null
@@ -0,0 +1,67 @@
+FROM ubuntu:22.10
+
+ARG DEBIAN_FRONTEND=noninteractive
+
+# NOTE: intentionally installs both python2 and python3 so we can test support for both.
+RUN apt-get update && apt-get install -y --no-install-recommends \
+  g++ \
+  gcc-multilib \
+  make \
+  ninja-build \
+  file \
+  curl \
+  ca-certificates \
+  python2.7 \
+  python3 \
+  git \
+  cmake \
+  sudo \
+  gdb \
+  llvm-15-tools \
+  llvm-15-dev \
+  libedit-dev \
+  libssl-dev \
+  pkg-config \
+  zlib1g-dev \
+  xz-utils \
+  nodejs \
+  && rm -rf /var/lib/apt/lists/*
+
+# Install powershell (universal package) so we can test x.ps1 on Linux
+RUN curl -sL "https://github.com/PowerShell/PowerShell/releases/download/v7.3.1/powershell_7.3.1-1.deb_amd64.deb" > powershell.deb && \
+    dpkg -i powershell.deb && \
+    rm -f powershell.deb
+
+COPY scripts/sccache.sh /scripts/
+RUN sh /scripts/sccache.sh
+
+# We are disabling CI LLVM since this builder is intentionally using a host
+# LLVM, rather than the typical src/llvm-project LLVM.
+ENV NO_DOWNLOAD_CI_LLVM 1
+
+# Using llvm-link-shared due to libffi issues -- see #34486
+ENV RUST_CONFIGURE_ARGS \
+      --build=x86_64-unknown-linux-gnu \
+      --llvm-root=/usr/lib/llvm-15 \
+      --enable-llvm-link-shared \
+      --set rust.thin-lto-import-instr-limit=10
+
+# NOTE: intentionally uses all of `x.py`, `x`, and `x.ps1` to make sure they all work on Linux.
+ENV SCRIPT ../x.py --stage 2 test --exclude src/tools/tidy && \
+           # Run the `mir-opt` tests again but this time for a 32-bit target.
+           # This enforces that tests using `// EMIT_MIR_FOR_EACH_BIT_WIDTH` have
+           # both 32-bit and 64-bit outputs updated by the PR author, before
+           # the PR is approved and tested for merging.
+           # It will also detect tests lacking `// EMIT_MIR_FOR_EACH_BIT_WIDTH`,
+           # despite having different output on 32-bit vs 64-bit targets.
+           ../x --stage 2 test tests/mir-opt \
+                             --host='' --target=i686-unknown-linux-gnu && \
+           # Run the UI test suite again, but in `--pass=check` mode
+           #
+           # This is intended to make sure that `--pass=check` continues to
+           # work.
+           #
+           ../x.ps1 --stage 2 test tests/ui --pass=check \
+                             --host='' --target=i686-unknown-linux-gnu && \
+           # Run tidy at the very end, after all the other tests.
+           python2.7 ../x.py --stage 2 test src/tools/tidy
index ba70c62ea3081a96725e3c942e9b21b3d46100f7..4dd6ac274fd5b96dfa24873a8c4e0aea6c262cbc 100644 (file)
@@ -4,28 +4,10 @@ set -ex
 URL=https://dl.google.com/android/repository
 
 download_ndk() {
-    mkdir -p /android/ndk
-    cd /android/ndk
+    mkdir /android/
+    cd /android
     curl -fO $URL/$1
     unzip -q $1
     rm $1
     mv android-ndk-* ndk
 }
-
-make_standalone_toolchain() {
-    # See https://developer.android.com/ndk/guides/standalone_toolchain.htm
-    python3 /android/ndk/ndk/build/tools/make_standalone_toolchain.py \
-        --install-dir /android/ndk/$1-$2 \
-        --arch $1 \
-        --api $2
-}
-
-remove_ndk() {
-    rm -rf /android/ndk/ndk
-}
-
-download_and_make_toolchain() {
-    download_ndk $1 && \
-    make_standalone_toolchain $2 $3 && \
-    remove_ndk
-}
index d2a9264c84a12c77dd0abb631ac01537564a28e3..a466777dd46f85766125d8ec2a5e12655f32953a 100644 (file)
@@ -450,6 +450,16 @@ jobs:
           - name: x86_64-gnu-distcheck
             <<: *job-linux-xl
 
+          - name: x86_64-gnu-llvm-15
+            env:
+              RUST_BACKTRACE: 1
+            <<: *job-linux-xl
+
+          - name: x86_64-gnu-llvm-14
+            env:
+              RUST_BACKTRACE: 1
+            <<: *job-linux-xl
+
           - name: x86_64-gnu-llvm-13
             env:
               RUST_BACKTRACE: 1
index 2bd5d42c9956369132228da6409f0e68da56c51a..2cd1b5593d26dc6a03c20f8619187ad4b2485552 160000 (submodule)
@@ -1 +1 @@
-Subproject commit 2bd5d42c9956369132228da6409f0e68da56c51a
+Subproject commit 2cd1b5593d26dc6a03c20f8619187ad4b2485552
index 8ca261268068d80c0969260fff15199bad87b587..960d610e7f33889a2577f5f17c26f0d5c82b30df 160000 (submodule)
@@ -1 +1 @@
-Subproject commit 8ca261268068d80c0969260fff15199bad87b587
+Subproject commit 960d610e7f33889a2577f5f17c26f0d5c82b30df
index 3ae62681ff236d5528ef7c8c28ba7c6b2ecc6731..2cb0ed9ba56360949f492f9866afe8c293f9f9da 160000 (submodule)
@@ -1 +1 @@
-Subproject commit 3ae62681ff236d5528ef7c8c28ba7c6b2ecc6731
+Subproject commit 2cb0ed9ba56360949f492f9866afe8c293f9f9da
index 8888f9428fe9a48f31de6bd2cef9b9bf80791edc..a9fb7d13eadfcc5f457962731f105b97f9a7474a 160000 (submodule)
@@ -1 +1 @@
-Subproject commit 8888f9428fe9a48f31de6bd2cef9b9bf80791edc
+Subproject commit a9fb7d13eadfcc5f457962731f105b97f9a7474a
index b3e2a6e6c8a3aae5b5d950c63046f23bae07096d..7352353ae91c48b136d2ca7d03822e1448165e1e 160000 (submodule)
@@ -1 +1 @@
-Subproject commit b3e2a6e6c8a3aae5b5d950c63046f23bae07096d
+Subproject commit 7352353ae91c48b136d2ca7d03822e1448165e1e
index 38fd5c96997631d1ce77b3348e88b7754a8ca63b..da91e25595cc760f117b1eb265748e96ce0c4c6b 100644 (file)
@@ -201,6 +201,8 @@ $ RUSTFLAGS="-C instrument-coverage" \
     cargo test --tests
 ```
 
+> **Note**: The default for `LLVM_PROFILE_FILE` is `default_%m_%p.profraw`. Versions prior to 1.65 had a default of `default.profraw`, so if using those earlier versions, it is recommended to explicitly set `LLVM_PROFILE_FILE="default_%m_%p.profraw"` to avoid having multiple tests overwrite the `.profraw` files.
+
 Make note of the test binary file paths, displayed after the word "`Running`" in the test output:
 
 ```text
index 4d6f1524732f77453e2ce2489cd3eee1b6f848e8..a302750aa1aea341359cb95ad12d718bc501eec2 100644 (file)
@@ -402,15 +402,13 @@ fn make_final_bounds(
                     bound_params: Vec::new(),
                 })
             })
-            .chain(
-                lifetime_to_bounds.into_iter().filter(|&(_, ref bounds)| !bounds.is_empty()).map(
-                    |(lifetime, bounds)| {
-                        let mut bounds_vec = bounds.into_iter().collect();
-                        self.sort_where_bounds(&mut bounds_vec);
-                        WherePredicate::RegionPredicate { lifetime, bounds: bounds_vec }
-                    },
-                ),
-            )
+            .chain(lifetime_to_bounds.into_iter().filter(|(_, bounds)| !bounds.is_empty()).map(
+                |(lifetime, bounds)| {
+                    let mut bounds_vec = bounds.into_iter().collect();
+                    self.sort_where_bounds(&mut bounds_vec);
+                    WherePredicate::RegionPredicate { lifetime, bounds: bounds_vec }
+                },
+            ))
             .collect()
     }
 
index c6939326144ea87aae3bd7c1c08df53081a755b8..da300b89a4e9b1273a168fdd4b57908834f14ae0 100644 (file)
@@ -9,7 +9,7 @@
 use rustc_data_structures::fx::FxHashSet;
 use rustc_hir as hir;
 use rustc_hir::def::{DefKind, Res};
-use rustc_hir::def_id::DefId;
+use rustc_hir::def_id::{DefId, DefIdSet, LocalDefId};
 use rustc_hir::Mutability;
 use rustc_metadata::creader::{CStore, LoadedMacro};
 use rustc_middle::ty::{self, TyCtxt};
@@ -45,7 +45,7 @@ pub(crate) fn try_inline(
     res: Res,
     name: Symbol,
     attrs: Option<&[ast::Attribute]>,
-    visited: &mut FxHashSet<DefId>,
+    visited: &mut DefIdSet,
 ) -> Option<Vec<clean::Item>> {
     let did = res.opt_def_id()?;
     if did.is_local() {
@@ -162,7 +162,8 @@ pub(crate) fn try_inline(
 pub(crate) fn try_inline_glob(
     cx: &mut DocContext<'_>,
     res: Res,
-    visited: &mut FxHashSet<DefId>,
+    current_mod: LocalDefId,
+    visited: &mut DefIdSet,
     inlined_names: &mut FxHashSet<(ItemType, Symbol)>,
 ) -> Option<Vec<clean::Item>> {
     let did = res.opt_def_id()?;
@@ -172,7 +173,16 @@ pub(crate) fn try_inline_glob(
 
     match res {
         Res::Def(DefKind::Mod, did) => {
-            let mut items = build_module_items(cx, did, visited, inlined_names);
+            // Use the set of module reexports to filter away names that are not actually
+            // reexported by the glob, e.g. because they are shadowed by something else.
+            let reexports = cx
+                .tcx
+                .module_reexports(current_mod)
+                .unwrap_or_default()
+                .iter()
+                .filter_map(|child| child.res.opt_def_id())
+                .collect();
+            let mut items = build_module_items(cx, did, visited, inlined_names, Some(&reexports));
             items.drain_filter(|item| {
                 if let Some(name) = item.name {
                     // If an item with the same type and name already exists,
@@ -558,12 +568,8 @@ pub(crate) fn build_impl(
     ));
 }
 
-fn build_module(
-    cx: &mut DocContext<'_>,
-    did: DefId,
-    visited: &mut FxHashSet<DefId>,
-) -> clean::Module {
-    let items = build_module_items(cx, did, visited, &mut FxHashSet::default());
+fn build_module(cx: &mut DocContext<'_>, did: DefId, visited: &mut DefIdSet) -> clean::Module {
+    let items = build_module_items(cx, did, visited, &mut FxHashSet::default(), None);
 
     let span = clean::Span::new(cx.tcx.def_span(did));
     clean::Module { items, span }
@@ -572,8 +578,9 @@ fn build_module(
 fn build_module_items(
     cx: &mut DocContext<'_>,
     did: DefId,
-    visited: &mut FxHashSet<DefId>,
+    visited: &mut DefIdSet,
     inlined_names: &mut FxHashSet<(ItemType, Symbol)>,
+    allowed_def_ids: Option<&DefIdSet>,
 ) -> Vec<clean::Item> {
     let mut items = Vec::new();
 
@@ -583,6 +590,11 @@ fn build_module_items(
     for &item in cx.tcx.module_children(did).iter() {
         if item.vis.is_public() {
             let res = item.res.expect_non_local();
+            if let Some(def_id) = res.opt_def_id()
+                && let Some(allowed_def_ids) = allowed_def_ids
+                && !allowed_def_ids.contains(&def_id) {
+                continue;
+            }
             if let Some(def_id) = res.mod_def_id() {
                 // If we're inlining a glob import, it's possible to have
                 // two distinct modules with the same name. We don't want to
index 415e7d5a360d067eeaee6abe84fef949c8b55d42..34a7068e5da53b84b75f5e5147027801a2fd9d36 100644 (file)
@@ -15,7 +15,7 @@
 use rustc_data_structures::fx::{FxHashMap, FxHashSet, FxIndexMap, FxIndexSet, IndexEntry};
 use rustc_hir as hir;
 use rustc_hir::def::{CtorKind, DefKind, Res};
-use rustc_hir::def_id::{DefId, LOCAL_CRATE};
+use rustc_hir::def_id::{DefId, DefIdMap, DefIdSet, LOCAL_CRATE};
 use rustc_hir::PredicateOrigin;
 use rustc_hir_analysis::hir_ty_to_ty;
 use rustc_infer::infer::region_constraints::{Constraint, RegionConstraintData};
@@ -1528,7 +1528,7 @@ fn maybe_expand_private_type_alias<'tcx>(
     let hir::ItemKind::TyAlias(ty, generics) = alias else { return None };
 
     let provided_params = &path.segments.last().expect("segments were empty");
-    let mut substs = FxHashMap::default();
+    let mut substs = DefIdMap::default();
     let generic_args = provided_params.args();
 
     let mut indices: hir::GenericParamCount = Default::default();
@@ -2321,7 +2321,7 @@ fn clean_extern_crate<'tcx>(
 
     let krate_owner_def_id = krate.owner_id.to_def_id();
     if please_inline {
-        let mut visited = FxHashSet::default();
+        let mut visited = DefIdSet::default();
 
         let res = Res::Def(DefKind::Mod, crate_def_id);
 
@@ -2440,8 +2440,9 @@ fn clean_use_statement_inner<'tcx>(
     let path = clean_path(path, cx);
     let inner = if kind == hir::UseKind::Glob {
         if !denied {
-            let mut visited = FxHashSet::default();
-            if let Some(items) = inline::try_inline_glob(cx, path.res, &mut visited, inlined_names)
+            let mut visited = DefIdSet::default();
+            if let Some(items) =
+                inline::try_inline_glob(cx, path.res, current_mod, &mut visited, inlined_names)
             {
                 return items;
             }
@@ -2458,7 +2459,7 @@ fn clean_use_statement_inner<'tcx>(
             }
         }
         if !denied {
-            let mut visited = FxHashSet::default();
+            let mut visited = DefIdSet::default();
             let import_def_id = import.owner_id.to_def_id();
 
             if let Some(mut items) = inline::try_inline(
index e96a9bab72620970b87e9ab4f3d8acc0839ec664..dbbc25739aa078fa002422c3439b1376c6224283 100644 (file)
@@ -46,7 +46,7 @@ pub(crate) fn where_clauses(cx: &DocContext<'_>, clauses: Vec<WP>) -> ThinVec<WP
 
     // Look for equality predicates on associated types that can be merged into
     // general bound predicates.
-    equalities.retain(|&(ref lhs, ref rhs, ref bound_params)| {
+    equalities.retain(|(lhs, rhs, bound_params)| {
         let Some((ty, trait_did, name)) = lhs.projection() else { return true; };
         let Some((bounds, _)) = tybounds.get_mut(ty) else { return true };
         let bound_params = bound_params
index da0df596c41e34f7ca88e07b1e7b0811b75ed22d..10b606f425ea41fcd4839ff72fd0d95806428d04 100644 (file)
@@ -6,7 +6,7 @@
 use rustc_errors::json::JsonEmitter;
 use rustc_feature::UnstableFeatures;
 use rustc_hir::def::{Namespace, Res};
-use rustc_hir::def_id::{DefId, DefIdMap, LocalDefId};
+use rustc_hir::def_id::{DefId, DefIdMap, DefIdSet, LocalDefId};
 use rustc_hir::intravisit::{self, Visitor};
 use rustc_hir::{HirId, Path, TraitCandidate};
 use rustc_interface::interface;
@@ -60,11 +60,11 @@ pub(crate) struct DocContext<'tcx> {
     pub(crate) external_traits: Rc<RefCell<FxHashMap<DefId, clean::Trait>>>,
     /// Used while populating `external_traits` to ensure we don't process the same trait twice at
     /// the same time.
-    pub(crate) active_extern_traits: FxHashSet<DefId>,
+    pub(crate) active_extern_traits: DefIdSet,
     // The current set of parameter substitutions,
     // for expanding type aliases at the HIR level:
     /// Table `DefId` of type, lifetime, or const parameter -> substituted type, lifetime, or const
-    pub(crate) substs: FxHashMap<DefId, clean::SubstParam>,
+    pub(crate) substs: DefIdMap<clean::SubstParam>,
     /// Table synthetic type parameter for `impl Trait` in argument position -> bounds
     pub(crate) impl_trait_bounds: FxHashMap<ImplTraitParam, Vec<clean::GenericBound>>,
     /// Auto-trait or blanket impls processed so far, as `(self_ty, trait_def_id)`.
@@ -108,11 +108,7 @@ pub(crate) fn enter_resolver<F, R>(&self, f: F) -> R
 
     /// Call the closure with the given parameters set as
     /// the substitutions for a type alias' RHS.
-    pub(crate) fn enter_alias<F, R>(
-        &mut self,
-        substs: FxHashMap<DefId, clean::SubstParam>,
-        f: F,
-    ) -> R
+    pub(crate) fn enter_alias<F, R>(&mut self, substs: DefIdMap<clean::SubstParam>, f: F) -> R
     where
         F: FnOnce(&mut Self) -> R,
     {
@@ -225,7 +221,6 @@ pub(crate) fn create_config(
     // Add the doc cfg into the doc build.
     cfgs.push("doc".to_string());
 
-    let cpath = Some(input.clone());
     let input = Input::File(input);
 
     // By default, rustdoc ignores all lints.
@@ -277,7 +272,6 @@ pub(crate) fn create_config(
         crate_cfg: interface::parse_cfgspecs(cfgs),
         crate_check_cfg: interface::parse_check_cfg(check_cfgs),
         input,
-        input_path: cpath,
         output_file: None,
         output_dir: None,
         file_loader: None,
index d1b6d470e86ce91da866d11d89abbed6dc1622be..c1a652c75f4a16a0999fface3e8d8f8db60752af 100644 (file)
@@ -95,7 +95,6 @@ pub(crate) fn run(options: RustdocOptions) -> Result<(), ErrorGuaranteed> {
         crate_cfg: interface::parse_cfgspecs(cfgs),
         crate_check_cfg: interface::parse_check_cfg(options.check_cfgs.clone()),
         input,
-        input_path: None,
         output_file: None,
         output_dir: None,
         file_loader: None,
index d027fb6e8763cad8fdc3d1ffd6a237111b5de891..24752cddb337ceeade25dcacdde3e9370d2af23e 100644 (file)
@@ -1,7 +1,7 @@
 use std::mem;
 
 use rustc_data_structures::fx::{FxHashMap, FxHashSet};
-use rustc_hir::def_id::{CrateNum, DefId};
+use rustc_hir::def_id::{CrateNum, DefId, DefIdMap, DefIdSet};
 use rustc_middle::ty::{self, TyCtxt};
 use rustc_span::Symbol;
 
@@ -33,7 +33,7 @@ pub(crate) struct Cache {
     ///
     /// The values of the map are a list of implementations and documentation
     /// found on that implementation.
-    pub(crate) impls: FxHashMap<DefId, Vec<Impl>>,
+    pub(crate) impls: DefIdMap<Vec<Impl>>,
 
     /// Maintains a mapping of local crate `DefId`s to the fully qualified name
     /// and "short type description" of that node. This is used when generating
@@ -56,7 +56,7 @@ pub(crate) struct Cache {
     /// to the path used if the corresponding type is inlined. By
     /// doing this, we can detect duplicate impls on a trait page, and only display
     /// the impl for the inlined type.
-    pub(crate) exact_paths: FxHashMap<DefId, Vec<Symbol>>,
+    pub(crate) exact_paths: DefIdMap<Vec<Symbol>>,
 
     /// This map contains information about all known traits of this crate.
     /// Implementations of a crate should inherit the documentation of the
@@ -127,7 +127,7 @@ pub(crate) struct Cache {
 struct CacheBuilder<'a, 'tcx> {
     cache: &'a mut Cache,
     /// This field is used to prevent duplicated impl blocks.
-    impl_ids: FxHashMap<DefId, FxHashSet<DefId>>,
+    impl_ids: DefIdMap<DefIdSet>,
     tcx: TyCtxt<'tcx>,
 }
 
@@ -173,7 +173,7 @@ pub(crate) fn populate(cx: &mut DocContext<'_>, mut krate: clean::Crate) -> clea
 
         let (krate, mut impl_ids) = {
             let mut cache_builder =
-                CacheBuilder { tcx, cache: &mut cx.cache, impl_ids: FxHashMap::default() };
+                CacheBuilder { tcx, cache: &mut cx.cache, impl_ids: Default::default() };
             krate = cache_builder.fold_crate(krate);
             (krate, cache_builder.impl_ids)
         };
@@ -242,7 +242,7 @@ fn fold_item(&mut self, item: clean::Item) -> Option<clean::Item> {
         }
 
         // Index this method for searching later on.
-        if let Some(ref s) = item.name.or_else(|| {
+        if let Some(s) = item.name.or_else(|| {
             if item.is_stripped() {
                 None
             } else if let clean::ImportItem(ref i) = *item.kind &&
@@ -296,7 +296,7 @@ fn fold_item(&mut self, item: clean::Item) -> Option<clean::Item> {
                             // for where the type was defined. On the other
                             // hand, `paths` always has the right
                             // information if present.
-                            Some(&(ref fqp, _)) => Some(&fqp[..fqp.len() - 1]),
+                            Some((fqp, _)) => Some(&fqp[..fqp.len() - 1]),
                             None => None,
                         };
                         ((did, path), true)
@@ -317,14 +317,15 @@ fn fold_item(&mut self, item: clean::Item) -> Option<clean::Item> {
                             short_markdown_summary(x.as_str(), &item.link_names(self.cache))
                         });
                         let ty = item.type_();
-                        let name = s.to_string();
-                        if ty != ItemType::StructField || u16::from_str_radix(&name, 10).is_err() {
+                        if ty != ItemType::StructField
+                            || u16::from_str_radix(s.as_str(), 10).is_err()
+                        {
                             // In case this is a field from a tuple struct, we don't add it into
                             // the search index because its name is something like "0", which is
                             // not useful for rustdoc search.
                             self.cache.search_index.push(IndexItem {
                                 ty,
-                                name,
+                                name: s,
                                 path: join_with_double_colon(path),
                                 desc,
                                 parent,
index 5ad24bf2681332b83efb73a192b345bfa4993113..d3dc4065dfc7230d98cc259d7853633a4f2236a4 100644 (file)
@@ -569,7 +569,7 @@ fn generate_macro_def_id_path(
     root_path: Option<&str>,
 ) -> Result<(String, ItemType, Vec<Symbol>), HrefError> {
     let tcx = cx.shared.tcx;
-    let crate_name = tcx.crate_name(def_id.krate).to_string();
+    let crate_name = tcx.crate_name(def_id.krate);
     let cache = cx.cache();
 
     let fqp: Vec<Symbol> = tcx
@@ -584,7 +584,7 @@ fn generate_macro_def_id_path(
             }
         })
         .collect();
-    let mut relative = fqp.iter().map(|elem| elem.to_string());
+    let mut relative = fqp.iter().copied();
     let cstore = CStore::from_tcx(tcx);
     // We need this to prevent a `panic` when this function is used from intra doc links...
     if !cstore.has_crate_data(def_id.krate) {
@@ -602,9 +602,9 @@ fn generate_macro_def_id_path(
     };
 
     let mut path = if is_macro_2 {
-        once(crate_name.clone()).chain(relative).collect()
+        once(crate_name).chain(relative).collect()
     } else {
-        vec![crate_name.clone(), relative.next_back().unwrap()]
+        vec![crate_name, relative.next_back().unwrap()]
     };
     if path.len() < 2 {
         // The minimum we can have is the crate name followed by the macro name. If shorter, then
@@ -614,17 +614,22 @@ fn generate_macro_def_id_path(
     }
 
     if let Some(last) = path.last_mut() {
-        *last = format!("macro.{}.html", last);
+        *last = Symbol::intern(&format!("macro.{}.html", last.as_str()));
     }
 
     let url = match cache.extern_locations[&def_id.krate] {
         ExternalLocation::Remote(ref s) => {
             // `ExternalLocation::Remote` always end with a `/`.
-            format!("{}{}", s, path.join("/"))
+            format!("{}{}", s, path.iter().map(|p| p.as_str()).join("/"))
         }
         ExternalLocation::Local => {
             // `root_path` always end with a `/`.
-            format!("{}{}/{}", root_path.unwrap_or(""), crate_name, path.join("/"))
+            format!(
+                "{}{}/{}",
+                root_path.unwrap_or(""),
+                crate_name,
+                path.iter().map(|p| p.as_str()).join("/")
+            )
         }
         ExternalLocation::Unknown => {
             debug!("crate {} not in cache when linkifying macros", crate_name);
@@ -957,7 +962,7 @@ fn fmt_type<'cx>(
         clean::Tuple(ref typs) => {
             match &typs[..] {
                 &[] => primitive_link(f, PrimitiveType::Unit, "()", cx),
-                &[ref one] => {
+                [one] => {
                     if let clean::Generic(name) = one {
                         primitive_link(f, PrimitiveType::Tuple, &format!("({name},)"), cx)
                     } else {
@@ -1050,7 +1055,7 @@ fn fmt_type<'cx>(
                 _ => String::new(),
             };
             let m = mutability.print_with_space();
-            let amp = if f.alternate() { "&".to_string() } else { "&amp;".to_string() };
+            let amp = if f.alternate() { "&" } else { "&amp;" };
             match **ty {
                 clean::DynTrait(ref bounds, ref trait_lt)
                     if bounds.len() > 1 || trait_lt.is_some() =>
index aeaee524fd4532b820f130d04ccb7912ceb68411..4ff67fe1551dd019f638bc79ce4ee6460047e6bf 100644 (file)
@@ -30,7 +30,7 @@
 use rustc_hir::HirId;
 use rustc_middle::ty::TyCtxt;
 use rustc_span::edition::Edition;
-use rustc_span::Span;
+use rustc_span::{Span, Symbol};
 
 use once_cell::sync::Lazy;
 use std::borrow::Cow;
@@ -198,7 +198,7 @@ fn slugify(c: char) -> Option<char> {
 
 #[derive(Clone, Debug)]
 pub struct Playground {
-    pub crate_name: Option<String>,
+    pub crate_name: Option<Symbol>,
     pub url: String,
 }
 
@@ -290,7 +290,7 @@ fn next(&mut self) -> Option<Self::Item> {
                 .map(|l| map_line(l).for_code())
                 .intersperse("\n".into())
                 .collect::<String>();
-            let krate = krate.as_ref().map(|s| &**s);
+            let krate = krate.as_ref().map(|s| s.as_str());
             let (test, _, _) =
                 doctest::make_test(&test, krate, false, &Default::default(), edition, None);
             let channel = if test.contains("#![feature(") { "&amp;version=nightly" } else { "" };
index 20b51336c350f9300aa9f28859f6a17a01e87003..15258a467a228106abf2a18d728c066681b2bc6a 100644 (file)
@@ -6,7 +6,7 @@
 use std::sync::mpsc::{channel, Receiver};
 
 use rustc_data_structures::fx::{FxHashMap, FxHashSet};
-use rustc_hir::def_id::{DefId, LOCAL_CRATE};
+use rustc_hir::def_id::{DefIdMap, LOCAL_CRATE};
 use rustc_middle::ty::TyCtxt;
 use rustc_session::Session;
 use rustc_span::edition::Edition;
@@ -56,7 +56,7 @@ pub(crate) struct Context<'tcx> {
     pub(super) render_redirect_pages: bool,
     /// Tracks section IDs for `Deref` targets so they match in both the main
     /// body and the sidebar.
-    pub(super) deref_id_map: FxHashMap<DefId, String>,
+    pub(super) deref_id_map: DefIdMap<String>,
     /// The map used to ensure all generated 'id=' attributes are unique.
     pub(super) id_map: IdMap,
     /// Shared mutable state.
@@ -464,8 +464,7 @@ fn init(
         // If user passed in `--playground-url` arg, we fill in crate name here
         let mut playground = None;
         if let Some(url) = playground_url {
-            playground =
-                Some(markdown::Playground { crate_name: Some(krate.name(tcx).to_string()), url });
+            playground = Some(markdown::Playground { crate_name: Some(krate.name(tcx)), url });
         }
         let mut layout = layout::Layout {
             logo: String::new(),
@@ -491,7 +490,7 @@ fn init(
                 }
                 (sym::html_playground_url, Some(s)) => {
                     playground = Some(markdown::Playground {
-                        crate_name: Some(krate.name(tcx).to_string()),
+                        crate_name: Some(krate.name(tcx)),
                         url: s.to_string(),
                     });
                 }
@@ -545,7 +544,7 @@ fn init(
             dst,
             render_redirect_pages: false,
             id_map,
-            deref_id_map: FxHashMap::default(),
+            deref_id_map: Default::default(),
             shared: Rc::new(scx),
             include_sources,
             types_with_notable_traits: FxHashSet::default(),
@@ -573,7 +572,7 @@ fn make_child_renderer(&self) -> Self {
             current: self.current.clone(),
             dst: self.dst.clone(),
             render_redirect_pages: self.render_redirect_pages,
-            deref_id_map: FxHashMap::default(),
+            deref_id_map: Default::default(),
             id_map: IdMap::new(),
             shared: Rc::clone(&self.shared),
             include_sources: self.include_sources,
index 358b9185fb36126997dbb2af23f78d4eb7cebb67..d644293d3ef12c0174da5c7f83fb499ab8373983 100644 (file)
@@ -50,7 +50,7 @@
 use rustc_attr::{ConstStability, Deprecation, StabilityLevel};
 use rustc_data_structures::fx::{FxHashMap, FxHashSet};
 use rustc_hir::def::CtorKind;
-use rustc_hir::def_id::DefId;
+use rustc_hir::def_id::{DefId, DefIdSet};
 use rustc_hir::Mutability;
 use rustc_middle::middle::stability;
 use rustc_middle::ty;
@@ -100,7 +100,7 @@ pub(crate) fn ensure_trailing_slash(v: &str) -> impl fmt::Display + '_ {
 #[derive(Debug)]
 pub(crate) struct IndexItem {
     pub(crate) ty: ItemType,
-    pub(crate) name: String,
+    pub(crate) name: Symbol,
     pub(crate) path: String,
     pub(crate) desc: String,
     pub(crate) parent: Option<DefId>,
@@ -1115,7 +1115,7 @@ fn render_assoc_items(
     it: DefId,
     what: AssocItemRender<'_>,
 ) {
-    let mut derefs = FxHashSet::default();
+    let mut derefs = DefIdSet::default();
     derefs.insert(it);
     render_assoc_items_inner(w, cx, containing_item, it, what, &mut derefs)
 }
@@ -1126,7 +1126,7 @@ fn render_assoc_items_inner(
     containing_item: &clean::Item,
     it: DefId,
     what: AssocItemRender<'_>,
-    derefs: &mut FxHashSet<DefId>,
+    derefs: &mut DefIdSet,
 ) {
     info!("Documenting associated items of {:?}", containing_item.name);
     let shared = Rc::clone(&cx.shared);
@@ -1215,7 +1215,7 @@ fn render_deref_methods(
     impl_: &Impl,
     container_item: &clean::Item,
     deref_mut: bool,
-    derefs: &mut FxHashSet<DefId>,
+    derefs: &mut DefIdSet,
 ) {
     let cache = cx.cache();
     let deref_type = impl_.inner_impl().trait_.as_ref().unwrap();
@@ -1343,7 +1343,7 @@ fn notable_traits_decl(ty: &clean::Type, cx: &Context<'_>) -> (String, String) {
                     write!(
                         &mut out,
                         "<h3>Notable traits for <code>{}</code></h3>\
-                     <pre class=\"content\"><code>",
+                     <pre><code>",
                         impl_.for_.print(cx)
                     );
                 }
@@ -2175,7 +2175,7 @@ fn sidebar_assoc_items(cx: &Context<'_>, out: &mut Buffer, it: &clean::Item) {
             if let Some(impl_) =
                 v.iter().find(|i| i.trait_did() == cx.tcx().lang_items().deref_trait())
             {
-                let mut derefs = FxHashSet::default();
+                let mut derefs = DefIdSet::default();
                 derefs.insert(did);
                 sidebar_deref_methods(cx, out, impl_, v, &mut derefs, &mut used_links);
             }
@@ -2195,7 +2195,7 @@ fn sidebar_deref_methods(
     out: &mut Buffer,
     impl_: &Impl,
     v: &[Impl],
-    derefs: &mut FxHashSet<DefId>,
+    derefs: &mut DefIdSet,
     used_links: &mut FxHashSet<String>,
 ) {
     let c = cx.cache();
@@ -2769,8 +2769,8 @@ fn collect_paths_for_type(first_ty: clean::Type, cache: &Cache) -> Vec<String> {
     let mut work = VecDeque::new();
 
     let mut process_path = |did: DefId| {
-        let get_extern = || cache.external_paths.get(&did).map(|s| s.0.clone());
-        let fqp = cache.exact_paths.get(&did).cloned().or_else(get_extern);
+        let get_extern = || cache.external_paths.get(&did).map(|s| &s.0);
+        let fqp = cache.exact_paths.get(&did).or_else(get_extern);
 
         if let Some(path) = fqp {
             out.push(join_with_double_colon(&path));
@@ -2921,7 +2921,7 @@ fn render_call_locations(w: &mut Buffer, cx: &mut Context<'_>, item: &clean::Ite
         // Look for the example file in the source map if it exists, otherwise return a dummy span
         let file_span = (|| {
             let source_map = tcx.sess.source_map();
-            let crate_src = tcx.sess.local_crate_source_file.as_ref()?;
+            let crate_src = tcx.sess.local_crate_source_file()?;
             let abs_crate_src = crate_src.canonicalize().ok()?;
             let crate_root = abs_crate_src.parent()?.parent()?;
             let rel_path = path.strip_prefix(crate_root).ok()?;
index d6e57decdcf18463cb76d372d4f688484c3bbff6..f824c9e3ad2bd5eacbeec79587c137bb728067a9 100644 (file)
@@ -531,7 +531,7 @@ fn item_function(w: &mut Buffer, cx: &mut Context<'_>, it: &clean::Item, f: &cle
         f.decl.output.as_return().and_then(|output| notable_traits_button(output, cx));
 
     wrap_into_item_decl(w, |w| {
-        wrap_item(w, "fn", |w| {
+        wrap_item(w, |w| {
             render_attributes_in_pre(w, it, "");
             w.reserve(header_len);
             write!(
@@ -570,7 +570,7 @@ fn item_trait(w: &mut Buffer, cx: &mut Context<'_>, it: &clean::Item, t: &clean:
 
     // Output the trait definition
     wrap_into_item_decl(w, |w| {
-        wrap_item(w, "trait", |w| {
+        wrap_item(w, |w| {
             render_attributes_in_pre(w, it, "");
             write!(
                 w,
@@ -1027,8 +1027,8 @@ fn trait_item(w: &mut Buffer, cx: &mut Context<'_>, m: &clean::Item, t: &clean::
         .chain(std::iter::once("implementors"))
         .collect();
     if let Some(did) = it.item_id.as_def_id() &&
-        let get_extern = { || cache.external_paths.get(&did).map(|s| s.0.clone()) } &&
-        let Some(fqp) = cache.exact_paths.get(&did).cloned().or_else(get_extern) {
+        let get_extern = { || cache.external_paths.get(&did).map(|s| &s.0) } &&
+        let Some(fqp) = cache.exact_paths.get(&did).or_else(get_extern) {
         js_src_path.extend(fqp[..fqp.len() - 1].iter().copied());
         js_src_path.push_fmt(format_args!("{}.{}.js", it.type_(), fqp.last().unwrap()));
     } else {
@@ -1051,7 +1051,7 @@ fn trait_item(w: &mut Buffer, cx: &mut Context<'_>, m: &clean::Item, t: &clean::
 
 fn item_trait_alias(w: &mut Buffer, cx: &mut Context<'_>, it: &clean::Item, t: &clean::TraitAlias) {
     wrap_into_item_decl(w, |w| {
-        wrap_item(w, "trait-alias", |w| {
+        wrap_item(w, |w| {
             render_attributes_in_pre(w, it, "");
             write!(
                 w,
@@ -1075,7 +1075,7 @@ fn item_trait_alias(w: &mut Buffer, cx: &mut Context<'_>, it: &clean::Item, t: &
 
 fn item_opaque_ty(w: &mut Buffer, cx: &mut Context<'_>, it: &clean::Item, t: &clean::OpaqueTy) {
     wrap_into_item_decl(w, |w| {
-        wrap_item(w, "opaque", |w| {
+        wrap_item(w, |w| {
             render_attributes_in_pre(w, it, "");
             write!(
                 w,
@@ -1099,7 +1099,7 @@ fn item_opaque_ty(w: &mut Buffer, cx: &mut Context<'_>, it: &clean::Item, t: &cl
 
 fn item_typedef(w: &mut Buffer, cx: &mut Context<'_>, it: &clean::Item, t: &clean::Typedef) {
     fn write_content(w: &mut Buffer, cx: &Context<'_>, it: &clean::Item, t: &clean::Typedef) {
-        wrap_item(w, "typedef", |w| {
+        wrap_item(w, |w| {
             render_attributes_in_pre(w, it, "");
             write!(w, "{}", visibility_print_with_space(it.visibility(cx.tcx()), it.item_id, cx));
             write!(
@@ -1128,7 +1128,7 @@ fn write_content(w: &mut Buffer, cx: &Context<'_>, it: &clean::Item, t: &clean::
 
 fn item_union(w: &mut Buffer, cx: &mut Context<'_>, it: &clean::Item, s: &clean::Union) {
     wrap_into_item_decl(w, |w| {
-        wrap_item(w, "union", |w| {
+        wrap_item(w, |w| {
             render_attributes_in_pre(w, it, "");
             render_union(w, it, Some(&s.generics), &s.fields, "", cx);
         });
@@ -1193,7 +1193,7 @@ fn item_enum(w: &mut Buffer, cx: &mut Context<'_>, it: &clean::Item, e: &clean::
     let tcx = cx.tcx();
     let count_variants = e.variants().count();
     wrap_into_item_decl(w, |w| {
-        wrap_item(w, "enum", |w| {
+        wrap_item(w, |w| {
             render_attributes_in_pre(w, it, "");
             write!(
                 w,
@@ -1357,17 +1357,17 @@ fn item_proc_macro(w: &mut Buffer, cx: &mut Context<'_>, it: &clean::Item, m: &c
         let name = it.name.expect("proc-macros always have names");
         match m.kind {
             MacroKind::Bang => {
-                wrap_item(w, "macro", |w| {
+                wrap_item(w, |w| {
                     write!(w, "{}!() {{ /* proc-macro */ }}", name);
                 });
             }
             MacroKind::Attr => {
-                wrap_item(w, "attr", |w| {
+                wrap_item(w, |w| {
                     write!(w, "#[{}]", name);
                 });
             }
             MacroKind::Derive => {
-                wrap_item(w, "derive", |w| {
+                wrap_item(w, |w| {
                     write!(w, "#[derive({})]", name);
                     if !m.helpers.is_empty() {
                         w.push_str("\n{\n");
@@ -1401,7 +1401,7 @@ fn item_primitive(w: &mut Buffer, cx: &mut Context<'_>, it: &clean::Item) {
 
 fn item_constant(w: &mut Buffer, cx: &mut Context<'_>, it: &clean::Item, c: &clean::Constant) {
     wrap_into_item_decl(w, |w| {
-        wrap_item(w, "const", |w| {
+        wrap_item(w, |w| {
             let tcx = cx.tcx();
             render_attributes_in_code(w, it);
 
@@ -1451,7 +1451,7 @@ fn item_constant(w: &mut Buffer, cx: &mut Context<'_>, it: &clean::Item, c: &cle
 
 fn item_struct(w: &mut Buffer, cx: &mut Context<'_>, it: &clean::Item, s: &clean::Struct) {
     wrap_into_item_decl(w, |w| {
-        wrap_item(w, "struct", |w| {
+        wrap_item(w, |w| {
             render_attributes_in_code(w, it);
             render_struct(w, it, Some(&s.generics), s.ctor_kind, &s.fields, "", true, cx);
         });
@@ -1504,7 +1504,7 @@ fn item_struct(w: &mut Buffer, cx: &mut Context<'_>, it: &clean::Item, s: &clean
 
 fn item_static(w: &mut Buffer, cx: &mut Context<'_>, it: &clean::Item, s: &clean::Static) {
     wrap_into_item_decl(w, |w| {
-        wrap_item(w, "static", |w| {
+        wrap_item(w, |w| {
             render_attributes_in_code(w, it);
             write!(
                 w,
@@ -1521,7 +1521,7 @@ fn item_static(w: &mut Buffer, cx: &mut Context<'_>, it: &clean::Item, s: &clean
 
 fn item_foreign_type(w: &mut Buffer, cx: &mut Context<'_>, it: &clean::Item) {
     wrap_into_item_decl(w, |w| {
-        wrap_item(w, "foreigntype", |w| {
+        wrap_item(w, |w| {
             w.write_str("extern {\n");
             render_attributes_in_code(w, it);
             write!(
@@ -1618,11 +1618,11 @@ fn wrap_into_item_decl<F>(w: &mut Buffer, f: F)
     w.write_str("</div>")
 }
 
-fn wrap_item<F>(w: &mut Buffer, item_name: &str, f: F)
+fn wrap_item<F>(w: &mut Buffer, f: F)
 where
     F: FnOnce(&mut Buffer),
 {
-    w.write_fmt(format_args!("<pre class=\"rust {}\"><code>", item_name));
+    w.write_str(r#"<pre class="rust"><code>"#);
     f(w);
     w.write_str("</code></pre>");
 }
index bc74d9cf969741eb56e3ba7cb91c007ab2900ea3..5b0caac099bc3010c7cb9873dc7a8dba990e3d80 100644 (file)
@@ -29,13 +29,13 @@ pub(crate) fn build_index<'tcx>(
     // Attach all orphan items to the type's definition if the type
     // has since been learned.
     for &OrphanImplItem { parent, ref item, ref impl_generics } in &cache.orphan_impl_items {
-        if let Some(&(ref fqp, _)) = cache.paths.get(&parent) {
+        if let Some((fqp, _)) = cache.paths.get(&parent) {
             let desc = item
                 .doc_value()
                 .map_or_else(String::new, |s| short_markdown_summary(&s, &item.link_names(cache)));
             cache.search_index.push(IndexItem {
                 ty: item.type_(),
-                name: item.name.unwrap().to_string(),
+                name: item.name.unwrap(),
                 path: join_with_double_colon(&fqp[..fqp.len() - 1]),
                 desc,
                 parent: Some(parent),
@@ -58,8 +58,8 @@ pub(crate) fn build_index<'tcx>(
     // Sort search index items. This improves the compressibility of the search index.
     cache.search_index.sort_unstable_by(|k1, k2| {
         // `sort_unstable_by_key` produces lifetime errors
-        let k1 = (&k1.path, &k1.name, &k1.ty, &k1.parent);
-        let k2 = (&k2.path, &k2.name, &k2.ty, &k2.parent);
+        let k1 = (&k1.path, k1.name.as_str(), &k1.ty, &k1.parent);
+        let k2 = (&k2.path, k2.name.as_str(), &k2.ty, &k2.parent);
         std::cmp::Ord::cmp(&k1, &k2)
     });
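The comparator above now compares `name.as_str()` rather than the `Symbol` itself, presumably because a raw `Symbol` orders by its interner index rather than by its text. A small stand-alone sketch of the difference, with `(u32, &str)` pairs standing in for interned symbols (purely illustrative):

    fn main() {
        // (interner id, text) pairs standing in for `Symbol`s.
        let mut items = vec![(2u32, "alloc"), (0u32, "std"), (1u32, "core")];

        // Ordering by the id reflects interning order, not the name.
        items.sort_unstable_by_key(|&(id, _)| id);
        assert_eq!(items.iter().map(|i| i.1).collect::<Vec<_>>(), ["std", "core", "alloc"]);

        // Ordering by the text, as the patch does via `as_str()`, gives a
        // deterministic, name-based order.
        items.sort_unstable_by(|a, b| a.1.cmp(b.1));
        assert_eq!(items.iter().map(|i| i.1).collect::<Vec<_>>(), ["alloc", "core", "std"]);
    }
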
 
@@ -240,7 +240,7 @@ fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
             )?;
             crate_data.serialize_field(
                 "n",
-                &self.items.iter().map(|item| &item.name).collect::<Vec<_>>(),
+                &self.items.iter().map(|item| item.name.as_str()).collect::<Vec<_>>(),
             )?;
             crate_data.serialize_field(
                 "q",
@@ -299,7 +299,7 @@ fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
             )?;
             crate_data.serialize_field(
                 "p",
-                &self.paths.iter().map(|(it, s)| (it, s.to_string())).collect::<Vec<_>>(),
+                &self.paths.iter().map(|(it, s)| (it, s.as_str())).collect::<Vec<_>>(),
             )?;
             if has_aliases {
                 crate_data.serialize_field("a", &self.aliases)?;
@@ -573,7 +573,7 @@ fn get_fn_inputs_and_outputs<'tcx>(
     let decl = &func.decl;
 
     let combined_generics;
-    let (self_, generics) = if let Some(&(ref impl_self, ref impl_generics)) = impl_generics {
+    let (self_, generics) = if let Some((impl_self, impl_generics)) = impl_generics {
         match (impl_generics.is_empty(), func.generics.is_empty()) {
             (true, _) => (Some(impl_self), &func.generics),
             (_, true) => (Some(impl_self), impl_generics),
index ca3e9916487aad060546b9ac358d9f3a7fb20515..bc8badad38eb0578ea3a0b5f40310241b66eab0c 100644 (file)
@@ -138,7 +138,7 @@ fn collect(path: &Path, krate: &str) -> io::Result<(Vec<String>, Vec<String>)> {
         Ok((ret, krates))
     }
 
-    /// Read a file and return all lines that match the <code>"{crate}":{data},\ </code> format,
+    /// Read a file and return all lines that match the <code>"{crate}":{data},\</code> format,
     /// and return a tuple `(Vec<DataString>, Vec<CrateNameString>)`.
     ///
     /// This forms the payload of files that look like this:
index 2070bb54b1ba9bcc8e0777a96245487545851d17..424bbb0ec42be8a1e42765ebc29638de8bc6a399 100644 (file)
@@ -497,7 +497,7 @@ ul.block, .block li {
        padding-left: 24px;
 }
 
-.sidebar a, .sidebar .current {
+.sidebar a {
        color: var(--sidebar-link-color);
 }
 .sidebar .current,
@@ -1214,11 +1214,11 @@ a.test-arrow:hover {
        content: "\00a0";
 }
 
-.notable .docblock {
+.notable .content {
        margin: 0.25em 0.5em;
 }
 
-.notable .docblock pre, .notable .docblock code {
+.notable .content pre, .notable .content code {
        background: transparent;
        margin: 0;
        padding: 0;
@@ -1226,6 +1226,10 @@ a.test-arrow:hover {
        white-space: pre-wrap;
 }
 
+.notable .content > h3:first-child {
+       margin: 0 0 5px 0;
+}
+
 .search-failed {
        text-align: center;
        margin-top: 20px;
@@ -1893,19 +1897,25 @@ in storage.js
        right: 0.25em;
 }
 
-.scraped-example:not(.expanded) .code-wrapper:before,
-.scraped-example:not(.expanded) .code-wrapper:after {
+.scraped-example:not(.expanded) .code-wrapper::before,
+.scraped-example:not(.expanded) .code-wrapper::after {
        content: " ";
        width: 100%;
        height: 5px;
        position: absolute;
        z-index: 1;
 }
-.scraped-example:not(.expanded) .code-wrapper:before {
+.scraped-example:not(.expanded) .code-wrapper::before {
        top: 0;
+       background: linear-gradient(to bottom,
+               var(--scrape-example-code-wrapper-background-start),
+               var(--scrape-example-code-wrapper-background-end));
 }
-.scraped-example:not(.expanded) .code-wrapper:after {
+.scraped-example:not(.expanded) .code-wrapper::after {
        bottom: 0;
+       background: linear-gradient(to top,
+               var(--scrape-example-code-wrapper-background-start),
+               var(--scrape-example-code-wrapper-background-end));
 }
 
 .scraped-example .code-wrapper .example-wrap {
index 91419093147d728bbdda9478eecda9305db4621e..4e9803fe2366d2adf3b0b46d905134d4520ba9ca 100644 (file)
@@ -1,13 +1,8 @@
 .setting-line {
-       margin: 0.6em 0 0.6em 0.3em;
+       margin: 1.2em 0.6em;
        position: relative;
 }
 
-.setting-line .choices {
-       display: flex;
-       flex-wrap: wrap;
-}
-
 .setting-line .radio-line input,
 .setting-line .settings-toggle input {
        margin-right: 0.3em;
        padding-bottom: 1px;
 }
 
-.radio-line .setting-name {
-       width: 100%;
-}
-
 .radio-line .choice {
        margin-top: 0.1em;
        margin-bottom: 0.1em;
        min-width: 3.8em;
        padding: 0.3em;
-       display: flex;
+       display: inline-flex;
        align-items: center;
        cursor: pointer;
 }
        cursor: pointer;
 }
 
-#settings .setting-line {
-       margin: 1.2em 0.6em;
-}
-
 .setting-line .radio-line input:checked {
        box-shadow: inset 0 0 0 3px var(--main-background-color);
        background-color: var(--settings-input-color);
index 979e7e0f999eda96a2d18ab552c3b668076d8965..ed779bf6166eed34cb97036796868426617b05aa 100644 (file)
@@ -97,6 +97,8 @@ Original by Dempfi (https://github.com/dempfi/ayu)
        --scrape-example-help-color: #eee;
        --scrape-example-help-hover-border-color: #fff;
        --scrape-example-help-hover-color: #fff;
+       --scrape-example-code-wrapper-background-start: rgba(15, 20, 25, 1);
+       --scrape-example-code-wrapper-background-end: rgba(15, 20, 25, 0);
 }
 
 h1, h2, h3, h4 {
@@ -203,10 +205,3 @@ above the `@media (max-width: 700px)` rules due to a bug in the css checker */
 #source-sidebar div.files > a.selected {
        color: #ffb44c;
 }
-
-.scraped-example:not(.expanded) .code-wrapper::before {
-       background: linear-gradient(to bottom, rgba(15, 20, 25, 1), rgba(15, 20, 25, 0));
-}
-.scraped-example:not(.expanded) .code-wrapper::after {
-       background: linear-gradient(to top, rgba(15, 20, 25, 1), rgba(15, 20, 25, 0));
-}
index fb15863b027ca5f992954da7656696bc8dec4fb3..3766f0daa42ff837264542bee9f59c15212b3170 100644 (file)
@@ -92,6 +92,8 @@
        --scrape-example-help-color: #eee;
        --scrape-example-help-hover-border-color: #fff;
        --scrape-example-help-hover-color: #fff;
+       --scrape-example-code-wrapper-background-start: rgba(53, 53, 53, 1);
+       --scrape-example-code-wrapper-background-end: rgba(53, 53, 53, 0);
 }
 
 #search-tabs > button:not(.selected) {
        border-top-color: #0089ff;
        background-color: #353535;
 }
-
-.scraped-example:not(.expanded) .code-wrapper::before {
-       background: linear-gradient(to bottom, rgba(53, 53, 53, 1), rgba(53, 53, 53, 0));
-}
-.scraped-example:not(.expanded) .code-wrapper::after {
-       background: linear-gradient(to top, rgba(53, 53, 53, 1), rgba(53, 53, 53, 0));
-}
index 053fa78d1dc5884dae832d7524e8c3654b3e0828..8a7f6abcf8d8e83141923f3a8400bd38606725bd 100644 (file)
@@ -89,6 +89,8 @@
        --scrape-example-help-color: #333;
        --scrape-example-help-hover-border-color: #000;
        --scrape-example-help-hover-color: #000;
+       --scrape-example-code-wrapper-background-start: rgba(255, 255, 255, 1);
+       --scrape-example-code-wrapper-background-end: rgba(255, 255, 255, 0);
 }
 
 #search-tabs > button:not(.selected) {
        background-color: #ffffff;
        border-top-color: #0089ff;
 }
-
-.scraped-example:not(.expanded) .code-wrapper::before {
-       background: linear-gradient(to bottom, rgba(255, 255, 255, 1), rgba(255, 255, 255, 0));
-}
-.scraped-example:not(.expanded) .code-wrapper::after {
-       background: linear-gradient(to top, rgba(255, 255, 255, 1), rgba(255, 255, 255, 0));
-}
index 4f6f8c57597f08408822da9a9a8f4a78dd1f7302..604ab147f6a16ea5a251d6ed4a71aa59f295f9ac 100644 (file)
@@ -390,7 +390,8 @@ function loadCss(cssUrl) {
         }
 
         if (document.activeElement.tagName === "INPUT" &&
-            document.activeElement.type !== "checkbox") {
+            document.activeElement.type !== "checkbox" &&
+            document.activeElement.type !== "radio") {
             switch (getVirtualKey(ev)) {
             case "Escape":
                 handleEscape(ev);
@@ -803,15 +804,10 @@ function loadCss(cssUrl) {
         }
     });
 
-    function handleClick(id, f) {
-        const elem = document.getElementById(id);
-        if (elem) {
-            elem.addEventListener("click", f);
-        }
+    const mainElem = document.getElementById(MAIN_ID);
+    if (mainElem) {
+        mainElem.addEventListener("click", hideSidebar);
     }
-    handleClick(MAIN_ID, () => {
-        hideSidebar();
-    });
 
     onEachLazy(document.querySelectorAll("a[href^='#']"), el => {
         // For clicks on internal links (<A> tags with a hash property), we expand the section we're
@@ -847,7 +843,7 @@ function loadCss(cssUrl) {
         window.hideAllModals(false);
         const ty = e.getAttribute("data-ty");
         const wrapper = document.createElement("div");
-        wrapper.innerHTML = "<div class=\"docblock\">" + window.NOTABLE_TRAITS[ty] + "</div>";
+        wrapper.innerHTML = "<div class=\"content\">" + window.NOTABLE_TRAITS[ty] + "</div>";
         wrapper.className = "notable popover";
         const focusCatcher = document.createElement("div");
         focusCatcher.setAttribute("tabindex", "0");
@@ -945,7 +941,7 @@ function loadCss(cssUrl) {
                 return;
             }
             if (!this.NOTABLE_FORCE_VISIBLE &&
-                !elemIsInParent(event.relatedTarget, window.CURRENT_NOTABLE_ELEMENT)) {
+                !elemIsInParent(ev.relatedTarget, window.CURRENT_NOTABLE_ELEMENT)) {
                 hideNotable(true);
             }
         };
@@ -1087,6 +1083,9 @@ function loadCss(cssUrl) {
      * Show the help popup menu.
      */
     function showHelp() {
+        // Prevent `blur` events from being dispatched as a result of closing
+        // other modals.
+        getHelpButton().querySelector("a").focus();
         const menu = getHelpMenu(true);
         if (menu.style.display === "none") {
             window.hideAllModals();
index 1b8822b0b2b7dedfa0f4c6589043a1208e799d53..88592fa0c84c1453fbda216b9f3d92eddb0ab307 100644 (file)
@@ -781,7 +781,29 @@ function initSearch(rawSearchIndex) {
                     return a - b;
                 }
 
-                // Sort by non levenshtein results and then levenshtein results by the distance
+                // sort by index of keyword in item name (no literal occurrence goes later)
+                a = (aaa.index < 0);
+                b = (bbb.index < 0);
+                if (a !== b) {
+                    return a - b;
+                }
+
+                // Sort by distance in the path part, if specified
+                // (less changes required to match means higher rankings)
+                a = aaa.path_lev;
+                b = bbb.path_lev;
+                if (a !== b) {
+                    return a - b;
+                }
+
+                // (later literal occurrence, if any, goes later)
+                a = aaa.index;
+                b = bbb.index;
+                if (a !== b) {
+                    return a - b;
+                }
+
+                // Sort by distance in the name part, the last part of the path
                 // (less changes required to match means higher rankings)
                 a = (aaa.lev);
                 b = (bbb.lev);
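Read together, the reordered tie-breaks above rank results by: whether the query occurs literally in the name at all, then the edit distance of the path part, then the position of the literal occurrence, then the edit distance of the name itself. An approximate Rust rendering of that chain (the real code is the JavaScript above; the field names are stand-ins):

    use std::cmp::Ordering;

    // Stand-in for one result's ranking data.
    struct Hit { index: isize, path_lev: u32, lev: u32 }

    fn rank(a: &Hit, b: &Hit) -> Ordering {
        // no literal occurrence in the name goes later
        (a.index < 0).cmp(&(b.index < 0))
            // smaller path distance ranks higher
            .then(a.path_lev.cmp(&b.path_lev))
            // earlier literal occurrence ranks higher
            .then(a.index.cmp(&b.index))
            // smaller name distance ranks higher
            .then(a.lev.cmp(&b.lev))
    }

    fn main() {
        let exact = Hit { index: 0, path_lev: 0, lev: 0 };
        let fuzzy = Hit { index: -1, path_lev: 0, lev: 1 };
        assert_eq!(rank(&exact, &fuzzy), Ordering::Less); // exact occurrence sorts first
    }
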
@@ -810,19 +832,6 @@ function initSearch(rawSearchIndex) {
                     return (a > b ? +1 : -1);
                 }
 
-                // sort by index of keyword in item name (no literal occurrence goes later)
-                a = (aaa.index < 0);
-                b = (bbb.index < 0);
-                if (a !== b) {
-                    return a - b;
-                }
-                // (later literal occurrence, if any, goes later)
-                a = aaa.index;
-                b = bbb.index;
-                if (a !== b) {
-                    return a - b;
-                }
-
                 // special precedence for primitive and keyword pages
                 if ((aaa.item.ty === TY_PRIMITIVE && bbb.item.ty !== TY_KEYWORD) ||
                     (aaa.item.ty === TY_KEYWORD && bbb.item.ty !== TY_PRIMITIVE)) {
@@ -1230,15 +1239,19 @@ function initSearch(rawSearchIndex) {
          * * `id` is the index in both `searchWords` and `searchIndex` arrays for this element.
          * `index` is an `integer` used to sort by the position of the word in the item's name.
          * * `lev` is the main metric used to sort the search results.
+         * * `path_lev` is zero if a single-component search query is used, otherwise it's the
+         *   distance computed for everything other than the last path component.
          *
          * @param {Results} results
          * @param {string} fullId
          * @param {integer} id
          * @param {integer} index
          * @param {integer} lev
+         * @param {integer} path_lev
          */
-        function addIntoResults(results, fullId, id, index, lev) {
-            if (lev === 0 || (!parsedQuery.literalSearch && lev <= MAX_LEV_DISTANCE)) {
+        function addIntoResults(results, fullId, id, index, lev, path_lev) {
+            const inBounds = lev <= MAX_LEV_DISTANCE || index !== -1;
+            if (lev === 0 || (!parsedQuery.literalSearch && inBounds)) {
                 if (results[fullId] !== undefined) {
                     const result = results[fullId];
                     if (result.dontValidate || result.lev <= lev) {
@@ -1250,6 +1263,7 @@ function initSearch(rawSearchIndex) {
                     index: index,
                     dontValidate: parsedQuery.literalSearch,
                     lev: lev,
+                    path_lev: path_lev,
                 };
             }
         }
@@ -1280,68 +1294,68 @@ function initSearch(rawSearchIndex) {
             if (!row || (filterCrates !== null && row.crate !== filterCrates)) {
                 return;
             }
-            let lev, lev_add = 0, index = -1;
+            let lev, index = -1, path_lev = 0;
             const fullId = row.id;
+            const searchWord = searchWords[pos];
 
             const in_args = findArg(row, elem, parsedQuery.typeFilter);
             const returned = checkReturned(row, elem, parsedQuery.typeFilter);
 
-            addIntoResults(results_in_args, fullId, pos, index, in_args);
-            addIntoResults(results_returned, fullId, pos, index, returned);
+            // path_lev is 0 because no parent path information is currently stored
+            // in the search index
+            addIntoResults(results_in_args, fullId, pos, -1, in_args, 0);
+            addIntoResults(results_returned, fullId, pos, -1, returned, 0);
 
             if (!typePassesFilter(parsedQuery.typeFilter, row.ty)) {
                 return;
             }
-            const searchWord = searchWords[pos];
 
-            if (parsedQuery.literalSearch) {
-                if (searchWord === elem.name) {
-                    addIntoResults(results_others, fullId, pos, -1, 0);
-                }
-                return;
+            const row_index = row.normalizedName.indexOf(elem.pathLast);
+            const word_index = searchWord.indexOf(elem.pathLast);
+
+            // lower indexes are "better" matches
+            // rank based on the "best" match
+            if (row_index === -1) {
+                index = word_index;
+            } else if (word_index === -1) {
+                index = row_index;
+            } else if (word_index < row_index) {
+                index = word_index;
+            } else {
+                index = row_index;
             }
 
             // No need to check anything else if it's a "pure" generics search.
             if (elem.name.length === 0) {
                 if (row.type !== null) {
                     lev = checkGenerics(row.type, elem, MAX_LEV_DISTANCE + 1);
-                    addIntoResults(results_others, fullId, pos, index, lev);
+                    // path_lev is 0 because we know it's empty
+                    addIntoResults(results_others, fullId, pos, index, lev, 0);
                 }
                 return;
             }
 
             if (elem.fullPath.length > 1) {
-                lev = checkPath(elem.pathWithoutLast, row);
-                if (lev > MAX_LEV_DISTANCE || (parsedQuery.literalSearch && lev !== 0)) {
+                path_lev = checkPath(elem.pathWithoutLast, row);
+                if (path_lev > MAX_LEV_DISTANCE) {
                     return;
-                } else if (lev > 0) {
-                    lev_add = lev / 10;
                 }
             }
 
-            if (searchWord.indexOf(elem.pathLast) > -1 ||
-                row.normalizedName.indexOf(elem.pathLast) > -1
-            ) {
-                index = row.normalizedName.indexOf(elem.pathLast);
-            }
-            lev = levenshtein(searchWord, elem.pathLast);
-            if (lev > 0 && elem.pathLast.length > 2 && searchWord.indexOf(elem.pathLast) > -1) {
-                if (elem.pathLast.length < 6) {
-                    lev = 1;
-                } else {
-                    lev = 0;
+            if (parsedQuery.literalSearch) {
+                if (searchWord === elem.name) {
+                    addIntoResults(results_others, fullId, pos, index, 0, path_lev);
                 }
-            }
-            lev += lev_add;
-            if (lev > MAX_LEV_DISTANCE) {
                 return;
-            } else if (index !== -1 && elem.fullPath.length < 2) {
-                lev -= 1;
             }
-            if (lev < 0) {
-                lev = 0;
+
+            lev = levenshtein(searchWord, elem.pathLast);
+
+            if (index === -1 && lev + path_lev > MAX_LEV_DISTANCE) {
+                return;
             }
-            addIntoResults(results_others, fullId, pos, index, lev);
+
+            addIntoResults(results_others, fullId, pos, index, lev, path_lev);
         }
 
         /**
@@ -1386,7 +1400,7 @@ function initSearch(rawSearchIndex) {
                 return;
             }
             const lev = Math.round(totalLev / nbLev);
-            addIntoResults(results, row.id, pos, 0, lev);
+            addIntoResults(results, row.id, pos, 0, lev, 0);
         }
 
         function innerRunQuery() {
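The rewritten matching path above keeps the smaller of the two literal-occurrence positions (in the item's search word and in its normalized name), computes `path_lev` separately from the name distance, and only discards a candidate when there is no literal occurrence and the combined distance is too large. A small sketch of that filter, assuming `MAX_LEV_DISTANCE` is 3 as in search.js:

    const MAX_LEV_DISTANCE: u32 = 3;

    // Pick the earlier ("better") occurrence; -1 means no occurrence at all.
    fn best_index(row_index: isize, word_index: isize) -> isize {
        match (row_index, word_index) {
            (-1, w) => w,
            (r, -1) => r,
            (r, w) => r.min(w),
        }
    }

    // Mirror of the new bail-out: keep anything with a literal occurrence,
    // otherwise require name distance + path distance to stay within bounds.
    fn keep(index: isize, lev: u32, path_lev: u32) -> bool {
        index != -1 || lev + path_lev <= MAX_LEV_DISTANCE
    }

    fn main() {
        assert_eq!(best_index(5, 2), 2);
        assert_eq!(best_index(-1, 4), 4);
        assert!(keep(-1, 2, 1));  // close enough even without an occurrence
        assert!(!keep(-1, 3, 1)); // too far and no occurrence: dropped
    }
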
index 9ed8f63610ff6b336d940e934043a6a7f53dd540..84df1b7d3911a4ecf0d3209afa4cee6f5466b079 100644 (file)
                 // This is a select setting.
                 output += `\
 <div class="radio-line" id="${js_data_name}">
-    <span class="setting-name">${setting_name}</span>
+    <div class="setting-name">${setting_name}</div>
 <div class="choices">`;
                 onEach(setting["options"], option => {
                     const checked = option === setting["default"] ? " checked" : "";
index c5c687df74fd88072f0f348134f219b19b96af8d..982370aa21c43b2ef4960f73714b2993710e2748 100644 (file)
@@ -1,5 +1,4 @@
-use rustc_data_structures::fx::FxHashSet;
-use rustc_hir::def_id::DefId;
+use rustc_hir::def_id::DefIdSet;
 
 use crate::{
     clean::{self, Import, ImportSource, Item},
 /// See [#100973](https://github.com/rust-lang/rust/issues/100973) and
 /// [#101103](https://github.com/rust-lang/rust/issues/101103) for times when
 /// this information is needed.
-pub(crate) fn get_imports(krate: clean::Crate) -> (clean::Crate, FxHashSet<DefId>) {
-    let mut finder = ImportFinder { imported: FxHashSet::default() };
+pub(crate) fn get_imports(krate: clean::Crate) -> (clean::Crate, DefIdSet) {
+    let mut finder = ImportFinder::default();
     let krate = finder.fold_crate(krate);
     (krate, finder.imported)
 }
 
+#[derive(Default)]
 struct ImportFinder {
-    imported: FxHashSet<DefId>,
+    imported: DefIdSet,
 }
 
 impl DocFolder for ImportFinder {
index 1196f944faad2d26981b806023fe739cfaacc953..5adc0d2a40e41bf2944e0aa079c169decf4fb253 100644 (file)
@@ -13,8 +13,8 @@
 use std::path::PathBuf;
 use std::rc::Rc;
 
-use rustc_data_structures::fx::{FxHashMap, FxHashSet};
-use rustc_hir::def_id::DefId;
+use rustc_data_structures::fx::FxHashMap;
+use rustc_hir::def_id::{DefId, DefIdSet};
 use rustc_middle::ty::TyCtxt;
 use rustc_session::Session;
 use rustc_span::def_id::LOCAL_CRATE;
@@ -40,7 +40,7 @@ pub(crate) struct JsonRenderer<'tcx> {
     /// The directory where the blob will be written to.
     out_path: PathBuf,
     cache: Rc<Cache>,
-    imported_items: FxHashSet<DefId>,
+    imported_items: DefIdSet,
 }
 
 impl<'tcx> JsonRenderer<'tcx> {
index 79db3c6c3e78699845da1046efadd30e849c33ed..7d15a207d06524b06bf3415f3621d60f6d647113 100644 (file)
@@ -7,8 +7,8 @@
 use crate::formats::cache::Cache;
 use crate::visit::DocVisitor;
 
-use rustc_data_structures::fx::{FxHashMap, FxHashSet};
-use rustc_hir::def_id::{DefId, LOCAL_CRATE};
+use rustc_data_structures::fx::FxHashSet;
+use rustc_hir::def_id::{DefId, DefIdMap, DefIdSet, LOCAL_CRATE};
 use rustc_middle::ty::{self, DefIdTree};
 use rustc_span::symbol::sym;
 
@@ -126,14 +126,14 @@ pub(crate) fn collect_trait_impls(mut krate: Crate, cx: &mut DocContext<'_>) ->
     });
 
     let mut cleaner = BadImplStripper { prims, items: crate_items, cache: &cx.cache };
-    let mut type_did_to_deref_target: FxHashMap<DefId, &Type> = FxHashMap::default();
+    let mut type_did_to_deref_target: DefIdMap<&Type> = DefIdMap::default();
 
     // Follow all `Deref` targets of included items and recursively add them as valid
     fn add_deref_target(
         cx: &DocContext<'_>,
-        map: &FxHashMap<DefId, &Type>,
+        map: &DefIdMap<&Type>,
         cleaner: &mut BadImplStripper<'_>,
-        targets: &mut FxHashSet<DefId>,
+        targets: &mut DefIdSet,
         type_did: DefId,
     ) {
         if let Some(target) = map.get(&type_did) {
@@ -177,7 +177,7 @@ fn add_deref_target(
                         // `Deref` target type and the impl for type positions, this map of types is keyed by
                         // `DefId` and for convenience uses a special cleaner that accepts `DefId`s directly.
                         if cleaner.keep_impl_with_def_id(for_did.into()) {
-                            let mut targets = FxHashSet::default();
+                            let mut targets = DefIdSet::default();
                             targets.insert(for_did);
                             add_deref_target(
                                 cx,
index 7db470359672f834fdaff1017f3a496a5979e28f..00ea6ca4152c8ee460c09434fa3350911b7f950a 100644 (file)
@@ -1,14 +1,11 @@
 //! The Rust AST Visitor. Extracts useful information and massages it into a form
 //! usable for `clean`.
 
-use rustc_data_structures::fx::{FxHashMap, FxHashSet};
+use rustc_data_structures::fx::FxHashSet;
 use rustc_hir as hir;
 use rustc_hir::def::{DefKind, Res};
-use rustc_hir::def_id::DefId;
-use rustc_hir::intravisit::{walk_item, Visitor};
-use rustc_hir::Node;
-use rustc_hir::CRATE_HIR_ID;
-use rustc_middle::hir::nested_filter;
+use rustc_hir::def_id::{DefId, DefIdMap};
+use rustc_hir::{HirIdSet, Node, CRATE_HIR_ID};
 use rustc_middle::ty::TyCtxt;
 use rustc_span::def_id::{CRATE_DEF_ID, LOCAL_CRATE};
 use rustc_span::symbol::{kw, sym, Symbol};
@@ -59,34 +56,29 @@ pub(crate) fn inherits_doc_hidden(tcx: TyCtxt<'_>, mut node: hir::HirId) -> bool
     false
 }
 
+// Also, is there some reason that this doesn't use the 'visit'
+// framework from syntax?
+
 pub(crate) struct RustdocVisitor<'a, 'tcx> {
     cx: &'a mut core::DocContext<'tcx>,
-    view_item_stack: FxHashSet<hir::HirId>,
+    view_item_stack: HirIdSet,
     inlining: bool,
     /// Are the current module and all of its parents public?
     inside_public_path: bool,
-    exact_paths: FxHashMap<DefId, Vec<Symbol>>,
-    modules: Vec<Module<'tcx>>,
+    exact_paths: DefIdMap<Vec<Symbol>>,
 }
 
 impl<'a, 'tcx> RustdocVisitor<'a, 'tcx> {
     pub(crate) fn new(cx: &'a mut core::DocContext<'tcx>) -> RustdocVisitor<'a, 'tcx> {
         // If the root is re-exported, terminate all recursion.
-        let mut stack = FxHashSet::default();
+        let mut stack = HirIdSet::default();
         stack.insert(hir::CRATE_HIR_ID);
-        let om = Module::new(
-            cx.tcx.crate_name(LOCAL_CRATE),
-            hir::CRATE_HIR_ID,
-            cx.tcx.hir().root_module().spans.inner_span,
-        );
-
         RustdocVisitor {
             cx,
             view_item_stack: stack,
             inlining: false,
             inside_public_path: true,
-            exact_paths: FxHashMap::default(),
-            modules: vec![om],
+            exact_paths: Default::default(),
         }
     }
 
@@ -96,10 +88,12 @@ fn store_path(&mut self, did: DefId) {
     }
 
     pub(crate) fn visit(mut self) -> Module<'tcx> {
-        let root_module = self.cx.tcx.hir().root_module();
-        self.visit_mod_contents(CRATE_HIR_ID, root_module);
-
-        let mut top_level_module = self.modules.pop().unwrap();
+        let mut top_level_module = self.visit_mod_contents(
+            hir::CRATE_HIR_ID,
+            self.cx.tcx.hir().root_module(),
+            self.cx.tcx.crate_name(LOCAL_CRATE),
+            None,
+        );
 
         // `#[macro_export] macro_rules!` items are reexported at the top level of the
         // crate, regardless of where they're defined. We want to document the
@@ -114,13 +108,15 @@ pub(crate) fn visit(mut self) -> Module<'tcx> {
         // macro in the same module.
         let mut inserted = FxHashSet::default();
         for export in self.cx.tcx.module_reexports(CRATE_DEF_ID).unwrap_or(&[]) {
-            if let Res::Def(DefKind::Macro(_), def_id) = export.res &&
-                let Some(local_def_id) = def_id.as_local() &&
-                self.cx.tcx.has_attr(def_id, sym::macro_export) &&
-                inserted.insert(def_id)
-            {
-                    let item = self.cx.tcx.hir().expect_item(local_def_id);
-                    top_level_module.items.push((item, None, None));
+            if let Res::Def(DefKind::Macro(_), def_id) = export.res {
+                if let Some(local_def_id) = def_id.as_local() {
+                    if self.cx.tcx.has_attr(def_id, sym::macro_export) {
+                        if inserted.insert(def_id) {
+                            let item = self.cx.tcx.hir().expect_item(local_def_id);
+                            top_level_module.items.push((item, None, None));
+                        }
+                    }
+                }
             }
         }
 
@@ -154,23 +150,24 @@ pub(crate) fn visit(mut self) -> Module<'tcx> {
         top_level_module
     }
 
-    /// This method will go through the given module items in two passes:
-    /// 1. The items which are not glob imports/reexports.
-    /// 2. The glob imports/reexports.
-    fn visit_mod_contents(&mut self, id: hir::HirId, m: &'tcx hir::Mod<'tcx>) {
-        debug!("Going through module {:?}", m);
+    fn visit_mod_contents(
+        &mut self,
+        id: hir::HirId,
+        m: &'tcx hir::Mod<'tcx>,
+        name: Symbol,
+        parent_id: Option<hir::HirId>,
+    ) -> Module<'tcx> {
+        let mut om = Module::new(name, id, m.spans.inner_span);
         let def_id = self.cx.tcx.hir().local_def_id(id).to_def_id();
         // Keep track of if there were any private modules in the path.
         let orig_inside_public_path = self.inside_public_path;
         self.inside_public_path &= self.cx.tcx.visibility(def_id).is_public();
-
-        // Reimplementation of `walk_mod` because we need to do it in two passes (explanations in
-        // the second loop):
         for &i in m.item_ids {
             let item = self.cx.tcx.hir().item(i);
-            if !matches!(item.kind, hir::ItemKind::Use(_, hir::UseKind::Glob)) {
-                self.visit_item(item);
+            if matches!(item.kind, hir::ItemKind::Use(_, hir::UseKind::Glob)) {
+                continue;
             }
+            self.visit_item(item, None, &mut om, parent_id);
         }
         for &i in m.item_ids {
             let item = self.cx.tcx.hir().item(i);
@@ -178,10 +175,11 @@ fn visit_mod_contents(&mut self, id: hir::HirId, m: &'tcx hir::Mod<'tcx>) {
             // Later passes in rustdoc will de-duplicate by name and kind, so if glob-
             // imported items appear last, then they'll be the ones that get discarded.
             if matches!(item.kind, hir::ItemKind::Use(_, hir::UseKind::Glob)) {
-                self.visit_item(item);
+                self.visit_item(item, None, &mut om, parent_id);
             }
         }
         self.inside_public_path = orig_inside_public_path;
+        om
     }
 
     /// Tries to resolve the target of a `pub use` statement and inlines the
@@ -199,6 +197,7 @@ fn maybe_inline_local(
         res: Res,
         renamed: Option<Symbol>,
         glob: bool,
+        om: &mut Module<'tcx>,
         please_inline: bool,
     ) -> bool {
         debug!("maybe_inline_local res: {:?}", res);
@@ -249,20 +248,20 @@ fn maybe_inline_local(
                 let prev = mem::replace(&mut self.inlining, true);
                 for &i in m.item_ids {
                     let i = self.cx.tcx.hir().item(i);
-                    self.visit_item_inner(i, None, Some(id));
+                    self.visit_item(i, None, om, Some(id));
                 }
                 self.inlining = prev;
                 true
             }
             Node::Item(it) if !glob => {
                 let prev = mem::replace(&mut self.inlining, true);
-                self.visit_item_inner(it, renamed, Some(id));
+                self.visit_item(it, renamed, om, Some(id));
                 self.inlining = prev;
                 true
             }
             Node::ForeignItem(it) if !glob => {
                 let prev = mem::replace(&mut self.inlining, true);
-                self.visit_foreign_item_inner(it, renamed);
+                self.visit_foreign_item(it, renamed, om);
                 self.inlining = prev;
                 true
             }
@@ -272,22 +271,13 @@ fn maybe_inline_local(
         ret
     }
 
-    #[inline]
-    fn add_to_current_mod(
+    fn visit_item(
         &mut self,
         item: &'tcx hir::Item<'_>,
         renamed: Option<Symbol>,
+        om: &mut Module<'tcx>,
         parent_id: Option<hir::HirId>,
     ) {
-        self.modules.last_mut().unwrap().items.push((item, renamed, parent_id))
-    }
-
-    fn visit_item_inner(
-        &mut self,
-        item: &'tcx hir::Item<'_>,
-        renamed: Option<Symbol>,
-        parent_id: Option<hir::HirId>,
-    ) -> bool {
         debug!("visiting item {:?}", item);
         let name = renamed.unwrap_or(item.ident.name);
 
@@ -302,7 +292,7 @@ fn visit_item_inner(
             hir::ItemKind::ForeignMod { items, .. } => {
                 for item in items {
                     let item = self.cx.tcx.hir().foreign_item(item.id);
-                    self.visit_foreign_item_inner(item, None);
+                    self.visit_foreign_item(item, None, om);
                 }
             }
             // If we're inlining, skip private items or item reexported as "_".
@@ -335,13 +325,14 @@ fn visit_item_inner(
                             res,
                             ident,
                             is_glob,
+                            om,
                             please_inline,
                         ) {
                             continue;
                         }
                     }
 
-                    self.add_to_current_mod(item, renamed, parent_id);
+                    om.items.push((item, renamed, parent_id))
                 }
             }
             hir::ItemKind::Macro(ref macro_def, _) => {
@@ -361,11 +352,11 @@ fn visit_item_inner(
                 let nonexported = !self.cx.tcx.has_attr(def_id, sym::macro_export);
 
                 if is_macro_2_0 || nonexported || self.inlining {
-                    self.add_to_current_mod(item, renamed, None);
+                    om.items.push((item, renamed, None));
                 }
             }
             hir::ItemKind::Mod(ref m) => {
-                self.enter_mod(item.hir_id(), m, name);
+                om.mods.push(self.visit_mod_contents(item.hir_id(), m, name, parent_id));
             }
             hir::ItemKind::Fn(..)
             | hir::ItemKind::ExternCrate(..)
@@ -376,92 +367,33 @@ fn visit_item_inner(
             | hir::ItemKind::OpaqueTy(..)
             | hir::ItemKind::Static(..)
             | hir::ItemKind::Trait(..)
-            | hir::ItemKind::TraitAlias(..) => {
-                self.add_to_current_mod(item, renamed, parent_id);
-            }
+            | hir::ItemKind::TraitAlias(..) => om.items.push((item, renamed, parent_id)),
             hir::ItemKind::Const(..) => {
                 // Underscore constants do not correspond to a nameable item and
                 // so are never useful in documentation.
                 if name != kw::Underscore {
-                    self.add_to_current_mod(item, renamed, parent_id);
+                    om.items.push((item, renamed, parent_id));
                 }
             }
             hir::ItemKind::Impl(impl_) => {
                 // Don't duplicate impls when inlining or if it's implementing a trait, we'll pick
                 // them up regardless of where they're located.
                 if !self.inlining && impl_.of_trait.is_none() {
-                    self.add_to_current_mod(item, None, None);
+                    om.items.push((item, None, None));
                 }
             }
         }
-        true
     }
 
-    fn visit_foreign_item_inner(
+    fn visit_foreign_item(
         &mut self,
         item: &'tcx hir::ForeignItem<'_>,
         renamed: Option<Symbol>,
+        om: &mut Module<'tcx>,
     ) {
         // If inlining we only want to include public functions.
         if !self.inlining || self.cx.tcx.visibility(item.owner_id).is_public() {
-            self.modules.last_mut().unwrap().foreigns.push((item, renamed));
+            om.foreigns.push((item, renamed));
         }
     }
-
-    /// This method will create a new module and push it onto the "modules stack" then call
-    /// `visit_mod_contents`. Once done, it'll remove it from the "modules stack" and instead
-    /// add into the list of modules of the current module.
-    fn enter_mod(&mut self, id: hir::HirId, m: &'tcx hir::Mod<'tcx>, name: Symbol) {
-        self.modules.push(Module::new(name, id, m.spans.inner_span));
-
-        self.visit_mod_contents(id, m);
-
-        let last = self.modules.pop().unwrap();
-        self.modules.last_mut().unwrap().mods.push(last);
-    }
-}
-
-// We need to implement this visitor so it'll go everywhere and retrieve items we're interested in
-// such as impl blocks in const blocks.
-impl<'a, 'tcx> Visitor<'tcx> for RustdocVisitor<'a, 'tcx> {
-    type NestedFilter = nested_filter::All;
-
-    fn nested_visit_map(&mut self) -> Self::Map {
-        self.cx.tcx.hir()
-    }
-
-    fn visit_item(&mut self, i: &'tcx hir::Item<'tcx>) {
-        let parent_id = if self.modules.len() > 1 {
-            Some(self.modules[self.modules.len() - 2].id)
-        } else {
-            None
-        };
-        if self.visit_item_inner(i, None, parent_id) {
-            walk_item(self, i);
-        }
-    }
-
-    fn visit_mod(&mut self, _: &hir::Mod<'tcx>, _: Span, _: hir::HirId) {
-        // Handled in `visit_item_inner`
-    }
-
-    fn visit_use(&mut self, _: &hir::UsePath<'tcx>, _: hir::HirId) {
-        // Handled in `visit_item_inner`
-    }
-
-    fn visit_path(&mut self, _: &hir::Path<'tcx>, _: hir::HirId) {
-        // Handled in `visit_item_inner`
-    }
-
-    fn visit_label(&mut self, _: &rustc_ast::Label) {
-        // Unneeded.
-    }
-
-    fn visit_infer(&mut self, _: &hir::InferArg) {
-        // Unneeded.
-    }
-
-    fn visit_lifetime(&mut self, _: &hir::Lifetime) {
-        // Unneeded.
-    }
 }
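The refactor above has `visit_mod_contents` build and return its `Module` directly instead of keeping a module stack, but it still visits plain items before glob re-exports so that the later de-duplication pass keeps the explicitly defined item. A minimal illustration of that two-pass ordering, with simplified stand-in types:

    struct Item { name: &'static str, is_glob: bool }

    // Non-glob items first, glob re-exports last, so that a later pass which
    // de-duplicates by name discards the glob-imported copy.
    fn visit_order(items: &[Item]) -> Vec<&Item> {
        let mut out: Vec<&Item> = items.iter().filter(|i| !i.is_glob).collect();
        out.extend(items.iter().filter(|i| i.is_glob));
        out
    }

    fn main() {
        let items = [
            Item { name: "glob re-export of Foo", is_glob: true },
            Item { name: "Foo", is_glob: false },
        ];
        let order: Vec<_> = visit_order(&items).iter().map(|i| i.name).collect();
        assert_eq!(order, ["Foo", "glob re-export of Foo"]);
    }
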
index e490559b0e92addd40d61d7f25f040749ffa4fb6..fd4f9254107caee8baa3b9837b7260f115a1737b 100644 (file)
@@ -1,14 +1,13 @@
 use crate::core::DocContext;
-use rustc_data_structures::fx::FxHashSet;
 use rustc_hir::def::DefKind;
-use rustc_hir::def_id::DefId;
+use rustc_hir::def_id::{DefId, DefIdSet};
 use rustc_middle::ty::TyCtxt;
 
 // FIXME: this may not be exhaustive, but is sufficient for rustdocs current uses
 
 #[derive(Default)]
 pub(crate) struct RustdocEffectiveVisibilities {
-    extern_public: FxHashSet<DefId>,
+    extern_public: DefIdSet,
 }
 
 macro_rules! define_method {
@@ -43,9 +42,9 @@ pub(crate) fn lib_embargo_visit_item(cx: &mut DocContext<'_>, def_id: DefId) {
 struct LibEmbargoVisitor<'a, 'tcx> {
     tcx: TyCtxt<'tcx>,
     // Effective visibilities for reachable nodes
-    extern_public: &'a mut FxHashSet<DefId>,
+    extern_public: &'a mut DefIdSet,
     // Keeps track of already visited modules, in case a module re-exports its parent
-    visited_mods: FxHashSet<DefId>,
+    visited_mods: DefIdSet,
 }
 
 impl LibEmbargoVisitor<'_, '_> {
index 9ad24035fea8d309753f5e39e6eb53d1d0eb39ce..477e7285b12f876ad105188cfcfc8adda7dc29aa 160000 (submodule)
@@ -1 +1 @@
-Subproject commit 9ad24035fea8d309753f5e39e6eb53d1d0eb39ce
+Subproject commit 477e7285b12f876ad105188cfcfc8adda7dc29aa
index 1cd6d3803dfb0b342272862a8590f5dfc9f72573..985d561f0bb9b76ec043a2b12511790ec7a2b954 160000 (submodule)
@@ -1 +1 @@
-Subproject commit 1cd6d3803dfb0b342272862a8590f5dfc9f72573
+Subproject commit 985d561f0bb9b76ec043a2b12511790ec7a2b954
index c5abcc462545c935fb8a6b37e678685ce7bf639b..e9b2e31a769ad5a216a3375d9be6a52f9e4f2e24 100644 (file)
@@ -52,21 +52,19 @@ fn check_crate_post(&mut self, cx: &LateContext<'tcx>) {
         // List of spans to lint. (lint_span, first_span)
         let mut lint_spans = Vec::new();
 
-        for (_, impl_ids) in cx
+        let inherent_impls = cx
             .tcx
-            .crate_inherent_impls(())
-            .inherent_impls
-            .iter()
-            .filter(|(&id, impls)| {
-                impls.len() > 1
-                    // Check for `#[allow]` on the type definition
-                    && !is_lint_allowed(
-                        cx,
-                        MULTIPLE_INHERENT_IMPL,
-                        cx.tcx.hir().local_def_id_to_hir_id(id),
-                    )
-            })
-        {
+            .with_stable_hashing_context(|hcx| cx.tcx.crate_inherent_impls(()).inherent_impls.to_sorted(&hcx, true));
+
+        for (_, impl_ids) in inherent_impls.into_iter().filter(|(&id, impls)| {
+            impls.len() > 1
+            // Check for `#[allow]` on the type definition
+            && !is_lint_allowed(
+                cx,
+                MULTIPLE_INHERENT_IMPL,
+                cx.tcx.hir().local_def_id_to_hir_id(id),
+            )
+        }) {
             for impl_id in impl_ids.iter().map(|id| id.expect_local()) {
                 match type_map.entry(cx.tcx.type_of(impl_id)) {
                     Entry::Vacant(e) => {
index dd1b23e7d9d29f19a513b36efbc1468e690907b0..9f6e89405713c0d5825e979d1c5a91d15c8666a5 100644 (file)
@@ -61,7 +61,7 @@
     /// [`Instant::now()`]: std::time::Instant::now;
     #[clippy::version = "1.65.0"]
     pub UNCHECKED_DURATION_SUBTRACTION,
-    suspicious,
+    pedantic,
     "finds unchecked subtraction of a 'Duration' from an 'Instant'"
 }
 
index 9eba46756299c57bc178a48fc92b65a4af99fc38..3c70c9cf19a516ccdc00d91ef97324d6a86b5682 100644 (file)
@@ -219,7 +219,7 @@ fn fill_trait_set(traitt: DefId, set: &mut DefIdSet, cx: &LateContext<'_>) {
         let is_empty = sym!(is_empty);
 
         let is_empty_method_found = current_and_super_traits
-            .iter()
+            .items()
             .flat_map(|&i| cx.tcx.associated_items(i).filter_by_name_unhygienic(is_empty))
             .any(|i| {
                 i.kind == ty::AssocKind::Fn
index a63422d2a36ac530c14b75843a7bd38905c58774..d1a1f773f87b3b0878b83ab05a261a8ea89ab4c9 100644 (file)
@@ -35,7 +35,7 @@ pub(super) fn check<'tcx>(cx: &LateContext<'tcx>, cond: &'tcx Expr<'_>, expr: &'
         } else {
             return;
         };
-    let mutable_static_in_cond = var_visitor.def_ids.iter().any(|(_, v)| *v);
+    let mutable_static_in_cond = var_visitor.def_ids.items().any(|(_, v)| *v);
 
     let mut has_break_or_return_visitor = HasBreakOrReturnVisitor {
         has_break_or_return: false,
index 68af8a672f6aed19b7e7f22f235a6255129a2abc..3371b4cce32c1fa265b21ab0c207c6d5d8742a99 100644 (file)
@@ -80,19 +80,21 @@ fn check_item(&mut self, cx: &LateContext<'tcx>, item: &'tcx Item<'tcx>) {
                 }
             }
 
-            for assoc in provided.values() {
-                let source_map = cx.tcx.sess.source_map();
-                let definition_span = source_map.guess_head_span(cx.tcx.def_span(assoc.def_id));
+            cx.tcx.with_stable_hashing_context(|hcx| {
+                for assoc in provided.values_sorted(&hcx, true) {
+                    let source_map = cx.tcx.sess.source_map();
+                    let definition_span = source_map.guess_head_span(cx.tcx.def_span(assoc.def_id));
 
-                span_lint_and_help(
-                    cx,
-                    MISSING_TRAIT_METHODS,
-                    source_map.guess_head_span(item.span),
-                    &format!("missing trait method provided by default: `{}`", assoc.name),
-                    Some(definition_span),
-                    "implement the method",
-                );
-            }
+                    span_lint_and_help(
+                        cx,
+                        MISSING_TRAIT_METHODS,
+                        source_map.guess_head_span(item.span),
+                        &format!("missing trait method provided by default: `{}`", assoc.name),
+                        Some(definition_span),
+                        "implement the method",
+                    );
+                }
+            })
         }
     }
 }
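
Several of the Clippy hunks above (the multiple-inherent-impl change at the top of this section and the missing-trait-methods change just above, among others) stop iterating hash maps directly and instead go through either the unordered-collection `items()` API or an explicitly sorted view produced under the compiler's stable hashing context, so the emitted lints come out in a deterministic order. A minimal standalone sketch of the underlying idea, using plain std collections rather than the rustc-internal `to_sorted`/`values_sorted` helpers (names and data below are illustrative):

// Collecting a HashMap's entries and sorting them by key gives a
// deterministic iteration order; inside the compiler the same effect is
// obtained via `to_sorted`/`values_sorted` and the stable hashing context.
use std::collections::HashMap;

fn main() {
    let mut provided: HashMap<&str, u32> = HashMap::new();
    provided.insert("len", 1);
    provided.insert("is_empty", 2);
    provided.insert("iter", 3);

    // HashMap iteration order is unspecified, so collect and sort first.
    let mut entries: Vec<_> = provided.into_iter().collect();
    entries.sort_by_key(|&(name, _)| name);

    for (name, _id) in entries {
        println!("missing trait method provided by default: `{name}`");
    }
}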
index 1249db5dc4792307ed353ba64dfd959e8a7368b4..8c9d4c5cfe66fae84961b8ab9020b7313adc2bd3 100644 (file)
@@ -24,7 +24,7 @@
 use rustc_span::{sym, Span};
 use rustc_target::spec::abi::Abi;
 use rustc_trait_selection::traits;
-use rustc_trait_selection::traits::misc::can_type_implement_copy;
+use rustc_trait_selection::traits::misc::type_allowed_to_implement_copy;
 use std::borrow::Cow;
 
 declare_clippy_lint! {
@@ -200,7 +200,7 @@ fn check_fn(
                     let sugg = |diag: &mut Diagnostic| {
                         if let ty::Adt(def, ..) = ty.kind() {
                             if let Some(span) = cx.tcx.hir().span_if_local(def.did()) {
-                                if can_type_implement_copy(
+                                if type_allowed_to_implement_copy(
                                     cx.tcx,
                                     cx.param_env,
                                     ty,
index 870a1c7d88d532bd6eadbf148acde49fd64b58be..2d21aaa4f7fdb5606f560763c8e00dcac0a8ab93 100644 (file)
@@ -190,10 +190,10 @@ fn check_poly_fn(&mut self, cx: &LateContext<'tcx>, def_id: LocalDefId, decl: &F
                             // Don't lint if an unsafe pointer is created.
                             // TODO: Limit the check only to unsafe pointers to the argument (or part of the argument)
                             //       which escape the current function.
-                            if typeck.node_types().iter().any(|(_, &ty)| ty.is_unsafe_ptr())
+                            if typeck.node_types().items().any(|(_, &ty)| ty.is_unsafe_ptr())
                                 || typeck
                                     .adjustments()
-                                    .iter()
+                                    .items()
                                     .flat_map(|(_, a)| a)
                                     .any(|a| matches!(a.kind, Adjust::Pointer(PointerCast::UnsafeFnPointer)))
                             {
index c8d56a3be5cf356ca7c3723dd8cb29fbed2fbf79..99fba4fe741a1c398c234a1d1a7b53b010221ab1 100644 (file)
@@ -647,8 +647,8 @@ pub fn ty_sig<'tcx>(cx: &LateContext<'tcx>, ty: Ty<'tcx>) -> Option<ExprFnSig<'t
             Some(ExprFnSig::Closure(decl, subs.as_closure().sig()))
         },
         ty::FnDef(id, subs) => Some(ExprFnSig::Sig(cx.tcx.bound_fn_sig(id).subst(cx.tcx, subs), Some(id))),
-        ty::Alias(ty::Opaque, ty::AliasTy { def_id, .. }) => {
-            sig_from_bounds(cx, ty, cx.tcx.item_bounds(def_id), cx.tcx.opt_parent(def_id))
+        ty::Alias(ty::Opaque, ty::AliasTy { def_id, substs, .. }) => {
+            sig_from_bounds(cx, ty, cx.tcx.item_bounds(def_id).subst(cx.tcx, substs), cx.tcx.opt_parent(def_id))
         },
         ty::FnPtr(sig) => Some(ExprFnSig::Sig(sig, None)),
         ty::Dynamic(bounds, _, _) => {
index a5f5eb447da6ccdde644a47e48c4ef2705986ec4..3676f69b100db0136b8701413a1e15513a634399 100644 (file)
@@ -123,6 +123,7 @@ pub enum FailMode {
 pub enum CompareMode {
     Polonius,
     Chalk,
+    NextSolver,
     SplitDwarf,
     SplitDwarfSingle,
 }
@@ -132,6 +133,7 @@ pub(crate) fn to_str(&self) -> &'static str {
         match *self {
             CompareMode::Polonius => "polonius",
             CompareMode::Chalk => "chalk",
+            CompareMode::NextSolver => "next-solver",
             CompareMode::SplitDwarf => "split-dwarf",
             CompareMode::SplitDwarfSingle => "split-dwarf-single",
         }
@@ -141,6 +143,7 @@ pub fn parse(s: String) -> CompareMode {
         match s.as_str() {
             "polonius" => CompareMode::Polonius,
             "chalk" => CompareMode::Chalk,
+            "next-solver" => CompareMode::NextSolver,
             "split-dwarf" => CompareMode::SplitDwarf,
             "split-dwarf-single" => CompareMode::SplitDwarfSingle,
             x => panic!("unknown --compare-mode option: {}", x),
index c5767a795382e41a7af9413e8e30adc609b2c3ee..dc30e4bb1bef793171358affe8acb3d156dd0c43 100644 (file)
@@ -162,6 +162,9 @@ pub struct TestProps {
     pub stderr_per_bitwidth: bool,
     // The MIR opt to unit test, if any
     pub mir_unit_test: Option<String>,
+    // Whether to tell `rustc` to remap the "src base" directory to a fake
+    // directory.
+    pub remap_src_base: bool,
 }
 
 mod directives {
@@ -196,6 +199,7 @@ mod directives {
     pub const INCREMENTAL: &'static str = "incremental";
     pub const KNOWN_BUG: &'static str = "known-bug";
     pub const MIR_UNIT_TEST: &'static str = "unit-test";
+    pub const REMAP_SRC_BASE: &'static str = "remap-src-base";
     // This isn't a real directive, just one that is probably mistyped often
     pub const INCORRECT_COMPILER_FLAGS: &'static str = "compiler-flags";
 }
@@ -241,6 +245,7 @@ pub fn new() -> Self {
             should_ice: false,
             stderr_per_bitwidth: false,
             mir_unit_test: None,
+            remap_src_base: false,
         }
     }
 
@@ -273,6 +278,9 @@ pub fn from_file(testfile: &Path, cfg: Option<&str>, config: &Config) -> Self {
     /// `//[foo]`), then the property is ignored unless `cfg` is
     /// `Some("foo")`.
     fn load_from(&mut self, testfile: &Path, cfg: Option<&str>, config: &Config) {
+        // Mode-dependent defaults.
+        self.remap_src_base = config.mode == Mode::Ui && !config.suite.contains("rustdoc");
+
         let mut has_edition = false;
         if !testfile.is_dir() {
             let file = File::open(testfile).unwrap();
@@ -426,13 +434,19 @@ fn load_from(&mut self, testfile: &Path, cfg: Option<&str>, config: &Config) {
                         self.known_bug = true;
                     } else {
                         panic!(
-                            "Invalid known-bug value: {known_bug}\nIt requires comma-separated issue references (`#000` or `chalk#000`) or `unknown`."
+                            "Invalid known-bug value: {known_bug}\nIt requires comma-separated issue references (`#000` or `chalk#000`) or `known-bug: unknown`."
                         );
                     }
+                } else if config.parse_name_directive(ln, KNOWN_BUG) {
+                    panic!(
+                        "Invalid known-bug attribute, requires comma-separated issue references (`#000` or `chalk#000`) or `known-bug: unknown`."
+                    );
                 }
+
                 config.set_name_value_directive(ln, MIR_UNIT_TEST, &mut self.mir_unit_test, |s| {
                     s.trim().to_string()
                 });
+                config.set_name_directive(ln, REMAP_SRC_BASE, &mut self.remap_src_base);
             });
         }
 
@@ -696,6 +710,7 @@ fn parse_cfg_name_directive(&self, line: &str, prefix: &str) -> ParsedNameDirect
             match self.compare_mode {
                 Some(CompareMode::Polonius) => name == "compare-mode-polonius",
                 Some(CompareMode::Chalk) => name == "compare-mode-chalk",
+                Some(CompareMode::NextSolver) => name == "compare-mode-next-solver",
                 Some(CompareMode::SplitDwarf) => name == "compare-mode-split-dwarf",
                 Some(CompareMode::SplitDwarfSingle) => name == "compare-mode-split-dwarf-single",
                 None => false,
@@ -725,6 +740,10 @@ fn parse_name_directive(&self, line: &str, directive: &str) -> bool {
             && matches!(line.as_bytes().get(directive.len()), None | Some(&b' ') | Some(&b':'))
     }
 
+    fn parse_negative_name_directive(&self, line: &str, directive: &str) -> bool {
+        line.starts_with("no-") && self.parse_name_directive(&line[3..], directive)
+    }
+
     pub fn parse_name_value_directive(&self, line: &str, directive: &str) -> Option<String> {
         let colon = directive.len();
         if line.starts_with(directive) && line.as_bytes().get(colon) == Some(&b':') {
@@ -754,8 +773,17 @@ fn parse_edition(&self, line: &str) -> Option<String> {
     }
 
     fn set_name_directive(&self, line: &str, directive: &str, value: &mut bool) {
-        if !*value {
-            *value = self.parse_name_directive(line, directive)
+        match value {
+            true => {
+                if self.parse_negative_name_directive(line, directive) {
+                    *value = false;
+                }
+            }
+            false => {
+                if self.parse_name_directive(line, directive) {
+                    *value = true;
+                }
+            }
         }
     }
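
The compiletest hunks above make header directives reversible: `parse_negative_name_directive` recognizes a `no-` prefix, and `set_name_directive` now lets a directive turn a `true` default off as well as turn a `false` default on. Combined with the new mode-dependent `remap_src_base` default for UI tests, an individual test can opt back out with `no-remap-src-base`. A self-contained sketch of that parsing logic (free functions instead of methods on `Config`; the directive names in `main` are only examples):

// Standalone model of the directive handling added above; the real code
// lives as methods on `Config` in src/tools/compiletest/src/header.rs.
fn parse_name_directive(line: &str, directive: &str) -> bool {
    line.starts_with(directive)
        && matches!(line.as_bytes().get(directive.len()), None | Some(&b' ') | Some(&b':'))
}

fn parse_negative_name_directive(line: &str, directive: &str) -> bool {
    line.starts_with("no-") && parse_name_directive(&line[3..], directive)
}

// A `true` default can be switched off with `no-<directive>`, and a
// `false` default switched on with `<directive>`.
fn set_name_directive(line: &str, directive: &str, value: &mut bool) {
    if *value {
        if parse_negative_name_directive(line, directive) {
            *value = false;
        }
    } else if parse_name_directive(line, directive) {
        *value = true;
    }
}

fn main() {
    let mut remap_src_base = true; // mode-dependent default, as for UI tests above
    set_name_directive("no-remap-src-base", "remap-src-base", &mut remap_src_base);
    assert!(!remap_src_base);

    let mut incremental = false;
    set_name_directive("incremental", "incremental", &mut incremental);
    assert!(incremental);
}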
 
index 2aea30870ff5e6dc97ebf1543ebe6b5cb34fd34b..3092c656cd729ba787276b95d199b2afbe15fb8f 100644 (file)
@@ -775,7 +775,7 @@ fn make_test_name(
 ) -> test::TestName {
     // Print the name of the file, relative to the repository root.
     // `src_base` looks like `/path/to/rust/tests/ui`
-    let root_directory = config.src_base.parent().unwrap().parent().unwrap().parent().unwrap();
+    let root_directory = config.src_base.parent().unwrap().parent().unwrap();
     let path = testpaths.file.strip_prefix(root_directory).unwrap();
     let debugger = match config.debugger {
         Some(d) => format!("-{}", d),
index 7640e6517442881ca3089c9745b324e9860b1e29..a5dc6859732a3d2c5348fb8084e00e0afbd876a5 100644 (file)
@@ -110,9 +110,18 @@ fn extend(&mut self, data: &[u8], filter_paths_from_len: &[String]) {
     fn into_bytes(self) -> Vec<u8> {
         match self {
             ProcOutput::Full { bytes, .. } => bytes,
-            ProcOutput::Abbreviated { mut head, skipped, tail } => {
+            ProcOutput::Abbreviated { mut head, mut skipped, tail } => {
+                let mut tail = &*tail;
+
+                // Skip over '{' at the start of the tail, so we don't later wrongfully consider this as json.
+                // See <https://rust-lang.zulipchat.com/#narrow/stream/182449-t-compiler.2Fhelp/topic/Weird.20CI.20failure/near/321797811>
+                while tail.get(0) == Some(&b'{') {
+                    tail = &tail[1..];
+                    skipped += 1;
+                }
+
                 write!(&mut head, "\n\n<<<<<< SKIPPED {} BYTES >>>>>>\n\n", skipped).unwrap();
-                head.extend_from_slice(&tail);
+                head.extend_from_slice(tail);
                 head
             }
         }
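
The read_output change above trims any leading '{' bytes from the abbreviated tail (counting them as skipped), apparently so the text following the '<<<<<< SKIPPED ... >>>>>>' marker cannot start with '{' and later be mistaken for a JSON diagnostic line (per the linked Zulip thread). A tiny sketch of the same trimming with plain byte slices (values are illustrative):

// Drop leading '{' bytes from the tail and account for them in the
// skipped-byte count, mirroring the loop in ProcOutput::into_bytes above.
fn strip_leading_braces<'a>(mut tail: &'a [u8], skipped: &mut usize) -> &'a [u8] {
    while tail.first() == Some(&b'{') {
        tail = &tail[1..];
        *skipped += 1;
    }
    tail
}

fn main() {
    let mut skipped = 10; // bytes already dropped from the middle of the output
    let tail = strip_leading_braces(b"{{not-json", &mut skipped);
    assert_eq!(tail, &b"not-json"[..]);
    assert_eq!(skipped, 12);
}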
index a16ab11e2f9788b377edbae1511983c05b9d3b60..51c9a27c83d51cfbb0e7dac0541d66e5ea4b1977 100644 (file)
@@ -44,6 +44,8 @@
 #[cfg(test)]
 mod tests;
 
+const FAKE_SRC_BASE: &str = "fake-test-src-base";
+
 #[cfg(windows)]
 fn disable_error_reporting<F: FnOnce() -> R, R>(f: F) -> R {
     use std::sync::Mutex;
@@ -1328,12 +1330,19 @@ fn check_expected_errors(&self, expected_errors: Vec<errors::Error>, proc_res: &
             return;
         }
 
+        // On Windows, translate all '\' path separators to '/'
+        let file_name = format!("{}", self.testpaths.file.display()).replace(r"\", "/");
+
         // On Windows, keep all '\' path separators to match the paths reported in the JSON output
         // from the compiler
-        let os_file_name = self.testpaths.file.display().to_string();
-
-        // on windows, translate all '\' path separators to '/'
-        let file_name = format!("{}", self.testpaths.file.display()).replace(r"\", "/");
+        let diagnostic_file_name = if self.props.remap_src_base {
+            let mut p = PathBuf::from(FAKE_SRC_BASE);
+            p.push(&self.testpaths.relative_dir);
+            p.push(self.testpaths.file.file_name().unwrap());
+            p.display().to_string()
+        } else {
+            self.testpaths.file.display().to_string()
+        };
 
         // If the testcase being checked contains at least one expected "help"
         // message, then we'll ensure that all "help" messages are expected.
@@ -1343,7 +1352,7 @@ fn check_expected_errors(&self, expected_errors: Vec<errors::Error>, proc_res: &
         let expect_note = expected_errors.iter().any(|ee| ee.kind == Some(ErrorKind::Note));
 
         // Parse the JSON output from the compiler and extract out the messages.
-        let actual_errors = json::parse_output(&os_file_name, &proc_res.stderr, proc_res);
+        let actual_errors = json::parse_output(&diagnostic_file_name, &proc_res.stderr, proc_res);
         let mut unexpected = Vec::new();
         let mut found = vec![false; expected_errors.len()];
         for actual_error in &actual_errors {
@@ -1970,6 +1979,14 @@ fn make_compile_args(
             }
         }
 
+        if self.props.remap_src_base {
+            rustc.arg(format!(
+                "--remap-path-prefix={}={}",
+                self.config.src_base.display(),
+                FAKE_SRC_BASE,
+            ));
+        }
+
         match emit {
             Emit::None => {}
             Emit::Metadata if is_rustdoc => {}
@@ -2013,6 +2030,9 @@ fn make_compile_args(
             Some(CompareMode::Chalk) => {
                 rustc.args(&["-Ztrait-solver=chalk"]);
             }
+            Some(CompareMode::NextSolver) => {
+                rustc.args(&["-Ztrait-solver=next"]);
+            }
             Some(CompareMode::SplitDwarf) if self.config.target.contains("windows") => {
                 rustc.args(&["-Csplit-debuginfo=unpacked", "-Zunstable-options"]);
             }
@@ -3545,6 +3565,14 @@ fn normalize_output(&self, output: &str, custom_rules: &[(String, String)]) -> S
         let parent_dir = self.testpaths.file.parent().unwrap();
         normalize_path(parent_dir, "$DIR");
 
+        if self.props.remap_src_base {
+            let mut remapped_parent_dir = PathBuf::from(FAKE_SRC_BASE);
+            if self.testpaths.relative_dir != Path::new("") {
+                remapped_parent_dir.push(&self.testpaths.relative_dir);
+            }
+            normalize_path(&remapped_parent_dir, "$DIR");
+        }
+
         let source_bases = &[
             // Source base on the current filesystem (calculated as parent of `tests/$suite`):
             Some(self.config.src_base.parent().unwrap().parent().unwrap().into()),
index 6c63b760ff6a9de167c26c783851beee3dbc2981..ff7e8df987816ec29614a59a9450ba3bf8dd3ad7 100644 (file)
@@ -23,6 +23,7 @@
     "x86_64-linux-android",
     "x86_64-unknown-freebsd",
     "x86_64-unknown-linux-gnu",
+    "s390x-unknown-linux-gnu",
 ];
 
 // FIXME(rcvalle): More targets are likely supported.
     "aarch64-unknown-linux-gnu",
     "x86_64-apple-darwin",
     "x86_64-unknown-linux-gnu",
+    "s390x-unknown-linux-gnu",
 ];
 
-pub const MSAN_SUPPORTED_TARGETS: &[&str] =
-    &["aarch64-unknown-linux-gnu", "x86_64-unknown-freebsd", "x86_64-unknown-linux-gnu"];
+pub const MSAN_SUPPORTED_TARGETS: &[&str] = &[
+    "aarch64-unknown-linux-gnu",
+    "x86_64-unknown-freebsd",
+    "x86_64-unknown-linux-gnu",
+    "s390x-unknown-linux-gnu",
+];
 
 pub const TSAN_SUPPORTED_TARGETS: &[&str] = &[
     "aarch64-apple-darwin",
@@ -61,6 +67,7 @@
     "x86_64-apple-darwin",
     "x86_64-unknown-freebsd",
     "x86_64-unknown-linux-gnu",
+    "s390x-unknown-linux-gnu",
 ];
 
 pub const HWASAN_SUPPORTED_TARGETS: &[&str] =
index 6a147de3be2eac3acd613f75eecc4d41c90e49c9..c0267956aab4ab714da49b422f5a8fb1eaa4e972 100644 (file)
@@ -56,12 +56,12 @@ fn config(&mut self, config: &mut Config) {
 
     fn after_analysis<'tcx>(
         &mut self,
-        compiler: &rustc_interface::interface::Compiler,
+        _: &rustc_interface::interface::Compiler,
         queries: &'tcx rustc_interface::Queries<'tcx>,
     ) -> Compilation {
-        compiler.session().abort_if_errors();
-
         queries.global_ctxt().unwrap().enter(|tcx| {
+            tcx.sess.abort_if_errors();
+
             init_late_loggers(tcx);
             if !tcx.sess.crate_types().contains(&CrateType::Executable) {
                 tcx.sess.fatal("miri only makes sense on bin crates");
@@ -75,7 +75,7 @@ fn after_analysis<'tcx>(
             let mut config = self.miri_config.clone();
 
             // Add filename to `miri` arguments.
-            config.args.insert(0, compiler.input().filestem().to_string());
+            config.args.insert(0, tcx.sess.io.input.filestem().to_string());
 
             // Adjust working directory for interpretation.
             if let Some(cwd) = env::var_os("MIRI_CWD") {
@@ -87,10 +87,9 @@ fn after_analysis<'tcx>(
                     i32::try_from(return_code).expect("Return value was too large!"),
                 );
             }
+            tcx.sess.abort_if_errors();
         });
 
-        compiler.session().abort_if_errors();
-
         Compilation::Stop
     }
 }
index f1838cf64f7feefbf5d5b0a01f7e8c5e3fe7fd9a..d6d19a3fe8159ccc66ceb1ad7cf44e9ba9da742b 100644 (file)
@@ -22,7 +22,8 @@ fn main() {
     }
 
     // test `stat`
-    assert_eq!(fs::metadata("foo.txt").unwrap_err().kind(), ErrorKind::PermissionDenied);
+    let err = fs::metadata("foo.txt").unwrap_err();
+    assert_eq!(err.kind(), ErrorKind::PermissionDenied);
     // check that it is the right kind of `PermissionDenied`
-    assert_eq!(Error::last_os_error().raw_os_error(), Some(libc::EACCES));
+    assert_eq!(err.raw_os_error(), Some(libc::EACCES));
 }
index 3a6655e2ba69f5bfa5daa1394721b3011cfac622..30a28bc5803ddf41f0479606e9e94ca99d06d6c3 100644 (file)
@@ -162,6 +162,11 @@ fn reverse() {
     assert!(v[0].0 == 49);
 }
 
+fn miri_issue_2759() {
+    let mut input = "1".to_string();
+    input.replace_range(0..0, "0");
+}
+
 fn main() {
     assert_eq!(vec_reallocate().len(), 5);
 
@@ -191,4 +196,5 @@ fn main() {
     swap();
     swap_remove();
     reverse();
+    miri_issue_2759();
 }
index 19812fc6f55b630f9e557d2216e0a45f548a37c6..cdf1dd366046c56c92d285239cef4ecc9101e251 100644 (file)
@@ -12,6 +12,7 @@ miropt-test-tools = { path = "../miropt-test-tools" }
 lazy_static = "1"
 walkdir = "2"
 ignore = "0.4.18"
+semver = "1.0"
 termcolor = "1.1.3"
 
 [[bin]]
index bc9fd35ecde3786c2db4092104dce15e25d69290..5b84b51a035d4141ccd80a1d3cafd6522182fd5a 100644 (file)
@@ -27,8 +27,7 @@
 const ERROR_TESTS_PATH: &str = "tests/ui/error-codes/";
 
 // Error codes that (for some reason) can't have a doctest in their explanation. Error codes are still expected to provide a code example, even if untested.
-const IGNORE_DOCTEST_CHECK: &[&str] =
-    &["E0208", "E0464", "E0570", "E0601", "E0602", "E0640", "E0717"];
+const IGNORE_DOCTEST_CHECK: &[&str] = &["E0464", "E0570", "E0601", "E0602", "E0640", "E0717"];
 
 // Error codes that don't yet have a UI test. This list will eventually be removed.
 const IGNORE_UI_TEST_CHECK: &[&str] =
index 40375f1306d629700f41b8e81369de264d843636..35000320d1abfcbe24610b55ab4a5df2f5f82a76 100644 (file)
@@ -62,6 +62,7 @@ fn tidy_error(bad: &mut bool, args: impl Display) -> std::io::Result<()> {
 pub mod mir_opt_tests;
 pub mod pal;
 pub mod primitive_docs;
+pub mod rustdoc_gui_tests;
 pub mod style;
 pub mod target_specific_tests;
 pub mod tests_placement;
@@ -69,3 +70,4 @@ fn tidy_error(bad: &mut bool, args: impl Display) -> std::io::Result<()> {
 pub mod unit_tests;
 pub mod unstable_book;
 pub mod walk;
+pub mod x_version;
index ea2886a3c2f8fda013abaf1c581a466939f36694..505f9d724c8d3d450eae9115f271c6726106c497 100644 (file)
@@ -60,7 +60,7 @@ macro_rules! check {
 
                 let handle = s.spawn(|| {
                     let mut flag = false;
-                    $p::check($($args),* , &mut flag);
+                    $p::check($($args, )* &mut flag);
                     if (flag) {
                         bad.store(true, Ordering::Relaxed);
                     }
@@ -80,6 +80,7 @@ macro_rules! check {
         check!(debug_artifacts, &tests_path);
         check!(ui_tests, &tests_path);
         check!(mir_opt_tests, &tests_path, bless);
+        check!(rustdoc_gui_tests, &tests_path);
 
         // Checks that only make sense for the compiler.
         check!(error_codes, &root_path, &[&compiler_path, &librustdoc_path], verbose);
@@ -113,6 +114,8 @@ macro_rules! check {
         check!(alphabetical, &compiler_path);
         check!(alphabetical, &library_path);
 
+        check!(x_version, &root_path, &cargo);
+
         let collected = {
             drain_handles(&mut handles);
 
diff --git a/src/tools/tidy/src/rustdoc_gui_tests.rs b/src/tools/tidy/src/rustdoc_gui_tests.rs
new file mode 100644 (file)
index 0000000..feb513d
--- /dev/null
@@ -0,0 +1,33 @@
+//! Tidy check to ensure that rustdoc GUI tests start with a small description.
+
+use std::path::Path;
+
+pub fn check(path: &Path, bad: &mut bool) {
+    crate::walk::walk(
+        &path.join("rustdoc-gui"),
+        &mut |p| {
+            // If there is no extension, it's very likely a folder and we want to go into it.
+            p.extension().map(|e| e != "goml").unwrap_or(false)
+        },
+        &mut |entry, content| {
+            for line in content.lines() {
+                if !line.starts_with("// ") {
+                    tidy_error!(
+                        bad,
+                        "{}: rustdoc-gui tests must start with a small description",
+                        entry.path().display(),
+                    );
+                    return;
+                } else if line.starts_with("// ") {
+                    let parts = line[2..].trim();
+                    // We ignore tidy comments.
+                    if parts.starts_with("// tidy-") {
+                        continue;
+                    }
+                    // All good!
+                    return;
+                }
+            }
+        },
+    );
+}
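
The new rustdoc_gui_tests.rs check above walks tests/rustdoc-gui and requires every .goml file to open with a `// `-prefixed description, while skipping tidy directive comments. A simplified standalone model of the intended per-file rule (the file contents in `main` are made up for illustration, and the tidy-comment handling is condensed relative to the literal code above):

// The first non-tidy comment line must be a `// ` description; otherwise flag the file.
fn has_description(content: &str) -> bool {
    for line in content.lines() {
        if !line.starts_with("// ") {
            return false; // the file does not start with a comment at all
        }
        let rest = line[3..].trim();
        if rest.starts_with("tidy-") {
            continue; // ignore tidy directives and keep looking
        }
        return true; // found the opening description
    }
    false
}

fn main() {
    assert!(has_description("// Checks that the sidebar can be collapsed.\ngoto: ..."));
    assert!(!has_description("goto: \"file://\" + |DOC_PATH| + \"/index.html\""));
}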
index 5c4ba86936456f683e6dd5386be061cba7ec57f2..6a0855405ec90bb491547aa8e34a9bc6b4755e2d 100644 (file)
@@ -45,6 +45,9 @@
 when executed when assertions are disabled.
 Use llvm::report_fatal_error for increased robustness.";
 
+const DOUBLE_SPACE_AFTER_DOT: &str = r"\
+Use a single space after dots in comments.";
+
 const ANNOTATIONS_TO_IGNORE: &[&str] = &[
     "// @!has",
     "// @has",
@@ -279,6 +282,10 @@ fn skip(path: &Path) -> bool {
         if filename.contains("ignore-tidy") {
             return;
         }
+        // apfloat shouldn't be changed because of license problems
+        if is_in(file, "compiler", "rustc_apfloat") {
+            return;
+        }
         let mut skip_cr = contains_ignore_directive(can_contain, &contents, "cr");
         let mut skip_undocumented_unsafe =
             contains_ignore_directive(can_contain, &contents, "undocumented-unsafe");
@@ -405,6 +412,19 @@ fn skip(path: &Path) -> bool {
             if filename.ends_with(".cpp") && line.contains("llvm_unreachable") {
                 err(LLVM_UNREACHABLE_INFO);
             }
+
+            // For now only enforce in compiler
+            let is_compiler = || file.components().any(|c| c.as_os_str() == "compiler");
+            if is_compiler()
+                && line.contains("//")
+                && line
+                    .chars()
+                    .collect::<Vec<_>>()
+                    .windows(4)
+                    .any(|cs| matches!(cs, ['.', ' ', ' ', last] if last.is_alphabetic()))
+            {
+                err(DOUBLE_SPACE_AFTER_DOT)
+            }
         }
         if leading_new_lines {
             let mut err = |_| {
diff --git a/src/tools/tidy/src/x_version.rs b/src/tools/tidy/src/x_version.rs
new file mode 100644 (file)
index 0000000..c470d50
--- /dev/null
@@ -0,0 +1,68 @@
+use semver::Version;
+use std::path::Path;
+use std::process::{Command, Stdio};
+
+pub fn check(root: &Path, cargo: &Path, bad: &mut bool) {
+    let cargo_list = Command::new(cargo).args(["install", "--list"]).stdout(Stdio::piped()).spawn();
+
+    let child = match cargo_list {
+        Ok(child) => child,
+        Err(e) => return tidy_error!(bad, "failed to run `cargo`: {}", e),
+    };
+
+    let cargo_list = child.wait_with_output().unwrap();
+
+    if cargo_list.status.success() {
+        let exe_list = String::from_utf8_lossy(&cargo_list.stdout);
+        let exe_list = exe_list.lines();
+
+        let mut installed: Option<Version> = None;
+
+        for line in exe_list {
+            let mut iter = line.split_whitespace();
+            if iter.next() == Some("x") {
+                if let Some(version) = iter.next() {
+                    // Check this is the rust-lang/rust x tool installation since it should be
+                    // installed at a path containing `src/tools/x`.
+                    if let Some(path) = iter.next() {
+                        if path.contains(&"src/tools/x") {
+                            let version = version.strip_prefix("v").unwrap();
+                            installed = Some(Version::parse(version).unwrap());
+                            break;
+                        }
+                    };
+                }
+            } else {
+                continue;
+            }
+        }
+        // Unwrap the some if x is installed, otherwise return because it's fine if x isn't installed.
+        let installed = if let Some(i) = installed { i } else { return };
+
+        if let Some(expected) = get_x_wrapper_version(root, cargo) {
+            if installed < expected {
+                return println!(
+                    "Current version of x is {installed}, but the latest version is {expected}\nConsider updating to the newer version of x by running `cargo install --path src/tools/x`"
+                );
+            }
+        } else {
+            return tidy_error!(
+                bad,
+                "Unable to parse the latest version of `x` at `src/tools/x/Cargo.toml`"
+            );
+        }
+    } else {
+        return tidy_error!(bad, "failed to check version of `x`: {}", cargo_list.status);
+    }
+}
+
+// Parse latest version out of `x` Cargo.toml
+fn get_x_wrapper_version(root: &Path, cargo: &Path) -> Option<Version> {
+    let mut cmd = cargo_metadata::MetadataCommand::new();
+    cmd.cargo_path(cargo)
+        .manifest_path(root.join("src/tools/x/Cargo.toml"))
+        .no_deps()
+        .features(cargo_metadata::CargoOpt::AllFeatures);
+    let mut metadata = t!(cmd.exec());
+    metadata.packages.pop().map(|x| x.version)
+}
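
The new x_version.rs check above parses the installed `x` wrapper version out of `cargo install --list` and compares it against the version declared in src/tools/x/Cargo.toml, using the `semver = "1.0"` dependency added to tidy's Cargo.toml earlier in this commit. A minimal example of that comparison on its own (version strings are illustrative; it needs the same `semver` dependency):

use semver::Version;

fn main() {
    // `cargo install --list` prints lines like `x v0.1.0:`; the check strips
    // the leading `v` before parsing with the semver crate.
    let installed = Version::parse("0.1.0").unwrap();
    let expected = Version::parse("0.1.1").unwrap();

    if installed < expected {
        println!("Current version of x is {installed}, but the latest version is {expected}");
        println!("Consider updating with `cargo install --path src/tools/x`");
    }
}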
index ee2f4ca913048744aab4763a96594995e832c29a..49349856550b67391da4db65f04579adf8044003 100644 (file)
@@ -1 +1 @@
-1.68.0
+1.69.0
index 04b5de83423709c09a5980b1ec645712715956bb..620a3da94636e80b010ba0fe6dd4d8f897fe9574 100644 (file)
@@ -1,5 +1,5 @@
 // assembly-output: emit-asm
-// min-llvm-version: 14.0
+// min-llvm-version: 15.0
 // only-x86_64
 // revisions: opt-speed opt-size
 // [opt-speed] compile-flags: -Copt-level=1
index dfc312279083d6fe576440b4e6fa7bb796651ec0..3c2d4e719d423f9a309064cb688d0419f9b25a59 100644 (file)
@@ -3,7 +3,7 @@
 // of the sysv64 abi.
 //
 // needs-llvm-components: x86
-// compile-flags: -C no-prepopulate-passes --target=x86_64-unknown-linux-gnu
+// compile-flags: -C no-prepopulate-passes --target=x86_64-unknown-linux-gnu -Copt-level=0
 
 #![crate_type = "lib"]
 #![no_core]
index d612f603e4fea0d27e64b0e4f17b914f565a195d..928ad5a9bbd6376d493b902cbc6dabdb1c0ff909 100644 (file)
@@ -3,7 +3,7 @@
 // of the x86-interrupt abi.
 
 // needs-llvm-components: x86
-// compile-flags: -C no-prepopulate-passes --target=x86_64-unknown-linux-gnu
+// compile-flags: -C no-prepopulate-passes --target=x86_64-unknown-linux-gnu -Copt-level=0
 
 #![crate_type = "lib"]
 #![no_core]
index 39880c9341f4f96aea9585835053c12f00f93775..6d22475175270570f022bd0816bf3d18dd594007 100644 (file)
@@ -1,4 +1,4 @@
-// compile-flags: -C no-prepopulate-passes
+// compile-flags: -C no-prepopulate-passes -Copt-level=0
 
 #![crate_type = "lib"]
 
index 7b5ae894311eff15f8d2e9184a23fbe426a230ac..b0c88f76c436dc9067dad98b7ebad22902b4983a 100644 (file)
@@ -31,4 +31,4 @@ pub fn box_uninitialized() -> Box<MaybeUninit<usize>> {
 // Hide the LLVM 15+ `allocalign` attribute in the declaration of __rust_alloc
 // from the CHECK-NOT above. We don't check the attributes here because we can't rely
 // on all of them being set until LLVM 15.
-// CHECK: declare noalias{{.*}} @__rust_alloc(i{{[0-9]+}}, i{{[0-9]+.*}})
+// CHECK: declare noalias{{.*}} @__rust_alloc(i{{[0-9]+}} noundef, i{{[0-9]+.*}} noundef)
index c82b56a71f5cfde9d3c9a90f2143841536110119..2f88966996ab2e25efb1adfcb8d03420d3cf5e5d 100644 (file)
@@ -28,6 +28,6 @@ pub fn box_uninitialized() -> Box<MaybeUninit<usize>> {
 
 // Hide the `allocalign` attribute in the declaration of __rust_alloc
 // from the CHECK-NOT above, and also verify the attributes got set reasonably.
-// CHECK: declare noalias ptr @__rust_alloc(i{{[0-9]+}}, i{{[0-9]+}} allocalign) unnamed_addr [[RUST_ALLOC_ATTRS:#[0-9]+]]
+// CHECK: declare noalias noundef ptr @__rust_alloc(i{{[0-9]+}} noundef, i{{[0-9]+}} allocalign noundef) unnamed_addr [[RUST_ALLOC_ATTRS:#[0-9]+]]
 
 // CHECK-DAG: attributes [[RUST_ALLOC_ATTRS]] = { {{.*}} allockind("alloc,uninitialized,aligned") allocsize(0) uwtable "alloc-family"="__rust_alloc" {{.*}} }
index a5be56c47be81e131c5ec114531dbc4f56f76804..cab32652210d0a08188e870c3c893267fa29191f 100644 (file)
@@ -1,5 +1,5 @@
 // ignore-wasm32-bare compiled with panic=abort by default
-// compile-flags: -C no-prepopulate-passes
+// compile-flags: -C no-prepopulate-passes -Copt-level=0
 //
 
 #![crate_type = "lib"]
index 998099c23909855e163986cbd410adc5828307c7..cb8abae198ee65b9315f9d1a1d827605f8512400 100644 (file)
@@ -1,4 +1,4 @@
-// compile-flags: -C no-prepopulate-passes
+// compile-flags: -C no-prepopulate-passes -Copt-level=0
 
 // ignore-riscv64
 
index 5cf6c3ac0a2333f55db87a00349ae93ae0ee9fef..683a2bd4fbb5a8abb5f68bd1cb944dc60f45c43d 100644 (file)
@@ -13,7 +13,7 @@
 pub struct Foo(u16);
 
 // CHECK-LABEL: @check_lt
-// CHECK-SAME: (i16 %[[A:.+]], i16 %[[B:.+]])
+// CHECK-SAME: (i16 noundef %[[A:.+]], i16 noundef %[[B:.+]])
 #[no_mangle]
 pub fn check_lt(a: Foo, b: Foo) -> bool {
     // CHECK: %[[R:.+]] = icmp ult i16 %[[A]], %[[B]]
@@ -22,7 +22,7 @@ pub fn check_lt(a: Foo, b: Foo) -> bool {
 }
 
 // CHECK-LABEL: @check_le
-// CHECK-SAME: (i16 %[[A:.+]], i16 %[[B:.+]])
+// CHECK-SAME: (i16 noundef %[[A:.+]], i16 noundef %[[B:.+]])
 #[no_mangle]
 pub fn check_le(a: Foo, b: Foo) -> bool {
     // CHECK: %[[R:.+]] = icmp ule i16 %[[A]], %[[B]]
@@ -31,7 +31,7 @@ pub fn check_le(a: Foo, b: Foo) -> bool {
 }
 
 // CHECK-LABEL: @check_gt
-// CHECK-SAME: (i16 %[[A:.+]], i16 %[[B:.+]])
+// CHECK-SAME: (i16 noundef %[[A:.+]], i16 noundef %[[B:.+]])
 #[no_mangle]
 pub fn check_gt(a: Foo, b: Foo) -> bool {
     // CHECK: %[[R:.+]] = icmp ugt i16 %[[A]], %[[B]]
@@ -40,7 +40,7 @@ pub fn check_gt(a: Foo, b: Foo) -> bool {
 }
 
 // CHECK-LABEL: @check_ge
-// CHECK-SAME: (i16 %[[A:.+]], i16 %[[B:.+]])
+// CHECK-SAME: (i16 noundef %[[A:.+]], i16 noundef %[[B:.+]])
 #[no_mangle]
 pub fn check_ge(a: Foo, b: Foo) -> bool {
     // CHECK: %[[R:.+]] = icmp uge i16 %[[A]], %[[B]]
index ab599992ffd79d70de76b07846caf2c999cc7567..383940e95906d1c6dad3db944243ae674e98cb35 100644 (file)
@@ -1,4 +1,4 @@
-// This test is for *-windows-msvc only.
+ // This test is for *-windows-msvc only.
 // only-windows
 // ignore-gnu
 
 // CHECK: @static_global1 = external local_unnamed_addr global i32
 // CHECK: @static_global2 = external local_unnamed_addr global i32
 
-// CHECK: declare dllimport i32 @dylib_func1(i32)
-// CHECK: declare dllimport i32 @dylib_func2(i32)
-// CHECK: declare i32 @static_func1(i32)
-// CHECK: declare i32 @static_func2(i32)
+// CHECK: declare dllimport noundef i32 @dylib_func1(i32 noundef)
+// CHECK: declare dllimport noundef i32 @dylib_func2(i32 noundef)
+// CHECK: declare noundef i32 @static_func1(i32 noundef)
+// CHECK: declare noundef i32 @static_func2(i32 noundef)
 
 #[link(name = "dummy", kind="dylib")]
 extern "C" {
index 827eb20154afd0195cc1da957b67aa483444f56b..5f8063a27f7ac7355927906e2756a2ea2ae96fd8 100644 (file)
@@ -11,7 +11,7 @@ pub enum Enum0 {
     B,
 }
 
-// CHECK: define i8 @match0{{.*}}
+// CHECK: define noundef i8 @match0{{.*}}
 // CHECK-NEXT: start:
 // CHECK-NEXT: %1 = icmp eq i8 %0, 2
 // CHECK-NEXT: %2 = and i8 %0, 1
@@ -32,7 +32,7 @@ pub enum Enum1 {
     C,
 }
 
-// CHECK: define i8 @match1{{.*}}
+// CHECK: define noundef i8 @match1{{.*}}
 // CHECK-NEXT: start:
 // CHECK-NEXT: [[DISCR:%.*]] = {{.*}}call i8 @llvm.usub.sat.i8(i8 %0, i8 1)
 // CHECK-NEXT: switch i8 [[DISCR]], label {{.*}} [
@@ -88,7 +88,7 @@ pub enum Enum2 {
     E,
 }
 
-// CHECK: define i8 @match2{{.*}}
+// CHECK: define noundef i8 @match2{{.*}}
 // CHECK-NEXT: start:
 // CHECK-NEXT: %1 = add i8 %0, 2
 // CHECK-NEXT: %2 = zext i8 %1 to i64
index d426ade28dd12696dd41073ef60b46ff526f8793..02f5d545910e1f7f2e12dc2e150a71ff62bb3771 100644 (file)
@@ -15,27 +15,27 @@ trait Sized {}
 trait Copy {}
 
 pub mod tests {
-    // CHECK: @f1(i32 inreg %_1, i32 inreg %_2, i32 %_3)
+    // CHECK: @f1(i32 inreg noundef %_1, i32 inreg noundef %_2, i32 noundef %_3)
     #[no_mangle]
     pub extern "fastcall" fn f1(_: i32, _: i32, _: i32) {}
 
-    // CHECK: @f2({{i32\*|ptr}} inreg %_1, {{i32\*|ptr}} inreg %_2, {{i32\*|ptr}} %_3)
+    // CHECK: @f2({{i32\*|ptr}} inreg noundef %_1, {{i32\*|ptr}} inreg noundef %_2, {{i32\*|ptr}} noundef %_3)
     #[no_mangle]
     pub extern "fastcall" fn f2(_: *const i32, _: *const i32, _: *const i32) {}
 
-    // CHECK: @f3(float %_1, i32 inreg %_2, i32 inreg %_3, i32 %_4)
+    // CHECK: @f3(float noundef %_1, i32 inreg noundef %_2, i32 inreg noundef %_3, i32 noundef %_4)
     #[no_mangle]
     pub extern "fastcall" fn f3(_: f32, _: i32, _: i32, _: i32) {}
 
-    // CHECK: @f4(i32 inreg %_1, float %_2, i32 inreg %_3, i32 %_4)
+    // CHECK: @f4(i32 inreg noundef %_1, float noundef %_2, i32 inreg noundef %_3, i32 noundef %_4)
     #[no_mangle]
     pub extern "fastcall" fn f4(_: i32, _: f32, _: i32, _: i32) {}
 
-    // CHECK: @f5(i64 %_1, i32 %_2)
+    // CHECK: @f5(i64 noundef %_1, i32 noundef %_2)
     #[no_mangle]
     pub extern "fastcall" fn f5(_: i64, _: i32) {}
 
-    // CHECK: @f6(i1 inreg noundef zeroext %_1, i32 inreg %_2, i32 %_3)
+    // CHECK: @f6(i1 inreg noundef zeroext %_1, i32 inreg noundef %_2, i32 noundef %_3)
     #[no_mangle]
     pub extern "fastcall" fn f6(_: bool, _: i32, _: i32) {}
 }
index 7307e0379dfa042eb02b7484b556f2393b61c590..ac8cba06b48f7616f3450e5b6e2630c2ed84bfd2 100644 (file)
@@ -7,11 +7,11 @@
 
 #[no_mangle]
 pub fn sum(x: u32, y: u32) -> u32 {
-// YES-LABEL: define{{.*}}i32 @sum(i32 %0, i32 %1)
+// YES-LABEL: define{{.*}}i32 @sum(i32 noundef %0, i32 noundef %1)
 // YES-NEXT:    %3 = add i32 %1, %0
 // YES-NEXT:    ret i32 %3
 
-// NO-LABEL: define{{.*}}i32 @sum(i32 %x, i32 %y)
+// NO-LABEL: define{{.*}}i32 @sum(i32 noundef %x, i32 noundef %y)
 // NO-NEXT:  start:
 // NO-NEXT:    %z = add i32 %y, %x
 // NO-NEXT:    ret i32 %z
index f7c02d47939fed4314f87dde85ba35526761d7da..d8933262e528e98cc4522726d43b1fff87176257 100644 (file)
@@ -1,4 +1,4 @@
-// compile-flags: --crate-type=rlib
+// compile-flags: --crate-type=rlib -Copt-level=0
 // revisions: aarch64-apple aarch64-linux force x64-apple x64-linux
 // [aarch64-apple] needs-llvm-components: aarch64
 // [aarch64-apple] compile-flags: --target=aarch64-apple-darwin
index 0f9e90f6ba77990d2c265057699170a5cd372dbe..1f979d7b90a70b08b7a5edd95028777218481772 100644 (file)
@@ -1,11 +1,11 @@
 // compile-flags: -O -C no-prepopulate-passes
 
 #![crate_type = "lib"]
-#![feature(rustc_attrs)]
 
 use std::mem::MaybeUninit;
 use std::num::NonZeroU64;
 use std::marker::PhantomPinned;
+use std::ptr::NonNull;
 
 pub struct S {
   _field: [i32; 8],
@@ -61,7 +61,7 @@ pub fn maybeuninit_char(x: MaybeUninit<char>) -> MaybeUninit<char> {
   x
 }
 
-// CHECK: i64 @int(i64 %x)
+// CHECK: noundef i64 @int(i64 noundef %x)
 #[no_mangle]
 pub fn int(x: u64) -> u64 {
   x
@@ -73,7 +73,7 @@ pub fn nonzero_int(x: NonZeroU64) -> NonZeroU64 {
   x
 }
 
-// CHECK: i64 @option_nonzero_int(i64 %x)
+// CHECK: noundef i64 @option_nonzero_int(i64 noundef %x)
 #[no_mangle]
 pub fn option_nonzero_int(x: Option<NonZeroU64>) -> Option<NonZeroU64> {
   x
@@ -138,11 +138,27 @@ pub fn indirect_struct(_: S) {
 pub fn borrowed_struct(_: &S) {
 }
 
-// CHECK: @raw_struct({{%S\*|ptr}} %_1)
+// CHECK: @option_borrow({{i32\*|ptr}} noalias noundef readonly align 4 dereferenceable_or_null(4) %x)
+#[no_mangle]
+pub fn option_borrow(x: Option<&i32>) {
+}
+
+// CHECK: @option_borrow_mut({{i32\*|ptr}} noalias noundef align 4 dereferenceable_or_null(4) %x)
+#[no_mangle]
+pub fn option_borrow_mut(x: Option<&mut i32>) {
+}
+
+// CHECK: @raw_struct({{%S\*|ptr}} noundef %_1)
 #[no_mangle]
 pub fn raw_struct(_: *const S) {
 }
 
+// CHECK: @raw_option_nonnull_struct({{i32\*|ptr}} noundef %_1)
+#[no_mangle]
+pub fn raw_option_nonnull_struct(_: Option<NonNull<S>>) {
+}
+
+
 // `Box` can get deallocated during execution of the function, so it should
 // not get `dereferenceable`.
 // CHECK: noundef nonnull align 4 {{i32\*|ptr}} @_box({{i32\*|ptr}} noalias noundef nonnull align 4 %x)
@@ -160,35 +176,35 @@ pub fn struct_return() -> S {
 }
 
 // Hack to get the correct size for the length part in slices
-// CHECK: @helper([[USIZE:i[0-9]+]] %_1)
+// CHECK: @helper([[USIZE:i[0-9]+]] noundef %_1)
 #[no_mangle]
 pub fn helper(_: usize) {
 }
 
-// CHECK: @slice({{\[0 x i8\]\*|ptr}} noalias noundef nonnull readonly align 1 %_1.0, [[USIZE]] %_1.1)
+// CHECK: @slice({{\[0 x i8\]\*|ptr}} noalias noundef nonnull readonly align 1 %_1.0, [[USIZE]] noundef %_1.1)
 // FIXME #25759 This should also have `nocapture`
 #[no_mangle]
 pub fn slice(_: &[u8]) {
 }
 
-// CHECK: @mutable_slice({{\[0 x i8\]\*|ptr}} noalias noundef nonnull align 1 %_1.0, [[USIZE]] %_1.1)
+// CHECK: @mutable_slice({{\[0 x i8\]\*|ptr}} noalias noundef nonnull align 1 %_1.0, [[USIZE]] noundef %_1.1)
 // FIXME #25759 This should also have `nocapture`
 #[no_mangle]
 pub fn mutable_slice(_: &mut [u8]) {
 }
 
-// CHECK: @unsafe_slice({{\[0 x i16\]\*|ptr}} noundef nonnull align 2 %_1.0, [[USIZE]] %_1.1)
+// CHECK: @unsafe_slice({{\[0 x i16\]\*|ptr}} noundef nonnull align 2 %_1.0, [[USIZE]] noundef %_1.1)
 // unsafe interior means this isn't actually readonly and there may be aliases ...
 #[no_mangle]
 pub fn unsafe_slice(_: &[UnsafeInner]) {
 }
 
-// CHECK: @raw_slice({{\[0 x i8\]\*|ptr}} %_1.0, [[USIZE]] %_1.1)
+// CHECK: @raw_slice({{\[0 x i8\]\*|ptr}} noundef %_1.0, [[USIZE]] noundef %_1.1)
 #[no_mangle]
 pub fn raw_slice(_: *const [u8]) {
 }
 
-// CHECK: @str({{\[0 x i8\]\*|ptr}} noalias noundef nonnull readonly align 1 %_1.0, [[USIZE]] %_1.1)
+// CHECK: @str({{\[0 x i8\]\*|ptr}} noalias noundef nonnull readonly align 1 %_1.0, [[USIZE]] noundef %_1.1)
 // FIXME #25759 This should also have `nocapture`
 #[no_mangle]
 pub fn str(_: &[u8]) {
@@ -197,26 +213,36 @@ pub fn str(_: &[u8]) {
 // CHECK: @trait_borrow({{\{\}\*|ptr}} noundef nonnull align 1 %_1.0, {{.+}} noalias noundef readonly align {{.*}} dereferenceable({{.*}}) %_1.1)
 // FIXME #25759 This should also have `nocapture`
 #[no_mangle]
-pub fn trait_borrow(_: &Drop) {
+pub fn trait_borrow(_: &dyn Drop) {
+}
+
+// CHECK: @option_trait_borrow({{i8\*|ptr}} noundef align 1 %x.0, {{i8\*|ptr}} %x.1)
+#[no_mangle]
+pub fn option_trait_borrow(x: Option<&dyn Drop>) {
+}
+
+// CHECK: @option_trait_borrow_mut({{i8\*|ptr}} noundef align 1 %x.0, {{i8\*|ptr}} %x.1)
+#[no_mangle]
+pub fn option_trait_borrow_mut(x: Option<&mut dyn Drop>) {
 }
 
-// CHECK: @trait_raw({{\{\}\*|ptr}} %_1.0, {{.+}} noalias noundef readonly align {{.*}} dereferenceable({{.*}}) %_1.1)
+// CHECK: @trait_raw({{\{\}\*|ptr}} noundef %_1.0, {{.+}} noalias noundef readonly align {{.*}} dereferenceable({{.*}}) %_1.1)
 #[no_mangle]
-pub fn trait_raw(_: *const Drop) {
+pub fn trait_raw(_: *const dyn Drop) {
 }
 
 // CHECK: @trait_box({{\{\}\*|ptr}} noalias noundef nonnull align 1{{( %0)?}}, {{.+}} noalias noundef readonly align {{.*}} dereferenceable({{.*}}){{( %1)?}})
 #[no_mangle]
-pub fn trait_box(_: Box<Drop>) {
+pub fn trait_box(_: Box<dyn Drop>) {
 }
 
 // CHECK: { {{i8\*|ptr}}, {{i8\*|ptr}} } @trait_option({{i8\*|ptr}} noalias noundef align 1 %x.0, {{i8\*|ptr}} %x.1)
 #[no_mangle]
-pub fn trait_option(x: Option<Box<Drop>>) -> Option<Box<Drop>> {
+pub fn trait_option(x: Option<Box<dyn Drop>>) -> Option<Box<dyn Drop>> {
   x
 }
 
-// CHECK: { {{\[0 x i16\]\*|ptr}}, [[USIZE]] } @return_slice({{\[0 x i16\]\*|ptr}} noalias noundef nonnull readonly align 2 %x.0, [[USIZE]] %x.1)
+// CHECK: { {{\[0 x i16\]\*|ptr}}, [[USIZE]] } @return_slice({{\[0 x i16\]\*|ptr}} noalias noundef nonnull readonly align 2 %x.0, [[USIZE]] noundef %x.1)
 #[no_mangle]
 pub fn return_slice(x: &[u16]) -> &[u16] {
   x
index db8a04763d35dc0ac3242b8b20e02ff16f2f8b05..f3877dc6b96a68e2efe69158dcaad271eab83690 100644 (file)
@@ -1,4 +1,4 @@
-// compile-flags: -C no-prepopulate-passes
+// compile-flags: -C no-prepopulate-passes -Copt-level=0
 
 #![crate_type = "lib"]
 #![feature(const_eval_select)]
index 2e984db1be528d30e5a97c25fb4f5d5f8b0930c3..8f93da2e5da437f616250d863f23d71b8c24c8de 100644 (file)
@@ -1,3 +1,4 @@
+// compile-flags: -Copt-level=0
 #![crate_type = "lib"]
 #![feature(core_intrinsics)]
 
@@ -6,6 +7,6 @@
 #[no_mangle]
 pub fn mask_ptr(ptr: *const u16, mask: usize) -> *const u16 {
     // CHECK: call
-    // CHECK-SAME: @llvm.ptrmask.{{p0|p0i8}}.[[WORD]]({{ptr|i8\*}} {{%ptr|%0}}, [[WORD]] %mask)
+    // CHECK-SAME: @llvm.ptrmask.{{p0|p0i8}}.[[WORD]]({{ptr|i8\*}} {{%ptr|%1}}, [[WORD]] %mask)
     core::intrinsics::ptr_mask(ptr, mask)
 }
index 82ba325572ab45ddfb5da2c1ef516860c1641f91..abef92c19b610b612da29d6c5fbad049c2ddd695 100644 (file)
@@ -1,4 +1,4 @@
-// compile-flags: -C no-prepopulate-passes
+// compile-flags: -C no-prepopulate-passes -Copt-level=0
 
 #![crate_type = "lib"]
 
index aa59c713b7846dabbb55cce82be977b148eb91f8..1daa213fc821392f6ebefe5ce99ca36387af7c17 100644 (file)
@@ -2,6 +2,7 @@
 // prevent optimizing away bounds checks
 
 // compile-flags: -O
+// ignore-debug: the debug assertions get in the way
 
 #![crate_type="rlib"]
 
index 0900a33377bcdc41993464d83fb5043e9bccde70..00f8953d94952f1c1ec7a521f49510c09b1d8acb 100644 (file)
@@ -1,4 +1,4 @@
-// compile-flags: -C no-prepopulate-passes
+// compile-flags: -C no-prepopulate-passes -Copt-level=0
 //
 // only-x86_64
 // ignore-windows
index a5dbef93460272a18a8c9857fc0df95bc7bf90e8..0413ed6b26f360d59817257595f0bfac463b46bc 100644 (file)
@@ -3,7 +3,7 @@
 // in some situations, see https://github.com/rust-lang/rust/issues/96497#issuecomment-1112865218
 
 // compile-flags: -O
-// min-llvm-version: 14.0
+// min-llvm-version: 15.0
 
 #![crate_type="lib"]
 
index 20e1d9b4d5988daf60977e13b4aabdc23a952e66..24059f190acf608944117454013e8a33d7ee1429 100644 (file)
@@ -46,7 +46,7 @@ pub fn iter_repeat_n_next(it: &mut std::iter::RepeatN<NotCopy>) -> Option<NotCop
 #[no_mangle]
 // CHECK-LABEL: @vec_extend_via_iter_repeat_n
 pub fn vec_extend_via_iter_repeat_n() -> Vec<u8> {
-    // CHECK: %[[ADDR:.+]] = tail call dereferenceable_or_null(1234) ptr @__rust_alloc(i64 1234, i64 1)
+    // CHECK: %[[ADDR:.+]] = tail call noundef dereferenceable_or_null(1234) ptr @__rust_alloc(i64 noundef 1234, i64 noundef 1)
     // CHECK: tail call void @llvm.memset.p0.i64(ptr noundef nonnull align 1 dereferenceable(1234) %[[ADDR]], i8 42, i64 1234,
 
     let n = 1234_usize;
index f448306ba1b086fd7157cff68a5e667c0694d400..f29a26596bfd5a8af36962e3e3ad6fa9e4244fc5 100644 (file)
@@ -50,8 +50,8 @@ pub fn load_scalar_pair<'a>(x: &(&'a i32, &'a Align16)) -> (&'a i32, &'a Align16
 // CHECK-LABEL: @load_raw_pointer
 #[no_mangle]
 pub fn load_raw_pointer<'a>(x: &*const i32) -> *const i32 {
-    // loaded raw pointer should not have !nonnull, !align, or !noundef metadata
-    // CHECK: load {{i32\*|ptr}}, {{i32\*\*|ptr}} %x, align [[PTR_ALIGNMENT]]{{$}}
+    // loaded raw pointer should not have !nonnull or !align metadata
+    // CHECK: load {{i32\*|ptr}}, {{i32\*\*|ptr}} %x, align [[PTR_ALIGNMENT]], !noundef ![[NOUNDEF:[0-9]+]]{{$}}
     *x
 }
 
@@ -93,7 +93,7 @@ pub fn load_maybeuninit_enum_bool(x: &MaybeUninit<MyBool>) -> MaybeUninit<MyBool
 // CHECK-LABEL: @load_int
 #[no_mangle]
 pub fn load_int(x: &u16) -> u16 {
-    // CHECK: load i16, {{i16\*|ptr}} %x, align 2{{$}}
+    // CHECK: load i16, {{i16\*|ptr}} %x, align 2, !noundef ![[NOUNDEF]]{{$}}
     *x
 }
 
@@ -107,7 +107,7 @@ pub fn load_nonzero_int(x: &NonZeroU16) -> NonZeroU16 {
 // CHECK-LABEL: @load_option_nonzero_int
 #[no_mangle]
 pub fn load_option_nonzero_int(x: &Option<NonZeroU16>) -> Option<NonZeroU16> {
-    // CHECK: load i16, {{i16\*|ptr}} %x, align 2{{$}}
+    // CHECK: load i16, {{i16\*|ptr}} %x, align 2, !noundef ![[NOUNDEF]]{{$}}
     *x
 }
 
index 51c7a0c615d00d321c39578ade27990b55d5879b..e05bbc26e830c33d43db8295fb5a0709eab735ed 100644 (file)
@@ -1,4 +1,4 @@
-// compile-flags: -C no-prepopulate-passes
+// compile-flags: -C no-prepopulate-passes -Copt-level=0
 // needs-asm-support
 // only-x86_64
 
index 602a08067bae183cd1e489d24e3801d052421017..518e949ffe34386cb1446ee1082eb6f1ea9de6d5 100644 (file)
@@ -1,4 +1,4 @@
-// compile-flags: -C relocation-model=pic
+// compile-flags: -C relocation-model=pic -Copt-level=0
 
 #![crate_type = "rlib"]
 
index ec44edc0667741ad863b968aa99141a040f77963..941cca922bd328de6f23e25aceea3d69128e7e6c 100644 (file)
@@ -1,4 +1,4 @@
-// compile-flags: -C relocation-model=pie
+// compile-flags: -C relocation-model=pie -Copt-level=0
 // only-x86_64-unknown-linux-gnu
 
 #![crate_type = "rlib"]
index 0b796754d1d861f8c15b6387d181827c03bd0f2d..a528976671110963852af8779e27dcaa8c2fda44 100644 (file)
@@ -1,4 +1,4 @@
-// compile-flags: -C no-prepopulate-passes -Zmir-opt-level=0
+// compile-flags: -C no-prepopulate-passes -Zmir-opt-level=0 -Copt-level=0
 
 #![crate_type = "lib"]
 
index 4f2313ce47a979be33140172948c254831e8b538..311cbfbaa09372915dba17b5b1125ec36d9ae2c0 100644 (file)
 #[repr(transparent)]
 pub struct F32(f32);
 
-// CHECK: define{{.*}}float @test_F32(float %_1)
+// CHECK: define{{.*}}float @test_F32(float noundef %_1)
 #[no_mangle]
 pub extern "C" fn test_F32(_: F32) -> F32 { loop {} }
 
 #[repr(transparent)]
 pub struct Ptr(*mut u8);
 
-// CHECK: define{{.*}}{{i8\*|ptr}} @test_Ptr({{i8\*|ptr}} %_1)
+// CHECK: define{{.*}}{{i8\*|ptr}} @test_Ptr({{i8\*|ptr}} noundef %_1)
 #[no_mangle]
 pub extern "C" fn test_Ptr(_: Ptr) -> Ptr { loop {} }
 
 #[repr(transparent)]
 pub struct WithZst(u64, Zst1);
 
-// CHECK: define{{.*}}i64 @test_WithZst(i64 %_1)
+// CHECK: define{{.*}}i64 @test_WithZst(i64 noundef %_1)
 #[no_mangle]
 pub extern "C" fn test_WithZst(_: WithZst) -> WithZst { loop {} }
 
@@ -40,14 +40,14 @@ pub extern "C" fn test_WithZst(_: WithZst) -> WithZst { loop {} }
 pub struct WithZeroSizedArray(*const f32, [i8; 0]);
 
 // Apparently we use i32* when newtype-unwrapping f32 pointers. Whatever.
-// CHECK: define{{.*}}{{i32\*|ptr}} @test_WithZeroSizedArray({{i32\*|ptr}} %_1)
+// CHECK: define{{.*}}{{i32\*|ptr}} @test_WithZeroSizedArray({{i32\*|ptr}} noundef %_1)
 #[no_mangle]
 pub extern "C" fn test_WithZeroSizedArray(_: WithZeroSizedArray) -> WithZeroSizedArray { loop {} }
 
 #[repr(transparent)]
 pub struct Generic<T>(T);
 
-// CHECK: define{{.*}}double @test_Generic(double %_1)
+// CHECK: define{{.*}}double @test_Generic(double noundef %_1)
 #[no_mangle]
 pub extern "C" fn test_Generic(_: Generic<f64>) -> Generic<f64> { loop {} }
 
@@ -64,7 +64,7 @@ pub extern "C" fn test_Gpz(_: GenericPlusZst<Bool>) -> GenericPlusZst<Bool> { lo
 #[repr(transparent)]
 pub struct LifetimePhantom<'a, T: 'a>(*const T, PhantomData<&'a T>);
 
-// CHECK: define{{.*}}{{i16\*|ptr}} @test_LifetimePhantom({{i16\*|ptr}} %_1)
+// CHECK: define{{.*}}{{i16\*|ptr}} @test_LifetimePhantom({{i16\*|ptr}} noundef %_1)
 #[no_mangle]
 pub extern "C" fn test_LifetimePhantom(_: LifetimePhantom<i16>) -> LifetimePhantom<i16> { loop {} }
 
@@ -74,28 +74,28 @@ pub struct UnitPhantom<T, U> { val: T, unit: PhantomData<U> }
 
 pub struct Px;
 
-// CHECK: define{{.*}}float @test_UnitPhantom(float %_1)
+// CHECK: define{{.*}}float @test_UnitPhantom(float noundef %_1)
 #[no_mangle]
 pub extern "C" fn test_UnitPhantom(_: UnitPhantom<f32, Px>) -> UnitPhantom<f32, Px> { loop {} }
 
 #[repr(transparent)]
 pub struct TwoZsts(Zst1, i8, Zst2);
 
-// CHECK: define{{( dso_local)?}}{{( signext)?}} i8 @test_TwoZsts(i8{{( signext)?}} %_1)
+// CHECK: define{{( dso_local)?}} noundef{{( signext)?}} i8 @test_TwoZsts(i8 noundef{{( signext)?}} %_1)
 #[no_mangle]
 pub extern "C" fn test_TwoZsts(_: TwoZsts) -> TwoZsts { loop {} }
 
 #[repr(transparent)]
 pub struct Nested1(Zst2, Generic<f64>);
 
-// CHECK: define{{.*}}double @test_Nested1(double %_1)
+// CHECK: define{{.*}}double @test_Nested1(double noundef %_1)
 #[no_mangle]
 pub extern "C" fn test_Nested1(_: Nested1) -> Nested1 { loop {} }
 
 #[repr(transparent)]
 pub struct Nested2(Nested1, Zst1);
 
-// CHECK: define{{.*}}double @test_Nested2(double %_1)
+// CHECK: define{{.*}}double @test_Nested2(double noundef %_1)
 #[no_mangle]
 pub extern "C" fn test_Nested2(_: Nested2) -> Nested2 { loop {} }
 
@@ -115,7 +115,7 @@ pub extern "C" fn test_Vector(_: Vector) -> Vector { loop {} }
 #[repr(transparent)]
 pub struct StructWithProjection(<f32 as Mirror>::It);
 
-// CHECK: define{{.*}}float @test_Projection(float %_1)
+// CHECK: define{{.*}}float @test_Projection(float noundef %_1)
 #[no_mangle]
 pub extern "C" fn test_Projection(_: StructWithProjection) -> StructWithProjection { loop {} }
 
@@ -124,7 +124,7 @@ pub enum EnumF32 {
     Variant(F32)
 }
 
-// CHECK: define{{.*}}float @test_EnumF32(float %_1)
+// CHECK: define{{.*}}float @test_EnumF32(float noundef %_1)
 #[no_mangle]
 pub extern "C" fn test_EnumF32(_: EnumF32) -> EnumF32 { loop {} }
 
@@ -133,7 +133,7 @@ pub enum EnumF32WithZsts {
     Variant(Zst1, F32, Zst2)
 }
 
-// CHECK: define{{.*}}float @test_EnumF32WithZsts(float %_1)
+// CHECK: define{{.*}}float @test_EnumF32WithZsts(float noundef %_1)
 #[no_mangle]
 pub extern "C" fn test_EnumF32WithZsts(_: EnumF32WithZsts) -> EnumF32WithZsts { loop {} }
 
@@ -142,7 +142,7 @@ pub union UnionF32 {
     field: F32,
 }
 
-// CHECK: define{{.*}}float @test_UnionF32(float %_1)
+// CHECK: define{{.*}} float @test_UnionF32(float %_1)
 #[no_mangle]
 pub extern "C" fn test_UnionF32(_: UnionF32) -> UnionF32 { loop {} }
 
index 61c4b7b51af7be5344eb86985277d8605baa2820..045f01985a57f1e14a75a4085c382f2f3ff56c0f 100644 (file)
@@ -29,25 +29,25 @@ pub extern "C" fn f_scalar_0(a: bool) -> bool {
     a
 }
 
-// CHECK: define signext i8 @f_scalar_1(i8 signext %x)
+// CHECK: define noundef signext i8 @f_scalar_1(i8 noundef signext %x)
 #[no_mangle]
 pub extern "C" fn f_scalar_1(x: i8) -> i8 {
     x
 }
 
-// CHECK: define zeroext i8 @f_scalar_2(i8 zeroext %x)
+// CHECK: define noundef zeroext i8 @f_scalar_2(i8 noundef zeroext %x)
 #[no_mangle]
 pub extern "C" fn f_scalar_2(x: u8) -> u8 {
     x
 }
 
-// CHECK: define signext i32 @f_scalar_3(i32 signext %x)
+// CHECK: define noundef signext i32 @f_scalar_3(i32 noundef signext %x)
 #[no_mangle]
 pub extern "C" fn f_scalar_3(x: i32) -> u32 {
     x as u32
 }
 
-// CHECK: define i64 @f_scalar_4(i64 %x)
+// CHECK: define noundef i64 @f_scalar_4(i64 noundef %x)
 #[no_mangle]
 pub extern "C" fn f_scalar_4(x: i64) -> i64 {
     x
@@ -132,13 +132,13 @@ pub struct Large {
 pub extern "C" fn f_agg_large(mut x: Large) {
 }
 
-// CHECK: define void @f_agg_large_ret({{%Large\*|ptr}} {{.*}}sret{{.*}}, i32 signext %i, i8 signext %j)
+// CHECK: define void @f_agg_large_ret({{%Large\*|ptr}} {{.*}}sret{{.*}}, i32 noundef signext %i, i8 noundef signext %j)
 #[no_mangle]
 pub extern "C" fn f_agg_large_ret(i: i32, j: i8) -> Large {
     Large { a: 1, b: 2, c: 3, d: 4 }
 }
 
-// CHECK: define void @f_scalar_stack_1(i64 %0, [2 x i64] %1, i128 %2, {{%Large\*|ptr}} {{.*}}%d, i8 zeroext %e, i8 signext %f, i8 %g, i8 %h)
+// CHECK: define void @f_scalar_stack_1(i64 %0, [2 x i64] %1, i128 %2, {{%Large\*|ptr}} {{.*}}%d, i8 noundef zeroext %e, i8 noundef signext %f, i8 noundef %g, i8 noundef %h)
 #[no_mangle]
 pub extern "C" fn f_scalar_stack_1(
     a: Tiny,
@@ -152,7 +152,7 @@ pub extern "C" fn f_scalar_stack_1(
 ) {
 }
 
-// CHECK: define void @f_scalar_stack_2({{%Large\*|ptr}} {{.*}}sret{{.*}} %0, i64 %a, i128 %1, i128 %2, i64 %d, i8 zeroext %e, i8 %f, i8 %g)
+// CHECK: define void @f_scalar_stack_2({{%Large\*|ptr}} {{.*}}sret{{.*}} %0, i64 noundef %a, i128 %1, i128 %2, i64 noundef %d, i8 noundef zeroext %e, i8 noundef %f, i8 noundef %g)
 #[no_mangle]
 pub extern "C" fn f_scalar_stack_2(
     a: u64,
@@ -172,7 +172,7 @@ pub extern "C" fn f_scalar_stack_2(
 
 #[no_mangle]
 pub unsafe extern "C" fn f_va_caller() {
-    // CHECK: call signext i32 (i32, ...) @f_va_callee(i32 signext 1, i32 signext 2, i64 3, double {{.*}}, double {{.*}}, i64 {{.*}}, [2 x i64] {{.*}}, i128 {{.*}}, {{%Large\*|ptr}} {{.*}})
+    // CHECK: call noundef signext i32 (i32, ...) @f_va_callee(i32 noundef signext 1, i32 noundef signext 2, i64 noundef 3, double {{.*}}, double {{.*}}, i64 {{.*}}, [2 x i64] {{.*}}, i128 {{.*}}, {{%Large\*|ptr}} {{.*}})
     f_va_callee(
         1,
         2i32,
@@ -184,6 +184,6 @@ pub extern "C" fn f_scalar_stack_2(
         SmallAligned { a: 11 },
         Large { a: 12, b: 13, c: 14, d: 15 },
     );
-    // CHECK: call signext i32 (i32, ...) @f_va_callee(i32 signext 1, i32 signext 2, i32 signext 3, i32 signext 4, i128 {{.*}}, i32 signext 6, i32 signext 7, i32 8, i32 9)
+    // CHECK: call noundef signext i32 (i32, ...) @f_va_callee(i32 noundef signext 1, i32 noundef signext 2, i32 noundef signext 3, i32 noundef signext 4, i128 {{.*}}, i32 noundef signext 6, i32 noundef signext 7, i32 noundef 8, i32 noundef 9)
     f_va_callee(1, 2i32, 3i32, 4i32, SmallAligned { a: 5 }, 6i32, 7i32, 8i32, 9i32);
 }
index 8be5186de9e773781e950955272cefc85d29a2ce..597b867ebad143ade9660a48d6ed64ca08e0a8d9 100644 (file)
@@ -1,7 +1,7 @@
 // Verifies that pointer type membership tests for indirect calls are emitted.
 //
 // needs-sanitizer-cfi
-// compile-flags: -Clto -Cno-prepopulate-passes -Ctarget-feature=-crt-static -Zsanitizer=cfi
+// compile-flags: -Clto -Cno-prepopulate-passes -Ctarget-feature=-crt-static -Zsanitizer=cfi -Copt-level=0
 
 #![crate_type="lib"]
 
index 8e0d02550ee94a5fa859adb0edcd5c511c452965..2537df80a90b44b62f84d9e9aa037e0f9bc01e65 100644 (file)
@@ -5,7 +5,7 @@
 // [aarch64] needs-llvm-components: aarch64
 // [x86_64] compile-flags: --target x86_64-unknown-none
 // [x86_64] needs-llvm-components:
-// compile-flags: -Cno-prepopulate-passes -Zsanitizer=kcfi
+// compile-flags: -Cno-prepopulate-passes -Zsanitizer=kcfi -Copt-level=0
 
 #![crate_type="lib"]
 #![feature(no_core, lang_items)]
index 7ce0fa0a20fc2ecad15669c069be8eedf0eadbdd..7b00fcf8e1bd743d6b7268b361626f0308574f60 100644 (file)
@@ -6,8 +6,8 @@
 // revisions:ASAN ASAN-RECOVER MSAN MSAN-RECOVER MSAN-RECOVER-LTO
 // no-prefer-dynamic
 //
-//[ASAN]             compile-flags: -Zsanitizer=address
-//[ASAN-RECOVER]     compile-flags: -Zsanitizer=address -Zsanitizer-recover=address
+//[ASAN]             compile-flags: -Zsanitizer=address -Copt-level=0
+//[ASAN-RECOVER]     compile-flags: -Zsanitizer=address -Zsanitizer-recover=address -Copt-level=0
 //[MSAN]             compile-flags: -Zsanitizer=memory
 //[MSAN-RECOVER]     compile-flags: -Zsanitizer=memory  -Zsanitizer-recover=memory
 //[MSAN-RECOVER-LTO] compile-flags: -Zsanitizer=memory  -Zsanitizer-recover=memory -C lto=fat
 // ASAN-RECOVER-NOT:     unreachable
 // ASAN:               }
 //
-// MSAN-LABEL: define dso_local i32 @penguin(
+// MSAN-LABEL: define dso_local noundef i32 @penguin(
 // MSAN:         call void @__msan_warning{{(_with_origin_noreturn\(i32 0\)|_noreturn\(\))}}
 // MSAN:         unreachable
 // MSAN:       }
 //
-// MSAN-RECOVER-LABEL: define dso_local i32 @penguin(
+// MSAN-RECOVER-LABEL: define dso_local noundef i32 @penguin(
 // MSAN-RECOVER:         call void @__msan_warning{{(_with_origin\(i32 0\)|\(\))}}
 // MSAN-RECOVER-NOT:     unreachable
 // MSAN-RECOVER:       }
 //
-// MSAN-RECOVER-LTO-LABEL: define dso_local i32 @penguin(
+// MSAN-RECOVER-LTO-LABEL: define dso_local noundef i32 @penguin(
 // MSAN-RECOVER-LTO:          call void @__msan_warning{{(_with_origin\(i32 0\)|\(\))}}
 // MSAN-RECOVER-LTO-NOT:      unreachable
 // MSAN-RECOVER-LTO:       }
index 264f28fdb5feea5e115473b341f18a52eec08de5..8e8365b6a673b6baa8dea7a34d293bc8e420f167 100644 (file)
@@ -8,13 +8,13 @@ pub fn pair_bool_bool(pair: (bool, bool)) -> (bool, bool) {
     pair
 }
 
-// CHECK: define{{.*}}{ i8, i32 } @pair_bool_i32(i1 noundef zeroext %pair.0, i32 %pair.1)
+// CHECK: define{{.*}}{ i8, i32 } @pair_bool_i32(i1 noundef zeroext %pair.0, i32 noundef %pair.1)
 #[no_mangle]
 pub fn pair_bool_i32(pair: (bool, i32)) -> (bool, i32) {
     pair
 }
 
-// CHECK: define{{.*}}{ i32, i8 } @pair_i32_bool(i32 %pair.0, i1 noundef zeroext %pair.1)
+// CHECK: define{{.*}}{ i32, i8 } @pair_i32_bool(i32 noundef %pair.0, i1 noundef zeroext %pair.1)
 #[no_mangle]
 pub fn pair_i32_bool(pair: (i32, bool)) -> (i32, bool) {
     pair
index 7fc34af3da72a2d2137d3da41c9fd207737b4c91..9f2d9d06524f0d65a30dd56432a01216570aa34a 100644 (file)
@@ -1,4 +1,4 @@
-// compile-flags: -Cno-prepopulate-passes
+// compile-flags: -Cno-prepopulate-passes -Copt-level=0
 
 // revisions:x86_64 i686 aarch64-apple aarch64-windows aarch64-linux arm riscv
 
index b2afc7deb679aa40a23b4372ba4800f18d4afc2e..735ef7081c9566c057e9a9638b0c84d8e7731a81 100644 (file)
@@ -15,8 +15,8 @@
 // it to be marked `dso_local` as well, given the static relocation model.
 //
 // CHECK: @extern_static = external dso_local local_unnamed_addr global i8
-// CHECK: define dso_local i8 @access_extern() {{.*}}
-// CHECK: declare dso_local i8 @extern_fn() {{.*}}
+// CHECK: define dso_local noundef i8 @access_extern() {{.*}}
+// CHECK: declare dso_local noundef i8 @extern_fn() {{.*}}
 
 #[no_mangle]
 pub fn access_extern() -> u8 {
index a7e5deeffd8e24f16874a3dee1b9bbc62f4af637..260dcbac0fc4f33919ca7d43889f3b13d901ef07 100644 (file)
@@ -5,7 +5,7 @@
 // FIXME(eddyb) all of these tests show memory stores and loads, even after a
 // scalar `bitcast`; more special-casing is required to remove `alloca` usage.
 
-// CHECK-LABEL: define{{.*}}i32 @f32_to_bits(float %x)
+// CHECK-LABEL: define{{.*}}i32 @f32_to_bits(float noundef %x)
 // CHECK: store i32 %{{.*}}, {{.*}} %0
 // CHECK-NEXT: %[[RES:.*]] = load i32, {{.*}} %0
 // CHECK: ret i32 %[[RES]]
@@ -24,7 +24,7 @@ pub fn bool_to_byte(b: bool) -> u8 {
     unsafe { std::mem::transmute(b) }
 }
 
-// CHECK-LABEL: define{{.*}}noundef zeroext i1 @byte_to_bool(i8 %byte)
+// CHECK-LABEL: define{{.*}}noundef zeroext i1 @byte_to_bool(i8 noundef %byte)
 // CHECK: %1 = trunc i8 %byte to i1
 // CHECK-NEXT: %2 = zext i1 %1 to i8
 // CHECK-NEXT: store i8 %2, {{.*}} %0
@@ -36,7 +36,7 @@ pub unsafe fn byte_to_bool(byte: u8) -> bool {
     std::mem::transmute(byte)
 }
 
-// CHECK-LABEL: define{{.*}}{{i8\*|ptr}} @ptr_to_ptr({{i16\*|ptr}} %p)
+// CHECK-LABEL: define{{.*}}{{i8\*|ptr}} @ptr_to_ptr({{i16\*|ptr}} noundef %p)
 // CHECK: store {{i8\*|ptr}} %{{.*}}, {{.*}} %0
 // CHECK-NEXT: %[[RES:.*]] = load {{i8\*|ptr}}, {{.*}} %0
 // CHECK: ret {{i8\*|ptr}} %[[RES]]
@@ -52,7 +52,7 @@ pub fn ptr_to_ptr(p: *mut u16) -> *mut u8 {
 // Tests below show the non-special-cased behavior (with the possible
 // future special-cased instructions in the "NOTE(eddyb)" comments).
 
-// CHECK: define{{.*}}[[USIZE:i[0-9]+]] @ptr_to_int({{i16\*|ptr}} %p)
+// CHECK: define{{.*}}[[USIZE:i[0-9]+]] @ptr_to_int({{i16\*|ptr}} noundef %p)
 
 // NOTE(eddyb) see above, the following two CHECK lines should ideally be this:
 //        %2 = ptrtoint i16* %p to [[USIZE]]
@@ -66,7 +66,7 @@ pub fn ptr_to_int(p: *mut u16) -> usize {
     unsafe { std::mem::transmute(p) }
 }
 
-// CHECK: define{{.*}}{{i16\*|ptr}} @int_to_ptr([[USIZE]] %i)
+// CHECK: define{{.*}}{{i16\*|ptr}} @int_to_ptr([[USIZE]] noundef %i)
 
 // NOTE(eddyb) see above, the following two CHECK lines should ideally be this:
 //        %2 = inttoptr [[USIZE]] %i to i16*
index e86c75f3f482607b204d715d879f4cfa5b136a40..35f760851451e108b124552924d048c32c7f23de 100644 (file)
@@ -1,5 +1,5 @@
 // ignore-emscripten
-// compile-flags: -C no-prepopulate-passes
+// compile-flags: -C no-prepopulate-passes -Copt-level=0
 
 // Test that tuples get optimized layout, in particular with a ZST in the last field (#63244)
 
index 8f1b038708e667b9cfccd9b8ec4b7124aa314c3e..d4715efad73c0fe0855a3a22314dd662d735861d 100644 (file)
@@ -2,7 +2,7 @@
 
 #![crate_type = "lib"]
 
-// CHECK-LABEL: define{{.*}}i32 @test(i32 %a, i32 %b)
+// CHECK-LABEL: define{{.*}}i32 @test(i32 noundef %a, i32 noundef %b)
 #[no_mangle]
 pub fn test(a: u32, b: u32) -> u32 {
     let c = a + b;
index ae6e448f172f7fcd87bd6d67cb16ea00579199f1..4481a9d1e9983c0991c63f58bb6c23ecad1b3d00 100644 (file)
@@ -161,7 +161,24 @@ pub fn vec_option_bool(n: usize) -> Vec<Option<bool>> {
     vec![Some(false); n]
 }
 
+// CHECK-LABEL: @vec_option_i32
+#[no_mangle]
+pub fn vec_option_i32(n: usize) -> Vec<Option<i32>> {
+    // CHECK-NOT: call {{.*}}alloc::vec::from_elem
+    // CHECK-NOT: call {{.*}}reserve
+    // CHECK-NOT: call {{.*}}__rust_alloc(
+
+    // CHECK: call {{.*}}__rust_alloc_zeroed(
+
+    // CHECK-NOT: call {{.*}}alloc::vec::from_elem
+    // CHECK-NOT: call {{.*}}reserve
+    // CHECK-NOT: call {{.*}}__rust_alloc(
+
+    // CHECK: ret void
+    vec![None; n]
+}
+
 // Ensure that __rust_alloc_zeroed gets the right attributes for LLVM to optimize it away.
-// CHECK: declare noalias ptr @__rust_alloc_zeroed(i64, i64 allocalign) unnamed_addr [[RUST_ALLOC_ZEROED_ATTRS:#[0-9]+]]
+// CHECK: declare noalias noundef ptr @__rust_alloc_zeroed(i64 noundef, i64 allocalign noundef) unnamed_addr [[RUST_ALLOC_ZEROED_ATTRS:#[0-9]+]]
 
 // CHECK-DAG: attributes [[RUST_ALLOC_ZEROED_ATTRS]] = { {{.*}} allockind("alloc,zeroed,aligned") allocsize(0) uwtable "alloc-family"="__rust_alloc" {{.*}} }
index 844d5870a846fa6c52f362b3a2e578990c5cd4cc..cef4b9bdaaf0a2dc018ccd61cff0c3c0ab0d4f6c 100644 (file)
@@ -1,4 +1,4 @@
-// compile-flags: -C no-prepopulate-passes
+// compile-flags: -C no-prepopulate-passes -Copt-level=0
 
 #![crate_type = "lib"]
 #![feature(repr_simd)]
diff --git a/tests/mir-opt/building/async_await.a-{closure#0}.generator_resume.0.mir b/tests/mir-opt/building/async_await.a-{closure#0}.generator_resume.0.mir
new file mode 100644 (file)
index 0000000..2a7f90f
--- /dev/null
@@ -0,0 +1,41 @@
+// MIR for `a::{closure#0}` 0 generator_resume
+/* generator_layout = GeneratorLayout {
+    field_tys: {},
+    variant_fields: {
+        Unresumed(0): [],
+        Returned (1): [],
+        Panicked (2): [],
+    },
+    storage_conflicts: BitMatrix(0x0) {},
+} */
+
+fn a::{closure#0}(_1: Pin<&mut [async fn body@$DIR/async_await.rs:11:14: 11:16]>, _2: &mut Context<'_>) -> Poll<()> {
+    debug _task_context => _4;           // in scope 0 at $DIR/async_await.rs:+0:14: +0:16
+    let mut _0: std::task::Poll<()>;     // return place in scope 0 at $DIR/async_await.rs:+0:14: +0:16
+    let mut _3: ();                      // in scope 0 at $DIR/async_await.rs:+0:14: +0:16
+    let mut _4: &mut std::task::Context<'_>; // in scope 0 at $DIR/async_await.rs:+0:14: +0:16
+    let mut _5: u32;                     // in scope 0 at $DIR/async_await.rs:+0:14: +0:16
+
+    bb0: {
+        _5 = discriminant((*(_1.0: &mut [async fn body@$DIR/async_await.rs:11:14: 11:16]))); // scope 0 at $DIR/async_await.rs:+0:14: +0:16
+        switchInt(move _5) -> [0: bb1, 1: bb2, otherwise: bb3]; // scope 0 at $DIR/async_await.rs:+0:14: +0:16
+    }
+
+    bb1: {
+        _4 = move _2;                    // scope 0 at $DIR/async_await.rs:+0:14: +0:16
+        _3 = const ();                   // scope 0 at $DIR/async_await.rs:+0:14: +0:16
+        Deinit(_0);                      // scope 0 at $DIR/async_await.rs:+0:16: +0:16
+        ((_0 as Ready).0: ()) = move _3; // scope 0 at $DIR/async_await.rs:+0:16: +0:16
+        discriminant(_0) = 0;            // scope 0 at $DIR/async_await.rs:+0:16: +0:16
+        discriminant((*(_1.0: &mut [async fn body@$DIR/async_await.rs:11:14: 11:16]))) = 1; // scope 0 at $DIR/async_await.rs:+0:16: +0:16
+        return;                          // scope 0 at $DIR/async_await.rs:+0:16: +0:16
+    }
+
+    bb2: {
+        assert(const false, "`async fn` resumed after completion") -> bb2; // scope 0 at $DIR/async_await.rs:+0:14: +0:16
+    }
+
+    bb3: {
+        unreachable;                     // scope 0 at $DIR/async_await.rs:+0:14: +0:16
+    }
+}
diff --git a/tests/mir-opt/building/async_await.b-{closure#0}.generator_resume.0.mir b/tests/mir-opt/building/async_await.b-{closure#0}.generator_resume.0.mir
new file mode 100644 (file)
index 0000000..05edc47
--- /dev/null
@@ -0,0 +1,337 @@
+// MIR for `b::{closure#0}` 0 generator_resume
+/* generator_layout = GeneratorLayout {
+    field_tys: {
+        _0: impl std::future::Future<Output = ()>,
+        _1: impl std::future::Future<Output = ()>,
+    },
+    variant_fields: {
+        Unresumed(0): [],
+        Returned (1): [],
+        Panicked (2): [],
+        Suspend0 (3): [_0],
+        Suspend1 (4): [_1],
+    },
+    storage_conflicts: BitMatrix(2x2) {
+        (_0, _0),
+        (_1, _1),
+    },
+} */
+
+fn b::{closure#0}(_1: Pin<&mut [async fn body@$DIR/async_await.rs:14:18: 17:2]>, _2: &mut Context<'_>) -> Poll<()> {
+    debug _task_context => _38;          // in scope 0 at $DIR/async_await.rs:+0:18: +3:2
+    let mut _0: std::task::Poll<()>;     // return place in scope 0 at $DIR/async_await.rs:+0:18: +3:2
+    let _3: ();                          // in scope 0 at $DIR/async_await.rs:+1:5: +1:14
+    let mut _4: impl std::future::Future<Output = ()>; // in scope 0 at $DIR/async_await.rs:+1:8: +1:14
+    let mut _5: impl std::future::Future<Output = ()>; // in scope 0 at $DIR/async_await.rs:+1:5: +1:8
+    let mut _6: impl std::future::Future<Output = ()>; // in scope 0 at $DIR/async_await.rs:+1:8: +1:14
+    let mut _7: ();                      // in scope 0 at $DIR/async_await.rs:+0:18: +3:2
+    let _8: ();                          // in scope 0 at $DIR/async_await.rs:+1:8: +1:14
+    let mut _9: std::task::Poll<()>;     // in scope 0 at $DIR/async_await.rs:+1:8: +1:14
+    let mut _10: std::pin::Pin<&mut impl std::future::Future<Output = ()>>; // in scope 0 at $DIR/async_await.rs:+1:8: +1:14
+    let mut _11: &mut impl std::future::Future<Output = ()>; // in scope 0 at $DIR/async_await.rs:+1:8: +1:14
+    let mut _12: &mut impl std::future::Future<Output = ()>; // in scope 0 at $DIR/async_await.rs:+1:8: +1:14
+    let mut _13: &mut std::task::Context<'_>; // in scope 0 at $DIR/async_await.rs:+1:5: +1:14
+    let mut _14: &mut std::task::Context<'_>; // in scope 0 at $DIR/async_await.rs:+1:5: +1:14
+    let mut _15: &mut std::task::Context<'_>; // in scope 0 at $DIR/async_await.rs:+1:8: +1:14
+    let mut _16: isize;                  // in scope 0 at $DIR/async_await.rs:+1:8: +1:14
+    let mut _18: !;                      // in scope 0 at $DIR/async_await.rs:+1:5: +1:14
+    let mut _19: &mut std::task::Context<'_>; // in scope 0 at $DIR/async_await.rs:+1:8: +1:14
+    let mut _20: ();                     // in scope 0 at $DIR/async_await.rs:+1:8: +1:14
+    let mut _21: impl std::future::Future<Output = ()>; // in scope 0 at $DIR/async_await.rs:+2:8: +2:14
+    let mut _22: impl std::future::Future<Output = ()>; // in scope 0 at $DIR/async_await.rs:+2:5: +2:8
+    let mut _23: impl std::future::Future<Output = ()>; // in scope 0 at $DIR/async_await.rs:+2:8: +2:14
+    let _24: ();                         // in scope 0 at $DIR/async_await.rs:+2:8: +2:14
+    let mut _25: std::task::Poll<()>;    // in scope 0 at $DIR/async_await.rs:+2:8: +2:14
+    let mut _26: std::pin::Pin<&mut impl std::future::Future<Output = ()>>; // in scope 0 at $DIR/async_await.rs:+2:8: +2:14
+    let mut _27: &mut impl std::future::Future<Output = ()>; // in scope 0 at $DIR/async_await.rs:+2:8: +2:14
+    let mut _28: &mut impl std::future::Future<Output = ()>; // in scope 0 at $DIR/async_await.rs:+2:8: +2:14
+    let mut _29: &mut std::task::Context<'_>; // in scope 0 at $DIR/async_await.rs:+2:5: +2:14
+    let mut _30: &mut std::task::Context<'_>; // in scope 0 at $DIR/async_await.rs:+2:5: +2:14
+    let mut _31: &mut std::task::Context<'_>; // in scope 0 at $DIR/async_await.rs:+2:8: +2:14
+    let mut _32: isize;                  // in scope 0 at $DIR/async_await.rs:+2:8: +2:14
+    let mut _34: !;                      // in scope 0 at $DIR/async_await.rs:+2:5: +2:14
+    let mut _35: &mut std::task::Context<'_>; // in scope 0 at $DIR/async_await.rs:+2:8: +2:14
+    let mut _36: ();                     // in scope 0 at $DIR/async_await.rs:+2:8: +2:14
+    let mut _37: ();                     // in scope 0 at $DIR/async_await.rs:+0:18: +3:2
+    let mut _38: &mut std::task::Context<'_>; // in scope 0 at $DIR/async_await.rs:+0:18: +3:2
+    let mut _39: u32;                    // in scope 0 at $DIR/async_await.rs:+0:18: +3:2
+    scope 1 {
+        debug __awaitee => (((*(_1.0: &mut [async fn body@$DIR/async_await.rs:14:18: 17:2])) as variant#3).0: impl std::future::Future<Output = ()>); // in scope 1 at $DIR/async_await.rs:+1:8: +1:14
+        let _17: ();                     // in scope 1 at $DIR/async_await.rs:+1:5: +1:14
+        scope 2 {
+        }
+        scope 3 {
+            debug result => _17;         // in scope 3 at $DIR/async_await.rs:+1:5: +1:14
+        }
+    }
+    scope 4 {
+        debug __awaitee => (((*(_1.0: &mut [async fn body@$DIR/async_await.rs:14:18: 17:2])) as variant#4).0: impl std::future::Future<Output = ()>); // in scope 4 at $DIR/async_await.rs:+2:8: +2:14
+        let _33: ();                     // in scope 4 at $DIR/async_await.rs:+2:5: +2:14
+        scope 5 {
+        }
+        scope 6 {
+            debug result => _33;         // in scope 6 at $DIR/async_await.rs:+2:5: +2:14
+        }
+    }
+
+    bb0: {
+        _39 = discriminant((*(_1.0: &mut [async fn body@$DIR/async_await.rs:14:18: 17:2]))); // scope 0 at $DIR/async_await.rs:+0:18: +3:2
+        switchInt(move _39) -> [0: bb1, 1: bb29, 3: bb27, 4: bb28, otherwise: bb30]; // scope 0 at $DIR/async_await.rs:+0:18: +3:2
+    }
+
+    bb1: {
+        _38 = move _2;                   // scope 0 at $DIR/async_await.rs:+0:18: +3:2
+        StorageLive(_3);                 // scope 0 at $DIR/async_await.rs:+1:5: +1:14
+        StorageLive(_4);                 // scope 0 at $DIR/async_await.rs:+1:8: +1:14
+        StorageLive(_5);                 // scope 0 at $DIR/async_await.rs:+1:5: +1:8
+        _5 = a() -> bb2;                 // scope 0 at $DIR/async_await.rs:+1:5: +1:8
+                                         // mir::Constant
+                                         // + span: $DIR/async_await.rs:15:5: 15:6
+                                         // + literal: Const { ty: fn() -> impl Future<Output = ()> {a}, val: Value(<ZST>) }
+    }
+
+    bb2: {
+        _4 = <impl Future<Output = ()> as IntoFuture>::into_future(move _5) -> bb3; // scope 0 at $DIR/async_await.rs:+1:8: +1:14
+                                         // mir::Constant
+                                         // + span: $DIR/async_await.rs:15:8: 15:14
+                                         // + literal: Const { ty: fn(impl Future<Output = ()>) -> <impl Future<Output = ()> as IntoFuture>::IntoFuture {<impl Future<Output = ()> as IntoFuture>::into_future}, val: Value(<ZST>) }
+    }
+
+    bb3: {
+        StorageDead(_5);                 // scope 0 at $DIR/async_await.rs:+1:13: +1:14
+        nop;                             // scope 0 at $DIR/async_await.rs:+1:8: +1:14
+        (((*(_1.0: &mut [async fn body@$DIR/async_await.rs:14:18: 17:2])) as variant#3).0: impl std::future::Future<Output = ()>) = move _4; // scope 0 at $DIR/async_await.rs:+1:8: +1:14
+        goto -> bb4;                     // scope 1 at $DIR/async_await.rs:+1:8: +1:14
+    }
+
+    bb4: {
+        StorageLive(_8);                 // scope 1 at $DIR/async_await.rs:+1:8: +1:14
+        StorageLive(_9);                 // scope 1 at $DIR/async_await.rs:+1:8: +1:14
+        StorageLive(_10);                // scope 2 at $DIR/async_await.rs:+1:8: +1:14
+        StorageLive(_11);                // scope 2 at $DIR/async_await.rs:+1:8: +1:14
+        StorageLive(_12);                // scope 2 at $DIR/async_await.rs:+1:8: +1:14
+        _12 = &mut (((*(_1.0: &mut [async fn body@$DIR/async_await.rs:14:18: 17:2])) as variant#3).0: impl std::future::Future<Output = ()>); // scope 2 at $DIR/async_await.rs:+1:8: +1:14
+        _11 = &mut (*_12);               // scope 2 at $DIR/async_await.rs:+1:8: +1:14
+        _10 = Pin::<&mut impl Future<Output = ()>>::new_unchecked(move _11) -> bb5; // scope 2 at $DIR/async_await.rs:+1:8: +1:14
+                                         // mir::Constant
+                                         // + span: $DIR/async_await.rs:15:8: 15:14
+                                         // + literal: Const { ty: unsafe fn(&mut impl Future<Output = ()>) -> Pin<&mut impl Future<Output = ()>> {Pin::<&mut impl Future<Output = ()>>::new_unchecked}, val: Value(<ZST>) }
+    }
+
+    bb5: {
+        StorageDead(_11);                // scope 2 at $DIR/async_await.rs:+1:13: +1:14
+        StorageLive(_13);                // scope 2 at $DIR/async_await.rs:+1:5: +1:14
+        StorageLive(_14);                // scope 2 at $DIR/async_await.rs:+1:5: +1:14
+        StorageLive(_15);                // scope 2 at $DIR/async_await.rs:+1:8: +1:14
+        _15 = _38;                       // scope 2 at $DIR/async_await.rs:+1:8: +1:14
+        _14 = move _15;                  // scope 2 at $DIR/async_await.rs:+1:5: +1:14
+        goto -> bb6;                     // scope 2 at $DIR/async_await.rs:+1:5: +1:14
+    }
+
+    bb6: {
+        _13 = &mut (*_14);               // scope 2 at $DIR/async_await.rs:+1:5: +1:14
+        StorageDead(_15);                // scope 2 at $DIR/async_await.rs:+1:13: +1:14
+        _9 = <impl Future<Output = ()> as Future>::poll(move _10, move _13) -> bb7; // scope 2 at $DIR/async_await.rs:+1:8: +1:14
+                                         // mir::Constant
+                                         // + span: $DIR/async_await.rs:15:8: 15:14
+                                         // + literal: Const { ty: for<'a, 'b, 'c> fn(Pin<&'a mut impl Future<Output = ()>>, &'b mut Context<'c>) -> Poll<<impl Future<Output = ()> as Future>::Output> {<impl Future<Output = ()> as Future>::poll}, val: Value(<ZST>) }
+    }
+
+    bb7: {
+        StorageDead(_13);                // scope 2 at $DIR/async_await.rs:+1:13: +1:14
+        StorageDead(_10);                // scope 2 at $DIR/async_await.rs:+1:13: +1:14
+        _16 = discriminant(_9);          // scope 1 at $DIR/async_await.rs:+1:8: +1:14
+        switchInt(move _16) -> [0: bb10, 1: bb8, otherwise: bb9]; // scope 1 at $DIR/async_await.rs:+1:8: +1:14
+    }
+
+    bb8: {
+        _8 = const ();                   // scope 1 at $DIR/async_await.rs:+1:8: +1:14
+        StorageDead(_14);                // scope 1 at $DIR/async_await.rs:+1:13: +1:14
+        StorageDead(_12);                // scope 1 at $DIR/async_await.rs:+1:13: +1:14
+        StorageDead(_9);                 // scope 1 at $DIR/async_await.rs:+1:13: +1:14
+        StorageDead(_8);                 // scope 1 at $DIR/async_await.rs:+1:13: +1:14
+        StorageLive(_19);                // scope 1 at $DIR/async_await.rs:+1:8: +1:14
+        StorageLive(_20);                // scope 1 at $DIR/async_await.rs:+1:8: +1:14
+        _20 = ();                        // scope 1 at $DIR/async_await.rs:+1:8: +1:14
+        Deinit(_0);                      // scope 1 at $DIR/async_await.rs:+1:8: +1:14
+        discriminant(_0) = 1;            // scope 1 at $DIR/async_await.rs:+1:8: +1:14
+        discriminant((*(_1.0: &mut [async fn body@$DIR/async_await.rs:14:18: 17:2]))) = 3; // scope 1 at $DIR/async_await.rs:+1:8: +1:14
+        return;                          // scope 1 at $DIR/async_await.rs:+1:8: +1:14
+    }
+
+    bb9: {
+        unreachable;                     // scope 1 at $DIR/async_await.rs:+1:8: +1:14
+    }
+
+    bb10: {
+        StorageLive(_17);                // scope 1 at $DIR/async_await.rs:+1:5: +1:14
+        _17 = ((_9 as Ready).0: ());     // scope 1 at $DIR/async_await.rs:+1:5: +1:14
+        _3 = _17;                        // scope 3 at $DIR/async_await.rs:+1:5: +1:14
+        StorageDead(_17);                // scope 1 at $DIR/async_await.rs:+1:13: +1:14
+        StorageDead(_14);                // scope 1 at $DIR/async_await.rs:+1:13: +1:14
+        StorageDead(_12);                // scope 1 at $DIR/async_await.rs:+1:13: +1:14
+        StorageDead(_9);                 // scope 1 at $DIR/async_await.rs:+1:13: +1:14
+        StorageDead(_8);                 // scope 1 at $DIR/async_await.rs:+1:13: +1:14
+        goto -> bb12;                    // scope 0 at $DIR/async_await.rs:+1:13: +1:14
+    }
+
+    bb11: {
+        StorageDead(_20);                // scope 1 at $DIR/async_await.rs:+1:13: +1:14
+        _38 = move _19;                  // scope 1 at $DIR/async_await.rs:+1:8: +1:14
+        StorageDead(_19);                // scope 1 at $DIR/async_await.rs:+1:13: +1:14
+        _7 = const ();                   // scope 1 at $DIR/async_await.rs:+1:8: +1:14
+        goto -> bb4;                     // scope 1 at $DIR/async_await.rs:+1:8: +1:14
+    }
+
+    bb12: {
+        nop;                             // scope 0 at $DIR/async_await.rs:+1:13: +1:14
+        goto -> bb13;                    // scope 0 at $DIR/async_await.rs:+1:14: +1:15
+    }
+
+    bb13: {
+        StorageDead(_4);                 // scope 0 at $DIR/async_await.rs:+1:14: +1:15
+        StorageDead(_3);                 // scope 0 at $DIR/async_await.rs:+1:14: +1:15
+        StorageLive(_21);                // scope 0 at $DIR/async_await.rs:+2:8: +2:14
+        StorageLive(_22);                // scope 0 at $DIR/async_await.rs:+2:5: +2:8
+        _22 = a() -> bb14;               // scope 0 at $DIR/async_await.rs:+2:5: +2:8
+                                         // mir::Constant
+                                         // + span: $DIR/async_await.rs:16:5: 16:6
+                                         // + literal: Const { ty: fn() -> impl Future<Output = ()> {a}, val: Value(<ZST>) }
+    }
+
+    bb14: {
+        _21 = <impl Future<Output = ()> as IntoFuture>::into_future(move _22) -> bb15; // scope 0 at $DIR/async_await.rs:+2:8: +2:14
+                                         // mir::Constant
+                                         // + span: $DIR/async_await.rs:16:8: 16:14
+                                         // + literal: Const { ty: fn(impl Future<Output = ()>) -> <impl Future<Output = ()> as IntoFuture>::IntoFuture {<impl Future<Output = ()> as IntoFuture>::into_future}, val: Value(<ZST>) }
+    }
+
+    bb15: {
+        StorageDead(_22);                // scope 0 at $DIR/async_await.rs:+2:13: +2:14
+        nop;                             // scope 0 at $DIR/async_await.rs:+2:8: +2:14
+        (((*(_1.0: &mut [async fn body@$DIR/async_await.rs:14:18: 17:2])) as variant#4).0: impl std::future::Future<Output = ()>) = move _21; // scope 0 at $DIR/async_await.rs:+2:8: +2:14
+        goto -> bb16;                    // scope 4 at $DIR/async_await.rs:+2:8: +2:14
+    }
+
+    bb16: {
+        StorageLive(_24);                // scope 4 at $DIR/async_await.rs:+2:8: +2:14
+        StorageLive(_25);                // scope 4 at $DIR/async_await.rs:+2:8: +2:14
+        StorageLive(_26);                // scope 5 at $DIR/async_await.rs:+2:8: +2:14
+        StorageLive(_27);                // scope 5 at $DIR/async_await.rs:+2:8: +2:14
+        StorageLive(_28);                // scope 5 at $DIR/async_await.rs:+2:8: +2:14
+        _28 = &mut (((*(_1.0: &mut [async fn body@$DIR/async_await.rs:14:18: 17:2])) as variant#4).0: impl std::future::Future<Output = ()>); // scope 5 at $DIR/async_await.rs:+2:8: +2:14
+        _27 = &mut (*_28);               // scope 5 at $DIR/async_await.rs:+2:8: +2:14
+        _26 = Pin::<&mut impl Future<Output = ()>>::new_unchecked(move _27) -> bb17; // scope 5 at $DIR/async_await.rs:+2:8: +2:14
+                                         // mir::Constant
+                                         // + span: $DIR/async_await.rs:16:8: 16:14
+                                         // + literal: Const { ty: unsafe fn(&mut impl Future<Output = ()>) -> Pin<&mut impl Future<Output = ()>> {Pin::<&mut impl Future<Output = ()>>::new_unchecked}, val: Value(<ZST>) }
+    }
+
+    bb17: {
+        StorageDead(_27);                // scope 5 at $DIR/async_await.rs:+2:13: +2:14
+        StorageLive(_29);                // scope 5 at $DIR/async_await.rs:+2:5: +2:14
+        StorageLive(_30);                // scope 5 at $DIR/async_await.rs:+2:5: +2:14
+        StorageLive(_31);                // scope 5 at $DIR/async_await.rs:+2:8: +2:14
+        _31 = _38;                       // scope 5 at $DIR/async_await.rs:+2:8: +2:14
+        _30 = move _31;                  // scope 5 at $DIR/async_await.rs:+2:5: +2:14
+        goto -> bb18;                    // scope 5 at $DIR/async_await.rs:+2:5: +2:14
+    }
+
+    bb18: {
+        _29 = &mut (*_30);               // scope 5 at $DIR/async_await.rs:+2:5: +2:14
+        StorageDead(_31);                // scope 5 at $DIR/async_await.rs:+2:13: +2:14
+        _25 = <impl Future<Output = ()> as Future>::poll(move _26, move _29) -> bb19; // scope 5 at $DIR/async_await.rs:+2:8: +2:14
+                                         // mir::Constant
+                                         // + span: $DIR/async_await.rs:16:8: 16:14
+                                         // + literal: Const { ty: for<'a, 'b, 'c> fn(Pin<&'a mut impl Future<Output = ()>>, &'b mut Context<'c>) -> Poll<<impl Future<Output = ()> as Future>::Output> {<impl Future<Output = ()> as Future>::poll}, val: Value(<ZST>) }
+    }
+
+    bb19: {
+        StorageDead(_29);                // scope 5 at $DIR/async_await.rs:+2:13: +2:14
+        StorageDead(_26);                // scope 5 at $DIR/async_await.rs:+2:13: +2:14
+        _32 = discriminant(_25);         // scope 4 at $DIR/async_await.rs:+2:8: +2:14
+        switchInt(move _32) -> [0: bb22, 1: bb20, otherwise: bb21]; // scope 4 at $DIR/async_await.rs:+2:8: +2:14
+    }
+
+    bb20: {
+        _24 = const ();                  // scope 4 at $DIR/async_await.rs:+2:8: +2:14
+        StorageDead(_30);                // scope 4 at $DIR/async_await.rs:+2:13: +2:14
+        StorageDead(_28);                // scope 4 at $DIR/async_await.rs:+2:13: +2:14
+        StorageDead(_25);                // scope 4 at $DIR/async_await.rs:+2:13: +2:14
+        StorageDead(_24);                // scope 4 at $DIR/async_await.rs:+2:13: +2:14
+        StorageLive(_35);                // scope 4 at $DIR/async_await.rs:+2:8: +2:14
+        StorageLive(_36);                // scope 4 at $DIR/async_await.rs:+2:8: +2:14
+        _36 = ();                        // scope 4 at $DIR/async_await.rs:+2:8: +2:14
+        Deinit(_0);                      // scope 4 at $DIR/async_await.rs:+2:8: +2:14
+        discriminant(_0) = 1;            // scope 4 at $DIR/async_await.rs:+2:8: +2:14
+        discriminant((*(_1.0: &mut [async fn body@$DIR/async_await.rs:14:18: 17:2]))) = 4; // scope 4 at $DIR/async_await.rs:+2:8: +2:14
+        return;                          // scope 4 at $DIR/async_await.rs:+2:8: +2:14
+    }
+
+    bb21: {
+        unreachable;                     // scope 4 at $DIR/async_await.rs:+2:8: +2:14
+    }
+
+    bb22: {
+        StorageLive(_33);                // scope 4 at $DIR/async_await.rs:+2:5: +2:14
+        _33 = ((_25 as Ready).0: ());    // scope 4 at $DIR/async_await.rs:+2:5: +2:14
+        _37 = _33;                       // scope 6 at $DIR/async_await.rs:+2:5: +2:14
+        StorageDead(_33);                // scope 4 at $DIR/async_await.rs:+2:13: +2:14
+        StorageDead(_30);                // scope 4 at $DIR/async_await.rs:+2:13: +2:14
+        StorageDead(_28);                // scope 4 at $DIR/async_await.rs:+2:13: +2:14
+        StorageDead(_25);                // scope 4 at $DIR/async_await.rs:+2:13: +2:14
+        StorageDead(_24);                // scope 4 at $DIR/async_await.rs:+2:13: +2:14
+        goto -> bb24;                    // scope 0 at $DIR/async_await.rs:+2:13: +2:14
+    }
+
+    bb23: {
+        StorageDead(_36);                // scope 4 at $DIR/async_await.rs:+2:13: +2:14
+        _38 = move _35;                  // scope 4 at $DIR/async_await.rs:+2:8: +2:14
+        StorageDead(_35);                // scope 4 at $DIR/async_await.rs:+2:13: +2:14
+        _7 = const ();                   // scope 4 at $DIR/async_await.rs:+2:8: +2:14
+        goto -> bb16;                    // scope 4 at $DIR/async_await.rs:+2:8: +2:14
+    }
+
+    bb24: {
+        nop;                             // scope 0 at $DIR/async_await.rs:+2:13: +2:14
+        goto -> bb25;                    // scope 0 at $DIR/async_await.rs:+3:1: +3:2
+    }
+
+    bb25: {
+        StorageDead(_21);                // scope 0 at $DIR/async_await.rs:+3:1: +3:2
+        goto -> bb26;                    // scope 0 at $DIR/async_await.rs:+3:1: +3:2
+    }
+
+    bb26: {
+        Deinit(_0);                      // scope 0 at $DIR/async_await.rs:+3:2: +3:2
+        ((_0 as Ready).0: ()) = move _37; // scope 0 at $DIR/async_await.rs:+3:2: +3:2
+        discriminant(_0) = 0;            // scope 0 at $DIR/async_await.rs:+3:2: +3:2
+        discriminant((*(_1.0: &mut [async fn body@$DIR/async_await.rs:14:18: 17:2]))) = 1; // scope 0 at $DIR/async_await.rs:+3:2: +3:2
+        return;                          // scope 0 at $DIR/async_await.rs:+3:2: +3:2
+    }
+
+    bb27: {
+        StorageLive(_3);                 // scope 0 at $DIR/async_await.rs:+0:18: +3:2
+        StorageLive(_4);                 // scope 0 at $DIR/async_await.rs:+0:18: +3:2
+        StorageLive(_19);                // scope 0 at $DIR/async_await.rs:+0:18: +3:2
+        StorageLive(_20);                // scope 0 at $DIR/async_await.rs:+0:18: +3:2
+        _19 = move _2;                   // scope 0 at $DIR/async_await.rs:+0:18: +3:2
+        goto -> bb11;                    // scope 0 at $DIR/async_await.rs:+0:18: +3:2
+    }
+
+    bb28: {
+        StorageLive(_21);                // scope 0 at $DIR/async_await.rs:+0:18: +3:2
+        StorageLive(_35);                // scope 0 at $DIR/async_await.rs:+0:18: +3:2
+        StorageLive(_36);                // scope 0 at $DIR/async_await.rs:+0:18: +3:2
+        _35 = move _2;                   // scope 0 at $DIR/async_await.rs:+0:18: +3:2
+        goto -> bb23;                    // scope 0 at $DIR/async_await.rs:+0:18: +3:2
+    }
+
+    bb29: {
+        assert(const false, "`async fn` resumed after completion") -> bb29; // scope 0 at $DIR/async_await.rs:+0:18: +3:2
+    }
+
+    bb30: {
+        unreachable;                     // scope 0 at $DIR/async_await.rs:+0:18: +3:2
+    }
+}
diff --git a/tests/mir-opt/building/async_await.rs b/tests/mir-opt/building/async_await.rs
new file mode 100644 (file)
index 0000000..0b991e3
--- /dev/null
@@ -0,0 +1,17 @@
+// This test makes sure that the generator MIR pass eliminates all calls to
+// `get_context`, that the MIR argument type for an async fn and all locals
+// related to `yield` are `&mut Context`, and that its return type is `Poll`.
+
+// edition:2018
+// compile-flags: -C panic=abort
+
+#![crate_type = "lib"]
+
+// EMIT_MIR async_await.a-{closure#0}.generator_resume.0.mir
+async fn a() {}
+
+// EMIT_MIR async_await.b-{closure#0}.generator_resume.0.mir
+pub async fn b() {
+    a().await;
+    a().await
+}
index ec6dbe1d0526b7799436afe1729994cb59411936..db041aab239e38e53845fec8cc3f293577a325ae 100644 (file)
@@ -11,12 +11,14 @@ pub fn simple(x: i32) -> i32 {
         let temp2: _;
 
         {
+            StorageLive(temp1);
             temp1 = x;
             Goto(exit)
         }
 
         exit = {
             temp2 = Move(temp1);
+            StorageDead(temp1);
             RET = temp2;
             Return()
         }
index d7560fde69c9500412035d2815c5995e57d659d0..743016708c583ae8cac3f1655dd6c8a2a02f05c7 100644 (file)
@@ -6,13 +6,15 @@ fn simple(_1: i32) -> i32 {
     let mut _3: i32;                     // in scope 0 at $SRC_DIR/core/src/intrinsics/mir.rs:LL:COL
 
     bb0: {
-        _2 = _1;                         // scope 0 at $DIR/simple_assign.rs:+6:13: +6:22
-        goto -> bb1;                     // scope 0 at $DIR/simple_assign.rs:+7:13: +7:23
+        StorageLive(_2);                 // scope 0 at $DIR/simple_assign.rs:+6:13: +6:31
+        _2 = _1;                         // scope 0 at $DIR/simple_assign.rs:+7:13: +7:22
+        goto -> bb1;                     // scope 0 at $DIR/simple_assign.rs:+8:13: +8:23
     }
 
     bb1: {
-        _3 = move _2;                    // scope 0 at $DIR/simple_assign.rs:+11:13: +11:32
-        _0 = _3;                         // scope 0 at $DIR/simple_assign.rs:+12:13: +12:24
-        return;                          // scope 0 at $DIR/simple_assign.rs:+13:13: +13:21
+        _3 = move _2;                    // scope 0 at $DIR/simple_assign.rs:+12:13: +12:32
+        StorageDead(_2);                 // scope 0 at $DIR/simple_assign.rs:+13:13: +13:31
+        _0 = _3;                         // scope 0 at $DIR/simple_assign.rs:+14:13: +14:24
+        return;                          // scope 0 at $DIR/simple_assign.rs:+15:13: +15:21
     }
 }
index 2a4dc9e3e809919ed553686727fbb420e1594fb4..a28da146e378659e68193505aeba97c8cb3d0d08 100644 (file)
       let mut _6: ();                      // in scope 0 at $DIR/inline_into_box_place.rs:+1:42: +1:43
       let mut _7: *const std::vec::Vec<u32>; // in scope 0 at $DIR/inline_into_box_place.rs:+1:29: +1:43
 +     let mut _8: &mut std::vec::Vec<u32>; // in scope 0 at $DIR/inline_into_box_place.rs:+1:33: +1:43
++     let mut _9: std::vec::Vec<u32>;      // in scope 0 at $DIR/inline_into_box_place.rs:+1:33: +1:43
       scope 1 {
           debug _x => _1;                  // in scope 1 at $DIR/inline_into_box_place.rs:+1:9: +1:11
       }
       scope 2 {
       }
 +     scope 3 (inlined Vec::<u32>::new) {  // at $DIR/inline_into_box_place.rs:8:33: 8:43
-+         let mut _9: alloc::raw_vec::RawVec<u32>; // in scope 3 at $SRC_DIR/alloc/src/vec/mod.rs:LL:COL
++         let mut _10: alloc::raw_vec::RawVec<u32>; // in scope 3 at $SRC_DIR/alloc/src/vec/mod.rs:LL:COL
 +     }
   
       bb0: {
@@ -37,8 +38,9 @@
 -         (*_7) = Vec::<u32>::new() -> [return: bb2, unwind: bb5]; // scope 0 at $DIR/inline_into_box_place.rs:+1:33: +1:43
 +         StorageLive(_8);                 // scope 0 at $DIR/inline_into_box_place.rs:+1:33: +1:43
 +         _8 = &mut (*_7);                 // scope 0 at $DIR/inline_into_box_place.rs:+1:33: +1:43
-+         StorageLive(_9);                 // scope 3 at $SRC_DIR/alloc/src/vec/mod.rs:LL:COL
-+         _9 = const _;                    // scope 3 at $SRC_DIR/alloc/src/vec/mod.rs:LL:COL
++         StorageLive(_9);                 // scope 0 at $DIR/inline_into_box_place.rs:+1:33: +1:43
++         StorageLive(_10);                // scope 3 at $SRC_DIR/alloc/src/vec/mod.rs:LL:COL
++         _10 = const _;                   // scope 3 at $SRC_DIR/alloc/src/vec/mod.rs:LL:COL
                                            // mir::Constant
 -                                          // + span: $DIR/inline_into_box_place.rs:8:33: 8:41
 -                                          // + user_ty: UserType(1)
 +                                          // + span: $SRC_DIR/alloc/src/vec/mod.rs:LL:COL
 +                                          // + user_ty: UserType(0)
 +                                          // + literal: Const { ty: alloc::raw_vec::RawVec<u32>, val: Unevaluated(alloc::raw_vec::RawVec::<T>::NEW, [u32], None) }
-+         Deinit((*_8));                   // scope 3 at $SRC_DIR/alloc/src/vec/mod.rs:LL:COL
-+         ((*_8).0: alloc::raw_vec::RawVec<u32>) = move _9; // scope 3 at $SRC_DIR/alloc/src/vec/mod.rs:LL:COL
-+         ((*_8).1: usize) = const 0_usize; // scope 3 at $SRC_DIR/alloc/src/vec/mod.rs:LL:COL
-+         StorageDead(_9);                 // scope 3 at $SRC_DIR/alloc/src/vec/mod.rs:LL:COL
++         Deinit(_9);                      // scope 3 at $SRC_DIR/alloc/src/vec/mod.rs:LL:COL
++         (_9.0: alloc::raw_vec::RawVec<u32>) = move _10; // scope 3 at $SRC_DIR/alloc/src/vec/mod.rs:LL:COL
++         (_9.1: usize) = const 0_usize;   // scope 3 at $SRC_DIR/alloc/src/vec/mod.rs:LL:COL
++         StorageDead(_10);                // scope 3 at $SRC_DIR/alloc/src/vec/mod.rs:LL:COL
++         (*_8) = move _9;                 // scope 0 at $DIR/inline_into_box_place.rs:+1:33: +1:43
++         StorageDead(_9);                 // scope 0 at $DIR/inline_into_box_place.rs:+1:33: +1:43
 +         StorageDead(_8);                 // scope 0 at $DIR/inline_into_box_place.rs:+1:33: +1:43
           _1 = move _5;                    // scope 0 at $DIR/inline_into_box_place.rs:+1:29: +1:43
           StorageDead(_5);                 // scope 0 at $DIR/inline_into_box_place.rs:+1:42: +1:43
diff --git a/tests/mir-opt/inline/issue_106141.outer.Inline.diff b/tests/mir-opt/inline/issue_106141.outer.Inline.diff
new file mode 100644 (file)
index 0000000..97361fa
--- /dev/null
@@ -0,0 +1,55 @@
+- // MIR for `outer` before Inline
++ // MIR for `outer` after Inline
+  
+  fn outer() -> usize {
+      let mut _0: usize;                   // return place in scope 0 at $DIR/issue_106141.rs:+0:19: +0:24
++     scope 1 (inlined inner) {            // at $DIR/issue_106141.rs:2:5: 2:12
++         let mut _1: bool;                // in scope 1 at $DIR/issue_106141.rs:13:8: 13:21
++         let mut _2: bool;                // in scope 1 at $DIR/issue_106141.rs:13:8: 13:21
++         let mut _3: &[bool; 1];          // in scope 1 at $DIR/issue_106141.rs:11:18: 11:25
++         scope 2 {
++             debug buffer => _3;          // in scope 2 at $DIR/issue_106141.rs:11:9: 11:15
++             scope 3 {
++                 debug index => _0;       // in scope 3 at $DIR/issue_106141.rs:12:9: 12:14
++             }
++         }
++     }
+  
+      bb0: {
+-         _0 = inner() -> bb1;             // scope 0 at $DIR/issue_106141.rs:+1:5: +1:12
++         StorageLive(_3);                 // scope 0 at $DIR/issue_106141.rs:+1:5: +1:12
++         _3 = const _;                    // scope 1 at $DIR/issue_106141.rs:11:18: 11:25
+                                           // mir::Constant
+-                                          // + span: $DIR/issue_106141.rs:2:5: 2:10
+-                                          // + literal: Const { ty: fn() -> usize {inner}, val: Value(<ZST>) }
++                                          // + span: $DIR/issue_106141.rs:11:18: 11:25
++                                          // + literal: Const { ty: &[bool; 1], val: Unevaluated(inner, [], Some(promoted[0])) }
++         _0 = index() -> bb1;             // scope 2 at $DIR/issue_106141.rs:12:17: 12:24
++                                          // mir::Constant
++                                          // + span: $DIR/issue_106141.rs:12:17: 12:22
++                                          // + literal: Const { ty: fn() -> usize {index}, val: Value(<ZST>) }
+      }
+  
+      bb1: {
++         StorageLive(_1);                 // scope 3 at $DIR/issue_106141.rs:13:8: 13:21
++         _2 = Lt(_0, const 1_usize);      // scope 3 at $DIR/issue_106141.rs:13:8: 13:21
++         assert(move _2, "index out of bounds: the length is {} but the index is {}", const 1_usize, _0) -> bb2; // scope 3 at $DIR/issue_106141.rs:13:8: 13:21
++     }
++ 
++     bb2: {
++         _1 = (*_3)[_0];                  // scope 3 at $DIR/issue_106141.rs:13:8: 13:21
++         switchInt(move _1) -> [0: bb3, otherwise: bb4]; // scope 3 at $DIR/issue_106141.rs:13:8: 13:21
++     }
++ 
++     bb3: {
++         _0 = const 0_usize;              // scope 3 at $DIR/issue_106141.rs:16:9: 16:10
++         goto -> bb4;                     // scope 3 at $DIR/issue_106141.rs:13:5: 17:6
++     }
++ 
++     bb4: {
++         StorageDead(_1);                 // scope 3 at $DIR/issue_106141.rs:17:5: 17:6
++         StorageDead(_3);                 // scope 0 at $DIR/issue_106141.rs:+1:5: +1:12
+          return;                          // scope 0 at $DIR/issue_106141.rs:+2:2: +2:2
+      }
+  }
+  
diff --git a/tests/mir-opt/inline/issue_106141.rs b/tests/mir-opt/inline/issue_106141.rs
new file mode 100644 (file)
index 0000000..c8288b7
--- /dev/null
@@ -0,0 +1,24 @@
+pub fn outer() -> usize {
+    inner()
+}
+
+fn index() -> usize {
+    loop {}
+}
+
+#[inline]
+fn inner() -> usize {
+    let buffer = &[true];
+    let index = index();
+    if buffer[index] {
+        index
+    } else {
+        0
+    }
+}
+
+fn main() {
+    outer();
+}
+
+// EMIT_MIR issue_106141.outer.Inline.diff
index 8e6564a38b0bbbaafe2dfd25125cdb4c2efefaf2..798e45df8ca766f8c0bdf0daac9492cbe7b23944 100644 (file)
@@ -22,7 +22,7 @@
 |
 fn main() -> () {
     let mut _0: ();                      // return place in scope 0 at $DIR/region_subtyping_basic.rs:+0:11: +0:11
-    let mut _1: [usize; Const { ty: usize, kind: Value(Leaf(0x00000003)) }]; // in scope 0 at $DIR/region_subtyping_basic.rs:+1:9: +1:14
+    let mut _1: [usize; Const(Value(Leaf(0x00000003)): usize)]; // in scope 0 at $DIR/region_subtyping_basic.rs:+1:9: +1:14
     let _3: usize;                       // in scope 0 at $DIR/region_subtyping_basic.rs:+2:16: +2:17
     let mut _4: usize;                   // in scope 0 at $DIR/region_subtyping_basic.rs:+2:14: +2:18
     let mut _5: bool;                    // in scope 0 at $DIR/region_subtyping_basic.rs:+2:14: +2:18
index 74d44c6741a92747b01cf35d5f7d1b52324ff91e..4767bfc76ed9de92c1632622e26392765d54cb87 100644 (file)
@@ -22,7 +22,7 @@
 |
 fn main() -> () {
     let mut _0: ();                      // return place in scope 0 at $DIR/region_subtyping_basic.rs:+0:11: +0:11
-    let mut _1: [usize; Const { ty: usize, kind: Value(Leaf(0x0000000000000003)) }]; // in scope 0 at $DIR/region_subtyping_basic.rs:+1:9: +1:14
+    let mut _1: [usize; Const(Value(Leaf(0x0000000000000003)): usize)]; // in scope 0 at $DIR/region_subtyping_basic.rs:+1:9: +1:14
     let _3: usize;                       // in scope 0 at $DIR/region_subtyping_basic.rs:+2:16: +2:17
     let mut _4: usize;                   // in scope 0 at $DIR/region_subtyping_basic.rs:+2:14: +2:18
     let mut _5: bool;                    // in scope 0 at $DIR/region_subtyping_basic.rs:+2:14: +2:18
index 7e1b6aeb31558c30da283cc004473e47c7e2bca3..5bb38fc02af91be94cf296b3c6358341d0b6eb23 100644 (file)
@@ -1,12 +1,12 @@
 #![feature(rustc_private)]
 
-extern crate rustc_interface;
 extern crate rustc_driver;
+extern crate rustc_interface;
 extern crate rustc_session;
 extern crate rustc_span;
 
-use rustc_session::config::{Input, Options, OutputType, OutputTypes};
 use rustc_interface::interface;
+use rustc_session::config::{Input, Options, OutputType, OutputTypes};
 use rustc_span::source_map::FileName;
 
 use std::path::PathBuf;
@@ -50,7 +50,6 @@ fn compile(code: String, output: PathBuf, sysroot: PathBuf) {
         crate_cfg: Default::default(),
         crate_check_cfg: Default::default(),
         input,
-        input_path: None,
         output_file: Some(output),
         output_dir: None,
         file_loader: None,
@@ -64,9 +63,7 @@ fn compile(code: String, output: PathBuf, sysroot: PathBuf) {
 
     interface::run_compiler(config, |compiler| {
         // This runs all the passes prior to linking, too.
-        let linker = compiler.enter(|queries| {
-            queries.linker()
-        });
+        let linker = compiler.enter(|queries| queries.linker());
         if let Ok(linker) = linker {
             linker.link();
         }
index 108cf8abcb529d2bf238733f3487c4430a053766..971c2f9480ea2fb00a6bb7f235af31d5865bec01 100644 (file)
@@ -1,3 +1,5 @@
+// Small test to ensure the "src-line-numbers" element is only present once on
+// the page.
 goto: "file://" + |DOC_PATH| + "/test_docs/index.html"
 click: ".srclink"
 wait-for: ".src-line-numbers"
index 94c1a6525aaa5e08bfbce9db7531c71cdcca5384..8561f537f3d32a8a18ce237832d04593fdf37be7 100644 (file)
@@ -9,16 +9,16 @@ size: (1080, 600)
 // Check that their content is inside <pre><code>
 assert-count: (".example-wrap pre > code", 4)
 // Check that function signature is inside <pre><code>
-assert: "pre.rust.fn > code"
+assert: ".item-decl pre.rust > code"
 
 goto: "file://" + |DOC_PATH| + "/test_docs/struct.Foo.html"
-assert: "pre.rust.struct > code"
+assert: ".item-decl pre.rust > code"
 
 goto: "file://" + |DOC_PATH| + "/test_docs/enum.AnEnum.html"
-assert: "pre.rust.enum > code"
+assert: ".item-decl pre.rust > code"
 
 goto: "file://" + |DOC_PATH| + "/test_docs/trait.AnotherOne.html"
-assert: "pre.rust.trait > code"
+assert: ".item-decl pre.rust > code"
 
 goto: "file://" + |DOC_PATH| + "/test_docs/type.SomeType.html"
-assert: "pre.rust.typedef > code"
+assert: ".item-decl pre.rust > code"
index 8ba005b0c35a0c7da4ee52ee7d72d91e05e4e9cb..fafb156317866cc1970c78c0cc1395a0485afdf2 100644 (file)
@@ -20,7 +20,7 @@ goto: "file://" + |DOC_PATH| + "/lib2/trait.Trait.html"
 // This is a complex selector, so here's how it works:
 //
 // * //*[@class='item-decl'] — selects element of any tag with class item-decl
-// * /pre[@class='rust trait'] — selects immediate child with tag pre and classes rust and trait
+// * /pre[@class='rust'] — selects immediate child with tag pre and class rust
 // * /code — selects immediate child with tag code
 // * /a[@class='constant'] — selects immediate child with tag a and class constant
 // * //text() — selects child that is text node
@@ -29,11 +29,11 @@ goto: "file://" + |DOC_PATH| + "/lib2/trait.Trait.html"
 // This uses '/parent::*' as a proxy for the style of the text node.
 // We can't just select the '<a>' because intermediate tags could be added.
 assert-count: (
-    "//*[@class='item-decl']/pre[@class='rust trait']/code/a[@class='constant']//text()/parent::*",
+    "//*[@class='item-decl']/pre[@class='rust']/code/a[@class='constant']//text()/parent::*",
     1,
 )
 assert-css: (
-    "//*[@class='item-decl']/pre[@class='rust trait']/code/a[@class='constant']//text()/parent::*",
+    "//*[@class='item-decl']/pre[@class='rust']/code/a[@class='constant']//text()/parent::*",
     {"font-weight": "400"},
 )
 
index 3423a449de478a38b5dafe8ab921d0e69102801d..c527cfbfcbc5630558ab81e40d3fd7c0d094d109 100644 (file)
@@ -1,4 +1,4 @@
 // This test checks that code blocks in lists are supported.
 goto: "file://" + |DOC_PATH| + "/test_docs/index.html"
 goto: "./fn.check_list_code_block.html"
-assert: ("pre.rust.fn")
+assert: (".item-decl pre.rust")
index 10651a3f6696d5ef41eaef999ee37c527446b97a..1b5c3a0d202a0cfd1bd88fb96cb528764ff69822 100644 (file)
@@ -1,3 +1,5 @@
+// This test ensures that the scraped examples buttons work as expected
+// when the 'Enter' key is pressed while they're focused.
 goto: "file://" + |DOC_PATH| + "/scrape_examples/fn.test.html"
 
 // The next/prev buttons vertically scroll the code viewport between examples
index 40f31b2771b258f4d995510d45fc40836c4b2262..67c58826efc2691e226365fe3a4b9a0253af9353 100644 (file)
@@ -58,3 +58,39 @@ call-function: ("check-colors", {
     "help_hover_border": "rgb(0, 0, 0)",
     "help_hover_color": "rgb(0, 0, 0)",
 })
+
+// Now testing the top and bottom background in case there is only one scraped example.
+goto: "file://" + |DOC_PATH| + "/scrape_examples/fn.test.html"
+
+define-function: (
+    "check-background",
+    (theme, background_color_start, background_color_end),
+    block {
+        local-storage: { "rustdoc-theme": |theme|, "rustdoc-use-system-theme": "false", }
+        reload:
+        assert-css: (".scraped-example:not(.expanded) .code-wrapper::before", {
+            "background-image": "linear-gradient(" + |background_color_start| + ", " +
+                |background_color_end| + ")",
+        })
+        assert-css: (".scraped-example:not(.expanded) .code-wrapper::after", {
+            "background-image": "linear-gradient(to top, " + |background_color_start| + ", " +
+                |background_color_end| + ")",
+        })
+    },
+)
+
+call-function: ("check-background", {
+    "theme": "ayu",
+    "background_color_start": "rgb(15, 20, 25)",
+    "background_color_end": "rgba(15, 20, 25, 0)",
+})
+call-function: ("check-background", {
+    "theme": "dark",
+    "background_color_start": "rgb(53, 53, 53)",
+    "background_color_end": "rgba(53, 53, 53, 0)",
+})
+call-function: ("check-background", {
+    "theme": "light",
+    "background_color_start": "rgb(255, 255, 255)",
+    "background_color_end": "rgba(255, 255, 255, 0)",
+})
index f236dc3e0fe7681344504d8677b693de6c59044b..419cc5ebac35de6325566ccb8d8501c998fa31fb 100644 (file)
@@ -8,6 +8,10 @@ assert-false: "#settings"
 click: "#settings-menu"
 wait-for: "#settings"
 assert-css: ("#settings", {"display": "block"})
+
+// Store the line margin to compare with the settings.html later.
+store-css: (setting_line_margin, ".setting-line", "margin")
+
 // Let's close it by clicking on the same button.
 click: "#settings-menu"
 wait-for-css: ("#settings", {"display": "none"})
@@ -105,6 +109,33 @@ assert-css: (
         "box-shadow": "rgb(33, 150, 243) 0px 0px 1px 1px",
     },
 )
+// Now we check that the setting-name for radio buttons is on a different line than the label.
+compare-elements-position-near: (
+    "#theme .setting-name",
+    "#theme .choices",
+    {"x": 1}
+)
+compare-elements-position-near-false: (
+    "#theme .setting-name",
+    "#theme .choices",
+    {"y": 1}
+)
+// Now we check that the label positions are all on the same line.
+compare-elements-position-near: (
+    "#theme .choices #theme-light",
+    "#theme .choices #theme-dark",
+    {"y": 1}
+)
+compare-elements-position-near: (
+    "#theme .choices #theme-dark",
+    "#theme .choices #theme-ayu",
+    {"y": 1}
+)
+compare-elements-position-near: (
+    "#theme .choices #theme-ayu",
+    "#theme .choices #theme-system-preference",
+    {"y": 1}
+)
 
 // First we check the "default" display for toggles.
 assert-css: (
@@ -176,6 +207,25 @@ press-key: "?"
 wait-for-css: ("#help-button .popover", {"display": "block"})
 assert-css: ("#settings-menu .popover", {"display": "none"})
 
+// Now switch back to the settings popover, and make sure the keyboard
+// shortcut works when a check box is selected.
+click: "#settings-menu > a"
+wait-for-css: ("#settings-menu .popover", {"display": "block"})
+focus: "#auto-hide-large-items"
+press-key: "?"
+wait-for-css: ("#settings-menu .popover", {"display": "none"})
+wait-for-css: ("#help-button .popover", {"display": "block"})
+
+// Now switch back to the settings popover, and make sure the keyboard
+// shortcut works when a radio button is selected.
+click: "#settings-menu > a"
+wait-for-css: ("#settings-menu .popover", {"display": "block"})
+wait-for-css: ("#help-button .popover", {"display": "none"})
+focus: "#theme-system-preference"
+press-key: "?"
+wait-for-css: ("#settings-menu .popover", {"display": "none"})
+wait-for-css: ("#help-button .popover", {"display": "block"})
+
 // Now we go to the settings page to check that the CSS is loaded as expected.
 goto: "file://" + |DOC_PATH| + "/settings.html"
 wait-for: "#settings"
@@ -184,6 +234,9 @@ assert-css: (".setting-line", {"position": "relative"})
 assert-attribute-false: ("#settings", {"class": "popover"}, CONTAINS)
 compare-elements-position: (".sub form", "#settings", ("x"))
 
+// Check that setting-line has the same margin in this mode as in the popover.
+assert-css: (".setting-line", {"margin": |setting_line_margin|})
+
 // We now check the display with JS disabled.
 assert-false: "noscript section"
 javascript: false
index 858046e72e9a456518acb9d749bd972218101a00..1b4c7b40570203b2b4eecb55488615984a72505d 100644 (file)
@@ -3,8 +3,8 @@ const QUERY = 'macro:print';
 const EXPECTED = {
     'others': [
         { 'path': 'std', 'name': 'print' },
-        { 'path': 'std', 'name': 'eprint' },
         { 'path': 'std', 'name': 'println' },
+        { 'path': 'std', 'name': 'eprint' },
         { 'path': 'std', 'name': 'eprintln' },
     ],
 };
index 25efbad26954003233d959692152f34888221bd7..fd5c5489d79cfe04a4d844cfdec619ec542719f7 100644 (file)
@@ -6,8 +6,8 @@ const FILTER_CRATE = 'std';
 const EXPECTED = {
     'others': [
         { 'path': 'std', 'name': 'print' },
-        { 'path': 'std', 'name': 'eprint' },
         { 'path': 'std', 'name': 'println' },
+        { 'path': 'std', 'name': 'eprint' },
         { 'path': 'std', 'name': 'eprintln' },
         { 'path': 'std::pin', 'name': 'pin' },
         { 'path': 'std::future', 'name': 'join' },
index cd0e8e7b4a9eb41524adbb55fc8d9c931a8d20d4..fc44a566af21f8ab1f2d9554370366c60975ab09 100644 (file)
@@ -3,7 +3,8 @@ const QUERY = 'Vec::new';
 const EXPECTED = {
     'others': [
         { 'path': 'std::vec::Vec', 'name': 'new' },
-        { 'path': 'std::vec::Vec', 'name': 'ne' },
-        { 'path': 'alloc::vec::Vec', 'name': 'ne' },
+        { 'path': 'alloc::vec::Vec', 'name': 'new' },
+        { 'path': 'std::vec::Vec', 'name': 'new_in' },
+        { 'path': 'alloc::vec::Vec', 'name': 'new_in' },
     ],
 };
index d14672af71fd6b714d3107b68c04ee16bd130d71..3b2f15a40bf87448fb9f441266e1934d1a6c6301 100644 (file)
@@ -4,7 +4,6 @@ const EXPECTED = {
     'others': [
         { 'path': 'search_short_types', 'name': 'P' },
         { 'path': 'search_short_types::VeryLongTypeName', 'name': 'p' },
-        { 'path': 'search_short_types', 'name': 'Ap' },
-        { 'path': 'search_short_types::VeryLongTypeName', 'name': 'ap' },
+        { 'path': 'search_short_types', 'name': 'Pa' },
     ],
 };
index 939da186fbcdb2dcac7b1f6b7f5dca4d7c280f32..4b1e04234c870594bcaaf458bb9b2fbe39507c8c 100644 (file)
@@ -1,10 +1,12 @@
+// check-pass
 // normalize-stderr-test: "`.*`" -> "`DEF_ID`"
 // normalize-stdout-test: "`.*`" -> "`DEF_ID`"
 // edition:2018
 
 pub async fn f() -> impl std::fmt::Debug {
+    // rustdoc doesn't care that this is infinitely sized
     #[derive(Debug)]
-    enum E { //~ ERROR
+    enum E {
         This(E),
         Unit,
     }
diff --git a/tests/rustdoc-ui/infinite-recursive-type-impl-trait-return.stderr b/tests/rustdoc-ui/infinite-recursive-type-impl-trait-return.stderr
deleted file mode 100644 (file)
index aff7402..0000000
+++ /dev/null
@@ -1,16 +0,0 @@
-error[E0072]: recursive type `DEF_ID` has infinite size
-  --> $DIR/infinite-recursive-type-impl-trait-return.rs:7:5
-   |
-LL |     enum E {
-   |     ^^^^^^
-LL |         This(E),
-   |              - recursive without indirection
-   |
-help: insert some indirection (e.g., a `DEF_ID`) to break the cycle
-   |
-LL |         This(Box<E>),
-   |              ++++ +
-
-error: aborting due to previous error
-
-For more information about this error, try `DEF_ID`.
index ac51725749867cea7f7b979839320140378480d3..ac79582fb3f0df48e75c11cc1101e0d9f99ac5bc 100644 (file)
@@ -1,5 +1,8 @@
+// check-pass
+
 fn f() -> impl Sized {
-    enum E { //~ ERROR
+    // rustdoc doesn't care that this is infinitely sized
+    enum E {
         V(E),
     }
     unimplemented!()
diff --git a/tests/rustdoc-ui/infinite-recursive-type-impl-trait.stderr b/tests/rustdoc-ui/infinite-recursive-type-impl-trait.stderr
deleted file mode 100644 (file)
index a61577b..0000000
+++ /dev/null
@@ -1,16 +0,0 @@
-error[E0072]: recursive type `f::E` has infinite size
-  --> $DIR/infinite-recursive-type-impl-trait.rs:2:5
-   |
-LL |     enum E {
-   |     ^^^^^^
-LL |         V(E),
-   |           - recursive without indirection
-   |
-help: insert some indirection (e.g., a `Box`, `Rc`, or `&`) to break the cycle
-   |
-LL |         V(Box<E>),
-   |           ++++ +
-
-error: aborting due to previous error
-
-For more information about this error, try `rustc --explain E0072`.
index 07f92ac51b9f53de511b434344791a001f42fd37..e7c0ee2de1a7a9036cc55a6ea9f6aecd588acfd5 100644 (file)
@@ -4,25 +4,25 @@
 pub struct MyBox<T: ?Sized>(*const T);
 
 // @has 'foo/fn.alpha.html'
-// @snapshot link_slice_u32 - '//pre[@class="rust fn"]/code'
+// @snapshot link_slice_u32 - '//div[@class="item-decl"]/pre[@class="rust"]/code'
 pub fn alpha() -> &'static [u32; 1] {
     loop {}
 }
 
 // @has 'foo/fn.beta.html'
-// @snapshot link_slice_generic - '//pre[@class="rust fn"]/code'
+// @snapshot link_slice_generic - '//div[@class="item-decl"]/pre[@class="rust"]/code'
 pub fn beta<T>() -> &'static [T; 1] {
     loop {}
 }
 
 // @has 'foo/fn.gamma.html'
-// @snapshot link_box_u32 - '//pre[@class="rust fn"]/code'
+// @snapshot link_box_u32 - '//div[@class="item-decl"]/pre[@class="rust"]/code'
 pub fn gamma() -> MyBox<[u32; 1]> {
     loop {}
 }
 
 // @has 'foo/fn.delta.html'
-// @snapshot link_box_generic - '//pre[@class="rust fn"]/code'
+// @snapshot link_box_generic - '//div[@class="item-decl"]/pre[@class="rust"]/code'
 pub fn delta<T>() -> MyBox<[T; 1]> {
     loop {}
 }
index 3da19a13e5331ecb3e83dabc2b014f252eb98a01..77b139b644f3a8dd97b0cff39336735535d6c22e 100644 (file)
@@ -1,5 +1,5 @@
 pub trait Foo {
-    // @has assoc_consts/trait.Foo.html '//*[@class="rust trait"]' \
+    // @has assoc_consts/trait.Foo.html '//div[@class="item-decl"]/pre[@class="rust"]' \
     //      'const FOO: usize = 13usize;'
     // @has - '//*[@id="associatedconstant.FOO"]' 'const FOO: usize'
     const FOO: usize = 12 + 1;
index a409d64131afdfa96de35c418376f41569be6738..ab9702a24f469834a8fba8881cd660977831b744 100644 (file)
@@ -10,5 +10,5 @@ pub trait AsExpression<T> {
 }
 
 // @has foo/type.AsExprOf.html
-// @has - '//pre[@class="rust typedef"]' 'type AsExprOf<Item, Type> = <Item as AsExpression<Type>>::Expression;'
+// @has - '//div[@class="item-decl"]/pre[@class="rust"]' 'type AsExprOf<Item, Type> = <Item as AsExpression<Type>>::Expression;'
 pub type AsExprOf<Item, Type> = <Item as AsExpression<Type>>::Expression;
index a9e5b8d0019280842bd4a0d4e82623a1665ae524..de36c8ffeff0f6121e63f6ef38a24efb28d4ed8b 100644 (file)
@@ -12,8 +12,8 @@ pub trait Index<I: ?Sized> {
 }
 
 // @has assoc_types/fn.use_output.html
-// @has - '//*[@class="rust fn"]' '-> &T::Output'
-// @has - '//*[@class="rust fn"]//a[@href="trait.Index.html#associatedtype.Output"]' 'Output'
+// @has - '//div[@class="item-decl"]/pre[@class="rust"]' '-> &T::Output'
+// @has - '//div[@class="item-decl"]/pre[@class="rust"]//a[@href="trait.Index.html#associatedtype.Output"]' 'Output'
 pub fn use_output<T: Index<usize>>(obj: &T, index: usize) -> &T::Output {
     obj.index(index)
 }
@@ -23,13 +23,13 @@ pub trait Feed {
 }
 
 // @has assoc_types/fn.use_input.html
-// @has - '//*[@class="rust fn"]' 'T::Input'
-// @has - '//*[@class="rust fn"]//a[@href="trait.Feed.html#associatedtype.Input"]' 'Input'
+// @has - '//div[@class="item-decl"]/pre[@class="rust"]' 'T::Input'
+// @has - '//div[@class="item-decl"]/pre[@class="rust"]//a[@href="trait.Feed.html#associatedtype.Input"]' 'Input'
 pub fn use_input<T: Feed>(_feed: &T, _element: T::Input) { }
 
 // @has assoc_types/fn.cmp_input.html
-// @has - '//*[@class="rust fn"]' 'where T::Input: PartialEq<U::Input>'
-// @has - '//*[@class="rust fn"]//a[@href="trait.Feed.html#associatedtype.Input"]' 'Input'
+// @has - '//div[@class="item-decl"]/pre[@class="rust"]' 'where T::Input: PartialEq<U::Input>'
+// @has - '//div[@class="item-decl"]/pre[@class="rust"]//a[@href="trait.Feed.html#associatedtype.Input"]' 'Input'
 pub fn cmp_input<T: Feed, U: Feed>(a: &T::Input, b: &U::Input) -> bool
     where T::Input: PartialEq<U::Input>
 {
index af765c51ace391da7ea7608e5d85b46f9d9939af..fb7ebb5f82239791039b451f21815eb27a00f443 100644 (file)
@@ -1,35 +1,35 @@
 // edition:2018
-// @has async_fn/fn.foo.html '//pre[@class="rust fn"]' 'pub async fn foo() -> Option<Foo>'
+// @has async_fn/fn.foo.html '//div[@class="item-decl"]/pre[@class="rust"]' 'pub async fn foo() -> Option<Foo>'
 pub async fn foo() -> Option<Foo> {
     None
 }
 
-// @has async_fn/fn.bar.html '//pre[@class="rust fn"]' 'pub async fn bar(a: i32, b: i32) -> i32'
+// @has async_fn/fn.bar.html '//div[@class="item-decl"]/pre[@class="rust"]' 'pub async fn bar(a: i32, b: i32) -> i32'
 pub async fn bar(a: i32, b: i32) -> i32 {
     0
 }
 
-// @has async_fn/fn.baz.html '//pre[@class="rust fn"]' 'pub async fn baz<T>(a: T) -> T'
+// @has async_fn/fn.baz.html '//div[@class="item-decl"]/pre[@class="rust"]' 'pub async fn baz<T>(a: T) -> T'
 pub async fn baz<T>(a: T) -> T {
     a
 }
 
-// @has async_fn/fn.qux.html '//pre[@class="rust fn"]' 'pub async unsafe fn qux() -> char'
+// @has async_fn/fn.qux.html '//div[@class="item-decl"]/pre[@class="rust"]' 'pub async unsafe fn qux() -> char'
 pub async unsafe fn qux() -> char {
     '⚠'
 }
 
-// @has async_fn/fn.mut_args.html '//pre[@class="rust fn"]' 'pub async fn mut_args(a: usize)'
+// @has async_fn/fn.mut_args.html '//div[@class="item-decl"]/pre[@class="rust"]' 'pub async fn mut_args(a: usize)'
 pub async fn mut_args(mut a: usize) {}
 
-// @has async_fn/fn.mut_ref.html '//pre[@class="rust fn"]' 'pub async fn mut_ref(x: i32)'
+// @has async_fn/fn.mut_ref.html '//div[@class="item-decl"]/pre[@class="rust"]' 'pub async fn mut_ref(x: i32)'
 pub async fn mut_ref(ref mut x: i32) {}
 
 trait Bar {}
 
 impl Bar for () {}
 
-// @has async_fn/fn.quux.html '//pre[@class="rust fn"]' 'pub async fn quux() -> impl Bar'
+// @has async_fn/fn.quux.html '//div[@class="item-decl"]/pre[@class="rust"]' 'pub async fn quux() -> impl Bar'
 pub async fn quux() -> impl Bar {
     ()
 }
@@ -50,27 +50,27 @@ pub trait Pattern<'a> {}
 
 pub trait Trait<const N: usize> {}
 // @has async_fn/fn.const_generics.html
-// @has - '//pre[@class="rust fn"]' 'pub async fn const_generics<const N: usize>(_: impl Trait<N>)'
+// @has - '//div[@class="item-decl"]/pre[@class="rust"]' 'pub async fn const_generics<const N: usize>(_: impl Trait<N>)'
 pub async fn const_generics<const N: usize>(_: impl Trait<N>) {}
 
 // test that elided lifetimes are properly elided and not displayed as `'_`
 // regression test for #63037
 // @has async_fn/fn.elided.html
-// @has - '//pre[@class="rust fn"]' 'pub async fn elided(foo: &str) -> &str'
+// @has - '//div[@class="item-decl"]/pre[@class="rust"]' 'pub async fn elided(foo: &str) -> &str'
 pub async fn elided(foo: &str) -> &str {}
 // This should really be shown as written, but for implementation reasons it's difficult.
 // See `impl Clean for TyKind::Ref`.
 // @has async_fn/fn.user_elided.html
-// @has - '//pre[@class="rust fn"]' 'pub async fn user_elided(foo: &str) -> &str'
+// @has - '//div[@class="item-decl"]/pre[@class="rust"]' 'pub async fn user_elided(foo: &str) -> &str'
 pub async fn user_elided(foo: &'_ str) -> &str {}
 // @has async_fn/fn.static_trait.html
-// @has - '//pre[@class="rust fn"]' 'pub async fn static_trait(foo: &str) -> Box<dyn Bar>'
+// @has - '//div[@class="item-decl"]/pre[@class="rust"]' 'pub async fn static_trait(foo: &str) -> Box<dyn Bar>'
 pub async fn static_trait(foo: &str) -> Box<dyn Bar> {}
 // @has async_fn/fn.lifetime_for_trait.html
-// @has - '//pre[@class="rust fn"]' "pub async fn lifetime_for_trait(foo: &str) -> Box<dyn Bar + '_>"
+// @has - '//div[@class="item-decl"]/pre[@class="rust"]' "pub async fn lifetime_for_trait(foo: &str) -> Box<dyn Bar + '_>"
 pub async fn lifetime_for_trait(foo: &str) -> Box<dyn Bar + '_> {}
 // @has async_fn/fn.elided_in_input_trait.html
-// @has - '//pre[@class="rust fn"]' "pub async fn elided_in_input_trait(t: impl Pattern<'_>)"
+// @has - '//div[@class="item-decl"]/pre[@class="rust"]' "pub async fn elided_in_input_trait(t: impl Pattern<'_>)"
 pub async fn elided_in_input_trait(t: impl Pattern<'_>) {}
 
 struct AsyncFdReadyGuard<'a, T> { x: &'a T }
@@ -88,8 +88,8 @@ pub async fn mut_self(&mut self) {}
 
 // test named lifetimes, just in case
 // @has async_fn/fn.named.html
-// @has - '//pre[@class="rust fn"]' "pub async fn named<'a, 'b>(foo: &'a str) -> &'b str"
+// @has - '//div[@class="item-decl"]/pre[@class="rust"]' "pub async fn named<'a, 'b>(foo: &'a str) -> &'b str"
 pub async fn named<'a, 'b>(foo: &'a str) -> &'b str {}
 // @has async_fn/fn.named_trait.html
-// @has - '//pre[@class="rust fn"]' "pub async fn named_trait<'a, 'b>(foo: impl Pattern<'a>) -> impl Pattern<'b>"
+// @has - '//div[@class="item-decl"]/pre[@class="rust"]' "pub async fn named_trait<'a, 'b>(foo: impl Pattern<'a>) -> impl Pattern<'b>"
 pub async fn named_trait<'a, 'b>(foo: impl Pattern<'a>) -> impl Pattern<'b> {}
index a36dadced87d71e906c79babf398039265b46c12..70e2e5c29508c211b28246df05e984b8fc2167b4 100644 (file)
@@ -1,13 +1,13 @@
 #![crate_name = "foo"]
 
-// @has foo/fn.f.html '//*[@class="rust fn"]' '#[no_mangle]'
+// @has foo/fn.f.html '//div[@class="item-decl"]/pre[@class="rust"]' '#[no_mangle]'
 #[no_mangle]
 pub extern "C" fn f() {}
 
-// @has foo/fn.g.html '//*[@class="rust fn"]' '#[export_name = "bar"]'
+// @has foo/fn.g.html '//div[@class="item-decl"]/pre[@class="rust"]' '#[export_name = "bar"]'
 #[export_name = "bar"]
 pub extern "C" fn g() {}
 
-// @has foo/struct.Repr.html '//*[@class="item-decl"]' '#[repr(C, align(8))]'
+// @has foo/struct.Repr.html '//div[@class="item-decl"]' '#[repr(C, align(8))]'
 #[repr(C, align(8))]
 pub struct Repr;
index 45664dfc3823da490d5625df87efdb3c2594b43e..5143968bbd4390df0bab4c66cb4eb1f611eeb7c6 100644 (file)
@@ -1,5 +1,5 @@
 // @has issue_85454/trait.FromResidual.html
-// @has - '//pre[@class="rust trait"]' 'pub trait FromResidual<R = <Self as Try>::Residual> { fn from_residual(residual: R) -> Self; }'
+// @has - '//div[@class="item-decl"]/pre[@class="rust"]' 'pub trait FromResidual<R = <Self as Try>::Residual> { fn from_residual(residual: R) -> Self; }'
 pub trait FromResidual<R = <Self as Try>::Residual> {
     fn from_residual(residual: R) -> Self;
 }
index 28eba849ace072df369e2abdccd1556b1da4fec9..4366ad4d0adaca435f5cf5ee8df2b307673e8a37 100644 (file)
@@ -1,7 +1,7 @@
 #![crate_name = "foo"]
 
 // @has foo/fn.bar.html
-// @has - '//*[@class="rust fn"]' 'pub const fn bar() -> '
+// @has - '//div[@class="item-decl"]/pre[@class="rust"]' 'pub const fn bar() -> '
 /// foo
 pub const fn bar() -> usize {
     2
index 6cbae9abebb73021d2f43d5ee852a92336e128f0..b5226ad3f78bd6e0199ce2a4629b2207e406fbfa 100644 (file)
@@ -2,7 +2,7 @@
 
 use std::ops::Add;
 
-// @has foo/struct.Simd.html '//pre[@class="rust struct"]' 'pub struct Simd<T, const WIDTH: usize>'
+// @has foo/struct.Simd.html '//div[@class="item-decl"]/pre[@class="rust"]' 'pub struct Simd<T, const WIDTH: usize>'
 pub struct Simd<T, const WIDTH: usize> {
     inner: T,
 }
index 2693d9b596993ff1e75a5e411fbbecb2c0330a79..acc3b853e5679c577696ffb87ff3578e00791293 100644 (file)
@@ -1,5 +1,5 @@
 #![crate_name = "foo"]
 
-// @has foo/struct.Foo.html '//pre[@class="rust struct"]' \
+// @has foo/struct.Foo.html '//div[@class="item-decl"]/pre[@class="rust"]' \
 //      'pub struct Foo<const M: usize = 10, const N: usize = M, T = i32>(_);'
 pub struct Foo<const M: usize = 10, const N: usize = M, T = i32>(T);
index 5bf76e3c4690818b361e7bfa8e2665e0b1c7ee11..543332d2c320b8922d18135ee7e27cfe01d25606 100644 (file)
@@ -3,21 +3,21 @@
 #![crate_name = "foo"]
 
 extern crate extern_crate;
-// @has foo/fn.extern_fn.html '//pre[@class="rust fn"]' \
+// @has foo/fn.extern_fn.html '//div[@class="item-decl"]/pre[@class="rust"]' \
 //      'pub fn extern_fn<const N: usize>() -> impl Iterator<Item = [u8; N]>'
 pub use extern_crate::extern_fn;
-// @has foo/struct.ExternTy.html '//pre[@class="rust struct"]' \
+// @has foo/struct.ExternTy.html '//div[@class="item-decl"]/pre[@class="rust"]' \
 //      'pub struct ExternTy<const N: usize> {'
 pub use extern_crate::ExternTy;
-// @has foo/type.TyAlias.html '//pre[@class="rust typedef"]' \
+// @has foo/type.TyAlias.html '//div[@class="item-decl"]/pre[@class="rust"]' \
 //      'type TyAlias<const N: usize> = ExternTy<N>;'
 pub use extern_crate::TyAlias;
-// @has foo/trait.WTrait.html '//pre[@class="rust trait"]' \
+// @has foo/trait.WTrait.html '//div[@class="item-decl"]/pre[@class="rust"]' \
 //      'pub trait WTrait<const N: usize, const M: usize>'
-// @has - '//*[@class="rust trait"]' 'fn hey<const P: usize>() -> usize'
+// @has - '//div[@class="item-decl"]/pre[@class="rust"]' 'fn hey<const P: usize>() -> usize'
 pub use extern_crate::WTrait;
 
-// @has foo/trait.Trait.html '//pre[@class="rust trait"]' \
+// @has foo/trait.Trait.html '//div[@class="item-decl"]/pre[@class="rust"]' \
 //      'pub trait Trait<const N: usize>'
 // @has - '//*[@id="impl-Trait%3C1%3E-for-u8"]//h3[@class="code-header"]' 'impl Trait<1> for u8'
 // @has - '//*[@id="impl-Trait%3C2%3E-for-u8"]//h3[@class="code-header"]' 'impl Trait<2> for u8'
@@ -30,10 +30,10 @@ impl Trait<2> for u8 {}
 impl Trait<{1 + 2}> for u8 {}
 impl<const N: usize> Trait<N> for [u8; N] {}
 
-// @has foo/struct.Foo.html '//pre[@class="rust struct"]' \
+// @has foo/struct.Foo.html '//div[@class="item-decl"]/pre[@class="rust"]' \
 //      'pub struct Foo<const N: usize>where u8: Trait<N>'
 pub struct Foo<const N: usize> where u8: Trait<N>;
-// @has foo/struct.Bar.html '//pre[@class="rust struct"]' 'pub struct Bar<T, const N: usize>(_)'
+// @has foo/struct.Bar.html '//div[@class="item-decl"]/pre[@class="rust"]' 'pub struct Bar<T, const N: usize>(_)'
 pub struct Bar<T, const N: usize>([T; N]);
 
 // @has foo/struct.Foo.html '//*[@id="impl-Foo%3CM%3E"]/h3[@class="code-header"]' 'impl<const M: usize> Foo<M>where u8: Trait<M>'
@@ -56,32 +56,32 @@ pub fn hey<const N: usize>(&self) -> Foo<N> where u8: Trait<N> {
     }
 }
 
-// @has foo/fn.test.html '//pre[@class="rust fn"]' \
+// @has foo/fn.test.html '//div[@class="item-decl"]/pre[@class="rust"]' \
 //      'pub fn test<const N: usize>() -> impl Trait<N>where u8: Trait<N>'
 pub fn test<const N: usize>() -> impl Trait<N> where u8: Trait<N> {
     2u8
 }
 
-// @has foo/fn.a_sink.html '//pre[@class="rust fn"]' \
+// @has foo/fn.a_sink.html '//div[@class="item-decl"]/pre[@class="rust"]' \
 //      'pub async fn a_sink<const N: usize>(v: [u8; N]) -> impl Trait<N>'
 pub async fn a_sink<const N: usize>(v: [u8; N]) -> impl Trait<N> {
     v
 }
 
-// @has foo/fn.b_sink.html '//pre[@class="rust fn"]' \
+// @has foo/fn.b_sink.html '//div[@class="item-decl"]/pre[@class="rust"]' \
 //      'pub async fn b_sink<const N: usize>(_: impl Trait<N>)'
 pub async fn b_sink<const N: usize>(_: impl Trait<N>) {}
 
-// @has foo/fn.concrete.html '//pre[@class="rust fn"]' \
+// @has foo/fn.concrete.html '//div[@class="item-decl"]/pre[@class="rust"]' \
 //      'pub fn concrete() -> [u8; 22]'
 pub fn concrete() -> [u8; 3 + std::mem::size_of::<u64>() << 1] {
     Default::default()
 }
 
-// @has foo/type.Faz.html '//pre[@class="rust typedef"]' \
+// @has foo/type.Faz.html '//div[@class="item-decl"]/pre[@class="rust"]' \
 //      'type Faz<const N: usize> = [u8; N];'
 pub type Faz<const N: usize> = [u8; N];
-// @has foo/type.Fiz.html '//pre[@class="rust typedef"]' \
+// @has foo/type.Fiz.html '//div[@class="item-decl"]/pre[@class="rust"]' \
 //      'type Fiz<const N: usize> = [[u8; N]; 48];'
 pub type Fiz<const N: usize> = [[u8; N]; 3 << 4];
 
@@ -91,7 +91,7 @@ macro_rules! define_me {
     }
 }
 
-// @has foo/struct.Foz.html '//pre[@class="rust struct"]' \
+// @has foo/struct.Foz.html '//div[@class="item-decl"]/pre[@class="rust"]' \
 //      'pub struct Foz<const N: usize>(_);'
 define_me!(Foz<N>);
 
@@ -103,13 +103,13 @@ trait Q {
     const ASSOC: usize = N;
 }
 
-// @has foo/fn.q_user.html '//pre[@class="rust fn"]' \
+// @has foo/fn.q_user.html '//div[@class="item-decl"]/pre[@class="rust"]' \
 //      'pub fn q_user() -> [u8; 13]'
 pub fn q_user() -> [u8; <[u8; 13] as Q>::ASSOC] {
     [0; <[u8; 13] as Q>::ASSOC]
 }
 
-// @has foo/union.Union.html '//pre[@class="rust union"]' \
+// @has foo/union.Union.html '//div[@class="item-decl"]/pre[@class="rust"]' \
 //      'pub union Union<const N: usize>'
 pub union Union<const N: usize> {
     // @has - //pre "pub arr: [u8; N]"
@@ -118,7 +118,7 @@ pub union Union<const N: usize> {
     pub another_arr: [(); N],
 }
 
-// @has foo/enum.Enum.html '//pre[@class="rust enum"]' \
+// @has foo/enum.Enum.html '//div[@class="item-decl"]/pre[@class="rust"]' \
 //      'pub enum Enum<const N: usize>'
 pub enum Enum<const N: usize> {
     // @has - //pre "Variant([u8; N])"
index 75ee84279be3c840976325a93401735af5b4c800..726fb8f0c341681af2ca5df0b666ee9dc6643c3e 100644 (file)
@@ -8,7 +8,7 @@ pub enum Order {
     Unsorted,
 }
 
-// @has foo/struct.VSet.html '//pre[@class="rust struct"]' 'pub struct VSet<T, const ORDER: Order>'
+// @has foo/struct.VSet.html '//div[@class="item-decl"]/pre[@class="rust"]' 'pub struct VSet<T, const ORDER: Order>'
 // @has foo/struct.VSet.html '//*[@id="impl-Send-for-VSet%3CT%2C%20ORDER%3E"]/h3[@class="code-header"]' 'impl<T, const ORDER: Order> Send for VSet<T, ORDER>'
 // @has foo/struct.VSet.html '//*[@id="impl-Sync-for-VSet%3CT%2C%20ORDER%3E"]/h3[@class="code-header"]' 'impl<T, const ORDER: Order> Sync for VSet<T, ORDER>'
 pub struct VSet<T, const ORDER: Order> {
index 215ee228eb857dfba2f05370de52b9390a767dc8..c53cf6dcd0570b4f0717ca79062cb2f35333ee74 100644 (file)
@@ -2,6 +2,6 @@
 #![feature(generic_const_exprs)]
 #![allow(incomplete_features)]
 // make sure that `ConstEvaluatable` predicates don't cause rustdoc to ICE #77647
-// @has foo/struct.Ice.html '//pre[@class="rust struct"]' \
+// @has foo/struct.Ice.html '//div[@class="item-decl"]/pre[@class="rust"]' \
 //      'pub struct Ice<const N: usize>;'
 pub struct Ice<const N: usize> where [(); N + 1]:;
index ebda5b1940455a2ab5611c7f47d9b6afe3a177f0..72473a112440ce28414f01b479f18dc3e4a789db 100644 (file)
@@ -1,4 +1,4 @@
 #![crate_name = "foo"]
 
-// @has foo/type.CellIndex.html '//pre[@class="rust typedef"]' 'type CellIndex<const D: usize> = [i64; D];'
+// @has foo/type.CellIndex.html '//div[@class="item-decl"]/pre[@class="rust"]' 'type CellIndex<const D: usize> = [i64; D];'
 pub type CellIndex<const D: usize> = [i64; D];
index 2fc486d01dae02b1ed7f2260b001690d96a82fb5..42f6ac7923bf2e12b6a95944a29a69c824ce881c 100644 (file)
@@ -6,20 +6,20 @@
 
 extern "rust-intrinsic" {
     // @has 'foo/fn.transmute.html'
-    // @has - '//pre[@class="rust fn"]' 'pub const unsafe extern "rust-intrinsic" fn transmute<T, U>(_: T) -> U'
+    // @has - '//div[@class="item-decl"]/pre[@class="rust"]' 'pub const unsafe extern "rust-intrinsic" fn transmute<T, U>(_: T) -> U'
     #[stable(since="1.0.0", feature="rust1")]
     #[rustc_const_stable(feature = "const_transmute", since = "1.56.0")]
     pub fn transmute<T, U>(_: T) -> U;
 
     // @has 'foo/fn.unreachable.html'
-    // @has - '//pre[@class="rust fn"]' 'pub unsafe extern "rust-intrinsic" fn unreachable() -> !'
+    // @has - '//div[@class="item-decl"]/pre[@class="rust"]' 'pub unsafe extern "rust-intrinsic" fn unreachable() -> !'
     #[stable(since="1.0.0", feature="rust1")]
     pub fn unreachable() -> !;
 }
 
 extern "C" {
     // @has 'foo/fn.needs_drop.html'
-    // @has - '//pre[@class="rust fn"]' 'pub unsafe extern "C" fn needs_drop() -> !'
+    // @has - '//div[@class="item-decl"]/pre[@class="rust"]' 'pub unsafe extern "C" fn needs_drop() -> !'
     #[stable(since="1.0.0", feature="rust1")]
     pub fn needs_drop() -> !;
 }
index f2ec8320a05259464874f15483906d95c7581435..46be00a080482818f4af2c588dbdf6612953ea4a 100644 (file)
@@ -1 +1 @@
-<script type="text/json" id="notable-traits-data">{"&amp;'static [SomeStruct]":"&lt;h3&gt;Notable traits for &lt;code&gt;&amp;amp;[&lt;a class=\"struct\" href=\"struct.SomeStruct.html\" title=\"struct doc_notable_trait_slice::SomeStruct\"&gt;SomeStruct&lt;/a&gt;]&lt;/code&gt;&lt;/h3&gt;&lt;pre class=\"content\"&gt;&lt;code&gt;&lt;span class=\"where fmt-newline\"&gt;impl &lt;a class=\"trait\" href=\"trait.SomeTrait.html\" title=\"trait doc_notable_trait_slice::SomeTrait\"&gt;SomeTrait&lt;/a&gt; for &amp;amp;[&lt;a class=\"struct\" href=\"struct.SomeStruct.html\" title=\"struct doc_notable_trait_slice::SomeStruct\"&gt;SomeStruct&lt;/a&gt;]&lt;/span&gt;"}</script>
\ No newline at end of file
+<script type="text/json" id="notable-traits-data">{"&amp;'static [SomeStruct]":"&lt;h3&gt;Notable traits for &lt;code&gt;&amp;amp;[&lt;a class=\"struct\" href=\"struct.SomeStruct.html\" title=\"struct doc_notable_trait_slice::SomeStruct\"&gt;SomeStruct&lt;/a&gt;]&lt;/code&gt;&lt;/h3&gt;&lt;pre&gt;&lt;code&gt;&lt;span class=\"where fmt-newline\"&gt;impl &lt;a class=\"trait\" href=\"trait.SomeTrait.html\" title=\"trait doc_notable_trait_slice::SomeTrait\"&gt;SomeTrait&lt;/a&gt; for &amp;amp;[&lt;a class=\"struct\" href=\"struct.SomeStruct.html\" title=\"struct doc_notable_trait_slice::SomeStruct\"&gt;SomeStruct&lt;/a&gt;]&lt;/span&gt;"}</script>
\ No newline at end of file
index b426a4d7a8b7ba660183a36e66f09a120dd763f9..f592e3b375c026061ec0fb7274a8eef453f201b5 100644 (file)
@@ -1 +1 @@
-<script type="text/json" id="notable-traits-data">{"SomeStruct":"&lt;h3&gt;Notable traits for &lt;code&gt;&lt;a class=\"struct\" href=\"struct.SomeStruct.html\" title=\"struct doc_notable_trait::SomeStruct\"&gt;SomeStruct&lt;/a&gt;&lt;/code&gt;&lt;/h3&gt;&lt;pre class=\"content\"&gt;&lt;code&gt;&lt;span class=\"where fmt-newline\"&gt;impl &lt;a class=\"trait\" href=\"trait.SomeTrait.html\" title=\"trait doc_notable_trait::SomeTrait\"&gt;SomeTrait&lt;/a&gt; for &lt;a class=\"struct\" href=\"struct.SomeStruct.html\" title=\"struct doc_notable_trait::SomeStruct\"&gt;SomeStruct&lt;/a&gt;&lt;/span&gt;"}</script>
\ No newline at end of file
+<script type="text/json" id="notable-traits-data">{"SomeStruct":"&lt;h3&gt;Notable traits for &lt;code&gt;&lt;a class=\"struct\" href=\"struct.SomeStruct.html\" title=\"struct doc_notable_trait::SomeStruct\"&gt;SomeStruct&lt;/a&gt;&lt;/code&gt;&lt;/h3&gt;&lt;pre&gt;&lt;code&gt;&lt;span class=\"where fmt-newline\"&gt;impl &lt;a class=\"trait\" href=\"trait.SomeTrait.html\" title=\"trait doc_notable_trait::SomeTrait\"&gt;SomeTrait&lt;/a&gt; for &lt;a class=\"struct\" href=\"struct.SomeStruct.html\" title=\"struct doc_notable_trait::SomeStruct\"&gt;SomeStruct&lt;/a&gt;&lt;/span&gt;"}</script>
\ No newline at end of file
index 4f8063807e67dc590cbadd4babd5fb0c2b271c68..384be668954000f0a61b53d43cdccd4249881487 100644 (file)
@@ -1 +1 @@
-<script type="text/json" id="notable-traits-data">{"SomeStruct":"&lt;h3&gt;Notable traits for &lt;code&gt;&lt;a class=\"struct\" href=\"struct.SomeStruct.html\" title=\"struct doc_notable_trait::SomeStruct\"&gt;SomeStruct&lt;/a&gt;&lt;/code&gt;&lt;/h3&gt;&lt;pre class=\"content\"&gt;&lt;code&gt;&lt;span class=\"where fmt-newline\"&gt;impl &lt;a class=\"trait\" href=\"trait.SomeTrait.html\" title=\"trait doc_notable_trait::SomeTrait\"&gt;SomeTrait&lt;/a&gt; for &lt;a class=\"struct\" href=\"struct.SomeStruct.html\" title=\"struct doc_notable_trait::SomeStruct\"&gt;SomeStruct&lt;/a&gt;&lt;/span&gt;","Wrapper&lt;Self&gt;":"&lt;h3&gt;Notable traits for &lt;code&gt;&lt;a class=\"struct\" href=\"struct.Wrapper.html\" title=\"struct doc_notable_trait::Wrapper\"&gt;Wrapper&lt;/a&gt;&amp;lt;T&amp;gt;&lt;/code&gt;&lt;/h3&gt;&lt;pre class=\"content\"&gt;&lt;code&gt;&lt;span class=\"where fmt-newline\"&gt;impl&amp;lt;T:&amp;nbsp;&lt;a class=\"trait\" href=\"trait.SomeTrait.html\" title=\"trait doc_notable_trait::SomeTrait\"&gt;SomeTrait&lt;/a&gt;&amp;gt; &lt;a class=\"trait\" href=\"trait.SomeTrait.html\" title=\"trait doc_notable_trait::SomeTrait\"&gt;SomeTrait&lt;/a&gt; for &lt;a class=\"struct\" href=\"struct.Wrapper.html\" title=\"struct doc_notable_trait::Wrapper\"&gt;Wrapper&lt;/a&gt;&amp;lt;T&amp;gt;&lt;/span&gt;"}</script>
\ No newline at end of file
+<script type="text/json" id="notable-traits-data">{"SomeStruct":"&lt;h3&gt;Notable traits for &lt;code&gt;&lt;a class=\"struct\" href=\"struct.SomeStruct.html\" title=\"struct doc_notable_trait::SomeStruct\"&gt;SomeStruct&lt;/a&gt;&lt;/code&gt;&lt;/h3&gt;&lt;pre&gt;&lt;code&gt;&lt;span class=\"where fmt-newline\"&gt;impl &lt;a class=\"trait\" href=\"trait.SomeTrait.html\" title=\"trait doc_notable_trait::SomeTrait\"&gt;SomeTrait&lt;/a&gt; for &lt;a class=\"struct\" href=\"struct.SomeStruct.html\" title=\"struct doc_notable_trait::SomeStruct\"&gt;SomeStruct&lt;/a&gt;&lt;/span&gt;","Wrapper&lt;Self&gt;":"&lt;h3&gt;Notable traits for &lt;code&gt;&lt;a class=\"struct\" href=\"struct.Wrapper.html\" title=\"struct doc_notable_trait::Wrapper\"&gt;Wrapper&lt;/a&gt;&amp;lt;T&amp;gt;&lt;/code&gt;&lt;/h3&gt;&lt;pre&gt;&lt;code&gt;&lt;span class=\"where fmt-newline\"&gt;impl&amp;lt;T:&amp;nbsp;&lt;a class=\"trait\" href=\"trait.SomeTrait.html\" title=\"trait doc_notable_trait::SomeTrait\"&gt;SomeTrait&lt;/a&gt;&amp;gt; &lt;a class=\"trait\" href=\"trait.SomeTrait.html\" title=\"trait doc_notable_trait::SomeTrait\"&gt;SomeTrait&lt;/a&gt; for &lt;a class=\"struct\" href=\"struct.Wrapper.html\" title=\"struct doc_notable_trait::Wrapper\"&gt;Wrapper&lt;/a&gt;&amp;lt;T&amp;gt;&lt;/span&gt;"}</script>
\ No newline at end of file
index bed2a38b24a2bb1b3421c94d4abc3e05fa6a6a71..0cc1ee10fd33571f268e88aacd0fe54a185e8013 100644 (file)
@@ -1 +1 @@
-<script type="text/json" id="notable-traits-data">{"Wrapper&lt;Self&gt;":"&lt;h3&gt;Notable traits for &lt;code&gt;&lt;a class=\"struct\" href=\"struct.Wrapper.html\" title=\"struct doc_notable_trait::Wrapper\"&gt;Wrapper&lt;/a&gt;&amp;lt;T&amp;gt;&lt;/code&gt;&lt;/h3&gt;&lt;pre class=\"content\"&gt;&lt;code&gt;&lt;span class=\"where fmt-newline\"&gt;impl&amp;lt;T:&amp;nbsp;&lt;a class=\"trait\" href=\"trait.SomeTrait.html\" title=\"trait doc_notable_trait::SomeTrait\"&gt;SomeTrait&lt;/a&gt;&amp;gt; &lt;a class=\"trait\" href=\"trait.SomeTrait.html\" title=\"trait doc_notable_trait::SomeTrait\"&gt;SomeTrait&lt;/a&gt; for &lt;a class=\"struct\" href=\"struct.Wrapper.html\" title=\"struct doc_notable_trait::Wrapper\"&gt;Wrapper&lt;/a&gt;&amp;lt;T&amp;gt;&lt;/span&gt;"}</script>
\ No newline at end of file
+<script type="text/json" id="notable-traits-data">{"Wrapper&lt;Self&gt;":"&lt;h3&gt;Notable traits for &lt;code&gt;&lt;a class=\"struct\" href=\"struct.Wrapper.html\" title=\"struct doc_notable_trait::Wrapper\"&gt;Wrapper&lt;/a&gt;&amp;lt;T&amp;gt;&lt;/code&gt;&lt;/h3&gt;&lt;pre&gt;&lt;code&gt;&lt;span class=\"where fmt-newline\"&gt;impl&amp;lt;T:&amp;nbsp;&lt;a class=\"trait\" href=\"trait.SomeTrait.html\" title=\"trait doc_notable_trait::SomeTrait\"&gt;SomeTrait&lt;/a&gt;&amp;gt; &lt;a class=\"trait\" href=\"trait.SomeTrait.html\" title=\"trait doc_notable_trait::SomeTrait\"&gt;SomeTrait&lt;/a&gt; for &lt;a class=\"struct\" href=\"struct.Wrapper.html\" title=\"struct doc_notable_trait::Wrapper\"&gt;Wrapper&lt;/a&gt;&amp;lt;T&amp;gt;&lt;/span&gt;"}</script>
\ No newline at end of file
index 4293d849df52c1893f0c6f323d978bdfb6fc9e9f..96c64ac4e0211b2c09873e337208f6913899e1b9 100644 (file)
@@ -1,5 +1,5 @@
 #![crate_name = "foo"]
 
 // @has foo/fn.f.html
-// @has - '//*[@class="rust fn"]' 'pub fn f(callback: fn(len: usize, foo: u32))'
+// @has - '//div[@class="item-decl"]/pre[@class="rust"]' 'pub fn f(callback: fn(len: usize, foo: u32))'
 pub fn f(callback: fn(len: usize, foo: u32)) {}
index 644a6e1cf33c52324035a5dd9d8beaaed3290d16..406157ce26c8093e4de47be334f9d4bf02a03d05 100644 (file)
@@ -63,7 +63,7 @@ impl<const S: Struct, St: Stage + ?Sized> Helper<S> for St {
 // this test as long as one can ensure that private fields are not leaked!
 //
 // @has hide_complex_unevaluated_const_arguments/trait.Sub.html \
-//      '//*[@class="rust trait"]' \
+//      '//div[@class="item-decl"]/pre[@class="rust"]' \
 //      'pub trait Sub: Sup<{ _ }, { _ }> { }'
 pub trait Sub: Sup<{ 90 * 20 * 4 }, { Struct { private: () } }> {}
 
diff --git a/tests/rustdoc/impl-in-const-block.rs b/tests/rustdoc/impl-in-const-block.rs
deleted file mode 100644 (file)
index b44e713..0000000
--- a/tests/rustdoc/impl-in-const-block.rs
+++ /dev/null
@@ -1,43 +0,0 @@
-// Regression test for #83026.
-// The goal of this test is to ensure that impl blocks inside
-// const expressions are documented as well.
-
-#![crate_name = "foo"]
-
-// @has 'foo/struct.A.html'
-// @has - '//*[@id="method.new"]/*[@class="code-header"]' 'pub fn new() -> A'
-// @has - '//*[@id="method.bar"]/*[@class="code-header"]' 'pub fn bar(&self)'
-// @has - '//*[@id="method.woo"]/*[@class="code-header"]' 'pub fn woo(&self)'
-// @has - '//*[@id="method.yoo"]/*[@class="code-header"]' 'pub fn yoo()'
-// @has - '//*[@id="method.yuu"]/*[@class="code-header"]' 'pub fn yuu()'
-pub struct A;
-
-const _: () = {
-    impl A {
-        const FOO: () = {
-            impl A {
-                pub fn woo(&self) {}
-            }
-        };
-
-        pub fn new() -> A {
-            A
-        }
-    }
-};
-pub const X: () = {
-    impl A {
-        pub fn bar(&self) {}
-    }
-};
-
-fn foo() {
-    impl A {
-        pub fn yoo() {}
-    }
-    const _: () = {
-        impl A {
-            pub fn yuu() {}
-        }
-    };
-}
index c97644e7f87237384cd3ead7f275f5dcec45ecf3..a4ca928f3331b94068dd7cd89d75dc1f8d8d9cc8 100644 (file)
@@ -4,6 +4,6 @@
 extern crate inline_default_methods;
 
 // @has inline_default_methods/trait.Foo.html
-// @has - '//*[@class="rust trait"]' 'fn bar(&self);'
-// @has - '//*[@class="rust trait"]' 'fn foo(&mut self) { ... }'
+// @has - '//div[@class="item-decl"]/pre[@class="rust"]' 'fn bar(&self);'
+// @has - '//div[@class="item-decl"]/pre[@class="rust"]' 'fn foo(&mut self) { ... }'
 pub use inline_default_methods::Foo;
index cde7f68ff01826c88a1162e43a6c1b626338d600..48672590a12dd969ea7e9daa274c0e86f8a7389c 100644 (file)
@@ -3,3 +3,5 @@
 pub struct SomeStruct;
 
 pub fn some_fn() {}
+
+pub enum Shadowed {}
index f97da11a9014905d7168341b6e7ddf16c9b0e6d6..7a519d2d2554792362e233f75cab42eae31c13b5 100644 (file)
@@ -6,6 +6,11 @@
 
 // @has cross_glob/struct.SomeStruct.html
 // @has cross_glob/fn.some_fn.html
+// @!has cross_glob/enum.Shadowed.html
 // @!has cross_glob/index.html '//code' 'pub use inner::*;'
 #[doc(inline)]
 pub use inner::*;
+
+// This type shadows the glob-imported enum `Shadowed`.
+// @has cross_glob/type.Shadowed.html
+pub type Shadowed = u8;
index fa760540e436596d54dc7c359a1ab4112b5333ac..0da8bfc3a9ae368460d0c44fb3307b30c6515170 100644 (file)
 pub use dyn_trait::Ty3;
 
 // @has user/fn.func0.html
-// @has - '//pre[@class="rust fn"]' "func0(_: &dyn Fn())"
+// @has - '//div[@class="item-decl"]/pre[@class="rust"]' "func0(_: &dyn Fn())"
 // FIXME(fmease): Show placeholder-lifetime bound, render "func0(_: &(dyn Fn() + '_))"
 pub use dyn_trait::func0;
 
 // @has user/fn.func1.html
-// @has - '//pre[@class="rust fn"]' "func1<'func>(_: &(dyn Fn() + 'func))"
+// @has - '//div[@class="item-decl"]/pre[@class="rust"]' "func1<'func>(_: &(dyn Fn() + 'func))"
 pub use dyn_trait::func1;
index 9c4f646592038e6ec62fe4036f2b47c4aef8f794..e8587209b61655ea358c161a77580a1c4a2e9d22 100644 (file)
@@ -4,37 +4,37 @@
 extern crate impl_trait_aux;
 
 // @has impl_trait/fn.func.html
-// @has - '//pre[@class="rust fn"]' "pub fn func<'a>(_x: impl Clone + Into<Vec<u8, Global>> + 'a)"
-// @!has - '//pre[@class="rust fn"]' 'where'
+// @has - '//div[@class="item-decl"]/pre[@class="rust"]' "pub fn func<'a>(_x: impl Clone + Into<Vec<u8, Global>> + 'a)"
+// @!has - '//div[@class="item-decl"]/pre[@class="rust"]' 'where'
 pub use impl_trait_aux::func;
 
 // @has impl_trait/fn.func2.html
-// @has - '//pre[@class="rust fn"]' "func2<T>("
-// @has - '//pre[@class="rust fn"]' "_x: impl Deref<Target = Option<T>> + Iterator<Item = T>,"
-// @has - '//pre[@class="rust fn"]' "_y: impl Iterator<Item = u8>)"
-// @!has - '//pre[@class="rust fn"]' 'where'
+// @has - '//div[@class="item-decl"]/pre[@class="rust"]' "func2<T>("
+// @has - '//div[@class="item-decl"]/pre[@class="rust"]' "_x: impl Deref<Target = Option<T>> + Iterator<Item = T>,"
+// @has - '//div[@class="item-decl"]/pre[@class="rust"]' "_y: impl Iterator<Item = u8>)"
+// @!has - '//div[@class="item-decl"]/pre[@class="rust"]' 'where'
 pub use impl_trait_aux::func2;
 
 // @has impl_trait/fn.func3.html
-// @has - '//pre[@class="rust fn"]' "func3("
-// @has - '//pre[@class="rust fn"]' "_x: impl Iterator<Item = impl Iterator<Item = u8>> + Clone)"
-// @!has - '//pre[@class="rust fn"]' 'where'
+// @has - '//div[@class="item-decl"]/pre[@class="rust"]' "func3("
+// @has - '//div[@class="item-decl"]/pre[@class="rust"]' "_x: impl Iterator<Item = impl Iterator<Item = u8>> + Clone)"
+// @!has - '//div[@class="item-decl"]/pre[@class="rust"]' 'where'
 pub use impl_trait_aux::func3;
 
 // @has impl_trait/fn.func4.html
-// @has - '//pre[@class="rust fn"]' "func4<T>("
-// @has - '//pre[@class="rust fn"]' "T: Iterator<Item = impl Clone>,"
+// @has - '//div[@class="item-decl"]/pre[@class="rust"]' "func4<T>("
+// @has - '//div[@class="item-decl"]/pre[@class="rust"]' "T: Iterator<Item = impl Clone>,"
 pub use impl_trait_aux::func4;
 
 // @has impl_trait/fn.func5.html
-// @has - '//pre[@class="rust fn"]' "func5("
-// @has - '//pre[@class="rust fn"]' "_f: impl for<'any> Fn(&'any str, &'any str) -> bool + for<'r> Other<T<'r> = ()>,"
-// @has - '//pre[@class="rust fn"]' "_a: impl for<'alpha, 'beta> Auxiliary<'alpha, Item<'beta> = fn(_: &'beta ())>"
-// @!has - '//pre[@class="rust fn"]' 'where'
+// @has - '//div[@class="item-decl"]/pre[@class="rust"]' "func5("
+// @has - '//div[@class="item-decl"]/pre[@class="rust"]' "_f: impl for<'any> Fn(&'any str, &'any str) -> bool + for<'r> Other<T<'r> = ()>,"
+// @has - '//div[@class="item-decl"]/pre[@class="rust"]' "_a: impl for<'alpha, 'beta> Auxiliary<'alpha, Item<'beta> = fn(_: &'beta ())>"
+// @!has - '//div[@class="item-decl"]/pre[@class="rust"]' 'where'
 pub use impl_trait_aux::func5;
 
 // @has impl_trait/fn.async_fn.html
-// @has - '//pre[@class="rust fn"]' "pub async fn async_fn()"
+// @has - '//div[@class="item-decl"]/pre[@class="rust"]' "pub async fn async_fn()"
 pub use impl_trait_aux::async_fn;
 
 // @has impl_trait/struct.Foo.html
index 2589e27f215029de4dd4b19841a9a97a841a518e..a774b0ca7cd7a4ca54324b86e49f17406b641a18 100644 (file)
@@ -13,7 +13,7 @@ pub trait Trait {
 }
 
 // @has issue_20646/fn.fun.html \
-//      '//*[@class="rust fn"]' 'where T: Trait<Output = i32>'
+//      '//div[@class="item-decl"]/pre[@class="rust"]' 'where T: Trait<Output = i32>'
 pub fn fun<T>(_: T) where T: Trait<Output=i32> {}
 
 pub mod reexport {
@@ -21,6 +21,6 @@ pub mod reexport {
     //      '//*[@id="associatedtype.Output"]' \
     //      'type Output'
     // @has issue_20646/reexport/fn.fun.html \
-    //      '//*[@class="rust fn"]' 'where T: Trait<Output = i32>'
+    //      '//div[@class="item-decl"]/pre[@class="rust"]' 'where T: Trait<Output = i32>'
     pub use issue_20646::{Trait, fun};
 }
index 022ff290e1a7153c5c2aff5615596648227466be..026b4f5acc912d7b2eb6889e81d2f20341e1814b 100644 (file)
@@ -5,18 +5,18 @@
 
 // @has issue_20727_2/trait.Add.html
 pub trait Add<RHS = Self> {
-    // @has - '//*[@class="rust trait"]' 'trait Add<RHS = Self> {'
-    // @has - '//*[@class="rust trait"]' 'type Output;'
+    // @has - '//div[@class="item-decl"]/pre[@class="rust"]' 'trait Add<RHS = Self> {'
+    // @has - '//div[@class="item-decl"]/pre[@class="rust"]' 'type Output;'
     type Output;
 
-    // @has - '//*[@class="rust trait"]' 'fn add(self, rhs: RHS) -> Self::Output;'
+    // @has - '//div[@class="item-decl"]/pre[@class="rust"]' 'fn add(self, rhs: RHS) -> Self::Output;'
     fn add(self, rhs: RHS) -> Self::Output;
 }
 
 // @has issue_20727_2/reexport/trait.Add.html
 pub mod reexport {
-    // @has - '//*[@class="rust trait"]' 'trait Add<RHS = Self> {'
-    // @has - '//*[@class="rust trait"]' 'type Output;'
-    // @has - '//*[@class="rust trait"]' 'fn add(self, rhs: RHS) -> Self::Output;'
+    // @has - '//div[@class="item-decl"]/pre[@class="rust"]' 'trait Add<RHS = Self> {'
+    // @has - '//div[@class="item-decl"]/pre[@class="rust"]' 'type Output;'
+    // @has - '//div[@class="item-decl"]/pre[@class="rust"]' 'fn add(self, rhs: RHS) -> Self::Output;'
     pub use issue_20727::Add;
 }
index 52032b75aeaf5134313acbde842cad7de9fe65e7..741ce8023d7e78bfb58fae0d34cd6e7b27baf512 100644 (file)
@@ -7,18 +7,18 @@ pub trait Bar {}
 
 // @has issue_20727_3/trait.Deref2.html
 pub trait Deref2 {
-    // @has - '//*[@class="rust trait"]' 'trait Deref2 {'
-    // @has - '//*[@class="rust trait"]' 'type Target: Bar;'
+    // @has - '//div[@class="item-decl"]/pre[@class="rust"]' 'trait Deref2 {'
+    // @has - '//div[@class="item-decl"]/pre[@class="rust"]' 'type Target: Bar;'
     type Target: Bar;
 
-    // @has - '//*[@class="rust trait"]' 'fn deref(&self) -> Self::Target;'
+    // @has - '//div[@class="item-decl"]/pre[@class="rust"]' 'fn deref(&self) -> Self::Target;'
     fn deref(&self) -> Self::Target;
 }
 
 // @has issue_20727_3/reexport/trait.Deref2.html
 pub mod reexport {
-    // @has - '//*[@class="rust trait"]' 'trait Deref2 {'
-    // @has - '//*[@class="rust trait"]' 'type Target: Bar;'
-    // @has - '//*[@class="rust trait"]' 'fn deref(&self) -> Self::Target;'
+    // @has - '//div[@class="item-decl"]/pre[@class="rust"]' 'trait Deref2 {'
+    // @has - '//div[@class="item-decl"]/pre[@class="rust"]' 'type Target: Bar;'
+    // @has - '//div[@class="item-decl"]/pre[@class="rust"]' 'fn deref(&self) -> Self::Target;'
     pub use issue_20727::Deref2;
 }
index 643f93875909390b0b8bf410c325ecfd9393a309..b8fac4da6eadf8288b91c9370d730291873cbed7 100644 (file)
@@ -5,36 +5,36 @@
 
 // @has issue_20727_4/trait.Index.html
 pub trait Index<Idx: ?Sized> {
-    // @has - '//*[@class="rust trait"]' 'trait Index<Idx: ?Sized> {'
-    // @has - '//*[@class="rust trait"]' 'type Output: ?Sized'
+    // @has - '//div[@class="item-decl"]/pre[@class="rust"]' 'trait Index<Idx: ?Sized> {'
+    // @has - '//div[@class="item-decl"]/pre[@class="rust"]' 'type Output: ?Sized'
     type Output: ?Sized;
 
-    // @has - '//*[@class="rust trait"]' \
+    // @has - '//div[@class="item-decl"]/pre[@class="rust"]' \
     //        'fn index(&self, index: Idx) -> &Self::Output'
     fn index(&self, index: Idx) -> &Self::Output;
 }
 
 // @has issue_20727_4/trait.IndexMut.html
 pub trait IndexMut<Idx: ?Sized>: Index<Idx> {
-    // @has - '//*[@class="rust trait"]' \
+    // @has - '//div[@class="item-decl"]/pre[@class="rust"]' \
     //        'trait IndexMut<Idx: ?Sized>: Index<Idx> {'
-    // @has - '//*[@class="rust trait"]' \
+    // @has - '//div[@class="item-decl"]/pre[@class="rust"]' \
     //        'fn index_mut(&mut self, index: Idx) -> &mut Self::Output;'
     fn index_mut(&mut self, index: Idx) -> &mut Self::Output;
 }
 
 pub mod reexport {
     // @has issue_20727_4/reexport/trait.Index.html
-    // @has - '//*[@class="rust trait"]' 'trait Index<Idx>where Idx: ?Sized,{'
-    // @has - '//*[@class="rust trait"]' 'type Output: ?Sized'
-    // @has - '//*[@class="rust trait"]' \
+    // @has - '//div[@class="item-decl"]/pre[@class="rust"]' 'trait Index<Idx>where Idx: ?Sized,{'
+    // @has - '//div[@class="item-decl"]/pre[@class="rust"]' 'type Output: ?Sized'
+    // @has - '//div[@class="item-decl"]/pre[@class="rust"]' \
     //        'fn index(&self, index: Idx) -> &Self::Output'
     pub use issue_20727::Index;
 
     // @has issue_20727_4/reexport/trait.IndexMut.html
-    // @has - '//*[@class="rust trait"]' \
+    // @has - '//div[@class="item-decl"]/pre[@class="rust"]' \
     //        'trait IndexMut<Idx>: Index<Idx>where Idx: ?Sized,{'
-    // @has - '//*[@class="rust trait"]' \
+    // @has - '//div[@class="item-decl"]/pre[@class="rust"]' \
     //        'fn index_mut(&mut self, index: Idx) -> &mut Self::Output;'
     pub use issue_20727::IndexMut;
 }
index c1a98cd57daf8cc66e452bf5fee3991e0e8cacb5..df334821cccc97868e71746592ff6e100c7bb6e6 100644 (file)
@@ -5,20 +5,20 @@
 
 // @has issue_20727/trait.Deref.html
 pub trait Deref {
-    // @has - '//*[@class="rust trait"]' 'trait Deref {'
-    // @has - '//*[@class="rust trait"]' 'type Target: ?Sized;'
+    // @has - '//div[@class="item-decl"]/pre[@class="rust"]' 'trait Deref {'
+    // @has - '//div[@class="item-decl"]/pre[@class="rust"]' 'type Target: ?Sized;'
     type Target: ?Sized;
 
-    // @has - '//*[@class="rust trait"]' \
+    // @has - '//div[@class="item-decl"]/pre[@class="rust"]' \
     //        "fn deref<'a>(&'a self) -> &'a Self::Target;"
     fn deref<'a>(&'a self) -> &'a Self::Target;
 }
 
 // @has issue_20727/reexport/trait.Deref.html
 pub mod reexport {
-    // @has - '//*[@class="rust trait"]' 'trait Deref {'
-    // @has - '//*[@class="rust trait"]' 'type Target: ?Sized;'
-    // @has - '//*[@class="rust trait"]' \
+    // @has - '//div[@class="item-decl"]/pre[@class="rust"]' 'trait Deref {'
+    // @has - '//div[@class="item-decl"]/pre[@class="rust"]' 'type Target: ?Sized;'
+    // @has - '//div[@class="item-decl"]/pre[@class="rust"]' \
     //      "fn deref<'a>(&'a self) -> &'a Self::Target;"
     pub use issue_20727::Deref;
 }
index ff5813dac8099613997ec87ded88393e0f2473d8..19e626ba132356d69889be10f1fe80a42849824e 100644 (file)
@@ -1,19 +1,19 @@
 extern "C" {
     // @has issue_22038/fn.foo1.html \
-    //      '//*[@class="rust fn"]' 'pub unsafe extern "C" fn foo1()'
+    //      '//div[@class="item-decl"]/pre[@class="rust"]' 'pub unsafe extern "C" fn foo1()'
     pub fn foo1();
 }
 
 extern "system" {
     // @has issue_22038/fn.foo2.html \
-    //      '//*[@class="rust fn"]' 'pub unsafe extern "system" fn foo2()'
+    //      '//div[@class="item-decl"]/pre[@class="rust"]' 'pub unsafe extern "system" fn foo2()'
     pub fn foo2();
 }
 
 // @has issue_22038/fn.bar.html \
-//      '//*[@class="rust fn"]' 'pub extern "C" fn bar()'
+//      '//div[@class="item-decl"]/pre[@class="rust"]' 'pub extern "C" fn bar()'
 pub extern "C" fn bar() {}
 
 // @has issue_22038/fn.baz.html \
-//      '//*[@class="rust fn"]' 'pub extern "system" fn baz()'
+//      '//div[@class="item-decl"]/pre[@class="rust"]' 'pub extern "system" fn baz()'
 pub extern "system" fn baz() {}
index 1e4791e01253df01edcb4aa0c7fdaa5816c6d812..b4c52e2f17a6d93fba6a71fba51fa2795939889f 100644 (file)
@@ -6,17 +6,17 @@ macro_rules! make {
         pub struct S;
 
         // @has issue_33302/constant.CST.html \
-        //        '//pre[@class="rust const"]' 'pub const CST: i32'
+        //        '//div[@class="item-decl"]/pre[@class="rust"]' 'pub const CST: i32'
         pub const CST: i32 = ($n * $n);
         // @has issue_33302/static.ST.html \
-        //        '//pre[@class="rust static"]' 'pub static ST: i32'
+        //        '//div[@class="item-decl"]/pre[@class="rust"]' 'pub static ST: i32'
         pub static ST: i32 = ($n * $n);
 
         pub trait T<X> {
             fn ignore(_: &X) {}
             const C: X;
             // @has issue_33302/trait.T.html \
-            //        '//*[@class="rust trait"]' 'const D: i32'
+            //        '//div[@class="item-decl"]/pre[@class="rust"]' 'const D: i32'
             // @has - '//*[@id="associatedconstant.D"]' 'const D: i32'
             const D: i32 = ($n * $n);
         }
index 3351b5c8350fd7284ac0e7d2598e5c866ee7c9a7..74fc22b31dc0c84cd6956e52bb6aff5173bfd85e 100644 (file)
@@ -5,7 +5,7 @@
 extern crate issue_85454;
 
 // @has foo/trait.FromResidual.html
-// @has - '//pre[@class="rust trait"]' 'pub trait FromResidual<R = <Self as Try>::Residual> { fn from_residual(residual: R) -> Self; }'
+// @has - '//div[@class="item-decl"]/pre[@class="rust"]' 'pub trait FromResidual<R = <Self as Try>::Residual> { fn from_residual(residual: R) -> Self; }'
 pub trait FromResidual<R = <Self as Try>::Residual> {
     fn from_residual(residual: R) -> Self;
 }
@@ -24,6 +24,6 @@ pub enum ControlFlow<B, C = ()> {
 
 pub mod reexport {
     // @has foo/reexport/trait.FromResidual.html
-    // @has - '//pre[@class="rust trait"]' 'pub trait FromResidual<R = <Self as Try>::Residual> { fn from_residual(residual: R) -> Self; }'
+    // @has - '//div[@class="item-decl"]/pre[@class="rust"]' 'pub trait FromResidual<R = <Self as Try>::Residual> { fn from_residual(residual: R) -> Self; }'
     pub use issue_85454::*;
 }
index d50268509b2c1ababc06f03b2a4d123202869fd3..884b63ac97ffceee0fc6f6033ed96b742372fcbf 100644 (file)
@@ -8,8 +8,8 @@
 
 extern crate issue_98697_reexport_with_anonymous_lifetime;
 
-// @has issue_98697/fn.repro.html '//pre[@class="rust fn"]/code' 'fn repro<F>()where F: Fn(&str)'
-// @!has issue_98697/fn.repro.html '//pre[@class="rust fn"]/code' 'for<'
+// @has issue_98697/fn.repro.html '//div[@class="item-decl"]/pre[@class="rust"]/code' 'fn repro<F>()where F: Fn(&str)'
+// @!has issue_98697/fn.repro.html '//div[@class="item-decl"]/pre[@class="rust"]/code' 'for<'
 pub use issue_98697_reexport_with_anonymous_lifetime::repro;
 
 // @has issue_98697/struct.Extra.html '//div[@id="trait-implementations-list"]//h3[@class="code-header"]' 'impl MyTrait<&Extra> for Extra'
index 46a50e2fc30b4b33e3e9004e53d97d593984cb9c..14533624e448e6f059699be88c56dd554d7b34fc 100644 (file)
@@ -2,14 +2,14 @@
 #![feature(rustc_attrs)]
 
 // @has 'foo/fn.foo.html'
-// @has - '//*[@class="rust fn"]' 'fn foo(x: usize, const Y: usize, z: usize) -> [usize; 3]'
+// @has - '//div[@class="item-decl"]/pre[@class="rust"]' 'fn foo(x: usize, const Y: usize, z: usize) -> [usize; 3]'
 #[rustc_legacy_const_generics(1)]
 pub fn foo<const Y: usize>(x: usize, z: usize) -> [usize; 3] {
     [x, Y, z]
 }
 
 // @has 'foo/fn.bar.html'
-// @has - '//*[@class="rust fn"]' 'fn bar(x: usize, const Y: usize, const Z: usize) -> [usize; 3]'
+// @has - '//div[@class="item-decl"]/pre[@class="rust"]' 'fn bar(x: usize, const Y: usize, const Z: usize) -> [usize; 3]'
 #[rustc_legacy_const_generics(1, 2)]
 pub fn bar<const Y: usize, const Z: usize>(x: usize) -> [usize; 3] {
     [x, Y, Z]
index 5d30a745a61a6027e4d3c8dab75fb571baff0717..0fb66059109071885e06a42ac2b60cc12db8f26e 100644 (file)
@@ -1,5 +1,5 @@
 #![crate_name = "foo"]
 
 // @has 'foo/type.Resolutions.html'
-// @has - '//*[@class="rust typedef"]' "pub type Resolutions<'tcx> = &'tcx u8;"
+// @has - '//div[@class="item-decl"]/pre[@class="rust"]' "pub type Resolutions<'tcx> = &'tcx u8;"
 pub type Resolutions<'tcx> = &'tcx u8;
index f3ea6995839174fce43e40925fd7a4dd2a64ee09..3b862e651c9098e5989b7208159c8b0f9889fda3 100644 (file)
@@ -13,6 +13,6 @@ pub fn foo(mut self) {}
     pub fn bar(mut bar: ()) {}
 }
 
-// @count foo/fn.baz.html '//*[@class="rust fn"]' 1
-// @!has - '//*[@class="rust fn"]' 'mut'
+// @count foo/fn.baz.html '//div[@class="item-decl"]/pre[@class="rust"]' 1
+// @!has - '//div[@class="item-decl"]/pre[@class="rust"]' 'mut'
 pub fn baz(mut foo: Foo) {}
index db56f68526b3fdce61f5120aea93a29dbc88531c..659480479fd13e9dd747eb52294909696ac08891 100644 (file)
@@ -19,12 +19,12 @@ impl Trait for isize {
     type X = <() as Trait>::X;
 }
 
-// @has 'normalize_assoc_item/fn.f.html' '//pre[@class="rust fn"]' 'pub fn f() -> isize'
+// @has 'normalize_assoc_item/fn.f.html' '//div[@class="item-decl"]/pre[@class="rust"]' 'pub fn f() -> isize'
 pub fn f() -> <usize as Trait>::X {
     0
 }
 
-// @has 'normalize_assoc_item/fn.f2.html' '//pre[@class="rust fn"]' 'pub fn f2() -> fn() -> i32'
+// @has 'normalize_assoc_item/fn.f2.html' '//div[@class="item-decl"]/pre[@class="rust"]' 'pub fn f2() -> fn() -> i32'
 pub fn f2() -> <isize as Trait>::X {
     todo!()
 }
@@ -49,10 +49,10 @@ impl<Inner: Trait> Trait for Generic<Inner> {
 // These can't be normalized because they depend on a generic parameter.
 // However the user can choose whether the text should be displayed as `Inner::X` or `<Inner as Trait>::X`.
 
-// @has 'normalize_assoc_item/struct.Unknown.html' '//pre[@class="rust struct"]' 'pub struct Unknown<Inner: Trait>(pub <Inner as Trait>::X);'
+// @has 'normalize_assoc_item/struct.Unknown.html' '//div[@class="item-decl"]/pre[@class="rust"]' 'pub struct Unknown<Inner: Trait>(pub <Inner as Trait>::X);'
 pub struct Unknown<Inner: Trait>(pub <Inner as Trait>::X);
 
-// @has 'normalize_assoc_item/struct.Unknown2.html' '//pre[@class="rust struct"]' 'pub struct Unknown2<Inner: Trait>(pub Inner::X);'
+// @has 'normalize_assoc_item/struct.Unknown2.html' '//div[@class="item-decl"]/pre[@class="rust"]' 'pub struct Unknown2<Inner: Trait>(pub Inner::X);'
 pub struct Unknown2<Inner: Trait>(pub Inner::X);
 
 trait Lifetimes<'a> {
@@ -63,20 +63,20 @@ impl<'a> Lifetimes<'a> for usize {
     type Y = &'a isize;
 }
 
-// @has 'normalize_assoc_item/fn.g.html' '//pre[@class="rust fn"]' "pub fn g() -> &isize"
+// @has 'normalize_assoc_item/fn.g.html' '//div[@class="item-decl"]/pre[@class="rust"]' "pub fn g() -> &isize"
 pub fn g() -> <usize as Lifetimes<'static>>::Y {
     &0
 }
 
-// @has 'normalize_assoc_item/constant.A.html' '//pre[@class="rust const"]' "pub const A: &isize"
+// @has 'normalize_assoc_item/constant.A.html' '//div[@class="item-decl"]/pre[@class="rust"]' "pub const A: &isize"
 pub const A: <usize as Lifetimes<'static>>::Y = &0;
 
 // test cross-crate re-exports
 extern crate inner;
-// @has 'normalize_assoc_item/fn.foo.html' '//pre[@class="rust fn"]' "pub fn foo() -> i32"
+// @has 'normalize_assoc_item/fn.foo.html' '//div[@class="item-decl"]/pre[@class="rust"]' "pub fn foo() -> i32"
 pub use inner::foo;
 
-// @has 'normalize_assoc_item/fn.h.html' '//pre[@class="rust fn"]' "pub fn h<T>() -> IntoIter<T, Global>"
+// @has 'normalize_assoc_item/fn.h.html' '//div[@class="item-decl"]/pre[@class="rust"]' "pub fn h<T>() -> IntoIter<T, Global>"
 pub fn h<T>() -> <Vec<T> as IntoIterator>::IntoIter {
     vec![].into_iter()
 }
index fa7de0aff6a228fdff94fec796f9424e467ae6a9..0dca3f672cd3c42ca703a80e9112a2298b82084c 100644 (file)
@@ -3,7 +3,7 @@
 #![crate_name = "foo"]
 
 // @has foo/fn.bar.html
-// @has - '//*[@class="rust fn"]' 'pub fn bar() -> '
+// @has - '//div[@class="item-decl"]/pre[@class="rust"]' 'pub fn bar() -> '
 /// foo
 pub fn bar() -> usize {
     2
index 756939ae3773c37fbf9f46c5043d81599aa4888e..bdbcc47c9f28dd4328e3f28c2c9d006277de5efd 100644 (file)
@@ -1,5 +1,5 @@
 #![crate_name = "foo"]
 
 // @has foo/fn.f.html
-// @has - '//*[@class="rust fn"]' 'pub fn f(_: u8)'
+// @has - '//div[@class="item-decl"]/pre[@class="rust"]' 'pub fn f(_: u8)'
 pub fn f(0u8..=255: u8) {}
index 11364e7f707ef9c7e0d3e4756245907abea61087..35c90ba5d7b894fac9f3e5fa9bb81882031d98d4 100644 (file)
@@ -32,7 +32,7 @@
 // @!has 'foo/enum.BarLocal.html'
 use reexports::BarLocal;
 
-// @has 'foo/fn.foo.html' '//*[@class="rust fn"]' 'pub fn foo()'
+// @has 'foo/fn.foo.html' '//div[@class="item-decl"]/pre[@class="rust"]' 'pub fn foo()'
 pub use reexports::foo;
 // @!has 'foo/fn.foo_crate.html'
 pub(crate) use reexports::foo_crate;
@@ -41,7 +41,7 @@
 // @!has 'foo/fn.foo_local.html'
 use reexports::foo_local;
 
-// @has 'foo/type.Type.html' '//*[@class="rust typedef"]' 'pub type Type ='
+// @has 'foo/type.Type.html' '//div[@class="item-decl"]/pre[@class="rust"]' 'pub type Type ='
 pub use reexports::Type;
 // @!has 'foo/type.TypeCrate.html'
 pub(crate) use reexports::TypeCrate;
@@ -94,22 +94,22 @@ pub mod inner {
         // @!has 'foo/outer/inner/enum.BarLocal.html'
         use reexports::BarLocal;
 
-        // @has 'foo/outer/inner/fn.foo.html' '//*[@class="rust fn"]' 'pub fn foo()'
+        // @has 'foo/outer/inner/fn.foo.html' '//div[@class="item-decl"]/pre[@class="rust"]' 'pub fn foo()'
         pub use reexports::foo;
-        // @has 'foo/outer/inner/fn.foo_crate.html' '//*[@class="rust fn"]' 'pub(crate) fn foo_crate()'
+        // @has 'foo/outer/inner/fn.foo_crate.html' '//div[@class="item-decl"]/pre[@class="rust"]' 'pub(crate) fn foo_crate()'
         pub(crate) use reexports::foo_crate;
-        // @has 'foo/outer/inner/fn.foo_super.html' '//*[@class="rust fn"]' 'pub(in outer) fn foo_super()'
+        // @has 'foo/outer/inner/fn.foo_super.html' '//div[@class="item-decl"]/pre[@class="rust"]' 'pub(in outer) fn foo_super()'
         pub(super) use::reexports::foo_super;
         // @!has 'foo/outer/inner/fn.foo_self.html'
         pub(self) use reexports::foo_self;
         // @!has 'foo/outer/inner/fn.foo_local.html'
         use reexports::foo_local;
 
-        // @has 'foo/outer/inner/type.Type.html' '//*[@class="rust typedef"]' 'pub type Type ='
+        // @has 'foo/outer/inner/type.Type.html' '//div[@class="item-decl"]/pre[@class="rust"]' 'pub type Type ='
         pub use reexports::Type;
-        // @has 'foo/outer/inner/type.TypeCrate.html' '//*[@class="rust typedef"]' 'pub(crate) type TypeCrate ='
+        // @has 'foo/outer/inner/type.TypeCrate.html' '//div[@class="item-decl"]/pre[@class="rust"]' 'pub(crate) type TypeCrate ='
         pub(crate) use reexports::TypeCrate;
-        // @has 'foo/outer/inner/type.TypeSuper.html' '//*[@class="rust typedef"]' 'pub(in outer) type TypeSuper ='
+        // @has 'foo/outer/inner/type.TypeSuper.html' '//div[@class="item-decl"]/pre[@class="rust"]' 'pub(in outer) type TypeSuper ='
         pub(super) use reexports::TypeSuper;
         // @!has 'foo/outer/inner/type.TypeSelf.html'
         pub(self) use reexports::TypeSelf;
index 9aa6d7224baca200e54974cbee5ab25404d18a68..65d305c6d2c4202b7a2b6e6f61e59ed74eb68914 100644 (file)
@@ -31,7 +31,7 @@
 // @!has 'foo/enum.BarLocal.html'
 use reexports::BarLocal;
 
-// @has 'foo/fn.foo.html' '//*[@class="rust fn"]' 'pub fn foo()'
+// @has 'foo/fn.foo.html' '//div[@class="item-decl"]/pre[@class="rust"]' 'pub fn foo()'
 pub use reexports::foo;
 // @!has 'foo/fn.foo_crate.html'
 pub(crate) use reexports::foo_crate;
@@ -40,7 +40,7 @@
 // @!has 'foo/fn.foo_local.html'
 use reexports::foo_local;
 
-// @has 'foo/type.Type.html' '//*[@class="rust typedef"]' 'pub type Type ='
+// @has 'foo/type.Type.html' '//div[@class="item-decl"]/pre[@class="rust"]' 'pub type Type ='
 pub use reexports::Type;
 // @!has 'foo/type.TypeCrate.html'
 pub(crate) use reexports::TypeCrate;
@@ -93,7 +93,7 @@ pub mod inner {
         // @!has 'foo/outer/inner/enum.BarLocal.html'
         use reexports::BarLocal;
 
-        // @has 'foo/outer/inner/fn.foo.html' '//*[@class="rust fn"]' 'pub fn foo()'
+        // @has 'foo/outer/inner/fn.foo.html' '//div[@class="item-decl"]/pre[@class="rust"]' 'pub fn foo()'
         pub use reexports::foo;
         // @!has 'foo/outer/inner/fn.foo_crate.html'
         pub(crate) use reexports::foo_crate;
@@ -104,7 +104,7 @@ pub mod inner {
         // @!has 'foo/outer/inner/fn.foo_local.html'
         use reexports::foo_local;
 
-        // @has 'foo/outer/inner/type.Type.html' '//*[@class="rust typedef"]' 'pub type Type ='
+        // @has 'foo/outer/inner/type.Type.html' '//div[@class="item-decl"]/pre[@class="rust"]' 'pub type Type ='
         pub use reexports::Type;
         // @!has 'foo/outer/inner/type.TypeCrate.html'
         pub(crate) use reexports::TypeCrate;
index 7ed9d6729b647a67523462e6ec57a04be926a407..a229a4e29fefb43a2ec580474fa492e3444dfe99 100644 (file)
 
 pub struct S<T>(T);
 
-// @!has foo/trait.Tr.html '//pre[@class="rust trait"]/code/a[@class="trait"]' '~const'
-// @has - '//pre[@class="rust trait"]/code/a[@class="trait"]' 'Clone'
-// @!has - '//pre[@class="rust trait"]/code/span[@class="where"]' '~const'
-// @has - '//pre[@class="rust trait"]/code/span[@class="where"]' ': Clone'
+// @!has foo/trait.Tr.html '//div[@class="item-decl"]/pre[@class="rust"]/code/a[@class="trait"]' '~const'
+// @has - '//div[@class="item-decl"]/pre[@class="rust"]/code/a[@class="trait"]' 'Clone'
+// @!has - '//div[@class="item-decl"]/pre[@class="rust"]/code/span[@class="where"]' '~const'
+// @has - '//div[@class="item-decl"]/pre[@class="rust"]/code/span[@class="where"]' ': Clone'
 #[const_trait]
 pub trait Tr<T> {
     // @!has - '//section[@id="method.a"]/h4[@class="code-header"]' '~const'
@@ -45,10 +45,10 @@ fn a<A: ~const Clone + ~const Destruct>()
     }
 }
 
-// @!has foo/fn.foo.html '//pre[@class="rust fn"]/code/a[@class="trait"]' '~const'
-// @has - '//pre[@class="rust fn"]/code/a[@class="trait"]' 'Clone'
-// @!has - '//pre[@class="rust fn"]/code/span[@class="where fmt-newline"]' '~const'
-// @has - '//pre[@class="rust fn"]/code/span[@class="where fmt-newline"]' ': Clone'
+// @!has foo/fn.foo.html '//div[@class="item-decl"]/pre[@class="rust"]/code/a[@class="trait"]' '~const'
+// @has - '//div[@class="item-decl"]/pre[@class="rust"]/code/a[@class="trait"]' 'Clone'
+// @!has - '//div[@class="item-decl"]/pre[@class="rust"]/code/span[@class="where fmt-newline"]' '~const'
+// @has - '//div[@class="item-decl"]/pre[@class="rust"]/code/span[@class="where fmt-newline"]' ': Clone'
 pub const fn foo<F: ~const Clone + ~const Destruct>()
 where
     Option<F>: ~const Clone + ~const Destruct,
index d08abdaeb1411f45e4cb90fb654e23de1f447362..0b65bf1dfed0fa5f2d3b85bd3c7cefdaec169f6e 100644 (file)
@@ -7,16 +7,16 @@
 
 extern "rust-intrinsic" {
     // @has 'foo/fn.abort.html'
-    // @has - '//pre[@class="rust fn"]' 'pub extern "rust-intrinsic" fn abort() -> !'
+    // @has - '//div[@class="item-decl"]/pre[@class="rust"]' 'pub extern "rust-intrinsic" fn abort() -> !'
     #[rustc_safe_intrinsic]
     pub fn abort() -> !;
     // @has 'foo/fn.unreachable.html'
-    // @has - '//pre[@class="rust fn"]' 'pub unsafe extern "rust-intrinsic" fn unreachable() -> !'
+    // @has - '//div[@class="item-decl"]/pre[@class="rust"]' 'pub unsafe extern "rust-intrinsic" fn unreachable() -> !'
     pub fn unreachable() -> !;
 }
 
 extern "C" {
     // @has 'foo/fn.needs_drop.html'
-    // @has - '//pre[@class="rust fn"]' 'pub unsafe extern "C" fn needs_drop() -> !'
+    // @has - '//div[@class="item-decl"]/pre[@class="rust"]' 'pub unsafe extern "C" fn needs_drop() -> !'
     pub fn needs_drop() -> !;
 }
index 9a78e963e30363a6ac195cc84c110cd5daef41a8..67137fdcab274a75332d87f977bd365ccaf727c7 100644 (file)
@@ -4,25 +4,25 @@
 pub struct MyBox<T: ?Sized>(*const T);
 
 // @has 'foo/fn.alpha.html'
-// @snapshot link_slice_u32 - '//pre[@class="rust fn"]/code'
+// @snapshot link_slice_u32 - '//div[@class="item-decl"]/pre[@class="rust"]/code'
 pub fn alpha() -> &'static [u32] {
     loop {}
 }
 
 // @has 'foo/fn.beta.html'
-// @snapshot link_slice_generic - '//pre[@class="rust fn"]/code'
+// @snapshot link_slice_generic - '//div[@class="item-decl"]/pre[@class="rust"]/code'
 pub fn beta<T>() -> &'static [T] {
     loop {}
 }
 
 // @has 'foo/fn.gamma.html'
-// @snapshot link_box_u32 - '//pre[@class="rust fn"]/code'
+// @snapshot link_box_u32 - '//div[@class="item-decl"]/pre[@class="rust"]/code'
 pub fn gamma() -> MyBox<[u32]> {
     loop {}
 }
 
 // @has 'foo/fn.delta.html'
-// @snapshot link_box_generic - '//pre[@class="rust fn"]/code'
+// @snapshot link_box_generic - '//div[@class="item-decl"]/pre[@class="rust"]/code'
 pub fn delta<T>() -> MyBox<[T]> {
     loop {}
 }
index 1d02c13ebfb3c2b9b3ed8d1cf34d94fbb6a8f7eb..5f54b7522ae38774e61e4c8cd7c44dc775ad410b 100644 (file)
@@ -1 +1 @@
-<script type="text/json" id="notable-traits-data">{"Odd":"&lt;h3&gt;Notable traits for &lt;code&gt;&lt;a class=\"struct\" href=\"struct.Odd.html\" title=\"struct foo::Odd\"&gt;Odd&lt;/a&gt;&lt;/code&gt;&lt;/h3&gt;&lt;pre class=\"content\"&gt;&lt;code&gt;&lt;span class=\"where fmt-newline\"&gt;impl &lt;a class=\"trait\" href=\"{{channel}}/core/iter/traits/iterator/trait.Iterator.html\" title=\"trait core::iter::traits::iterator::Iterator\"&gt;Iterator&lt;/a&gt; for &lt;a class=\"struct\" href=\"struct.Odd.html\" title=\"struct foo::Odd\"&gt;Odd&lt;/a&gt;&lt;/span&gt;&lt;span class=\"where fmt-newline\"&gt;    type &lt;a href=\"{{channel}}/core/iter/traits/iterator/trait.Iterator.html#associatedtype.Item\" class=\"associatedtype\"&gt;Item&lt;/a&gt; = &lt;a class=\"primitive\" href=\"{{channel}}/std/primitive.usize.html\"&gt;usize&lt;/a&gt;;&lt;/span&gt;"}</script>
\ No newline at end of file
+<script type="text/json" id="notable-traits-data">{"Odd":"&lt;h3&gt;Notable traits for &lt;code&gt;&lt;a class=\"struct\" href=\"struct.Odd.html\" title=\"struct foo::Odd\"&gt;Odd&lt;/a&gt;&lt;/code&gt;&lt;/h3&gt;&lt;pre&gt;&lt;code&gt;&lt;span class=\"where fmt-newline\"&gt;impl &lt;a class=\"trait\" href=\"{{channel}}/core/iter/traits/iterator/trait.Iterator.html\" title=\"trait core::iter::traits::iterator::Iterator\"&gt;Iterator&lt;/a&gt; for &lt;a class=\"struct\" href=\"struct.Odd.html\" title=\"struct foo::Odd\"&gt;Odd&lt;/a&gt;&lt;/span&gt;&lt;span class=\"where fmt-newline\"&gt;    type &lt;a href=\"{{channel}}/core/iter/traits/iterator/trait.Iterator.html#associatedtype.Item\" class=\"associatedtype\"&gt;Item&lt;/a&gt; = &lt;a class=\"primitive\" href=\"{{channel}}/std/primitive.usize.html\"&gt;usize&lt;/a&gt;;&lt;/span&gt;"}</script>
\ No newline at end of file
index 3c0369e3d3413e221f775561555dd3e37b9d2c28..3bfb43a0bef2c7fed5758002ec0bf30484a185ce 100644 (file)
@@ -4,7 +4,7 @@ struct BodyId {
     hir_id: usize,
 }
 
-// @has 'foo/fn.body_owner.html' '//*[@class="rust fn"]' 'pub fn body_owner(_: BodyId)'
+// @has 'foo/fn.body_owner.html' '//div[@class="item-decl"]/pre[@class="rust"]' 'pub fn body_owner(_: BodyId)'
 pub fn body_owner(BodyId { hir_id }: BodyId) {
     // ...
 }
index d9b9c7957d9a1a0fe2357d0417a14c1d2fa978c5..f5fdb1f52bf8ece496e19348de475cee5d5821a1 100644 (file)
@@ -1,5 +1,5 @@
 #![crate_name = "foo"]
 
 // @has foo/fn.foo.html
-// @has - '//*[@class="rust fn"]' "_: &(dyn ToString + 'static)"
+// @has - '//div[@class="item-decl"]/pre[@class="rust"]' "_: &(dyn ToString + 'static)"
 pub fn foo(_: &(ToString + 'static)) {}
index 87240f233ff2afef5e28f90d16d49c1b6d0c9133..5d34ec09b66dc621a2d1bd3b0ddac44acac71f2d 100644 (file)
@@ -81,8 +81,8 @@ pub enum EnumStructVariant {
 }
 
 // @has 'toggle_item_contents/enum.LargeEnum.html'
-// @count - '//*[@class="rust enum"]//details[@class="toggle type-contents-toggle"]' 1
-// @has - '//*[@class="rust enum"]//details[@class="toggle type-contents-toggle"]' 'Show 13 variants'
+// @count - '//div[@class="item-decl"]/pre//details[@class="toggle type-contents-toggle"]' 1
+// @has - '//div[@class="item-decl"]/pre//details[@class="toggle type-contents-toggle"]' 'Show 13 variants'
 pub enum LargeEnum {
     A, B, C, D, E, F(u8), G, H, I, J, K, L, M
 }
index 8ab1143d1f70edb0286e7f51d80affad75d9e046..d72c10f2b2542e9a04df62cb6ddd72da877ea208 100644 (file)
@@ -19,7 +19,7 @@ pub struct Foo(
 );
 
 // @has foo/enum.Bar.html
-// @has - '//pre[@class="rust enum"]' 'BarVariant(String),'
+// @has - '//div[@class="item-decl"]/pre[@class="rust"]' 'BarVariant(String),'
 // @matches - '//*[@id="variant.BarVariant.fields"]/h4' '^Tuple Fields$'
 // @has - '//*[@id="variant.BarVariant.field.0"]' '0: String'
 // @has - '//*[@id="variant.BarVariant.fields"]//*[@class="docblock"]' 'Hello docs'
index 62e2f9e7ef24474008caf643c7d04a49eba424c6..e716de8b55c8fdfe3a037eedee8c4d237c9919fb 100644 (file)
@@ -1,20 +1,20 @@
 #![crate_name = "foo"]
 
 // @has foo/fn.tuple0.html //pre 'pub fn tuple0(x: ())'
-// @snapshot link_unit - '//pre[@class="rust fn"]/code'
+// @snapshot link_unit - '//div[@class="item-decl"]/pre[@class="rust"]/code'
 pub fn tuple0(x: ()) -> () { x }
 // @has foo/fn.tuple1.html //pre 'pub fn tuple1(x: (i32,)) -> (i32,)'
-// @snapshot link1_i32 - '//pre[@class="rust fn"]/code'
+// @snapshot link1_i32 - '//div[@class="item-decl"]/pre[@class="rust"]/code'
 pub fn tuple1(x: (i32,)) -> (i32,) { x }
 // @has foo/fn.tuple2.html //pre 'pub fn tuple2(x: (i32, i32)) -> (i32, i32)'
-// @snapshot link2_i32 - '//pre[@class="rust fn"]/code'
+// @snapshot link2_i32 - '//div[@class="item-decl"]/pre[@class="rust"]/code'
 pub fn tuple2(x: (i32, i32)) -> (i32, i32) { x }
 // @has foo/fn.tuple1_t.html //pre 'pub fn tuple1_t<T>(x: (T,)) -> (T,)'
-// @snapshot link1_t - '//pre[@class="rust fn"]/code'
+// @snapshot link1_t - '//div[@class="item-decl"]/pre[@class="rust"]/code'
 pub fn tuple1_t<T>(x: (T,)) -> (T,) { x }
 // @has foo/fn.tuple2_t.html //pre 'pub fn tuple2_t<T>(x: (T, T)) -> (T, T)'
-// @snapshot link2_t - '//pre[@class="rust fn"]/code'
+// @snapshot link2_t - '//div[@class="item-decl"]/pre[@class="rust"]/code'
 pub fn tuple2_t<T>(x: (T, T)) -> (T, T) { x }
 // @has foo/fn.tuple2_tu.html //pre 'pub fn tuple2_tu<T, U>(x: (T, U)) -> (T, U)'
-// @snapshot link2_tu - '//pre[@class="rust fn"]/code'
+// @snapshot link2_tu - '//div[@class="item-decl"]/pre[@class="rust"]/code'
 pub fn tuple2_tu<T, U>(x: (T, U)) -> (T, U) { x }
index ae3a6031519fb56a50ea9423c277a2dc31295387..353cd1c477249dae11398e389762048d56f53b9a 100644 (file)
@@ -4,14 +4,14 @@
 
 extern crate unit_return;
 
-// @has 'foo/fn.f0.html' '//*[@class="rust fn"]' 'F: FnMut(u8) + Clone'
+// @has 'foo/fn.f0.html' '//div[@class="item-decl"]/pre[@class="rust"]' 'F: FnMut(u8) + Clone'
 pub fn f0<F: FnMut(u8) + Clone>(f: F) {}
 
-// @has 'foo/fn.f1.html' '//*[@class="rust fn"]' 'F: FnMut(u16) + Clone'
+// @has 'foo/fn.f1.html' '//div[@class="item-decl"]/pre[@class="rust"]' 'F: FnMut(u16) + Clone'
 pub fn f1<F: FnMut(u16) -> () + Clone>(f: F) {}
 
-// @has 'foo/fn.f2.html' '//*[@class="rust fn"]' 'F: FnMut(u32) + Clone'
+// @has 'foo/fn.f2.html' '//div[@class="item-decl"]/pre[@class="rust"]' 'F: FnMut(u32) + Clone'
 pub use unit_return::f2;
 
-// @has 'foo/fn.f3.html' '//*[@class="rust fn"]' 'F: FnMut(u64) + Clone'
+// @has 'foo/fn.f3.html' '//div[@class="item-decl"]/pre[@class="rust"]' 'F: FnMut(u64) + Clone'
 pub use unit_return::f3;
index fe7cad8c3ef84a3d874312b7b3c3a2f6ab7c8140..c0c085e6ac72500d63deb2848063d4f54654b136 100644 (file)
@@ -1,6 +1,6 @@
 #![crate_name = "foo"]
 
 // @has foo/fn.foo.html
-// @has - '//*[@class="rust fn"]' 'pub fn foo<X, Y: ?Sized>(_: &X)'
-// @has - '//*[@class="rust fn"]' 'where X: ?Sized,'
+// @has - '//div[@class="item-decl"]/pre[@class="rust"]' 'pub fn foo<X, Y: ?Sized>(_: &X)'
+// @has - '//div[@class="item-decl"]/pre[@class="rust"]' 'where X: ?Sized,'
 pub fn foo<X, Y: ?Sized>(_: &X) where X: ?Sized {}
index 6c1b5d31513526b21c8630f5c6b3e2c39d05864b..f84cb3753cb93a119ab72b7fc3a91eb5c76c0c99 100644 (file)
@@ -1 +1 @@
-<div class="item-decl"><pre class="rust struct"><code>pub struct Simd&lt;T&gt;(_)<br /><span class="where">where<br />&#160;&#160;&#160;&#160;T: <a class="trait" href="trait.MyTrait.html" title="trait foo::MyTrait">MyTrait</a></span>;</code></pre></div>
\ No newline at end of file
+<div class="item-decl"><pre class="rust"><code>pub struct Simd&lt;T&gt;(_)<br /><span class="where">where<br />&#160;&#160;&#160;&#160;T: <a class="trait" href="trait.MyTrait.html" title="trait foo::MyTrait">MyTrait</a></span>;</code></pre></div>
\ No newline at end of file
index d5d6c556d8001464d0576c0cab4461f75ebbaaa7..85b626674274cff6f4f4fd53b3c2e7bdcc9f1251 100644 (file)
@@ -1,4 +1,4 @@
-<div class="item-decl"><pre class="rust trait"><code>pub trait TraitWhere {
+<div class="item-decl"><pre class="rust"><code>pub trait TraitWhere {
     type <a href="#associatedtype.Item" class="associatedtype">Item</a>&lt;'a&gt;<br />&#160;&#160;&#160;&#160;<span class="where">where<br />&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;Self: 'a</span>;
 
     fn <a href="#method.func" class="fn">func</a>(self)<br />&#160;&#160;&#160;&#160;<span class="where">where<br />&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;Self: <a class="trait" href="{{channel}}/core/marker/trait.Sized.html" title="trait core::marker::Sized">Sized</a></span>,
index 8b5bce28f5a2ac4cbc3552fd7d0fb0cb5de95ce5..3ac0c6872a82150c4a621c8919f96a81a7e431ff 100644 (file)
@@ -57,6 +57,6 @@ pub enum Foxtrot<F> { Foxtrot1(F) }
 //          "impl<F> MyTrait for Foxtrot<F>where F: MyTrait"
 impl<F> MyTrait for Foxtrot<F>where F: MyTrait {}
 
-// @has foo/type.Golf.html '//pre[@class="rust typedef"]' \
+// @has foo/type.Golf.html '//div[@class="item-decl"]/pre[@class="rust"]' \
 //          "type Golf<T>where T: Clone, = (T, T)"
 pub type Golf<T> where T: Clone = (T, T);
index f7663e4616ae6784ca44d04afe6af378232d98ea..20bde549a037807f3f0351828177b1e6006899d4 100644 (file)
@@ -1,4 +1,4 @@
-<div class="item-decl"><pre class="rust enum"><code>pub enum Cow&lt;'a, B&gt;<span class="where fmt-newline">where<br />&#160;&#160;&#160;&#160;B: <a class="trait" href="trait.ToOwned.html" title="trait foo::ToOwned">ToOwned</a>&lt;dyn <a class="trait" href="{{channel}}/core/clone/trait.Clone.html" title="trait core::clone::Clone">Clone</a>&gt; + ?<a class="trait" href="{{channel}}/core/marker/trait.Sized.html" title="trait core::marker::Sized">Sized</a> + 'a,</span>{
+<div class="item-decl"><pre class="rust"><code>pub enum Cow&lt;'a, B&gt;<span class="where fmt-newline">where<br />&#160;&#160;&#160;&#160;B: <a class="trait" href="trait.ToOwned.html" title="trait foo::ToOwned">ToOwned</a>&lt;dyn <a class="trait" href="{{channel}}/core/clone/trait.Clone.html" title="trait core::clone::Clone">Clone</a>&gt; + ?<a class="trait" href="{{channel}}/core/marker/trait.Sized.html" title="trait core::marker::Sized">Sized</a> + 'a,</span>{
     Borrowed(<a class="primitive" href="{{channel}}/std/primitive.reference.html">&amp;'a </a>B),
     Whatever(<a class="primitive" href="{{channel}}/std/primitive.u32.html">u32</a>),
 }</code></pre></div>
\ No newline at end of file
index ac7d7759821107d0acaf605772640945441a58c3..d9fc0c22309dbc97f4f8c3fdd469bf676063e65f 100644 (file)
@@ -1,4 +1,4 @@
-<div class="item-decl"><pre class="rust enum"><code>pub enum Cow2&lt;'a, B:&#160;?<a class="trait" href="{{channel}}/core/marker/trait.Sized.html" title="trait core::marker::Sized">Sized</a> + <a class="trait" href="trait.ToOwned.html" title="trait foo::ToOwned">ToOwned</a>&lt;dyn <a class="trait" href="{{channel}}/core/clone/trait.Clone.html" title="trait core::clone::Clone">Clone</a>&gt; + 'a&gt; {
+<div class="item-decl"><pre class="rust"><code>pub enum Cow2&lt;'a, B:&#160;?<a class="trait" href="{{channel}}/core/marker/trait.Sized.html" title="trait core::marker::Sized">Sized</a> + <a class="trait" href="trait.ToOwned.html" title="trait foo::ToOwned">ToOwned</a>&lt;dyn <a class="trait" href="{{channel}}/core/clone/trait.Clone.html" title="trait core::clone::Clone">Clone</a>&gt; + 'a&gt; {
     Borrowed(<a class="primitive" href="{{channel}}/std/primitive.reference.html">&amp;'a </a>B),
     Whatever(<a class="primitive" href="{{channel}}/std/primitive.u32.html">u32</a>),
 }</code></pre></div>
\ No newline at end of file
index fa3f224e7ad0f5d21deb3fc66599eb2bd162d3d5..f375265d7c183ff57963f75bee1a1f136f2bf82e 100644 (file)
@@ -1,4 +1,4 @@
-<div class="item-decl"><pre class="rust struct"><code>pub struct Struct&lt;'a, B&gt;<span class="where fmt-newline">where<br />&#160;&#160;&#160;&#160;B: <a class="trait" href="trait.ToOwned.html" title="trait foo::ToOwned">ToOwned</a>&lt;dyn <a class="trait" href="{{channel}}/core/clone/trait.Clone.html" title="trait core::clone::Clone">Clone</a>&gt; + ?<a class="trait" href="{{channel}}/core/marker/trait.Sized.html" title="trait core::marker::Sized">Sized</a> + 'a,</span>{
+<div class="item-decl"><pre class="rust"><code>pub struct Struct&lt;'a, B&gt;<span class="where fmt-newline">where<br />&#160;&#160;&#160;&#160;B: <a class="trait" href="trait.ToOwned.html" title="trait foo::ToOwned">ToOwned</a>&lt;dyn <a class="trait" href="{{channel}}/core/clone/trait.Clone.html" title="trait core::clone::Clone">Clone</a>&gt; + ?<a class="trait" href="{{channel}}/core/marker/trait.Sized.html" title="trait core::marker::Sized">Sized</a> + 'a,</span>{
     pub a: <a class="primitive" href="{{channel}}/std/primitive.reference.html">&amp;'a </a>B,
     pub b: <a class="primitive" href="{{channel}}/std/primitive.u32.html">u32</a>,
 }</code></pre></div>
\ No newline at end of file
index fb06b0f77c5ce5f2c431f1a84bac8d4362965f52..1c59962eb1c58e1b75c0082fb48ec72b065422e6 100644 (file)
@@ -1,4 +1,4 @@
-<div class="item-decl"><pre class="rust struct"><code>pub struct Struct2&lt;'a, B:&#160;?<a class="trait" href="{{channel}}/core/marker/trait.Sized.html" title="trait core::marker::Sized">Sized</a> + <a class="trait" href="trait.ToOwned.html" title="trait foo::ToOwned">ToOwned</a>&lt;dyn <a class="trait" href="{{channel}}/core/clone/trait.Clone.html" title="trait core::clone::Clone">Clone</a>&gt; + 'a&gt; {
+<div class="item-decl"><pre class="rust"><code>pub struct Struct2&lt;'a, B:&#160;?<a class="trait" href="{{channel}}/core/marker/trait.Sized.html" title="trait core::marker::Sized">Sized</a> + <a class="trait" href="trait.ToOwned.html" title="trait foo::ToOwned">ToOwned</a>&lt;dyn <a class="trait" href="{{channel}}/core/clone/trait.Clone.html" title="trait core::clone::Clone">Clone</a>&gt; + 'a&gt; {
     pub a: <a class="primitive" href="{{channel}}/std/primitive.reference.html">&amp;'a </a>B,
     pub b: <a class="primitive" href="{{channel}}/std/primitive.u32.html">u32</a>,
 }</code></pre></div>
\ No newline at end of file
index 50cfe362328b681749613d4e81a1e01a4c2a1bb8..a2df06e7736af684b7ed8e12eedded83148491b0 100644 (file)
@@ -1,4 +1,4 @@
-<div class="item-decl"><pre class="rust trait"><code>pub trait ToOwned&lt;T&gt;<span class="where fmt-newline">where<br />&#160;&#160;&#160;&#160;T: <a class="trait" href="{{channel}}/core/clone/trait.Clone.html" title="trait core::clone::Clone">Clone</a>,</span>{
+<div class="item-decl"><pre class="rust"><code>pub trait ToOwned&lt;T&gt;<span class="where fmt-newline">where<br />&#160;&#160;&#160;&#160;T: <a class="trait" href="{{channel}}/core/clone/trait.Clone.html" title="trait core::clone::Clone">Clone</a>,</span>{
     type <a href="#associatedtype.Owned" class="associatedtype">Owned</a>;
 
     fn <a href="#tymethod.to_owned" class="fn">to_owned</a>(&amp;self) -&gt; Self::<a class="associatedtype" href="trait.ToOwned.html#associatedtype.Owned" title="type foo::ToOwned::Owned">Owned</a>;
index 21eb89b75011b633667482e5547a34933bfab95e..2bfd6f7685a6729dc8e1b2ffbe679978f04eaf58 100644 (file)
@@ -1,4 +1,4 @@
-<div class="item-decl"><pre class="rust trait"><code>pub trait ToOwned2&lt;T:&#160;<a class="trait" href="{{channel}}/core/clone/trait.Clone.html" title="trait core::clone::Clone">Clone</a>&gt; {
+<div class="item-decl"><pre class="rust"><code>pub trait ToOwned2&lt;T:&#160;<a class="trait" href="{{channel}}/core/clone/trait.Clone.html" title="trait core::clone::Clone">Clone</a>&gt; {
     type <a href="#associatedtype.Owned" class="associatedtype">Owned</a>;
 
     fn <a href="#tymethod.to_owned" class="fn">to_owned</a>(&amp;self) -&gt; Self::<a class="associatedtype" href="trait.ToOwned2.html#associatedtype.Owned" title="type foo::ToOwned2::Owned">Owned</a>;
index 7bb177debc3a8c828e6526c0f5a0e45ba47af870..066f8f87b160b6af62cfce0a1955940fd26c7304 100644 (file)
@@ -1,3 +1,3 @@
-<div class="item-decl"><pre class="rust union"><code>pub union Union&lt;'a, B&gt;<span class="where fmt-newline">where<br />&#160;&#160;&#160;&#160;B: <a class="trait" href="trait.ToOwned.html" title="trait foo::ToOwned">ToOwned</a>&lt;dyn <a class="trait" href="{{channel}}/core/clone/trait.Clone.html" title="trait core::clone::Clone">Clone</a>&gt; + ?<a class="trait" href="{{channel}}/core/marker/trait.Sized.html" title="trait core::marker::Sized">Sized</a> + 'a,</span>{
+<div class="item-decl"><pre class="rust"><code>pub union Union&lt;'a, B&gt;<span class="where fmt-newline">where<br />&#160;&#160;&#160;&#160;B: <a class="trait" href="trait.ToOwned.html" title="trait foo::ToOwned">ToOwned</a>&lt;dyn <a class="trait" href="{{channel}}/core/clone/trait.Clone.html" title="trait core::clone::Clone">Clone</a>&gt; + ?<a class="trait" href="{{channel}}/core/marker/trait.Sized.html" title="trait core::marker::Sized">Sized</a> + 'a,</span>{
     /* private fields */
 }</code></pre></div>
\ No newline at end of file
index 0d237df53c7f463605bce968c4d96a3d8c0d876b..6b48c5dbd3e2803a1027e8eeec408fd1c9f8472f 100644 (file)
@@ -1,3 +1,3 @@
-<div class="item-decl"><pre class="rust union"><code>pub union Union2&lt;'a, B:&#160;?<a class="trait" href="{{channel}}/core/marker/trait.Sized.html" title="trait core::marker::Sized">Sized</a> + <a class="trait" href="trait.ToOwned.html" title="trait foo::ToOwned">ToOwned</a>&lt;dyn <a class="trait" href="{{channel}}/core/clone/trait.Clone.html" title="trait core::clone::Clone">Clone</a>&gt; + 'a&gt; {
+<div class="item-decl"><pre class="rust"><code>pub union Union2&lt;'a, B:&#160;?<a class="trait" href="{{channel}}/core/marker/trait.Sized.html" title="trait core::marker::Sized">Sized</a> + <a class="trait" href="trait.ToOwned.html" title="trait foo::ToOwned">ToOwned</a>&lt;dyn <a class="trait" href="{{channel}}/core/clone/trait.Clone.html" title="trait core::clone::Clone">Clone</a>&gt; + 'a&gt; {
     /* private fields */
 }</code></pre></div>
\ No newline at end of file
index 8d8221bcdf29342cd9a66fa6c73e392ec87c99df..178b8adc3f04af8f6de30df590a7de6e7499430e 100644 (file)
@@ -1,5 +1,5 @@
 use std::fmt::Debug;
 
-// @has 'wrapping/fn.foo.html' '//pre[@class="rust fn"]' 'pub fn foo() -> impl Debug'
-// @count - '//pre[@class="rust fn"]/br' 0
+// @has 'wrapping/fn.foo.html' '//div[@class="item-decl"]/pre[@class="rust"]' 'pub fn foo() -> impl Debug'
+// @count - '//div[@class="item-decl"]/pre[@class="rust"]/br' 0
 pub fn foo() -> impl Debug {}
index ff1be0804151b58973c1a41c5616e7a3cc5f73df..bdfd9628c48019966b85e7e894fb7d7a6003a4a4 100644 (file)
@@ -2,6 +2,7 @@
 // Testing that a librustc_ast can parse modules with canonicalized base path
 // ignore-cross-compile
 // ignore-remote
+// no-remap-src-base: Reading `file!()` (expectedly) fails when enabled.
 
 #![feature(rustc_private)]
 
index afb7f8fea92a1fae2aa6c205b9e55849c3720ddb..c023d1b1590565bd5fd61d7286780fa0a4f734db 100644 (file)
@@ -16,7 +16,7 @@ LL | fn f1<F>(_: F) where F: Fn(&(), &()) {}
 help: consider borrowing the argument
    |
 LL |     f1(|_: &(), _: &()| {});
-   |            ~~~     ~~~
+   |            +       +
 
 error[E0631]: type mismatch in closure arguments
   --> $DIR/anonymous-higher-ranked-lifetime.rs:3:5
@@ -35,8 +35,8 @@ LL | fn f2<F>(_: F) where F: for<'a> Fn(&'a (), &()) {}
    |                         ^^^^^^^^^^^^^^^^^^^^^^^ required by this bound in `f2`
 help: consider borrowing the argument
    |
-LL |     f2(|_: &'a (), _: &()| {});
-   |            ~~~~~~     ~~~
+LL |     f2(|_: &(), _: &()| {});
+   |            +       +
 
 error[E0631]: type mismatch in closure arguments
   --> $DIR/anonymous-higher-ranked-lifetime.rs:4:5
@@ -56,7 +56,7 @@ LL | fn f3<'a, F>(_: F) where F: Fn(&'a (), &()) {}
 help: consider borrowing the argument
    |
 LL |     f3(|_: &(), _: &()| {});
-   |            ~~~     ~~~
+   |            +       +
 
 error[E0631]: type mismatch in closure arguments
   --> $DIR/anonymous-higher-ranked-lifetime.rs:5:5
@@ -75,8 +75,8 @@ LL | fn f4<F>(_: F) where F: for<'r> Fn(&(), &'r ()) {}
    |                         ^^^^^^^^^^^^^^^^^^^^^^^ required by this bound in `f4`
 help: consider borrowing the argument
    |
-LL |     f4(|_: &(), _: &'r ()| {});
-   |            ~~~     ~~~~~~
+LL |     f4(|_: &(), _: &()| {});
+   |            +       +
 
 error[E0631]: type mismatch in closure arguments
   --> $DIR/anonymous-higher-ranked-lifetime.rs:6:5
@@ -95,17 +95,15 @@ LL | fn f5<F>(_: F) where F: for<'r> Fn(&'r (), &'r ()) {}
    |                         ^^^^^^^^^^^^^^^^^^^^^^^^^^ required by this bound in `f5`
 help: consider borrowing the argument
    |
-LL |     f5(|_: &'r (), _: &'r ()| {});
-   |            ~~~~~~     ~~~~~~
+LL |     f5(|_: &(), _: &()| {});
+   |            +       +
 
 error[E0631]: type mismatch in closure arguments
   --> $DIR/anonymous-higher-ranked-lifetime.rs:7:5
    |
 LL |     g1(|_: (), _: ()| {});
-   |     ^^ --------------
-   |     |  |   |
-   |     |  |   help: consider borrowing the argument: `&()`
-   |     |  found signature defined here
+   |     ^^ -------------- found signature defined here
+   |     |
    |     expected due to this
    |
    = note: expected closure signature `for<'a> fn(&'a (), Box<(dyn for<'a> Fn(&'a ()) + 'static)>) -> _`
@@ -115,15 +113,17 @@ note: required by a bound in `g1`
    |
 LL | fn g1<F>(_: F) where F: Fn(&(), Box<dyn Fn(&())>) {}
    |                         ^^^^^^^^^^^^^^^^^^^^^^^^^ required by this bound in `g1`
+help: consider borrowing the argument
+   |
+LL |     g1(|_: &(), _: ()| {});
+   |            +
 
 error[E0631]: type mismatch in closure arguments
   --> $DIR/anonymous-higher-ranked-lifetime.rs:8:5
    |
 LL |     g2(|_: (), _: ()| {});
-   |     ^^ --------------
-   |     |  |   |
-   |     |  |   help: consider borrowing the argument: `&()`
-   |     |  found signature defined here
+   |     ^^ -------------- found signature defined here
+   |     |
    |     expected due to this
    |
    = note: expected closure signature `for<'a> fn(&'a (), for<'a> fn(&'a ())) -> _`
@@ -133,15 +133,17 @@ note: required by a bound in `g2`
    |
 LL | fn g2<F>(_: F) where F: Fn(&(), fn(&())) {}
    |                         ^^^^^^^^^^^^^^^^ required by this bound in `g2`
+help: consider borrowing the argument
+   |
+LL |     g2(|_: &(), _: ()| {});
+   |            +
 
 error[E0631]: type mismatch in closure arguments
   --> $DIR/anonymous-higher-ranked-lifetime.rs:9:5
    |
 LL |     g3(|_: (), _: ()| {});
-   |     ^^ --------------
-   |     |  |   |
-   |     |  |   help: consider borrowing the argument: `&'s ()`
-   |     |  found signature defined here
+   |     ^^ -------------- found signature defined here
+   |     |
    |     expected due to this
    |
    = note: expected closure signature `for<'s> fn(&'s (), Box<(dyn for<'a> Fn(&'a ()) + 'static)>) -> _`
@@ -151,15 +153,17 @@ note: required by a bound in `g3`
    |
 LL | fn g3<F>(_: F) where F: for<'s> Fn(&'s (), Box<dyn Fn(&())>) {}
    |                         ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ required by this bound in `g3`
+help: consider borrowing the argument
+   |
+LL |     g3(|_: &(), _: ()| {});
+   |            +
 
 error[E0631]: type mismatch in closure arguments
   --> $DIR/anonymous-higher-ranked-lifetime.rs:10:5
    |
 LL |     g4(|_: (), _: ()| {});
-   |     ^^ --------------
-   |     |  |   |
-   |     |  |   help: consider borrowing the argument: `&()`
-   |     |  found signature defined here
+   |     ^^ -------------- found signature defined here
+   |     |
    |     expected due to this
    |
    = note: expected closure signature `for<'a> fn(&'a (), for<'r> fn(&'r ())) -> _`
@@ -169,6 +173,10 @@ note: required by a bound in `g4`
    |
 LL | fn g4<F>(_: F) where F: Fn(&(), for<'r> fn(&'r ())) {}
    |                         ^^^^^^^^^^^^^^^^^^^^^^^^^^^ required by this bound in `g4`
+help: consider borrowing the argument
+   |
+LL |     g4(|_: &(), _: ()| {});
+   |            +
 
 error[E0631]: type mismatch in closure arguments
   --> $DIR/anonymous-higher-ranked-lifetime.rs:11:5
@@ -188,7 +196,7 @@ LL | fn h1<F>(_: F) where F: Fn(&(), Box<dyn Fn(&())>, &(), fn(&(), &())) {}
 help: consider borrowing the argument
    |
 LL |     h1(|_: &(), _: (), _: &(), _: ()| {});
-   |            ~~~            ~~~
+   |            +              +
 
 error[E0631]: type mismatch in closure arguments
   --> $DIR/anonymous-higher-ranked-lifetime.rs:12:5
@@ -207,8 +215,8 @@ LL | fn h2<F>(_: F) where F: for<'t0> Fn(&(), Box<dyn Fn(&())>, &'t0 (), fn(&(),
    |                         ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ required by this bound in `h2`
 help: consider borrowing the argument
    |
-LL |     h2(|_: &(), _: (), _: &'t0 (), _: ()| {});
-   |            ~~~            ~~~~~~~
+LL |     h2(|_: &(), _: (), _: &(), _: ()| {});
+   |            +              +
 
 error: aborting due to 11 previous errors
 
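Note (not part of the diff): the "consider borrowing the argument" help above is now rendered with `+` insertion markers instead of `~` replacement spans, since the suggestion only inserts an `&` in front of each closure parameter type and no longer invents lifetimes such as `&'a ()`. Applying the suggestion to the first case gives, as a minimal stand-alone sketch:

    fn f1<F>(_: F) where F: Fn(&(), &()) {}

    fn main() {
        // The closure parameters are written as `&()` so the closure
        // satisfies the `Fn(&(), &())` bound expected by `f1`.
        f1(|_: &(), _: &()| {});
    }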
diff --git a/tests/ui/async-await/auxiliary/issue-107036.rs b/tests/ui/async-await/auxiliary/issue-107036.rs
new file mode 100644 (file)
index 0000000..c3f6141
--- /dev/null
@@ -0,0 +1,12 @@
+// edition:2021
+
+pub trait T {}
+impl T for () {}
+
+pub struct S {}
+
+impl S {
+    pub async fn f<'a>(&self) -> impl T + 'a {
+        ()
+    }
+}
diff --git a/tests/ui/async-await/await-sequence.rs b/tests/ui/async-await/await-sequence.rs
new file mode 100644 (file)
index 0000000..726c428
--- /dev/null
@@ -0,0 +1,21 @@
+// edition:2021
+// compile-flags: -Z drop-tracking
+// build-pass
+
+use std::collections::HashMap;
+
+fn main() {
+    let _ = real_main();
+}
+
+async fn nop() {}
+
+async fn real_main() {
+    nop().await;
+    nop().await;
+    nop().await;
+    nop().await;
+
+    let mut map: HashMap<(), ()> = HashMap::new();
+    map.insert((), nop().await);
+}
diff --git a/tests/ui/async-await/in-trait/missing-send-bound.rs b/tests/ui/async-await/in-trait/missing-send-bound.rs
new file mode 100644 (file)
index 0000000..78922b5
--- /dev/null
@@ -0,0 +1,21 @@
+// edition:2021
+
+#![feature(async_fn_in_trait)]
+//~^ WARN the feature `async_fn_in_trait` is incomplete and may not be safe to use and/or cause compiler crashes
+
+trait Foo {
+    async fn bar();
+}
+
+async fn test<T: Foo>() {
+    T::bar().await;
+}
+
+fn test2<T: Foo>() {
+    assert_is_send(test::<T>());
+    //~^ ERROR future cannot be sent between threads safely
+}
+
+fn assert_is_send(_: impl Send) {}
+
+fn main() {}
diff --git a/tests/ui/async-await/in-trait/missing-send-bound.stderr b/tests/ui/async-await/in-trait/missing-send-bound.stderr
new file mode 100644 (file)
index 0000000..5cedf3d
--- /dev/null
@@ -0,0 +1,29 @@
+warning: the feature `async_fn_in_trait` is incomplete and may not be safe to use and/or cause compiler crashes
+  --> $DIR/missing-send-bound.rs:3:12
+   |
+LL | #![feature(async_fn_in_trait)]
+   |            ^^^^^^^^^^^^^^^^^
+   |
+   = note: see issue #91611 <https://github.com/rust-lang/rust/issues/91611> for more information
+   = note: `#[warn(incomplete_features)]` on by default
+
+error: future cannot be sent between threads safely
+  --> $DIR/missing-send-bound.rs:15:20
+   |
+LL |     assert_is_send(test::<T>());
+   |                    ^^^^^^^^^^^ future returned by `test` is not `Send`
+   |
+   = help: within `impl Future<Output = ()>`, the trait `Send` is not implemented for `impl Future<Output = ()>`
+note: future is not `Send` as it awaits another future which is not `Send`
+  --> $DIR/missing-send-bound.rs:11:5
+   |
+LL |     T::bar().await;
+   |     ^^^^^^^^ await occurs here on type `impl Future<Output = ()>`, which is not `Send`
+note: required by a bound in `assert_is_send`
+  --> $DIR/missing-send-bound.rs:19:27
+   |
+LL | fn assert_is_send(_: impl Send) {}
+   |                           ^^^^ required by this bound in `assert_is_send`
+
+error: aborting due to previous error; 1 warning emitted
+
diff --git a/tests/ui/async-await/issue-107036.rs b/tests/ui/async-await/issue-107036.rs
new file mode 100644 (file)
index 0000000..6a22de2
--- /dev/null
@@ -0,0 +1,14 @@
+// aux-build:issue-107036.rs
+// edition:2021
+// check-pass
+
+extern crate issue_107036;
+use issue_107036::S;
+
+async fn f() {
+    S{}.f().await;
+}
+
+fn main() {
+    let _ = f();
+}
diff --git a/tests/ui/borrowck/issue-92157.rs b/tests/ui/borrowck/issue-92157.rs
new file mode 100644 (file)
index 0000000..6ee2320
--- /dev/null
@@ -0,0 +1,40 @@
+#![feature(no_core)]
+#![feature(lang_items)]
+
+#![no_core]
+
+#[cfg(target_os = "linux")]
+#[link(name = "c")]
+extern {}
+
+#[lang = "start"]
+fn start<T>(_main: fn() -> T, _argc: isize, _argv: *const *const u8) -> isize {
+    //~^ ERROR: incorrect number of parameters for the `start` lang item
+    40+2
+}
+
+#[lang = "sized"]
+pub trait Sized {}
+#[lang = "copy"]
+pub trait Copy {}
+
+#[lang = "drop_in_place"]
+#[allow(unconditional_recursion)]
+pub unsafe fn drop_in_place<T: ?Sized>(to_drop: *mut T) {
+    drop_in_place(to_drop)
+}
+
+#[lang = "add"]
+trait Add<RHS> {
+    type Output;
+    fn add(self, other: RHS) -> Self::Output;
+}
+
+impl Add<isize> for isize {
+    type Output = isize;
+    fn add(self, other: isize) -> isize {
+        self + other
+    }
+}
+
+fn main() {}
diff --git a/tests/ui/borrowck/issue-92157.stderr b/tests/ui/borrowck/issue-92157.stderr
new file mode 100644 (file)
index 0000000..a4010d7
--- /dev/null
@@ -0,0 +1,11 @@
+error: incorrect number of parameters for the `start` lang item
+  --> $DIR/issue-92157.rs:11:1
+   |
+LL | fn start<T>(_main: fn() -> T, _argc: isize, _argv: *const *const u8) -> isize {
+   | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+   |
+   = note: the `start` lang item should have four parameters, but found 3
+   = note: the `start` lang item should have the signature `fn(fn() -> T, isize, *const *const u8, u8) -> isize`
+
+error: aborting due to previous error
+
index 1aed218aeb473c2bef1345b6e08c827f3502fcef..fbebc80d91ced614a38fe76cf3acc64cf3313e9f 100644 (file)
@@ -2,8 +2,12 @@ fn main() {
     let u = 5 as bool; //~ ERROR cannot cast as `bool`
                        //~| HELP compare with zero instead
                        //~| SUGGESTION 5 != 0
+
     let t = (1 + 2) as bool; //~ ERROR cannot cast as `bool`
                              //~| HELP compare with zero instead
                              //~| SUGGESTION (1 + 2) != 0
-    let v = "hello" as bool; //~ ERROR casting `&'static str` as `bool` is invalid
+
+    let v = "hello" as bool;
+    //~^ ERROR casting `&'static str` as `bool` is invalid
+    //~| HELP consider using the `is_empty` method on `&'static str` to determine if it contains anything
 }
index 15d94ab69d88c707016c314e0286620ff5855a94..19ac8f10fec216abdaf92214f69d3086aeb1791b 100644 (file)
@@ -5,16 +5,21 @@ LL |     let u = 5 as bool;
    |             ^^^^^^^^^ help: compare with zero instead: `5 != 0`
 
 error[E0054]: cannot cast as `bool`
-  --> $DIR/cast-as-bool.rs:5:13
+  --> $DIR/cast-as-bool.rs:6:13
    |
 LL |     let t = (1 + 2) as bool;
    |             ^^^^^^^^^^^^^^^ help: compare with zero instead: `(1 + 2) != 0`
 
 error[E0606]: casting `&'static str` as `bool` is invalid
-  --> $DIR/cast-as-bool.rs:8:13
+  --> $DIR/cast-as-bool.rs:10:13
    |
 LL |     let v = "hello" as bool;
    |             ^^^^^^^^^^^^^^^
+   |
+help: consider using the `is_empty` method on `&'static str` to determine if it contains anything
+   |
+LL |     let v = !"hello".is_empty();
+   |             +       ~~~~~~~~~~~
 
 error: aborting due to 3 previous errors
 
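For context, a minimal stand-alone sketch (not part of the diff) of the code the new E0606 help suggests in place of a string-to-`bool` cast:

    fn main() {
        // `"hello" as bool` is rejected; the suggested emptiness check is:
        let v = !"hello".is_empty();
        assert!(v);
    }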
diff --git a/tests/ui/cast/issue-106883-is-empty.rs b/tests/ui/cast/issue-106883-is-empty.rs
new file mode 100644 (file)
index 0000000..27e0816
--- /dev/null
@@ -0,0 +1,27 @@
+use std::ops::Deref;
+
+struct Foo;
+
+impl Deref for Foo {
+    type Target = [u8];
+
+    fn deref(&self) -> &Self::Target {
+        &[]
+    }
+}
+
+fn main() {
+    let _ = "foo" as bool;
+    //~^ ERROR casting `&'static str` as `bool` is invalid [E0606]
+
+    let _ = String::from("foo") as bool;
+    //~^ ERROR non-primitive cast: `String` as `bool` [E0605]
+
+    let _ = Foo as bool;
+    //~^ ERROR non-primitive cast: `Foo` as `bool` [E0605]
+}
+
+fn _slice(bar: &[i32]) -> bool {
+    bar as bool
+    //~^ ERROR casting `&[i32]` as `bool` is invalid [E0606]
+}
diff --git a/tests/ui/cast/issue-106883-is-empty.stderr b/tests/ui/cast/issue-106883-is-empty.stderr
new file mode 100644 (file)
index 0000000..7115c77
--- /dev/null
@@ -0,0 +1,58 @@
+error[E0606]: casting `&'static str` as `bool` is invalid
+  --> $DIR/issue-106883-is-empty.rs:14:13
+   |
+LL |     let _ = "foo" as bool;
+   |             ^^^^^^^^^^^^^
+   |
+help: consider using the `is_empty` method on `&'static str` to determine if it contains anything
+   |
+LL |     let _ = !"foo".is_empty();
+   |             +     ~~~~~~~~~~~
+
+error[E0605]: non-primitive cast: `String` as `bool`
+  --> $DIR/issue-106883-is-empty.rs:17:13
+   |
+LL |     let _ = String::from("foo") as bool;
+   |             ^^^^^^^^^^^^^^^^^^^^^^^^^^^ an `as` expression can only be used to convert between primitive types or to coerce to a specific trait object
+   |
+note: this expression `Deref`s to `str` which implements `is_empty`
+  --> $DIR/issue-106883-is-empty.rs:17:13
+   |
+LL |     let _ = String::from("foo") as bool;
+   |             ^^^^^^^^^^^^^^^^^^^
+help: consider using the `is_empty` method on `String` to determine if it contains anything
+   |
+LL |     let _ = !String::from("foo").is_empty();
+   |             +                   ~~~~~~~~~~~
+
+error[E0605]: non-primitive cast: `Foo` as `bool`
+  --> $DIR/issue-106883-is-empty.rs:20:13
+   |
+LL |     let _ = Foo as bool;
+   |             ^^^^^^^^^^^ an `as` expression can only be used to convert between primitive types or to coerce to a specific trait object
+   |
+note: this expression `Deref`s to `[u8]` which implements `is_empty`
+  --> $DIR/issue-106883-is-empty.rs:20:13
+   |
+LL |     let _ = Foo as bool;
+   |             ^^^
+help: consider using the `is_empty` method on `Foo` to determine if it contains anything
+   |
+LL |     let _ = !Foo.is_empty();
+   |             +   ~~~~~~~~~~~
+
+error[E0606]: casting `&[i32]` as `bool` is invalid
+  --> $DIR/issue-106883-is-empty.rs:25:5
+   |
+LL |     bar as bool
+   |     ^^^^^^^^^^^
+   |
+help: consider using the `is_empty` method on `&[i32]` to determine if it contains anything
+   |
+LL |     !bar.is_empty()
+   |     +   ~~~~~~~~~~~
+
+error: aborting due to 4 previous errors
+
+Some errors have detailed explanations: E0605, E0606.
+For more information about an error, try `rustc --explain E0605`.
index 1c69b07e3d4af0859656946d6094f3c2bb07e173..3169e4781ee2e0c18ca968683971ef3b68e0b834 100644 (file)
@@ -1,7 +1,7 @@
-// check-fail
-// known-bug
+// edition:2021
+// known-bug: unknown
 // unset-rustc-env:RUST_BACKTRACE
-// compile-flags:-Z trait-solver=chalk --edition=2021
+// compile-flags:-Z trait-solver=chalk
 // error-pattern:internal compiler error
 // failure-status:101
 // normalize-stderr-test "DefId([^)]*)" -> "..."
index d1508cb17001b4cf807bc3dcb3a0449f2854dd98..8043f1e5a05820ee01ada4cdd9d569127a8ef9e8 100644 (file)
@@ -37,7 +37,7 @@ LL | async fn foo(x: u32) -> u32 {
    = help: the trait `Future` is not implemented for `[async fn body@$DIR/async.rs:23:29: 25:2]`
    = note: [async fn body@$DIR/async.rs:23:29: 25:2] must be a future or must implement `IntoFuture` to be awaited
 
-error: internal compiler error: projection clauses should be implied from elsewhere. obligation: `Obligation(predicate=Binder(ProjectionPredicate(AliasTy { substs: [[async fn body@$DIR/async.rs:23:29: 25:2]], def_id: ...), _use_mk_alias_ty_instead: () }, Term::Ty(u32)), []), depth=0)`
+error: internal compiler error: projection clauses should be implied from elsewhere. obligation: `Obligation(predicate=Binder(ProjectionPredicate(AliasTy { substs: [[async fn body@$DIR/async.rs:23:29: 25:2]], def_id: ...) }, Term::Ty(u32)), []), depth=0)`
   --> $DIR/async.rs:23:25
    |
 LL | async fn foo(x: u32) -> u32 {
index da26302c9d8a48219e8845a02c3820b764b95d4d..32a1edb0024c0da0e3bafa83f17d5f022d3432fc 100644 (file)
@@ -2,10 +2,8 @@ error[E0631]: type mismatch in closure arguments
   --> $DIR/multiple-fn-bounds.rs:10:5
    |
 LL |     foo(move |x| v);
-   |     ^^^ --------
-   |     |   |     |
-   |     |   |     help: do not borrow the argument: `char`
-   |     |   found signature defined here
+   |     ^^^ -------- found signature defined here
+   |     |
    |     expected due to this
    |
    = note: expected closure signature `fn(char) -> _`
@@ -20,6 +18,10 @@ note: required by a bound in `foo`
    |
 LL | fn foo<F: Fn(&char) -> bool + Fn(char) -> bool>(f: F) {
    |                               ^^^^^^^^^^^^^^^^ required by this bound in `foo`
+help: do not borrow the argument
+   |
+LL |     foo(move |char| v);
+   |               ~~~~
 
 error: aborting due to previous error
 
index 70665ba06f95435b138f5c608ae0d2dfed43153f..5eb8dc2a4687f53181f707efdea2267c79b558dd 100644 (file)
@@ -1,5 +1,11 @@
 // Test that encountering closures during coherence does not cause issues.
 #![feature(type_alias_impl_trait, generators)]
+#![cfg_attr(specialized, feature(specialization))]
+#![allow(incomplete_features)]
+
+// revisions: stock specialized
+// [specialized]check-pass
+
 type OpaqueGenerator = impl Sized;
 fn defining_use() -> OpaqueGenerator {
     || {
@@ -13,6 +19,6 @@ fn defining_use() -> OpaqueGenerator {
 trait Trait {}
 impl Trait for Wrapper<OpaqueGenerator> {}
 impl<T: Sync> Trait for Wrapper<T> {}
-//~^ ERROR conflicting implementations of trait `Trait` for type `Wrapper<OpaqueGenerator>`
+//[stock]~^ ERROR conflicting implementations of trait `Trait` for type `Wrapper<OpaqueGenerator>`
 
 fn main() {}
diff --git a/tests/ui/coherence/coherence-with-generator.stderr b/tests/ui/coherence/coherence-with-generator.stderr
deleted file mode 100644 (file)
index 6d3be2e..0000000
+++ /dev/null
@@ -1,11 +0,0 @@
-error[E0119]: conflicting implementations of trait `Trait` for type `Wrapper<OpaqueGenerator>`
-  --> $DIR/coherence-with-generator.rs:15:1
-   |
-LL | impl Trait for Wrapper<OpaqueGenerator> {}
-   | --------------------------------------- first implementation here
-LL | impl<T: Sync> Trait for Wrapper<T> {}
-   | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ conflicting implementation for `Wrapper<OpaqueGenerator>`
-
-error: aborting due to previous error
-
-For more information about this error, try `rustc --explain E0119`.
diff --git a/tests/ui/coherence/coherence-with-generator.stock.stderr b/tests/ui/coherence/coherence-with-generator.stock.stderr
new file mode 100644 (file)
index 0000000..478ac49
--- /dev/null
@@ -0,0 +1,11 @@
+error[E0119]: conflicting implementations of trait `Trait` for type `Wrapper<OpaqueGenerator>`
+  --> $DIR/coherence-with-generator.rs:21:1
+   |
+LL | impl Trait for Wrapper<OpaqueGenerator> {}
+   | --------------------------------------- first implementation here
+LL | impl<T: Sync> Trait for Wrapper<T> {}
+   | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ conflicting implementation for `Wrapper<OpaqueGenerator>`
+
+error: aborting due to previous error
+
+For more information about this error, try `rustc --explain E0119`.
index a7b78b80ca5ea4a1082437d88f9daf797a62270e..24aa405211f4cc3f4322b4f5911b56590da6e6a3 100644 (file)
@@ -10,7 +10,7 @@ error[E0770]: the type of const parameters must not depend on other generic para
 LL | pub struct SelfDependent<const N: [u8; N]>;
    |                                        ^ the type must not depend on the parameter `N`
 
-error: `[u8; _]` is forbidden as the type of a const generic parameter
+error: `[u8; N]` is forbidden as the type of a const generic parameter
   --> $DIR/const-param-type-depends-on-const-param.rs:11:47
    |
 LL | pub struct Dependent<const N: usize, const X: [u8; N]>([(); N]);
@@ -19,7 +19,7 @@ LL | pub struct Dependent<const N: usize, const X: [u8; N]>([(); N]);
    = note: the only supported types are integers, `bool` and `char`
    = help: more complex types are supported with `#![feature(adt_const_params)]`
 
-error: `[u8; _]` is forbidden as the type of a const generic parameter
+error: `[u8; N]` is forbidden as the type of a const generic parameter
   --> $DIR/const-param-type-depends-on-const-param.rs:15:35
    |
 LL | pub struct SelfDependent<const N: [u8; N]>;
index 9d50f9a47ff6ef46d8e40ed363677db70f20f7ab..64b2acb03629235f33cad7d2032359eeb4fa1a07 100644 (file)
 
 pub struct Dependent<const N: usize, const X: [u8; N]>([(); N]);
 //~^ ERROR: the type of const parameters must not depend on other generic parameters
-//[min]~^^ ERROR `[u8; _]` is forbidden
+//[min]~^^ ERROR `[u8; N]` is forbidden
 
 pub struct SelfDependent<const N: [u8; N]>;
 //~^ ERROR: the type of const parameters must not depend on other generic parameters
-//[min]~^^ ERROR `[u8; _]` is forbidden
+//[min]~^^ ERROR `[u8; N]` is forbidden
 
 fn main() {}
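As the accompanying note states, only integers, `bool` and `char` are accepted as const parameter types without `#![feature(adt_const_params)]`. A minimal sketch (not part of the diff; the names `Len`, `Flag` and `Letter` are illustrative) of declarations that are accepted on stable:

    struct Len<const N: usize>([u8; N]);
    struct Flag<const B: bool>;
    struct Letter<const C: char>;

    fn main() {
        let _ = Len::<3>([1, 2, 3]);
        let _ = Flag::<true>;
        let _ = Letter::<'x'>;
    }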
index 68ce61bd4a374d3118ba39ab1672716dfa178189..d8eebeb0d2115c3673a4eb139a5c86703631a4be 100644 (file)
@@ -1,8 +1,8 @@
-error[E0277]: the trait bound `[Adt; _]: Foo` is not satisfied
+error[E0277]: the trait bound `[Adt; std::mem::size_of::<Self::Assoc>()]: Foo` is not satisfied
   --> $DIR/dont-evaluate-array-len-on-err-1.rs:15:9
    |
 LL |         <[Adt; std::mem::size_of::<Self::Assoc>()] as Foo>::bar()
-   |         ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ the trait `Foo` is not implemented for `[Adt; _]`
+   |         ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ the trait `Foo` is not implemented for `[Adt; std::mem::size_of::<Self::Assoc>()]`
 
 error: aborting due to previous error
 
index 041232e86907954a2e70142110fd1070dad440ca..1d10dfdf10c6ee1575d212655a3709dc0844a149 100644 (file)
@@ -10,7 +10,7 @@ error: overly complex generic constant
   --> $DIR/array-size-in-generic-struct-param.rs:19:15
    |
 LL |     arr: [u8; CFG.arr_size],
-   |               ^^^^^^^^^^^^ field access is not supported in generic constant
+   |               ^^^^^^^^^^^^ field access is not supported in generic constants
    |
    = help: consider moving this anonymous constant into a `const` function
    = note: this operation may be supported in the future
diff --git a/tests/ui/const-generics/generic_const_exprs/auxiliary/anon_const_non_local.rs b/tests/ui/const-generics/generic_const_exprs/auxiliary/anon_const_non_local.rs
new file mode 100644 (file)
index 0000000..97be074
--- /dev/null
@@ -0,0 +1,8 @@
+#![feature(generic_const_exprs)]
+#![allow(incomplete_features)]
+
+pub struct Foo<const N: usize>;
+
+pub fn foo<const N: usize>() -> Foo<{ N + 1 }> {
+    Foo
+}
diff --git a/tests/ui/const-generics/generic_const_exprs/const-block-is-poly.rs b/tests/ui/const-generics/generic_const_exprs/const-block-is-poly.rs
new file mode 100644 (file)
index 0000000..7332a8f
--- /dev/null
@@ -0,0 +1,11 @@
+#![feature(inline_const, generic_const_exprs)]
+//~^ WARN the feature `generic_const_exprs` is incomplete
+
+fn foo<T>() {
+    let _ = [0u8; const { std::mem::size_of::<T>() }];
+    //~^ ERROR: overly complex generic constant
+}
+
+fn main() {
+    foo::<i32>();
+}
diff --git a/tests/ui/const-generics/generic_const_exprs/const-block-is-poly.stderr b/tests/ui/const-generics/generic_const_exprs/const-block-is-poly.stderr
new file mode 100644 (file)
index 0000000..f262599
--- /dev/null
@@ -0,0 +1,20 @@
+warning: the feature `generic_const_exprs` is incomplete and may not be safe to use and/or cause compiler crashes
+  --> $DIR/const-block-is-poly.rs:1:26
+   |
+LL | #![feature(inline_const, generic_const_exprs)]
+   |                          ^^^^^^^^^^^^^^^^^^^
+   |
+   = note: see issue #76560 <https://github.com/rust-lang/rust/issues/76560> for more information
+   = note: `#[warn(incomplete_features)]` on by default
+
+error: overly complex generic constant
+  --> $DIR/const-block-is-poly.rs:5:19
+   |
+LL |     let _ = [0u8; const { std::mem::size_of::<T>() }];
+   |                   ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ const blocks are not supported in generic constants
+   |
+   = help: consider moving this anonymous constant into a `const` function
+   = note: this operation may be supported in the future
+
+error: aborting due to previous error; 1 warning emitted
+
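The help above suggests moving the anonymous constant into a `const` function; a hedged sketch of that workaround under the same unstable feature (the function name `tsize` is illustrative, and this only builds on a nightly toolchain with `generic_const_exprs`):

    #![feature(generic_const_exprs)]
    #![allow(incomplete_features)]

    const fn tsize<T>() -> usize {
        std::mem::size_of::<T>()
    }

    fn foo<T>()
    where
        [(); tsize::<T>()]:,
    {
        // The array length is now a call to a `const fn` instead of a
        // `const { ... }` block, which the error above does not support.
        let _ = [0u8; tsize::<T>()];
    }

    fn main() {
        foo::<i32>();
    }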
index 9bea4105d58b06099a7a07cfe46e581acea61060..65822856e1d7c100b25ab0e9ce4ab35be19159da 100644 (file)
@@ -15,7 +15,7 @@ LL |         ArrayHolder([0; Self::SIZE])
    |         arguments to this struct are incorrect
    |
    = note: expected array `[u32; X]`
-              found array `[u32; _]`
+              found array `[u32; Self::SIZE]`
 note: tuple struct defined here
   --> $DIR/issue-62504.rs:14:8
    |
index 029528c3a8172772da0aa816061918b95613641e..9baf9790e19b321601d2de1af2bd37bf8d08c790 100644 (file)
@@ -2,13 +2,13 @@ error[E0308]: mismatched types
   --> $DIR/issue-79518-default_trait_method_normalization.rs:16:32
    |
 LL |         Self::AssocInstance == [(); std::mem::size_of::<Self::Assoc>()];
-   |         -------------------    ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ expected associated type, found array `[(); _]`
+   |         -------------------    ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ expected associated type, found array `[(); std::mem::size_of::<Self::Assoc>()]`
    |         |
    |         expected because this is `<Self as Foo>::Assoc`
    |
    = note: expected associated type `<Self as Foo>::Assoc`
-                        found array `[(); _]`
-   = help: consider constraining the associated type `<Self as Foo>::Assoc` to `[(); _]` or calling a method that returns `<Self as Foo>::Assoc`
+                        found array `[(); std::mem::size_of::<Self::Assoc>()]`
+   = help: consider constraining the associated type `<Self as Foo>::Assoc` to `[(); std::mem::size_of::<Self::Assoc>()]` or calling a method that returns `<Self as Foo>::Assoc`
    = note: for more information, visit https://doc.rust-lang.org/book/ch19-03-advanced-traits.html
 
 error: aborting due to previous error
index 5ebb4c3999c365478c09af2842e91a67e7650ac0..823a4f8a185bb8cc7020063d2568921bbdb4dd17 100644 (file)
@@ -2,7 +2,7 @@ error: overly complex generic constant
   --> $DIR/let-bindings.rs:6:68
    |
 LL | fn test<const N: usize>() -> [u8; { let x = N; N + 1 }] where [u8; { let x = N; N + 1 }]: Default {
-   |                                                                    ^^^^^^^^^^^^^^^^^^^^ blocks are not supported in generic constant
+   |                                                                    ^^^^^^^^^^^^^^^^^^^^ blocks are not supported in generic constants
    |
    = help: consider moving this anonymous constant into a `const` function
    = note: this operation may be supported in the future
@@ -11,7 +11,7 @@ error: overly complex generic constant
   --> $DIR/let-bindings.rs:6:35
    |
 LL | fn test<const N: usize>() -> [u8; { let x = N; N + 1 }] where [u8; { let x = N; N + 1 }]: Default {
-   |                                   ^^^^^^^^^^^^^^^^^^^^ blocks are not supported in generic constant
+   |                                   ^^^^^^^^^^^^^^^^^^^^ blocks are not supported in generic constants
    |
    = help: consider moving this anonymous constant into a `const` function
    = note: this operation may be supported in the future
diff --git a/tests/ui/const-generics/generic_const_exprs/non_local_anon_const_diagnostics.rs b/tests/ui/const-generics/generic_const_exprs/non_local_anon_const_diagnostics.rs
new file mode 100644 (file)
index 0000000..1254b44
--- /dev/null
@@ -0,0 +1,16 @@
+// aux-build:anon_const_non_local.rs
+
+#![feature(generic_const_exprs)]
+#![allow(incomplete_features)]
+
+extern crate anon_const_non_local;
+
+fn bar<const M: usize>()
+where
+    [(); M + 1]:,
+{
+    let _: anon_const_non_local::Foo<2> = anon_const_non_local::foo::<M>();
+    //~^ ERROR: mismatched types
+}
+
+fn main() {}
diff --git a/tests/ui/const-generics/generic_const_exprs/non_local_anon_const_diagnostics.stderr b/tests/ui/const-generics/generic_const_exprs/non_local_anon_const_diagnostics.stderr
new file mode 100644 (file)
index 0000000..3926c83
--- /dev/null
@@ -0,0 +1,12 @@
+error[E0308]: mismatched types
+  --> $DIR/non_local_anon_const_diagnostics.rs:12:43
+   |
+LL |     let _: anon_const_non_local::Foo<2> = anon_const_non_local::foo::<M>();
+   |                                           ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ expected `2`, found `anon_const_non_local::::foo::{constant#0}`
+   |
+   = note: expected constant `2`
+              found constant `anon_const_non_local::::foo::{constant#0}`
+
+error: aborting due to previous error
+
+For more information about this error, try `rustc --explain E0308`.
index df73acf53de65f27be074845c9c0684c74628d06..265a3b9d233414912aabdca5085cd03487b76f78 100644 (file)
@@ -2,7 +2,7 @@ error: overly complex generic constant
   --> $DIR/unused_expr.rs:4:34
    |
 LL | fn add<const N: usize>() -> [u8; { N + 1; 5 }] {
-   |                                  ^^^^^^^^^^^^ blocks are not supported in generic constant
+   |                                  ^^^^^^^^^^^^ blocks are not supported in generic constants
    |
    = help: consider moving this anonymous constant into a `const` function
    = note: this operation may be supported in the future
@@ -11,7 +11,7 @@ error: overly complex generic constant
   --> $DIR/unused_expr.rs:9:34
    |
 LL | fn div<const N: usize>() -> [u8; { N / 1; 5 }] {
-   |                                  ^^^^^^^^^^^^ blocks are not supported in generic constant
+   |                                  ^^^^^^^^^^^^ blocks are not supported in generic constants
    |
    = help: consider moving this anonymous constant into a `const` function
    = note: this operation may be supported in the future
@@ -20,7 +20,7 @@ error: overly complex generic constant
   --> $DIR/unused_expr.rs:16:38
    |
 LL | fn fn_call<const N: usize>() -> [u8; { foo(N); 5 }] {
-   |                                      ^^^^^^^^^^^^^ blocks are not supported in generic constant
+   |                                      ^^^^^^^^^^^^^ blocks are not supported in generic constants
    |
    = help: consider moving this anonymous constant into a `const` function
    = note: this operation may be supported in the future
diff --git a/tests/ui/const-generics/issue-106419-struct-with-multiple-const-params.rs b/tests/ui/const-generics/issue-106419-struct-with-multiple-const-params.rs
new file mode 100644 (file)
index 0000000..8363e5a
--- /dev/null
@@ -0,0 +1,12 @@
+// check-pass
+#![feature(generic_const_exprs)]
+#![allow(incomplete_features)]
+
+#[derive(Clone)]
+struct Bar<const A: usize, const B: usize>
+where
+    [(); A as usize]:,
+    [(); B as usize]:,
+{}
+
+fn main() {}
index af029a6516bc625ae142e24a1a016c1583c1f99d..5a721720d78b5f31c8567f9cea625f31300449c5 100644 (file)
@@ -4,7 +4,7 @@ error[E0770]: the type of const parameters must not depend on other generic para
 LL | fn foo<const N: usize, const A: [u8; N]>() {}
    |                                      ^ the type must not depend on the parameter `N`
 
-error: `[u8; _]` is forbidden as the type of a const generic parameter
+error: `[u8; N]` is forbidden as the type of a const generic parameter
   --> $DIR/issue-62878.rs:5:33
    |
 LL | fn foo<const N: usize, const A: [u8; N]>() {}
index 578ce765b2fb82048309f1be5ef325f467ed28c2..4c08a484ef47b776e6d30a6cb58035df64d7f9ca 100644 (file)
@@ -4,7 +4,7 @@
 
 fn foo<const N: usize, const A: [u8; N]>() {}
 //~^ ERROR the type of const parameters must not
-//[min]~| ERROR `[u8; _]` is forbidden as the type of a const generic parameter
+//[min]~| ERROR `[u8; N]` is forbidden as the type of a const generic parameter
 
 fn main() {
     foo::<_, { [1] }>();
index cce85772aa4daab36009c35920a5a90c32bfbdc7..47429b7612f94095ec5ab8726964b3a76d34c6de 100644 (file)
@@ -8,7 +8,7 @@ LL | |         let x: Option<Box<Self>> = None;
 LL | |
 LL | |         0
 LL | |     }],
-   | |_____^ blocks are not supported in generic constant
+   | |_____^ blocks are not supported in generic constants
    |
    = help: consider moving this anonymous constant into a `const` function
    = note: this operation may be supported in the future
index d3d9452d316eaac8730b36e6101a06be674e5ebe..98f9f83976aa79b63f76e9f798ea2cec18bfaeba 100644 (file)
@@ -7,7 +7,7 @@ LL | |         let x: Option<S> = None;
 LL | |
 LL | |         0
 LL | |     }],
-   | |_____^ blocks are not supported in generic constant
+   | |_____^ blocks are not supported in generic constants
    |
    = help: consider moving this anonymous constant into a `const` function
    = note: this operation may be supported in the future
index 9604eb35d02b2abc1f68adf27a960755610f4a3d..c03d40a7bb832d80ee7bb2dbdaeeb2745f41150d 100644 (file)
@@ -7,7 +7,7 @@ LL | |         let x: Option<Box<S>> = None;
 LL | |
 LL | |         0
 LL | |     }],
-   | |_____^ blocks are not supported in generic constant
+   | |_____^ blocks are not supported in generic constants
    |
    = help: consider moving this anonymous constant into a `const` function
    = note: this operation may be supported in the future
index 87ed2d4f8da8cc6cdd616f0552194b22c6886f97..998b16a79e63863787d7f6fd7990832f9e032958 100644 (file)
@@ -4,7 +4,7 @@ error[E0770]: the type of const parameters must not depend on other generic para
 LL | fn foo<const LEN: usize, const DATA: [u8; LEN]>() {}
    |                                           ^^^ the type must not depend on the parameter `LEN`
 
-error: `[u8; _]` is forbidden as the type of a const generic parameter
+error: `[u8; LEN]` is forbidden as the type of a const generic parameter
   --> $DIR/issue-71169.rs:5:38
    |
 LL | fn foo<const LEN: usize, const DATA: [u8; LEN]>() {}
index 617149a841893825a1823c6f14ad7d8e806c1000..e4ec6b073761346d080ee8f9c25589e68f1b84ac 100644 (file)
@@ -4,7 +4,7 @@
 
 fn foo<const LEN: usize, const DATA: [u8; LEN]>() {}
 //~^ ERROR the type of const parameters must not
-//[min]~^^ ERROR `[u8; _]` is forbidden as the type of a const generic parameter
+//[min]~^^ ERROR `[u8; LEN]` is forbidden as the type of a const generic parameter
 fn main() {
     const DATA: [u8; 4] = *b"ABCD";
     foo::<4, DATA>();
index f2b58e59f731fd2df847a8ad008ded45c5f946f2..f03354fc472c088a8c3aa6ab1020bf0f03b03ee5 100644 (file)
@@ -1,4 +1,4 @@
-error: `[u32; _]` is forbidden as the type of a const generic parameter
+error: `[u32; LEN]` is forbidden as the type of a const generic parameter
   --> $DIR/issue-73491.rs:8:19
    |
 LL | fn hoge<const IN: [u32; LEN]>() {}
index f15c1f2d4552181e2c18d2396fa2993b58007094..482dbb04daae962eb4dbea96d171d0c7138a5f21 100644 (file)
@@ -6,6 +6,6 @@
 const LEN: usize = 1024;
 
 fn hoge<const IN: [u32; LEN]>() {}
-//[min]~^ ERROR `[u32; _]` is forbidden as the type of a const generic parameter
+//[min]~^ ERROR `[u32; LEN]` is forbidden as the type of a const generic parameter
 
 fn main() {}
index 82ffb2332404496ac404b15703266a2b3feae8fb..134c248347d3cd51facc116d257352bf2b5f4aa6 100644 (file)
@@ -1,4 +1,4 @@
-error: `[u8; _]` is forbidden as the type of a const generic parameter
+error: `[u8; 1 + 2]` is forbidden as the type of a const generic parameter
   --> $DIR/issue-74101.rs:6:18
    |
 LL | fn test<const N: [u8; 1 + 2]>() {}
@@ -7,7 +7,7 @@ LL | fn test<const N: [u8; 1 + 2]>() {}
    = note: the only supported types are integers, `bool` and `char`
    = help: more complex types are supported with `#![feature(adt_const_params)]`
 
-error: `[u8; _]` is forbidden as the type of a const generic parameter
+error: `[u8; 1 + 2]` is forbidden as the type of a const generic parameter
   --> $DIR/issue-74101.rs:9:21
    |
 LL | struct Foo<const N: [u8; 1 + 2]>;
index 6b606b9460fe22f95afc7d30e03488c213da00ee..4c9b2d3c634dacec0fa68bb4b48d094e430aadce 100644 (file)
@@ -4,9 +4,9 @@
 #![cfg_attr(full, allow(incomplete_features))]
 
 fn test<const N: [u8; 1 + 2]>() {}
-//[min]~^ ERROR `[u8; _]` is forbidden as the type of a const generic parameter
+//[min]~^ ERROR `[u8; 1 + 2]` is forbidden as the type of a const generic parameter
 
 struct Foo<const N: [u8; 1 + 2]>;
-//[min]~^ ERROR `[u8; _]` is forbidden as the type of a const generic parameter
+//[min]~^ ERROR `[u8; 1 + 2]` is forbidden as the type of a const generic parameter
 
 fn main() {}
index 7798ae7962983047f91dcb06498886a96467eebd..46af19ef395400c4908865e855af08d1466dc8d3 100644 (file)
@@ -1,4 +1,4 @@
-error: `[u8; _]` is forbidden as the type of a const generic parameter
+error: `[u8; Bar::<u32>::value()]` is forbidden as the type of a const generic parameter
   --> $DIR/issue-75047.rs:14:21
    |
 LL | struct Foo<const N: [u8; Bar::<u32>::value()]>;
index ee3dcf9ecec508fa9af1349197dba1f0f86f7ebc..7b6fb92bca96e04411ee2d41256bf33d9a92b8fd 100644 (file)
@@ -12,6 +12,6 @@ const fn value() -> usize {
 }
 
 struct Foo<const N: [u8; Bar::<u32>::value()]>;
-//[min]~^ ERROR `[u8; _]` is forbidden as the type of a const generic parameter
+//[min]~^ ERROR `[u8; Bar::<u32>::value()]` is forbidden as the type of a const generic parameter
 
 fn main() {}
index 804c0ae5175a8a7304b19e22d588d0a4a8970f56..68b35a38b0f8ada52a5a18342d537854369fe012 100644 (file)
@@ -2,7 +2,7 @@ error: overly complex generic constant
   --> $DIR/issue-77357.rs:6:46
    |
 LL | fn bug<'a, T>() -> &'static dyn MyTrait<[(); { |x: &'a u32| { x }; 4 }]> {
-   |                                              ^^^^^^^^^^^^^^^^^^^^^^^^^ blocks are not supported in generic constant
+   |                                              ^^^^^^^^^^^^^^^^^^^^^^^^^ blocks are not supported in generic constants
    |
    = help: consider moving this anonymous constant into a `const` function
    = note: this operation may be supported in the future
index 4908fb29692ccfd211e4ac4bf1a83ee05311fac2..50dd66da6dbb4505ef24919e204ad8ff24330980 100644 (file)
@@ -1,5 +1,5 @@
 // check-pass
-// known-bug
+// known-bug: unknown
 
 // This should not compile, as the compiler should not know
 // `A - 0` is satisfied by `?x - 0` if `?x` is inferred to `A`.
@@ -10,7 +10,6 @@
 
 impl<'a> Ref<'a> {
     pub fn foo<const A: usize>() -> [(); A - 0] {
-        //~^ WARN function cannot
         Self::foo()
     }
 }
index fc690576875209fd5c91ecaf96252dc43dfab39f..896e1c7ea8dd663be2e440170f875f751e6bf677 100644 (file)
@@ -3,7 +3,6 @@ warning: function cannot return without recursing
    |
 LL |     pub fn foo<const A: usize>() -> [(); A - 0] {
    |     ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ cannot return without recursing
-LL |
 LL |         Self::foo()
    |         ----------- recursive call site
    |
index d6c48e63bb3ce70f98fbdbebddae06b2019356a7..909997340f36efc0d35704b01eba7cf647b9220a 100644 (file)
@@ -12,14 +12,14 @@ impl True for If<true> {}
 fn consume<T: 'static>(_val: T)
 where
     If<{ TypeId::of::<T>() != TypeId::of::<()>() }>: True,
-    //~^ ERROR: can't compare
+    //~^ overly complex generic constant
 {
 }
 
 fn test<T: 'static>()
 where
     If<{ TypeId::of::<T>() != TypeId::of::<()>() }>: True,
-    //~^ ERROR: can't compare
+    //~^ overly complex generic constant
 {
 }
 
index aba4b5c1a8d8d8794f8cae24f8bf22d3ad610f33..f13fd795d7a1020c98c8788c1a4798cbcebf8152 100644 (file)
@@ -1,29 +1,24 @@
-error[E0277]: can't compare `TypeId` with `_` in const contexts
-  --> $DIR/issue-90318.rs:14:28
+error: overly complex generic constant
+  --> $DIR/issue-90318.rs:14:8
    |
 LL |     If<{ TypeId::of::<T>() != TypeId::of::<()>() }>: True,
-   |                            ^^ no implementation for `TypeId == _`
+   |        ^^-----------------^^^^^^^^^^^^^^^^^^^^^^^^
+   |          |
+   |          borrowing is not supported in generic constants
    |
-   = help: the trait `~const PartialEq<_>` is not implemented for `TypeId`
-note: the trait `PartialEq<_>` is implemented for `TypeId`, but that implementation is not `const`
-  --> $DIR/issue-90318.rs:14:28
-   |
-LL |     If<{ TypeId::of::<T>() != TypeId::of::<()>() }>: True,
-   |                            ^^
+   = help: consider moving this anonymous constant into a `const` function
+   = note: this operation may be supported in the future
 
-error[E0277]: can't compare `TypeId` with `_` in const contexts
-  --> $DIR/issue-90318.rs:21:28
+error: overly complex generic constant
+  --> $DIR/issue-90318.rs:21:8
    |
 LL |     If<{ TypeId::of::<T>() != TypeId::of::<()>() }>: True,
-   |                            ^^ no implementation for `TypeId == _`
+   |        ^^-----------------^^^^^^^^^^^^^^^^^^^^^^^^
+   |          |
+   |          borrowing is not supported in generic constants
    |
-   = help: the trait `~const PartialEq<_>` is not implemented for `TypeId`
-note: the trait `PartialEq<_>` is implemented for `TypeId`, but that implementation is not `const`
-  --> $DIR/issue-90318.rs:21:28
-   |
-LL |     If<{ TypeId::of::<T>() != TypeId::of::<()>() }>: True,
-   |                            ^^
+   = help: consider moving this anonymous constant into a `const` function
+   = note: this operation may be supported in the future
 
 error: aborting due to 2 previous errors
 
-For more information about this error, try `rustc --explain E0277`.
index 276ebf31ff8b88012faa61c24f8554150c0efbce..cff02b0d445c81e8d351fec56c4bc1e015d86923 100644 (file)
@@ -1,4 +1,14 @@
-error: `[u8; _]` is forbidden as the type of a const generic parameter
+error: `[u8; {
+           struct Foo<const N: usize>;
+       
+           impl<const N: usize> Foo<N> {
+               fn value() -> usize {
+                   N
+               }
+           }
+       
+           Foo::<17>::value()
+       }]` is forbidden as the type of a const generic parameter
   --> $DIR/nested-type.rs:6:21
    |
 LL |   struct Foo<const N: [u8; {
index d080c210e6bd291c7c53467d263d631f0993861d..4c1926387b926a4e843b3e6c5176af7d0b964c95 100644 (file)
@@ -2,10 +2,17 @@
 // run-rustfix
 #![warn(unused_braces)]
 
+macro_rules! make_1 {
+    () => {
+        1
+    }
+}
+
 struct A<const N: usize>;
 
 fn main() {
     let _: A<7>; // ok
     let _: A<7>; //~ WARN unnecessary braces
     let _: A<{ 3 + 5 }>; // ok
+    let _: A<{make_1!()}>; // ok
 }
index 47f0f8c1c96c91ba2d08368fc460b3401703b611..e9f15b401807915c0f85cf7893e8e14d67d60ab0 100644 (file)
@@ -2,10 +2,17 @@
 // run-rustfix
 #![warn(unused_braces)]
 
+macro_rules! make_1 {
+    () => {
+        1
+    }
+}
+
 struct A<const N: usize>;
 
 fn main() {
     let _: A<7>; // ok
     let _: A<{ 7 }>; //~ WARN unnecessary braces
     let _: A<{ 3 + 5 }>; // ok
+    let _: A<{make_1!()}>; // ok
 }
index 553a3a0f88ebae22e415562ff78016bcef43657b..2c8031c430020b8f86e38ab53137ab02c3783e30 100644 (file)
@@ -1,5 +1,5 @@
 warning: unnecessary braces around const expression
-  --> $DIR/unused_braces.rs:9:14
+  --> $DIR/unused_braces.rs:15:14
    |
 LL |     let _: A<{ 7 }>;
    |              ^^ ^^
diff --git a/tests/ui/consts/auxiliary/closure-in-foreign-crate.rs b/tests/ui/consts/auxiliary/closure-in-foreign-crate.rs
new file mode 100644 (file)
index 0000000..edc7fa8
--- /dev/null
@@ -0,0 +1,8 @@
+#![crate_type = "lib"]
+#![feature(const_closures, const_trait_impl)]
+#![allow(incomplete_features)]
+
+pub const fn test() {
+    let cl = const || {};
+    cl();
+}
diff --git a/tests/ui/consts/closure-in-foreign-crate.rs b/tests/ui/consts/closure-in-foreign-crate.rs
new file mode 100644 (file)
index 0000000..fc8f480
--- /dev/null
@@ -0,0 +1,8 @@
+// aux-build:closure-in-foreign-crate.rs
+// build-pass
+
+extern crate closure_in_foreign_crate;
+
+const _: () = closure_in_foreign_crate::test();
+
+fn main() {}
index 1caf1617e213cb198ad9cbf82e80804013bbd664..08fcd1deab1d337549d3acac3b9050f95792a193 100644 (file)
@@ -11,6 +11,10 @@ help: you might want to use `if let` to ignore the variants that aren't matched
    |
 LL |     A = { if let 0 = 0 { todo!() } 0 },
    |           ++           ~~~~~~~~~~~
+help: alternatively, you could prepend the pattern with an underscore to define a new named variable; identifiers cannot begin with digits
+   |
+LL |     A = { let _0 = 0; 0 },
+   |               +
 
 error: aborting due to previous error
 
index f038ba1c8ed859314c4aa4140e4018cce0bba1c7..5d86ca4bfd17b632efd346068e0603a8afe5ae73 100644 (file)
@@ -11,6 +11,10 @@ help: you might want to use `if let` to ignore the variants that aren't matched
    |
 LL |     let x: [i32; { if let 0 = 0 { todo!() } 0 }] = [];
    |                    ++           ~~~~~~~~~~~
+help: alternatively, you could prepend the pattern with an underscore to define a new named variable; identifiers cannot begin with digits
+   |
+LL |     let x: [i32; { let _0 = 0; 0 }] = [];
+   |                        +
 
 error: aborting due to previous error
 
index b1921f8a41e48db0560471f059b83b6114ab9ec5..c8f66bb0fc027f2d0a59322a3d75753c56633cc2 100644 (file)
@@ -11,6 +11,10 @@ help: you might want to use `if let` to ignore the variants that aren't matched
    |
 LL | const X: i32 = { if let 0 = 0 { todo!() } 0 };
    |                  ++           ~~~~~~~~~~~
+help: alternatively, you could prepend the pattern with an underscore to define a new named variable; identifiers cannot begin with digits
+   |
+LL | const X: i32 = { let _0 = 0; 0 };
+   |                      +
 
 error[E0005]: refutable pattern in local binding
   --> $DIR/const-match-check.rs:8:23
@@ -25,6 +29,10 @@ help: you might want to use `if let` to ignore the variants that aren't matched
    |
 LL | static Y: i32 = { if let 0 = 0 { todo!() } 0 };
    |                   ++           ~~~~~~~~~~~
+help: alternatively, you could prepend the pattern with an underscore to define a new named variable; identifiers cannot begin with digits
+   |
+LL | static Y: i32 = { let _0 = 0; 0 };
+   |                       +
 
 error[E0005]: refutable pattern in local binding
   --> $DIR/const-match-check.rs:13:26
@@ -39,6 +47,10 @@ help: you might want to use `if let` to ignore the variants that aren't matched
    |
 LL |     const X: i32 = { if let 0 = 0 { todo!() } 0 };
    |                      ++           ~~~~~~~~~~~
+help: alternatively, you could prepend the pattern with an underscore to define a new named variable; identifiers cannot begin with digits
+   |
+LL |     const X: i32 = { let _0 = 0; 0 };
+   |                          +
 
 error[E0005]: refutable pattern in local binding
   --> $DIR/const-match-check.rs:19:26
@@ -53,6 +65,10 @@ help: you might want to use `if let` to ignore the variants that aren't matched
    |
 LL |     const X: i32 = { if let 0 = 0 { todo!() } 0 };
    |                      ++           ~~~~~~~~~~~
+help: alternatively, you could prepend the pattern with an underscore to define a new named variable; identifiers cannot begin with digits
+   |
+LL |     const X: i32 = { let _0 = 0; 0 };
+   |                          +
 
 error: aborting due to 4 previous errors
 
index 46f02ce8a45337fc5a44f8fb52056992ea1958e3..17088d9995f2d1c65368333ca1f237324c2c73c7 100644 (file)
@@ -15,8 +15,8 @@ note: ...which requires const-evaluating + checking `Foo::bytes::{constant#0}`..
 LL |     bytes: [u8; std::mem::size_of::<Foo>()]
    |                 ^^^^^^^^^^^^^^^^^^^^^^^^^^
    = note: ...which requires computing layout of `Foo`...
-   = note: ...which requires computing layout of `[u8; _]`...
-   = note: ...which requires normalizing `[u8; _]`...
+   = note: ...which requires computing layout of `[u8; std::mem::size_of::<Foo>()]`...
+   = note: ...which requires normalizing `[u8; std::mem::size_of::<Foo>()]`...
    = note: ...which again requires evaluating type-level constant, completing the cycle
 note: cycle used when checking that `Foo` is well-formed
   --> $DIR/const-size_of-cycle.rs:3:1
diff --git a/tests/ui/consts/const_cmp_type_id.rs b/tests/ui/consts/const_cmp_type_id.rs
new file mode 100644 (file)
index 0000000..f10d1c2
--- /dev/null
@@ -0,0 +1,12 @@
+// run-pass
+#![feature(const_type_id)]
+#![feature(const_trait_impl)]
+
+use std::any::TypeId;
+
+const fn main() {
+    assert!(TypeId::of::<u8>() == TypeId::of::<u8>());
+    assert!(TypeId::of::<()>() != TypeId::of::<u8>());
+    const _A: bool = TypeId::of::<u8>() < TypeId::of::<u16>();
+    // can't assert `_A` because it is not deterministic
+}
index 57f94f8c6ab52d04dbcad0223d5a98a1b40865d9..ec64b956dfe2b8f99f33de8ea7daa6a80c216d41 100644 (file)
@@ -15,8 +15,8 @@ note: ...which requires const-evaluating + checking `Foo::bytes::{constant#0}`..
 LL |     bytes: [u8; unsafe { intrinsics::size_of::<Foo>() }],
    |                 ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
    = note: ...which requires computing layout of `Foo`...
-   = note: ...which requires computing layout of `[u8; _]`...
-   = note: ...which requires normalizing `[u8; _]`...
+   = note: ...which requires computing layout of `[u8; unsafe { intrinsics::size_of::<Foo>() }]`...
+   = note: ...which requires normalizing `[u8; unsafe { intrinsics::size_of::<Foo>() }]`...
    = note: ...which again requires evaluating type-level constant, completing the cycle
 note: cycle used when checking that `Foo` is well-formed
   --> $DIR/issue-44415.rs:5:1
index 7706a97f23b4834bd0c2166e8354c0659ca34f4b..addcc1eaab60bb514d472f03d09109ea267aba38 100644 (file)
@@ -7,6 +7,7 @@
 
 #![feature(const_type_id)]
 #![feature(const_type_name)]
+#![feature(const_trait_impl)]
 
 use std::any::{self, TypeId};
 
@@ -17,7 +18,7 @@ impl<T: 'static> GetTypeId<T> {
 }
 
 const fn check_type_id<T: 'static>() -> bool {
-    matches!(GetTypeId::<T>::VALUE, GetTypeId::<usize>::VALUE)
+    GetTypeId::<T>::VALUE == GetTypeId::<usize>::VALUE
 }
 
 pub struct GetTypeNameLen<T>(T);
index af494e37349146f91cb3a196a0dc5a1df5d849b0..8b3f4b714e1bd33662001b965d3d24a2429cbc6e 100644 (file)
@@ -7,7 +7,7 @@ pub fn crash() -> bool {
         [5; Self::HOST_SIZE] == [6; 0]
         //~^ ERROR constant expression depends on a generic parameter
         //~| ERROR constant expression depends on a generic parameter
-        //~| ERROR can't compare `[{integer}; _]` with `[{integer}; 0]`
+        //~| ERROR can't compare `[{integer}; Self::HOST_SIZE]` with `[{integer}; 0]`
     }
 }
 
index 8de61fcfb7330cc36fc1039076f90bdb18278435..5af82a3e34bf5f3959debe01554ac8da4d833a35 100644 (file)
@@ -14,13 +14,13 @@ LL |         [5; Self::HOST_SIZE] == [6; 0]
    |
    = note: this may fail depending on what value the parameter takes
 
-error[E0277]: can't compare `[{integer}; _]` with `[{integer}; 0]`
+error[E0277]: can't compare `[{integer}; Self::HOST_SIZE]` with `[{integer}; 0]`
   --> $DIR/too_generic_eval_ice.rs:7:30
    |
 LL |         [5; Self::HOST_SIZE] == [6; 0]
-   |                              ^^ no implementation for `[{integer}; _] == [{integer}; 0]`
+   |                              ^^ no implementation for `[{integer}; Self::HOST_SIZE] == [{integer}; 0]`
    |
-   = help: the trait `PartialEq<[{integer}; 0]>` is not implemented for `[{integer}; _]`
+   = help: the trait `PartialEq<[{integer}; 0]>` is not implemented for `[{integer}; Self::HOST_SIZE]`
    = help: the following other types implement trait `PartialEq<Rhs>`:
              <&[B] as PartialEq<[A; N]>>
              <&[T] as PartialEq<Vec<U, A>>>
index 851dca84c3dc01dcfef6885905eca960f54b85e0..d5991bcf5693df726787ee5cb4f2ed4005fd4cf2 100644 (file)
@@ -1,4 +1,4 @@
-error: values of the type `[u8; SIZE]` are too big for the current architecture
+error: values of the type `[u8; usize::MAX]` are too big for the current architecture
 
 error: aborting due to previous error
 
index 851dca84c3dc01dcfef6885905eca960f54b85e0..d5991bcf5693df726787ee5cb4f2ed4005fd4cf2 100644 (file)
@@ -1,4 +1,4 @@
-error: values of the type `[u8; SIZE]` are too big for the current architecture
+error: values of the type `[u8; usize::MAX]` are too big for the current architecture
 
 error: aborting due to previous error
 
diff --git a/tests/ui/dep-graph/dep-graph-dump.rs b/tests/ui/dep-graph/dep-graph-dump.rs
new file mode 100644 (file)
index 0000000..cbc4def
--- /dev/null
@@ -0,0 +1,6 @@
+// Test that dump-dep-graph requires query-dep-graph to be enabled
+
+// incremental
+// compile-flags: -Z dump-dep-graph
+
+fn main() {}
diff --git a/tests/ui/dep-graph/dep-graph-dump.stderr b/tests/ui/dep-graph/dep-graph-dump.stderr
new file mode 100644 (file)
index 0000000..ea44b8b
--- /dev/null
@@ -0,0 +1,2 @@
+error: can't dump dependency graph without `-Z query-dep-graph`
+
index a63cbd4ca7edea3d2227db9b81300925274fe3c4..e6ee11a783b8849acabb006b020b5dad05d4f5ce 100644 (file)
@@ -731,11 +731,12 @@ impl ::core::marker::Copy for Fieldless { }
 #[automatically_derived]
 impl ::core::fmt::Debug for Fieldless {
     fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result {
-        match self {
-            Fieldless::A => ::core::fmt::Formatter::write_str(f, "A"),
-            Fieldless::B => ::core::fmt::Formatter::write_str(f, "B"),
-            Fieldless::C => ::core::fmt::Formatter::write_str(f, "C"),
-        }
+        ::core::fmt::Formatter::write_str(f,
+            match self {
+                Fieldless::A => "A",
+                Fieldless::B => "B",
+                Fieldless::C => "C",
+            })
     }
 }
 #[automatically_derived]
index 700f6616af40ff02586e2c66752b0d326aa32ef1..e0b3b8685d6eb02ea62254083bbe848c6d168596 100644 (file)
@@ -4,7 +4,7 @@ error: cannot find macro `macro_two` in this scope
 LL |     macro_two!();
    |     ^^^^^^^^^
    |
-   = note: consider importing this macro:
+   = help: consider importing this macro:
            two_macros::macro_two
 
 error: aborting due to previous error
diff --git a/tests/ui/error-codes/E0208.rs b/tests/ui/error-codes/E0208.rs
new file mode 100644 (file)
index 0000000..c67d428
--- /dev/null
@@ -0,0 +1,8 @@
+#![feature(rustc_attrs)]
+
+#[rustc_variance]
+struct Foo<'a, T> { //~ ERROR [-, o]
+    t: &'a mut T,
+}
+
+fn main() {}
diff --git a/tests/ui/error-codes/E0208.stderr b/tests/ui/error-codes/E0208.stderr
new file mode 100644 (file)
index 0000000..dbbb41e
--- /dev/null
@@ -0,0 +1,8 @@
+error: [-, o]
+  --> $DIR/E0208.rs:4:1
+   |
+LL | struct Foo<'a, T> {
+   | ^^^^^^^^^^^^^^^^^
+
+error: aborting due to previous error
+
index cb0d8cfc31e235d49d095af3d68deca3c3c2b614..6f6c6513846cf228c0deeaad8c651af0faee273c 100644 (file)
@@ -1,3 +1,4 @@
 fn main() {
-    &0u8 as u8; //~ ERROR E0606
+    let x = &(&0u8 as u8); //~ ERROR E0606
+    x as u8; //~ casting `&u8` as `u8` is invalid [E0606]
 }
index fce24886eb0df82529ddf36c3c9c368f41cc9aff..2492eb299cc55b39de13f96225feee7ebfb76f79 100644 (file)
@@ -1,12 +1,26 @@
 error[E0606]: casting `&u8` as `u8` is invalid
-  --> $DIR/E0606.rs:2:5
+  --> $DIR/E0606.rs:2:14
    |
-LL |     &0u8 as u8;
-   |     ----^^^^^^
-   |     |
-   |     cannot cast `&u8` as `u8`
-   |     help: dereference the expression: `*&0u8`
+LL |     let x = &(&0u8 as u8);
+   |              ^^^^^^^^^^^^
+   |
+help: remove the unneeded borrow
+   |
+LL -     let x = &(&0u8 as u8);
+LL +     let x = &(0u8 as u8);
+   |
+
+error[E0606]: casting `&u8` as `u8` is invalid
+  --> $DIR/E0606.rs:3:5
+   |
+LL |     x as u8;
+   |     ^^^^^^^
+   |
+help: dereference the expression
+   |
+LL |     *x as u8;
+   |     +
 
-error: aborting due to previous error
+error: aborting due to 2 previous errors
 
 For more information about this error, try `rustc --explain E0606`.
index fe9956b70bdd75f3e9cb374adbb589cde91d2e50..e8ee1d96942f782562f80881dabc2b6f78846af4 100644 (file)
@@ -69,10 +69,12 @@ error[E0606]: casting `&u8` as `u32` is invalid
   --> $DIR/error-festival.rs:37:18
    |
 LL |     let y: u32 = x as u32;
-   |                  -^^^^^^^
-   |                  |
-   |                  cannot cast `&u8` as `u32`
-   |                  help: dereference the expression: `*x`
+   |                  ^^^^^^^^
+   |
+help: dereference the expression
+   |
+LL |     let y: u32 = *x as u32;
+   |                  +
 
 error[E0607]: cannot cast thin pointer `*const u8` to fat pointer `*const [u8]`
   --> $DIR/error-festival.rs:41:5
diff --git a/tests/ui/errors/auxiliary/remapped_dep.rs b/tests/ui/errors/auxiliary/remapped_dep.rs
new file mode 100644 (file)
index 0000000..f9bb7bf
--- /dev/null
@@ -0,0 +1,4 @@
+// compile-flags: --remap-path-prefix={{src-base}}/errors/auxiliary=remapped-aux
+// no-remap-src-base: Manually remap, so the remapped path remains in the .stderr file.
+
+pub struct SomeStruct {} // This line should be shown as part of the error.
diff --git a/tests/ui/errors/remap-path-prefix-reverse.local-self.stderr b/tests/ui/errors/remap-path-prefix-reverse.local-self.stderr
new file mode 100644 (file)
index 0000000..51e3b77
--- /dev/null
@@ -0,0 +1,14 @@
+error[E0423]: expected value, found struct `remapped_dep::SomeStruct`
+  --> $DIR/remap-path-prefix-reverse.rs:16:13
+   |
+LL |     let _ = remapped_dep::SomeStruct; // ~ERROR E0423
+   |             ^^^^^^^^^^^^^^^^^^^^^^^^ help: use struct literal syntax instead: `remapped_dep::SomeStruct {}`
+   |
+  ::: remapped-aux/remapped_dep.rs:4:1
+   |
+LL | pub struct SomeStruct {} // This line should be shown as part of the error.
+   | --------------------- `remapped_dep::SomeStruct` defined here
+
+error: aborting due to previous error
+
+For more information about this error, try `rustc --explain E0423`.
diff --git a/tests/ui/errors/remap-path-prefix-reverse.remapped-self.stderr b/tests/ui/errors/remap-path-prefix-reverse.remapped-self.stderr
new file mode 100644 (file)
index 0000000..51e3b77
--- /dev/null
@@ -0,0 +1,14 @@
+error[E0423]: expected value, found struct `remapped_dep::SomeStruct`
+  --> $DIR/remap-path-prefix-reverse.rs:16:13
+   |
+LL |     let _ = remapped_dep::SomeStruct; // ~ERROR E0423
+   |             ^^^^^^^^^^^^^^^^^^^^^^^^ help: use struct literal syntax instead: `remapped_dep::SomeStruct {}`
+   |
+  ::: remapped-aux/remapped_dep.rs:4:1
+   |
+LL | pub struct SomeStruct {} // This line should be shown as part of the error.
+   | --------------------- `remapped_dep::SomeStruct` defined here
+
+error: aborting due to previous error
+
+For more information about this error, try `rustc --explain E0423`.
diff --git a/tests/ui/errors/remap-path-prefix-reverse.rs b/tests/ui/errors/remap-path-prefix-reverse.rs
new file mode 100644 (file)
index 0000000..71c8006
--- /dev/null
@@ -0,0 +1,17 @@
+// aux-build:remapped_dep.rs
+// compile-flags: --remap-path-prefix={{src-base}}/errors/auxiliary=remapped-aux
+
+// revisions: local-self remapped-self
+// [local-self] no-remap-src-base: The hack should work regardless of remapping.
+// [remapped-self] remap-src-base
+
+// Verify that the expected source code is shown.
+// error-pattern: pub struct SomeStruct {} // This line should be shown
+
+extern crate remapped_dep;
+
+fn main() {
+    // The actual error is irrelevant. The important part is that it should show
+    // a snippet of the dependency's source.
+    let _ = remapped_dep::SomeStruct; // ~ERROR E0423
+}
diff --git a/tests/ui/errors/remap-path-prefix.rs b/tests/ui/errors/remap-path-prefix.rs
new file mode 100644 (file)
index 0000000..393b8e2
--- /dev/null
@@ -0,0 +1,17 @@
+// compile-flags: --remap-path-prefix={{src-base}}=remapped
+// no-remap-src-base: Manually remap, so the remapped path remains in the .stderr file.
+
+// The remapped paths are not normalized by compiletest.
+// normalize-stderr-test: "\\(errors)" -> "/$1"
+
+// The remapped paths aren't recognized by compiletest, so we
+// cannot use line-specific patterns.
+// error-pattern: E0425
+
+fn main() {
+    // We cannot actually put an ERROR marker here because
+    // the file name in the error message is not what the
+    // test framework expects (since the filename gets remapped).
+    // We still test the expected error in the stderr file.
+    ferris
+}
diff --git a/tests/ui/errors/remap-path-prefix.stderr b/tests/ui/errors/remap-path-prefix.stderr
new file mode 100644 (file)
index 0000000..62dbd4b
--- /dev/null
@@ -0,0 +1,9 @@
+error[E0425]: cannot find value `ferris` in this scope
+  --> remapped/errors/remap-path-prefix.rs:16:5
+   |
+LL |     ferris
+   |     ^^^^^^ not found in this scope
+
+error: aborting due to previous error
+
+For more information about this error, try `rustc --explain E0425`.
index 63797d4a71bce7f04b1fb44f4609db974f36fadd..92e8a44b55fe570fd2fdacf0fd9f4f1aedc8c01f 100644 (file)
@@ -30,7 +30,7 @@ LL |     use env;
 help: consider importing this module instead
    |
 LL |     use std::env;
-   |         ~~~~~~~~~
+   |         ~~~~~~~~
 
 error: cannot determine resolution for the macro `env`
   --> $DIR/issue-55897.rs:6:22
index 539c8fb27b3b0a5b5909b4e791da7d7f3d68bc03..1b7ef93f41d575abb3e3c0c05b1d615207a357b4 100644 (file)
@@ -28,6 +28,7 @@ pub fn err_with_input_span(input: TokenStream) -> TokenStream {
     TokenStream::from(TokenTree::Literal(lit))
 }
 
+
 #[proc_macro]
 pub fn respan_to_invalid_format_literal(input: TokenStream) -> TokenStream {
     let mut s = Literal::string("{");
@@ -38,3 +39,14 @@ pub fn respan_to_invalid_format_literal(input: TokenStream) -> TokenStream {
         TokenTree::from(Group::new(Delimiter::Parenthesis, TokenTree::from(s).into())),
     ])
 }
+
+#[proc_macro]
+pub fn capture_a_with_prepended_space_preserve_span(input: TokenStream) -> TokenStream {
+    let mut s = Literal::string(" {a}");
+    s.set_span(input.into_iter().next().unwrap().span());
+    TokenStream::from_iter([
+        TokenTree::from(Ident::new("format", Span::call_site())),
+        TokenTree::from(Punct::new('!', Spacing::Alone)),
+        TokenTree::from(Group::new(Delimiter::Parenthesis, TokenTree::from(s).into())),
+    ])
+}
diff --git a/tests/ui/fmt/indoc-issue-106408.rs b/tests/ui/fmt/indoc-issue-106408.rs
new file mode 100644 (file)
index 0000000..e4e3093
--- /dev/null
@@ -0,0 +1,9 @@
+// aux-build:format-string-proc-macro.rs
+// check-pass
+
+extern crate format_string_proc_macro;
+
+fn main() {
+    let a = 0;
+    format_string_proc_macro::capture_a_with_prepended_space_preserve_span!("{a}");
+}
index 44642a10fc076d3c0dfd2bbee21ad09942ebb5b7..bb741c0ef93fa8bf0d523f610c767200256fdece 100644 (file)
@@ -1,10 +1,15 @@
 // aux-build:format-string-proc-macro.rs
+// check-fail
+// known-bug: #106191
+// unset-rustc-env:RUST_BACKTRACE
+// had to be reverted
+// error-pattern:internal compiler error
+// failure-status:101
+// dont-check-compiler-stderr
 
 extern crate format_string_proc_macro;
 
 fn main() {
     format_string_proc_macro::respan_to_invalid_format_literal!("¡");
-    //~^ ERROR invalid format string: expected `'}'` but string was terminated
     format_args!(r#concat!("¡        {"));
-    //~^ ERROR invalid format string: expected `'}'` but string was terminated
 }
index 73a3af65a3849dbce8754f03987a89fc49bb0343..16717f42253d6464fd77914e2136d012d5e36a27 100644 (file)
@@ -1,19 +1,2 @@
-error: invalid format string: expected `'}'` but string was terminated
-  --> $DIR/respanned-literal-issue-106191.rs:6:65
-   |
-LL |     format_string_proc_macro::respan_to_invalid_format_literal!("¡");
-   |                                                                 ^^^ expected `'}'` in format string
-   |
-   = note: if you intended to print `{`, you can escape it using `{{`
-
-error: invalid format string: expected `'}'` but string was terminated
-  --> $DIR/respanned-literal-issue-106191.rs:8:18
-   |
-LL |     format_args!(r#concat!("¡        {"));
-   |                  ^^^^^^^^^^^^^^^^^^^^^^^ expected `'}'` in format string
-   |
-   = note: if you intended to print `{`, you can escape it using `{{`
-   = note: this error originates in the macro `concat` (in Nightly builds, run with -Z macro-backtrace for more info)
-
-error: aborting due to 2 previous errors
-
+query stack during panic:
+end of query stack
index 719d1bd5a4c7dabf518cdb41e5718b511632d482..5101de19d3cb6ea9ad3df085b6e27b5dc480f84b 100644 (file)
@@ -1,5 +1,5 @@
 // check-fail
-// known-bug
+// known-bug: unknown
 
 // This gives us problems because `for<'a> I::Item<'a>: Debug` should mean "for
 // all 'a where I::Item<'a> is WF", but really means "for all 'a possible"
@@ -29,7 +29,6 @@ fn print_items<I>(_iter: I)
 
 fn main() {
     let slice = &mut ();
-    //~^ temporary value dropped while borrowed
     let windows = WindowsMut { slice };
     print_items::<WindowsMut<'_>>(windows);
 }
index 1c9abc4e837c518aa888649f2b5b57b0265631fa..362aeae23614fcf972a6887d860c037f38064ff8 100644 (file)
@@ -3,7 +3,7 @@ error[E0716]: temporary value dropped while borrowed
    |
 LL |     let slice = &mut ();
    |                      ^^ creates a temporary value which is freed while still in use
-...
+LL |     let windows = WindowsMut { slice };
 LL |     print_items::<WindowsMut<'_>>(windows);
    |     -------------------------------------- argument requires that borrow lasts for `'static`
 LL | }
index 8e6c5348e71cab2194056bdb3d980a0ccc99fb8c..3174227a7a1e199a24fb633f6f14f417169a44c6 100644 (file)
@@ -1,5 +1,5 @@
 // check-fail
-// known-bug
+// known-bug: unknown
 
 // This gives us problems because `for<'a> I::Item<'a>: Debug` should mean "for
 // all 'a where I::Item<'a> is WF", but really means "for all 'a possible"
@@ -16,7 +16,6 @@ fn fails<I: LendingIterator, F>(iter: &mut I, f: F) -> bool
 {
     let mut iter2 = Eat(iter, f);
     let _next = iter2.next();
-    //~^ borrowed data escapes
     true
 }
 impl<I: LendingIterator> LendingIterator for &mut I {
index fc4e47a3ba18881963003ef6274e597ff4b9256f..973c548d785edd8bf867d6f23620a594b79f5114 100644 (file)
@@ -1,5 +1,5 @@
 // check-fail
-// known-bug
+// known-bug: unknown
 // edition: 2021
 
 // We really should accept this, but we need implied bounds between the regions
@@ -13,7 +13,6 @@ pub trait FutureIterator {
 
 fn call<I: FutureIterator>() -> impl Send {
     async { // a generator checked for autotrait impl `Send`
-        //~^ lifetime bound not satisfied
         let x = None::<I::Future<'_, '_>>; // a type referencing GAT
         async {}.await; // a yield point
     }
@@ -21,16 +20,13 @@ fn call<I: FutureIterator>() -> impl Send {
 
 fn call2<'a, 'b, I: FutureIterator>() -> impl Send {
     async { // a generator checked for autotrait impl `Send`
-        //~^ lifetime bound not satisfied
         let x = None::<I::Future<'a, 'b>>; // a type referencing GAT
-        //~^ lifetime may not live long enough
         async {}.await; // a yield point
     }
 }
 
 fn call3<'a: 'b, 'b, I: FutureIterator>() -> impl Send {
     async { // a generator checked for autotrait impl `Send`
-        //~^ lifetime bound not satisfied
         let x = None::<I::Future<'a, 'b>>; // a type referencing GAT
         async {}.await; // a yield point
     }
index 72ae288dcab64c4a41eb1ae30f6e9d08a8507f6c..9db124a81e487185972de56ae66977fe0b203336 100644 (file)
@@ -2,77 +2,73 @@ error: lifetime bound not satisfied
   --> $DIR/issue-100013.rs:15:5
    |
 LL | /     async { // a generator checked for autotrait impl `Send`
-LL | |
 LL | |         let x = None::<I::Future<'_, '_>>; // a type referencing GAT
 LL | |         async {}.await; // a yield point
 LL | |     }
    | |_____^
    |
 note: the lifetime defined here...
-  --> $DIR/issue-100013.rs:17:38
+  --> $DIR/issue-100013.rs:16:38
    |
 LL |         let x = None::<I::Future<'_, '_>>; // a type referencing GAT
    |                                      ^^
 note: ...must outlive the lifetime defined here
-  --> $DIR/issue-100013.rs:17:34
+  --> $DIR/issue-100013.rs:16:34
    |
 LL |         let x = None::<I::Future<'_, '_>>; // a type referencing GAT
    |                                  ^^
    = note: this is a known limitation that will be removed in the future (see issue #100013 <https://github.com/rust-lang/rust/issues/100013> for more information)
 
 error: lifetime bound not satisfied
-  --> $DIR/issue-100013.rs:23:5
+  --> $DIR/issue-100013.rs:22:5
    |
 LL | /     async { // a generator checked for autotrait impl `Send`
-LL | |
 LL | |         let x = None::<I::Future<'a, 'b>>; // a type referencing GAT
-LL | |
 LL | |         async {}.await; // a yield point
 LL | |     }
    | |_____^
    |
 note: the lifetime defined here...
-  --> $DIR/issue-100013.rs:22:14
+  --> $DIR/issue-100013.rs:21:14
    |
 LL | fn call2<'a, 'b, I: FutureIterator>() -> impl Send {
    |              ^^
 note: ...must outlive the lifetime defined here
-  --> $DIR/issue-100013.rs:22:10
+  --> $DIR/issue-100013.rs:21:10
    |
 LL | fn call2<'a, 'b, I: FutureIterator>() -> impl Send {
    |          ^^
    = note: this is a known limitation that will be removed in the future (see issue #100013 <https://github.com/rust-lang/rust/issues/100013> for more information)
 
 error: lifetime may not live long enough
-  --> $DIR/issue-100013.rs:25:17
+  --> $DIR/issue-100013.rs:23:17
    |
 LL | fn call2<'a, 'b, I: FutureIterator>() -> impl Send {
    |          --  -- lifetime `'b` defined here
    |          |
    |          lifetime `'a` defined here
-...
+LL |     async { // a generator checked for autotrait impl `Send`
 LL |         let x = None::<I::Future<'a, 'b>>; // a type referencing GAT
    |                 ^^^^^^^^^^^^^^^^^^^^^^^^^ requires that `'a` must outlive `'b`
    |
    = help: consider adding the following bound: `'a: 'b`
 
 error: lifetime bound not satisfied
-  --> $DIR/issue-100013.rs:32:5
+  --> $DIR/issue-100013.rs:29:5
    |
 LL | /     async { // a generator checked for autotrait impl `Send`
-LL | |
 LL | |         let x = None::<I::Future<'a, 'b>>; // a type referencing GAT
 LL | |         async {}.await; // a yield point
 LL | |     }
    | |_____^
    |
 note: the lifetime defined here...
-  --> $DIR/issue-100013.rs:31:18
+  --> $DIR/issue-100013.rs:28:18
    |
 LL | fn call3<'a: 'b, 'b, I: FutureIterator>() -> impl Send {
    |                  ^^
 note: ...must outlive the lifetime defined here
-  --> $DIR/issue-100013.rs:31:10
+  --> $DIR/issue-100013.rs:28:10
    |
 LL | fn call3<'a: 'b, 'b, I: FutureIterator>() -> impl Send {
    |          ^^
index dec668bec10edd262f6b5133b62c89cbe154d646..8f2cc45509ffcbd4bd1cfd4bf639d34ef33f7ca7 100644 (file)
@@ -1,5 +1,5 @@
 // check-fail
-// known-bug
+// known-bug: unknown
 
 // We almost certainly want this to pass, but
 // it's particularly difficult currently, because we need a way of specifying
@@ -22,7 +22,6 @@ fn fmap<U>(self) {
 
         arg = self;
         ret = <Self::Base as Functor>::fmap(arg);
-        //~^ type annotations needed
     }
 }
 
index 1dbe1e2cb2245ba1bc4a9f6560dc13ad8394371d..a085096e1f8c569d29e8c3a404a18d9c96b8d581 100644 (file)
@@ -9,6 +9,16 @@ LL | |         // probably should work.
 LL | |         let _x = x;
 LL | |     };
    | |_____^
+   |
+note: due to current limitations in the borrow checker, this implies a `'static` lifetime
+  --> $DIR/collectivity-regression.rs:11:16
+   |
+LL |     for<'a> T: Get<Value<'a> = ()>,
+   |                ^^^^^^^^^^^^^^^^^^^
+help: consider restricting the type parameter to the `'static` lifetime
+   |
+LL |     for<'a> T: Get<Value<'a> = ()> + 'static,
+   |                                    +++++++++
 
 error: aborting due to previous error
 
diff --git a/tests/ui/generic-associated-types/issue-88360.fixed b/tests/ui/generic-associated-types/issue-88360.fixed
new file mode 100644 (file)
index 0000000..3dea8bf
--- /dev/null
@@ -0,0 +1,20 @@
+// run-rustfix
+
+trait GatTrait {
+    type Gat<'a> where Self: 'a;
+
+    fn test(&self) -> Self::Gat<'_>;
+}
+
+trait SuperTrait<T>
+where
+    Self: 'static,
+    for<'a> Self: GatTrait<Gat<'a> = &'a T>,
+{
+    fn copy(&self) -> Self::Gat<'_> where T: Copy {
+        self.test()
+        //~^ mismatched types
+    }
+}
+
+fn main() {}
index c02690618d0ee9bf08ac4651394350436f852c54..4d4c7ea318078ff3412312e010bfa0872e0488ca 100644 (file)
@@ -1,3 +1,5 @@
+// run-rustfix
+
 trait GatTrait {
     type Gat<'a> where Self: 'a;
 
index cd3750344dda126094b8cb9f69aac27f10c3bdf4..520aeff1894835c966b31a3fb1ad01c7ac399442 100644 (file)
@@ -1,5 +1,5 @@
 error[E0308]: mismatched types
-  --> $DIR/issue-88360.rs:13:9
+  --> $DIR/issue-88360.rs:15:9
    |
 LL | trait SuperTrait<T>
    |                  - this type parameter
@@ -7,13 +7,15 @@ LL | trait SuperTrait<T>
 LL |     fn copy(&self) -> Self::Gat<'_> where T: Copy {
    |                       ------------- expected `&T` because of return type
 LL |         *self.test()
-   |         ^^^^^^^^^^^^
-   |         |
-   |         expected `&T`, found type parameter `T`
-   |         help: consider borrowing here: `&*self.test()`
+   |         ^^^^^^^^^^^^ expected `&T`, found type parameter `T`
    |
    = note:   expected reference `&T`
            found type parameter `T`
+help: consider removing deref here
+   |
+LL -         *self.test()
+LL +         self.test()
+   |
 
 error: aborting due to previous error
 
diff --git a/tests/ui/generics/issue-106694.rs b/tests/ui/generics/issue-106694.rs
new file mode 100644 (file)
index 0000000..c4b02ee
--- /dev/null
@@ -0,0 +1,24 @@
+trait Trait {}
+
+fn foo(_: impl &Trait) {}
+//~^ ERROR expected a trait, found type
+
+fn bar<T: &Trait>(_: T) {}
+//~^ ERROR expected a trait, found type
+
+fn partially_correct_impl(_: impl &*const &Trait + Copy) {}
+//~^ ERROR expected a trait, found type
+
+fn foo_bad(_: impl &BadTrait) {}
+//~^ ERROR expected a trait, found type
+//~^^ ERROR cannot find trait `BadTrait` in this scope
+
+fn bar_bad<T: &BadTrait>(_: T) {}
+//~^ ERROR expected a trait, found type
+//~^^ ERROR cannot find trait `BadTrait` in this scope
+
+fn partially_correct_impl_bad(_: impl &*const &BadTrait + Copy) {}
+//~^ ERROR expected a trait, found type
+//~^^ ERROR cannot find trait `BadTrait` in this scope
+
+fn main() {}
diff --git a/tests/ui/generics/issue-106694.stderr b/tests/ui/generics/issue-106694.stderr
new file mode 100644 (file)
index 0000000..235b898
--- /dev/null
@@ -0,0 +1,93 @@
+error: expected a trait, found type
+  --> $DIR/issue-106694.rs:3:16
+   |
+LL | fn foo(_: impl &Trait) {}
+   |                ^^^^^^
+   |
+help: consider removing the indirection
+   |
+LL - fn foo(_: impl &Trait) {}
+LL + fn foo(_: impl Trait) {}
+   |
+
+error: expected a trait, found type
+  --> $DIR/issue-106694.rs:6:11
+   |
+LL | fn bar<T: &Trait>(_: T) {}
+   |           ^^^^^^
+   |
+help: consider removing the indirection
+   |
+LL - fn bar<T: &Trait>(_: T) {}
+LL + fn bar<T: Trait>(_: T) {}
+   |
+
+error: expected a trait, found type
+  --> $DIR/issue-106694.rs:9:35
+   |
+LL | fn partially_correct_impl(_: impl &*const &Trait + Copy) {}
+   |                                   ^^^^^^^^^^^^^^
+   |
+help: consider removing the indirection
+   |
+LL - fn partially_correct_impl(_: impl &*const &Trait + Copy) {}
+LL + fn partially_correct_impl(_: impl Trait + Copy) {}
+   |
+
+error: expected a trait, found type
+  --> $DIR/issue-106694.rs:12:20
+   |
+LL | fn foo_bad(_: impl &BadTrait) {}
+   |                    ^^^^^^^^^
+   |
+help: consider removing the indirection
+   |
+LL - fn foo_bad(_: impl &BadTrait) {}
+LL + fn foo_bad(_: impl BadTrait) {}
+   |
+
+error: expected a trait, found type
+  --> $DIR/issue-106694.rs:16:15
+   |
+LL | fn bar_bad<T: &BadTrait>(_: T) {}
+   |               ^^^^^^^^^
+   |
+help: consider removing the indirection
+   |
+LL - fn bar_bad<T: &BadTrait>(_: T) {}
+LL + fn bar_bad<T: BadTrait>(_: T) {}
+   |
+
+error: expected a trait, found type
+  --> $DIR/issue-106694.rs:20:39
+   |
+LL | fn partially_correct_impl_bad(_: impl &*const &BadTrait + Copy) {}
+   |                                       ^^^^^^^^^^^^^^^^^
+   |
+help: consider removing the indirection
+   |
+LL - fn partially_correct_impl_bad(_: impl &*const &BadTrait + Copy) {}
+LL + fn partially_correct_impl_bad(_: impl BadTrait + Copy) {}
+   |
+
+error[E0405]: cannot find trait `BadTrait` in this scope
+  --> $DIR/issue-106694.rs:12:21
+   |
+LL | fn foo_bad(_: impl &BadTrait) {}
+   |                     ^^^^^^^^ not found in this scope
+
+error[E0405]: cannot find trait `BadTrait` in this scope
+  --> $DIR/issue-106694.rs:16:16
+   |
+LL | fn bar_bad<T: &BadTrait>(_: T) {}
+   |                ^^^^^^^^ not found in this scope
+
+error[E0405]: cannot find trait `BadTrait` in this scope
+  --> $DIR/issue-106694.rs:20:48
+   |
+LL | fn partially_correct_impl_bad(_: impl &*const &BadTrait + Copy) {}
+   |                                                ^^^^^^^^ not found in this scope
+
+error: aborting due to 9 previous errors
+
+For more information about this error, try `rustc --explain E0405`.
diff --git a/tests/ui/higher-rank-trait-bounds/issue-42114.rs b/tests/ui/higher-rank-trait-bounds/issue-42114.rs
new file mode 100644 (file)
index 0000000..01515fd
--- /dev/null
@@ -0,0 +1,20 @@
+// check-pass
+
+fn lifetime<'a>()
+where
+    &'a (): 'a,
+{
+    /* do nothing */
+}
+
+fn doesnt_work()
+where
+    for<'a> &'a (): 'a,
+{
+    /* do nothing */
+}
+
+fn main() {
+    lifetime();
+    doesnt_work();
+}
index 1f2a96a4c411a62bb1c016d41bf478da67cdf201..c01901be5fe78d976d102700253637731c744ba0 100644 (file)
@@ -51,7 +51,7 @@ LL | n!(f);
 LL |         n!(f);
    |            ^ not found in this scope
    |
-   = note: consider importing this function:
+   = help: consider importing this function:
            foo::f
    = note: this error originates in the macro `n` (in Nightly builds, run with -Z macro-backtrace for more info)
 
@@ -64,7 +64,7 @@ LL | n!(f);
 LL |                 f
    |                 ^ not found in this scope
    |
-   = note: consider importing this function:
+   = help: consider importing this function:
            foo::f
    = note: this error originates in the macro `n` (in Nightly builds, run with -Z macro-backtrace for more info)
 
index 02ddc391f6e3c01f1e22a096ad47ae982771dc3d..3f31b041b62038d329f16c08de10ac3cae1d36c0 100644 (file)
@@ -4,7 +4,7 @@ error: cannot find macro `print` in this scope
 LL |         print!();
    |         ^^^^^
    |
-   = note: consider importing this macro:
+   = help: consider importing this macro:
            std::print
 
 error: aborting due to previous error
diff --git a/tests/ui/impl-trait/issues/issue-105826.rs b/tests/ui/impl-trait/issues/issue-105826.rs
new file mode 100644 (file)
index 0000000..06dc2d4
--- /dev/null
@@ -0,0 +1,39 @@
+// check-pass
+
+use std::io::Write;
+
+struct A(Vec<u8>);
+
+struct B<'a> {
+    one: &'a mut A,
+    two: &'a mut Vec<u8>,
+    three: Vec<u8>,
+}
+
+impl<'a> B<'a> {
+    fn one(&mut self) -> &mut impl Write {
+        &mut self.one.0
+    }
+    fn two(&mut self) -> &mut impl Write {
+        &mut *self.two
+    }
+    fn three(&mut self) -> &mut impl Write {
+        &mut self.three
+    }
+}
+
+struct C<'a>(B<'a>);
+
+impl<'a> C<'a> {
+    fn one(&mut self) -> &mut impl Write {
+        self.0.one()
+    }
+    fn two(&mut self) -> &mut impl Write {
+        self.0.two()
+    }
+    fn three(&mut self) -> &mut impl Write {
+        self.0.three()
+    }
+}
+
+fn main() {}
diff --git a/tests/ui/impl-trait/recursive-generator.rs b/tests/ui/impl-trait/recursive-generator.rs
new file mode 100644 (file)
index 0000000..e876f0f
--- /dev/null
@@ -0,0 +1,23 @@
+#![feature(generators, generator_trait)]
+
+use std::ops::{Generator, GeneratorState};
+
+fn foo() -> impl Generator<Yield = (), Return = ()> {
+    //~^ ERROR cannot resolve opaque type
+    //~| NOTE recursive opaque type
+    //~| NOTE in this expansion of desugaring of
+    || {
+    //~^ NOTE returning here
+        let mut gen = Box::pin(foo());
+        //~^ NOTE generator captures itself here
+        let mut r = gen.as_mut().resume(());
+        while let GeneratorState::Yielded(v) = r {
+            yield v;
+            r = gen.as_mut().resume(());
+        }
+    }
+}
+
+fn main() {
+    foo();
+}
diff --git a/tests/ui/impl-trait/recursive-generator.stderr b/tests/ui/impl-trait/recursive-generator.stderr
new file mode 100644 (file)
index 0000000..e23fd4b
--- /dev/null
@@ -0,0 +1,19 @@
+error[E0720]: cannot resolve opaque type
+  --> $DIR/recursive-generator.rs:5:13
+   |
+LL |   fn foo() -> impl Generator<Yield = (), Return = ()> {
+   |               ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ recursive opaque type
+...
+LL | /     || {
+LL | |
+LL | |         let mut gen = Box::pin(foo());
+   | |             ------- generator captures itself here
+LL | |
+...  |
+LL | |         }
+LL | |     }
+   | |_____- returning here with type `[generator@$DIR/recursive-generator.rs:9:5: 9:7]`
+
+error: aborting due to previous error
+
+For more information about this error, try `rustc --explain E0720`.
index 2e34d3d4275adef3a2fb335c362e233fe796b519..ebb231ae14f0d77b564567461740e41a377d97dd 100644 (file)
@@ -53,6 +53,7 @@ LL |   fn closure_capture() -> impl Sized {
 ...
 LL | /     move || {
 LL | |         x;
+   | |         - closure captures itself here
 LL | |     }
    | |_____- returning here with type `[closure@$DIR/recursive-impl-trait-type-indirect.rs:35:5: 35:12]`
 
@@ -64,6 +65,7 @@ LL |   fn closure_ref_capture() -> impl Sized {
 ...
 LL | /     move || {
 LL | |         &x;
+   | |          - closure captures itself here
 LL | |     }
    | |_____- returning here with type `[closure@$DIR/recursive-impl-trait-type-indirect.rs:43:5: 43:12]`
 
@@ -94,6 +96,7 @@ LL |   fn generator_capture() -> impl Sized {
 LL | /     move || {
 LL | |         yield;
 LL | |         x;
+   | |         - generator captures itself here
 LL | |     }
    | |_____- returning here with type `[generator@$DIR/recursive-impl-trait-type-indirect.rs:61:5: 61:12]`
 
@@ -114,6 +117,7 @@ LL |   fn generator_hold() -> impl Sized {
 LL |
 LL | /     move || {
 LL | |         let x = generator_hold();
+   | |             - generator captures itself here
 LL | |         yield;
 LL | |         x;
 LL | |     }
index 855b1e637e97f5c892796597fae452c54fadc21d..b6b1bc5fccf022a3bcd278ecdf527dcb2296b5dd 100644 (file)
@@ -4,7 +4,7 @@ error[E0432]: unresolved import `super::super::C::D::AA`
 LL |         use super::{super::C::D::AA, AA as _};
    |                     ^^^^^^^^^^^^^^^ no `AA` in `C::D`
    |
-   = note: consider importing this type alias instead:
+   = help: consider importing this type alias instead:
            crate::A::AA
 
 error[E0432]: unresolved import `crate::C::AA`
@@ -13,7 +13,7 @@ error[E0432]: unresolved import `crate::C::AA`
 LL |     use crate::C::{self, AA};
    |                          ^^ no `AA` in `C`
    |
-   = note: consider importing this type alias instead:
+   = help: consider importing this type alias instead:
            crate::A::AA
 
 error[E0432]: unresolved import `crate::C::BB`
@@ -22,7 +22,7 @@ error[E0432]: unresolved import `crate::C::BB`
 LL |     use crate::{A, C::BB};
    |                    ^^^^^ no `BB` in `C`
    |
-   = note: consider importing this type alias instead:
+   = help: consider importing this type alias instead:
            crate::A::BB
 
 error: aborting due to 3 previous errors
index cace2a7a51c8eca5529630fd7788f7415af90c71..f9c5cf920e1f10fc25abc0ab67d36e12ad35295e 100644 (file)
@@ -7,7 +7,7 @@ LL |     use crate::D::B as _;
 help: consider importing this type alias instead
    |
 LL |     use A::B as _;
-   |         ~~~~~~~~~~
+   |         ~~~~~~~~~
 
 error[E0432]: unresolved import `crate::D::B2`
   --> $DIR/bad-import-with-rename.rs:10:9
@@ -18,7 +18,7 @@ LL |     use crate::D::B2;
 help: consider importing this type alias instead
    |
 LL |     use A::B2;
-   |         ~~~~~~
+   |         ~~~~~
 
 error: aborting due to 2 previous errors
 
index 059ca96808d9ae507c6076d3d339b41dbb17cf72..3448f3119778a6d4b5592297f1c56d0096d2f949 100644 (file)
@@ -7,13 +7,13 @@ LL |     use empty::issue_56125;
 help: consider importing one of these items instead
    |
 LL |     use crate::m3::last_segment::issue_56125;
-   |         ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+   |         ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 LL |     use crate::m3::non_last_segment::non_last_segment::issue_56125;
-   |         ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+   |         ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 LL |     use issue_56125::issue_56125;
-   |         ~~~~~~~~~~~~~~~~~~~~~~~~~
+   |         ~~~~~~~~~~~~~~~~~~~~~~~~
 LL |     use issue_56125::last_segment::issue_56125;
-   |         ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+   |         ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
      and 1 other candidate
 
 error[E0659]: `issue_56125` is ambiguous
index 3b72d57fee41ef03499bce2a8e0207fce291bb5e..5374ba3dc9e6d07783f9914523118b6ea121de6a 100644 (file)
@@ -7,7 +7,7 @@ LL | use single_err::something;
 help: consider importing this module instead
    |
 LL | use glob_ok::something;
-   |     ~~~~~~~~~~~~~~~~~~~
+   |     ~~~~~~~~~~~~~~~~~~
 
 error: aborting due to previous error
 
diff --git a/tests/ui/imports/issue-99695-b.fixed b/tests/ui/imports/issue-99695-b.fixed
new file mode 100644 (file)
index 0000000..0e60c73
--- /dev/null
@@ -0,0 +1,20 @@
+// run-rustfix
+#![allow(unused, nonstandard_style)]
+mod m {
+
+    mod p {
+        #[macro_export]
+        macro_rules! nu {
+            {} => {};
+        }
+
+        pub struct other_item;
+    }
+
+    use ::nu;
+pub use self::p::{other_item as _};
+    //~^ ERROR unresolved import `self::p::nu` [E0432]
+    //~| HELP a macro with this name exists at the root of the crate
+}
+
+fn main() {}
diff --git a/tests/ui/imports/issue-99695-b.rs b/tests/ui/imports/issue-99695-b.rs
new file mode 100644 (file)
index 0000000..031443a
--- /dev/null
@@ -0,0 +1,19 @@
+// run-rustfix
+#![allow(unused, nonstandard_style)]
+mod m {
+
+    mod p {
+        #[macro_export]
+        macro_rules! nu {
+            {} => {};
+        }
+
+        pub struct other_item;
+    }
+
+    pub use self::p::{nu, other_item as _};
+    //~^ ERROR unresolved import `self::p::nu` [E0432]
+    //~| HELP a macro with this name exists at the root of the crate
+}
+
+fn main() {}
diff --git a/tests/ui/imports/issue-99695-b.stderr b/tests/ui/imports/issue-99695-b.stderr
new file mode 100644 (file)
index 0000000..b6f5c72
--- /dev/null
@@ -0,0 +1,16 @@
+error[E0432]: unresolved import `self::p::nu`
+  --> $DIR/issue-99695-b.rs:14:23
+   |
+LL |     pub use self::p::{nu, other_item as _};
+   |                       ^^ no `nu` in `m::p`
+   |
+   = note: this could be because a macro annotated with `#[macro_export]` will be exported at the root of the crate instead of the module where it is defined
+help: a macro with this name exists at the root of the crate
+   |
+LL ~     use ::nu;
+LL ~ pub use self::p::{other_item as _};
+   |
+
+error: aborting due to previous error
+
+For more information about this error, try `rustc --explain E0432`.
diff --git a/tests/ui/imports/issue-99695.fixed b/tests/ui/imports/issue-99695.fixed
new file mode 100644 (file)
index 0000000..6bf228b
--- /dev/null
@@ -0,0 +1,17 @@
+// run-rustfix
+#![allow(unused, nonstandard_style)]
+mod m {
+    #[macro_export]
+    macro_rules! nu {
+        {} => {};
+    }
+
+    pub struct other_item;
+
+    use ::nu;
+pub use self::{other_item as _};
+    //~^ ERROR unresolved import `self::nu` [E0432]
+    //~| HELP a macro with this name exists at the root of the crate
+}
+
+fn main() {}
diff --git a/tests/ui/imports/issue-99695.rs b/tests/ui/imports/issue-99695.rs
new file mode 100644 (file)
index 0000000..f7199f1
--- /dev/null
@@ -0,0 +1,16 @@
+// run-rustfix
+#![allow(unused, nonstandard_style)]
+mod m {
+    #[macro_export]
+    macro_rules! nu {
+        {} => {};
+    }
+
+    pub struct other_item;
+
+    pub use self::{nu, other_item as _};
+    //~^ ERROR unresolved import `self::nu` [E0432]
+    //~| HELP a macro with this name exists at the root of the crate
+}
+
+fn main() {}
diff --git a/tests/ui/imports/issue-99695.stderr b/tests/ui/imports/issue-99695.stderr
new file mode 100644 (file)
index 0000000..0ef762e
--- /dev/null
@@ -0,0 +1,16 @@
+error[E0432]: unresolved import `self::nu`
+  --> $DIR/issue-99695.rs:11:20
+   |
+LL |     pub use self::{nu, other_item as _};
+   |                    ^^ no `nu` in `m`
+   |
+   = note: this could be because a macro annotated with `#[macro_export]` will be exported at the root of the crate instead of the module where it is defined
+help: a macro with this name exists at the root of the crate
+   |
+LL ~     use ::nu;
+LL ~ pub use self::{other_item as _};
+   |
+
+error: aborting due to previous error
+
+For more information about this error, try `rustc --explain E0432`.
index 2a56aaa44fef23b3ae435b47451aa9e7986f4769..888c321bc479b8269d1c87b8a22fa8ca7a6be61f 100644 (file)
@@ -1,8 +1,6 @@
 error[E0282]: type annotations needed
   --> $DIR/cannot-infer-partial-try-return.rs:20:9
    |
-LL |         infallible()?;
-   |         ------------- type must be known at this point
 LL |         Ok(())
    |         ^^ cannot infer type of the type parameter `E` declared on the enum `Result`
    |
diff --git a/tests/ui/inference/issue-107090.rs b/tests/ui/inference/issue-107090.rs
new file mode 100644 (file)
index 0000000..9426445
--- /dev/null
@@ -0,0 +1,31 @@
+use std::marker::PhantomData;
+struct Foo<'a, 'b, T>(PhantomData<(&'a (), &'b (), T)>)
+where
+    Foo<'short, 'out, T>: Convert<'a, 'b>;
+    //~^ ERROR mismatched types
+    //~^^ ERROR mismatched types
+    //~^^^ ERROR use of undeclared lifetime name
+    //~| ERROR use of undeclared lifetime name `'out`
+
+trait Convert<'a, 'b>: Sized {
+    fn cast(&'a self) -> &'b Self;
+}
+impl<'long: 'short, 'short, T> Convert<'long, 'b> for Foo<'short, 'out, T> {
+    //~^ ERROR use of undeclared lifetime name
+    //~^^ ERROR use of undeclared lifetime name `'out`
+    //~| ERROR cannot infer an appropriate lifetime for lifetime parameter
+    fn cast(&'long self) -> &'short Foo<'short, 'out, T> {
+        //~^ ERROR use of undeclared lifetime name
+        //~| ERROR cannot infer an appropriate lifetime for lifetime parameter
+        self
+    }
+}
+
+fn badboi<'in_, 'out, T>(x: Foo<'in_, 'out, T>, sadness: &'in_ Foo<'short, 'out, T>) -> &'out T {
+    //~^ ERROR use of undeclared lifetime name
+    //~^^ ERROR incompatible lifetime on type
+    //~| ERROR `x` has lifetime `'in_` but it needs to satisfy a `'static` lifetime requirement
+    sadness.cast()
+}
+
+fn main() {}
diff --git a/tests/ui/inference/issue-107090.stderr b/tests/ui/inference/issue-107090.stderr
new file mode 100644 (file)
index 0000000..33cb390
--- /dev/null
@@ -0,0 +1,173 @@
+error[E0261]: use of undeclared lifetime name `'short`
+  --> $DIR/issue-107090.rs:4:9
+   |
+LL |     Foo<'short, 'out, T>: Convert<'a, 'b>;
+   |         ^^^^^^ undeclared lifetime
+   |
+   = note: for more information on higher-ranked polymorphism, visit https://doc.rust-lang.org/nomicon/hrtb.html
+help: consider making the bound lifetime-generic with a new `'short` lifetime
+   |
+LL |     for<'short> Foo<'short, 'out, T>: Convert<'a, 'b>;
+   |     +++++++++++
+help: consider introducing lifetime `'short` here
+   |
+LL | struct Foo<'short, 'a, 'b, T>(PhantomData<(&'a (), &'b (), T)>)
+   |            +++++++
+
+error[E0261]: use of undeclared lifetime name `'out`
+  --> $DIR/issue-107090.rs:4:17
+   |
+LL |     Foo<'short, 'out, T>: Convert<'a, 'b>;
+   |                 ^^^^ undeclared lifetime
+   |
+help: consider making the bound lifetime-generic with a new `'out` lifetime
+   |
+LL |     for<'out> Foo<'short, 'out, T>: Convert<'a, 'b>;
+   |     +++++++++
+help: consider introducing lifetime `'out` here
+   |
+LL | struct Foo<'out, 'a, 'b, T>(PhantomData<(&'a (), &'b (), T)>)
+   |            +++++
+
+error[E0261]: use of undeclared lifetime name `'b`
+  --> $DIR/issue-107090.rs:13:47
+   |
+LL | impl<'long: 'short, 'short, T> Convert<'long, 'b> for Foo<'short, 'out, T> {
+   |      -                                        ^^ undeclared lifetime
+   |      |
+   |      help: consider introducing lifetime `'b` here: `'b,`
+
+error[E0261]: use of undeclared lifetime name `'out`
+  --> $DIR/issue-107090.rs:13:67
+   |
+LL | impl<'long: 'short, 'short, T> Convert<'long, 'b> for Foo<'short, 'out, T> {
+   |      - help: consider introducing lifetime `'out` here: `'out,`   ^^^^ undeclared lifetime
+
+error[E0261]: use of undeclared lifetime name `'out`
+  --> $DIR/issue-107090.rs:17:49
+   |
+LL |     fn cast(&'long self) -> &'short Foo<'short, 'out, T> {
+   |                                                 ^^^^ undeclared lifetime
+   |
+help: consider introducing lifetime `'out` here
+   |
+LL |     fn cast<'out>(&'long self) -> &'short Foo<'short, 'out, T> {
+   |            ++++++
+help: consider introducing lifetime `'out` here
+   |
+LL | impl<'out, 'long: 'short, 'short, T> Convert<'long, 'b> for Foo<'short, 'out, T> {
+   |      +++++
+
+error[E0261]: use of undeclared lifetime name `'short`
+  --> $DIR/issue-107090.rs:24:68
+   |
+LL | fn badboi<'in_, 'out, T>(x: Foo<'in_, 'out, T>, sadness: &'in_ Foo<'short, 'out, T>) -> &'out T {
+   |           -                                                        ^^^^^^ undeclared lifetime
+   |           |
+   |           help: consider introducing lifetime `'short` here: `'short,`
+
+error[E0308]: mismatched types
+  --> $DIR/issue-107090.rs:4:27
+   |
+LL |     Foo<'short, 'out, T>: Convert<'a, 'b>;
+   |                           ^^^^^^^^^^^^^^^ lifetime mismatch
+   |
+   = note: expected trait `Convert<'static, 'static>`
+              found trait `Convert<'a, 'b>`
+note: the lifetime `'a` as defined here...
+  --> $DIR/issue-107090.rs:2:12
+   |
+LL | struct Foo<'a, 'b, T>(PhantomData<(&'a (), &'b (), T)>)
+   |            ^^
+   = note: ...does not necessarily outlive the static lifetime
+
+error[E0308]: mismatched types
+  --> $DIR/issue-107090.rs:4:27
+   |
+LL |     Foo<'short, 'out, T>: Convert<'a, 'b>;
+   |                           ^^^^^^^^^^^^^^^ lifetime mismatch
+   |
+   = note: expected trait `Convert<'static, 'static>`
+              found trait `Convert<'a, 'b>`
+note: the lifetime `'b` as defined here...
+  --> $DIR/issue-107090.rs:2:16
+   |
+LL | struct Foo<'a, 'b, T>(PhantomData<(&'a (), &'b (), T)>)
+   |                ^^
+   = note: ...does not necessarily outlive the static lifetime
+
+error[E0495]: cannot infer an appropriate lifetime for lifetime parameter `'long` due to conflicting requirements
+  --> $DIR/issue-107090.rs:13:55
+   |
+LL | impl<'long: 'short, 'short, T> Convert<'long, 'b> for Foo<'short, 'out, T> {
+   |                                                       ^^^^^^^^^^^^^^^^^^^^
+   |
+note: first, the lifetime cannot outlive the lifetime `'short` as defined here...
+  --> $DIR/issue-107090.rs:13:21
+   |
+LL | impl<'long: 'short, 'short, T> Convert<'long, 'b> for Foo<'short, 'out, T> {
+   |                     ^^^^^^
+   = note: ...but the lifetime must also be valid for the static lifetime...
+note: ...so that the types are compatible
+  --> $DIR/issue-107090.rs:13:55
+   |
+LL | impl<'long: 'short, 'short, T> Convert<'long, 'b> for Foo<'short, 'out, T> {
+   |                                                       ^^^^^^^^^^^^^^^^^^^^
+   = note: expected `Convert<'short, 'static>`
+              found `Convert<'_, 'static>`
+
+error: incompatible lifetime on type
+  --> $DIR/issue-107090.rs:24:29
+   |
+LL | fn badboi<'in_, 'out, T>(x: Foo<'in_, 'out, T>, sadness: &'in_ Foo<'short, 'out, T>) -> &'out T {
+   |                             ^^^^^^^^^^^^^^^^^^
+   |
+note: because this has an unmet lifetime requirement
+  --> $DIR/issue-107090.rs:4:27
+   |
+LL |     Foo<'short, 'out, T>: Convert<'a, 'b>;
+   |                           ^^^^^^^^^^^^^^^ introduces a `'static` lifetime requirement
+note: the lifetime `'out` as defined here...
+  --> $DIR/issue-107090.rs:24:17
+   |
+LL | fn badboi<'in_, 'out, T>(x: Foo<'in_, 'out, T>, sadness: &'in_ Foo<'short, 'out, T>) -> &'out T {
+   |                 ^^^^
+note: ...does not necessarily outlive the static lifetime introduced by the compatible `impl`
+  --> $DIR/issue-107090.rs:13:1
+   |
+LL | impl<'long: 'short, 'short, T> Convert<'long, 'b> for Foo<'short, 'out, T> {
+   | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error[E0759]: `x` has lifetime `'in_` but it needs to satisfy a `'static` lifetime requirement
+  --> $DIR/issue-107090.rs:24:29
+   |
+LL | fn badboi<'in_, 'out, T>(x: Foo<'in_, 'out, T>, sadness: &'in_ Foo<'short, 'out, T>) -> &'out T {
+   |                             ^^^^^^^^^^^^^^^^^^
+   |                             |
+   |                             this data with lifetime `'in_`...
+   |                             ...is used and required to live as long as `'static` here
+
+error[E0495]: cannot infer an appropriate lifetime for lifetime parameter `'long` due to conflicting requirements
+  --> $DIR/issue-107090.rs:17:13
+   |
+LL |     fn cast(&'long self) -> &'short Foo<'short, 'out, T> {
+   |             ^^^^^^^^^^^
+   |
+note: first, the lifetime cannot outlive the lifetime `'short` as defined here...
+  --> $DIR/issue-107090.rs:13:21
+   |
+LL | impl<'long: 'short, 'short, T> Convert<'long, 'b> for Foo<'short, 'out, T> {
+   |                     ^^^^^^
+   = note: ...but the lifetime must also be valid for the static lifetime...
+note: ...so that the types are compatible
+  --> $DIR/issue-107090.rs:17:13
+   |
+LL |     fn cast(&'long self) -> &'short Foo<'short, 'out, T> {
+   |             ^^^^^^^^^^^
+   = note: expected `Convert<'short, 'static>`
+              found `Convert<'_, 'static>`
+
+error: aborting due to 12 previous errors
+
+Some errors have detailed explanations: E0261, E0308, E0495, E0759.
+For more information about an error, try `rustc --explain E0261`.
index eaaef3463ddc9ee467b24e8c66e08d42db6bec42..c387046e91008f6ad823e9c233f9b2c038068e35 100644 (file)
@@ -6,5 +6,5 @@
 
 fn main() {
     let _ = foo("foo");
-    //~^ ERROR: type annotations needed for `[usize; _]`
+    //~^ ERROR: type annotations needed for `[usize; N]`
 }
index f5c84f960641a077f78d561541315f2c39c7abd1..f2ee8692e38a696d111c52ed9659972a7236d3ce 100644 (file)
@@ -1,4 +1,4 @@
-error[E0282]: type annotations needed for `[usize; _]`
+error[E0282]: type annotations needed for `[usize; N]`
   --> $DIR/issue-83606.rs:8:9
    |
 LL |     let _ = foo("foo");
@@ -6,7 +6,7 @@ LL |     let _ = foo("foo");
    |
 help: consider giving this pattern a type, where the the value of const parameter `N` is specified
    |
-LL |     let _: [usize; _] = foo("foo");
+LL |     let _: [usize; N] = foo("foo");
    |          ++++++++++++
 
 error: aborting due to previous error
index 9b822714f828a71a446fb169782482e6a0deb58c..a9cb7e5257c83d1db6c8fa8df7e3a1fed7811586 100644 (file)
@@ -1,8 +1,13 @@
 error[E0282]: type annotations needed
-  --> $DIR/question-mark-type-infer.rs:10:30
+  --> $DIR/question-mark-type-infer.rs:10:21
    |
 LL |     l.iter().map(f).collect()?
-   |                              ^ cannot infer type
+   |                     ^^^^^^^ cannot infer type of the type parameter `B` declared on the associated function `collect`
+   |
+help: consider specifying the generic argument
+   |
+LL |     l.iter().map(f).collect::<Vec<_>>()?
+   |                            ++++++++++
 
 error: aborting due to previous error
 
index 6dc8f7ddbc9724991d817fb0d27f7e7e2d10d588..029855de2dea0e594b4d4b64019fc358c967f95d 100644 (file)
@@ -1,4 +1,3 @@
-// run-pass
 // run-rustfix
 
 #![allow(non_snake_case)]
@@ -16,11 +15,11 @@ impl Foo {
         match self {
             &
 Foo::Bar if true
-//~^ WARN pattern binding `Bar` is named the same as one of the variants of the type `Foo`
+//~^ ERROR pattern binding `Bar` is named the same as one of the variants of the type `Foo`
 => println!("bar"),
             &
 Foo::Baz if false
-//~^ WARN pattern binding `Baz` is named the same as one of the variants of the type `Foo`
+//~^ ERROR pattern binding `Baz` is named the same as one of the variants of the type `Foo`
 => println!("baz"),
 _ => ()
         }
index cfdc7c9e754888708a1f3d23efb414a901386fa9..bd9e4ea5b601b0c1f894dca181225a43d76ad40c 100644 (file)
@@ -1,4 +1,3 @@
-// run-pass
 // run-rustfix
 
 #![allow(non_snake_case)]
@@ -16,11 +15,11 @@ fn foo(&self) {
         match self {
             &
 Bar if true
-//~^ WARN pattern binding `Bar` is named the same as one of the variants of the type `Foo`
+//~^ ERROR pattern binding `Bar` is named the same as one of the variants of the type `Foo`
 => println!("bar"),
             &
 Baz if false
-//~^ WARN pattern binding `Baz` is named the same as one of the variants of the type `Foo`
+//~^ ERROR pattern binding `Baz` is named the same as one of the variants of the type `Foo`
 => println!("baz"),
 _ => ()
         }
index 293430691ddcf5a7128a38c55d1c3a12364d5c27..ebbf083b7dea8d1e1fa76ab98303a7fe2d22f02e 100644 (file)
@@ -1,17 +1,17 @@
-warning[E0170]: pattern binding `Bar` is named the same as one of the variants of the type `Foo`
-  --> $DIR/issue-19100.rs:18:1
+error[E0170]: pattern binding `Bar` is named the same as one of the variants of the type `Foo`
+  --> $DIR/issue-19100.rs:17:1
    |
 LL | Bar if true
    | ^^^ help: to match on the variant, qualify the path: `Foo::Bar`
    |
-   = note: `#[warn(bindings_with_variant_name)]` on by default
+   = note: `#[deny(bindings_with_variant_name)]` on by default
 
-warning[E0170]: pattern binding `Baz` is named the same as one of the variants of the type `Foo`
-  --> $DIR/issue-19100.rs:22:1
+error[E0170]: pattern binding `Baz` is named the same as one of the variants of the type `Foo`
+  --> $DIR/issue-19100.rs:21:1
    |
 LL | Baz if false
    | ^^^ help: to match on the variant, qualify the path: `Foo::Baz`
 
-warning: 2 warnings emitted
+error: aborting due to 2 previous errors
 
 For more information about this error, try `rustc --explain E0170`.
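
The E0170 lint exercised above (bindings_with_variant_name) now rejects, rather than merely warns about, a pattern binding that shares a name with a variant; the help line shows the fix. A minimal sketch of the qualified form, independent of the test harness:

    enum Foo { Bar, Baz }

    fn describe(f: &Foo) -> &'static str {
        match f {
            // Qualified paths match the variants instead of introducing
            // fresh bindings named `Bar` and `Baz`.
            Foo::Bar => "bar",
            Foo::Baz => "baz",
        }
    }

    fn main() {
        assert_eq!(describe(&Foo::Bar), "bar");
        assert_eq!(describe(&Foo::Baz), "baz");
    }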
diff --git a/tests/ui/issues/issue-65634-raw-ident-suggestion.edition2015.stderr b/tests/ui/issues/issue-65634-raw-ident-suggestion.edition2015.stderr
new file mode 100644 (file)
index 0000000..d0cb169
--- /dev/null
@@ -0,0 +1,28 @@
+error[E0034]: multiple applicable items in scope
+  --> $DIR/issue-65634-raw-ident-suggestion.rs:24:13
+   |
+LL |     r#fn {}.r#struct();
+   |             ^^^^^^^^ multiple `r#struct` found
+   |
+note: candidate #1 is defined in an impl of the trait `async` for the type `r#fn`
+  --> $DIR/issue-65634-raw-ident-suggestion.rs:7:5
+   |
+LL |     fn r#struct(&self) {
+   |     ^^^^^^^^^^^^^^^^^^
+note: candidate #2 is defined in an impl of the trait `await` for the type `r#fn`
+  --> $DIR/issue-65634-raw-ident-suggestion.rs:13:5
+   |
+LL |     fn r#struct(&self) {
+   |     ^^^^^^^^^^^^^^^^^^
+help: disambiguate the associated function for candidate #1
+   |
+LL |     async::r#struct(&r#fn {});
+   |     ~~~~~~~~~~~~~~~~~~~~~~~~~
+help: disambiguate the associated function for candidate #2
+   |
+LL |     await::r#struct(&r#fn {});
+   |     ~~~~~~~~~~~~~~~~~~~~~~~~~
+
+error: aborting due to previous error
+
+For more information about this error, try `rustc --explain E0034`.
diff --git a/tests/ui/issues/issue-65634-raw-ident-suggestion.edition2018.stderr b/tests/ui/issues/issue-65634-raw-ident-suggestion.edition2018.stderr
new file mode 100644 (file)
index 0000000..a75c1c4
--- /dev/null
@@ -0,0 +1,28 @@
+error[E0034]: multiple applicable items in scope
+  --> $DIR/issue-65634-raw-ident-suggestion.rs:24:13
+   |
+LL |     r#fn {}.r#struct();
+   |             ^^^^^^^^ multiple `r#struct` found
+   |
+note: candidate #1 is defined in an impl of the trait `r#async` for the type `r#fn`
+  --> $DIR/issue-65634-raw-ident-suggestion.rs:7:5
+   |
+LL |     fn r#struct(&self) {
+   |     ^^^^^^^^^^^^^^^^^^
+note: candidate #2 is defined in an impl of the trait `r#await` for the type `r#fn`
+  --> $DIR/issue-65634-raw-ident-suggestion.rs:13:5
+   |
+LL |     fn r#struct(&self) {
+   |     ^^^^^^^^^^^^^^^^^^
+help: disambiguate the associated function for candidate #1
+   |
+LL |     r#async::r#struct(&r#fn {});
+   |     ~~~~~~~~~~~~~~~~~~~~~~~~~~~
+help: disambiguate the associated function for candidate #2
+   |
+LL |     r#await::r#struct(&r#fn {});
+   |     ~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+error: aborting due to previous error
+
+For more information about this error, try `rustc --explain E0034`.
index b928510258b2f4a13fb4601473ff5d2d71e9a362..03dd0340c9d69711f844224ff8ef523d847ef003 100644 (file)
@@ -1,3 +1,6 @@
+// revisions: edition2015 edition2018
+//[edition2018]edition:2018
+
 #![allow(non_camel_case_types)]
 
 trait r#async {
diff --git a/tests/ui/issues/issue-65634-raw-ident-suggestion.stderr b/tests/ui/issues/issue-65634-raw-ident-suggestion.stderr
deleted file mode 100644 (file)
index 68ccf5c..0000000
+++ /dev/null
@@ -1,28 +0,0 @@
-error[E0034]: multiple applicable items in scope
-  --> $DIR/issue-65634-raw-ident-suggestion.rs:21:13
-   |
-LL |     r#fn {}.r#struct();
-   |             ^^^^^^^^ multiple `r#struct` found
-   |
-note: candidate #1 is defined in an impl of the trait `async` for the type `fn`
-  --> $DIR/issue-65634-raw-ident-suggestion.rs:4:5
-   |
-LL |     fn r#struct(&self) {
-   |     ^^^^^^^^^^^^^^^^^^
-note: candidate #2 is defined in an impl of the trait `await` for the type `fn`
-  --> $DIR/issue-65634-raw-ident-suggestion.rs:10:5
-   |
-LL |     fn r#struct(&self) {
-   |     ^^^^^^^^^^^^^^^^^^
-help: disambiguate the associated function for candidate #1
-   |
-LL |     async::r#struct(&r#fn {});
-   |     ~~~~~~~~~~~~~~~~~~~~~~~~~
-help: disambiguate the associated function for candidate #2
-   |
-LL |     await::r#struct(&r#fn {});
-   |     ~~~~~~~~~~~~~~~~~~~~~~~~~
-
-error: aborting due to previous error
-
-For more information about this error, try `rustc --explain E0034`.
index 9d11cf19ea77c41a2f65c2c1f18e5aea41ed7f66..fc343bb54aace29f40aa5fd3cd5e87a2497de3e3 100644 (file)
@@ -1,14 +1,16 @@
-error[E0282]: type annotations needed
-  --> $DIR/issue-69455.rs:29:20
+error[E0284]: type annotations needed
+  --> $DIR/issue-69455.rs:29:41
    |
 LL |     println!("{}", 23u64.test(xs.iter().sum()));
-   |                    ^^^^^^^^^^^^^^^^^^^^^^^^^^^ cannot infer type of the type parameter `T` declared on the associated function `new_display`
+   |                          ----           ^^^ cannot infer type of the type parameter `S` declared on the associated function `sum`
+   |                          |
+   |                          type must be known at this point
    |
-   = note: this error originates in the macro `$crate::format_args_nl` which comes from the expansion of the macro `println` (in Nightly builds, run with -Z macro-backtrace for more info)
+   = note: cannot satisfy `<u64 as Test<_>>::Output == _`
 help: consider specifying the generic argument
    |
-LL |     println!("{}", 23u64.test(xs.iter().sum())::<T>);
-   |                                               +++++
+LL |     println!("{}", 23u64.test(xs.iter().sum::<S>()));
+   |                                            +++++
 
 error[E0283]: type annotations needed
   --> $DIR/issue-69455.rs:29:41
@@ -33,5 +35,5 @@ LL |     println!("{}", 23u64.test(xs.iter().sum::<S>()));
 
 error: aborting due to 2 previous errors
 
-Some errors have detailed explanations: E0282, E0283.
-For more information about an error, try `rustc --explain E0282`.
+Some errors have detailed explanations: E0283, E0284.
+For more information about an error, try `rustc --explain E0283`.
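
The updated diagnostic above now points at the `sum` call itself and suggests a turbofish; a minimal sketch of that pattern with plain `Iterator::sum`, leaving out the test's custom `Test` trait:

    fn main() {
        let xs = [1u64, 2, 3];
        // Pinning the output type with a turbofish resolves the inference
        // variable that the "type annotations needed" error complains about.
        let total = xs.iter().sum::<u64>();
        println!("{total}");
    }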
diff --git a/tests/ui/lifetimes/issue-105507.fixed b/tests/ui/lifetimes/issue-105507.fixed
new file mode 100644 (file)
index 0000000..277ce8a
--- /dev/null
@@ -0,0 +1,43 @@
+// run-rustfix
+//
+#![allow(warnings)]
+struct Wrapper<'a, T: ?Sized>(&'a T);
+
+trait Project {
+    type Projected<'a> where Self: 'a;
+    fn project(this: Wrapper<'_, Self>) -> Self::Projected<'_>;
+}
+trait MyTrait {}
+trait ProjectedMyTrait {}
+
+impl<T> Project for Option<T> {
+    type Projected<'a> = Option<Wrapper<'a, T>> where T: 'a;
+    fn project(this: Wrapper<'_, Self>) -> Self::Projected<'_> {
+        this.0.as_ref().map(Wrapper)
+    }
+}
+
+impl<T: MyTrait> MyTrait for Option<Wrapper<'_, T>> {}
+
+impl<T: ProjectedMyTrait> MyTrait for Wrapper<'_, T> {}
+
+impl<T> ProjectedMyTrait for T
+    where
+        T: Project,
+        for<'a> T::Projected<'a>: MyTrait,
+        //~^ NOTE due to current limitations in the borrow checker, this implies a `'static` lifetime
+        //~| NOTE due to current limitations in the borrow checker, this implies a `'static` lifetime
+{}
+
+fn require_trait<T: MyTrait>(_: T) {}
+
+fn foo<T : MyTrait + 'static + 'static, U : MyTrait + 'static + 'static>(wrap: Wrapper<'_, Option<T>>, wrap1: Wrapper<'_, Option<U>>) {
+    //~^ HELP consider restricting the type parameter to the `'static` lifetime
+    //~| HELP consider restricting the type parameter to the `'static` lifetime
+    require_trait(wrap);
+    //~^ ERROR `T` does not live long enough
+    require_trait(wrap1);
+    //~^ ERROR `U` does not live long enough
+}
+
+fn main() {}
diff --git a/tests/ui/lifetimes/issue-105507.rs b/tests/ui/lifetimes/issue-105507.rs
new file mode 100644 (file)
index 0000000..f46c6b6
--- /dev/null
@@ -0,0 +1,43 @@
+// run-rustfix
+//
+#![allow(warnings)]
+struct Wrapper<'a, T: ?Sized>(&'a T);
+
+trait Project {
+    type Projected<'a> where Self: 'a;
+    fn project(this: Wrapper<'_, Self>) -> Self::Projected<'_>;
+}
+trait MyTrait {}
+trait ProjectedMyTrait {}
+
+impl<T> Project for Option<T> {
+    type Projected<'a> = Option<Wrapper<'a, T>> where T: 'a;
+    fn project(this: Wrapper<'_, Self>) -> Self::Projected<'_> {
+        this.0.as_ref().map(Wrapper)
+    }
+}
+
+impl<T: MyTrait> MyTrait for Option<Wrapper<'_, T>> {}
+
+impl<T: ProjectedMyTrait> MyTrait for Wrapper<'_, T> {}
+
+impl<T> ProjectedMyTrait for T
+    where
+        T: Project,
+        for<'a> T::Projected<'a>: MyTrait,
+        //~^ NOTE due to current limitations in the borrow checker, this implies a `'static` lifetime
+        //~| NOTE due to current limitations in the borrow checker, this implies a `'static` lifetime
+{}
+
+fn require_trait<T: MyTrait>(_: T) {}
+
+fn foo<T : MyTrait, U : MyTrait>(wrap: Wrapper<'_, Option<T>>, wrap1: Wrapper<'_, Option<U>>) {
+    //~^ HELP consider restricting the type parameter to the `'static` lifetime
+    //~| HELP consider restricting the type parameter to the `'static` lifetime
+    require_trait(wrap);
+    //~^ ERROR `T` does not live long enough
+    require_trait(wrap1);
+    //~^ ERROR `U` does not live long enough
+}
+
+fn main() {}
diff --git a/tests/ui/lifetimes/issue-105507.stderr b/tests/ui/lifetimes/issue-105507.stderr
new file mode 100644 (file)
index 0000000..44d3a7e
--- /dev/null
@@ -0,0 +1,34 @@
+error: `T` does not live long enough
+  --> $DIR/issue-105507.rs:37:5
+   |
+LL |     require_trait(wrap);
+   |     ^^^^^^^^^^^^^^^^^^^
+   |
+note: due to current limitations in the borrow checker, this implies a `'static` lifetime
+  --> $DIR/issue-105507.rs:27:35
+   |
+LL |         for<'a> T::Projected<'a>: MyTrait,
+   |                                   ^^^^^^^
+help: consider restricting the type parameter to the `'static` lifetime
+   |
+LL | fn foo<T : MyTrait + 'static, U : MyTrait + 'static>(wrap: Wrapper<'_, Option<T>>, wrap1: Wrapper<'_, Option<U>>) {
+   |                    +++++++++              +++++++++
+
+error: `U` does not live long enough
+  --> $DIR/issue-105507.rs:39:5
+   |
+LL |     require_trait(wrap1);
+   |     ^^^^^^^^^^^^^^^^^^^^
+   |
+note: due to current limitations in the borrow checker, this implies a `'static` lifetime
+  --> $DIR/issue-105507.rs:27:35
+   |
+LL |         for<'a> T::Projected<'a>: MyTrait,
+   |                                   ^^^^^^^
+help: consider restricting the type parameter to the `'static` lifetime
+   |
+LL | fn foo<T : MyTrait + 'static, U : MyTrait + 'static>(wrap: Wrapper<'_, Option<T>>, wrap1: Wrapper<'_, Option<U>>) {
+   |                    +++++++++              +++++++++
+
+error: aborting due to 2 previous errors
+
index 133637f9a058be0ebadf393bc84a213d5b20bec9..0d79fc0c770698246470e96fbf2291491cd712bd 100644 (file)
@@ -1,4 +1,4 @@
-error: values of the type `[usize; 4294967295]` are too big for the current architecture
+error: values of the type `[usize; usize::MAX]` are too big for the current architecture
   --> $DIR/issue-15919-32.rs:9:9
    |
 LL |     let x = [0usize; 0xffff_ffff];
index 193b823035c09951f1dd62db96258251e5c8c5ee..3399d644ede3ae8fe28517d997af8e8368b42b55 100644 (file)
@@ -1,4 +1,4 @@
-error: values of the type `[usize; 18446744073709551615]` are too big for the current architecture
+error: values of the type `[usize; usize::MAX]` are too big for the current architecture
   --> $DIR/issue-15919-64.rs:9:9
    |
 LL |     let x = [0usize; 0xffff_ffff_ffff_ffff];
index 8d4cbe201846170474309da211fa78bddf51ae86..56cf5d831bd7e7491d8b75baacd8c05a3b7a5b1e 100644 (file)
@@ -1,5 +1,5 @@
 // build-fail
-// normalize-stderr-test "\[&usize; \d+\]" -> "[&usize; N]"
+// normalize-stderr-test "\[&usize; \d+\]" -> "[&usize; usize::MAX]"
 // error-pattern: too big for the current architecture
 
 // FIXME https://github.com/rust-lang/rust/issues/59774
index 9a6431d44700492cb0ecdb9b63bb9b7bfc4506c6..684db53a9190988b8ab31b73ef7a893fe67ec5ae 100644 (file)
@@ -1,4 +1,4 @@
-error: values of the type `[&usize; N]` are too big for the current architecture
+error: values of the type `[&usize; usize::MAX]` are too big for the current architecture
 
 error: aborting due to previous error
 
index f455dcb06f79dc97aa9e3072699154a3bb8121e5..99f1fdf755aa27c1406e9f48f2f96c90565e6e5e 100644 (file)
@@ -1,7 +1,7 @@
-error[E0080]: values of the type `[u8; SIZE]` are too big for the current architecture
+error[E0080]: values of the type `[u8; usize::MAX]` are too big for the current architecture
   --> $SRC_DIR/core/src/mem/mod.rs:LL:COL
    |
-note: inside `std::mem::size_of::<[u8; SIZE]>`
+note: inside `std::mem::size_of::<[u8; usize::MAX]>`
   --> $SRC_DIR/core/src/mem/mod.rs:LL:COL
 note: inside `main`
   --> $DIR/issue-55878.rs:7:26
index f7923bd47439f114235b3f16846ae7a56c24e56b..44b2be269494aa94a952ee24195d28adda0823f0 100644 (file)
@@ -1,4 +1,4 @@
-error: values of the type `[u8; 18446744073709551615]` are too big for the current architecture
+error: values of the type `[u8; usize::MAX]` are too big for the current architecture
   --> $DIR/issue-69485-var-size-diffs-too-large.rs:6:5
    |
 LL |     Bug::V([0; !0]);
index dc11d056154273a79626566e0d6d99f4c5a54b07..d5991bcf5693df726787ee5cb4f2ed4005fd4cf2 100644 (file)
@@ -1,4 +1,4 @@
-error: values of the type `[u8; 18446744073709551615]` are too big for the current architecture
+error: values of the type `[u8; usize::MAX]` are too big for the current architecture
 
 error: aborting due to previous error
 
index c37d4f29d10e36bd144a2d7c962179f01a2d8b8e..5eccb8cd5d8d2cc59ea45992eac2ddd530285a9e 100644 (file)
@@ -11,7 +11,7 @@ enum Stack<T> {
 fn is_empty<T>(s: Stack<T>) -> bool {
     match s {
         Nil => true,
-//~^ WARN pattern binding `Nil` is named the same as one of the variants of the type `Stack`
+//~^ ERROR pattern binding `Nil` is named the same as one of the variants of the type `Stack`
         _ => false
 //~^ ERROR unreachable pattern
     }
index 849ff1ebd9236183243008893688da508441acfa..baf6c0d7a59d84da505cd17ade601b2efb33496e 100644 (file)
@@ -1,10 +1,10 @@
-warning[E0170]: pattern binding `Nil` is named the same as one of the variants of the type `Stack`
+error[E0170]: pattern binding `Nil` is named the same as one of the variants of the type `Stack`
   --> $DIR/issue-30302.rs:13:9
    |
 LL |         Nil => true,
    |         ^^^ help: to match on the variant, qualify the path: `Stack::Nil`
    |
-   = note: `#[warn(bindings_with_variant_name)]` on by default
+   = note: `#[deny(bindings_with_variant_name)]` on by default
 
 error: unreachable pattern
   --> $DIR/issue-30302.rs:15:9
@@ -21,6 +21,6 @@ note: the lint level is defined here
 LL | #![deny(unreachable_patterns)]
    |         ^^^^^^^^^^^^^^^^^^^^
 
-error: aborting due to previous error; 1 warning emitted
+error: aborting due to 2 previous errors
 
 For more information about this error, try `rustc --explain E0170`.
index d4e88aa26436162160ff0ccc02e3b169e9b4de17..59dba536f24b61bf5635cad404d4ba8695bbedbf 100644 (file)
@@ -21,18 +21,18 @@ fn main() {
     match foo::Foo::Foo {
         Foo => {}
     //~^ ERROR variable `Foo` should have a snake case name
-    //~^^ WARN `Foo` is named the same as one of the variants of the type `foo::Foo`
+    //~^^ ERROR `Foo` is named the same as one of the variants of the type `foo::Foo`
     //~^^^ WARN unused variable: `Foo`
     }
 
     let Foo = foo::Foo::Foo;
     //~^ ERROR variable `Foo` should have a snake case name
-    //~^^ WARN `Foo` is named the same as one of the variants of the type `foo::Foo`
+    //~^^ ERROR `Foo` is named the same as one of the variants of the type `foo::Foo`
     //~^^^ WARN unused variable: `Foo`
 
     fn in_param(Foo: foo::Foo) {}
     //~^ ERROR variable `Foo` should have a snake case name
-    //~^^ WARN `Foo` is named the same as one of the variants of the type `foo::Foo`
+    //~^^ ERROR `Foo` is named the same as one of the variants of the type `foo::Foo`
     //~^^^ WARN unused variable: `Foo`
 
     test(1);
index d476d856e24c59b060ccdd85be6ff1c681e4180a..42ec9364bc6e68f973295fb9258b34da958bc89d 100644 (file)
@@ -1,18 +1,18 @@
-warning[E0170]: pattern binding `Foo` is named the same as one of the variants of the type `foo::Foo`
+error[E0170]: pattern binding `Foo` is named the same as one of the variants of the type `foo::Foo`
   --> $DIR/lint-uppercase-variables.rs:22:9
    |
 LL |         Foo => {}
    |         ^^^ help: to match on the variant, qualify the path: `foo::Foo::Foo`
    |
-   = note: `#[warn(bindings_with_variant_name)]` on by default
+   = note: `#[deny(bindings_with_variant_name)]` on by default
 
-warning[E0170]: pattern binding `Foo` is named the same as one of the variants of the type `foo::Foo`
+error[E0170]: pattern binding `Foo` is named the same as one of the variants of the type `foo::Foo`
   --> $DIR/lint-uppercase-variables.rs:28:9
    |
 LL |     let Foo = foo::Foo::Foo;
    |         ^^^ help: to match on the variant, qualify the path: `foo::Foo::Foo`
 
-warning[E0170]: pattern binding `Foo` is named the same as one of the variants of the type `foo::Foo`
+error[E0170]: pattern binding `Foo` is named the same as one of the variants of the type `foo::Foo`
   --> $DIR/lint-uppercase-variables.rs:33:17
    |
 LL |     fn in_param(Foo: foo::Foo) {}
@@ -85,6 +85,6 @@ error: variable `Foo` should have a snake case name
 LL |     fn in_param(Foo: foo::Foo) {}
    |                 ^^^ help: convert the identifier to snake case (notice the capitalization): `foo`
 
-error: aborting due to 6 previous errors; 6 warnings emitted
+error: aborting due to 9 previous errors; 3 warnings emitted
 
 For more information about this error, try `rustc --explain E0170`.
index 49608c20524d3a59d73b3463b06b647109b2a143..c60120061643da4f595aef35afbc4279bb0d0693 100644 (file)
@@ -46,3 +46,140 @@ LL |     let _ = #[allow(semicolon_in_expressions_from_macros)] foo!(allow_does_
 
 warning: 3 warnings emitted
 
+Future incompatibility report: Future breakage diagnostic:
+warning: trailing semicolon in macro used in expression position
+  --> $DIR/semicolon-in-expressions-from-macros.rs:9:13
+   |
+LL |         true;
+   |             ^
+...
+LL |         foo!(first)
+   |         ----------- in this macro invocation
+   |
+   = warning: this was previously accepted by the compiler but is being phased out; it will become a hard error in a future release!
+   = note: for more information, see issue #79813 <https://github.com/rust-lang/rust/issues/79813>
+   = note: macro invocations at the end of a block are treated as expressions
+   = note: to ignore the value produced by the macro, add a semicolon after the invocation of `foo`
+note: the lint level is defined here
+  --> $DIR/semicolon-in-expressions-from-macros.rs:24:13
+   |
+LL |     #[allow(semicolon_in_expressions_from_macros)]
+   |             ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+   = note: this warning originates in the macro `foo` (in Nightly builds, run with -Z macro-backtrace for more info)
+
+Future breakage diagnostic:
+warning: trailing semicolon in macro used in expression position
+  --> $DIR/semicolon-in-expressions-from-macros.rs:9:13
+   |
+LL |         true;
+   |             ^
+...
+LL |     let _ = foo!(second);
+   |             ------------ in this macro invocation
+   |
+   = warning: this was previously accepted by the compiler but is being phased out; it will become a hard error in a future release!
+   = note: for more information, see issue #79813 <https://github.com/rust-lang/rust/issues/79813>
+note: the lint level is defined here
+  --> $DIR/semicolon-in-expressions-from-macros.rs:29:13
+   |
+LL |     #[allow(semicolon_in_expressions_from_macros)]
+   |             ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+   = note: this warning originates in the macro `foo` (in Nightly builds, run with -Z macro-backtrace for more info)
+
+Future breakage diagnostic:
+warning: trailing semicolon in macro used in expression position
+  --> $DIR/semicolon-in-expressions-from-macros.rs:9:13
+   |
+LL |         true;
+   |             ^
+...
+LL |         let _ = foo!(third);
+   |                 ----------- in this macro invocation
+   |
+   = warning: this was previously accepted by the compiler but is being phased out; it will become a hard error in a future release!
+   = note: for more information, see issue #79813 <https://github.com/rust-lang/rust/issues/79813>
+note: the lint level is defined here
+  --> $DIR/semicolon-in-expressions-from-macros.rs:32:13
+   |
+LL |     #[allow(semicolon_in_expressions_from_macros)]
+   |             ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+   = note: this warning originates in the macro `foo` (in Nightly builds, run with -Z macro-backtrace for more info)
+
+Future breakage diagnostic:
+warning: trailing semicolon in macro used in expression position
+  --> $DIR/semicolon-in-expressions-from-macros.rs:9:13
+   |
+LL |         true;
+   |             ^
+...
+LL |         let _ = foo!(fourth);
+   |                 ------------ in this macro invocation
+   |
+   = warning: this was previously accepted by the compiler but is being phased out; it will become a hard error in a future release!
+   = note: for more information, see issue #79813 <https://github.com/rust-lang/rust/issues/79813>
+note: the lint level is defined here
+  --> $DIR/semicolon-in-expressions-from-macros.rs:37:13
+   |
+LL |     #[allow(semicolon_in_expressions_from_macros)]
+   |             ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+   = note: this warning originates in the macro `foo` (in Nightly builds, run with -Z macro-backtrace for more info)
+
+Future breakage diagnostic:
+warning: trailing semicolon in macro used in expression position
+  --> $DIR/semicolon-in-expressions-from-macros.rs:9:13
+   |
+LL |         true;
+   |             ^
+...
+LL |         foo!(warn_in_block)
+   |         ------------------- in this macro invocation
+   |
+   = warning: this was previously accepted by the compiler but is being phased out; it will become a hard error in a future release!
+   = note: for more information, see issue #79813 <https://github.com/rust-lang/rust/issues/79813>
+   = note: macro invocations at the end of a block are treated as expressions
+   = note: to ignore the value produced by the macro, add a semicolon after the invocation of `foo`
+note: the lint level is defined here
+  --> $DIR/semicolon-in-expressions-from-macros.rs:4:9
+   |
+LL | #![warn(semicolon_in_expressions_from_macros)]
+   |         ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+   = note: this warning originates in the macro `foo` (in Nightly builds, run with -Z macro-backtrace for more info)
+
+Future breakage diagnostic:
+warning: trailing semicolon in macro used in expression position
+  --> $DIR/semicolon-in-expressions-from-macros.rs:9:13
+   |
+LL |         true;
+   |             ^
+...
+LL |     let _ = foo!(warn_in_expr);
+   |             ------------------ in this macro invocation
+   |
+   = warning: this was previously accepted by the compiler but is being phased out; it will become a hard error in a future release!
+   = note: for more information, see issue #79813 <https://github.com/rust-lang/rust/issues/79813>
+note: the lint level is defined here
+  --> $DIR/semicolon-in-expressions-from-macros.rs:4:9
+   |
+LL | #![warn(semicolon_in_expressions_from_macros)]
+   |         ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+   = note: this warning originates in the macro `foo` (in Nightly builds, run with -Z macro-backtrace for more info)
+
+Future breakage diagnostic:
+warning: trailing semicolon in macro used in expression position
+  --> $DIR/semicolon-in-expressions-from-macros.rs:9:13
+   |
+LL |         true;
+   |             ^
+...
+LL |     let _ = #[allow(semicolon_in_expressions_from_macros)] foo!(allow_does_not_work);
+   |                                                            ------------------------- in this macro invocation
+   |
+   = warning: this was previously accepted by the compiler but is being phased out; it will become a hard error in a future release!
+   = note: for more information, see issue #79813 <https://github.com/rust-lang/rust/issues/79813>
+note: the lint level is defined here
+  --> $DIR/semicolon-in-expressions-from-macros.rs:4:9
+   |
+LL | #![warn(semicolon_in_expressions_from_macros)]
+   |         ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+   = note: this warning originates in the macro `foo` (in Nightly builds, run with -Z macro-backtrace for more info)
+
index 16c152eb23c2d7f12a05b99db2d55d09b9ad3e8d..0fec4996f1a0ae16dbce4848a7f7ec446730170f 100644 (file)
@@ -14,3 +14,18 @@ LL |         _ => foo!()
 
 warning: 1 warning emitted
 
+Future incompatibility report: Future breakage diagnostic:
+warning: trailing semicolon in macro used in expression position
+  --> $DIR/warn-semicolon-in-expressions-from-macros.rs:6:13
+   |
+LL |         true;
+   |             ^
+...
+LL |         _ => foo!()
+   |              ------ in this macro invocation
+   |
+   = warning: this was previously accepted by the compiler but is being phased out; it will become a hard error in a future release!
+   = note: for more information, see issue #79813 <https://github.com/rust-lang/rust/issues/79813>
+   = note: `#[warn(semicolon_in_expressions_from_macros)]` on by default
+   = note: this warning originates in the macro `foo` (in Nightly builds, run with -Z macro-backtrace for more info)
+
diff --git a/tests/ui/lint/unused/issue-105061-array-lint.rs b/tests/ui/lint/unused/issue-105061-array-lint.rs
new file mode 100644 (file)
index 0000000..9b06a4f
--- /dev/null
@@ -0,0 +1,11 @@
+#![warn(unused)]
+#![deny(warnings)]
+
+fn main() {
+    let _x: ([u32; 3]); //~ ERROR unnecessary parentheses around type
+    let _y: [u8; (3)]; //~ ERROR unnecessary parentheses around const expression
+    let _z: ([u8; (3)]);
+    //~^ ERROR unnecessary parentheses around const expression
+    //~| ERROR unnecessary parentheses around type
+
+}
diff --git a/tests/ui/lint/unused/issue-105061-array-lint.stderr b/tests/ui/lint/unused/issue-105061-array-lint.stderr
new file mode 100644 (file)
index 0000000..7eb761a
--- /dev/null
@@ -0,0 +1,56 @@
+error: unnecessary parentheses around type
+  --> $DIR/issue-105061-array-lint.rs:5:13
+   |
+LL |     let _x: ([u32; 3]);
+   |             ^        ^
+   |
+note: the lint level is defined here
+  --> $DIR/issue-105061-array-lint.rs:2:9
+   |
+LL | #![deny(warnings)]
+   |         ^^^^^^^^
+   = note: `#[deny(unused_parens)]` implied by `#[deny(warnings)]`
+help: remove these parentheses
+   |
+LL -     let _x: ([u32; 3]);
+LL +     let _x: [u32; 3];
+   |
+
+error: unnecessary parentheses around const expression
+  --> $DIR/issue-105061-array-lint.rs:6:18
+   |
+LL |     let _y: [u8; (3)];
+   |                  ^ ^
+   |
+help: remove these parentheses
+   |
+LL -     let _y: [u8; (3)];
+LL +     let _y: [u8; 3];
+   |
+
+error: unnecessary parentheses around type
+  --> $DIR/issue-105061-array-lint.rs:7:13
+   |
+LL |     let _z: ([u8; (3)]);
+   |             ^         ^
+   |
+help: remove these parentheses
+   |
+LL -     let _z: ([u8; (3)]);
+LL +     let _z: [u8; (3)];
+   |
+
+error: unnecessary parentheses around const expression
+  --> $DIR/issue-105061-array-lint.rs:7:19
+   |
+LL |     let _z: ([u8; (3)]);
+   |                   ^ ^
+   |
+help: remove these parentheses
+   |
+LL -     let _z: ([u8; (3)]);
+LL +     let _z: ([u8; 3]);
+   |
+
+error: aborting due to 4 previous errors
+
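
The new test and .stderr above cover unused_parens firing on parenthesized array types and const expressions; a minimal sketch of the unparenthesized forms the suggestions produce:

    fn main() {
        // No parentheses are needed around the array type or its length.
        let _x: [u32; 3] = [1, 2, 3];
        let _y: [u8; 3] = [0; 3];
    }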
diff --git a/tests/ui/lint/unused/issue-105061-should-lint.rs b/tests/ui/lint/unused/issue-105061-should-lint.rs
new file mode 100644 (file)
index 0000000..7e4e094
--- /dev/null
@@ -0,0 +1,23 @@
+#![warn(unused)]
+#![deny(warnings)]
+
+struct Inv<'a>(&'a mut &'a ());
+
+trait Trait<'a> {}
+impl<'b> Trait<'b> for for<'a> fn(Inv<'a>) {}
+
+fn with_bound()
+where
+    for<'b> (for<'a> fn(Inv<'a>)): Trait<'b>, //~ ERROR unnecessary parentheses around type
+{}
+
+trait Hello<T> {}
+fn with_dyn_bound<T>()
+where
+    (dyn Hello<(for<'b> fn(&'b ()))>): Hello<T> //~ ERROR unnecessary parentheses around type
+{}
+
+fn main() {
+    with_bound();
+    with_dyn_bound();
+}
diff --git a/tests/ui/lint/unused/issue-105061-should-lint.stderr b/tests/ui/lint/unused/issue-105061-should-lint.stderr
new file mode 100644 (file)
index 0000000..e591f1f
--- /dev/null
@@ -0,0 +1,32 @@
+error: unnecessary parentheses around type
+  --> $DIR/issue-105061-should-lint.rs:11:13
+   |
+LL |     for<'b> (for<'a> fn(Inv<'a>)): Trait<'b>,
+   |             ^                   ^
+   |
+note: the lint level is defined here
+  --> $DIR/issue-105061-should-lint.rs:2:9
+   |
+LL | #![deny(warnings)]
+   |         ^^^^^^^^
+   = note: `#[deny(unused_parens)]` implied by `#[deny(warnings)]`
+help: remove these parentheses
+   |
+LL -     for<'b> (for<'a> fn(Inv<'a>)): Trait<'b>,
+LL +     for<'b> for<'a> fn(Inv<'a>): Trait<'b>,
+   |
+
+error: unnecessary parentheses around type
+  --> $DIR/issue-105061-should-lint.rs:17:16
+   |
+LL |     (dyn Hello<(for<'b> fn(&'b ()))>): Hello<T>
+   |                ^                  ^
+   |
+help: remove these parentheses
+   |
+LL -     (dyn Hello<(for<'b> fn(&'b ()))>): Hello<T>
+LL +     (dyn Hello<for<'b> fn(&'b ())>): Hello<T>
+   |
+
+error: aborting due to 2 previous errors
+
diff --git a/tests/ui/lint/unused/issue-105061.rs b/tests/ui/lint/unused/issue-105061.rs
new file mode 100644 (file)
index 0000000..92d636d
--- /dev/null
@@ -0,0 +1,17 @@
+#![warn(unused)]
+#![deny(warnings)]
+
+struct Inv<'a>(&'a mut &'a ());
+
+trait Trait {}
+impl Trait for (for<'a> fn(Inv<'a>),) {}
+
+
+fn with_bound()
+where
+    ((for<'a> fn(Inv<'a>)),): Trait, //~ ERROR unnecessary parentheses around type
+{}
+
+fn main() {
+    with_bound();
+}
diff --git a/tests/ui/lint/unused/issue-105061.stderr b/tests/ui/lint/unused/issue-105061.stderr
new file mode 100644 (file)
index 0000000..f07aa20
--- /dev/null
@@ -0,0 +1,20 @@
+error: unnecessary parentheses around type
+  --> $DIR/issue-105061.rs:12:6
+   |
+LL |     ((for<'a> fn(Inv<'a>)),): Trait,
+   |      ^                   ^
+   |
+note: the lint level is defined here
+  --> $DIR/issue-105061.rs:2:9
+   |
+LL | #![deny(warnings)]
+   |         ^^^^^^^^
+   = note: `#[deny(unused_parens)]` implied by `#[deny(warnings)]`
+help: remove these parentheses
+   |
+LL -     ((for<'a> fn(Inv<'a>)),): Trait,
+LL +     (for<'a> fn(Inv<'a>),): Trait,
+   |
+
+error: aborting due to previous error
+
index 1a88d985dd86a6438bf0b6847895a8255c24d5fa..e691fb37e6c43e51341367474a4fe12ac274edd0 100644 (file)
@@ -50,4 +50,8 @@ fn main() {
     if { return } {
 
     }
+
+    // regression test for https://github.com/rust-lang/rust/issues/106899
+    return println!("!");
+    //~^ WARN unnecessary braces
 }
index 5ca4811fc32d8342f523d5af85ea902dcf852510..0d260d2cbc93f5d82dcc96f0c1fee67871bbf273 100644 (file)
@@ -50,4 +50,8 @@ fn main() {
     if { return } {
 
     }
+
+    // regression test for https://github.com/rust-lang/rust/issues/106899
+    return { println!("!") };
+    //~^ WARN unnecessary braces
 }
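
Both regression tests above end with a `return` whose value is needlessly braced in the unfixed version; a minimal sketch of the shape the unused_braces lint accepts:

    fn main() {
        // `return` takes the expression directly; wrapping it in braces
        // trips the unused_braces warning shown in the .stderr below.
        return println!("!");
    }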
index 7773f44ea2d38c0860855afe26fd1cc50f0643a8..0b4a1c321805ddea51a8b4261d5794fd89b38efc 100644 (file)
@@ -68,5 +68,17 @@ LL -     consume({ 7 });
 LL +     consume(7);
    |
 
-warning: 5 warnings emitted
+warning: unnecessary braces around `return` value
+  --> $DIR/unused_braces.rs:55:12
+   |
+LL |     return { println!("!") };
+   |            ^^             ^^
+   |
+help: remove these braces
+   |
+LL -     return { println!("!") };
+LL +     return println!("!");
+   |
+
+warning: 6 warnings emitted
 
index 306c08b13573ea5ab1742f69f2e86c49b85d6934..29ccd17e06999abf8a20e0c01dd8eb5775d75e1d 100644 (file)
@@ -18,3 +18,22 @@ LL | #![deny(semicolon_in_expressions_from_macros)]
 
 error: aborting due to previous error
 
+Future incompatibility report: Future breakage diagnostic:
+error: trailing semicolon in macro used in expression position
+  --> $DIR/issue-84195-lint-anon-const.rs:8:14
+   |
+LL |     () => { 0; };
+   |              ^
+...
+LL |     let val: [u8; len!()] = [];
+   |                   ------ in this macro invocation
+   |
+   = warning: this was previously accepted by the compiler but is being phased out; it will become a hard error in a future release!
+   = note: for more information, see issue #79813 <https://github.com/rust-lang/rust/issues/79813>
+note: the lint level is defined here
+  --> $DIR/issue-84195-lint-anon-const.rs:5:9
+   |
+LL | #![deny(semicolon_in_expressions_from_macros)]
+   |         ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+   = note: this error originates in the macro `len` (in Nightly builds, run with -Z macro-backtrace for more info)
+
index cbdef5f0d40a94eb2d4a705ffe70303bda06136a..60ba2eab7a7bfbe8dbfb6f62383a68a2842482da 100644 (file)
@@ -8,7 +8,7 @@ mod hey {
 
 #[derive(Bla)]
 //~^ ERROR cannot find derive macro `Bla`
-//~| NOTE consider importing this derive macro
+//~| HELP consider importing this derive macro
 struct A;
 
 #[derive(println)]
@@ -19,5 +19,5 @@ mod hey {
 fn main() {
     bla!();
     //~^ ERROR cannot find macro `bla`
-    //~| NOTE consider importing this macro
+    //~| HELP consider importing this macro
 }
index 62afa67a783c9404c4288da55d8287035f1ad5b1..fe8a1deaedd77e2f2029b333344f6bc30b258697 100644 (file)
@@ -4,7 +4,7 @@ error: cannot find macro `bla` in this scope
 LL |     bla!();
    |     ^^^
    |
-   = note: consider importing this macro:
+   = help: consider importing this macro:
            crate::hey::bla
 
 error: cannot find derive macro `println` in this scope
@@ -21,7 +21,7 @@ error: cannot find derive macro `Bla` in this scope
 LL | #[derive(Bla)]
    |          ^^^
    |
-   = note: consider importing this derive macro:
+   = help: consider importing this derive macro:
            crate::hey::Bla
 
 error: aborting due to 3 previous errors
index 6ab121f7c06c612605e495221b3f79ebfc6907ac..13cecc3a31d233cb662a6f6a1b62596452fd8a5b 100644 (file)
@@ -16,3 +16,20 @@ LL |     expand_it!()
 
 warning: 1 warning emitted
 
+Future incompatibility report: Future breakage diagnostic:
+warning: trailing semicolon in macro used in expression position
+  --> $DIR/lint-trailing-macro-call.rs:9:25
+   |
+LL |         #[cfg(FALSE)] 25;
+   |                         ^
+...
+LL |     expand_it!()
+   |     ------------ in this macro invocation
+   |
+   = warning: this was previously accepted by the compiler but is being phased out; it will become a hard error in a future release!
+   = note: for more information, see issue #79813 <https://github.com/rust-lang/rust/issues/79813>
+   = note: macro invocations at the end of a block are treated as expressions
+   = note: to ignore the value produced by the macro, add a semicolon after the invocation of `expand_it`
+   = note: `#[warn(semicolon_in_expressions_from_macros)]` on by default
+   = note: this warning originates in the macro `expand_it` (in Nightly builds, run with -Z macro-backtrace for more info)
+
index f597c398b7c17f87d5ba9dcc7cfe1a16abb9dc31..7785f415946277401d7cc61e4d8b0b11af93d987 100644 (file)
@@ -82,3 +82,18 @@ error: aborting due to 6 previous errors; 1 warning emitted
 
 Some errors have detailed explanations: E0412, E0425.
 For more information about an error, try `rustc --explain E0412`.
+Future incompatibility report: Future breakage diagnostic:
+warning: trailing semicolon in macro used in expression position
+  --> $DIR/macro-context.rs:3:15
+   |
+LL |     () => ( i ; typeof );
+   |               ^
+...
+LL |     let i = m!();
+   |             ---- in this macro invocation
+   |
+   = warning: this was previously accepted by the compiler but is being phased out; it will become a hard error in a future release!
+   = note: for more information, see issue #79813 <https://github.com/rust-lang/rust/issues/79813>
+   = note: `#[warn(semicolon_in_expressions_from_macros)]` on by default
+   = note: this warning originates in the macro `m` (in Nightly builds, run with -Z macro-backtrace for more info)
+
index 36aba8aa08a0b18a95c35a0187a8c569e2e55f8c..3f492b141a5f5949875f5c9f33d728ab2c83ddca 100644 (file)
@@ -31,3 +31,20 @@ LL |     foo!()
 
 error: aborting due to previous error; 1 warning emitted
 
+Future incompatibility report: Future breakage diagnostic:
+warning: trailing semicolon in macro used in expression position
+  --> $DIR/macro-in-expression-context.rs:5:29
+   |
+LL |         assert_eq!("A", "A");
+   |                             ^
+...
+LL |     foo!()
+   |     ------ in this macro invocation
+   |
+   = warning: this was previously accepted by the compiler but is being phased out; it will become a hard error in a future release!
+   = note: for more information, see issue #79813 <https://github.com/rust-lang/rust/issues/79813>
+   = note: macro invocations at the end of a block are treated as expressions
+   = note: to ignore the value produced by the macro, add a semicolon after the invocation of `foo`
+   = note: `#[warn(semicolon_in_expressions_from_macros)]` on by default
+   = note: this warning originates in the macro `foo` (in Nightly builds, run with -Z macro-backtrace for more info)
+
index 326001fc15a9a0b1a6386109bad67daa8a3cc07c..ca5f0f190e8ba445c34214361d8b5d9ab54b762c 100644 (file)
@@ -9,7 +9,7 @@ LL |     macro_two!();
 LL | macro_rules! macro_one { () => ("one") }
    | ---------------------- similarly named macro `macro_one` defined here
    |
-   = note: consider importing this macro:
+   = help: consider importing this macro:
            two_macros::macro_two
 
 error: aborting due to previous error
diff --git a/tests/ui/methods/method-not-found-but-doc-alias.rs b/tests/ui/methods/method-not-found-but-doc-alias.rs
new file mode 100644 (file)
index 0000000..9c6d100
--- /dev/null
@@ -0,0 +1,11 @@
+struct Foo;
+
+impl Foo {
+    #[doc(alias = "quux")]
+    fn bar(&self) {}
+}
+
+fn main() {
+    Foo.quux();
+    //~^ ERROR  no method named `quux` found for struct `Foo` in the current scope
+}
diff --git a/tests/ui/methods/method-not-found-but-doc-alias.stderr b/tests/ui/methods/method-not-found-but-doc-alias.stderr
new file mode 100644 (file)
index 0000000..5102a45
--- /dev/null
@@ -0,0 +1,12 @@
+error[E0599]: no method named `quux` found for struct `Foo` in the current scope
+  --> $DIR/method-not-found-but-doc-alias.rs:9:9
+   |
+LL | struct Foo;
+   | ---------- method `quux` not found for this struct
+...
+LL |     Foo.quux();
+   |         ^^^^ help: there is a method with a similar name: `bar`
+
+error: aborting due to previous error
+
+For more information about this error, try `rustc --explain E0599`.
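
The new test above checks that a #[doc(alias)] does not make the aliased name callable; method resolution still points at the real method. A minimal sketch of the working call:

    struct Foo;

    impl Foo {
        #[doc(alias = "quux")]
        fn bar(&self) {}
    }

    fn main() {
        // `quux` is only a documentation alias; the method itself is `bar`.
        Foo.bar();
    }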
index eab8e8e80c424234eea1b862ab531d227cb9cafd..2a36a352c7341644a4c6a653b6b25bcc34e1a476 100644 (file)
@@ -243,10 +243,12 @@ error[E0606]: casting `&{float}` as `f32` is invalid
   --> $DIR/cast-rfc0401.rs:71:30
    |
 LL |     vec![0.0].iter().map(|s| s as f32).collect::<Vec<f32>>();
-   |                              -^^^^^^^
-   |                              |
-   |                              cannot cast `&{float}` as `f32`
-   |                              help: dereference the expression: `*s`
+   |                              ^^^^^^^^
+   |
+help: dereference the expression
+   |
+LL |     vec![0.0].iter().map(|s| *s as f32).collect::<Vec<f32>>();
+   |                              +
 
 error: aborting due to 34 previous errors
 
diff --git a/tests/ui/mismatched_types/closure-arg-type-mismatch-issue-45727.fixed b/tests/ui/mismatched_types/closure-arg-type-mismatch-issue-45727.fixed
new file mode 100644 (file)
index 0000000..6315fcc
--- /dev/null
@@ -0,0 +1,5 @@
+// run-rustfix
+fn main() {
+    let _ = (-10..=10).find(|x: &i32| x.signum() == 0); //~ ERROR type mismatch in closure arguments
+    let _ = (-10..=10).find(|x: &i32| x.signum() == 0); //~ ERROR type mismatch in closure arguments
+}
diff --git a/tests/ui/mismatched_types/closure-arg-type-mismatch-issue-45727.rs b/tests/ui/mismatched_types/closure-arg-type-mismatch-issue-45727.rs
new file mode 100644 (file)
index 0000000..c12c536
--- /dev/null
@@ -0,0 +1,5 @@
+// run-rustfix
+fn main() {
+    let _ = (-10..=10).find(|x: i32| x.signum() == 0); //~ ERROR type mismatch in closure arguments
+    let _ = (-10..=10).find(|x: &&&i32| x.signum() == 0); //~ ERROR type mismatch in closure arguments
+}
diff --git a/tests/ui/mismatched_types/closure-arg-type-mismatch-issue-45727.stderr b/tests/ui/mismatched_types/closure-arg-type-mismatch-issue-45727.stderr
new file mode 100644 (file)
index 0000000..fb8af4b
--- /dev/null
@@ -0,0 +1,38 @@
+error[E0631]: type mismatch in closure arguments
+  --> $DIR/closure-arg-type-mismatch-issue-45727.rs:3:24
+   |
+LL |     let _ = (-10..=10).find(|x: i32| x.signum() == 0);
+   |                        ^^^^ -------- found signature defined here
+   |                        |
+   |                        expected due to this
+   |
+   = note: expected closure signature `for<'a> fn(&'a {integer}) -> _`
+              found closure signature `fn(i32) -> _`
+note: required by a bound in `find`
+  --> $SRC_DIR/core/src/iter/traits/iterator.rs:LL:COL
+help: consider borrowing the argument
+   |
+LL |     let _ = (-10..=10).find(|x: &i32| x.signum() == 0);
+   |                                 +
+
+error[E0631]: type mismatch in closure arguments
+  --> $DIR/closure-arg-type-mismatch-issue-45727.rs:4:24
+   |
+LL |     let _ = (-10..=10).find(|x: &&&i32| x.signum() == 0);
+   |                        ^^^^ ----------- found signature defined here
+   |                        |
+   |                        expected due to this
+   |
+   = note: expected closure signature `for<'a> fn(&'a {integer}) -> _`
+              found closure signature `for<'a, 'b, 'c> fn(&'a &'b &'c i32) -> _`
+note: required by a bound in `find`
+  --> $SRC_DIR/core/src/iter/traits/iterator.rs:LL:COL
+help: do not borrow the argument
+   |
+LL -     let _ = (-10..=10).find(|x: &&&i32| x.signum() == 0);
+LL +     let _ = (-10..=10).find(|x: &i32| x.signum() == 0);
+   |
+
+error: aborting due to 2 previous errors
+
+For more information about this error, try `rustc --explain E0631`.
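
The new rustfix test above turns the "consider borrowing the argument" and "do not borrow the argument" help into machine-applicable suggestions; a minimal sketch of the accepted closure signature:

    fn main() {
        // `find` hands the closure a `&i32`, so the parameter type must
        // match that level of indirection exactly.
        let zero = (-10..=10).find(|x: &i32| x.signum() == 0);
        assert_eq!(zero, Some(0));
    }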
index fab9b7edc0cc5a38bb0c6ab566171b51fa5a4b62..811ff0533f0124ff219f580a458a4d1d830af712 100644 (file)
@@ -2,16 +2,18 @@ error[E0631]: type mismatch in closure arguments
   --> $DIR/closure-arg-type-mismatch.rs:3:14
    |
 LL |     a.iter().map(|_: (u32, u32)| 45);
-   |              ^^^ ---------------
-   |              |   |   |
-   |              |   |   help: consider borrowing the argument: `&(u32, u32)`
-   |              |   found signature defined here
+   |              ^^^ --------------- found signature defined here
+   |              |
    |              expected due to this
    |
    = note: expected closure signature `fn(&(u32, u32)) -> _`
               found closure signature `fn((u32, u32)) -> _`
 note: required by a bound in `map`
   --> $SRC_DIR/core/src/iter/traits/iterator.rs:LL:COL
+help: consider borrowing the argument
+   |
+LL |     a.iter().map(|_: &(u32, u32)| 45);
+   |                      +
 
 error[E0631]: type mismatch in closure arguments
   --> $DIR/closure-arg-type-mismatch.rs:4:14
index 72fb0e4d774312780cb81e6e5f8fc1617f0d8113..a6764a1dc6d31675bdc009c343bf93dabda2af0f 100644 (file)
@@ -2,16 +2,18 @@ error[E0631]: type mismatch in closure arguments
   --> $DIR/issue-36053-2.rs:7:32
    |
 LL |     once::<&str>("str").fuse().filter(|a: &str| true).count();
-   |                                ^^^^^^ ---------
-   |                                |      |   |
-   |                                |      |   help: consider borrowing the argument: `&&str`
-   |                                |      found signature defined here
+   |                                ^^^^^^ --------- found signature defined here
+   |                                |
    |                                expected due to this
    |
    = note: expected closure signature `for<'a> fn(&'a &str) -> _`
               found closure signature `for<'a> fn(&'a str) -> _`
 note: required by a bound in `filter`
   --> $SRC_DIR/core/src/iter/traits/iterator.rs:LL:COL
+help: consider borrowing the argument
+   |
+LL |     once::<&str>("str").fuse().filter(|a: &&str| true).count();
+   |                                           +
 
 error[E0599]: the method `count` exists for struct `Filter<Fuse<Once<&str>>, [closure@issue-36053-2.rs:7:39]>`, but its trait bounds were not satisfied
   --> $DIR/issue-36053-2.rs:7:55
index ced062269df68569795cb6fde437c35749c9a5b7..99e291cda0377ccb0e3b05362b0683372529a128 100644 (file)
@@ -4,7 +4,7 @@ error: cannot find macro `macro_two` in this scope
 LL |     macro_two!();
    |     ^^^^^^^^^
    |
-   = note: consider importing this macro:
+   = help: consider importing this macro:
            two_macros::macro_two
 
 error: aborting due to previous error
diff --git a/tests/ui/parser/recover-unticked-labels.fixed b/tests/ui/parser/recover-unticked-labels.fixed
new file mode 100644 (file)
index 0000000..159d995
--- /dev/null
@@ -0,0 +1,7 @@
+// run-rustfix
+
+fn main() {
+    'label: loop { break 'label };    //~ error: cannot find value `label` in this scope
+    'label: loop { break 'label 0 };  //~ error: expected a label, found an identifier
+    'label: loop { continue 'label }; //~ error: expected a label, found an identifier
+}
diff --git a/tests/ui/parser/recover-unticked-labels.rs b/tests/ui/parser/recover-unticked-labels.rs
new file mode 100644 (file)
index 0000000..56034de
--- /dev/null
@@ -0,0 +1,7 @@
+// run-rustfix
+
+fn main() {
+    'label: loop { break label };    //~ error: cannot find value `label` in this scope
+    'label: loop { break label 0 };  //~ error: expected a label, found an identifier
+    'label: loop { continue label }; //~ error: expected a label, found an identifier
+}
diff --git a/tests/ui/parser/recover-unticked-labels.stderr b/tests/ui/parser/recover-unticked-labels.stderr
new file mode 100644 (file)
index 0000000..c115dff
--- /dev/null
@@ -0,0 +1,25 @@
+error: expected a label, found an identifier
+  --> $DIR/recover-unticked-labels.rs:5:26
+   |
+LL |     'label: loop { break label 0 };
+   |                          ^^^^^ help: labels start with a tick: `'label`
+
+error: expected a label, found an identifier
+  --> $DIR/recover-unticked-labels.rs:6:29
+   |
+LL |     'label: loop { continue label };
+   |                             ^^^^^ help: labels start with a tick: `'label`
+
+error[E0425]: cannot find value `label` in this scope
+  --> $DIR/recover-unticked-labels.rs:4:26
+   |
+LL |     'label: loop { break label };
+   |     ------               ^^^^^
+   |     |                    |
+   |     |                    not found in this scope
+   |     |                    help: use the similarly named label: `'label`
+   |     a label with a similar name exists
+
+error: aborting due to 3 previous errors
+
+For more information about this error, try `rustc --explain E0425`.
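
The new parser recovery above turns a bare `label` after break or continue into a help pointing at the tick form; a minimal sketch of the valid syntax, as in the .fixed file:

    fn main() {
        // Labels are always written with a leading tick.
        'outer: loop {
            loop {
                break 'outer;
            }
        }
    }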
index ba35e95c82a493f86da4b9bcaa89252879faca48..cd25c7566897a752b2bb92223b7ba1aa11e02d83 100644 (file)
@@ -2,8 +2,11 @@ fn main() {
     let y = 0;
     //~^ ERROR unknown start of token: \u{37e}
     //~^^ HELP Unicode character ';' (Greek Question Mark) looks like ';' (Semicolon), but it is not
-        let x = 0;
+        let x = 0;
     //~^ ERROR unknown start of token: \u{a0}
     //~^^ NOTE character appears 3 more times
     //~^^^ HELP Unicode character ' ' (No-Break Space) looks like ' ' (Space), but it is not
+    let _ = 1 ⩵ 2;
+    //~^ ERROR unknown start of token
+    //~^^ HELP Unicode character '⩵' (Two Consecutive Equals Signs) looks like '==' (Double Equals Sign), but it is not
 }
index 6a5b27872e73829de3012348d4f52cdd6daf1f2f..086de5ec0997e82d34ced9cb6b4bcfb11b6d20bb 100644 (file)
@@ -12,14 +12,25 @@ LL |     let y = 0;
 error: unknown start of token: \u{a0}
   --> $DIR/unicode-chars.rs:5:5
    |
-LL |         let x = 0;
+LL |         let x = 0;
    |     ^^^^
    |
    = note: character appears 3 more times
 help: Unicode character ' ' (No-Break Space) looks like ' ' (Space), but it is not
    |
-LL |         let x = 0;
+LL |         let x = 0;
    |     ++++
 
-error: aborting due to 2 previous errors
+error: unknown start of token: \u{2a75}
+  --> $DIR/unicode-chars.rs:9:15
+   |
+LL |     let _ = 1 ⩵ 2;
+   |               ^
+   |
+help: Unicode character '⩵' (Two Consecutive Equals Signs) looks like '==' (Double Equals Sign), but it is not
+   |
+LL |     let _ = 1 == 2;
+   |               ~~
+
+error: aborting due to 3 previous errors
 
diff --git a/tests/ui/pattern/issue-106552.rs b/tests/ui/pattern/issue-106552.rs
new file mode 100644 (file)
index 0000000..aa2c141
--- /dev/null
@@ -0,0 +1,7 @@
+fn main() {
+    let 5 = 6;
+    //~^ error refutable pattern in local binding [E0005]
+
+    let x @ 5 = 6;
+    //~^ error refutable pattern in local binding [E0005]
+}
diff --git a/tests/ui/pattern/issue-106552.stderr b/tests/ui/pattern/issue-106552.stderr
new file mode 100644 (file)
index 0000000..ed5d40c
--- /dev/null
@@ -0,0 +1,35 @@
+error[E0005]: refutable pattern in local binding
+  --> $DIR/issue-106552.rs:2:9
+   |
+LL |     let 5 = 6;
+   |         ^ patterns `i32::MIN..=4_i32` and `6_i32..=i32::MAX` not covered
+   |
+   = note: `let` bindings require an "irrefutable pattern", like a `struct` or an `enum` with only one variant
+   = note: for more information, visit https://doc.rust-lang.org/book/ch18-02-refutability.html
+   = note: the matched value is of type `i32`
+help: you might want to use `if let` to ignore the variants that aren't matched
+   |
+LL |     if let 5 = 6 { todo!() }
+   |     ++           ~~~~~~~~~~~
+help: alternatively, you could prepend the pattern with an underscore to define a new named variable; identifiers cannot begin with digits
+   |
+LL |     let _5 = 6;
+   |         +
+
+error[E0005]: refutable pattern in local binding
+  --> $DIR/issue-106552.rs:5:9
+   |
+LL |     let x @ 5 = 6;
+   |         ^^^^^ patterns `i32::MIN..=4_i32` and `6_i32..=i32::MAX` not covered
+   |
+   = note: `let` bindings require an "irrefutable pattern", like a `struct` or an `enum` with only one variant
+   = note: for more information, visit https://doc.rust-lang.org/book/ch18-02-refutability.html
+   = note: the matched value is of type `i32`
+help: you might want to use `let else` to handle the variants that aren't matched
+   |
+LL |     let x @ 5 = 6 else { todo!() };
+   |                   ++++++++++++++++
+
+error: aborting due to 2 previous errors
+
+For more information about this error, try `rustc --explain E0005`.
index 282c411136922b17d69f88cf0c5105f817141736..13427d2c9b208cb923b66d92cd25800f23d8c2e0 100644 (file)
@@ -11,9 +11,9 @@ pub mod b {
     pub fn key(e: ::E) -> &'static str {
         match e {
             A => "A",
-//~^ WARN pattern binding `A` is named the same as one of the variants of the type `E`
+//~^ ERROR pattern binding `A` is named the same as one of the variants of the type `E`
             B => "B", //~ ERROR: unreachable pattern
-//~^ WARN pattern binding `B` is named the same as one of the variants of the type `E`
+//~^ ERROR pattern binding `B` is named the same as one of the variants of the type `E`
         }
     }
 }
index fc8ae1ed7b5b07e06812fbeb7766c3671afb96d4..7ea51b5f804c074abe85ce83a95ba042bbc67948 100644 (file)
@@ -1,12 +1,12 @@
-warning[E0170]: pattern binding `A` is named the same as one of the variants of the type `E`
+error[E0170]: pattern binding `A` is named the same as one of the variants of the type `E`
   --> $DIR/issue-14221.rs:13:13
    |
 LL |             A => "A",
    |             ^ help: to match on the variant, qualify the path: `E::A`
    |
-   = note: `#[warn(bindings_with_variant_name)]` on by default
+   = note: `#[deny(bindings_with_variant_name)]` on by default
 
-warning[E0170]: pattern binding `B` is named the same as one of the variants of the type `E`
+error[E0170]: pattern binding `B` is named the same as one of the variants of the type `E`
   --> $DIR/issue-14221.rs:15:13
    |
 LL |             B => "B",
@@ -27,6 +27,6 @@ note: the lint level is defined here
 LL | #![deny(unreachable_patterns)]
    |         ^^^^^^^^^^^^^^^^^^^^
 
-error: aborting due to previous error; 2 warnings emitted
+error: aborting due to 3 previous errors
 
 For more information about this error, try `rustc --explain E0170`.
index 6fd5768a5a26dd2e032ed6726fc13160155ef256..05d097eaf14e4a02a789cfc56f621dfedb80ad80 100644 (file)
@@ -1,7 +1,5 @@
 // Test for issue #67776: binding named the same as enum variant
-// should report a warning even when matching against a reference type
-
-// check-pass
+// should report an error even when matching against a reference type
 
 #![allow(unused_variables)]
 #![allow(non_snake_case)]
@@ -15,27 +13,27 @@ enum Foo {
 fn fn1(e: Foo) {
     match e {
         Bar => {},
-        //~^ WARNING named the same as one of the variants of the type `Foo`
+        //~^ ERROR named the same as one of the variants of the type `Foo`
         Baz => {},
-        //~^ WARNING named the same as one of the variants of the type `Foo`
+        //~^ ERROR named the same as one of the variants of the type `Foo`
     }
 }
 
 fn fn2(e: &Foo) {
     match e {
         Bar => {},
-        //~^ WARNING named the same as one of the variants of the type `Foo`
+        //~^ ERROR named the same as one of the variants of the type `Foo`
         Baz => {},
-        //~^ WARNING named the same as one of the variants of the type `Foo`
+        //~^ ERROR named the same as one of the variants of the type `Foo`
     }
 }
 
 fn fn3(e: &mut &&mut Foo) {
     match e {
         Bar => {},
-        //~^ WARNING named the same as one of the variants of the type `Foo`
+        //~^ ERROR named the same as one of the variants of the type `Foo`
         Baz => {},
-        //~^ WARNING named the same as one of the variants of the type `Foo`
+        //~^ ERROR named the same as one of the variants of the type `Foo`
     }
 }
 
index 6f3613b63c9aa4245f3b2460bdf1fdaa3779b57e..da580c7accb97c6574e1b0ae0af332b8fb073973 100644 (file)
@@ -1,41 +1,41 @@
-warning[E0170]: pattern binding `Bar` is named the same as one of the variants of the type `Foo`
-  --> $DIR/issue-67776-match-same-name-enum-variant-refs.rs:17:9
+error[E0170]: pattern binding `Bar` is named the same as one of the variants of the type `Foo`
+  --> $DIR/issue-67776-match-same-name-enum-variant-refs.rs:15:9
    |
 LL |         Bar => {},
    |         ^^^ help: to match on the variant, qualify the path: `Foo::Bar`
    |
-   = note: `#[warn(bindings_with_variant_name)]` on by default
+   = note: `#[deny(bindings_with_variant_name)]` on by default
 
-warning[E0170]: pattern binding `Baz` is named the same as one of the variants of the type `Foo`
-  --> $DIR/issue-67776-match-same-name-enum-variant-refs.rs:19:9
+error[E0170]: pattern binding `Baz` is named the same as one of the variants of the type `Foo`
+  --> $DIR/issue-67776-match-same-name-enum-variant-refs.rs:17:9
    |
 LL |         Baz => {},
    |         ^^^ help: to match on the variant, qualify the path: `Foo::Baz`
 
-warning[E0170]: pattern binding `Bar` is named the same as one of the variants of the type `Foo`
-  --> $DIR/issue-67776-match-same-name-enum-variant-refs.rs:26:9
+error[E0170]: pattern binding `Bar` is named the same as one of the variants of the type `Foo`
+  --> $DIR/issue-67776-match-same-name-enum-variant-refs.rs:24:9
    |
 LL |         Bar => {},
    |         ^^^ help: to match on the variant, qualify the path: `Foo::Bar`
 
-warning[E0170]: pattern binding `Baz` is named the same as one of the variants of the type `Foo`
-  --> $DIR/issue-67776-match-same-name-enum-variant-refs.rs:28:9
+error[E0170]: pattern binding `Baz` is named the same as one of the variants of the type `Foo`
+  --> $DIR/issue-67776-match-same-name-enum-variant-refs.rs:26:9
    |
 LL |         Baz => {},
    |         ^^^ help: to match on the variant, qualify the path: `Foo::Baz`
 
-warning[E0170]: pattern binding `Bar` is named the same as one of the variants of the type `Foo`
-  --> $DIR/issue-67776-match-same-name-enum-variant-refs.rs:35:9
+error[E0170]: pattern binding `Bar` is named the same as one of the variants of the type `Foo`
+  --> $DIR/issue-67776-match-same-name-enum-variant-refs.rs:33:9
    |
 LL |         Bar => {},
    |         ^^^ help: to match on the variant, qualify the path: `Foo::Bar`
 
-warning[E0170]: pattern binding `Baz` is named the same as one of the variants of the type `Foo`
-  --> $DIR/issue-67776-match-same-name-enum-variant-refs.rs:37:9
+error[E0170]: pattern binding `Baz` is named the same as one of the variants of the type `Foo`
+  --> $DIR/issue-67776-match-same-name-enum-variant-refs.rs:35:9
    |
 LL |         Baz => {},
    |         ^^^ help: to match on the variant, qualify the path: `Foo::Baz`
 
-warning: 6 warnings emitted
+error: aborting due to 6 previous errors
 
 For more information about this error, try `rustc --explain E0170`.
index 9c52ca422411619619b3aa74a3ee0c49b26d3046..de2c27a878c678688db5b389f07658344d79de47 100644 (file)
@@ -16,7 +16,7 @@ error: cannot find attribute `empty_helper` in this scope
 LL |             #[derive(GenHelperUse)]
    |                      ^^^^^^^^^^^^
    |
-   = note: consider importing this attribute macro:
+   = help: consider importing this attribute macro:
            empty_helper
    = note: this error originates in the derive macro `GenHelperUse` (in Nightly builds, run with -Z macro-backtrace for more info)
 
@@ -29,7 +29,7 @@ LL |         #[empty_helper]
 LL |             gen_helper_use!();
    |             ----------------- in this macro invocation
    |
-   = note: consider importing this attribute macro:
+   = help: consider importing this attribute macro:
            crate::empty_helper
    = note: this error originates in the macro `gen_helper_use` (in Nightly builds, run with -Z macro-backtrace for more info)
 
index 901b3a951023c3505bbc3d3a952b9cb045765b83..700aac41c449a10890defc06ac3a17341ac3b100 100644 (file)
@@ -1,4 +1,6 @@
 // aux-build:expand-expr.rs
+// no-remap-src-base: check_expand_expr_file!() fails when enabled.
+
 #![feature(concat_bytes)]
 extern crate expand_expr;
 
@@ -8,7 +10,7 @@
 
 // Check builtin macros can be expanded.
 
-expand_expr_is!(11u32, line!());
+expand_expr_is!(13u32, line!());
 expand_expr_is!(24u32, column!());
 
 expand_expr_is!("Hello, World!", concat!("Hello, ", "World", "!"));
index 0004f2fe17f01ebfb65aaabbd86b376482eee382..df61e9972896b05734f2641b013bdbad66dc9539 100644 (file)
@@ -1,29 +1,29 @@
 error: expected one of `.`, `?`, or an operator, found `;`
-  --> $DIR/expand-expr.rs:106:27
+  --> $DIR/expand-expr.rs:108:27
    |
 LL | expand_expr_fail!("string"; hello);
    |                           ^ expected one of `.`, `?`, or an operator
 
 error: expected expression, found `$`
-  --> $DIR/expand-expr.rs:109:19
+  --> $DIR/expand-expr.rs:111:19
    |
 LL | expand_expr_fail!($);
    |                   ^ expected expression
 
 error: expected expression, found `$`
-  --> $DIR/expand-expr.rs:38:23
+  --> $DIR/expand-expr.rs:40:23
    |
 LL |     ($($t:tt)*) => { $($t)* };
    |                       ^^^^ expected expression
 
 error: expected expression, found `$`
-  --> $DIR/expand-expr.rs:111:28
+  --> $DIR/expand-expr.rs:113:28
    |
 LL | expand_expr_fail!(echo_pm!($));
    |                            ^ expected expression
 
 error: macro expansion ignores token `hello` and any following
-  --> $DIR/expand-expr.rs:115:47
+  --> $DIR/expand-expr.rs:117:47
    |
 LL | expand_expr_is!("string", echo_tts!("string"; hello));
    |                           --------------------^^^^^- caused by the macro expansion here
@@ -35,7 +35,7 @@ LL | expand_expr_is!("string", echo_tts!("string"; hello););
    |                                                     +
 
 error: macro expansion ignores token `;` and any following
-  --> $DIR/expand-expr.rs:116:44
+  --> $DIR/expand-expr.rs:118:44
    |
 LL | expand_expr_is!("string", echo_pm!("string"; hello));
    |                           -----------------^------- caused by the macro expansion here
@@ -47,7 +47,7 @@ LL | expand_expr_is!("string", echo_pm!("string"; hello););
    |                                                    +
 
 error: recursion limit reached while expanding `recursive_expand!`
-  --> $DIR/expand-expr.rs:124:16
+  --> $DIR/expand-expr.rs:126:16
    |
 LL | const _: u32 = recursive_expand!();
    |                ^^^^^^^^^^^^^^^^^^^
index 9eea630c310ecb1fc17900e7744332b240d4291b..471f317edf96479f64ac9bbc3beadf5d934aaf71 100644 (file)
 
 #[derive(generate_mod::CheckDerive)] //~ ERROR cannot find type `FromOutside` in this scope
                                      //~| ERROR cannot find type `OuterDerive` in this scope
+                                     //~| WARN this was previously accepted
+                                     //~| WARN this was previously accepted
 struct Z;
 
 fn inner_block() {
     #[derive(generate_mod::CheckDerive)] //~ ERROR cannot find type `FromOutside` in this scope
                                         //~| ERROR cannot find type `OuterDerive` in this scope
+                                        //~| WARN this was previously accepted
+                                        //~| WARN this was previously accepted
     struct InnerZ;
 }
 
-#[derive(generate_mod::CheckDeriveLint)] //~  ERROR cannot find type `OuterDeriveLint` in this scope
-                                         //~| ERROR cannot find type `FromOutside` in this scope
+#[derive(generate_mod::CheckDeriveLint)] // OK, lint is suppressed
 struct W;
 
 fn main() {}
index 64042ca0ecdea1e58a2541285084d901fce7b7c5..db629b5b5e239cc1f5557ea829988d2030597bf6 100644 (file)
@@ -4,7 +4,7 @@ error[E0412]: cannot find type `FromOutside` in this scope
 LL | generate_mod::check!();
    | ^^^^^^^^^^^^^^^^^^^^^^ not found in this scope
    |
-   = note: consider importing this struct:
+   = help: consider importing this struct:
            FromOutside
    = note: this error originates in the macro `generate_mod::check` (in Nightly builds, run with -Z macro-backtrace for more info)
 
@@ -14,7 +14,7 @@ error[E0412]: cannot find type `Outer` in this scope
 LL | generate_mod::check!();
    | ^^^^^^^^^^^^^^^^^^^^^^ not found in this scope
    |
-   = note: consider importing this struct:
+   = help: consider importing this struct:
            Outer
    = note: this error originates in the macro `generate_mod::check` (in Nightly builds, run with -Z macro-backtrace for more info)
 
@@ -24,7 +24,7 @@ error[E0412]: cannot find type `FromOutside` in this scope
 LL | #[generate_mod::check_attr]
    | ^^^^^^^^^^^^^^^^^^^^^^^^^^^ not found in this scope
    |
-   = note: consider importing this struct:
+   = help: consider importing this struct:
            FromOutside
    = note: this error originates in the attribute macro `generate_mod::check_attr` (in Nightly builds, run with -Z macro-backtrace for more info)
 
@@ -34,70 +34,131 @@ error[E0412]: cannot find type `OuterAttr` in this scope
 LL | #[generate_mod::check_attr]
    | ^^^^^^^^^^^^^^^^^^^^^^^^^^^ not found in this scope
    |
-   = note: consider importing this struct:
+   = help: consider importing this struct:
            OuterAttr
    = note: this error originates in the attribute macro `generate_mod::check_attr` (in Nightly builds, run with -Z macro-backtrace for more info)
 
-error[E0412]: cannot find type `FromOutside` in this scope
+error: cannot find type `FromOutside` in this scope
   --> $DIR/generate-mod.rs:16:10
    |
 LL | #[derive(generate_mod::CheckDerive)]
-   |          ^^^^^^^^^^^^^^^^^^^^^^^^^ not found in this scope
+   |          ^^^^^^^^^^^^^^^^^^^^^^^^^ names from parent modules are not accessible without an explicit import
    |
-   = note: consider importing this struct:
-           FromOutside
+   = warning: this was previously accepted by the compiler but is being phased out; it will become a hard error in a future release!
+   = note: for more information, see issue #83583 <https://github.com/rust-lang/rust/issues/83583>
+   = note: `#[deny(proc_macro_derive_resolution_fallback)]` on by default
    = note: this error originates in the derive macro `generate_mod::CheckDerive` (in Nightly builds, run with -Z macro-backtrace for more info)
 
-error[E0412]: cannot find type `OuterDerive` in this scope
+error: cannot find type `OuterDerive` in this scope
   --> $DIR/generate-mod.rs:16:10
    |
 LL | #[derive(generate_mod::CheckDerive)]
-   |          ^^^^^^^^^^^^^^^^^^^^^^^^^ not found in this scope
+   |          ^^^^^^^^^^^^^^^^^^^^^^^^^ names from parent modules are not accessible without an explicit import
    |
-   = note: consider importing this struct:
-           OuterDerive
+   = warning: this was previously accepted by the compiler but is being phased out; it will become a hard error in a future release!
+   = note: for more information, see issue #83583 <https://github.com/rust-lang/rust/issues/83583>
    = note: this error originates in the derive macro `generate_mod::CheckDerive` (in Nightly builds, run with -Z macro-backtrace for more info)
 
-error[E0412]: cannot find type `FromOutside` in this scope
-  --> $DIR/generate-mod.rs:21:14
+error: cannot find type `FromOutside` in this scope
+  --> $DIR/generate-mod.rs:23:14
    |
 LL |     #[derive(generate_mod::CheckDerive)]
-   |              ^^^^^^^^^^^^^^^^^^^^^^^^^ not found in this scope
+   |              ^^^^^^^^^^^^^^^^^^^^^^^^^ names from parent modules are not accessible without an explicit import
    |
-   = note: consider importing this struct:
-           FromOutside
+   = warning: this was previously accepted by the compiler but is being phased out; it will become a hard error in a future release!
+   = note: for more information, see issue #83583 <https://github.com/rust-lang/rust/issues/83583>
    = note: this error originates in the derive macro `generate_mod::CheckDerive` (in Nightly builds, run with -Z macro-backtrace for more info)
 
-error[E0412]: cannot find type `OuterDerive` in this scope
-  --> $DIR/generate-mod.rs:21:14
+error: cannot find type `OuterDerive` in this scope
+  --> $DIR/generate-mod.rs:23:14
    |
 LL |     #[derive(generate_mod::CheckDerive)]
-   |              ^^^^^^^^^^^^^^^^^^^^^^^^^ not found in this scope
+   |              ^^^^^^^^^^^^^^^^^^^^^^^^^ names from parent modules are not accessible without an explicit import
    |
-   = note: consider importing this struct:
-           OuterDerive
+   = warning: this was previously accepted by the compiler but is being phased out; it will become a hard error in a future release!
+   = note: for more information, see issue #83583 <https://github.com/rust-lang/rust/issues/83583>
    = note: this error originates in the derive macro `generate_mod::CheckDerive` (in Nightly builds, run with -Z macro-backtrace for more info)
 
-error[E0412]: cannot find type `FromOutside` in this scope
-  --> $DIR/generate-mod.rs:26:10
+error: aborting due to 8 previous errors
+
+For more information about this error, try `rustc --explain E0412`.
+Future incompatibility report: Future breakage diagnostic:
+error: cannot find type `FromOutside` in this scope
+  --> $DIR/generate-mod.rs:16:10
    |
-LL | #[derive(generate_mod::CheckDeriveLint)]
-   |          ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ not found in this scope
+LL | #[derive(generate_mod::CheckDerive)]
+   |          ^^^^^^^^^^^^^^^^^^^^^^^^^ names from parent modules are not accessible without an explicit import
    |
-   = note: consider importing this struct:
-           FromOutside
-   = note: this error originates in the derive macro `generate_mod::CheckDeriveLint` (in Nightly builds, run with -Z macro-backtrace for more info)
+   = warning: this was previously accepted by the compiler but is being phased out; it will become a hard error in a future release!
+   = note: for more information, see issue #83583 <https://github.com/rust-lang/rust/issues/83583>
+   = note: `#[deny(proc_macro_derive_resolution_fallback)]` on by default
+   = note: this error originates in the derive macro `generate_mod::CheckDerive` (in Nightly builds, run with -Z macro-backtrace for more info)
+
+Future breakage diagnostic:
+error: cannot find type `OuterDerive` in this scope
+  --> $DIR/generate-mod.rs:16:10
+   |
+LL | #[derive(generate_mod::CheckDerive)]
+   |          ^^^^^^^^^^^^^^^^^^^^^^^^^ names from parent modules are not accessible without an explicit import
+   |
+   = warning: this was previously accepted by the compiler but is being phased out; it will become a hard error in a future release!
+   = note: for more information, see issue #83583 <https://github.com/rust-lang/rust/issues/83583>
+   = note: `#[deny(proc_macro_derive_resolution_fallback)]` on by default
+   = note: this error originates in the derive macro `generate_mod::CheckDerive` (in Nightly builds, run with -Z macro-backtrace for more info)
 
-error[E0412]: cannot find type `OuterDeriveLint` in this scope
-  --> $DIR/generate-mod.rs:26:10
+Future breakage diagnostic:
+error: cannot find type `FromOutside` in this scope
+  --> $DIR/generate-mod.rs:23:14
    |
-LL | #[derive(generate_mod::CheckDeriveLint)]
-   |          ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ not found in this scope
+LL |     #[derive(generate_mod::CheckDerive)]
+   |              ^^^^^^^^^^^^^^^^^^^^^^^^^ names from parent modules are not accessible without an explicit import
    |
-   = note: consider importing this struct:
-           OuterDeriveLint
-   = note: this error originates in the derive macro `generate_mod::CheckDeriveLint` (in Nightly builds, run with -Z macro-backtrace for more info)
+   = warning: this was previously accepted by the compiler but is being phased out; it will become a hard error in a future release!
+   = note: for more information, see issue #83583 <https://github.com/rust-lang/rust/issues/83583>
+   = note: `#[deny(proc_macro_derive_resolution_fallback)]` on by default
+   = note: this error originates in the derive macro `generate_mod::CheckDerive` (in Nightly builds, run with -Z macro-backtrace for more info)
 
-error: aborting due to 10 previous errors
+Future breakage diagnostic:
+error: cannot find type `OuterDerive` in this scope
+  --> $DIR/generate-mod.rs:23:14
+   |
+LL |     #[derive(generate_mod::CheckDerive)]
+   |              ^^^^^^^^^^^^^^^^^^^^^^^^^ names from parent modules are not accessible without an explicit import
+   |
+   = warning: this was previously accepted by the compiler but is being phased out; it will become a hard error in a future release!
+   = note: for more information, see issue #83583 <https://github.com/rust-lang/rust/issues/83583>
+   = note: `#[deny(proc_macro_derive_resolution_fallback)]` on by default
+   = note: this error originates in the derive macro `generate_mod::CheckDerive` (in Nightly builds, run with -Z macro-backtrace for more info)
+
+Future breakage diagnostic:
+warning: cannot find type `FromOutside` in this scope
+  --> $DIR/generate-mod.rs:30:10
+   |
+LL | #[derive(generate_mod::CheckDeriveLint)] // OK, lint is suppressed
+   |          ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ names from parent modules are not accessible without an explicit import
+   |
+   = warning: this was previously accepted by the compiler but is being phased out; it will become a hard error in a future release!
+   = note: for more information, see issue #83583 <https://github.com/rust-lang/rust/issues/83583>
+note: the lint level is defined here
+  --> $DIR/generate-mod.rs:30:10
+   |
+LL | #[derive(generate_mod::CheckDeriveLint)] // OK, lint is suppressed
+   |          ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+   = note: this warning originates in the derive macro `generate_mod::CheckDeriveLint` (in Nightly builds, run with -Z macro-backtrace for more info)
+
+Future breakage diagnostic:
+warning: cannot find type `OuterDeriveLint` in this scope
+  --> $DIR/generate-mod.rs:30:10
+   |
+LL | #[derive(generate_mod::CheckDeriveLint)] // OK, lint is suppressed
+   |          ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ names from parent modules are not accessible without an explicit import
+   |
+   = warning: this was previously accepted by the compiler but is being phased out; it will become a hard error in a future release!
+   = note: for more information, see issue #83583 <https://github.com/rust-lang/rust/issues/83583>
+note: the lint level is defined here
+  --> $DIR/generate-mod.rs:30:10
+   |
+LL | #[derive(generate_mod::CheckDeriveLint)] // OK, lint is suppressed
+   |          ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+   = note: this warning originates in the derive macro `generate_mod::CheckDeriveLint` (in Nightly builds, run with -Z macro-backtrace for more info)
 
-For more information about this error, try `rustc --explain E0412`.
index ab5013848891e549c8682a1fd89d234d5f729865..873054927c96279723f7b87ae31a908372109ca1 100644 (file)
@@ -1,5 +1,5 @@
 error: using an old version of `rental`
-  --> remapped/proc-macro/pretty-print-hack/allsorts-rental-0.5.6/src/lib.rs:4:6
+  --> $DIR/pretty-print-hack/allsorts-rental-0.5.6/src/lib.rs:4:6
    |
 LL | enum ProceduralMasqueradeDummyType {
    |      ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
@@ -10,7 +10,7 @@ LL | enum ProceduralMasqueradeDummyType {
    = note: `#[deny(proc_macro_back_compat)]` on by default
 
 error: using an old version of `rental`
-  --> remapped/proc-macro/pretty-print-hack/allsorts-rental-0.5.6/src/lib.rs:4:6
+  --> $DIR/pretty-print-hack/allsorts-rental-0.5.6/src/lib.rs:4:6
    |
 LL | enum ProceduralMasqueradeDummyType {
    |      ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
@@ -20,7 +20,7 @@ LL | enum ProceduralMasqueradeDummyType {
    = note: older versions of the `rental` crate will stop compiling in future versions of Rust; please update to `rental` v0.5.6, or switch to one of the `rental` alternatives
 
 error: using an old version of `rental`
-  --> remapped/proc-macro/pretty-print-hack/allsorts-rental-0.5.6/src/lib.rs:4:6
+  --> $DIR/pretty-print-hack/allsorts-rental-0.5.6/src/lib.rs:4:6
    |
 LL | enum ProceduralMasqueradeDummyType {
    |      ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
@@ -30,7 +30,7 @@ LL | enum ProceduralMasqueradeDummyType {
    = note: older versions of the `rental` crate will stop compiling in future versions of Rust; please update to `rental` v0.5.6, or switch to one of the `rental` alternatives
 
 error: using an old version of `rental`
-  --> remapped/proc-macro/pretty-print-hack/allsorts-rental-0.5.6/src/lib.rs:4:6
+  --> $DIR/pretty-print-hack/allsorts-rental-0.5.6/src/lib.rs:4:6
    |
 LL | enum ProceduralMasqueradeDummyType {
    |      ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
@@ -40,7 +40,7 @@ LL | enum ProceduralMasqueradeDummyType {
    = note: older versions of the `rental` crate will stop compiling in future versions of Rust; please update to `rental` v0.5.6, or switch to one of the `rental` alternatives
 
 error: using an old version of `rental`
-  --> remapped/proc-macro/pretty-print-hack/rental-0.5.5/src/lib.rs:4:6
+  --> $DIR/pretty-print-hack/rental-0.5.5/src/lib.rs:4:6
    |
 LL | enum ProceduralMasqueradeDummyType {
    |      ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
@@ -50,7 +50,7 @@ LL | enum ProceduralMasqueradeDummyType {
    = note: older versions of the `rental` crate will stop compiling in future versions of Rust; please update to `rental` v0.5.6, or switch to one of the `rental` alternatives
 
 error: using an old version of `rental`
-  --> remapped/proc-macro/pretty-print-hack/rental-0.5.5/src/lib.rs:4:6
+  --> $DIR/pretty-print-hack/rental-0.5.5/src/lib.rs:4:6
    |
 LL | enum ProceduralMasqueradeDummyType {
    |      ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
@@ -60,7 +60,7 @@ LL | enum ProceduralMasqueradeDummyType {
    = note: older versions of the `rental` crate will stop compiling in future versions of Rust; please update to `rental` v0.5.6, or switch to one of the `rental` alternatives
 
 error: using an old version of `rental`
-  --> remapped/proc-macro/pretty-print-hack/rental-0.5.5/src/lib.rs:4:6
+  --> $DIR/pretty-print-hack/rental-0.5.5/src/lib.rs:4:6
    |
 LL | enum ProceduralMasqueradeDummyType {
    |      ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
@@ -70,7 +70,7 @@ LL | enum ProceduralMasqueradeDummyType {
    = note: older versions of the `rental` crate will stop compiling in future versions of Rust; please update to `rental` v0.5.6, or switch to one of the `rental` alternatives
 
 error: using an old version of `rental`
-  --> remapped/proc-macro/pretty-print-hack/rental-0.5.5/src/lib.rs:4:6
+  --> $DIR/pretty-print-hack/rental-0.5.5/src/lib.rs:4:6
    |
 LL | enum ProceduralMasqueradeDummyType {
    |      ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
@@ -83,7 +83,7 @@ error: aborting due to 8 previous errors
 
 Future incompatibility report: Future breakage diagnostic:
 error: using an old version of `rental`
-  --> remapped/proc-macro/pretty-print-hack/allsorts-rental-0.5.6/src/lib.rs:4:6
+  --> $DIR/pretty-print-hack/allsorts-rental-0.5.6/src/lib.rs:4:6
    |
 LL | enum ProceduralMasqueradeDummyType {
    |      ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
@@ -95,7 +95,7 @@ LL | enum ProceduralMasqueradeDummyType {
 
 Future breakage diagnostic:
 error: using an old version of `rental`
-  --> remapped/proc-macro/pretty-print-hack/allsorts-rental-0.5.6/src/lib.rs:4:6
+  --> $DIR/pretty-print-hack/allsorts-rental-0.5.6/src/lib.rs:4:6
    |
 LL | enum ProceduralMasqueradeDummyType {
    |      ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
@@ -107,7 +107,7 @@ LL | enum ProceduralMasqueradeDummyType {
 
 Future breakage diagnostic:
 error: using an old version of `rental`
-  --> remapped/proc-macro/pretty-print-hack/allsorts-rental-0.5.6/src/lib.rs:4:6
+  --> $DIR/pretty-print-hack/allsorts-rental-0.5.6/src/lib.rs:4:6
    |
 LL | enum ProceduralMasqueradeDummyType {
    |      ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
@@ -119,7 +119,7 @@ LL | enum ProceduralMasqueradeDummyType {
 
 Future breakage diagnostic:
 error: using an old version of `rental`
-  --> remapped/proc-macro/pretty-print-hack/allsorts-rental-0.5.6/src/lib.rs:4:6
+  --> $DIR/pretty-print-hack/allsorts-rental-0.5.6/src/lib.rs:4:6
    |
 LL | enum ProceduralMasqueradeDummyType {
    |      ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
@@ -131,7 +131,7 @@ LL | enum ProceduralMasqueradeDummyType {
 
 Future breakage diagnostic:
 error: using an old version of `rental`
-  --> remapped/proc-macro/pretty-print-hack/rental-0.5.5/src/lib.rs:4:6
+  --> $DIR/pretty-print-hack/rental-0.5.5/src/lib.rs:4:6
    |
 LL | enum ProceduralMasqueradeDummyType {
    |      ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
@@ -143,7 +143,7 @@ LL | enum ProceduralMasqueradeDummyType {
 
 Future breakage diagnostic:
 error: using an old version of `rental`
-  --> remapped/proc-macro/pretty-print-hack/rental-0.5.5/src/lib.rs:4:6
+  --> $DIR/pretty-print-hack/rental-0.5.5/src/lib.rs:4:6
    |
 LL | enum ProceduralMasqueradeDummyType {
    |      ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
@@ -155,7 +155,7 @@ LL | enum ProceduralMasqueradeDummyType {
 
 Future breakage diagnostic:
 error: using an old version of `rental`
-  --> remapped/proc-macro/pretty-print-hack/rental-0.5.5/src/lib.rs:4:6
+  --> $DIR/pretty-print-hack/rental-0.5.5/src/lib.rs:4:6
    |
 LL | enum ProceduralMasqueradeDummyType {
    |      ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
@@ -167,7 +167,7 @@ LL | enum ProceduralMasqueradeDummyType {
 
 Future breakage diagnostic:
 error: using an old version of `rental`
-  --> remapped/proc-macro/pretty-print-hack/rental-0.5.5/src/lib.rs:4:6
+  --> $DIR/pretty-print-hack/rental-0.5.5/src/lib.rs:4:6
    |
 LL | enum ProceduralMasqueradeDummyType {
    |      ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
index 61ca53b28d40d850caf6858fc8a41fcb4b02b9ac..3d793d2a0145c984f27fa8092f21df04c2ba724e 100644 (file)
@@ -3,21 +3,21 @@ PRINT-DERIVE RE-COLLECTED (DISPLAY): enum ProceduralMasqueradeDummyType { Input
 PRINT-DERIVE INPUT (DEBUG): TokenStream [
     Ident {
         ident: "enum",
-        span: remapped/proc-macro/pretty-print-hack/allsorts-rental-0.5.6/src/lib.rs:4:1: 4:5 (#0),
+        span: $DIR/pretty-print-hack/allsorts-rental-0.5.6/src/lib.rs:4:1: 4:5 (#0),
     },
     Ident {
         ident: "ProceduralMasqueradeDummyType",
-        span: remapped/proc-macro/pretty-print-hack/allsorts-rental-0.5.6/src/lib.rs:4:6: 4:35 (#0),
+        span: $DIR/pretty-print-hack/allsorts-rental-0.5.6/src/lib.rs:4:6: 4:35 (#0),
     },
     Group {
         delimiter: Brace,
         stream: TokenStream [
             Ident {
                 ident: "Input",
-                span: remapped/proc-macro/pretty-print-hack/allsorts-rental-0.5.6/src/lib.rs:13:5: 13:10 (#0),
+                span: $DIR/pretty-print-hack/allsorts-rental-0.5.6/src/lib.rs:13:5: 13:10 (#0),
             },
         ],
-        span: remapped/proc-macro/pretty-print-hack/allsorts-rental-0.5.6/src/lib.rs:4:36: 14:2 (#0),
+        span: $DIR/pretty-print-hack/allsorts-rental-0.5.6/src/lib.rs:4:36: 14:2 (#0),
     },
 ]
 PRINT-DERIVE INPUT (DISPLAY): enum ProceduralMasqueradeDummyType { Input, }
@@ -25,20 +25,20 @@ PRINT-DERIVE RE-COLLECTED (DISPLAY): enum ProceduralMasqueradeDummyType { Input
 PRINT-DERIVE INPUT (DEBUG): TokenStream [
     Ident {
         ident: "enum",
-        span: remapped/proc-macro/pretty-print-hack/rental-0.5.5/src/lib.rs:4:1: 4:5 (#0),
+        span: $DIR/pretty-print-hack/rental-0.5.5/src/lib.rs:4:1: 4:5 (#0),
     },
     Ident {
         ident: "ProceduralMasqueradeDummyType",
-        span: remapped/proc-macro/pretty-print-hack/rental-0.5.5/src/lib.rs:4:6: 4:35 (#0),
+        span: $DIR/pretty-print-hack/rental-0.5.5/src/lib.rs:4:6: 4:35 (#0),
     },
     Group {
         delimiter: Brace,
         stream: TokenStream [
             Ident {
                 ident: "Input",
-                span: remapped/proc-macro/pretty-print-hack/rental-0.5.5/src/lib.rs:13:5: 13:10 (#0),
+                span: $DIR/pretty-print-hack/rental-0.5.5/src/lib.rs:13:5: 13:10 (#0),
             },
         ],
-        span: remapped/proc-macro/pretty-print-hack/rental-0.5.5/src/lib.rs:4:36: 14:2 (#0),
+        span: $DIR/pretty-print-hack/rental-0.5.5/src/lib.rs:4:36: 14:2 (#0),
     },
 ]
index e9ff66ba45a08194678ad1ddb7f8135952dd4cfa..24a389c450ea01d176cab209eb54b3cf4430c8cf 100644 (file)
@@ -1,11 +1,8 @@
 // aux-build:test-macros.rs
 // compile-flags: -Z span-debug
 // revisions: local remapped
-// [remapped]compile-flags: --remap-path-prefix={{src-base}}=remapped
-
-// The remapped paths are not normalized by compiletest.
-// normalize-stdout-test: "\\(proc-macro|pretty-print-hack)" -> "/$1"
-// normalize-stderr-test: "\\(proc-macro|pretty-print-hack)" -> "/$1"
+// [local] no-remap-src-base: The hack should work regardless of remapping.
+// [remapped] remap-src-base
 
 #![no_std] // Don't load unnecessary hygiene information from std
 extern crate std;
diff --git a/tests/ui/remap-path-prefix.rs b/tests/ui/remap-path-prefix.rs
deleted file mode 100644 (file)
index 2eef970..0000000
+++ /dev/null
@@ -1,9 +0,0 @@
-// compile-flags: --remap-path-prefix={{src-base}}=remapped
-
-fn main() {
-    // We cannot actually put an ERROR marker here because
-    // the file name in the error message is not what the
-    // test framework expects (since the filename gets remapped).
-    // We still test the expected error in the stderr file.
-    ferris
-}
diff --git a/tests/ui/remap-path-prefix.stderr b/tests/ui/remap-path-prefix.stderr
deleted file mode 100644 (file)
index ad6a35d..0000000
+++ /dev/null
@@ -1,9 +0,0 @@
-error[E0425]: cannot find value `ferris` in this scope
-  --> remapped/remap-path-prefix.rs:8:5
-   |
-LL |     ferris
-   |     ^^^^^^ not found in this scope
-
-error: aborting due to previous error
-
-For more information about this error, try `rustc --explain E0425`.
index 761089cd3871a7ead4a8ea5f45d85b531d89c029..122e8fd350cb2f4ef15abc373500f807536b0488 100644 (file)
@@ -7,9 +7,9 @@ LL | use alloc;
 help: consider importing one of these items instead
    |
 LL | use core::alloc;
-   |     ~~~~~~~~~~~~
-LL | use std::alloc;
    |     ~~~~~~~~~~~
+LL | use std::alloc;
+   |     ~~~~~~~~~~
 
 error: aborting due to previous error
 
index 8881ede0dbca7e362d5f3cf92fb1444fa2a2520c..f8b3e6d65afb64cac3492bf2a9f27b11cd58b813 100644 (file)
@@ -15,7 +15,7 @@ LL | use std::simd::intrinsics;
 help: consider importing this module instead
    |
 LL | use std::intrinsics;
-   |     ~~~~~~~~~~~~~~~~
+   |     ~~~~~~~~~~~~~~~
 
 error: aborting due to 2 previous errors
 
diff --git a/tests/ui/single-use-lifetime/issue-104440.rs b/tests/ui/single-use-lifetime/issue-104440.rs
new file mode 100644 (file)
index 0000000..0795e95
--- /dev/null
@@ -0,0 +1,100 @@
+#![feature(decl_macro, rustc_attrs)]
+#![deny(single_use_lifetimes)]
+
+mod type_params {
+    macro m($T:ident) {
+        fn f<$T: Clone, T: PartialEq>(t1: $T, t2: T) -> ($T, bool) {
+            (t1.clone(), t2 == t2)
+        }
+    }
+
+    #[rustc_macro_transparency = "semitransparent"]
+    macro n($T:ident) {
+        fn g<$T: Clone>(t1: $T, t2: T) -> (T, $T) {
+            (t1.clone(), t2.clone())
+        }
+        fn h<T: Clone>(t1: $T, t2: T) -> (T, $T) {
+            (t1.clone(), t2.clone())
+        }
+    }
+
+    #[rustc_macro_transparency = "transparent"]
+    macro p($T:ident) {
+        fn j<$T: Clone>(t1: $T, t2: T) -> (T, $T) {
+            (t1.clone(), t2.clone())
+        }
+        fn k<T: Clone>(t1: $T, t2: T) -> (T, $T) {
+            (t1.clone(), t2.clone())
+        }
+    }
+
+    m!(T);
+    n!(T);
+    p!(T);
+}
+
+mod lifetime_params {
+    macro m($a:lifetime) {
+        fn f<'b, 'c, $a: 'b, 'a: 'c>(t1: &$a(), t2: &'a ()) -> (&'b (), &'c ()) { //~ ERROR lifetime parameter `'a` only used once
+            (t1, t2)
+        }
+    }
+
+    #[rustc_macro_transparency = "semitransparent"]
+    macro n($a:lifetime) {
+        fn g<$a>(t1: &$a(), t2: &'a ()) -> (&'a (), &$a ()) {
+            (t1, t2)
+        }
+        fn h<'a>(t1: &$a(), t2: &'a ()) -> (&'a (), &$a ()) {
+            (t1, t2)
+        }
+    }
+
+    #[rustc_macro_transparency = "transparent"]
+    macro p($a:lifetime) {
+        fn j<$a>(t1: &$a(), t2: &'a ()) -> (&'a (), &$a ()) {
+            (t1, t2)
+        }
+        fn k<'a>(t1: &$a(), t2: &'a ()) -> (&'a (), &$a ()) {
+            (t1, t2)
+        }
+    }
+
+    m!('a); //~ ERROR lifetime parameter `'a` only used once
+    n!('a);
+    p!('a);
+}
+
+mod const_params {
+    macro m($C:ident) {
+        fn f<const $C: usize, const C: usize>(t1: [(); $C], t2: [(); C]) -> ([(); $C], [(); C]) {
+            (t1, t2)
+        }
+    }
+
+    #[rustc_macro_transparency = "semitransparent"]
+    macro n($C:ident) {
+        fn g<const $C: usize>(t1: [(); $C], t2: [(); C]) -> ([(); C], [(); $C]) {
+            (t1, t2)
+        }
+        fn h<const C: usize>(t1: [(); $C], t2: [(); C]) -> ([(); C], [(); $C]) {
+            (t1, t2)
+        }
+    }
+
+    #[rustc_macro_transparency = "transparent"]
+    macro p($C:ident) {
+        fn j<const $C: usize>(t1: [(); $C], t2: [(); C]) -> ([(); C], [(); $C]) {
+            (t1, t2)
+        }
+        fn k<const C: usize>(t1: [(); $C], t2: [(); C]) -> ([(); C], [(); $C]) {
+            (t1, t2)
+        }
+    }
+
+    m!(C);
+    n!(C);
+    p!(C);
+}
+
+fn main() {}
diff --git a/tests/ui/single-use-lifetime/issue-104440.stderr b/tests/ui/single-use-lifetime/issue-104440.stderr
new file mode 100644 (file)
index 0000000..54ded31
--- /dev/null
@@ -0,0 +1,28 @@
+error: lifetime parameter `'a` only used once
+  --> $DIR/issue-104440.rs:63:8
+   |
+LL |     m!('a);
+   |        ^^
+   |        |
+   |        this lifetime...
+   |        ...is used only here
+   |
+note: the lint level is defined here
+  --> $DIR/issue-104440.rs:2:9
+   |
+LL | #![deny(single_use_lifetimes)]
+   |         ^^^^^^^^^^^^^^^^^^^^
+
+error: lifetime parameter `'a` only used once
+  --> $DIR/issue-104440.rs:38:30
+   |
+LL |         fn f<'b, 'c, $a: 'b, 'a: 'c>(t1: &$a(), t2: &'a ()) -> (&'b (), &'c ()) {
+   |                              ^^ this lifetime...     -- ...is used only here
+...
+LL |     m!('a);
+   |     ------ in this macro invocation
+   |
+   = note: this error originates in the macro `m` (in Nightly builds, run with -Z macro-backtrace for more info)
+
+error: aborting due to 2 previous errors
+
diff --git a/tests/ui/stability-attribute/issue-106589.rs b/tests/ui/stability-attribute/issue-106589.rs
new file mode 100644 (file)
index 0000000..3cad9a3
--- /dev/null
@@ -0,0 +1,10 @@
+// #![feature(staged_api)] // note: `staged_api` not enabled
+
+#![stable(feature = "foo", since = "1.0.0")]
+//~^ ERROR stability attributes may not be used outside of the standard library
+
+#[unstable(feature = "foo", issue = "none")]
+//~^ ERROR stability attributes may not be used outside of the standard library
+fn foo_unstable() {}
+
+fn main() {}
diff --git a/tests/ui/stability-attribute/issue-106589.stderr b/tests/ui/stability-attribute/issue-106589.stderr
new file mode 100644 (file)
index 0000000..ccf3f71
--- /dev/null
@@ -0,0 +1,15 @@
+error[E0734]: stability attributes may not be used outside of the standard library
+  --> $DIR/issue-106589.rs:6:1
+   |
+LL | #[unstable(feature = "foo", issue = "none")]
+   | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error[E0734]: stability attributes may not be used outside of the standard library
+  --> $DIR/issue-106589.rs:3:1
+   |
+LL | #![stable(feature = "foo", since = "1.0.0")]
+   | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: aborting due to 2 previous errors
+
+For more information about this error, try `rustc --explain E0734`.
diff --git a/tests/ui/suggestions/call-on-unimplemented-with-autoderef.rs b/tests/ui/suggestions/call-on-unimplemented-with-autoderef.rs
new file mode 100644 (file)
index 0000000..9021dd7
--- /dev/null
@@ -0,0 +1,13 @@
+trait Foo {}
+
+impl Foo for i32 {}
+
+fn needs_foo(_: impl Foo) {}
+
+fn test(x: &Box<dyn Fn() -> i32>) {
+    needs_foo(x);
+    //~^ ERROR the trait bound
+    //~| HELP use parentheses to call this trait object
+}
+
+fn main() {}
diff --git a/tests/ui/suggestions/call-on-unimplemented-with-autoderef.stderr b/tests/ui/suggestions/call-on-unimplemented-with-autoderef.stderr
new file mode 100644 (file)
index 0000000..90f44cc
--- /dev/null
@@ -0,0 +1,21 @@
+error[E0277]: the trait bound `&Box<dyn Fn() -> i32>: Foo` is not satisfied
+  --> $DIR/call-on-unimplemented-with-autoderef.rs:8:15
+   |
+LL |     needs_foo(x);
+   |     --------- ^ the trait `Foo` is not implemented for `&Box<dyn Fn() -> i32>`
+   |     |
+   |     required by a bound introduced by this call
+   |
+note: required by a bound in `needs_foo`
+  --> $DIR/call-on-unimplemented-with-autoderef.rs:5:22
+   |
+LL | fn needs_foo(_: impl Foo) {}
+   |                      ^^^ required by this bound in `needs_foo`
+help: use parentheses to call this trait object
+   |
+LL |     needs_foo(x());
+   |                ++
+
+error: aborting due to previous error
+
+For more information about this error, try `rustc --explain E0277`.
index 477eb2786799a5d9320e5ed5b210dfef659c3af8..2af7c2f6971484e925eafdc4d402dbf0de7be0c6 100644 (file)
@@ -24,16 +24,7 @@ error[E0425]: cannot find value `F` in this scope
   --> $DIR/constrain-suggest-ice.rs:6:9
    |
 LL |         F
-   |         ^
-   |
-help: a local variable with a similar name exists
-   |
-LL |         x
-   |         ~
-help: you might be missing a type parameter
-   |
-LL | struct Bug<S, F>{
-   |             +++
+   |         ^ help: a local variable with a similar name exists: `x`
 
 error: generic `Self` types are currently not permitted in anonymous constants
   --> $DIR/constrain-suggest-ice.rs:3:21
index e63210a3e987efd7c8bf7e1549b15b0aa8e01e0c..d161ed284f6d9dd292ce523ffb1ebaa99e4c7e67 100644 (file)
@@ -1,9 +1,8 @@
 #![allow(unused, nonstandard_style)]
-#![deny(bindings_with_variant_name)]
 
 // If an enum has two different variants,
 // then it cannot be matched upon in a function argument.
-// It still gets a warning, but no suggestions.
+// It still gets an error, but no suggestions.
 enum Foo {
     C,
     D,
index eb22b0ea5c83da68b71dbe18c4752664f228331f..0bd1b7ba4bacfdbe35e8f85a82a2b8b96fe115ac 100644 (file)
@@ -1,17 +1,13 @@
 error[E0170]: pattern binding `C` is named the same as one of the variants of the type `Foo`
-  --> $DIR/issue-88730.rs:12:8
+  --> $DIR/issue-88730.rs:11:8
    |
 LL | fn foo(C: Foo) {}
    |        ^
    |
-note: the lint level is defined here
-  --> $DIR/issue-88730.rs:2:9
-   |
-LL | #![deny(bindings_with_variant_name)]
-   |         ^^^^^^^^^^^^^^^^^^^^^^^^^^
+   = note: `#[deny(bindings_with_variant_name)]` on by default
 
 error[E0170]: pattern binding `C` is named the same as one of the variants of the type `Foo`
-  --> $DIR/issue-88730.rs:15:9
+  --> $DIR/issue-88730.rs:14:9
    |
 LL |     let C = Foo::D;
    |         ^
diff --git a/tests/ui/suggestions/suggest-remove-deref.fixed b/tests/ui/suggestions/suggest-remove-deref.fixed
new file mode 100644 (file)
index 0000000..4dc12da
--- /dev/null
@@ -0,0 +1,28 @@
+// run-rustfix
+
+//issue #106496
+
+struct S;
+
+trait X {}
+impl X for S {}
+
+fn foo<T: X>(_: &T) {}
+fn test_foo() {
+    let hello = &S;
+    foo(hello);
+    //~^ ERROR mismatched types
+}
+
+fn bar(_: &String) {}
+fn test_bar() {
+    let v = String::from("hello");
+    let s = &v;
+    bar(s);
+    //~^ ERROR mismatched types
+}
+
+fn main() {
+    test_foo();
+    test_bar();
+}
diff --git a/tests/ui/suggestions/suggest-remove-deref.rs b/tests/ui/suggestions/suggest-remove-deref.rs
new file mode 100644 (file)
index 0000000..c2d385c
--- /dev/null
@@ -0,0 +1,28 @@
+// run-rustfix
+
+//issue #106496
+
+struct S;
+
+trait X {}
+impl X for S {}
+
+fn foo<T: X>(_: &T) {}
+fn test_foo() {
+    let hello = &S;
+    foo(*hello);
+    //~^ ERROR mismatched types
+}
+
+fn bar(_: &String) {}
+fn test_bar() {
+    let v = String::from("hello");
+    let s = &v;
+    bar(*s);
+    //~^ ERROR mismatched types
+}
+
+fn main() {
+    test_foo();
+    test_bar();
+}
diff --git a/tests/ui/suggestions/suggest-remove-deref.stderr b/tests/ui/suggestions/suggest-remove-deref.stderr
new file mode 100644 (file)
index 0000000..f5d810e
--- /dev/null
@@ -0,0 +1,43 @@
+error[E0308]: mismatched types
+  --> $DIR/suggest-remove-deref.rs:13:9
+   |
+LL |     foo(*hello);
+   |     --- ^^^^^^ expected reference, found struct `S`
+   |     |
+   |     arguments to this function are incorrect
+   |
+   = note: expected reference `&_`
+                 found struct `S`
+note: function defined here
+  --> $DIR/suggest-remove-deref.rs:10:4
+   |
+LL | fn foo<T: X>(_: &T) {}
+   |    ^^^       -----
+help: consider removing deref here
+   |
+LL -     foo(*hello);
+LL +     foo(hello);
+   |
+
+error[E0308]: mismatched types
+  --> $DIR/suggest-remove-deref.rs:21:9
+   |
+LL |     bar(*s);
+   |     --- ^^ expected `&String`, found struct `String`
+   |     |
+   |     arguments to this function are incorrect
+   |
+note: function defined here
+  --> $DIR/suggest-remove-deref.rs:17:4
+   |
+LL | fn bar(_: &String) {}
+   |    ^^^ ----------
+help: consider removing deref here
+   |
+LL -     bar(*s);
+LL +     bar(s);
+   |
+
+error: aborting due to 2 previous errors
+
+For more information about this error, try `rustc --explain E0308`.
diff --git a/tests/ui/suggestions/type-mismatch-byte-literal.rs b/tests/ui/suggestions/type-mismatch-byte-literal.rs
new file mode 100644 (file)
index 0000000..34199f8
--- /dev/null
@@ -0,0 +1,18 @@
+// Tests that a suggestion is issued for type mismatch errors when a
+// u8 is expected and a char literal which is ASCII is supplied.
+
+fn foo(_t: u8) {}
+
+fn main() {
+    let _x: u8 = 'X';
+    //~^ ERROR: mismatched types [E0308]
+    //~| HELP: if you meant to write a byte literal, prefix with `b`
+
+    foo('#');
+    //~^ ERROR: mismatched types [E0308]
+    //~| HELP: if you meant to write a byte literal, prefix with `b`
+
+    // Do not issue the suggestion if the char literal isn't ASCII
+    let _t: u8 = '€';
+    //~^ ERROR: mismatched types [E0308]
+}
diff --git a/tests/ui/suggestions/type-mismatch-byte-literal.stderr b/tests/ui/suggestions/type-mismatch-byte-literal.stderr
new file mode 100644 (file)
index 0000000..c9c2e74
--- /dev/null
@@ -0,0 +1,42 @@
+error[E0308]: mismatched types
+  --> $DIR/type-mismatch-byte-literal.rs:7:18
+   |
+LL |     let _x: u8 = 'X';
+   |             --   ^^^ expected `u8`, found `char`
+   |             |
+   |             expected due to this
+   |
+help: if you meant to write a byte literal, prefix with `b`
+   |
+LL |     let _x: u8 = b'X';
+   |                  ~~~~
+
+error[E0308]: mismatched types
+  --> $DIR/type-mismatch-byte-literal.rs:11:9
+   |
+LL |     foo('#');
+   |     --- ^^^ expected `u8`, found `char`
+   |     |
+   |     arguments to this function are incorrect
+   |
+note: function defined here
+  --> $DIR/type-mismatch-byte-literal.rs:4:4
+   |
+LL | fn foo(_t: u8) {}
+   |    ^^^ ------
+help: if you meant to write a byte literal, prefix with `b`
+   |
+LL |     foo(b'#');
+   |         ~~~~
+
+error[E0308]: mismatched types
+  --> $DIR/type-mismatch-byte-literal.rs:16:18
+   |
+LL |     let _t: u8 = '€';
+   |             --   ^^^ expected `u8`, found `char`
+   |             |
+   |             expected due to this
+
+error: aborting due to 3 previous errors
+
+For more information about this error, try `rustc --explain E0308`.
index 08add29cb9cd521431521e535f764ff47fbd41d7..81aba403d0ba2cecb2bcf17238c207d46bdfd003 100644 (file)
@@ -8,9 +8,8 @@ trait Foo {
 }
 
 impl Foo for [u8; 1 + 2] {
-    #[rustc_def_path] //~ ERROR def-path(<[u8; _] as Foo>::baz)
-    fn baz() { }
+    #[rustc_def_path] //~ ERROR def-path(<[u8; 1 + 2] as Foo>::baz)
+    fn baz() {}
 }
 
-fn main() {
-}
+fn main() {}
index 98330031602233859d6d2fbe6980a17881c5f0ef..0c3205e0108e66e50589e2bab2b702537991b76e 100644 (file)
@@ -1,4 +1,4 @@
-error: def-path(<[u8; _] as Foo>::baz)
+error: def-path(<[u8; 1 + 2] as Foo>::baz)
   --> $DIR/impl2.rs:11:5
    |
 LL |     #[rustc_def_path]
index a45c5bd45880fde209a40cb74048715bf0a58c05..7635f579d66b94cd9e2ce76388f132e3517d1fee 100644 (file)
@@ -13,7 +13,7 @@ LL | use test as y;
 help: consider importing this module instead
    |
 LL | use test::test as y;
-   |     ~~~~~~~~~~~~~~~~
+   |     ~~~~~~~~~~~~~~~
 
 error: aborting due to 2 previous errors
 
index 68b95b42b3463f32261aeb309084473118464ed1..86c511c0895670325eede43ff595e3db6e964e50 100644 (file)
@@ -4,6 +4,16 @@ error[E0277]: the trait bound `T: TraitFoo` is not satisfied
 LL | impl<T> Copy for Foo<T> {}
    |                  ^^^^^^ the trait `TraitFoo` is not implemented for `T`
    |
+note: required for `Foo<T>` to implement `Clone`
+  --> $DIR/copy-impl-cannot-normalize.rs:12:9
+   |
+LL | impl<T> Clone for Foo<T>
+   |         ^^^^^     ^^^^^^
+LL | where
+LL |     T: TraitFoo,
+   |        -------- unsatisfied trait bound introduced here
+note: required by a bound in `Copy`
+  --> $SRC_DIR/core/src/marker.rs:LL:COL
 help: consider restricting type parameter `T`
    |
 LL | impl<T: TraitFoo> Copy for Foo<T> {}
diff --git a/tests/ui/traits/copy-is-not-modulo-regions.not_static.stderr b/tests/ui/traits/copy-is-not-modulo-regions.not_static.stderr
new file mode 100644 (file)
index 0000000..edd94d2
--- /dev/null
@@ -0,0 +1,22 @@
+error[E0204]: the trait `Copy` may not be implemented for this type
+  --> $DIR/copy-is-not-modulo-regions.rs:13:21
+   |
+LL | struct Bar<'lt>(Foo<'lt>);
+   |                 -------- this field does not implement `Copy`
+...
+LL | impl<'any> Copy for Bar<'any> {}
+   |                     ^^^^^^^^^
+   |
+note: the `Copy` impl for `Foo<'any>` requires that `'any: 'static`
+  --> $DIR/copy-is-not-modulo-regions.rs:10:17
+   |
+LL | struct Bar<'lt>(Foo<'lt>);
+   |                 ^^^^^^^^
+help: consider restricting type parameter `'any`
+   |
+LL | impl<'any: 'static> Copy for Bar<'any> {}
+   |          +++++++++
+
+error: aborting due to previous error
+
+For more information about this error, try `rustc --explain E0204`.
diff --git a/tests/ui/traits/copy-is-not-modulo-regions.rs b/tests/ui/traits/copy-is-not-modulo-regions.rs
new file mode 100644 (file)
index 0000000..adb8702
--- /dev/null
@@ -0,0 +1,19 @@
+// revisions: not_static yes_static
+//[yes_static] check-pass
+
+#[derive(Clone)]
+struct Foo<'lt>(&'lt ());
+
+impl Copy for Foo<'static> {}
+
+#[derive(Clone)]
+struct Bar<'lt>(Foo<'lt>);
+
+#[cfg(not_static)]
+impl<'any> Copy for Bar<'any> {}
+//[not_static]~^ the trait `Copy` may not be implemented for this type
+
+#[cfg(yes_static)]
+impl<'any> Copy for Bar<'static> {}
+
+fn main() {}
diff --git a/tests/ui/traits/copy-requires-self-wf.rs b/tests/ui/traits/copy-requires-self-wf.rs
new file mode 100644 (file)
index 0000000..9abfdfa
--- /dev/null
@@ -0,0 +1,14 @@
+// check-pass
+
+#[derive(Clone)]
+struct A<'a, T>(&'a T);
+
+impl<'a, T: Copy + 'a> Copy for A<'a, T> {}
+
+#[derive(Clone)]
+struct B<'a, T>(A<'a, T>);
+
+// `T: '_` should be implied by `WF(B<'_, T>)`.
+impl<T: Copy> Copy for B<'_, T> {}
+
+fn main() {}
diff --git a/tests/ui/traits/issue-106072.rs b/tests/ui/traits/issue-106072.rs
new file mode 100644 (file)
index 0000000..7064a39
--- /dev/null
@@ -0,0 +1,5 @@
+#[derive(Clone)] //~  trait objects must include the `dyn` keyword
+                 //~| trait objects must include the `dyn` keyword
+struct Foo;
+trait Foo {} //~ the name `Foo` is defined multiple times
+fn main() {}
diff --git a/tests/ui/traits/issue-106072.stderr b/tests/ui/traits/issue-106072.stderr
new file mode 100644 (file)
index 0000000..f9b7b81
--- /dev/null
@@ -0,0 +1,30 @@
+error[E0428]: the name `Foo` is defined multiple times
+  --> $DIR/issue-106072.rs:4:1
+   |
+LL | struct Foo;
+   | ----------- previous definition of the type `Foo` here
+LL | trait Foo {}
+   | ^^^^^^^^^ `Foo` redefined here
+   |
+   = note: `Foo` must be defined only once in the type namespace of this module
+
+error[E0782]: trait objects must include the `dyn` keyword
+  --> $DIR/issue-106072.rs:1:10
+   |
+LL | #[derive(Clone)]
+   |          ^^^^^
+   |
+   = note: this error originates in the derive macro `Clone` (in Nightly builds, run with -Z macro-backtrace for more info)
+
+error[E0782]: trait objects must include the `dyn` keyword
+  --> $DIR/issue-106072.rs:1:10
+   |
+LL | #[derive(Clone)]
+   |          ^^^^^
+   |
+   = note: this error originates in the derive macro `Clone` (in Nightly builds, run with -Z macro-backtrace for more info)
+
+error: aborting due to 3 previous errors
+
+Some errors have detailed explanations: E0428, E0782.
+For more information about an error, try `rustc --explain E0428`.
index 10597caf5b2dc09759e2a3430a26f592b7938e68..005939e0c46e4d39093d411cfaa3a832cdaa9545 100644 (file)
@@ -5,13 +5,11 @@
 //~| ERROR cannot find type `NotDefined` in this scope
 //~| ERROR cannot find type `N` in this scope
 //~| ERROR cannot find type `N` in this scope
-//~| ERROR `i32` is not an iterator
 
 #[derive(Clone, Copy)]
 //~^ ERROR the trait `Copy` may not be implemented for this type
 struct Bar<T>(T, N, NotDefined, <i32 as Iterator>::Item, Vec<i32>, String);
 //~^ ERROR cannot find type `NotDefined` in this scope
 //~| ERROR cannot find type `N` in this scope
-//~| ERROR `i32` is not an iterator
 
 fn main() {}
index aa8384e98053969edf0601e4739900f1c7a843f4..5063fdca092731aeb154a9b66a55cff4b94006bb 100644 (file)
@@ -38,7 +38,7 @@ LL | struct Foo<NotDefined>(N, NotDefined, <i32 as Iterator>::Item, Vec<i32>, St
    |           ++++++++++++
 
 error[E0412]: cannot find type `N` in this scope
-  --> $DIR/issue-50480.rs:12:18
+  --> $DIR/issue-50480.rs:11:18
    |
 LL | struct Bar<T>(T, N, NotDefined, <i32 as Iterator>::Item, Vec<i32>, String);
    |            -     ^
@@ -55,20 +55,11 @@ LL | struct Bar<T, N>(T, N, NotDefined, <i32 as Iterator>::Item, Vec<i32>, Strin
    |             +++
 
 error[E0412]: cannot find type `NotDefined` in this scope
-  --> $DIR/issue-50480.rs:12:21
+  --> $DIR/issue-50480.rs:11:21
    |
 LL | struct Bar<T>(T, N, NotDefined, <i32 as Iterator>::Item, Vec<i32>, String);
    |                     ^^^^^^^^^^ not found in this scope
 
-error[E0277]: `i32` is not an iterator
-  --> $DIR/issue-50480.rs:3:27
-   |
-LL | struct Foo(N, NotDefined, <i32 as Iterator>::Item, Vec<i32>, String);
-   |                           ^^^^^^^^^^^^^^^^^^^^^^^ `i32` is not an iterator
-   |
-   = help: the trait `Iterator` is not implemented for `i32`
-   = note: if you want to iterate between `start` until a value `end`, use the exclusive range syntax `start..end` or the inclusive range syntax `start..=end`
-
 error[E0204]: the trait `Copy` may not be implemented for this type
   --> $DIR/issue-50480.rs:1:17
    |
@@ -82,17 +73,8 @@ LL | struct Foo(N, NotDefined, <i32 as Iterator>::Item, Vec<i32>, String);
    |
    = note: this error originates in the derive macro `Copy` (in Nightly builds, run with -Z macro-backtrace for more info)
 
-error[E0277]: `i32` is not an iterator
-  --> $DIR/issue-50480.rs:12:33
-   |
-LL | struct Bar<T>(T, N, NotDefined, <i32 as Iterator>::Item, Vec<i32>, String);
-   |                                 ^^^^^^^^^^^^^^^^^^^^^^^ `i32` is not an iterator
-   |
-   = help: the trait `Iterator` is not implemented for `i32`
-   = note: if you want to iterate between `start` until a value `end`, use the exclusive range syntax `start..end` or the inclusive range syntax `start..=end`
-
 error[E0204]: the trait `Copy` may not be implemented for this type
-  --> $DIR/issue-50480.rs:10:17
+  --> $DIR/issue-50480.rs:9:17
    |
 LL | #[derive(Clone, Copy)]
    |                 ^^^^
@@ -104,7 +86,7 @@ LL | struct Bar<T>(T, N, NotDefined, <i32 as Iterator>::Item, Vec<i32>, String);
    |
    = note: this error originates in the derive macro `Copy` (in Nightly builds, run with -Z macro-backtrace for more info)
 
-error: aborting due to 10 previous errors
+error: aborting due to 8 previous errors
 
-Some errors have detailed explanations: E0204, E0277, E0412.
+Some errors have detailed explanations: E0204, E0412.
 For more information about an error, try `rustc --explain E0204`.
diff --git a/tests/ui/traits/new-solver/fn-trait-closure.rs b/tests/ui/traits/new-solver/fn-trait-closure.rs
new file mode 100644 (file)
index 0000000..bd65737
--- /dev/null
@@ -0,0 +1,8 @@
+// compile-flags: -Ztrait-solver=next
+// check-pass
+
+fn require_fn(_: impl Fn() -> i32) {}
+
+fn main() {
+    require_fn(|| -> i32 { 1i32 });
+}
diff --git a/tests/ui/traits/new-solver/fn-trait.rs b/tests/ui/traits/new-solver/fn-trait.rs
new file mode 100644 (file)
index 0000000..d566ead
--- /dev/null
@@ -0,0 +1,13 @@
+// compile-flags: -Ztrait-solver=next
+// check-pass
+
+fn require_fn(_: impl Fn() -> i32) {}
+
+fn f() -> i32 {
+    1i32
+}
+
+fn main() {
+    require_fn(f);
+    require_fn(f as fn() -> i32);
+}
diff --git a/tests/ui/traits/new-solver/pointer-sized.rs b/tests/ui/traits/new-solver/pointer-sized.rs
new file mode 100644 (file)
index 0000000..15681cd
--- /dev/null
@@ -0,0 +1,12 @@
+#![feature(pointer_sized_trait)]
+
+use std::marker::PointerSized;
+
+fn require_pointer_sized(_: impl PointerSized) {}
+
+fn main() {
+    require_pointer_sized(1usize);
+    require_pointer_sized(1u16);
+    //~^ ERROR `u16` needs to be a pointer-sized type
+    require_pointer_sized(&1i16);
+}
diff --git a/tests/ui/traits/new-solver/pointer-sized.stderr b/tests/ui/traits/new-solver/pointer-sized.stderr
new file mode 100644 (file)
index 0000000..b250b13
--- /dev/null
@@ -0,0 +1,24 @@
+error[E0277]: `u16` needs to be a pointer-sized type
+  --> $DIR/pointer-sized.rs:9:27
+   |
+LL |     require_pointer_sized(1u16);
+   |     --------------------- ^^^^ the trait `PointerSized` is not implemented for `u16`
+   |     |
+   |     required by a bound introduced by this call
+   |
+   = note: the trait bound `u16: PointerSized` is not satisfied
+note: required by a bound in `require_pointer_sized`
+  --> $DIR/pointer-sized.rs:5:34
+   |
+LL | fn require_pointer_sized(_: impl PointerSized) {}
+   |                                  ^^^^^^^^^^^^ required by this bound in `require_pointer_sized`
+help: consider borrowing here
+   |
+LL |     require_pointer_sized(&1u16);
+   |                           +
+LL |     require_pointer_sized(&mut 1u16);
+   |                           ++++
+
+error: aborting due to previous error
+
+For more information about this error, try `rustc --explain E0277`.
index 4d2890b5de583c48bc63a8e6d727e4d3516f03f1..0bcc9e002ca0449040b54f47cade01bcc9e0b259 100644 (file)
@@ -14,5 +14,5 @@ impl<W> Trait<W> for () {}
 
 fn foo_desugared<T: TraitWithAssoc>(_: T) -> Foo<T::Assoc> {
     ()
-    //~^ ERROR non-defining opaque type use
+    //~^ ERROR expected generic type parameter, found `<T as TraitWithAssoc>::Assoc`
 }
index c405b1f6af2057428a78bf0dc88993d954d99ad7..3c259bd9e97cc08c2d3e04eaf57ac048d94924d9 100644 (file)
@@ -1,14 +1,12 @@
-error: non-defining opaque type use in defining scope
+error[E0792]: expected generic type parameter, found `<T as TraitWithAssoc>::Assoc`
   --> $DIR/bound_reduction2.rs:16:5
    |
+LL | type Foo<V> = impl Trait<V>;
+   |          - this generic parameter must be used with a generic type parameter
+...
 LL |     ()
    |     ^^
-   |
-note: used non-generic type `<T as TraitWithAssoc>::Assoc` for generic parameter
-  --> $DIR/bound_reduction2.rs:9:10
-   |
-LL | type Foo<V> = impl Trait<V>;
-   |          ^
 
 error: aborting due to previous error
 
+For more information about this error, try `rustc --explain E0792`.
index f39741a6a625cc2d693309227e45982ad3515156..f5045d382aac4ed63c4be46e727c657ea6444a1c 100644 (file)
@@ -10,12 +10,11 @@ fn main() {}
 
 type OneConst<const X: usize> = impl Debug;
 
-
 // Not defining uses, because they don't define *all* possible generics.
 
 fn concrete_ty() -> OneTy<u32> {
     5u32
-    //~^ ERROR non-defining opaque type use in defining scope
+    //~^ ERROR expected generic type parameter, found `u32`
 }
 
 fn concrete_lifetime() -> OneLifetime<'static> {
@@ -25,5 +24,5 @@ fn concrete_lifetime() -> OneLifetime<'static> {
 
 fn concrete_const() -> OneConst<{ 123 }> {
     7u32
-    //~^ ERROR non-defining opaque type use in defining scope
+    //~^ ERROR expected generic constant parameter, found `123`
 }
index e7565525ad3387396ac5c7f9d2efd85ae366943f..564648630b16122127ce6a1c026785740f68c118 100644 (file)
@@ -1,17 +1,14 @@
-error: non-defining opaque type use in defining scope
-  --> $DIR/generic_nondefining_use.rs:17:5
+error[E0792]: expected generic type parameter, found `u32`
+  --> $DIR/generic_nondefining_use.rs:16:5
    |
+LL | type OneTy<T> = impl Debug;
+   |            - this generic parameter must be used with a generic type parameter
+...
 LL |     5u32
    |     ^^^^
-   |
-note: used non-generic type `u32` for generic parameter
-  --> $DIR/generic_nondefining_use.rs:7:12
-   |
-LL | type OneTy<T> = impl Debug;
-   |            ^
 
 error: non-defining opaque type use in defining scope
-  --> $DIR/generic_nondefining_use.rs:22:5
+  --> $DIR/generic_nondefining_use.rs:21:5
    |
 LL | type OneLifetime<'a> = impl Debug;
    |                  -- cannot use static lifetime; use a bound lifetime instead or remove the lifetime parameter from the opaque type
@@ -19,17 +16,15 @@ LL | type OneLifetime<'a> = impl Debug;
 LL |     6u32
    |     ^^^^
 
-error: non-defining opaque type use in defining scope
-  --> $DIR/generic_nondefining_use.rs:27:5
+error[E0792]: expected generic constant parameter, found `123`
+  --> $DIR/generic_nondefining_use.rs:26:5
    |
+LL | type OneConst<const X: usize> = impl Debug;
+   |               -------------- this generic parameter must be used with a generic constant parameter
+...
 LL |     7u32
    |     ^^^^
-   |
-note: used non-generic constant `123` for generic parameter
-  --> $DIR/generic_nondefining_use.rs:11:15
-   |
-LL | type OneConst<const X: usize> = impl Debug;
-   |               ^^^^^^^^^^^^^^
 
 error: aborting due to 3 previous errors
 
+For more information about this error, try `rustc --explain E0792`.
index cb90776472b5dc543fd395548d0cbe41082ca61f..d3e169a70d3f7f30471d26e9186092e2a83ad880 100644 (file)
@@ -4,7 +4,7 @@ fn main() {
     let y = 42;
     let x = wrong_generic(&y);
     let z: i32 = x;
-    //~^ ERROR non-defining opaque type use
+    //~^ ERROR expected generic type parameter, found `&'static i32
 }
 
 type WrongGeneric<T> = impl 'static;
index ba583241a696b57dc23c5ac20908c54d29abfb29..19115fd28662be97e91c77088a4f7b2cb5c299a4 100644 (file)
@@ -4,17 +4,14 @@ error: at least one trait must be specified
 LL | type WrongGeneric<T> = impl 'static;
    |                        ^^^^^^^^^^^^
 
-error: non-defining opaque type use in defining scope
+error[E0792]: expected generic type parameter, found `&'static i32`
   --> $DIR/generic_type_does_not_live_long_enough.rs:6:18
    |
 LL |     let z: i32 = x;
    |                  ^
-   |
-note: used non-generic type `&'static i32` for generic parameter
-  --> $DIR/generic_type_does_not_live_long_enough.rs:10:19
-   |
+...
 LL | type WrongGeneric<T> = impl 'static;
-   |                   ^
+   |                   - this generic parameter must be used with a generic type parameter
 
 error[E0310]: the parameter type `T` may not live long enough
   --> $DIR/generic_type_does_not_live_long_enough.rs:14:5
@@ -29,4 +26,5 @@ LL | fn wrong_generic<T: 'static>(t: T) -> WrongGeneric<T> {
 
 error: aborting due to 3 previous errors
 
-For more information about this error, try `rustc --explain E0310`.
+Some errors have detailed explanations: E0310, E0792.
+For more information about an error, try `rustc --explain E0310`.
diff --git a/tests/ui/type-alias-impl-trait/issue-104817.rs b/tests/ui/type-alias-impl-trait/issue-104817.rs
new file mode 100644 (file)
index 0000000..0d3bace
--- /dev/null
@@ -0,0 +1,19 @@
+#![feature(type_alias_impl_trait)]
+#![cfg_attr(specialized, feature(specialization))]
+#![allow(incomplete_features)]
+
+// revisions: stock specialized
+// [specialized]check-pass
+
+trait OpaqueTrait {}
+impl<T> OpaqueTrait for T {}
+type OpaqueType = impl OpaqueTrait;
+fn mk_opaque() -> OpaqueType {
+    || 0
+}
+trait AnotherTrait {}
+impl<T: Send> AnotherTrait for T {}
+impl AnotherTrait for OpaqueType {}
+//[stock]~^ conflicting implementations of trait `AnotherTrait` for type `OpaqueType`
+
+fn main() {}
diff --git a/tests/ui/type-alias-impl-trait/issue-104817.stock.stderr b/tests/ui/type-alias-impl-trait/issue-104817.stock.stderr
new file mode 100644 (file)
index 0000000..47bae8b
--- /dev/null
@@ -0,0 +1,11 @@
+error[E0119]: conflicting implementations of trait `AnotherTrait` for type `OpaqueType`
+  --> $DIR/issue-104817.rs:16:1
+   |
+LL | impl<T: Send> AnotherTrait for T {}
+   | -------------------------------- first implementation here
+LL | impl AnotherTrait for OpaqueType {}
+   | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ conflicting implementation for `OpaqueType`
+
+error: aborting due to previous error
+
+For more information about this error, try `rustc --explain E0119`.
index 438ac35fdea515276a4d7bfb41028aa21fd9ea21..057930f0c1ce759a4d12c4ebff64c43076ab3714 100644 (file)
@@ -4,7 +4,6 @@
 type Bug<T, U> = impl Fn(T) -> U + Copy; //~ ERROR cycle detected
 
 const CONST_BUG: Bug<u8, ()> = unsafe { std::mem::transmute(|_: u8| ()) };
-//~^ ERROR: cannot transmute
 
 fn make_bug<T, U: From<T>>() -> Bug<T, U> {
     |x| x.into() //~ ERROR the trait bound `U: From<T>` is not satisfied
index 1b89d55711dbd690c37f78b37af5a46e868d4cdc..2565a28b493541c1a48bc195e01546d1dfe66160 100644 (file)
@@ -24,23 +24,14 @@ LL | |     CONST_BUG(0);
 LL | | }
    | |_^
 
-error[E0512]: cannot transmute between types of different sizes, or dependently-sized types
-  --> $DIR/issue-53092-2.rs:6:41
-   |
-LL | const CONST_BUG: Bug<u8, ()> = unsafe { std::mem::transmute(|_: u8| ()) };
-   |                                         ^^^^^^^^^^^^^^^^^^^
-   |
-   = note: source type: `[closure@$DIR/issue-53092-2.rs:6:61: 6:68]` (0 bits)
-   = note: target type: `Bug<u8, ()>` (size can vary because of [type error])
-
 error[E0277]: the trait bound `U: From<T>` is not satisfied
-  --> $DIR/issue-53092-2.rs:10:5
+  --> $DIR/issue-53092-2.rs:9:5
    |
 LL |     |x| x.into()
    |     ^^^^^^^^^^^^ the trait `From<T>` is not implemented for `U`
    |
 note: required by a bound in `make_bug`
-  --> $DIR/issue-53092-2.rs:9:19
+  --> $DIR/issue-53092-2.rs:8:19
    |
 LL | fn make_bug<T, U: From<T>>() -> Bug<T, U> {
    |                   ^^^^^^^ required by this bound in `make_bug`
@@ -49,7 +40,7 @@ help: consider restricting type parameter `U`
 LL | type Bug<T, U: std::convert::From<T>> = impl Fn(T) -> U + Copy;
    |              +++++++++++++++++++++++
 
-error: aborting due to 3 previous errors
+error: aborting due to 2 previous errors
 
-Some errors have detailed explanations: E0277, E0391, E0512.
+Some errors have detailed explanations: E0277, E0391.
 For more information about an error, try `rustc --explain E0277`.
index 4fc7679311a2e55b031dd0cf0c460c979476212b..c2f4c37080746f3ef2437046074e3434ecbc38e4 100644 (file)
@@ -18,7 +18,7 @@ impl<T: Copy, E> IterBits for T
     type BitsIter = IterBitsIter<T, E, u8>;
     fn iter_bits(self, n: u8) -> Self::BitsIter {
         (0u8..n).rev().map(move |shift| ((self >> T::from(shift)) & T::from(1)).try_into().unwrap())
-        //~^ ERROR non-defining opaque type use in defining scope
+        //~^ ERROR expected generic type parameter, found `u8`
     }
 }
 
index bbc93657be32f27501c851460adbfb0554ee0826..f8fdb004d098996e1af34800fe48011c533c341f 100644 (file)
@@ -1,14 +1,12 @@
-error: non-defining opaque type use in defining scope
+error[E0792]: expected generic type parameter, found `u8`
   --> $DIR/issue-60564.rs:20:9
    |
+LL | type IterBitsIter<T, E, I> = impl std::iter::Iterator<Item = I>;
+   |                         - this generic parameter must be used with a generic type parameter
+...
 LL |         (0u8..n).rev().map(move |shift| ((self >> T::from(shift)) & T::from(1)).try_into().unwrap())
    |         ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-   |
-note: used non-generic type `u8` for generic parameter
-  --> $DIR/issue-60564.rs:8:25
-   |
-LL | type IterBitsIter<T, E, I> = impl std::iter::Iterator<Item = I>;
-   |                         ^
 
 error: aborting due to previous error
 
+For more information about this error, try `rustc --explain E0792`.
index 5223fb1c702d6753c92e6f6ac2e9938732faf792..5e0a82a72868ab88337ba20251768de6c23193b3 100644 (file)
@@ -7,7 +7,7 @@ trait Trait<T> {}
 type Alias<'a, U> = impl Trait<U>;
 
 fn f<'a>() -> Alias<'a, ()> {}
-//~^ ERROR non-defining opaque type use in defining scope
+//~^ ERROR expected generic type parameter, found `()`
 
 fn main() {}
 
index 7fb9a0c410e83c8c976a78979f913279f462b84e..271743a4010c8faa841a82027d8520e96a019546 100644 (file)
@@ -1,14 +1,12 @@
-error: non-defining opaque type use in defining scope
+error[E0792]: expected generic type parameter, found `()`
   --> $DIR/issue-68368-non-defining-use-2.rs:9:29
    |
+LL | type Alias<'a, U> = impl Trait<U>;
+   |                - this generic parameter must be used with a generic type parameter
+LL |
 LL | fn f<'a>() -> Alias<'a, ()> {}
    |                             ^^
-   |
-note: used non-generic type `()` for generic parameter
-  --> $DIR/issue-68368-non-defining-use-2.rs:7:16
-   |
-LL | type Alias<'a, U> = impl Trait<U>;
-   |                ^
 
 error: aborting due to previous error
 
+For more information about this error, try `rustc --explain E0792`.
index b50462bf237bb9509f9d090097ef9675af48b8b8..3b32260c96fe14b9a84821de686d21de5aec353c 100644 (file)
@@ -7,7 +7,7 @@ trait Trait<T> {}
 type Alias<'a, U> = impl Trait<U>;
 
 fn f<'a>() -> Alias<'a, ()> {}
-//~^ ERROR non-defining opaque type use in defining scope
+//~^ ERROR expected generic type parameter, found `()`
 
 fn main() {}
 
index 8059621b61a096bc84ed17714d7130d44e864d72..4d9a8d6eef9156bbda6c22542093104cd88c504f 100644 (file)
@@ -1,14 +1,12 @@
-error: non-defining opaque type use in defining scope
+error[E0792]: expected generic type parameter, found `()`
   --> $DIR/issue-68368-non-defining-use.rs:9:29
    |
+LL | type Alias<'a, U> = impl Trait<U>;
+   |                - this generic parameter must be used with a generic type parameter
+LL |
 LL | fn f<'a>() -> Alias<'a, ()> {}
    |                             ^^
-   |
-note: used non-generic type `()` for generic parameter
-  --> $DIR/issue-68368-non-defining-use.rs:7:16
-   |
-LL | type Alias<'a, U> = impl Trait<U>;
-   |                ^
 
 error: aborting due to previous error
 
+For more information about this error, try `rustc --explain E0792`.
index 428454bc04844d836b75c3a7fbde6796a1e84b53..7657fe2fb1aee34c86e37a4aa96905f7bb3c3a80 100644 (file)
@@ -18,6 +18,6 @@ impl<T> WithAssoc<T> for () {
 //~^ ERROR use of undeclared lifetime name `'a`
 
 fn my_fun() -> Return<()> {}
-//~^ ERROR non-defining opaque type use in defining scope
+//~^ ERROR expected generic type parameter, found `()`
 
 fn main() {}
index 7b50c8af26e5fe98d74a704b12ff57060e2f612b..d1250786d938c367c5cb2be6ab0ad6a5b693ee45 100644 (file)
@@ -14,18 +14,16 @@ help: consider introducing lifetime `'a` here
 LL | type Return<'a, A> = impl WithAssoc<A, AssocType = impl SomeTrait + 'a>;
    |             +++
 
-error: non-defining opaque type use in defining scope
+error[E0792]: expected generic type parameter, found `()`
   --> $DIR/issue-69136-inner-lifetime-resolve-error.rs:20:27
    |
+LL | type Return<A> = impl WithAssoc<A, AssocType = impl SomeTrait + 'a>;
+   |             - this generic parameter must be used with a generic type parameter
+...
 LL | fn my_fun() -> Return<()> {}
    |                           ^^
-   |
-note: used non-generic type `()` for generic parameter
-  --> $DIR/issue-69136-inner-lifetime-resolve-error.rs:17:13
-   |
-LL | type Return<A> = impl WithAssoc<A, AssocType = impl SomeTrait + 'a>;
-   |             ^
 
 error: aborting due to 2 previous errors
 
-For more information about this error, try `rustc --explain E0261`.
+Some errors have detailed explanations: E0261, E0792.
+For more information about an error, try `rustc --explain E0261`.
index 46621362e4f73a2bc56837348fa7cf133a639fd1..0f0a02e97d82db4054e6531932337b7c3b939a3a 100644 (file)
@@ -15,5 +15,4 @@ pub fn bar(x: Foo) -> Foo {
 
 fn main() {
     let _: foo::Foo = std::mem::transmute(0u8);
-    //~^ ERROR cannot transmute between types of different sizes, or dependently-sized types
 }
index 337708b876524920b548aac95dfca8ffb6d0845f..f3e8ae9c7dbae9ff379197ee592b8de713090669 100644 (file)
@@ -6,15 +6,5 @@ LL |     pub type Foo = impl Copy;
    |
    = note: `Foo` must be used in combination with a concrete type within the same module
 
-error[E0512]: cannot transmute between types of different sizes, or dependently-sized types
-  --> $DIR/no_inferrable_concrete_type.rs:17:23
-   |
-LL |     let _: foo::Foo = std::mem::transmute(0u8);
-   |                       ^^^^^^^^^^^^^^^^^^^
-   |
-   = note: source type: `u8` (8 bits)
-   = note: target type: `Foo` (size can vary because of [type error])
-
-error: aborting due to 2 previous errors
+error: aborting due to previous error
 
-For more information about this error, try `rustc --explain E0512`.
diff --git a/tests/ui/type-alias-impl-trait/outlives-bound-var.rs b/tests/ui/type-alias-impl-trait/outlives-bound-var.rs
new file mode 100644 (file)
index 0000000..b8fac45
--- /dev/null
@@ -0,0 +1,18 @@
+// Here we process outlives obligations involving
+// opaque types with bound vars in substs.
+// This was an ICE.
+//
+// check-pass
+#![feature(type_alias_impl_trait)]
+
+type Ty<'a> = impl Sized + 'a;
+fn define<'a>() -> Ty<'a> {}
+
+// Ty<'^0>: 'static
+fn test1(_: &'static fn(Ty<'_>)) {}
+
+fn test2() {
+    None::<&fn(Ty<'_>)>;
+}
+
+fn main() { }
index be60cda68b9f0fb0ce1951d6d84934f929f10a1d..e544b369515489ba17258e2eac2ea7eb50575cb5 100644 (file)
@@ -2,7 +2,7 @@ error[E0282]: type annotations needed for `(Vec<T>,)`
   --> $DIR/cannot_infer_local_or_vec_in_tuples.rs:2:9
    |
 LL |     let (x, ) = (vec![], );
-   |         ^^^^^
+   |         ^^^^^   ---------- type must be known at this point
    |
 help: consider giving this pattern a type, where the type for type parameter `T` is specified
    |
index 5561673f3c6728e6bba507c3fd1305e82d85b498..42cfe38aed888e0b8dd69e61f44a5f03c670c1ce 100644 (file)
@@ -3,11 +3,6 @@ error[E0405]: cannot find trait `Oops` in this scope
    |
 LL |     let _: S<impl Oops> = S;
    |                   ^^^^ not found in this scope
-   |
-help: you might be missing a type parameter
-   |
-LL | fn f<Oops>() {
-   |     ++++++
 
 error[E0562]: `impl Trait` only allowed in function and inherent method return types, not in variable binding
   --> $DIR/issue-104513-ice.rs:3:14
index bbd3eec2a54317a0e1df3451628d5c4762e5240a..ea737c567b96047f01e68084dd2bc7aa215ca689 100644 (file)
@@ -7,7 +7,7 @@ LL |     use Trait;
 help: consider importing this trait instead
    |
 LL |     use a::Trait;
-   |         ~~~~~~~~~
+   |         ~~~~~~~~
 
 error[E0405]: cannot find trait `Trait` in this scope
   --> $DIR/unresolved-candidates.rs:10:10
index f9732d02cb285c57fa06dff908511071f3387a19..4df2d8da3d6806ca5fa44b440f906204b1d329b0 100644 (file)
@@ -1,4 +1,4 @@
-error[E0208]: [o]
+error: [o]
   --> $DIR/variance-associated-consts.rs:13:1
    |
 LL | struct Foo<T: Trait> {
@@ -6,4 +6,3 @@ LL | struct Foo<T: Trait> {
 
 error: aborting due to previous error
 
-For more information about this error, try `rustc --explain E0208`.
index 5ce62884e1d83ee15bf557e1a6271b6c6d34a753..51f17c7c2288720941d54229652087e97efdc416 100644 (file)
@@ -1,10 +1,10 @@
-error[E0208]: [-, +]
+error: [-, +]
   --> $DIR/variance-associated-types.rs:13:1
    |
 LL | struct Foo<'a, T : Trait<'a>> {
    | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
 
-error[E0208]: [o, o]
+error: [o, o]
   --> $DIR/variance-associated-types.rs:18:1
    |
 LL | struct Bar<'a, T : Trait<'a>> {
@@ -12,4 +12,3 @@ LL | struct Bar<'a, T : Trait<'a>> {
 
 error: aborting due to 2 previous errors
 
-For more information about this error, try `rustc --explain E0208`.
index 1c3c1a6d1f223ce564326a353016f169af01485d..55a760425ee592e6ab24d38560e0967796b1c86e 100644 (file)
@@ -1,4 +1,4 @@
-error[E0208]: [o]
+error: [o]
   --> $DIR/variance-object-types.rs:7:1
    |
 LL | struct Foo<'a> {
@@ -6,4 +6,3 @@ LL | struct Foo<'a> {
 
 error: aborting due to previous error
 
-For more information about this error, try `rustc --explain E0208`.
index 27d69b6e82575d319506341a2e3b9b6204e6c980..eda02e9b03bb89b49fd1f3ea81c5fb6071c022d9 100644 (file)
@@ -1,40 +1,40 @@
-error[E0208]: [-, -, -]
+error: [-, -, -]
   --> $DIR/variance-regions-direct.rs:9:1
    |
 LL | struct Test2<'a, 'b, 'c> {
    | ^^^^^^^^^^^^^^^^^^^^^^^^
 
-error[E0208]: [+, +, +]
+error: [+, +, +]
   --> $DIR/variance-regions-direct.rs:18:1
    |
 LL | struct Test3<'a, 'b, 'c> {
    | ^^^^^^^^^^^^^^^^^^^^^^^^
 
-error[E0208]: [-, o]
+error: [-, o]
   --> $DIR/variance-regions-direct.rs:27:1
    |
 LL | struct Test4<'a, 'b:'a> {
    | ^^^^^^^^^^^^^^^^^^^^^^^
 
-error[E0208]: [+, o]
+error: [+, o]
   --> $DIR/variance-regions-direct.rs:35:1
    |
 LL | struct Test5<'a, 'b:'a> {
    | ^^^^^^^^^^^^^^^^^^^^^^^
 
-error[E0208]: [-, o]
+error: [-, o]
   --> $DIR/variance-regions-direct.rs:45:1
    |
 LL | struct Test6<'a, 'b:'a> {
    | ^^^^^^^^^^^^^^^^^^^^^^^
 
-error[E0208]: [*]
+error: [*]
   --> $DIR/variance-regions-direct.rs:52:1
    |
 LL | struct Test7<'a> {
    | ^^^^^^^^^^^^^^^^
 
-error[E0208]: [+, -, o]
+error: [+, -, o]
   --> $DIR/variance-regions-direct.rs:59:1
    |
 LL | enum Test8<'a, 'b, 'c:'b> {
@@ -42,4 +42,3 @@ LL | enum Test8<'a, 'b, 'c:'b> {
 
 error: aborting due to 7 previous errors
 
-For more information about this error, try `rustc --explain E0208`.
index 535e97db3fb10fce823aa4a53d9182ba5e0fadbc..fa2f4d507f3d53ff11391a7a520182ef1b2e9ca6 100644 (file)
@@ -1,28 +1,28 @@
-error[E0208]: [+, -, o, *]
+error: [+, -, o, *]
   --> $DIR/variance-regions-indirect.rs:8:1
    |
 LL | enum Base<'a, 'b, 'c:'b, 'd> {
    | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^
 
-error[E0208]: [*, o, -, +]
+error: [*, o, -, +]
   --> $DIR/variance-regions-indirect.rs:15:1
    |
 LL | struct Derived1<'w, 'x:'y, 'y, 'z> {
    | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
 
-error[E0208]: [o, o, *]
+error: [o, o, *]
   --> $DIR/variance-regions-indirect.rs:20:1
    |
 LL | struct Derived2<'a, 'b:'a, 'c> {
    | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
 
-error[E0208]: [o, -, *]
+error: [o, -, *]
   --> $DIR/variance-regions-indirect.rs:25:1
    |
 LL | struct Derived3<'a:'b, 'b, 'c> {
    | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
 
-error[E0208]: [+, -, o]
+error: [+, -, o]
   --> $DIR/variance-regions-indirect.rs:30:1
    |
 LL | struct Derived4<'a, 'b, 'c:'b> {
@@ -30,4 +30,3 @@ LL | struct Derived4<'a, 'b, 'c:'b> {
 
 error: aborting due to 5 previous errors
 
-For more information about this error, try `rustc --explain E0208`.
index 3f6ca62a64069776d37fb0c7e966ef9f8be3c4ab..5a73e541c3a173d047232305e99b7fcb65a8c1a6 100644 (file)
@@ -1,22 +1,22 @@
-error[E0208]: [+, +]
+error: [+, +]
   --> $DIR/variance-trait-bounds.rs:16:1
    |
 LL | struct TestStruct<U,T:Setter<U>> {
    | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
 
-error[E0208]: [*, +]
+error: [*, +]
   --> $DIR/variance-trait-bounds.rs:21:1
    |
 LL | enum TestEnum<U,T:Setter<U>> {
    | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^
 
-error[E0208]: [*, +]
+error: [*, +]
   --> $DIR/variance-trait-bounds.rs:26:1
    |
 LL | struct TestContraStruct<U,T:Setter<U>> {
    | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
 
-error[E0208]: [*, +]
+error: [*, +]
   --> $DIR/variance-trait-bounds.rs:31:1
    |
 LL | struct TestBox<U,T:Getter<U>+Setter<U>> {
@@ -24,4 +24,3 @@ LL | struct TestBox<U,T:Getter<U>+Setter<U>> {
 
 error: aborting due to 4 previous errors
 
-For more information about this error, try `rustc --explain E0208`.
index 9a2c924b96a9539fc07f2dde3508b91d12f09f96..7c46b553f43949459b0417b3dcf3d9e325e43ef5 100644 (file)
@@ -1,4 +1,4 @@
-error[E0208]: [-]
+error: [-]
   --> $DIR/variance-trait-object-bound.rs:14:1
    |
 LL | struct TOption<'a> {
@@ -6,4 +6,3 @@ LL | struct TOption<'a> {
 
 error: aborting due to previous error
 
-For more information about this error, try `rustc --explain E0208`.
index 523763b8a07b44340cf9f47e16278a6516a74d71..bb81644347693b4eb710565fffd2457f8a746ffb 100644 (file)
@@ -1,28 +1,28 @@
-error[E0208]: [+, +]
+error: [+, +]
   --> $DIR/variance-types-bounds.rs:7:1
    |
 LL | struct TestImm<A, B> {
    | ^^^^^^^^^^^^^^^^^^^^
 
-error[E0208]: [+, o]
+error: [+, o]
   --> $DIR/variance-types-bounds.rs:13:1
    |
 LL | struct TestMut<A, B:'static> {
    | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^
 
-error[E0208]: [+, o]
+error: [+, o]
   --> $DIR/variance-types-bounds.rs:19:1
    |
 LL | struct TestIndirect<A:'static, B:'static> {
    | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
 
-error[E0208]: [o, o]
+error: [o, o]
   --> $DIR/variance-types-bounds.rs:24:1
    |
 LL | struct TestIndirect2<A:'static, B:'static> {
    | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
 
-error[E0208]: [o, o]
+error: [o, o]
   --> $DIR/variance-types-bounds.rs:38:1
    |
 LL | struct TestObject<A, R> {
@@ -30,4 +30,3 @@ LL | struct TestObject<A, R> {
 
 error: aborting due to 5 previous errors
 
-For more information about this error, try `rustc --explain E0208`.
index 5a5aaecffc5eb8d32ecb0b6b211cc25a66fbb1b4..9f7f1d9b0e332fcbfde63357d01fcf9a1440d0f9 100644 (file)
@@ -1,34 +1,34 @@
-error[E0208]: [-, o, o]
+error: [-, o, o]
   --> $DIR/variance-types.rs:10:1
    |
 LL | struct InvariantMut<'a,A:'a,B:'a> {
    | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
 
-error[E0208]: [o]
+error: [o]
   --> $DIR/variance-types.rs:15:1
    |
 LL | struct InvariantCell<A> {
    | ^^^^^^^^^^^^^^^^^^^^^^^
 
-error[E0208]: [o]
+error: [o]
   --> $DIR/variance-types.rs:20:1
    |
 LL | struct InvariantIndirect<A> {
    | ^^^^^^^^^^^^^^^^^^^^^^^^^^^
 
-error[E0208]: [+]
+error: [+]
   --> $DIR/variance-types.rs:25:1
    |
 LL | struct Covariant<A> {
    | ^^^^^^^^^^^^^^^^^^^
 
-error[E0208]: [-]
+error: [-]
   --> $DIR/variance-types.rs:30:1
    |
 LL | struct Contravariant<A> {
    | ^^^^^^^^^^^^^^^^^^^^^^^
 
-error[E0208]: [+, -, o]
+error: [+, -, o]
   --> $DIR/variance-types.rs:35:1
    |
 LL | enum Enum<A,B,C> {
@@ -36,4 +36,3 @@ LL | enum Enum<A,B,C> {
 
 error: aborting due to 6 previous errors
 
-For more information about this error, try `rustc --explain E0208`.
index 14bade6472fa63b500ca6f0f6202a3ad01e4436d..16a3132151374d31dda0f22a0e822163a41e477b 100644 (file)
@@ -175,7 +175,7 @@ exclude_labels = [
     "T-*",
 ]
 
-[autolabel."A-bootstrap"]
+[autolabel."T-bootstrap"]
 trigger_files = [
     "x.py",
     "x",
@@ -185,7 +185,6 @@ trigger_files = [
     "src/tools/x",
     "configure",
     "Cargo.toml",
-    "Cargo.lock",
     "config.toml.example",
     "src/stage0.json"
 ]
@@ -494,6 +493,8 @@ libs = [
 ]
 bootstrap = [
     "@Mark-Simulacrum",
+    "@albertlarsan68",
+    "@ozkanonur",
 ]
 infra-ci = [
     "@Mark-Simulacrum",